answer stringlengths 17 10.2M |
|---|
package com.mesosphere.dcos.cassandra.executor.backup;
import static com.mesosphere.dcos.cassandra.executor.backup.azure.PageBlobOutputStream.ORIGINAL_SIZE_KEY;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URISyntaxException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.security.InvalidKeyException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xerial.snappy.SnappyInputStream;
import org.xerial.snappy.SnappyOutputStream;
import com.mesosphere.dcos.cassandra.common.tasks.backup.BackupRestoreContext;
import com.mesosphere.dcos.cassandra.executor.backup.azure.PageBlobInputStream;
import com.mesosphere.dcos.cassandra.executor.backup.azure.PageBlobOutputStream;
import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudPageBlob;
import com.microsoft.azure.storage.blob.ListBlobItem;
/**
* Implements a BackupStorageDriver that provides upload and download
* functionality to an Azure Storage using Page Blobs.
* Page Blobs allow for 1TB file sizes.
* Page Blobs require a Storage Account (but NOT a blob storage account)
*/
public class AzureStorageDriver implements BackupStorageDriver {
private static final Logger logger = LoggerFactory.getLogger(AzureStorageDriver.class);
private static final int DEFAULT_PART_SIZE_UPLOAD = 4 * 1024 * 1024; // Chunk size set to 4MB
private static final int DEFAULT_PART_SIZE_DOWNLOAD = 4 * 1024 * 1024; // Chunk size set to 4MB
/**
 * Uploads all snapshot files for the backup named in {@code ctx} to Azure
 * page-blob storage, grouping blobs under the key "&lt;backupName&gt;/&lt;nodeId&gt;".
 * Walks data/&lt;keyspace&gt;/&lt;cf&gt;/snapshots/&lt;backupName&gt;/ and uploads each valid
 * snapshot directory. Logs and returns early when the container cannot be
 * reached or the local data directory does not exist.
 *
 * @param ctx backup context carrying account credentials and locations.
 * @throws Exception if uploading a snapshot directory fails.
 */
@Override
public void upload(final BackupRestoreContext ctx) throws Exception {
    final String accountName = ctx.getAccountId();
    final String accountKey = ctx.getSecretKey();
    final String localLocation = ctx.getLocalLocation();
    final String backupName = ctx.getName();
    final String nodeId = ctx.getNodeId();
    // All blobs for this node live under <backupName>/<nodeId>.
    final String key = String.format("%s/%s", backupName, nodeId);
    // lowerCase: Azure container names must be lower case.
    final String containerName = StringUtils.lowerCase(getContainerName(ctx.getExternalLocation()));
    final CloudBlobContainer container = getCloudBlobContainer(accountName, accountKey, containerName);
    final File dataDirectory = new File(localLocation);
    if (container == null || !dataDirectory.isDirectory()) {
        logger.error("Error uploading snapshots. Unable to connect to {}, for container {} or Directory {} doesn't exist.",
            ctx.getExternalLocation(), containerName, localLocation);
        return;
    }
    // Ex: data/<keyspace>/<cf>/snapshots/</snapshot-dir>/<files>
    // NOTE(review): listFiles() can return null on an I/O error — TODO confirm
    // callers guarantee the data directory is readable.
    for (final File keyspaceDir : dataDirectory.listFiles()) {
        if (keyspaceDir.isFile()) {
            // Skip any files in the data directory.
            // Only enter keyspace directory.
            continue;
        }
        logger.info("Entering keyspace: {}", keyspaceDir.getName());
        for (final File cfDir : keyspaceDir.listFiles()) {
            logger.info("Entering column family: {}", cfDir.getName());
            final File snapshotDir = new File(cfDir, "snapshots");
            final File backupDir = new File(snapshotDir, backupName);
            if (!StorageUtil.isValidBackupDir(keyspaceDir, cfDir, snapshotDir, backupDir)) {
                logger.info("Skipping directory: {}", snapshotDir.getAbsolutePath());
                continue;
            }
            logger.info(
                "Valid backup directories. KeyspaceDir: {} | ColumnFamilyDir: {} | SnapshotDir: {} | BackupName: {}",
                keyspaceDir.getAbsolutePath(), cfDir.getAbsolutePath(),
                snapshotDir.getAbsolutePath(), backupName);
            final Optional<File> snapshotDirectory = StorageUtil.getValidSnapshotDirectory(snapshotDir, backupName);
            logger.info("Valid snapshot directory: {}", snapshotDirectory.isPresent());
            if (snapshotDirectory.isPresent()) {
                logger.info("Going to upload directory: {}", snapshotDirectory.get().getAbsolutePath());
                uploadDirectory(snapshotDirectory.get().getAbsolutePath(), container, containerName, key,
                    keyspaceDir.getName(), cfDir.getName());
            } else {
                logger.warn(
                    "Snapshots directory: {} doesn't contain the current backup directory: {}",
                    snapshotDir.getName(), backupName);
            }
        }
    }
    logger.info("Done uploading snapshots for backup: {}", backupName);
}
/**
 * Recursively walks {@code localLocation} and uploads every regular file to
 * the Azure container under "&lt;key&gt;/&lt;keyspaceName&gt;/&lt;cfName&gt;/&lt;fileName&gt;".
 * Per-file failures are collected so one bad file does not abort the rest,
 * then reported together.
 *
 * @throws Exception if any individual upload failed (aggregated message).
 */
private void uploadDirectory(final String localLocation,
                             final CloudBlobContainer azureContainer,
                             final String containerName,
                             final String key,
                             final String keyspaceName,
                             final String cfName) throws Exception {
    final LinkedList<Exception> exceptions = new LinkedList<>();
    logger.info(
        "uploadDirectory() localLocation: {}, containerName: {}, key: {}, keyspaceName: {}, cfName: {}",
        localLocation, containerName, key, keyspaceName, cfName);
    // Bug fix: Files.walk returns a lazily-populated Stream that holds open
    // directory handles; it must be closed, otherwise it leaks file handles.
    try (final java.util.stream.Stream<java.nio.file.Path> paths =
            Files.walk(FileSystems.getDefault().getPath(localLocation))) {
        paths.forEach(filePath -> {
            final File file = filePath.toFile();
            if (file.isFile()) {
                final String fileKey = key + "/" + keyspaceName + "/" + cfName + "/" + file.getName();
                try {
                    uploadFile(azureContainer, fileKey, file);
                } catch (final Exception e) {
                    // Collect and keep going; all failures are thrown below.
                    exceptions.add(e);
                }
            }
        });
    }
    if (!exceptions.isEmpty()) {
        throw new Exception(exceptions.toString());
    }
}
/**
 * Snappy-compresses {@code sourceFile} and uploads it to the page blob at
 * {@code fileKey}.
 *
 * @throws Exception if the upload fails. Bug fix: storage/URI/IO failures
 *         were previously only logged, so a failed upload still looked like
 *         a successful backup; they now propagate so uploadDirectory can
 *         aggregate them.
 */
private void uploadFile(final CloudBlobContainer container, final String fileKey, final File sourceFile) throws Exception {
    PageBlobOutputStream pageBlobOutputStream = null;
    SnappyOutputStream compress = null;
    BufferedOutputStream bufferedOutputStream = null;
    try (BufferedInputStream inputStream = new BufferedInputStream(new FileInputStream(sourceFile))) {
        logger.info("Initiating upload for file: {} | key: {}",
            sourceFile.getAbsolutePath(), fileKey);
        final CloudPageBlob blob = container.getPageBlobReference(fileKey);
        pageBlobOutputStream = new PageBlobOutputStream(blob);
        bufferedOutputStream = new BufferedOutputStream(pageBlobOutputStream);
        logger.info("Creating Snappy output stream");
        compress = new SnappyOutputStream(bufferedOutputStream, DEFAULT_PART_SIZE_UPLOAD);
        logger.info("Streams initialized. Starting upload");
        IOUtils.copy(inputStream, compress, DEFAULT_PART_SIZE_UPLOAD);
        logger.info("Upload Complete");
    } catch (final Exception e) {
        logger.error("Exception during Upload for key: {}", fileKey, e);
        throw e;
    } finally {
        // Close order matters: compress must close first so it flushes its
        // buffered data through to the page-blob stream.
        IOUtils.closeQuietly(compress);
        IOUtils.closeQuietly(bufferedOutputStream);
        IOUtils.closeQuietly(pageBlobOutputStream);
    }
}
/**
 * Snappy-compresses {@code sourceStream} and uploads it to the page blob at
 * {@code fileKey}; the source stream is always closed.
 *
 * @throws Exception if the upload fails. Bug fix: storage/URI/IO failures
 *         were previously logged and swallowed, hiding failed uploads (e.g.
 *         a schema that never reached storage); they now propagate.
 */
private void uploadStream(final CloudBlobContainer container, final String fileKey, final BufferedInputStream sourceStream) throws Exception {
    PageBlobOutputStream pageBlobOutputStream = null;
    SnappyOutputStream compress = null;
    BufferedOutputStream bufferedOutputStream = null;
    try (BufferedInputStream inputStream = sourceStream) {
        final CloudPageBlob blob = container.getPageBlobReference(fileKey);
        pageBlobOutputStream = new PageBlobOutputStream(blob);
        bufferedOutputStream = new BufferedOutputStream(pageBlobOutputStream);
        logger.info("Creating Snappy output stream");
        compress = new SnappyOutputStream(bufferedOutputStream, DEFAULT_PART_SIZE_UPLOAD);
        logger.info("Streams initialized. Starting upload");
        IOUtils.copy(inputStream, compress, DEFAULT_PART_SIZE_UPLOAD);
        logger.info("Upload Complete");
    } catch (final Exception e) {
        logger.error("Exception during Upload for key: {}", fileKey, e);
        throw e;
    } finally {
        // compress must close first so buffered data is flushed downstream.
        IOUtils.closeQuietly(compress);
        IOUtils.closeQuietly(bufferedOutputStream);
        IOUtils.closeQuietly(pageBlobOutputStream);
    }
}
/**
 * Uploads the schema text to the blob "&lt;backupname&gt;/&lt;node-id&gt;/schema.cql".
 *
 * @throws Exception if the upload fails.
 */
@Override
public void uploadSchema(final BackupRestoreContext ctx, final String schema) throws Exception {
    // Path: <backupname/node-id/schema.cql>
    final String accountName = ctx.getAccountId();
    final String accountKey = ctx.getSecretKey();
    final String backupName = ctx.getName();
    final String nodeId = ctx.getNodeId();
    final String key = String.format("%s/%s", backupName, nodeId);
    final String containerName = StringUtils.lowerCase(getContainerName(ctx.getExternalLocation()));
    final CloudBlobContainer container = getCloudBlobContainer(accountName, accountKey, containerName);
    if (container == null) {
        // Bug fix: the original fell through after logging and dereferenced
        // the null container inside uploadStream, throwing an NPE.
        logger.error("Error uploading schema. Unable to connect to {} for container {}.",
            ctx.getExternalLocation(), containerName);
        return;
    }
    // NOTE(review): IOUtils.toInputStream without a charset uses the platform
    // default encoding — consider an explicit UTF-8 charset.
    final BufferedInputStream inputStream =
        new BufferedInputStream(IOUtils.toInputStream(schema), schema.length());
    final String fileKey = key + "/schema.cql";
    uploadStream(container, fileKey, inputStream);
}
/**
 * Downloads every snapshot file stored under "&lt;backupName&gt;/&lt;nodeId&gt;" into
 * {@code localLocation}, recreating the blob-key directory layout.
 *
 * @throws IOException declared for interface compatibility.
 */
@Override
public void download(final BackupRestoreContext ctx) throws IOException {
    final String accountName = ctx.getAccountId();
    final String accountKey = ctx.getSecretKey();
    final String localLocation = ctx.getLocalLocation();
    final String backupName = ctx.getName();
    final String nodeId = ctx.getNodeId();
    final String containerName = StringUtils.lowerCase(getContainerName(ctx.getExternalLocation()));
    final CloudBlobContainer container = getCloudBlobContainer(accountName, accountKey, containerName);
    if (container == null) {
        // Bug fix: the message said "uploading" and passed a third argument
        // with only two placeholders.
        logger.error("Error downloading snapshots. Unable to connect to {}, for container {}.",
            ctx.getExternalLocation(), containerName);
        return;
    }
    final String keyPrefix = String.format("%s/%s", backupName, nodeId);
    final Map<String, Long> snapshotFileKeys = getSnapshotFileKeys(container, keyPrefix);
    logger.info("Snapshot files for this node: {}", snapshotFileKeys);
    // entrySet avoids a second lookup per key.
    for (final Map.Entry<String, Long> entry : snapshotFileKeys.entrySet()) {
        downloadFile(localLocation, container, entry.getKey(), entry.getValue());
    }
}
/**
 * Downloads one blob to {@code localLocation + File.separator + fileKey},
 * decompressing it with Snappy. Errors are logged, not thrown, so the rest
 * of the snapshot can still be fetched.
 *
 * @param originalSize uncompressed size from blob metadata; used for logging
 *        only in this method.
 */
private void downloadFile(
    final String localLocation, final CloudBlobContainer container, final String fileKey, final long originalSize) {
    logger.info("Downloading | Local location {} | fileKey: {} | Size: {}", localLocation, fileKey, originalSize);
    final String fileLocation = localLocation + File.separator + fileKey;
    final File file = new File(fileLocation);
    // Only create parent directory once, if it doesn't exist.
    if (!createParentDir(file)) {
        // Bug fix: include the path in the message.
        logger.error("Unable to create parent directories for file: {}", fileLocation);
        return;
    }
    // try-with-resources closes in reverse order (Snappy stream first), which
    // matches the original explicit close order; close failures land in the
    // catch below and are logged.
    try (FileOutputStream fileOutputStream = new FileOutputStream(file, true);
         BufferedOutputStream bos = new BufferedOutputStream(fileOutputStream);
         InputStream inputStream = new PageBlobInputStream(container.getPageBlobReference(fileKey));
         SnappyInputStream compress = new SnappyInputStream(inputStream)) {
        IOUtils.copy(compress, bos, DEFAULT_PART_SIZE_DOWNLOAD);
    } catch (final Exception e) {
        logger.error("Unable to write file: {}", fileKey, e);
    }
}
/**
 * Fetches and decompresses the schema stored at
 * "&lt;backupname&gt;/&lt;node-id&gt;/schema.cql".
 *
 * @return the schema text, or the empty string if the container cannot be
 *         reached, the blob is missing, or the download fails.
 */
@Override
public String downloadSchema(final BackupRestoreContext ctx) throws Exception {
    String schema = "";
    final String accountName = ctx.getAccountId();
    final String accountKey = ctx.getSecretKey();
    final String backupName = ctx.getName();
    final String nodeId = ctx.getNodeId();
    final String keyPrefix = String.format("%s/%s", backupName, nodeId);
    final String containerName = StringUtils.lowerCase(getContainerName(ctx.getExternalLocation()));
    final CloudBlobContainer container = getCloudBlobContainer(accountName, accountKey, containerName);
    if (container == null) {
        logger.error("Error downloading schema. Unable to connect to {}, for container {}.",
            ctx.getExternalLocation(), containerName);
        return schema;
    }
    final String schemaKey = keyPrefix + "/schema.cql";
    final CloudPageBlob schemaBlob = container.getPageBlobReference(schemaKey);
    if (!schemaBlob.exists()) {
        logger.error("Error downloading schema. Unable to find schema on container {}",
            containerName);
        return schema;
    }
    InputStream blobStream = null;
    SnappyInputStream decompressed = null;
    try {
        blobStream = new PageBlobInputStream(schemaBlob);
        decompressed = new SnappyInputStream(blobStream);
        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        IOUtils.copy(decompressed, buffer);
        schema = buffer.toString();
    } catch (final Exception e) {
        logger.error("Unable to download schema : {}", schemaKey, e);
    } finally {
        // Close failures are deliberately suppressed; the (possibly empty)
        // schema string is returned regardless.
        IOUtils.closeQuietly(decompressed);
        IOUtils.closeQuietly(blobStream);
    }
    return schema;
}
/** Derives the container name from an "azure://&lt;container&gt;" external location. */
private String getContainerName(final String externalLocation) {
    final String withoutScheme = externalLocation.substring("azure://".length());
    // Strip any remaining slashes so only the bare container name is left.
    return withoutScheme.replace("/", "");
}
/**
 * Connects to the Azure storage account and returns a reference to the named
 * container, creating the container when it does not yet exist.
 *
 * @return the container reference, or null when the name is blank or the
 *         connection could not even produce a reference. Note: if only the
 *         createIfNotExists call fails, the already-obtained reference is
 *         still returned (matching the original control flow).
 */
private CloudBlobContainer getCloudBlobContainer(final String accountName, final String accountKey, final String containerName) {
    CloudBlobContainer container = null;
    if (StringUtils.isNotBlank(containerName)) {
        final String connectionString =
            "DefaultEndpointsProtocol=https"
                + ";AccountName=" + accountName
                + ";AccountKey=" + accountKey;
        try {
            final CloudStorageAccount account = CloudStorageAccount.parse(connectionString);
            final CloudBlobClient client = account.createCloudBlobClient();
            container = client.getContainerReference(containerName);
            container.createIfNotExists();
        } catch (StorageException | URISyntaxException | InvalidKeyException e) {
            logger.error("Error connecting to container for account {} and container name {}", accountName, containerName, e);
        }
    }
    return container;
}
/**
 * Ensures the parent directory of {@code file} exists, creating it if needed.
 * NOTE(review): assumes {@code file} has a parent path (blob keys always
 * contain '/'); file.getParent() would be null otherwise — TODO confirm.
 *
 * @return true if the parent directory exists or was created, false otherwise.
 */
private boolean createParentDir(final File file) {
    final File parentDir = new File(file.getParent());
    if (!parentDir.isDirectory()) {
        final boolean parentDirCreated = parentDir.mkdirs();
        if (!parentDirCreated) {
            // Bug fix: the {} placeholder previously had no argument.
            logger.error("Error creating parent directory for file: {}. Skipping to next", file.getAbsolutePath());
            return false;
        }
    }
    return true;
}
/**
 * Lists every page blob under {@code keyPrefix} and maps its name to the
 * original (uncompressed) file size recorded in its metadata.
 *
 * @return blob name -> original size; an empty map if listing fails
 *         (all-or-nothing: partial results are discarded).
 */
private Map<String, Long> getSnapshotFileKeys(final CloudBlobContainer container, final String keyPrefix) {
    final Map<String, Long> snapshotFiles = new HashMap<>();
    try {
        for (final ListBlobItem item : container.listBlobs(keyPrefix, true)) {
            if (item instanceof CloudPageBlob) {
                final CloudPageBlob pageBlob = (CloudPageBlob) item;
                snapshotFiles.put(pageBlob.getName(), getOriginalFileSize(pageBlob));
            }
        }
    } catch (final StorageException e) {
        logger.error("Unable to retrieve metadata.", e);
        // all or none
        snapshotFiles.clear();
    }
    return snapshotFiles;
}
/**
 * Reads the ORIGINAL_SIZE_KEY metadata entry from the blob.
 *
 * @return the recorded original size, or 0 when the metadata is absent or
 *         not a number.
 * @throws StorageException if the blob attributes cannot be downloaded.
 */
private long getOriginalFileSize(final CloudPageBlob pageBlobReference) throws StorageException {
    long size = 0;
    pageBlobReference.downloadAttributes();
    final HashMap<String, String> map = pageBlobReference.getMetadata();
    if (map != null && !map.isEmpty()) {
        // Bug fix: map.get may return null; handle that explicitly and only
        // catch NumberFormatException rather than a blanket Exception.
        final String originalSize = map.get(ORIGINAL_SIZE_KEY);
        if (originalSize == null) {
            logger.error("File size metadata missing or is not a number.");
        } else {
            try {
                size = Long.parseLong(originalSize);
            } catch (final NumberFormatException e) {
                logger.error("File size metadata missing or is not a number.");
            }
        }
    }
    return size;
}
}
package org.smof.collection;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.smof.element.Element;
import org.smof.element.InvalidIdException;
import com.google.gson.Gson;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
@SuppressWarnings("javadoc")
public abstract class AbstractNoSQLCollection<T extends Element> implements NoSQLCollection<T> {
protected final MongoCollection<Document> collection;
protected final Gson jsonManager;
private final Class<T> type;
/**
 * @param jsonManager Gson instance used to (de)serialize elements.
 * @param collection backing MongoDB collection of BSON documents.
 * @param type concrete element class, required for Gson round-trips.
 */
protected AbstractNoSQLCollection(final Gson jsonManager, final MongoCollection<Document> collection, final Class<T> type) {
    this.collection = collection;
    this.jsonManager = jsonManager;
    this.type = type;
}
/**
 * Inserts {@code element} unless a document already matches its uniqueness
 * condition; either way, the element's ID is set to the stored ObjectId.
 *
 * @return true if a new document was inserted, false if one already existed.
 */
@Override
public boolean add(final T element) {
    final Document jsonObject;
    final ObjectId id;
    // NOTE(review): find-then-insert is not atomic; two concurrent adds of
    // the same element can both insert. A unique index + upsert would close
    // the race — TODO confirm intended semantics.
    final Document result = collection.find(getUniqueCondition(element)).first();
    final boolean added = result == null;
    try {
        if(result == null) {
            jsonObject = Document.parse(jsonManager.toJson(element, type));
            id = new ObjectId();
            jsonObject.append(Element.ID, id);
            collection.insertOne(jsonObject);
            element.setID(id.toString());
        }
        else {
            // Already stored: propagate the existing document's ID.
            element.setID(result.getObjectId(Element.ID).toString());
        }
    } catch(InvalidIdException e) {
        // NOTE(review): the exception is swallowed and 'added' is returned
        // anyway; printStackTrace should be replaced with real logging.
        e.printStackTrace();
    }
    return added;
}
/** Streams every element stored in the collection. */
@Override
public Stream<T> getAll() {
    // A null condition makes find() list the whole collection.
    return find(null);
}
/**
 * Runs a find query and maps each resulting BSON document back to an element.
 *
 * @param condition filter to apply; null returns the whole collection.
 * @return a sequential stream of deserialized elements.
 */
protected final Stream<T> find(Bson condition) {
    final FindIterable<Document> matches =
        (condition == null) ? collection.find() : collection.find(condition);
    return StreamSupport.stream(matches.spliterator(), false)
        .map(document -> jsonManager.fromJson(document.toJson(), type));
}
/**
 * Fetches the element whose _id equals the given hexadecimal ObjectId string.
 *
 * @return the element, or null if no document has that ID.
 */
@Override
public T lookup(final String id) {
    final Document result = collection.find(Filters.eq(Element.ID, new ObjectId(id))).first();
    if (result == null) {
        // Bug fix: previously dereferenced result.toJson() and threw an NPE
        // for unknown IDs; return null instead, matching get(T).
        return null;
    }
    return jsonManager.fromJson(result.toJson(), type);
}
/**
 * Re-fetches each element by its ID.
 *
 * @return the looked-up elements, in iteration order of {@code ids}.
 */
@Override
public Set<T> lookupAll(final Iterable<T> ids) {
    final Set<T> found = new LinkedHashSet<>();
    for (final T element : ids) {
        found.add(lookup(element.getID()));
    }
    return found;
}
/** Filter identifying documents that duplicate {@code element} (uniqueness test). */
protected abstract Bson getUniqueCondition(T element);
/** Replaces the stored document whose _id matches {@code element}'s ID. */
@Override
public void update(final T element) {
    final Bson byId = Filters.eq(Element.ID, new ObjectId(element.getID()));
    final Document replacement = Document.parse(jsonManager.toJson(element, type));
    collection.findOneAndReplace(byId, replacement);
}
/**
 * Finds the stored element matching {@code element}'s uniqueness condition.
 *
 * @return the deserialized element, or null when nothing matches.
 */
@Override
public T get(final T element) {
    final Document match = collection.find(getUniqueCondition(element)).first();
    return match == null ? null : jsonManager.fromJson(match.toJson(), type);
}
}
package org.sosy_lab.java_smt.example;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import org.sosy_lab.common.ShutdownNotifier;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.log.BasicLogManager;
import org.sosy_lab.common.log.LogManager;
import org.sosy_lab.java_smt.SolverContextFactory;
import org.sosy_lab.java_smt.SolverContextFactory.Solvers;
import org.sosy_lab.java_smt.api.BooleanFormula;
import org.sosy_lab.java_smt.api.IntegerFormulaManager;
import org.sosy_lab.java_smt.api.InterpolatingProverEnvironment;
import org.sosy_lab.java_smt.api.NumeralFormula.IntegerFormula;
import org.sosy_lab.java_smt.api.SolverContext;
import org.sosy_lab.java_smt.api.SolverException;
/** Examples for Craig/sequential/tree interpolation. */
public class Interpolation {
private Interpolation() {
    // Utility/example class: prevents instantiation.
}
/**
 * Builds a solver context for SMTInterpol and runs both interpolation
 * examples, each on its own push/pop frame of the same prover.
 *
 * @throws InvalidConfigurationException if the default configuration is invalid.
 * @throws SolverException on solver-level failures.
 * @throws InterruptedException if solving is interrupted.
 */
public static void main(String... args)
    throws InvalidConfigurationException, SolverException, InterruptedException {
    // set up a basic environment
    Configuration config = Configuration.defaultConfiguration();
    LogManager logger = BasicLogManager.create(config);
    ShutdownNotifier notifier = ShutdownNotifier.createDummy();
    // choose solver
    Solvers solver = Solvers.SMTINTERPOL; // works for all interpolation strategies
    // setup context
    try (SolverContext context =
            SolverContextFactory.createSolverContext(config, logger, notifier, solver);
        InterpolatingProverEnvironment<?> prover =
            context.newProverEnvironmentWithInterpolation()) {
        IntegerFormulaManager imgr = context.getFormulaManager().getIntegerFormulaManager();
        // push/pop isolates each example on its own assertion-stack frame
        prover.push();
        interpolateExample(prover, imgr, logger);
        prover.pop();
        // and another example
        prover.push();
        interpolateProgramTrace(prover, imgr, logger);
        prover.pop();
    }
}
/**
 * Demonstrates several equivalent ways of querying interpolants for the
 * UNSAT constraint sequence x &gt; y, x == 0, y &gt; 0.
 *
 * @param <T> solver-specific handle type returned by addConstraint.
 */
private static <T> void interpolateExample(
    InterpolatingProverEnvironment<T> prover, IntegerFormulaManager imgr, LogManager logger)
    throws InterruptedException, SolverException {
    // create some variables.
    IntegerFormula x = imgr.makeVariable("x");
    IntegerFormula y = imgr.makeVariable("y");
    IntegerFormula zero = imgr.makeNumber(0);
    // create and assert some formulas.
    // instead of 'named' formulas, we return a 'handle' (of generic type T)
    T ip0 = prover.addConstraint(imgr.greaterThan(x, y));
    T ip1 = prover.addConstraint(imgr.equal(x, zero));
    T ip2 = prover.addConstraint(imgr.greaterThan(y, zero));
    // check for satisfiability
    boolean unsat = prover.isUnsat();
    Preconditions.checkState(unsat, "the example for interpolation should be UNSAT");
    List<BooleanFormula> itps;
    // example 1a :
    // get a sequence of interpolants for three formulas: (get-interpolants IP_0 IP_1 IP_2).
    itps = prover.getSeqInterpolants0(Lists.newArrayList(ip0, ip1, ip2));
    logger.log(Level.INFO, "1a :: Interpolants for [{ip0},{ip1},{ip2}] are:", itps);
    // example 1b :
    // alternative solution ... with more code and partitioned formulas.
    Set<T> partition0 = Collections.singleton(ip0);
    Set<T> partition1 = Collections.singleton(ip1);
    Set<T> partition2 = Collections.singleton(ip2);
    itps = prover.getSeqInterpolants(Lists.newArrayList(partition0, partition1, partition2));
    logger.log(Level.INFO, "1b :: Interpolants for [{ip0},{ip1},{ip2}] are:", itps);
    // example 2a :
    // get a sequence of interpolants for two formulas: (get-interpolants IP_1 (and IP_0 IP_2)).
    Set<T> partition3 = Collections.singleton(ip0);
    Set<T> partition4 = new HashSet<>();
    partition4.add(ip1);
    partition4.add(ip2);
    itps = prover.getSeqInterpolants(Lists.newArrayList(partition3, partition4));
    logger.log(Level.INFO, "2a :: Interpolants for [{ip0},{ip1,ip2}] are:", itps);
    // example 2b :
    // alternative solution, works when there are exactly two (!) groups of formulas.
    // only one part is given as parameter, the rest is taken from the already asserted formulas.
    BooleanFormula itp = prover.getInterpolant(Lists.newArrayList(ip0));
    logger.log(Level.INFO, "2b :: Interpolants for [{ip0},{ip1,ip2}] are:", itp);
}
/**
 * Interpolates over an UNSAT program trace: i=0, k=j, one loop step
 * (i&lt;50, i'=i+1, k'=k+1), loop exit (i'&gt;=50), j=0, and the contradicting
 * constraint k'&lt;50. Primed variables model post-assignment values.
 *
 * @param <T> solver-specific handle type returned by addConstraint.
 */
private static <T> void interpolateProgramTrace(
    InterpolatingProverEnvironment<T> prover, IntegerFormulaManager imgr, LogManager logger)
    throws InterruptedException, SolverException {
    // create some variables.
    // primed variable needed for 'self-assignments', alternatively use SSA-indices.
    IntegerFormula i = imgr.makeVariable("i");
    IntegerFormula i1 = imgr.makeVariable("i'");
    IntegerFormula j = imgr.makeVariable("j");
    IntegerFormula k = imgr.makeVariable("k");
    IntegerFormula k1 = imgr.makeVariable("k'");
    IntegerFormula zero = imgr.makeNumber(0);
    IntegerFormula one = imgr.makeNumber(1);
    IntegerFormula fifty = imgr.makeNumber(50);
    // create and assert some formulas.
    List<BooleanFormula> programTrace =
        Lists.newArrayList(
            imgr.equal(i, zero),
            imgr.equal(k, j),
            imgr.lessThan(i, fifty),
            imgr.equal(i1, imgr.add(i, one)),
            imgr.equal(k1, imgr.add(k, one)),
            imgr.greaterOrEquals(i1, fifty),
            imgr.equal(j, zero),
            imgr.lessThan(k1, fifty));
    // assert all formulas in the prover, collecting one handle per step
    List<T> handles = Lists.newArrayList();
    for (BooleanFormula step : programTrace) {
        handles.add(prover.addConstraint(step));
    }
    // check for satisfiability
    boolean unsat = prover.isUnsat();
    Preconditions.checkState(unsat, "the example for interpolation should be UNSAT");
    // get a sequence of interpolants for the program trace.
    List<BooleanFormula> itps = prover.getSeqInterpolants0(handles);
    logger.log(Level.INFO, "Interpolants for the program trace are:", itps);
}
}
package org.opencb.cellbase.lib.mongodb.db;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.QueryBuilder;
import org.broad.tribble.readers.TabixReader;
import org.opencb.biodata.models.variant.annotation.ConsequenceType;
import org.opencb.biodata.models.variation.GenomicVariant;
import org.opencb.cellbase.core.lib.api.variation.VariantAnnotationDBAdaptor;
import org.opencb.cellbase.core.lib.dbquery.QueryOptions;
import org.opencb.cellbase.core.lib.dbquery.QueryResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.*;
//import java.util.logging.Logger;
public class VariantAnnotationMongoDBAdaptor extends MongoDBAdaptor implements VariantAnnotationDBAdaptor {
// private DBCollection mongoVariationPhenotypeDBCollection;
private int coreChunkSize = 5000;
private int regulatoryChunkSize = 2000; //TODO: load this value from properties
private static Map<String, Map<String,Boolean>> isSynonymousCodon = new HashMap<>();
private static Map<String, List<String>> aToCodon = new HashMap<>(20);
private static Map<String, String> codonToA = new HashMap<>();
private static Map<String, Integer> biotypes = new HashMap<>(30);
private static Map<Character, Character> complementaryNt = new HashMap<>();
static {
///// GENETIC CODE ////////////////////////////////////////////////
aToCodon.put("ALA",new ArrayList<String>());
aToCodon.get("ALA").add("GCT"); aToCodon.get("ALA").add("GCC"); aToCodon.get("ALA").add("GCA"); aToCodon.get("ALA").add("GCG");
aToCodon.put("ARG",new ArrayList<String>());
aToCodon.get("ARG").add("CGT"); aToCodon.get("ARG").add("CGC"); aToCodon.get("ARG").add("CGA"); aToCodon.get("ARG").add("CGG");
aToCodon.get("ARG").add("AGA"); aToCodon.get("ARG").add("AGG");
aToCodon.put("ASN", new ArrayList<String>());
aToCodon.get("ASN").add("AAT"); aToCodon.get("ASN").add("AAC");
aToCodon.put("ASP", new ArrayList<String>());
aToCodon.get("ASP").add("GAT"); aToCodon.get("ASP").add("GAC");
aToCodon.put("CYS", new ArrayList<String>());
aToCodon.get("CYS").add("TGT"); aToCodon.get("CYS").add("TGC");
aToCodon.put("GLN", new ArrayList<String>());
aToCodon.get("GLN").add("CAA"); aToCodon.get("GLN").add("CAG");
aToCodon.put("GLU", new ArrayList<String>());
aToCodon.get("GLU").add("GAA"); aToCodon.get("GLU").add("GAG");
aToCodon.put("GLY",new ArrayList<String>());
aToCodon.get("GLY").add("GGT"); aToCodon.get("GLY").add("GGC"); aToCodon.get("GLY").add("GGA"); aToCodon.get("GLY").add("GGG");
aToCodon.put("HIS",new ArrayList<String>());
aToCodon.get("HIS").add("CAT"); aToCodon.get("HIS").add("CAC");
aToCodon.put("ILE",new ArrayList<String>());
aToCodon.get("ILE").add("ATT"); aToCodon.get("ILE").add("ATC"); aToCodon.get("ILE").add("ATA");
aToCodon.put("LEU",new ArrayList<String>());
aToCodon.get("LEU").add("TTA"); aToCodon.get("LEU").add("TTG"); aToCodon.get("LEU").add("CTT"); aToCodon.get("LEU").add("CTC");
aToCodon.get("LEU").add("CTA"); aToCodon.get("LEU").add("CTG");
aToCodon.put("LYS", new ArrayList<String>());
aToCodon.get("LYS").add("AAA"); aToCodon.get("LYS").add("AAG");
aToCodon.put("MET", new ArrayList<String>());
aToCodon.get("MET").add("ATG");
aToCodon.put("PHE",new ArrayList<String>());
aToCodon.get("PHE").add("TTT"); aToCodon.get("PHE").add("TTC");
aToCodon.put("PRO",new ArrayList<String>());
aToCodon.get("PRO").add("CCT"); aToCodon.get("PRO").add("CCC"); aToCodon.get("PRO").add("CCA"); aToCodon.get("PRO").add("CCG");
aToCodon.put("SER",new ArrayList<String>());
aToCodon.get("SER").add("TCT"); aToCodon.get("SER").add("TCC"); aToCodon.get("SER").add("TCA"); aToCodon.get("SER").add("TCG");
aToCodon.get("SER").add("AGT"); aToCodon.get("SER").add("AGC");
aToCodon.put("THR",new ArrayList<String>());
aToCodon.get("THR").add("ACT"); aToCodon.get("THR").add("ACC"); aToCodon.get("THR").add("ACA"); aToCodon.get("THR").add("ACG");
aToCodon.put("TRP",new ArrayList<String>());
aToCodon.get("TRP").add("TGG");
aToCodon.put("TYR",new ArrayList<String>());
aToCodon.get("TYR").add("TAT"); aToCodon.get("TYR").add("TAC");
aToCodon.put("VAL",new ArrayList<String>());
aToCodon.get("VAL").add("GTT"); aToCodon.get("VAL").add("GTC"); aToCodon.get("VAL").add("GTA"); aToCodon.get("VAL").add("GTG");
aToCodon.put("STOP",new ArrayList<String>());
aToCodon.get("STOP").add("TAA"); aToCodon.get("STOP").add("TGA"); aToCodon.get("STOP").add("TAG");
for(String aa : aToCodon.keySet()) {
for(String codon : aToCodon.get(aa)) {
isSynonymousCodon.put(codon, new HashMap<String, Boolean>());
codonToA.put(codon, aa);
}
}
for(String codon1 : isSynonymousCodon.keySet()) {
Map<String,Boolean> codonEntry = isSynonymousCodon.get(codon1);
for(String codon2 : isSynonymousCodon.keySet()) {
codonEntry.put(codon2,false);
}
}
for(String aa : aToCodon.keySet()) {
for(String codon1 : aToCodon.get(aa)) {
for(String codon2 : aToCodon.get(aa)) {
isSynonymousCodon.get(codon1).put(codon2,true);
}
}
}
biotypes.put("3prime_overlapping_ncrna",0);
biotypes.put("IG_C_gene",1);
biotypes.put("IG_C_pseudogene",2);
biotypes.put("IG_D_gene",3);
biotypes.put("IG_J_gene",4);
biotypes.put("IG_J_pseudogene",5);
biotypes.put("IG_V_gene",6);
biotypes.put("IG_V_pseudogene",7);
biotypes.put("Mt_rRNA",8);
biotypes.put("Mt_tRNA",9);
biotypes.put("TR_C_gene",10);
biotypes.put("TR_D_gene",11);
biotypes.put("TR_J_gene",12);
biotypes.put("TR_J_pseudogene",13);
biotypes.put("TR_V_gene",14);
biotypes.put("TR_V_pseudogene",15);
biotypes.put("antisense",16);
biotypes.put("lincRNA",17);
biotypes.put("miRNA",18);
biotypes.put("misc_RNA",19);
biotypes.put("polymorphic_pseudogene",20);
biotypes.put("processed_pseudogene",21);
biotypes.put("processed_transcript",22);
biotypes.put("protein_coding",23);
biotypes.put("pseudogene",24);
biotypes.put("rRNA",25);
biotypes.put("sense_intronic",26);
biotypes.put("sense_overlapping",27);
biotypes.put("snRNA",28);
biotypes.put("snoRNA",29);
biotypes.put("nonsense_mediated_decay",30);
biotypes.put("unprocessed_pseudogene",31);
biotypes.put("transcribed_unprocessed_pseudogene",32);
biotypes.put("retained_intron",33);
biotypes.put("non_stop_decay",34);
biotypes.put("unitary_pseudogene",35);
biotypes.put("translated_processed_pseudogene",36);
biotypes.put("transcribed_processed_pseudogene",37);
biotypes.put("tRNA_pseudogene",38);
biotypes.put("snoRNA_pseudogene",39);
biotypes.put("snRNA_pseudogene",40);
biotypes.put("scRNA_pseudogene",41);
biotypes.put("rRNA_pseudogene",42);
biotypes.put("misc_RNA_pseudogene",43);
biotypes.put("miRNA_pseudogene",44);
biotypes.put("non_coding",45);
biotypes.put("ambiguous_orf",46);
biotypes.put("known_ncrna",47);
biotypes.put("retrotransposed",48);
biotypes.put("transcribed_unitary_pseudogene",49);
biotypes.put("translated_unprocessed_pseudogene",50);
biotypes.put("LRG_gene",51);
complementaryNt.put('A','T');
complementaryNt.put('C','G');
complementaryNt.put('G','C');
complementaryNt.put('T','A');
}
/** Builds the adaptor with the default core chunk size (5000). */
public VariantAnnotationMongoDBAdaptor(DB db, String species, String assembly) {
    super(db, species, assembly);
}
/** Builds the adaptor with a caller-supplied core chunk size. */
public VariantAnnotationMongoDBAdaptor(DB db, String species, String assembly, int coreChunkSize) {
    super(db, species, assembly);
    this.coreChunkSize = coreChunkSize;
}
/**
 * True when [region1Start, region1End] and [region2Start, region2End] share
 * at least one position. Assumes start &lt;= end within each region.
 */
private Boolean regionsOverlap(Integer region1Start, Integer region1End, Integer region2Start, Integer region2End) {
    // De Morgan form of the original test: regions fail to overlap only when
    // region2 lies entirely before or entirely after region1.
    final boolean entirelyBefore = region2Start < region1Start && region2End < region1Start;
    final boolean entirelyAfter = region2Start > region1End && region2End > region1End;
    return !(entirelyBefore || entirelyAfter);
}
/** True for the three DNA stop codons: TAA, TGA, TAG. */
private Boolean isStopCodon(String codon) {
    switch (codon) {
        case "TAA":
        case "TGA":
        case "TAG":
            return true;
        default:
            return false;
    }
}
/**
 * Slides a 3-base window one nucleotide at a time over {@code sequence}
 * (i.e. checks all reading frames) and reports whether any window is a
 * stop codon (TAA/TGA/TAG).
 *
 * @param sequence DNA sequence; sequences shorter than 3 yield false.
 * @return true if a stop codon appears at any offset.
 */
private Boolean gainsStopCodon(String sequence) {
    // Bug fix: the original do-while only guarded i < length, so
    // substring(i, i + 3) overran the end of the string — e.g. a single
    // non-stop codon ("ATG") threw StringIndexOutOfBoundsException on the
    // second iteration. Guard on i + 3 <= length instead.
    for (int i = 0; i + 3 <= sequence.length(); i++) {
        String codon = sequence.substring(i, i + 3);
        if (codon.equals("TAA") || codon.equals("TGA") || codon.equals("TAG")) {
            return true;
        }
    }
    return false;
}
/**
 * Adds SO consequence-type names for a variant falling inside a coding exon.
 * Classifies deletions (feature_truncation, inframe_deletion / frameshift_variant,
 * stop_gained), insertions (feature_elongation, inframe_insertion / frameshift_variant,
 * stop_gained) and SNVs (synonymous_variant / missense_variant).
 *
 * @param previousCodonNucleotides nucleotides of the current codon that belong to the previous exon
 * @param exonSequence             sequence of the exon containing the variant
 * @param exonStart                genomic start of the exon
 * @param exonEnd                  genomic end of the exon
 * @param variantStart             genomic start of the variant
 * @param variantEnd               genomic end of the variant
 * @param variantRef               reference allele ("-" for insertions)
 * @param variantAlt               alternative allele ("-" for deletions)
 * @param consequenceTypeList      output list the SO names are appended to
 *
 * NOTE(review): in the SNV branch below, modifiedCodonRelativeStart may be negative when
 * the modified codon starts in the previous exon; the deletion/insertion branches guard
 * this via previousCodonNucleotides but the SNV branch calls substring() directly —
 * confirm callers guarantee this cannot happen.
 */
private void solveCodingExonEffect(String previousCodonNucleotides, String exonSequence, Integer exonStart, Integer exonEnd, Integer variantStart, Integer variantEnd,
                                   String variantRef, String variantAlt, List<String> consequenceTypeList) {
    // Offset of the variant within its codon (0, 1 or 2), accounting for codon
    // nucleotides carried over from the previous exon.
    Integer variantPhaseShift = (variantStart-(exonStart-previousCodonNucleotides.length())) % 3;
    // Start of the modified codon relative to the exon sequence; negative if the
    // codon begins in the previous exon.
    Integer modifiedCodonRelativeStart = variantStart-variantPhaseShift-exonStart;
    String modifiedCodonPrefix;
    String newCodon;
    if(variantAlt.equals("-")) { // Deletion
        consequenceTypeList.add("feature_truncation");
        if(variantStart >= exonStart && variantEnd <= exonEnd) { // Deletion does not go beyond exon limits
            if(modifiedCodonRelativeStart < 0) {
                // Codon starts in the previous exon: prepend the carried-over nucleotides
                modifiedCodonPrefix = previousCodonNucleotides+exonSequence.substring(0,variantStart-exonStart);
            } else {
                modifiedCodonPrefix = exonSequence.substring(modifiedCodonRelativeStart, modifiedCodonRelativeStart + variantPhaseShift);
            }
            if(variantRef.length()%3 == 0) {
                if (variantPhaseShift == 0) { // Check deletion starts at the first position of a codon
                    consequenceTypeList.add("inframe_deletion"); // TODO: check that I correctly interpreted the meaning of this consequence type
                }
            } else {
                consequenceTypeList.add("frameshift_variant");
                // Rebuild the codon that results after the deletion and check for a new stop
                newCodon = modifiedCodonPrefix+exonSequence.substring(variantEnd-exonStart+1,variantEnd-exonStart+1+(3-modifiedCodonPrefix.length()));
                if(isStopCodon(newCodon)) {
                    consequenceTypeList.add("stop_gained");
                }
            }
        }
    } else {
        if(variantRef.equals("-")) { // Insertion TODO: I've seen insertions within Cellbase-mongo with a ref != -
            consequenceTypeList.add("feature_elongation");
            if(variantAlt.length()%3 == 0) {
                if (variantPhaseShift == 0) { // Check insertion starts at the first position of a codon
                    consequenceTypeList.add("inframe_insertion"); // TODO: check that I correctly interpreted the meaning of this consequence type
                    if (gainsStopCodon(variantAlt)) {
                        consequenceTypeList.add("stop_gained");
                    }
                } else {
                    // In-frame length but mid-codon: check the prefix + inserted sequence for a stop
                    if (modifiedCodonRelativeStart < 0) {
                        modifiedCodonPrefix = previousCodonNucleotides + exonSequence.substring(0, variantStart - exonStart);
                    } else {
                        modifiedCodonPrefix = exonSequence.substring(modifiedCodonRelativeStart, modifiedCodonRelativeStart + variantPhaseShift);
                    }
                    if (gainsStopCodon(modifiedCodonPrefix + variantAlt)) {
                        consequenceTypeList.add("stop_gained");
                    }
                }
            } else {
                consequenceTypeList.add("frameshift_variant");
                if (modifiedCodonRelativeStart < 0) {
                    modifiedCodonPrefix = previousCodonNucleotides + exonSequence.substring(0, variantStart - exonStart);
                } else {
                    modifiedCodonPrefix = exonSequence.substring(modifiedCodonRelativeStart, modifiedCodonRelativeStart + variantPhaseShift);
                }
                if (gainsStopCodon(modifiedCodonPrefix + variantAlt)) {
                    consequenceTypeList.add("stop_gained");
                }
            }
        } else { // SNV
            // NOTE(review): modifiedCodonRelativeStart is assumed non-negative here — see class note above
            String referenceCodon = exonSequence.substring(modifiedCodonRelativeStart, modifiedCodonRelativeStart + 3);
            char[] modifiedCodonArray = referenceCodon.toCharArray();
            modifiedCodonArray[variantPhaseShift] = variantAlt.toCharArray()[0];
            if(isSynonymousCodon.get(referenceCodon).get(String.valueOf(modifiedCodonArray))){
                consequenceTypeList.add("synonymous_variant");
            } else {
                consequenceTypeList.add("missense_variant");
            }
        }
    }
}
/**
 * Adds SO consequence-type names for a variant within the coding region of a
 * positive-strand transcript and fills codon / amino-acid change information in
 * the template. Handles initiator/stop codon hits, deletions, insertions and SNVs.
 *
 * Fixes:
 * - the phase shift was previously computed by unboxing cdnaVariantStart BEFORE its
 *   own null check, throwing NullPointerException for intronic variants;
 * - the SNV detection used boxed Integer '==' (box identity), which silently fails
 *   for cDNA positions outside the Integer cache (> 127); replaced with equals().
 *
 * @param splicing               true when the variant touches a splice region (no codon prediction then)
 * @param transcriptSequence     full cDNA sequence of the transcript
 * @param cdnaCodingStart        cDNA position where the coding sequence starts (1-based)
 * @param cdnaCodingEnd          cDNA position where the coding sequence ends
 * @param cdnaVariantStart       cDNA position of the variant start; null if intronic
 * @param cdnaVariantEnd         cDNA position of the variant end; null if intronic
 * @param variantRef             reference allele ("-" for insertions)
 * @param variantAlt             alternative allele ("-" for deletions)
 * @param SoNames                output set of SO names
 * @param consequenceTypeTemplate template whose codon/aChange fields are filled for SNVs
 */
private void solvePositiveCodingEffect(Boolean splicing, String transcriptSequence, Integer cdnaCodingStart, Integer cdnaCodingEnd,
                                       Integer cdnaVariantStart, Integer cdnaVariantEnd, String variantRef, String variantAlt,
                                       HashSet<String> SoNames, ConsequenceType consequenceTypeTemplate) {
    // cdnaVariantStart is null for intronic variants — guard the unboxing arithmetic
    Integer variantPhaseShift = (cdnaVariantStart != null) ? (cdnaVariantStart - cdnaCodingStart) % 3 : null;
    if (cdnaVariantStart != null && cdnaVariantStart < (cdnaCodingStart + 3)) {
        SoNames.add("initiator_codon_variant");
    }
    if (cdnaVariantEnd != null && cdnaVariantEnd > (cdnaCodingEnd - 3)) { // variant touches the stop codon
        // cdnaVariantEnd is known non-null here, so equals() is a null-safe value comparison
        if (cdnaVariantEnd.equals(cdnaVariantStart)) { // It is a SNV
            int modifiedCodonStart = cdnaVariantStart - variantPhaseShift;
            // -1 and +2 because of 0-based String indexing
            String referenceCodon = transcriptSequence.substring(modifiedCodonStart - 1, modifiedCodonStart + 2);
            char[] modifiedCodonArray = referenceCodon.toCharArray();
            modifiedCodonArray[variantPhaseShift] = variantAlt.toCharArray()[0];
            if (isSynonymousCodon.get(referenceCodon).get(String.valueOf(modifiedCodonArray))) {
                SoNames.add("stop_retained_variant");
            } else {
                SoNames.add("stop_lost");
            }
            // Fill consequenceTypeTemplate.codon leaving only the changed nt in uppercase
            char[] referenceCodonArray = referenceCodon.toLowerCase().toCharArray();
            referenceCodonArray[variantPhaseShift] = Character.toUpperCase(referenceCodonArray[variantPhaseShift]);
            modifiedCodonArray = String.valueOf(modifiedCodonArray).toLowerCase().toCharArray();
            modifiedCodonArray[variantPhaseShift] = Character.toUpperCase(modifiedCodonArray[variantPhaseShift]);
            consequenceTypeTemplate.setCodon(String.valueOf(referenceCodonArray) + "/" + String.valueOf(modifiedCodonArray));
        } else {
            SoNames.add("stop_lost");
        }
    }
    if (variantAlt.equals("-")) { // Deletion
        SoNames.add("feature_truncation");
        if (!splicing) {
            if (variantRef.length() % 3 == 0) {
                SoNames.add("inframe_deletion"); // TODO: check that I correctly interpreted the meaning of this consequence type
            } else {
                SoNames.add("frameshift_variant");
            }
        }
    } else if (variantRef.equals("-")) { // Insertion TODO: insertions with ref != '-' have been seen in Cellbase-mongo
        SoNames.add("feature_elongation");
        if (!splicing) {
            if (variantAlt.length() % 3 == 0) {
                SoNames.add("inframe_insertion"); // TODO: check that I correctly interpreted the meaning of this consequence type
            } else {
                SoNames.add("frameshift_variant");
            }
        }
    } else { // SNV
        if (!splicing) {
            int modifiedCodonStart = cdnaVariantStart - variantPhaseShift;
            String referenceCodon = transcriptSequence.substring(modifiedCodonStart - 1, modifiedCodonStart + 2); // -1 and +2 because of base 0 String indexing
            char[] modifiedCodonArray = referenceCodon.toCharArray();
            modifiedCodonArray[variantPhaseShift] = variantAlt.toCharArray()[0];
            if (isSynonymousCodon.get(referenceCodon).get(String.valueOf(modifiedCodonArray))) {
                SoNames.add("synonymous_variant");
            } else {
                SoNames.add("missense_variant");
            }
            // Set consequenceTypeTemplate.aChange
            consequenceTypeTemplate.setaChange(codonToA.get(referenceCodon) + "/" + codonToA.get(String.valueOf(modifiedCodonArray)));
            // Set consequenceTypeTemplate.codon leaving only the changed nt in uppercase
            char[] referenceCodonArray = referenceCodon.toLowerCase().toCharArray();
            referenceCodonArray[variantPhaseShift] = Character.toUpperCase(referenceCodonArray[variantPhaseShift]);
            modifiedCodonArray = String.valueOf(modifiedCodonArray).toLowerCase().toCharArray();
            modifiedCodonArray[variantPhaseShift] = Character.toUpperCase(modifiedCodonArray[variantPhaseShift]);
            consequenceTypeTemplate.setCodon(String.valueOf(referenceCodonArray) + "/" + String.valueOf(modifiedCodonArray));
        }
    }
}
/**
 * Adds SO consequence-type names for a variant within the coding region of a
 * negative-strand transcript and fills codon / amino-acid change information in
 * the template. Mirror of solvePositiveCodingEffect: codons are read from the
 * right end of transcriptSequence, reversed, and complemented.
 *
 * Fixes:
 * - in the stop-codon SNV branch the '+1' previously sat OUTSIDE substring(),
 *   concatenating the character '1' to a two-nt substring; the subsequent
 *   complementaryNt.get('1') returned null and crashed on unboxing. The correct
 *   expression (matching the SNV branch further below) puts the +1 inside the
 *   substring upper bound;
 * - the phase shift was computed by unboxing cdnaVariantStart before its own null
 *   check, throwing NullPointerException for intronic variants;
 * - the SNV detection used boxed Integer '==' (box identity), which silently fails
 *   for cDNA positions outside the Integer cache (> 127); replaced with equals().
 *
 * @param splicing               true when the variant touches a splice region (no codon prediction then)
 * @param transcriptSequence     transcript sequence as stored (5'->3' of the + strand)
 * @param cdnaCodingStart        cDNA position where the coding sequence starts (1-based)
 * @param cdnaCodingEnd          cDNA position where the coding sequence ends
 * @param cdnaVariantStart       cDNA position of the variant start; null if intronic
 * @param cdnaVariantEnd         cDNA position of the variant end; null if intronic
 * @param variantRef             reference allele ("-" for insertions)
 * @param variantAlt             alternative allele ("-" for deletions)
 * @param SoNames                output set of SO names
 * @param consequenceTypeTemplate template whose codon/aChange fields are filled for SNVs
 */
private void solveNegativeCodingEffect(Boolean splicing, String transcriptSequence, Integer cdnaCodingStart, Integer cdnaCodingEnd,
                                       Integer cdnaVariantStart, Integer cdnaVariantEnd, String variantRef, String variantAlt,
                                       HashSet<String> SoNames, ConsequenceType consequenceTypeTemplate) {
    // cdnaVariantStart is null for intronic variants — guard the unboxing arithmetic
    Integer variantPhaseShift = (cdnaVariantStart != null) ? (cdnaVariantStart - cdnaCodingStart) % 3 : null;
    if (cdnaVariantStart != null && cdnaVariantStart < (cdnaCodingStart + 3)) {
        SoNames.add("initiator_codon_variant");
    }
    if (cdnaVariantEnd != null && cdnaVariantEnd > (cdnaCodingEnd - 3)) { // variant touches the stop codon
        // cdnaVariantEnd is known non-null here, so equals() is a null-safe value comparison
        if (cdnaVariantEnd.equals(cdnaVariantStart)) { // It is a SNV
            int modifiedCodonStart = cdnaVariantStart - variantPhaseShift;
            // Right limit of the substring sums +1 because substring does not include that position
            String reverseCodon = new StringBuilder(transcriptSequence.substring(transcriptSequence.length() - modifiedCodonStart - 2,
                    transcriptSequence.length() - modifiedCodonStart + 1)).reverse().toString();
            char[] referenceCodon = reverseCodon.toCharArray();
            referenceCodon[0] = complementaryNt.get(referenceCodon[0]);
            referenceCodon[1] = complementaryNt.get(referenceCodon[1]);
            referenceCodon[2] = complementaryNt.get(referenceCodon[2]);
            char[] modifiedCodonArray = referenceCodon.clone();
            modifiedCodonArray[variantPhaseShift] = complementaryNt.get(variantAlt.toCharArray()[0]);
            if (isSynonymousCodon.get(String.valueOf(referenceCodon)).get(String.valueOf(modifiedCodonArray))) {
                SoNames.add("stop_retained_variant");
            } else {
                SoNames.add("stop_lost");
            }
            // Fill consequenceTypeTemplate.codon leaving only the changed nt in uppercase
            char[] referenceCodonArray = String.valueOf(referenceCodon).toLowerCase().toCharArray();
            referenceCodonArray[variantPhaseShift] = Character.toUpperCase(referenceCodonArray[variantPhaseShift]);
            modifiedCodonArray = String.valueOf(modifiedCodonArray).toLowerCase().toCharArray();
            modifiedCodonArray[variantPhaseShift] = Character.toUpperCase(modifiedCodonArray[variantPhaseShift]);
            consequenceTypeTemplate.setCodon(String.valueOf(referenceCodonArray) + "/" + String.valueOf(modifiedCodonArray));
        } else {
            SoNames.add("stop_lost");
        }
    }
    if (variantAlt.equals("-")) { // Deletion
        SoNames.add("feature_truncation");
        if (!splicing) {
            if (variantRef.length() % 3 == 0) {
                SoNames.add("inframe_deletion"); // TODO: check that I correctly interpreted the meaning of this consequence type
            } else {
                SoNames.add("frameshift_variant");
            }
        }
    } else if (variantRef.equals("-")) { // Insertion TODO: insertions with ref != '-' have been seen in Cellbase-mongo
        SoNames.add("feature_elongation");
        if (!splicing) {
            if (variantAlt.length() % 3 == 0) {
                SoNames.add("inframe_insertion"); // TODO: check that I correctly interpreted the meaning of this consequence type
            } else {
                SoNames.add("frameshift_variant");
            }
        }
    } else { // SNV
        if (!splicing) {
            int modifiedCodonStart = cdnaVariantStart - variantPhaseShift;
            // Right limit of the substring sums +1 because substring does not include that position
            String reverseCodon = new StringBuilder(transcriptSequence.substring(transcriptSequence.length() - modifiedCodonStart - 2,
                    transcriptSequence.length() - modifiedCodonStart + 1)).reverse().toString();
            char[] referenceCodon = reverseCodon.toCharArray();
            referenceCodon[0] = complementaryNt.get(referenceCodon[0]);
            referenceCodon[1] = complementaryNt.get(referenceCodon[1]);
            referenceCodon[2] = complementaryNt.get(referenceCodon[2]);
            char[] modifiedCodonArray = referenceCodon.clone();
            modifiedCodonArray[variantPhaseShift] = complementaryNt.get(variantAlt.toCharArray()[0]);
            if (isSynonymousCodon.get(String.valueOf(referenceCodon)).get(String.valueOf(modifiedCodonArray))) {
                SoNames.add("synonymous_variant");
            } else {
                SoNames.add("missense_variant");
            }
            // Set consequenceTypeTemplate.aChange
            consequenceTypeTemplate.setaChange(codonToA.get(String.valueOf(referenceCodon)) + "/" + codonToA.get(String.valueOf(modifiedCodonArray)));
            // Fill consequenceTypeTemplate.codon leaving only the changed nt in uppercase
            char[] referenceCodonArray = String.valueOf(referenceCodon).toLowerCase().toCharArray();
            referenceCodonArray[variantPhaseShift] = Character.toUpperCase(referenceCodonArray[variantPhaseShift]);
            modifiedCodonArray = String.valueOf(modifiedCodonArray).toLowerCase().toCharArray();
            modifiedCodonArray[variantPhaseShift] = Character.toUpperCase(modifiedCodonArray[variantPhaseShift]);
            consequenceTypeTemplate.setCodon(String.valueOf(referenceCodonArray) + "/" + String.valueOf(modifiedCodonArray));
        }
    }
}
/**
 * Dispatches annotation for a variant overlapping a coding, positive-strand transcript:
 * classifies it against the 5' UTR, the coding region and the 3' UTR, fills CDS/aa
 * positions in the template, and delegates codon-level analysis when the variant is
 * fully contained in the coding region.
 */
private void solveCodingPositiveTranscriptEffect(Boolean splicing, String transcriptSequence, Integer transcriptStart, Integer transcriptEnd, Integer genomicCodingStart,
                                                 Integer genomicCodingEnd, Integer variantStart, Integer variantEnd,
                                                 Integer cdnaCodingStart, Integer cdnaCodingEnd, Integer cdnaVariantStart,
                                                 Integer cdnaVariantEnd, String variantRef, String variantAlt,
                                                 HashSet<String> SoNames, ConsequenceType consequenceTypeTemplate) {
    if (variantStart < genomicCodingStart) {
        // Variant starts 5' of the coding region
        if (transcriptStart < genomicCodingStart) { // transcript actually has a 5' UTR
            SoNames.add("5_prime_UTR_variant");
        }
        if (variantEnd >= genomicCodingStart) { // deletion reaching into the CDS removes the initiator codon
            SoNames.add("initiator_codon_variant");
            SoNames.add("coding_sequence_variant");
        }
        return;
    }
    if (variantStart > genomicCodingEnd) {
        // Variant lies entirely 3' of the coding region
        if (transcriptEnd > genomicCodingEnd) { // transcript actually has a 3' UTR
            SoNames.add("3_prime_UTR_variant");
        }
        return;
    }
    // Variant start falls within the coding region
    if (cdnaVariantStart != null) { // null when variantStart falls in an intron
        int cdsVariantStart = cdnaVariantStart - cdnaCodingStart + 1;
        consequenceTypeTemplate.setCdsPosition(cdsVariantStart);
        consequenceTypeTemplate.setaPosition((cdsVariantStart - 1) / 3);
    }
    SoNames.add("coding_sequence_variant");
    if (variantEnd <= genomicCodingEnd) {
        // Variant end also within the coding region: do the codon-level analysis
        solvePositiveCodingEffect(splicing, transcriptSequence, cdnaCodingStart, cdnaCodingEnd, cdnaVariantStart,
                cdnaVariantEnd, variantRef, variantAlt, SoNames, consequenceTypeTemplate);
    } else {
        // Variant runs past the stop codon
        if (transcriptEnd > genomicCodingEnd) { // transcript actually has a 3' UTR
            SoNames.add("3_prime_UTR_variant");
        }
        SoNames.add("stop_lost");
    }
}
/**
 * Dispatches annotation for a variant overlapping a coding, negative-strand transcript.
 * Mirror of solveCodingPositiveTranscriptEffect with genomic coordinates reversed: the
 * 5' UTR sits at the high-coordinate end and the 3' UTR at the low-coordinate end.
 *
 * Fix: the final 3' UTR check previously read {@code transcriptStart > genomicCodingStart},
 * which can never be true (the coding region is contained within the transcript, so
 * transcriptStart <= genomicCodingStart always) and contradicted both the identical check a
 * few lines above and the mirrored positive-strand method; it now reads {@code <}.
 */
private void solveCodingNegativeTranscriptEffect(Boolean splicing, String transcriptSequence, Integer transcriptStart, Integer transcriptEnd, Integer genomicCodingStart,
                                                 Integer genomicCodingEnd, Integer variantStart, Integer variantEnd,
                                                 Integer cdnaCodingStart, Integer cdnaCodingEnd, Integer cdnaVariantStart,
                                                 Integer cdnaVariantEnd, String variantRef, String variantAlt,
                                                 HashSet<String> SoNames, ConsequenceType consequenceTypeTemplate) {
    if (variantEnd > genomicCodingEnd) {
        if (transcriptEnd > genomicCodingEnd) { // Check transcript has 5' UTR (high-coordinate end on the - strand)
            SoNames.add("5_prime_UTR_variant");
        }
        if (variantStart <= genomicCodingEnd) { // Deletion that removes the initiator codon
            SoNames.add("initiator_codon_variant");
            SoNames.add("coding_sequence_variant");
        }
    } else {
        if (variantEnd >= genomicCodingStart) { // Variant end within the coding region
            if (cdnaVariantStart != null) { // cdnaVariantStart may be null if variantEnd falls in an intron
                int cdsVariantStart = cdnaVariantStart - cdnaCodingStart + 1;
                consequenceTypeTemplate.setCdsPosition(cdsVariantStart);
                consequenceTypeTemplate.setaPosition((cdsVariantStart - 1) / 3);
            }
            SoNames.add("coding_sequence_variant");
            if (variantStart >= genomicCodingStart) { // Variant start also within the coding region
                solveNegativeCodingEffect(splicing, transcriptSequence, cdnaCodingStart, cdnaCodingEnd, cdnaVariantStart,
                        cdnaVariantEnd, variantRef, variantAlt, SoNames, consequenceTypeTemplate);
            } else { // Variant runs past the stop codon (low-coordinate end)
                if (transcriptStart < genomicCodingStart) { // Check transcript has 3' UTR
                    SoNames.add("3_prime_UTR_variant");
                }
                SoNames.add("stop_lost");
            }
        } else {
            // Variant lies entirely on the 3' (low-coordinate) side of the coding region.
            // Fixed comparison — was '>' (unreachable), now matches the 3' UTR check above.
            if (transcriptStart < genomicCodingStart) {
                SoNames.add("3_prime_UTR_variant");
            }
        }
    }
}
/**
 * Annotates splice-related SO terms for the intron delimited by spliceSite1 (first
 * intronic position, left end) and spliceSite2 (last intronic position, right end).
 * On return, junctionSolution[0] is true when the variant hits a splice region and
 * junctionSolution[1] is true when the variant lies entirely inside the intron.
 */
private void solveJunction(Integer spliceSite1, Integer spliceSite2, Integer variantStart, Integer variantEnd, HashSet<String> SoNames,
                           String leftSpliceSiteTag, String rightSpliceSiteTag, Boolean[] junctionSolution) {
    junctionSolution[0] = false;
    junctionSolution[1] = false;
    // Any overlap with the intron itself
    if (regionsOverlap(spliceSite1, spliceSite2, variantStart, variantEnd)) {
        SoNames.add("intron_variant");
        boolean fullyIntronic = (variantStart >= spliceSite1) && (variantEnd <= spliceSite2);
        if (fullyIntronic) {
            junctionSolution[1] = true;
        }
    }
    // Left splice region: 3 exonic nt + first 8 intronic nt
    if (regionsOverlap(spliceSite1 - 3, spliceSite1 + 7, variantStart, variantEnd)) {
        SoNames.add("splice_region_variant");
        junctionSolution[0] = true;
        if (regionsOverlap(spliceSite1, spliceSite1 + 1, variantStart, variantEnd)) {
            SoNames.add(leftSpliceSiteTag); // donor/acceptor depending on transcript strand
        }
    }
    // Right splice region, mirrored
    if (regionsOverlap(spliceSite2 - 7, spliceSite2 + 3, variantStart, variantEnd)) {
        SoNames.add("splice_region_variant");
        junctionSolution[0] = true;
        if (regionsOverlap(spliceSite2 - 1, spliceSite2, variantStart, variantEnd)) {
            SoNames.add(rightSpliceSiteTag); // donor/acceptor depending on transcript strand
        }
    }
}
/**
 * Computes all consequence types (SO terms) for the given variant: per-transcript
 * annotation for every gene overlapping the variant +-5kb, regulatory-region
 * annotation, and an "intergenic_variant" fallback when nothing else applies.
 *
 * Fixes:
 * - numResults previously reported SoNames.size() — the residual per-transcript SO-name
 *   set of the LAST transcript processed — rather than the size of the returned list;
 * - removed a dead debugging stub ({@code else { int b; b = 1; } }), a large block of
 *   commented-out tabix code, and locals that were assigned but never read.
 *
 * @param variant genomic variant (chromosome/position/reference/alternative) to annotate
 * @param options query options forwarded to the underlying Mongo queries
 * @return QueryResult whose result is a List of ConsequenceType, one entry per (transcript, SO name)
 */
@Override
public QueryResult getAllConsequenceTypesByVariant(GenomicVariant variant, QueryOptions options) {
    Logger logger = LoggerFactory.getLogger(this.getClass());
    HashSet<String> SoNames = new HashSet<>();
    List<ConsequenceType> consequenceTypeList = new ArrayList<>();
    QueryResult queryResult = new QueryResult();
    BasicDBList transcriptInfoList;
    BasicDBObject transcriptInfo;
    BasicDBObject geneInfo;
    BasicDBObject regulatoryInfo;
    Integer transcriptStart, transcriptEnd;
    Integer variantStart = variant.getPosition();
    // TODO: Check deletion input format to ensure that variantEnd is correctly calculated
    Integer variantEnd = variant.getPosition() + variant.getReference().length() - 1;
    String transcriptStrand;
    String regulatoryType;
    String ensemblTranscriptId;
    int transcriptBiotype;
    long dbTimeStart, dbTimeEnd;
    Boolean exonVariant, TFBSFound;
    int i;
    ConsequenceType consequenceTypeTemplate = new ConsequenceType();
    // Get all genes surrounding the variant +-5kb. variantEnd is used rather than
    // variant.getPosition() to account for deletions whose end falls within the 5kb
    // left flank of the gene.
    QueryBuilder builderGene = QueryBuilder.start("chromosome").is(variant.getChromosome()).and("end")
            .greaterThanEquals(variant.getPosition() - 5000).and("start").lessThanEquals(variantEnd + 5000);
    // Get all regulatory regions overlapping the variant (chunk-based lookup)
    String chunkId = getChunkPrefix(variant.getChromosome(), variant.getPosition(), regulatoryChunkSize);
    BasicDBList chunksId = new BasicDBList();
    chunksId.add(chunkId);
    QueryBuilder builderRegulatory = QueryBuilder.start("chunkIds").in(chunksId).and("start").lessThanEquals(variantEnd).and("end")
            .greaterThanEquals(variant.getPosition());
    // Execute both queries and measure DB time
    mongoDBCollection = db.getCollection("gene");
    dbTimeStart = System.currentTimeMillis();
    QueryResult geneQueryResult = executeQuery(variant.toString(), builderGene.get(), options);
    mongoDBCollection = db.getCollection("regulatory_region");
    QueryResult regulatoryQueryResult = executeQuery(variant.toString(), builderRegulatory.get(), options);
    dbTimeEnd = System.currentTimeMillis();
    BasicDBList geneInfoList = (BasicDBList) geneQueryResult.getResult();
    for (Object geneInfoObject : geneInfoList) {
        geneInfo = (BasicDBObject) geneInfoObject;
        consequenceTypeTemplate.setGeneName((String) geneInfo.get("name"));
        consequenceTypeTemplate.setEnsemblGeneId((String) geneInfo.get("id"));
        transcriptInfoList = (BasicDBList) geneInfo.get("transcripts");
        for (Object transcriptInfoObject : transcriptInfoList) {
            transcriptInfo = (BasicDBObject) transcriptInfoObject;
            ensemblTranscriptId = (String) transcriptInfo.get("id");
            transcriptStart = (Integer) transcriptInfo.get("start");
            transcriptEnd = (Integer) transcriptInfo.get("end");
            transcriptStrand = (String) transcriptInfo.get("strand");
            try {
                transcriptBiotype = biotypes.get((String) transcriptInfo.get("biotype"));
            } catch (NullPointerException e) {
                // Biotype missing from the hardcoded map: fall back to non_coding (45)
                logger.info("WARNING: biotype not found within the list of hardcoded biotypes - " + transcriptInfo.get("biotype"));
                logger.info("WARNING: transcript: " + ensemblTranscriptId);
                logger.info("WARNING: setting transcript biotype to non_coding ");
                transcriptBiotype = 45;
            }
            // The SO-name set and the template are reused across transcripts: reset them
            SoNames.clear();
            consequenceTypeTemplate.setEnsemblTranscriptId(ensemblTranscriptId);
            consequenceTypeTemplate.setcDnaPosition(null);
            consequenceTypeTemplate.setCdsPosition(null);
            consequenceTypeTemplate.setaPosition(null);
            consequenceTypeTemplate.setaChange(null);
            consequenceTypeTemplate.setCodon(null);
            consequenceTypeTemplate.setStrand((String) geneInfo.get("strand"));
            consequenceTypeTemplate.setBiotype((String) transcriptInfo.get("biotype"));
            if (transcriptStrand.equals("+")) {
                solveTranscriptFlankingRegions(SoNames, transcriptStart, transcriptEnd, variantStart, variantEnd,
                        "upstream_gene_variant", "downstream_gene_variant");
                // Check variant falls within transcript start/end coordinates
                if (regionsOverlap(transcriptStart, transcriptEnd, variantStart, variantEnd)) {
                    switch (transcriptBiotype) {
                        // Protein-coding-like biotypes: full codon-level annotation
                        case 1:
                        case 2:
                        case 3:
                        case 4:
                        case 5:
                        case 6:
                        case 7:
                        case 16:
                        case 20:
                        case 21:
                        case 23:
                        case 24:
                        case 35:
                        case 36:
                        case 51: // LRG_gene
                            solveCodingPositiveTranscript(variant, SoNames, transcriptInfo, transcriptStart,
                                    transcriptEnd, variantStart, variantEnd, consequenceTypeTemplate);
                            for (String SoName : SoNames) {
                                consequenceTypeList.add(new ConsequenceType(consequenceTypeTemplate.getGeneName(),
                                        consequenceTypeTemplate.getEnsemblGeneId(),
                                        consequenceTypeTemplate.getEnsemblTranscriptId(),
                                        consequenceTypeTemplate.getStrand(),
                                        consequenceTypeTemplate.getBiotype(),
                                        consequenceTypeTemplate.getcDnaPosition(),
                                        consequenceTypeTemplate.getCdsPosition(),
                                        consequenceTypeTemplate.getaPosition(),
                                        consequenceTypeTemplate.getaChange(),
                                        consequenceTypeTemplate.getCodon(), SoName));
                            }
                            break;
                        case 30:
                            SoNames.add("NMD_transcript_variant");
                            // intentional fall-through: NMD transcripts also get non-coding annotation
                        case 0:
                        case 17:
                        case 18:
                        case 19:
                        case 22: // processed_transcript
                        case 25:
                        case 26:
                        case 27:
                        case 28:
                        case 29:
                        case 31: // unprocessed_pseudogene
                        case 32: // transcribed_unprocessed_pseudogene
                        case 37: // transcribed_processed_pseudogene
                        case 33:
                        case 34:
                        case 38:
                        case 39:
                        case 40:
                        case 41:
                        case 42:
                        case 43:
                        case 44:
                        case 45:
                        case 46:
                        case 47:
                        case 48:
                        case 49:
                        case 50:
                            SoNames.add("non_coding_transcript_variant");
                            exonVariant = solveNonCodingPositiveTranscript(variant, SoNames, transcriptInfo,
                                    transcriptStart, transcriptEnd, variantStart, variantEnd, consequenceTypeTemplate);
                            if (transcriptBiotype == 18 && exonVariant) { // miRNA hit in an exon
                                SoNames.add("mature_miRNA_variant");
                            }
                            for (String SoName : SoNames) {
                                consequenceTypeList.add(new ConsequenceType(consequenceTypeTemplate.getGeneName(),
                                        consequenceTypeTemplate.getEnsemblGeneId(),
                                        consequenceTypeTemplate.getEnsemblTranscriptId(),
                                        consequenceTypeTemplate.getStrand(),
                                        consequenceTypeTemplate.getBiotype(),
                                        consequenceTypeTemplate.getcDnaPosition(), SoName));
                            }
                            break;
                    }
                } else { // Variant does not overlap gene region, just has upstream/downstream annotations
                    for (String SoName : SoNames) {
                        consequenceTypeList.add(new ConsequenceType(consequenceTypeTemplate.getGeneName(),
                                consequenceTypeTemplate.getEnsemblGeneId(),
                                consequenceTypeTemplate.getEnsemblTranscriptId(),
                                consequenceTypeTemplate.getStrand(),
                                consequenceTypeTemplate.getBiotype(), SoName));
                    }
                }
            } else { // Negative-strand transcript: up/downstream tags are swapped
                solveTranscriptFlankingRegions(SoNames, transcriptStart, transcriptEnd, variantStart,
                        variantEnd, "downstream_gene_variant",
                        "upstream_gene_variant");
                // Check variant falls within transcript start/end coordinates
                if (regionsOverlap(transcriptStart, transcriptEnd, variantStart, variantEnd)) {
                    switch (transcriptBiotype) {
                        // Protein-coding-like biotypes: full codon-level annotation
                        case 1:
                        case 2:
                        case 3:
                        case 4:
                        case 5:
                        case 6:
                        case 7:
                        case 16:
                        case 20:
                        case 21:
                        case 23:
                        case 24:
                        case 35:
                        case 36:
                        case 51: // LRG_gene
                            solveCodingNegativeTranscript(variant, SoNames, transcriptInfo,
                                    transcriptStart, transcriptEnd, variantStart, variantEnd, consequenceTypeTemplate);
                            for (String SoName : SoNames) {
                                consequenceTypeList.add(new ConsequenceType(consequenceTypeTemplate.getGeneName(),
                                        consequenceTypeTemplate.getEnsemblGeneId(),
                                        consequenceTypeTemplate.getEnsemblTranscriptId(),
                                        consequenceTypeTemplate.getStrand(),
                                        consequenceTypeTemplate.getBiotype(),
                                        consequenceTypeTemplate.getcDnaPosition(),
                                        consequenceTypeTemplate.getCdsPosition(),
                                        consequenceTypeTemplate.getaPosition(),
                                        consequenceTypeTemplate.getaChange(),
                                        consequenceTypeTemplate.getCodon(), SoName));
                            }
                            break;
                        case 30:
                            SoNames.add("NMD_transcript_variant");
                            // intentional fall-through: NMD transcripts also get non-coding annotation
                        case 0:
                        case 17:
                        case 18:
                        case 19:
                        case 22: // processed_transcript
                        case 25:
                        case 26:
                        case 27:
                        case 28:
                        case 29:
                        case 31: // unprocessed_pseudogene
                        case 32: // transcribed_unprocessed_pseudogene
                        case 37: // transcribed_processed_pseudogene
                        case 33:
                        case 34:
                        case 38:
                        case 39:
                        case 40:
                        case 41:
                        case 42:
                        case 43:
                        case 44:
                        case 45:
                        case 46:
                        case 47:
                        case 48:
                        case 49:
                        case 50:
                            SoNames.add("non_coding_transcript_variant");
                            exonVariant = solveNonCodingNegativeTranscript(variant, SoNames, transcriptInfo,
                                    transcriptStart, transcriptEnd, variantStart, variantEnd, consequenceTypeTemplate);
                            if (transcriptBiotype == 18 && exonVariant) { // miRNA hit in an exon
                                SoNames.add("mature_miRNA_variant");
                            }
                            for (String SoName : SoNames) {
                                consequenceTypeList.add(new ConsequenceType(consequenceTypeTemplate.getGeneName(),
                                        consequenceTypeTemplate.getEnsemblGeneId(),
                                        consequenceTypeTemplate.getEnsemblTranscriptId(),
                                        consequenceTypeTemplate.getStrand(),
                                        consequenceTypeTemplate.getBiotype(),
                                        consequenceTypeTemplate.getcDnaPosition(), SoName));
                            }
                            break;
                    }
                } else { // Variant does not overlap gene region, just has upstream/downstream annotations
                    for (String SoName : SoNames) {
                        consequenceTypeList.add(new ConsequenceType(consequenceTypeTemplate.getGeneName(),
                                consequenceTypeTemplate.getEnsemblGeneId(),
                                consequenceTypeTemplate.getEnsemblTranscriptId(),
                                consequenceTypeTemplate.getStrand(),
                                consequenceTypeTemplate.getBiotype(), SoName));
                    }
                }
            }
        }
    }
    BasicDBList regulatoryInfoList = (BasicDBList) regulatoryQueryResult.getResult();
    if (!regulatoryInfoList.isEmpty()) {
        consequenceTypeList.add(new ConsequenceType("regulatory_region_variant"));
        // Scan the regulatory hits until a transcription-factor binding site is found
        i = 0;
        do {
            regulatoryInfo = (BasicDBObject) regulatoryInfoList.get(i);
            regulatoryType = (String) regulatoryInfo.get("featureType");
            TFBSFound = regulatoryType.equals("TF_binding_site") || regulatoryType.equals("TF_binding_site_motif");
            i++;
        } while (i < regulatoryInfoList.size() && !TFBSFound);
        if (TFBSFound) {
            consequenceTypeList.add(new ConsequenceType("TF_binding_site_variant"));
        }
    }
    if (consequenceTypeList.size() == 0) {
        consequenceTypeList.add(new ConsequenceType("intergenic_variant"));
    }
    // setting queryResult fields
    queryResult.setId(variant.toString());
    queryResult.setDBTime((dbTimeEnd - dbTimeStart));
    // Report the size of the list actually returned (was SoNames.size(), i.e. the
    // leftover SO-name set of the last transcript processed)
    queryResult.setNumResults(consequenceTypeList.size());
    queryResult.setResult(consequenceTypeList);
    return queryResult;
}
/**
 * Adds flanking-region SO terms when the variant overlaps the 5kb windows on either
 * side of the transcript. The caller swaps leftRegionTag/rightRegionTag according to
 * the transcript strand (upstream vs downstream). A hit within 2kb additionally adds
 * the plain tag on top of the "5KB_"-prefixed one.
 */
private void solveTranscriptFlankingRegions(HashSet<String> SoNames, Integer transcriptStart,
                                            Integer transcriptEnd, Integer variantStart, Integer variantEnd,
                                            String leftRegionTag, String rightRegionTag) {
    // Left flank: [transcriptStart-5000, transcriptStart-1]
    boolean inLeft5kb = regionsOverlap(transcriptStart - 5000, transcriptStart - 1, variantStart, variantEnd);
    if (inLeft5kb) {
        SoNames.add("5KB_" + leftRegionTag);
        boolean inLeft2kb = regionsOverlap(transcriptStart - 2000, transcriptStart - 1, variantStart, variantEnd);
        if (inLeft2kb) {
            SoNames.add(leftRegionTag);
        }
    }
    // Right flank: [transcriptEnd+1, transcriptEnd+5000]
    boolean inRight5kb = regionsOverlap(transcriptEnd + 1, transcriptEnd + 5000, variantStart, variantEnd);
    if (inRight5kb) {
        SoNames.add("5KB_" + rightRegionTag);
        boolean inRight2kb = regionsOverlap(transcriptEnd + 1, transcriptEnd + 2000, variantStart, variantEnd);
        if (inRight2kb) {
            SoNames.add(rightRegionTag);
        }
    }
}
/**
 * Annotates SO consequence-type names for a variant overlapping a coding transcript on the
 * POSITIVE strand. Exons are walked 5'->3' while the variant's cDNA start/end coordinates are
 * accumulated and each exon junction is checked for splice-site overlap. Unless the junction
 * scan flagged junctionSolution[1] (presumably "variant lies wholly within an intron" —
 * TODO confirm against solveJunction), the coding effect is delegated to
 * solveCodingPositiveTranscriptEffect.
 *
 * @param variant                 variant under annotation; reference/alternative alleles are
 *                                forwarded to the effect solver
 * @param SoNames                 output set of SO term names, mutated in place
 * @param transcriptInfo          Mongo document holding "genomicCodingStart"/"genomicCodingEnd",
 *                                "cdnaCodingStart"/"cdnaCodingEnd" and the "exons" list; each
 *                                exon document carries "start", "end" and "sequence"
 * @param transcriptStart         genomic transcript start
 * @param transcriptEnd           genomic transcript end
 * @param variantStart            genomic variant start
 * @param variantEnd              genomic variant end
 * @param consequenceTypeTemplate template whose cDNA position is set as a side effect
 */
private void solveCodingPositiveTranscript(GenomicVariant variant, HashSet<String> SoNames,
                                           BasicDBObject transcriptInfo, Integer transcriptStart,
                                           Integer transcriptEnd, Integer variantStart, Integer variantEnd,
                                           ConsequenceType consequenceTypeTemplate) {
    Integer genomicCodingStart;
    Integer genomicCodingEnd;
    Integer cdnaCodingStart;
    Integer cdnaCodingEnd;
    BasicDBList exonInfoList;
    BasicDBObject exonInfo;
    Integer exonStart;
    Integer exonEnd;
    String transcriptSequence;
    Boolean variantAhead;
    Integer cdnaExonEnd;
    Integer cdnaVariantStart;
    Integer cdnaVariantEnd;
    Boolean splicing;
    int exonCounter;
    Integer prevSpliceSite;
    Boolean[] junctionSolution = {false, false};

    genomicCodingStart = (Integer) transcriptInfo.get("genomicCodingStart");
    genomicCodingEnd = (Integer) transcriptInfo.get("genomicCodingEnd");
    cdnaCodingStart = (Integer) transcriptInfo.get("cdnaCodingStart");
    cdnaCodingEnd = (Integer) transcriptInfo.get("cdnaCodingEnd");
    exonInfoList = (BasicDBList) transcriptInfo.get("exons");

    // Seed the scan with the first (5'-most) exon.
    exonInfo = (BasicDBObject) exonInfoList.get(0);
    exonStart = (Integer) exonInfo.get("start");
    exonEnd = (Integer) exonInfo.get("end");
    transcriptSequence = (String) exonInfo.get("sequence");
    variantAhead = true; // we need a first iteration within the while to ensure junction is solved in case needed
    cdnaExonEnd = (exonEnd - exonStart + 1); // cDNA coordinate of this exon's last base
    cdnaVariantStart = null;
    cdnaVariantEnd = null;
    junctionSolution[0] = false;
    junctionSolution[1] = false;
    splicing = false;

    // First exon: locate the variant's cDNA coordinates if it starts/ends inside this exon.
    if(variantStart >= exonStart) {
        if(variantStart <= exonEnd) { // Variant start within the exon
            cdnaVariantStart = cdnaExonEnd - (exonEnd - variantStart);
            consequenceTypeTemplate.setcDnaPosition(cdnaVariantStart);
            if(variantEnd <= exonEnd) {
                cdnaVariantEnd = cdnaExonEnd - (exonEnd - variantEnd);
            }
        }
    } else {
        if(variantEnd <= exonEnd) {
            // We do not contemplate that variant end can be located before this exon since this is the first exon
            cdnaVariantEnd = cdnaExonEnd - (exonEnd - variantEnd);
        } // Variant includes the whole exon. Variant start is located before the exon, variant end is located after the exon
    }

    // Walk the remaining exons; stop once a splice site is hit or the variant has been passed.
    exonCounter = 1;
    while(exonCounter<exonInfoList.size() && !splicing && variantAhead) { // This is not a do-while since we cannot call solveJunction until
        exonInfo = (BasicDBObject) exonInfoList.get(exonCounter); // next exon has been loaded
        exonStart = (Integer) exonInfo.get("start");
        prevSpliceSite = exonEnd+1; // first intronic base after the previous exon
        exonEnd = (Integer) exonInfo.get("end");
        transcriptSequence = transcriptSequence + ((String) exonInfo.get("sequence"));
        // Check the intron between the previous exon and this one for splice-site overlap.
        solveJunction(prevSpliceSite, exonStart-1, variantStart, variantEnd, SoNames,
                "splice_donor_variant", "splice_acceptor_variant", junctionSolution);
        splicing = (splicing || junctionSolution[0]);

        if(variantStart >= exonStart) {
            cdnaExonEnd += (exonEnd - exonStart + 1);
            if(variantStart <= exonEnd) { // Variant start within the exon
                cdnaVariantStart = cdnaExonEnd - (exonEnd - variantStart);
                consequenceTypeTemplate.setcDnaPosition(cdnaVariantStart);
                if(variantEnd <= exonEnd) {
                    cdnaVariantEnd = cdnaExonEnd - (exonEnd - variantEnd);
                }
            }
        } else {
            if(variantEnd <= exonEnd) {
                if(variantEnd >= exonStart) {
                    cdnaVariantEnd = cdnaExonEnd - (exonEnd - variantEnd);
                } else { // Variant does not include this exon, variant is located before this exon
                    variantAhead = false;
                }
            } else { // Variant includes the whole exon. Variant start is located before the exon, variant end is located after the exon
                cdnaExonEnd += (exonEnd - exonStart + 1);
            }
        }
        exonCounter++;
    }

    // Annotate the coding effect unless the junction scan short-circuited it.
    if(!junctionSolution[1]) {
        solveCodingPositiveTranscriptEffect(splicing, transcriptSequence, transcriptStart, transcriptEnd, genomicCodingStart, genomicCodingEnd,
                variantStart, variantEnd, cdnaCodingStart, cdnaCodingEnd, cdnaVariantStart, cdnaVariantEnd,
                variant.getReference(), variant.getAlternative(), SoNames, consequenceTypeTemplate);
    }
}
/**
 * Annotates SO consequence-type names for a variant overlapping a coding transcript on the
 * NEGATIVE strand. Mirrors solveCodingPositiveTranscript but walks the exon list in
 * transcript order (highest genomic coordinates first) and builds the transcript sequence by
 * prepending each new exon. On this strand the cDNA start of the variant corresponds to the
 * genomic variantEnd and the cDNA end to the genomic variantStart (see the comments on the
 * local declarations below).
 *
 * Fix: in the first-exon branch where the variant extends past the exon (variantEnd > exonEnd)
 * while its start lies inside it, cdnaVariantEnd was computed from variantEnd instead of
 * variantStart — contradicting both the declaration comment ("cdnaVariantEnd points to the
 * same base as variantStart") and the equivalent branch inside the exon loop.
 *
 * @param variant                 variant under annotation; alleles forwarded to the effect solver
 * @param SoNames                 output set of SO term names, mutated in place
 * @param transcriptInfo          Mongo document with coding coordinates and the "exons" list
 * @param transcriptStart         genomic transcript start
 * @param transcriptEnd           genomic transcript end
 * @param variantStart            genomic variant start
 * @param variantEnd              genomic variant end
 * @param consequenceTypeTemplate template whose cDNA position is set as a side effect
 */
private void solveCodingNegativeTranscript(GenomicVariant variant, HashSet<String> SoNames,
                                           BasicDBObject transcriptInfo, Integer transcriptStart,
                                           Integer transcriptEnd, Integer variantStart, Integer variantEnd,
                                           ConsequenceType consequenceTypeTemplate) {
    Integer genomicCodingStart;
    Integer genomicCodingEnd;
    Integer cdnaCodingStart;
    Integer cdnaCodingEnd;
    BasicDBList exonInfoList;
    BasicDBObject exonInfo;
    Integer exonStart;
    Integer exonEnd;
    String transcriptSequence;
    Boolean variantAhead;
    Integer cdnaExonEnd;
    Integer cdnaVariantStart;
    Integer cdnaVariantEnd;
    Boolean splicing;
    int exonCounter;
    Integer prevSpliceSite;
    Boolean[] junctionSolution = {false, false};

    genomicCodingStart = (Integer) transcriptInfo.get("genomicCodingStart");
    genomicCodingEnd = (Integer) transcriptInfo.get("genomicCodingEnd");
    cdnaCodingStart = (Integer) transcriptInfo.get("cdnaCodingStart");
    cdnaCodingEnd = (Integer) transcriptInfo.get("cdnaCodingEnd");
    exonInfoList = (BasicDBList) transcriptInfo.get("exons");

    // Seed the scan with the first exon in transcript order (genomically the last one).
    exonInfo = (BasicDBObject) exonInfoList.get(0);
    exonStart = (Integer) exonInfo.get("start");
    exonEnd = (Integer) exonInfo.get("end");
    transcriptSequence = (String) exonInfo.get("sequence");
    variantAhead = true; // we need a first iteration within the while to ensure junction is solved in case needed
    cdnaExonEnd = (exonEnd-exonStart+1); // cdnaExonEnd points to the same base as exonStart
    cdnaVariantStart = null; // cdnaVariantStart points to the same base as variantEnd
    cdnaVariantEnd = null; // cdnaVariantEnd points to the same base as variantStart
    junctionSolution[0] = false;
    junctionSolution[1] = false;
    splicing = false;

    // First exon: locate the variant's cDNA coordinates if it starts/ends inside this exon.
    if(variantEnd <= exonEnd) {
        if(variantEnd >= exonStart) { // Variant end within the exon
            cdnaVariantStart = cdnaExonEnd - (variantEnd - exonStart);
            consequenceTypeTemplate.setcDnaPosition(cdnaVariantStart);
            if(variantStart >= exonStart) {
                cdnaVariantEnd = cdnaExonEnd - (variantStart - exonStart);
            }
        }
    } else {
        if(variantStart >= exonStart) {
            // Variant end extends beyond this (first) exon while the variant start lies inside
            // it; cdnaVariantEnd tracks variantStart on the negative strand (was: variantEnd).
            cdnaVariantEnd = cdnaExonEnd - (variantStart - exonStart);
        } // Variant includes the whole exon. Variant end is located after the exon, variant start is located before the exon
    }

    // Walk the remaining exons; stop once a splice site is hit or the variant has been passed.
    exonCounter = 1;
    while(exonCounter<exonInfoList.size() && !splicing && variantAhead) { // This is not a do-while since we cannot call solveJunction until
        exonInfo = (BasicDBObject) exonInfoList.get(exonCounter); // next exon has been loaded
        prevSpliceSite = exonStart-1; // first intronic base before the previous exon (genomically)
        exonStart = (Integer) exonInfo.get("start");
        exonEnd = (Integer) exonInfo.get("end");
        transcriptSequence = ((String) exonInfo.get("sequence"))+transcriptSequence;
        // Check the intron between this exon and the previous one; donor/acceptor roles are
        // swapped relative to the positive strand.
        solveJunction(exonEnd+1, prevSpliceSite, variantStart, variantEnd, SoNames,
                "splice_acceptor_variant", "splice_donor_variant", junctionSolution);
        splicing = (splicing || junctionSolution[0]);

        if(variantEnd <= exonEnd) {
            cdnaExonEnd += (exonEnd - exonStart + 1);
            if(variantEnd >= exonStart) { // Variant end within the exon
                cdnaVariantStart = cdnaExonEnd - (variantEnd - exonStart);
                consequenceTypeTemplate.setcDnaPosition(cdnaVariantStart);
                if(variantStart >= exonStart) {
                    cdnaVariantEnd = cdnaExonEnd - (variantStart - exonStart);
                }
            }
        } else {
            if(variantStart >= exonStart) {
                if(variantStart <= exonEnd) {
                    cdnaVariantEnd = cdnaExonEnd - (variantStart - exonStart);
                } else { // Variant does not include this exon, variant is located after this exon (genomically)
                    variantAhead = false;
                }
            } else { // Variant includes the whole exon. Variant start is located before the exon, variant end is located after the exon
                cdnaExonEnd += (exonEnd - exonStart + 1);
            }
        }
        exonCounter++;
    }

    // Annotate the coding effect unless the junction scan short-circuited it.
    if(!junctionSolution[1]) {
        solveCodingNegativeTranscriptEffect(splicing, transcriptSequence, transcriptStart, transcriptEnd, genomicCodingStart, genomicCodingEnd,
                variantStart, variantEnd, cdnaCodingStart, cdnaCodingEnd, cdnaVariantStart, cdnaVariantEnd,
                variant.getReference(), variant.getAlternative(), SoNames, consequenceTypeTemplate);
    }
}
/**
 * Annotates a variant overlapping a NON-coding transcript on the POSITIVE strand. Performs the
 * same 5'->3' exon walk and junction check as the coding version, but only records the cDNA
 * position and, when the variant is not flagged by junctionSolution[1] (presumably "entirely
 * intronic" — TODO confirm against solveJunction), adds "non_coding_transcript_exon_variant".
 *
 * @param variant                 variant under annotation (unused directly here beyond coordinates)
 * @param SoNames                 output set of SO term names, mutated in place
 * @param transcriptInfo          Mongo document with the "exons" list
 * @param transcriptStart         genomic transcript start
 * @param transcriptEnd           genomic transcript end
 * @param variantStart            genomic variant start
 * @param variantEnd              genomic variant end
 * @param consequenceTypeTemplate template whose cDNA position is set as a side effect
 * @return the value of junctionSolution[1] after the scan
 */
private Boolean solveNonCodingPositiveTranscript(GenomicVariant variant, HashSet<String> SoNames,
                                                 BasicDBObject transcriptInfo, Integer transcriptStart,
                                                 Integer transcriptEnd, Integer variantStart, Integer variantEnd,
                                                 ConsequenceType consequenceTypeTemplate) {
    BasicDBList exonInfoList;
    BasicDBObject exonInfo;
    Integer exonStart;
    Integer exonEnd;
    String transcriptSequence;
    Boolean variantAhead;
    Integer cdnaExonEnd;
    Boolean splicing;
    int exonCounter;
    Integer prevSpliceSite;
    Boolean[] junctionSolution = {false, false};

    exonInfoList = (BasicDBList) transcriptInfo.get("exons");
    // Seed the scan with the first (5'-most) exon.
    exonInfo = (BasicDBObject) exonInfoList.get(0);
    exonStart = (Integer) exonInfo.get("start");
    exonEnd = (Integer) exonInfo.get("end");
    transcriptSequence = (String) exonInfo.get("sequence");
    variantAhead = true; // we need a first iteration within the while to ensure junction is solved in case needed
    cdnaExonEnd = (exonEnd - exonStart + 1); // cDNA coordinate of this exon's last base
    junctionSolution[0] = false;
    junctionSolution[1] = false;
    splicing = false;

    if(variantStart >= exonStart) {
        if(variantStart <= exonEnd) { // Variant start within the exon. Set cdnaPosition in consequenceTypeTemplate
            consequenceTypeTemplate.setcDnaPosition(cdnaExonEnd - (exonEnd - variantStart));
        }
    }

    // Walk the remaining exons; stop once a splice site is hit or the variant has been passed.
    exonCounter = 1;
    while(exonCounter<exonInfoList.size() && !splicing && variantAhead) { // This is not a do-while since we cannot call solveJunction until
        exonInfo = (BasicDBObject) exonInfoList.get(exonCounter); // next exon has been loaded
        exonStart = (Integer) exonInfo.get("start");
        prevSpliceSite = exonEnd+1; // first intronic base after the previous exon
        exonEnd = (Integer) exonInfo.get("end");
        transcriptSequence = transcriptSequence + ((String) exonInfo.get("sequence"));
        solveJunction(prevSpliceSite, exonStart-1, variantStart, variantEnd, SoNames,
                "splice_donor_variant", "splice_acceptor_variant", junctionSolution);
        splicing = (splicing || junctionSolution[0]);

        if(variantStart >= exonStart) {
            cdnaExonEnd += (exonEnd - exonStart + 1);
            if(variantStart <= exonEnd) { // Variant start within the exon. Set cdnaPosition in consequenceTypeTemplate
                consequenceTypeTemplate.setcDnaPosition(cdnaExonEnd - (exonEnd - variantStart));
            }
        } else {
            if(variantEnd <= exonEnd) {
                if(variantEnd < exonStart) { // Variant does not include this exon, variant is located before this exon
                    variantAhead = false;
                }
            } else { // Variant includes the whole exon. Variant start is located before the exon, variant end is located after the exon
                cdnaExonEnd += (exonEnd - exonStart + 1);
            }
        }
        exonCounter++;
    }

    if(!junctionSolution[1]) {
        SoNames.add("non_coding_transcript_exon_variant");
    }
    return junctionSolution[1];
}
/**
 * Annotates a variant overlapping a NON-coding transcript on the NEGATIVE strand. Mirrors
 * solveNonCodingPositiveTranscript, walking exons in transcript order (highest genomic
 * coordinates first) with donor/acceptor roles swapped; adds
 * "non_coding_transcript_exon_variant" unless junctionSolution[1] was set (presumably
 * "entirely intronic" — TODO confirm against solveJunction).
 *
 * @param variant                 variant under annotation (unused directly here beyond coordinates)
 * @param SoNames                 output set of SO term names, mutated in place
 * @param transcriptInfo          Mongo document with the "exons" list
 * @param transcriptStart         genomic transcript start
 * @param transcriptEnd           genomic transcript end
 * @param variantStart            genomic variant start
 * @param variantEnd              genomic variant end
 * @param consequenceTypeTemplate template whose cDNA position is set as a side effect
 * @return the value of junctionSolution[1] after the scan
 */
private Boolean solveNonCodingNegativeTranscript(GenomicVariant variant, HashSet<String> SoNames,
                                                 BasicDBObject transcriptInfo, Integer transcriptStart,
                                                 Integer transcriptEnd, Integer variantStart, Integer variantEnd,
                                                 ConsequenceType consequenceTypeTemplate) {
    BasicDBList exonInfoList;
    BasicDBObject exonInfo;
    Integer exonStart;
    Integer exonEnd;
    String transcriptSequence;
    Boolean variantAhead;
    Integer cdnaExonEnd;
    Boolean splicing;
    int exonCounter;
    Integer prevSpliceSite;
    Boolean[] junctionSolution = {false, false};

    exonInfoList = (BasicDBList) transcriptInfo.get("exons");
    // Seed the scan with the first exon in transcript order (genomically the last one).
    exonInfo = (BasicDBObject) exonInfoList.get(0);
    exonStart = (Integer) exonInfo.get("start");
    exonEnd = (Integer) exonInfo.get("end");
    transcriptSequence = (String) exonInfo.get("sequence");
    variantAhead = true; // we need a first iteration within the while to ensure junction is solved in case needed
    cdnaExonEnd = (exonEnd-exonStart+1); // cdnaExonEnd points to the same base as exonStart
    junctionSolution[0] = false;
    junctionSolution[1] = false;
    splicing = false;

    if(variantEnd <= exonEnd) {
        if(variantEnd >= exonStart) { // Variant end within the exon
            consequenceTypeTemplate.setcDnaPosition(cdnaExonEnd - (variantEnd - exonStart));
        }
    }

    // Walk the remaining exons; stop once a splice site is hit or the variant has been passed.
    exonCounter = 1;
    while(exonCounter<exonInfoList.size() && !splicing && variantAhead) { // This is not a do-while since we cannot call solveJunction until
        exonInfo = (BasicDBObject) exonInfoList.get(exonCounter); // next exon has been loaded
        prevSpliceSite = exonStart-1; // first intronic base before the previous exon (genomically)
        exonStart = (Integer) exonInfo.get("start");
        exonEnd = (Integer) exonInfo.get("end");
        transcriptSequence = ((String) exonInfo.get("sequence"))+transcriptSequence;
        solveJunction(exonEnd+1, prevSpliceSite, variantStart, variantEnd, SoNames,
                "splice_acceptor_variant", "splice_donor_variant", junctionSolution);
        splicing = (splicing || junctionSolution[0]);

        if(variantEnd <= exonEnd) {
            cdnaExonEnd += (exonEnd - exonStart + 1);
            if(variantEnd >= exonStart) { // Variant end within the exon
                consequenceTypeTemplate.setcDnaPosition(cdnaExonEnd - (variantEnd - exonStart));
            }
        } else {
            if(variantStart >= exonStart) {
                if(variantStart > exonEnd) { // Variant does not include this exon, variant is located after this exon (genomically)
                    variantAhead = false;
                }
            } else { // Variant includes the whole exon. Variant start is located before the exon, variant end is located after the exon
                cdnaExonEnd += (exonEnd - exonStart + 1);
            }
        }
        exonCounter++;
    }

    if(!junctionSolution[1]) {
        SoNames.add("non_coding_transcript_exon_variant");
    }
    return junctionSolution[1];
}
//TODO: ConsequenceTypeList currently holds a list of String. Once the full list of consequence types for each
//TODO: transcript is available, it must be traversed, creating the corresponding ConsequenceType objects and filling in the remaining fields: gene, cDNA position, CDS position, etc.
/**
 * Computes the consequence types for every variant in the given list by delegating to
 * getAllConsequenceTypesByVariant one variant at a time.
 *
 * Removed the long-dead commented-out try/catch scaffolding that surrounded the loop.
 *
 * @param variants variants to annotate
 * @param options  query options forwarded to each per-variant call
 * @return one QueryResult per input variant, in the same order
 */
@Override
public List<QueryResult> getAllConsequenceTypesByVariantList(List<GenomicVariant> variants, QueryOptions options) {
    List<QueryResult> queryResults = new ArrayList<>(variants.size());
    for (GenomicVariant genomicVariant : variants) {
        queryResults.add(getAllConsequenceTypesByVariant(genomicVariant, options));
    }
    return queryResults;
}
/**
 * Not implemented: always returns {@code null}.
 * NOTE(review): callers must null-check; consider throwing UnsupportedOperationException
 * instead — confirm no caller relies on the null return before changing.
 */
@Override
public QueryResult getAllEffectsByVariant(GenomicVariant variant, QueryOptions options) {
    return null;
}
/**
 * Looks up pre-computed variant effects from a tabix-indexed annotation file, one result per
 * input variant. For each variant only the FIRST tabix record at its exact position is used;
 * the record's third tab-separated column is returned as the result document.
 *
 * Cleanup: removed debug stdout printing and large blocks of commented-out dead code;
 * logic is otherwise unchanged. Note that, as before, if no record matches a variant the
 * previous variant's document is reused (the accumulator is deliberately kept outside the
 * loop to preserve that existing behavior — TODO confirm whether that is intended).
 *
 * @param variants variants to look up
 * @param options  unused
 * @return one QueryResult per input variant, in the same order
 */
@Override
public List<QueryResult> getAllEffectsByVariantList(List<GenomicVariant> variants, QueryOptions options) {
    List<QueryResult> queryResults = new ArrayList<>(variants.size());
    long dbTimeStart, dbTimeEnd;
    String document = "";
    try {
        // NOTE(review): the reader is never released; confirm whether TabixReader exposes a
        // close() and call it when done.
        TabixReader currentTabix = new TabixReader(applicationProperties.getProperty("VARIANT_ANNOTATION.FILENAME"));
        for (GenomicVariant genomicVariant : variants) {
            TabixReader.Iterator it = currentTabix.query(genomicVariant.getChromosome() + ":"
                    + genomicVariant.getPosition() + "-" + genomicVariant.getPosition());
            dbTimeStart = System.currentTimeMillis();
            String line;
            // Take only the first matching record at this position.
            if (it != null && (line = it.next()) != null) {
                document = line.split("\t")[2];
            }
            dbTimeEnd = System.currentTimeMillis();
            QueryResult queryResult = new QueryResult();
            queryResult.setDBTime((dbTimeEnd - dbTimeStart));
            queryResult.setNumResults(1);
            queryResult.setResult(document);
            queryResults.add(queryResult);
        }
    } catch (IOException e) {
        // TODO(review): propagate or log through the project's logger instead of stderr;
        // kept printStackTrace to preserve existing behavior (no logger visible in scope).
        e.printStackTrace();
    }
    return queryResults;
}
} |
package io.cloudslang.lang.compiler.modeller.transformers;
import io.cloudslang.lang.entities.bindings.Argument;
import org.apache.commons.collections4.MapUtils;
import org.springframework.stereotype.Component;
import java.io.Serializable;
import java.util.*;
@Component
public class DoTransformer implements Transformer<Map<String, List>, List<Argument>> {
@Override
public List<Argument> transform(Map<String, List> rawData) {
List<Argument> result = new ArrayList<>();
if (MapUtils.isEmpty(rawData)) {
return result;
} else if (rawData.size() > 1) {
throw new RuntimeException("Task has to many keys under the 'do' keyword,\n" +
"May happen due to wrong indentation");
}
// TODO - task args - support one liner syntax
Map.Entry<String, List> argumentsEntry = rawData.entrySet().iterator().next();
if (argumentsEntry.getValue() == null) {
return result;
}
for (Object rawArgument : argumentsEntry.getValue()) {
Argument argument = transformArgument(rawArgument);
result.add(argument);
}
return result;
}
@Override
public List<Scope> getScopes() {
return Collections.singletonList(Scope.BEFORE_TASK);
}
@Override
public String keyToTransform() {
return null;
}
private Argument transformArgument(Object rawArgument) {
// - some_arg
// this is our default behaviour that if the user specifies only a key, the key is also the ref we look for
if (rawArgument instanceof String) {
String argumentName = (String) rawArgument;
return new Argument(argumentName, null);
} else if (rawArgument instanceof Map) {
@SuppressWarnings("unchecked")
Map.Entry<String, Serializable> entry = ((Map<String, Serializable>) rawArgument).entrySet().iterator().next();
Serializable entryValue = entry.getValue();
if(entryValue == null){
throw new RuntimeException("Could not transform task argument : " +
rawArgument + ". Since it has a null value.\n" +
"Make sure a value is specified or that indentation is properly done."
);
}
// - some_input: some_expression
return new Argument(entry.getKey(), entryValue.toString());
}
throw new RuntimeException("Could not transform task argument : " + rawArgument);
}
} |
package io.cattle.platform.api.resource.jooq;
import io.cattle.platform.api.auth.Policy;
import io.cattle.platform.api.resource.AbstractObjectResourceManager;
import io.cattle.platform.api.utils.ApiUtils;
import io.cattle.platform.engine.process.ExitReason;
import io.cattle.platform.engine.process.ProcessInstanceException;
import io.cattle.platform.engine.process.impl.ProcessCancelException;
import io.cattle.platform.engine.process.impl.ProcessExecutionExitException;
import io.cattle.platform.lock.exception.FailedToAcquireLockException;
import io.cattle.platform.object.jooq.utils.JooqUtils;
import io.cattle.platform.object.meta.MapRelationship;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.meta.Relationship;
import io.cattle.platform.object.meta.Relationship.RelationshipType;
import io.cattle.platform.util.exception.ExceptionUtils;
import io.github.ibuildthecloud.gdapi.context.ApiContext;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.factory.SchemaFactory;
import io.github.ibuildthecloud.gdapi.model.Include;
import io.github.ibuildthecloud.gdapi.model.ListOptions;
import io.github.ibuildthecloud.gdapi.model.Pagination;
import io.github.ibuildthecloud.gdapi.model.Schema;
import io.github.ibuildthecloud.gdapi.model.Sort;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.jooq.Condition;
import org.jooq.Configuration;
import org.jooq.DSLContext;
import org.jooq.JoinType;
import org.jooq.SelectQuery;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.exception.DataAccessException;
import org.jooq.impl.DSL;
import org.jooq.impl.DefaultDSLContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class AbstractJooqResourceManager extends AbstractObjectResourceManager {
private static final Logger log = LoggerFactory.getLogger(AbstractJooqResourceManager.class);
Configuration configuration;
protected DSLContext create() {
return new DefaultDSLContext(configuration);
}
@Override
protected Object listInternal(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, ListOptions options) {
return listInternal(schemaFactory, type, criteria, options, null);
}
protected Object listInternal(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, ListOptions options, Map<Table<?>, Condition> joins) {
Class<?> clz = getClass(schemaFactory, type, criteria, true);
if (clz == null) {
return null;
}
/* Use core schema, parent may not be authorized */
type = getObjectManager().getSchemaFactory().getSchemaName(clz);
Table<?> table = JooqUtils.getTableFromRecordClass(clz);
Sort sort = options == null ? null : options.getSort();
Pagination pagination = options == null ? null : options.getPagination();
Include include = options == null ? null : options.getInclude();
if (table == null)
return null;
SelectQuery<?> query = create().selectQuery();
MultiTableMapper mapper = addTables(schemaFactory, query, type, table, criteria, include, pagination, joins);
addJoins(query, joins);
addConditions(schemaFactory, query, type, table, criteria);
addSort(schemaFactory, type, sort, query);
addLimit(schemaFactory, type, pagination, query);
List<?> result = mapper == null ? query.fetch() : query.fetchInto(mapper);
processPaginationResult(result, pagination, mapper);
return result;
}
protected void addJoins(SelectQuery<?> query, Map<Table<?>, Condition> joins) {
if (joins == null) {
return;
}
for (Map.Entry<Table<?>, Condition> entry : joins.entrySet()) {
query.addJoin(entry.getKey(), JoinType.LEFT_OUTER_JOIN, entry.getValue());
}
}
protected void processPaginationResult(List<?> result, Pagination pagination, MultiTableMapper mapper) {
Integer limit = pagination == null ? null : pagination.getLimit();
if (limit == null) {
return;
}
long offset = getOffset(pagination);
boolean partial = false;
if (mapper == null) {
partial = result.size() > limit;
if (partial) {
result.remove(result.size() - 1);
}
} else {
partial = mapper.getResultSize() > limit;
}
if (partial) {
Pagination paginationResponse = new Pagination(limit);
paginationResponse.setPartial(true);
paginationResponse.setNext(ApiContext.getUrlBuilder().next("m" + (offset + limit)));
pagination.setResponse(paginationResponse);
} else {
pagination.setResponse(new Pagination(limit));
}
}
protected int getOffset(Pagination pagination) {
Object marker = getMarker(pagination);
if (marker == null) {
return 0;
} else if (marker instanceof String) {
/*
* Important to check that marker is a string. If you don't then
* somebody could use the marker functionality to deobfuscate ID's
* and find their long value.
*/
try {
return Integer.parseInt((String) marker);
} catch (NumberFormatException nfe) {
return 0;
}
}
return 0;
}
protected Class<?> getClass(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, boolean alterCriteria) {
Schema schema = schemaFactory.getSchema(type);
Class<?> clz = schemaFactory.getSchemaClass(type);
Schema clzSchema = schemaFactory.getSchema(clz);
if (clz != null && (clzSchema == null || !schema.getId().equals(clzSchema.getId())) && alterCriteria) {
criteria.put(ObjectMetaDataManager.KIND_FIELD, type);
}
return clz;
}
protected MultiTableMapper addTables(SchemaFactory schemaFactory, SelectQuery<?> query, String type, Table<?> table, Map<Object, Object> criteria,
Include include, Pagination pagination, Map<Table<?>, Condition> joins) {
if ((joins == null || joins.size() == 0) && (include == null || include.getLinks().size() == 0)) {
query.addFrom(table);
return null;
}
MultiTableMapper tableMapper = new MultiTableMapper(getMetaDataManager(), pagination);
tableMapper.map(table);
if (include == null) {
query.addSelect(tableMapper.getFields());
query.addFrom(table);
return tableMapper;
}
List<Relationship> rels = new ArrayList<Relationship>();
rels.add(null);
for (Map.Entry<String, Relationship> entry : getLinkRelationships(schemaFactory, type, include).entrySet()) {
Relationship rel = entry.getValue();
Table<?> childTable = JooqUtils.getTableFromRecordClass(rel.getObjectType());
if (childTable == null) {
throw new IllegalStateException("Failed to find table for type [" + rel.getObjectType() + "]");
} else {
String key = rel.getRelationshipType() == RelationshipType.REFERENCE ? ApiUtils.SINGLE_ATTACHMENT_PREFIX + rel.getName() : rel.getName();
tableMapper.map(key, childTable);
rels.add(rel);
}
}
List<Table<?>> tables = tableMapper.getTables();
query.addSelect(tableMapper.getFields());
query.addFrom(table);
for (int i = 0; i < tables.size(); i++) {
Relationship rel = rels.get(i);
Table<?> toTable = tables.get(i);
if (rel != null) {
if (rel.getRelationshipType() == RelationshipType.MAP) {
addMappingJoins(query, toTable, schemaFactory, type, table, toTable.getName(), (MapRelationship) rel);
} else {
query.addJoin(toTable, JoinType.LEFT_OUTER_JOIN, getJoinCondition(schemaFactory, type, table, toTable.getName(), rel));
}
}
}
return tableMapper;
}
protected void addMappingJoins(SelectQuery<?> query, Table<?> toTable, SchemaFactory schemaFactory, String fromType, Table<?> from, String asName,
MapRelationship rel) {
Table<?> mappingTable = JooqUtils.getTableFromRecordClass(rel.getMappingType());
/*
* We don't required the mapping type to be visible external, that's why
* we use the schemaFactory from the objectManager, because it is the
* superset schemaFactory.
*/
String mappingType = getObjectManager().getSchemaFactory().getSchemaName(rel.getMappingType());
TableField<?, Object> fieldFrom = JooqUtils.getTableField(getMetaDataManager(), fromType, ObjectMetaDataManager.ID_FIELD);
TableField<?, Object> fieldTo = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getPropertyName());
TableField<?, Object> fieldRemoved = JooqUtils.getTableField(getMetaDataManager(), mappingType, ObjectMetaDataManager.REMOVED_FIELD);
org.jooq.Condition cond = fieldFrom.eq(fieldTo.getTable().field(fieldTo.getName())).and(
fieldRemoved == null ? DSL.trueCondition() : fieldRemoved.isNull());
query.addJoin(mappingTable, JoinType.LEFT_OUTER_JOIN, cond);
fieldFrom = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getOtherRelationship().getPropertyName());
fieldTo = JooqUtils.getTableField(getMetaDataManager(), schemaFactory.getSchemaName(rel.getObjectType()), ObjectMetaDataManager.ID_FIELD);
cond = fieldFrom.eq(fieldTo.getTable().asTable(asName).field(fieldTo.getName()));
query.addJoin(toTable, JoinType.LEFT_OUTER_JOIN, cond);
}
protected org.jooq.Condition getJoinCondition(SchemaFactory schemaFactory, String fromType, Table<?> from, String asName, Relationship rel) {
TableField<?, Object> fieldFrom = null;
TableField<?, Object> fieldTo = null;
switch (rel.getRelationshipType()) {
case REFERENCE:
fieldFrom = JooqUtils.getTableField(getMetaDataManager(), fromType, rel.getPropertyName());
fieldTo = JooqUtils.getTableField(getMetaDataManager(), schemaFactory.getSchemaName(rel.getObjectType()), ObjectMetaDataManager.ID_FIELD);
break;
case CHILD:
fieldFrom = JooqUtils.getTableField(getMetaDataManager(), fromType, ObjectMetaDataManager.ID_FIELD);
fieldTo = JooqUtils.getTableField(getMetaDataManager(), schemaFactory.getSchemaName(rel.getObjectType()), rel.getPropertyName());
break;
default:
throw new IllegalArgumentException("Illegal Relationship type [" + rel.getRelationshipType() + "]");
}
if (fieldFrom == null || fieldTo == null) {
throw new IllegalStateException("Failed to construction join query for [" + fromType + "] [" + from + "] [" + rel + "]");
}
return fieldFrom.eq(fieldTo.getTable().as(asName).field(fieldTo.getName()));
}
protected void addConditions(SchemaFactory schemaFactory, SelectQuery<?> query, String type, Table<?> table, Map<Object, Object> criteria) {
org.jooq.Condition condition = JooqUtils.toConditions(getMetaDataManager(), type, criteria);
if (condition != null) {
query.addConditions(condition);
}
}
@Override
protected Object getMapLink(String fromType, String id, MapRelationship rel, ApiRequest request) {
SchemaFactory schemaFactory = request.getSchemaFactory();
/*
* We don't required the mapping type to be visible external, that's why
* we use the schemaFactory from the objectManager, because it is the
* superset schemaFactory.
*/
String mappingType = getObjectManager().getSchemaFactory().getSchemaName(rel.getMappingType());
String type = schemaFactory.getSchemaName(rel.getObjectType());
Map<Table<?>, Condition> joins = new LinkedHashMap<Table<?>, Condition>();
Map<Object, Object> criteria = new LinkedHashMap<Object, Object>();
if (mappingType == null || type == null) {
return null;
}
Table<?> mappingTable = JooqUtils.getTable(schemaFactory, rel.getMappingType());
TableField<?, Object> fieldFrom = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.ID_FIELD);
TableField<?, Object> fieldTo = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getOtherRelationship().getPropertyName());
TableField<?, Object> fieldRemoved = JooqUtils.getTableField(getMetaDataManager(), mappingType, ObjectMetaDataManager.REMOVED_FIELD);
TableField<?, Object> fromTypeIdField = JooqUtils.getTableField(getMetaDataManager(), mappingType, rel.getSelfRelationship().getPropertyName());
org.jooq.Condition cond = fieldFrom.eq(fieldTo.getTable().field(fieldTo.getName())).and(
fieldRemoved == null ? DSL.trueCondition() : fieldRemoved.isNull());
joins.put(mappingTable, cond);
criteria.put(Condition.class, fromTypeIdField.eq(id));
return listInternal(schemaFactory, type, criteria, new ListOptions(request), joins);
}
protected void addLimit(SchemaFactory schemaFactory, String type, Pagination pagination, SelectQuery<?> query) {
if (pagination == null || pagination.getLimit() == null) {
return;
}
int limit = pagination.getLimit() + 1;
int offset = getOffset(pagination);
query.addLimit(offset, limit);
}
protected void addSort(SchemaFactory schemaFactory, String type, Sort sort, SelectQuery<?> query) {
if (sort != null) {
TableField<?, Object> sortField = JooqUtils.getTableField(getMetaDataManager(), type, sort.getName());
if (sortField == null) {
return;
}
switch (sort.getOrderEnum()) {
case DESC:
query.addOrderBy(sortField.desc());
break;
default:
query.addOrderBy(sortField.asc());
}
}
TableField<?, Object> idSort = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.ID_FIELD);
if (idSort == null) {
return;
}
if (sort != null) {
switch (sort.getOrderEnum()) {
case DESC:
query.addOrderBy(idSort.desc());
break;
default:
query.addOrderBy(idSort.asc());
}
}
else {
query.addOrderBy(idSort.asc());
}
}
@Override
protected void addAccountAuthorization(boolean byId, boolean byLink, String type, Map<Object, Object> criteria, Policy policy) {
super.addAccountAuthorization(byId, byLink, type, criteria, policy);
if (!policy.isOption(Policy.LIST_ALL_ACCOUNTS)) {
if (policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS) && (byId || byLink)) {
return;
}
TableField<?, Object> accountField = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.ACCOUNT_FIELD);
TableField<?, Object> publicField = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.PUBLIC_FIELD);
Object accountValue = criteria.get(ObjectMetaDataManager.ACCOUNT_FIELD);
if (accountField == null || publicField == null || accountValue == null) {
return;
}
criteria.remove(ObjectMetaDataManager.ACCOUNT_FIELD);
Condition accountCondition = null;
if (accountValue instanceof io.github.ibuildthecloud.gdapi.condition.Condition) {
accountCondition = accountField.in(((io.github.ibuildthecloud.gdapi.condition.Condition) accountValue).getValues());
} else {
accountCondition = accountField.eq(accountValue);
}
criteria.put(Condition.class, publicField.isTrue().or(accountCondition));
}
}
@Override
protected Object removeFromStore(String type, String id, Object obj, ApiRequest request) {
Table<?> table = JooqUtils.getTableFromRecordClass(JooqUtils.getRecordClass(request.getSchemaFactory(), obj.getClass()));
TableField<?, Object> idField = JooqUtils.getTableField(getMetaDataManager(), type, ObjectMetaDataManager.ID_FIELD);
int result = create().delete(table).where(idField.eq(id)).execute();
if (result != 1) {
log.error("While deleting type [{}] and id [{}] got a result of [{}]", type, id, result);
throw new ClientVisibleException(ResponseCodes.CONFLICT);
}
return obj;
}
@Override
public boolean handleException(Throwable t, ApiRequest apiRequest) {
if (t instanceof ProcessInstanceException) {
t = ExceptionUtils.getRootCause(t);
}
if (t instanceof ProcessExecutionExitException && ((ProcessExecutionExitException) t).getExitReason() == ExitReason.RESOURCE_BUSY) {
log.info("Resource busy : {}", t.getMessage());
throw new ClientVisibleException(ResponseCodes.CONFLICT);
} else if (t instanceof FailedToAcquireLockException) {
log.info("Failed to lock : {}", t.getMessage());
throw new ClientVisibleException(ResponseCodes.CONFLICT);
} else if (t instanceof ProcessCancelException) {
log.info("Process cancel : {}", t.getMessage());
throw new ClientVisibleException(ResponseCodes.CONFLICT);
} else if (t instanceof DataAccessException) {
log.info("Database error : {}", t.getMessage());
throw new ClientVisibleException(ResponseCodes.CONFLICT);
}
return super.handleException(t, apiRequest);
}
public Configuration getConfiguration() {
return configuration;
}
@Inject
public void setConfiguration(Configuration configuration) {
this.configuration = configuration;
}
} |
package edu.duke.cabig.c3pr.aspects.springaop;
import java.util.List;
import org.apache.log4j.Logger;
import org.aspectj.lang.annotation.AfterReturning;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.mail.MailException;
import org.springframework.mail.MailSender;
import org.springframework.mail.SimpleMailMessage;
import edu.duke.cabig.c3pr.domain.PersonUser;
import edu.duke.cabig.c3pr.domain.repository.CSMUserRepository;
import edu.duke.cabig.c3pr.utils.RoleBasedHealthcareSitesAndStudiesDTO;
import gov.nih.nci.logging.api.util.StringUtils;
@Aspect
public class UserEmailConfirmationAspect {
private MailSender mailSender;
private SimpleMailMessage accountCreatedTemplateMessage;
private CSMUserRepository csmRepository;
private Logger log = Logger.getLogger(UserEmailConfirmationAspect.class);
private String changeURL;
private boolean sendEmail = false;
@Required
public void setChangeURL(String changeURL) {
this.changeURL = changeURL;
}
@Before("execution(* edu.duke.cabig.c3pr.domain.repository.impl.PersonUserRepositoryImpl.createOrModifyResearchStaffWithUserAndAssignRoles(..))"
+ " && args(researchStaff, username, listAssociation)")
public void beforeCreateResearchStaffWithCSMUserAndAssignRoles(PersonUser researchStaff, String username, List<RoleBasedHealthcareSitesAndStudiesDTO> listAssociation) {
if(researchStaff.getLoginId() == null){
sendEmail = true;
} else {
sendEmail = false;
log.debug("Dont send user creation email for the edit user flow.");
}
}
@AfterReturning("execution(* edu.duke.cabig.c3pr.domain.repository.impl.PersonUserRepositoryImpl.createOrModifyResearchStaffWithUserAndAssignRoles(..))"
+ " && args(researchStaff, username, listAssociation)")
public void createResearchStaffWithCSMUserAndAssignRoles(PersonUser researchStaff, String username, List<RoleBasedHealthcareSitesAndStudiesDTO> listAssociation) {
if(sendEmail){
sendEmail(researchStaff);
} else {
log.debug("Not sending email for the edit user flow.");
}
}
@Before("execution(* edu.duke.cabig.c3pr.domain.repository.impl.PersonUserRepositoryImpl.createOrModifyUserWithoutResearchStaffAndAssignRoles(..))"
+ " && args(researchStaff, username, listAssociation)")
public void beforeCreateCSMUser(PersonUser researchStaff, String username, List<RoleBasedHealthcareSitesAndStudiesDTO> listAssociation) {
if(researchStaff.getLoginId() == null){
sendEmail = true;
} else {
sendEmail = false;
log.debug("Dont send user creation email for the edit user flow.");
}
}
@AfterReturning("execution(* edu.duke.cabig.c3pr.domain.repository.impl.PersonUserRepositoryImpl.createOrModifyUserWithoutResearchStaffAndAssignRoles(..))"
+ " && args(researchStaff, username, listAssociation)")
public void createCSMUser(PersonUser researchStaff, String username, List<RoleBasedHealthcareSitesAndStudiesDTO> listAssociation) {
if(sendEmail){
sendEmail(researchStaff);
} else {
log.debug("Not sending email for the edit user flow.");
}
}
@AfterReturning("execution(* edu.duke.cabig.c3pr.domain.repository.impl.PersonUserRepositoryImpl.createSuperUser(..))"
+ " && args(researchStaff, username , listAssociation)")
public void createSuperUser(PersonUser researchStaff, String username, List<RoleBasedHealthcareSitesAndStudiesDTO> listAssociation) {
if(!StringUtils.isBlank(username)){
sendEmail(researchStaff);
} else {
log.debug("Not sending email for the edit user flow.");
}
}
private void sendEmail(PersonUser personUser) {
try {
if(!StringUtils.isBlank(personUser.getLoginId())){
SimpleMailMessage msg = new SimpleMailMessage( this.accountCreatedTemplateMessage);
msg.setTo(personUser.getEmail());
msg.setText("A new C3PR account has been created for you.\n"
+ "Your username is follows:\n"
+ "Username: " + csmRepository.getUsernameById(personUser.getLoginId())
+ "\n"
+ "You must create your password before you can login. In order to do so please visit this URL:\n"
+ "\n"
+ changeURL + "&token=" + personUser.getToken() + "\n"
+ "\n"
+ "Regards\n"
+ "The C3PR Notification System.\n");
log.debug("Trying to send user account confirmation email. URL is " + changeURL + "&token=" + personUser.getToken());
this.mailSender.send(msg);
}
}
catch (MailException e) {
log.error("Could not send email due to " + e.getMessage(),e);
// just log it for now
}
}
@Required
public void setMailSender(MailSender mailSender) {
this.mailSender = mailSender;
}
@Required
public void setAccountCreatedTemplateMessage(SimpleMailMessage accountCreatedTemplateMessage) {
this.accountCreatedTemplateMessage = accountCreatedTemplateMessage;
}
@Required
public void setCsmRepository(CSMUserRepository csmRepository) {
this.csmRepository = csmRepository;
}
} |
package com.codenvy.mbstorage.sql;
import org.flywaydb.core.Flyway;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
/**
* Initialize database structures.
*
* @author Sergii Kabashniuk
*/
public class StorageInitializer {
private final Flyway flyway;
@Inject
public StorageInitializer(JndiDataSourcedConnectionFactory dataSourcedConnectionFactory) throws SQLException {
flyway = new Flyway();
flyway.setDataSource(dataSourcedConnectionFactory.getDataSource());
flyway.setLocations(getScriptLocation());
}
public StorageInitializer(DataSource dataSource, boolean cleanOnValidationError) throws SQLException {
flyway = new Flyway();
flyway.setCleanOnValidationError(cleanOnValidationError);
flyway.setDataSource(dataSource);
flyway.setLocations(getScriptLocation());
}
/**
* Drops all objects (tables, views, procedures, triggers, ...) in the configured schemas.
*/
public void clean() {
flyway.clean();
}
@PostConstruct
public void init() {
//TODO remove before prod update.
clean();
flyway.migrate();
}
private String getScriptLocation() throws SQLException {
try (Connection connection = flyway.getDataSource().getConnection()) {
DatabaseMetaData metdadata = connection.getMetaData();
switch (metdadata.getDatabaseProductName()) {
case "PostgreSQL":
return "db/migration/postgresql";
case "MySQL":
return "db/migration/mysql";
case "HSQL Database Engine":
return "db/migration/hsqldb";
default:
throw new RuntimeException("Unknown database " + metdadata.getDatabaseProductName());
}
}
}
} |
package org.openforis.collect.remoting.service;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.openforis.collect.manager.RecordManager;
import org.openforis.collect.manager.SessionManager;
import org.openforis.collect.metamodel.proxy.CodeListItemProxy;
import org.openforis.collect.model.CollectRecord;
import org.openforis.collect.model.CollectSurvey;
import org.openforis.collect.model.FieldSymbol;
import org.openforis.collect.model.User;
import org.openforis.collect.model.proxy.RecordProxy;
import org.openforis.collect.persistence.RecordPersistenceException;
import org.openforis.collect.remoting.service.UpdateRequestOperation.Method;
import org.openforis.collect.web.session.SessionState;
import org.openforis.collect.web.session.SessionState.RecordState;
import org.openforis.idm.metamodel.AttributeDefinition;
import org.openforis.idm.metamodel.BooleanAttributeDefinition;
import org.openforis.idm.metamodel.CodeAttributeDefinition;
import org.openforis.idm.metamodel.CodeListItem;
import org.openforis.idm.metamodel.CoordinateAttributeDefinition;
import org.openforis.idm.metamodel.DateAttributeDefinition;
import org.openforis.idm.metamodel.EntityDefinition;
import org.openforis.idm.metamodel.ModelVersion;
import org.openforis.idm.metamodel.NodeDefinition;
import org.openforis.idm.metamodel.NumberAttributeDefinition;
import org.openforis.idm.metamodel.NumberAttributeDefinition.Type;
import org.openforis.idm.metamodel.RangeAttributeDefinition;
import org.openforis.idm.metamodel.Schema;
import org.openforis.idm.metamodel.TimeAttributeDefinition;
import org.openforis.idm.metamodel.validation.ValidationResults;
import org.openforis.idm.model.Attribute;
import org.openforis.idm.model.Code;
import org.openforis.idm.model.CodeAttribute;
import org.openforis.idm.model.Entity;
import org.openforis.idm.model.Field;
import org.openforis.idm.model.IntegerRange;
import org.openforis.idm.model.Node;
import org.openforis.idm.model.NodePointer;
import org.openforis.idm.model.NumericRange;
import org.openforis.idm.model.RealRange;
import org.openforis.idm.model.Record;
import org.openforis.idm.model.expression.ExpressionFactory;
import org.openforis.idm.model.expression.ModelPathExpression;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
/**
* @author M. Togna
* @author S. Ricci
*
*/
public class DataService {

    // Per-user session access (active survey, active record, current user).
    @Autowired
    private SessionManager sessionManager;

    // Record persistence, locking and node-manipulation operations.
    @Autowired
    private RecordManager recordManager;
/**
 * Checks out the record with the given id at the given step, fills in any
 * missing empty nodes, registers it as the session's active record (state
 * SAVED) and returns a proxy of it.
 */
@Transactional
public RecordProxy loadRecord(int id, int step) throws RecordPersistenceException {
    CollectSurvey survey = getActiveSurvey();
    User user = getUserInSession();
    CollectRecord checkedOutRecord = recordManager.checkout(survey, user, id, step);
    recordManager.addEmptyNodes(checkedOutRecord.getRootEntity());
    SessionState sessionState = sessionManager.getSessionState();
    sessionState.setActiveRecord(checkedOutRecord);
    sessionState.setActiveRecordState(RecordState.SAVED);
    return new RecordProxy(checkedOutRecord);
}
/**
 * Loads one page of record summaries for the given root entity of the
 * active survey.
 *
 * @param rootEntityName name of the root entity definition
 * @param offset index of the first summary to return
 * @param maxNumberOfRows maximum number of summaries to return
 * @param orderByFieldName field used to sort the summaries
 * @param filter optional filter expression
 *
 * @return map with "count" (total number of records) and "records" (list of proxies)
 */
@Transactional
public Map<String, Object> getRecordSummaries(String rootEntityName, int offset, int maxNumberOfRows, String orderByFieldName, String filter) {
    SessionState sessionState = sessionManager.getSessionState();
    CollectSurvey activeSurvey = sessionState.getActiveSurvey();
    Schema schema = activeSurvey.getSchema();
    String rootEntityDefinitionName = schema.getRootEntityDefinition(rootEntityName).getName();
    int count = recordManager.getCountRecords(activeSurvey, rootEntityDefinitionName);
    List<CollectRecord> summaries = recordManager.getSummaries(activeSurvey, rootEntityDefinitionName, offset, maxNumberOfRows, orderByFieldName, filter);
    List<RecordProxy> proxies = new ArrayList<RecordProxy>();
    for (CollectRecord summary : summaries) {
        proxies.add(new RecordProxy(summary));
    }
    Map<String, Object> result = new HashMap<String, Object>();
    result.put("count", count);
    result.put("records", proxies);
    return result;
}
/**
 * Creates a new empty record for the given root entity and survey version,
 * registers it as the session's active record (state NEW) and returns a proxy.
 * NOTE(review): an unknown versionName makes getVersion() return null and the
 * version.getName() call below throw NPE — confirm version names are
 * validated by the caller.
 */
@Transactional
public RecordProxy createRecord(String rootEntityName, String versionName) throws RecordPersistenceException {
    SessionState sessionState = sessionManager.getSessionState();
    User user = sessionState.getUser();
    CollectSurvey activeSurvey = sessionState.getActiveSurvey();
    ModelVersion version = activeSurvey.getVersion(versionName);
    EntityDefinition rootEntityDefinition = activeSurvey.getSchema().getRootEntityDefinition(rootEntityName);
    CollectRecord record = recordManager.create(activeSurvey, rootEntityDefinition, user, version.getName());
    recordManager.addEmptyNodes(record.getRootEntity());
    sessionState.setActiveRecord(record);
    sessionState.setActiveRecordState(RecordState.NEW);
    return new RecordProxy(record);
}
/**
 * Deletes the record with the given id on behalf of the session user and
 * clears the session's active record.
 */
@Transactional
public void deleteRecord(int id) throws RecordPersistenceException {
    User user = sessionManager.getSessionState().getUser();
    recordManager.delete(id, user);
    sessionManager.clearActiveRecord();
}
/**
 * Persists the session's active record, stamping it with the current user
 * and date and marking the session record state as SAVED.
 */
@Transactional
public void saveActiveRecord() {
    SessionState sessionState = sessionManager.getSessionState();
    CollectRecord record = sessionState.getActiveRecord();
    User user = sessionState.getUser();
    record.setModifiedDate(new Date());
    record.setModifiedBy(user);
    try {
        recordManager.save(record);
        sessionState.setActiveRecordState(RecordState.SAVED);
    } catch (RecordPersistenceException e) {
        // Not expected for an already checked-out record; if it does happen,
        // keep the original exception as the cause instead of discarding it
        // (the previous code threw without the cause, losing the stack trace).
        throw new RuntimeException("Unexpected error saving record", e);
    }
}
/**
 * Deletes the session's active record on behalf of the session user and
 * clears it from the session.
 */
@Transactional
public void deleteActiveRecord() throws RecordPersistenceException {
    SessionState sessionState = sessionManager.getSessionState();
    Record record = sessionState.getActiveRecord();
    recordManager.delete(record.getId(), sessionState.getUser());
    sessionManager.clearActiveRecord();
}
/**
 * Applies all operations of the given request to the session's active record
 * and returns the accumulated responses, in operation order.
 */
public List<UpdateResponse> updateActiveRecord(UpdateRequest request) {
    List<UpdateResponse> responses = new ArrayList<UpdateResponse>();
    for (UpdateRequestOperation operation : request.getOperations()) {
        responses.addAll(processUpdateRequestOperation(operation));
    }
    return responses;
}
/**
 * Applies one ADD / UPDATE / DELETE operation to the session's active record
 * and returns {@link UpdateResponse}s for every node whose state changed
 * (created/deleted nodes, updated field values, relevance/required and
 * validation state changes).
 */
@SuppressWarnings("unchecked")
private Collection<UpdateResponse> processUpdateRequestOperation(UpdateRequestOperation operation) {
    SessionState sessionState = sessionManager.getSessionState();
    CollectRecord record = sessionState.getActiveRecord();
    Integer parentEntityId = operation.getParentEntityId();
    Entity parentEntity = (Entity) record.getNodeByInternalId(parentEntityId);
    Integer nodeId = operation.getNodeId();
    Integer fieldIndex = operation.getFieldIndex();
    String nodeName = operation.getNodeName();
    // nodeId is null for ADD operations (the node does not exist yet)
    Node<?> node = null;
    if (nodeId != null) {
        node = record.getNodeByInternalId(nodeId);
    }
    NodeDefinition nodeDef = ((EntityDefinition) parentEntity.getDefinition()).getChildDefinition(nodeName);
    String requestValue = operation.getValue();
    String remarks = operation.getRemarks();
    FieldSymbol symbol = operation.getSymbol();
    Method method = operation.getMethod();
    // responses are keyed by node id so multiple changes to the same node
    // are merged into one response
    Map<Integer, UpdateResponse> responseMap = new HashMap<Integer, UpdateResponse>();
    Set<NodePointer> relReqDependencies = null;
    Set<Attribute<?,?>> checkDependencies = null;
    List<Entity> ancestors = null;
    Attribute<? extends AttributeDefinition, ?> attribute = null;
    switch (method) {
    case ADD :
        Node<?> createdNode = addNode(parentEntity, nodeDef, requestValue, symbol, remarks);
        UpdateResponse response = getUpdateResponse(responseMap, createdNode.getInternalId());
        response.setCreatedNode(createdNode);
        relReqDependencies = recordManager.clearRelevanceRequiredStates(createdNode);
        if (createdNode instanceof Attribute) {
            attribute = (Attribute<? extends AttributeDefinition, ?>) createdNode;
            checkDependencies = recordManager.clearValidationResults(attribute);
            checkDependencies.add(attribute);
        }
        relReqDependencies.add(new NodePointer(createdNode.getParent(), createdNode.getName()));
        ancestors = createdNode.getAncestors();
        break;
    case UPDATE:
        attribute = (Attribute<AttributeDefinition, ?>) node;
        ancestors = attribute.getAncestors();
        response = getUpdateResponse(responseMap, attribute.getInternalId());
        Map<Integer, Object> updatedFieldValues = new HashMap<Integer, Object>();
        // fieldIndex < 0 means the whole (composite) attribute value is being
        // replaced; otherwise only the single addressed field changes
        if (fieldIndex < 0) {
            Object value = null;
            if (requestValue != null) {
                value = parseCompositeAttributeValue(parentEntity, attribute.getDefinition(), requestValue);
            }
            recordManager.setAttributeValue(attribute, value, remarks);
            // propagate each resulting field value so the client sees the
            // parsed per-field representation
            for (int idx = 0; idx < attribute.getFieldCount(); idx++) {
                Field<?> field = attribute.getField(idx);
                Object fieldValue = field.getValue();
                updatedFieldValues.put(idx, fieldValue);
                recordManager.setFieldValue(attribute, fieldValue, remarks, symbol, idx);
            }
        } else {
            Object value = parseFieldValue(parentEntity,
                    attribute.getDefinition(), requestValue, fieldIndex);
            recordManager.setFieldValue(attribute, value, remarks, symbol, fieldIndex);
            Field<?> field = attribute.getField(fieldIndex);
            updatedFieldValues.put(fieldIndex, field.getValue());
        }
        response.setUpdatedFieldValues(updatedFieldValues);
        relReqDependencies = recordManager.clearRelevanceRequiredStates(attribute);
        checkDependencies = recordManager.clearValidationResults(attribute);
        relReqDependencies.add(new NodePointer(attribute.getParent(), attribute.getName()));
        checkDependencies.add(attribute);
        break;
    case DELETE:
        relReqDependencies = new HashSet<NodePointer>();
        checkDependencies = new HashSet<Attribute<?,?>>();
        deleteNode(node, relReqDependencies, checkDependencies, responseMap);
        break;
    }
    // fold the dependency/ancestor state changes into the responses
    prepareUpdateResponse(responseMap, relReqDependencies, checkDependencies, ancestors);
    return responseMap.values();
}
/**
 * Deletes the given node and all of its descendants, collecting the
 * relevance/required and validation dependencies affected by the removal.
 * Descendants are gathered top-down onto one stack and removed bottom-up from
 * a second stack, so children are always deleted before their parents.
 */
private void deleteNode(Node<?> node,Set<NodePointer> relevanceRequiredDependencies, Set<Attribute<?,?>> checkDependencies, Map<Integer, UpdateResponse> responseMap){
    Stack<Node<?>> dependenciesStack = new Stack<Node<?>>();
    Stack<Node<?>> nodesToRemove = new Stack<Node<?>>();
    dependenciesStack.push(node);
    Set<NodePointer> relevantDependencies = new HashSet<NodePointer>();
    Set<NodePointer> requiredDependencies = new HashSet<NodePointer>();
    // phase 1: walk the subtree, recording dependencies and deletion order
    while(!dependenciesStack.isEmpty()){
        Node<?> n = dependenciesStack.pop();
        nodesToRemove.push(n);
        relevantDependencies.addAll(n.getRelevantDependencies());
        requiredDependencies.addAll(n.getRequiredDependencies());
        if(n instanceof Entity){
            Entity entity = (Entity) n;
            List<Node<? extends NodeDefinition>> children = entity.getChildren();
            for (Node<? extends NodeDefinition> child : children) {
                dependenciesStack.push(child);
            }
        } else {
            Attribute<?,?> attr = (Attribute<?, ?>) n;
            checkDependencies.addAll(attr.getCheckDependencies());
        }
    }
    // phase 2: remove bottom-up
    while(!nodesToRemove.isEmpty()){
        Node<?> n = nodesToRemove.pop();
        recordManager.deleteNode(n);
        // NOTE(review): the response is keyed on the originally requested
        // node ("node") rather than the node just removed ("n"), so clients
        // only ever see the root deletion event — confirm this is intentional
        // and descendants do not need their own delete responses.
        UpdateResponse resp = getUpdateResponse(responseMap, node.getInternalId());
        resp.setDeletedNodeId(node.getInternalId());
    }
    //clear dependencies
    recordManager.clearRelevantDependencies(relevantDependencies);
    requiredDependencies.addAll(relevantDependencies);
    recordManager.clearRequiredDependencies(requiredDependencies);
    recordManager.clearValidationResults(checkDependencies);
    relevanceRequiredDependencies.addAll(requiredDependencies);
}
/**
 * Folds the collected side effects of an operation into the response map:
 * min-count/relevance/required state for the ancestors of the touched node
 * and for every dependent node pointer, plus fresh validation results for
 * every dependent attribute. Detached nodes (already removed from the
 * record) are skipped. Any of the three inputs may be null and is then
 * ignored. (Parameter names carry typos — "Reqquired", "validtion" — kept
 * for byte-compatibility; they are private to this class.)
 */
private void prepareUpdateResponse(Map<Integer, UpdateResponse> responseMap, Set<NodePointer> relevanceReqquiredDependencies, Set<Attribute<?, ?>> validtionResultsDependencies, List<Entity> ancestors) {
    if (ancestors != null) {
        for (Entity entity : ancestors) {
            // entity could be root definition
            Entity parent = entity.getParent();
            if (parent != null && !parent.isDetached()) {
                UpdateResponse response = getUpdateResponse(responseMap, parent.getInternalId());
                String childName = entity.getName();
                response.setMinCountValid(childName, parent.validateMinCount(childName));
                response.setRelevant(childName, parent.isRelevant(childName));
                response.setRequired(childName, parent.isRequired(childName));
            }
        }
    }
    if (relevanceReqquiredDependencies != null) {
        for (NodePointer nodePointer : relevanceReqquiredDependencies) {
            Entity entity = nodePointer.getEntity();
            if (!entity.isDetached()) {
                String childName = nodePointer.getChildName();
                UpdateResponse response = getUpdateResponse(responseMap, entity.getInternalId());
                response.setRelevant(childName, entity.isRelevant(childName));
                response.setRequired(childName, entity.isRequired(childName));
                response.setMinCountValid(childName, entity.validateMinCount(childName));
            }
        }
    }
    if (validtionResultsDependencies != null) {
        for (Attribute<?, ?> checkDepAttr : validtionResultsDependencies) {
            if (!checkDepAttr.isDetached()) {
                // re-validate from scratch so stale results are not reported
                checkDepAttr.clearValidationResults();
                ValidationResults results = checkDepAttr.validateValue();
                UpdateResponse response = getUpdateResponse(responseMap, checkDepAttr.getInternalId());
                response.setAttributeValidationResults(results);
            }
        }
    }
}
/** Returns the response for the given node id, creating and caching it on first use. */
private UpdateResponse getUpdateResponse(Map<Integer, UpdateResponse> responseMap, int nodeId){
    UpdateResponse existing = responseMap.get(nodeId);
    if (existing != null) {
        return existing;
    }
    UpdateResponse created = new UpdateResponse(nodeId);
    responseMap.put(nodeId, created);
    return created;
}
/**
 * Creates a new node of the given definition under parentEntity. For an
 * attribute definition the attribute is created, attached, its value parsed
 * from requestValue (when not blank) and the symbol/remarks stored on the
 * first field; for an entity definition an empty entity is added via the
 * record manager.
 *
 * Fix: the attribute was previously added to the parent twice (once before
 * configuring it and again afterwards); it is now attached exactly once.
 */
@SuppressWarnings("unchecked")
private Node<?> addNode(Entity parentEntity, NodeDefinition nodeDef, String requestValue, FieldSymbol symbol, String remarks) {
    if(nodeDef instanceof AttributeDefinition) {
        AttributeDefinition def = (AttributeDefinition) nodeDef;
        Attribute<?, ?> attribute = (Attribute<?, ?>) def.createNode();
        parentEntity.add(attribute);
        if(StringUtils.isNotBlank(requestValue)) {
            Object value = parseCompositeAttributeValue(parentEntity, def, requestValue);
            ((Attribute<?, Object>) attribute).setValue(value);
        }
        if(symbol != null || remarks != null) {
            Character symbolChar = null;
            if(symbol != null) {
                symbolChar = symbol.getCode();
            }
            Field<?> firstField = attribute.getField(0);
            firstField.setSymbol(symbolChar);
            firstField.setRemarks(remarks);
        }
        return attribute;
    } else {
        return recordManager.addEntity(parentEntity, nodeDef.getName());
    }
}
/**
 * Parses the textual value of a single field into the Java type expected by
 * the attribute definition. Returns null for blank input. Unrecognized
 * definition types fall through to the raw string.
 * May throw NumberFormatException for non-numeric input on numeric fields.
 */
private Object parseFieldValue(Entity parentEntity, AttributeDefinition def, String value, Integer fieldIndex) {
    Object fieldValue = null;
    if(StringUtils.isBlank(value)) {
        return null;
    }
    if(def instanceof BooleanAttributeDefinition) {
        fieldValue = Boolean.parseBoolean(value);
    } else if(def instanceof CoordinateAttributeDefinition) {
        if(fieldIndex != null) {
            // field 2 is kept as a string (presumably the SRS id — TODO
            // confirm against CoordinateAttributeDefinition), the other
            // fields are numeric coordinates
            if(fieldIndex == 2) {
                fieldValue = value;
            } else {
                fieldValue = Double.valueOf(value);
            }
        }
    } else if(def instanceof DateAttributeDefinition) {
        // each date field (day/month/year) is an integer
        Integer val = Integer.valueOf(value);
        fieldValue = val;
    } else if(def instanceof NumberAttributeDefinition) {
        NumberAttributeDefinition numberDef = (NumberAttributeDefinition) def;
        Type type = numberDef.getType();
        Number number = null;
        switch(type) {
        case INTEGER:
            number = Integer.valueOf(value);
            break;
        case REAL:
            number = Double.valueOf(value);
            break;
        }
        if(number != null) {
            fieldValue = number;
        }
    } else if(def instanceof RangeAttributeDefinition) {
        RangeAttributeDefinition.Type type = ((RangeAttributeDefinition) def).getType();
        Number number = null;
        switch(type) {
        case INTEGER:
            number = Integer.valueOf(value);
            break;
        case REAL:
            number = Double.valueOf(value);
            break;
        }
        if(number != null) {
            fieldValue = number;
        }
    } else if(def instanceof TimeAttributeDefinition) {
        fieldValue = Integer.valueOf(value);
    } else {
        // text/code/other definitions: keep the raw string
        fieldValue = value;
    }
    return fieldValue;
}
/**
 * Parses a whole-attribute textual value for composite attribute types.
 * Only code and range attributes are supported here; other types are set
 * field-by-field via parseFieldValue().
 *
 * @throws IllegalArgumentException for any other definition type
 */
private Object parseCompositeAttributeValue(Entity parentEntity, AttributeDefinition defn, String value) {
    Object result;
    if(defn instanceof CodeAttributeDefinition) {
        Record record = parentEntity.getRecord();
        ModelVersion version = record.getVersion();
        result = parseCode(parentEntity, (CodeAttributeDefinition) defn, value, version);
    } else if(defn instanceof RangeAttributeDefinition) {
        RangeAttributeDefinition rangeDef = (RangeAttributeDefinition) defn;
        RangeAttributeDefinition.Type type = rangeDef.getType();
        NumericRange<?> range = null;
        switch(type) {
        case INTEGER:
            range = IntegerRange.parseIntegerRange(value);
            break;
        case REAL:
            range = RealRange.parseRealRange(value);
            break;
        }
        result = range;
    } else {
        throw new IllegalArgumentException("Invalid AttributeDefinition: expected CodeAttributeDefinition or RangeAttributeDefinition");
    }
    return result;
}
/**
 * Promotes the active record to the next workflow step, unlocks it and
 * clears it from the session.
 */
@Transactional
public void promoteActiveRecord() throws RecordPersistenceException {
    SessionState sessionState = sessionManager.getSessionState();
    CollectRecord activeRecord = sessionState.getActiveRecord();
    User sessionUser = sessionState.getUser();
    recordManager.promote(activeRecord, sessionUser);
    recordManager.unlock(activeRecord, sessionUser);
    sessionManager.clearActiveRecord();
}

/**
 * Demotes the active record to the previous workflow step, unlocks it and
 * clears it from the session.
 */
@Transactional
public void demoteActiveRecord() throws RecordPersistenceException {
    SessionState sessionState = sessionManager.getSessionState();
    CollectRecord activeRecord = sessionState.getActiveRecord();
    User sessionUser = sessionState.getUser();
    recordManager.demote(activeRecord, sessionUser);
    recordManager.unlock(activeRecord, sessionUser);
    sessionManager.clearActiveRecord();
}
/**
 * Not yet implemented: intended to move the given node to a new position.
 * Currently a no-op.
 */
public void updateNodeHierarchy(Node<? extends NodeDefinition> node, int newPosition) {
}

/**
 * Not yet implemented. Currently always returns null — callers must
 * null-check until this is completed (TODO: prefer returning an empty list).
 */
public List<String> find(String context, String query) {
    return null;
}
/**
 * Removes the active record from the current session, unlocking it first
 * when its state is SAVED (i.e. it holds a persistent lock).
 *
 * @throws RecordPersistenceException if unlocking fails
 */
public void clearActiveRecord() throws RecordPersistenceException {
    SessionState sessionState = this.sessionManager.getSessionState();
    if (RecordState.SAVED == sessionState.getActiveRecordState()) {
        CollectRecord activeRecord = sessionState.getActiveRecord();
        this.recordManager.unlock(activeRecord, sessionState.getUser());
    }
    this.sessionManager.clearActiveRecord();
}
/**
 * Gets the code list items assignable to the specified attribute and matching
 * the specified codes. Returns an empty list when codes is null or empty.
 *
 * Fix: the previous nested scan was O(items * codes) and added the same item
 * once per duplicate entry in codes; a set membership test is linear and adds
 * each matching item exactly once.
 *
 * @param parentEntityId internal id of the entity containing the attribute
 * @param attrName name of the code attribute definition
 * @param codes codes to filter by
 * @return proxies of the matching assignable items
 */
public List<CodeListItemProxy> getCodeListItems(int parentEntityId, String attrName, String[] codes){
    CollectRecord record = getActiveRecord();
    Entity parent = (Entity) record.getNodeByInternalId(parentEntityId);
    CodeAttributeDefinition def = (CodeAttributeDefinition) parent.getDefinition().getChildDefinition(attrName);
    List<CodeListItem> items = getAssignableCodeListItems(parent, def);
    List<CodeListItem> filteredItems = new ArrayList<CodeListItem>();
    if(codes != null && codes.length > 0) {
        //filter by specified codes
        Set<String> requestedCodes = new HashSet<String>();
        for (String code : codes) {
            requestedCodes.add(code);
        }
        for (CodeListItem item : items) {
            if (requestedCodes.contains(item.getCode())) {
                filteredItems.add(item);
            }
        }
    }
    return CodeListItemProxy.fromList(filteredItems);
}
/**
 * Gets proxies for all code list items assignable to the specified attribute,
 * flagging the ones already selected in the parent entity.
 *
 * @param parentEntityId internal id of the entity containing the attribute
 * @param attrName name of the code attribute definition
 * @return proxies of the assignable items, with selection flags set
 */
public List<CodeListItemProxy> findAssignableCodeListItems(int parentEntityId, String attrName){
    Entity parent = (Entity) getActiveRecord().getNodeByInternalId(parentEntityId);
    CodeAttributeDefinition def = (CodeAttributeDefinition) parent.getDefinition().getChildDefinition(attrName);
    List<CodeListItemProxy> proxies = CodeListItemProxy.fromList(getAssignableCodeListItems(parent, def));
    List<Node<?>> selectedCodes = parent.getAll(attrName);
    CodeListItemProxy.setSelectedItems(proxies, selectedCodes);
    return proxies;
}
/**
 * Finds the code list items assignable to the specified attribute that match
 * each of the passed codes (codes with no match are silently skipped).
 *
 * @param parentEntityId internal id of the entity containing the attribute
 * @param attributeName name of the code attribute definition
 * @param codes codes to resolve
 * @return proxies of the matched items, in the order of the passed codes
 */
public List<CodeListItemProxy> findAssignableCodeListItems(int parentEntityId, String attributeName, String[] codes) {
    Entity parent = (Entity) getActiveRecord().getNodeByInternalId(parentEntityId);
    CodeAttributeDefinition def = (CodeAttributeDefinition) parent.getDefinition().getChildDefinition(attributeName);
    List<CodeListItem> assignable = getAssignableCodeListItems(parent, def);
    List<CodeListItemProxy> proxies = new ArrayList<CodeListItemProxy>();
    for (String code : codes) {
        CodeListItem match = findCodeListItem(assignable, code);
        if (match != null) {
            proxies.add(new CodeListItemProxy(match));
        }
    }
    return proxies;
}
/** Returns the user bound to the current session. */
private User getUserInSession() {
    return getSessionManager().getSessionState().getUser();
}

/** Returns the survey currently active in the session. */
private CollectSurvey getActiveSurvey() {
    return getSessionManager().getSessionState().getActiveSurvey();
}

/** Returns the record currently active in the session. */
protected CollectRecord getActiveRecord() {
    return getSessionManager().getSessionState().getActiveRecord();
}

/** Accessor for subclasses/tests. */
protected SessionManager getSessionManager() {
    return sessionManager;
}

/** Accessor for subclasses/tests. */
protected RecordManager getRecordManager() {
    return recordManager;
}
/**
 * Start of CodeList utility methods
 *
 * TODO move them to a better location
 */
/**
 * Returns the code list items assignable to the given code attribute inside
 * the given parent entity, restricted to the record's model version. For a
 * hierarchical list (non-empty parent expression) the assignable items are
 * the children of the currently selected parent code; if no parent code is
 * selected the result is empty.
 */
private List<CodeListItem> getAssignableCodeListItems(Entity parent, CodeAttributeDefinition def) {
    CollectRecord record = getActiveRecord();
    ModelVersion version = record.getVersion();
    List<CodeListItem> items = null;
    if(StringUtils.isEmpty(def.getParentExpression())){
        // flat list: all items are candidates
        items = def.getList().getItems();
    } else {
        CodeAttribute parentCodeAttribute = getCodeParent(parent, def);
        if(parentCodeAttribute!=null){
            CodeListItem parentCodeListItem = parentCodeAttribute.getCodeListItem();
            if(parentCodeListItem != null) {
                //TODO exception if parent not specified
                items = parentCodeListItem.getChildItems();
            }
        }
    }
    List<CodeListItem> result = new ArrayList<CodeListItem>();
    if(items != null) {
        for (CodeListItem item : items) {
            // NOTE(review): assumes the record always carries a version;
            // version would be null for unversioned surveys — confirm.
            if(version.isApplicable(item)) {
                result.add(item);
            }
        }
    }
    return result;
}
/**
 * Evaluates the definition's parent expression against the given context
 * entity and returns the resulting parent CodeAttribute, or null when the
 * expression does not resolve to a CodeAttribute or fails to evaluate.
 */
private CodeAttribute getCodeParent(Entity context, CodeAttributeDefinition def) {
    try {
        String parentExpr = def.getParentExpression();
        ExpressionFactory expressionFactory = context.getRecord().getSurveyContext().getExpressionFactory();
        ModelPathExpression expression = expressionFactory.createModelPathExpression(parentExpr);
        Node<?> parentNode = expression.evaluate(context, null);
        if (parentNode != null && parentNode instanceof CodeAttribute) {
            return (CodeAttribute) parentNode;
        }
    } catch (Exception e) {
        // Deliberate best-effort: any evaluation error is treated as
        // "no parent code" rather than propagated. NOTE(review): consider
        // logging the exception to aid debugging of bad parent expressions.
    }
    return null;
}
/**
 * Finds the item in siblings whose code matches the given code after the
 * requested code is trimmed, upper-cased and stripped of leading zeros
 * (item codes may themselves carry leading zeros, hence the "^[0]*" prefix).
 * Returns null when nothing matches.
 *
 * Fix: the Pattern is loop-invariant and was previously recompiled for every
 * sibling; it is now compiled once before the loop.
 */
private CodeListItem findCodeListItem(List<CodeListItem> siblings, String code) {
    String adaptedCode = code.trim();
    adaptedCode = adaptedCode.toUpperCase();
    //remove initial zeros
    adaptedCode = adaptedCode.replaceFirst("^0+", "");
    adaptedCode = Pattern.quote(adaptedCode);
    Pattern pattern = Pattern.compile("^[0]*" + adaptedCode + "$");
    for (CodeListItem item : siblings) {
        Matcher matcher = pattern.matcher(item.getCode());
        if(matcher.find()) {
            return item;
        }
    }
    return null;
}
/**
 * Parses the given textual value into a {@link Code} assignable to the given
 * code attribute definition, resolving it against the assignable items.
 */
private Code parseCode(Entity parent, CodeAttributeDefinition def, String value, ModelVersion version) {
    List<CodeListItem> items = getAssignableCodeListItems(parent, def);
    Code code = parseCode(value, items, version);
    return code;
}

/**
 * Parses a "code" or "code:qualifier" string into a Code. When the code part
 * matches an item of codeList the item's canonical code is used; otherwise
 * the raw string is kept. (The version parameter is currently unused.)
 */
private Code parseCode(String value, List<CodeListItem> codeList, ModelVersion version) {
    Code code = null;
    String[] strings = value.split(":");
    String codeStr = null;
    String qualifier = null;
    switch(strings.length) {
    case 2:
        qualifier = strings[1].trim();
        // intentional fall-through: the code part is strings[0] in both cases
    case 1:
        codeStr = strings[0].trim();
        break;
    default:
        //TODO throw error: invalid parameter
        // NOTE(review): with more than two ':'-separated parts codeStr stays
        // null and findCodeListItem() below NPEs on code.trim() — confirm
        // inputs are validated upstream or handle this case explicitly.
    }
    CodeListItem codeListItem = findCodeListItem(codeList, codeStr);
    if(codeListItem != null) {
        code = new Code(codeListItem.getCode(), qualifier);
    }
    if (code == null) {
        // no canonical match: keep the code exactly as the user typed it
        code = new Code(codeStr, qualifier);
    }
    return code;
}
} |
package com.yahoo.vespa.model.builder.xml.dom.chains.search;
import com.yahoo.binaryprefix.BinaryPrefix;
import com.yahoo.component.chain.dependencies.Dependencies;
import com.yahoo.component.chain.model.ChainedComponentModel;
import com.yahoo.component.ComponentId;
import com.yahoo.component.chain.model.ChainSpecification;
import com.yahoo.search.searchchain.model.federation.FederationOptions;
import com.yahoo.search.searchchain.model.federation.HttpProviderSpec;
import com.yahoo.search.searchchain.model.federation.LocalProviderSpec;
import com.yahoo.text.XML;
import com.yahoo.config.model.producer.AbstractConfigProducer;
import com.yahoo.vespa.model.builder.xml.dom.BinaryScaledAmountParser;
import com.yahoo.vespa.model.builder.xml.dom.chains.ComponentsBuilder;
import com.yahoo.vespa.model.container.search.searchchain.HttpProvider;
import com.yahoo.vespa.model.container.search.searchchain.HttpProviderSearcher;
import com.yahoo.vespa.model.container.search.searchchain.LocalProvider;
import com.yahoo.vespa.model.container.search.searchchain.Provider;
import com.yahoo.vespa.model.container.search.searchchain.Source;
import org.w3c.dom.Element;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
* Builds a provider from xml.
* The demangling of provider types is taken care of here,
* since the mangling is an intrinsic of the configuration language,
* not the model itself.
*
* @author tonytv
*/
public class DomProviderBuilder extends DomGenericTargetBuilder<Provider> {
// NOTE(review): TimeParser, BinaryScaledAmountParser/BinaryPrefix and Dependencies are
// resolved outside this chunk (presumably same package or an import not visible here) — confirm.
/**
* Retrieves all possible provider specific parameters
*/
private static class ProviderReader {
// Every field below is nullable: it is only set when the corresponding
// attribute/element is present on the provider XML element. Which fields
// are legal for a given provider type is validated later via ensureEmpty.
final String type;
final String path;
final Double cacheWeight;
final Integer retries;
final Double readTimeout;
final Double connectionTimeout;
final Double connectionPoolTimeout;
final String clusterName;
final List<HttpProviderSpec.Node> nodes;
final String certificateApplicationId;
final Integer certificateTtl;
final Integer certificateRetryWait;
final HttpProviderSpec.Node certificateProxy; // Just re-using the Node class, as it matches our needs
final Integer cacheSizeMB;
// Eagerly reads every supported attribute/child element of the provider element.
ProviderReader(Element providerElement) {
type = readType(providerElement);
path = readPath(providerElement);
cacheWeight = readCacheWeight(providerElement);
cacheSizeMB = readCacheSize(providerElement);
clusterName = readCluster(providerElement);
readTimeout = readReadTimeout(providerElement);
connectionTimeout = readConnectionTimeout(providerElement);
connectionPoolTimeout = readConnectionPoolTimeout(providerElement);
retries = readRetries(providerElement);
nodes = readNodes(providerElement);
certificateApplicationId = readCertificateApplicationId(providerElement);
certificateTtl = readCertificateTtl(providerElement);
certificateRetryWait = readCertificateRetryWait(providerElement);
certificateProxy = readCertificateProxy(providerElement);
}
// DOM returns "" for absent attributes; normalize that to null so callers
// can distinguish "not given" from a real value.
private String getAttributeOrNull(Element element, String name) {
String value = element.getAttribute(name);
return value.isEmpty() ? null : value;
}
private String readPath(Element providerElement) {
return getAttributeOrNull(providerElement, "path");
}
private String readCluster(Element providerElement) {
return getAttributeOrNull(providerElement, "cluster");
}
private Double readCacheWeight(Element providerElement) {
String cacheWeightString = getAttributeOrNull(providerElement, "cacheweight");
return (cacheWeightString == null)? null : Double.parseDouble(cacheWeightString);
}
// Accepts binary-scaled amounts (e.g. "2g") and converts to whole megabytes.
private Integer readCacheSize(Element providerElement) {
String cacheSize = getAttributeOrNull(providerElement, "cachesize");
return (cacheSize == null)? null : (int)BinaryScaledAmountParser.parse(cacheSize).as(BinaryPrefix.mega);
}
private Integer readRetries(Element providerElement) {
String retriesString = getAttributeOrNull(providerElement, "retries");
return (retriesString == null) ? null : Integer.parseInt(retriesString);
}
// Time attributes are parsed to seconds (as Double) via TimeParser.
private Double readReadTimeout(Element providerElement) {
String timeoutString = getAttributeOrNull(providerElement, "readtimeout");
return (timeoutString == null) ? null : TimeParser.seconds(timeoutString);
}
private Double readConnectionTimeout(Element providerElement) {
String timeoutString = getAttributeOrNull(providerElement, "connectiontimeout");
return (timeoutString == null) ? null : TimeParser.seconds(timeoutString);
}
private Double readConnectionPoolTimeout(Element providerElement) {
String timeoutString = getAttributeOrNull(providerElement, "connectionpooltimeout");
return (timeoutString == null) ? null : TimeParser.seconds(timeoutString);
}
private String readCertificateApplicationId(Element providerElement) {
return getAttributeOrNull(providerElement, "yca-application-id");
}
private Integer readCertificateTtl(Element providerElement) {
String x = getAttributeOrNull(providerElement, "yca-cache-ttl");
return (x == null) ? null : TimeParser.seconds(x).intValue();
}
private Integer readCertificateRetryWait(Element providerElement) {
String x = getAttributeOrNull(providerElement, "yca-cache-retry-wait");
return (x == null) ? null : TimeParser.seconds(x).intValue();
}
// Returns null when no <yca-proxy> element exists; a Node with null host and
// port 0 when the element exists but names no host (the "default proxy").
private HttpProviderSpec.Node readCertificateProxy(Element providerElement) {
Element certificateProxySpec = XML.getChild(providerElement, "yca-proxy");
if (certificateProxySpec == null) {
return null; // no proxy
}
if(getAttributeOrNull(certificateProxySpec, "host") == null) {
return new HttpProviderSpec.Node(null, 0); // default proxy
}
return readNode(certificateProxySpec);
}
// Null when there is no <nodes> element at all (distinct from an empty list).
private List<HttpProviderSpec.Node> readNodes(Element providerElement) {
Element nodesSpec = XML.getChild(providerElement, "nodes");
if (nodesSpec == null) {
return null;
}
List<HttpProviderSpec.Node> nodes = new ArrayList<>();
for (Element nodeSpec : XML.getChildren(nodesSpec, "node")) {
nodes.add(readNode(nodeSpec));
}
return nodes;
}
private HttpProviderSpec.Node readNode(Element nodeElement) {
String host = getAttributeOrNull(nodeElement, "host");
// The direct calls to parse methods below works because the schema
// guarantees us no null references
int port = Integer.parseInt(getAttributeOrNull(nodeElement, "port"));
return new HttpProviderSpec.Node(host, port);
}
private String readType(Element providerElement) {
return getAttributeOrNull(providerElement, "type");
}
}
public DomProviderBuilder(Map<String, ComponentsBuilder.ComponentType> outerSearcherTypeByComponentName) {
super(outerSearcherTypeByComponentName);
}
// Builds the Provider chain: reads all parameters, validates the
// certificate-proxy/application-id pairing, then attaches nested <source> chains.
@Override
protected Provider buildChain(AbstractConfigProducer ancestor, Element providerElement,
ChainSpecification specWithoutInnerComponents) {
ProviderReader providerReader = new ProviderReader(providerElement);
if (providerReader.certificateApplicationId == null && providerReader.certificateProxy != null) {
throw new IllegalArgumentException(
"Provider '" + specWithoutInnerComponents.componentId +
"' must have a certificate application ID, since a certificate store proxy is given");
}
FederationOptions federationOptions = readFederationOptions(providerElement);
Provider provider = buildProvider(specWithoutInnerComponents, providerReader, federationOptions);
Collection<Source> sources = buildSources(ancestor, providerElement);
addSources(provider, sources);
return provider;
}
private Collection<Source> buildSources(AbstractConfigProducer ancestor, Element providerElement) {
List<Source> sources = new ArrayList<>();
for (Element sourceElement : XML.getChildren(providerElement, "source")) {
sources.add(new DomSourceBuilder(outerComponentTypeByComponentName).build(ancestor, sourceElement));
}
return sources;
}
private void addSources(Provider provider, Collection<Source> sources) {
for (Source source : sources) {
provider.addSource(source);
}
}
// Dispatch on the (mangled) "type" attribute: absent -> plain http provider,
// otherwise an http- or local-provider variant; anything else is an error.
private Provider buildProvider(ChainSpecification specWithoutInnerSearchers,
ProviderReader providerReader, FederationOptions federationOptions) {
if (providerReader.type == null) {
return buildEmptyHttpProvider(specWithoutInnerSearchers, providerReader, federationOptions);
} else if (HttpProviderSpec.includesType(providerReader.type)) {
return buildHttpProvider(specWithoutInnerSearchers, providerReader, federationOptions);
} else if (LocalProviderSpec.includesType(providerReader.type)) {
return buildLocalProvider(specWithoutInnerSearchers, providerReader, federationOptions);
} else {
throw new RuntimeException("Unknown provider type '" + providerReader.type + "'");
}
}
// Local providers only accept cluster/cachesize; every http-only option must be absent.
private Provider buildLocalProvider(ChainSpecification specWithoutInnerSearchers, ProviderReader providerReader, FederationOptions federationOptions) {
try {
ensureEmpty(specWithoutInnerSearchers.componentId, providerReader.cacheWeight, providerReader.path, providerReader.nodes,
providerReader.readTimeout, providerReader.connectionTimeout, providerReader.connectionPoolTimeout,
providerReader.retries, providerReader.certificateApplicationId, providerReader.certificateTtl,
providerReader.certificateRetryWait, providerReader.certificateProxy);
return new LocalProvider(specWithoutInnerSearchers,
federationOptions,
new LocalProviderSpec(providerReader.clusterName, providerReader.cacheSizeMB));
} catch (Exception e) {
throw new RuntimeException("Failed creating local provider " + specWithoutInnerSearchers.componentId, e);
}
}
private Provider buildHttpProvider(ChainSpecification specWithoutInnerSearchers, ProviderReader providerReader, FederationOptions federationOptions) {
ensureEmpty(specWithoutInnerSearchers.componentId, providerReader.clusterName);
Provider httpProvider = buildEmptyHttpProvider(specWithoutInnerSearchers, providerReader, federationOptions);
httpProvider.addInnerComponent(new HttpProviderSearcher(
new ChainedComponentModel(
HttpProviderSpec.toBundleInstantiationSpecification(HttpProviderSpec.Type.valueOf(providerReader.type)),
Dependencies.emptyDependencies())));
return httpProvider;
}
// "Empty" = the provider chain without the type-specific inner searcher.
private Provider buildEmptyHttpProvider(ChainSpecification specWithoutInnerSearchers, ProviderReader providerReader, FederationOptions federationOptions) {
ensureEmpty(specWithoutInnerSearchers.componentId, providerReader.clusterName);
return new HttpProvider(specWithoutInnerSearchers,
federationOptions,
new HttpProviderSpec(
providerReader.cacheWeight,
providerReader.path,
providerReader.nodes,
providerReader.certificateApplicationId,
providerReader.certificateTtl,
providerReader.certificateRetryWait,
providerReader.certificateProxy,
providerReader.cacheSizeMB,
connectionParameters(providerReader)));
}
private HttpProviderSpec.ConnectionParameters connectionParameters(ProviderReader providerReader) {
return new HttpProviderSpec.ConnectionParameters(
providerReader.readTimeout,
providerReader.connectionTimeout,
providerReader.connectionPoolTimeout,
providerReader.retries);
}
// Rejects any option that is not applicable to the chosen provider type.
private void ensureEmpty(ComponentId componentId, Object... objects) {
for (Object object : objects) {
if (object != null) {
throw new RuntimeException("Invalid provider option in provider '" + componentId + "': value='" + object + "'");
}
}
}
}
package cliente;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.net.SocketException;
import java.net.UnknownHostException;
import javax.swing.*;
import javax.swing.event.MenuListener;
import partida.Partida;
public class ClienteFlotaSockets {
// Modifica todas las llamadas al objeto de la clase Partida
// por llamadas al objeto de la clase AuxiliarClienteFlota.
/**
* Implementa el juego 'Hundir la flota' mediante una interfaz grafica (GUI)
*/
/** Estados posibles de las casillas del tablero */
private static final int AGUA = -1, TOCADO = -2, HUNDIDO = -3;
/** Parametros por defecto de una partida */
private static final int NUMFILAS=8, NUMCOLUMNAS=8, NUMBARCOS=6;
private Partida partida = null; // Objeto con los datos de la partida en juego
private JFrame frame = null; // Tablero de juego
private JLabel estado = null; // Texto en el panel de estado
private JButton buttons[][] = null; // Botones asociados a las casillas de la partida
private static AuxiliarClienteFlota auxiliarCliente;
/** Atributos de la partida en juego */
private int numFilas, numColumnas, numBarcos, quedan, disparos;
/**
* Programa principal. Crea y lanza un nuevo juego
* @param args no se utiliza
*/
public static void main(String[] args) {
ClienteFlotaSockets juego = new ClienteFlotaSockets();
try {
auxiliarCliente=new AuxiliarClienteFlota("localhost", "1234");
} catch (SocketException e) {
System.out.println("Error de socket AuxiliarClienteFlota");
e.printStackTrace();
} catch (UnknownHostException e) {
System.out.println("Host no encontrado");
e.printStackTrace();
} catch (IOException e) {
System.out.println("Error de socket AuxiliarClienteFlota E/S");
e.printStackTrace();
}
juego.ejecuta();
}
/**
* Lanza una nueva hebra que establece los atributos del juego y dibuja la interfaz grafica: tablero
*/
private void ejecuta() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
dibujaTablero();
}
});
}
/**
* Dibuja el tablero de juego y crea la partida inicial
*/
private void dibujaTablero() {
frame = new JFrame();
frame.setLayout(new BorderLayout());
frame.setVisible(true);
anyadeMenu(); //Invoca al metodo que anyade los botones del menu
anyadeGrid(NUMFILAS, NUMCOLUMNAS); //Invoca al metodo que anyade los botones del mar
partida=new Partida(NUMFILAS, NUMCOLUMNAS, NUMBARCOS); //Crea una partida nueva
disparos = partida.getDisparos();
quedan = partida.getQuedan();
anyadePanelEstado("Intentos: " + disparos + " Barcos restantes: " + quedan);
frame.setSize(500, 500);
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
}
/**
* Anyade el menu de opciones del juego
*/
private void anyadeMenu() {
MenuListener e = new MenuListener();
JMenuBar mb = new JMenuBar();
frame.setJMenuBar(mb);
JMenu menu = new JMenu("Opciones");
mb.add(menu);
JMenuItem salir = new JMenuItem("Salir");
salir.setActionCommand("salir");
salir.addActionListener(e);
menu.add(salir);
JMenuItem nuevaPartida = new JMenuItem("Nueva partida");
nuevaPartida.setActionCommand("nueva");
nuevaPartida.addActionListener(e);
menu.add(nuevaPartida);
JMenuItem solucion = new JMenuItem("Mostrar solución");
solucion.setActionCommand("solucion");
solucion.addActionListener(e);
menu.add(solucion);
}
/**
* Anyade el panel con las casillas del mar y sus etiquetas.
* Cada casilla sera un boton con su correspondiente escuchador
* @param nf numero de filas
* @param nc numero de columnas
*/
private void anyadeGrid(int nf, int nc) {
JPanel casillas= new JPanel(new GridLayout(NUMFILAS+1, NUMCOLUMNAS+2));
String[] vectorLetras={"A","B","C","D","E","F","G","H"};
ButtonListener e=new ButtonListener();
buttons=new JButton[NUMFILAS][NUMCOLUMNAS];
//Anyade la primerqa fila con los numeros
casillas.add(new JLabel(""));
casillas.add(new JLabel("1",JLabel.CENTER));
casillas.add(new JLabel("2",JLabel.CENTER));
casillas.add(new JLabel("3",JLabel.CENTER));
casillas.add(new JLabel("4",JLabel.CENTER));
casillas.add(new JLabel("5",JLabel.CENTER));
casillas.add(new JLabel("6",JLabel.CENTER));
casillas.add(new JLabel("7",JLabel.CENTER));
casillas.add(new JLabel("8",JLabel.CENTER));
casillas.add(new JLabel(""));
for(int i=0;i<NUMFILAS;i++){
for(int j=0;j<NUMCOLUMNAS+2;j++){
if(j==0){
casillas.add(new JLabel(vectorLetras[i],JLabel.CENTER)); //Anyade la letra correspondiente al numero de fila
continue;
}
if(j==NUMCOLUMNAS+1){
casillas.add(new JLabel(vectorLetras[i],JLabel.CENTER)); //Anyade la letra correspondiente al numero de fila
continue;
}
JButton boton=new JButton();
int [] posicion={i,j-1}; //Guarda la posicion del boton que ocupa en la matriz
boton.addActionListener(e);
boton.putClientProperty("posicion",posicion); //Asigna la posicion del boton en la matriz al propio boton
buttons [i][j-1]=boton; //Anyade el boton a la matriz de botones
casillas.add(boton); //Anyade el boton al panel Grid
}
}
frame.getContentPane().add(casillas, BorderLayout.CENTER);
}
/**
* Anyade el panel de estado al tablero
* @param cadena cadena inicial del panel de estado
*/
private void anyadePanelEstado(String cadena) {
JPanel panelEstado = new JPanel();
estado = new JLabel(cadena);
panelEstado.add(estado);
frame.getContentPane().add(panelEstado, BorderLayout.SOUTH);
}
/**
* Cambia la cadena mostrada en el panel de estado
* @param cadenaEstado nuevo estado
*/
private void cambiaEstado(String cadenaEstado) {
estado.setText(cadenaEstado);
}
/**
* Muestra la solucion de la partida y marca la partida como finalizada
*/
private void muestraSolucion() {
//Recorre la matriz de botones y cambia el color del boton segun el valor que tiene el mar en la misma posicion
try {
String[] solucion=auxiliarCliente.getSolucion();
for(int i=0;i<NUMFILAS;i++){
for (int j = 0; j < NUMCOLUMNAS; j++) {
buttons[i][j].setBackground(Color.blue);
buttons[i][j].setEnabled(false); //Deshabilita el boton
}
}
for(int i=0;i<solucion.length;i++){
String[] barcoInfo=solucion[i].split("
int fi = Integer.parseInt(barcoInfo[0]);
int ci = Integer.parseInt(barcoInfo[1]);
int t = Integer.parseInt(barcoInfo[3]);
if(barcoInfo[2].equals("V")){
for(int j=0;j<t;i++){
buttons[fi + j][ci].setBackground(Color.red);
}
}else{
for(int j = 0; j < t; i++) {
buttons[fi][ci + j].setBackground(Color.red);
}
}
}
} catch (IOException e) {
System.out.println("Error al recuperar solucion");
e.printStackTrace();
}
}
/**
* Limpia las casillas del tablero
*/
private void limpiaTablero() {
//Recorre la matriz de botones y recupera el color original del boton
for (int i = 0; i < NUMFILAS; i++) {
for (int j = 0; j < NUMCOLUMNAS; j++) {
buttons[i][j].setBackground(null);
buttons[i][j].setEnabled(true); //Vuelve a habilitar el boton
}
}
try {
auxiliarCliente.nuevaPartida(NUMFILAS, NUMCOLUMNAS, NUMBARCOS);
} catch (IOException e) {
System.out.println("Error al crear partida");
e.printStackTrace();
}
disparos = partida.getDisparos();
quedan = partida.getQuedan();
cambiaEstado("Intentos: " + disparos + " Barcos restantes: " + quedan);
}
/**
* Clase interna que escucha el menu de Opciones del tablero
*
*/
private class MenuListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
String opcion=e.getActionCommand();
switch(opcion){
case"salir":
auxiliarCliente.fin();
System.exit(0);
break;
case "nueva":
limpiaTablero();
break;
case "solucion":
muestraSolucion();
break;
}
}
}
/**
* Clase interna que escucha cada uno de los botones del tablero
* Para poder identificar el boton que ha generado el evento se pueden usar las propiedades
* de los componentes, apoyandose en los metodos putClientProperty y getClientProperty
*/
private class ButtonListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
JButton boton = (JButton) e.getSource();
int [] pos = (int[]) boton.getClientProperty("posicion");
try {
int valor = auxiliarCliente.pruebaCasilla(pos[0], pos[1]);
if(valor == AGUA)
boton.setBackground(Color.blue);
else if(valor == TOCADO)
boton.setBackground(Color.yellow);
else {
//El barco se hunde.
String[] datosBarco = auxiliarCliente.getBarco(valor).split("#"); //Obtenemos un vector con las propiedades del Barco
int fi = Integer.parseInt(datosBarco[0]);
int ci = Integer.parseInt(datosBarco[1]);
int t = Integer.parseInt(datosBarco[3]);
if(datosBarco[2].equals("V")) {
for(int i = 0; i < t; i++) {
buttons[fi + i][ci].setBackground(Color.red);
}
} else {
for(int i = 0; i < t; i++) {
buttons[fi][ci + i].setBackground(Color.red);
}
}
quedan = auxiliarCliente.getQuedanBarcos();
}
disparos = auxiliarCliente.getDisparos();
} catch (IOException ex) {
ex.printStackTrace();
}
//Si ya no quedan barcos la partida termina.
if(quedan == 0)
muestraSolucion();
cambiaEstado("Intentos: " + disparos + " Barcos restantes: " + quedan);
//boton.setEnabled(false);
} // end actionPerformed
} // end class ButtonListener
} |
package fi.nls.oskari.control.statistics.user;
import fi.nls.oskari.annotation.OskariActionRoute;
import fi.nls.oskari.control.*;
import fi.nls.oskari.control.statistics.GetIndicatorMetadataHandler;
import fi.nls.oskari.control.statistics.data.StatisticalIndicator;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.service.OskariComponentManager;
import fi.nls.oskari.util.JSONHelper;
import fi.nls.oskari.util.ResponseHelper;
import org.json.JSONException;
import org.oskari.statistics.user.StatisticalIndicatorService;
/**
* Deletes indicator that the user has previously saved.
* Only allows deletion of the users own indicators
*/
@OskariActionRoute("DeleteUserIndicator")
public class DeleteUserIndicatorHandler extends ActionHandler {
private static final Logger LOG = LogFactory.getLogger(DeleteUserIndicatorHandler.class);
private StatisticalIndicatorService indicatorService;
@Override
public void init() {
super.init();
if (indicatorService == null) {
indicatorService = OskariComponentManager.getComponentOfType(StatisticalIndicatorService.class);
}
}
public void handleAction(ActionParameters params) throws ActionException {
// user indicators are user content so deleting one requires to be logged in
params.requireLoggedInUser();
int id = params.getRequiredParamInt(ActionConstants.PARAM_ID);
StatisticalIndicator ind = indicatorService.findById(id, params.getUser().getId());
if(ind == null) {
// or might not be the owner
throw new ActionParamsException("Unknown indicator: " + id );
}
int year = params.getHttpParam("year", -1);
if(year != -1) {
// if year present, regionset is also required
int regionset = params.getRequiredParamInt("regionset");
indicatorService.deleteIndicatorData(id, regionset, year);
} else if (!indicatorService.delete(id, params.getUser().getId())) {
// remove the whole indicator
throw new ActionParamsException("Indicator wasn't removed: " + + id);
}
LOG.info("Deleted indicator", id);
try {
ResponseHelper.writeResponse(params, GetIndicatorMetadataHandler.toJSON(ind));
} catch (JSONException ex) {
ResponseHelper.writeResponse(params, JSONHelper.createJSONObject("deleted", id));
}
}
} |
import java.io.*;
import java.util.*;
/**
* Write a description of class SecretWord here.
*
* @author (your name)
* @version (a version number or a date)
*/
public class SecretWord
{
ArrayList<String> letters = new ArrayList<String>();
List<Boolean> m;
/**
* Constructor for objects of class SecretWord
*/
public SecretWord() throws FileNotFoundException
{
String s = FileReader1.choose(new File("README.TXT"));
for (int i = 0; i<s.length();i++)
{
letters.add(s.substring(i,i+1));
}
}
/**
* An example of a method - replace this comment with your own
*
* @param y a sample parameter for a method
* @return the sum of x and y
*/
public String toString()
{
return "";
}
} |
package cc.topicexplorer.database.tables.documenttopic;
/** MIT-JOOQ-START
import static jooq.generated.Tables.DOCUMENT_TERM_TOPIC;
import static jooq.generated.Tables.DOCUMENT_TOPIC;
import static jooq.generated.Tables.DOCUMENT;
import static jooq.generated.Tables.TOPIC;
MIT-JOOQ-ENDE */
import java.sql.SQLException;
import java.util.Set;
import org.apache.log4j.Logger;
import cc.topicexplorer.commands.TableFillCommand;
import com.google.common.collect.Sets;
/**
 * Fills the DOCUMENT_TOPIC table: for every (document, topic) pair it inserts
 * the token count together with P(document|topic) and P(topic|document),
 * derived from DOCUMENT_TERM_TOPIC joined with the DOCUMENT and TOPIC totals,
 * and then adds the lookup indexes used by later queries.
 */
public class DocumentTopicFill extends TableFillCommand {
private static final Logger logger = Logger.getLogger(DocumentTopicFill.class);
@Override
public void setTableName() {
/**
* MIT-JOOQ-START tableName = DOCUMENT_TOPIC.getName(); MIT-JOOQ-ENDE
*/
/** OHNE_JOOQ-START */
tableName = "DOCUMENT_TOPIC";
/** OHNE_JOOQ-ENDE */
}
// Builds and runs the aggregation INSERT, then adds the indexes; wraps any
// SQLException into a RuntimeException after logging.
@Override
public void fillTable() {
/**
* MIT-JOOQ-START String sql = "INSERT INTO " + DOCUMENT_TOPIC.getName() + "(" +
* DOCUMENT_TOPIC.DOCUMENT_ID.getName() + ", " + DOCUMENT_TOPIC.TOPIC_ID.getName() + ", " +
* DOCUMENT_TOPIC.NUMBER_OF_TOKEN_TOPIC_IN_DOCUMENT.getName() + ", " +
* DOCUMENT_TOPIC.PR_DOCUMENT_GIVEN_TOPIC.getName() + ", " + DOCUMENT_TOPIC.PR_TOPIC_GIVEN_DOCUMENT.getName() +
* ") " + " select " + DOCUMENT_TERM_TOPIC.getName() + "." + DOCUMENT_TERM_TOPIC.DOCUMENT_ID.getName() + ", " +
* DOCUMENT_TERM_TOPIC.getName() + "." + DOCUMENT_TERM_TOPIC.TOPIC_ID.getName() + ", " + "count(*), " +
* "cast(count(*) AS DECIMAL(65,30)) / cast(" + TOPIC.getName() + "." + TOPIC.NUMBER_OF_TOKENS.getName() +
* " AS DECIMAL(65,30)), " + "cast(count(*) AS DECIMAL(65,30)) / cast(" + DOCUMENT.getName() + "." +
* DOCUMENT.NUMBER_OF_TOKENS.getName() + " AS DECIMAL(65,30)) " + " from " + DOCUMENT_TERM_TOPIC.getName() +
* " join " + DOCUMENT.getName() + " on ( " + DOCUMENT_TERM_TOPIC.getName() + "." +
* DOCUMENT_TERM_TOPIC.DOCUMENT_ID.getName() + " = " + DOCUMENT.getName() + "." + DOCUMENT.DOCUMENT_ID.getName()
* + ")" + " join " + TOPIC.getName() + " on (" + DOCUMENT_TERM_TOPIC.getName() + "." +
* DOCUMENT_TERM_TOPIC.TOPIC_ID.getName() + " = " + TOPIC.getName() + "." + TOPIC.TOPIC_ID.getName() + ")" +
* " group by " + DOCUMENT_TERM_TOPIC.getName() + "." + DOCUMENT_TERM_TOPIC.DOCUMENT_ID.getName() + ", " +
* DOCUMENT_TERM_TOPIC.getName() + "." + DOCUMENT_TERM_TOPIC.TOPIC_ID.getName() + ", " + DOCUMENT.getName() +
* "." + DOCUMENT.NUMBER_OF_TOKENS.getName() + ", " + TOPIC.getName() + "." + TOPIC.NUMBER_OF_TOKENS.getName();
*
* database.executeUpdateQuery(sql);
*
* database.executeUpdateQuery("ALTER TABLE " + DOCUMENT_TOPIC.getName() + " ADD KEY PRIMARY_IDX(" +
* DOCUMENT_TOPIC.DOCUMENT_ID.getName() + "," + DOCUMENT_TOPIC.TOPIC_ID.getName() + ")," +
* " ADD KEY TOPIC_DOCUMENT_IDX (" + DOCUMENT_TOPIC.TOPIC_ID.getName() + "," +
* DOCUMENT_TOPIC.DOCUMENT_ID.getName() + ")," + " ADD KEY TOPIC_PR_DOCUMENT_GIVEN_TOPIC_IDX (" +
* DOCUMENT_TOPIC.TOPIC_ID.getName() + "," + DOCUMENT_TOPIC.PR_DOCUMENT_GIVEN_TOPIC.getName() + ")," +
* " ADD KEY DOCUMENT_PR_DOKUMENT_GIVEN_TOPIC_IDX (" + DOCUMENT_TOPIC.DOCUMENT_ID.getName() + "," +
* DOCUMENT_TOPIC.PR_TOPIC_GIVEN_DOCUMENT.getName() + ")"); MIT-JOOQ-ENDE
*/
/** OHNE_JOOQ-START */
// The inner SELECT counts tokens per (document, topic); the outer SELECT
// joins the per-document and per-topic token totals to derive the two
// conditional probabilities as DECIMAL(65,30) ratios.
// @formatter:off
String sql = "INSERT INTO " + "DOCUMENT_TOPIC" + "("
+ "DOCUMENT_TOPIC.DOCUMENT_ID" + ", "
+ "DOCUMENT_TOPIC.TOPIC_ID" + ", "
+ "DOCUMENT_TOPIC.NUMBER_OF_TOKEN_TOPIC_IN_DOCUMENT"
+ ", " + "DOCUMENT_TOPIC.PR_DOCUMENT_GIVEN_TOPIC"
+ ", " + "DOCUMENT_TOPIC.PR_TOPIC_GIVEN_DOCUMENT"
+ ") " + " select "
+ "X.DOCUMENT_ID, X.TOPIC_ID, X.document_topic_count, "
+ "cast(X.document_topic_count AS DECIMAL(65,30)) / cast(TOPIC.NUMBER_OF_TOKENS AS DECIMAL(65,30)), "
+ "cast(X.document_topic_count AS DECIMAL(65,30)) / cast(DOCUMENT.NUMBER_OF_TOKENS AS DECIMAL(65,30)) "
+ "from (SELECT DOCUMENT_ID, TOPIC_ID, count(*) as document_topic_count "
+ "from DOCUMENT_TERM_TOPIC "
+ "group by DOCUMENT_TERM_TOPIC.DOCUMENT_ID, DOCUMENT_TERM_TOPIC.TOPIC_ID) X "
+ "join DOCUMENT on (X.DOCUMENT_ID = DOCUMENT.DOCUMENT_ID) "
+ "join TOPIC on (X.TOPIC_ID = TOPIC.TOPIC_ID) ";
//@formatter:on
try {
database.executeUpdateQuery(sql);
// Add the composite keys used for primary lookup and for the
// probability-ordered access paths.
// @formatter:off
database.executeUpdateQuery("ALTER TABLE " + "DOCUMENT_TOPIC"
+ " ADD KEY PRIMARY_IDX(" + "DOCUMENT_ID"
+ "," + "TOPIC_ID" + "),"
+ " ADD KEY TOPIC_DOCUMENT_IDX ("
+ "TOPIC_ID" + ","
+ "DOCUMENT_ID" + "),"
+ " ADD KEY TOPIC_PR_DOCUMENT_GIVEN_TOPIC_IDX ("
+ "TOPIC_ID" + ","
+ "PR_DOCUMENT_GIVEN_TOPIC" + "),"
+ " ADD KEY DOCUMENT_PR_DOKUMENT_GIVEN_TOPIC_IDX ("
+ "DOCUMENT_ID" + ","
+ "PR_TOPIC_GIVEN_DOCUMENT" + ")");
// @formatter:on
} catch (SQLException e) {
logger.error("Table " + this.tableName + " could not be filled properly.");
throw new RuntimeException(e);
}
/** OHNE_JOOQ-ENDE */
}
// No commands are required to run after this one.
@Override
public Set<String> getAfterDependencies() {
return Sets.newHashSet();
}
// The empty target table and all three source tables must exist/be filled first.
@Override
public Set<String> getBeforeDependencies() {
return Sets.newHashSet("DocumentTopicCreate", "DocumentTermTopicFill", "DocumentFill", "TopicFill");
}
@Override
public Set<String> getOptionalAfterDependencies() {
return Sets.newHashSet();
}
@Override
public Set<String> getOptionalBeforeDependencies() {
return Sets.newHashSet();
}
}
package org.eclipse.birt.core.script.functionservice.impl;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.core.script.functionservice.IScriptFunction;
import org.eclipse.birt.core.script.functionservice.IScriptFunctionCategory;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Scriptable;
public class FunctionProvider
{
private static Logger logger = Logger.getLogger( FunctionProvider.class
.getName( ) );
private static String PROVIDER_CLASS = "org.eclipse.birt.core.internal.function.impl.FunctionProviderImpl"; //$NON-NLS-1$
private static IFunctionProvider instance;
/**
* Set the current function provider impl.
* @param provider
*/
public static void setFunctionProvider( IFunctionProvider provider )
{
if ( FunctionProvider.instance == null )
{
FunctionProvider.instance = provider;
}
else
{
logger.warning( "FunctionProvider should not set twice." ); //$NON-NLS-1$
}
}
protected synchronized static IFunctionProvider getFunctionProvider( )
{
if ( instance == null )
{
try
{
Class<?> clazz = Class.forName( PROVIDER_CLASS );
if ( clazz != null )
{
instance = (IFunctionProvider) clazz.newInstance( );
}
}
catch ( Exception ex )
{
logger.log( Level.WARNING,
"failed to initialize IFunctionProvider instance", ex ); //$NON-NLS-1$
}
}
return instance;
}
/**
* Return all the categories defined by extensions.
*
* @return
* @throws BirtException
*/
public static IScriptFunctionCategory[] getCategories( )
throws BirtException
{
IFunctionProvider provider = getFunctionProvider( );
if ( provider != null )
return provider.getCategories( );
return new IScriptFunctionCategory[]{};
}
/**
* Return the functions that defined in a category.
*
* @param categoryName
* @return
* @throws BirtException
*/
public static IScriptFunction[] getFunctions( String categoryName )
throws BirtException
{
IFunctionProvider provider = getFunctionProvider( );
if ( provider != null )
return provider.getFunctions( categoryName );
return new IScriptFunction[0];
}
/**
* Register script functions to scope.
*
* @param cx
* @param scope
* @throws BirtException
*/
public static void registerScriptFunction( Context cx, Scriptable scope )
throws BirtException
{
IFunctionProvider provider = getFunctionProvider( );
if ( provider != null )
provider.registerScriptFunction( cx, scope );
}
} |
package org.hisp.dhis.android.core.category;
import android.support.test.runner.AndroidJUnit4;
import org.hisp.dhis.android.core.common.UidsHelper;
import org.hisp.dhis.android.core.data.database.MockIntegrationShould;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.List;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
@RunWith(AndroidJUnit4.class)
public class CategoryModuleMockIntegrationShould extends MockIntegrationShould {
@BeforeClass
public static void setUpAll() throws Exception {
// Populate the mock database once for every test in this class.
downloadMetadata();
}
@Test
public void allow_access_to_combos_without_children() {
    // A plain get() must not hydrate any nested collections.
    List<CategoryCombo> result = d2.categoryModule().categoryCombos.get();
    assertThat(result.size(), is(2));
    for (int i = 0; i < result.size(); i++) {
        CategoryCombo current = result.get(i);
        assertThat(current.categories() == null, is(true));
        assertThat(current.categoryOptionCombos() == null, is(true));
    }
}
@Test
public void allow_access_to_combos_with_category_option_combos() {
    // getWithAllChildren() must hydrate the option-combo children.
    List<CategoryCombo> result = d2.categoryModule().categoryCombos.getWithAllChildren();
    assertThat(result.size(), is(2));
    for (int i = 0; i < result.size(); i++) {
        assertThat(result.get(i).categoryOptionCombos() == null, is(false));
    }
}
@Test
public void allow_access_to_combos_with_categories() {
    // getWithAllChildren() must hydrate the category children.
    List<CategoryCombo> result = d2.categoryModule().categoryCombos.getWithAllChildren();
    assertThat(result.size(), is(2));
    for (int i = 0; i < result.size(); i++) {
        assertThat(result.get(i).categories() == null, is(false));
    }
}
@Test
public void allow_access_to_combo_by_uid_without_children() {
    // Single-combo fetch by uid; children must stay unset.
    CategoryCombo births = d2.categoryModule().categoryCombos.uid("m2jTvAj5kkm").get();
    assertThat(births.uid(), is("m2jTvAj5kkm"));
    assertThat(births.code(), is("BIRTHS"));
    assertThat(births.name(), is("Births"));
    assertThat(births.categories() == null, is(true));
    assertThat(births.categoryOptionCombos() == null, is(true));
}
@Test
public void allow_access_to_combo_by_uid_with_category_option_combos() {
    // Single-combo fetch with children: option combos must be populated.
    CategoryCombo births = d2.categoryModule().categoryCombos.uid("m2jTvAj5kkm").getWithAllChildren();
    assertThat(births.uid(), is("m2jTvAj5kkm"));
    assertThat(births.code(), is("BIRTHS"));
    assertThat(births.name(), is("Births"));
    List<CategoryOptionCombo> children = births.categoryOptionCombos();
    assertThat(children == null, is(false));
    assertThat(children.size(), is(1));
    assertThat(children.get(0).name(), is("Trained TBA, At PHU"));
}
@Test
public void allow_access_to_combo_by_uid_with_sorted_categories() {
    // Categories of the combo must come back populated and in sorted order.
    CategoryCombo births = d2.categoryModule().categoryCombos.uid("m2jTvAj5kkm").getWithAllChildren();
    assertThat(births.uid(), is("m2jTvAj5kkm"));
    assertThat(births.code(), is("BIRTHS"));
    assertThat(births.name(), is("Births"));
    List<Category> sorted = births.categories();
    assertThat(sorted == null, is(false));
    assertThat(sorted.size(), is(2));
    Category first = sorted.get(0);
    assertThat(first.uid(), is("KfdsGBcoiCa"));
    assertThat(first.code(), is("BIRTHS_ATTENDED"));
    Category second = sorted.get(1);
    assertThat(second.uid(), is("cX5k9anHEHd"));
    assertThat(second.code(), is("GENDER"));
}
@Test
public void allow_access_to_categories_without_children() {
    // Plain category listing: only the expected count matters here.
    List<Category> result = d2.categoryModule().categories.get();
    assertThat(result.size(), is(4));
}
@Test
public void allow_access_to_category_by_uid_without_children() {
    // Single category fetch by uid.
    Category fetched = d2.categoryModule().categories.uid("vGs6omsRekv").get();
    assertThat(fetched.uid(), is("vGs6omsRekv"));
    assertThat(fetched.name(), is("default"));
    assertThat(fetched.dataDimensionType(), is("DISAGGREGATION"));
}
@Test
public void allow_access_to_categories_with_category_options() {
    // getWithAllChildren() must hydrate the category options of every category.
    List<Category> result = d2.categoryModule().categories.getWithAllChildren();
    assertThat(result.size(), is(4));
    for (int i = 0; i < result.size(); i++) {
        assertThat(result.get(i).categoryOptions() == null, is(false));
    }
}
@Test
public void allow_access_to_category_by_uid_with_sorted_category_options() {
    // Options of the category must come back populated and in sorted order.
    Category fetched = d2.categoryModule().categories.uid("KfdsGBcoiCa").getWithAllChildren();
    assertThat(fetched.uid(), is("KfdsGBcoiCa"));
    assertThat(fetched.name(), is("Births attended by"));
    assertThat(fetched.dataDimensionType(), is("DISAGGREGATION"));
    List<CategoryOption> options = fetched.categoryOptions();
    assertThat(options == null, is(false));
    assertThat(options.size(), is(3));
    CategoryOption first = options.get(0);
    assertThat(first.uid(), is("TNYQzTHdoxL"));
    assertThat(first.code(), is("MCH_AIDES"));
    CategoryOption second = options.get(1);
    assertThat(second.uid(), is("TXGfLxZlInA"));
    assertThat(second.code(), is("SECHN"));
    CategoryOption third = options.get(2);
    assertThat(third.uid(), is("uZUnebiT5DI"));
    assertThat(third.code(), is("TRAINED_TBA"));
}
@Test
public void allow_access_to_category_option_combos_without_children() {
// A plain get() returns all option combos; the fixture ships exactly two.
assertThat(d2.categoryModule().categoryOptionCombos.get().size(), is(2));
}
@Test
public void allow_access_to_category_option_combos_with_category_options() {
// Eager loading must populate categoryOptions on every option combo.
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos.getWithAllChildren();
assertThat(categoryOptionCombos.size(), is(2));
for (CategoryOptionCombo categoryOptionCombo : categoryOptionCombos) {
assertThat(categoryOptionCombo.categoryOptions() == null, is(false));
}
}
@Test
public void allow_access_to_category_option_combo_by_uid_without_children() {
// A plain get() by UID returns only the combo's own fields.
CategoryOptionCombo fetched = d2.categoryModule().categoryOptionCombos.uid("Gmbgme7z9BF").get();
assertThat(fetched.uid(), is("Gmbgme7z9BF"));
assertThat(fetched.name(), is("Trained TBA, At PHU"));
}
@Test
public void allow_access_to_category_option_combo_by_uid_with_category_options() {
// Fetch one option combo by UID with child options loaded.
CategoryOptionCombo categoryOptionCombo = d2.categoryModule().categoryOptionCombos.uid("Gmbgme7z9BF").getWithAllChildren();
assertThat(categoryOptionCombo.uid(), is("Gmbgme7z9BF"));
assertThat(categoryOptionCombo.name(), is("Trained TBA, At PHU"));
List<CategoryOption> categoryOptions = categoryOptionCombo.categoryOptions();
assertThat(categoryOptions == null, is(false));
assertThat(categoryOptions.size(), is(2));
// Index by UID so the assertions don't depend on the collection's ordering.
Map<String, CategoryOption> categoryOptionsMap = UidsHelper.mapByUid(categoryOptions);
CategoryOption categoryOption0 = categoryOptionsMap.get("uZUnebiT5DI");
assertThat(categoryOption0.uid(), is("uZUnebiT5DI"));
assertThat(categoryOption0.name(), is("Trained TBA"));
CategoryOption categoryOption1 = categoryOptionsMap.get("Fp4gVHbRvEV");
assertThat(categoryOption1.uid(), is("Fp4gVHbRvEV"));
assertThat(categoryOption1.name(), is("At PHU"));
}
@Test
// NOTE(review): method name says "category_combos" but the body exercises
// categoryOptions — looks like a copy-paste misnomer; consider renaming to
// allow_access_to_category_options_without_children (rename not applied here).
public void allow_access_to_category_combos_without_children() {
// A plain get() returns all category options; the fixture ships exactly eight.
List<CategoryOption> categoryOptions = d2.categoryModule().categoryOptions.get();
assertThat(categoryOptions.size(), is(8));
}
@Test
// NOTE(review): method name says "category_combo" but the body exercises a
// single categoryOption — likely a copy-paste misnomer; consider renaming to
// allow_access_to_category_option_by_uid_without_children (rename not applied here).
public void allow_access_to_category_combo_by_uid_without_children() {
// A plain get() by UID returns only the option's own fields.
CategoryOption categoryOption = d2.categoryModule().categoryOptions.uid("apsOixVZlf1").get();
assertThat(categoryOption.uid(), is("apsOixVZlf1"));
assertThat(categoryOption.name(), is("Female"));
assertThat(categoryOption.code(), is("FMLE"));
}
} |
package org.datasyslab.geospark.formatMapper.shapefileParser.shapes;
import org.apache.commons.io.FilenameUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.log4j.Logger;
import org.datasyslab.geospark.formatMapper.shapefileParser.parseUtils.shp.ShapeType;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
public class CombineShapeReader
extends RecordReader<ShapeKey, PrimitiveShape>
{
/**
* id of input path of .shp file
*/
private FileSplit shpSplit = null;
/**
* id of input path of .shx file
*/
private FileSplit shxSplit = null;
/**
* id of input path of .dbf file
*/
private FileSplit dbfSplit = null;
/**
* RecordReader for .shp file
*/
private ShapeFileReader shapeFileReader = null;
/**
* RecordReader for .dbf file
*/
private DbfFileReader dbfFileReader = null;
/**
* suffix of attribute file
*/
private final static String DBF_SUFFIX = "dbf";
/**
* suffix of shape record file
*/
private final static String SHP_SUFFIX = "shp";
/**
* suffix of index file
*/
private final static String SHX_SUFFIX = "shx";
/**
* flag of whether .dbf exists
*/
private boolean hasDbf = false;
/**
* flag of whether having next .dbf record
*/
private boolean hasNextDbf = false;
/**
* dubug logger
*/
final static Logger logger = Logger.getLogger(CombineShapeReader.class);
/**
* cut the combined split into FileSplit for .shp, .shx and .dbf
*
* @param split
* @param context
* @throws IOException
* @throws InterruptedException
*/
public void initialize(InputSplit split, TaskAttemptContext context)
throws IOException, InterruptedException
{
CombineFileSplit fileSplit = (CombineFileSplit) split;
Path[] paths = fileSplit.getPaths();
for (int i = 0; i < paths.length; ++i) {
String suffix = FilenameUtils.getExtension(paths[i].toString()).toLowerCase();
if (suffix.equals(SHP_SUFFIX)) { shpSplit = new FileSplit(paths[i], fileSplit.getOffset(i), fileSplit.getLength(i), fileSplit.getLocations()); }
else if (suffix.equals(SHX_SUFFIX)) { shxSplit = new FileSplit(paths[i], fileSplit.getOffset(i), fileSplit.getLength(i), fileSplit.getLocations()); }
else if (suffix.equals(DBF_SUFFIX)) { dbfSplit = new FileSplit(paths[i], fileSplit.getOffset(i), fileSplit.getLength(i), fileSplit.getLocations()); }
}
// if shape file doesn't exists, throw an IOException
if (shpSplit == null) { throw new IOException("Can't find .shp file."); }
else {
if (shxSplit != null) {
// shape file exists, extract .shp with .shx
// first read all indexes into memory
Path filePath = shxSplit.getPath();
FileSystem fileSys = filePath.getFileSystem(context.getConfiguration());
FSDataInputStream shxInpuStream = fileSys.open(filePath);
shxInpuStream.skip(24);
int shxFileLength = shxInpuStream.readInt() * 2 - 100; // get length in bytes, exclude header
// skip following 72 bytes in header
shxInpuStream.skip(72);
byte[] bytes = new byte[shxFileLength];
// read all indexes into memory, skip first 50 bytes(header)
shxInpuStream.readFully(bytes, 0, bytes.length);
IntBuffer buffer = ByteBuffer.wrap(bytes).asIntBuffer();
int[] indexes = new int[shxFileLength / 4];
buffer.get(indexes);
shapeFileReader = new ShapeFileReader(indexes);
}
else {
shapeFileReader = new ShapeFileReader(); // no index, construct with no parameter
}
shapeFileReader.initialize(shpSplit, context);
}
if (dbfSplit != null) {
dbfFileReader = new DbfFileReader();
dbfFileReader.initialize(dbfSplit, context);
hasDbf = true;
}
else { hasDbf = false; }
}
public boolean nextKeyValue()
throws IOException, InterruptedException
{
boolean hasNextShp = shapeFileReader.nextKeyValue();
if (hasDbf) { hasNextDbf = dbfFileReader.nextKeyValue(); }
int curShapeType = shapeFileReader.getCurrentValue().getTypeID();
while (hasNextShp && curShapeType == ShapeType.UNDEFINED.getId()) {
hasNextShp = shapeFileReader.nextKeyValue();
if (hasDbf) { hasNextDbf = dbfFileReader.nextKeyValue(); }
curShapeType = shapeFileReader.getCurrentValue().getTypeID();
}
// check if records match in .shp and .dbf
if (hasDbf) {
if (hasNextShp && !hasNextDbf) {
Exception e = new Exception("shape record loses attributes in .dbf file at ID=" + shapeFileReader.getCurrentKey().getIndex());
e.printStackTrace();
}
else if (!hasNextShp && hasNextDbf) {
Exception e = new Exception("Redundant attributes in .dbf exists");
e.printStackTrace();
}
}
return hasNextShp;
}
public ShapeKey getCurrentKey()
throws IOException, InterruptedException
{
return shapeFileReader.getCurrentKey();
}
public PrimitiveShape getCurrentValue()
throws IOException, InterruptedException
{
PrimitiveShape value = new PrimitiveShape(shapeFileReader.getCurrentValue());
if (hasDbf && hasNextDbf) { value.setAttributes(dbfFileReader.getCurrentValue()); }
return value;
}
public float getProgress()
throws IOException, InterruptedException
{
return shapeFileReader.getProgress();
}
public void close()
throws IOException
{
shapeFileReader.close();
}
} |
package es.tid.cosmos.mobility.itineraries;
import java.io.IOException;
import com.twitter.elephantbird.mapreduce.io.ProtobufWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;
import es.tid.cosmos.mobility.data.ItinMovementUtil;
import es.tid.cosmos.mobility.data.MobDataUtil;
import es.tid.cosmos.mobility.data.MobProtocol.ItinTime;
import es.tid.cosmos.mobility.data.MobProtocol.MobData;
/**
*
* @author dmicol
*/
// Reducer that turns consecutive (per-key) ItinTime location records into
// itinerary movements: for each pair of adjacent records at different BTS
// cells, it computes the elapsed minutes between them and emits a movement
// when that gap falls within [MIN_MINUTES_IN_MOVES, MAX_MINUTES_IN_MOVES].
// NOTE(review): correctness assumes the iterable yields records in time
// order for each key — TODO confirm against the job's sort configuration.
public class ItinMoveClientPoisReducer extends Reducer<LongWritable,
ProtobufWritable<MobData>, LongWritable, ProtobufWritable<MobData>> {
// Upper bound (6 hours) on the gap between two records to count as a move.
private final static int MAX_MINUTES_IN_MOVES = 360;
// Lower bound on the gap; zero allows same-minute transitions.
private final static int MIN_MINUTES_IN_MOVES = 0;
@Override
protected void reduce(LongWritable key,
Iterable<ProtobufWritable<MobData>> values,
Context context) throws IOException, InterruptedException {
ItinTime prevLoc;
ItinTime curLoc = null;
for (ProtobufWritable<MobData> value : values) {
value.setConverter(MobData.class);
MobData mobData = value.get();
// Slide the two-record window: previous <- current, current <- new.
prevLoc = curLoc;
curLoc = mobData.getItinTime();
if (prevLoc == null) {
// We are analyzing the first record, so move on to the next one
continue;
}
// Only a change of BTS cell constitutes a movement.
if (curLoc.getBts() != prevLoc.getBts()) {
int difMonth = curLoc.getDate().getMonth()
- prevLoc.getDate().getMonth();
// Gaps of two or more calendar months can never fit in the
// 360-minute window, so skip them outright.
if (difMonth > 1) {
continue;
}
int difDay = curLoc.getDate().getDay()
- prevLoc.getDate().getDay();
int difHour = curLoc.getTime().getHour()
- prevLoc.getTime().getHour();
int difMin = curLoc.getTime().getMinute()
- prevLoc.getTime().getMinute();
// Minutes in the month the interval started in (1440 = minutes/day).
// NOTE(review): February is hard-coded to 28 days — leap years are
// off by one day; acceptable only because such gaps exceed the
// 6-hour filter anyway — TODO confirm.
int nMinsMonth;
switch (prevLoc.getDate().getMonth()) {
case 4: case 6: case 9: case 11:
nMinsMonth = 1440 * 30;
break;
case 2:
nMinsMonth = 1440 * 28;
break;
default:
nMinsMonth = 1440 * 31;
}
// Total elapsed minutes between the two records.
int distance = (nMinsMonth * difMonth) + (1440 * difDay)
+ (60 * difHour) + difMin;
// Filter movements by diff of time
if (distance <= MAX_MINUTES_IN_MOVES &&
distance >= MIN_MINUTES_IN_MOVES) {
ProtobufWritable<MobData> move = MobDataUtil.createAndWrap(
ItinMovementUtil.create(prevLoc, curLoc));
context.write(key, move);
}
}
}
}
}
package org.bimserver.servlets;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.GregorianCalendar;
import org.bimserver.BimServer;
import org.bimserver.endpoints.EndPoint;
import org.bimserver.longaction.LongAction;
import org.bimserver.longaction.LongDownloadOrCheckoutAction;
import org.bimserver.longaction.LongStreamingDownloadAction;
import org.bimserver.models.log.AccessMethod;
import org.bimserver.plugins.serializers.ProgressReporter;
import org.bimserver.plugins.serializers.SerializerException;
import org.bimserver.plugins.serializers.Writer;
import org.bimserver.shared.StreamingSocketInterface;
import org.bimserver.shared.exceptions.ServerException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.shared.interfaces.NotificationInterface;
import org.bimserver.shared.interfaces.RemoteServiceInterface;
import org.bimserver.utils.GrowingByteBuffer;
import org.bimserver.webservices.InvalidTokenException;
import org.bimserver.webservices.ServiceMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class Streamer implements EndPoint {
private static final int ONE_MB = 1024 * 1024;
private static final Logger LOGGER = LoggerFactory.getLogger(Streamer.class);
private long uoid;
private long endpointid;
private BimServer bimServer;
private NotificationInterface notificationInterface;
private RemoteServiceInterface remoteServiceInterface;
private StreamingSocketInterface streamingSocketInterface;
private String token;
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
public Streamer(StreamingSocketInterface streamingSocketInterface, BimServer bimServer) {
this.streamingSocketInterface = streamingSocketInterface;
this.bimServer = bimServer;
notificationInterface = bimServer.getReflectorFactory().createReflector(NotificationInterface.class, new JsonWebsocketReflector(bimServer.getServicesMap(), streamingSocketInterface));
remoteServiceInterface = bimServer.getReflectorFactory().createReflector(RemoteServiceInterface.class, new JsonWebsocketReflector(bimServer.getServicesMap(), streamingSocketInterface));
}
public void onOpen() {
ObjectNode welcome = OBJECT_MAPPER.createObjectNode();
welcome.put("welcome", new GregorianCalendar().getTimeInMillis());
streamingSocketInterface.send(welcome);
}
public void onText(Reader reader) {
try {
ObjectNode request = OBJECT_MAPPER.readValue(reader, ObjectNode.class);
if (request.has("hb")) {
// Heartbeat, ignore
} else if (request.has("action")) {
if (request.get("action").asText().equals("download")) {
final long topicId = request.get("topicId").asLong();
bimServer.getExecutorService().execute(new Runnable() {
public void run() {
Writer writer = null;
try {
LongAction<?> longAction = bimServer.getLongActionManager().getLongAction(topicId);
if (longAction instanceof LongStreamingDownloadAction) {
LongStreamingDownloadAction longStreamingDownloadAction = (LongStreamingDownloadAction) longAction;
writer = longStreamingDownloadAction.getMessagingStreamingSerializer();
} else {
LongDownloadOrCheckoutAction longDownloadAction = (LongDownloadOrCheckoutAction) longAction;
// NPE happens here sometimes when using the viewer??
if (longDownloadAction == null) {
LOGGER.error("No long download actions for " + topicId);
} else {
writer = longDownloadAction.getMessagingSerializer();
return;
}
}
boolean writeMessage = true;
// TODO pool the buffers
// TODO whenever a large object has been sent,
// the large buffer stays in memory until
// websocket closes...
ReusableLittleEndianDataOutputStream byteArrayOutputStream = new ReusableLittleEndianDataOutputStream();
GrowingByteBuffer growingByteBuffer = byteArrayOutputStream.getGrowingByteBuffer();
ProgressReporter progressReporter = new ProgressReporter() {
@Override
public void update(long progress, long max) {
longAction.updateProgress("test", (int) ((progress * 100) / max));
}
@Override
public void setTitle(String title) {
}
};
int messagesSent = 0;
// streamingSocketInterface.enableBatching();
int bytesInThisBuffer = 0;
// long start = System.nanoTime();
// for (int i=0; i<100; i++) {
// byteArrayOutputStream.reset();
// byteArrayOutputStream.writeLongUnchecked(topicId);
// if (i == 99) {
// growingByteBuffer.put((byte)6);
// growingByteBuffer.put(new byte[10000000]);
// ByteBuffer newBuffer =
// ByteBuffer.allocate(growingByteBuffer.usedSize());
// newBuffer.put(growingByteBuffer.array(), 0,
// growingByteBuffer.usedSize());
// streamingSocketInterface.send(newBuffer.array(),
// 0, newBuffer.capacity());
// bytes += newBuffer.capacity() + 8;
// messagesSent++;
byteArrayOutputStream.writeLongUnchecked(topicId);
byteArrayOutputStream.writeLongUnchecked(0);
do {
writeMessage = writer.writeMessage(byteArrayOutputStream, progressReporter);
messagesSent++;
// TODO we can just keep track of time, and for example always flush when nothing was sent in a second. Need to keep in mind that there could 63 other threads writing...
if (growingByteBuffer.usedSize() >= ONE_MB || !writeMessage) {
ByteBuffer newBuffer = ByteBuffer.wrap(growingByteBuffer.array(), 0, growingByteBuffer.usedSize());
streamingSocketInterface.sendBlocking(newBuffer);
byteArrayOutputStream.reset();
byteArrayOutputStream.writeLongUnchecked(topicId);
byteArrayOutputStream.writeLongUnchecked(0);
}
} while (writeMessage);
ByteBuffer endMessage = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
endMessage.putLong(topicId);
endMessage.putLong(1);
endMessage.position(0);
streamingSocketInterface.sendBlocking(endMessage);
// streamingSocketInterface.flush();
// long end = System.nanoTime();
// LOGGER.info(messagesSent + " messages written
// " + Formatters.bytesToString(bytes) + " in "
// + ((end - start) / 1000000) + " ms");
} catch (IOException e) {
LOGGER.error("", e);
// Probably closed/F5-ed browser
} catch (SerializerException e) {
LOGGER.error("", e);
} finally {
try {
if (writer != null) {
writer.close();
}
} catch (IOException e) {
LOGGER.error("", e);
}
}
}
});
}
} else if (request.has("request")) {
bimServer.getExecutorService().execute(new Runnable() {
@Override
public void run() {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
OutputStreamWriter outputStreamWriter = new OutputStreamWriter(baos);
bimServer.getJsonHandler().execute(request, null, outputStreamWriter);
try {
outputStreamWriter.close();
} catch (IOException e) {
e.printStackTrace();
}
streamingSocketInterface.sendAsText(baos.toByteArray());
}
});
} else if (request.has("token")) {
token = request.get("token").asText();
try {
ServiceMap serviceMap = bimServer.getServiceFactory().get(token, AccessMethod.JSON);
uoid = serviceMap.getBimServerAuthInterface().getLoggedInUser().getOid();
this.endpointid = bimServer.getEndPointManager().register(this);
ObjectNode endpointMessage = OBJECT_MAPPER.createObjectNode();
// Next 4 lines are redundant, but added to comply with
// bimbots interface
endpointMessage.put("type", "endpoint");
ObjectNode payload = OBJECT_MAPPER.createObjectNode();
payload.put("endpointid", endpointid);
endpointMessage.set("payload", payload);
endpointMessage.put("endpointid", endpointid);
streamingSocketInterface.send(endpointMessage);
} catch (InvalidTokenException e) {
ObjectNode enpointMessage = OBJECT_MAPPER.createObjectNode();
enpointMessage.put("error", "Invalid token");
streamingSocketInterface.send(enpointMessage);
} catch (UserException e) {
LOGGER.error("", e);
} catch (ServerException e) {
LOGGER.error("", e);
}
}
} catch (JsonParseException e1) {
LOGGER.error("", e1);
} catch (JsonMappingException e1) {
LOGGER.error("", e1);
} catch (IOException e1) {
LOGGER.error("", e1);
}
}
@Override
public void cleanup() {
bimServer.getEndPointManager().unregister(endpointid);
}
public void onClose() {
LOGGER.debug("onClose, unregistering endpoint " + this.getEndPointId());
bimServer.getEndPointManager().unregister(this);
}
@Override
public long getEndPointId() {
return endpointid;
}
@Override
public NotificationInterface getNotificationInterface() {
return notificationInterface;
}
@Override
public RemoteServiceInterface getRemoteServiceInterface() {
return remoteServiceInterface;
}
@Override
public long getUoid() {
return uoid;
}
@Override
public String toString() {
return "Streamer with endpoint " + endpointid;
}
public String getToken() {
return token;
}
public StreamingSocketInterface getStreamingSocketInterface() {
return streamingSocketInterface;
}
} |
package de.peeeq.wurstscript.translation.lua.translation;
import de.peeeq.datastructures.UnionFind;
import de.peeeq.wurstio.TimeTaker;
import de.peeeq.wurstscript.jassIm.*;
import de.peeeq.wurstscript.luaAst.*;
import de.peeeq.wurstscript.translation.imoptimizer.ImOptimizer;
import de.peeeq.wurstscript.translation.imtranslation.FunctionFlagEnum;
import de.peeeq.wurstscript.translation.imtranslation.GetAForB;
import de.peeeq.wurstscript.translation.imtranslation.ImTranslator;
import de.peeeq.wurstscript.translation.imtranslation.NormalizeNames;
import de.peeeq.wurstscript.types.TypesHelper;
import de.peeeq.wurstscript.utils.Utils;
import org.jetbrains.annotations.NotNull;
import java.util.*;
public class LuaTranslator {
final ImProg prog;
final LuaCompilationUnit luaModel;
private final Set<String> usedNames = new HashSet<>(Arrays.asList(
// reserved function names
"print", "tostring", "error",
// keywords:
"and",
"break",
"do",
"else",
"elseif",
"end",
"false",
"for",
"function",
"if",
"in",
"local",
"nil",
"not",
"or",
"repeat",
"return",
"then",
"true",
"until",
"while"
));
List<ExprTranslation.TupleFunc> tupleEqualsFuncs = new ArrayList<>();
List<ExprTranslation.TupleFunc> tupleCopyFuncs = new ArrayList<>();
GetAForB<ImVar, LuaVariable> luaVar = new GetAForB<ImVar, LuaVariable>() {
@Override
public LuaVariable initFor(ImVar a) {
return LuaAst.LuaVariable(uniqueName(a.getName()), LuaAst.LuaNoExpr());
}
};
GetAForB<ImFunction, LuaFunction> luaFunc = new GetAForB<ImFunction, LuaFunction>() {
@Override
public LuaFunction initFor(ImFunction a) {
return LuaAst.LuaFunction(uniqueName(a.getName()), LuaAst.LuaParams(), LuaAst.LuaStatements());
}
};
public GetAForB<ImMethod, LuaMethod> luaMethod = new GetAForB<ImMethod, LuaMethod>() {
@Override
public LuaMethod initFor(ImMethod a) {
LuaExpr receiver = LuaAst.LuaExprVarAccess(luaClassVar.getFor(a.attrClass()));
return LuaAst.LuaMethod(receiver, a.getName(), LuaAst.LuaParams(), LuaAst.LuaStatements());
}
};
GetAForB<ImClass, LuaVariable> luaClassVar = new GetAForB<ImClass, LuaVariable>() {
@Override
public LuaVariable initFor(ImClass a) {
return LuaAst.LuaVariable(uniqueName(a.getName()), LuaAst.LuaNoExpr());
}
};
GetAForB<ImClass, LuaVariable> luaClassMetaTableVar = new GetAForB<ImClass, LuaVariable>() {
@Override
public LuaVariable initFor(ImClass a) {
return LuaAst.LuaVariable(uniqueName(a.getName() + "_mt"), LuaAst.LuaNoExpr());
}
};
GetAForB<ImClass, LuaMethod> luaClassInitMethod = new GetAForB<ImClass, LuaMethod>() {
@Override
public LuaMethod initFor(ImClass a) {
LuaExprVarAccess receiver = LuaAst.LuaExprVarAccess(luaClassVar.getFor(a));
return LuaAst.LuaMethod(receiver, uniqueName("create"), LuaAst.LuaParams(), LuaAst.LuaStatements());
}
};
LuaFunction arrayInitFunction = LuaAst.LuaFunction(uniqueName("defaultArray"), LuaAst.LuaParams(), LuaAst.LuaStatements());
LuaFunction stringConcatFunction = LuaAst.LuaFunction(uniqueName("stringConcat"), LuaAst.LuaParams(), LuaAst.LuaStatements());
private final ImTranslator imTr;
public LuaTranslator(ImProg prog, ImTranslator imTr) {
this.prog = prog;
this.imTr = imTr;
luaModel = LuaAst.LuaCompilationUnit();
}
protected String uniqueName(String name) {
int i = 0;
String rname = name;
while (usedNames.contains(rname)) {
rname = name + ++i;
}
usedNames.add(rname);
return rname;
}
public LuaCompilationUnit translate() {
prog.flatten(imTr);
normalizeMethodNames();
// NormalizeNames.normalizeNames(prog);
createArrayInitFunction();
createStringConcatFunction();
for (ImVar v : prog.getGlobals()) {
translateGlobal(v);
}
// first add class variables
for (ImClass c : prog.getClasses()) {
LuaVariable classVar = luaClassVar.getFor(c);
luaModel.add(classVar);
}
for (ImClass c : prog.getClasses()) {
translateClass(c);
}
for (ImFunction f : prog.getFunctions()) {
translateFunc(f);
}
for (ImClass c : prog.getClasses()) {
initClassTables(c);
}
cleanStatements();
return luaModel;
}
private void normalizeMethodNames() {
// group related methods
UnionFind<ImMethod> methodUnions = new UnionFind<>();
for (ImClass c : prog.getClasses()) {
for (ImMethod m : c.getMethods()) {
methodUnions.find(m);
for (ImMethod subMethod : m.getSubMethods()) {
methodUnions.union(m, subMethod);
}
}
}
// give all related methods the same name
for (Map.Entry<ImMethod, Set<ImMethod>> entry : methodUnions.groups().entrySet()) {
String name = uniqueName(entry.getKey().getName());
for (ImMethod method : entry.getValue()) {
method.setName(name);
}
}
}
private void createStringConcatFunction() {
String[] code = {
"if x then",
" if y then return x .. y else return x end",
"else",
" return y",
"end"
};
stringConcatFunction.getParams().add(LuaAst.LuaVariable("x", LuaAst.LuaNoExpr()));
stringConcatFunction.getParams().add(LuaAst.LuaVariable("y", LuaAst.LuaNoExpr()));
for (String c : code) {
stringConcatFunction.getBody().add(LuaAst.LuaLiteral(c));
}
luaModel.add(stringConcatFunction);
}
private void createArrayInitFunction() {
/*
function defaultArray(d)
local t = {}
local mt = {__index = function (table, key)
local v = d()
table[key] = v
return v
end}
setmetatable(t, mt)
return t
end
*/
String[] code = {
"local t = {}",
"local mt = {__index = function (table, key)",
" local v = d()",
" table[key] = v",
" return v",
"end}",
"setmetatable(t, mt)",
"return t"
};
arrayInitFunction.getParams().add(LuaAst.LuaVariable("d", LuaAst.LuaNoExpr()));
for (String c : code) {
arrayInitFunction.getBody().add(LuaAst.LuaLiteral(c));
}
luaModel.add(arrayInitFunction);
}
private void cleanStatements() {
luaModel.accept(new LuaModel.DefaultVisitor() {
@Override
public void visit(LuaStatements stmts) {
super.visit(stmts);
cleanStatements(stmts);
}
});
}
private void cleanStatements(LuaStatements stmts) {
ListIterator<LuaStatement> it = stmts.listIterator();
while (it.hasNext()) {
LuaStatement s = it.next();
if (s instanceof LuaExprNull) {
it.remove();
} else if (s instanceof LuaExpr) {
LuaExpr e = (LuaExpr) s;
if (!(e instanceof LuaCallExpr || e instanceof LuaLiteral) || e instanceof LuaExprFunctionCallE) {
e.setParent(null);
LuaVariable exprTemp = LuaAst.LuaVariable("wurstExpr", e);
it.set(exprTemp);
}
}
}
}
private void translateFunc(ImFunction f) {
LuaFunction lf = luaFunc.getFor(f);
if (f.isNative()) {
LuaNatives.get(lf);
} else {
// translate parameters
for (ImVar p : f.getParameters()) {
LuaVariable pv = luaVar.getFor(p);
lf.getParams().add(pv);
}
if (f.hasFlag(FunctionFlagEnum.IS_VARARG)) {
LuaVariable lastParam = luaVar.getFor(Utils.getLast(f.getParameters()));
lastParam.setName("...");
}
// translate local variables
for (ImVar local : f.getLocals()) {
LuaVariable luaLocal = luaVar.getFor(local);
luaLocal.setInitialValue(defaultValue(local.getType()));
lf.getBody().add(luaLocal);
}
// translate body:
translateStatements(lf.getBody(), f.getBody());
}
if (f.isExtern()) {
// only add the function if it is not yet defined:
String name = lf.getName();
luaModel.add(LuaAst.LuaIf(
LuaAst.LuaExprFuncRef(lf),
LuaAst.LuaStatements(),
LuaAst.LuaStatements(
LuaAst.LuaAssignment(LuaAst.LuaLiteral(name), LuaAst.LuaExprFunctionAbstraction(
lf.getParams().copy(),
lf.getBody().copy()
))
)
));
} else {
luaModel.add(lf);
}
}
void translateStatements(List<LuaStatement> res, ImStmts stmts) {
for (ImStmt s : stmts) {
s.translateStmtToLua(res, this);
}
}
public LuaStatements translateStatements(ImStmts stmts) {
LuaStatements r = LuaAst.LuaStatements();
translateStatements(r, stmts);
return r;
}
private void translateClass(ImClass c) {
LuaVariable classVar = luaClassVar.getFor(c);
LuaMethod initMethod = luaClassInitMethod.getFor(c);
luaModel.add(initMethod);
classVar.setInitialValue(emptyTable());
// translate functions
for (ImFunction f : c.getFunctions()) {
translateFunc(f);
luaFunc.getFor(f).setName(uniqueName(c.getName() + "_" + f.getName()));
}
createClassInitFunction(c, classVar, initMethod);
}
private void createClassInitFunction(ImClass c, LuaVariable classVar, LuaMethod initMethod) {
// create init function:
LuaStatements body = initMethod.getBody();
// local new_inst = { ... }
LuaTableFields initialFieldValues = LuaAst.LuaTableFields();
LuaVariable newInst = LuaAst.LuaVariable("new_inst", LuaAst.LuaTableConstructor(initialFieldValues));
for (ImVar field : c.getFields()) {
initialFieldValues.add(
LuaAst.LuaTableNamedField(field.getName(), defaultValue(field.getType()))
);
}
body.add(newInst);
// setmetatable(new_inst, {__index = classVar})
body.add(LuaAst.LuaExprFunctionCallByName("setmetatable", LuaAst.LuaExprlist(
LuaAst.LuaExprVarAccess(newInst),
LuaAst.LuaTableConstructor(LuaAst.LuaTableFields(
LuaAst.LuaTableNamedField("__index", LuaAst.LuaExprVarAccess(classVar))
))
)));
body.add(LuaAst.LuaReturn(LuaAst.LuaExprVarAccess(newInst)));
}
private void initClassTables(ImClass c) {
LuaVariable classVar = luaClassVar.getFor(c);
// create methods:
Set<String> methods = new HashSet<>();
createMethods(c, classVar, methods);
// set supertype metadata:
LuaTableFields superClasses = LuaAst.LuaTableFields();
collectSuperClasses(superClasses, c, new HashSet<>());
luaModel.add(LuaAst.LuaAssignment(LuaAst.LuaExprFieldAccess(
LuaAst.LuaExprVarAccess(classVar),
ExprTranslation.WURST_SUPERTYPES),
LuaAst.LuaTableConstructor(superClasses)
));
// set typeid metadata:
luaModel.add(LuaAst.LuaAssignment(LuaAst.LuaExprFieldAccess(
LuaAst.LuaExprVarAccess(classVar),
ExprTranslation.TYPE_ID),
LuaAst.LuaExprIntVal("" + prog.attrTypeId().get(c))
));
}
private void createMethods(ImClass c, LuaVariable classVar, Set<String> methods) {
for (ImMethod method : c.getMethods()) {
if (methods.contains(method.getName())) {
continue;
}
methods.add(method.getName());
if (method.getIsAbstract()) {
continue;
}
luaModel.add(LuaAst.LuaAssignment(LuaAst.LuaExprFieldAccess(
LuaAst.LuaExprVarAccess(classVar),
method.getName()),
LuaAst.LuaExprFuncRef(luaFunc.getFor(method.getImplementation()))
));
}
// also create links for inherited methods
for (ImClassType sc : c.getSuperClasses()) {
createMethods(sc.getClassDef(), classVar, methods);
}
}
@NotNull
private LuaTableConstructor emptyTable() {
return LuaAst.LuaTableConstructor(LuaAst.LuaTableFields());
}
private void collectSuperClasses(LuaTableFields superClasses, ImClass c, Set<ImClass> visited) {
if (visited.contains(c)) {
return;
}
superClasses.add(LuaAst.LuaTableExprField(LuaAst.LuaExprVarAccess(luaClassVar.getFor(c)), LuaAst.LuaExprBoolVal(true)));
visited.add(c);
for (ImClassType sc : c.getSuperClasses()) {
collectSuperClasses(superClasses, sc.getClassDef(), visited);
}
}
private void translateGlobal(ImVar v) {
LuaVariable lv = luaVar.getFor(v);
lv.setInitialValue(defaultValue(v.getType()));
luaModel.add(lv);
}
private LuaExpr defaultValue(ImType type) {
return type.match(new ImType.Matcher<LuaExpr>() {
@Override
public LuaExpr case_ImTupleType(ImTupleType tt) {
LuaTableFields tableFields = LuaAst.LuaTableFields();
for (int i = 0; i < tt.getNames().size(); i++) {
tableFields.add(LuaAst.LuaTableSingleField(defaultValue(tt.getTypes().get(i))));
}
return LuaAst.LuaTableConstructor(
tableFields
);
}
@Override
public LuaExpr case_ImVoid(ImVoid imVoid) {
return LuaAst.LuaExprNull();
}
@Override
public LuaExpr case_ImClassType(ImClassType imClassType) {
return LuaAst.LuaExprNull();
}
@Override
public LuaExpr case_ImArrayTypeMulti(ImArrayTypeMulti at) {
ImType baseType;
if (at.getArraySize().size() <= 1) {
baseType = at.getEntryType();
} else {
List<Integer> arraySizes = new ArrayList<>(at.getArraySize());
arraySizes.remove(0);
baseType = JassIm.ImArrayTypeMulti(at.getEntryType(), arraySizes);
}
return LuaAst.LuaExprFunctionCall(arrayInitFunction,
LuaAst.LuaExprlist(
LuaAst.LuaExprFunctionAbstraction(LuaAst.LuaParams(),
LuaAst.LuaStatements(
LuaAst.LuaReturn(defaultValue(baseType))
)
)
));
}
@Override
public LuaExpr case_ImSimpleType(ImSimpleType st) {
if (TypesHelper.isIntType(st)) {
return LuaAst.LuaExprIntVal("0");
} else if (TypesHelper.isBoolType(st)) {
return LuaAst.LuaExprBoolVal(false);
} else if (TypesHelper.isRealType(st)) {
return LuaAst.LuaExprRealVal("0.");
}
return LuaAst.LuaExprNull();
}
@Override
public LuaExpr case_ImArrayType(ImArrayType imArrayType) {
return emptyTable();
}
@Override
public LuaExpr case_ImTypeVarRef(ImTypeVarRef imTypeVarRef) {
return LuaAst.LuaExprNull();
}
});
}
public LuaExprOpt translateOptional(ImExprOpt e) {
if (e instanceof ImExpr) {
ImExpr imExpr = (ImExpr) e;
return imExpr.translateToLua(this);
}
return LuaAst.LuaNoExpr();
}
public LuaExprlist translateExprList(ImExprs exprs) {
LuaExprlist r = LuaAst.LuaExprlist();
for (ImExpr e : exprs) {
r.add(e.translateToLua(this));
}
return r;
}
/**
 * Returns the numeric type id assigned to the given class.
 * Type ids are precomputed as a program-level attribute; this is a lookup only.
 */
public int getTypeId(ImClass classDef) {
return prog.attrTypeId().get(classDef);
}
} |
package com.parc.ccn.security.access;
import java.security.InvalidKeyException;
import java.security.Key;
import java.sql.Timestamp;
import javax.crypto.spec.SecretKeySpec;
import javax.xml.stream.XMLStreamException;
import com.parc.ccn.Library;
import com.parc.ccn.data.ContentName;
import com.parc.ccn.library.profiles.AccessControlProfile;
import com.parc.ccn.library.profiles.VersionMissingException;
import com.parc.ccn.library.profiles.VersioningProfile;
import com.parc.ccn.security.crypto.CCNDigestHelper;
import com.parc.ccn.security.crypto.KeyDerivationFunction;
public class NodeKey {
/**
* Default data key length in bytes. No real reason this can't be bumped up to 32. It
* acts as the seed for a KDF, not an encryption key.
*/
public static final int DEFAULT_NODE_KEY_LENGTH = 16;
/**
* The keys we're wrapping are really seeds for a KDF, not keys in their own right.
* Eventually we'll use CMAC, so call them AES...
*/
public static final String DEFAULT_NODE_KEY_ALGORITHM = "AES";
/**
* The node this key is associated with, with _access_ information stripped.
*/
private ContentName _nodeName;
/**
* The full name of the stored node key that is either this key itself,
* or the ancestor node key this is derived from, including its version information.
*/
private ContentName _storedNodeKeyName;
private byte [] _storedNodeKeyID;
/**
* The unwrapped node key
*/
private Key _nodeKey;
public NodeKey(ContentName nodeKeyName, byte [] unwrappedNodeKey) {
this(nodeKeyName, new SecretKeySpec(unwrappedNodeKey, DEFAULT_NODE_KEY_ALGORITHM));
}
public NodeKey(ContentName nodeKeyName, Key unwrappedNodeKey) {
if ((null == nodeKeyName) || (null == unwrappedNodeKey)) {
throw new IllegalArgumentException("NodeKey: key name and key cannot be null!");
}
if (!VersioningProfile.isVersioned(nodeKeyName)) {
throw new IllegalArgumentException("Expect stored node key name to be versioned: " + nodeKeyName);
}
_storedNodeKeyName = nodeKeyName;
_storedNodeKeyID = generateKeyID(unwrappedNodeKey.getEncoded());
_nodeKey = unwrappedNodeKey;
_nodeName = AccessControlProfile.accessRoot(nodeKeyName);
if ((null == _nodeName) || (!AccessControlProfile.isNodeKeyName(nodeKeyName))) {
throw new IllegalArgumentException("NodeKey: key name " + nodeKeyName + " is not a valid node key name.");
}
}
protected NodeKey(ContentName nodeName, byte [] derivedNodeKey,
ContentName ancestorNodeKeyName, byte [] ancestorNodeKeyID) {
if (!VersioningProfile.isVersioned(ancestorNodeKeyName)) {
throw new IllegalArgumentException("Expect stored node key name to be versioned: " + ancestorNodeKeyName);
}
_storedNodeKeyName = ancestorNodeKeyName;
_storedNodeKeyID = ancestorNodeKeyID;
_nodeName = nodeName;
_nodeKey = new SecretKeySpec(derivedNodeKey, DEFAULT_NODE_KEY_ALGORITHM);
}
public NodeKey computeDescendantNodeKey(ContentName descendantNodeName, String keyLabel) throws InvalidKeyException, XMLStreamException {
if (!nodeName().isPrefixOf(descendantNodeName)) {
throw new IllegalArgumentException("Node " + descendantNodeName + " is not a child of this node " + nodeName());
}
byte [] derivedKey = KeyDerivationFunction.DeriveKeyForNode(nodeName(), nodeKey().getEncoded(), keyLabel, descendantNodeName);
return new NodeKey(descendantNodeName, derivedKey, storedNodeKeyName(), storedNodeKeyID());
}
public ContentName nodeName() { return _nodeName; }
public ContentName storedNodeKeyName() { return _storedNodeKeyName; }
public byte [] storedNodeKeyID() { return _storedNodeKeyID; }
public Key nodeKey() { return _nodeKey; }
public boolean isDerivedNodeKey() {
return (!nodeName().isPrefixOf(storedNodeKeyName()));
}
public Timestamp nodeKeyVersion() {
try {
return VersioningProfile.getVersionAsTimestamp(storedNodeKeyName());
} catch (VersionMissingException e) {
Library.logger().warning("Unexpected: name that was confirmed to have a version on construction throws a VersionMissingException: " + storedNodeKeyName());
throw new IllegalStateException("Unexpected: name that was confirmed to have a version on construction throws a VersionMissingException: " + storedNodeKeyName());
}
}
public byte [] generateKeyID() {
return generateKeyID(nodeKey().getEncoded());
}
public static byte [] generateKeyID(byte [] key) {
return CCNDigestHelper.digest(key);
}
} |
package hu.elte.txtuml.export.cpp.wizardz;
import java.util.Arrays;
import java.util.List;
/**
 * Expected console output lines for each of the C++ export demo models.
 * Each constant carries the ordered list of lines the corresponding demo
 * is expected to print.
 */
public enum DemoExpectedLines {
	MACHINE(Arrays.asList("Machine and users are starting.",
			"Machine::Initialize",
			"Machine: initializing...",
			"Machine enters state: 'off'",
			"User::Initialize",
			"User: initializing...",
			"User::Initialize",
			"User: initializing...",
			"User::Working",
			"User: working...",
			"User: starting to work...",
			"User: work finished...",
			"Machine exits state: 'off'",
			"Machine::SwitchOn",
			"Machine: switching on...",
			"Machine enters state: 'on'",
			"Machine exits state: 'on'",
			"Machine::SwitchOff",
			"Machine: switching off...",
			"Machine enters state: 'off'",
			"Machine exits state: 'off'",
			"Machine::SwitchOn",
			"Machine: switching on...",
			"Machine enters state: 'on'")),
	MONITORING(Arrays.asList("ResourceMonitor::TInit",
			"ResourceMonitor::TOpenRead",
			"ResourceMonitor::TOpenWrite",
			"ResourceMonitor::TReadAgain",
			"ResourceMonitor::TWriteAgain",
			"ResourceMonitor::TCloseRead",
			"ResourceMonitor::TCloseWrite",
			"ResourceMonitor::TErrRead",
			"ResourceMonitor::TErrWrite",
			"Aggregator::TInit",
			"Aggregator::TPrintReport",
			"Aggregator::TWriteError",
			"Aggregator::TReadError",
			"Alert::TInitialize",
			"Alert::TIncreaseLevel",
			"Alert::TStartAlert",
			"Alert::TStopAlert",
			"!!! Critical number of errors detected !!!",
			"+++ Back to normal operation +++")),
	PRODUCER_CONSUMER(Arrays.asList("Consumer::Initialize",
			"Consumer::DoRequest",
			"Consumer::Stop",
			"Producer::Initialize",
			"Producer::DoOffer",
			"Producer::Stop",
			"Storage::Initialize",
			"Storage::CanAccept",
			"Storage::CannotAccept",
			"Storage::CanServe",
			"Storage::CannotServe")),
	TRAIN(Arrays.asList("Engine::Init_Stopped",
			"Engine::Stopped_Working",
			"Engine::Working_Stopped",
			"Gearbox::Init_Neutral",
			"Gearbox::Neutral_Forwards",
			"Gearbox::Neutral_Backwards",
			"Gearbox::Forwards_Neutral",
			"Gearbox::Backwards_Neutral",
			"Lamp::Init_Dark",
			"Lamp::Dark_Light",
			"Lamp::Light_Dark",
			"Lamp::Light_Dark2",
			"Backwards_subSM::BInit_B1",
			"Backwards_subSM::B1_B2",
			"Backwards_subSM::B2_B1",
			"Forwards_subSM::FInit_F1",
			"Forwards_subSM::F1_F2",
			"Forwards_subSM::F2_F1"));

	// Enum state should be immutable: the reference is final and never reassigned.
	private final List<String> expectedLines;

	DemoExpectedLines(List<String> expectedLines) {
		this.expectedLines = expectedLines;
	}

	/** @return the ordered list of expected output lines for this demo. */
	public List<String> getLines() {
		return expectedLines;
	}
}
package io.branch.referral;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.Animation;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.TranslateAnimation;
/**
* <p>Class for creating a Dialog which open and closes with an animation to the content view </p>
*/
public class AnimatedDialog extends Dialog {
private boolean isClosing_ = false;
public AnimatedDialog(Context context) {
super(context);
init(context);
}
public AnimatedDialog(Context context, int theme) {
super(context, theme);
init(context);
}
public AnimatedDialog(Context context, boolean cancelable, OnCancelListener cancelListener) {
super(context, cancelable, cancelListener);
init(context);
}
/**
* <p> Opens the dialog with an animation to the content View.</p>
*/
@Override
public void show() {
slideOpen();
}
/**
* <p> Cancels the dialog with an animation to the content View.</p>
*/
@Override
public void cancel() {
slideClose();
}
@Override
public void setContentView(int layoutResID) {
setDialogWindowAttributes();
super.setContentView(layoutResID);
}
private void init(Context context) {
setDialogWindowAttributes();
// Listen for the backpress in order to dismiss the dialog with animation
setOnKeyListener(new OnKeyListener() {
@Override
public boolean onKey(DialogInterface arg0, int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
slideClose();
}
return true;
}
});
}
/**
* Set the window attributes for the invite dialog.
*/
public void setDialogWindowAttributes() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
getWindow().addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
WindowManager.LayoutParams lp = new WindowManager.LayoutParams();
lp.copyFrom(getWindow().getAttributes());
lp.width = WindowManager.LayoutParams.MATCH_PARENT;
lp.height = WindowManager.LayoutParams.MATCH_PARENT;
lp.gravity = Gravity.BOTTOM;
lp.dimAmount = 0.8f;
getWindow().setAttributes(lp);
getWindow().setWindowAnimations(android.R.anim.slide_in_left);
setCanceledOnTouchOutside(true);
}
/**
* </p> Opens the dialog with a translation animation to the content view </p>
*/
private void slideOpen() {
TranslateAnimation slideUp = new TranslateAnimation(Animation.RELATIVE_TO_SELF, 0, Animation.RELATIVE_TO_SELF, 0, Animation.RELATIVE_TO_SELF, 1.0f, Animation.RELATIVE_TO_SELF, 0f);
slideUp.setDuration(500);
slideUp.setInterpolator(new AccelerateInterpolator());
((ViewGroup) getWindow().getDecorView()).getChildAt(0).startAnimation(slideUp);
super.show();
}
/**
* </p> Closes the dialog with a translation animation to the content view </p>
*/
private void slideClose() {
if (!isClosing_) {
isClosing_ = true;
TranslateAnimation slideDown = new TranslateAnimation(Animation.RELATIVE_TO_SELF, 0, Animation.RELATIVE_TO_SELF, 0, Animation.RELATIVE_TO_SELF, 0.0f, Animation.RELATIVE_TO_SELF, 1f);
slideDown.setDuration(500);
slideDown.setInterpolator(new DecelerateInterpolator());
((ViewGroup) getWindow().getDecorView()).getChildAt(0).startAnimation(slideDown);
slideDown.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationStart(Animation animation) {
}
@Override
public void onAnimationEnd(Animation animation) {
dismiss();
}
@Override
public void onAnimationRepeat(Animation animation) {
}
});
}
}
} |
package net.automatalib.words;
import java.io.IOException;
import java.util.AbstractList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import net.automatalib.commons.util.array.ArrayWritable;
import net.automatalib.commons.util.strings.AbstractPrintable;
import net.automatalib.commons.util.strings.StringUtil;
/**
* A word is an ordered sequence of symbols. {@link Word}s are generally immutable,
* i.e., a single {@link Word} object will never change (unless symbol objects are modified,
* which is however highly discouraged).
* <p>
* This class provides the following static methods for creating words in the most common scenarios:
* <ul>
* <li> {@link #epsilon()} returns the empty word of length 0
* <li> {@link #fromLetter(Object)} turns a single letter into a word of length 1
* <li> {@link #fromSymbols(Object...)} creates a word from an array of symbols
* <li> {@link #fromArray(Object[], int, int)} creates a word from a subrange of a symbols array
* <li> {@link #fromList(List)} creates a word from a {@link List} of symbols
* </ul>
* <p>
* Modification operations like {@link #append(Object)} or {@link #concat(Word...)} create
* new objects, subsequently invoking these operations on the respective objects returned is
* therefore highly inefficient. If words need to be dynamically created, a {@link WordBuilder}
* should be used.
* <p>
* This is an abstract base class for word representations. Implementing classes only need to implement
* <ul>
* <li> {@link #getSymbol(int)}
* <li> {@link #length()}
* </ul>
* <p>
* However, for the sake of efficiency it is highly encouraged to overwrite the other methods
* as well, providing specialized realizations.
*
* @param <I> symbol class
*
* @author Malte Isberner <malte.isberner@gmail.com>
*/
public abstract class Word<I> extends AbstractPrintable implements ArrayWritable<I>, Iterable<I> {
/**
* Retrieves the empty word.
* @return the empty word.
* @see Collections#emptyList()
*/
@SuppressWarnings("unchecked")
public static <I> Word<I> epsilon() {
return (Word<I>)(Word<?>)EmptyWord.INSTANCE;
}
/**
* Constructs a word from a single letter.
* @param letter the letter
* @return a word consisting of only this letter
*/
public static <I> Word<I> fromLetter(I letter) {
return new LetterWord<>(letter);
}
/**
* Creates a word from an array of symbols.
* @param symbols the symbol array
* @return a word containing the symbols in the specified array
*/
@SafeVarargs
public static <I> Word<I> fromSymbols(I ...symbols) {
if(symbols.length == 0)
return epsilon();
if(symbols.length == 1)
return fromLetter(symbols[0]);
Object[] array = new Object[symbols.length];
System.arraycopy(symbols, 0, array, 0, symbols.length);
return new SharedWord<>(symbols);
}
/**
* Creates a word from a subrange of an array of symbols. Note that to
* ensure immutability, internally a copy of the array is made.
* @param symbols the symbols array
* @param offset the starting index in the array
* @param length the length of the resulting word (from the starting index on)
* @return the word consisting of the symbols in the range
*/
public static <I> Word<I> fromArray(I[] symbols, int offset, int length) {
if(length == 0)
return epsilon();
if(length == 1)
return fromLetter(symbols[offset]);
Object[] array = new Object[length];
System.arraycopy(symbols, offset, array, 0, length);
return new SharedWord<>(symbols);
}
/**
* Creates a word from a list of symbols
* @param symbolList the list of symbols
* @return the resulting word
*/
public static <I> Word<I> fromList(List<? extends I> symbolList) {
int siz = symbolList.size();
if(siz == 0)
return epsilon();
if(siz == 1)
return Word.<I>fromLetter(symbolList.get(0));
return new SharedWord<>(symbolList);
}
public static Word<Character> fromString(String str) {
int len = str.length();
Character[] chars = new Character[str.length()];
for(int i = 0; i < len; i++)
chars[i] = Character.valueOf(str.charAt(i));
return new SharedWord<>(chars);
}
/*
* General word iterator
*/
private class Iterator implements java.util.Iterator<I> {
private int index = 0;
/*
* (non-Javadoc)
* @see java.util.Iterator#hasNext()
*/
@Override
public boolean hasNext() {
return (index < Word.this.length());
}
/*
* (non-Javadoc)
* @see java.util.Iterator#next()
*/
@Override
public I next() {
return Word.this.getSymbol(index++);
}
/*
* (non-Javadoc)
* @see java.util.Iterator#remove()
*/
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
/*
* Representing a word as a list.
*/
private class AsList extends AbstractList<I> {
/*
* (non-Javadoc)
* @see java.util.AbstractList#get(int)
*/
@Override
public I get(int index) {
return Word.this.getSymbol(index);
}
/*
* (non-Javadoc)
* @see java.util.AbstractCollection#size()
*/
@Override
public int size() {
return Word.this.length();
}
/*
* (non-Javadoc)
* @see java.util.AbstractList#iterator()
*/
@Override
public java.util.Iterator<I> iterator() {
return Word.this.iterator();
}
}
/**
* Return symbol that is at the specified position
* @param i the position
* @return symbol at position i, <tt>null</tt> if no such symbol exists
*/
public abstract I getSymbol(int index);
/**
* Retrieves the length of this word.
* @return the length of this word.
*/
public abstract int length();
/*
* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
int hash = 5;
for(I sym : this) {
hash *= 89;
hash += (sym != null) ? sym.hashCode() : 0;
}
return hash;
}
/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object other) {
if(this == other)
return true;
if(other == null)
return false;
if(!(other instanceof Word))
return false;
Word<?> otherWord = (Word<?>)other;
int len = otherWord.length();
if(len != length())
return false;
java.util.Iterator<I> thisIt = iterator();
java.util.Iterator<?> otherIt = otherWord.iterator();
while(thisIt.hasNext()) {
I thisSym = thisIt.next();
Object otherSym = otherIt.next();
if(!Objects.equals(thisSym, otherSym))
return false;
}
return true;
}
/*
* (non-Javadoc)
* @see net.automatalib.commons.util.strings.Printable#print(java.lang.Appendable)
*/
@Override
public void print(Appendable a) throws IOException {
if(isEmpty()) {
a.append('ε');
}
else {
StringUtil.appendIterable(a, this, " ");
}
}
/*
* (non-Javadoc)
* @see net.automatalib.commons.util.array.ArrayWritable#size()
*/
@Override
public final int size() {
return length();
}
/**
* Retrieves a word representing the specified subrange of this word.
* As words are immutable, this function usually can be realized quite efficient
* (implementing classes should take care of this).
*
* @param fromIndex the first index, inclusive.
* @param toIndex the last index, exclusive.
* @return the word representing the specified subrange.
*/
public final Word<I> subWord(int fromIndex, int toIndex) {
if(fromIndex < 0 || toIndex < fromIndex || toIndex > length())
throw new IndexOutOfBoundsException("Invalid subword range [" + fromIndex + ", " + toIndex + ")");
return _subWord(fromIndex, toIndex);
}
/**
* Retrieves the subword of this word starting at the given index and extending
* until the end of this word. Calling this method is equivalent to calling
* <pre>w.subWord(fromIndex, w.length())</pre>
* @param fromIndex the first index, inclusive
* @return the word representing the specified subrange
*/
public final Word<I> subWord(int fromIndex) {
if(fromIndex <= 0) {
if(fromIndex == 0)
return this;
throw new IndexOutOfBoundsException("Invalid subword range [" + fromIndex + ",)");
}
return _subWord(fromIndex, length());
}
/**
* Internal subword operation implementation. In contrast to {@link #subWord(int, int)},
* no range checks need to be performed. As this method is flagged as <tt>protected</tt>,
* implementations may rely on the specified indices being valid.
* @param fromIndex the first index, inclusive (guaranteed to be valid)
* @param toIndex the last index, exclusive (guaranteed to be valid)
* @return the word representing the specified subrange
*/
protected Word<I> _subWord(int fromIndex, int toIndex) {
int len = toIndex - fromIndex;
Object[] array = new Object[len];
writeToArray(fromIndex, array, 0, len);
return new SharedWord<>(array);
}
/*
* (non-Javadoc)
* @see java.lang.Iterable#iterator()
*/
@Override
public java.util.Iterator<I> iterator() {
return new Iterator();
}
/*
* (non-Javadoc)
* @see net.automatalib.commons.util.array.ArrayWritable#writeToArray(int, java.lang.Object[], int, int)
*/
@Override
public void writeToArray(int offset, Object[] array, int tgtOffset, int length) {
int idx = offset, arrayIdx = tgtOffset;
while(length
array[arrayIdx++] = getSymbol(idx++);
}
/**
* Retrieves a {@link List} view on the contents of this word.
* @return an unmodifiable list of the contained symbols.
*/
public List<I> asList() {
return new AsList();
}
/**
* Retrieves a prefix of the given length. If <code>length</code>
* is negative, then a prefix consisting of all but the last
* <code>-length</code> symbols is returned.
*
* @param prefixLen the length of the prefix (may be negative, see above).
* @return the prefix of the given length.
*/
public final Word<I> prefix(int prefixLen) {
if(prefixLen < 0)
prefixLen = length() + prefixLen;
return subWord(0, prefixLen);
}
/**
* Retrieves a suffix of the given length. If <code>length</code> is
* negative, then a suffix consisting of all but the first
* <code>-length</code> symbols is returned.
*
* @param suffixLen the length of the suffix (may be negative, see above).
* @return the suffix of the given length.
*/
public final Word<I> suffix(int suffixLen) {
int wordLen = length();
int startIdx = (suffixLen < 0) ? -suffixLen : (wordLen - suffixLen);
return subWord(startIdx, wordLen);
}
/**
* Retrieves the list of all prefixes of this word. In the default implementation,
* the prefixes are lazily instantiated upon the respective calls of {@link List#get(int)}
* or {@link Iterator#next()}.
* @param longestFirst whether to start with the longest prefix (otherwise, the first prefix
* in the list will be the shortest).
* @return a (non-materialized) list containing all prefixes
*/
public List<Word<I>> prefixes(boolean longestFirst) {
return new SubwordList<>(this, true, longestFirst);
}
/**
* Retrieves the list of all suffixes of this word. In the default implementation,
* the suffixes are lazily instantiated upon the respective calls of {@link List#get(int)}
* or {@link Iterator#next()}.
* @param longestFirst whether to start with the longest suffix (otherwise, the first suffix
* in the list will be the shortest).
* @return a (non-materialized) list containing all suffix
*/
public List<Word<I>> suffixes(boolean longestFirst) {
return new SubwordList<>(this, false, longestFirst);
}
/**
* Retrieves the next word after this in canonical order. Figuratively
* speaking, if there are <tt>k</tt> alphabet symbols, one can think of
* a word of length <tt>n</tt> as an <tt>n</tt>-digit radix-<tt>k</tt>
* representation of the number. The next word in canonical order
* is the representation for the number represented by this word plus one.
* @param sigma the alphabet
* @return the next word in canonical order
*/
public Word<I> canonicalNext(Alphabet<I> sigma) {
int len = length();
Object[] symbols = new Object[len];
writeToArray(0, symbols, 0, len);
int alphabetSize = sigma.size();
int i = 0;
boolean overflow = true;
for(I sym : this) {
int nextIdx = (sigma.getSymbolIndex(sym) + 1) % alphabetSize;
symbols[i++] = sigma.getSymbol(nextIdx);
if(nextIdx != 0) {
overflow = false;
break;
}
}
while(i < len) {
symbols[i] = getSymbol(i);
i++;
}
if(overflow) {
Object[] newSymbols = new Object[len+1];
newSymbols[0] = sigma.getSymbol(0);
System.arraycopy(symbols, 0, newSymbols, 1, len);
symbols = newSymbols;
}
return new SharedWord<>(symbols);
}
/**
* Retrieves the last symbol of this word.
* @return the last symbol of this word.
*/
public I lastSymbol() {
return getSymbol(length() - 1);
}
/**
* Retrieves the first symbol of this word.
* @return the first symbol of this word
*/
public I firstSymbol() {
return getSymbol(0);
}
/**
* Appends a symbol to this word and returns the result as a new word.
* @param symbol the symbol to append
* @return the word plus the given symbol
*/
public Word<I> append(I symbol) {
int len = length();
Object[] array = new Object[len + 1];
writeToArray(0, array, 0, len);
array[len] = symbol;
return new SharedWord<>(array);
}
/**
* Prepends a symbol to this word and returns the result as a new word.
* @param symbol the symbol to prepend
* @return the given symbol plus to word.
*/
public Word<I> prepend(I symbol) {
int len = length();
Object[] array = new Object[len+1];
array[0] = symbol;
writeToArray(0, array, 1, len);
return new SharedWord<>(array);
}
/**
* Concatenates this word with several other words and returns the result
* as a new word.
*
* Note that this method cannot be overridden. Implementing classes need to
* override the {@link #_concat(Word...)} method instead.
*
* @param words the words to concatenate with this word
* @return the result of the concatenation
* @see #_concat(Word...)
*/
@SafeVarargs
public final Word<I> concat(Word<I> ...words) {
return _concat(words);
}
/**
* Realizes the concatenation of this word with several other words.
* @param words the words to concatenate
* @return the results of the concatenation
*/
@SuppressWarnings("unchecked")
protected Word<I> _concat(Word<I> ...words) {
if(words.length == 0)
return this;
int len = length();
int totalSize = len;
for(int i = 0; i < words.length; i++)
totalSize += words[i].length();
Object[] array = new Object[totalSize];
writeToArray(0, array, 0, len);
int currOfs = len;
for(int i = 0; i < words.length; i++) {
Word<I> w = words[i];
int wLen = w.length();
w.writeToArray(0, array, currOfs, wLen);
currOfs += wLen;
}
return new SharedWord<>(array);
}
/**
* Checks if this word is a prefix of another word.
* @param other the other word
* @return <tt>true</tt> if this word is a prefix of the other word, <tt>false</tt>
* otherwise.
*/
public boolean isPrefixOf(Word<I> other) {
int len = length(), otherLen = other.length();
if(otherLen < len)
return false;
for(int i = 0; i < len; i++) {
I sym1 = getSymbol(i), sym2 = other.getSymbol(i);
if(!Objects.equals(sym1, sym2))
return false;
}
return true;
}
/**
* Determines the longest common prefix of this word and another
* word.
* @param other the other word
* @return the longest common prefix of this word and the other word
*/
public Word<I> longestCommonPrefix(Word<I> other) {
int len = length(), otherLen = other.length();
int maxIdx = (len < otherLen) ? len : otherLen;
int i = 0;
while(i < maxIdx) {
I sym1 = getSymbol(i), sym2 = getSymbol(i);
if(!Objects.equals(sym1, sym2))
break;
i++;
}
return prefix(i);
}
/**
* Checks if this word is a suffix of another word
* @param other the other word
* @return <tt>true</tt> if this word is a suffix of the other word, <tt>false</tt>
* otherwise.
*/
public boolean isSuffixOf(Word<I> other) {
int len = length(), otherLen = other.length();
if(otherLen < len)
return false;
int ofs = otherLen - len;
for(int i = 0; i < len; i++) {
I sym1 = getSymbol(i), sym2 = other.getSymbol(ofs + i);
if(!Objects.equals(sym1, sym2))
return false;
}
return true;
}
/**
* Determines the longest common suffix of this word and another word.
* @param other the other word
* @return the longest common suffix
*/
public Word<I> longestCommonSuffix(Word<I> other) {
int len = length(), otherLen = other.length();
int minLen = (len < otherLen) ? len : otherLen;
int idx1 = len, idx2 = otherLen;
int i = 0;
while(i < minLen) {
I sym1 = getSymbol(--idx1), sym2 = other.getSymbol(--idx2);
if(!Objects.equals(sym1, sym2))
break;
i++;
}
return suffix(i);
}
/**
* Retrieves a "flattened" version of this word, i.e., without any hierarchical structure
* attached.
* This can be helpful if {@link Word} is subclassed to allow representing, e.g., a concatenation
* dynamically, but due to performance concerns not too many levels of indirection
* should be introduced.
* @return a flattened version of this word.
*/
public Word<I> flatten() {
int len = length();
Object[] array = new Object[len];
writeToArray(0, array, 0, len);
return new SharedWord<>(array);
}
public Word<I> trimmed() {
int len = length();
Object[] array = new Object[len];
writeToArray(0, array, 0, len);
return new SharedWord<>(array);
}
/**
* Checks if this word is empty, i.e., contains no symbols.
* @return <tt>true</tt> if this word is empty, <tt>false</tt> otherwise.
*/
public boolean isEmpty() {
return (length() == 0);
}
} |
package edu.mines.alterego;
import android.content.Context;
import android.content.ContentValues;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.Cursor;
import android.util.Log;
import android.util.Pair;
import android.widget.ListView;
import java.util.Date;
import java.util.ArrayList;
public class CharacterDBHelper extends SQLiteOpenHelper {
private static final String DB_NAME = "alterego";
private static final int DB_VERSION = 1;
public CharacterDBHelper(Context context) {
super(context, DB_NAME, null, DB_VERSION);
}
/**
* For an SQLiteOpenHelper, the onCreate method is called if and only if
* the database-name in question does not already exist. Theoretically,
* this should only happen once ever, and after the one time, updates
* will be applied for schema updates.
*/
@Override
public void onCreate(SQLiteDatabase database) {
/*
* Game table: Base unifying game_id construct
* The game is used to reference
*/
database.execSQL("CREATE TABLE IF NOT EXISTS game ( " +
"game_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"name TEXT" +
")");
database.execSQL("CREATE TABLE IF NOT EXISTS character ( character_id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT, description TEXT, FOREIGN KEY(game_id) REFERENCES game(game_id) )");
database.execSQL("CREATE TABLE IF NOT EXISTS inventory_item ( "+
"inventory_item_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"character_id INTEGER," +
"FOREIGN KEY(character_id) REFERENCES character(character_id)" +
")");
database.execSQL("CREATE TABLE IF NOT EXISTS character_stat ( " +
"character_stat_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"character_id INTEGER," +
"stat_value INTEGER," +
"stat_name INTEGER," +
"description/usage/etc INTEGER," +
"category_id INTEGER," +
"FOREIGN KEY(character_id) REFERENCES character(character_id)" +
"FOREIGN KEY(category_id) REFERENCES category(category_id)" +
")");
database.execSQL("CREATE TABLE IF NOT EXISTS item_stat ( " +
"item_stat_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"inventory_item_id INTEGER," +
"stat_value INTEGER," +
"stat_name INTEGER," +
"description/usage/etc INTEGER," +
"category_id INTEGER," +
"FOREIGN KEY(category_id) REFERENCES category(category_id)" +
"FOREIGN KEY(inventory_item_id) REFERENCES inventory_item(inventory_item_id)" +
")");
database.execSQL("CREATE TABLE IF NOT EXISTS category ( " +
"category_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"category_name TEXT" +
")");
database.execSQL("CREATE TABLE IF NOT EXISTS note ( " +
"note_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"game_id INTEGER," +
"note TEXT" +
"FOREIGN KEY(game_id) REFERENCES game(game_id)" +
")");
/* Example DDL from Matt's Quidditch scoring app
database.execSQL("CREATE TABLE IF NOT EXISTS score ( " +
"score_id INTEGER PRIMARY KEY AUTOINCREMENT, " +
"score_datetime INTEGER, " +
"team_id INTEGER, " + // team_id is a number identifying the team. In this first revision, it will be 0 or 1 for left and right
"amount INTEGER, " +
"snitch INTEGER, " +
"game_id INTEGER, " +
"FOREIGN KEY(game_id) REFERENCES game(game_id) )");
*/
}
@Override
public void onUpgrade(SQLiteDatabase database, int oldVersion, int newVersion) {
// Do nothing.
}
public ArrayList< Pair <Integer, String> > getGames() {
Cursor dbGames = getWritableDatabase().rawQuery("SELECT * from game", null);
dbGames.moveToFirst();
ArrayList<Pair<Integer, String>> games = new ArrayList<Pair<Integer, String>>();
while( !dbGames.isAfterLast()) {
games.add(new Pair<Integer, String> (dbGames.getInt(0), dbGames.getString(1) ) );
dbGames.moveToNext();
}
dbGames.close();
return games;
}
} |
package org.jfree.data.general.junit;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jfree.data.KeyToGroupMap;
import org.jfree.data.Range;
import org.jfree.data.category.CategoryDataset;
import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.data.general.DatasetUtilities;
import org.jfree.data.general.DefaultPieDataset;
import org.jfree.data.general.PieDataset;
import org.jfree.data.xy.DefaultTableXYDataset;
import org.jfree.data.xy.TableXYDataset;
import org.jfree.data.xy.XYDataset;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;
/**
* Tests for the {@link DatasetUtilities} class.
*/
public class DatasetUtilitiesTests extends TestCase {
private static final double EPSILON = 0.0000000001;
/**
 * Returns the tests as a test suite.
 *
 * @return The test suite.
 */
public static Test suite() {
TestSuite suite = new TestSuite(DatasetUtilitiesTests.class);
return suite;
}
/**
 * Constructs a new set of tests.
 *
 * @param name the name of the tests.
 */
public DatasetUtilitiesTests(String name) {
super(name);
}
/**
 * Some tests to verify that Java does what I think it does!
 */
public void testJava() {
// Math.min/max propagate NaN rather than ignoring it.
boolean minIsNaN = Double.isNaN(Math.min(1.0, Double.NaN));
boolean maxIsNaN = Double.isNaN(Math.max(1.0, Double.NaN));
assertTrue(minIsNaN);
assertTrue(maxIsNaN);
}
/**
 * Some tests for the calculatePieDatasetTotal() method.
 */
public void testCalculatePieDatasetTotal() {
DefaultPieDataset dataset = new DefaultPieDataset();
// An empty dataset totals zero.
assertEquals(0.0, DatasetUtilities.calculatePieDatasetTotal(dataset), EPSILON);
// The total accumulates as values are added.
dataset.setValue("A", 1.0);
assertEquals(1.0, DatasetUtilities.calculatePieDatasetTotal(dataset), EPSILON);
dataset.setValue("B", 3.0);
assertEquals(4.0, DatasetUtilities.calculatePieDatasetTotal(dataset), EPSILON);
}
/**
* Some tests for the findDomainBounds() method.
*/
public void testFindDomainBounds() {
XYDataset dataset = createXYDataset1();
Range r = DatasetUtilities.findDomainBounds(dataset);
assertEquals(1.0, r.getLowerBound(), EPSILON);
assertEquals(3.0, r.getUpperBound(), EPSILON);
}
/**
* Some tests for the iterateDomainBounds() method.
*/
public void testIterateDomainBounds() {
XYDataset dataset = createXYDataset1();
Range r = DatasetUtilities.iterateDomainBounds(dataset);
assertEquals(1.0, r.getLowerBound(), EPSILON);
assertEquals(3.0, r.getUpperBound(), EPSILON);
}
/**
* Some tests for the findRangeExtent() method.
*/
public void testFindRangeBounds1() {
CategoryDataset dataset = createCategoryDataset1();
Range r = DatasetUtilities.findRangeBounds(dataset);
assertEquals(1.0, r.getLowerBound(), EPSILON);
assertEquals(6.0, r.getUpperBound(), EPSILON);
}
/**
* Some tests for the findRangeBounds() method.
*/
public void testFindRangeBounds2() {
XYDataset dataset = createXYDataset1();
Range r = DatasetUtilities.findRangeBounds(dataset);
assertEquals(100.0, r.getLowerBound(), EPSILON);
assertEquals(105.0, r.getUpperBound(), EPSILON);
}
/**
* Some tests for the iterateCategoryRangeBounds() method.
*/
public void testIterateCategoryRangeBounds() {
CategoryDataset dataset = createCategoryDataset1();
Range r = DatasetUtilities.iterateRangeBounds(dataset, false);
assertEquals(1.0, r.getLowerBound(), EPSILON);
assertEquals(6.0, r.getUpperBound(), EPSILON);
}
/**
* Some tests for the iterateRangeBounds() method.
*/
public void testIterateRangeBounds() {
XYDataset dataset = createXYDataset1();
Range r = DatasetUtilities.iterateRangeBounds(dataset);
assertEquals(100.0, r.getLowerBound(), EPSILON);
assertEquals(105.0, r.getUpperBound(), EPSILON);
}
/**
* Check the range returned when a series contains a null value.
*/
public void testIterateRangeBounds2() {
XYSeries s1 = new XYSeries("S1");
s1.add(1.0, 1.1);
s1.add(2.0, null);
s1.add(3.0, 3.3);
XYSeriesCollection dataset = new XYSeriesCollection(s1);
Range r = DatasetUtilities.iterateRangeBounds(dataset);
assertEquals(1.1, r.getLowerBound(), EPSILON);
assertEquals(3.3, r.getUpperBound(), EPSILON);
}
/**
* Some tests for the findMinimumDomainValue() method.
*/
public void testFindMinimumDomainValue() {
XYDataset dataset = createXYDataset1();
Number minimum = DatasetUtilities.findMinimumDomainValue(dataset);
assertEquals(new Double(1.0), minimum);
}
/**
* Some tests for the findMaximumDomainValue() method.
*/
public void testFindMaximumDomainValue() {
XYDataset dataset = createXYDataset1();
Number maximum = DatasetUtilities.findMaximumDomainValue(dataset);
assertEquals(new Double(3.0), maximum);
}
/**
* Some tests for the findMinimumRangeValue() method.
*/
public void testFindMinimumRangeValue() {
CategoryDataset d1 = createCategoryDataset1();
Number min1 = DatasetUtilities.findMinimumRangeValue(d1);
assertEquals(new Double(1.0), min1);
XYDataset d2 = createXYDataset1();
Number min2 = DatasetUtilities.findMinimumRangeValue(d2);
assertEquals(new Double(100.0), min2);
}
/**
* Some tests for the findMaximumRangeValue() method.
*/
public void testFindMaximumRangeValue() {
CategoryDataset d1 = createCategoryDataset1();
Number max1 = DatasetUtilities.findMaximumRangeValue(d1);
assertEquals(new Double(6.0), max1);
XYDataset dataset = createXYDataset1();
Number maximum = DatasetUtilities.findMaximumRangeValue(dataset);
assertEquals(new Double(105.0), maximum);
}
/**
* A quick test of the min and max range value methods.
*/
public void testMinMaxRange() {
DefaultCategoryDataset dataset = new DefaultCategoryDataset();
dataset.addValue(100.0, "Series 1", "Type 1");
dataset.addValue(101.1, "Series 1", "Type 2");
Number min = DatasetUtilities.findMinimumRangeValue(dataset);
assertTrue(min.doubleValue() < 100.1);
Number max = DatasetUtilities.findMaximumRangeValue(dataset);
assertTrue(max.doubleValue() > 101.0);
}
/**
* A test to reproduce bug report 803660.
*/
public void test803660() {
DefaultCategoryDataset dataset = new DefaultCategoryDataset();
dataset.addValue(100.0, "Series 1", "Type 1");
dataset.addValue(101.1, "Series 1", "Type 2");
Number n = DatasetUtilities.findMaximumRangeValue(dataset);
assertTrue(n.doubleValue() > 101.0);
}
/**
* A simple test for the cumulative range calculation. The sequence of
* "cumulative" values are considered to be { 0.0, 10.0, 25.0, 18.0 } so
* the range should be 0.0 -> 25.0.
*/
public void testCumulativeRange1() {
DefaultCategoryDataset dataset = new DefaultCategoryDataset();
dataset.addValue(10.0, "Series 1", "Start");
dataset.addValue(15.0, "Series 1", "Delta 1");
dataset.addValue(-7.0, "Series 1", "Delta 2");
Range range = DatasetUtilities.findCumulativeRangeBounds(dataset);
assertEquals(0.0, range.getLowerBound(), 0.00000001);
assertEquals(25.0, range.getUpperBound(), 0.00000001);
}
/**
* A further test for the cumulative range calculation.
*/
public void testCumulativeRange2() {
DefaultCategoryDataset dataset = new DefaultCategoryDataset();
dataset.addValue(-21.4, "Series 1", "Start Value");
dataset.addValue(11.57, "Series 1", "Delta 1");
dataset.addValue(3.51, "Series 1", "Delta 2");
dataset.addValue(-12.36, "Series 1", "Delta 3");
dataset.addValue(3.39, "Series 1", "Delta 4");
dataset.addValue(38.68, "Series 1", "Delta 5");
dataset.addValue(-43.31, "Series 1", "Delta 6");
dataset.addValue(-29.59, "Series 1", "Delta 7");
dataset.addValue(35.30, "Series 1", "Delta 8");
dataset.addValue(5.0, "Series 1", "Delta 9");
Range range = DatasetUtilities.findCumulativeRangeBounds(dataset);
assertEquals(-49.51, range.getLowerBound(), 0.00000001);
assertEquals(23.39, range.getUpperBound(), 0.00000001);
}
/**
* Test the creation of a dataset from an array.
*/
public void testCreateCategoryDataset1() {
String[] rowKeys = {"R1", "R2", "R3"};
String[] columnKeys = {"C1", "C2"};
double[][] data = new double[3][];
data[0] = new double[] {1.1, 1.2};
data[1] = new double[] {2.1, 2.2};
data[2] = new double[] {3.1, 3.2};
CategoryDataset dataset = DatasetUtilities.createCategoryDataset(
rowKeys, columnKeys, data);
assertTrue(dataset.getRowCount() == 3);
assertTrue(dataset.getColumnCount() == 2);
}
/**
* Test the creation of a dataset from an array. This time is should fail
* because the array dimensions are around the wrong way.
*/
public void testCreateCategoryDataset2() {
boolean pass = false;
String[] rowKeys = {"R1", "R2", "R3"};
String[] columnKeys = {"C1", "C2"};
double[][] data = new double[2][];
data[0] = new double[] {1.1, 1.2, 1.3};
data[1] = new double[] {2.1, 2.2, 2.3};
CategoryDataset dataset = null;
try {
dataset = DatasetUtilities.createCategoryDataset(rowKeys,
columnKeys, data);
}
catch (IllegalArgumentException e) {
pass = true; // got it!
}
assertTrue(pass);
assertTrue(dataset == null);
}
public void testMaximumStackedRangeValue() {
double v1 = 24.3;
double v2 = 14.2;
double v3 = 33.2;
double v4 = 32.4;
double v5 = 26.3;
double v6 = 22.6;
Number answer = new Double(Math.max(v1 + v2 + v3, v4 + v5 + v6));
DefaultCategoryDataset d = new DefaultCategoryDataset();
d.addValue(v1, "Row 0", "Column 0");
d.addValue(v2, "Row 1", "Column 0");
d.addValue(v3, "Row 2", "Column 0");
d.addValue(v4, "Row 0", "Column 1");
d.addValue(v5, "Row 1", "Column 1");
d.addValue(v6, "Row 2", "Column 1");
Number max = DatasetUtilities.findMaximumStackedRangeValue(d);
assertTrue(max.equals(answer));
}
/**
* Some checks for the findStackedRangeBounds() method.
*/
public void testFindStackedRangeBoundsForCategoryDataset1() {
CategoryDataset d1 = createCategoryDataset1();
Range r = DatasetUtilities.findStackedRangeBounds(d1);
assertEquals(0.0, r.getLowerBound(), EPSILON);
assertEquals(15.0, r.getUpperBound(), EPSILON);
d1 = createCategoryDataset2();
r = DatasetUtilities.findStackedRangeBounds(d1);
assertEquals(-2.0, r.getLowerBound(), EPSILON);
assertEquals(2.0, r.getUpperBound(), EPSILON);
}
/**
* Some checks for the findStackedRangeBounds() method.
*/
public void testFindStackedRangeBoundsForCategoryDataset2() {
CategoryDataset d1 = new DefaultCategoryDataset();
Range r = DatasetUtilities.findStackedRangeBounds(d1);
assertTrue(r == null);
}
/**
* Some checks for the findStackedRangeBounds() method.
*/
public void testFindStackedRangeBoundsForTableXYDataset1() {
TableXYDataset d2 = createTableXYDataset1();
Range r = DatasetUtilities.findStackedRangeBounds(d2);
assertEquals(-2.0, r.getLowerBound(), EPSILON);
assertEquals(2.0, r.getUpperBound(), EPSILON);
}
/**
* Some checks for the findStackedRangeBounds() method.
*/
public void testFindStackedRangeBoundsForTableXYDataset2() {
DefaultTableXYDataset d = new DefaultTableXYDataset();
Range r = DatasetUtilities.findStackedRangeBounds(d);
assertEquals(r, new Range(0.0, 0.0));
}
/**
* Tests the stacked range extent calculation.
*/
public void testStackedRangeWithMap() {
CategoryDataset d = createCategoryDataset1();
KeyToGroupMap map = new KeyToGroupMap("G0");
map.mapKeyToGroup("R2", "G1");
Range r = DatasetUtilities.findStackedRangeBounds(d, map);
assertEquals(0.0, r.getLowerBound(), EPSILON);
assertEquals(9.0, r.getUpperBound(), EPSILON);
}
/**
* Some checks for the isEmptyOrNull(XYDataset) method.
*/
public void testIsEmptyOrNullXYDataset() {
XYSeriesCollection dataset = null;
assertTrue(DatasetUtilities.isEmptyOrNull(dataset));
dataset = new XYSeriesCollection();
assertTrue(DatasetUtilities.isEmptyOrNull(dataset));
XYSeries s1 = new XYSeries("S1");
dataset.addSeries(s1);
assertTrue(DatasetUtilities.isEmptyOrNull(dataset));
s1.add(1.0, 2.0);
assertFalse(DatasetUtilities.isEmptyOrNull(dataset));
s1.clear();
assertTrue(DatasetUtilities.isEmptyOrNull(dataset));
}
/**
* Some checks for the limitPieDataset() methods.
*/
public void testLimitPieDataset() {
// check that empty dataset is handled OK
DefaultPieDataset d1 = new DefaultPieDataset();
PieDataset d2 = DatasetUtilities.createConsolidatedPieDataset(d1,
"Other", 0.05);
assertEquals(0, d2.getItemCount());
// check that minItem limit is observed
d1.setValue("Item 1", 1.0);
d1.setValue("Item 2", 49.50);
d1.setValue("Item 3", 49.50);
d2 = DatasetUtilities.createConsolidatedPieDataset(d1, "Other", 0.05);
assertEquals(3, d2.getItemCount());
assertEquals("Item 1", d2.getKey(0));
assertEquals("Item 2", d2.getKey(1));
assertEquals("Item 3", d2.getKey(2));
// check that minItem limit is observed
d1.setValue("Item 4", 1.0);
d2 = DatasetUtilities.createConsolidatedPieDataset(d1, "Other", 0.05,
2);
// and that simple aggregation works
assertEquals(3, d2.getItemCount());
assertEquals("Item 2", d2.getKey(0));
assertEquals("Item 3", d2.getKey(1));
assertEquals("Other", d2.getKey(2));
assertEquals(new Double(2.0), d2.getValue("Other"));
}
/**
* Creates a dataset for testing.
*
* @return A dataset.
*/
private CategoryDataset createCategoryDataset1() {
DefaultCategoryDataset result = new DefaultCategoryDataset();
result.addValue(1.0, "R0", "C0");
result.addValue(1.0, "R1", "C0");
result.addValue(1.0, "R2", "C0");
result.addValue(4.0, "R0", "C1");
result.addValue(5.0, "R1", "C1");
result.addValue(6.0, "R2", "C1");
return result;
}
/**
* Creates a dataset for testing.
*
* @return A dataset.
*/
private CategoryDataset createCategoryDataset2() {
DefaultCategoryDataset result = new DefaultCategoryDataset();
result.addValue(1.0, "R0", "C0");
result.addValue(-2.0, "R1", "C0");
result.addValue(2.0, "R0", "C1");
result.addValue(-1.0, "R1", "C1");
return result;
}
/**
* Creates a dataset for testing.
*
* @return A dataset.
*/
private XYDataset createXYDataset1() {
XYSeries series1 = new XYSeries("S1");
series1.add(1.0, 100.0);
series1.add(2.0, 101.0);
series1.add(3.0, 102.0);
XYSeries series2 = new XYSeries("S2");
series2.add(1.0, 103.0);
series2.add(2.0, null);
series2.add(3.0, 105.0);
XYSeriesCollection result = new XYSeriesCollection();
result.addSeries(series1);
result.addSeries(series2);
result.setIntervalWidth(0.0);
return result;
}
/**
* Creates a sample dataset for testing purposes.
*
* @return A sample dataset.
*/
private TableXYDataset createTableXYDataset1() {
DefaultTableXYDataset dataset = new DefaultTableXYDataset();
XYSeries s1 = new XYSeries("Series 1", true, false);
s1.add(1.0, 1.0);
s1.add(2.0, 2.0);
dataset.addSeries(s1);
XYSeries s2 = new XYSeries("Series 2", true, false);
s2.add(1.0, -2.0);
s2.add(2.0, -1.0);
dataset.addSeries(s2);
return dataset;
}
} |
package uk.gov.nationalarchives.droid.command.action;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.ResourceBundle;
import org.apache.commons.io.FileUtils;
import uk.gov.nationalarchives.droid.command.container.ContainerContentIdentifierFactory;
import uk.gov.nationalarchives.droid.command.container.ContainerContentIdentifier;
import uk.gov.nationalarchives.droid.container.ContainerSignature;
import uk.gov.nationalarchives.droid.container.ContainerSignatureDefinitions;
import uk.gov.nationalarchives.droid.container.ContainerSignatureMatchCollection;
import uk.gov.nationalarchives.droid.container.ContainerSignatureSaxParser;
import uk.gov.nationalarchives.droid.container.FileFormatMapping;
import uk.gov.nationalarchives.droid.container.TriggerPuid;
import uk.gov.nationalarchives.droid.core.BinarySignatureIdentifier;
import uk.gov.nationalarchives.droid.core.interfaces.IdentificationRequest;
import uk.gov.nationalarchives.droid.core.interfaces.IdentificationResult;
import uk.gov.nationalarchives.droid.core.interfaces.IdentificationResultCollection;
import uk.gov.nationalarchives.droid.core.interfaces.RequestIdentifier;
import uk.gov.nationalarchives.droid.core.interfaces.resource.FileSystemIdentificationRequest;
import uk.gov.nationalarchives.droid.core.interfaces.resource.RequestMetaData;
import uk.gov.nationalarchives.droid.core.SignatureParseException;
import uk.gov.nationalarchives.droid.core.interfaces.archive.ArchiveFormatResolver;
import uk.gov.nationalarchives.droid.core.interfaces.archive.ContainerIdentifier;
/**
* @author rbrennan
*
*/
public class NoProfileRunCommand implements DroidCommand {
private String signatureFile;
private String containerSignatureFile;
private String[] resources;
private boolean recursive;
private boolean openContainers;
private String[] extensions;
private LocationResolver locationResolver;
private BinarySignatureIdentifier binarySignatureIdentifier;
private ContainerSignatureSaxParser contSigParser;
private ContainerSignatureMatchCollection matches;
private boolean quietFlag = false; // default quiet flag value
private List<ContainerSignature> containerSignatures;
private List<FileFormatMapping> fileFormatMapping;
private List<TriggerPuid> triggerPuid;
private ContainerSignatureDefinitions containerSignatureDefinitions;
private ContainerContentIdentifierFactory containerContentIdentifierFactory;
private ArchiveFormatResolver containerFormatResolver;
/**
* {@inheritDoc}
*/
@Override
public void execute() throws CommandExecutionException {
if(!this.quietFlag)
this.outputRuntimeInformation();
File dirToSearch = new File(resources[0]);
if (!dirToSearch.isDirectory())
throw new CommandExecutionException("Resources directory not found");
binarySignatureIdentifier = new BinarySignatureIdentifier();
File sigFile = new File(signatureFile);
if (!sigFile.exists())
throw new CommandExecutionException("Signature file not found");
binarySignatureIdentifier.setSignatureFile(signatureFile);
try {
binarySignatureIdentifier.init();
} catch (SignatureParseException e) {
throw new CommandExecutionException ("Can't parse signature file");
}
binarySignatureIdentifier.setMaxBytesToScan(-1);
openContainers = false;
if (this.containerSignatureFile != null) {
File contSigFile = new File(containerSignatureFile);
if (!contSigFile.exists())
throw new CommandExecutionException("Container signature file not found");
try {
InputStream in = new FileInputStream(contSigFile);
contSigParser = new ContainerSignatureSaxParser();
containerSignatureDefinitions = contSigParser.parse(in);
fileFormatMapping = containerSignatureDefinitions.getFormats();
triggerPuid = containerSignatureDefinitions.getTiggerPuids();
openContainers = true;
} catch (SignatureParseException e) {
throw new CommandExecutionException ("Can't parse container signature file");
} catch (Exception e) {
throw new CommandExecutionException(e);
}
}
Collection<File> matchedFiles = FileUtils.listFiles(dirToSearch,
this.extensions, this.recursive);
for (File file : matchedFiles) {
URI resourceUri = file.toURI();
RequestMetaData metaData = new RequestMetaData(file.length(),
file.lastModified(), file.getName());
RequestIdentifier identifier = new RequestIdentifier(resourceUri);
identifier.setParentId(1L);
IdentificationRequest request = null;
InputStream in = null;
try {
request = new FileSystemIdentificationRequest(metaData, identifier);
try {
in = new FileInputStream(file);
request.open(in);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
throw new CommandExecutionException(e);
}
}
}
IdentificationResultCollection results =
binarySignatureIdentifier.matchBinarySignatures(request);
IdentificationResultCollection containerResults = new IdentificationResultCollection(request);
if (results.getResults().size() > 0) {
for (IdentificationResult identResult : results.getResults()) {
String puid = identResult.getPuid();
if (puid != null) {
if (openContainers) {
final TriggerPuid containerPuid = getTriggerPuidByPuid(puid);
if (containerPuid != null) {
final ContainerContentIdentifier containerIdentifier =
getContainerContentIdentifierFactory()
.getContainerContentIdentifier(containerPuid.getContainerType(), containerSignatureDefinitions);
containerResults = containerIdentifier.process(file, puid, containerResults);
}
}
}
}
}
IdentificationResultCollection finalResults = new IdentificationResultCollection(request);
if (containerResults.getResults().size() > 0) {
finalResults = containerResults;
} else if (results.getResults().size() > 0) {
finalResults = results;
}
if (finalResults.getResults().size() > 0) {
binarySignatureIdentifier.removeLowerPriorityHits(finalResults);
}
if (finalResults.getResults().size() > 0) {
for (IdentificationResult identResult : finalResults.getResults()) {
System.out.println(file.getAbsolutePath() + "," + identResult.getPuid());
}
} else {
System.out.println(file.getAbsolutePath() + ",Unknown");
}
} catch (IOException e) {
throw new CommandExecutionException(e);
} finally {
if (request != null) {
try {
request.close();
} catch (IOException e) {
throw new CommandExecutionException(e);
}
}
}
}
}
private TriggerPuid getTriggerPuidByPuid(final String puid) {
for (final TriggerPuid tp : triggerPuid) {
if (tp.getPuid().equals(puid)) {
return tp;
}
}
return null;
}
/**
* @param resources the resources to set
*/
public void setResources(String[] resources) {
this.resources = resources;
}
public void setSignatureFile(String signatureFile) {
this.signatureFile = signatureFile;
}
public void setContainerSignatureFile(String containerSignatureFile) {
this.containerSignatureFile = containerSignatureFile;
}
public void setRecursive(boolean recursive) {
this.recursive = recursive;
}
public void setExtensionFilter(String[] extensions) {
// No need to normalize extensions arr if empty, listFiles accepts null value
this.extensions = extensions;
}
public void setQuiet(boolean quiet) {
this.quietFlag = quiet;
}
/**
* @param locationResolver the locationResolver to set
*/
public void setLocationResolver(LocationResolver locationResolver) {
this.locationResolver = locationResolver;
}
private void outputRuntimeInformation() {
String versionString = ResourceBundle.getBundle("options").getString("version_no");
System.out.println("DROID " + versionString + " No Profile mode: Runtime Information");
System.out.println("Binary signature file: " + this.signatureFile);
if (this.containerSignatureFile == null)
System.out.println("Container signature file: None");
else
System.out.println("Container signature file: " + this.containerSignatureFile);
if (this.extensions == null)
System.out.println("Extension filter: No filter set");
else
System.out.println("Extension filter: " + Arrays.toString(this.extensions).replace("[", "").replace("]", "").trim());
if (this.recursive == false)
System.out.println("Recurse folders: False");
else
System.out.println("Recurse folders: True");
}
public ContainerContentIdentifierFactory getContainerContentIdentifierFactory() {
return containerContentIdentifierFactory;
}
public void setContainerContentIdentifierFactory(ContainerContentIdentifierFactory containerContentIdentifierFactory) {
this.containerContentIdentifierFactory = containerContentIdentifierFactory;
}
} |
package org.eclipse.birt.report.engine.executor.buffermgr;
/**
 * A sparse table layout buffer used while executing a report table.  Cells
 * are tracked in a grid of {@code Cell} objects; a cell occupying several
 * grid positions is stored once (CELL_USED) with span-cells (CELL_SPANED)
 * pointing back to it.  Row/col "count" is the logical size; "bufferSize"
 * is the allocated capacity.
 */
public class Table
{
	/**
	 * should we increase the columns size dynamically.
	 */
	protected boolean dynamicColumns = true;
	/**
	 * should we omit the empty cells when the cell conflict with drop areas.
	 */
	protected boolean omitEmptyCell = true;
	/**
	 * rows in the table layout
	 */
	Row[] rows;
	// logical number of rows/columns currently in use
	int rowCount;
	int colCount;
	// column index where the next created cell will be placed (-1 = no row yet)
	int nextColId;
	// allocated capacity of the rows array / each row's cells array
	int rowBufferSize;
	int colBufferSize;
	/**
	 * Creates a table with the given initial buffer capacity.
	 *
	 * @param rowSize initial row capacity
	 * @param colSize initial column capacity
	 */
	public Table( int rowSize, int colSize)
	{
		nextColId = -1;
		rowCount = 0;
		colCount = 0;
		ensureSize(rowSize, colSize);
	}
	/**
	 * Creates a table with a default 10x10 buffer capacity.
	 */
	public Table()
	{
		this(10, 10);
	}
	/**
	 * reset the table model.
	 *
	 */
	public void reset( )
	{
		fillEmptyCells( 0, 0, rowBufferSize, colBufferSize );
		nextColId = -1;
		rowCount = 0;
		colCount = 0;
	}
	/** @return the logical number of rows in the table. */
	public int getRowCount( )
	{
		return rowCount;
	}
	/** @return the logical number of columns in the table. */
	public int getColCount( )
	{
		return colCount;
	}
	/**
	 * create a row in the table model
	 *
	 * Cells of the previous row that still span past it (negative rowSpan or
	 * rowSpan reaching beyond the new row) are continued into the new row as
	 * span-cells.
	 *
	 * @param content
	 *            row content
	 */
	public void createRow( Object content )
	{
		ensureSize( rowCount + 1, colCount );
		Row row = rows[rowCount];
		assert ( row.rowId == rowCount );
		row.content = content;
		if ( rowCount > 0 )
		{
			Cell[] cells = row.cells;
			Cell[] lastCells = rows[rowCount - 1].cells;
			for ( int cellId = 0; cellId < colCount; cellId++ )
			{
				Cell cell = lastCells[cellId];
				if ( cell.status == Cell.CELL_SPANED )
				{
					// resolve the span-cell to the real occupying cell
					cell = cell.getCell( );
				}
				if ( cell.status == Cell.CELL_USED )
				{
					// rowSpan < 0 means "drop" (span to end of band/table)
					if ( cell.rowSpan < 0 || cell.rowId + cell.rowSpan > rowCount )
					{
						cells[cellId] = Cell.createSpanCell( rowCount, cellId,
								cell );
					}
				}
			}
		}
		rowCount++;
		nextColId = 0;
	}
	/**
	 * create a cell in the current row.
	 *
	 * if the cell content is not empty
	 * 		put it into the table
	 * if the cell is empty:
	 * 		if the cell has been used, drop the cell
	 * 		else, put it into the table.
	 *
	 * @param cellId
	 *            column index of the cell.
	 * @param rowSpan
	 *            row span of the cell
	 * @param colSpan
	 *            col span of the cell
	 * @param content
	 *            cell content
	 */
	public void createCell( int cellId, int rowSpan, int colSpan,
			IContent content )
	{
		if ( cellId == -1 )
		{
			cellId = getNextEmptyCell( );
		}
		int rowId = rowCount - 1;
		ensureSize( rowId + 1, cellId + 1);
		Cell cell = rows[rowId].cells[cellId];
		int status = cell.getStatus( );
		if ( status != Cell.CELL_EMPTY )
		{
			if ( omitEmptyCell && (content == null || content.isEmpty( )) )
			{
				//content is empty, and the cell is used by others,
				//omit empty cell is set to true, so just skip
				//the empty one.
				return;
			}
			if ( status == Cell.CELL_USED )
			{
				removeCell( cell );
			}
			else if ( status == Cell.CELL_SPANED )
			{
				Cell used = cell.getCell( );
				assert ( used.status == Cell.CELL_USED );
				if ( used.rowId < rowId )
				{
					//the conflict cell is above current row, so reduce the row
					// span.
					resizeCell( used, rowId - used.rowId, used.colSpan );
				}
				else
				{
					//the confict cell in the same row, so reduce the column
					// span.
					assert used.rowId == rowId;
					assert used.colId < cellId;
					resizeCell( used, used.rowSpan, cellId - used.colId );
				}
			}
		}
		//now the cell is empty
		colSpan = getMaxColSpan( cellId, colSpan );
		ensureSize( rowCount, cellId + colSpan );
		Cell newCell = Cell.createCell( rowId, cellId, rowSpan, colSpan,
				content );
		Cell[] cells = rows[rowId].cells;
		rows[rowId].cells[cellId] = newCell;
		nextColId = cellId + colSpan;
		// mark the remaining columns covered by this cell as span-cells
		for ( cellId = cellId + 1; cellId < nextColId; cellId++ )
		{
			cells[cellId] = Cell.createSpanCell( rowId, cellId, newCell );
		}
		if ( nextColId > colCount )
		{
			colCount = nextColId;
		}
	}
	/**
	 * Terminates all drop cells: every used cell reachable from the last row
	 * has its row span clamped so it ends at the current last row.
	 */
	public void resolveDropCells( )
	{
		if ( rowCount <= 0 )
		{
			return;
		}
		Cell[] cells = rows[rowCount - 1].cells;
		for ( int cellId = 0; cellId < colCount; cellId++ )
		{
			Cell cell = cells[cellId];
			if ( cell.status == Cell.CELL_SPANED )
			{
				cell = cell.getCell( );
			}
			if ( cell.status == Cell.CELL_USED )
			{
				cell.rowSpan = rowCount - cell.rowId;
			}
		}
	}
	/**
	 * Terminates only the drop cells whose rowSpan equals the given band id
	 * (drop markers are encoded in rowSpan), clamping them to the last row.
	 *
	 * @param bandId the drop-band marker to resolve
	 */
	public void resolveDropCells( int bandId )
	{
		if ( rowCount <= 0 )
		{
			return;
		}
		Cell[] cells = rows[rowCount - 1].cells;
		for ( int cellId = 0; cellId < colCount; cellId++ )
		{
			Cell cell = cells[cellId];
			if ( cell.status == Cell.CELL_SPANED )
			{
				cell = cell.getCell( );
			}
			if ( cell.status == Cell.CELL_USED )
			{
				if ( cell.rowSpan == bandId )
				{
					cell.rowSpan = rowCount - cell.rowId;
				}
			}
		}
	}
	/**
	 * Tests whether any cell in the last row still spans past the current
	 * last row (negative rowSpan = drop, or explicit span beyond rowCount).
	 *
	 * @return true if an unresolved drop cell exists
	 */
	public boolean hasDropCell( )
	{
		if ( rowCount <= 0 )
		{
			return false;
		}
		Cell[] cells = rows[rowCount - 1].cells;
		for ( int cellId = 0; cellId < colCount; cellId++ )
		{
			Cell cell = cells[cellId];
			if ( cell.status == Cell.CELL_SPANED )
			{
				cell = cell.getCell( );
			}
			if ( cell.status == Cell.CELL_USED )
			{
				if ( cell.rowSpan < 0 || cell.rowSpan > rowCount - cell.rowId )
				{
					return true;
				}
			}
		}
		return false;
	}
	/**
	 * get the next empty cell.
	 *
	 * Scans the current row from nextColId; if no empty slot exists, returns
	 * colCount (grow) when dynamicColumns is on, otherwise the last column.
	 *
	 * @return column index for the next cell
	 */
	protected int getNextEmptyCell( )
	{
		assert rowCount > 0;
		Cell[] cells = rows[rowCount - 1].cells;
		for ( int colId = nextColId; colId < colCount; colId++ )
		{
			if ( cells[colId].status == Cell.CELL_EMPTY )
			{
				return colId;
			}
		}
		if ( dynamicColumns )
		{
			return colCount;
		}
		return colCount - 1;
	}
	/**
	 * Limits a requested column span so it does not overwrite an occupied
	 * cell to the right; may exceed colCount only when dynamicColumns is on.
	 *
	 * @param colId starting column of the cell
	 * @param colSpan requested span
	 * @return the largest usable span
	 */
	protected int getMaxColSpan( int colId, int colSpan )
	{
		int checkSize = colCount - colId;
		if ( checkSize > colSpan )
			checkSize = colSpan;
		Cell[] cells = rows[rowCount - 1].cells;
		for ( int i = 1; i < checkSize; i++ )
		{
			if ( cells[colId + i].status != Cell.CELL_EMPTY )
			{
				return i;
			}
		}
		if ( dynamicColumns )
		{
			return colSpan;
		}
		return checkSize;
	}
	/**
	 * Grows the row/column buffers to at least the requested capacity,
	 * copying existing rows/cells and padding new slots with EMPTY_CELL.
	 * Never shrinks.
	 *
	 * @param newRowBufferSize required row capacity
	 * @param newColBufferSize required column capacity
	 */
	protected void ensureSize( int newRowBufferSize, int newColBufferSize )
	{
		if ( newRowBufferSize > rowBufferSize )
		{
			Row[] newRows = new Row[newRowBufferSize];
			if ( rows != null )
			{
				System.arraycopy( rows, 0, newRows, 0, rowCount );
			}
			for ( int rowId = rowBufferSize; rowId < newRowBufferSize; rowId++ )
			{
				Row row = new Row( rowId);
				Cell[] cells = new Cell[colBufferSize];
				for ( int colId = 0; colId < colBufferSize; colId++ )
				{
					cells[colId] = Cell.EMPTY_CELL;
				}
				row.cells = cells;
				newRows[rowId] = row;
			}
			rows = newRows;
			rowBufferSize = newRowBufferSize;
		}
		if ( newColBufferSize > colBufferSize )
		{
			for ( int rowId = 0; rowId < rowBufferSize; rowId++ )
			{
				Row row = rows[rowId];
				Cell[] newCells = new Cell[newColBufferSize];
				if ( row.cells != null )
				{
					System.arraycopy( row.cells, 0, newCells, 0, colBufferSize );
				}
				for ( int colId = colBufferSize; colId < newColBufferSize; colId++ )
				{
					newCells[colId] = Cell.EMPTY_CELL;
				}
				row.cells = newCells;
			}
			colBufferSize = newColBufferSize;
		}
	}
	/**
	 * remove the cell from table layout buffer. The grid cell used by this cell
	 * fills EMPTY_CELL.
	 *
	 * @param cell
	 *            the used cell to remove (its whole spanned area is cleared)
	 */
	protected void removeCell( Cell cell )
	{
		int rowId = cell.rowId;
		int colId = cell.colId;
		int rowSpan = cell.rowSpan;
		int colSpan = cell.colSpan;
		// negative rowSpan marks a drop cell: treat it as spanning to the end
		if ( rowSpan < 0 )
			rowSpan = rowCount - rowId;
		if ( colId + colSpan > colCount )
			colSpan = colCount - colId;
		fillEmptyCells( rowId, colId, rowSpan, colSpan );
	}
	/**
	 * fill empty cells in the table.
	 *
	 * @param rowId
	 *            row index
	 * @param colId
	 *            col index
	 * @param rowSize
	 *            fill area size
	 * @param colSize
	 *            fill area size
	 */
	protected void fillEmptyCells( int rowId, int colId, int rowSize,
			int colSize )
	{
		// clamp the fill area to the logical table bounds
		int lastRowId = rowId + rowSize;
		int lastColId = colId + colSize;
		if (lastRowId > rowCount) lastRowId = rowCount;
		if (lastColId > colCount) lastColId = colCount;
		for ( int i = rowId; i < lastRowId; i++ )
		{
			Cell[] cells = rows[i].cells;
			for ( int j = colId; j < lastColId; j++ )
			{
				cells[j] = Cell.EMPTY_CELL;
			}
		}
	}
	/**
	 * we never change both the row span and col span at the same time.
	 *
	 * @param cell
	 *            the cell to be changed
	 * @param newRowSpan
	 *            new row span
	 * @param newColSpan
	 *            new col span
	 */
	protected void resizeCell( Cell cell, int newRowSpan, int newColSpan )
	{
		assert cell.status == Cell.CELL_USED;
		int rowId = cell.rowId;
		int colId = cell.colId;
		int rowSpan = cell.rowSpan;
		if ( rowSpan <= 0 )
		{
			rowSpan = rowCount - rowId;
		}
		int colSpan = cell.colSpan;
		assert rowSpan >= newRowSpan && colSpan >= newColSpan;
		// clear the columns shed on the right, then the rows shed below
		fillEmptyCells( rowId, colId + newColSpan, rowSpan, colSpan
				- newColSpan);
		fillEmptyCells( rowId + newRowSpan, colId, rowSpan - newRowSpan,
				newColSpan );
		cell.colSpan = newColSpan;
		cell.rowSpan = newRowSpan;
	}
	/** @return the cell at the given grid position (may be a span-cell). */
	public Cell getCell(int rowId, int colId)
	{
		return rows[rowId].cells[colId];
	}
	/** @return the row at the given index. */
	public Row getRow(int rowId)
	{
		return rows[rowId];
	}
}
package it.unibz.inf.ontop.answering.logging.impl;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import it.unibz.inf.ontop.answering.logging.QueryLogger;
import it.unibz.inf.ontop.answering.logging.impl.ClassAndPropertyExtractor.ClassesAndProperties;
import it.unibz.inf.ontop.exception.OntopReformulationException;
import it.unibz.inf.ontop.injection.OntopReformulationSettings;
import it.unibz.inf.ontop.iq.IQ;
import it.unibz.inf.ontop.spec.ontology.InconsistentOntologyException;
import org.apache.commons.rdf.api.IRI;
import javax.annotation.Nullable;
import java.io.PrintStream;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.UUID;
import java.util.stream.Collectors;
public class QueryLoggerImpl implements QueryLogger {
protected static final String REFORMATION_EXC_MSG = "query:exception-reformulation";
protected static final String EVALUATION_EXC_MSG = "query:exception-evaluation";
protected static final String CONNECTION_EXC_MSG = "query:exception-connection";
protected static final String CONVERSION_EXC_MSG = "query:exception-conversion";
protected static final String SPARQL_QUERY_KEY = "sparqlQuery";
protected static final String REFORMULATED_QUERY_KEY = "reformulatedQuery";
protected static final String CLASSES_KEY = "classesUsedInQuery";
protected static final String PROPERTIES_KEY = "propertiesUsedInQuery";
protected static final String TABLES_KEY = "tables";
private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
private final UUID queryId;
private final long creationTime;
private final PrintStream outputStream;
private final OntopReformulationSettings settings;
private final boolean disabled;
private final String applicationName;
private long reformulationTime;
private long unblockedResulSetTime;
private final ClassAndPropertyExtractor classAndPropertyExtractor;
private final RelationNameExtractor relationNameExtractor;
@Nullable
private ImmutableSet<IRI> classes, properties;
@Nullable
private ImmutableSet<String> relationNames;
@Nullable
private String sparqlQueryString;
    /**
     * Guice-injectable constructor; delegates to the main constructor with
     * {@code System.out} as the log destination.
     */
    @Inject
    protected QueryLoggerImpl(OntopReformulationSettings settings,
                              ClassAndPropertyExtractor classAndPropertyExtractor,
                              RelationNameExtractor relationNameExtractor) {
        this(System.out, settings, classAndPropertyExtractor, relationNameExtractor);
    }
    /**
     * Main constructor.  Captures the creation time and assigns a fresh
     * random query id; the two timing fields start at -1 meaning
     * "phase not reached yet" (checked by the declare* methods).
     *
     * @param outputStream destination for the JSON log lines
     */
    protected QueryLoggerImpl(PrintStream outputStream, OntopReformulationSettings settings,
                              ClassAndPropertyExtractor classAndPropertyExtractor,
                              RelationNameExtractor relationNameExtractor) {
        // Logging is a no-op for this instance when disabled in the settings.
        this.disabled = !settings.isQueryLoggingEnabled();
        this.outputStream = outputStream;
        this.settings = settings;
        this.classAndPropertyExtractor = classAndPropertyExtractor;
        this.relationNameExtractor = relationNameExtractor;
        this.queryId = UUID.randomUUID();
        creationTime = System.currentTimeMillis();
        applicationName = settings.getApplicationName();
        // -1 sentinels: reformulation / result-set phases not declared yet
        reformulationTime = -1;
        unblockedResulSetTime = -1;
    }
/**
 * Records the end of query reformulation and emits one JSON log line with
 * the query id, optional SPARQL/reformulated-query text, the classes,
 * properties and tables used, the reformulation duration and whether the
 * reformulation came from the cache.
 */
@Override
public void declareReformulationFinishedAndSerialize(IQ reformulatedQuery, boolean wasCached) {
if (disabled)
return;
// The reformulated query text is only logged when enabled in the settings;
// serializeEntry appends its own trailing comma.
String reformulatedQueryString = settings.isReformulatedQueryIncludedIntoQueryLog()
? serializeEntry(REFORMULATED_QUERY_KEY, reformulatedQuery.toString())
: "";
reformulationTime = System.currentTimeMillis();
// TODO: use a proper framework
String json = String.format(
"{\"@timestamp\": \"%s\", " +
"\"application\": \"%s\", " +
"\"message\": \"query:reformulated\", " +
"\"payload\": { " +
"\"queryId\": \"%s\", " +
// Each of these五 entries is either empty or carries its own trailing comma.
"%s %s %s %s %s" +
"\"reformulationDuration\": %d, " +
"\"reformulationCacheHit\": %b } }",
serializeTimestamp(reformulationTime),
applicationName,
queryId,
serializeEntry(SPARQL_QUERY_KEY, sparqlQueryString),
reformulatedQueryString,
serializeArrayEntry(CLASSES_KEY, classes),
serializeArrayEntry(PROPERTIES_KEY, properties),
serializeArrayEntry(TABLES_KEY, relationNames),
reformulationTime - creationTime,
wasCached);
outputStream.println(json);
}
/**
 * Records the moment the result set became available and emits one JSON
 * log line with the execution duration since the end of reformulation.
 *
 * @throws IllegalStateException if reformulation was never declared finished
 */
@Override
public void declareResultSetUnblockedAndSerialize() {
if (disabled)
return;
unblockedResulSetTime = System.currentTimeMillis();
if (reformulationTime == -1)
throw new IllegalStateException("Reformulation should have been declared as finished");
// TODO: use a proper framework
String json = String.format(
"{\"@timestamp\": \"%s\", \"application\": \"%s\", \"message\": \"query:result-set-unblocked\", \"payload\": { \"queryId\": \"%s\", \"executionBeforeUnblockingDuration\": %d } }",
serializeTimestamp(unblockedResulSetTime),
applicationName,
queryId,
unblockedResulSetTime - reformulationTime);
outputStream.println(json);
}
/**
 * Records that the last result row was fetched and emits one JSON log line
 * with the total and the execution+fetch durations plus the result count.
 *
 * @param resultCount number of results returned to the client
 * @throws IllegalStateException if the result set was never declared unblocked
 */
@Override
public void declareLastResultRetrievedAndSerialize(long resultCount) {
    if (disabled)
        return;
    long lastResultFetchedTime = System.currentTimeMillis();
    if (unblockedResulSetTime == -1)
        throw new IllegalStateException("Result set should have been declared as unblocked");
    // TODO: use a proper framework
    // FIX: the format string was missing the comma between the "application"
    // and "message" members, which made every emitted entry invalid JSON.
    String json = String.format(
        "{\"@timestamp\": \"%s\", \"application\": \"%s\", \"message\": \"query:last-result-fetched\", \"payload\": { \"queryId\": \"%s\", \"executionAndFetchingDuration\": %d, \"totalDuration\": %d, \"resultCount\": %d } }",
        serializeTimestamp(lastResultFetchedTime),
        applicationName,
        queryId,
        lastResultFetchedTime - reformulationTime,
        lastResultFetchedTime - creationTime,
        resultCount);
    outputStream.println(json);
}
// The four methods below log the different failure phases as JSON entries;
// the message constants are declared at the top of the class.
/** Logs a failure during query reformulation. */
@Override
public void declareReformulationException(OntopReformulationException e) {
declareException(e, REFORMATION_EXC_MSG);
}
/** Logs a failure during query evaluation. */
@Override
public void declareEvaluationException(Exception e) {
declareException(e, EVALUATION_EXC_MSG);
}
/** Logs a failure while obtaining/using the DB connection. */
@Override
public void declareConnectionException(Exception e) {
declareException(e, CONNECTION_EXC_MSG);
}
/** Logs a failure while converting results (e.g. inconsistent ontology). */
@Override
public void declareConversionException(InconsistentOntologyException e) {
declareException(e, CONVERSION_EXC_MSG);
}
/**
 * Stores the SPARQL query string for later logging (only when the
 * corresponding setting is enabled).
 *
 * @throws IllegalStateException if a SPARQL query was already set
 */
@Override
public void setSparqlQuery(String sparqlQuery) {
if (disabled || (!settings.isSparqlQueryIncludedIntoQueryLog()))
return;
if (sparqlQueryString != null)
throw new IllegalStateException("Already specified SPARQL query");
sparqlQueryString = sparqlQuery;
}
/**
 * Extracts and stores the classes and properties used by the SPARQL IQ
 * (only when the corresponding setting is enabled).
 */
@Override
public void setSparqlIQ(IQ sparqlIQ) {
if (disabled || (!settings.areClassesAndPropertiesIncludedIntoQueryLog()))
return;
ClassesAndProperties classesAndProperties = classAndPropertyExtractor.extractClassesAndProperties(sparqlIQ);
classes = classesAndProperties.getClasses();
properties = classesAndProperties.getProperties();
}
/**
 * Extracts and stores the relation (table) names used by the planned query
 * (only when the corresponding setting is enabled).
 */
@Override
public void setPlannedQuery(IQ plannedQuery) {
if (disabled || (!settings.areTablesIncludedIntoQueryLog()))
return;
relationNames = relationNameExtractor.extractRelationNames(plannedQuery);
}
/**
 * Emits one JSON log line describing an exception that occurred while
 * processing the query.
 *
 * @param e the exception whose message is logged
 * @param exceptionType one of the *_EXC_MSG constants identifying the phase
 */
protected void declareException(Exception e, String exceptionType) {
    if (disabled)
        return;
    // TODO: use a proper framework
    // FIXES: (1) the format string was missing the comma between the
    // "application" and "message" members; (2) the exception message was
    // interpolated unquoted and unescaped, so any message (or a message
    // containing double quotes) broke the JSON. String.valueOf guards
    // against a null getMessage().
    String json = String.format(
        "{\"@timestamp\": \"%s\", \"application\": \"%s\", \"message\": \"%s\", \"payload\": { \"queryId\": \"%s\", \"exception\": \"%s\"} }",
        serializeTimestamp(System.currentTimeMillis()),
        applicationName,
        exceptionType,
        queryId,
        escapeDoubleQuotes(String.valueOf(e.getMessage())));
    outputStream.println(json);
}
/**
 * Formats an epoch-millis timestamp as an ISO-8601 string.
 *
 * FIX: SimpleDateFormat is not thread-safe and DATE_FORMAT is a shared
 * static instance; concurrent queries logging at the same time could
 * corrupt its internal state, so the access is synchronized.
 */
protected String serializeTimestamp(long time) {
    synchronized (DATE_FORMAT) {
        return DATE_FORMAT.format(new Timestamp(time));
    }
}
/**
 * Serializes one JSON string member followed by a trailing comma and space
 * (so callers can concatenate entries directly), or returns the empty
 * string when the value is absent.
 */
protected String serializeEntry(String key, @Nullable String value) {
    if (value == null) {
        return "";
    }
    return String.format("\"%s\": \"%s\", ", key, escapeDoubleQuotes(value));
}
/**
 * Backslash-escapes every double quote so the value can be embedded in a
 * JSON string literal.
 *
 * FIX: the original replaceAll("\"", "\\\"") was a no-op — in a regex
 * *replacement* string, the two characters \" simply denote a literal
 * quote (Matcher.quoteReplacement semantics). String.replace treats both
 * arguments literally, so the quote is now actually prefixed with a
 * backslash.
 */
protected String escapeDoubleQuotes(String value) {
    return value.replace("\"", "\\\"");
}
/**
 * Serializes a set as a keyed JSON array member followed by a trailing
 * comma (mirroring serializeEntry), or returns the empty string when the
 * set is absent.
 *
 * FIX: the original ignored {@code key} and emitted a bare "[...]" with no
 * trailing comma, which produced invalid JSON when interpolated between
 * other members in declareReformulationFinishedAndSerialize. Elements are
 * now quoted and escaped like every other string value.
 */
protected String serializeArrayEntry(String key, ImmutableSet<? extends Object> arguments) {
    if (arguments == null)
        return "";
    return String.format("\"%s\": [%s], ", key, arguments.stream()
        .map(a -> "\"" + escapeDoubleQuotes(a.toString()) + "\"")
        .collect(Collectors.joining(", ")));
}
} |
package edu.emory.cci.aiw.cvrg.eureka.services.config;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Singleton;
/**
* Looks up the application properties file (eureka-services.properties) and
* presents the values contained in the file to the rest of the application.
*
* @author hrathod
*
*/
@Singleton
public class ApplicationProperties {
/**
* The class level logger.
*/
private static final Logger LOGGER = LoggerFactory
.getLogger(ApplicationProperties.class);
/**
* Name of the properties file that is required for application
* configuration.
*/
private static final String PROPERTIES_FILE = "/eureka-services.properties";
/**
* Holds an instance of the properties object which contains all the
* application configuration properties.
*/
private final Properties properties;
/**
* No-arg constructor, looks for the application configuration file and
* loads up the properties.
*/
public ApplicationProperties() {
InputStream inputStream = this.getClass().getResourceAsStream(
PROPERTIES_FILE);
if (inputStream == null) {
throw new RuntimeException(
"Missing application configuration file: "
+ PROPERTIES_FILE);
}
this.properties = new Properties();
try {
this.properties.load(inputStream);
} catch (IOException e) {
throw new RuntimeException(e);
}
try {
inputStream.close();
} catch (IOException e) {
// do nothing
}
}
/**
* Get the URL to access the back-end's configuration information for a
* user.
*
* @return A string containing the back-end's configuration end-point URL.
*/
public String getEtlConfGetUrl() {
return this.properties.getProperty("eureka.services.etl.conf.get.url");
}
/**
* Get the URL to submit a new configuration item to the ETL backend
* service.
*
* @return A string containing the back-end's configuration submission URL.
*/
public String getEtlConfSubmitUrl() {
return this.properties
.getProperty("eureka.services.etl.conf.submit.url");
}
/**
* Get the URL to access the back-end's job submission end-point.
*
* @return A string containing the back-end's job submission end-point.
*/
public String getEtlJobSubmitUrl() {
return this.properties
.getProperty("eureka.services.etl.job.submit.url");
}
/**
* Get the URL to access the back-end's job status update information
* end-point.
*
* @return A string containing the back-end's job status update information
* end-point.
*/
public String getEtlJobUpdateUrl() {
return this.properties
.getProperty("eureka.services.etl.job.update.url");
}
/**
* Get the size of the job executor thread pool.
*
* @return The size of the job executor thread pool from the configuration
* file, or 5 as the default if no value can be determined.
*/
public int getJobPoolSize() {
return this.getIntValue("eureka.services.jobpool.size", 5);
}
/**
* Get the number of hours to keep a user registration without verification,
* before deleting it from the database.
*
* @return The number of hours listed in the configuration, and 24 if the
* configuration is not found.
*/
public int getRegistrationTimeout() {
return this.getIntValue("eureka.services.registration.timeout", 24);
}
/**
* Get the verification base URL, to be used in sending a verification email
* to the user.
*
* @return The verification base URL, as found in the application
* configuration file.
*/
public String getVerificationUrl() {
return this.properties.getProperty("eureka.services.email.verify.url");
}
/**
* Get the verification email subject line.
*
* @return The subject for the verification email.
*/
public String getVerificationEmailSubject() {
return this.properties
.getProperty("eureka.services.email.verify.subject");
}
/**
* Get the activation email subject line.
*
* @return The subject for the activation email.
*/
public String getActivationEmailSubject() {
return this.properties
.getProperty("eureka.services.email.activation.subject");
}
/**
* Get the base URL for the application front-end.
*
* @return The base URL.
*/
public String getApplicationUrl() {
return this.properties.getProperty("eureka.services.url");
}
/**
* Get the support email address for the application.
*
* @return The support email address.
*/
public String getSupportEmail() {
return this.properties.getProperty("eureka.services.support.email");
}
/**
* Get the password change email subject line.
*
* @return The email subject line.
*/
public String getPasswordChangeEmailSubject() {
return this.properties
.getProperty("eureka.services.email.password.subject");
}
public String getPasswordResetEmailSubject() {
return this.properties
.getProperty("eureka.services.email.reset.subject");
}
/**
* Utility method to get an int from the properties file.
*
* @param propertyName The name of the property.
* @param defaultValue The default value to return, if the property is not
* found, or is malformed.
* @return The property value, as an int.
*/
private int getIntValue(final String propertyName, int defaultValue) {
int result;
String property = this.properties.getProperty(propertyName);
try {
result = Integer.parseInt(property);
} catch (NumberFormatException e) {
LOGGER.warn("Invalid integer property in configuration: {}",
propertyName);
result = defaultValue;
}
return result;
}
} |
package edu.emory.cci.aiw.cvrg.eureka.services.resource;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import com.google.inject.Inject;
import edu.emory.cci.aiw.cvrg.eureka.common.comm.PropositionWrapper;
import edu.emory.cci.aiw.cvrg.eureka.common.entity.Proposition;
import edu.emory.cci.aiw.cvrg.eureka.services.dao.PropositionDao;
import edu.emory.cci.aiw.cvrg.eureka.services.dao.UserDao;
/**
 * REST Web Service exposing CRUD-style operations on propositions, both
 * user-defined and (currently stubbed) system-level ones.
 *
 * @author hrathod
 */
@Path("/proposition")
public class PropositionResource {
// DAO for Proposition entities.
private final PropositionDao propositionDao;
// DAO used to resolve User entities (proposition owners).
private final UserDao userDao;
/**
 * Creates a new instance of PropositionResource
 */
@Inject
public PropositionResource(PropositionDao inPropositionDao,
UserDao inUserDao) {
this.propositionDao = inPropositionDao;
this.userDao = inUserDao;
}
// Lists the root-level system propositions. Currently returns a single
// hard-coded stub element (see fetchSystemProposition).
@GET
@Path("/system/list")
@Produces(MediaType.APPLICATION_JSON)
public List<PropositionWrapper> getSystemPropositions() {
// TODO: Call the ETL REST endpoint to get the real list of root level elements here.
List<PropositionWrapper> wrappers = new ArrayList<PropositionWrapper>();
wrappers.add(wrap(this.fetchSystemProposition("test-key")));
return wrappers;
}
// Returns a single system proposition by key (stubbed, see fetchSystemProposition).
@GET
@Path("/system/{propKey}")
@Produces(MediaType.APPLICATION_JSON)
public PropositionWrapper getSystemProposition(
@PathParam("propKey") String inKey) {
return wrap(fetchSystemProposition(inKey));
}
// Returns all propositions belonging to the given user.
// NOTE(review): assumes userDao.retrieve(inUserId) never returns null — verify.
@GET
@Path("/user/list/{userId}")
@Produces(MediaType.APPLICATION_JSON)
public List<Proposition> getUserPropositions(
@PathParam("userId") Long inUserId) {
return this.userDao.retrieve(inUserId).getPropositions();
}
// Returns one proposition by database id; system-flagged propositions are
// resolved through the (stubbed) system fetch. Returns null when not found.
@GET
@Path("/user/get/{propId}")
@Produces(MediaType.APPLICATION_JSON)
public PropositionWrapper getUserProposition(
@PathParam("propId") Long inPropositionId) {
PropositionWrapper wrapper = null;
Proposition proposition = this.propositionDao.retrieve(inPropositionId);
if (proposition != null) {
if (proposition.isInSystem()) {
wrapper =
wrap(this.fetchSystemProposition(proposition.getKey()));
} else {
wrapper = wrap(proposition);
}
}
return wrapper;
}
// Updates an existing proposition from the wire-format wrapper, stamping
// the modification time.
@PUT
@Path("/user/update")
@Consumes(MediaType.APPLICATION_JSON)
public void updateProposition(PropositionWrapper inWrapper) {
Proposition proposition = unwrap(inWrapper);
proposition.setLastModified(new Date());
this.propositionDao.update(proposition);
}
// Creates a new proposition from the wire-format wrapper, stamping both the
// creation and modification times.
@POST
@Path("/user/create")
@Consumes(MediaType.APPLICATION_JSON)
public void insertProposition(PropositionWrapper inWrapper) {
Proposition proposition = unwrap(inWrapper);
Date now = new Date();
proposition.setCreated(now);
proposition.setLastModified(now);
this.propositionDao.create(proposition);
}
// Stub: builds a placeholder proposition entity for a system key.
private Proposition fetchSystemProposition(String inId) {
// TODO: Call the ETL REST endpoint to get the proposition.
Proposition proposition = new Proposition();
proposition.setAbbrevDisplayName(inId);
return proposition;
}
// Converts a wrapper (wire format) into a Proposition entity. Loads the
// existing entity when the wrapper carries an id, otherwise creates a new
// one; resolves user targets by id and system targets by key (creating a
// local stub row for unknown system keys).
// NOTE(review): depending on the wrapper type only one of
// abstractedFrom/inverseIsA is (re)assigned; on update the other relation
// keeps whatever it previously contained — confirm that is intended.
private Proposition unwrap(PropositionWrapper inWrapper) {
Proposition proposition;
List<Proposition> targets = new ArrayList<Proposition>();
if (inWrapper.getId() != null) {
Long id = Long.valueOf(inWrapper.getId());
proposition = this.propositionDao.retrieve(id);
} else {
proposition = new Proposition();
}
for (Long id : inWrapper.getUserTargets()) {
targets.add(this.propositionDao.retrieve(id));
}
for (String key : inWrapper.getSystemTargets()) {
Proposition p = this.propositionDao.getByKey(key);
if (p == null) {
// First reference to this system proposition: create a stub row.
p = new Proposition();
p.setKey(key);
p.setInSystem(true);
}
targets.add(p);
}
if (inWrapper.getType() == PropositionWrapper.Type.AND) {
proposition.setAbstractedFrom(targets);
} else{
proposition.setInverseIsA(targets);
}
proposition.setKey(inWrapper.getKey());
proposition.setAbbrevDisplayName(inWrapper.getAbbrevDisplayName());
proposition.setDisplayName(inWrapper.getDisplayName());
proposition.setInSystem(inWrapper.isInSystem());
proposition.setUser(this.userDao.retrieve(inWrapper.getUserId()));
return proposition;
}
// Classifies a proposition: AND when it has a temporal pattern or any
// abstracted-from targets, OR otherwise.
private PropositionWrapper.Type getType(Proposition inProposition) {
if ((inProposition.getTemporalPattern() != null) || (
(inProposition.getAbstractedFrom() != null) && (!inProposition
.getAbstractedFrom().isEmpty()))) {
return PropositionWrapper.Type.AND;
} else {
return PropositionWrapper.Type.OR;
}
}
// Returns the target list matching the given type; never returns null.
private List<Proposition> getTargets(Proposition inProposition,
PropositionWrapper.Type inType) {
List<Proposition> propositions;
List<Proposition> targets;
if (inType == PropositionWrapper.Type.AND) {
targets = inProposition.getAbstractedFrom();
} else {
targets = inProposition.getInverseIsA();
}
if (targets == null) {
propositions = new ArrayList<Proposition>();
} else {
propositions = targets;
}
return propositions;
}
// Converts a Proposition entity into its wire-format wrapper, splitting its
// targets into system keys and user-proposition ids.
private PropositionWrapper wrap(Proposition inProposition) {
PropositionWrapper wrapper = new PropositionWrapper();
PropositionWrapper.Type type = this.getType(inProposition);
List<String> systemTargets = new ArrayList<String>();
List<Long> userTargets = new ArrayList<Long>();
for (Proposition target : this.getTargets(inProposition, type)) {
if (target.isInSystem()) {
systemTargets.add(target.getKey());
} else {
userTargets.add(target.getId());
}
}
if (inProposition.getId() != null) {
wrapper.setId(String.valueOf(inProposition.getId()));
}
if (inProposition.getUser() != null) {
wrapper.setUserId(inProposition.getUser().getId());
}
wrapper.setInSystem(inProposition.isInSystem());
wrapper.setType(type);
wrapper.setAbbrevDisplayName(inProposition.getAbbrevDisplayName());
wrapper.setDisplayName(inProposition.getDisplayName());
wrapper.setKey(inProposition.getKey());
wrapper.setSystemTargets(systemTargets);
wrapper.setUserTargets(userTargets);
return wrapper;
}
}
package org.eclipse.hawkbit.ddi.client;
import java.io.IOException;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.eclipse.hawkbit.ddi.client.resource.RootControllerResourceClient;
import org.eclipse.hawkbit.ddi.client.strategy.PersistenceStrategy;
import org.eclipse.hawkbit.ddi.json.model.DdiActionFeedback;
import org.eclipse.hawkbit.ddi.json.model.DdiArtifact;
import org.eclipse.hawkbit.ddi.json.model.DdiChunk;
import org.eclipse.hawkbit.ddi.json.model.DdiControllerBase;
import org.eclipse.hawkbit.ddi.json.model.DdiDeploymentBase;
import org.eclipse.hawkbit.ddi.json.model.DdiResult;
import org.eclipse.hawkbit.ddi.json.model.DdiResult.FinalResult;
import org.eclipse.hawkbit.ddi.json.model.DdiStatus;
import org.eclipse.hawkbit.ddi.json.model.DdiStatus.ExecutionStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.hateoas.Link;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
/**
* DDI example client based on defualt DDI feign client.
*/
public class DdiExampleClient implements Runnable {
private static final Logger LOGGER = LoggerFactory.getLogger(DdiExampleClient.class);
private final String controllerId;
private Long actionIdOfLastInstalltion;
private final RootControllerResourceClient rootControllerResourceClient;
private final PersistenceStrategy persistenceStrategy;
private DdiClientStatus clientStatus;
private FinalResult finalReusltOfCurrentUpdate;
/**
* Constructor for the DDI example client.
*
* @param baseUrl
* the base url of the hawkBit server
* @param controllerId
* the controller id that will be simulated
* @param tenant
* the tenant
* @param persistenceStrategy
* the persistence strategy for downloading artifacts
*/
public DdiExampleClient(final String baseUrl, final String controllerId, final String tenant,
final PersistenceStrategy persistenceStrategy) {
this.controllerId = controllerId;
this.rootControllerResourceClient = new DdiDefaultFeignClient(baseUrl, tenant)
.getRootControllerResourceClient();
this.actionIdOfLastInstalltion = null;
this.persistenceStrategy = persistenceStrategy;
this.clientStatus = DdiClientStatus.DOWN;
}
@Override
public void run() {
clientStatus = DdiClientStatus.UP;
ResponseEntity<DdiControllerBase> response;
while (clientStatus == DdiClientStatus.UP) {
LOGGER.info(" Controller {} polling from hawkBit server", controllerId);
response = rootControllerResourceClient.getControllerBase(controllerId);
final String pollingTimeFormReponse = response.getBody().getConfig().getPolling().getSleep();
final LocalTime localtime = LocalTime.parse(pollingTimeFormReponse);
final long pollingIntervalInMillis = localtime.toNanoOfDay();
final Link controllerDeploymentBaseLink = response.getBody().getLink("deploymentBase");
if (controllerDeploymentBaseLink != null) {
final Long actionId = getActionIdOutOfLink(controllerDeploymentBaseLink);
final Integer resource = getResourceOutOfLink(controllerDeploymentBaseLink);
if (actionId != actionIdOfLastInstalltion) {
finalReusltOfCurrentUpdate = FinalResult.NONE;
startDownload(actionId, resource);
finishUpdateProcess(actionId);
actionIdOfLastInstalltion = actionId;
}
}
try {
Thread.sleep(pollingIntervalInMillis);
} catch (final InterruptedException e) {
LOGGER.error("Error during sleep");
}
}
}
/**
* Stop the DDI example client
*/
public void stop() {
clientStatus = DdiClientStatus.DOWN;
}
private void startDownload(final Long actionId, final Integer resource) {
final ResponseEntity<DdiDeploymentBase> respone = rootControllerResourceClient
.getControllerBasedeploymentAction(controllerId, Long.valueOf(actionId), Integer.valueOf(resource));
final DdiDeploymentBase ddiDeploymentBase = respone.getBody();
final List<DdiChunk> chunks = ddiDeploymentBase.getDeployment().getChunks();
for (final DdiChunk chunk : chunks) {
final List<DdiArtifact> artifactList = chunk.getArtifacts();
final Link downloadLink = ddiDeploymentBase.getDeployment().getChunks().get(0).getArtifacts().get(0)
.getLink("download-http");
final String[] downloadLinkSep = downloadLink.getHref().split(Pattern.quote("/"));
final Long softwareModuleId = Long.valueOf(downloadLinkSep[8]);
for (final DdiArtifact ddiArtifact : artifactList) {
if (finalReusltOfCurrentUpdate != FinalResult.FAILURE) {
downloadArtifact(actionId, softwareModuleId, ddiArtifact.getFilename());
}
}
}
}
private void downloadArtifact(final Long actionId, final Long softwareModuleId, final String artifact) {
sendFeedBackMessage(actionId, ExecutionStatus.PROCEEDING, FinalResult.NONE,
"Starting download of artifact " + artifact);
LOGGER.info("Starting download of artifact " + artifact);
final ResponseEntity<InputStream> responseDownloadArtifact = rootControllerResourceClient
.downloadArtifact(controllerId, softwareModuleId, artifact);
final HttpStatus statsuCode = responseDownloadArtifact.getStatusCode();
LOGGER.info("Finished download with stataus {}", statsuCode);
try {
persistenceStrategy.handleInputStream(responseDownloadArtifact.getBody(), artifact);
sendFeedBackMessage(actionId, ExecutionStatus.PROCEEDING, FinalResult.NONE,
"Downloaded artifact " + artifact);
} catch (final IOException e) {
sendFeedBackMessage(actionId, ExecutionStatus.PROCEEDING, FinalResult.NONE,
"Downloaded of artifact " + artifact + "failed");
finalReusltOfCurrentUpdate = FinalResult.FAILURE;
}
}
private void sendFeedBackMessage(final Long actionId, final ExecutionStatus executionStatus,
final FinalResult finalResult, final String message) {
final DdiResult result = new DdiResult(finalResult, null);
final List<String> details = new ArrayList<>();
details.add(message);
final DdiStatus ddiStatus = new DdiStatus(executionStatus, result, details);
final String time = String.valueOf(LocalDateTime.now());
final DdiActionFeedback feedback = new DdiActionFeedback(actionId, time, ddiStatus);
rootControllerResourceClient.postBasedeploymentActionFeedback(feedback, controllerId, actionId);
LOGGER.info("Sent feedback message to HaktBit");
}
private void finishUpdateProcess(final Long actionId) {
if (finalReusltOfCurrentUpdate == FinalResult.FAILURE) {
sendFeedBackMessage(actionId, ExecutionStatus.CLOSED, FinalResult.FAILURE, "Error during update process");
}
if (finalReusltOfCurrentUpdate == FinalResult.NONE) {
sendFeedBackMessage(actionId, ExecutionStatus.CLOSED, FinalResult.SUCESS,
"Simulated installation successful");
}
}
private Long getActionIdOutOfLink(final Link controllerDeploymentBaseLink) {
final String[] ending = splitControllerDeploymentBaseLinkInActionIdAndResource(controllerDeploymentBaseLink);
return Long.valueOf(ending[0]);
}
private Integer getResourceOutOfLink(final Link controllerDeploymentBaseLink) {
final String[] ending = splitControllerDeploymentBaseLinkInActionIdAndResource(controllerDeploymentBaseLink);
return Integer.valueOf(ending[1].substring(2));
}
private String[] splitControllerDeploymentBaseLinkInActionIdAndResource(final Link controllerDeploymentBaseLink) {
final String link = controllerDeploymentBaseLink.getHref();
final String[] segments = link.split(Pattern.quote("/"));
return segments[8].split(Pattern.quote("?"));
}
/**
* Enum for DDI running status.
*/
public enum DdiClientStatus {
UP, DOWN;
}
} |
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.*;
import java.util.*;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
public class SqlHandler {
private Connection con = null;
/**
 * Opens the shared JDBC connection used by all query methods.
 *
 * SECURITY NOTE(review): the database host, user and password are
 * hard-coded into the connection URL in source; they should be loaded from
 * configuration or the environment and kept out of version control.
 *
 * @throws SQLException if the connection cannot be established
 */
public SqlHandler () throws SQLException
{
//Class.forName("com.mysql.jdbc.Driver");
con = DriverManager.getConnection("jdbc:mysql://stusql.dcs.shef.ac.uk/team017?user=team017&password=33b55883");
}
/**
 * Looks up a treatment type by name.
 *
 * @param name the treatment type name to search for
 * @return the matching Treatment, or null if not found
 * @throws SQLException on database errors
 */
public Treatment getTreatmentType (String name) throws SQLException
{
    // FIX: "SELECT (type, cost)" is a row constructor in MySQL and fails;
    // the columns must be listed individually.
    String getData = "SELECT type, cost FROM treatmentTypes WHERE type = ?";
    // try-with-resources closes the statement/result set (the original leaked both).
    try (PreparedStatement statement = con.prepareStatement(getData)) {
        statement.setString(1, name);
        try (ResultSet res = statement.executeQuery()) {
            // FIX: getFetchSize() is only a driver fetch hint, not a row
            // count, and columns were read without first calling next().
            if (!res.next()) {
                return null;
            }
            return new Treatment(res.getString("type"), res.getDouble("cost"));
        }
    }
}
/**
 * Inserts the treatment type if it does not exist yet, otherwise updates
 * its cost (mirrors the insert-or-update pattern used by setAddress).
 *
 * @param t the treatment type to persist
 * @throws SQLException on database errors
 */
public void setTreatmentType (Treatment t) throws SQLException{
    // FIX: the original branched on t.getTreatmentType() == null, which
    // would INSERT a row with a NULL key; the intended check is whether the
    // type already exists in the database.
    if (getTreatmentType(t.getTreatmentType()) == null) {
        try (PreparedStatement statement =
                con.prepareStatement("INSERT INTO treatmentTypes (type, cost) VALUES (?,?)")) {
            statement.setString(1, t.getTreatmentType());
            statement.setDouble(2, t.getCost());
            statement.execute();
        }
    } else {
        // FIX: "SET (cost) = ?" is invalid MySQL syntax; use "SET cost = ?".
        try (PreparedStatement statement =
                con.prepareStatement("UPDATE treatmentTypes SET cost = ? WHERE type = ?")) {
            statement.setDouble(1, t.getCost());
            statement.setString(2, t.getTreatmentType());
            statement.execute();
        }
    }
}
/**
 * Looks up an address by its house number and post code.
 *
 * @param a the address whose number/postCode are used as the lookup key
 * @return the stored Address, or null if not found
 * @throws SQLException on database errors
 */
public Address getAddress(Address a) throws SQLException {
    String getData = "SELECT * FROM addresses WHERE number = ? AND postCode = ?";
    // try-with-resources closes statement and result set (the original
    // leaked both, plus an entire second COUNT(*) statement).
    try (PreparedStatement statement = con.prepareStatement(getData)) {
        statement.setString(1, a.getHouseNumber());
        statement.setString(2, a.getPostCode());
        try (ResultSet res = statement.executeQuery()) {
            // A separate COUNT(*) query is unnecessary: next() already tells
            // us whether a row exists.
            if (!res.next()) {
                return null;
            }
            return new Address(res.getString("number"), res.getString("street"), res.getString("district"), res.getString("city"), res.getString("postCode"));
        }
    }
}
/**
 * Loads every address stored in the database.
 *
 * @return all addresses (possibly an empty array, never null)
 * @throws SQLException on database errors
 */
public Address[] getAllAddresses() throws SQLException {
    // try-with-resources ensures statement and result set are closed even on
    // error (the original leaked both).
    try (PreparedStatement statement = con.prepareStatement("SELECT * FROM addresses");
         ResultSet rs = statement.executeQuery()) {
        ArrayList<Address> result = new ArrayList<Address>();
        while (rs.next()) {
            result.add(new Address(rs.getString("number"), rs.getString("street"), rs.getString("district"), rs.getString("city"), rs.getString("postCode")));
        }
        return result.toArray(new Address[result.size()]);
    }
}
/**
 * Inserts the address if it does not exist yet (keyed by number+postCode),
 * otherwise updates the stored row.
 *
 * @param a the address to persist
 * @throws SQLException on database errors
 */
public void setAddress (Address a) throws SQLException {
    if (getAddress(a) == null)
    {
        String add = "INSERT INTO addresses (number,street,district,city,postCode)"
            + " VALUES (?,?,?,?,?)";
        try (PreparedStatement statement = con.prepareStatement(add)) {
            statement.setString(1, a.getHouseNumber());
            statement.setString(2, a.getStreetName());
            statement.setString(3, a.getDistrict());
            statement.setString(4, a.getCity());
            statement.setString(5, a.getPostCode());
            statement.execute();
        }
    } else {
        // FIXES in the update branch: (1) the statement was prepared and
        // bound but never executed; (2) "UPDATE`addresses`" was missing the
        // space after the keyword; (3) the WHERE clause referenced a
        // non-existent "name" column instead of "number".
        String update = "UPDATE `addresses` SET number = ?, street = ?, district = ?, city = ?, postCode = ? WHERE number = ? AND postCode = ?";
        try (PreparedStatement statement = con.prepareStatement(update)) {
            statement.setString(1, a.getHouseNumber());
            statement.setString(2, a.getStreetName());
            statement.setString(3, a.getDistrict());
            statement.setString(4, a.getCity());
            statement.setString(5, a.getPostCode());
            statement.setString(6, a.getHouseNumber());
            statement.setString(7, a.getPostCode());
            statement.execute();
        }
    }
}
/**
 * Looks up an address by house number and post code given as strings.
 *
 * @param num the house number
 * @param postC the post code
 * @return the stored Address, or null if not found
 * @throws SQLException on database errors
 */
public Address getAddressNumPC(String num, String postC) throws SQLException{
    String getData = "SELECT * FROM addresses WHERE number = ? AND postCode = ?";
    // try-with-resources closes statement/result set; the separate COUNT(*)
    // query of the original is redundant — next() detects the empty case.
    try (PreparedStatement statement = con.prepareStatement(getData)) {
        statement.setString(1, num);
        statement.setString(2, postC);
        try (ResultSet res = statement.executeQuery()) {
            if (!res.next()) {
                return null;
            }
            return new Address(res.getString("number"), res.getString("street"), res.getString("district"), res.getString("city"), res.getString("postCode"));
        }
    }
}
/**
 * Inserts a new patient row, including the links to the patient's address
 * (number+postCode) and healthcare plan (by name).
 *
 * @param p the patient to insert
 * @throws SQLException on database errors
 */
public void addPatient (Patient p) throws SQLException {
    String add = "INSERT INTO patients (title,firstName,lastName,birthDate,phone,houseNumber,postCode,healthPlan)"
        + " VALUES (?,?,?,?,?,?,?,?)";
    // try-with-resources closes the statement (the original leaked it).
    try (PreparedStatement statement = con.prepareStatement(add)) {
        statement.setString(1, p.getTitle());
        statement.setString(2, p.getFirstName());
        statement.setString(3, p.getLastName());
        statement.setDate(4, p.getBirthDate());
        statement.setLong(5, p.getPhone());
        statement.setString(6, p.getAddress().getHouseNumber());
        statement.setString(7, p.getAddress().getPostCode());
        statement.setString(8, p.getHealthcarePlan().getName());
        statement.execute();
    }
}
/**
 * Loads a patient by primary key, resolving the linked healthcare plan and
 * address via their respective lookup methods.
 *
 * @param pId the patient id
 * @return the Patient, or null if no row matches
 * @throws SQLException on database errors
 */
public Patient getPatientById(int pId) throws SQLException {
    String data = "SELECT * FROM patients WHERE patientID = ?";
    // try-with-resources closes statement/result set; the original also ran
    // a redundant COUNT(*) query — next() already detects the empty case.
    try (PreparedStatement statement = con.prepareStatement(data)) {
        statement.setInt(1, pId);
        try (ResultSet res = statement.executeQuery()) {
            if (!res.next()) {
                return null;
            }
            return new Patient(res.getString("title"), res.getString("firstname"), res.getString("lastname"), res.getDate("birthDate"), res.getLong("phone"), getHealthcarePlan(res.getString("healthPlan")), getAddressNumPC(res.getString("houseNumber"), res.getString("postCode")), res.getInt("patientID"));
        }
    }
}
/*public getPatient (String patientName) throws SQLException {
PreparedStatement statement;
String getDate = "SElECT * FROM patient WHERE
}*/
// TODO: reject appointments that overlap an existing one for the same partner
/**
 * Inserts a new appointment row.
 *
 * @param a the appointment to insert
 * @throws SQLException on database errors
 */
public void addAppointment(Appointment a) throws SQLException {
    String add = "INSERT INTO appointments (patientID,date,startTime,endTime,partner,paid)"
        + " VALUES (?,?,?,?,?,?)";
    // try-with-resources closes the statement (the original leaked it).
    try (PreparedStatement statement = con.prepareStatement(add)) {
        statement.setInt(1, a.getPatient().getPatientID());
        statement.setDate(2, a.getDate());
        statement.setTime(3, a.getStartTime());
        statement.setTime(4, a.getEndTime());
        statement.setString(5, a.getPartner());
        statement.setBoolean(6, a.isPaid());
        statement.execute();
    }
}
/**
 * Deletes the appointment identified by its date, start time and partner.
 *
 * @param a the appointment to remove
 * @throws SQLException on database errors
 */
public void removeAppointment(Appointment a) throws SQLException{
    // FIX: "WHERE type = (?,?,?)" is invalid SQL (and "type" is not an
    // appointments column); match on the identifying columns instead.
    String removeApp = "DELETE FROM appointments WHERE date = ? AND startTime = ? AND partner = ?";
    try (PreparedStatement statement = con.prepareStatement(removeApp)) {
        statement.setDate(1, a.getDate());
        statement.setTime(2, a.getStartTime());
        statement.setString(3, a.getPartner());
        statement.execute();
    }
}
/**
 * Loads all appointments booked on the given day, including each
 * appointment's treatments.
 *
 * @param date the day to query
 * @return the appointments of that day (possibly empty, never null)
 * @throws SQLException on database errors
 */
public Appointment[] getAppointmentsByDay(java.sql.Date date) throws SQLException {
    String getAppointments = "SELECT * FROM appointments WHERE date = ?";
    // try-with-resources closes statement/result set; debug System.out
    // prints removed.
    try (PreparedStatement statement = con.prepareStatement(getAppointments)) {
        // NOTE(review): the incoming date is re-built via the deprecated
        // Date(year, month, day) constructor, apparently compensating for
        // callers that construct java.sql.Date with a raw year and 1-based
        // month — confirm against the callers before simplifying this.
        java.sql.Date formatDate = new java.sql.Date(date.getYear() - 1900, date.getMonth() - 1, date.getDate());
        statement.setDate(1, formatDate);
        try (ResultSet res = statement.executeQuery()) {
            ArrayList<Appointment> result = new ArrayList<Appointment>();
            while (res.next()) {
                result.add(new Appointment(getPatientById(res.getInt("patientID")), res.getDate("date"), res.getTime("startTime"), res.getTime("endTime"), res.getString("partner"), res.getBoolean("paid"), getTreatmentByTimeDatePartner(res.getTime("startTime"), res.getDate("date"), res.getString("partner"))));
            }
            return result.toArray(new Appointment[result.size()]);
        }
    }
}
/**
 * Loads the treatments attached to the appointment identified by start
 * time, date and partner.
 *
 * @return the treatments (possibly empty, never null)
 * @throws SQLException on database errors
 */
public ArrayList<Treatment> getTreatmentByTimeDatePartner(java.sql.Time time,java.sql.Date date, String partner) throws SQLException {
    String getTreatments = "SELECT * FROM treatments NATURAL JOIN treatmentTypes WHERE date = ? AND startTime = ? AND partner = ?";
    // try-with-resources closes statement and result set (the original leaked both).
    try (PreparedStatement statement = con.prepareStatement(getTreatments)) {
        statement.setDate(1, date);
        statement.setTime(2, time);
        statement.setString(3, partner);
        try (ResultSet res = statement.executeQuery()) {
            ArrayList<Treatment> result = new ArrayList<Treatment>();
            while (res.next()) {
                result.add(new Treatment(res.getString("type"), res.getDouble("cost")));
            }
            return result;
        }
    }
}
/**
 * Returns every appointment stored for the given patient.
 *
 * @param patientID database id of the patient
 * @return the patient's appointments, possibly empty, never null
 * @throws SQLException if the query fails
 */
public Appointment[] getAppointmentsByPatientID(int patientID) throws SQLException {
    String getAppointments = "SELECT * FROM appointments WHERE patientID = ?";
    PreparedStatement lookup = con.prepareStatement(getAppointments);
    lookup.setInt(1, patientID);
    ResultSet rows = lookup.executeQuery();
    ArrayList<Appointment> found = new ArrayList<Appointment>();
    while (rows.next()) {
        found.add(new Appointment(
                getPatientById(rows.getInt("patientID")),
                rows.getDate("date"),
                rows.getTime("startTime"),
                rows.getTime("endTime"),
                rows.getString("partner"),
                rows.getBoolean("paid"),
                getTreatmentByTimeDatePartner(rows.getTime("startTime"), rows.getDate("date"), rows.getString("partner"))));
    }
    return found.toArray(new Appointment[found.size()]);
}
/**
 * Loads every healthcare plan from the database.
 *
 * @return all stored plans, possibly empty, never null
 * @throws SQLException if the query fails
 */
public HealthcarePlan[] getAllHealthcarePlans() throws SQLException {
    PreparedStatement lookup = con.prepareStatement("SELECT * FROM healthcarePlans");
    ResultSet rows = lookup.executeQuery();
    ArrayList<HealthcarePlan> plans = new ArrayList<HealthcarePlan>();
    while (rows.next()) {
        plans.add(new HealthcarePlan(
                rows.getString("name"),
                rows.getInt("checkups"),
                rows.getInt("hygiene"),
                rows.getInt("repairs"),
                rows.getDouble("monthlyCost")));
    }
    return plans.toArray(new HealthcarePlan[plans.size()]);
}
/**
 * Fetches the healthcare plan with the given name.
 *
 * Improvements: the original issued a second COUNT(*) query purely to detect
 * the no-row case; ResultSet.first() already reports whether a row exists,
 * so a single query suffices. It also read monthlyCost with getFloat even
 * though the value is written with setDouble elsewhere in this class
 * (addHealthcarePlan); getDouble avoids the needless precision loss.
 *
 * @param planName name of the plan to look up
 * @return the plan, or null if no plan with that name exists
 * @throws SQLException if the query fails
 */
public HealthcarePlan getHealthcarePlan(String planName) throws SQLException{
    String getData = "SELECT name,checkups,hygiene,repairs,monthlyCost FROM healthcarePlans WHERE name = ? ";
    PreparedStatement statement = con.prepareStatement(getData);
    statement.setString(1, planName);
    ResultSet res = statement.executeQuery();
    if (!res.first()) {
        return null; // no such plan
    }
    return new HealthcarePlan(
            res.getString("name"),
            res.getInt("checkups"),
            res.getInt("hygiene"),
            res.getInt("repairs"),
            res.getDouble("monthlyCost"));
}
// getAppointmentsByPatientID (above) takes a patient id and returns that patient's appointments.
// TODO: add a method to change a patient's healthcare plan and update the
// number of checkups/hygiene visits/repairs already used.
/**
 * Stores the given healthcare plan: inserts it when no plan with that name
 * exists yet, otherwise overwrites the stored values (an upsert).
 *
 * @param hp the plan to insert or update
 * @throws SQLException if the insert/update fails
 */
public void addHealthcarePlan(HealthcarePlan hp) throws SQLException{
    boolean exists = getHealthcarePlan(hp.getName()) != null;
    String sql;
    if (!exists) {
        sql = "INSERT INTO healthcarePlans (name,checkups,hygiene,repairs,monthlyCost)"
                + "VALUES (?,?,?,?,?)";
    } else {
        sql = "UPDATE healthcarePlans SET name = ?, checkups = ?, hygiene = ?, repairs = ?, monthlyCost = ? WHERE name = ?";
    }
    PreparedStatement statement = con.prepareStatement(sql);
    // The first five bind positions are identical for both statements.
    statement.setString(1, hp.getName());
    statement.setInt(2, hp.getCheckups());
    statement.setInt(3, hp.getHygienes());
    statement.setInt(4, hp.getRepairs());
    statement.setDouble(5, hp.getMonthlyCost());
    if (exists) {
        statement.setString(6, hp.getName()); // WHERE name = ?
    }
    statement.execute();
}
/**
 * Closes the JDBC connection if one is open.
 *
 * Bug fix: the original called System.exit(0) when close() threw, killing
 * the entire JVM — with a *success* exit status — over a non-fatal cleanup
 * error. A failure to close is now reported and the application keeps
 * running; nothing the caller depends on is produced by this method.
 */
public void closeConnection ()
{
    if (con != null)
    {
        try {
            con.close();
            System.out.println("SQL database connection closed.");
        } catch (SQLException e) {
            System.out.println("Failed to close SQL database connection:");
            e.printStackTrace();
        }
    }
}
/**
 * Ad-hoc manual test harness: fetches the appointments for patient 7 and
 * prints the first two. (Stale commented-out experiments removed.)
 */
public static void main (String[]args){
    try {
        Appointment[] appointments = new SqlHandler().getAppointmentsByPatientID(7);
        System.out.println(appointments[0]);
        System.out.println(appointments[1]);
    } catch (SQLException ex) {
        ex.printStackTrace();
        System.out.println("error"+ ex);
    }
}
} |
import java.applet.Applet;
import java.awt.AlphaComposite;
import java.awt.Checkbox;
import java.awt.Color;
import java.awt.Component;
import java.awt.Cursor;
import java.awt.Event;
import java.awt.FileDialog;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Frame;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.MediaTracker;
import java.awt.RenderingHints;
import java.awt.TextField;
import java.awt.Toolkit;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringReader;
import java.net.Socket;
import java.net.URL;
import java.util.concurrent.ThreadLocalRandom;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.swing.JOptionPane;
public class StageMaker extends Applet implements Runnable {
// --- StageMaker state: checkpoint/tracker helpers and part-category ids ---
private final CheckPoints cp = new CheckPoints();
private final Trackers t = new Trackers();
private static final long serialVersionUID = 2444709970063151411L;
// Highest regular part index; also sizes the bco part-model array below.
private final int maxpart = 65;
/**
* just here to avoid crashes
*/
private final int bumppart = maxpart + 1;
// Part-category ids used by the part-selection UI (values are ordinal tabs).
private final static byte PART_ROADS = 0;
private final static byte PART_RAMPS = 1;
private final static byte PART_OBSTACLES = 2;
private final static byte PART_CHECKPOINTS = 3;
private final static byte PART_FIXHOOPS = 4;
private final static byte PART_TREES = 5;
private final static byte PART_BUMP = 6;
private final static byte PART_CUSTOM = 7;
/**
* leave at false unless you really know what you're doing!!!
*/
private final boolean floats = false;
/*if (sptyp == 0) // PART ROADS
partroads();
if (sptyp == 1) // PART RAMPS
partramps();
if (sptyp == 2) // PART OBSTACLES
partobst();
if (sptyp == 5) // PART TREES
partrees();*/
// NOTE(review): purpose of addeda/adrot/apx/apy/arrcnt/arrng is not visible
// in this chunk; names suggest "added parts" bookkeeping — confirm against
// the methods that use them before documenting further.
private final String[][] addeda = new String[20][5000];
private int adrot = 0;
private int apx = 0;
private int apy = 0;
private int arrcnt = 0;
private boolean arrng = false;
/*
* "road", "froad", "twister2", "twister1", "turn", "offroad", "bumproad", "offturn",
"nroad", "nturn", "roblend", "noblend", "rnblend", "roadend", "offroadend", "hpground", "ramp30",
"cramp35", "dramp15", "dhilo15", "slide10", "takeoff", "sramp22", "offbump", "offramp", "sofframp",
"halfpipe", "spikes", "rail", "thewall", "checkpoint", "fixpoint", "offcheckpoint",
* */
// Per-part table of four integer offsets, indexed by part id (see the part
// name list in the comment above). NOTE(review): presumably attach-point
// coordinates used when snapping parts together — the rows for roads mirror
// (+2800/-2800 etc.) like road lengths, and all-zero rows correspond to
// parts that don't chain — confirm against the placement code.
private final int[][] atp = {
{
0, 2800, 0, -2800
}, {
0, 2800, 0, -2800
}, {
1520, 2830, -1520, -2830
}, {
-1520, 2830, 1520, -2830
}, {
0, -1750, 1750, 0
}, {
0, 2800, 0, -2800
}, {
0, 2800, 0, -2800
}, {
0, -1750, 1750, 0
}, {
0, 2800, 0, -2800
}, {
0, -1750, 1750, 0
}, {
0, 2800, 0, -2800
}, {
0, 2800, 0, -2800
}, {
0, 560, 0, -560
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
385, 980, 385, -980
}, {
0, 0, 0, -600
}, {
0, 0, 0, 0
}, {
0, 2164, 0, -2164
}, {
0, 2164, 0, -2164
}, {
0, 3309, 0, -1680
}, {
0, 1680, 0, -3309
}, {
350, 0, -350, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
1810, 980, 1810, -980
}, {
0, 0, 0, 0
}, {
0, 500, 0, -500
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 2800, 0, -2800
}, {
0, 2800, 0, -2800
}, {
0, 1680, 0, -3309
}, {
0, 2800, 0, -2800
}, {
0, 2800, 0, -2800
}, {
0, 2800, 0, -2800
}, {
700, 1400, 700, -1400
}, {
0, -1480, 0, -1480
}, {
0, 0, 0, 0
}, {
350, 0, -350, 0
}, {
0, 0, 0, 0
}, {
700, 0, -700, 0
}, {
0, 0, 0, 0
}, {
0, -2198, 0, 1482
}, {
0, -1319, 0, 1391
}, {
0, -1894, 0, 2271
}, {
0, -826, 0, 839
}, {
0, -1400, 0, 1400
}, {
0, -1400, 0, 1400
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}, {
0, 0, 0, 0
}
};
private int avon = 0;
// One model slot per part id, plus slack (bumppart = maxpart + 1 fits here).
private final ContO[] bco = new ContO[maxpart + 5];
// Stage boundary definition appended to saved stages (maxr/maxb/maxl/maxt walls).
private String bstage = "\r\nmaxr(11,28500,-5600)\r\nmaxb(9,-8000,-12300)\r\nmaxl(11,-14700,-5600)\r\nmaxt(9,44800,-12300)\r\n";
private final Image[] btgame = new Image[2];
// Removed unused code found by UCDetector
// int btn = 0;
// Removed unused code found by UCDetector
// int[] bw = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
// Removed unused code found by UCDetector
// int[] bx = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
// Removed unused code found by UCDetector
// int[] by = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
// NOTE(review): cfade/cgrnd/csky look like RGB triples (fade/ground/sky
// colors) and cldd a cloud definition — confirm against the render code.
private final int[] cfade = {
255, 220, 220
};
private final int[] cgrnd = {
205, 200, 200
};
private int chi = -1;
private final int[] cldd = {
210, 210, 210, 1, -1000
};
private int cntout = 0;
// All placed part instances of the stage being edited.
private final ContO[] co = new ContO[10000];
private final int[] csky = {
170, 220, 255
};
// User-facing description text shown for each selectable part, indexed by
// part id. These strings are displayed verbatim in the UI — do not edit
// casually (note several entries are intentionally duplicated for part
// variants, e.g. the checkpoint and tree descriptions).
private final String[] discp = {
"NormalRoad : Basic asphalt road.\nAttaches correctly to the following other parts :\n\n'NormalRoad Turn', 'NormalRoad End', 'NormalRoad TwistedLeft', 'NormalRoad TwistedRight', 'NormalRoad Edged',\n'NormalRoad-Raised Ramp', 'Normal-Off-Road Blend' and 'Halfpipe-Normal-Road Blend'\n\n",
"NormalRoad Edged : Asphalt road with edged side blocks (a destructive road).\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad TwistedLeft', 'NormalRoad TwistedRight',\n'NormalRoad-Raised Ramp', 'Normal-Off-Road Blend' and 'Halfpipe-Normal-Road Blend'\n\n",
"NormalRoad TwistedRight : Asphalt road twisted towards the right.\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad Twistedleft',\n'NormalRoad-Raised Ramp', 'Normal-Off-Road Blend' and 'Halfpipe-Normal-Road Blend'\n\n",
"NormalRoad TwistedLeft : Asphalt road twisted towards the left.\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedRight',\n'NormalRoad-Raised Ramp', 'Normal-Off-Road Blend' and 'Halfpipe-Normal-Road Blend'\n\n",
"NormalRoad Turn : Asphalt corner road turn.\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft', 'NormalRoad TwistedRight',\n'NormalRoad-Raised Ramp', 'Normal-Off-Road Blend' and 'Halfpipe-Normal-Road Blend'\n\n",
"OffRoad : Basic sandy dirt-road.\nAttaches correctly to the following other parts :\n\n'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides Start', 'Off-Halfpipe-Road Blend'\nand 'Normal-Off-Road Blend'\n\n",
"OffRoad BumpyGreen : Dirt-road with bumpy greenery in the middle.\nAttaches correctly to the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad-BumpySides Start', 'Off-Halfpipe-Road Blend'\nand 'Normal-Off-Road Blend'\n\n",
"OffRoad Turn : Dirt-road corner turn.\nAttaches correctly to the following other parts :\n\n'OffRoad', 'OffRoad End', 'OffRoad BumpyGreen', ' OffRoad-BumpySides Start', 'Off-Halfpipe-Road Blend'\nand 'Normal-Off-Road Blend'\n\n",
"HalfpipeRoad : Basic road for the half-pipe ramp.\nAttaches correctly to the following other parts :\n\n'Off-Halfpipe-Road Blend', 'HalfpipeRoad', 'HalfpipeRoad Turn', 'HalfpipeRoad-Ramp Filler'\nand 'Halfpipe-Normal-Road Blend'\n\n",
"HalfpipeRoad Turn : Half-pipe corner road turn.\nAttaches correctly to the following other parts :\n\n'HalfpipeRoad', 'Off-Halfpipe-Road Blend', 'HalfpipeRoad' and 'Halfpipe-Normal-Road Blend'\n\n",
"Normal-Off-Road Blend : Road blend between the normal asphalt road and the dirt-road.\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft',\n'NormalRoad TwistedRight', 'NormalRoad-Raised Ramp', 'Halfpipe-Normal-Road Blend' 'OffRoad', 'OffRoad Turn',\n'OffRoad End', 'OffRoad BumpyGreen', ' OffRoad-BumpySides Start' and 'Off-Halfpipe-Road Blend'\n\n",
"Off-Halfpipe-Road Blend : Road blend between the dirt-road and the half-pipe road.\nAttaches correctly to the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides Start',\n'HalfpipeRoad', 'HalfpipeRoad Turn', 'Halfpipe-Normal-Road Blend' and 'Normal-Off-Road Blend'\n\n",
"Halfpipe-Normal-Road Blend : Road blend between the normal asphalt road and the half-pipe road.\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft',\n'NormalRoad TwistedRight', 'NormalRoad-Raised Ramp', 'HalfpipeRoad', 'Off-Halfpipe-Road Blend', 'HalfpipeRoad'\nand 'Off-Halfpipe-Road Blend'\n\n",
"NormalRoad End : The end part of the normal asphalt road.\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad Edged', 'NormalRoad TwistedLeft', 'NormalRoad TwistedRight',\n'NormalRoad-Raised Ramp', 'Normal-Off-Road Blend' and 'Halfpipe-Normal-Road Blend'\n\n",
"OffRoad End : The end part of the dirt-road.\nAttaches correctly to the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad BumpyGreen', ' OffRoad-BumpySides Start', 'Off-Halfpipe-Road Blend'\nand 'Normal-Off-Road Blend'\n\n",
"HalfpipeRoad-Ramp Filler : A part that gets placed between the half-pipe road and the half-pipe ramp to extend the distance in between.\nAttaches correctly to the following other parts :\n\n'HalfpipeRoad' and 'Halfpipe'\n\n",
"Basic Ramp : Basic 30 degree asphalt ramp.\nAttaches correctly over and to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Crash Ramp : A 35 degree ramp with big side blocks for crashing into.\nAttaches correctly over and to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Two-Way Ramp : Two way 15 degree inclined ramp.\nAttaches correctly over and to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Two-Way High-Low Ramp : Two way 15 degree inclined ramp, with peeked side for an optional higher car jump.\nAttaches correctly over and to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Landing Ramp : A ramp that is both a landing inclination and an obstacle as well, it is usually placed just after another normal ramp.\nAttaches correctly over and to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Big-Takeoff Ramp: A big takeoff ramp for getting huge heights with the cars.\nAttaches correctly over and to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Small Ramp : A small ramp that can be placed on either side of the road.\nAttaches correctly over and to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Offroad Bump Ramp : A small bump ramp that is to be placed over the off-road dirt tracks.\nAttaches correctly over and to the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides Start'\nand 'OffRoad-BumpySides'\n\n",
"Offroad Big Ramp : The big off-road dirt mountain like ramp!\nAttaches correctly over and to the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides Start'\nand 'OffRoad-BumpySides'\n\n",
"Offroad Ramp : Normal sized off-road dirt track ramp!\nAttaches correctly over and to the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides Start'\nand 'OffRoad-BumpySides'\n\n",
"Halfpipe : The Half-pipe ramp, two of these ramps opposite each other create a half-pipe for the cars!\nAttaches correctly over and to the following other parts :\n\n'HalfpipeRoad', 'HalfpipeRoad Turn' and 'HalfpipeRoad-Ramp Filler'\n\n",
"Spiky Pillars : An obstacle that is usually placed after a ramp for the cars to crash onto if they did not jump high or far enough!\nAttaches correctly over following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Rail Doorway : A rail doorway that works as an obstacle for cars flying above it or cars driving through it!\nAttaches correctly over following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"The Wall",
"Checkpoint : The checkpoint part that ultimately decides how you stage is raced, place carefully with thought.\n(Any stage must have at least two checkpoints to work).\nMounts correctly over the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft',\n'NormalRoad TwistedRight', 'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen',\n'OffRoad-BumpySides Start', 'OffRoad-BumpySides', 'Rollercoaster Start/End' and 'Rollercoaster Road 2,3,4 and 5'\n\n",
"Fixing Hoop : The fixing hoop that fixes a car when it flies through it! You can add a max of 5 fixing hoops per stage.\nPlace it anywhere in the stage at an height your choose, the only important thing is that it needs to be reachable by the cars.",
"Checkpoint : The checkpoint part that ultimately decides how you stage is raced, place carefully with thought.\n(Any stage must have at least two checkpoints to work).\nMounts correctly over the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft',\n'NormalRoad TwistedRight', 'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen',\n'OffRoad-BumpySides Start', 'OffRoad-BumpySides', 'Rollercoaster Start/End' and 'Rollercoaster Road 2,3,4 and 5'\n\n",
"OffRoad BumpySides : Off-road dirt track with bumpy sandbar sides.\nAttaches correctly to the following other parts :\n\n'OffRoad-BumpySides Start'\n\n",
"OffRoad-BumpySides Start: The start of the off-road dirt track with bumpy sandbar sides.\nAttaches correctly to the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides',\n'Off-Halfpipe-Road Blend' and 'Normal-Off-Road Blend'\n\n",
"NormalRoad-Raised Ramp: The start of the raised above the ground road (NormalRoad Raised).\nAttaches correctly to the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft',\n'NormalRoad TwistedRight' and 'NormalRoad Raised'\n\n",
"NormalRoad Raised : Normal road raised above the ground, cars must avoid falling off it when driving on it.\nAttaches correctly to the following other parts :\n\n'NormalRoad-Raised Ramp'\n\n",
"The Start1", "The Start2",
"Tunnel Side Ramp: A ramp that can be used to create a tunnel like road with an open top or can be used as a wall ramp!\nAttaches correctly over only the 'NormalRoad' part.",
"Launch Pad Ramp: A ramp that launches your car fully upwards like a rocket, it also has sides to lock any car climbing it!\nAttaches correctly over following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"The Net: An obstacle part that is to be placed in the center of the road right after a ramp, the idea is that the\ncars jumping the ramp should try to go over it or through it without getting caught crashing (without getting\ncaught in it, getting caught in the net!).\nAttaches correctly over following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Speed Ramp: A ramp that is designed to have the perfect angle to catapult your car the furthest when doing forward loops, it is half the roads width.\nAttaches correctly over following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Offroad Hill Ramp: An offroad hill ramp that has two different inclines from the front and back to jump.\nAttaches correctly over the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides Start'\nand 'OffRoad-BumpySides'\n\n",
"Bump Slide: A small bump obstacle that is to be placed on the sides of the road or in the center.\nAttaches correctly over the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft'\nand 'NormalRoad TwistedRight'\n\n",
"Offroad Big Hill Ramp: An offroad big hill ramp that has two different inclines from the front and back to jump.\nAttaches correctly over the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen', 'OffRoad-BumpySides Start'\nand 'OffRoad-BumpySides'\n\n",
"Rollercoaster Start/End: The ramp that starts the Rollercoaster Road and ends it.\nAttaches correctly over and to following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft',\n 'NormalRoad TwistedRight' and 'Rollercoaster Start/End'\n\n",
"Rollercoaster Road1\nAttaches correctly to only 'Rollercoaster Start/End', 'Rollercoaster Road2' and itself.\n\n",
"Rollercoaster Road3\nAttaches correctly to only 'Rollercoaster Road2', 'Rollercoaster Road4' and itself.\n\n",
"Rollercoaster Road4\nAttaches correctly to only 'Rollercoaster Road3', 'Rollercoaster Road5' and itself.\n\n",
"Rollercoaster Road2\nAttaches correctly to only 'Rollercoaster Road1', 'Rollercoaster Road3' and itself.\n\n",
"Rollercoaster Road5\nAttaches correctly to only 'Rollercoaster Road4' and itself.\n\n",
"Offroad Dirt-Pile: A dirt pile obstacle that is to be placed anywhere in the middle of the road.\nAttaches correctly over the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad-BumpySides Start' and 'OffRoad-BumpySides'\n\n",
"Offroad Dirt-Pile: A dirt pile obstacle that is to be placed anywhere in the middle of the road.\nAttaches correctly over the following other parts :\n\n'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad-BumpySides Start' and 'OffRoad-BumpySides'\n\n",
"Checkpoint : The checkpoint part that ultimately decides how you stage is raced, place carefully with thought.\n(Any stage must have at least two checkpoints to work).\nMounts correctly over the following other parts :\n\n'NormalRoad', 'NormalRoad Turn', 'NormalRoad End', 'NormalRoad Edged', 'NormalRoad TwistedLeft',\n'NormalRoad TwistedRight', 'OffRoad', 'OffRoad Turn', 'OffRoad End', 'OffRoad BumpyGreen',\n'OffRoad-BumpySides Start', 'OffRoad-BumpySides', 'Rollercoaster Start/End' and 'Rollercoaster Road 2,3,4 and 5'\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Trees/Cactus are decorative stage parts that should be placed outside the race track on the ground and NEVER on any road part or ramp!\nTrees/Cactus are not to be used as obstacles of the race course!\nThey are to be used as out of path ground decoration only.\n\n",
"Ground Piles are to be paced outside the race track on the ground and NEVER on any road part or ramp!\nThey are to be used as ground decoration and out of race course obstacles (ground obstacles)!\n\n"
};
// --- Editor/UI state: key flags, camera position, widgets, undo buffer ---
private boolean down = false;
private int dtab = 0;
private int dtabed = -1;
private boolean epart = false;
private int errd = 0;
// User-facing error texts, indexed by error code (errd).
// NOTE(review): "exerted" in entry 0 is likely a typo for "exhausted" —
// user-facing runtime text, left untouched here.
private final String[] errlo = {
"The maximum allocated memory for the stage's part's details has been exerted.\nPlease decrease the amount of parts in the stage that have more details then average.",
"The maximum amount of road points allowed in the track has been exceeded.\nPlease remove some of the road parts that are in the circler path of the track (the parts that are between the checkpoints).\nOr try to remove some of the extra checkpoints in the track as well.",
"The maximum allowed area for a track (the area in between its walls) has been exceeded.\nPlease try to place parts only inside the current allowed area, inside the area between the current maximum wall placements.",
"The maximum number of parts allowed per stage has been exceeded.\nPlease remove some of the already extra parts placed in order to make space.",
"The maximum number of Fixing Hoops allowed per stage is 5!\nPlease remove the extra Fixing Hoops from your stage to have only 5 main ones left.",
"Unknown Error, please make sure the stage you are handling is saved correctly.\nPlease go to the 'Build' tab and press 'Save & Preview'.",
"There needs to be at least 2 checkpoints in the Stage in order for the game to work.\nPlease go to the 'Build' tab and select 'Checkpoint' in the Part Selection menu to add more cp.",
"The name of the stage is too long!\nPlease go to the 'Stage' tab, click 'Rename Stage' and give your stage a shorter name."
};
private int esp = -1;
private boolean exwist = false;
private int fgen = 0;
private final TextField fixh = new TextField("2000", 5);
private int flyh = 0;
private boolean focuson = true;
private final int[] fogn = {
60, 0
};
private FontMetrics ftm;
private int hf = 2000;
private int hi = -1;
// NOTE(review): three hue/saturation/brightness triples — presumably color
// pickers; confirm against Color.getHSBColor usage elsewhere.
private final float[][] hsb = {
{
0.5F, 0.875F, 0.5F
}, {
0.5F, 0.875F, 0.5F
}, {
0.5F, 0.875F, 0.5F
}
};
private boolean left = false;
private int logged = 0;
private Image logo;
private int lsp = -1;
private String ltrackname = "";
private int lxm = 0;
// Removed unused code found by UCDetector
// private int lym = 0;
private final Medium m = new Medium();
private final String[] maker = new String[20];
private final TextField mgen = new TextField("", 10);
private boolean mousdr = false;
private int mouseon = -1;
// Mouse-button state flag: 1 while pressed, -1 on release (consumed to 0).
private int mousePressed = 0;
private final String[] mystages = new String[20];
private final int[] nad = new int[20];
private final Smenu nlaps = new Smenu(40);
private int nms = 0;
private int nob = 0;
private int nundo = 0;
// Part ids valid as mounting bases for off-road-style checkpoints.
private final int[] ocheckp = {
5, 6, 7, 11, 14, 33, 34, 38
};
private Image offImage;
private boolean onbtgame = false;
// Removed unused code found by UCDetector
private boolean onfly = false;
private boolean onoff = false;
private int origfade = 5000;
private boolean overcan = false;
private final Smenu part = new Smenu(500);
// Removed unused code found by UCDetector
// boolean[] pessd = { false, false, false, false, false, false, false, false, false, false, false, false, false,
// false, false, false, false, false, false, false, false, false, false, false };
private final Checkbox pfog = new Checkbox("Linked Blend");
private boolean pgen = false;
private float phd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
private boolean preop = false;
private final Smenu ptyp = new Smenu(40);
private final int[] pubt = new int[20];
private final Smenu pubtyp = new Smenu(40);
private float pwd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
// Part ids valid as mounting bases for normal-road-style checkpoints.
private final int[] rcheckp = {
0, 1, 2, 3, 4, 12, 13, 37
};
private Graphics2D rd;
private boolean right = false;
private int rot = 0;
private final Image[] sd = new Image[2];
private int seq = 0;
private boolean seqn = false;
private boolean setcur = false;
private int sfase = 0;
private final Image[] sl = new Image[2];
private final Smenu slstage = new Smenu(2000);
private final int[] snap = {
50, 50, 50
};
private int selectedPart = 0;
private int selectedMenuPart = 0;
private int selectedPartType = 0;
private final Image[] sr = new Image[2];
private final TextField srch = new TextField("", 38);
private String sstage = "";
private String stagename = "";
private final Smenu strtyp = new Smenu(40);
private final Image[] su = new Image[2];
private String suser = "Horaks";
// Camera position (sx/sy/sz) for the editor view.
private int sx = 0;
private int sy = -10000;
private int sz = 1500;
private int tab = 0;
private int tabed = -1;
private final int[] texture = {
0, 0, 0, 10
};
private Thread thredo;
private final TextField tnick = new TextField("", 15);
private final TextField tpass = new TextField("", 15);
private RadicalMod track = new RadicalMod();
private String trackname = "";
private final Smenu tracks = new Smenu(2000);
private int tracksize = 111;
private int trackvol = 200;
// Default stage header (sky/cloud/fog/ground settings) with a randomized
// mountains seed, prepended to newly created stages.
private String tstage = ""
+ "snap(0,0,0)\r\nsky(191,215,255)\r\nclouds(255,255,255,5,-1000)\r\nfog(195,207,230)\r\nground(192,194,202)\r\ntexture(0,0,0,50)\r\nfadefrom(5000)\r\ndensity(5)\n\rmountains("
+ (int) (ThreadLocalRandom.current().nextDouble() * 100000.0) + ")\r\nnlaps(5)\r\n\r\n";
private String ttstage = "";
// Undo ring: nundo indexes into undos snapshots of the stage text.
private final String[] undos = new String[5000];
private boolean up = false;
private int vx = 0;
private int vxz = 0;
private int vy = 0;
private int vz = 0;
private final Smenu witho = new Smenu(40);
private int xm = 0;
private int xnob = 0;
private int ym = 0;
private final Image[] zi = new Image[2];
private final Image[] zo = new Image[2];
private boolean zoomi = false;
private boolean zoomo = false;
private boolean button(final String string, final int i, final int i381, final int i382, final boolean bool) {
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
final int i383 = ftm.stringWidth(string);
boolean bool384 = false;
boolean bool385 = false;
if (string.equals(" Cancel ") && epart && Math.abs(xm - i) < i383 / 2 + 12 && Math.abs(ym - i381 + 5) < 10) {
overcan = true;
}
if (Math.abs(xm - i) < i383 / 2 + 12 && Math.abs(ym - i381 + 5) < 10 && mousePressed == 1) {
bool384 = true;
} else {
bool384 = false;
}
if (Math.abs(xm - i) < i383 / 2 + 12 && Math.abs(ym - i381 + 5) < 10 && mousePressed == -1) {
mousePressed = 0;
bool385 = true;
}
boolean bool386 = false;
if (bool) {
if (tab == 0) {
rd.setColor(new Color(207, 207, 207));
}
if (tab == 1) {
rd.setColor(new Color(200, 200, 200));
}
if (tab == 2) {
rd.setColor(new Color(170, 170, 170));
}
if (tab != 3) {
rd.drawRect(i - i383 / 2 - 15, i381 - (22 - i382), i383 + 29, 34 - i382 * 2);
if (i382 == 2 && tab == 1) {
rd.setColor(new Color(220, 220, 220));
rd.fillRect(i - i383 / 2 - 15, i381 - (22 - i382), i383 + 29, 34 - i382 * 2);
}
} else {
bool386 = true;
}
}
if (!bool384) {
rd.setColor(new Color(220, 220, 220));
if (bool386) {
rd.setColor(new Color(230, 230, 230));
}
rd.fillRect(i - i383 / 2 - 10, i381 - (17 - i382), i383 + 20, 25 - i382 * 2);
rd.setColor(new Color(240, 240, 240));
if (bool386) {
rd.setColor(new Color(255, 255, 255));
}
rd.drawLine(i - i383 / 2 - 10, i381 - (17 - i382), i + i383 / 2 + 10, i381 - (17 - i382));
rd.drawLine(i - i383 / 2 - 10, i381 - (18 - i382), i + i383 / 2 + 10, i381 - (18 - i382));
rd.setColor(new Color(240, 240, 240));
rd.drawLine(i - i383 / 2 - 9, i381 - (19 - i382), i + i383 / 2 + 9, i381 - (19 - i382));
rd.setColor(new Color(200, 200, 200));
if (bool386) {
rd.setColor(new Color(192, 192, 192));
}
rd.drawLine(i + i383 / 2 + 10, i381 - (17 - i382), i + i383 / 2 + 10, i381 + 7 - i382);
rd.drawLine(i + i383 / 2 + 11, i381 - (17 - i382), i + i383 / 2 + 11, i381 + 7 - i382);
rd.setColor(new Color(200, 200, 200));
if (bool386) {
rd.setColor(new Color(192, 192, 192));
}
rd.drawLine(i + i383 / 2 + 12, i381 - (16 - i382), i + i383 / 2 + 12, i381 + 6 - i382);
rd.drawLine(i - i383 / 2 - 10, i381 + 7 - i382, i + i383 / 2 + 10, i381 + 7 - i382);
rd.drawLine(i - i383 / 2 - 10, i381 + 8 - i382, i + i383 / 2 + 10, i381 + 8 - i382);
rd.setColor(new Color(200, 200, 200));
rd.drawLine(i - i383 / 2 - 9, i381 + 9 - i382, i + i383 / 2 + 9, i381 + 9 - i382);
rd.setColor(new Color(240, 240, 240));
if (bool386) {
rd.setColor(new Color(255, 255, 255));
}
rd.drawLine(i - i383 / 2 - 10, i381 - (17 - i382), i - i383 / 2 - 10, i381 + 7 - i382);
rd.drawLine(i - i383 / 2 - 11, i381 - (17 - i382), i - i383 / 2 - 11, i381 + 7 - i382);
rd.setColor(new Color(240, 240, 240));
rd.drawLine(i - i383 / 2 - 12, i381 - (16 - i382), i - i383 / 2 - 12, i381 + 6 - i382);
rd.setColor(new Color(0, 0, 0));
if (string.equals(" Keyboard Controls ")) {
rd.setColor(new Color(100, 100, 100));
}
rd.drawString(string, i - i383 / 2, i381);
} else {
rd.setColor(new Color(220, 220, 220));
rd.fillRect(i - i383 / 2 - 10, i381 - (17 - i382), i383 + 20, 25 - i382 * 2);
rd.setColor(new Color(192, 192, 192));
rd.drawLine(i - i383 / 2 - 10, i381 - (17 - i382), i + i383 / 2 + 10, i381 - (17 - i382));
rd.drawLine(i - i383 / 2 - 10, i381 - (18 - i382), i + i383 / 2 + 10, i381 - (18 - i382));
rd.drawLine(i - i383 / 2 - 9, i381 - (19 - i382), i + i383 / 2 + 9, i381 - (19 - i382));
rd.setColor(new Color(247, 247, 247));
rd.drawLine(i + i383 / 2 + 10, i381 - (17 - i382), i + i383 / 2 + 10, i381 + 7 - i382);
rd.drawLine(i + i383 / 2 + 11, i381 - (17 - i382), i + i383 / 2 + 11, i381 + 7 - i382);
rd.drawLine(i + i383 / 2 + 12, i381 - (16 - i382), i + i383 / 2 + 12, i381 + 6 - i382);
rd.drawLine(i - i383 / 2 - 10, i381 + 7 - i382, i + i383 / 2 + 10, i381 + 7 - i382);
rd.drawLine(i - i383 / 2 - 10, i381 + 8 - i382, i + i383 / 2 + 10, i381 + 8 - i382);
rd.drawLine(i - i383 / 2 - 9, i381 + 9 - i382, i + i383 / 2 + 9, i381 + 9 - i382);
rd.setColor(new Color(192, 192, 192));
rd.drawLine(i - i383 / 2 - 10, i381 - (17 - i382), i - i383 / 2 - 10, i381 + 7 - i382);
rd.drawLine(i - i383 / 2 - 11, i381 - (17 - i382), i - i383 / 2 - 11, i381 + 7 - i382);
rd.drawLine(i - i383 / 2 - 12, i381 - (16 - i382), i - i383 / 2 - 12, i381 + 6 - i382);
rd.setColor(new Color(0, 0, 0));
if (string.equals(" Keyboard Controls ")) {
rd.setColor(new Color(100, 100, 100));
}
rd.drawString(string, i - i383 / 2 + 1, i381 + 1);
}
return bool385;
}
private void copyesp(final boolean bool) {
selectedPart = co[esp].colok;
rot = co[esp].roofat;
if (selectedPart == 2) {
rot -= 30;
}
if (selectedPart == 3) {
rot += 30;
}
if (selectedPart == 15) {
rot += 90;
}
if (selectedPart == 20) {
rot += 180;
}
if (selectedPart == 26) {
rot -= 90;
}
if (selectedPart == 0) {
selectedPartType = 0;
selectedMenuPart = 0;
}
if (selectedPart == 4) {
selectedPartType = 0;
selectedMenuPart = 1;
}
if (selectedPart == 13) {
selectedPartType = 0;
selectedMenuPart = 2;
}
if (selectedPart == 3) {
selectedPartType = 0;
selectedMenuPart = 3;
}
if (selectedPart == 2) {
selectedPartType = 0;
selectedMenuPart = 4;
}
if (selectedPart == 1) {
selectedPartType = 0;
selectedMenuPart = 5;
}
if (selectedPart == 35) {
selectedPartType = 0;
selectedMenuPart = 6;
}
if (selectedPart == 36) {
selectedPartType = 0;
selectedMenuPart = 7;
}
if (selectedPart == 10) {
selectedPartType = 0;
selectedMenuPart = 8;
}
if (selectedPart == 5) {
selectedPartType = 0;
selectedMenuPart = 9;
}
if (selectedPart == 7) {
selectedPartType = 0;
selectedMenuPart = 10;
}
if (selectedPart == 14) {
selectedPartType = 0;
selectedMenuPart = 11;
}
if (selectedPart == 6) {
selectedPartType = 0;
selectedMenuPart = 12;
}
if (selectedPart == 34) {
selectedPartType = 0;
selectedMenuPart = 13;
}
if (selectedPart == 33) {
selectedPartType = 0;
selectedMenuPart = 14;
}
if (selectedPart == 11) {
selectedPartType = 0;
selectedMenuPart = 15;
}
if (selectedPart == 8) {
selectedPartType = 0;
selectedMenuPart = 16;
}
if (selectedPart == 9) {
selectedPartType = 0;
selectedMenuPart = 17;
}
if (selectedPart == 15) {
selectedPartType = 0;
selectedMenuPart = 18;
}
if (selectedPart == 12) {
selectedPartType = 0;
selectedMenuPart = 19;
}
if (selectedPart == 46) {
selectedPartType = 0;
selectedMenuPart = 20;
}
if (selectedPart == 47) {
selectedPartType = 0;
selectedMenuPart = 21;
}
if (selectedPart == 48) {
selectedPartType = 0;
selectedMenuPart = 23;
}
if (selectedPart == 49) {
selectedPartType = 0;
selectedMenuPart = 24;
}
if (selectedPart == 50) {
selectedPartType = 0;
selectedMenuPart = 22;
}
if (selectedPart == 51) {
selectedPartType = 0;
selectedMenuPart = 25;
}
if (selectedPart == 16) {
selectedPartType = 1;
selectedMenuPart = 0;
}
if (selectedPart == 18) {
selectedPartType = 1;
selectedMenuPart = 1;
}
if (selectedPart == 19) {
selectedPartType = 1;
selectedMenuPart = 2;
}
if (selectedPart == 22) {
selectedPartType = 1;
selectedMenuPart = 3;
}
if (selectedPart == 17) {
selectedPartType = 1;
selectedMenuPart = 4;
}
if (selectedPart == 21) {
selectedPartType = 1;
selectedMenuPart = 5;
}
if (selectedPart == 20) {
selectedPartType = 1;
selectedMenuPart = 6;
}
if (selectedPart == 39) {
selectedPartType = 1;
selectedMenuPart = 7;
}
if (selectedPart == 42) {
selectedPartType = 1;
selectedMenuPart = 8;
}
if (selectedPart == 40) {
selectedPartType = 1;
selectedMenuPart = 9;
}
if (selectedPart == 23) {
selectedPartType = 1;
selectedMenuPart = 10;
}
if (selectedPart == 25) {
selectedPartType = 1;
selectedMenuPart = 11;
}
if (selectedPart == 24) {
selectedPartType = 1;
selectedMenuPart = 12;
}
if (selectedPart == 43) {
selectedPartType = 1;
selectedMenuPart = 13;
}
if (selectedPart == 45) {
selectedPartType = 1;
selectedMenuPart = 14;
}
if (selectedPart == 26) {
selectedPartType = 1;
selectedMenuPart = 15;
}
if (selectedPart == 27) {
selectedPartType = 2;
selectedMenuPart = 0;
}
if (selectedPart == 28) {
selectedPartType = 2;
selectedMenuPart = 1;
}
if (selectedPart == 41) {
selectedPartType = 2;
selectedMenuPart = 2;
}
if (selectedPart == 44) {
selectedPartType = 2;
selectedMenuPart = 3;
}
if (selectedPart == 52) {
selectedPartType = 2;
selectedMenuPart = 4;
}
if (selectedPart == 53) {
selectedPartType = 2;
selectedMenuPart = 5;
}
if (selectedPart == 30 || selectedPart == 32 || selectedPart == 54) {
selectedPartType = 3;
selectedMenuPart = 0;
}
if (selectedPart == 31) {
selectedPartType = 4;
selectedMenuPart = 0;
}
if (selectedPart == 55) {
selectedPartType = 5;
selectedMenuPart = 0;
}
if (selectedPart == 56) {
selectedPartType = 5;
selectedMenuPart = 1;
}
if (selectedPart == 57) {
selectedPartType = 5;
selectedMenuPart = 2;
}
if (selectedPart == 58) {
selectedPartType = 5;
selectedMenuPart = 3;
}
if (selectedPart == 59) {
selectedPartType = 5;
selectedMenuPart = 4;
}
if (selectedPart == 60) {
selectedPartType = 5;
selectedMenuPart = 5;
}
if (selectedPart == 61) {
selectedPartType = 5;
selectedMenuPart = 6;
}
if (selectedPart == 62) {
selectedPartType = 5;
selectedMenuPart = 7;
}
if (selectedPart == 63) {
selectedPartType = 5;
selectedMenuPart = 8;
}
if (selectedPart == 64) {
selectedPartType = 5;
selectedMenuPart = 9;
}
if (selectedPart == 65) {
selectedPartType = 5;
selectedMenuPart = 10;
}
if (selectedPart == 66) {
selectedPartType = 5;
selectedMenuPart = 11;
}
if (selectedPart > 66) {
selectedPartType = 7;
selectedMenuPart = selectedPart - 55;
}
if (selectedPart == bumppart) {
if (bool) {
fgen = co[esp].srz;
} else {
fgen = 0;
}
pwd = co[esp].srx;
phd = co[esp].sry;
pgen = false;
selectedPartType = 6;
}
if (selectedPartType == PART_ROADS) {
partroads();
part.setVisible(true);
}
if (selectedPartType == PART_RAMPS) {
partramps();
part.setVisible(true);
}
if (selectedPartType == PART_OBSTACLES) {
partobst();
part.setVisible(true);
}
if (selectedPartType == PART_TREES) {
partrees();
part.setVisible(true);
}
if (selectedPartType == PART_CUSTOM) {
partcustom();
part.setVisible(true);
}
ptyp.select(selectedPartType);
part.select(selectedMenuPart);
}
private void delstage(final String string) {
try {
final File file = new File("mystages/" + string + ".txt");
file.delete();
slstage.remove(string);
slstage.select(0);
} catch (final Exception exception) {
JOptionPane.showMessageDialog(null, "Unable to delete file! Error Deatials:\n"
+ exception, "Stage Maker", 1);
}
}
private void deltrack() {
try {
final File file = new File("mystages/mymusic/" + tracks.getSelectedItem() + ".zip");
file.delete();
if (trackname.equals(tracks.getSelectedItem())) {
trackname = "";
sortop();
savefile();
}
tracks.remove(tracks.getSelectedItem());
tracks.select(0);
} catch (final Exception exception) {
JOptionPane.showMessageDialog(null, "Unable to delete file! Error Deatials:\n"
+ exception, "Stage Maker", 1);
}
}
private void drawms() {
boolean bool = false;
if (pubtyp.draw(rd, xm, ym, mousdr, 550, false)) {
bool = true;
}
if (slstage.draw(rd, xm, ym, mousdr, 550, false)) {
bool = true;
}
if (strtyp.draw(rd, xm, ym, mousdr, 550, false)) {
bool = true;
}
int i = 0;
if (preop) {
i = -1000;
}
if (part.draw(rd, xm, ym + i, mousdr && !preop, 550, false)) {
bool = true;
}
if (ptyp.draw(rd, xm, ym, mousdr, 550, false)) {
bool = true;
preop = true;
} else {
preop = false;
}
if (nlaps.draw(rd, xm, ym, mousdr, 550, true)) {
bool = true;
}
if (tracks.draw(rd, xm, ym, mousdr, 550, true)) {
bool = true;
}
if (witho.draw(rd, xm, ym, mousdr, 550, true)) {
bool = true;
}
if (bool) {
mousePressed = 0;
}
}
private void fixtext(final TextField textfield) {
String string = textfield.getText();
string = string.replace('\"', '
final String string330 = "\\";
String string331 = "";
int i = 0;
int i332 = -1;
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
for (; i < string.length(); i++) {
final String string333 = "" + string.charAt(i);
if (string333.equals("|") || string333.equals(",") || string333.equals("(") || string333.equals(")")
|| string333.equals("#") || string333.equals(string330) || string333.equals("!")
|| string333.equals("?") || string333.equals("~") || string333.equals(".") || string333.equals("@")
|| string333.equals("$") || string333.equals("%") || string333.equals("^") || string333.equals("&")
|| string333.equals("*") || string333.equals("+") || string333.equals("=") || string333.equals(">")
|| string333.equals("<") || string333.equals("/") || string333.equals(";") || string333.equals(":")
|| ftm.stringWidth(string331) > 274) {
i332 = i;
} else {
string331 = "" + string331 + string333;
}
}
if (i332 != -1) {
textfield.setText(string331);
textfield.select(i332, i332);
}
}
private Image getImage(final String string) {
final Image image = Toolkit.getDefaultToolkit().createImage(string);
final MediaTracker mediatracker = new MediaTracker(this);
mediatracker.addImage(image, 0);
try {
mediatracker.waitForID(0);
} catch (final Exception exception) {
}
return image;
}
private int getint(final String string, final String string354, final int i) {
int i355 = 0;
String string356 = "";
for (int i357 = string.length() + 1; i357 < string354.length(); i357++) {
final String string358 = "" + string354.charAt(i357);
if (string358.equals(",") || string358.equals(")")) {
i355++;
i357++;
}
if (i355 == i) {
string356 = "" + string356 + string354.charAt(i357);
}
}
return Integer.valueOf(string356).intValue();
}
private String getstring(final String string, final String string349, final int i) {
int i350 = 0;
String string351 = "";
for (int i352 = string.length() + 1; i352 < string349.length(); i352++) {
final String string353 = "" + string349.charAt(i352);
if (string353.equals(",") || string353.equals(")")) {
i350++;
i352++;
}
if (i350 == i) {
string351 = "" + string351 + string349.charAt(i352);
}
}
return string351;
}
private String getSvalue(final String string, final String string376, final int i) {
String string377 = "";
int i378 = 0;
for (int i379 = string.length() + 1; i379 < string376.length() && i378 <= i; i379++) {
final String string380 = "" + string376.charAt(i379);
if (string380.equals(",") || string380.equals(")")) {
i378++;
} else if (i378 == i) {
string377 = "" + string377 + string380;
}
}
return string377;
}
private void hidefields() {
pubtyp.setVisible(false);
tpass.setVisible(false);
tnick.setVisible(false);
witho.setVisible(false);
strtyp.setVisible(false);
srch.setVisible(false);
slstage.setVisible(false);
tracks.setVisible(false);
nlaps.setVisible(false);
pfog.setVisible(false);
fixh.setVisible(false);
mgen.setVisible(false);
ptyp.setVisible(false);
part.setVisible(false);
}
@Override
public void init() {
setBackground(new Color(0, 0, 0));
offImage = createImage(800, 550);
if (offImage != null) {
rd = (Graphics2D) offImage.getGraphics();
}
rd.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
setLayout(null);
slstage.setFont(new Font("Arial", 1, 13));
slstage.add(rd, "Select a Stage... ");
slstage.setForeground(new Color(63, 80, 110));
slstage.setBackground(new Color(209, 217, 230));
srch.setFont(new Font("Arial", 1, 12));
srch.setBackground(new Color(255, 255, 255));
srch.setForeground(new Color(0, 0, 0));
strtyp.setFont(new Font("Arial", 1, 12));
strtyp.add(rd, "NormalRoad");
strtyp.add(rd, "OffRoad");
strtyp.setBackground(new Color(63, 80, 110));
strtyp.setForeground(new Color(209, 217, 230));
ptyp.setFont(new Font("Arial", 1, 12));
ptyp.add(rd, "Roads");
ptyp.add(rd, "Ramps");
ptyp.add(rd, "Obstacles");
ptyp.add(rd, "Checkpoint");
ptyp.add(rd, "Fixing Hoop");
ptyp.add(rd, "Trees");
ptyp.add(rd, "Ground Pile");
ptyp.add(rd, "Custom Parts");
ptyp.setBackground(new Color(63, 80, 110));
ptyp.setForeground(new Color(209, 217, 230));
part.setFont(new Font("Arial", 1, 12));
part.add(rd, "Halfpipe-Normal-Road Blend");
part.setBackground(new Color(63, 80, 110));
part.setForeground(new Color(209, 217, 230));
fixh.setFont(new Font("Arial", 1, 12));
fixh.setBackground(new Color(255, 255, 255));
fixh.setForeground(new Color(0, 0, 0));
mgen.setFont(new Font("Arial", 1, 12));
mgen.setBackground(new Color(255, 255, 255));
mgen.setForeground(new Color(0, 0, 0));
pfog.setFont(new Font("Arial", 1, 12));
pfog.setBackground(new Color(225, 225, 225));
pfog.setForeground(new Color(0, 0, 0));
nlaps.setFont(new Font("Arial", 1, 12));
for (int i = 0; i < 15; i++) {
nlaps.add(rd, " " + (i + 1) + " ");
}
nlaps.setBackground(new Color(63, 80, 110));
nlaps.setForeground(new Color(209, 217, 230));
tracks.setFont(new Font("Arial", 1, 12));
tracks.add(rd, "Select MOD Track");
tracks.setForeground(new Color(63, 80, 110));
tracks.setBackground(new Color(209, 217, 230));
witho.setFont(new Font("Arial", 1, 12));
witho.add(rd, "With other cars");
witho.add(rd, "Alone");
witho.setBackground(new Color(63, 80, 110));
witho.setForeground(new Color(209, 217, 230));
tnick.setFont(new Font("Arial", 1, 13));
tnick.setBackground(new Color(255, 255, 255));
tnick.setForeground(new Color(0, 0, 0));
tpass.setFont(new Font("Arial", 1, 13));
tpass.setEchoChar('*');
tpass.setBackground(new Color(255, 255, 255));
tpass.setForeground(new Color(0, 0, 0));
pubtyp.setFont(new Font("Arial", 1, 13));
pubtyp.add(rd, "Private");
pubtyp.add(rd, "Public");
pubtyp.add(rd, "Super Public");
pubtyp.setBackground(new Color(63, 80, 110));
pubtyp.setForeground(new Color(209, 217, 230));
add(tnick);
add(tpass);
add(srch);
add(fixh);
add(mgen);
add(pfog);
hidefields();
}
@Override
public boolean keyDown(final Event event, final int i) {
if (focuson) {
if (i == 42 || i == 10 || i == 56 || i == 119 || i == 87 || i == 43 || i == 61) {
zoomi = true;
}
if (i == 47 || i == 8 || i == 50 || i == 115 || i == 83 || i == 45) {
zoomo = true;
}
if (i == 1006) {
left = true;
}
if (i == 1007) {
right = true;
}
if (i == 1005) {
down = true;
}
if (i == 1004) {
up = true;
}
}
return false;
}
@Override
public boolean keyUp(final Event event, final int i) {
if (i == 42 || i == 10 || i == 56 || i == 119 || i == 87 || i == 43 || i == 61) {
zoomi = false;
}
if (i == 47 || i == 8 || i == 50 || i == 115 || i == 83 || i == 45) {
zoomo = false;
}
if (i == 1006) {
left = false;
}
if (i == 1007) {
right = false;
}
if (i == 1005) {
down = false;
}
if (i == 1004) {
up = false;
}
return false;
}
private void loadbase() {
final String[] strings = {
"road", "froad", "twister2", "twister1", "turn", "offroad", "bumproad", "offturn", "nroad", "nturn",
"roblend", "noblend", "rnblend", "roadend", "offroadend", "hpground", "ramp30", "cramp35", "dramp15",
"dhilo15", "slide10", "takeoff", "sramp22", "offbump", "offramp", "sofframp", "halfpipe", "spikes",
"rail", "thewall", "checkpoint", "fixpoint", "offcheckpoint",
"sideoff", "bsideoff", "uprise", "riseroad", "sroad", "soffroad", "tside", "launchpad", "thenet",
"speedramp", "offhill", "slider", "uphill", "roll1", "roll2", "roll3", "roll4", "roll5", "roll6",
"opile1", "opile2", "aircheckpoint", "tree1", "tree2", "tree3", "tree4", "tree5", "tree6", "tree7",
"tree8", "cac1", "cac2", "cac3"//, "housetest"
};
try {
final File file = new File("data/models.zip");
final ZipInputStream zipinputstream = new ZipInputStream(new FileInputStream(file));
ZipEntry zipentry = zipinputstream.getNextEntry();
for (; zipentry != null; zipentry = zipinputstream.getNextEntry()) {
int i = -1;
for (int i176 = 0; i176 < strings.length; i176++)
if (zipentry.getName().startsWith(strings[i176])) {
i = i176;
}
if (i != -1) {
int i177 = (int) zipentry.getSize();
final byte[] is = new byte[i177];
int i178 = 0;
int i179;
for (; i177 > 0; i177 -= i179) {
i179 = zipinputstream.read(is, i178, i177);
i178 += i179;
}
bco[i] = new ContO(is, m, t);
for (int i180 = 0; i180 < bco[i].npl; i180++) {
bco[i].p[i180].loadprojf();
//if (i == 31)
// bco[i].elec = true;
}
}
}
zipinputstream.close();
bco[bumppart] = new ContO((int) (10000.0 * ThreadLocalRandom.current().nextDouble()), (int) pwd, (int) phd, m, t, 0, 0, 0);
} catch (final Exception exception) {
JOptionPane.showMessageDialog(null, "Unable to load file 'data/models.zip'!\nError:\n"
+ exception, "Stage Maker", 1);
}
System.gc();
}
private void loadsettings() {
try {
final File file = new File("mystages/settings.data");
if (file.exists()) {
final BufferedReader bufferedreader = new BufferedReader(new FileReader(file));
String string = bufferedreader.readLine();
if (string != null) {
sstage = string;
stagename = sstage;
}
string = bufferedreader.readLine();
if (string != null) {
suser = string;
if (!suser.equals("Horaks")) {
tnick.setText(suser);
}
}
bufferedreader.close();
}
} catch (final Exception exception) {
}
}
@Override
public boolean mouseDown(final Event event, final int x, final int y) {
mousdr = true;
xm = x - apx;
ym = y - apy;
mousePressed = 1;
requestFocus();
focuson = true;
return false;
}
@Override
public boolean mouseDrag(final Event event, final int x, final int y) {
mousdr = true;
xm = x - apx;
ym = y - apy;
return false;
}
@Override
public boolean mouseMove(final Event event, final int x, final int y) {
xm = x - apx;
ym = y - apy;
if (xm > 620 && xm < 774 && ym > 0 && ym < 23) {
if (!onbtgame) {
onbtgame = true;
setCursor(new Cursor(12));
}
} else if (onbtgame) {
onbtgame = false;
setCursor(new Cursor(0));
}
return false;
}
@Override
public boolean mouseUp(final Event event, final int i, final int i172) {
mousdr = false;
xm = i - apx;
ym = i172 - apy;
if (mousePressed == 1) {
mousePressed = -1;
}
if (onbtgame) {
Madness.game();
}
return false;
}
private void movefield(final Component component, int i, int i169, final int i170, final int i171) {
i += apx;
i169 += apy;
if (component.getX() != i || component.getY() != i169 || component.getWidth() != i170
|| component.getHeight() != i171) {
component.setBounds(i, i169, i170, i171);
}
}
private void newstage() {
if (!srch.getText().equals("")) {
final File file = new File("mystages/" + srch.getText() + ".txt");
if (!file.exists()) {
stagename = srch.getText();
tstage = ""
+ "snap(0,0,0)\r\nsky(191,215,255)\r\nclouds(255,255,255,5,-1000)\r\nfog(195,207,230)\r\nground(192,194,202)\r\ntexture(0,0,0,50)\r\nfadefrom(5000)\r\ndensity(5)\r\nmountains("
+ (int) (ThreadLocalRandom.current().nextDouble() * 100000.0) + ")\r\nnlaps(5)\r\n\r\n";
if (strtyp.getSelectedIndex() == 1) {
bstage = "set(48,0,0,0)\r\n";
} else {
bstage = "set(47,0,0,0)\r\n";
}
this.bstage = "" + this.bstage
+ "\r\nmaxl(3,-7200,-4800)\r\nmaxb(3,-7200,-4800)\r\nmaxr(3,7200,-4800)\r\nmaxt(3,7200,-4800)\r\n";
savefile();
strtyp.setVisible(false);
srch.setVisible(false);
sfase = 0;
tabed = -2;
} else {
JOptionPane.showMessageDialog(null, "A stage with that name already exists, please choose another name!", "Stage Maker", 1);
}
} else {
JOptionPane.showMessageDialog(null, "Please enter a stage name first!", "Stage Maker", 1);
}
}
private void openhlink() {
Madness.openurl("http:
}
private void openlink() {
Madness.openurl("http:
}
private boolean ovbutton(final String string, final int i, final int i387) {
rd.setFont(new Font("Arial", 0, 12));
ftm = rd.getFontMetrics();
if (string.equals("X") || string.equals("Download")) {
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
}
final int i388 = ftm.stringWidth(string);
final int i389 = 4;
boolean bool = false;
boolean bool390 = false;
if (Math.abs(xm - i) < i388 / 2 + 12 && Math.abs(ym - i387 + 5) < 10 && mousePressed == 1) {
bool = true;
} else {
bool = false;
}
if (Math.abs(xm - i) < i388 / 2 + 12 && Math.abs(ym - i387 + 5) < 10 && mousePressed == -1) {
mousePressed = 0;
bool390 = true;
}
if (!bool) {
rd.setColor(new Color(220, 220, 220));
rd.fillRect(i - i388 / 2 - 10, i387 - (17 - i389), i388 + 20, 25 - i389 * 2);
rd.setColor(new Color(240, 240, 240));
rd.drawLine(i - i388 / 2 - 10, i387 - (17 - i389), i + i388 / 2 + 10, i387 - (17 - i389));
rd.drawLine(i - i388 / 2 - 10, i387 - (18 - i389), i + i388 / 2 + 10, i387 - (18 - i389));
rd.setColor(new Color(240, 240, 240));
rd.drawLine(i - i388 / 2 - 9, i387 - (19 - i389), i + i388 / 2 + 9, i387 - (19 - i389));
rd.setColor(new Color(200, 200, 200));
rd.drawLine(i + i388 / 2 + 10, i387 - (17 - i389), i + i388 / 2 + 10, i387 + 7 - i389);
rd.drawLine(i + i388 / 2 + 11, i387 - (17 - i389), i + i388 / 2 + 11, i387 + 7 - i389);
rd.setColor(new Color(200, 200, 200));
rd.drawLine(i + i388 / 2 + 12, i387 - (16 - i389), i + i388 / 2 + 12, i387 + 6 - i389);
rd.drawLine(i - i388 / 2 - 10, i387 + 7 - i389, i + i388 / 2 + 10, i387 + 7 - i389);
rd.drawLine(i - i388 / 2 - 10, i387 + 8 - i389, i + i388 / 2 + 10, i387 + 8 - i389);
rd.setColor(new Color(200, 200, 200));
rd.drawLine(i - i388 / 2 - 9, i387 + 9 - i389, i + i388 / 2 + 9, i387 + 9 - i389);
rd.setColor(new Color(240, 240, 240));
rd.drawLine(i - i388 / 2 - 10, i387 - (17 - i389), i - i388 / 2 - 10, i387 + 7 - i389);
rd.drawLine(i - i388 / 2 - 11, i387 - (17 - i389), i - i388 / 2 - 11, i387 + 7 - i389);
rd.setColor(new Color(240, 240, 240));
rd.drawLine(i - i388 / 2 - 12, i387 - (16 - i389), i - i388 / 2 - 12, i387 + 6 - i389);
rd.setColor(new Color(0, 0, 0));
if (string.equals("X")) {
rd.setColor(new Color(255, 0, 0));
}
if (string.equals("Download")) {
rd.setColor(new Color(0, 64, 128));
}
rd.drawString(string, i - i388 / 2, i387);
} else {
rd.setColor(new Color(220, 220, 220));
rd.fillRect(i - i388 / 2 - 10, i387 - (17 - i389), i388 + 20, 25 - i389 * 2);
rd.setColor(new Color(192, 192, 192));
rd.drawLine(i - i388 / 2 - 10, i387 - (17 - i389), i + i388 / 2 + 10, i387 - (17 - i389));
rd.drawLine(i - i388 / 2 - 10, i387 - (18 - i389), i + i388 / 2 + 10, i387 - (18 - i389));
rd.drawLine(i - i388 / 2 - 9, i387 - (19 - i389), i + i388 / 2 + 9, i387 - (19 - i389));
rd.setColor(new Color(247, 247, 247));
rd.drawLine(i + i388 / 2 + 10, i387 - (17 - i389), i + i388 / 2 + 10, i387 + 7 - i389);
rd.drawLine(i + i388 / 2 + 11, i387 - (17 - i389), i + i388 / 2 + 11, i387 + 7 - i389);
rd.drawLine(i + i388 / 2 + 12, i387 - (16 - i389), i + i388 / 2 + 12, i387 + 6 - i389);
rd.drawLine(i - i388 / 2 - 10, i387 + 7 - i389, i + i388 / 2 + 10, i387 + 7 - i389);
rd.drawLine(i - i388 / 2 - 10, i387 + 8 - i389, i + i388 / 2 + 10, i387 + 8 - i389);
rd.drawLine(i - i388 / 2 - 9, i387 + 9 - i389, i + i388 / 2 + 9, i387 + 9 - i389);
rd.setColor(new Color(192, 192, 192));
rd.drawLine(i - i388 / 2 - 10, i387 - (17 - i389), i - i388 / 2 - 10, i387 + 7 - i389);
rd.drawLine(i - i388 / 2 - 11, i387 - (17 - i389), i - i388 / 2 - 11, i387 + 7 - i389);
rd.drawLine(i - i388 / 2 - 12, i387 - (16 - i389), i - i388 / 2 - 12, i387 + 6 - i389);
rd.setColor(new Color(0, 0, 0));
if (string.equals("X")) {
rd.setColor(new Color(255, 0, 0));
}
if (string.equals("Download")) {
rd.setColor(new Color(0, 64, 128));
}
rd.drawString(string, i - i388 / 2 + 1, i387 + 1);
}
return bool390;
}
@Override
public void paint(final Graphics graphics) {
apx = getWidth() / 2 - 400;
apy = getHeight() / 2 - 275;
graphics.drawImage(offImage, apx, apy, this);
}
private void partobst() {
part.removeAll();
part.add(rd, "Spiky Pillars");
part.add(rd, "Rail Doorway");
part.add(rd, "The Net");
part.add(rd, "Bump Slide");
part.add(rd, "Offroad Dirt-Pile 1");
part.add(rd, "Offroad Dirt-Pile 2");
}
private void partramps() {
part.removeAll();
part.add(rd, "Basic Ramp");
part.add(rd, "Two-Way Ramp");
part.add(rd, "Two-Way High-Low Ramp");
part.add(rd, "Small Ramp");
part.add(rd, "Crash Ramp");
part.add(rd, "Big-Takeoff Ramp");
part.add(rd, "Landing Ramp");
part.add(rd, "Tunnel Side Ramp");
part.add(rd, "Speed Ramp");
part.add(rd, "Launch Pad Ramp");
part.add(rd, "Offroad Bump Ramp");
part.add(rd, "Offroad Ramp");
part.add(rd, "Offroad Big Ramp");
part.add(rd, "Offroad Hill Ramp");
part.add(rd, "Offroad Big Hill Ramp");
part.add(rd, "Halfpipe");
}
private void partrees() {
part.removeAll();
part.add(rd, "Tree 1");
part.add(rd, "Tree 2");
part.add(rd, "Tree 3");
part.add(rd, "Tree 4");
part.add(rd, "Tree 5");
part.add(rd, "Palm Tree 1");
part.add(rd, "Palm Tree 2");
part.add(rd, "Palm Tree 3");
part.add(rd, "Cactus 1");
part.add(rd, "Cactus 2");
part.add(rd, "Cactus 3");
}
private void partcustom() {
part.removeAll();
part.add(rd, "
}
private void partroads() {
part.removeAll();
part.add(rd, "NormalRoad");
part.add(rd, "NormalRoad Turn");
part.add(rd, "NormalRoad End");
part.add(rd, "NormalRoad TwistedLeft");
part.add(rd, "NormalRoad TwistedRight");
part.add(rd, "NormalRoad Edged");
part.add(rd, "NormalRoad-Raised Ramp");
part.add(rd, "NormalRoad Raised");
part.add(rd, "Normal-Off-Road Blend");
part.add(rd, "OffRoad");
part.add(rd, "OffRoad Turn");
part.add(rd, "OffRoad End");
part.add(rd, "OffRoad BumpyGreen");
part.add(rd, "OffRoad-BumpySides Start");
part.add(rd, "OffRoad BumpySides");
part.add(rd, "Off-Halfpipe-Road Blend");
part.add(rd, "HalfpipeRoad");
part.add(rd, "HalfpipeRoad Turn");
part.add(rd, "HalfpipeRoad-Ramp Filler");
part.add(rd, "Halfpipe-Normal-Road Blend");
part.add(rd, "Rollercoaster Start/End");
part.add(rd, "Rollercoaster Road1");
part.add(rd, "Rollercoaster Road2");
part.add(rd, "Rollercoaster Road3");
part.add(rd, "Rollercoaster Road4");
part.add(rd, "Rollercoaster Road5");
}
private int py(final int i, final int i343, final int i344, final int i345) {
return (int) Math.sqrt((i - i343) * (i - i343) + (i344 - i345) * (i344 - i345));
}
private int pyn(final int i, final int i346, final int i347, final int i348) {
return (i - i346) / 100 * ((i - i346) / 100) + (i347 - i348) / 100 * ((i347 - i348) / 100);
}
/**
 * Parses a stage definition and builds the world, scenery objects and
 * checkpoint tables.
 *
 * Modes (parameter i):
 *   0    - re-parse the in-memory bstage only (editor refresh)
 *   1, 2 - parse tstage + bstage concatenated (mode 2 skips "snap" lines)
 *   3    - load "mystages/" + stagename + ".txt" from disk, rebuilding
 *          tstage (header lines) and bstage (object lines) while parsing
 *
 * On a parse failure, errd is set to a nonzero code guessed from which
 * limit was exceeded (see the catch block); errd == 0 means success.
 */
private void readstage(final int i) {
// Reset error state, soundtrack, track geometry and checkpoint tables.
errd = 0;
trackname = "";
t.nt = 0;
nob = 0;
xnob = 0;
cp.n = 0;
cp.nsp = 0;
cp.fn = 0;
cp.haltall = false;
cp.wasted = 0;
cp.catchfin = 0;
m.ground = 250;
m.lightson = false;
if (i == 0) {
// Editor refresh: also clear the snap settings.
m.snap[0] = 0;
m.snap[1] = 0;
m.snap[2] = 0;
}
if (i == 3) {
// Disk load: the cached header (tstage) and body (bstage) strings are
// rebuilt from scratch while parsing.
tstage = "";
bstage = "";
}
// Choose the text source: modes 1/2 parse header + body, mode 0 body only.
String string = bstage;
if (i == 1 || i == 2) {
string = "" + tstage + "\r\n" + bstage + "";
}
// Track extents gathered from max* boundary lines (i181 right-x,
// i182 left-x, i183 top-z, i184 bottom-z), used after the loop to size
// the ground polygons, clouds and mountains.
int i181 = 0;
int i182 = 100;
int i183 = 0;
int i184 = 100;
// In mode 3, insert a single blank separator line into bstage before the
// first boundary entry (bool) / first object entry (bool185).
boolean bool = true;
boolean bool185 = true;
String string186 = "";
try {
BufferedReader bufferedreader;
if (i == 3) {
final File file = new File("mystages/" + stagename + ".txt");
bufferedreader = new BufferedReader(new FileReader(file));
nundo = 0;
} else {
bufferedreader = new BufferedReader(new InputStreamReader(new DataInputStream(new ByteArrayInputStream(string.getBytes()))));
}
String string187;
while ((string187 = bufferedreader.readLine()) != null) {
string186 = "" + string187.trim();
// ---- Header lines: world appearance. In mode 3 every recognized
// header line is appended back into tstage verbatim. ----
if (string186.startsWith("sky")) {
csky[0] = getint("sky", string186, 0);
csky[1] = getint("sky", string186, 1);
csky[2] = getint("sky", string186, 2);
m.setsky(csky[0], csky[1], csky[2]);
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("ground")) {
cgrnd[0] = getint("ground", string186, 0);
cgrnd[1] = getint("ground", string186, 1);
cgrnd[2] = getint("ground", string186, 2);
m.setgrnd(cgrnd[0], cgrnd[1], cgrnd[2]);
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("polys")) {
m.setpolys(getint("polys", string186, 0), getint("polys", string186, 1), getint("polys", string186, 2));
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("fog")) {
cfade[0] = getint("fog", string186, 0);
cfade[1] = getint("fog", string186, 1);
cfade[2] = getint("fog", string186, 2);
m.setfade(cfade[0], cfade[1], cfade[2]);
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("texture")) {
texture[0] = getint("texture", string186, 0);
texture[1] = getint("texture", string186, 1);
texture[2] = getint("texture", string186, 2);
texture[3] = getint("texture", string186, 3);
m.setexture(texture[0], texture[1], texture[2], texture[3]);
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("clouds")) {
cldd[0] = getint("clouds", string186, 0);
cldd[1] = getint("clouds", string186, 1);
cldd[2] = getint("clouds", string186, 2);
cldd[3] = getint("clouds", string186, 3);
cldd[4] = getint("clouds", string186, 4);
m.setcloads(cldd[0], cldd[1], cldd[2], cldd[3], cldd[4]);
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
// "snap" lines are ignored in mode 2.
if (i != 2 && string186.startsWith("snap")) {
m.setsnap(getint("snap", string186, 0), getint("snap", string186, 1), getint("snap", string186, 2));
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("density")) {
// Fog density: remap the stored value to odd values, clamped 1..30.
m.fogd = (getint("density", string186, 0) + 1) * 2 - 1;
if (m.fogd < 1) {
m.fogd = 1;
}
if (m.fogd > 30) {
m.fogd = 30;
}
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("mountains")) {
m.mgen = getint("mountains", string186, 0);
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("fadefrom")) {
m.fadfrom(getint("fadefrom", string186, 0));
origfade = m.fade[0];
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("lightson")) {
m.lightson = true;
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("nlaps")) {
// Lap count, clamped 1..15.
cp.nlaps = getint("nlaps", string186, 0);
if (cp.nlaps < 1) {
cp.nlaps = 1;
}
if (cp.nlaps > 15) {
cp.nlaps = 15;
}
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("soundtrack")) {
trackname = getstring("soundtrack", string186, 0);
trackvol = getint("soundtrack", string186, 1);
tracksize = getint("soundtrack", string186, 2);
if (i == 3) {
this.tstage = "" + this.tstage + "" + string186 + "\r\n";
}
}
// ---- Object lines. In mode 3 each recognized object line is
// appended back into bstage verbatim. ----
// "set(type,x,z,rot)": scenery piece; the stored type is bco index + 10.
if (string186.startsWith("set")) {
int i201 = getint("set", string186, 0);
if (i201 >= 10 && i201 <= 25) {
// NOTE(review): raw types 10..25 (bco 0..15) toggle m.loadnew
// around construction — the flag's purpose is not visible here.
m.loadnew = true;
}
i201 -= 10;
co[nob] = new ContO(bco[i201], getint("set", string186, 1), m.ground
- bco[i201].grat, getint("set", string186, 2), getint("set", string186, 3));
co[nob].roofat = getint("set", string186, 3);
co[nob].colok = i201;
// A ")p" suffix also registers the piece as a waypoint; the
// variants )pt / )pr / )po / )ph select type codes -1..-4.
if (string186.indexOf(")p") != -1) {
cp.x[cp.n] = getint("chk", string186, 1);
cp.z[cp.n] = getint("chk", string186, 2);
cp.y[cp.n] = 0;
cp.typ[cp.n] = 0;
if (string186.indexOf(")pt") != -1) {
cp.typ[cp.n] = -1;
}
if (string186.indexOf(")pr") != -1) {
cp.typ[cp.n] = -2;
}
if (string186.indexOf(")po") != -1) {
cp.typ[cp.n] = -3;
}
if (string186.indexOf(")ph") != -1) {
cp.typ[cp.n] = -4;
}
cp.n++;
}
xnob++;
nob++;
if (i == 3) {
if (bool185) {
this.bstage = "" + this.bstage + "\r\n";
bool185 = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
if (m.loadnew) {
m.loadnew = false;
}
}
// "chk(type,x,z,rot[,y])": checkpoint piece. bco index 54 (raw 64)
// stores an explicit y as a 5th argument; others sit on the ground.
if (string186.startsWith("chk")) {
int i204 = getint("chk", string186, 0);
i204 -= 10;
int i205 = m.ground - bco[i204].grat;
if (i204 == 54) {
i205 = getint("chk", string186, 4);
}
co[nob] = new ContO(bco[i204], getint("chk", string186, 1), i205, getint("chk", string186, 2), getint("chk", string186, 3));
co[nob].roofat = getint("chk", string186, 3);
co[nob].colok = i204;
cp.x[cp.n] = getint("chk", string186, 1);
cp.z[cp.n] = getint("chk", string186, 2);
cp.y[cp.n] = i205;
// Waypoint type: 1 when unrotated, 2 otherwise.
if (getint("chk", string186, 3) == 0) {
cp.typ[cp.n] = 1;
} else {
cp.typ[cp.n] = 2;
}
cp.pcs = cp.n;
cp.n++;
co[nob].checkpoint = cp.nsp + 1;
// ")r" suffix: tag the piece with its checkpoint number
// (displayed by the arrange overlay in the editor).
if (string186.indexOf(")r") != -1) {
co[nob].wh = cp.nsp + 1;
}
cp.nsp++;
xnob++;
nob++;
if (i == 3) {
if (bool185) {
this.bstage = "" + this.bstage + "\r\n";
bool185 = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
}
// "fix(type,x,z,y,rot)": floating fix/hoop piece with explicit y;
// marked electric; ")s" flags it special, nonzero rot flags rotated.
if (string186.startsWith("fix")) {
int i208 = getint("fix", string186, 0);
i208 -= 10;
co[nob] = new ContO(bco[i208], getint("fix", string186, 1), getint("fix", string186, 3), getint("fix", string186, 2), getint("fix", string186, 4));
co[nob].roofat = getint("fix", string186, 4);
co[nob].colok = i208;
cp.fx[cp.fn] = getint("fix", string186, 1);
cp.fz[cp.fn] = getint("fix", string186, 2);
cp.fy[cp.fn] = getint("fix", string186, 3);
co[nob].elec = true;
if (getint("fix", string186, 4) != 0) {
cp.roted[cp.fn] = true;
co[nob].roted = true;
} else {
cp.roted[cp.fn] = false;
}
if (string186.indexOf(")s") != -1) {
cp.special[cp.fn] = true;
} else {
cp.special[cp.fn] = false;
}
cp.fn++;
xnob++;
nob++;
if (i == 3) {
if (bool185) {
this.bstage = "" + this.bstage + "\r\n";
bool185 = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
}
// "pile(seed,w,h,x,z)": procedurally generated bump pile; the
// generation parameters are kept in srz/srx/sry so the entry can be
// reconstructed later (see removesp()).
if (string186.startsWith("pile")) {
co[nob] = new ContO(getint("pile", string186, 0), getint("pile", string186, 1), getint("pile", string186, 2), m, t, getint("pile", string186, 3), getint("pile", string186, 4), m.ground);
co[nob].srz = getint("pile", string186, 0);
co[nob].srx = getint("pile", string186, 1);
co[nob].sry = getint("pile", string186, 2);
co[nob].colok = bumppart;
xnob++;
nob++;
if (i == 3) {
if (bool185) {
this.bstage = "" + this.bstage + "\r\n";
bool185 = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
}
// ---- Boundary walls: "maxr/maxl/maxt/maxb(count,pos,offset)" lay
// rows of bco[29] pieces spaced 4800 apart along each edge. In mode 0
// nob is NOT advanced (each wall overwrites co[nob]); they are only
// counted against the object limit via xnob.
if (string186.startsWith("maxr")) {
final int i213 = getint("maxr", string186, 0);
final int i214 = getint("maxr", string186, 1);
i181 = i214;
final int i215 = getint("maxr", string186, 2);
for (int i216 = 0; i216 < i213; i216++) {
co[nob] = new ContO(bco[29], i214, m.ground - bco[29].grat, i216 * 4800 + i215, 0);
if (i == 0) {
xnob++;
} else {
nob++;
}
}
if (i == 3) {
if (bool) {
this.bstage = "" + this.bstage + "\r\n";
bool = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("maxl")) {
final int i219 = getint("maxl", string186, 0);
final int i220 = getint("maxl", string186, 1);
i182 = i220;
final int i221 = getint("maxl", string186, 2);
for (int i222 = 0; i222 < i219; i222++) {
co[nob] = new ContO(bco[29], i220, m.ground - bco[29].grat, i222 * 4800 + i221, 180);
if (i == 0) {
xnob++;
} else {
nob++;
}
}
if (i == 3) {
if (bool) {
this.bstage = "" + this.bstage + "\r\n";
bool = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("maxt")) {
final int i225 = getint("maxt", string186, 0);
final int i226 = getint("maxt", string186, 1);
i183 = i226;
final int i227 = getint("maxt", string186, 2);
for (int i228 = 0; i228 < i225; i228++) {
co[nob] = new ContO(bco[29], i228 * 4800 + i227, m.ground - bco[29].grat, i226, 90);
if (i == 0) {
xnob++;
} else {
nob++;
}
}
if (i == 3) {
if (bool) {
this.bstage = "" + this.bstage + "\r\n";
bool = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
}
if (string186.startsWith("maxb")) {
final int i231 = getint("maxb", string186, 0);
final int i232 = getint("maxb", string186, 1);
i184 = i232;
final int i233 = getint("maxb", string186, 2);
for (int i234 = 0; i234 < i231; i234++) {
co[nob] = new ContO(bco[29], i234 * 4800 + i233, m.ground - bco[29].grat, i232, -90);
if (i == 0) {
xnob++;
} else {
nob++;
}
}
if (i == 3) {
if (bool) {
this.bstage = "" + this.bstage + "\r\n";
bool = false;
}
this.bstage = "" + this.bstage + "" + string186 + "\r\n";
}
}
}
bufferedreader.close();
// Rebuild terrain, clouds, mountains and stars from the gathered extents.
m.newpolys(i182, i181 - i182, i184, i183 - i184, t, nob);
m.newclouds(i182, i181, i184, i183);
m.newmountains(i182, i181, i184, i183);
m.newstars();
} catch (final Exception exception) {
// Parse failed: report the offending line and guess an error code from
// which capacity limit was exceeded (defaults to the generic code 6).
System.out.println("Error in stage " + stagename);
System.out.println("" + exception);
System.out.println("At line: " + string186);
errd = 6;
if (cp.fn >= 5) {
errd = 5;
}
if (t.nt >= 670000) {
errd = 1;
}
if (cp.n >= 10000) {
errd = 2;
}
if (nob >= 10000) {
errd = 4;
}
}
// Post-parse limit checks (applied even on success).
if (m.nrw * m.ncl >= 16000) {
errd = 3;
}
if (xnob >= 10000) {
errd = 4;
}
// Mode 3: guarantee the stage has a starting line (piece 47 or 48);
// append a default one at the origin if neither is present.
if (i == 3 && bstage.indexOf("set(47,0,0,0)") == -1 && bstage.indexOf("set(48,0,0,0)") == -1) {
this.bstage = "" + this.bstage + "set(47,0,0,0)\r\n";
}
}
/**
 * Deletes the currently selected object (co[esp]) from the stage source.
 *
 * Reconstructs the exact "set(...)"/"chk(...)"/"fix(...)"/"pile(...)" text
 * that created the object (with or without an explicit y, depending on the
 * floats flag), removes that span from bstage up to the start of the next
 * entry, then re-parses the stage. A snapshot of bstage is pushed onto the
 * undo stack first.
 */
private void removesp() {
    // Save an undo snapshot (stack capped at 5000 entries).
    if (nundo < 5000) {
        undos[nundo] = bstage;
        nundo++;
    }
    String string = "";
    System.out.println("roof: " + co[esp].roofat);
    if (!floats) {
        // Grounded objects: y is implicit, so "set"/"chk" entries omit it
        // (except chk of type 54, which stores y as a 5th argument).
        if (co[esp].colok != 30 && co[esp].colok != 31 && co[esp].colok != 32 && co[esp].colok != bumppart) {
            string = "set(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].roofat
                    + ")";
        }
        if (co[esp].colok == 31) {
            string = "fix(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].y + ","
                    + co[esp].roofat + ")";
        }
        if (co[esp].colok == 30 || co[esp].colok == 32) {
            string = "chk(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].roofat
                    + ")";
        }
        if (co[esp].colok == 54) {
            string = "chk(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].roofat
                    + "," + co[esp].y + ")";
        }
        if (co[esp].colok == bumppart) {
            string = "pile(" + co[esp].srz + "," + co[esp].srx + "," + co[esp].sry + "," + co[esp].x + ","
                    + co[esp].z + ")";
        }
    } else {
        // Floating objects: every entry form carries an explicit y.
        if (co[esp].colok != 30 && co[esp].colok != 31 && co[esp].colok != 32 && co[esp].colok != bumppart) {
            string = "set(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].y + ","
                    + co[esp].roofat + ")";
        }
        if (co[esp].colok == 31) {
            string = "fix(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].y + ","
                    + co[esp].roofat + ")";
        }
        if (co[esp].colok == 30 || co[esp].colok == 32) {
            string = "chk(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].y + ","
                    + co[esp].roofat + ")";
        }
        if (co[esp].colok == 54) {
            string = "chk(" + (co[esp].colok + 10) + "," + co[esp].x + "," + co[esp].z + "," + co[esp].y + ","
                    + co[esp].roofat + ")";
        }
        if (co[esp].colok == bumppart) {
            string = "pile(" + co[esp].srz + "," + co[esp].srx + "," + co[esp].sry + "," + co[esp].x + ","
                    + co[esp].z + ")";
        }
    }
    // Locate the entry and extend the deletion span to the start of the
    // next "set"/"chk"/"fix" entry, taking the EARLIEST of the found
    // indices. (Fixed: previously i167 started at -1 and a found "chk" or
    // "fix" index was discarded whenever no "set" followed, because
    // `i168 < -1` is always false — the deletion then fell through to the
    // CRLF heuristic below.)
    // NOTE(review): "pile" entries are not considered as a terminator
    // here, mirroring the original behavior — confirm whether a trailing
    // pile entry after the deleted one is handled correctly.
    final int i = bstage.indexOf(string);
    int i166 = i + string.length();
    int i167 = -1;
    int i168 = bstage.indexOf("set", i166);
    if (i168 != -1) {
        i167 = i168;
    }
    i168 = bstage.indexOf("chk", i166);
    if (i168 != -1 && (i167 == -1 || i168 < i167)) {
        i167 = i168;
    }
    i168 = bstage.indexOf("fix", i166);
    if (i168 != -1 && (i167 == -1 || i168 < i167)) {
        i167 = i168;
    }
    if (i167 == -1) {
        // No later entry: cut up to (part of) the line terminator instead.
        // NOTE(review): i167++ consumes only the '\r' of the CRLF pair,
        // leaving a stray '\n' in bstage; the parser tolerates blank
        // lines, so this matches the original behavior.
        i167 = bstage.indexOf("\r\n", i166);
        if (i167 != -1) {
            i167++;
        }
    }
    if (i167 != -1) {
        i166 = i167;
    }
    // Splice the entry out only if it was actually found.
    if (i != -1) {
        bstage = "" + bstage.substring(0, i) + "" + bstage.substring(i166, bstage.length()) + "";
    }
    // Re-parse the edited body to rebuild the in-memory world.
    readstage(0);
}
/**
 * Renames the currently selected stage file on disk.
 *
 * Rejects an empty name with a dialog. On a successful rename, updates
 * stagename and resets the stage-select screen state (sfase, tabed,
 * hidden fields); on failure, shows an explanatory dialog (the most
 * likely cause being a name collision, since File.renameTo fails rather
 * than overwrites on most platforms).
 *
 * @param string the new stage name (without path or ".txt" extension)
 */
private void renstage(final String string) {
    if (string.equals("")) {
        JOptionPane.showMessageDialog(null, "Please Enter a New Stage Name!\n", "Stage Maker", 1);
    } else {
        try {
            final File file = new File("mystages/" + stagename + ".txt");
            final File file329 = new File("mystages/" + string + ".txt");
            if (file.renameTo(file329)) {
                stagename = string;
                sfase = 0;
                hidefields();
                tabed = -2;
            } else {
                JOptionPane.showMessageDialog(null, "Unable to rename stage to: '" + string
                        + "', possible reason: stage name already used!\n", "Stage Maker", 1);
            }
        } catch (final Exception exception) {
            // Fixed typo in the user-facing message ("Deatials" -> "Details").
            JOptionPane.showMessageDialog(null, "Unable to rename file! Error Details:\n"
                    + exception, "Stage Maker", 1);
        }
    }
}
/**
 * Rotates the first i337 points of the parallel coordinate arrays
 * (is, is334) in place by i336 degrees around the pivot (i, i335),
 * using the world's cos/sin lookup. A zero angle is a no-op.
 */
private void rot(final int[] is, final int[] is334, final int i, final int i335, final int i336, final int i337) {
    if (i336 == 0) {
        return;
    }
    for (int idx = 0; idx < i337; idx++) {
        final int px = is[idx];
        final int pz = is334[idx];
        is[idx] = i + (int) ((px - i) * m.cos(i336) - (pz - i335) * m.sin(i336));
        is334[idx] = i335 + (int) ((px - i) * m.sin(i336) + (pz - i335) * m.cos(i336));
    }
}
@Override
public void run() {
thredo.setPriority(10);
btgame[0] = getImage("data/backtogame1.gif");
btgame[1] = getImage("data/backtogame2.gif");
logo = getImage("data/stagemakerlogo.gif");
for (int i = 0; i < 2; i++) {
su[i] = getImage("data/su" + (i + 1) + ".gif");
sl[i] = getImage("data/sl" + (i + 1) + ".gif");
sd[i] = getImage("data/sd" + (i + 1) + ".gif");
sr[i] = getImage("data/sr" + (i + 1) + ".gif");
zi[i] = getImage("data/zi" + (i + 1) + ".gif");
zo[i] = getImage("data/zo" + (i + 1) + ".gif");
}
loadbase();
loadsettings();
if (Madness.testdrive != 0) {
if (Madness.testcar.equals("Failx12")) {
JOptionPane.showMessageDialog(null, "Failed to load stage! Please make sure stage is saved properly before Test Drive.", "Stage Maker", 1);
thredo.stop();
} else {
stagename = Madness.testcar;
errd = 0;
readstage(3);
if (errd == 0) {
tab = 2;
dtab = 6;
witho.select(Madness.testdrive - 3);
}
}
Madness.testcar = "";
Madness.testdrive = 0;
}
requestFocus();
// my code to print the mouse pos every second
new Thread() {
@Override
public void run() {
while (true) {
System.out.println("x: " + ((xm - 505) * (Math.abs(sy) / m.focusPoint) + sx));
System.out.println("z: " + ((290 - ym) * (Math.abs(sy) / m.focusPoint) + sz));
System.out.println("y: " + (m.ground - bco[selectedPart].grat));
System.out.println("rot: " + (rot + adrot));
try {
sleep(1000L); //time in milisseconds it will wait before printing again
//set this too low and the game will lag like hell
} catch (final InterruptedException e) {
}
}
}
}.start();
while (!exwist) {
rd.setColor(new Color(225, 225, 225));
rd.fillRect(0, 25, 800, 525);
rd.setColor(new Color(0, 0, 0));
if (tab != tabed) {
hidefields();
}
if (tab == 0) {
if (tabed != tab) {
slstage.removeAll();
slstage.maxl = 360;
slstage.add(rd, "Select a Stage ");
final String[] strings = new File("mystages/").list();
if (strings != null) {
for (final String string : strings)
if (string.toLowerCase().endsWith(".txt")) {
slstage.add(rd, string.substring(0, string.length() - 4));
}
}
if (stagename.equals("")) {
slstage.select(0);
} else {
slstage.select(stagename);
if (stagename.equals(slstage.getSelectedItem())) {
readstage(3);
sx = 0;
sz = 1500;
sy = -10000;
} else {
stagename = "";
slstage.select(0);
}
}
mouseon = -1;
sfase = 0;
}
rd.drawImage(logo, 261, 35, null);
if (xm > 261 && xm < 538 && ym > 35 && ym < 121) {
if (mouseon == -1) {
mouseon = 3;
setCursor(new Cursor(12));
}
} else if (mouseon == 3) {
mouseon = -1;
setCursor(new Cursor(0));
}
if (mouseon == 3 && mousePressed == -1) {
openhlink();
}
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
if (xm > 200 && xm < 550 && ym > 467 && ym < 504) {
if (mouseon == -1) {
mouseon = 2;
setCursor(new Cursor(12));
}
} else if (mouseon == 2) {
mouseon = -1;
setCursor(new Cursor(0));
}
if (mouseon == 2) {
rd.setColor(new Color(0, 64, 128));
} else {
rd.setColor(new Color(0, 0, 0));
}
rd.drawString("For the Stage Maker Homepage, Development Center and Forums :", 400
- ftm.stringWidth("For the Stage Maker Homepage, Development Center and Forums :") / 2, 480);
rd.setColor(new Color(0, 128, 255));
String string = "http:
rd.drawString(string, 400 - ftm.stringWidth(string) / 2, 500);
if (mouseon == 2) {
rd.setColor(new Color(0, 128, 255));
} else {
rd.setColor(new Color(0, 64, 128));
}
rd.drawLine(400 - ftm.stringWidth(string) / 2, 501, 400 + ftm.stringWidth(string) / 2, 501);
if (mouseon == 2 && mousePressed == -1) {
openhlink();
}
final int i = -110;
if (xm > 150 && xm < 600 && ym > 467 + i && ym < 504 + i) {
if (mouseon == -1) {
mouseon = 1;
setCursor(new Cursor(12));
}
} else if (mouseon == 1) {
mouseon = -1;
setCursor(new Cursor(0));
}
if (mouseon == 1) {
rd.setColor(new Color(0, 64, 128));
} else {
rd.setColor(new Color(0, 0, 0));
}
rd.drawString("For help and a detailed step by step description on how to use the Stage Maker :", 400
- ftm.stringWidth("For help and a detailed step by step description on how to use the Stage Maker :")
/ 2, 480 + i);
rd.setColor(new Color(0, 128, 255));
string = "http:
rd.drawString(string, 400 - ftm.stringWidth(string) / 2, 500 + i);
if (mouseon == 1) {
rd.setColor(new Color(0, 128, 255));
} else {
rd.setColor(new Color(0, 64, 128));
}
rd.drawLine(400 - ftm.stringWidth(string) / 2, 501 + i, 400 + ftm.stringWidth(string) / 2, 501 + i);
if (mouseon == 1 && mousePressed == -1) {
openlink();
}
final int i0 = -60;
final int i1 = 70;
rd.setColor(new Color(0, 0, 0));
rd.drawRect(227 - i1, 194 + i0, 346 + i1 * 2, 167 + i1 / 5);
if (sfase == 0) {
rd.drawString("Select Stage to Edit", 400 - ftm.stringWidth("Select Stage to Edit") / 2, 230 + i0);
slstage.move(220, 240 + i0);
if (slstage.getWidth() != 360) {
slstage.setSize(360, 21);
}
if (!slstage.isShowing()) {
slstage.setVisible(true);
}
if (button(" Make new Stage ", 400, 296 + i0, 0, true)) {
srch.setText("");
slstage.setVisible(false);
sfase = 1;
}
if (button(" Rename Stage ", 325, 336 + i0, 0, false))
if (!stagename.equals("")) {
slstage.setVisible(false);
srch.setText(stagename);
sfase = 2;
} else {
JOptionPane.showMessageDialog(null, "Please select a stage to rename first.", "Stage Maker", 1);
}
if (button(" Delete Stage ", 475, 336 + i0, 0, false))
if (!stagename.equals("")) {
if (JOptionPane.showConfirmDialog(null, ""
+ "Are you sure you want to permanently delete this stage?\n\n" + stagename
+ "\n\n", "Stage Maker", 0) == 0) {
delstage(stagename);
}
} else {
JOptionPane.showMessageDialog(null, "Please select a stage to delete first.", "Stage Maker", 1);
}
if (slstage.getSelectedIndex() != 0) {
if (!stagename.equals(slstage.getSelectedItem())) {
stagename = slstage.getSelectedItem();
readstage(3);
sx = 0;
sz = 1500;
sy = -10000;
requestFocus();
}
} else {
stagename = "";
}
}
if (sfase == 1) {
rd.drawString("Make a new Stage", 400 - ftm.stringWidth("Make a new Stage") / 2, 220 + i0);
rd.setFont(new Font("Arial", 1, 12));
rd.drawString("New stage name :", 200, 246 + i0);
movefield(srch, 310, 231 + i0, 290, 23);
if (!srch.isShowing()) {
srch.setVisible(true);
srch.requestFocus();
}
fixtext(srch);
rd.drawString("Starting line type :", 293, 272 + i0);
strtyp.move(408, 256 + i0);
if (!strtyp.isShowing()) {
strtyp.setVisible(true);
}
if (button(" Make Stage ", 400, 311 + i0, 0, true)) {
newstage();
}
if (button(" Cancel ", 400, 351 + i0, 0, false)) {
strtyp.setVisible(false);
srch.setVisible(false);
sfase = 0;
}
}
if (sfase == 2) {
rd.drawString("Rename Stage : " + stagename + "", 400
- ftm.stringWidth("Rename Stage : " + stagename + "") / 2, 230 + i0);
rd.setFont(new Font("Arial", 1, 12));
rd.drawString("New name :", 218, 266 + i0);
if (!srch.isShowing()) {
srch.setVisible(true);
srch.requestFocus();
}
movefield(srch, 292, 251 + i0, 290, 23);
fixtext(srch);
if (button(" Rename Stage ", 400, 306 + i0, 0, true)) {
renstage(srch.getText());
}
if (button(" Cancel ", 400, 346 + i0, 0, false)) {
srch.setVisible(false);
sfase = 0;
}
}
}
if (tab == 1) {
if (tabed != tab) {
m.trk = 2;
readstage(0);
if (selectedPartType == PART_ROADS) {
partroads();
}
if (selectedPartType == PART_RAMPS) {
partramps();
}
if (selectedPartType == PART_OBSTACLES) {
partobst();
}
if (selectedPartType == PART_TREES) {
partrees();
}
if (selectedPartType == PART_CUSTOM) {
partcustom();
}
onoff = false;
setCursor(new Cursor(0));
setcur = false;
epart = false;
arrng = false;
if (nob == 1) {
selectedPartType = 0;
if (co[0].colok == 38) {
selectedMenuPart = 9;
} else {
selectedMenuPart = 0;
}
}
mouseon = -1;
}
if (selectedPartType == PART_ROADS) {
if (selectedMenuPart == 0) {
selectedPart = 0;
}
if (selectedMenuPart == 1) {
selectedPart = 4;
}
if (selectedMenuPart == 2) {
selectedPart = 13;
}
if (selectedMenuPart == 3) {
selectedPart = 3;
}
if (selectedMenuPart == 4) {
selectedPart = 2;
}
if (selectedMenuPart == 5) {
selectedPart = 1;
}
if (selectedMenuPart == 6) {
selectedPart = 35;
}
if (selectedMenuPart == 7) {
selectedPart = 36;
}
if (selectedMenuPart == 8) {
selectedPart = 10;
}
if (selectedMenuPart == 9) {
selectedPart = 5;
}
if (selectedMenuPart == 10) {
selectedPart = 7;
}
if (selectedMenuPart == 11) {
selectedPart = 14;
}
if (selectedMenuPart == 12) {
selectedPart = 6;
}
if (selectedMenuPart == 13) {
selectedPart = 34;
}
if (selectedMenuPart == 14) {
selectedPart = 33;
}
if (selectedMenuPart == 15) {
selectedPart = 11;
}
if (selectedMenuPart == 16) {
selectedPart = 8;
}
if (selectedMenuPart == 17) {
selectedPart = 9;
}
if (selectedMenuPart == 18) {
selectedPart = 15;
}
if (selectedMenuPart == 19) {
selectedPart = 12;
}
if (selectedMenuPart == 20) {
selectedPart = 46;
}
if (selectedMenuPart == 21) {
selectedPart = 47;
}
if (selectedMenuPart == 22) {
selectedPart = 50;
}
if (selectedMenuPart == 23) {
selectedPart = 48;
}
if (selectedMenuPart == 24) {
selectedPart = 49;
}
if (selectedMenuPart == 25) {
selectedPart = 51;
}
}
if (selectedPartType == PART_RAMPS) {
if (selectedMenuPart == 0) {
selectedPart = 16;
}
if (selectedMenuPart == 1) {
selectedPart = 18;
}
if (selectedMenuPart == 2) {
selectedPart = 19;
}
if (selectedMenuPart == 3) {
selectedPart = 22;
}
if (selectedMenuPart == 4) {
selectedPart = 17;
}
if (selectedMenuPart == 5) {
selectedPart = 21;
}
if (selectedMenuPart == 6) {
selectedPart = 20;
}
if (selectedMenuPart == 7) {
selectedPart = 39;
}
if (selectedMenuPart == 8) {
selectedPart = 42;
}
if (selectedMenuPart == 9) {
selectedPart = 40;
}
if (selectedMenuPart == 10) {
selectedPart = 23;
}
if (selectedMenuPart == 11) {
selectedPart = 25;
}
if (selectedMenuPart == 12) {
selectedPart = 24;
}
if (selectedMenuPart == 13) {
selectedPart = 43;
}
if (selectedMenuPart == 14) {
selectedPart = 45;
}
if (selectedMenuPart == 15) {
selectedPart = 26;
}
}
if (selectedPartType == PART_OBSTACLES) {
if (selectedMenuPart == 0) {
selectedPart = 27;
}
if (selectedMenuPart == 1) {
selectedPart = 28;
}
if (selectedMenuPart == 2) {
selectedPart = 41;
}
if (selectedMenuPart == 3) {
selectedPart = 44;
}
if (selectedMenuPart == 4) {
selectedPart = 52;
}
if (selectedMenuPart == 5) {
selectedPart = 53;
}
}
if (selectedPartType == PART_CHECKPOINTS)
if (onfly) {
selectedPart = 54;
} else if (!onoff) {
selectedPart = 30;
} else {
selectedPart = 32;
}
if (selectedPartType == PART_FIXHOOPS) {
selectedPart = 31;
}
if (selectedPartType == PART_TREES) {
if (selectedMenuPart == 0) {
selectedPart = 55;
}
if (selectedMenuPart == 1) {
selectedPart = 56;
}
if (selectedMenuPart == 2) {
selectedPart = 57;
}
if (selectedMenuPart == 3) {
selectedPart = 58;
}
if (selectedMenuPart == 4) {
selectedPart = 59;
}
if (selectedMenuPart == 5) {
selectedPart = 60;
}
if (selectedMenuPart == 6) {
selectedPart = 61;
}
if (selectedMenuPart == 7) {
selectedPart = 62;
}
if (selectedMenuPart == 8) {
selectedPart = 63;
}
if (selectedMenuPart == 9) {
selectedPart = 64;
}
if (selectedMenuPart == 10) {
selectedPart = 65;
}
if (selectedMenuPart == 11) {
selectedPart = 66;
}
}
if (selectedPartType == PART_CUSTOM) {
selectedPart = selectedMenuPart + 67;
if (selectedMenuPart == 0) {
selectedPart = 0;
}
}
if (selectedPartType == PART_BUMP) {
if (!pgen) {
int i = (int) (10000.0 * ThreadLocalRandom.current().nextDouble());
if (fgen != 0) {
i = fgen;
fgen = 0;
}
bco[bumppart] = new ContO(i, (int) pwd, (int) phd, m, t, 0, 0, 0);
bco[bumppart].srz = i;
bco[bumppart].srx = (int) pwd;
bco[bumppart].sry = (int) phd;
pgen = true;
seq = 3;
}
selectedPart = bumppart;
rot = 0;
} else if (pgen) {
pgen = false;
pwd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
phd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
}
if (selectedPart == 30 || selectedPart == 31 || selectedPart == 32 || selectedPart == 54) {
if (rot == -90) {
rot = 90;
}
if (rot == 180) {
rot = 0;
}
}
adrot = 0;
if (selectedPart == 2) {
adrot = -30;
}
if (selectedPart == 3) {
adrot = 30;
}
if (selectedPart == 15) {
adrot = 90;
}
if (selectedPart == 20) {
adrot = 180;
}
if (selectedPart == 26) {
adrot = 90;
}
rd.setColor(new Color(200, 200, 200));
rd.fillRect(248, 63, 514, 454);
m.trk = 2;
m.zy = 90;
m.xz = 0;
m.iw = 248;
m.w = 762;
m.ih = 63;
m.h = 517;
m.cx = 505;
m.cy = 290;
m.x = sx - m.cx;
m.z = sz - m.cz;
m.y = sy;
int i = 0;
final int[] is = new int[10000]; // stageselect limit
for (int i2 = 0; i2 < nob; i2++)
if (co[i2].dist != 0) {
is[i] = i2;
i++;
} else {
co[i2].d(rd);
}
final int[] is3 = new int[i];
for (int i4 = 0; i4 < i; i4++) {
is3[i4] = 0;
}
for (int i5 = 0; i5 < i; i5++) {
for (int i6 = i5 + 1; i6 < i; i6++)
if (co[is[i5]].dist != co[is[i6]].dist) {
if (co[is[i5]].dist < co[is[i6]].dist) {
is3[i5]++;
} else {
is3[i6]++;
}
} else if (i6 > i5) {
is3[i5]++;
} else {
is3[i6]++;
}
}
for (int i7 = 0; i7 < i; i7++) {
for (int i8 = 0; i8 < i; i8++)
if (is3[i8] == i7) {
if (is[i8] == hi) {
m.trk = 3;
}
if (is[i8] == chi && !co[is[i8]].errd) {
final int i9 = m.cx + (int) ((co[is[i8]].x - m.x - m.cx) * m.cos(m.xz)
- (co[is[i8]].z - m.z - m.cz) * m.sin(m.xz));
final int i10 = m.cz + (int) ((co[is[i8]].x - m.x - m.cx) * m.sin(m.xz)
+ (co[is[i8]].z - m.z - m.cz) * m.cos(m.xz));
final int i11 = m.cy + (int) ((co[is[i8]].y - m.y - m.cy) * m.cos(m.zy)
- (i10 - m.cz) * m.sin(m.zy));
final int i12 = m.cz + (int) ((co[is[i8]].y - m.y - m.cy) * m.sin(m.zy)
+ (i10 - m.cz) * m.cos(m.zy));
final int i13 = 1000000 / Math.abs(sy);
final Graphics2D graphics2d = rd;
graphics2d.setComposite(AlphaComposite.getInstance(3, 0.7F));
rd.setColor(new Color(0, 164, 255));
rd.fillOval(Utility.xs(i9, i12, m) - i13 / 2, Utility.ys(i11, i12, m)
- i13 / 2, i13, i13);
graphics2d.setComposite(AlphaComposite.getInstance(3, 1.0F));
rd.setColor(new Color(0, 0, 0));
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
rd.drawString("NO# " + (arrcnt + 1) + "", Utility.xs(i9, i12, m)
- ftm.stringWidth("NO# " + (arrcnt + 1) + "") / 2, Utility.ys(i11, i12, m)
- i13 / 2);
}
if (arrng && (co[is[i8]].colok == 30 || co[is[i8]].colok == 32 || co[is[i8]].colok == 54)
&& co[is[i8]].errd) {
final int i14 = m.cx + (int) ((co[is[i8]].x - m.x - m.cx) * m.cos(m.xz)
- (co[is[i8]].z - m.z - m.cz) * m.sin(m.xz));
final int i15 = m.cz + (int) ((co[is[i8]].x - m.x - m.cx) * m.sin(m.xz)
+ (co[is[i8]].z - m.z - m.cz) * m.cos(m.xz));
final int i16 = m.cy + (int) ((co[is[i8]].y - m.y - m.cy) * m.cos(m.zy)
- (i15 - m.cz) * m.sin(m.zy));
final int i17 = m.cz + (int) ((co[is[i8]].y - m.y - m.cy) * m.sin(m.zy)
+ (i15 - m.cz) * m.cos(m.zy));
final int i18 = 1000000 / Math.abs(sy);
final Graphics2D graphics2d = rd;
graphics2d.setComposite(AlphaComposite.getInstance(3, 0.5F));
rd.setColor(new Color(255, 128, 0));
rd.fillOval(Utility.xs(i14, i17, m) - i18 / 2, Utility.ys(i16, i17, m)
- i18 / 2, i18, i18);
graphics2d.setComposite(AlphaComposite.getInstance(3, 1.0F));
rd.setColor(new Color(0, 0, 0));
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
rd.drawString("NO# " + co[is[i8]].wh + "", Utility.xs(i14, i17, m)
- ftm.stringWidth("NO# " + co[is[i8]].wh + "") / 2, Utility.ys(i16, i17, m)
- i18 / 2);
}
co[is[i8]].d(rd);
if (m.trk == 3) {
m.trk = 2;
}
}
}
if (xm > 248 && xm < 762 && ym > 63 && ym < 517) {
if (!epart && !arrng) { // CALCULATES MOUSE POSITION AND PLACES SHIT
bco[selectedPart].x = (xm - 505) * (Math.abs(sy) / m.focusPoint) + sx;
bco[selectedPart].z = (290 - ym) * (Math.abs(sy) / m.focusPoint) + sz;
bco[selectedPart].y = m.ground - bco[selectedPart].grat;
bco[selectedPart].xz = rot + adrot;
int i19 = 200;
int i20 = 0;
int i21 = 0;
final int[] is22 = {
bco[selectedPart].x + atp[selectedPart][0], bco[selectedPart].x + atp[selectedPart][2]
};
final int[] is23 = {
bco[selectedPart].z + atp[selectedPart][1], bco[selectedPart].z + atp[selectedPart][3]
};
rot(is22, is23, bco[selectedPart].x, bco[selectedPart].z, rot, 2);
int i24 = 0;
onfly = false;
int i25 = 500;
for (int i26 = 0; i26 < nob; i26++) {
final int[] is27 = {
co[i26].x + atp[co[i26].colok][0], co[i26].x + atp[co[i26].colok][2]
};
final int[] is28 = {
co[i26].z + atp[co[i26].colok][1], co[i26].z + atp[co[i26].colok][3]
};
int i29 = co[i26].roofat;
if (co[i26].colok == 2) {
i29 += 30;
}
if (co[i26].colok == 3) {
i29 -= 30;
}
if (co[i26].colok == 15) {
i29 -= 90;
}
if (co[i26].colok == 20) {
i29 -= 180;
}
if (co[i26].colok == 26) {
i29 -= 90;
}
rot(is27, is28, co[i26].x, co[i26].z, i29, 2);
if (selectedPart <= 54) {
int i30 = py(is27[0], is22[0], is28[0], is23[0]);
if (i30 < i19 && i30 != 0) {
i19 = i30;
i20 = is27[0] - is22[0];
i21 = is28[0] - is23[0];
}
i30 = py(is27[1], is22[0], is28[1], is23[0]);
if (i30 < i19 && i30 != 0) {
i19 = i30;
i20 = is27[1] - is22[0];
i21 = is28[1] - is23[0];
}
i30 = py(is27[1], is22[1], is28[1], is23[1]);
if (i30 < i19 && i30 != 0) {
i19 = i30;
i20 = is27[1] - is22[1];
i21 = is28[1] - is23[1];
}
i30 = py(is27[0], is22[1], is28[0], is23[1]);
if (i30 < i19 && i30 != 0) {
i19 = i30;
i20 = is27[0] - is22[1];
i21 = is28[0] - is23[1];
}
}
if (selectedPartType == PART_CHECKPOINTS && py(is27[0], is22[0], is28[0], is23[0]) != 0
&& py(is27[1], is22[0], is28[1], is23[0]) != 0) {
for (final int element : rcheckp)
if (co[i26].colok == element) {
if (py(is27[0], is22[0], is28[0], is23[0]) <= i24 || i24 == 0) {
i24 = py(is27[0], is22[0], is28[0], is23[0]);
onoff = false;
}
if (py(is27[1], is22[0], is28[1], is23[0]) <= i24) {
i24 = py(is27[1], is22[0], is28[1], is23[0]);
onoff = false;
}
}
for (final int element : ocheckp)
if (co[i26].colok == element) {
if (py(is27[0], is22[0], is28[0], is23[0]) <= i24 || i24 == 0) {
i24 = py(is27[0], is22[0], is28[0], is23[0]);
onoff = true;
}
if (py(is27[1], is22[0], is28[1], is23[0]) <= i24) {
i24 = py(is27[1], is22[0], is28[1], is23[0]);
onoff = true;
}
}
}
if (selectedPart > 12 && selectedPart < 33 || selectedPart == 35 || selectedPart == 36
|| selectedPart >= 39 && selectedPart <= 54) {
if ((rot == 0 || rot == 180 || selectedPart == 26 || selectedPart == 15)
&& (i29 == 0 || i29 == 180 || selectedPart == 26 || selectedPart == 15)) {
if (Math.abs(is27[0] - is22[0]) < 200) {
i20 = is27[0] - is22[0];
}
if (Math.abs(is27[0] - is22[1]) < 200) {
i20 = is27[0] - is22[1];
}
if (Math.abs(is27[1] - is22[1]) < 200) {
i20 = is27[1] - is22[1];
}
if (Math.abs(is27[1] - is22[0]) < 200) {
i20 = is27[1] - is22[0];
}
}
if ((rot == 90 || rot == -90 || selectedPart == 26 || selectedPart == 15)
&& (i29 == 90 || i29 == -90 || selectedPart == 26 || selectedPart == 15)) {
if (Math.abs(is28[0] - is23[0]) < 200) {
i21 = is28[0] - is23[0];
}
if (Math.abs(is28[0] - is23[1]) < 200) {
i21 = is28[0] - is23[1];
}
if (Math.abs(is28[1] - is23[1]) < 200) {
i21 = is28[1] - is23[1];
}
if (Math.abs(is28[1] - is23[0]) < 200) {
i21 = is28[1] - is23[0];
}
}
}
if (selectedPartType == PART_CHECKPOINTS && co[i26].colok >= 46 && co[i26].colok <= 51) {
final int[] is33 = {
2, 3, 5, 2, 3, 3
};
if ((Math.abs(co[i26].roofat) == 180 || co[i26].roofat == 0) && rot == 0
&& Math.abs(bco[selectedPart].x - co[i26].x) < 500
&& Math.abs(bco[selectedPart].z - co[i26].z) < 3000) {
for (int i34 = 0; i34 < is33[co[i26].colok - 46]; i34++) {
for (int i35 = 0; i35 < co[i26].p[i34].n; i35++)
if (py(bco[selectedPart].x, co[i26].x, bco[selectedPart].z, co[i26].z
+ co[i26].p[i34].oz[i35]) < i25) {
i25 = py(bco[selectedPart].x, co[i26].x, bco[selectedPart].z, co[i26].z
+ co[i26].p[i34].oz[i35]);
flyh = co[i26].p[i34].oy[i35] - 28 + m.ground;
i20 = co[i26].x - bco[selectedPart].x;
i21 = co[i26].z + co[i26].p[i34].oz[i35] - bco[selectedPart].z;
onfly = true;
}
}
}
if (Math.abs(co[i26].roofat) == 90 && rot == 90
&& Math.abs(bco[selectedPart].z - co[i26].z) < 500
&& Math.abs(bco[selectedPart].x - co[i26].x) < 3000) {
for (int i36 = 0; i36 < is33[co[i26].colok - 46]; i36++) {
for (int i37 = 0; i37 < co[i26].p[i36].n; i37++)
if (py(bco[selectedPart].z, co[i26].z, bco[selectedPart].x, co[i26].x
+ co[i26].p[i36].ox[i37]) < i25) {
i25 = py(bco[selectedPart].z, co[i26].z, bco[selectedPart].x, co[i26].x
+ co[i26].p[i36].ox[i37]);
flyh = co[i26].p[i36].oy[i37] - 28 + m.ground;
i21 = co[i26].z - bco[selectedPart].z;
i20 = co[i26].x + co[i26].p[i36].ox[i37] - bco[selectedPart].x;
onfly = true;
}
}
}
}
}
bco[selectedPart].x += i20;
bco[selectedPart].z += i21;
final int i38 = bco[selectedPart].xy;
final int i39 = bco[selectedPart].zy;
if (selectedPart == 31) {
bco[selectedPart].y = -hf;
if (bco[selectedPart].y > -500) {
bco[selectedPart].y = -500;
}
} else {
bco[selectedPart].xy = 0;
}
if (selectedPart == 54) {
bco[selectedPart].y = flyh;
}
bco[selectedPart].zy = 0;
if (cntout == 0) {
if (mouseon == -1) {
bco[selectedPart].d(rd);
if (!setcur) {
setCursor(new Cursor(13));
setcur = true;
}
if (mousePressed == -1) {
if (nundo < 5000) {
undos[nundo] = bstage;
nundo++;
}
if (bco[selectedPart].xz == 270) {
bco[selectedPart].xz = -90;
}
if (bco[selectedPart].xz == 360) {
bco[selectedPart].xz = 0;
}
errd = 0;
boolean bool = false;
if (xnob < 10000) { //piece limit
System.out.println("place check");
if (selectedPart != 31 && selectedPart != 54 && selectedPart != bumppart) {
try {
System.out.println("placed");
System.out.println("" + selectedPart);
System.out.println("" + bco[selectedPart]);
co[nob] = new ContO(bco[selectedPart], bco[selectedPart].x, m.ground
- bco[selectedPart].grat, bco[selectedPart].z, bco[selectedPart].xz);
co[nob].roofat = bco[selectedPart].xz;
co[nob].colok = selectedPart;
nob++;
} catch (final Exception exception) {
errd = 1;
}
}
if (selectedPart == 31)
if (cp.fn < 5) {
co[nob] = new ContO(bco[selectedPart], bco[selectedPart].x, bco[selectedPart].y, bco[selectedPart].z, bco[selectedPart].xz);
co[nob].roofat = bco[selectedPart].xz;
co[nob].colok = selectedPart;
nob++;
fixh.setText("" + Math.abs(bco[selectedPart].y) + "");
} else {
errd = 5;
}
if (selectedPart == 54) {
try {
co[nob] = new ContO(bco[selectedPart], bco[selectedPart].x, bco[selectedPart].y, bco[selectedPart].z, bco[selectedPart].xz);
co[nob].roofat = bco[selectedPart].xz;
co[nob].colok = selectedPart;
nob++;
} catch (final Exception exception) {
errd = 1;
}
}
if (selectedPart == bumppart) {
co[nob] = new ContO(bco[bumppart].srz, bco[bumppart].srx, bco[bumppart].sry, m, t, bco[bumppart].x, bco[bumppart].z, bco[selectedPart].y);
co[nob].srz = bco[bumppart].srz;
co[nob].srx = bco[bumppart].srx;
co[nob].sry = bco[bumppart].sry;
co[nob].colok = selectedPart;
nob++;
}
} else {
errd = 4;
}
if (errd == 0) {
sortstage();
readstage(0);
bool = true;
if (selectedPart == bumppart) {
pgen = false;
}
if (selectedPart == 52 || selectedPart == 53
|| selectedPart >= 55 && selectedPart <= 65) {
seq = 3;
bco[selectedPart].xy = 0;
bco[selectedPart].zy = 0;
boolean bool40 = false;
if (rot == 0 && !bool40) {
rot = 90;
bool40 = true;
}
if (rot == 90 && !bool40) {
rot = 180;
bool40 = true;
}
if (rot == 180 && !bool40) {
rot = -90;
bool40 = true;
}
if (rot == -90 && !bool40) {
rot = 0;
bool40 = true;
}
}
}
if (errd != 0) {
JOptionPane.showMessageDialog(null, ""
+ "Error! Unable to place part!\nReason:\n" + errlo[errd - 1]
+ "\n\n", "Stage Maker", 0);
if (bool) {
nundo
bstage = undos[nundo];
readstage(0);
}
}
lxm = bco[selectedPart].x;
//lym = bco[selectedPart].z;
cntout = 10;
}
}
} else {
if (lxm != bco[selectedPart].x && lxm != bco[selectedPart].z) {
cntout
}
if (setcur) {
setCursor(new Cursor(0));
setcur = false;
}
}
bco[selectedPart].xy = i38;
bco[selectedPart].zy = i39;
} else {
if (epart)
if (esp == -1 && !overcan) {
hi = -1;
int i41 = 0;
for (int i42 = 0; i42 < nob; i42++) {
final int i43 = m.cx + (int) ((co[i42].x - m.x - m.cx) * m.cos(m.xz)
- (co[i42].z - m.z - m.cz) * m.sin(m.xz));
final int i44 = m.cz + (int) ((co[i42].x - m.x - m.cx) * m.sin(m.xz)
+ (co[i42].z - m.z - m.cz) * m.cos(m.xz));
final int i45 = m.cy + (int) ((co[i42].y - m.y - m.cy) * m.cos(m.zy)
- (i44 - m.cz) * m.sin(m.zy));
final int i46 = m.cz + (int) ((co[i42].y - m.y - m.cy) * m.sin(m.zy)
+ (i44 - m.cz) * m.cos(m.zy));
if (xm > Utility.xs(i43 - co[i42].maxR, i46, m)
&& xm < Utility.xs(i43 + co[i42].maxR, i46, m)
&& ym > Utility.ys(i45 - co[i42].maxR, i46, m)
&& ym < Utility.ys(i45 + co[i42].maxR, i46, m) && co[i42].colok != 37
&& co[i42].colok != 38)
if (hi == -1) {
hi = i42;
i41 = py(xm, Utility.xs(i43, i46, m), ym, Utility.ys(i45, i46, m));
} else if (py(xm, Utility.xs(i43, i46, m), ym, Utility.ys(i45, i46, m)) <= i41) {
hi = i42;
i41 = py(xm, Utility.xs(i43, i46, m), ym, Utility.ys(i45, i46, m));
}
}
if (hi != -1) {
if (!setcur) {
setCursor(new Cursor(13));
setcur = true;
}
if (mousePressed == -1) {
esp = hi;
mousePressed = 0;
}
} else if (setcur) {
setCursor(new Cursor(0));
setcur = false;
}
} else if (setcur) {
setCursor(new Cursor(0));
setcur = false;
}
if (arrng) {
chi = -1;
int i47 = 5000;
for (int i48 = 0; i48 < nob; i48++)
if ((co[i48].colok == 30 || co[i48].colok == 32 || co[i48].colok == 54)
&& !co[i48].errd) {
final int i49 = m.cx + (int) ((co[i48].x - m.x - m.cx) * m.cos(m.xz)
- (co[i48].z - m.z - m.cz) * m.sin(m.xz));
final int i50 = m.cz + (int) ((co[i48].x - m.x - m.cx) * m.sin(m.xz)
+ (co[i48].z - m.z - m.cz) * m.cos(m.xz));
final int i51 = m.cy + (int) ((co[i48].y - m.y - m.cy) * m.cos(m.zy)
- (i50 - m.cz) * m.sin(m.zy));
final int i52 = m.cz + (int) ((co[i48].y - m.y - m.cy) * m.sin(m.zy)
+ (i50 - m.cz) * m.cos(m.zy));
if (xm > Utility.xs(i49 - co[i48].maxR, i52, m)
&& xm < Utility.xs(i49 + co[i48].maxR, i52, m)
&& ym > Utility.ys(i51 - co[i48].maxR, i52, m)
&& ym < Utility.ys(i51 + co[i48].maxR, i52, m)
&& py(xm, Utility.xs(i49, i52, m), ym, Utility.ys(i51, i52, m)) <= i47) {
chi = i48;
i47 = py(xm, Utility.xs(i49, i52, m), ym, Utility.ys(i51, i52, m));
}
}
if (chi != -1) {
if (!setcur) {
setCursor(new Cursor(13));
setcur = true;
}
if (mousePressed == -1) {
arrcnt++;
co[chi].wh = arrcnt;
co[chi].errd = true;
mousePressed = 0;
}
} else if (setcur) {
setCursor(new Cursor(0));
setcur = false;
}
}
}
} else if (setcur) {
setCursor(new Cursor(0));
setcur = false;
}
if (epart && esp != -1)
if (co[esp].dist != 0) {
m.cx = 505;
m.cy = 290;
m.x = sx - m.cx;
m.z = sz - m.cz;
m.y = sy;
final int i53 = m.cx + (int) ((co[esp].x - m.x - m.cx) * m.cos(m.xz)
- (co[esp].z - m.z - m.cz) * m.sin(m.xz));
final int i54 = m.cz + (int) ((co[esp].x - m.x - m.cx) * m.sin(m.xz)
+ (co[esp].z - m.z - m.cz) * m.cos(m.xz));
final int i55 = m.cy
+ (int) ((co[esp].y - m.y - m.cy) * m.cos(m.zy) - (i54 - m.cz) * m.sin(m.zy));
final int i56 = m.cz
+ (int) ((co[esp].y - m.y - m.cy) * m.sin(m.zy) + (i54 - m.cz) * m.cos(m.zy));
final int i57 = Utility.xs(i53, i56, m);
final int i58 = Utility.ys(i55, i56, m);
rd.setColor(new Color(225, 225, 225));
rd.fillRect(i57, i58, 90, 88);
rd.setColor(new Color(138, 147, 160));
rd.drawRect(i57, i58, 90, 88);
if (button(" Edit ", i57 + 45, i58 + 22, 3, false)) {
copyesp(true);
removesp();
lxm = 0;
//lym = 0;
cntout = 2;
epart = false;
}
if (button(" Remove ", i57 + 45, i58 + 49, 3, false)) {
removesp();
esp = -1;
mousePressed = 0;
}
if (button(" Copy ", i57 + 45, i58 + 76, 3, false)) {
copyesp(false);
lxm = 0;
//lym = 0;
cntout = 2;
epart = false;
}
rd.setColor(new Color(255, 0, 0));
rd.drawString("x", i57 + 82, i58 - 2);
if (xm > 248 && xm < 762 && ym > 63 && ym < 517 && mousePressed == 1
&& (xm < i57 || xm > i57 + 90 || ym < i58 || ym > i58 + 88)) {
esp = -1;
mousePressed = 0;
}
} else {
esp = -1;
}
rd.setColor(new Color(225, 225, 225));
rd.fillRect(248, 25, 514, 38);
rd.fillRect(0, 25, 248, 530);
rd.fillRect(248, 517, 514, 38);
rd.fillRect(762, 25, 38, 530);
if (selectedPartType == PART_BUMP) {
rd.setColor(new Color(0, 0, 0));
rd.setFont(new Font("Arial", 1, 12));
rd.drawString("Radius:", 11, 97);
rd.drawString("Height:", 14, 117);
boolean bool = false;
if (xm > 57 && xm < 204 && ym > 90 && ym < 99) {
bool = true;
}
rd.setColor(new Color(136, 148, 170));
if (bool || mouseon == 1) {
rd.drawRect(57, 90, 147, 8);
rd.setColor(new Color(0, 0, 0));
}
rd.drawLine(57, 94, 204, 94);
if (mouseon == 1) {
pwd = (xm - 57) / 36.75F + 2.0F;
if (pwd < 2.0F) {
pwd = 2.0F;
}
if (pwd > 6.0F) {
pwd = 6.0F;
}
}
rd.drawRect((int) (57.0F + (pwd - 2.0F) * 36.75F), 90, 2, 8);
boolean bool59 = false;
if (xm > 57 && xm < 204 && ym > 110 && ym < 119) {
bool59 = true;
}
rd.setColor(new Color(136, 148, 170));
if (bool59 || mouseon == 2) {
rd.drawRect(57, 110, 147, 8);
rd.setColor(new Color(0, 0, 0));
}
rd.drawLine(57, 114, 204, 114);
if (mouseon == 2) {
phd = (xm - 57) / 36.75F + 2.0F;
if (phd < 2.0F) {
phd = 2.0F;
}
if (phd > 6.0F) {
phd = 6.0F;
}
}
rd.drawRect((int) (57.0F + (phd - 2.0F) * 36.75F), 110, 2, 8);
if (mousePressed == 1) {
if (bool) {
mouseon = 1;
}
if (bool59) {
mouseon = 2;
}
} else {
if (mouseon == 1 || mouseon == 2) {
pgen = false;
}
mouseon = -1;
}
}
int i60 = 0;
if (xm > 482 && xm < 529 && ym > 35 && ym < 61 || up) {
i60 = 1;
if (mousePressed == 1 || up) {
sz += 500;
}
}
rd.drawImage(su[i60], 482, 35, null);
i60 = 0;
if (xm > 482 && xm < 529 && ym > 519 && ym < 545 || down) {
i60 = 1;
if (mousePressed == 1 || down) {
sz -= 500;
}
}
rd.drawImage(sd[i60], 482, 519, null);
i60 = 0;
if (xm > 220 && xm < 246 && ym > 264 && ym < 311 || left) {
i60 = 1;
if (mousePressed == 1 || left) {
sx -= 500;
}
}
rd.drawImage(sl[i60], 220, 264, null);
i60 = 0;
if (xm > 764 && xm < 790 && ym > 264 && ym < 311 || right) {
i60 = 1;
if (mousePressed == 1 || right) {
sx += 500;
}
}
rd.drawImage(sr[i60], 764, 264, null);
i60 = 0;
if (xm > 616 && xm < 677 && ym > 30 && ym < 61 || zoomi) {
i60 = 1;
if (mousePressed == 1 || zoomi) {
sy += 500;
if (sy > -2500) {
sy = -2500;
}
}
}
rd.drawImage(zi[i60], 616, 30, null);
i60 = 0;
if (xm > 690 && xm < 751 && ym > 30 && ym < 61 || zoomo) {
i60 = 1;
if (mousePressed == 1 || zoomo) {
sy -= 500;
if (sy < -55000) {
sy = -55000;
}
}
}
rd.drawImage(zo[i60], 690, 30, null);
if ((epart || arrng) && sy < -36000) {
sy = -36000;
}
rd.setFont(new Font("Arial", 1, 11));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString("Part Selection", 11, 47);
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
ptyp.move(10, 50);
if (!ptyp.isShowing()) {
ptyp.setVisible(true);
ptyp.select(selectedPartType);
}
if (selectedPartType != ptyp.getSelectedIndex()) {
selectedPartType = ptyp.getSelectedIndex();
if (selectedPartType == PART_ROADS) {
partroads();
part.setVisible(true);
}
if (selectedPartType == PART_RAMPS) {
partramps();
part.setVisible(true);
}
if (selectedPartType == PART_OBSTACLES) {
partobst();
part.setVisible(true);
}
if (selectedPartType == PART_TREES) {
partrees();
part.setVisible(true);
}
if (selectedPartType == PART_CUSTOM) {
partcustom();
part.setVisible(true);
}
selectedMenuPart = 0;
part.select(selectedMenuPart);
requestFocus();
fixh.setText("2000");
focuson = false;
}
part.move(10, 80);
part.setSize(200, 21);
if (selectedPartType == PART_ROADS || selectedPartType == PART_RAMPS
|| selectedPartType == PART_OBSTACLES || selectedPartType == PART_TREES
|| selectedPartType == PART_CUSTOM) {
if (!part.isShowing()) {
part.setVisible(true);
part.select(selectedMenuPart);
}
} else if (part.isShowing()) {
part.setVisible(false);
}
if (selectedMenuPart != part.getSelectedIndex()) {
selectedMenuPart = part.getSelectedIndex();
focuson = false;
}
if (selectedPartType == PART_CHECKPOINTS) {
rd.drawString("Checkpoint", 110 - ftm.stringWidth("Checkpoint") / 2, 120);
}
if (selectedPartType == PART_FIXHOOPS) {
rd.drawString("Fixing Hoop", 110 - ftm.stringWidth("Fixing Hoop") / 2, 120);
}
if (lsp != selectedPart) {
seq = 3;
bco[selectedPart].xy = 0;
bco[selectedPart].zy = 0;
lsp = selectedPart;
epart = false;
arrng = false;
}
if (xm > 10 && xm < 210 && ym > 130 && ym < 334) {
if (seq >= 3)
if (seq == 20 || !seqn) {
seq = 0;
bco[selectedPart].xy = 0;
bco[selectedPart].zy = 0;
} else {
seq++;
}
seqn = true;
rd.setColor(new Color(210, 210, 210));
} else {
rd.setColor(new Color(200, 200, 200));
seqn = false;
}
rd.fillRect(10, 130, 200, 200);
if ((selectedPart == 30 || selectedPart == 32 || selectedPart == 54)
&& button(" Rearrange Checkpoints > ", 110, 315, 2, true)) {
mousePressed = 0;
epart = false;
if (!arrng) {
arrcnt = 0;
for (int i61 = 0; i61 < nob; i61++)
if (co[i61].colok == 30 || co[i61].colok == 32 || co[i61].colok == 54) {
co[i61].errd = false;
}
arrng = true;
} else {
arrng = false;
}
}
if (seqn && mousePressed == -1)
if (selectedPart != bumppart) {
boolean bool = false;
if (rot == 0 && !bool) {
rot = 90;
bool = true;
}
if (rot == 90 && !bool) {
rot = 180;
bool = true;
}
if (rot == 180 && !bool) {
rot = -90;
bool = true;
}
if (rot == -90 && !bool) {
rot = 0;
bool = true;
}
if (selectedPart == 30 || selectedPart == 31 || selectedPart == 32) {
if (rot == -90) {
rot = 90;
}
if (rot == 180) {
rot = 0;
}
}
seq = 5;
bco[selectedPart].xy = 0;
bco[selectedPart].zy = 0;
epart = false;
arrng = false;
} else {
pgen = false;
pwd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
phd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
}
if (selectedPart == 31) {
rd.setFont(new Font("Arial", 1, 12));
rd.setColor(new Color(0, 0, 0));
rd.drawString("Height:", 62, 280);
movefield(fixh, 107, 266, 50, 20);
if (fixh.hasFocus()) {
focuson = false;
}
if (!fixh.isShowing()) {
fixh.setVisible(true);
}
rd.setFont(new Font("Arial", 0, 11));
ftm = rd.getFontMetrics();
rd.drawString("(Height off the ground... )", 110
- ftm.stringWidth("(Height off the ground... )") / 2, 300);
if (fixh.getText().equals("")) {
fixh.setText("0");
fixh.select(0, 0);
}
try {
hf = Integer.valueOf(fixh.getText()).intValue();
if (hf > 8000) {
hf = 8000;
fixh.setText("8000");
}
} catch (final Exception exception) {
hf = 2000;
fixh.setText("2000");
}
} else if (fixh.isShowing()) {
fixh.setVisible(false);
}
// CAMERA POSITION FOR THE CONTO PREVIEW SHIT
m.trk = 2;
m.zy = 90;
m.xz = 0;
m.iw = 10;
m.w = 210;
m.ih = 130;
m.h = 330;
m.cx = 110;
m.cy = 230;
m.x = -110;
m.z = -230;
m.y = -15000;
if (selectedPartType == PART_RAMPS && selectedPart != 20 && selectedPart != 21 && selectedPart != 43
&& selectedPart != 45) {
m.y = -10000;
}
if (selectedPartType == PART_OBSTACLES && selectedPart != 41) {
m.y = -7600;
}
if (selectedPartType == PART_CHECKPOINTS || selectedPartType == PART_FIXHOOPS) {
m.y = -5000;
}
if (selectedPartType == PART_TREES) {
m.y = -3000;
m.z = 150;
}
if (selectedPartType == PART_BUMP) {
m.y = -7600;
}
if (selectedPart == 31) {
m.z = -500;
if (rot != 0) {
bco[selectedPart].roted = true;
} else {
bco[selectedPart].roted = false;
}
}
bco[selectedPart].x = 0;
bco[selectedPart].y = 0;
bco[selectedPart].z = 0;
bco[selectedPart].xz = rot + adrot;
bco[selectedPart].d(rd);
int i62 = 1;
if (selectedPartType == PART_ROADS || selectedPartType == PART_RAMPS
|| selectedPartType == PART_CUSTOM) {
if (selectedPart != 26 && selectedPart != 20) {
if (rot == -90 || rot == 0) {
i62 = -1;
}
} else {
if (selectedPart == 26 && (rot == -90 || rot == 180)) {
i62 = -1;
}
if (selectedPart == 20 && (rot == 90 || rot == 180)) {
i62 = -1;
}
}
if (seq == 2) {
bco[selectedPart].xy -= 5 * i62;
if (bco[selectedPart].xy == 0) {
seq = 3;
}
}
if (seq == 1) {
seq = 2;
}
if (seq == 0) {
bco[selectedPart].xy += 5 * i62;
if (bco[selectedPart].xy == 85 * i62) {
seq = 1;
}
}
}
if (selectedPartType == PART_OBSTACLES || selectedPartType == PART_CHECKPOINTS
|| selectedPartType == PART_FIXHOOPS || selectedPartType == PART_BUMP) {
if (rot == -90 || rot == 180) {
i62 = -1;
}
if (seq == 2) {
bco[selectedPart].zy += 5 * i62;
if (bco[selectedPart].zy == 0) {
seq = 3;
}
}
if (seq == 1) {
seq = 2;
}
if (seq == 0) {
bco[selectedPart].zy -= 5 * i62;
if (bco[selectedPart].zy == -(85 * i62)) {
seq = 1;
}
}
}
if (selectedPartType == PART_TREES) {
if (rot == -90 || rot == 180) {
i62 = -1;
}
boolean bool = false;
if (rot == -90 || rot == 90) {
bool = true;
}
if (!bool) {
bco[selectedPart].xy = 0;
} else {
bco[selectedPart].zy = 0;
}
if (seq == 2)
if (!bool) {
bco[selectedPart].zy += 5 * i62;
if (bco[selectedPart].zy == 0) {
seq = 3;
}
} else {
bco[selectedPart].xy -= 5 * i62;
if (bco[selectedPart].xy == 0) {
seq = 3;
}
}
if (seq == 1) {
seq = 2;
}
if (seq == 0)
if (!bool) {
bco[selectedPart].zy -= 5 * i62;
if (bco[selectedPart].zy == -(85 * i62)) {
seq = 1;
}
} else {
bco[selectedPart].xy += 5 * i62;
if (bco[selectedPart].xy == 85 * i62) {
seq = 1;
}
}
}
if (selectedPart != bumppart) {
if (button(" Rotate ", 110, 348, 3, true)) {
boolean bool = false;
if (rot == 0 && !bool) {
rot = 90;
bool = true;
}
if (rot == 90 && !bool) {
rot = 180;
bool = true;
}
if (rot == 180 && !bool) {
rot = -90;
bool = true;
}
if (rot == -90 && !bool) {
rot = 0;
bool = true;
}
if (selectedPart == 30 || selectedPart == 31 || selectedPart == 32) {
if (rot == -90) {
rot = 90;
}
if (rot == 180) {
rot = 0;
}
}
seq = 3;
bco[selectedPart].xy = 0;
bco[selectedPart].zy = 0;
epart = false;
arrng = false;
}
} else if (button(" Generate New ", 110, 348, 3, true)) {
pgen = false;
pwd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
phd = 2L + Math.round(ThreadLocalRandom.current().nextDouble() * 4.0);
}
if (button(">", 191, 348, 3, true) && (selectedPartType == PART_ROADS || selectedPartType == PART_RAMPS
|| selectedPartType == PART_OBSTACLES || selectedPartType == PART_TREES
|| selectedPartType == PART_CUSTOM)) {
selectedMenuPart++;
if (selectedMenuPart == part.getItemCount()) {
selectedMenuPart = 0;
}
part.select(selectedMenuPart);
epart = false;
arrng = false;
}
if (button("<", 28, 348, 3, true) && (selectedPartType == PART_ROADS || selectedPartType == PART_RAMPS
|| selectedPartType == PART_OBSTACLES || selectedPartType == PART_TREES
|| selectedPartType == PART_CUSTOM)) {
selectedMenuPart
if (selectedMenuPart == -1) {
selectedMenuPart = part.getItemCount() - 1;
}
part.select(selectedMenuPart);
epart = false;
arrng = false;
}
if (button(" < Undo ", 204, 404, 0, true)) {
epart = false;
arrng = false;
if (nundo > 0) {
nundo
bstage = undos[nundo];
readstage(0);
}
}
if (button(" Remove / Edit Part ", 172, 454, 0, true)) {
if (!epart) {
epart = true;
} else {
epart = false;
}
arrng = false;
esp = -1;
}
if (button(" Go to > Startline ", 175, 504, 0, true)) {
sx = 0;
sz = 1500;
}
if (button(" About Part ", 164, 66, 3, false)) {
JOptionPane.showMessageDialog(null, discp[selectedPart], "Stage Maker", 1);
}
if (button(" Keyboard Controls ", 691, 536, 3, false)) {
JOptionPane.showMessageDialog(null, "Instead of clicking the triangular buttons around the Building Area to scroll, you can use:\n[ Keyboard Arrows ]\n\nYou can also zoom in and out using the following keys:\n[+] & [-] or [8] & [2] or [Enter] & [Backspace]\n\n", "Stage Maker", 1);
}
if (button(" Save ", 280, 50, 0, false)) {
epart = false;
arrng = false;
savefile();
}
if (button(" Save & Preview ", 380, 50, 0, false)) {
epart = false;
arrng = false;
savefile();
hidefields();
tab = 2;
}
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
int i63 = 0;
final int i64 = (int) (xnob / 10000.0F * 200.0F); // limits
int i65 = i64;
final int i66 = (int) (t.nt / 670000.0F * 200.0F); // limits
if (i66 > i65) {
i65 = i66;
i63 = 1;
}
final int i67 = (int) (cp.n / 10000.0F * 200.0F); // limits
if (i67 > i65) {
i65 = i67;
i63 = 2;
}
final int i68 = (int) (m.nrw * m.ncl / 9999999.0F * 200.0F); // medium
// limit...does
// exist?
if (i68 > i65) {
i65 = i68;
i63 = 3;
}
if (i65 > 200) {
i65 = 200;
}
if (i65 <= 100) {
rd.setColor(new Color(100 + i65, 225, 30));
} else {
rd.setColor(new Color(200, 325 - i65, 30));
}
rd.fillRect(167, 531, i65, 9);
if (button("Memory Consumption :", 85, 540, 3, false)) {
JOptionPane.showMessageDialog(null, "Memory Consumption Details\n\nNumber of Parts: "
+ i64 / 2 + " %\nPart's Details: " + i66 / 2 + " %\nRoad Points: " + i67 / 2
+ " %\nStage Area: " + i68 / 2 + " %\n \n", "Stage Maker", 1);
}
rd.setColor(new Color(0, 0, 0));
rd.drawRect(167, 531, 200, 9);
final String[] strings = {
"Number of Parts", "Part's Details", "Road Points", "Stage Area"
};
rd.drawString(strings[i63], 267 - ftm.stringWidth(strings[i63]) / 2, 540);
rd.drawString("" + i65 / 2 + " % used", 375, 540);
if (overcan) {
overcan = false;
}
if (epart) {
if (esp == -1) {
rd.setColor(new Color(0, 0, 0));
rd.drawString("Click on any part to Edit >", 257, 454);
if (button(" Cancel ", 323, 474, 4, false)) {
epart = false;
}
}
} else {
if (hi != -1) {
hi = -1;
}
if (esp != -1) {
esp = -1;
}
}
if (arrng) {
rd.setColor(new Color(0, 0, 0));
rd.drawString("Click on Checkpoint NO# " + (arrcnt + 1) + " >", 257, 80);
if (button(" Cancel ", 330, 100, 4, false)) {
arrng = false;
}
if (arrcnt == cp.nsp) {
sortstage();
JOptionPane.showMessageDialog(null, "Checkpoints Arranged!\nPress Save and Test Drive to check the new checkpoint order.\n", "Stage Maker", 1);
arrng = false;
}
} else if (chi != -1) {
chi = -1;
}
}
if (tab == 2) {
if (tabed != tab) {
m.trk = 0;
readstage(1);
setCursor(new Cursor(0));
setcur = false;
vxz = 0;
vx = sx - 400;
vz = sz - m.cz - 8000;
vy = -1500;
dtabed = -1;
}
m.trk = 0;
m.zy = 6;
m.iw = 10;
m.w = 790;
m.ih = 35;
m.h = 445;
m.cx = 400;
m.cy = 215;
m.xz = vxz;
m.x = vx;
m.z = vz;
m.y = vy;
m.d(rd);
int i = 0;
final int[] is = new int[10000]; // stageselect limit
for (int i69 = 0; i69 < nob; i69++)
if (co[i69].dist != 0) {
is[i] = i69;
i++;
} else {
co[i69].d(rd);
}
final int[] is70 = new int[i];
for (int i71 = 0; i71 < i; i71++) {
is70[i71] = 0;
}
for (int i72 = 0; i72 < i; i72++) {
for (int i73 = i72 + 1; i73 < i; i73++)
if (co[is[i72]].dist != co[is[i73]].dist) {
if (co[is[i72]].dist < co[is[i73]].dist) {
is70[i72]++;
} else {
is70[i73]++;
}
} else if (i73 > i72) {
is70[i72]++;
} else {
is70[i73]++;
}
}
for (int i74 = 0; i74 < i; i74++) {
for (int i75 = 0; i75 < i; i75++)
if (is70[i75] == i74) {
if (is[i75] == hi) {
m.trk = 3;
}
co[is[i75]].d(rd);
if (m.trk == 3) {
m.trk = 2;
}
}
}
if (up) {
vz += 500.0F * m.cos(m.xz);
vx += 500.0F * m.sin(m.xz);
}
if (down) {
vz -= 500.0F * m.cos(m.xz);
vx -= 500.0F * m.sin(m.xz);
}
if (left) {
vxz -= 5;
}
if (right) {
vxz += 5;
}
if (zoomi) {
vy += 100;
if (vy > -500) {
vy = -500;
}
}
if (zoomo) {
vy -= 100;
if (vy < -5000) {
vy = -5000;
}
}
rd.setColor(new Color(225, 225, 225));
rd.fillRect(0, 25, 10, 525);
rd.fillRect(790, 25, 10, 525);
rd.fillRect(10, 25, 780, 10);
rd.fillRect(10, 445, 780, 105);
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
final String[] strings = {
"Controls", "Atmosphere", "Colors", "Scenery", "Laps", "Sound Track", "Test Drive"
};
final int[] is76 = {
10, 10, 121, 111
};
final int[] is77 = {
425, 445, 445, 425
};
for (int i78 = 0; i78 < 7; i78++) {
rd.setColor(new Color(170, 170, 170));
if (xm > is76[0] && xm < is76[3] && ym > 425 && ym < 445) {
rd.setColor(new Color(190, 190, 190));
}
if (dtab == i78) {
rd.setColor(new Color(225, 225, 225));
}
rd.fillPolygon(is76, is77, 4);
rd.setColor(new Color(0, 0, 0));
rd.drawString(strings[i78], i78 * 111 + 62 - ftm.stringWidth(strings[i78]) / 2, 439);
if (xm > is76[0] && xm < is76[3] && ym > 425 && ym < 445 && mousePressed == -1 && mouseon == -1) {
dtab = i78;
}
for (int i79 = 0; i79 < 4; i79++) {
is76[i79] += 111;
}
}
if (tabed == tab && dtab != dtabed) {
if (!ttstage.equals("")) {
tstage = ttstage;
ttstage = "";
}
readstage(1);
hidefields();
}
if (dtab == 0) {
rd.setColor(new Color(0, 0, 0));
rd.drawString("Use the [ Keyboard Arrows ] to navigate through the stage.", 20, 470);
rd.drawString("[Left] & [Right] arrows are for rotating. [Up] & [Down] arrows are for moving forwards and backwards.", 20, 490);
rd.drawString("For moving vertically down and up use the following keys: [+] & [-] or [8] & [2] or [Enter] & [Backspace]", 20, 520);
}
if (dtab == 2) {
if (dtabed != dtab) {
Color.RGBtoHSB(csky[0], csky[1], csky[2], hsb[0]);
Color.RGBtoHSB(cfade[0], cfade[1], cfade[2], hsb[1]);
Color.RGBtoHSB(cgrnd[0], cgrnd[1], cgrnd[2], hsb[2]);
for (int i80 = 0; i80 < 3; i80++) {
final float f = hsb[i80][1];
hsb[i80][1] = hsb[i80][2];
hsb[i80][2] = f;
}
if (hsb[1][1] == (hsb[0][1] + hsb[2][1]) / 2.0F && hsb[1][0] == hsb[2][0]
&& hsb[1][2] == hsb[2][2]) {
pfog.setState(true);
} else {
pfog.setState(false);
}
ttstage = "";
mouseon = -1;
}
if (mousePressed != 1) {
if ((mouseon >= 6 || mouseon < 3) && mouseon != -1) {
if (ttstage.equals("")) {
ttstage = tstage;
}
sortop();
readstage(1);
}
mouseon = -1;
}
final String[] strings81 = {
"Sky", "Dust / Fog", "Ground"
};
for (int i82 = 0; i82 < 3; i82++) {
rd.setColor(new Color(0, 0, 0));
rd.drawString(strings81[i82], 107 + 195 * i82 - ftm.stringWidth(strings81[i82]) / 2, 461);
for (int i83 = 0; i83 < 150; i83++) {
rd.setColor(Color.getHSBColor((float) (i83 * 0.006667), 1.0F, 1.0F));
rd.drawLine(32 + i83 + 195 * i82, 467, 32 + i83 + 195 * i82, 474);
}
for (int i84 = 0; i84 < 150; i84++) {
rd.setColor(Color.getHSBColor(0.0F, 0.0F, 0.5F + i84 * 0.00333F));
rd.drawLine(32 + i84 + 195 * i82, 483, 32 + i84 + 195 * i82, 490);
}
for (int i85 = 0; i85 < 150; i85++) {
rd.setColor(Color.getHSBColor(hsb[i82][0], 0.0F + (float) (i85 * 0.001667), hsb[i82][1]));
rd.drawLine(32 + i85 + 195 * i82, 499, 32 + i85 + 195 * i82, 506);
}
for (int i86 = 0; i86 < 3; i86++) {
rd.setColor(new Color(0, 0, 0));
float f = hsb[i82][i86] * 150.0F;
if (i86 == 1) {
float f87 = 0.75F;
if (i82 == 0) {
f87 = 0.85F;
}
if (i82 == 1) {
f87 = 0.8F;
}
f = (hsb[i82][i86] - f87) / 0.001F;
}
if (i86 == 2) {
f = hsb[i82][i86] * 600.0F;
}
if (f < 0.0F) {
f = 0.0F;
}
if (f > 150.0F) {
f = 150.0F;
}
rd.drawLine((int) (32 + 195 * i82 + f), 467 + i86 * 16, (int) (32 + 195 * i82 + f), 474
+ i86 * 16);
rd.drawLine((int) (33 + 195 * i82 + f), 467 + i86 * 16, (int) (33 + 195 * i82 + f), 474
+ i86 * 16);
rd.fillRect((int) (31 + 195 * i82 + f), 475 + i86 * 16, 4, 2);
rd.drawLine((int) (30 + 195 * i82 + f), 477 + i86 * 16, (int) (35 + 195 * i82 + f), 477
+ i86 * 16);
if (xm > 29 + 195 * i82 && xm < 185 + 195 * i82 && ym > 468 + i86 * 16
&& ym < 477 + i86 * 16 && mousePressed == 1 && mouseon == -1) {
mouseon = i86 + i82 * 3;
}
if (mouseon == i86 + i82 * 3) {
if (i86 == 0) {
hsb[i82][i86] = (xm - (32 + 195 * i82)) / 150.0F;
}
if (i86 == 1) {
float f88 = 0.75F;
if (i82 == 0) {
f88 = 0.85F;
}
if (i82 == 1) {
f88 = 0.8F;
}
hsb[i82][i86] = f88 + (xm - (32 + 195 * i82)) * 0.001F;
if (hsb[i82][i86] < f88) {
hsb[i82][i86] = f88;
}
if (hsb[i82][i86] > f88 + 0.15F) {
hsb[i82][i86] = f88 + 0.15F;
}
}
if (i86 == 2) {
hsb[i82][i86] = (xm - (32 + 195 * i82)) / 600.0F;
if (hsb[i82][i86] > 0.25) {
hsb[i82][i86] = 0.25F;
}
}
if (hsb[i82][i86] > 1.0F) {
hsb[i82][i86] = 1.0F;
}
if (hsb[i82][i86] < 0.0F) {
hsb[i82][i86] = 0.0F;
}
}
}
}
movefield(pfog, 258, 511, 200, 23);
if (!pfog.isShowing()) {
pfog.setVisible(true);
}
if (pfog.getState()) {
rd.setComposite(AlphaComposite.getInstance(3, 0.25F));
rd.setColor(new Color(0, 0, 0));
rd.fillRect(215, 464, 175, 47);
rd.setComposite(AlphaComposite.getInstance(3, 1.0F));
hsb[1][1] = (hsb[0][1] + hsb[2][1]) / 2.0F;
hsb[1][0] = hsb[2][0];
hsb[1][2] = hsb[2][2];
}
Color color = Color.getHSBColor(hsb[0][0], hsb[0][2], hsb[0][1]);
m.setsky(color.getRed(), color.getGreen(), color.getBlue());
csky[0] = color.getRed();
csky[1] = color.getGreen();
csky[2] = color.getBlue();
color = Color.getHSBColor(hsb[1][0], hsb[1][2], hsb[1][1]);
m.setfade(color.getRed(), color.getGreen(), color.getBlue());
cfade[0] = color.getRed();
cfade[1] = color.getGreen();
cfade[2] = color.getBlue();
color = Color.getHSBColor(hsb[2][0], hsb[2][2], hsb[2][1]);
m.setgrnd(color.getRed(), color.getGreen(), color.getBlue());
cgrnd[0] = color.getRed();
cgrnd[1] = color.getGreen();
cgrnd[2] = color.getBlue();
if (button(" Reset ", 650, 510, 0, true)) {
if (!ttstage.equals("")) {
tstage = ttstage;
ttstage = "";
}
readstage(1);
dtabed = -2;
}
if (button(" Save ", 737, 510, 0, true)) {
sortop();
ttstage = "";
savefile();
}
}
if (dtab == 3) {
if (dtabed != dtab) {
Color.RGBtoHSB(cldd[0], cldd[1], cldd[2], hsb[0]);
Color.RGBtoHSB(texture[0], texture[1], texture[2], hsb[1]);
mgen.setText("" + m.mgen + "");
mouseon = -1;
ttstage = "";
}
if (mousePressed != 1) {
if (mouseon == 0 || mouseon == 1 || mouseon == 2 || mouseon == 6) {
if (ttstage.equals("")) {
ttstage = tstage;
}
sortop();
readstage(1);
}
mouseon = -1;
}
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString("Clouds", 32, 461);
for (int i89 = 0; i89 < 150; i89++) {
rd.setColor(Color.getHSBColor(i89 * 0.006667F, 1.0F, 1.0F));
rd.drawLine(32 + i89 + 0, 467, 32 + i89 + 0, 474);
}
for (int i90 = 0; i90 < 150; i90++) {
rd.setColor(Color.getHSBColor(0.0F, 0.0F, 0.75F + i90 * 0.001667F));
rd.drawLine(32 + i90 + 0, 483, 32 + i90 + 0, 490);
}
for (int i91 = 0; i91 < 150; i91++) {
rd.setColor(Color.getHSBColor(hsb[0][0], i91 * 0.003333F, hsb[0][2]));
rd.drawLine(32 + i91 + 0, 499, 32 + i91 + 0, 506);
}
rd.setFont(new Font("Arial", 0, 11));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString("Blend:", 32, 529);
rd.setColor(new Color(0, 0, 0));
rd.fillRect(70, 522, 112, 2);
rd.fillRect(70, 528, 112, 2);
float f = 0.0F;
int i92 = 255;
for (int i93 = 0; i93 < 112; i93++) {
i92 = (int) (255.0F / (f + 1.0F));
if (i92 > 255) {
i92 = 255;
}
if (i92 < 0) {
i92 = 0;
}
f += 0.02F;
rd.setColor(new Color(i92, i92, i92));
rd.drawLine(70 + i93, 524, 70 + i93, 527);
}
rd.setColor(new Color(0, 0, 0));
rd.drawString("Height", 202 - ftm.stringWidth("Height") / 2, 461);
rd.drawLine(202, 467, 202, 530);
for (int i94 = 0; i94 < 8; i94++) {
rd.drawLine(202, 466 + i94 * 8, 202 + 8 - i94, 466 + i94 * 8);
}
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString("Ground Texture", 257, 471);
for (int i95 = 0; i95 < 150; i95++) {
rd.setColor(Color.getHSBColor(i95 * 0.006667F, 1.0F, 1.0F));
rd.drawLine(32 + i95 + 225, 477, 32 + i95 + 225, 484);
}
for (int i96 = 0; i96 < 150; i96++) {
rd.setColor(Color.getHSBColor(hsb[1][0], i96 * 0.006667F, i96 * 0.006667F));
rd.drawLine(32 + i96 + 225, 493, 32 + i96 + 225, 500);
}
rd.setFont(new Font("Arial", 0, 11));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString("Blend:", 257, 523);
rd.setColor(new Color(0, 0, 0));
rd.fillRect(295, 516, 112, 2);
rd.fillRect(295, 522, 112, 2);
f = 0.0F;
i92 = 255;
for (int i97 = 0; i97 < 112; i97++) {
i92 = (int) (255.0F / (f + 1.0F));
if (i92 > 255) {
i92 = 255;
}
if (i92 < 0) {
i92 = 0;
}
f += 0.02F;
rd.setColor(new Color(i92, i92, i92));
rd.drawLine(70 + i97 + 225, 518, 70 + i97 + 225, 521);
}
for (int i98 = 0; i98 < 2; i98++) {
int i99 = 3;
if (i98 == 1) {
i99 = 2;
}
for (int i100 = 0; i100 < i99; i100++) {
int i101 = i100;
if (i100 == 1) {
i101 = 2;
}
if (i100 == 2) {
i101 = 1;
}
rd.setColor(new Color(0, 0, 0));
float f102 = hsb[i98][i101] * 150.0F;
if (i100 == 1 && i98 == 0) {
final float f103 = 0.75F;
f102 = (hsb[i98][i101] - f103) / 0.001667F;
}
if (i100 == 2 && i98 == 0) {
f102 = hsb[i98][i101] / 0.003333F;
}
if (f102 < 0.0F) {
f102 = 0.0F;
}
if (f102 > 150.0F) {
f102 = 150.0F;
}
rd.drawLine((int) (32 + 225 * i98 + f102), 467 + i100 * 16
+ 10 * i98, (int) (32 + 225 * i98 + f102), 474 + i100 * 16 + 10 * i98);
rd.drawLine((int) (33 + 225 * i98 + f102), 467 + i100 * 16
+ 10 * i98, (int) (33 + 225 * i98 + f102), 474 + i100 * 16 + 10 * i98);
rd.fillRect((int) (31 + 225 * i98 + f102), 475 + i100 * 16 + 10 * i98, 4, 2);
rd.drawLine((int) (30 + 225 * i98 + f102), 477 + i100 * 16
+ 10 * i98, (int) (35 + 225 * i98 + f102), 477 + i100 * 16 + 10 * i98);
if (xm > 29 + 225 * i98 && xm < 185 + 225 * i98 && ym > 468 + i100 * 16 + 10 * i98
&& ym < 477 + i100 * 16 + 10 * i98 && mousePressed == 1 && mouseon == -1) {
mouseon = i100 + i98 * 3;
}
if (mouseon == i100 + i98 * 3) {
hsb[i98][i101] = (xm - (32 + 225 * i98)) * 0.006667F;
if (i100 == 1 && i98 == 1) {
hsb[i98][1] = (xm - (32 + 225 * i98)) * 0.006667F;
if (hsb[i98][1] > 1.0F) {
hsb[i98][1] = 1.0F;
}
if (hsb[i98][1] < 0.0F) {
hsb[i98][1] = 0.0F;
}
}
if (i100 == 1 && i98 == 0) {
final float f104 = 0.75F;
hsb[i98][i101] = f104 + (xm - (32 + 225 * i98)) * 0.001667F;
if (hsb[i98][i101] < f104) {
hsb[i98][i101] = f104;
}
}
if (i100 == 2 && i98 == 0) {
hsb[i98][i101] = (xm - (32 + 225 * i98)) * 0.003333F;
if (hsb[i98][i101] > 0.5) {
hsb[i98][i101] = 0.5F;
}
}
if (hsb[i98][i101] > 1.0F) {
hsb[i98][i101] = 1.0F;
}
if (hsb[i98][i101] < 0.0F) {
hsb[i98][i101] = 0.0F;
}
}
}
rd.setColor(new Color(0, 0, 0));
float f105 = (texture[3] - 20) * 2.8F;
if (i98 == 0) {
f105 = cldd[3] * 11.2F;
}
if (f105 < 0.0F) {
f105 = 0.0F;
}
if (f105 > 112.0F) {
f105 = 112.0F;
}
rd.drawLine((int) (70 + 225 * i98 + f105), 522 - 6 * i98, (int) (70 + 225 * i98 + f105), 529
- 6 * i98);
rd.drawLine((int) (71 + 225 * i98 + f105), 522 - 6 * i98, (int) (71 + 225 * i98 + f105), 529
- 6 * i98);
rd.fillRect((int) (69 + 225 * i98 + f105), 530 - 6 * i98, 4, 2);
rd.drawLine((int) (68 + 225 * i98 + f105), 532 - 6 * i98, (int) (73 + 225 * i98 + f105), 532
- 6 * i98);
if (xm > 67 + 225 * i98 && xm < 185 + 225 * i98 && ym > 522 - 6 * i98 && ym < 532 - 6 * i98
&& mousePressed == 1 && mouseon == -1) {
mouseon = 6 + i98;
}
}
if (mouseon == 6) {
cldd[3] = (int) ((xm - 70) / 11.2F);
if (cldd[3] < 0) {
cldd[3] = 0;
}
if (cldd[3] > 10) {
cldd[3] = 10;
}
}
if (mouseon == 7) {
texture[3] = (int) ((xm - 70 - 225) / 2.8 + 20.0);
if (texture[3] < 20) {
texture[3] = 20;
}
if (texture[3] > 60) {
texture[3] = 60;
}
}
rd.setColor(new Color(0, 128, 255));
float f106 = (1500 - Math.abs(cldd[4])) / 15.625F;
if (f106 > 64.0F) {
f106 = 64.0F;
}
if (f106 < 0.0F) {
f106 = 0.0F;
}
rd.drawRect(199, (int) (465.0F + f106), 12, 2);
if (xm > 197 && xm < 213 && ym > 463 && ym < 533 && mousePressed == 1 && mouseon == -1) {
mouseon = 8;
}
if (mouseon == 8) {
cldd[4] = -(int) ((530 - ym) * 15.625F + 500.0F);
if (cldd[4] > -500) {
cldd[4] = -500;
}
if (cldd[4] < -1500) {
cldd[4] = -1500;
}
}
Color color = Color.getHSBColor(hsb[0][0], hsb[0][1], hsb[0][2]);
m.setcloads(color.getRed(), color.getGreen(), color.getBlue(), cldd[3], cldd[4]);
cldd[0] = color.getRed();
cldd[1] = color.getGreen();
cldd[2] = color.getBlue();
color = Color.getHSBColor(hsb[1][0], hsb[1][1], hsb[1][2]);
m.setexture(color.getRed(), color.getGreen(), color.getBlue(), texture[3]);
texture[0] = color.getRed();
texture[1] = color.getGreen();
texture[2] = color.getBlue();
rd.setFont(new Font("Arial", 1, 12));
rd.setColor(new Color(0, 0, 0));
rd.drawString("Mountains", 452, 465);
rd.setFont(new Font("Arial", 0, 11));
rd.drawString("Mountain Generator Key:", 452, 480);
movefield(mgen, 452, 484, 120, 20);
if (mgen.hasFocus()) {
focuson = false;
}
if (!mgen.isShowing()) {
mgen.setVisible(true);
}
if (button(" Generate New ", 512, 525, 3, true)) {
m.mgen = (int) (ThreadLocalRandom.current().nextDouble() * 100000.0);
mgen.setText("" + m.mgen + "");
if (ttstage.equals("")) {
ttstage = tstage;
}
sortop();
readstage(1);
}
if (!mgen.getText().equals("" + m.mgen + "")) {
try {
final int i107 = Integer.valueOf(mgen.getText()).intValue();
m.mgen = i107;
if (ttstage.equals("")) {
ttstage = tstage;
}
sortop();
readstage(1);
} catch (final Exception exception) {
mgen.setText("" + m.mgen + "");
}
}
if (button(" Reset ", 650, 510, 0, true)) {
if (!ttstage.equals("")) {
tstage = ttstage;
ttstage = "";
}
readstage(1);
dtabed = -2;
}
if (button(" Save ", 737, 510, 0, true)) {
sortop();
ttstage = "";
savefile();
}
}
if (dtab == 1) {
if (dtabed != dtab) {
for (int i108 = 0; i108 < 3; i108++) {
snap[i108] = (int) (m.snap[i108] / 1.2F + 50.0F);
}
fogn[0] = (8 - ((m.fogd + 1) / 2 - 1)) * 20;
fogn[1] = (m.fade[0] - 5000) / 30;
}
rd.setColor(new Color(0, 0, 0));
rd.drawString("Atmosphere RGB Mask", 20, 461);
rd.setColor(new Color(128, 128, 128));
rd.drawLine(10, 457, 17, 457);
rd.drawLine(260, 457, 152, 457);
rd.drawLine(10, 457, 10, 546);
rd.drawLine(260, 457, 260, 527);
rd.drawLine(260, 527, 360, 527);
rd.drawLine(10, 546, 360, 546);
rd.drawLine(360, 527, 360, 546);
final String[] strings109 = {
"Red", "Green", "Blue"
};
final int[] is110 = {
32, 20, 29
};
int i111 = 38;
int i112 = -70;
for (int i113 = 0; i113 < 3; i113++) {
rd.setColor(new Color(0, 0, 0));
rd.drawString("" + strings109[i113] + " :", is110[i113], 447 + i113 * 24 + i111);
rd.drawLine(140 + i112, 443 + i113 * 24 + i111, 230 + i112, 443 + i113 * 24 + i111);
for (int i114 = 1; i114 < 10; i114++) {
rd.drawLine(140 + 10 * i114 + i112, 443 - i114 + i113 * 24 + i111, 140 + 10 * i114
+ i112, 443 + i114 + i113 * 24 + i111);
}
rd.setColor(new Color(255, 0, 0));
final int i115 = (int) (snap[i113] / 1.1111F / 10.0F);
rd.fillRect(138 + (int) (snap[i113] / 1.1111F) + i112, 443 - i115 + i113 * 24
+ i111, 5, i115 * 2 + 1);
rd.setColor(new Color(255, 128, 0));
rd.drawRect(139 + (int) (snap[i113] / 1.1111F) + i112, 434 + i113 * 24 + i111, 2, 18);
if (button(" - ", 260 + i112, 447 + i113 * 24 + i111, 4, false)) {
snap[i113] -= 2;
if (snap[i113] < 0) {
snap[i113] = 0;
}
}
if (button(" + ", 300 + i112, 447 + i113 * 24 + i111, 4, false)) {
    // "+" button for RGB-mask channel i113: before raising this channel by 2,
    // compensate when the combined mask already exceeds its cap (200) by
    // lowering each of the OTHER two channels, then clamp everything to [0, 100].
    if (snap[0] + snap[1] + snap[2] > 200) {
        for (int i116 = 0; i116 < 3; i116++) {
            if (i116 != i113) {
                // NOTE(review): the original line here was truncated to a bare
                // "snap[i116]" (no operator — a syntax error). A unit decrement
                // on each of the two other channels balances the +2 applied
                // below; confirm against the upstream source if available.
                snap[i116]--;
                if (snap[i116] < 0) {
                    snap[i116] = 0;
                }
            }
        }
    }
    snap[i113] += 2;
    if (snap[i113] > 100) {
        snap[i113] = 100;
    }
}
}
if (m.snap[0] != (int) (snap[0] * 1.2F - 60.0F) || m.snap[1] != (int) (snap[1] * 1.2F - 60.0F)
|| m.snap[2] != (int) (snap[2] * 1.2F - 60.0F)) {
for (int i117 = 0; i117 < 3; i117++) {
m.snap[i117] = (int) (snap[i117] * 1.2F - 60.0F);
}
readstage(2);
}
rd.setColor(new Color(0, 0, 0));
rd.drawString("Car Lights :", 265, 541);
if (snap[0] + snap[1] + snap[2] > 110) {
rd.drawString("Off", 335, 541);
m.lightson = false;
} else {
rd.setColor(new Color(0, 200, 0));
rd.drawString("On", 335, 541);
m.lightson = true;
}
final int i118 = 33;
rd.setColor(new Color(0, 0, 0));
rd.drawString("Dust/Fog Properties", 280 + i118, 461);
rd.setColor(new Color(128, 128, 128));
rd.drawLine(270 + i118, 457, 277 + i118, 457);
rd.drawLine(540 + i118, 457, 393 + i118, 457);
rd.drawLine(270 + i118, 457, 270 + i118, 522);
rd.drawLine(540 + i118, 457, 540 + i118, 522);
rd.drawLine(270 + i118, 522, 540 + i118, 522);
final String[] strings119 = {
"Density", "Near / Far"
};
final int[] is120 = {
292 + i118, 280 + i118
};
final int[] is121 = {
20, 10
};
i111 = 38;
i112 = 210 + i118;
for (int i122 = 0; i122 < 2; i122++) {
rd.setColor(new Color(0, 0, 0));
rd.drawString("" + strings119[i122] + " :", is120[i122], 447 + i122 * 24 + i111);
rd.drawLine(140 + i112, 443 + i122 * 24 + i111, 230 + i112, 443 + i122 * 24 + i111);
for (int i123 = 1; i123 < 10; i123++) {
rd.drawLine(140 + 10 * i123 + i112, 443 - i123 + i122 * 24 + i111, 140 + 10 * i123
+ i112, 443 + i123 + i122 * 24 + i111);
}
rd.setColor(new Color(255, 0, 0));
final int i124 = (int) (fogn[i122] / 1.1111F / 10.0F);
rd.fillRect(138 + (int) (fogn[i122] / 1.1111F) + i112, 443 - i124 + i122 * 24
+ i111, 5, i124 * 2 + 1);
rd.setColor(new Color(255, 128, 0));
rd.drawRect(139 + (int) (fogn[i122] / 1.1111F) + i112, 434 + i122 * 24 + i111, 2, 18);
if (button(" - ", 260 + i112, 447 + i122 * 24 + i111, 4, false)) {
fogn[i122] -= is121[i122];
if (fogn[i122] < 0) {
fogn[i122] = 0;
}
}
if (button(" + ", 300 + i112, 447 + i122 * 24 + i111, 4, false)) {
fogn[i122] += is121[i122];
if (fogn[i122] > 100) {
fogn[i122] = 100;
}
}
}
m.fogd = (8 - fogn[0] / 20 + 1) * 2 - 1;
m.fadfrom(5000 + fogn[1] * 30);
origfade = m.fade[0];
if (button(" Reset ", 650, 510, 0, true)) {
dtabed = -2;
}
if (button(" Save ", 737, 510, 0, true)) {
sortop();
savefile();
}
}
if (dtab == 4) {
if (dtabed != dtab && cp.nlaps - 1 >= 0 && cp.nlaps - 1 <= 14) {
nlaps.select(cp.nlaps - 1);
}
rd.setColor(new Color(0, 0, 0));
rd.drawString("Set the number of laps for this stage:", 130, 496);
nlaps.move(348, 480);
if (!nlaps.isShowing()) {
nlaps.setVisible(true);
}
if (cp.nlaps != nlaps.getSelectedIndex() + 1) {
cp.nlaps = nlaps.getSelectedIndex() + 1;
requestFocus();
}
if (button(" Reset ", 530, 496, 0, true)) {
dtabed = -2;
}
if (button(" Save ", 617, 496, 0, true)) {
sortop();
savefile();
}
}
if (dtab == 5) {
if (dtabed != dtab) {
tracks.removeAll();
tracks.maxl = 200;
tracks.add(rd, "The Play List - MOD Tracks");
final String[] strings125 = new File("mystages/mymusic/").list();
if (strings125 != null) {
for (final String element : strings125)
if (element.toLowerCase().endsWith(".zip")) {
tracks.add(rd, element.substring(0, element.length() - 4));
}
}
if (ltrackname.equals("")) {
if (trackname.equals("")) {
tracks.select(0);
} else {
tracks.select(trackname);
}
} else {
tracks.select(ltrackname);
}
mouseon = -1;
}
tracks.move(10, 450);
if (tracks.getWidth() != 200) {
tracks.setSize(200, 21);
}
if (!tracks.isShowing()) {
tracks.setVisible(true);
}
if (track.playing && track.loaded == 2) {
if (button(" Stop ", 110, 495, 2, false)) {
track.setPaused(true);
}
if (!ltrackname.equals(tracks.getSelectedItem())) {
track.setPaused(true);
}
if (xm > 10 && xm < 210 && ym > 516 && ym < 534) {
if (mousePressed == 1) {
mouseon = 1;
}
rd.setColor(new Color(0, 164, 242));
} else {
rd.setColor(new Color(120, 210, 255));
}
rd.drawRect(10, 516, 200, 18);
rd.setColor(new Color(200, 200, 200));
rd.drawLine(10, 523, 210, 523);
rd.setColor(new Color(0, 0, 0));
rd.drawLine(10, 524, 210, 524);
rd.drawLine(10, 525, 210, 525);
rd.drawLine(10, 526, 210, 526);
rd.setColor(new Color(255, 255, 255));
rd.drawLine(10, 527, 210, 527);
int i127 = (int) ((1.0F - (float) track.sClip.stream.available() / (float) avon) * 200.0F);
if (mouseon == 1) {
i127 = xm - 10;
if (i127 < 0) {
i127 = 0;
}
if (i127 > 200) {
i127 = 200;
}
if (mousePressed != 1) {
track.sClip.stream.reset();
track.sClip.stream.skip((long) (i127 / 200.0F * avon));
mouseon = -1;
}
}
rd.setColor(new Color(0, 0, 0));
rd.drawRect(8 + i127, 516, 4, 18);
rd.setColor(new Color(0, 164, 242));
rd.drawLine(10 + i127, 520, 10 + i127, 518);
rd.drawLine(10 + i127, 530, 10 + i127, 532);
} else if (tracks.getSelectedIndex() != 0 && button(" Play > ", 110, 495, 2, false)) {
if (!ltrackname.equals(tracks.getSelectedItem())) {
track.unload();
track = new RadicalMod("mystages/mymusic/" + tracks.getSelectedItem()
+ ".zip", 300, 8000, 125, true, false);
if (track.loaded == 2) {
avon = track.sClip.stream.available();
ltrackname = tracks.getSelectedItem();
} else {
ltrackname = "";
}
}
if (!ltrackname.equals("")) {
track.play();
} else {
JOptionPane.showMessageDialog(null, "Failed to load '" + tracks.getSelectedItem()
+ "', please make sure it is a valid MOD Track!", "Stage Maker", 1);
}
}
if (tracks.getSelectedIndex() != 0) {
if (button(" Set as the stage's Sound Track > ", 330, 466, 2, false)) {
if (!ltrackname.equals(tracks.getSelectedItem())) {
track.unload();
track = new RadicalMod("mystages/mymusic/" + tracks.getSelectedItem()
+ ".zip", 300, 8000, 125, true, false);
if (track.loaded == 2) {
avon = track.sClip.stream.available();
ltrackname = tracks.getSelectedItem();
} else {
ltrackname = "";
}
}
if (!ltrackname.equals("")) {
trackname = ltrackname;
trackvol = (int) (220.0F / (track.rvol / 3750.0F));
try {
final File file = new File("mystages/mymusic/" + trackname + ".zip");
tracksize = (int) (file.length() / 1024L);
if (tracksize > 250) {
JOptionPane.showMessageDialog(null, "Cannot use '"
+ tracks.getSelectedItem()
+ "' as the sound track!\nIts file size is bigger then 250KB.\n\n", "Stage Maker", 1);
trackname = "";
}
} catch (final Exception exception) {
tracksize = 111;
}
} else {
JOptionPane.showMessageDialog(null, "Failed to load '" + tracks.getSelectedItem()
+ "', please make sure it is a valid MOD Track!", "Stage Maker", 1);
}
}
if (button(" X Delete ", 258, 495, 2, false) && JOptionPane.showConfirmDialog(null, ""
+ "Are you sure you want to permanently delete this MOD Track from your Play List?\n\n"
+ tracks.getSelectedItem()
+ "\n\n> If you delete this Track from the Play List you will not be able to use it for other stages as well! \n\n", "Stage Maker", 0) == 0) {
deltrack();
}
}
if (button(" Add a new MOD Track from file . . . ", 330, 530, 0, false)
&& JOptionPane.showConfirmDialog(null, "The game only accepts MOD format music files for the game ('.mod' file extension).\nA good place to find MOD Tracks is the modarchive.com, all the current MOD Tracks\nthat are distributed with the game are from the modarchive.com.\n\nTo find out more about MOD Tracks and to learn how to compose & remix your own\nmusic, please read the section of the Stage Maker help about it.\n\nThe MOD Track needs to be compressed in a zip file to be added here, please make\nsure the MOD Track you wish to add to your stages sound track play list is zipped before\nadding it here.\nThe ZIP file must also be less then 250KB in size.\n\nIs the track you are about to insert a MOD Track in a ZIP file that is less then 250KB?\n", "Stage Maker", 0) == 0) {
File file = null;
final FileDialog filedialog = new FileDialog(new Frame(), "Stage Maker - Add MOD Track file to stage sound track play list!");
filedialog.setFile("*.zip");
filedialog.setMode(0);
filedialog.setVisible(true);
try {
if (filedialog.getFile() != null) {
file = new File("" + filedialog.getDirectory() + "" + filedialog.getFile() + "");
}
} catch (final Exception exception) {
}
if (file != null) {
try {
if (file.length() / 1024L < 250L) {
File file128 = new File("mystages/mymusic/");
if (!file128.exists()) {
file128.mkdirs();
}
file128 = new File("mystages/mymusic/" + file.getName() + "");
final FileInputStream fileinputstream = new FileInputStream(file);
final FileOutputStream fileoutputstream = new FileOutputStream(file128);
final byte[] is129 = new byte[1024];
int i130;
while ((i130 = fileinputstream.read(is129)) > 0) {
fileoutputstream.write(is129, 0, i130);
}
fileinputstream.close();
fileoutputstream.close();
tracks.removeAll();
tracks.add(rd, "Select MOD Track ");
final String[] strings131 = new File("mystages/mymusic/").list();
if (strings131 != null) {
for (final String element : strings131)
if (element.toLowerCase().endsWith(".zip")) {
tracks.add(rd, element.substring(0, element.length() - 4));
}
}
tracks.select(file.getName().substring(0, file.getName().length() - 4));
} else {
JOptionPane.showMessageDialog(null, "The selected file is larger then 250KB in size and therefore cannot be added!", "Stage Maker", 1);
}
} catch (final Exception exception) {
JOptionPane.showMessageDialog(null, "Unable to copy file! Error Deatials:\n"
+ exception, "Stage Maker", 1);
}
}
}
final int i133 = 200;
rd.setColor(new Color(0, 0, 0));
rd.drawString("Sound Track", 280 + i133, 461);
String string = trackname;
if (string.equals("")) {
string = "No Sound Track set.";
} else if (button(" < Remove Track ", 378, 495, 2, false)) {
trackname = "";
}
rd.drawString(string, 629 - ftm.stringWidth(string) / 2, 482);
rd.setColor(new Color(128, 128, 128));
rd.drawLine(270 + i133, 457, 277 + i133, 457);
rd.drawLine(589 + i133, 457, 353 + i133, 457);
rd.drawLine(270 + i133, 457, 270 + i133, 497);
rd.drawLine(589 + i133, 457, 589 + i133, 497);
rd.drawLine(270 + i133, 497, 589 + i133, 497);
if (button(" Reset ", 576, 530, 0, true)) {
ltrackname = "";
dtabed = -2;
}
if (button(" Save ", 663, 530, 0, true)) {
sortop();
savefile();
}
}
if (dtab == 6) {
rd.setColor(new Color(0, 0, 0));
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.drawString("Test Drive the Stage", 400 - ftm.stringWidth("Test Drive the Stage") / 2, 470);
witho.move(342, 480);
if (!witho.isShowing()) {
witho.setVisible(true);
}
if (button(" TEST DRIVE! ", 400, 530, 0, true)) {
savefile();
errd = 0;
readstage(3);
if (cp.nsp < 2) {
errd = 7;
}
if (errd == 0) {
Madness.testcar = stagename;
Madness.testdrive = witho.getSelectedIndex() + 3;
Madness.game();
} else {
JOptionPane.showMessageDialog(null, ""
+ "Error! This stage is not ready for a test drive!\nReason:\n" + errlo[errd - 1]
+ "\n\n", "Stage Maker", 0);
}
}
}
if (dtabed != dtab)
if (dtabed == -2) {
dtabed = -1;
} else {
dtabed = dtab;
}
}
if (tab == 3) {
rd.setFont(new Font("Arial", 1, 13));
rd.setColor(new Color(0, 0, 0));
rd.drawString("Publish Stage : [ " + stagename + " ]", 30, 50);
rd.drawString("Publishing Type :", 30, 80);
pubtyp.move(150, 63);
if (!pubtyp.isShowing()) {
pubtyp.setVisible(true);
pubtyp.select(1);
}
rd.setColor(new Color(0, 0, 0));
rd.setFont(new Font("Arial", 0, 12));
if (pubtyp.getSelectedIndex() == 0) {
rd.drawString("Private : This means only you can have your stage in your account and no one else can add", 268, 72);
rd.drawString("it to their account to play it!", 268, 88);
}
if (pubtyp.getSelectedIndex() == 1) {
rd.drawString("Public : This means anyone can add this stage to their account to play it, but only you can", 268, 72);
rd.drawString("download it to your Stage Maker and edit it (no one else but you can edit it).", 268, 88);
}
if (pubtyp.getSelectedIndex() == 2) {
rd.drawString("Super Public : This means anyone can add this stage to their account to play it and can also", 268, 72);
rd.drawString("download it to their stage Maker, edit it and publish it.", 268, 88);
}
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
rd.drawString("Stage Name", 180 - ftm.stringWidth("Stage Name") / 2, 138);
rd.drawString("Created By", 400 - ftm.stringWidth("Created By") / 2, 138);
rd.drawString("Added By", 500 - ftm.stringWidth("Added By") / 2, 138);
rd.drawString("Publish Type", 600 - ftm.stringWidth("Publish Type") / 2, 138);
rd.drawString("Options", 720 - ftm.stringWidth("Options") / 2, 138);
rd.drawLine(350, 129, 350, 140);
rd.drawLine(450, 129, 450, 140);
rd.drawLine(550, 129, 550, 140);
rd.drawLine(650, 129, 650, 140);
rd.drawRect(10, 140, 780, 402);
if (button(" Publish > ", 102, 110, 0, true)) {
if (logged == 0) {
JOptionPane.showMessageDialog(null, "Please login to retrieve your account first before publishing!", "Stage Maker", 1);
}
if (logged == 3 || logged == -1) {
savefile();
errd = 0;
readstage(3);
if (cp.nsp < 2) {
errd = 7;
}
rd.setFont(new Font("Arial", 1, 12));
ftm = rd.getFontMetrics();
if (ftm.stringWidth(stagename) > 274) {
errd = 8;
}
if (errd == 0) {
int i = 0;
for (int i134 = 0; i134 < nms; i134++)
if (mystages[i134].equals(stagename)
&& maker[i134].toLowerCase().equals(tnick.getText().toLowerCase())) {
i = JOptionPane.showConfirmDialog(null, "Replace your already online stage '"
+ stagename + "' with this one?", "Stage Maker", 0);
}
if (i == 0) {
setCursor(new Cursor(3));
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.setColor(new Color(225, 225, 225));
rd.fillRect(11, 141, 779, 401);
rd.setColor(new Color(0, 0, 0));
rd.drawString("Connecting to Server...", 400
- ftm.stringWidth("Connecting to Server...") / 2, 250);
repaint();
int i135 = -1;
try {
final Socket socket = new Socket("multiplayer.needformadness.com", 7061);
final BufferedReader bufferedreader = new BufferedReader(new InputStreamReader(socket.getInputStream()));
final PrintWriter printwriter = new PrintWriter(socket.getOutputStream(), true);
printwriter.println("20|" + tnick.getText() + "|" + tpass.getText() + "|"
+ stagename + "|" + pubtyp.getSelectedIndex() + "|");
String string = bufferedreader.readLine();
if (string != null) {
i135 = servervalue(string, 0);
}
if (i135 == 0) {
String string136 = " Publishing Stage ";
final String string137 = "" + tstage + "\r\n" + bstage + "";
final BufferedReader stagebufferedreader = new BufferedReader(new StringReader(string137));
String string139;
while ((string139 = stagebufferedreader.readLine()) != null) {
string139 = string139.trim();
printwriter.println(string139);
rd.setColor(new Color(225, 225, 225));
rd.fillRect(11, 141, 779, 401);
rd.setColor(new Color(0, 0, 0));
rd.drawString(string136, 400 - ftm.stringWidth(string136) / 2, 250);
string136 = "| " + string136 + " |";
if (string136.equals("| | | | | | | | | | | | | | | | | | | | | | | | Publishing Stage | | | | | | | | | | | | | | | | | | | | | | | |")) {
string136 = " Publishing Stage ";
}
repaint();
try {
if (thredo != null) {
}
Thread.sleep(10L);
} catch (final InterruptedException interruptedexception) {
}
}
printwriter.println("QUITX1111");
rd.setColor(new Color(225, 225, 225));
rd.fillRect(11, 141, 779, 401);
rd.setColor(new Color(0, 0, 0));
rd.drawString("Creating the stage online...", 400
- ftm.stringWidth("Creating the stage online...") / 2, 250);
rd.drawString("This may take a couple of minutes, please wait...", 400
- ftm.stringWidth("This may take a couple of minutes, please wait...")
/ 2, 280);
repaint();
string = bufferedreader.readLine();
if (string != null) {
i135 = servervalue(string, 0);
} else {
i135 = -1;
}
if (i135 == 0) {
rd.setColor(new Color(225, 225, 225));
rd.fillRect(11, 141, 779, 401);
rd.setColor(new Color(0, 0, 0));
rd.drawString("Uploading stage's sound track...", 400
- ftm.stringWidth("Uploading Stage's Sound Track...") / 2, 250);
rd.drawString("This may take a couple of minutes, please wait...", 400
- ftm.stringWidth("This may take a couple of minutes, please wait...")
/ 2, 280);
repaint();
final File file = new File("mystages/mymusic/" + trackname + ".zip");
if (!trackname.equals("") && file.exists()) {
final int i140 = (int) file.length();
printwriter.println("track|" + trackname + "|" + i140 + "|");
string = bufferedreader.readLine();
if (string != null) {
i135 = servervalue(string, 0);
} else {
i135 = -2;
}
if (i135 == 0) {
final FileInputStream fileinputstream = new FileInputStream(file);
final byte[] is = new byte[i140];
fileinputstream.read(is);
fileinputstream.close();
final DataOutputStream dataoutputstream = new DataOutputStream(socket.getOutputStream());
dataoutputstream.write(is, 0, i140);
string = bufferedreader.readLine();
if (string != null) {
i135 = servervalue(string, 0);
} else {
i135 = -2;
}
}
if (i135 == -67) {
i135 = 0;
}
} else {
printwriter.println("END");
string = bufferedreader.readLine();
}
}
}
socket.close();
} catch (final Exception exception) {
i135 = -1;
}
setCursor(new Cursor(0));
boolean bool = false;
if (i135 == 0) {
logged = 1;
bool = true;
}
if (i135 == 3) {
JOptionPane.showMessageDialog(null, "Unable to publish stage.\nReason:\n"
+ errlo[6] + "\n\n", "Stage Maker", 1);
bool = true;
}
if (i135 == 4) {
JOptionPane.showMessageDialog(null, ""
+ "Unable to publish stage.\nReason:\nStage name used (" + stagename
+ ").\nThe name '" + stagename
+ "' is already used by another published stage.\nPlease rename your stage.\n\n", "Stage Maker", 1);
bool = true;
}
if (i135 == 5) {
JOptionPane.showMessageDialog(null, "Unable to create stage online! Unknown Error. Please try again later.", "Stage Maker", 1);
bool = true;
}
if (i135 > 5) {
JOptionPane.showMessageDialog(null, "Unable to publish stage fully! Unknown Error. Please try again later.", "Stage Maker", 1);
bool = true;
}
if (i135 == -4) {
logged = 1;
JOptionPane.showMessageDialog(null, "Unable to upload sound track!\nReason:\nAnother MOD Track is already uploaded with the same name, please rename your Track.\nOpen your 'mystages' folder then open 'mymusic' to find your MOD Track to rename it.\n\n", "Stage Maker", 1);
bool = true;
}
if (i135 == -3) {
logged = 1;
JOptionPane.showMessageDialog(null, "Unable to upload sound track!\nReason:\nYour MOD Track\u2019s file size is too large, Track file size must be less then 250KB to be accepted.\n\n", "Stage Maker", 1);
bool = true;
}
if (i135 == -2) {
logged = 1;
JOptionPane.showMessageDialog(null, "Unable to upload sound track! Unknown Error. Please try again later.", "Stage Maker", 1);
bool = true;
}
if (!bool) {
JOptionPane.showMessageDialog(null, "Unable to publish stage! Unknown Error.", "Stage Maker", 1);
}
}
} else {
JOptionPane.showMessageDialog(null, ""
+ "Error! This stage is not ready for publishing!\nReason:\n" + errlo[errd - 1]
+ "\n\n", "Stage Maker", 0);
}
}
}
if (logged == 3) {
for (int i = 0; i < nms; i++) {
rd.setColor(new Color(235, 235, 235));
if (xm > 11 && xm < 789 && ym > 142 + i * 20 && ym < 160 + i * 20) {
rd.setColor(new Color(255, 255, 255));
}
rd.fillRect(11, 142 + i * 20, 778, 18);
rd.setFont(new Font("Arial", 0, 12));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString(mystages[i], 180 - ftm.stringWidth(mystages[i]) / 2, 156 + i * 20);
rd.setColor(new Color(155, 155, 155));
rd.drawLine(350, 145 + i * 20, 350, 157 + i * 20);
if (pubt[i] != -1) {
rd.drawLine(450, 145 + i * 20, 450, 157 + i * 20);
rd.drawLine(550, 145 + i * 20, 550, 157 + i * 20);
rd.drawLine(650, 145 + i * 20, 650, 157 + i * 20);
boolean bool = false;
if (maker[i].toLowerCase().equals(tnick.getText().toLowerCase())) {
bool = true;
rd.setColor(new Color(0, 64, 0));
rd.drawString("You", 400 - ftm.stringWidth("You") / 2, 156 + i * 20);
} else {
rd.setColor(new Color(0, 0, 64));
rd.drawString(maker[i], 400 - ftm.stringWidth(maker[i]) / 2, 156 + i * 20);
}
if (nad[i] > 1) {
if (ovbutton("" + nad[i] + " Players", 500, 156 + i * 20)) {
String string = "[ " + mystages[i]
+ " ] has been added by the following players to their accounts: \n\n";
int i141 = 0;
for (int i142 = 0; i142 < nad[i]; i142++) {
if (++i141 == 17) {
string = "" + string + "\n";
i141 = 1;
}
string = "" + string + addeda[i][i142];
if (i142 != nad[i] - 1)
if (i142 != nad[i] - 2) {
string = "" + string + ", ";
} else if (i141 == 16) {
string = "" + string + "\nand ";
i141 = 0;
} else {
string = "" + string + " and ";
}
}
string = "" + string + "\n \n \n";
JOptionPane.showMessageDialog(null, string, "Stage Maker", 1);
}
} else {
rd.setColor(new Color(0, 0, 64));
rd.drawString("None", 500 - ftm.stringWidth("None") / 2, 156 + i * 20);
}
if (pubt[i] == 0) {
rd.setColor(new Color(0, 0, 64));
rd.drawString("Private", 600 - ftm.stringWidth("Private") / 2, 156 + i * 20);
}
if (pubt[i] == 1) {
rd.setColor(new Color(0, 0, 64));
rd.drawString("Public", 600 - ftm.stringWidth("Public") / 2, 156 + i * 20);
}
if (pubt[i] == 2) {
rd.setColor(new Color(0, 64, 0));
rd.drawString("Super Public", 600 - ftm.stringWidth("Super Public") / 2, 156 + i * 20);
}
if ((pubt[i] == 2 || bool) && ovbutton("Download", 700, 156 + i * 20)) {
int i143 = 0;
for (int i144 = 0; i144 < slstage.getItemCount(); i144++)
if (mystages[i].equals(slstage.getItem(i144))) {
i143 = JOptionPane.showConfirmDialog(null, "Replace the local "
+ mystages[i]
+ " in your 'mystages' folder with the published online copy?", "Stage Maker", 0);
}
if (i143 == 0) {
setCursor(new Cursor(3));
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.setColor(new Color(225, 225, 225));
rd.fillRect(11, 141, 779, 401);
rd.setColor(new Color(0, 0, 0));
rd.drawString("Downloading stage, please wait...", 400
- ftm.stringWidth("Downloading stage, please wait...") / 2, 250);
repaint();
try {
String string = "http://multiplayer.needformadness.com/tracks/"
+ mystages[i] + ".radq?reqlo=" + (int) (ThreadLocalRandom.current().nextDouble() * 1000.0) + "";
string = string.replace(' ', '_');
URL url = new URL(string);
int i145 = url.openConnection().getContentLength();
DataInputStream datainputstream = new DataInputStream(url.openStream());
byte[] is = new byte[i145];
datainputstream.readFully(is);
datainputstream.close();
ZipInputStream zipinputstream;
if (is[0] == 80 && is[1] == 75 && is[2] == 3) {
zipinputstream = new ZipInputStream(new ByteArrayInputStream(is));
} else {
final byte[] is146 = new byte[i145 - 40];
for (int i147 = 0; i147 < i145 - 40; i147++) {
int i148 = 20;
if (i147 >= 500) {
i148 = 40;
}
is146[i147] = is[i147 + i148];
}
zipinputstream = new ZipInputStream(new ByteArrayInputStream(is146));
}
final ZipEntry zipentry = zipinputstream.getNextEntry();
if (zipentry != null) {
String string149 = "";
int i150 = Integer.valueOf(zipentry.getName()).intValue();
final byte[] is151 = new byte[i150];
int i152 = 0;
int i153;
for (; i150 > 0; i150 -= i153) {
i153 = zipinputstream.read(is151, i152, i150);
i152 += i153;
}
String string154 = new String(is151);
string154 = "" + string154 + "\n";
String string155 = "";
int i156 = 0;
int i157 = string154.indexOf("\n", 0);
while (i157 != -1 && i156 < string154.length()) {
String string158 = string154.substring(i156, i157);
string158 = string158.trim();
i156 = i157 + 1;
i157 = string154.indexOf("\n", i156);
if (!string158.startsWith("stagemaker(")
&& !string158.startsWith("publish(")) {
string155 = "" + string155 + "" + string158 + "\r\n";
} else {
string155 = string155.trim();
string155 = "" + string155 + "\r\n";
}
if (string158.startsWith("soundtrack")) {
string149 = getstring("soundtrack", string158, 0);
}
}
string155 = string155.trim();
string155 = "" + string155 + "\r\n\r\n";
File file = new File("mystages/");
if (!file.exists()) {
file.mkdirs();
}
file = new File("mystages/" + mystages[i] + ".txt");
final BufferedWriter bufferedwriter = new BufferedWriter(new FileWriter(file));
bufferedwriter.write(string155);
bufferedwriter.close();
zipinputstream.close();
if (!string149.equals("")) {
try {
rd.setColor(new Color(0, 0, 0));
rd.drawString("Downloading stage's sound track...", 400
- ftm.stringWidth("Downloading stage's sound track...")
/ 2, 280);
repaint();
string = "http://multiplayer.needformadness.com/tracks/music/"
+ string149 + ".zip";
string = string.replace(' ', '_');
url = new URL(string);
i145 = url.openConnection().getContentLength();
file = new File("mystages/mymusic/" + string149 + ".zip");
if (file.exists())
if (file.length() == i145) {
i143 = 1;
} else {
i143 = JOptionPane.showConfirmDialog(null, ""
+ "Another track named '" + string149
+ "' already exists in your Sound Tracks folder!\nReplace it with the one attached to this stage?", "Stage Maker", 0);
}
if (i143 == 0) {
datainputstream = new DataInputStream(url.openStream());
is = new byte[i145];
datainputstream.readFully(is);
datainputstream.close();
final FileOutputStream fileoutputstream = new FileOutputStream(file);
fileoutputstream.write(is);
fileoutputstream.close();
}
} catch (final Exception exception) {
}
}
setCursor(new Cursor(0));
JOptionPane.showMessageDialog(null, "" + mystages[i]
+ " has been successfully downloaded!", "Stage Maker", 1);
} else {
JOptionPane.showMessageDialog(null, "Unable to download stage. Unknown Error! \nPlease try again later.", "Stage Maker", 1);
}
} catch (final Exception exception) {
JOptionPane.showMessageDialog(null, "Unable to download stage. Unknown Error! \nPlease try again later.", "Stage Maker", 1);
}
}
}
} else {
rd.drawString("- Error Loading this stage's info! -", 550
- ftm.stringWidth("- Error Loading this stage's info! -") / 2, 156 + i * 20);
}
if (ovbutton("X", 765, 156 + i * 20) && JOptionPane.showConfirmDialog(null, "Remove "
+ mystages[i] + " from your account?", "Stage Maker", 0) == 0) {
setCursor(new Cursor(3));
int i160 = -1;
try {
final Socket socket = new Socket("multiplayer.needformadness.com", 7061);
final BufferedReader bufferedreader = new BufferedReader(new InputStreamReader(socket.getInputStream()));
final PrintWriter printwriter = new PrintWriter(socket.getOutputStream(), true);
printwriter.println("19|" + tnick.getText() + "|" + tpass.getText() + "|"
+ mystages[i] + "|");
final String string = bufferedreader.readLine();
if (string != null) {
i160 = servervalue(string, 0);
}
socket.close();
} catch (final Exception exception) {
i160 = -1;
}
if (i160 == 0) {
logged = 1;
} else {
setCursor(new Cursor(0));
JOptionPane.showMessageDialog(null, "Failed to remove " + mystages[i]
+ " from your account. Unknown Error! \nPlease try again later.", "Stage Maker", 1);
}
}
}
}
if (logged == 2) {
for (int i = 0; i < nms; i++) {
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.setColor(new Color(225, 225, 225));
rd.fillRect(50, 150, 600, 150);
rd.setColor(new Color(0, 0, 0));
rd.drawString("Loading " + mystages[i] + "\u2018s info...", 400
- ftm.stringWidth("Loading " + mystages[i] + "\u2018s info...") / 2, 220);
repaint();
maker[i] = "Unkown";
pubt[i] = -1;
nad[i] = 0;
String string = "";
try {
String string161 = "http://multiplayer.needformadness.com/tracks/" + mystages[i]
+ ".txt?reqlo=" + (int) (ThreadLocalRandom.current().nextDouble() * 1000.0) + "";
string161 = string161.replace(' ', '_');
final URL url = new URL(string161);
final BufferedReader bufferedreader = new BufferedReader(new InputStreamReader(new DataInputStream(url.openStream())));
while ((string = bufferedreader.readLine()) != null) {
string = "" + string.trim();
if (string.startsWith("details")) {
maker[i] = getSvalue("details", string, 0);
pubt[i] = Utility.getvalue("details", string, 1);
boolean bool = false;
while (!bool) {
addeda[i][nad[i]] = getSvalue("details", string, 2 + nad[i]);
if (addeda[i][nad[i]].equals("")) {
bool = true;
} else {
nad[i]++;
}
}
}
}
} catch (final Exception exception) {
}
}
setCursor(new Cursor(0));
logged = 3;
}
if (logged == -1) {
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString("Account empty, no published stages found.", 400
- ftm.stringWidth("Account empty, no published stages found.") / 2, 220);
rd.drawString("Click \u2018Publish\u2019 above to begin.", 400
- ftm.stringWidth("Click \u2018Publish\u2019 above to begin.") / 2, 280);
rd.setFont(new Font("Arial", 0, 12));
ftm = rd.getFontMetrics();
rd.drawString("The maximum number of stages your account can have at once is 20 stages.", 400
- ftm.stringWidth("The maximum number of stages your account can have at once is 20 stages.")
/ 2, 320);
}
if (logged == 1) {
rd.setColor(new Color(225, 225, 225));
rd.fillRect(11, 141, 779, 401);
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.setColor(new Color(0, 0, 0));
rd.drawString("Loading your account's stage list...", 400
- ftm.stringWidth("Loading your account's stage list...") / 2, 220);
repaint();
nms = 0;
String string = "";
try {
final URL url = new URL("http://multiplayer.needformadness.com/tracks/lists/"
+ tnick.getText() + ".txt?reqlo=" + (int) (ThreadLocalRandom.current().nextDouble() * 1000.0) + "");
final BufferedReader bufferedreader = new BufferedReader(new InputStreamReader(new DataInputStream(url.openStream())));
while ((string = bufferedreader.readLine()) != null) {
string = "" + string.trim();
if (string.startsWith("mystages")) {
boolean bool = true;
while (bool && nms < 20) {
mystages[nms] = getSvalue("mystages", string, nms);
if (mystages[nms].equals("")) {
bool = false;
} else {
nms++;
}
}
}
}
if (nms > 0) {
logged = 2;
} else {
setCursor(new Cursor(0));
logged = -1;
}
bufferedreader.close();
} catch (final Exception exception) {
final String string162 = "" + exception;
if (string162.indexOf("FileNotFound") != -1) {
setCursor(new Cursor(0));
logged = -1;
} else {
logged = 0;
JOptionPane.showMessageDialog(null, "Unable to connect to server at this moment, please try again later.", "Stage Maker", 1);
}
}
}
if (logged == 0) {
rd.setFont(new Font("Arial", 0, 12));
ftm = rd.getFontMetrics();
rd.drawString("The maximum number of stages your account can have at once is 20 stages.", 400
- ftm.stringWidth("The maximum number of stages your account can have at once is 20 stages.")
/ 2, 180);
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.drawString("Login to Retrieve your Account Stages", 400
- ftm.stringWidth("Login to Retrieve your Account Stages") / 2, 220);
rd.drawString("Nickname:", 376 - ftm.stringWidth("Nickname:") - 14, 266);
if (!tnick.isShowing()) {
tnick.setVisible(true);
}
movefield(tnick, 376, 250, 129, 23);
rd.drawString("Password:", 376 - ftm.stringWidth("Password:") - 14, 296);
if (!tpass.isShowing()) {
tpass.setVisible(true);
}
movefield(tpass, 376, 280, 129, 23);
if (button(" Login ", 400, 340, 0, true)) {
setCursor(new Cursor(3));
int i = -1;
try {
final Socket socket = new Socket("multiplayer.needformadness.com", 7061);
final BufferedReader bufferedreader = new BufferedReader(new InputStreamReader(socket.getInputStream()));
final PrintWriter printwriter = new PrintWriter(socket.getOutputStream(), true);
printwriter.println("1|" + tnick.getText().toLowerCase() + "|" + tpass.getText()
+ "|");
final String string = bufferedreader.readLine();
if (string != null) {
i = servervalue(string, 0);
}
socket.close();
} catch (final Exception exception) {
i = -1;
}
if (i == 0 || i == 3 || i > 10) {
tnick.setVisible(false);
tpass.setVisible(false);
logged = 1;
savesettings();
}
if (i == 1 || i == 2) {
setCursor(new Cursor(0));
JOptionPane.showMessageDialog(null, "Sorry. Incorrect Nickname or Password!", "Stage Maker", 0);
}
if (i == -167) {
setCursor(new Cursor(0));
JOptionPane.showMessageDialog(null, "Sorry. Trial accounts are not allowed to publish cars & stages, please register a full account!", "Stage Maker", 0);
}
if (i == -1) {
setCursor(new Cursor(0));
JOptionPane.showMessageDialog(null, "Unable to connect to server at this moment, please try again later.", "Stage Maker", 1);
}
}
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
rd.drawString("Not registered yet?", 400 - ftm.stringWidth("Not registered yet?") / 2, 450);
if (button(" Register Now! ", 400, 480, 0, true)) {
Madness.openurl("http://multiplayer.needformadness.com/register.html");
}
rd.setFont(new Font("Arial", 0, 12));
ftm = rd.getFontMetrics();
rd.drawString("Register to publish your stages to the multiplayer game!", 400
- ftm.stringWidth("Register to publish your stages to the multiplayer game!") / 2, 505);
}
}
if (tabed != tab)
if (tabed == -2) {
tabed = -1;
} else {
tabed = tab;
}
rd.setColor(new Color(0, 0, 0));
rd.fillRect(0, 0, 800, 25);
if (!onbtgame) {
rd.drawImage(btgame[0], 620, 0, null);
} else {
rd.drawImage(btgame[1], 620, 0, null);
}
rd.setFont(new Font("Arial", 1, 13));
ftm = rd.getFontMetrics();
final String[] strings = {
"Stage", "Build", "View & Edit", "Publish"
};
final int[] is = {
0, 0, 100, 90
};
final int[] is163 = {
0, 25, 25, 0
};
int i = 4;
if (stagename.equals("") || sfase != 0) {
tab = 0;
i = 1;
}
for (int i164 = 0; i164 < i; i164++) {
rd.setColor(new Color(170, 170, 170));
if (xm > is[0] && xm < is[3] && ym > 0 && ym < 25) {
rd.setColor(new Color(200, 200, 200));
}
if (tab == i164) {
rd.setColor(new Color(225, 225, 225));
}
rd.fillPolygon(is, is163, 4);
rd.setColor(new Color(0, 0, 0));
rd.drawString(strings[i164], i164 * 100 + 45 - ftm.stringWidth(strings[i164]) / 2, 17);
if (xm > is[0] && xm < is[3] && ym > 0 && ym < 25 && mousePressed == -1) {
tab = i164;
}
for (int i165 = 0; i165 < 4; i165++) {
is[i165] += 100;
}
}
if (mousePressed == -1) {
mousePressed = 0;
}
drawms();
repaint();
if (!exwist) {
try {
if (thredo != null) {
}
Thread.sleep(40L);
} catch (final InterruptedException interruptedexception) {
}
}
}
track.unload();
track = null;
rd.dispose();
System.gc();
//bco[selectedPart].x = ;
//bco[selectedPart].z = ;
//bco[selectedPart].y =;
//bco[selectedPart].xz = ;
}
private void savefile() {
    // Persists the current stage to mystages/<stagename>.txt: the options
    // header (tstage) followed by the part list (bstage).  Creates the
    // mystages folder on first use.  Failures are reported to the user via a
    // dialog rather than thrown.
    try {
        final File folder = new File("mystages/");
        if (!folder.exists()) {
            folder.mkdirs();
        }
        final File file = new File("mystages/" + stagename + ".txt");
        // try-with-resources closes the writer even when a write fails
        // (the original leaked the stream on any exception after opening).
        try (BufferedWriter bufferedwriter = new BufferedWriter(new FileWriter(file))) {
            bufferedwriter.write(tstage);
            bufferedwriter.write(bstage);
        }
    } catch (final Exception exception) {
        // Fixed typo in the user-facing message ("Deatials" -> "Details").
        JOptionPane.showMessageDialog(null, "Unable to save file! Error Details:\n"
                + exception, "Stage Maker", 1);
    }
    savesettings();
}
private void savesettings() {
    // Persists the last-used stage name and nickname to
    // mystages/settings.data, but only when either value actually changed
    // since the last save (sstage/suser cache the values on disk).
    if (!sstage.equals(stagename) || !suser.equals(tnick.getText())) {
        final String string = "" + stagename + "\n" + tnick.getText() + "\n\n";
        sstage = stagename;
        suser = tnick.getText();
        try {
            final File folder = new File("mystages/");
            if (!folder.exists()) {
                folder.mkdirs();
            }
            final File file = new File("mystages/settings.data");
            // try-with-resources closes the writer even when the write fails
            // (the original leaked the stream on any exception after opening).
            try (BufferedWriter bufferedwriter = new BufferedWriter(new FileWriter(file))) {
                bufferedwriter.write(string);
            }
        } catch (final Exception ignored) {
            // Best-effort: the settings file is a convenience, so failures
            // are deliberately swallowed, as in the original.
        }
    }
}
// Removed unused code found by UCDetector
// public String serverSvalue(final String string, final int i) {
// String string365 = "";
// try {
// int i366 = 0;
// int i367 = 0;
// int i368 = 0;
// String string369 = "";
// String string370 = "";
// for (; i366 < string.length() && i368 != 2; i366++) {
// string369 = "" + ("") + (string.charAt(i366));
// if (string369.equals("|")) {
// i367++;
// if (i368 == 1 || i367 > i)
// i368 = 2;
// } else if (i367 == i) {
// string370 = "" + (string370) + (string369);
// i368 = 1;
// string365 = string370;
// } catch (final Exception exception) {
// return string365;
private int servervalue(final String string, final int i) {
    // Extracts the i-th '|'-separated field from a server reply line and
    // parses it as an int.  Returns -1 when the field is missing, empty, or
    // not a valid integer (same fallback the hand-rolled scanner used).
    try {
        final String[] fields = string.split("\\|", -1);
        final String field = i < fields.length ? fields[i] : "";
        if (field.equals("")) {
            return -1;
        }
        return Integer.parseInt(field);
    } catch (final Exception ignored) {
        // Covers a negative index as well as malformed numbers.
        return -1;
    }
}
private void sortop() {
    // Serializes the stage's global options into the tstage header text:
    // snap/sky/fog/clouds/ground colors, texture set, fade source, fog
    // density, mountain generator, lap count, the optional soundtrack line,
    // a lightson() line for dark color schemes, and a trailing blank line.
    final StringBuilder header = new StringBuilder();
    header.append("snap(").append(m.snap[0]).append(',').append(m.snap[1])
            .append(',').append(m.snap[2]);
    header.append(")\r\nsky(").append(csky[0]).append(',').append(csky[1])
            .append(',').append(csky[2]);
    header.append(")\r\nfog(").append(cfade[0]).append(',').append(cfade[1])
            .append(',').append(cfade[2]);
    header.append(")\r\nclouds(").append(cldd[0]).append(',').append(cldd[1])
            .append(',').append(cldd[2]).append(',').append(cldd[3])
            .append(',').append(cldd[4]);
    header.append(")\r\nground(").append(cgrnd[0]).append(',').append(cgrnd[1])
            .append(',').append(cgrnd[2]);
    header.append(")\r\ntexture(").append(texture[0]).append(',').append(texture[1])
            .append(',').append(texture[2]).append(',').append(texture[3]);
    header.append(")\r\nfadefrom(").append(origfade);
    header.append(")\r\ndensity(").append((m.fogd + 1) / 2 - 1);
    header.append(")\r\nmountains(").append(m.mgen);
    header.append(")\r\nnlaps(").append(cp.nlaps).append(")\r\n");
    if (!trackname.equals("")) {
        header.append("soundtrack(").append(trackname).append(',').append(trackvol)
                .append(',').append(tracksize).append(")\r\n");
    }
    // Recompute the snap[] channels from m.snap (scaled and offset); when the
    // summed result is 110 or less the stage is treated as dark and gets a
    // lightson() line.
    for (int channel = 0; channel < 3; channel++) {
        snap[channel] = (int) (m.snap[channel] / 1.2F + 50.0F);
    }
    if (snap[0] + snap[1] + snap[2] <= 110) {
        header.append("lightson()\r\n");
    }
    header.append("\r\n");
    tstage = header.toString();
}
private void sortstage() {
final int[] is = new int[nob * 2];
final int[] is242 = new int[nob * 2];
for (int i = 0; i < nob; i++) {
is[i] = 0;
}
int i = 0;
int i243 = 0;
is242[i243] = 0;
i243++;
boolean bool = false;
int i244 = 0;
while (!bool) {
final int[] is245 = {
co[i].x + atp[co[i].colok][0], co[i].x + atp[co[i].colok][2]
};
final int[] is246 = {
co[i].z + atp[co[i].colok][1], co[i].z + atp[co[i].colok][3]
};
int i247 = co[i].roofat;
if (co[i].colok == 2) {
i247 += 30;
}
if (co[i].colok == 3) {
i247 -= 30;
}
if (co[i].colok == 15) {
i247 -= 90;
}
if (co[i].colok == 20) {
i247 -= 180;
}
if (co[i].colok == 26) {
i247 -= 90;
}
rot(is245, is246, co[i].x, co[i].z, i247, 2);
int i248 = -1;
int i249 = -1;
if (i244 != 0) {
for (int i250 = 0; i250 < nob; i250++) {
boolean bool251 = false;
if (i243 == 2 && i250 == 0) {
bool251 = true;
}
if (i != i250 && !bool251 && is[i250] == 0 && (co[i250].colok <= 14 || co[i250].colok >= 33)
&& (co[i250].colok < 39 || co[i250].colok >= 46) && co[i250].colok < 52) {
int i252 = 0;
if (co[i250].colok != 2 && co[i250].colok != 3 && co[i250].colok != 4 && co[i250].colok != 7
&& co[i250].colok != 9) {
if (i244 == 1 && co[i250].z > co[i].z && Math.abs(co[i250].x - co[i].x) < 1000
&& (co[i250].roofat == 180 || co[i250].roofat == 0)) {
i252 = 1;
}
if (i244 == 2 && co[i250].z < co[i].z && Math.abs(co[i250].x - co[i].x) < 1000
&& (co[i250].roofat == 180 || co[i250].roofat == 0)) {
i252 = 1;
}
if (i244 == 3 && co[i250].x > co[i].x && Math.abs(co[i250].z - co[i].z) < 1000
&& (co[i250].roofat == 90 || co[i250].roofat == -90)) {
i252 = 1;
}
if (i244 == 4 && co[i250].x < co[i].x && Math.abs(co[i250].z - co[i].z) < 1000
&& (co[i250].roofat == 90 || co[i250].roofat == -90)) {
i252 = 1;
}
} else {
i252 = 2;
}
if (i252 != 0) {
final int[] is253 = {
co[i250].x + atp[co[i250].colok][0], co[i250].x + atp[co[i250].colok][2]
};
final int[] is254 = {
co[i250].z + atp[co[i250].colok][1], co[i250].z + atp[co[i250].colok][3]
};
i247 = co[i250].roofat;
if (co[i250].colok == 2) {
i247 += 30;
}
if (co[i250].colok == 3) {
i247 -= 30;
}
if (co[i250].colok == 15) {
i247 -= 90;
}
if (co[i250].colok == 20) {
i247 -= 180;
}
if (co[i250].colok == 26) {
i247 -= 90;
}
rot(is253, is254, co[i250].x, co[i250].z, i247, 2);
if (i250 != 0) {
final int i256 = pyn(is253[0], is245[0], is254[0], is246[0]);
if (i256 >= 0 && (i256 < 100 || i252 != 2) && (i256 < i248 || i248 == -1)) {
i248 = i256;
i249 = i250;
}
}
int i257 = pyn(is253[1], is245[0], is254[1], is246[0]);
if (i257 >= 0 && (i257 < 100 || i252 != 2) && (i257 < i248 || i248 == -1)) {
i248 = i257;
i249 = i250;
}
if (i != 0) {
if (i250 != 0) {
i257 = pyn(is253[0], is245[1], is254[0], is246[1]);
if (i257 >= 0 && (i257 < 100 || i252 != 2) && i257 < i248) {
i248 = i257;
i249 = i250;
}
}
i257 = pyn(is253[1], is245[1], is254[1], is246[1]);
if (i257 >= 0 && (i257 < 100 || i252 != 2) && i257 < i248) {
i248 = i257;
i249 = i250;
}
}
}
}
}
}
if (i249 == -1) {
for (int i258 = 0; i258 < nob; i258++) {
boolean bool259 = false;
if (i243 == 2 && i258 == 0) {
bool259 = true;
}
if (i != i258 && !bool259 && is[i258] == 0 && (co[i258].colok <= 14 || co[i258].colok >= 33)
&& (co[i258].colok < 39 || co[i258].colok >= 46) && co[i258].colok < 52) {
final int[] is260 = {
co[i258].x + atp[co[i258].colok][0], co[i258].x + atp[co[i258].colok][2]
};
final int[] is261 = {
co[i258].z + atp[co[i258].colok][1], co[i258].z + atp[co[i258].colok][3]
};
i247 = co[i258].roofat;
if (co[i258].colok == 2) {
i247 += 30;
}
if (co[i258].colok == 3) {
i247 -= 30;
}
if (co[i258].colok == 15) {
i247 -= 90;
}
if (co[i258].colok == 20) {
i247 -= 180;
}
if (co[i258].colok == 26) {
i247 -= 90;
}
rot(is260, is261, co[i258].x, co[i258].z, i247, 2);
if (i258 != 0) {
final int i263 = pyn(is260[0], is245[0], is261[0], is246[0]);
if (i263 >= 0 && (i263 < i248 || i248 == -1)) {
i248 = i263;
i249 = i258;
}
}
int i264 = pyn(is260[1], is245[0], is261[1], is246[0]);
if (i264 >= 0 && (i264 < i248 || i248 == -1)) {
i248 = i264;
i249 = i258;
}
if (i != 0) {
if (i258 != 0) {
i264 = pyn(is260[0], is245[1], is261[0], is246[1]);
if (i264 >= 0 && i264 < i248) {
i248 = i264;
i249 = i258;
}
}
i264 = pyn(is260[1], is245[1], is261[1], is246[1]);
if (i264 >= 0 && i264 < i248) {
i248 = i264;
i249 = i258;
}
}
}
}
}
if (i249 != -1) {
i244 = 0;
if (co[i249].colok != 2 && co[i249].colok != 3 && co[i249].colok != 4 && co[i249].colok != 7
&& co[i249].colok != 9) {
if ((co[i249].roofat == 180 || co[i249].roofat == 0) && co[i249].z > co[i].z) {
i244 = 1;
}
if ((co[i249].roofat == 180 || co[i249].roofat == 0) && co[i249].z < co[i].z) {
i244 = 2;
}
if ((co[i249].roofat == 90 || co[i249].roofat == -90) && co[i249].x > co[i].x) {
i244 = 3;
}
if ((co[i249].roofat == 90 || co[i249].roofat == -90) && co[i249].x < co[i].x) {
i244 = 4;
}
}
if (co[i249].colok == 4 || co[i249].colok == 7 || co[i249].colok == 9) {
is[i249] = 2;
} else {
is[i249] = 1;
}
if (co[i249].colok >= 46 && co[i249].colok <= 51) {
is[i249] = 6;
}
i = i249;
if (i249 == 0) {
is[0] = 1;
bool = true;
} else {
is242[i243] = i249;
i243++;
}
} else {
is[0] = 1;
bool = true;
}
}
for (int i265 = 0; i265 < nob; i265++)
if (is[i265] == 0 && (co[i265].colok <= 14 || co[i265].colok >= 33)
&& (co[i265].colok < 39 || co[i265].colok >= 46) && co[i265].colok < 52) {
is242[i243] = i265;
i243++;
}
for (int i266 = 0; i266 < i243; i266++)
if (co[is242[i266]].colok >= 46 && co[is242[i266]].colok <= 51) {
for (int i267 = i266 + 1; i267 < i243; i267++) {
final int i268 = pyn(co[is242[i266]].x, co[is242[i267]].x, co[is242[i266]].z, co[is242[i267]].z);
if (i268 >= 0 && (co[is242[i267]].colok < 46 || co[is242[i266]].colok > 51)
&& i268 < (co[is242[i266]].maxR + co[is242[i267]].maxR) / 100
* ((co[is242[i266]].maxR + co[is242[i267]].maxR) / 100)) {
final int i269 = is242[i267];
for (int i270 = i267; i270 > i266; i270
is242[i270] = is242[i270 - 1];
}
is242[i266] = i269;
is[is242[i266]] = 0;
i266++;
}
}
}
int i271 = 1;
for (int i272 = 0; i272 < cp.nsp; i272++) {
for (int i273 = 0; i273 < nob; i273++)
if (co[i273].wh == i272 + 1 && (co[i273].colok == 30 || co[i273].colok == 32 || co[i273].colok == 54)) {
int i274 = -1;
int i275 = -1;
for (int i276 = i271; i276 < i243; i276++)
if (co[is242[i276]].colok != 30 && co[is242[i276]].colok != 32 && co[is242[i276]].colok != 54) {
final int i277 = pyn(co[i273].x, co[is242[i276]].x, co[i273].z, co[is242[i276]].z);
if (i277 >= 0 && (i277 < i274 || i274 == -1)) {
i274 = i277;
i275 = i276;
}
}
if (i275 != -1) {
is[is242[i275]] = 0;
for (int i278 = i243; i278 > i275; i278
is242[i278] = is242[i278 - 1];
}
is242[i275 + 1] = i273;
i271 = i275 + 1;
i243++;
} else {
is242[i243] = i273;
i271 = i243;
i243++;
}
}
}
for (int i279 = 0; i279 < nob; i279++)
if (co[i279].wh == 0 && (co[i279].colok == 30 || co[i279].colok == 32 || co[i279].colok == 54)) {
int i280 = -1;
int i281 = -1;
for (int i282 = i271; i282 < i243; i282++)
if (co[is242[i282]].colok != 30 && co[is242[i282]].colok != 32 && co[is242[i282]].colok != 54) {
final int i283 = pyn(co[i279].x, co[is242[i282]].x, co[i279].z, co[is242[i282]].z);
if (i283 >= 0 && (i283 < i280 || i280 == -1)) {
i280 = i283;
i281 = i282;
}
}
if (i281 != -1) {
is[is242[i281]] = 0;
for (int i284 = i243; i284 > i281; i284
is242[i284] = is242[i284 - 1];
}
is242[i281 + 1] = i279;
i243++;
} else {
is242[i243] = i279;
i243++;
}
}
for (int i285 = 0; i285 < nob; i285++)
if (co[i285].colok == 31) {
int i286 = -1;
int i287 = -1;
for (int i288 = 0; i288 < i243; i288++) {
final int i289 = pyn(co[i285].x, co[is242[i288]].x, co[i285].z, co[is242[i288]].z);
if (i289 >= 0 && (i289 < i286 || i286 == -1)) {
i286 = i289;
i287 = i288;
}
}
if (i287 != -1) {
for (int i290 = i243; i290 > i287; i290
is242[i290] = is242[i290 - 1];
}
is242[i287] = i285;
i243++;
} else {
is242[i243] = i285;
i243++;
}
}
for (int i291 = 0; i291 < nob; i291++)
if (co[i291].colok == 15 || co[i291].colok == 27 || co[i291].colok == 28 || co[i291].colok == 41
|| co[i291].colok == 44 || co[i291].colok == 52 || co[i291].colok == 53) {
int i292 = -1;
for (int i293 = 0; i293 < i243; i293++)
if ((co[is242[i293]].colok <= 14 || co[is242[i293]].colok >= 33) && co[is242[i293]].colok < 39) {
final int i294 = pyn(co[i291].x, co[is242[i293]].x, co[i291].z, co[is242[i293]].z);
if (i294 >= 0 && i294 < (co[i291].maxR + co[is242[i293]].maxR) / 100
* ((co[i291].maxR + co[is242[i293]].maxR) / 100)) {
i292 = i293;
}
}
if (i292 != -1) {
for (int i295 = i243; i295 > i292; i295
is242[i295] = is242[i295 - 1];
}
is242[i292 + 1] = i291;
i243++;
} else {
is242[i243] = i291;
i243++;
}
}
for (int i296 = 0; i296 < nob; i296++)
if (co[i296].colok >= 16 && co[i296].colok <= 25 || co[i296].colok == 40 || co[i296].colok == 42
|| co[i296].colok == 43 || co[i296].colok == 45) {
int i297 = -1;
for (int i298 = 0; i298 < i243; i298++)
if ((co[is242[i298]].colok <= 14 || co[is242[i298]].colok >= 33) && co[is242[i298]].colok < 39) {
final int i299 = pyn(co[i296].x, co[is242[i298]].x, co[i296].z, co[is242[i298]].z);
if (i299 >= 0 && i299 < (co[i296].maxR + co[is242[i298]].maxR) / 100
* ((co[i296].maxR + co[is242[i298]].maxR) / 100)) {
if (is[is242[i298]] != 0) {
is[is242[i298]] = 0;
if (co[i296].colok != 20) {
is[i296] = 3;
} else {
is[i296] = 5;
}
}
i297 = i298;
}
}
if (i297 != -1) {
}
if (i297 != -1) {
for (int i300 = i243; i300 > i297; i300
is242[i300] = is242[i300 - 1];
}
is242[i297 + 1] = i296;
i243++;
} else {
is242[i243] = i296;
i243++;
}
}
for (int i301 = 0; i301 < nob; i301++)
if (co[i301].colok == 26 || co[i301].colok == 39) {
boolean bool302 = false;
if (ThreadLocalRandom.current().nextDouble() > ThreadLocalRandom.current().nextDouble()) {
bool302 = true;
if (co[i301].colok == 39)
if (ThreadLocalRandom.current().nextDouble() > ThreadLocalRandom.current().nextDouble()) {
bool302 = false;
} else if (ThreadLocalRandom.current().nextDouble() > ThreadLocalRandom.current().nextDouble()) {
bool302 = false;
}
}
int i303 = -1;
for (int i304 = 0; i304 < i243; i304++)
if ((co[is242[i304]].colok <= 14 || co[is242[i304]].colok >= 33) && co[is242[i304]].colok < 39) {
final int i305 = pyn(co[i301].x, co[is242[i304]].x, co[i301].z, co[is242[i304]].z);
if (i305 >= 0 && i305 < (co[i301].maxR + co[is242[i304]].maxR) / 100
* ((co[i301].maxR + co[is242[i304]].maxR) / 100)) {
boolean bool306 = false;
if (co[i301].colok == 26) {
if (co[i301].roofat == 90 && co[is242[i304]].x > co[i301].x) {
bool306 = true;
}
if (co[i301].roofat == -90 && co[is242[i304]].x < co[i301].x) {
bool306 = true;
}
if (co[i301].roofat == 0 && co[is242[i304]].z < co[i301].z) {
bool306 = true;
}
if (co[i301].roofat == 180 && co[is242[i304]].z > co[i301].z) {
bool306 = true;
}
}
if (co[i301].colok == 39) {
if (co[i301].roofat == 90 && co[is242[i304]].z > co[i301].z) {
bool306 = true;
}
if (co[i301].roofat == -90 && co[is242[i304]].z < co[i301].z) {
bool306 = true;
}
if (co[i301].roofat == 0 && co[is242[i304]].x > co[i301].x) {
bool306 = true;
}
if (co[i301].roofat == 180 && co[is242[i304]].x < co[i301].x) {
bool306 = true;
}
}
if (bool306) {
if (is[is242[i304]] == 1 && bool302) {
is[is242[i304]] = 0;
is[i301] = 4;
}
i303 = i304;
}
}
}
if (i303 != -1) {
for (int i307 = i243; i307 > i303; i307
is242[i307] = is242[i307 - 1];
}
is242[i303 + 1] = i301;
i243++;
} else {
is242[i243] = i301;
i243++;
}
}
for (int i308 = 0; i308 < nob; i308++)
if (co[i308].colok >= 55 && co[i308].colok <= maxpart || co[i308].colok == bumppart) {
is242[i243] = i308;
i243++;
}
int i309 = 0;
int i310 = 0;
int i311 = 0;
int i312 = 0;
bstage = "";
for (int i313 = 0; i313 < i243; i313++) {
if (co[is242[i313]].colok != 30 && co[is242[i313]].colok != 31 && co[is242[i313]].colok != 32
&& co[is242[i313]].colok != 54 && co[is242[i313]].colok != bumppart) {
String string = "";
if (is[is242[i313]] == 1) {
string = "p";
}
if (is[is242[i313]] == 2) {
string = "pt";
}
if (is[is242[i313]] == 3) {
string = "pr";
}
if (is[is242[i313]] == 4) {
string = "ph";
}
if (is[is242[i313]] == 5) {
string = "pl";
}
if (is[is242[i313]] == 6) {
string = "pr";
}
System.out.println("placing");
System.out.println("roof2: " + co[is242[i313]].roofat);
if (co[is242[i313]].roofat == 250) {
}
if (!floats) {
this.bstage = "" + this.bstage + "set(" + (co[is242[i313]].colok + 10)
+ "," + co[is242[i313]].x + "," + co[is242[i313]].z + "," + co[is242[i313]].roofat + ")"
+ string + "\r\n";
} else {
this.bstage = "" + this.bstage + "set(" + (co[is242[i313]].colok + 10)
+ "," + co[is242[i313]].x + "," + co[is242[i313]].z + "," + co[is242[i313]].y + ","
+ co[is242[i313]].roofat + ")" + string + "\r\n";
}
}
if (co[is242[i313]].colok == 30 || co[is242[i313]].colok == 32) {
if (co[is242[i313]].roofat == 180) {
co[is242[i313]].roofat = 0;
}
String string = "";
if (co[is242[i313]].wh != 0) {
string = "r";
}
if (floats) {
this.bstage = "" + this.bstage + "chk(" + (co[is242[i313]].colok + 10)
+ "," + co[is242[i313]].x + "," + co[is242[i313]].z + "," + co[is242[i313]].y + ","
+ co[is242[i313]].roofat + ")" + string + "\r\n";
} else {
this.bstage = "" + this.bstage + "chk(" + (co[is242[i313]].colok + 10)
+ "," + co[is242[i313]].x + "," + co[is242[i313]].z + "," + co[is242[i313]].roofat + ")"
+ string + "\r\n";
}
}
if (co[is242[i313]].colok == 54) {
if (co[is242[i313]].roofat == 180) {
co[is242[i313]].roofat = 0;
}
String string = "";
if (co[is242[i313]].wh != 0) {
string = "r";
}
// this.bstage = "" + (this.bstage) + ("chk(")
// + (co[is242[i313]].colok + 10) + (",") + (co[is242[i313]].x) + (",")
// + (co[is242[i313]].z) + (",") + (co[is242[i313]].y) + (",") + (co[is242[i313]].roofat)
// + (")") + (string) + ("\r\n");
if (floats) {
this.bstage = "" + this.bstage + "chk(" + (co[is242[i313]].colok + 10)
+ "," + co[is242[i313]].x + "," + co[is242[i313]].z + "," + co[is242[i313]].y + ","
+ co[is242[i313]].roofat + ")" + string + "\r\n";
} else {
this.bstage = "" + this.bstage + "chk(" + (co[is242[i313]].colok + 10)
+ "," + co[is242[i313]].x + "," + co[is242[i313]].z + "," + co[is242[i313]].roofat + ")"
+ string + "\r\n";
}
}
if (co[is242[i313]].colok == 31) {
this.bstage = "" + this.bstage + "fix(" + (co[is242[i313]].colok + 10)
+ "," + co[is242[i313]].x + "," + co[is242[i313]].z + "," + co[is242[i313]].y + ","
+ co[is242[i313]].roofat + ")\r\n";
}
if (co[is242[i313]].colok == bumppart) {
this.bstage = "" + this.bstage + "pile(" + co[is242[i313]].srz + ","
+ co[is242[i313]].srx + "," + co[is242[i313]].sry + "," + co[is242[i313]].x + ","
+ co[is242[i313]].z + ")\r\n";
}
if (co[is242[i313]].x + co[is242[i313]].maxR > i309) {
i309 = co[is242[i313]].x + co[is242[i313]].maxR;
}
if (co[is242[i313]].x - co[is242[i313]].maxR < i311) {
i311 = co[is242[i313]].x - co[is242[i313]].maxR;
}
if (co[is242[i313]].z + co[is242[i313]].maxR > i310) {
i310 = co[is242[i313]].z + co[is242[i313]].maxR;
}
if (co[is242[i313]].z - co[is242[i313]].maxR < i312) {
i312 = co[is242[i313]].z - co[is242[i313]].maxR;
}
}
int i319 = i311 - 0;
int i320 = i309 + 0;
final int i321 = (int) ((i320 - i319) / 4800.0F) + 1;
int i322 = (i321 * 4800 - (i320 - i319)) / 2;
i319 -= i322;
i320 += i322;
final int i323 = i319 + 2400;
int i324 = i312 - 0;
int i325 = i310 + 0;
final int i326 = (int) ((i325 - i324) / 4800.0F) + 1;
i322 = (i326 * 4800 - (i325 - i324)) / 2;
i324 -= i322;
i325 += i322;
final int i327 = i324 + 2400;
this.bstage = "" + this.bstage + "\r\nmaxl(" + i326 + "," + i319 + "," + i327
+ ")\r\nmaxb(" + i321 + "," + i324 + "," + i323 + ")\r\nmaxr(" + i326 + "," + i320 + "," + i327
+ ")\r\nmaxt(" + i321 + "," + i325 + "," + i323 + ")\r\n";
}
@Override
public void start() {
    // Applet lifecycle hook: lazily create the worker thread and start it
    // only when freshly created.  The original called thredo.start()
    // unconditionally, which throws IllegalThreadStateException if start()
    // is invoked again after the thread has already been started.
    if (thredo == null) {
        thredo = new Thread(this);
        thredo.start();
    }
}
@Override
public void stop() {
    // Applet lifecycle hook: raise the exit flag so the worker loop winds
    // down on its own (the flag gates the per-frame Thread.sleep in the main
    // loop); the thread is not interrupted or joined here.
    exwist = true;
}
@Override
public void update(final Graphics graphics) {
    // Delegate straight to paint(), bypassing Component.update's default
    // background clear — a common flicker-reduction override for animated
    // AWT components.
    paint(graphics);
}
} |
package mondrian.rolap;
import java.util.List;
import java.util.ArrayList;
import mondrian.olap.Connection;
import mondrian.olap.Query;
import mondrian.olap.Result;
import mondrian.olap.Util;
import mondrian.rolap.cache.CachePool;
import mondrian.rolap.cache.HardSmartCache;
import mondrian.test.FoodMartTestCase;
import mondrian.spi.impl.DataSourceChangeListenerImpl;
import mondrian.spi.impl.DataSourceChangeListenerImpl2;
import org.apache.log4j.Logger;
/**
* Tests for testing the DataSourceChangeListener plugin.
*
* @author Bart Pappyn
* @since Jan 05, 2007
* @version $Id$
*/
public class DataSourceChangeListenerTest extends FoodMartTestCase {
    private static final Logger logger =
        Logger.getLogger(DataSourceChangeListenerTest.class);
    SqlConstraintFactory scf = SqlConstraintFactory.instance();

    public DataSourceChangeListenerTest() {
        super();
    }

    public DataSourceChangeListenerTest(String name) {
        super(name);
    }

    /**
     * Tests whether the data source change listener plugin is able to tell
     * mondrian to read the hierarchy again: a "never changed" listener must
     * keep the member caches valid, an "always changed" listener must
     * invalidate them.
     */
    public void testDataSourceChangeListenerPlugin() {
        CachePool.instance().flush();
        // Use hard caching for testing. When using soft references, we can
        // not test caching because things may be garbage collected during
        // the tests.
        SmartMemberReader smr = getSmartMemberReader("Store");
        smr.mapLevelToMembers.setCache(
            new HardSmartCache<
                SmartMemberListCache.Key2<RolapLevel, Object>,
                List<RolapMember>>());
        smr.mapMemberToChildren.setCache(
            new HardSmartCache<
                SmartMemberListCache.Key2<RolapMember, Object>,
                List<RolapMember>>());
        smr.mapKeyToMember = new HardSmartCache<Object, RolapMember>();
        // Install a hook that records every SQL statement mondrian executes,
        // so each step below can check whether a query hit the database or
        // was answered entirely from cache.
        SqlLogger sqlLogger = new SqlLogger();
        RolapUtil.threadHooks.set(sqlLogger);
        try {
            String s1, s2, s3, s4, s5;
            // The cache was flushed above, so this query must execute SQL.
            RolapResult r1 = (RolapResult) executeQuery(
                "select {[Store].[All Stores].[USA].[CA].[San Francisco]} on columns from [Sales]");
            Util.discard(r1);
            s1 = sqlLogger.getSqlQueries().toString();
            sqlLogger.clear();
            // s1 should not be empty.  The original used assertNotSame,
            // which only compares object references and so could never fail
            // here; assert on content instead.
            assertFalse("[]".equals(s1));
            // Run query again, to make sure only cache is used.
            RolapResult r2 = (RolapResult) executeQuery(
                "select {[Store].[All Stores].[USA].[CA].[San Francisco]} on columns from [Sales]");
            Util.discard(r2);
            s2 = sqlLogger.getSqlQueries().toString();
            sqlLogger.clear();
            assertEquals("[]", s2);
            // Attach dummy change listener that tells mondrian the
            // datasource is never changed.
            smr.changeListener = new DataSourceChangeListenerImpl();
            // Run query again; "never changed" must keep the cache valid,
            // so no SQL is issued.
            RolapResult r3 = (RolapResult) executeQuery(
                "select {[Store].[All Stores].[USA].[CA].[San Francisco]} on columns from [Sales]");
            Util.discard(r3);
            s3 = sqlLogger.getSqlQueries().toString();
            sqlLogger.clear();
            assertEquals("[]", s3);
            // Manually clear the cache so the SQL of the next, uncached run
            // can be compared with the "always changed" run below.
            smr.mapKeyToMember.clear();
            smr.mapLevelToMembers.clear();
            smr.mapMemberToChildren.clear();
            // Run query again; the empty cache forces SQL to be executed.
            RolapResult r4 = (RolapResult) executeQuery(
                "select {[Store].[All Stores].[USA].[CA].[San Francisco]} on columns from [Sales]");
            Util.discard(r4);
            s4 = sqlLogger.getSqlQueries().toString();
            sqlLogger.clear();
            // s4 should not be empty (content check — see note on s1).
            assertFalse("[]".equals(s4));
            // Attach dummy change listener that tells mondrian the
            // datasource is always changed.
            smr.changeListener = new DataSourceChangeListenerImpl2();
            // Run query again; "always changed" must invalidate the cache,
            // producing the same SQL as the uncached run above.
            RolapResult r5 = (RolapResult) executeQuery(
                "select {[Store].[All Stores].[USA].[CA].[San Francisco]} on columns from [Sales]");
            Util.discard(r5);
            s5 = sqlLogger.getSqlQueries().toString();
            sqlLogger.clear();
            assertEquals(s4, s5);
        } finally {
            // Always detach the listener and the SQL hook so other tests on
            // this thread are unaffected.
            smr.changeListener = null;
            RolapUtil.threadHooks.set(null);
        }
    }

    /** Records the SQL statements mondrian executes on the current thread. */
    private static class SqlLogger implements RolapUtil.ExecuteQueryHook {
        private final List<String> sqlQueries;

        public SqlLogger() {
            this.sqlQueries = new ArrayList<String>();
        }

        public void clear() {
            sqlQueries.clear();
        }

        public List<String> getSqlQueries() {
            return sqlQueries;
        }

        public void onExecuteQuery(String sql) {
            sqlQueries.add(sql);
        }
    }

    /** Parses and executes an MDX query on the given connection. */
    Result executeQuery(String mdx, Connection connection) {
        Query query = connection.parseQuery(mdx);
        return connection.execute(query);
    }

    /**
     * Returns the smart member reader of the named hierarchy of the Sales
     * cube on a fresh (non-shared) connection.
     */
    SmartMemberReader getSmartMemberReader(String hierName) {
        Connection con = super.getConnection(false);
        return getSmartMemberReader(con, hierName);
    }

    SmartMemberReader getSmartMemberReader(Connection con, String hierName) {
        RolapCube cube = (RolapCube) con.getSchema().lookupCube("Sales", true);
        RolapSchemaReader schemaReader = (RolapSchemaReader) cube.getSchemaReader();
        RolapHierarchy hierarchy = (RolapHierarchy) cube.lookupHierarchy(hierName, false);
        assertNotNull(hierarchy);
        return (SmartMemberReader) hierarchy.getMemberReader(schemaReader.getRole());
    }
}
// End DataSourceChangeListenerTest.java
package sample.ble.sensortag;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattService;
import android.content.Context;
import android.content.Intent;
import android.location.Criteria;
import android.location.Location;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.ExpandableListView;
import android.widget.SimpleExpandableListAdapter;
import android.widget.TextView;
import android.widget.Toast;
import sample.ble.sensortag.adapters.BleDevicesAdapter;
import sample.ble.sensortag.adapters.TiServicesAdapter;
import sample.ble.sensortag.ble.BleDevicesScanner;
import sample.ble.sensortag.config.AppConfig;
import sample.ble.sensortag.fusion.SensorFusionActivity;
import sample.ble.sensortag.sensor.TiSensor;
import sample.ble.sensortag.sensor.TiSensors;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
/**
* For a given BLE device, this Activity provides the user interface to connect, display data,
* and display GATT services and characteristics supported by the device. The Activity
* communicates with {@code BleService}, which in turn interacts with the
* Bluetooth LE API.
*/
public class DeviceServicesActivity extends BleServiceBindingActivity
implements ExpandableListView.OnChildClickListener,
TiServicesAdapter.OnServiceItemClickListener {
@SuppressWarnings("UnusedDeclaration")
private final static String TAG = DeviceServicesActivity.class.getSimpleName();
private TextView dataField;
private ExpandableListView gattServicesList;
private TiServicesAdapter gattServiceAdapter;
private BluetoothDevice device;
private BleDevicesScanner scanner;
private BleDevicesAdapter leDeviceListAdapter;
private BluetoothAdapter bluetoothAdapter;
private int rssi;
private TiSensor<?> activeSensor;
TextView calDistView, ptsRecView;
EditText actDistEdit;
private final HashMap<BluetoothDevice, Integer> rssiMap = new HashMap<BluetoothDevice, Integer>();
/*@Override
public void onDisconnected() {
finish();
}*/
@Override
public void onServiceDiscovered() {
// Show all the supported services and characteristics on the user interface.
displayGattServices(getBleService().getSupportedGattServices());
}
@Override
public void onDataAvailable(String serviceUuid, String characteristicUUid, String text, byte[] data) {
dataField.setText(text);
}
@Override
public boolean onChildClick(ExpandableListView parent, View v, int groupPosition,
int childPosition, long id) {
if (gattServiceAdapter == null)
return false;
final BluetoothGattCharacteristic characteristic = gattServiceAdapter.getChild(groupPosition, childPosition);
final TiSensor<?> sensor = TiSensors.getSensor(characteristic.getService().getUuid().toString());
if (activeSensor != null)
getBleService().enableSensor(activeSensor, false);
if (sensor == null) {
getBleService().getBleManager().readCharacteristic(characteristic);
return true;
}
if (sensor == activeSensor)
return true;
activeSensor = sensor;
getBleService().enableSensor(sensor, true);
return true;
}
@Override
public void onServiceUpdated(BluetoothGattService service) {
final TiSensor<?> sensor = TiSensors.getSensor(service.getUuid().toString());
if (sensor == null)
return;
getBleService().updateSensor(sensor);
}
private void displayGattServices(List<BluetoothGattService> gattServices) {
if (gattServices == null)
return;
gattServiceAdapter = new TiServicesAdapter(this, gattServices);
gattServiceAdapter.setServiceListener(this);
gattServicesList.setAdapter(gattServiceAdapter);
}
public void startRecording(View view) //opens a file
{
Toast.makeText(getBaseContext(), "Begin Recording...", Toast.LENGTH_SHORT).show();
recording = true;
}
public void update(View view) //gets the most recent signal strength measurement and updates the displayed values
{
//rssiMap.put(device, rssi); //pass the bluetooth device to get the db level
// initialize scanner
/*
scanner = new BleDevicesScanner(bluetoothAdapter, new BluetoothAdapter.LeScanCallback() {
@Override
public void onLeScan(final BluetoothDevice mdevice, final int mrssi, byte[] scanRecord) {
leDeviceListAdapter.addDevice(mdevice, mrssi);
leDeviceListAdapter.notifyDataSetChanged();
if(mdevice == device)
{
Toast.makeText(getBaseContext(),
"Strength: " + mrssi + " db",
Toast.LENGTH_SHORT).show();
}
}
});
*/
//scanner.setScanPeriod(SCAN_PERIOD);
if(recording)
{
Toast.makeText(getBaseContext(), "Updating...", Toast.LENGTH_SHORT).show();
}
}
public void recordPoint(View view) //records a new measurement by adding the most recent value to the fully compiled string
{
if(recording)
{
Log.d("Devon Test", "logging...");
actDistString = actDistEdit.getText().toString(); //gets the distance set by the user as string
actDistance = Float.parseFloat(actDistString); //convert to float
fullyCompiledString.concat(ptIdx + " Actual: " + actDistance + "Calculated: " + calDistance + "\n"); //concatenate the most recent measurement to the existing data
}
}
public void endRecording(View view) //save file and close
{
Toast.makeText(getBaseContext(),
"Saving BLE File...",
Toast.LENGTH_SHORT).show();
try
{
//BLELogFileName = "BLELog" + myCalendar.getTimeInMillis() + ".txt"; //
BLELogFileName = "BLELog.txt";
Log.d("Dev", BLELogFileName);
/*
reportDirectoryName = new File(Environment.getRootDirectory(), "/BLELog_Files/");
Log.d("Dev", "2");
if(!reportDirectoryName.exists()){
Log.d("Dev", "3");
reportDirectoryName.mkdirs();
Log.d("Dev", "4");
}
Log.d("Dev", "5");
myFile = new File(reportDirectoryName, BLELogFileName);
Log.d("Dev", "6");
myFile.createNewFile();
*/
Log.d("Dev", "7");
myOutWriter = openFileOutput(BLELogFileName, Context.MODE_PRIVATE);
Log.d("Dev", "8");
try //write all the data to the file
{
Log.d("Dev", "9");
myOutWriter.write(fullyCompiledString.getBytes());
Log.d("Dev", "10");
}
catch (Exception e) {
Toast.makeText(getBaseContext(), "Error writing to GPS File",
Toast.LENGTH_SHORT).show();
}
myOutWriter.close(); //then close the output stream
Toast.makeText(getBaseContext(),
"BLE File Saved.",
Toast.LENGTH_SHORT).show();
}
catch (Exception e) {
Toast.makeText(getBaseContext(), "Error opening or closing BLE records File",
Toast.LENGTH_SHORT).show();
}
recording = false;
/*
try {//
Toast.makeText(getBaseContext(),
"BLE File successfully saved",
Toast.LENGTH_SHORT).show();
} catch (Exception e) {
Toast.makeText(getBaseContext(), "Error saving BLE File",
Toast.LENGTH_SHORT).show();
}
*/
}
} |
package de.sormuras.bach;
import de.sormuras.bach.util.Paths;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
/** Tool call builder. */
public class Call {
/*package-private*/ final String name;
/*package-private*/ final List<String> arguments;
public Call(String name, String... args) {
this.name = name;
this.arguments = new ArrayList<>(List.of(args));
}
public Call(Call that) {
this.name = that.name;
this.arguments = new ArrayList<>(that.arguments);
}
public Call add(Object object) {
arguments.add(object.toString());
return this;
}
public Call add(String key, Object value) {
return add(key).add(value);
}
public Call add(String key, List<Path> paths) {
if (paths.isEmpty()) return this;
return add(key).add(Paths.join(paths));
}
public <T> Call forEach(Iterable<T> arguments, BiConsumer<Call, T> visitor) {
arguments.forEach(argument -> visitor.accept(this, argument));
return this;
}
public Call iff(boolean predicate, Consumer<Call> visitor) {
if (predicate) visitor.accept(this);
return this;
}
public Call iff(boolean predicate, Consumer<Call> then, Consumer<Call> otherwise) {
if (predicate) then.accept(this); else otherwise.accept(this);
return this;
}
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
public <T> Call iff(Optional<T> optional, BiConsumer<Call, T> visitor) {
optional.ifPresent(value -> visitor.accept(this, value));
return this;
}
@Override
@SuppressWarnings("MethodDoesntCallSuperMethod")
public Call clone() {
return new Call(this);
}
public String[] toArray(boolean named) {
return (named ? toList(true) : arguments).toArray(String[]::new);
}
public List<String> toList(boolean named) {
if (!named) return List.copyOf(arguments);
var list = new ArrayList<String>(1 + arguments.size());
list.add(name);
list.addAll(arguments);
return List.copyOf(list);
}
@Override
public String toString() {
return "Call{name='" + name + "', arguments=" + arguments + '}';
}
} |
import java.util.*;
import java.io.*;
public class Map
{
public Map (String fileToLoad, boolean debug)
{
_theMap = new Vector<MapElement>();
_debug = debug;
if (!loadData(fileToLoad))
System.out.println("Error in loading data file: "+fileToLoad);
}
public Map (Map theCopy)
{
}
@Override
public String toString ()
{
if (_debug)
System.out.println("Dimensions <"+_width+", "+_height+">");
Enumeration<MapElement> iter = _theMap.elements();
String str = "";
while (iter.hasMoreElements())
{
MapElement theEntry = iter.nextElement();
str += theEntry.type();
System.out.println(theEntry.position());
if (theEntry.position().getX() == _width -1)
str += "\n";
}
return str;
}
private final boolean loadData (String file)
{
BufferedReader reader = null;
boolean valid = true;
try
{
reader = new BufferedReader(new FileReader(file));
String line = null;
while ((line = reader.readLine()) != null)
{
char[] asChar = line.toCharArray();
if (_width == 0)
_width = asChar.length;
for (int i = 0; i < _width; i++)
{
// remember x,y coords are reversed for arrays
if (asChar[i] == MapElement.TREE)
{
_theMap.add(new MapElement(new Coordinate(i, _height), MapElement.TREE));
}
else
{
if (asChar[i] == MapElement.OPEN)
{
_theMap.add(new MapElement(new Coordinate(i, _height), MapElement.OPEN));
}
else
{
System.out.println("Unknown character in data file: "+asChar[i]);
valid = false;
}
}
}
_height++;
}
}
catch (Throwable ex)
{
valid = false;
ex.printStackTrace();
}
finally
{
try
{
reader.close();
}
catch (Throwable ex)
{
}
}
return valid;
}
private Vector<MapElement> _theMap;
private int _height = 0;
private int _width = 0;
private boolean _debug;
} |
package mondrian.test.loader;
import mondrian.olap.MondrianResource;
import mondrian.olap.Util;
import mondrian.rolap.RolapUtil;
import mondrian.rolap.sql.SqlQuery;
import java.io.*;
import java.math.BigDecimal;
import java.sql.*;
import java.text.DecimalFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utility to load the FoodMart dataset into an arbitrary JDBC database.
*
* <p>It is known to work for the following databases:<ul>
*
* <li>MySQL 3.23 using MySQL-connector/J 3.0.16
* <p>On the command line:
*
* <blockquote><code>
* $ mysqladmin create foodmart<br/>
* $ java -cp 'classes;testclasses' mondrian.test.loader.MondrianFoodMartLoader
* -verbose -tables -data -indexes -jdbcDrivers=com.mysql.jdbc.Driver
* -outputJdbcURL=jdbc:mysql://localhost/foodmart
* </code></blockquote>
* </li>
*
* <li>MySQL 4.15 using MySQL-connector/J 3.0.16</li>
*
* <li>Postgres 8.0 beta using postgresql-driver-jdbc3-74-214.jar</li>
*
* </ul>
*
* @author jhyde
* @since 23 December, 2004
* @version $Id$
*/
public class MondrianFoodMartLoader {
private String jdbcDrivers;
private String jdbcURL;
private String userName;
private String password;
private String inputJdbcURL;
private String inputUserName;
private String inputPassword;
private String inputFile;
private String outputDirectory;
private boolean tables = false;
private boolean indexes = false;
private boolean data = false;
private static final String nl = System.getProperty("line.separator");
private boolean verbose = false;
private boolean jdbcInput = false;
private boolean jdbcOutput = false;
private int inputBatchSize = 50;
private Connection connection;
private Connection inputConnection;
private FileWriter fileOutput = null;
private SqlQuery sqlQuery;
private final HashMap mapTableNameToColumns = new HashMap();
public MondrianFoodMartLoader(String[] args) {
StringBuffer errorMessage = new StringBuffer();
for ( int i=0; i<args.length; i++ ) {
if (args[i].equals("-verbose")) {
verbose = true;
} else if (args[i].equals("-tables")) {
tables = true;
} else if (args[i].equals("-data")) {
data = true;
} else if (args[i].equals("-indexes")) {
indexes = true;
} else if (args[i].startsWith("-jdbcDrivers=")) {
jdbcDrivers = args[i].substring("-jdbcDrivers=".length());
} else if (args[i].startsWith("-outputJdbcURL=")) {
jdbcURL = args[i].substring("-outputJdbcURL=".length());
} else if (args[i].startsWith("-outputJdbcUser=")) {
userName = args[i].substring("-outputJdbcUser=".length());
} else if (args[i].startsWith("-outputJdbcPassword=")) {
password = args[i].substring("-outputJdbcPassword=".length());
} else if (args[i].startsWith("-inputJdbcURL=")) {
inputJdbcURL = args[i].substring("-inputJdbcURL=".length());
} else if (args[i].startsWith("-inputJdbcUser=")) {
inputUserName = args[i].substring("-inputJdbcUser=".length());
} else if (args[i].startsWith("-inputJdbcPassword=")) {
inputPassword = args[i].substring("-inputJdbcPassword=".length());
} else if (args[i].startsWith("-inputFile=")) {
inputFile = args[i].substring("-inputFile=".length());
} else if (args[i].startsWith("-outputDirectory=")) {
outputDirectory = args[i].substring("-outputDirectory=".length());
} else if (args[i].startsWith("-outputJdbcBatchSize=")) {
inputBatchSize = Integer.parseInt(args[i].substring("-outputJdbcBatchSize=".length()));
} else {
errorMessage.append("unknown arg: " + args[i] + "\n");
}
}
if (inputJdbcURL != null) {
jdbcInput = true;
if (inputFile != null) {
errorMessage.append("Specified both an input JDBC connection and an input file");
}
}
if (jdbcURL != null && outputDirectory == null) {
jdbcOutput = true;
}
if (errorMessage.length() > 0) {
usage();
throw MondrianResource.instance().newMissingArg(errorMessage.toString());
}
}
/** Prints command-line usage to standard output, one line at a time. */
public void usage() {
    final String[] usageLines = {
        "Usage: MondrianFoodMartLoader [-verbose] [-tables] [-data] [-indexes] -jdbcDrivers=<jdbcDriver> [-outputJdbcURL=<jdbcURL> [-outputJdbcUser=user] [-outputJdbcPassword=password] [-outputJdbcBatchSize=<batch size>] | -outputDirectory=<directory name>] [ [-inputJdbcURL=<jdbcURL> [-inputJdbcUser=user] [-inputJdbcPassword=password]] | [-inputfile=<file name>]]",
        "",
        "  <jdbcURL>         JDBC connect string for DB",
        "  [user]            JDBC user name for DB",
        "  [password]        JDBC password for user for DB",
        "                    If no source DB parameters are given, assumes data comes from file",
        "  [file name]       file containing test data - INSERT statements",
        "                    If no input file name or input JDBC parameters are given, assume insert statements come from demo/FoodMartData.sql file",
        "  [outputDirectory] Where FoodMartCreateTables.sql, FoodMartData.sql and FoodMartCreateIndexes.sql will be created",
        "  <batch size>      size of JDBC batch updates - default to 50 inserts",
        "  <jdbcDrivers>     Comma-separated list of JDBC drivers.",
        "                    They must be on the classpath.",
        "  -verbose          Verbose mode.",
        "  -tables           If specified, drop and create the tables.",
        "  -data             If specified, load the data.",
        "  -indexes          If specified, drop and create the tables.",
    };
    for (int i = 0; i < usageLines.length; i++) {
        System.out.println(usageLines[i]);
    }
}
/** Command-line entry point: parses the arguments, runs the load and times it. */
public static void main(String[] args) {
    System.out.println("Starting load at: " + (new Date()));
    try {
        final MondrianFoodMartLoader loader = new MondrianFoodMartLoader(args);
        loader.load();
    } catch (Throwable e) {
        e.printStackTrace();
    }
    System.out.println("Finished load at: " + (new Date()));
}
/**
 * Top-level driver: opens the output (and optional input) JDBC
 * connections, then creates tables, loads data and creates indexes as
 * requested by the command-line flags, closing all resources afterwards.
 *
 * @throws Exception on any driver, connection or load failure
 */
private void load() throws Exception {
    RolapUtil.loadDrivers(jdbcDrivers);
    // Connect to the output database; omit credentials when no user was given.
    if (userName == null) {
        connection = DriverManager.getConnection(jdbcURL);
    } else {
        connection = DriverManager.getConnection(jdbcURL, userName, password);
    }
    // Optionally connect to the input database as well.
    if (jdbcInput) {
        if (inputUserName == null) {
            inputConnection = DriverManager.getConnection(inputJdbcURL);
        } else {
            inputConnection = DriverManager.getConnection(inputJdbcURL, inputUserName, inputPassword);
        }
    }
    // The SqlQuery wraps the output DB's metadata for dialect decisions.
    final DatabaseMetaData metaData = connection.getMetaData();
    sqlQuery = new SqlQuery(metaData);
    try {
        createTables();  // This also initializes mapTableNameToColumns
        if (data) {
            if (jdbcInput) {
                loadDataFromJdbcInput();
            } else {
                loadDataFromFile();
            }
        }
        if (indexes) {
            createIndexes();
        }
    } finally {
        // Teardown; nulling each field marks it as closed.
        // NOTE(review): if connection.close() throws, the remaining
        // resources leak — confirm whether that is acceptable here.
        if (connection != null) {
            connection.close();
            connection = null;
        }
        if (inputConnection != null) {
            inputConnection.close();
            inputConnection = null;
        }
        if (fileOutput != null) {
            fileOutput.close();
            fileOutput = null;
        }
    }
}
private void loadDataFromFile() throws IOException, SQLException {
final InputStream is = openInputStream();
final InputStreamReader reader = new InputStreamReader(is);
final BufferedReader bufferedReader = new BufferedReader(reader);
final Pattern regex = Pattern.compile("INSERT INTO ([^ ]+)(.*)VALUES(.*)\\((.*)\\);");
String line;
int lineNumber = 0;
int tableRowCount = 0;
String prevTable = "";
String[] batch = new String[inputBatchSize];
int batchSize = 0;
while ((line = bufferedReader.readLine()) != null) {
++lineNumber;
if (line.startsWith("
continue;
}
// Split the up the line. For example,
// INSERT INTO foo VALUES (1, 'bar');
// would yield
// tableName = "foo"
// values = "1, 'bar'"
final Matcher matcher = regex.matcher(line);
if (!matcher.matches()) {
throw MondrianResource.instance().newInvalidInsertLine(
new Integer(lineNumber), line);
}
final String tableName = matcher.group(1); // e.g. "foo"
final String values = matcher.group(2); // e.g. "1, 'bar'"
Util.discard(values); // Not needed now
// If table just changed, flush the previous batch.
if (!tableName.equals(prevTable)) {
if (!prevTable.equals("")) {
System.out.println("Table " + prevTable +
": loaded " + tableRowCount + " rows.");
}
tableRowCount = 0;
writeBatch(batch, batchSize);
batchSize = 0;
prevTable = tableName;
}
// remove trailing ';'
assert line.endsWith(";");
line = line.substring(0, line.length() - 1);
// this database represents booleans as integers
if (sqlQuery.isMySQL()) {
line = line.replaceAll("false", "0")
.replaceAll("true", "1");
}
++tableRowCount;
batch[batchSize++] = line;
if (batchSize >= inputBatchSize) {
writeBatch(batch, batchSize);
batchSize = 0;
}
}
// Print summary of the final table.
if (!prevTable.equals("")) {
System.out.println("Table " + prevTable +
": loaded " + tableRowCount + " rows.");
tableRowCount = 0;
writeBatch(batch, batchSize);
batchSize = 0;
}
}
/**
 * Copies every table known from createTables() out of the input JDBC
 * connection into the output target (database or createData.sql file).
 * For each input table, all rows of the specified columns are read and
 * re-inserted one row at a time.
 */
private void loadDataFromJdbcInput() throws Exception {
    if (outputDirectory != null) {
        fileOutput = new FileWriter(new File(outputDirectory, "createData.sql"));
    }
    final Iterator tableIterator = mapTableNameToColumns.entrySet().iterator();
    while (tableIterator.hasNext()) {
        final Map.Entry tableEntry = (Map.Entry) tableIterator.next();
        final String tableName = (String) tableEntry.getKey();
        final Column[] tableColumns = (Column[]) tableEntry.getValue();
        final int rowsAdded = loadTable(tableName, tableColumns);
        System.out.println("Table " + tableName +
            ": loaded " + rowsAdded + " rows.");
    }
    if (outputDirectory != null) {
        fileOutput.close();
    }
}
/**
 * Streams all rows of one table from the input connection and writes
 * them to the output in batches of inputBatchSize.
 *
 * @param name    table to copy
 * @param columns columns to select, in order
 * @return the number of rows inserted
 */
private int loadTable(String name, Column[] columns) throws Exception {
    int rowsAdded = 0;
    StringBuffer buf = new StringBuffer();
    buf.append("select ");
    for (int i = 0; i < columns.length; i++) {
        if (i > 0) {
            buf.append(",");
        }
        buf.append(quoteId(columns[i].name));
    }
    buf.append(" from ")
        .append(quoteId(name));
    String ddl = buf.toString();
    Statement statement = inputConnection.createStatement();
    try {
        if (verbose) {
            System.out.println("Input table SQL: " + ddl);
        }
        ResultSet rs = statement.executeQuery(ddl);
        String[] batch = new String[inputBatchSize];
        int batchSize = 0;
        while (rs.next()) {
            // Accumulate a batch of insert statements, then flush it.
            batch[batchSize++] = createInsertStatement(rs, name, columns);
            if (batchSize >= inputBatchSize) {
                rowsAdded += writeBatch(batch, batchSize);
                batchSize = 0;
            }
        }
        // Flush any remaining partial batch.
        if (batchSize > 0) {
            rowsAdded += writeBatch(batch, batchSize);
        }
    } finally {
        // BUG FIX: the statement (and its result set) was never closed.
        statement.close();
    }
    return rowsAdded;
}
/**
 * Builds a single INSERT statement for the current row of the result
 * set, quoting identifiers and formatting each column value.
 */
private String createInsertStatement(ResultSet rs, String name, Column[] columns) throws Exception {
    StringBuffer sql = new StringBuffer();
    sql.append("INSERT INTO ")
        .append(quoteId(name))
        .append(" ( ");
    // Column list.
    for (int i = 0; i < columns.length; i++) {
        if (i > 0) {
            sql.append(",");
        }
        sql.append(quoteId(columns[i].name));
    }
    sql.append(" ) VALUES(");
    // Value list, in the same column order.
    for (int i = 0; i < columns.length; i++) {
        if (i > 0) {
            sql.append(",");
        }
        sql.append(columnValue(rs, columns[i]));
    }
    sql.append(" )");
    return sql.toString();
}
/**
 * Writes a batch of INSERT statements either to the SQL script file or
 * to the output database.
 *
 * @param batch     statements to write (only the first batchSize entries)
 * @param batchSize number of valid entries in batch
 * @return the number of statements written
 */
private int writeBatch(String[] batch, int batchSize) throws IOException, SQLException {
    if (outputDirectory != null) {
        for (int i = 0; i < batchSize; i++) {
            fileOutput.write(batch[i]);
            fileOutput.write(";\n");
        }
    } else {
        connection.setAutoCommit(false);
        Statement stmt = connection.createStatement();
        try {
            if (batchSize == 1) {
                // Don't use batching if there's only one item. This allows
                // us to work around bugs in the JDBC driver by setting
                // outputJdbcBatchSize=1.
                stmt.execute(batch[0]);
            } else {
                for (int i = 0; i < batchSize; i++) {
                    stmt.addBatch(batch[i]);
                }
                int[] updateCounts = stmt.executeBatch();
                int updates = 0;
                for (int i = 0; i < updateCounts.length; updates += updateCounts[i], i++) {
                    if (updateCounts[i] == 0) {
                        System.out.println("Error in SQL: " + batch[i]);
                    }
                }
                if (updates < batchSize) {
                    throw new RuntimeException("Failed to execute batch: " + batchSize + " versus " + updates);
                }
            }
        } finally {
            // BUG FIX: close the statement and restore auto-commit even when
            // the batch fails, so the connection is left in a sane state.
            stmt.close();
            connection.setAutoCommit(true);
        }
    }
    return batchSize;
}
/**
 * Opens the SQL data file: either the -inputFile argument or the
 * default demo/FoodMartData.sql.
 *
 * @return an open stream, or null when the file is missing or unreadable
 */
private FileInputStream openInputStream() {
    final File file = (inputFile != null) ? new File(inputFile) : new File("demo", "FoodMartData.sql");
    if (file.exists()) {
        try {
            return new FileInputStream(file);
        } catch (FileNotFoundException e) {
            // BUG FIX: this failure (e.g. a permission problem) was silently
            // swallowed; report it so the user knows why loading stops.
            System.out.println("Could not open input file: " + file);
            e.printStackTrace();
        }
    } else {
        System.out.println("No input file: " + file);
    }
    return null;
}
/**
 * Creates all indexes for the FoodMart schema, writing the DDL either to
 * the output connection or to createIndexes.sql when -outputDirectory is
 * set. The call order below determines the statement order in that file.
 *
 * @throws Exception if any index creation fails
 */
private void createIndexes() throws Exception {
    if (outputDirectory != null) {
        fileOutput = new FileWriter(new File(outputDirectory, "createIndexes.sql"));
    }
    // Dimension tables.
    createIndex(true, "account", "i_account_id", new String[] {"account_id"});
    createIndex(false, "account", "i_account_parent", new String[] {"account_parent"});
    createIndex(true, "category", "i_category_id", new String[] {"category_id"});
    createIndex(false, "category", "i_category_parent", new String[] {"category_parent"});
    createIndex(true, "currency", "i_currency", new String[] {"currency_id", "date"});
    createIndex(false, "customer", "i_customer_account_num", new String[] {"account_num"});
    createIndex(false, "customer", "i_customer_fname", new String[] {"fname"});
    createIndex(false, "customer", "i_customer_lname", new String[] {"lname"});
    createIndex(false, "customer", "i_customer_children_at_home", new String[] {"num_children_at_home"});
    createIndex(true, "customer", "i_customer_id", new String[] {"customer_id"});
    createIndex(false, "customer", "i_customer_postal_code", new String[] {"postal_code"});
    createIndex(false, "customer", "i_customer_region_id", new String[] {"customer_region_id"});
    createIndex(true, "department", "i_department_id", new String[] {"department_id"});
    createIndex(true, "employee", "i_employee_id", new String[] {"employee_id"});
    createIndex(false, "employee", "i_employee_department_id", new String[] {"department_id"});
    createIndex(false, "employee", "i_employee_store_id", new String[] {"store_id"});
    createIndex(false, "employee", "i_employee_supervisor_id", new String[] {"supervisor_id"});
    createIndex(true, "employee_closure", "i_employee_closure", new String[] {"supervisor_id", "employee_id"});
    createIndex(false, "employee_closure", "i_employee_closure_emp", new String[] {"employee_id"});
    // Fact tables: index the foreign keys used by the star joins.
    createIndex(false, "expense_fact", "i_expense_store_id", new String[] {"store_id"});
    createIndex(false, "expense_fact", "i_expense_account_id", new String[] {"account_id"});
    createIndex(false, "expense_fact", "i_expense_time_id", new String[] {"time_id"});
    createIndex(false, "inventory_fact_1997", "i_inv_1997_product_id", new String[] {"product_id"});
    createIndex(false, "inventory_fact_1997", "i_inv_1997_store_id", new String[] {"store_id"});
    createIndex(false, "inventory_fact_1997", "i_inv_1997_time_id", new String[] {"time_id"});
    createIndex(false, "inventory_fact_1997", "i_inv_1997_warehouse_id", new String[] {"warehouse_id"});
    createIndex(false, "inventory_fact_1998", "i_inv_1998_product_id", new String[] {"product_id"});
    createIndex(false, "inventory_fact_1998", "i_inv_1998_store_id", new String[] {"store_id"});
    createIndex(false, "inventory_fact_1998", "i_inv_1998_time_id", new String[] {"time_id"});
    createIndex(false, "inventory_fact_1998", "i_inv_1998_warehouse_id", new String[] {"warehouse_id"});
    // More dimension tables.
    createIndex(true, "position", "i_position_id", new String[] {"position_id"});
    createIndex(false, "product", "i_product_brand_name", new String[] {"brand_name"});
    createIndex(true, "product", "i_product_id", new String[] {"product_id"});
    createIndex(false, "product", "i_product_class_id", new String[] {"product_class_id"});
    createIndex(false, "product", "i_product_name", new String[] {"product_name"});
    createIndex(false, "product", "i_product_SKU", new String[] {"SKU"});
    createIndex(true, "promotion", "i_promotion_id", new String[] {"promotion_id"});
    createIndex(false, "promotion", "i_promotion_district_id", new String[] {"promotion_district_id"});
    createIndex(true, "reserve_employee", "i_reserve_employee_id", new String[] {"employee_id"});
    createIndex(false, "reserve_employee", "i_reserve_employee_dept_id", new String[] {"department_id"});
    createIndex(false, "reserve_employee", "i_reserve_employee_store_id", new String[] {"store_id"});
    createIndex(false, "reserve_employee", "i_reserve_employee_super_id", new String[] {"supervisor_id"});
    // Sales fact tables.
    createIndex(false, "sales_fact_1997", "i_sales_1997_customer_id", new String[] {"customer_id"});
    createIndex(false, "sales_fact_1997", "i_sales_1997_product_id", new String[] {"product_id"});
    createIndex(false, "sales_fact_1997", "i_sales_1997_promotion_id", new String[] {"promotion_id"});
    createIndex(false, "sales_fact_1997", "i_sales_1997_store_id", new String[] {"store_id"});
    createIndex(false, "sales_fact_1997", "i_sales_1997_time_id", new String[] {"time_id"});
    createIndex(false, "sales_fact_dec_1998", "i_sales_dec_1998_customer_id", new String[] {"customer_id"});
    createIndex(false, "sales_fact_dec_1998", "i_sales_dec_1998_product_id", new String[] {"product_id"});
    createIndex(false, "sales_fact_dec_1998", "i_sales_dec_1998_promotion_id", new String[] {"promotion_id"});
    createIndex(false, "sales_fact_dec_1998", "i_sales_dec_1998_store_id", new String[] {"store_id"});
    createIndex(false, "sales_fact_dec_1998", "i_sales_dec_1998_time_id", new String[] {"time_id"});
    createIndex(false, "sales_fact_1998", "i_sales_1998_customer_id", new String[] {"customer_id"});
    createIndex(false, "sales_fact_1998", "i_sales_1998_product_id", new String[] {"product_id"});
    createIndex(false, "sales_fact_1998", "i_sales_1998_promotion_id", new String[] {"promotion_id"});
    createIndex(false, "sales_fact_1998", "i_sales_1998_store_id", new String[] {"store_id"});
    createIndex(false, "sales_fact_1998", "i_sales_1998_time_id", new String[] {"time_id"});
    createIndex(true, "store", "i_store_id", new String[] {"store_id"});
    createIndex(false, "store", "i_store_region_id", new String[] {"region_id"});
    if (outputDirectory != null) {
        fileOutput.close();
    }
}
/**
 * Creates one index, either executing the DDL on the output connection
 * or appending it to the SQL script file.
 *
 * @param isUnique    whether to create a UNIQUE index
 * @param tableName   table the index is on
 * @param indexName   name of the new index
 * @param columnNames indexed columns, in order
 */
private void createIndex(
    boolean isUnique,
    String tableName,
    String indexName,
    String[] columnNames)
{
    try {
        StringBuffer buf = new StringBuffer();
        buf.append(isUnique ? "CREATE UNIQUE INDEX " : "CREATE INDEX ")
            .append(quoteId(indexName)).append(" ON ")
            .append(quoteId(tableName)).append(" (");
        for (int i = 0; i < columnNames.length; i++) {
            if (i > 0) {
                buf.append(", ");
            }
            buf.append(quoteId(columnNames[i]));
        }
        buf.append(")");
        final String ddl = buf.toString();
        if (verbose) {
            System.out.println(ddl);
        }
        if (jdbcOutput) {
            final Statement statement = connection.createStatement();
            try {
                statement.execute(ddl);
            } finally {
                // BUG FIX: the statement was never closed (resource leak).
                statement.close();
            }
        } else {
            fileOutput.write(ddl);
            fileOutput.write(";\n");
        }
    } catch (Exception e) {
        throw MondrianResource.instance().newCreateIndexFailed(indexName,
            tableName, e);
    }
}
/**
 * Defines every table of the FoodMart schema and emits the corresponding
 * CREATE TABLE statements (to the output JDBC connection, or to
 * createTables.sql when an output directory is configured).
 *
 * <p>Also initializes mapTableNameToColumns, which later phases (data load,
 * index creation) rely on, so this runs even when tables are not
 * [re]created.</p>
 *
 * @throws Exception on I/O or SQL failure
 */
private void createTables() throws Exception {
    if (outputDirectory != null) {
        fileOutput = new FileWriter(new File(outputDirectory, "createTables.sql"));
    }
    // Boolean-ish columns use BIT by default; Postgres has a native BOOLEAN.
    String booleanColumnType = "BIT";
    if (sqlQuery.isPostgres()) {
        booleanColumnType = "BOOLEAN";
    }
    // Fact tables.
    createTable("sales_fact_1997", new Column[] {
        new Column("product_id", "INTEGER", "NOT NULL"),
        new Column("time_id", "INTEGER", "NOT NULL"),
        new Column("customer_id", "INTEGER", "NOT NULL"),
        new Column("promotion_id", "INTEGER", "NOT NULL"),
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("store_sales", "DECIMAL(10,4)", "NOT NULL"),
        new Column("store_cost", "DECIMAL(10,4)", "NOT NULL"),
        new Column("unit_sales", "BIGINT", "NOT NULL"),
    });
    createTable("sales_fact_1998", new Column[] {
        new Column("product_id", "INTEGER", "NOT NULL"),
        new Column("time_id", "INTEGER", "NOT NULL"),
        new Column("customer_id", "INTEGER", "NOT NULL"),
        new Column("promotion_id", "INTEGER", "NOT NULL"),
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("store_sales", "DECIMAL(10,4)", "NOT NULL"),
        new Column("store_cost", "DECIMAL(10,4)", "NOT NULL"),
        new Column("unit_sales", "BIGINT", "NOT NULL"),
    });
    createTable("sales_fact_dec_1998", new Column[] {
        new Column("product_id", "INTEGER", "NOT NULL"),
        new Column("time_id", "INTEGER", "NOT NULL"),
        new Column("customer_id", "INTEGER", "NOT NULL"),
        new Column("promotion_id", "INTEGER", "NOT NULL"),
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("store_sales", "DECIMAL(10,4)", "NOT NULL"),
        new Column("store_cost", "DECIMAL(10,4)", "NOT NULL"),
        new Column("unit_sales", "BIGINT", "NOT NULL"),
    });
    createTable("inventory_fact_1997", new Column[] {
        new Column("product_id", "INTEGER", "NOT NULL"),
        new Column("time_id", "INTEGER", ""),
        new Column("warehouse_id", "INTEGER", ""),
        new Column("store_id", "INTEGER", ""),
        new Column("units_ordered", "INTEGER", ""),
        new Column("units_shipped", "INTEGER", ""),
        new Column("warehouse_sales", "DECIMAL(10,4)", ""),
        new Column("warehouse_cost", "DECIMAL(10,4)", ""),
        new Column("supply_time", "SMALLINT", ""),
        new Column("store_invoice", "DECIMAL(10,4)", ""),
    });
    createTable("inventory_fact_1998", new Column[] {
        new Column("product_id", "INTEGER", "NOT NULL"),
        new Column("time_id", "INTEGER", ""),
        new Column("warehouse_id", "INTEGER", ""),
        new Column("store_id", "INTEGER", ""),
        new Column("units_ordered", "INTEGER", ""),
        new Column("units_shipped", "INTEGER", ""),
        new Column("warehouse_sales", "DECIMAL(10,4)", ""),
        new Column("warehouse_cost", "DECIMAL(10,4)", ""),
        new Column("supply_time", "SMALLINT", ""),
        new Column("store_invoice", "DECIMAL(10,4)", ""),
    });
    // Dimension and auxiliary tables.
    createTable("account", new Column[] {
        new Column("account_id", "INTEGER", "NOT NULL"),
        new Column("account_parent", "INTEGER", ""),
        new Column("account_description", "VARCHAR(30)", ""),
        new Column("account_type", "VARCHAR(30)", "NOT NULL"),
        new Column("account_rollup", "VARCHAR(30)", "NOT NULL"),
        new Column("Custom_Members", "VARCHAR(255)", ""),
    });
    createTable("category", new Column[] {
        new Column("category_id", "VARCHAR(30)", "NOT NULL"),
        new Column("category_parent", "VARCHAR(30)", ""),
        new Column("category_description", "VARCHAR(30)", "NOT NULL"),
        new Column("category_rollup", "VARCHAR(30)", ""),
    });
    createTable("currency", new Column[] {
        new Column("currency_id", "INTEGER", "NOT NULL"),
        new Column("date", "DATE", "NOT NULL"),
        new Column("currency", "VARCHAR(30)", "NOT NULL"),
        new Column("conversion_ratio", "DECIMAL(10,4)", "NOT NULL"),
    });
    createTable("customer", new Column[] {
        new Column("customer_id", "INTEGER", "NOT NULL"),
        new Column("account_num", "BIGINT", "NOT NULL"),
        new Column("lname", "VARCHAR(30)", "NOT NULL"),
        new Column("fname", "VARCHAR(30)", "NOT NULL"),
        new Column("mi", "VARCHAR(30)", ""),
        new Column("address1", "VARCHAR(30)", ""),
        new Column("address2", "VARCHAR(30)", ""),
        new Column("address3", "VARCHAR(30)", ""),
        new Column("address4", "VARCHAR(30)", ""),
        new Column("city", "VARCHAR(30)", ""),
        new Column("state_province", "VARCHAR(30)", ""),
        new Column("postal_code", "VARCHAR(30)", "NOT NULL"),
        new Column("country", "VARCHAR(30)", "NOT NULL"),
        new Column("customer_region_id", "INTEGER", "NOT NULL"),
        new Column("phone1", "VARCHAR(30)", "NOT NULL"),
        new Column("phone2", "VARCHAR(30)", "NOT NULL"),
        new Column("birthdate", "DATE", "NOT NULL"),
        new Column("marital_status", "VARCHAR(30)", "NOT NULL"),
        new Column("yearly_income", "VARCHAR(30)", "NOT NULL"),
        new Column("gender", "VARCHAR(30)", "NOT NULL"),
        new Column("total_children", "SMALLINT", "NOT NULL"),
        new Column("num_children_at_home", "SMALLINT", "NOT NULL"),
        new Column("education", "VARCHAR(30)", "NOT NULL"),
        new Column("date_accnt_opened", "DATE", "NOT NULL"),
        new Column("member_card", "VARCHAR(30)", ""),
        new Column("occupation", "VARCHAR(30)", ""),
        new Column("houseowner", "VARCHAR(30)", ""),
        new Column("num_cars_owned", "INTEGER", ""),
    });
    createTable("days", new Column[] {
        new Column("day", "INTEGER", "NOT NULL"),
        new Column("week_day", "VARCHAR(30)", "NOT NULL"),
    });
    createTable("department", new Column[] {
        new Column("department_id", "INTEGER", "NOT NULL"),
        new Column("department_description", "VARCHAR(30)", "NOT NULL"),
    });
    createTable("employee", new Column[] {
        new Column("employee_id", "INTEGER", "NOT NULL"),
        new Column("full_name", "VARCHAR(30)", "NOT NULL"),
        new Column("first_name", "VARCHAR(30)", "NOT NULL"),
        new Column("last_name", "VARCHAR(30)", "NOT NULL"),
        new Column("position_id", "INTEGER", ""),
        new Column("position_title", "VARCHAR(30)", ""),
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("department_id", "INTEGER", "NOT NULL"),
        new Column("birth_date", "DATE", "NOT NULL"),
        new Column("hire_date", "TIMESTAMP", ""),
        new Column("end_date", "TIMESTAMP", ""),
        new Column("salary", "DECIMAL(10,4)", "NOT NULL"),
        new Column("supervisor_id", "INTEGER", ""),
        new Column("education_level", "VARCHAR(30)", "NOT NULL"),
        new Column("marital_status", "VARCHAR(30)", "NOT NULL"),
        new Column("gender", "VARCHAR(30)", "NOT NULL"),
        new Column("management_role", "VARCHAR(30)", ""),
    });
    createTable("employee_closure", new Column[] {
        new Column("employee_id", "INTEGER", "NOT NULL"),
        new Column("supervisor_id", "INTEGER", "NOT NULL"),
        new Column("distance", "INTEGER", ""),
    });
    createTable("expense_fact", new Column[] {
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("account_id", "INTEGER", "NOT NULL"),
        new Column("exp_date", "TIMESTAMP", "NOT NULL"),
        new Column("time_id", "INTEGER", "NOT NULL"),
        new Column("category_id", "VARCHAR(30)", "NOT NULL"),
        new Column("currency_id", "INTEGER", "NOT NULL"),
        new Column("amount", "DECIMAL(10,4)", "NOT NULL"),
    });
    createTable("position", new Column[] {
        new Column("position_id", "INTEGER", "NOT NULL"),
        new Column("position_title", "VARCHAR(30)", "NOT NULL"),
        new Column("pay_type", "VARCHAR(30)", "NOT NULL"),
        new Column("min_scale", "DECIMAL(10,4)", "NOT NULL"),
        new Column("max_scale", "DECIMAL(10,4)", "NOT NULL"),
        new Column("management_role", "VARCHAR(30)", "NOT NULL"),
    });
    createTable("product", new Column[] {
        new Column("product_class_id", "INTEGER", "NOT NULL"),
        new Column("product_id", "INTEGER", "NOT NULL"),
        new Column("brand_name", "VARCHAR(60)", ""),
        new Column("product_name", "VARCHAR(60)", "NOT NULL"),
        new Column("SKU", "BIGINT", "NOT NULL"),
        new Column("SRP", "DECIMAL(10,4)", ""),
        new Column("gross_weight", "REAL", ""),
        new Column("net_weight", "REAL", ""),
        new Column("recyclable_package", booleanColumnType, ""),
        new Column("low_fat", booleanColumnType, ""),
        new Column("units_per_case", "SMALLINT", ""),
        new Column("cases_per_pallet", "SMALLINT", ""),
        new Column("shelf_width", "REAL", ""),
        new Column("shelf_height", "REAL", ""),
        new Column("shelf_depth", "REAL", ""),
    });
    createTable("product_class", new Column[] {
        new Column("product_class_id", "INTEGER", "NOT NULL"),
        new Column("product_subcategory", "VARCHAR(30)", ""),
        new Column("product_category", "VARCHAR(30)", ""),
        new Column("product_department", "VARCHAR(30)", ""),
        new Column("product_family", "VARCHAR(30)", ""),
    });
    createTable("promotion", new Column[] {
        new Column("promotion_id", "INTEGER", "NOT NULL"),
        new Column("promotion_district_id", "INTEGER", ""),
        new Column("promotion_name", "VARCHAR(30)", ""),
        new Column("media_type", "VARCHAR(30)", ""),
        new Column("cost", "BIGINT", ""),
        new Column("start_date", "TIMESTAMP", ""),
        new Column("end_date", "TIMESTAMP", ""),
    });
    createTable("region", new Column[] {
        new Column("region_id", "INTEGER", "NOT NULL"),
        new Column("sales_city", "VARCHAR(30)", ""),
        new Column("sales_state_province", "VARCHAR(30)", ""),
        new Column("sales_district", "VARCHAR(30)", ""),
        new Column("sales_region", "VARCHAR(30)", ""),
        new Column("sales_country", "VARCHAR(30)", ""),
        new Column("sales_district_id", "INTEGER", ""),
    });
    createTable("reserve_employee", new Column[] {
        new Column("employee_id", "INTEGER", "NOT NULL"),
        new Column("full_name", "VARCHAR(30)", "NOT NULL"),
        new Column("first_name", "VARCHAR(30)", "NOT NULL"),
        new Column("last_name", "VARCHAR(30)", "NOT NULL"),
        new Column("position_id", "INTEGER", ""),
        new Column("position_title", "VARCHAR(30)", ""),
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("department_id", "INTEGER", "NOT NULL"),
        new Column("birth_date", "TIMESTAMP", "NOT NULL"),
        new Column("hire_date", "TIMESTAMP", ""),
        new Column("end_date", "TIMESTAMP", ""),
        new Column("salary", "DECIMAL(10,4)", "NOT NULL"),
        new Column("supervisor_id", "INTEGER", ""),
        new Column("education_level", "VARCHAR(30)", "NOT NULL"),
        new Column("marital_status", "VARCHAR(30)", "NOT NULL"),
        new Column("gender", "VARCHAR(30)", "NOT NULL"),
    });
    createTable("salary", new Column[] {
        new Column("pay_date", "TIMESTAMP", "NOT NULL"),
        new Column("employee_id", "INTEGER", "NOT NULL"),
        new Column("department_id", "INTEGER", "NOT NULL"),
        new Column("currency_id", "INTEGER", "NOT NULL"),
        new Column("salary_paid", "DECIMAL(10,4)", "NOT NULL"),
        new Column("overtime_paid", "DECIMAL(10,4)", "NOT NULL"),
        new Column("vacation_accrued", "INTEGER", "NOT NULL"),
        new Column("vacation_used", "INTEGER", "NOT NULL"),
    });
    createTable("store", new Column[] {
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("store_type", "VARCHAR(30)", ""),
        new Column("region_id", "INTEGER", ""),
        new Column("store_name", "VARCHAR(30)", ""),
        new Column("store_number", "BIGINT", ""),
        new Column("store_street_address", "VARCHAR(30)", ""),
        new Column("store_city", "VARCHAR(30)", ""),
        new Column("store_state", "VARCHAR(30)", ""),
        new Column("store_postal_code", "VARCHAR(30)", ""),
        new Column("store_country", "VARCHAR(30)", ""),
        new Column("store_manager", "VARCHAR(30)", ""),
        new Column("store_phone", "VARCHAR(30)", ""),
        new Column("store_fax", "VARCHAR(30)", ""),
        new Column("first_opened_date", "TIMESTAMP", ""),
        new Column("last_remodel_date", "TIMESTAMP", ""),
        new Column("store_sqft", "BIGINT", ""),
        new Column("grocery_sqft", "BIGINT", ""),
        new Column("frozen_sqft", "BIGINT", ""),
        new Column("meat_sqft", "BIGINT", ""),
        new Column("coffee_bar", booleanColumnType, ""),
        new Column("video_store", booleanColumnType, ""),
        new Column("salad_bar", booleanColumnType, ""),
        new Column("prepared_food", booleanColumnType, ""),
        new Column("florist", booleanColumnType, ""),
    });
    // Same layout as "store"; used for ragged-hierarchy tests.
    createTable("store_ragged", new Column[] {
        new Column("store_id", "INTEGER", "NOT NULL"),
        new Column("store_type", "VARCHAR(30)", ""),
        new Column("region_id", "INTEGER", ""),
        new Column("store_name", "VARCHAR(30)", ""),
        new Column("store_number", "BIGINT", ""),
        new Column("store_street_address", "VARCHAR(30)", ""),
        new Column("store_city", "VARCHAR(30)", ""),
        new Column("store_state", "VARCHAR(30)", ""),
        new Column("store_postal_code", "VARCHAR(30)", ""),
        new Column("store_country", "VARCHAR(30)", ""),
        new Column("store_manager", "VARCHAR(30)", ""),
        new Column("store_phone", "VARCHAR(30)", ""),
        new Column("store_fax", "VARCHAR(30)", ""),
        new Column("first_opened_date", "TIMESTAMP", ""),
        new Column("last_remodel_date", "TIMESTAMP", ""),
        new Column("store_sqft", "BIGINT", ""),
        new Column("grocery_sqft", "BIGINT", ""),
        new Column("frozen_sqft", "BIGINT", ""),
        new Column("meat_sqft", "BIGINT", ""),
        new Column("coffee_bar", booleanColumnType, ""),
        new Column("video_store", booleanColumnType, ""),
        new Column("salad_bar", booleanColumnType, ""),
        new Column("prepared_food", booleanColumnType, ""),
        new Column("florist", booleanColumnType, ""),
    });
    createTable("time_by_day", new Column[] {
        new Column("time_id", "INTEGER", "NOT NULL"),
        new Column("the_date", "TIMESTAMP", ""),
        new Column("the_day", "VARCHAR(30)", ""),
        new Column("the_month", "VARCHAR(30)", ""),
        new Column("the_year", "SMALLINT", ""),
        new Column("day_of_month", "SMALLINT", ""),
        new Column("week_of_year", "INTEGER", ""),
        new Column("month_of_year", "SMALLINT", ""),
        new Column("quarter", "VARCHAR(30)", ""),
        new Column("fiscal_period", "VARCHAR(30)", ""),
    });
    createTable("warehouse", new Column[] {
        new Column("warehouse_id", "INTEGER", "NOT NULL"),
        new Column("warehouse_class_id", "INTEGER", ""),
        new Column("stores_id", "INTEGER", ""),
        new Column("warehouse_name", "VARCHAR(60)", ""),
        new Column("wa_address1", "VARCHAR(30)", ""),
        new Column("wa_address2", "VARCHAR(30)", ""),
        new Column("wa_address3", "VARCHAR(30)", ""),
        new Column("wa_address4", "VARCHAR(30)", ""),
        new Column("warehouse_city", "VARCHAR(30)", ""),
        new Column("warehouse_state_province", "VARCHAR(30)", ""),
        new Column("warehouse_postal_code", "VARCHAR(30)", ""),
        new Column("warehouse_country", "VARCHAR(30)", ""),
        new Column("warehouse_owner_name", "VARCHAR(30)", ""),
        new Column("warehouse_phone", "VARCHAR(30)", ""),
        new Column("warehouse_fax", "VARCHAR(30)", ""),
    });
    createTable("warehouse_class", new Column[] {
        new Column("warehouse_class_id", "INTEGER", "NOT NULL"),
        new Column("description", "VARCHAR(30)", ""),
    });
    if (outputDirectory != null) {
        fileOutput.close();
    }
}
/**
 * Registers the table's columns in mapTableNameToColumns and, when table
 * creation is enabled, emits DROP TABLE / CREATE TABLE DDL to the output
 * JDBC connection or the SQL script file. When only data loading is
 * requested, existing rows are deleted instead.
 *
 * @param name    table name
 * @param columns column definitions for the table
 */
private void createTable(String name, Column[] columns) {
    try {
        // Define the table.
        mapTableNameToColumns.put(name, columns);
        if (!tables) {
            if (data) {
                // We're going to load the data without [re]creating
                // the table, so let's remove the data.
                final Statement statement = connection.createStatement();
                try {
                    statement.execute("DELETE FROM " + quoteId(name));
                } catch (SQLException e) {
                    throw MondrianResource.instance().newCreateTableFailed(name, e);
                } finally {
                    // Always release the JDBC statement (was leaked before).
                    statement.close();
                }
            }
            return;
        }
        StringBuffer buf = new StringBuffer();
        buf.append("CREATE TABLE ").append(quoteId(name)).append("(");
        for (int i = 0; i < columns.length; i++) {
            Column column = columns[i];
            if (i > 0) {
                buf.append(",");
            }
            buf.append(nl);
            buf.append("    ").append(quoteId(column.name)).append(" ")
                .append(column.type);
            if (!column.constraint.equals("")) {
                buf.append(" ").append(column.constraint);
            }
        }
        buf.append(")");
        final String ddl = buf.toString();
        if (verbose) {
            System.out.println(ddl);
        }
        if (jdbcOutput) {
            final Statement statement = connection.createStatement();
            try {
                try {
                    statement.execute("DROP TABLE " + quoteId(name));
                } catch (SQLException e) {
                    // ignore 'table does not exist' error
                }
                statement.execute(ddl);
            } finally {
                // Always release the JDBC statement (was leaked before).
                statement.close();
            }
        } else {
            fileOutput.write(ddl);
            fileOutput.write(";\n");
        }
    } catch (Exception e) {
        throw MondrianResource.instance().newCreateTableFailed(name, e);
    }
}
/**
 * Quotes an identifier (table or column name) for the output SQL dialect
 * by delegating to the dialect-aware SqlQuery helper.
 */
private String quoteId(String name) {
    return sqlQuery.quoteIdentifier(name);
}
/**
 * Reads the current row's value for the given column from the ResultSet
 * and renders it as a SQL literal for an INSERT statement.
 *
 * <p>NOTE(review): for the numeric getters (getInt/getShort/getLong/...),
 * a SQL NULL is rendered as 0/0.0 because ResultSet.wasNull() is never
 * consulted; only TIMESTAMP, DATE and VARCHAR columns can produce the
 * literal NULL. Presumably acceptable for the FoodMart data — confirm.</p>
 *
 * @param rs     result set positioned on the row to read
 * @param column column metadata (name and declared SQL type)
 * @return the value rendered as a SQL literal (quoted where needed)
 * @throws Exception if the column type is malformed or unrecognized
 */
private String columnValue(ResultSet rs, Column column) throws Exception {
    String columnType = column.type;
    if (columnType.startsWith("INTEGER")) {
        int result = rs.getInt(column.name);
        return Integer.toString(result);
    }
    if (columnType.startsWith("SMALLINT")) {
        short result = rs.getShort(column.name);
        return Integer.toString(result);
    }
    if (columnType.startsWith("BIGINT")) {
        long result = rs.getLong(column.name);
        return Long.toString(result);
    }
    if (columnType.startsWith("VARCHAR")) {
        return embedQuotes(rs.getString(column.name));
    }
    // TIMESTAMP must be tested before DATE: both render quoted or NULL.
    if (columnType.startsWith("TIMESTAMP")) {
        Timestamp ts = rs.getTimestamp(column.name);
        if (ts == null) {
            return "NULL";
        } else {
            return "'" + ts + "'" ;
        }
    }
    if (columnType.startsWith("DATE")) {
        java.sql.Date dt = rs.getDate(column.name);
        if (dt == null) {
            return "NULL";
        } else {
            return "'" + dt + "'" ;
        }
    }
    if (columnType.startsWith("REAL")) {
        return Float.toString(rs.getFloat(column.name));
    }
    if (columnType.startsWith("DECIMAL")) {
        // Only compile the precision/scale pattern when a DECIMAL column is
        // actually seen (it used to be compiled on every call).
        final Pattern regex = Pattern.compile("DECIMAL\\((.*),(.*)\\)");
        final Matcher matcher = regex.matcher(columnType);
        if (!matcher.matches()) {
            throw new Exception("Bad DECIMAL column type for " + columnType);
        }
        DecimalFormat formatter = new DecimalFormat(decimalFormat(matcher.group(1), matcher.group(2)));
        return formatter.format(rs.getDouble(column.name));
    }
    if (columnType.startsWith("BIT")) {
        return Byte.toString(rs.getByte(column.name));
    }
    if (columnType.startsWith("BOOLEAN")) {
        return Boolean.toString(rs.getBoolean(column.name));
    }
    throw new Exception("Unknown column type: " + columnType + " for column: " + column.name);
}
/**
 * Renders a string as a single-quoted SQL literal, doubling any embedded
 * single quotes. A null input is rendered as the SQL keyword NULL.
 */
private String embedQuotes(String original) {
    if (original == null) {
        return "NULL";
    }
    final int length = original.length();
    StringBuffer literal = new StringBuffer(length + 2);
    literal.append('\'');
    for (int pos = 0; pos < length; pos++) {
        char c = original.charAt(pos);
        if (c == '\'') {
            // Escape by doubling, per SQL literal syntax.
            literal.append("''");
        } else {
            literal.append(c);
        }
    }
    literal.append('\'');
    return literal.toString();
}
private String decimalFormat(String lengthStr, String placesStr) {
StringBuffer sb = new StringBuffer();
int length = Integer.parseInt(lengthStr);
int places = Integer.parseInt(placesStr);
for (int i = 0; i < length; i++) {
if ((length - i) == places) {
sb.append('.');
}
sb.append("
}
return sb.toString();
}
/**
 * Immutable descriptor of one column of a FoodMart table: its name, its
 * SQL type in the loader's generic dialect (e.g. "VARCHAR(30)",
 * "DECIMAL(10,4)", or a dialect-chosen boolean type), and an optional
 * constraint such as "NOT NULL" (empty string when unconstrained).
 */
private static class Column {
    private final String name;
    private final String type;
    private final String constraint;
    public Column(String name, String type, String constraint) {
        this.name = name;
        this.type = type;
        this.constraint = constraint;
    }
}
} |
package scal.io.liger.model;
import com.google.gson.annotations.Expose;
/**
 * Serializable (via Gson @Expose) description of an audio clip's placement
 * and playback behavior relative to a sequence of clips/cards.
 */
public class AudioClip {
    // Card id this clip is anchored to. May be null: either this or
    // position_index must have a value, but only one of the two.
    @Expose private String position_clip_id;
    // Anchor by position index instead of card id (original comment said
    // "can null"; presumably ignored when position_clip_id is set — TODO confirm).
    @Expose private int position_index;
    @Expose private float volume; // 1.0 is full volume
    // NOTE(review): declared boolean but the original comment read "how many
    // clips it should try to span" — flag vs. count mismatch; confirm intent.
    @Expose private boolean clip_span;
    // If true, trim this clip's end to the clips it spans instead of playing past them.
    @Expose private boolean truncate;
    // If true, overlap the next clip rather than pushing it out.
    @Expose private boolean overlap;
    // Repeat to fill if this audio clip is shorter than the clips it spans.
    @Expose private boolean fill_repeat;
    // public AudioClip() {}
}
package mondrian.test.loader;
import mondrian.olap.MondrianResource;
import mondrian.rolap.RolapUtil;
import mondrian.rolap.sql.SqlQuery;
import java.io.*;
import java.math.BigDecimal;
//import java.math.BigDecimal;
import java.sql.*;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* Utility to load the FoodMart dataset into an arbitrary JDBC database.
*
* <p>This is known to create test data for the following databases:</p>
* <ul>
*
* <li>MySQL 3.23 using MySQL-connector/J 3.0.16</li>
*
* <li>MySQL 4.15 using MySQL-connector/J 3.0.16</li>
*
* <li>Postgres 8.0 beta using postgresql-driver-jdbc3-74-214.jar</li>
*
* <li>Oracle 10g using ojdbc14.jar</li>
*
* </ul>
*
* <p>Output can be to a set of files with create table, insert and create index
* statements, or directly to a JDBC connection with JDBC batches (lots faster!)</p>
*
* <p>On the command line:</p>
*
* <blockquote>MySQL example<code>
* $ mysqladmin create foodmart<br/>
* $ java -cp 'classes;testclasses' mondrian.test.loader.MondrianFoodMartLoader
* -verbose -tables -data -indexes -jdbcDrivers=com.mysql.jdbc.Driver
* -inputJdbcURL=jdbc:odbc:MondrianFoodMart -outputJdbcURL=jdbc:mysql://localhost/foodmart
* </code></blockquote>
*
* @author jhyde
* @since 23 December, 2004
* @version $Id$
*/
public class MondrianFoodMartLoader {
// Matches DECIMAL(length,places) column types, capturing length and places.
final Pattern decimalDataTypeRegex = Pattern.compile("DECIMAL\\((.*),(.*)\\)");
// Formatter for 15-digit integers (uses an int-argument decimalFormat
// overload defined elsewhere in this class).
final DecimalFormat integerFormatter = new DecimalFormat(decimalFormat(15, 0));
final String dateFormatString = "yyyy-MM-dd";
final String oracleDateFormatString = "YYYY-MM-DD";
final DateFormat dateFormatter = new SimpleDateFormat(dateFormatString);
// Output (destination) JDBC settings.
private String jdbcDrivers;
private String jdbcURL;
private String userName;
private String password;
// Input (source) JDBC settings; mutually exclusive with inputFile.
private String inputJdbcURL;
private String inputUserName;
private String inputPassword;
private String inputFile;
// When set, SQL scripts are written here instead of executing over JDBC.
private String outputDirectory;
// Which phases to run (-tables / -indexes / -data flags).
private boolean tables = false;
private boolean indexes = false;
private boolean data = false;
private static final String nl = System.getProperty("line.separator");
private boolean verbose = false;
private boolean jdbcInput = false;
private boolean jdbcOutput = false;
// Number of INSERTs accumulated before a batch is flushed.
private int inputBatchSize = 50;
private Connection connection;
private Connection inputConnection;
private FileWriter fileOutput = null;
// Dialect helper and dialect-specific column types, set up in load().
private SqlQuery sqlQuery;
private String booleanColumnType;
private String bigIntColumnType;
// Table name -> Column[]; populated by createTables().
private final HashMap mapTableNameToColumns = new HashMap();
public MondrianFoodMartLoader(String[] args) {
StringBuffer errorMessage = new StringBuffer();
for ( int i=0; i<args.length; i++ ) {
if (args[i].equals("-verbose")) {
verbose = true;
} else if (args[i].equals("-tables")) {
tables = true;
} else if (args[i].equals("-data")) {
data = true;
} else if (args[i].equals("-indexes")) {
indexes = true;
} else if (args[i].startsWith("-jdbcDrivers=")) {
jdbcDrivers = args[i].substring("-jdbcDrivers=".length());
} else if (args[i].startsWith("-outputJdbcURL=")) {
jdbcURL = args[i].substring("-outputJdbcURL=".length());
} else if (args[i].startsWith("-outputJdbcUser=")) {
userName = args[i].substring("-outputJdbcUser=".length());
} else if (args[i].startsWith("-outputJdbcPassword=")) {
password = args[i].substring("-outputJdbcPassword=".length());
} else if (args[i].startsWith("-inputJdbcURL=")) {
inputJdbcURL = args[i].substring("-inputJdbcURL=".length());
} else if (args[i].startsWith("-inputJdbcUser=")) {
inputUserName = args[i].substring("-inputJdbcUser=".length());
} else if (args[i].startsWith("-inputJdbcPassword=")) {
inputPassword = args[i].substring("-inputJdbcPassword=".length());
} else if (args[i].startsWith("-inputFile=")) {
inputFile = args[i].substring("-inputFile=".length());
} else if (args[i].startsWith("-outputDirectory=")) {
outputDirectory = args[i].substring("-outputDirectory=".length());
} else if (args[i].startsWith("-outputJdbcBatchSize=")) {
inputBatchSize = Integer.parseInt(args[i].substring("-outputJdbcBatchSize=".length()));
} else {
errorMessage.append("unknown arg: " + args[i] + "\n");
}
}
if (inputJdbcURL != null) {
jdbcInput = true;
if (inputFile != null) {
errorMessage.append("Specified both an input JDBC connection and an input file");
}
}
if (jdbcURL != null && outputDirectory == null) {
jdbcOutput = true;
}
if (errorMessage.length() > 0) {
usage();
throw MondrianResource.instance().newMissingArg(errorMessage.toString());
}
}
/**
 * Prints command-line usage to stdout.
 *
 * <p>Fixes two help-text bugs: the {@code -indexes} description was a
 * copy-paste of the {@code -tables} line ("drop and create the tables"),
 * and {@code -inputfile=} did not match the {@code -inputFile=} flag the
 * argument parser actually accepts.</p>
 */
public void usage() {
    System.out.println("Usage: MondrianFoodMartLoader [-verbose] [-tables] [-data] [-indexes] " +
            "-jdbcDrivers=<jdbcDriver> " +
            "-outputJdbcURL=<jdbcURL> [-outputJdbcUser=user] [-outputJdbcPassword=password]" +
            "[-outputJdbcBatchSize=<batch size>] " +
            "| " +
            "[-outputDirectory=<directory name>] " +
            "[" +
            " [-inputJdbcURL=<jdbcURL> [-inputJdbcUser=user] [-inputJdbcPassword=password]]" +
            " | " +
            " [-inputFile=<file name>]" +
            "]");
    System.out.println("");
    System.out.println("  <jdbcURL>             JDBC connect string for DB");
    System.out.println("  [user]                JDBC user name for DB");
    System.out.println("  [password]            JDBC password for user for DB");
    System.out.println("                        If no source DB parameters are given, assumes data comes from file");
    System.out.println("  [file name]           file containing test data - INSERT statements in MySQL format");
    System.out.println("                        If no input file name or input JDBC parameters are given, assume insert statements come from demo/FoodMartCreateData.zip file");
    System.out.println("  [outputDirectory]     Where FoodMartCreateTables.sql, FoodMartCreateData.sql and FoodMartCreateIndexes.sql will be created");
    System.out.println("  <batch size>          size of JDBC batch updates - default to 50 inserts");
    System.out.println("  <jdbcDrivers>         Comma-separated list of JDBC drivers.");
    System.out.println("                        They must be on the classpath.");
    System.out.println("  -verbose              Verbose mode.");
    System.out.println("  -tables               If specified, drop and create the tables.");
    System.out.println("  -data                 If specified, load the data.");
    System.out.println("  -indexes              If specified, drop and create the indexes.");
}
/**
 * Command-line entry point: constructs the loader from the arguments and
 * runs the full load, logging start/finish times and printing any failure.
 */
public static void main(String[] args) {
    System.out.println("Starting load at: " + (new Date()));
    try {
        final MondrianFoodMartLoader loader = new MondrianFoodMartLoader(args);
        loader.load();
    } catch (Throwable e) {
        // Report any failure but still print the finish timestamp below.
        e.printStackTrace();
    }
    System.out.println("Finished load at: " + (new Date()));
}
/**
 * Load output from the input, optionally creating tables,
 * populating tables and creating indexes.
 *
 * <p>Opens the output (and optionally input) JDBC connections, selects
 * dialect-specific column types, then runs the requested phases. All
 * resources are released in a cascaded finally so that a failure closing
 * one cannot leak the others (the original sequential closes could).</p>
 *
 * @throws Exception on driver loading, connection, I/O or SQL failure
 */
private void load() throws Exception {
    RolapUtil.loadDrivers(jdbcDrivers);
    if (userName == null) {
        connection = DriverManager.getConnection(jdbcURL);
    } else {
        connection = DriverManager.getConnection(jdbcURL, userName, password);
    }
    if (jdbcInput) {
        if (inputUserName == null) {
            inputConnection = DriverManager.getConnection(inputJdbcURL);
        } else {
            inputConnection = DriverManager.getConnection(inputJdbcURL, inputUserName, inputPassword);
        }
    }
    final DatabaseMetaData metaData = connection.getMetaData();
    String productName = metaData.getDatabaseProductName();
    String version = metaData.getDatabaseProductVersion();
    System.out.println("Output connection is " + productName + ", " + version);
    sqlQuery = new SqlQuery(metaData);
    // Pick dialect-specific column types for booleans and 64-bit integers.
    booleanColumnType = "SMALLINT";
    if (sqlQuery.isPostgres()) {
        booleanColumnType = "BOOLEAN";
    } else if (sqlQuery.isMySQL()) {
        booleanColumnType = "BIT";
    }
    bigIntColumnType = "BIGINT";
    if (sqlQuery.isOracle()) {
        bigIntColumnType = "DECIMAL(15,0)";
    }
    try {
        createTables();  // This also initializes mapTableNameToColumns
        if (data) {
            if (jdbcInput) {
                loadDataFromJdbcInput();
            } else {
                loadDataFromFile();
            }
        }
        if (indexes) {
            createIndexes();
        }
    } finally {
        // Cascade the closes: even if an earlier close() throws, the
        // later resources are still released.
        try {
            if (connection != null) {
                connection.close();
                connection = null;
            }
        } finally {
            try {
                if (inputConnection != null) {
                    inputConnection.close();
                    inputConnection = null;
                }
            } finally {
                if (fileOutput != null) {
                    fileOutput.close();
                    fileOutput = null;
                }
            }
        }
    }
}
/**
* Parse a file of INSERT statements and output to the configured JDBC
* connection or another file in the dialect of the target data source.
*
* The assumption is that the input INSERT statements are out of MySQL, generated
* by this loader by something like:
*
* MondrianFoodLoader
* -verbose -tables -data -indexes
* -jdbcDrivers=sun.jdbc.odbc.JdbcOdbcDriver,com.mysql.jdbc.Driver
* -inputJdbcURL=jdbc:odbc:MondrianFoodMart
* -outputJdbcURL=jdbc:mysql://localhost/textload?user=root&password=myAdmin
* -outputDirectory=C:\Temp\wip\Loader-Output
*
* @throws Exception
*/
private void loadDataFromFile() throws Exception {
InputStream is = openInputStream();
if (is == null) {
throw new Exception("No data file to process");
}
try {
final InputStreamReader reader = new InputStreamReader(is);
final BufferedReader bufferedReader = new BufferedReader(reader);
final Pattern regex = Pattern.compile("INSERT INTO `([^ ]+)` \\((.*)\\) VALUES\\((.*)\\);");
String line;
int lineNumber = 0;
int tableRowCount = 0;
String prevTable = "";
String quotedTableName = null;
String quotedColumnNames = null;
Column[] orderedColumns = null;
String[] batch = new String[inputBatchSize];
int batchSize = 0;
while ((line = bufferedReader.readLine()) != null) {
++lineNumber;
if (line.startsWith("
continue;
}
// Split the up the line. For example,
// INSERT INTO `foo` ( `column1`,`column2` ) VALUES (1, 'bar');
// would yield
// tableName = "foo"
// columnNames = " `column1`,`column2` "
// values = "1, 'bar'"
final Matcher matcher = regex.matcher(line);
if (!matcher.matches()) {
throw MondrianResource.instance().newInvalidInsertLine(
new Integer(lineNumber), line);
}
String tableName = matcher.group(1); // e.g. "foo"
String columnNames = matcher.group(2);
String values = matcher.group(3);
// If table just changed, flush the previous batch.
if (!tableName.equals(prevTable)) {
if (!prevTable.equals("")) {
System.out.println("Table " + prevTable +
": loaded " + tableRowCount + " rows.");
}
tableRowCount = 0;
writeBatch(batch, batchSize);
batchSize = 0;
prevTable = tableName;
quotedTableName = quoteId(tableName);
quotedColumnNames = columnNames
.replaceAll("`", sqlQuery.getQuoteIdentifierString());
String[] splitColumnNames = columnNames.replaceAll("`", "")
.replaceAll(" ", "").split(",");
Column[] columns = (Column[]) mapTableNameToColumns.get(tableName);
orderedColumns = new Column[columns.length];
for (int i = 0; i < splitColumnNames.length; i++) {
Column thisColumn = null;
for (int j = 0; j < columns.length && thisColumn == null; j++) {
if (columns[j].name.equalsIgnoreCase(splitColumnNames[i])) {
thisColumn = columns[j];
}
}
if (thisColumn == null) {
throw new Exception("Unknown column in INSERT statement from file: " + splitColumnNames[i]);
} else {
orderedColumns[i] = thisColumn;
}
}
}
StringBuffer massagedLine = new StringBuffer();
massagedLine
.append("INSERT INTO ")
.append(quotedTableName)
.append(" (")
.append(quotedColumnNames)
.append(" ) VALUES(")
.append(getMassagedValues(orderedColumns, values))
.append(" )");
line = massagedLine.toString();
++tableRowCount;
batch[batchSize++] = line;
if (batchSize >= inputBatchSize) {
writeBatch(batch, batchSize);
batchSize = 0;
}
}
// Print summary of the final table.
if (!prevTable.equals("")) {
System.out.println("Table " + prevTable +
": loaded " + tableRowCount + " rows.");
tableRowCount = 0;
writeBatch(batch, batchSize);
batchSize = 0;
}
} finally {
if (is != null) {
is.close();
}
}
}
/**
 * Converts the contents of a MySQL-format INSERT VALUES clause into value
 * literals for the destination dialect, one per column.
 *
 * @param columns column metadata for the table, in the same order as the values
 * @param values the contents of the INSERT VALUES clause ie. "34,67.89,'GHt''ab'". These are in MySQL form.
 * @return String values for the destination dialect
 * @throws Exception if a value cannot be converted for its column type
 */
private String getMassagedValues(Column[] columns, String values) throws Exception {
    StringBuffer sb = new StringBuffer();
    // Get the values out as individual elements
    // Split the string at commas, and cope with embedded commas
    String[] individualValues = new String[columns.length];
    String[] splitValues = values.split(",");
    // If these 2 are the same length, then there are no embedded commas
    if (splitValues.length == columns.length) {
        individualValues = splitValues;
    } else {
        // Embedded commas inside quoted strings: e.g.
        //   "34,67.89,'GH,t''a,b'" splits into
        //   { "34", "67.89", "'GH", "t''a", "b'" }
        // and the quoted pieces must be rejoined with commas.
        int valuesPos = 0;
        boolean inQuote = false;
        for (int i = 0; i < splitValues.length; i++) {
            if (i == 0) {
                individualValues[valuesPos] = splitValues[i];
                inQuote = inQuote(splitValues[i], inQuote);
            } else {
                if (inQuote) {
                    // Still inside a quoted value: glue this piece back on.
                    individualValues[valuesPos] = individualValues[valuesPos] + "," + splitValues[i];
                    inQuote = inQuote(splitValues[i], inQuote);
                } else {
                    valuesPos++;
                    individualValues[valuesPos] = splitValues[i];
                    inQuote = inQuote(splitValues[i], inQuote);
                }
            }
        }
        assert(valuesPos + 1 == columns.length);
    }
    for (int i = 0; i < columns.length; i++) {
        if (i > 0) {
            sb.append(",");
        }
        String value = individualValues[i];
        // A bare NULL token becomes a null value for the converter.
        if (value != null && value.trim().equals("NULL")) {
            value = null;
        }
        sb.append(columnValue(value, columns[i]));
    }
    return sb.toString();
}
/**
 * Tracks whether we are inside a single-quoted SQL string after scanning
 * the given fragment: the state toggles once for every quote character
 * encountered, starting from the supplied state.
 *
 * @param str        fragment of an INSERT VALUES clause
 * @param nowInQuote in-quote state before this fragment
 * @return in-quote state after this fragment
 */
private boolean inQuote(String str, boolean nowInQuote) {
    for (int i = 0; i < str.length(); i++) {
        if (str.charAt(i) == '\'') {
            nowInQuote = !nowInQuote;
        }
    }
    return nowInQuote;
}
private void loadDataFromJdbcInput() throws Exception {
if (outputDirectory != null) {
fileOutput = new FileWriter(new File(outputDirectory, "createData.sql"));
}
/*
* For each input table,
* read specified columns for all rows in the input connection
*
* For each row, insert a row
*/
for (Iterator it = mapTableNameToColumns.entrySet().iterator(); it.hasNext(); ) {
Map.Entry tableEntry = (Map.Entry) it.next();
int rowsAdded = loadTable((String) tableEntry.getKey(), (Column[]) tableEntry.getValue());
System.out.println("Table " + (String) tableEntry.getKey() +
": loaded " + rowsAdded + " rows.");
}
if (outputDirectory != null) {
fileOutput.close();
}
}
/**
* Read the given table from the input RDBMS and output to destination
* RDBMS or file
*
* @param name name of table
* @param columns columns to be read/output
* @return #rows inserted
* @throws Exception
*/
private int loadTable(String name, Column[] columns) throws Exception {
int rowsAdded = 0;
StringBuffer buf = new StringBuffer();
buf.append("select ");
for (int i = 0; i < columns.length; i++) {
Column column = columns[i];
if (i > 0) {
buf.append(",");
}
buf.append(quoteId(column.name));
}
buf.append(" from ")
.append(quoteId(name));
String ddl = buf.toString();
Statement statement = inputConnection.createStatement();
if (verbose) {
System.out.println("Input table SQL: " + ddl);
}
ResultSet rs = statement.executeQuery(ddl);
String[] batch = new String[inputBatchSize];
int batchSize = 0;
boolean displayedInsert = false;
while (rs.next()) {
/*
* Get a batch of insert statements, then save a batch
*/
String insertStatement = createInsertStatement(rs, name, columns);
if (!displayedInsert && verbose) {
System.out.println("Example Insert statement: " + insertStatement);
displayedInsert = true;
}
batch[batchSize++] = insertStatement;
if (batchSize >= inputBatchSize) {
rowsAdded += writeBatch(batch, batchSize);
batchSize = 0;
}
}
if (batchSize > 0) {
rowsAdded += writeBatch(batch, batchSize);
}
return rowsAdded;
}
/**
* Create a SQL INSERT statement in the dialect of the output RDBMS.
*
* @param rs ResultSet of input RDBMS
* @param name name of table
* @param columns column definitions for INSERT statement
* @return String the INSERT statement
* @throws Exception
*/
private String createInsertStatement(ResultSet rs, String name, Column[] columns) throws Exception {
StringBuffer buf = new StringBuffer();
buf.append("INSERT INTO ")
.append(quoteId(name))
.append(" ( ");
for (int i = 0; i < columns.length; i++) {
Column column = columns[i];
if (i > 0) {
buf.append(",");
}
buf.append(quoteId(column.name));
}
buf.append(" ) VALUES(");
for (int i = 0; i < columns.length; i++) {
Column column = columns[i];
if (i > 0) {
buf.append(",");
}
buf.append(columnValue(rs, column));
}
buf.append(" )");
return buf.toString();
}
/**
* If we are outputting to JDBC,
* Execute the given set of SQL statements
*
* Otherwise,
* output the statements to a file.
*
* @param batch SQL statements to execute
* @param batchSize # SQL statements to execute
* @return # SQL statements executed
* @throws IOException
* @throws SQLException
*/
private int writeBatch(String[] batch, int batchSize) throws IOException, SQLException {
if (outputDirectory != null) {
for (int i = 0; i < batchSize; i++) {
fileOutput.write(batch[i]);
fileOutput.write(";\n");
}
} else {
connection.setAutoCommit(false);
Statement stmt = connection.createStatement();
if (batchSize == 1) {
// Don't use batching if there's only one item. This allows
// us to work around bugs in the JDBC driver by setting
// outputJdbcBatchSize=1.
stmt.execute(batch[0]);
} else {
for (int i = 0; i < batchSize; i++) {
stmt.addBatch(batch[i]);
}
int [] updateCounts = null;
try {
updateCounts = stmt.executeBatch();
} catch (SQLException e) {
for (int i = 0; i < batchSize; i++) {
System.out.println("Error in SQL batch: " + batch[i]);
}
throw e;
}
int updates = 0;
for (int i = 0; i < updateCounts.length; updates += updateCounts[i], i++) {
if (updateCounts[i] == 0) {
System.out.println("Error in SQL: " + batch[i]);
}
}
if (updates < batchSize) {
throw new RuntimeException("Failed to execute batch: " + batchSize + " versus " + updates);
}
}
stmt.close();
connection.setAutoCommit(true);
}
return batchSize;
}
/**
* Open the file of INSERT statements to load the data. Default
* file name is ./demo/FoodMartCreateData.zip
*
* @return FileInputStream
*/
private InputStream openInputStream() throws Exception {
final String defaultZipFileName = "FoodMartCreateData.zip";
final String defaultDataFileName = "FoodMartCreateData.sql";
final File file = (inputFile != null) ? new File(inputFile) : new File("demo", defaultZipFileName);
if (!file.exists()) {
System.out.println("No input file: " + file);
return null;
}
if (file.getName().toLowerCase().endsWith(".zip")) {
ZipFile zippedData = new ZipFile(file);
ZipEntry entry = zippedData.getEntry(defaultDataFileName);
return zippedData.getInputStream(entry);
} else {
return new FileInputStream(file);
}
}
/**
* Create all indexes for the FoodMart database
*
* @throws Exception
*/
private void createIndexes() throws Exception {
if (outputDirectory != null) {
fileOutput = new FileWriter(new File(outputDirectory, "createIndexes.sql"));
}
createIndex(true, "account", "i_account_id", new String[] {"account_id"});
createIndex(false, "account", "i_account_parent", new String[] {"account_parent"});
createIndex(true, "category", "i_category_id", new String[] {"category_id"});
createIndex(false, "category", "i_category_parent", new String[] {"category_parent"});
createIndex(true, "currency", "i_currency", new String[] {"currency_id", "date"});
createIndex(false, "customer", "i_cust_acct_num", new String[] {"account_num"});
createIndex(false, "customer", "i_customer_fname", new String[] {"fname"});
createIndex(false, "customer", "i_customer_lname", new String[] {"lname"});
createIndex(false, "customer", "i_cust_child_home", new String[] {"num_children_at_home"});
createIndex(true, "customer", "i_customer_id", new String[] {"customer_id"});
createIndex(false, "customer", "i_cust_postal_code", new String[] {"postal_code"});
createIndex(false, "customer", "i_cust_region_id", new String[] {"customer_region_id"});
createIndex(true, "department", "i_department_id", new String[] {"department_id"});
createIndex(true, "employee", "i_employee_id", new String[] {"employee_id"});
createIndex(false, "employee", "i_empl_dept_id", new String[] {"department_id"});
createIndex(false, "employee", "i_empl_store_id", new String[] {"store_id"});
createIndex(false, "employee", "i_empl_super_id", new String[] {"supervisor_id"});
createIndex(true, "employee_closure", "i_empl_closure", new String[] {"supervisor_id", "employee_id"});
createIndex(false, "employee_closure", "i_empl_closure_emp", new String[] {"employee_id"});
createIndex(false, "expense_fact", "i_expense_store_id", new String[] {"store_id"});
createIndex(false, "expense_fact", "i_expense_acct_id", new String[] {"account_id"});
createIndex(false, "expense_fact", "i_expense_time_id", new String[] {"time_id"});
createIndex(false, "inventory_fact_1997", "i_inv_97_prod_id", new String[] {"product_id"});
createIndex(false, "inventory_fact_1997", "i_inv_97_store_id", new String[] {"store_id"});
createIndex(false, "inventory_fact_1997", "i_inv_97_time_id", new String[] {"time_id"});
createIndex(false, "inventory_fact_1997", "i_inv_97_wrhse_id", new String[] {"warehouse_id"});
createIndex(false, "inventory_fact_1998", "i_inv_98_prod_id", new String[] {"product_id"});
createIndex(false, "inventory_fact_1998", "i_inv_98_store_id", new String[] {"store_id"});
createIndex(false, "inventory_fact_1998", "i_inv_98_time_id", new String[] {"time_id"});
createIndex(false, "inventory_fact_1998", "i_inv_98_wrhse_id", new String[] {"warehouse_id"});
createIndex(true, "position", "i_position_id", new String[] {"position_id"});
createIndex(false, "product", "i_prod_brand_name", new String[] {"brand_name"});
createIndex(true, "product", "i_product_id", new String[] {"product_id"});
createIndex(false, "product", "i_prod_class_id", new String[] {"product_class_id"});
createIndex(false, "product", "i_product_name", new String[] {"product_name"});
createIndex(false, "product", "i_product_SKU", new String[] {"SKU"});
createIndex(true, "promotion", "i_promotion_id", new String[] {"promotion_id"});
createIndex(false, "promotion", "i_promo_dist_id", new String[] {"promotion_district_id"});
createIndex(true, "reserve_employee", "i_rsrv_empl_id", new String[] {"employee_id"});
createIndex(false, "reserve_employee", "i_rsrv_empl_dept", new String[] {"department_id"});
createIndex(false, "reserve_employee", "i_rsrv_empl_store", new String[] {"store_id"});
createIndex(false, "reserve_employee", "i_rsrv_empl_sup", new String[] {"supervisor_id"});
createIndex(false, "salary", "i_salary_pay_date", new String[] {"pay_date"});
createIndex(false, "salary", "i_salary_employee", new String[] {"employee_id"});
createIndex(false, "sales_fact_1997", "i_sls_97_cust_id", new String[] {"customer_id"});
createIndex(false, "sales_fact_1997", "i_sls_97_prod_id", new String[] {"product_id"});
createIndex(false, "sales_fact_1997", "i_sls_97_promo_id", new String[] {"promotion_id"});
createIndex(false, "sales_fact_1997", "i_sls_97_store_id", new String[] {"store_id"});
createIndex(false, "sales_fact_1997", "i_sls_97_time_id", new String[] {"time_id"});
createIndex(false, "sales_fact_dec_1998", "i_sls_dec98_cust", new String[] {"customer_id"});
createIndex(false, "sales_fact_dec_1998", "i_sls_dec98_prod", new String[] {"product_id"});
createIndex(false, "sales_fact_dec_1998", "i_sls_dec98_promo", new String[] {"promotion_id"});
createIndex(false, "sales_fact_dec_1998", "i_sls_dec98_store", new String[] {"store_id"});
createIndex(false, "sales_fact_dec_1998", "i_sls_dec98_time", new String[] {"time_id"});
createIndex(false, "sales_fact_1998", "i_sls_98_cust_id", new String[] {"customer_id"});
createIndex(false, "sales_fact_1998", "i_sls_1998_prod_id", new String[] {"product_id"});
createIndex(false, "sales_fact_1998", "i_sls_1998_promo", new String[] {"promotion_id"});
createIndex(false, "sales_fact_1998", "i_sls_1998_store", new String[] {"store_id"});
createIndex(false, "sales_fact_1998", "i_sls_1998_time_id", new String[] {"time_id"});
createIndex(true, "store", "i_store_id", new String[] {"store_id"});
createIndex(false, "store", "i_store_region_id", new String[] {"region_id"});
createIndex(true, "store_ragged", "i_store_raggd_id", new String[] {"store_id"});
createIndex(false, "store_ragged", "i_store_rggd_reg", new String[] {"region_id"});
createIndex(true, "time_by_day", "i_time_id", new String[] {"time_id"});
createIndex(true, "time_by_day", "i_time_day", new String[] {"the_date"});
createIndex(false, "time_by_day", "i_time_year", new String[] {"the_year"});
createIndex(false, "time_by_day", "i_time_quarter", new String[] {"quarter"});
createIndex(false, "time_by_day", "i_time_month", new String[] {"month_of_year"});
if (outputDirectory != null) {
fileOutput.close();
}
}
/**
*
* If we are outputting to JDBC,
* Execute the CREATE INDEX statement
*
* Otherwise,
* output the statement to a file.
*
* @param isUnique
* @param tableName
* @param indexName
* @param columnNames
*/
private void createIndex(
boolean isUnique,
String tableName,
String indexName,
String[] columnNames)
{
try {
StringBuffer buf = new StringBuffer();
if (jdbcOutput) {
try {
buf.append("DROP INDEX ")
.append(quoteId(indexName));
if (sqlQuery.isMySQL()) {
buf.append(" ON ")
.append(quoteId(tableName));
}
final String deleteDDL = buf.toString();
executeDDL(deleteDDL);
} catch (Exception e1) {
System.out.println("Drop failed: but continue");
}
}
buf = new StringBuffer();
buf.append(isUnique ? "CREATE UNIQUE INDEX " : "CREATE INDEX ")
.append(quoteId(indexName)).append(" ON ")
.append(quoteId(tableName)).append(" (");
for (int i = 0; i < columnNames.length; i++) {
String columnName = columnNames[i];
if (i > 0) {
buf.append(", ");
}
buf.append(quoteId(columnName));
}
buf.append(")");
final String createDDL = buf.toString();
executeDDL(createDDL);
} catch (Exception e) {
throw MondrianResource.instance().newCreateIndexFailed(indexName,
tableName, e);
}
}
    /**
     * Define all tables for the FoodMart database.
     *
     * Also initializes mapTableNameToColumns (via createTable) so that
     * later data loading knows the column set of each table.
     *
     * If outputting to a directory, DDL is written to createTables.sql;
     * otherwise each statement is executed against the output connection.
     *
     * @throws Exception on any DDL or file I/O failure
     */
    private void createTables() throws Exception {
        if (outputDirectory != null) {
            fileOutput = new FileWriter(new File(outputDirectory, "createTables.sql"));
        }
        // Fact tables.
        createTable("sales_fact_1997", new Column[] {
            new Column("product_id", "INTEGER", "NOT NULL"),
            new Column("time_id", "INTEGER", "NOT NULL"),
            new Column("customer_id", "INTEGER", "NOT NULL"),
            new Column("promotion_id", "INTEGER", "NOT NULL"),
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("store_sales", "DECIMAL(10,4)", "NOT NULL"),
            new Column("store_cost", "DECIMAL(10,4)", "NOT NULL"),
            new Column("unit_sales", "DECIMAL(10,4)", "NOT NULL"),
        });
        createTable("sales_fact_1998", new Column[] {
            new Column("product_id", "INTEGER", "NOT NULL"),
            new Column("time_id", "INTEGER", "NOT NULL"),
            new Column("customer_id", "INTEGER", "NOT NULL"),
            new Column("promotion_id", "INTEGER", "NOT NULL"),
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("store_sales", "DECIMAL(10,4)", "NOT NULL"),
            new Column("store_cost", "DECIMAL(10,4)", "NOT NULL"),
            new Column("unit_sales", "DECIMAL(10,4)", "NOT NULL"),
        });
        createTable("sales_fact_dec_1998", new Column[] {
            new Column("product_id", "INTEGER", "NOT NULL"),
            new Column("time_id", "INTEGER", "NOT NULL"),
            new Column("customer_id", "INTEGER", "NOT NULL"),
            new Column("promotion_id", "INTEGER", "NOT NULL"),
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("store_sales", "DECIMAL(10,4)", "NOT NULL"),
            new Column("store_cost", "DECIMAL(10,4)", "NOT NULL"),
            new Column("unit_sales", "DECIMAL(10,4)", "NOT NULL"),
        });
        createTable("inventory_fact_1997", new Column[] {
            new Column("product_id", "INTEGER", "NOT NULL"),
            new Column("time_id", "INTEGER", ""),
            new Column("warehouse_id", "INTEGER", ""),
            new Column("store_id", "INTEGER", ""),
            new Column("units_ordered", "INTEGER", ""),
            new Column("units_shipped", "INTEGER", ""),
            new Column("warehouse_sales", "DECIMAL(10,4)", ""),
            new Column("warehouse_cost", "DECIMAL(10,4)", ""),
            new Column("supply_time", "SMALLINT", ""),
            new Column("store_invoice", "DECIMAL(10,4)", ""),
        });
        createTable("inventory_fact_1998", new Column[] {
            new Column("product_id", "INTEGER", "NOT NULL"),
            new Column("time_id", "INTEGER", ""),
            new Column("warehouse_id", "INTEGER", ""),
            new Column("store_id", "INTEGER", ""),
            new Column("units_ordered", "INTEGER", ""),
            new Column("units_shipped", "INTEGER", ""),
            new Column("warehouse_sales", "DECIMAL(10,4)", ""),
            new Column("warehouse_cost", "DECIMAL(10,4)", ""),
            new Column("supply_time", "SMALLINT", ""),
            new Column("store_invoice", "DECIMAL(10,4)", ""),
        });
        // Dimension and auxiliary tables.
        createTable("currency", new Column[] {
            new Column("currency_id", "INTEGER", "NOT NULL"),
            new Column("date", "DATE", "NOT NULL"),
            new Column("currency", "VARCHAR(30)", "NOT NULL"),
            new Column("conversion_ratio", "DECIMAL(10,4)", "NOT NULL"),
        });
        createTable("account", new Column[] {
            new Column("account_id", "INTEGER", "NOT NULL"),
            new Column("account_parent", "INTEGER", ""),
            new Column("account_description", "VARCHAR(30)", ""),
            new Column("account_type", "VARCHAR(30)", "NOT NULL"),
            new Column("account_rollup", "VARCHAR(30)", "NOT NULL"),
            new Column("Custom_Members", "VARCHAR(255)", ""),
        });
        createTable("category", new Column[] {
            new Column("category_id", "VARCHAR(30)", "NOT NULL"),
            new Column("category_parent", "VARCHAR(30)", ""),
            new Column("category_description", "VARCHAR(30)", "NOT NULL"),
            new Column("category_rollup", "VARCHAR(30)", ""),
        });
        createTable("customer", new Column[] {
            new Column("customer_id", "INTEGER", "NOT NULL"),
            new Column("account_num", bigIntColumnType, "NOT NULL"),
            new Column("lname", "VARCHAR(30)", "NOT NULL"),
            new Column("fname", "VARCHAR(30)", "NOT NULL"),
            new Column("mi", "VARCHAR(30)", ""),
            new Column("address1", "VARCHAR(30)", ""),
            new Column("address2", "VARCHAR(30)", ""),
            new Column("address3", "VARCHAR(30)", ""),
            new Column("address4", "VARCHAR(30)", ""),
            new Column("city", "VARCHAR(30)", ""),
            new Column("state_province", "VARCHAR(30)", ""),
            new Column("postal_code", "VARCHAR(30)", "NOT NULL"),
            new Column("country", "VARCHAR(30)", "NOT NULL"),
            new Column("customer_region_id", "INTEGER", "NOT NULL"),
            new Column("phone1", "VARCHAR(30)", "NOT NULL"),
            new Column("phone2", "VARCHAR(30)", "NOT NULL"),
            new Column("birthdate", "DATE", "NOT NULL"),
            new Column("marital_status", "VARCHAR(30)", "NOT NULL"),
            new Column("yearly_income", "VARCHAR(30)", "NOT NULL"),
            new Column("gender", "VARCHAR(30)", "NOT NULL"),
            new Column("total_children", "SMALLINT", "NOT NULL"),
            new Column("num_children_at_home", "SMALLINT", "NOT NULL"),
            new Column("education", "VARCHAR(30)", "NOT NULL"),
            new Column("date_accnt_opened", "DATE", "NOT NULL"),
            new Column("member_card", "VARCHAR(30)", ""),
            new Column("occupation", "VARCHAR(30)", ""),
            new Column("houseowner", "VARCHAR(30)", ""),
            new Column("num_cars_owned", "INTEGER", ""),
        });
        createTable("days", new Column[] {
            new Column("day", "INTEGER", "NOT NULL"),
            new Column("week_day", "VARCHAR(30)", "NOT NULL"),
        });
        createTable("department", new Column[] {
            new Column("department_id", "INTEGER", "NOT NULL"),
            new Column("department_description", "VARCHAR(30)", "NOT NULL"),
        });
        createTable("employee", new Column[] {
            new Column("employee_id", "INTEGER", "NOT NULL"),
            new Column("full_name", "VARCHAR(30)", "NOT NULL"),
            new Column("first_name", "VARCHAR(30)", "NOT NULL"),
            new Column("last_name", "VARCHAR(30)", "NOT NULL"),
            new Column("position_id", "INTEGER", ""),
            new Column("position_title", "VARCHAR(30)", ""),
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("department_id", "INTEGER", "NOT NULL"),
            new Column("birth_date", "DATE", "NOT NULL"),
            new Column("hire_date", "TIMESTAMP", ""),
            new Column("end_date", "TIMESTAMP", ""),
            new Column("salary", "DECIMAL(10,4)", "NOT NULL"),
            new Column("supervisor_id", "INTEGER", ""),
            new Column("education_level", "VARCHAR(30)", "NOT NULL"),
            new Column("marital_status", "VARCHAR(30)", "NOT NULL"),
            new Column("gender", "VARCHAR(30)", "NOT NULL"),
            new Column("management_role", "VARCHAR(30)", ""),
        });
        createTable("employee_closure", new Column[] {
            new Column("employee_id", "INTEGER", "NOT NULL"),
            new Column("supervisor_id", "INTEGER", "NOT NULL"),
            new Column("distance", "INTEGER", ""),
        });
        createTable("expense_fact", new Column[] {
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("account_id", "INTEGER", "NOT NULL"),
            new Column("exp_date", "TIMESTAMP", "NOT NULL"),
            new Column("time_id", "INTEGER", "NOT NULL"),
            new Column("category_id", "VARCHAR(30)", "NOT NULL"),
            new Column("currency_id", "INTEGER", "NOT NULL"),
            new Column("amount", "DECIMAL(10,4)", "NOT NULL"),
        });
        createTable("position", new Column[] {
            new Column("position_id", "INTEGER", "NOT NULL"),
            new Column("position_title", "VARCHAR(30)", "NOT NULL"),
            new Column("pay_type", "VARCHAR(30)", "NOT NULL"),
            new Column("min_scale", "DECIMAL(10,4)", "NOT NULL"),
            new Column("max_scale", "DECIMAL(10,4)", "NOT NULL"),
            new Column("management_role", "VARCHAR(30)", "NOT NULL"),
        });
        createTable("product", new Column[] {
            new Column("product_class_id", "INTEGER", "NOT NULL"),
            new Column("product_id", "INTEGER", "NOT NULL"),
            new Column("brand_name", "VARCHAR(60)", ""),
            new Column("product_name", "VARCHAR(60)", "NOT NULL"),
            new Column("SKU", bigIntColumnType, "NOT NULL"),
            new Column("SRP", "DECIMAL(10,4)", ""),
            new Column("gross_weight", "REAL", ""),
            new Column("net_weight", "REAL", ""),
            new Column("recyclable_package", booleanColumnType, ""),
            new Column("low_fat", booleanColumnType, ""),
            new Column("units_per_case", "SMALLINT", ""),
            new Column("cases_per_pallet", "SMALLINT", ""),
            new Column("shelf_width", "REAL", ""),
            new Column("shelf_height", "REAL", ""),
            new Column("shelf_depth", "REAL", ""),
        });
        createTable("product_class", new Column[] {
            new Column("product_class_id", "INTEGER", "NOT NULL"),
            new Column("product_subcategory", "VARCHAR(30)", ""),
            new Column("product_category", "VARCHAR(30)", ""),
            new Column("product_department", "VARCHAR(30)", ""),
            new Column("product_family", "VARCHAR(30)", ""),
        });
        createTable("promotion", new Column[] {
            new Column("promotion_id", "INTEGER", "NOT NULL"),
            new Column("promotion_district_id", "INTEGER", ""),
            new Column("promotion_name", "VARCHAR(30)", ""),
            new Column("media_type", "VARCHAR(30)", ""),
            new Column("cost", "DECIMAL(10,4)", ""),
            new Column("start_date", "TIMESTAMP", ""),
            new Column("end_date", "TIMESTAMP", ""),
        });
        createTable("region", new Column[] {
            new Column("region_id", "INTEGER", "NOT NULL"),
            new Column("sales_city", "VARCHAR(30)", ""),
            new Column("sales_state_province", "VARCHAR(30)", ""),
            new Column("sales_district", "VARCHAR(30)", ""),
            new Column("sales_region", "VARCHAR(30)", ""),
            new Column("sales_country", "VARCHAR(30)", ""),
            new Column("sales_district_id", "INTEGER", ""),
        });
        createTable("reserve_employee", new Column[] {
            new Column("employee_id", "INTEGER", "NOT NULL"),
            new Column("full_name", "VARCHAR(30)", "NOT NULL"),
            new Column("first_name", "VARCHAR(30)", "NOT NULL"),
            new Column("last_name", "VARCHAR(30)", "NOT NULL"),
            new Column("position_id", "INTEGER", ""),
            new Column("position_title", "VARCHAR(30)", ""),
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("department_id", "INTEGER", "NOT NULL"),
            new Column("birth_date", "TIMESTAMP", "NOT NULL"),
            new Column("hire_date", "TIMESTAMP", ""),
            new Column("end_date", "TIMESTAMP", ""),
            new Column("salary", "DECIMAL(10,4)", "NOT NULL"),
            new Column("supervisor_id", "INTEGER", ""),
            new Column("education_level", "VARCHAR(30)", "NOT NULL"),
            new Column("marital_status", "VARCHAR(30)", "NOT NULL"),
            new Column("gender", "VARCHAR(30)", "NOT NULL"),
        });
        createTable("salary", new Column[] {
            new Column("pay_date", "TIMESTAMP", "NOT NULL"),
            new Column("employee_id", "INTEGER", "NOT NULL"),
            new Column("department_id", "INTEGER", "NOT NULL"),
            new Column("currency_id", "INTEGER", "NOT NULL"),
            new Column("salary_paid", "DECIMAL(10,4)", "NOT NULL"),
            new Column("overtime_paid", "DECIMAL(10,4)", "NOT NULL"),
            new Column("vacation_accrued", "REAL", "NOT NULL"),
            new Column("vacation_used", "REAL", "NOT NULL"),
        });
        createTable("store", new Column[] {
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("store_type", "VARCHAR(30)", ""),
            new Column("region_id", "INTEGER", ""),
            new Column("store_name", "VARCHAR(30)", ""),
            new Column("store_number", "INTEGER", ""),
            new Column("store_street_address", "VARCHAR(30)", ""),
            new Column("store_city", "VARCHAR(30)", ""),
            new Column("store_state", "VARCHAR(30)", ""),
            new Column("store_postal_code", "VARCHAR(30)", ""),
            new Column("store_country", "VARCHAR(30)", ""),
            new Column("store_manager", "VARCHAR(30)", ""),
            new Column("store_phone", "VARCHAR(30)", ""),
            new Column("store_fax", "VARCHAR(30)", ""),
            new Column("first_opened_date", "TIMESTAMP", ""),
            new Column("last_remodel_date", "TIMESTAMP", ""),
            new Column("store_sqft", "INTEGER", ""),
            new Column("grocery_sqft", "INTEGER", ""),
            new Column("frozen_sqft", "INTEGER", ""),
            new Column("meat_sqft", "INTEGER", ""),
            new Column("coffee_bar", booleanColumnType, ""),
            new Column("video_store", booleanColumnType, ""),
            new Column("salad_bar", booleanColumnType, ""),
            new Column("prepared_food", booleanColumnType, ""),
            new Column("florist", booleanColumnType, ""),
        });
        createTable("store_ragged", new Column[] {
            new Column("store_id", "INTEGER", "NOT NULL"),
            new Column("store_type", "VARCHAR(30)", ""),
            new Column("region_id", "INTEGER", ""),
            new Column("store_name", "VARCHAR(30)", ""),
            new Column("store_number", "INTEGER", ""),
            new Column("store_street_address", "VARCHAR(30)", ""),
            new Column("store_city", "VARCHAR(30)", ""),
            new Column("store_state", "VARCHAR(30)", ""),
            new Column("store_postal_code", "VARCHAR(30)", ""),
            new Column("store_country", "VARCHAR(30)", ""),
            new Column("store_manager", "VARCHAR(30)", ""),
            new Column("store_phone", "VARCHAR(30)", ""),
            new Column("store_fax", "VARCHAR(30)", ""),
            new Column("first_opened_date", "TIMESTAMP", ""),
            new Column("last_remodel_date", "TIMESTAMP", ""),
            new Column("store_sqft", "INTEGER", ""),
            new Column("grocery_sqft", "INTEGER", ""),
            new Column("frozen_sqft", "INTEGER", ""),
            new Column("meat_sqft", "INTEGER", ""),
            new Column("coffee_bar", booleanColumnType, ""),
            new Column("video_store", booleanColumnType, ""),
            new Column("salad_bar", booleanColumnType, ""),
            new Column("prepared_food", booleanColumnType, ""),
            new Column("florist", booleanColumnType, ""),
        });
        createTable("time_by_day", new Column[] {
            new Column("time_id", "INTEGER", "NOT NULL"),
            new Column("the_date", "TIMESTAMP", ""),
            new Column("the_day", "VARCHAR(30)", ""),
            new Column("the_month", "VARCHAR(30)", ""),
            new Column("the_year", "SMALLINT", ""),
            new Column("day_of_month", "SMALLINT", ""),
            new Column("week_of_year", "INTEGER", ""),
            new Column("month_of_year", "SMALLINT", ""),
            new Column("quarter", "VARCHAR(30)", ""),
            new Column("fiscal_period", "VARCHAR(30)", ""),
        });
        createTable("warehouse", new Column[] {
            new Column("warehouse_id", "INTEGER", "NOT NULL"),
            new Column("warehouse_class_id", "INTEGER", ""),
            new Column("stores_id", "INTEGER", ""),
            new Column("warehouse_name", "VARCHAR(60)", ""),
            new Column("wa_address1", "VARCHAR(30)", ""),
            new Column("wa_address2", "VARCHAR(30)", ""),
            new Column("wa_address3", "VARCHAR(30)", ""),
            new Column("wa_address4", "VARCHAR(30)", ""),
            new Column("warehouse_city", "VARCHAR(30)", ""),
            new Column("warehouse_state_province", "VARCHAR(30)", ""),
            new Column("warehouse_postal_code", "VARCHAR(30)", ""),
            new Column("warehouse_country", "VARCHAR(30)", ""),
            new Column("warehouse_owner_name", "VARCHAR(30)", ""),
            new Column("warehouse_phone", "VARCHAR(30)", ""),
            new Column("warehouse_fax", "VARCHAR(30)", ""),
        });
        createTable("warehouse_class", new Column[] {
            new Column("warehouse_class_id", "INTEGER", "NOT NULL"),
            new Column("description", "VARCHAR(30)", ""),
        });
        // NOTE(review): unlike a try/finally, this leaks the FileWriter if
        // a createTable call throws — consider wrapping the calls above.
        if (outputDirectory != null) {
            fileOutput.close();
        }
    }
/**
* If we are outputting to JDBC, and not creating tables, delete all rows.
*
* Otherwise:
*
* Generate the SQL CREATE TABLE statement.
*
* If we are outputting to JDBC,
* Execute a DROP TABLE statement
* Execute the CREATE TABLE statement
*
* Otherwise,
* output the statement to a file.
*
* @param name
* @param columns
*/
private void createTable(String name, Column[] columns) {
try {
// Define the table.
mapTableNameToColumns.put(name, columns);
if (!tables) {
if (data && jdbcOutput) {
// We're going to load the data without [re]creating
// the table, so let's remove the data.
try {
executeDDL("DELETE FROM " + quoteId(name));
} catch (SQLException e) {
throw MondrianResource.instance().newCreateTableFailed(name, e);
}
}
return;
}
// If table does not exist, that is OK
try {
executeDDL("DROP TABLE " + quoteId(name));
} catch (Exception e) {
if (verbose) {
System.out.println("Drop of " + name + " failed. Ignored");
}
}
StringBuffer buf = new StringBuffer();
buf.append("CREATE TABLE ").append(quoteId(name)).append("(");
for (int i = 0; i < columns.length; i++) {
Column column = columns[i];
if (i > 0) {
buf.append(",");
}
buf.append(nl);
buf.append(" ").append(quoteId(column.name)).append(" ")
.append(column.type);
if (!column.constraint.equals("")) {
buf.append(" ").append(column.constraint);
}
}
buf.append(")");
final String ddl = buf.toString();
executeDDL(ddl);
} catch (Exception e) {
throw MondrianResource.instance().newCreateTableFailed(name, e);
}
}
private void executeDDL(String ddl) throws Exception {
if (verbose) {
System.out.println(ddl);
}
if (jdbcOutput) {
final Statement statement = connection.createStatement();
statement.execute(ddl);
} else {
fileOutput.write(ddl);
fileOutput.write(";\n");
}
}
    /**
     * Quote the given SQL identifier suitable for the output DBMS.
     *
     * @param name identifier (table or column name) to quote
     * @return the identifier quoted in the output dialect
     */
    private String quoteId(String name) {
        return sqlQuery.quoteIdentifier(name);
    }
/**
* String representation of the column in the result set, suitable for
* inclusion in a SQL insert statement.
*
* The column in the result set is transformed according to the type in
* the column parameter.
*
* Different DBMSs return different Java types for a given column.
* ClassCastExceptions may occur.
*
* @param rs ResultSet row to process
* @param column Column to process
* @return String representation of column value
* @throws Exception
*/
private String columnValue(ResultSet rs, Column column) throws Exception {
Object obj = rs.getObject(column.name);
String columnType = column.type;
if (obj == null) {
return "NULL";
}
/*
* Output for an INTEGER column, handling Doubles and Integers
* in the result set
*/
if (columnType.startsWith("INTEGER")) {
if (obj.getClass() == Double.class) {
try {
Double result = (Double) obj;
return integerFormatter.format(result.doubleValue());
} catch (ClassCastException cce) {
System.out.println("CCE: " + column.name + " to Long from: " + obj.getClass().getName() + " - " + obj.toString());
throw cce;
}
} else {
try {
Integer result = (Integer) obj;
return result.toString();
} catch (ClassCastException cce) {
System.out.println("CCE: " + column.name + " to Integer from: " + obj.getClass().getName() + " - " + obj.toString());
throw cce;
}
}
/*
* Output for an SMALLINT column, handling Integers
* in the result set
*/
} else if (columnType.startsWith("SMALLINT")) {
if (obj.getClass() == Boolean.class) {
Boolean result = (Boolean) obj;
if (result.booleanValue()) {
return "1";
} else {
return "0";
}
} else {
try {
Integer result = (Integer) obj;
return result.toString();
} catch (ClassCastException cce) {
System.out.println("CCE: " + column.name + " to Integer from: " + obj.getClass().getName() + " - " + obj.toString());
throw cce;
}
}
/*
* Output for an BIGINT column, handling Doubles and Longs
* in the result set
*/
} else if (columnType.startsWith("BIGINT")) {
if (obj.getClass() == Double.class) {
try {
Double result = (Double) obj;
return integerFormatter.format(result.doubleValue());
} catch (ClassCastException cce) {
System.out.println("CCE: " + column.name + " to Double from: " + obj.getClass().getName() + " - " + obj.toString());
throw cce;
}
} else {
try {
Long result = (Long) obj;
return result.toString();
} catch (ClassCastException cce) {
System.out.println("CCE: " + column.name + " to Long from: " + obj.getClass().getName() + " - " + obj.toString());
throw cce;
}
}
/*
* Output for a String, managing embedded quotes
*/
} else if (columnType.startsWith("VARCHAR")) {
return embedQuotes((String) obj);
/*
* Output for a TIMESTAMP
*/
} else if (columnType.startsWith("TIMESTAMP")) {
Timestamp ts = (Timestamp) obj;
if (sqlQuery.isOracle()) {
return "TIMESTAMP '" + ts + "'";
} else {
return "'" + ts + "'";
}
//return "'" + ts + "'" ;
/*
* Output for a DATE
*/
} else if (columnType.startsWith("DATE")) {
Date dt = (Date) obj;
if (sqlQuery.isOracle()) {
return "DATE '" + dateFormatter.format(dt) + "'";
} else {
return "'" + dateFormatter.format(dt) + "'";
}
/*
* Output for a FLOAT
*/
} else if (columnType.startsWith("REAL")) {
Float result = (Float) obj;
return result.toString();
/*
* Output for a DECIMAL(length, places)
*/
} else if (columnType.startsWith("DECIMAL")) {
final Matcher matcher = decimalDataTypeRegex.matcher(columnType);
if (!matcher.matches()) {
throw new Exception("Bad DECIMAL column type for " + columnType);
}
DecimalFormat formatter = new DecimalFormat(decimalFormat(matcher.group(1), matcher.group(2)));
if (obj.getClass() == Double.class) {
try {
Double result = (Double) obj;
return formatter.format(result.doubleValue());
} catch (ClassCastException cce) {
System.out.println("CCE: " + column.name + " to Double from: " + obj.getClass().getName() + " - " + obj.toString());
throw cce;
}
} else {
// should be (obj.getClass() == BigDecimal.class)
try {
BigDecimal result = (BigDecimal) obj;
return formatter.format(result);
} catch (ClassCastException cce) {
System.out.println("CCE: " + column.name + " to BigDecimal from: " + obj.getClass().getName() + " - " + obj.toString());
throw cce;
}
}
/*
* Output for a BOOLEAN (Postgres) or BIT (other DBMSs)
*/
} else if (columnType.startsWith("BOOLEAN") || columnType.startsWith("BIT")) {
Boolean result = (Boolean) obj;
return result.toString();
}
throw new Exception("Unknown column type: " + columnType + " for column: " + column.name);
}
/**
 * Post-processes an already-formatted column value for inclusion in an SQL
 * INSERT statement, applying dialect-specific tweaks.
 *
 * @param columnValue pre-formatted value (may be null)
 * @param column      the target column, whose type string drives the handling
 * @return the value to emit, or "NULL" for a null input
 */
private String columnValue(String columnValue, Column column) throws Exception {
    final String type = column.type;
    if (columnValue == null) {
        return "NULL";
    }
    // Oracle requires explicit TIMESTAMP/DATE literal prefixes.
    if (type.startsWith("TIMESTAMP")) {
        return sqlQuery.isOracle() ? "TIMESTAMP " + columnValue : columnValue;
    }
    if (type.startsWith("DATE")) {
        return sqlQuery.isOracle() ? "DATE " + columnValue : columnValue;
    }
    // FIXME: assumes only boolean columns map onto booleanColumnType; a
    // separate logical/physical type per column would be cleaner.
    if (type.equals(booleanColumnType)) {
        final String trimmed = columnValue.trim();
        final boolean numericBooleans = sqlQuery.isMySQL() || sqlQuery.isOracle();
        if (numericBooleans) {
            // These dialects store booleans as 1/0.
            if (trimmed.equals("true")) {
                return "1";
            }
            if (trimmed.equals("false")) {
                return "0";
            }
        } else {
            // Other dialects (e.g. Postgres BOOLEAN) take true/false keywords.
            if (trimmed.equals("1")) {
                return "true";
            }
            if (trimmed.equals("0")) {
                return "false";
            }
        }
    }
    // Anything else passes through untouched.
    return columnValue;
}
/**
 * Generate an appropriate string to use in an SQL insert statement for
 * a VARCHAR column, taking into account NULL strings and strings with
 * embedded quotes.
 *
 * @param original String to transform
 * @return NULL if null string, otherwise massaged string with doubled quotes
 * for SQL
 */
private String embedQuotes(String original) {
    if (original == null) {
        return "NULL";
    }
    // Doubling embedded single quotes is the standard SQL escape; this
    // replaces the previous per-character StringBuffer loop.
    return "'" + original.replace("'", "''") + "'";
}
/**
 * Parses the textual length/places of a DECIMAL(length, places) declaration
 * and delegates to the numeric overload to build the format pattern.
 */
private static String decimalFormat(String lengthStr, String placesStr) {
    return decimalFormat(Integer.parseInt(lengthStr), Integer.parseInt(placesStr));
}
private static String decimalFormat(int length, int places) {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < length; i++) {
if ((length - i) == places) {
sb.append('.');
}
if ((length - i) <= (places + 1)) {
sb.append("0");
} else {
sb.append("
}
}
return sb.toString();
}
/**
 * Immutable descriptor for a target table column: its name, its SQL type
 * string (e.g. "DECIMAL(10,2)"), and any trailing constraint clause.
 */
private static class Column {
// Column name as it appears in DDL/DML.
private final String name;
// SQL type string; branching in columnValue() keys off its prefix.
private final String type;
// Constraint clause appended to the column definition (may be empty).
private final String constraint;
public Column(String name, String type, String constraint) {
this.name = name;
this.type = type;
this.constraint = constraint;
}
}
}
// End MondrianFoodMartLoader.java |
package org.jpmml.xjc;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import javax.xml.bind.annotation.XmlValue;
import javax.xml.namespace.QName;
import com.sun.codemodel.JAnnotationUse;
import com.sun.codemodel.JClass;
import com.sun.codemodel.JClassAlreadyExistsException;
import com.sun.codemodel.JCodeModel;
import com.sun.codemodel.JDefinedClass;
import com.sun.codemodel.JExpr;
import com.sun.codemodel.JExpression;
import com.sun.codemodel.JFieldRef;
import com.sun.codemodel.JFieldVar;
import com.sun.codemodel.JJavaName;
import com.sun.codemodel.JMethod;
import com.sun.codemodel.JMod;
import com.sun.codemodel.JMods;
import com.sun.codemodel.JPackage;
import com.sun.codemodel.JStringLiteral;
import com.sun.codemodel.JType;
import com.sun.codemodel.JVar;
import com.sun.tools.xjc.Options;
import com.sun.tools.xjc.model.CAttributePropertyInfo;
import com.sun.tools.xjc.model.CClassInfo;
import com.sun.tools.xjc.model.CClassInfoParent;
import com.sun.tools.xjc.model.CDefaultValue;
import com.sun.tools.xjc.model.CElementPropertyInfo;
import com.sun.tools.xjc.model.CPluginCustomization;
import com.sun.tools.xjc.model.CPropertyInfo;
import com.sun.tools.xjc.model.Model;
import com.sun.tools.xjc.model.nav.NClass;
import com.sun.tools.xjc.outline.ClassOutline;
import com.sun.tools.xjc.outline.EnumOutline;
import com.sun.tools.xjc.outline.FieldOutline;
import com.sun.tools.xjc.outline.Outline;
import org.eclipse.persistence.oxm.annotations.XmlValueExtension;
import org.jvnet.jaxb2_commons.plugin.AbstractParameterizablePlugin;
import org.jvnet.jaxb2_commons.util.CustomizationUtils;
import org.w3c.dom.Element;
import org.xml.sax.ErrorHandler;
public class PMMLPlugin extends AbstractParameterizablePlugin {
/**
 * Returns the command-line option name ("-Xpmml") that activates this plugin.
 */
@Override
public String getOptionName(){
return "Xpmml";
}
/**
 * Returns the usage text for this plugin; {@code null} means no usage text
 * is printed.
 */
@Override
public String getUsage(){
return null;
}
/**
 * Declares the binding-customization elements this plugin understands,
 * so that XJC does not report them as unrecognized.
 */
@Override
public Collection<QName> getCustomizationElementNames(){
return Arrays.asList(PMMLPlugin.SERIALVERSIONUID_ELEMENT_NAME, PMMLPlugin.SUBPACKAGE_ELEMENT_NAME);
}
@Override
public void postProcessModel(Model model, ErrorHandler errorHandler){
super.postProcessModel(model, errorHandler);
JCodeModel codeModel = model.codeModel;
JClass measureClass = codeModel.ref("org.dmg.pmml.Measure");
JClass nodeClass = codeModel.ref("org.dmg.pmml.tree.Node");
JClass pmmlObjectClass = codeModel.ref("org.dmg.pmml.PMMLObject");
JClass activationFunctionEnum = codeModel.directClass("org.dmg.pmml.neural_network.NeuralNetwork.ActivationFunction");
JClass normalizationMethodEnum = codeModel.directClass("org.dmg.pmml.neural_network.NeuralNetwork.NormalizationMethod");
Comparator<CPropertyInfo> comparator = new Comparator<CPropertyInfo>(){
@Override
public int compare(CPropertyInfo left, CPropertyInfo right){
boolean leftAttribute = (left instanceof CAttributePropertyInfo);
boolean rightAttribute = (right instanceof CAttributePropertyInfo);
if(leftAttribute && !rightAttribute){
return -1;
} else
if(!leftAttribute && rightAttribute){
return 1;
}
return 0;
}
};
{
CPluginCustomization serialVersionUIDCustomization = CustomizationUtils.findCustomization(model, PMMLPlugin.SERIALVERSIONUID_ELEMENT_NAME);
if(serialVersionUIDCustomization != null){
Element element = serialVersionUIDCustomization.element;
if(model.serialVersionUID != null){
throw new RuntimeException();
}
int major = parseVersion(element.getAttribute("major"));
int minor = parseVersion(element.getAttribute("minor"));
int patch = parseVersion(element.getAttribute("patch"));
int implementation = parseVersion(element.getAttribute("implementation"));
model.serialVersionUID = (long)((major << 24) | (minor << 16) | (patch << 8) | implementation);
}
}
Map<NClass, CClassInfo> beans = model.beans();
Collection<CClassInfo> classInfos = beans.values();
for(CClassInfo classInfo : classInfos){
CPluginCustomization subpackageCustomization = CustomizationUtils.findCustomization(classInfo, PMMLPlugin.SUBPACKAGE_ELEMENT_NAME);
if(subpackageCustomization != null){
CClassInfoParent.Package packageParent = (CClassInfoParent.Package)classInfo.parent();
Element element = subpackageCustomization.element;
String name = element.getAttribute("name");
if(name == null){
throw new RuntimeException();
}
try {
Field field = CClassInfoParent.Package.class.getDeclaredField("pkg");
if(!field.isAccessible()){
field.setAccessible(true);
}
JPackage subPackage = packageParent.pkg.subPackage(name);
field.set(packageParent, subPackage);
} catch(ReflectiveOperationException roe){
throw new RuntimeException(roe);
}
} // End if
if((classInfo.shortName).equals("ComplexNode")){
try {
Field field = CClassInfo.class.getDeclaredField("elementName");
if(!field.isAccessible()){
field.setAccessible(true);
}
field.set(classInfo, new QName("http:
} catch(ReflectiveOperationException roe){
throw new RuntimeException(roe);
}
}
List<CPropertyInfo> propertyInfos = classInfo.getProperties();
propertyInfos.sort(comparator);
for(CPropertyInfo propertyInfo : propertyInfos){
String publicName = propertyInfo.getName(true);
String privateName = propertyInfo.getName(false);
// Collection of values
if(propertyInfo.isCollection()){
if((classInfo.shortName).equals("ComplexNode") && (privateName).equals("node")){
propertyInfo.baseType = nodeClass;
} else
if((classInfo.shortName).equals("VectorFields") && (privateName).equals("fieldRefOrCategoricalPredictor")){
propertyInfo.baseType = pmmlObjectClass;
} // End if
if((privateName).contains("And") || (privateName).contains("Or") || (privateName).equalsIgnoreCase("content")){
propertyInfo.setName(true, "Content");
propertyInfo.setName(false, "content");
} else
{
// Have "arrays" instead of "arraies"
if((privateName).endsWith("array") || (privateName).endsWith("Array")){
publicName += "s";
privateName += "s";
} else
// Have "refs" instead of "reves"
if((privateName).endsWith("ref") || (privateName).endsWith("Ref")){
publicName += "s";
privateName += "s";
} else
{
publicName = JJavaName.getPluralForm(publicName);
privateName = JJavaName.getPluralForm(privateName);
}
propertyInfo.setName(true, publicName);
propertyInfo.setName(false, privateName);
}
} else
// Simple value
{
if((classInfo.shortName).equals("ComparisonMeasure") && (privateName).equals("measure")){
propertyInfo.baseType = measureClass;
} else
if((classInfo.shortName).equals("DecisionTree") && (privateName).equals("node")){
propertyInfo.baseType = nodeClass;
} else
if((classInfo.shortName).equals("NeuralLayer") && (privateName).equals("activationFunction")){
propertyInfo.baseType = activationFunctionEnum;
} else
if((classInfo.shortName).equals("NeuralLayer") && (privateName).equals("normalizationMethod")){
propertyInfo.baseType = normalizationMethodEnum;
} else
if((classInfo.shortName).equals("TreeModel") && (privateName).equals("node")){
propertyInfo.baseType = nodeClass;
} // End if
if((privateName).equals("functionName")){
propertyInfo.setName(true, "MiningFunction");
propertyInfo.setName(false, "miningFunction");
}
CDefaultValue defaultValue = propertyInfo.defaultValue;
if(defaultValue != null){
propertyInfo.defaultValue = new CShareableDefaultValue(propertyInfo, propertyInfo.defaultValue);
}
}
}
}
}
@Override
public boolean run(Outline outline, Options options, ErrorHandler errorHandler){
Model model = outline.getModel();
JCodeModel codeModel = model.codeModel;
JClass iterableInterface = codeModel.ref("java.lang.Iterable");
JClass iteratorInterface = codeModel.ref("java.util.Iterator");
JClass hasExtensionsInterface = codeModel.ref("org.dmg.pmml.HasExtensions");
JClass stringValueInterface = codeModel.ref("org.dmg.pmml.StringValue");
JClass stringClass = codeModel.ref("java.lang.String");
JClass arraysClass = codeModel.ref("java.util.Arrays");
JClass fieldNameClass = codeModel.ref("org.dmg.pmml.FieldName");
JClass propertyAnnotation = codeModel.ref("org.jpmml.model.annotations.Property");
List<? extends ClassOutline> classOutlines = new ArrayList<>(outline.getClasses());
classOutlines.sort((left, right) -> (left.implClass.name()).compareToIgnoreCase(right.implClass.name()));
for(ClassOutline classOutline : classOutlines){
JDefinedClass beanClazz = classOutline.implClass;
// Implementations of org.dmg.pmml.HasFieldReference
if(checkType(beanClazz, "org.dmg.pmml.TextIndex")){
createGetterProxy(beanClazz, fieldNameClass, "getField", "getTextField");
createSetterProxy(beanClazz, fieldNameClass, "field", "setField", "setTextField");
} // End if
// Implementations of org.dmg.pmml.HasName
if(checkType(beanClazz, "org.dmg.pmml.regression.CategoricalPredictor") || checkType(beanClazz, "org.dmg.pmml.regression.NumericPredictor")){
createGetterProxy(beanClazz, fieldNameClass, "getName", "getField");
createSetterProxy(beanClazz, fieldNameClass, "name", "setName", "setField");
} // End if
// Implementations of org.dmg.pmml.Indexable
if(checkType(beanClazz, "org.dmg.pmml.DefineFunction") || checkType(beanClazz, "org.dmg.pmml.general_regression.Parameter")){
createGetterProxy(beanClazz, stringClass, "getKey", "getName");
} else
if(checkType(beanClazz, "org.dmg.pmml.MiningField")){
createGetterProxy(beanClazz, fieldNameClass, "getKey", "getName");
} else
if(checkType(beanClazz, "org.dmg.pmml.Target") || checkType(beanClazz, "org.dmg.pmml.VerificationField") || checkType(beanClazz, "org.dmg.pmml.nearest_neighbor.InstanceField")){
createGetterProxy(beanClazz, fieldNameClass, "getKey", "getField");
} else
if(checkType(beanClazz, "org.dmg.pmml.association.Item") || checkType(beanClazz, "org.dmg.pmml.association.Itemset") || checkType(beanClazz, "org.dmg.pmml.sequence.Sequence") || checkType(beanClazz, "org.dmg.pmml.support_vector_machine.VectorInstance") || checkType(beanClazz, "org.dmg.pmml.text.TextDocument")){
createGetterProxy(beanClazz, stringClass, "getKey", "getId");
}
Map<String, JFieldVar> fieldVars = beanClazz.fields();
FieldOutline contentFieldOutline = getContentField(classOutline);
if(contentFieldOutline != null){
CPropertyInfo propertyInfo = contentFieldOutline.getPropertyInfo();
String publicName = propertyInfo.getName(true);
String privateName = propertyInfo.getName(false);
JFieldVar fieldVar = fieldVars.get(privateName);
JType elementType = CodeModelUtil.getElementType(fieldVar.type());
beanClazz._implements(iterableInterface.narrow(elementType));
JMethod getElementsMethod = beanClazz.getMethod("get" + publicName, new JType[0]);
JMethod iteratorMethod = beanClazz.method(JMod.PUBLIC, iteratorInterface.narrow(elementType), "iterator");
iteratorMethod.annotate(Override.class);
iteratorMethod.body()._return(JExpr.invoke(getElementsMethod).invoke("iterator"));
moveBefore(beanClazz, iteratorMethod, getElementsMethod);
}
FieldOutline extensionsFieldOutline = getExtensionsField(classOutline);
if(extensionsFieldOutline != null){
beanClazz._implements(hasExtensionsInterface.narrow(beanClazz));
}
FieldOutline[] fieldOutlines = classOutline.getDeclaredFields();
for(FieldOutline fieldOutline : fieldOutlines){
CPropertyInfo propertyInfo = fieldOutline.getPropertyInfo();
String publicName = propertyInfo.getName(true);
String privateName = propertyInfo.getName(false);
JFieldVar fieldVar = fieldVars.get(privateName);
String name = fieldVar.name();
JMods modifiers = fieldVar.mods();
if((modifiers.getValue() & JMod.PRIVATE) != JMod.PRIVATE){
modifiers.setPrivate();
}
JType type = fieldVar.type();
CShareableDefaultValue defaultValue = (CShareableDefaultValue)propertyInfo.defaultValue;
if(defaultValue != null){
if(defaultValue.isShared()){
beanClazz.field(JMod.PRIVATE | JMod.STATIC | JMod.FINAL, type, defaultValue.getField(), defaultValue.computeInit(outline));
}
}
JMethod getterMethod = beanClazz.getMethod("get" + publicName, new JType[0]);
JMethod setterMethod = beanClazz.getMethod("set" + publicName, new JType[]{type});
if(getterMethod != null){
JType returnType = getterMethod.type();
if(returnType.isPrimitive() && !type.isPrimitive()){
JType boxifiedReturnType = returnType.boxify();
if((boxifiedReturnType).equals(type)){
getterMethod.type(boxifiedReturnType);
}
}
} // End if
if(setterMethod != null){
setterMethod.type(beanClazz);
JVar param = (setterMethod.params()).get(0);
param.name(name);
param.annotate(propertyAnnotation).param("value", name);
setterMethod.body()._return(JExpr._this());
} // End if
if(propertyInfo.isCollection()){
JType elementType = CodeModelUtil.getElementType(type);
JFieldRef fieldRef = JExpr.refthis(name);
JMethod getElementsMethod = beanClazz.getMethod("get" + publicName, new JType[0]);
JMethod hasElementsMethod = beanClazz.method(JMod.PUBLIC, boolean.class, "has" + publicName);
hasElementsMethod.body()._return((fieldRef.ne(JExpr._null())).cand((fieldRef.invoke("size")).gt(JExpr.lit(0))));
moveBefore(beanClazz, hasElementsMethod, getElementsMethod);
JMethod addElementsMethod = beanClazz.method(JMod.PUBLIC, beanClazz, "add" + publicName);
JVar param = addElementsMethod.varParam(elementType, name);
addElementsMethod.body().add(JExpr.invoke(getterMethod).invoke("addAll").arg(arraysClass.staticInvoke("asList").arg(param)));
addElementsMethod.body()._return(JExpr._this());
moveAfter(beanClazz, addElementsMethod, getElementsMethod);
} // End if
if(propertyInfo instanceof CAttributePropertyInfo){
declareAttributeField(beanClazz, fieldVar);
} else
if(propertyInfo instanceof CElementPropertyInfo){
declareElementField(beanClazz, fieldVar);
}
Collection<JAnnotationUse> annotations = fieldVar.annotations();
if(hasAnnotation(annotations, XmlValue.class)){
fieldVar.annotate(XmlValueExtension.class);
}
}
if(model.serialVersionUID != null){
beanClazz.field(JMod.PRIVATE | JMod.STATIC | JMod.FINAL, long.class, "serialVersionUID", JExpr.lit(model.serialVersionUID));
}
String[][][] markerInterfaces = {
{{"HasContinuousDomain"}, {"hasIntervals", "getIntervals", "addIntervals"}},
{{"HasDataType", "Field"}, {"getDataType", "setDataType"}},
{{"HasDefaultValue"}, {"getDefaultValue", "setDefaultValue"}},
{{"HasDiscreteDomain"}, {"hasValues", "getValues", "addValues"}},
{{"HasDisplayName"}, {"getDisplayName", "setDisplayName"}},
{{"HasExpression"}, {"getExpression", "setExpression"}},
{{"HasExtensions"}, {"hasExtensions", "getExtensions", "addExtensions"}},
{{"HasFieldReference", "ComparisonField"}, {"getField", "setField"}},
{{"HasId", "Entity", "NeuralEntity", "Node", "Rule"}, {"getId", "setId"}},
{{"HasLocator"}, {"getLocator", "setLocator"}},
{{"HasMapMissingTo"}, {"getMapMissingTo", "setMapMissingTo"}},
{{"HasMixedContent"}, {"hasContext", "getContent", "addContent"}},
{{"HasName", "Field", "Term"}, {"getName", "setName"}},
{{"HasOpType", "Field"}, {"getOpType", "setOpType"}},
{{"HasPredicate", "Node", "Rule"}, {"getPredicate", "setPredicate"}},
{{"HasScore", "Node", "Rule"}, {"getScore", "setScore"}},
{{"HasTable"}, {"getTableLocator", "setTableLocator", "getInlineTable", "setInlineTable"}},
{{"HasValue"}, {"getValue", "setValue"}},
{{"HasValueSet"}, {"getArray", "setArray"}}
};
for(String[][] markerInterface : markerInterfaces){
String[] types = markerInterface[0];
String[] members = markerInterface[1];
boolean matches = false;
{
JClass superClazz = beanClazz._extends();
superClazz = superClazz.erasure();
for(int i = 1; i < types.length; i++){
matches |= (superClazz.name()).equals(types[i]);
}
}
for(Iterator<JClass> it = beanClazz._implements(); it.hasNext(); ){
JClass _interface = it.next();
_interface = _interface.erasure();
matches |= (_interface.name()).equals(types[0]);
}
if(!matches){
continue;
}
Collection<JMethod> methods = beanClazz.methods();
for(int i = 0; i < members.length; i++){
for(JMethod method : methods){
String name = method.name();
if(!(name).equals(members[i])){
continue;
} // End if
List<JVar> params = method.params();
if((name.startsWith("has") || name.startsWith("get")) && params.size() == 0){
if(!hasAnnotation(method.annotations(), Override.class)){
method.annotate(Override.class);
}
} else
if(name.startsWith("add") && params.size() == 0 && method.hasVarArgs()){
method.annotate(Override.class);
} else
if(name.startsWith("set") && params.size() == 1){
if(!hasAnnotation(method.annotations(), Override.class)){
method.annotate(Override.class);
}
}
}
}
}
}
List<? extends EnumOutline> enumOutlines = new ArrayList<>(outline.getEnums());
enumOutlines.sort((left, right) -> (left.clazz.name()).compareToIgnoreCase(right.clazz.name()));
for(EnumOutline enumOutline : enumOutlines){
JDefinedClass clazz = enumOutline.clazz;
clazz._implements(stringValueInterface.narrow(clazz));
JMethod valueMethod = clazz.getMethod("value", new JType[0]);
valueMethod.annotate(Override.class);
JMethod toStringMethod = clazz.method(JMod.PUBLIC, String.class, "toString");
toStringMethod.annotate(Override.class);
toStringMethod.body()._return(JExpr.invoke(valueMethod));
}
if(model.serialVersionUID != null){
model.serialVersionUID = null;
}
return true;
}
/**
 * Finds the singleton "extensions" collection field (element type
 * org.dmg.pmml.Extension) of the given class outline, or null if absent.
 */
private static FieldOutline getExtensionsField(ClassOutline classOutline){
    Predicate<FieldOutline> predicate = (fieldOutline) -> {
        CPropertyInfo propertyInfo = fieldOutline.getPropertyInfo();
        if(!("extensions").equals(propertyInfo.getName(false)) || !propertyInfo.isCollection()){
            return false;
        }
        JType elementType = CodeModelUtil.getElementType(fieldOutline.getRawType());
        return checkType(elementType, "org.dmg.pmml.Extension");
    };
    return XJCUtil.findSingletonField(classOutline.getDeclaredFields(), predicate);
}
/**
 * Finds the singleton collection field whose element type name pluralizes to
 * the class's own name (e.g. class "Intervals" holding Interval elements),
 * or null if there is no such field.
 */
private static FieldOutline getContentField(ClassOutline classOutline){
    String className = classOutline.implClass.name();
    Predicate<FieldOutline> predicate = (fieldOutline) -> {
        CPropertyInfo propertyInfo = fieldOutline.getPropertyInfo();
        if(!propertyInfo.isCollection()){
            return false;
        }
        String elementName = CodeModelUtil.getElementType(fieldOutline.getRawType()).name();
        // Accept both naive ("...s") and dictionary pluralization
        return className.equals(elementName + "s") || className.equals(JJavaName.getPluralForm(elementName));
    };
    return XJCUtil.findSingletonField(classOutline.getDeclaredFields(), predicate);
}
/**
 * Tells whether the annotation collection contains an annotation of the
 * given class.
 */
private static boolean hasAnnotation(Collection<JAnnotationUse> annotations, Class<?> clazz){
    return findAnnotation(annotations, clazz) != null;
}
/**
 * Returns the first annotation use whose annotation class matches the given
 * class, or null when none matches.
 */
private static JAnnotationUse findAnnotation(Collection<JAnnotationUse> annotations, Class<?> clazz){
    String fullName = clazz.getName();
    return annotations.stream()
        .filter(annotation -> checkType(annotation.getAnnotationClass(), fullName))
        .findFirst()
        .orElse(null);
}
/**
 * Adds an @Override getter named {@code name} that simply delegates to the
 * existing {@code getterName} method, placed just before it in the class body.
 */
private static void createGetterProxy(JDefinedClass beanClazz, JType type, String name, String getterName){
    JMethod delegate = beanClazz.getMethod(getterName, new JType[0]);
    JMethod proxy = beanClazz.method(JMod.PUBLIC, type, name);
    proxy.annotate(Override.class);
    proxy.body()._return(JExpr.invoke(delegate));
    moveBefore(beanClazz, proxy, delegate);
}
/**
 * Adds a fluent @Override setter named {@code name} that delegates to the
 * existing {@code setterName} method; positioned just before the matching
 * getter (derived by replacing the "set" prefix with "get").
 */
public static void createSetterProxy(JDefinedClass beanClazz, JType type, String parameterName, String name, String setterName){
    JMethod anchor = beanClazz.getMethod(setterName.replace("set", "get"), new JType[0]);
    JMethod proxy = beanClazz.method(JMod.PUBLIC, beanClazz, name);
    proxy.annotate(Override.class);
    JVar valueParameter = proxy.param(type, parameterName);
    proxy.body()._return(JExpr.invoke(setterName).arg(valueParameter));
    moveBefore(beanClazz, proxy, anchor);
}
/**
 * Registers the attribute-backed field in the package-level PMMLAttributes
 * reflective constant interface.
 */
private static void declareAttributeField(JDefinedClass beanClazz, JFieldVar fieldVar){
    declareField(ensureInterface(beanClazz._package(), "PMMLAttributes"), beanClazz, fieldVar);
}
/**
 * Registers the element-backed field in the package-level PMMLElements
 * reflective constant interface.
 */
private static void declareElementField(JDefinedClass beanClazz, JFieldVar fieldVar){
    declareField(ensureInterface(beanClazz._package(), "PMMLElements"), beanClazz, fieldVar);
}
/**
 * Adds a constant "BEANNAME_FIELDNAME" to the given interface, initialized
 * via ReflectionUtil.getField(Bean.class, "fieldName").
 */
private static void declareField(JDefinedClass _interface, JDefinedClass beanClazz, JFieldVar fieldVar){
    JCodeModel codeModel = _interface.owner();
    String constantName = (beanClazz.name() + "_" + fieldVar.name()).toUpperCase();
    JExpression init = codeModel.ref("org.jpmml.model.ReflectionUtil")
        .staticInvoke("getField")
        .arg(beanClazz.dotclass())
        .arg(fieldVar.name());
    _interface.field(0, Field.class, constantName, init);
}
/**
 * Returns the named interface in the package, creating it on first use.
 */
private static JDefinedClass ensureInterface(JPackage _package, String name){
    try {
        return _package._interface(name);
    } catch(JClassAlreadyExistsException jcaee){
        // Already generated for this package; reuse it.
        return jcaee.getExistingClass();
    }
}
/**
 * Repositions {@code method} immediately before {@code referenceMethod} in
 * the class's method list, so generated code keeps a readable ordering.
 */
private static void moveBefore(JDefinedClass beanClazz, JMethod method, JMethod referenceMethod){
    List<JMethod> methods = (List<JMethod>)beanClazz.methods();
    int anchor = methods.indexOf(referenceMethod);
    if(anchor < 0){
        throw new RuntimeException();
    }
    // Remove first so the insertion index stays valid.
    methods.remove(method);
    methods.add(anchor, method);
}
/**
 * Repositions {@code method} immediately after {@code referenceMethod} in
 * the class's method list.
 */
private static void moveAfter(JDefinedClass beanClazz, JMethod method, JMethod referenceMethod){
    List<JMethod> methods = (List<JMethod>)beanClazz.methods();
    int anchor = methods.indexOf(referenceMethod);
    if(anchor < 0){
        throw new RuntimeException();
    }
    // Remove first so the insertion index stays valid.
    methods.remove(method);
    methods.add(anchor + 1, method);
}
/**
 * Tells whether the type's fully-qualified name equals {@code fullName}.
 */
private static boolean checkType(JType type, String fullName){
    return fullName.equals(type.fullName());
}
/**
 * Parses one serialVersionUID version component.
 *
 * @param version decimal string; must parse to a value in [0, 255]
 * @return the parsed component
 * @throws IllegalArgumentException when the component is missing or out of
 *         range (subclass of the RuntimeException previously thrown, so
 *         existing catch blocks still apply)
 * @throws NumberFormatException when the string is not a decimal integer
 */
private static int parseVersion(String version){
    if(version == null){
        throw new IllegalArgumentException("Missing version component");
    }
    int value = Integer.parseInt(version);
    // Each component must fit in one byte of the packed serialVersionUID.
    if(value < 0 || value > 255){
        throw new IllegalArgumentException("Version component out of range [0, 255]: " + value);
    }
    return value;
}
/**
 * A default value that may be materialized once as a shared
 * {@code private static final} "DEFAULT_&lt;NAME&gt;" field instead of being
 * inlined at every use site. Cheap expressions (field references and string
 * literals) opt out of sharing at compute time.
 */
private static class CShareableDefaultValue extends CDefaultValue {

    private CDefaultValue parent = null;

    private String field = null;


    private CShareableDefaultValue(CPropertyInfo propertyInfo, CDefaultValue parent){
        setParent(parent);
        setField(formatField(propertyInfo.getName(false)));
    }

    @Override
    public JExpression compute(Outline outline){
        JExpression expression = computeInit(outline);
        // Field refs and string literals are cheap to inline, so drop the
        // shared-field name and emit the expression directly.
        boolean inlineable = (expression instanceof JFieldRef) || (expression instanceof JStringLiteral);
        if(inlineable){
            setField(null);
            return expression;
        }
        // Otherwise reference the shared constant that run() declares.
        return JExpr.ref(getField());
    }

    /** Computes the raw default-value expression via the wrapped default. */
    public JExpression computeInit(Outline outline){
        return getParent().compute(outline);
    }

    /** True when this default is still slated for a shared constant field. */
    public boolean isShared(){
        return getField() != null;
    }

    public CDefaultValue getParent(){
        return this.parent;
    }

    private void setParent(CDefaultValue parent){
        this.parent = parent;
    }

    public String getField(){
        return this.field;
    }

    private void setField(String field){
        this.field = field;
    }

    /** Converts a camelCase property name to DEFAULT_UPPER_SNAKE_CASE. */
    private static String formatField(String string){
        StringBuilder sb = new StringBuilder("DEFAULT_");
        for(char c : string.toCharArray()){
            if(Character.isUpperCase(c)){
                sb.append('_');
            }
            sb.append(Character.toUpperCase(c));
        }
        return sb.toString();
    }
}
/** Binding-customization element carrying the four serialVersionUID components. */
public static final QName SERIALVERSIONUID_ELEMENT_NAME = new QName("http://jpmml.org/jpmml-model", "serialVersionUID");
/** Binding-customization element that relocates a class into a named subpackage. */
public static final QName SUBPACKAGE_ELEMENT_NAME = new QName("http://jpmml.org/jpmml-model", "subpackage");
} |
// package
package org.mskcc.cbio.importer.converter.internal;
// imports
import org.mskcc.cbio.importer.Config;
import org.mskcc.cbio.importer.CaseIDs;
import org.mskcc.cbio.importer.IDMapper;
import org.mskcc.cbio.importer.Converter;
import org.mskcc.cbio.importer.FileUtils;
import org.mskcc.cbio.importer.util.MapperUtil;
import org.mskcc.cbio.importer.model.PortalMetadata;
import org.mskcc.cbio.importer.model.DatatypeMetadata;
import org.mskcc.cbio.importer.model.DataMatrix;
import org.mskcc.cbio.importer.model.CancerStudyMetadata;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.List;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Set;
/**
* Class which implements the Converter interface for processing rna-seq (v2) RSEM files.
*/
public class RNASEQV2MRNAMedianConverterImpl implements Converter {
protected static final String HYBRIDIZATION_REF_COLUMN_HEADER_NAME = "Hybridization REF";
// our logger
private static final Log LOG = LogFactory.getLog(RNASEQV2MRNAMedianConverterImpl.class);
// ref to configuration
protected Config config;
// ref to file utils
protected FileUtils fileUtils;
// ref to caseids
protected CaseIDs caseIDs;
// ref to IDMapper
protected IDMapper idMapper;
/**
* Constructor.
*
* @param config Config
* @param fileUtils FileUtils
* @param caseIDs CaseIDs;
* @param idMapper IDMapper
*/
public RNASEQV2MRNAMedianConverterImpl(Config config, FileUtils fileUtils,
CaseIDs caseIDs, IDMapper idMapper) {
// set members
this.config = config;
this.fileUtils = fileUtils;
this.caseIDs = caseIDs;
this.idMapper = idMapper;
}
/**
* Converts data for the given portal.
*
* @param portal String
* @param runDate String
* @param applyOverrides Boolean
* @throws Exception
*/
@Override
public void convertData(String portal, String runDate, Boolean applyOverrides) throws Exception {
throw new UnsupportedOperationException();
}
/**
* Generates case lists for the given portal.
*
* @param portal String
* @throws Exception
*/
@Override
public void generateCaseLists(String portal) throws Exception {
throw new UnsupportedOperationException();
}
/**
* Applies overrides to the given portal using the given data source.
* Any datatypes within the excludes datatypes set will not have be overridden.
*
* @param portal String
* @param excludeDatatypes Set<String>
* @param applyCaseLists boolean
* @throws Exception
*/
@Override
public void applyOverrides(String portal, Set<String> excludeDatatypes, boolean applyCaseLists) throws Exception {
throw new UnsupportedOperationException();
}
/**
* Creates a staging file from the given import data.
*
* @param portalMetadata PortalMetadata
* @param cancerStudyMetadata CancerStudyMetadata
* @param datatypeMetadata DatatypeMetadata
* @param dataMatrices DataMatrix[]
* @throws Exception
*/
@Override
public void createStagingFile(PortalMetadata portalMetadata, CancerStudyMetadata cancerStudyMetadata,
DatatypeMetadata datatypeMetadata, DataMatrix[] dataMatrices) throws Exception {
// sanity check
if (dataMatrices.length != 1) {
if (LOG.isErrorEnabled()) {
LOG.error("createStagingFile(), dataMatrices.length != 1, aborting...");
}
return;
}
DataMatrix dataMatrix = dataMatrices[0];
// row one (zero offset) in file is another header:
// (gene, normalized count, normalized count, ...)
dataMatrix.ignoreRow(0, true); // row data starts at 0
// rna seq data files has combination gene_symbol|id
// replace the combination with gene_symbol only
if (LOG.isInfoEnabled()) {
LOG.info("createStagingFile(), cleaning up Hybridization REF column...");
}
String geneColumnName = HYBRIDIZATION_REF_COLUMN_HEADER_NAME;
List<String> pairs;
List<LinkedList<String>> columnData = dataMatrix.getColumnData(HYBRIDIZATION_REF_COLUMN_HEADER_NAME);
if (!columnData.isEmpty()) {
pairs = columnData.get(0);
} else {
pairs = dataMatrix.getColumnData(0); // non standard gene column name
geneColumnName = dataMatrix.getColumnHeaders().get(0);
}
for (int lc = 0; lc < pairs.size(); lc++) {
String[] parts = pairs.get(lc).trim().split("\\|");
if (parts.length == 2) {
String toPart = parts[1];
if (!toPart.matches("[0-9]+")) {
if (toPart.matches("[0-9]+_calculated")) {
toPart = toPart.substring(0,toPart.indexOf("_"));
} else {
toPart = parts[0];
}
}
if (LOG.isInfoEnabled()) {
LOG.info("setting element: " + Arrays.asList(parts) + ", to: " + toPart);
}
pairs.set(lc, toPart);
}
}
// add gene symbol column, rename gene id col
if (LOG.isInfoEnabled()) {
LOG.info("createStagingFile(), adding & renaming columns");
}
dataMatrix.addColumn(Converter.GENE_SYMBOL_COLUMN_HEADER_NAME, new ArrayList<String>());
dataMatrix.renameColumn(geneColumnName, Converter.GENE_ID_COLUMN_HEADER_NAME);
dataMatrix.setGeneIDColumnHeading(Converter.GENE_ID_COLUMN_HEADER_NAME);
// perform gene mapping, remove records as needed
if (LOG.isInfoEnabled()) {
LOG.info("createStagingFile(), calling MapperUtil.mapDataToGeneID()...");
}
MapperUtil.mapGeneIDToSymbol(dataMatrix, idMapper,
Converter.GENE_ID_COLUMN_HEADER_NAME, Converter.GENE_SYMBOL_COLUMN_HEADER_NAME);
// convert case ids
if (LOG.isInfoEnabled()) {
LOG.info("createStagingFile(), filtering & converting case ids");
}
String[] columnsToIgnore = { Converter.GENE_ID_COLUMN_HEADER_NAME, Converter.GENE_SYMBOL_COLUMN_HEADER_NAME };
dataMatrix.convertCaseIDs(Arrays.asList(columnsToIgnore));
// ensure the first two columns are symbol, id respectively
if (LOG.isInfoEnabled()) {
LOG.info("createStagingFile(), sorting column headers");
}
List<String> headers = dataMatrix.getColumnHeaders();
headers.remove(Converter.GENE_SYMBOL_COLUMN_HEADER_NAME);
headers.add(0, Converter.GENE_SYMBOL_COLUMN_HEADER_NAME);
headers.remove(Converter.GENE_ID_COLUMN_HEADER_NAME);
headers.add(1, Converter.GENE_ID_COLUMN_HEADER_NAME);
dataMatrix.setColumnOrder(headers);
// we need to write out the file
if (LOG.isInfoEnabled()) {
LOG.info("createStagingFile(), writing staging file.");
}
fileUtils.writeStagingFile(portalMetadata.getStagingDirectory(), cancerStudyMetadata, datatypeMetadata, dataMatrix);
if (LOG.isInfoEnabled()) {
LOG.info("createStagingFile(), complete.");
}
}
} |
package de.lmu.ifi.dbs.algorithm.result;
import de.lmu.ifi.dbs.normalization.Normalization;
import de.lmu.ifi.dbs.utilities.UnableToComplyException;
import java.io.File;
import java.util.Map;
import java.util.Iterator;
/**
* A result for a partitioning algorithm providing a single result for a single
* partition.
*
* @author Arthur Zimek (<a
* href="mailto:zimek@dbs.ifi.lmu.de">zimek@dbs.ifi.lmu.de</a>)
*/
public class PartitionResults implements Result {
public static final String PARTITION_MARKER = "PartitionID";
/**
* Holds the results for the partitions.
*/
private Map<Integer, Result> partitionResults;
/**
* A result for a partitioning algorithm providing a single result for a
* single partition.
*
* @param resultMap a map of partition IDs to results
*/
public PartitionResults(Map<Integer, Result> resultMap) {
this.partitionResults = resultMap;
}
/**
* @see Result#output(File, Normalization)
*/
public void output(File out, Normalization normalization) throws UnableToComplyException {
for (Integer resultID : partitionResults.keySet()) {
Result result = partitionResults.get(resultID);
String marker = File.separator + PARTITION_MARKER + resultID;
if (out == null) {
System.out.println(marker);
result.output(out, normalization);
}
else {
File markedOut = new File(out.getAbsolutePath() + marker);
markedOut.getParentFile().mkdirs();
result.output(markedOut, normalization);
}
}
}
/**
* Returns an iterator over the partition IDs.
* @return an iterator over the partition IDs
*/
public Iterator<Integer> partitionIterator() {
return partitionResults.keySet().iterator();
}
/**
* Returns the result of the specified partition.
* @param partitionID the ID of the partition
* @return the result of the specified partition
*/
public Result getResult(Integer partitionID) {
return partitionResults.get(partitionID);
}
} |
package de.mrunde.bachelorthesis.activities;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.content.Intent;
import android.location.Address;
import android.location.Geocoder;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.provider.Settings;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnInitListener;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.mapquest.android.maps.BoundingBox;
import com.mapquest.android.maps.DefaultItemizedOverlay;
import com.mapquest.android.maps.GeoPoint;
import com.mapquest.android.maps.MapActivity;
import com.mapquest.android.maps.MapView;
import com.mapquest.android.maps.MyLocationOverlay;
import com.mapquest.android.maps.OverlayItem;
import com.mapquest.android.maps.RouteManager;
import com.mapquest.android.maps.RouteResponse;
import de.mrunde.bachelorthesis.R;
/**
* This is the initial activity which is started with the application. It offers
* the user to change the route type and to search for his desired destination.
*
* @author Marius Runde
*/
public class MainActivity extends MapActivity implements OnInitListener {
/**
* Maximum amount of results for the destination
*/
private final int MAX_RESULTS = 5;
/**
* Fastest route type
*/
private final String ROUTETYPE_FASTEST = "fastest";
/**
* Shortest route type
*/
private final String ROUTETYPE_SHORTEST = "shortest";
/**
* Pedestrian route type
*/
private final String ROUTETYPE_PEDESTRIAN = "pedestrian";
/**
* Bicycle route type
*/
private final String ROUTETYPE_BICYCLE = "bicycle";
/**
* Current route type
*/
private String routeType;
/**
* The entered destination
*/
private EditText edt_destination;
/**
* The "search for destination" button
*/
private Button btn_search;
/**
* The "calculate route" button<br/>
* This button also starts the navigation after route calculation.
*/
private Button btn_calculate;
/**
* The "preferences" button to change the route type
*/
private Button btn_preferences;
/**
* The "help" button
*/
private Button btn_help;
/**
* The initial map view
*/
protected MapView map;
/**
* An overlay to display the user's location
*/
private MyLocationOverlay myLocationOverlay;
/**
* Route manager for route calculation
*/
private RouteManager rm;
/**
* The current location as a String
*/
private String str_currentLocation;
/**
* The destination as a String
*/
private String str_destination;
/**
* Coordinates of the destination to be sent to the NaviActivity
*/
private double[] destination_coords = null;
/**
* TextToSpeech for audio output
*/
private TextToSpeech tts;
/**
* The Hitchhiker's Guide to the Galaxy :D
*/
private boolean time = false;
/**
* This method is called when the application has been started
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
// Initialize the TextToSpeech
tts = new TextToSpeech(this, this);
// Set the route type to fastest
this.routeType = ROUTETYPE_FASTEST;
// Setup the whole GUI and map
setupGUI();
setupMapView();
setupMyLocation();
}
/**
* Set up the GUI
*/
private void setupGUI() {
this.edt_destination = (EditText) findViewById(R.id.edt_destination);
this.btn_search = (Button) findViewById(R.id.btn_search);
btn_search.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Get the entered destination
str_destination = edt_destination.getText().toString();
if (str_destination.length() == 0) {
Toast.makeText(MainActivity.this,
R.string.noDestinationEntered, Toast.LENGTH_SHORT)
.show();
} else {
// Search for the destination
SearchDestinationTask destinationTask = new SearchDestinationTask();
destinationTask.execute(str_destination);
}
}
});
this.btn_calculate = (Button) findViewById(R.id.btn_calculate);
btn_calculate.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (destination_coords == null) {
Toast.makeText(MainActivity.this,
R.string.noDestinationEntered, Toast.LENGTH_SHORT)
.show();
} else if (btn_calculate.getText() == getResources().getString(
R.string.calculate)) {
// Transform the current location into a String
str_currentLocation = "{latLng:{lat:"
+ myLocationOverlay.getMyLocation().getLatitude()
+ ",lng:"
+ myLocationOverlay.getMyLocation().getLongitude()
+ "}}";
// Transform the destination location into a String
str_destination = "{latLng:{lat:" + destination_coords[0]
+ ",lng:" + destination_coords[1] + "}}";
// Calculate the route
calculateRoute();
} else {
// Create an Intent to start the NaviActivity and hereby the
// navigation
Intent intent = new Intent(MainActivity.this,
NaviActivity.class);
intent.putExtra("str_currentLocation", str_currentLocation);
intent.putExtra("str_destination", str_destination);
intent.putExtra("destination_lat", destination_coords[0]);
intent.putExtra("destination_lng", destination_coords[1]);
intent.putExtra("routeOptions", getRouteOptions());
startActivity(intent);
}
}
});
this.btn_preferences = (Button) findViewById(R.id.btn_preferences);
btn_preferences.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Display the route type dialog
displayRouteTypeDialog();
}
});
this.btn_help = (Button) findViewById(R.id.btn_help);
btn_help.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Create an Intent to start the HelpActivity
Intent intent = new Intent(MainActivity.this,
HelpActivity.class);
startActivity(intent);
}
});
}
/**
* This is a class to search for the destination asynchronously.
*
* @author Marius Runde
*/
private class SearchDestinationTask extends
AsyncTask<String, Void, GeoPoint> {
/**
* Progress dialog to inform the user about the searching process
*/
private ProgressDialog progressDialog = new ProgressDialog(
MainActivity.this);
@Override
protected void onPreExecute() {
// Display progress dialog
progressDialog.setMessage("Searching for destination...");
progressDialog.show();
progressDialog.setOnCancelListener(new OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
// Enable canceling the search
SearchDestinationTask.this.cancel(true);
}
});
}
@Override
protected GeoPoint doInBackground(String... destination) {
String str_destination = destination[0];
List<Address> addresses;
try {
// Create a geocoder to locate the destination
Geocoder geocoder = new Geocoder(MainActivity.this,
Locale.getDefault());
addresses = geocoder.getFromLocationName(str_destination,
MAX_RESULTS);
} catch (IOException e1) {
// Destination could not be located but try again once
// because sometimes it works at the second try
Log.d("MainActivity",
"First try to locate destination failed. Starting second try...");
try {
// Create a geocoder to locate the destination
Geocoder geocoder = new Geocoder(MainActivity.this,
Locale.getDefault());
addresses = geocoder.getFromLocationName(str_destination,
MAX_RESULTS);
} catch (IOException e2) {
// Seems like the destination could really not be
// found, so send the user a message about the error
Log.e("MainActivity",
"IO Exception in searching for destination. This is the error message: "
+ e2.getMessage());
Toast.makeText(MainActivity.this,
R.string.noDestinationFound, Toast.LENGTH_SHORT)
.show();
return null;
}
}
if (addresses.isEmpty()) {
// Destination could not be located
Toast.makeText(MainActivity.this, R.string.noDestinationFound,
Toast.LENGTH_SHORT).show();
return null;
} else {
// Destination could be located
Log.d("MainActivity", "Located destination sucessfully.");
GeoPoint result = new GeoPoint(addresses.get(0).getLatitude(),
addresses.get(0).getLongitude());
return result;
}
}
@Override
protected void onPostExecute(GeoPoint result) {
// Dismiss progress dialog
progressDialog.dismiss();
// Check if the search was successful
if (result != null) {
// Create the destination overlay
addDestinationOverlay(result);
// If the route has been calculated before change the text
// of the button so the route has to be calculated again and
// clear the route from the RouteManager
if (btn_calculate.getText() == getResources().getString(
R.string.start)) {
btn_calculate.setText(R.string.calculate);
rm.clearRoute();
}
}
}
}
/**
* Add the destination overlay to the map
*
* @param destination
* The destination
*/
private void addDestinationOverlay(GeoPoint destination) {
// Create a GeoPoint object of the current location and the destination
GeoPoint currentLocation = new GeoPoint(myLocationOverlay
.getMyLocation().getLatitude(), myLocationOverlay
.getMyLocation().getLongitude());
// Also set the coordinates of the destination for the NaviActivity
this.destination_coords = new double[] { destination.getLatitude(),
destination.getLongitude() };
// Clear previous overlays first
if (map.getOverlays().size() > 1) {
map.getOverlays().remove(1);
}
// Create the destination overlay
OverlayItem oi_destination = new OverlayItem(destination,
"Destination", str_destination);
final DefaultItemizedOverlay destinationOverlay = new DefaultItemizedOverlay(
getResources().getDrawable(R.drawable.destination_flag));
destinationOverlay.addItem(oi_destination);
// Add the overlay to the map
map.getOverlays().add(destinationOverlay);
// Zoom and pan the map to show all overlays
map.getController().zoomToSpan(
new BoundingBox(currentLocation, destination));
}
/**
* Calculate the route from the current location to the destination
*/
private void calculateRoute() {
// Clear the previous route first
if (rm != null) {
rm.clearRoute();
}
// Initialize a new RouteManager to calculate the route
rm = new RouteManager(getBaseContext(), getResources().getString(
R.string.apiKey));
rm.setMapView(map);
// Zoom and center the map to display the route
rm.setBestFitRoute(true);
// Set the route options (e.g. route type)
rm.setOptions(getRouteOptions());
// Set debug true to receive the URL
rm.setDebug(true);
// Set route callback
rm.setRouteCallback(new RouteManager.RouteCallback() {
@Override
public void onSuccess(RouteResponse response) {
// Route has been calculated successfully
Log.i("MainActivity",
getResources().getString(R.string.routeCalculated));
// Change the text of the button to enable navigation
btn_calculate.setText(R.string.start);
}
@Override
public void onError(RouteResponse response) {
// Find the reason why the route could not be calculated. The
// status codes can be found here:
// pedestrian error seems to be 500 though and not 6xx
if (response.info.statusCode == 500) {
// Route could not be calculated because the length of
// routes with the pedestrian route type are restricted to a
// specific distance
Log.e("MainActivity",
getResources().getString(
R.string.routeNotCalculated_500));
Toast.makeText(
MainActivity.this,
getResources().getString(
R.string.routeNotCalculated_500),
Toast.LENGTH_LONG).show();
} else {
// Route could not be calculated because of another error
Log.e("MainActivity",
getResources().getString(
R.string.routeNotCalculated)
+ "\nStatus Code: "
+ response.info.statusCode);
Toast.makeText(
MainActivity.this,
getResources().getString(
R.string.routeNotCalculated),
Toast.LENGTH_SHORT).show();
}
}
});
// Calculate the route and display it on the map
rm.createRoute(str_currentLocation, str_destination);
}
/**
* Setup the route options and return them
*
* @return Route options as String
*/
private String getRouteOptions() {
JSONObject options = new JSONObject();
try {
// Set the units to kilometers
String unit = "m";
options.put("unit", unit);
// Set the route type
options.put("routeType", routeType);
// Set the output shape format
String outShapeFormat = "raw";
options.put("outShapeFormat", outShapeFormat);
} catch (JSONException e) {
e.printStackTrace();
}
return options.toString();
}
/**
* Set up the map and enable default zoom controls
*/
private void setupMapView() {
this.map = (MapView) findViewById(R.id.map);
map.setBuiltInZoomControls(true);
}
/**
* Set up a MyLocationOverlay and execute the runnable once a location has
* been fixed
*/
private void setupMyLocation() {
// Check if the GPS is enabled
if (!((LocationManager) getSystemService(LOCATION_SERVICE))
.isProviderEnabled(LocationManager.GPS_PROVIDER)) {
// Open dialog to inform the user that the GPS is disabled
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(getResources().getString(R.string.gpsDisabled));
builder.setCancelable(false);
builder.setPositiveButton(R.string.openSettings,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// Open the location settings if it is disabled
Intent intent = new Intent(
Settings.ACTION_LOCATION_SOURCE_SETTINGS);
startActivity(intent);
}
});
builder.setNegativeButton(R.string.cancel,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// Dismiss the dialog
dialog.cancel();
}
});
// Display the dialog
AlertDialog dialog = builder.create();
dialog.show();
}
// Create the MyLocationOverlay
this.myLocationOverlay = new MyLocationOverlay(this, map);
myLocationOverlay.enableMyLocation();
myLocationOverlay.setMarker(
getResources().getDrawable(R.drawable.my_location), 0);
myLocationOverlay.runOnFirstFix(new Runnable() {
@Override
public void run() {
GeoPoint currentLocation = myLocationOverlay.getMyLocation();
map.getController().animateTo(currentLocation);
map.getController().setZoom(14);
map.getOverlays().add(myLocationOverlay);
myLocationOverlay.setFollowing(false);
}
});
}
@Override
protected boolean isRouteDisplayed() {
// Do nothing
return false;
}
/**
* Called when the OptionsMenu is created
*/
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main, menu);
return true;
}
/**
* Called when an item of the OptionsMenu is clicked
*/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Intent intent;
// Handle item selection
switch (item.getItemId()) {
case R.id.menu_about:
// Create an Intent to start the AboutActivity
intent = new Intent(MainActivity.this, AboutActivity.class);
startActivity(intent);
return true;
case R.id.menu_help:
// Create an Intent to start the HelpActivity
intent = new Intent(MainActivity.this, HelpActivity.class);
startActivity(intent);
return true;
case R.id.menu_routeTypes:
displayRouteTypeDialog();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
* Display the route type dialog so that the user can change it
*/
private void displayRouteTypeDialog() {
// Initialize an AlertDialog.Builder and an AlertDialog
AlertDialog.Builder builder = new AlertDialog.Builder(this);
AlertDialog dialog;
// If the route has been calculated before change the text
// of the button so the route has to be calculated again and
// clear the route from the RouteManager
if (btn_calculate.getText() == getResources().getString(R.string.start)) {
btn_calculate.setText(R.string.calculate);
rm.clearRoute();
}
// Change the route type in the settings
builder.setTitle(R.string.routeType);
builder.setItems(R.array.routeTypes,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case 0:
// Fastest selected
routeType = ROUTETYPE_FASTEST;
if (time) {
Toast.makeText(
MainActivity.this,
"Time is an illusion. Lunchtime doubly so.",
Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(MainActivity.this,
"Fastest route type selected",
Toast.LENGTH_SHORT).show();
time = true;
}
break;
case 1:
// Shortest selected
routeType = ROUTETYPE_SHORTEST;
Toast.makeText(MainActivity.this,
"Shortest route type selected",
Toast.LENGTH_SHORT).show();
break;
case 2:
// Pedestrian selected
routeType = ROUTETYPE_PEDESTRIAN;
Toast.makeText(MainActivity.this,
"Pedestrian route type selected",
Toast.LENGTH_SHORT).show();
break;
case 3:
// Bicycle selected
routeType = ROUTETYPE_BICYCLE;
Toast.makeText(MainActivity.this,
"Bicycle route type selected",
Toast.LENGTH_SHORT).show();
break;
default:
break;
}
}
});
dialog = builder.create();
dialog.show();
}
/**
* Enable features of the MyLocationOverlay
*/
@Override
protected void onResume() {
myLocationOverlay.enableMyLocation();
super.onResume();
}
/**
* Disable features of the MyLocationOverlay when in the background
*/
@Override
protected void onPause() {
super.onPause();
myLocationOverlay.disableMyLocation();
}
/**
* Shut down the TextToSpeech engine when the application is terminated
*/
@Override
protected void onDestroy() {
if (tts != null) {
tts.stop();
tts.shutdown();
}
super.onDestroy();
}
@Override
public void onInit(int status) {
if (status == TextToSpeech.SUCCESS) {
tts.setLanguage(Locale.ENGLISH);
} else {
tts = null;
Log.e("MainActivity", "Failed to initialize the TextToSpeech");
}
}
} |
package org.intermine.webservice.server.idresolution;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.log4j.Logger;
import org.intermine.api.InterMineAPI;
import org.intermine.api.bag.BagQueryResult;
import org.intermine.api.bag.BagQueryResult.IssueResult;
import org.intermine.api.bag.ConvertedObjectPair;
import org.intermine.api.util.PathUtil;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.Model;
import org.intermine.model.InterMineObject;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.pathquery.Path;
import org.intermine.pathquery.PathException;
import org.intermine.util.DynamicUtil;
import org.intermine.web.context.InterMineContext;
import org.intermine.web.logic.config.FieldConfig;
import org.intermine.web.logic.config.FieldConfigHelper;
import org.intermine.web.logic.config.WebConfig;
/**
 * Formats a {@link BagQueryResult} as a nested map keyed by match category
 * (exact matches plus the issue categories DUPLICATE, WILDCARD, OTHER and
 * TYPE_CONVERTED), suitable for serialisation in a web-service response.
 */
public class BagResultCategoryKeyFormatter implements BagResultFormatter {
    private static final Logger LOG = Logger.getLogger(BagResultCategoryKeyFormatter.class);
    // The issue categories reported in addition to exact matches.
    private static final String[] ISSUES = new String[] {
        BagQueryResult.DUPLICATE, BagQueryResult.WILDCARD, BagQueryResult.OTHER, BagQueryResult.TYPE_CONVERTED
    };
    // API handle used to look up matched objects and the data model.
    private final InterMineAPI im;

    /**
     * @param api the InterMine API used to resolve object details
     */
    public BagResultCategoryKeyFormatter(InterMineAPI api) {
        this.im = api;
    }

    /**
     * Builds the top-level response: "matches" (per-category match info),
     * "unresolved" (identifiers that resolved to nothing) and "stats"
     * (aggregate counts).
     */
    @Override
    public Map<String, Object> format(BagQueryResult bqr) {
        final Map<String, Object> ret = new HashMap<String, Object>();
        ret.put("matches", getMatchInfo(bqr));
        ret.put("unresolved", bqr.getUnresolvedIdentifiers());
        ret.put("stats", getStats(bqr));
        return ret;
    }

    /**
     * Collects exact matches under the key "MATCH" and each issue category
     * under its own key.
     */
    private Map<String, Object> getMatchInfo(BagQueryResult bqr) {
        final Map<String, Object> ret = new HashMap<String, Object>();
        ret.put("MATCH", getMatches(bqr));
        for (String issue: ISSUES) {
            ret.put(issue, getIssues(issue, bqr));
        }
        return ret;
    }

    /**
     * Computes aggregate counts over the resolution result: unresolved terms,
     * good/issue/all match terms, and matched/issue/all object counts.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private Map<String, Object> getStats(BagQueryResult bqr) {
        Map<String, Object> stats = new HashMap<String, Object>();
        Set<String> goodMatchTerms = new HashSet<String>();
        Set<String> issueMatchTerms = new HashSet<String>();
        Set<Integer> matchedObjects = bqr.getMatches().keySet();
        Set<Integer> allMatchedObjects = bqr.getMatchAndIssueIds();
        // Do any processing that needs doing here.
        for (List inputTerms: bqr.getMatches().values()) {
            goodMatchTerms.addAll((Collection<? extends String>) inputTerms);
        }
        for (String issue: ISSUES) {
            for (IssueResult ir: bqr.getIssueResults(issue)) {
                issueMatchTerms.add(ir.inputIdent);
            }
        }
        // Add calculated values to the result.
        stats.put("notFound", bqr.getUnresolvedIdentifiers().size());
        stats.put("goodMatches", goodMatchTerms.size());
        stats.put("issueMatchTerms", issueMatchTerms.size());
        // From here on goodMatchTerms holds the UNION of good and issue terms.
        goodMatchTerms.addAll(issueMatchTerms); // Mutation - beware!!
        stats.put("allMatchTerms", goodMatchTerms.size());
        stats.put("matchedObjects", matchedObjects.size());
        stats.put("allMatchedObjects", allMatchedObjects.size());
        // From here on allMatchedObjects holds only the issue-object IDs.
        allMatchedObjects.removeAll(matchedObjects); // Mutation - beware!!
        stats.put("issueObjects", allMatchedObjects.size());
        return stats;
    }

    /**
     * Builds one entry per issue of the given type, each carrying the input
     * identifier, the reason description and the candidate matches.
     */
    private List<Map<String, Object>> getIssues(String issueType, BagQueryResult bqr) {
        final List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
        for (IssueResult issue: bqr.getIssueResults(issueType)) {
            final Map<String, Object> obj = new HashMap<String, Object>();
            final List<Map<String, Object>> matches = new ArrayList<Map<String, Object>>();
            obj.put("input", issue.inputIdent);
            obj.put("reason", issue.queryDesc);
            obj.put("matches", matches);
            for (Object match: issue.results) {
                matches.add(processIssueMatch(match));
            }
            result.add(obj);
        }
        return result;
    }

    /*
     * Dispatch by type to the actual processors.
     * Could be more elegant with a map from type -> Processor, but that would be utter overkill.
     */
    private Map<String, Object> processIssueMatch(Object match) {
        final Map<String, Object> matchObj;
        if (match == null) {
            throw new IllegalStateException("null match returned.");
        } else if (match instanceof Integer) {
            matchObj = processMatch((Integer) match);
        } else if (match instanceof InterMineObject) {
            matchObj = processMatch((InterMineObject) match);
        } else if (match instanceof ConvertedObjectPair) {
            matchObj = processMatch((ConvertedObjectPair) match);
        } else {
            throw new IllegalStateException("Cannot process " + match);
        }
        return matchObj;
    }

    /**
     * Formats a match known only by object ID; the summary is fetched from
     * the object store.
     */
    private Map<String, Object> processMatch(Integer id) {
        Map<String, Object> matchObj = new HashMap<String, Object>();
        matchObj.put("id", id);
        matchObj.put("summary", getObjectDetails(id));
        return matchObj;
    }

    /**
     * Formats a match for an already-materialised object.
     */
    private Map<String, Object> processMatch(InterMineObject imo) {
        Map<String, Object> matchObj = new HashMap<String, Object>();
        matchObj.put("id", imo.getId());
        matchObj.put("summary", getObjectDetails(imo));
        return matchObj;
    }

    /**
     * Formats a type-converted match: the new object's entry gains a "from"
     * sub-entry describing the original object.
     */
    private Map<String, Object> processMatch(ConvertedObjectPair pair) {
        Map<String, Object> matchObj = processMatch(pair.getNewObject());
        Map<String, Object> from = processMatch(pair.getOldObject());
        matchObj.put("from", from);
        return matchObj;
    }

    /**
     * Builds one entry per exact match: object ID, the input terms that
     * matched it, and a field summary.
     */
    private List<Map<String, Object>> getMatches(BagQueryResult bqr) {
        final List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
        for (Entry<Integer, List> match: bqr.getMatches().entrySet()) {
            Map<String, Object> obj = new HashMap<String, Object>();
            obj.put("id", match.getKey());
            obj.put("input", match.getValue());
            obj.put("summary", getObjectDetails(match.getKey()));
            result.add(obj);
        }
        return result;
    }

    /**
     * Looks up the object with the given ID in the object store and summarises
     * it.
     *
     * @throws IllegalArgumentException if objId is null
     * @throws IllegalStateException if the object store lookup fails
     */
    private Map<String, Object> getObjectDetails(Integer objId) {
        InterMineObject imo;
        if (objId == null) throw new IllegalArgumentException("obj cannot be null");
        try {
            imo = im.getObjectStore().getObjectById(objId);
        } catch (ObjectStoreException e) {
            throw new IllegalStateException("Could not retrieve object reported as match", e);
        }
        return getObjectDetails(imo);
    }

    /**
     * Summarises an object: its unqualified class name plus every attribute
     * the web configuration marks for display in summaries.
     */
    private Map<String, Object> getObjectDetails(InterMineObject imo) {
        WebConfig webConfig = InterMineContext.getWebConfig();
        Model m = im.getModel();
        Map<String, Object> objectDetails = new HashMap<String, Object>();
        String className = DynamicUtil.getSimpleClassName(imo.getClass());
        ClassDescriptor cd = m.getClassDescriptorByName(className);
        objectDetails.put("class", cd.getUnqualifiedName());
        for (FieldConfig fc : FieldConfigHelper.getClassFieldConfigs(webConfig, cd)) {
            try {
                Path p = new Path(m, cd.getUnqualifiedName() + "." + fc.getFieldExpr());
                if (p.endIsAttribute() && fc.getShowInSummary()) {
                    objectDetails.put(
                        // Strip the leading class name from the path key.
                        p.getNoConstraintsString().replaceAll("^[^.]*\\.", ""),
                        PathUtil.resolvePath(p, imo));
                }
            } catch (PathException e) {
                LOG.error("Configuration error", e);
            }
        }
        return objectDetails;
    }
}
package soot.jimple.toolkits.typing.fast;
import java.util.Iterator;
import soot.ArrayType;
import soot.BooleanType;
import soot.IntType;
import soot.IntegerType;
import soot.Local;
import soot.RefType;
import soot.SootMethodRef;
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.jimple.AbstractStmtSwitch;
import soot.jimple.AddExpr;
import soot.jimple.AndExpr;
import soot.jimple.ArrayRef;
import soot.jimple.AssignStmt;
import soot.jimple.BinopExpr;
import soot.jimple.BreakpointStmt;
import soot.jimple.CastExpr;
import soot.jimple.CmpExpr;
import soot.jimple.CmpgExpr;
import soot.jimple.CmplExpr;
import soot.jimple.Constant;
import soot.jimple.DivExpr;
import soot.jimple.EnterMonitorStmt;
import soot.jimple.EqExpr;
import soot.jimple.ExitMonitorStmt;
import soot.jimple.FieldRef;
import soot.jimple.GeExpr;
import soot.jimple.GotoStmt;
import soot.jimple.GtExpr;
import soot.jimple.IdentityStmt;
import soot.jimple.IfStmt;
import soot.jimple.InstanceFieldRef;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.InstanceOfExpr;
import soot.jimple.InvokeExpr;
import soot.jimple.InvokeStmt;
import soot.jimple.JimpleBody;
import soot.jimple.LeExpr;
import soot.jimple.LengthExpr;
import soot.jimple.LookupSwitchStmt;
import soot.jimple.LtExpr;
import soot.jimple.MulExpr;
import soot.jimple.NeExpr;
import soot.jimple.NegExpr;
import soot.jimple.NewArrayExpr;
import soot.jimple.NewMultiArrayExpr;
import soot.jimple.NopStmt;
import soot.jimple.NullConstant;
import soot.jimple.OrExpr;
import soot.jimple.RemExpr;
import soot.jimple.ReturnStmt;
import soot.jimple.ReturnVoidStmt;
import soot.jimple.ShlExpr;
import soot.jimple.ShrExpr;
import soot.jimple.Stmt;
import soot.jimple.SubExpr;
import soot.jimple.TableSwitchStmt;
import soot.jimple.ThrowStmt;
import soot.jimple.UshrExpr;
import soot.jimple.XorExpr;
/**
* This checks all uses against the rules in Jimple, except some uses are not
* checked where the bytecode verifier guarantees use validity.
* @author Ben Bellamy
*/
public class UseChecker extends AbstractStmtSwitch
{
private JimpleBody jb;
private Typing tg;
private IUseVisitor uv;
/**
 * Creates a checker for the given Jimple body; the typing and use visitor
 * are supplied later via {@link #check(Typing, IUseVisitor)}.
 *
 * @param jb the body whose statements will be checked
 */
public UseChecker(JimpleBody jb)
{
    this.jb = jb;
}
/**
 * Runs the use check over every statement of the body, applying this
 * switch to each. Stops early as soon as the visitor reports it is
 * finished.
 *
 * @param tg the typing to check against; must not be null
 * @param uv the visitor applied to each checked use
 * @throws IllegalStateException if {@code tg} is null
 */
public void check(Typing tg, IUseVisitor uv)
{
    // Fail fast instead of the old printStackTrace() + System.exit(1):
    // terminating the whole JVM from library code tears down the host
    // application and hides the error from callers.
    if (tg == null)
        throw new IllegalStateException("null typing passed to useChecker");
    this.tg = tg;
    this.uv = uv;
    // Snapshot iterator: the visitor may rewrite units while we iterate.
    for ( Iterator<Unit> i = this.jb.getUnits().snapshotIterator();
        i.hasNext(); )
    {
        if ( uv.finish() )
            return;
        ((Stmt)i.next()).apply(this);
    }
}
/**
 * Checks the uses inside an invoke expression: the receiver (for instance
 * calls) against the declaring class's type, and every argument against
 * its declared parameter type.
 */
private void handleInvokeExpr(InvokeExpr ie, Stmt stmt)
{
    SootMethodRef methodRef = ie.getMethodRef();
    if ( ie instanceof InstanceInvokeExpr )
    {
        // The receiver must conform to the type declaring the method.
        InstanceInvokeExpr instanceInvoke = (InstanceInvokeExpr) ie;
        Value checkedBase = this.uv.visit(
            instanceInvoke.getBase(),
            methodRef.declaringClass().getType(), stmt);
        instanceInvoke.setBase(checkedBase);
    }
    // Each argument must conform to its declared parameter type.
    int argCount = ie.getArgCount();
    for ( int idx = 0; idx < argCount; idx++ )
    {
        Value checkedArg = this.uv.visit(
            ie.getArg(idx), methodRef.parameterType(idx), stmt);
        ie.setArg(idx, checkedArg);
    }
}
/**
 * Checks the operand uses of a binary expression. Which operands are
 * visited (and against which type) depends on the operator family; some
 * families are deliberately left unchecked, mirroring the original
 * assigner's behaviour.
 *
 * @param be   the binary expression to check
 * @param stmt the statement containing it
 * @param tlhs the type expected by the assignment's left-hand side
 */
private void handleBinopExpr(BinopExpr be, Stmt stmt, Type tlhs)
{
    Value opl = be.getOp1(), opr = be.getOp2();
    // Evaluate both operand types up front; they are only consulted in
    // the Eq/Ne branch below.
    Type tl = AugEvalFunction.eval_(this.tg, opl, stmt, this.jb),
        tr = AugEvalFunction.eval_(this.tg, opr, stmt, this.jb);
    if ( be instanceof AddExpr
        || be instanceof SubExpr
        || be instanceof MulExpr
        || be instanceof DivExpr
        || be instanceof RemExpr
        || be instanceof GeExpr
        || be instanceof GtExpr
        || be instanceof LeExpr
        || be instanceof LtExpr
        || be instanceof ShlExpr
        || be instanceof ShrExpr
        || be instanceof UshrExpr )
    {
        // Arithmetic / comparison / shift: when the result is an integer
        // type, both operands are checked as int.
        if ( tlhs instanceof IntegerType )
        {
            be.setOp1(this.uv.visit(opl, IntType.v(), stmt));
            be.setOp2(this.uv.visit(opr, IntType.v(), stmt));
        }
    }
    else if ( be instanceof CmpExpr
        || be instanceof CmpgExpr
        || be instanceof CmplExpr )
    {
        // No checks in the original assigner
    }
    else if ( be instanceof AndExpr
        || be instanceof OrExpr
        || be instanceof XorExpr )
    {
        // Bitwise/logical: operands are checked against the result type
        // itself (may be boolean or an integer type).
        be.setOp1(this.uv.visit(opl, tlhs, stmt));
        be.setOp2(this.uv.visit(opr, tlhs, stmt));
    }
    else if ( be instanceof EqExpr
        || be instanceof NeExpr )
    {
        // Equality: boolean==boolean and comparisons involving the
        // internal Integer1Type need no check; other integer comparisons
        // check both operands as int.
        if ( tl instanceof BooleanType && tr instanceof BooleanType )
        { }
        else if ( tl instanceof Integer1Type || tr instanceof Integer1Type )
        { }
        else if ( tl instanceof IntegerType )
        {
            be.setOp1(this.uv.visit(opl, IntType.v(), stmt));
            be.setOp2(this.uv.visit(opr, IntType.v(), stmt));
        }
    }
}
/**
 * Checks an array reference's index use; array indices are ints in Jimple.
 */
private void handleArrayRef(ArrayRef arrayRef, Stmt stmt)
{
    Value checkedIndex = this.uv.visit(arrayRef.getIndex(), IntType.v(), stmt);
    arrayRef.setIndex(checkedIndex);
}
/**
 * Checks the base use of an instance field reference: the base object must
 * conform to the type of the class declaring the field.
 */
private void handleInstanceFieldRef(InstanceFieldRef fieldRef, Stmt stmt)
{
    Type declaringType = fieldRef.getFieldRef().declaringClass().getType();
    fieldRef.setBase(this.uv.visit(fieldRef.getBase(), declaringType, stmt));
}
/** Breakpoint statements have no uses to check. */
public void caseBreakpointStmt(BreakpointStmt stmt) { }
/** Checks the uses of a stand-alone invoke statement's invoke expression. */
public void caseInvokeStmt(InvokeStmt stmt)
{
    this.handleInvokeExpr(stmt.getInvokeExpr(), stmt);
}
/**
 * Checks both sides of an assignment. First the left-hand side is
 * inspected to determine the expected type {@code tlhs}; then the
 * right-hand side's uses are checked against it, dispatching on the
 * right-hand side's expression kind. Note that the statement's operands
 * may be rewritten in place by the visitor as checking proceeds.
 */
public void caseAssignStmt(AssignStmt stmt)
{
    Value lhs = stmt.getLeftOp();
    Value rhs = stmt.getRightOp();
    Type tlhs = null;
    if ( lhs instanceof Local )
        tlhs = this.tg.get((Local)lhs);
    else if ( lhs instanceof ArrayRef )
    {
        ArrayRef aref = (ArrayRef) lhs;
        Local base = (Local) aref.getBase();
        // Try to force Type integrity. The left side must agree on the
        // element type of the right side array reference.
        ArrayType at;
        if (this.tg.get(base) instanceof ArrayType)
            at = (ArrayType)this.tg.get(base);
        else
            // The base was not typed as an array; wrap its type.
            at = this.tg.get(base).makeArrayType();
        tlhs = at.getElementType();
        this.handleArrayRef(aref, stmt);
        aref.setBase((Local) this.uv.visit(aref.getBase(), at, stmt));
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( lhs instanceof FieldRef )
    {
        tlhs = ((FieldRef)lhs).getFieldRef().type();
        if ( lhs instanceof InstanceFieldRef )
            this.handleInstanceFieldRef((InstanceFieldRef)lhs, stmt);
    }
    // They may have been changed above
    lhs = stmt.getLeftOp();
    rhs = stmt.getRightOp();
    if ( rhs instanceof Local )
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    else if ( rhs instanceof ArrayRef )
    {
        ArrayRef aref = (ArrayRef) rhs;
        Local base = (Local) aref.getBase();
        //try to force Type integrity
        ArrayType at;
        if (this.tg.get(base) instanceof ArrayType)
            at = (ArrayType)this.tg.get(base);
        else
            at = this.tg.get(base).makeArrayType();
        this.handleArrayRef(aref, stmt);
        aref.setBase((Local) this.uv.visit(aref.getBase(), at, stmt));
        // Cast the right side to the type of the left side to match the
        // assignment.
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( rhs instanceof InstanceFieldRef )
    {
        this.handleInstanceFieldRef((InstanceFieldRef)rhs, stmt);
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( rhs instanceof BinopExpr )
        this.handleBinopExpr((BinopExpr)rhs, stmt, tlhs);
    else if ( rhs instanceof InvokeExpr )
    {
        this.handleInvokeExpr((InvokeExpr)rhs, stmt);
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( rhs instanceof CastExpr )
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    else if ( rhs instanceof InstanceOfExpr )
    {
        // The operand of instanceof may be any object reference.
        InstanceOfExpr ioe = (InstanceOfExpr)rhs;
        ioe.setOp(this.uv.visit(
            ioe.getOp(), RefType.v("java.lang.Object"), stmt));
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( rhs instanceof NewArrayExpr )
    {
        // Array sizes are ints.
        NewArrayExpr nae = (NewArrayExpr)rhs;
        nae.setSize(this.uv.visit(nae.getSize(), IntType.v(), stmt));
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( rhs instanceof NewMultiArrayExpr )
    {
        // Each dimension size is an int.
        NewMultiArrayExpr nmae = (NewMultiArrayExpr)rhs;
        for ( int i = 0; i < nmae.getSizeCount(); i++ )
            nmae.setSize(i, this.uv.visit(
                nmae.getSize(i), IntType.v(), stmt));
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( rhs instanceof LengthExpr )
    {
        stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
    }
    else if ( rhs instanceof NegExpr )
    {
        ((NegExpr)rhs).setOp(this.uv.visit(
            ((NegExpr)rhs).getOp(), tlhs, stmt));
    }
    else if ( rhs instanceof Constant )
        // Null constants need no check; other constants are visited.
        if (!(rhs instanceof NullConstant))
            stmt.setRightOp(this.uv.visit(rhs, tlhs, stmt));
}
/** Identity statements need no checking here. */
public void caseIdentityStmt(IdentityStmt stmt) { }
/** The monitor operand must be a reference; visit it as java.lang.Object. */
public void caseEnterMonitorStmt(EnterMonitorStmt stmt)
{
    stmt.setOp(this.uv.visit(
        stmt.getOp(), RefType.v("java.lang.Object"), stmt));
}
/** The monitor operand must be a reference; visit it as java.lang.Object. */
public void caseExitMonitorStmt(ExitMonitorStmt stmt)
{
    stmt.setOp(this.uv.visit(
        stmt.getOp(), RefType.v("java.lang.Object"), stmt));
}
/** Gotos carry no values. */
public void caseGotoStmt(GotoStmt stmt) { }
/** An if condition is a binary expression that must evaluate to a boolean. */
public void caseIfStmt(IfStmt stmt)
{
    this.handleBinopExpr((BinopExpr)stmt.getCondition(), stmt,
        BooleanType.v());
}
/** The switch key must be usable as an int. */
public void caseLookupSwitchStmt(LookupSwitchStmt stmt)
{
    stmt.setKey(this.uv.visit(stmt.getKey(), IntType.v(), stmt));
}
/** Nops carry no values. */
public void caseNopStmt(NopStmt stmt) { }
/** The returned value must conform to the enclosing method's return type. */
public void caseReturnStmt(ReturnStmt stmt)
{
    stmt.setOp(this.uv.visit(
        stmt.getOp(), this.jb.getMethod().getReturnType(), stmt));
}
/** A void return carries no value. */
public void caseReturnVoidStmt(ReturnVoidStmt stmt) { }
/** The switch key must be usable as an int. */
public void caseTableSwitchStmt(TableSwitchStmt stmt)
{
    stmt.setKey(this.uv.visit(stmt.getKey(), IntType.v(), stmt));
}
/** The thrown operand must be a throwable reference. */
public void caseThrowStmt(ThrowStmt stmt)
{
    stmt.setOp(this.uv.visit(
        stmt.getOp(), RefType.v("java.lang.Throwable"), stmt));
}
/**
 * Fallback for statement types not handled above. Reaching this indicates a
 * bug in the caller, so fail loudly with the offending statement class.
 */
public void defaultCase(Stmt stmt)
{
    // FIX: corrected typo in the error message ("stgtement" -> "statement").
    throw new RuntimeException(
        "Unhandled statement type: " + stmt.getClass());
}
} |
package to.etc.domui.component.input;
import java.util.*;
import to.etc.domui.component.meta.*;
import to.etc.domui.dom.html.*;
import to.etc.webapp.nls.*;
public class RadioGroup<T> extends Div implements IControl<T> {
private to.etc.domui.dom.html.RadioGroup<T> m_radioGroup;
private Class<T> m_clz;
private String m_name;
private boolean m_disabled;
private boolean m_mandatory;
private boolean m_readOnly;
private T m_value;
protected Map<RadioButton<T>, T> m_valueMap;
IValueChanged<?> m_valueChanger;
ClassMetaModel m_cmm;
public RadioGroup(Class<T> clz, String name, boolean expandEnum) throws InstantiationException, IllegalAccessException, Exception {
m_radioGroup = new to.etc.domui.dom.html.RadioGroup<T>();
m_clz = clz;
m_name = name;
m_valueMap = new HashMap<RadioButton<T>, T>();
m_valueChanger = null;
m_cmm = MetaManager.findClassMeta(clz);
if (expandEnum)
expandEnum();
}
public RadioGroup(Class<T> clz, String name) throws InstantiationException, IllegalAccessException, Exception {
this(clz,name,false);
}
private RadioButton<T> newDressedRadioButton() {
RadioButton<T> rb = new RadioButton<T>();
rb.setGroup(m_radioGroup);
m_radioGroup.add(rb);
dress(rb);
return rb;
}
public void expandEnum() throws InstantiationException, IllegalAccessException, Exception {
if (m_clz.isEnum()) {
for (T e: m_clz.getEnumConstants()) {
addLabelAndRadio(e);
}
}
}
/**
* Sets the groupglobal properties for a radiobutton in this group
*
* @param rb
*/
private void dress(RadioButton<T> rb) {
rb.setReadOnly(m_readOnly);
rb.setDisabled(m_disabled);
rb.setClicked(new IClicked<NodeBase>() {
@Override
public void clicked(NodeBase clickednode) throws Exception {
setValue(m_valueMap.get(clickednode));
checkSelectedRadio();
}
});
}
@Override
public void add(int index, NodeBase nd) {
if(nd instanceof RadioButton< ? >) {
throw new IllegalStateException("RadioButtons cannot be added, they will be created by the RadioGroup");
}
super.add(index, nd);
}
@Override
public void add(NodeBase nd) {
if(nd instanceof RadioButton< ? >) {
throw new IllegalStateException("RadioButtons cannot be added, they will be created by the RadioGroup");
}
super.add(nd);
}
public void addLabelAndRadio(String label, T object) throws InstantiationException, IllegalAccessException, Exception {
Div labelledradio = new Div();
if (object.getClass().isEnum()) {
label = m_cmm.getDomainLabel(NlsContext.getLocale(), object);
}
if (label == null)
label = object.toString();
RadioButton<T> rb = newDressedRadioButton();
m_valueMap.put(rb, object);
labelledradio.add(rb);
labelledradio.add(label);
add(labelledradio);
}
public void addLabelAndRadio(T object) throws InstantiationException, IllegalAccessException, Exception {
addLabelAndRadio(null, object);
}
public void addAllAsRadio(Collection<T> objects) throws InstantiationException, IllegalAccessException, Exception {
for (T object: objects) {
addLabelAndRadio(object);
}
}
/**
* @see to.etc.domui.dom.html.IDisplayControl#getValue()
*/
@Override
public T getValue() {
return m_value;
}
/**
* @see to.etc.domui.dom.html.IDisplayControl#setValue(java.lang.Object)
*/
@Override
public void setValue(T v) {
if (m_readOnly)
return;
if (v == null && m_value == null)
return;
if (v != null && v.equals(m_value))
return;
m_value = v;
if(getOnValueChanged() != null) {
try {
((IValueChanged<NodeBase>) getOnValueChanged()).onValueChanged(this);
} catch (Exception e) {}
}
checkSelectedRadio();
}
/**
* @see to.etc.domui.dom.html.IHasChangeListener#getOnValueChanged()
*/
@Override
public IValueChanged< ? > getOnValueChanged() {
// TODO Auto-generated method stub
return m_valueChanger;
}
/**
* @see to.etc.domui.dom.html.IHasChangeListener#setOnValueChanged(to.etc.domui.dom.html.IValueChanged)
*/
@Override
public void setOnValueChanged(IValueChanged< ? > onValueChanged) {
m_valueChanger = onValueChanged;
}
/**
* @see to.etc.domui.dom.html.IActionControl#setDisabled(boolean)
*/
@Override
public void setDisabled(boolean d) {
m_disabled = d;
}
/**
* @see to.etc.domui.dom.html.IControl#getValueSafe()
*/
@Override
public T getValueSafe() {
return m_value;
}
/**
* @see to.etc.domui.dom.html.IControl#isReadOnly()
*/
@Override
public boolean isReadOnly() {
// TODO Auto-generated method stub
return m_readOnly;
}
/**
* @see to.etc.domui.dom.html.IControl#setReadOnly(boolean)
*/
@Override
public void setReadOnly(boolean ro) {
m_readOnly = ro;
}
/**
* @see to.etc.domui.dom.html.IControl#isDisabled()
*/
@Override
public boolean isDisabled() {
return m_disabled;
}
/**
* @see to.etc.domui.dom.html.IControl#isMandatory()
*/
@Override
public boolean isMandatory() {
return m_mandatory;
}
/**
* @see to.etc.domui.dom.html.IControl#setMandatory(boolean)
*/
@Override
public void setMandatory(boolean ro) {
m_mandatory = ro;
}
/**
* @see to.etc.domui.dom.html.NodeBase#createContent()
*/
@Override
public void createContent() throws Exception {
//We let the Div do all the hard work. For now, just make sure the right button is checked:
for(RadioButton<T> rb : m_valueMap.keySet()) {
rb.setReadOnly(m_readOnly);
rb.setDisabled(m_disabled);
}
checkSelectedRadio();
super.createContent();
}
/**
* Makes sure the RadioButtons ar ein sync with the value. Changes the checked state of the radiobuttons if needed.
*/
protected void checkSelectedRadio() {
//We let the Div do all the hard work. For now, just make sure the right button is checked:
for(RadioButton<T> rb : m_valueMap.keySet()) {
if (m_valueMap.get(rb).equals(m_value)) {
rb.setChecked(true);
}
}
}
} |
package fr.lip6.jkernelmachines.kernel;
import java.util.List;
import fr.lip6.jkernelmachines.type.TrainingSample;
/**
* Very simple caching method for any kernel. Caches only the Gram matrix of a
* specified list of training samples.
*
* @author picard
*
* @param <T>
*/
public final class SimpleCacheKernel<T> extends Kernel<T> {
private static final long serialVersionUID = -2417905029129394427L;
final private Kernel<T> kernel;
final private double matrix[][];
/**
* Constructor using a kernel and a list of samples
*
* @param k
* the underlying of this caching kernel
* @param l
* the list on which to compute the Gram matrix
*/
public SimpleCacheKernel(Kernel<T> k, List<TrainingSample<T>> l) {
kernel = k;
matrix = k.getKernelMatrix(l);
}
@Override
final public double valueOf(T t1, T t2) {
return kernel.valueOf(t1, t2);
}
@Override
final public double valueOf(T t1) {
return kernel.valueOf(t1);
}
@Override
public double[][] getKernelMatrix(List<TrainingSample<T>> e) {
return matrix;
}
/**
* Returns the underlying kernel
*
* @return the cached kernel
*/
public Kernel<T> getKernel() {
return kernel;
}
@Override
public String toString() {
return kernel.toString();
}
} |
package org.jlib.container.sequence.index;
import org.jlib.container.sequence.AbstractSequenceIteratorState;
import org.jlib.container.sequence.NoSuchSequenceElementException;
import org.jlib.container.sequence.Sequence;
import org.jlib.container.sequence.SequenceIteratorState;
/**
* {@link SequenceIteratorState} of an {@link IndexSequenceIterator}.
*
* @param <Element>
* type of the elements held in the {@link Sequence}
*
* @author Igor Akkerman
*/
public abstract class MiddleOfIndexSequenceIteratorState<Element>
extends AbstractSequenceIteratorState<Element>
implements IndexSequenceIteratorState<Element> {
/** index of the next Element */
private int nextElementIndex;
/** traversed {@link IndexSequence} */
private final IndexSequence<Element> sequence;
private int recentlyReturnedElementIndex;
/**
* Creates a new {@link MiddleOfIndexSequenceIteratorState}.
*
* @param sequence
* traversed {@link IndexSequence}
*/
public MiddleOfIndexSequenceIteratorState(final IndexSequence<Element> sequence) {
super();
this.sequence = sequence;
}
@Override
public boolean hasPrevious() {
return true;
}
@Override
public boolean hasNext() {
return true;
}
@Override
public Element next()
throws NoSuchSequenceElementException {
try {
return getSequenceElement(recentlyReturnedElementIndex = nextElementIndex ++);
}
catch (final SequenceIndexOutOfBoundsException exception) {
throw new NoSuchSequenceElementException(sequence, exception);
}
}
@Override
public Element previous() {
try {
return getSequenceElement(recentlyReturnedElementIndex = nextElementIndex
}
catch (final SequenceIndexOutOfBoundsException exception) {
throw new NoSuchSequenceElementException(sequence, exception);
}
}
/**
* Returns the Element stored at the specified index in the {@link Sequence}
*
* @param elementIndex
* integer specifying the index of the Element
*
* @return Element stored at {@code elementIndex}
*
* @throws SequenceIndexOutOfBoundsException
* if {@code elementIndex} is out of the index bounds
*/
private Element getSequenceElement(final int elementIndex)
throws SequenceIndexOutOfBoundsException {
return sequence.get(elementIndex);
}
@Override
public IndexSequenceIteratorState<Element> getPreviousState() {
return getReturnedElementState();
}
@Override
public IndexSequenceIteratorState<Element> getNextState() {
return getReturnedElementState();
}
/**
* Returns the new {@link SequenceIteratorState} after returning an Element.
*
* @return new {@link SequenceIteratorState}
*/
protected abstract IndexSequenceIteratorState<Element> getReturnedElementState();
@Override
public int getPreviousElementIndex() {
return nextElementIndex - 1;
}
@Override
public int getNextElementIndex() {
return nextElementIndex;
}
/**
* Registers the index of the next Element of the {@link IndexSequence}.
*
* @param nextElementIndex
* integer specifying the index of the next Element
*/
public void setNextElementIndex(final int nextElementIndex) {
this.nextElementIndex = nextElementIndex;
}
} |
package gate.plugin.evaluation.resources;
import gate.plugin.evaluation.api.ContainmentType;
import gate.plugin.evaluation.api.NilTreatment;
import gate.Annotation;
import gate.AnnotationSet;
import gate.Controller;
import gate.Factory;
import gate.FeatureMap;
import gate.Resource;
import gate.Utils;
import gate.annotation.AnnotationSetImpl;
import gate.annotation.ImmutableAnnotationSetImpl;
import gate.creole.AbstractLanguageAnalyser;
import gate.creole.ControllerAwarePR;
import gate.creole.CustomDuplication;
import gate.creole.ExecutionException;
import gate.creole.ResourceInstantiationException;
import gate.creole.metadata.CreoleParameter;
import gate.creole.metadata.CreoleResource;
import gate.creole.metadata.Optional;
import gate.creole.metadata.RunTime;
import gate.plugin.evaluation.api.AnnotationDifferTagging;
import gate.plugin.evaluation.api.AnnotationDifferTagging.CandidateList;
import gate.plugin.evaluation.api.ByThEvalStatsTagging;
import gate.plugin.evaluation.api.ContingencyTableInteger;
import gate.plugin.evaluation.api.EvalStatsTagging;
import gate.plugin.evaluation.api.EvalStatsTaggingMacro;
import gate.plugin.evaluation.api.FeatureComparison;
import gate.plugin.evaluation.api.ThresholdsToUse;
import gate.util.Files;
import gate.util.GateRuntimeException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
// TODO: add a list for getting the distribution of ranks/score of first lenient/exact match
// if we process lists.
// This should get output as a separate tsv both per document and over all documents
// Maybe only if enabled with a parameter.
// If we process lists, also output per-document information about what the best is we can do
// in that document with the candidate lists we have.
// TODO: add a datastructure for incremental update of the contingency tables needed for
// McNemar etc, if we have a reference set.
// These should get output at the end with values over all documents
// TODO: think about how this should deal with parallelization and custom duplication.
// At the moment this will not work properly or even fail when run from duplicated pipelines.
// In the best case, each duplicate will just report the statistics on its own subset of documents,
// in the worst case, some resources will override each other (e.g. when writing from each
// duplicate to the tsv files).
// The proper way to deal with this requires that we have a standard way of how to share
// data between those custom duplicated instances of the PR which belong to the same job (in a
// larger VM, there could be duplicates that belong to different jobs between which we do not want
// to share data).
// For now, we just prevent the multithreaded use of this PR by throwing an exception if an
// attempt is made to custom-duplicate it.
// Later on, we could change this such that all duplicated instances just call the methods of
// the original in a synchronized way.
// Finally, once we can calculate overall stats on all stats objects (including the byThreshold
// objects) we can try to support multi-threading in the following way:
// = all per-document io needs to go through a single instance synchronized method, or each
// instance creates their own output file
// = each duplicate creates their own stats objects over all documents it sees
// = there is a way to access all the stats objects of all the other duplicates somehow, or
// ideally there is a way to do this for just the "original"
// = the controllerExecutionFinished method either knows if it is running for the original or
// can somehow make sure that the actual finishing code is only run for one of the duplicates.
// That finishing code would get the stats objects from all duplicates and merge/sum them,
// then output the final statistics.
// TODO(!!!): Add a java class for holding counts or instances of pairings between the reference set
// and the response set so we can calculate p-values for the SingleResponse accuracy analysis.
// This could be used to get p-values for the McNemar test and the paired t-test.
// TODO: make the output directory URL more easily parametrizable by making the runtime parameter
// an OR parameter with a String version which can then be expandable like the other String
// parameters.
/**
*
* @author Johann Petrak
*/
@CreoleResource(
name = "EvaluateTagging",
helpURL ="https://github.com/johann-petrak/gateplugin-Evaluation/wiki/EvaluateTagging-PR",
comment = "Calculate P/R/F evalutation measures for documents")
public class EvaluateTagging extends AbstractLanguageAnalyser
implements ControllerAwarePR, CustomDuplication {
/// PR PARAMETERS
private String keyASName;
@CreoleParameter (comment="The name of the annotation set that contains the target/key annotations (gold standard)", defaultValue="Key")
@RunTime
public void setKeyASName(String name) { keyASName = name; }
public String getKeyASName() { return keyASName; }
public String getExpandedKeyASName() { return Utils.replaceVariablesInString(getKeyASName()); }
private String responseASName;
@CreoleParameter (comment="The name of the annotation set that contains the response annotations",defaultValue ="Response")
@RunTime
public void setResponseASName(String name) { responseASName = name; }
public String getResponseASName() { return responseASName; }
public String getExpandedResponseASName() { return Utils.replaceVariablesInString(getResponseASName()); }
private String referenceASName;
@CreoleParameter (comment="The name of the annotation set that contains the reference/old response annotations. Empty means no reference set.")
@Optional
@RunTime
public void setReferenceASName(String name) { referenceASName = name; }
public String getReferenceASName() { return referenceASName; }
public String getExpandedReferenceASName() { return Utils.replaceVariablesInString(getReferenceASName()); }
private String containingASNameAndType;
@CreoleParameter (comment="The name of the restricting annotation set and the name of the type in the form asname:typename")
@Optional
@RunTime
public void setContainingASNameAndType(String name) { containingASNameAndType = name; }
public String getContainingASNameAndType() { return containingASNameAndType; }
public String getExpandedContainingASNameAndType() { return Utils.replaceVariablesInString(getContainingASNameAndType()); }
private ContainmentType containmentType;
@CreoleParameter (comment="How the responses are restricted to the annotations of the containingASNameAndType",defaultValue="OVERLAPPING")
@Optional
@RunTime
// FIX: the assignment was reversed (ct = containmentType), so the parameter
// value set from the GUI/pipeline was silently discarded and the field stayed
// null (execute code falls back to OVERLAPPING when it is null).
public void setContainmentType(ContainmentType ct) { containmentType = ct; }
public ContainmentType getContainmentType() { return containmentType; }
private List<String> annotationTypes;
@CreoleParameter (comment="The annotation types to use for evaluations, at least one type must be given",defaultValue="Mention")
@RunTime
public void setAnnotationTypes(List<String> name) { annotationTypes = name; }
public List<String> getAnnotationTypes() { return annotationTypes; }
private List<String> featureNames;
private Set<String> featureSet;
@CreoleParameter (comment="A list of feature names to use for comparison, can be empty. First is used as the id feature, if necessary.")
@RunTime
@Optional
public void setFeatureNames(List<String> names) {
featureNames = names;
if(featureNames != null) {
featureSet = new HashSet<String>(featureNames);
}
}
public List<String> getFeatureNames() { return featureNames; }
public FeatureComparison featureComparison;
@CreoleParameter(comment="",defaultValue="FEATURE_EQUALITY")
@RunTime
@Optional
public void setFeatureComparison(FeatureComparison value) { featureComparison = value; }
public FeatureComparison getFeatureComparison() { return featureComparison; }
/*
private List<String> byValueFeatureNames;
@CreoleParameter (comment="A list of feature names to use for breaking up the evaluation (NOT IMPLEMENTED YET)")
@RunTime
@Optional
public void setByValueFeatureNames(List<String> names) { byValueFeatureNames = names; }
public List<String> getByValueFeatureNames() { return byValueFeatureNames; }
*/
public String listIdFeatureName;
@CreoleParameter(comment="",defaultValue="")
@RunTime
@Optional
public void setListIdFeatureName(String value) { listIdFeatureName = value; }
public String getListIdFeatureName() { return listIdFeatureName; }
public String getExpandedListIdFeatureName() { return Utils.replaceVariablesInString(getListIdFeatureName()); }
private String scoreFeatureName;
@CreoleParameter (comment="The name of the feature which contains a numeric score or confidence. If specified will generated P/R curve.")
@Optional
@RunTime
public void setScoreFeatureName(String name) { scoreFeatureName = name; }
public String getScoreFeatureName() { return scoreFeatureName; }
public String getExpandedScoreFeatureName() { return Utils.replaceVariablesInString(getScoreFeatureName()); }
private String outputASPrefix;
@CreoleParameter (comment="The name of the annotation set for creating descriptive annotations. If empty, no annotations are created.")
@Optional
@RunTime
public void setOutputASPrefix(String name) { outputASPrefix = name; }
public String getOutputASPrefix() { return outputASPrefix; }
public String getExpandedOutputASPrefix() { return Utils.replaceVariablesInString(getOutputASPrefix()); }
public String featureNameNilCluster;
@CreoleParameter(comment = "", defaultValue = "")
@RunTime
@Optional
public void setFeatureNameNilCluster(String value) { featureNameNilCluster = value; }
public String getFeatureNameNilCluster() { return featureNameNilCluster; }
public String getExpandedFeatureNameNilCluster() { return Utils.replaceVariablesInString(getFeatureNameNilCluster()); }
public NilTreatment nilTreatment;
@CreoleParameter(comment="",defaultValue="NO_NILS")
@RunTime
@Optional
public void setNilTreatment(NilTreatment value) { nilTreatment = value; }
public NilTreatment getNilTreatment() { return nilTreatment; }
public String nilValue;
@CreoleParameter(comment="",defaultValue="")
@RunTime
@Optional
public void setNilValue(String value) { nilValue = value; }
public String getNilValue() { return nilValue; }
public String getExpandedNilValue() { return Utils.replaceVariablesInString(getNilValue()); }
public URL outputDirectoryUrl;
@CreoleParameter(comment="",defaultValue="")
@RunTime
@Optional
public void setOutputDirectoryUrl(URL value) { outputDirectoryUrl = value; }
public URL getOutputDirectoryUrl() { return outputDirectoryUrl; }
public String evaluationId;
@CreoleParameter(comment="",defaultValue="")
@RunTime
@Optional
public void setEvaluationId(String value) { evaluationId = value; }
public String getEvaluationId() { return evaluationId == null ? "" : evaluationId; }
public String getExpandedEvaluationId() { return Utils.replaceVariablesInString(getEvaluationId()); }
public ThresholdsToUse whichThresholds;
@CreoleParameter(comment="",defaultValue="USE_ALL")
@RunTime
@Optional
public void setWhichThresholds(ThresholdsToUse value) { whichThresholds = value; }
public ThresholdsToUse getWhichThresholds() { return whichThresholds; }
// PR METHODS
@Override
public Resource init() {
  // No init-time work: all parameters are runtime parameters, so the actual
  // set-up happens lazily on the first execute() (see needInitialization).
  return this;
}
@Override
public void reInit() {
  init();
}
@Override
public void cleanup() {
  // Nothing to release.
}
// fields shared between the execute method and the methods for initializing and finalization
protected Map<String,EvalStatsTagging> allDocumentsStats;
protected Map<String,EvalStatsTagging> allDocumentsReferenceStats = null;
protected ContingencyTableInteger correctnessTableStrict;
protected ContingencyTableInteger correctnessTableLenient;
// This will be initialized at the start of the run and be incremented in the AnnotationDifferTagging
// for each document.
// This stores, for each type, the ByThEvalStatsTagging object for that type. The empty string
// is used for the object that has the values over all types combined.
protected Map<String,ByThEvalStatsTagging> evalStatsByThreshold;
protected static final String initialFeaturePrefixResponse = "evaluateTagging.response.";
protected static final String initialFeaturePrefixReference = "evaluateTagging.reference.";
protected String featurePrefixResponse;
protected String featurePrefixReference;
protected boolean doListEvaluation = false;
protected boolean doScoreEvaluation = false;
String expandedKeySetName;
String expandedResponseSetName;
String expandedReferenceSetName;
String expandedContainingNameAndType;
String expandedListIdFeatureName;
String expandedScoreFeatureName;
String expandedOutputASPrefix;
String outputASResName = "";
String outputASRefName = "";
String outputASDiffName = "";
String expandedFeatureNameNilCluster;
String expandedNilValue;
String expandedEvaluationId;
protected static final Logger logger = Logger.getLogger(EvaluateTagging.class);
@Override
public void execute() {
  // Lazy one-time initialization on the first document (the parameters are
  // runtime parameters, so they are only reliable at execution time).
  if(needInitialization) {
    needInitialization = false;
    initializeForRunning();
  }
  // Per document initialization
  // Prepare the annotation sets
  // run the whole thing once for each type and also for the sets where all the specified types
  // are contained.
  // First do it for all types together, but only if more than one type was specified
  AnnotationSet keySet = null;
  AnnotationSet responseSet = null;
  AnnotationSet referenceSet = null;
  Set<String> typeSet = new HashSet<String>();
  Set<String> typeSet4ListAnns = new HashSet<String>();
  if(getAnnotationTypes().size() > 1) {
    if(doListEvaluation) {
      // In list mode the response/reference annotations carry a "List" suffix
      // on the configured type names.
      for(String t : getAnnotationTypes()) {
        typeSet4ListAnns.add(t+"List");
      }
    }
    typeSet.addAll(getAnnotationTypes());
    keySet = document.getAnnotations(expandedKeySetName).get(typeSet);
    if(doListEvaluation) {
      responseSet = document.getAnnotations(expandedResponseSetName).get(typeSet4ListAnns);
    } else {
      responseSet = document.getAnnotations(expandedResponseSetName).get(typeSet);
    }
    if(!expandedReferenceSetName.isEmpty()) {
      if(doListEvaluation) {
        referenceSet = document.getAnnotations(expandedReferenceSetName).get(typeSet4ListAnns);
      } else {
        referenceSet = document.getAnnotations(expandedReferenceSetName).get(typeSet);
      }
    }
    // The empty type name stands for "all configured types combined".
    evaluateForType(keySet,responseSet,referenceSet,"");
  }
  // now do it for each type separately
  for(String type : getAnnotationTypes()) {
    keySet = document.getAnnotations(expandedKeySetName).get(type);
    // Keep the original (un-suffixed) type name for reporting.
    String origType = type;
    if(doListEvaluation) {
      type = type + "List";
    }
    responseSet = document.getAnnotations(expandedResponseSetName).get(type);
    if(!expandedReferenceSetName.isEmpty()) {
      referenceSet = document.getAnnotations(expandedReferenceSetName).get(type);
    }
    evaluateForType(keySet,responseSet,referenceSet,origType);
  }
}
protected void evaluateForType(
AnnotationSet keySet, AnnotationSet responseSet, AnnotationSet referenceSet,
String type) {
AnnotationSet containingSet = null;
String containingSetName = "";
String containingType = "";
if(!expandedContainingNameAndType.isEmpty()) {
String[] setAndType = expandedContainingNameAndType.split(":",2);
if(setAndType.length != 2 || setAndType[0].isEmpty() || setAndType[1].isEmpty()) {
throw new GateRuntimeException("Runtime Parameter continingASAndName not of the form setname:typename");
}
containingSetName = setAndType[0];
containingType = setAndType[1];
containingSet = document.getAnnotations(setAndType[0]).get(setAndType[1]);
// now filter the keys and responses. If the containing set/type is the same as the key set/type,
// do not filter the keys.
ContainmentType ct = containmentType;
if(ct == null) ct = ContainmentType.OVERLAPPING;
responseSet = selectOverlappingBy(responseSet,containingSet,ct);
// TODO: at the moment this will never be true since we have changed the single type to a list
// of types. Thing about when to not do this ...
if(containingSetName.equals(expandedKeySetName) && containingType.equals(annotationTypes)) {
// no need to do anything for the key set
} else {
keySet = selectOverlappingBy(keySet,containingSet,ct);
}
// if we have a reference set, we need to apply the same filtering to that one too
if(referenceSet != null) {
referenceSet = selectOverlappingBy(referenceSet,containingSet,ct);
}
} // have a containing set and type
// TODO: to get the best candidate we need to have the candidates already sorted!
// So we should better do the evaluation over the top element after the candidate lists
// have been created and we should refactor things so that creating the candidate lists
// is a separate step!
// Then we create the candidate lists here, then pass the already created candidate lists
// to the static method for calculating the lists evaluation!
AnnotationSet listAnns = null;
AnnotationSet listAnnsReference = null;
List<CandidateList> candList = null;
if(doListEvaluation) {
listAnns = responseSet;
listAnnsReference = referenceSet;
candList =
AnnotationDifferTagging.createCandidateLists(
document.getAnnotations(expandedResponseSetName),
listAnns, expandedListIdFeatureName, expandedScoreFeatureName);
// get the highest scored annotation from each list
responseSet = new AnnotationSetImpl(listAnns.getDocument());
if(referenceSet != null) {
referenceSet = new AnnotationSetImpl(listAnnsReference.getDocument());
}
for(CandidateList cl : candList) {
responseSet.add(cl.get(0));
}
}
// Now depending on the NIL processing strategy, do something with those annotations which
// are identified as nil in the key and response sets.
// NO_NILS: do nothing, all keys and responses are taken as is. If there are special NIL
// values in the key set, the responses must match them like any other value. Parameter nilValue
// is ignored here.
// NIL_IS_ABSENT:
// In this case, all key and response annotation which are NIL are removed before the
// evaluation is carried out.
// NIL_CLUSTERS:
// In this case, a missing response does not equal a key nil, because we need to provide
// a label to be correct. Parameter nilValue is used so we know which keys and responses
// are nils.
// We match all non-NILs in the usual way, ignoring all the NILS both in the key and
// response sets. We accumulate all the NIL annotations over all documents and after all
// documents have been processed, we try to find an optimal assignment between them, based
// on the NIL labels.
// TODO!!!
// Nils can only be represented if there is an id feature. If there is one and we treat
// nils as absent, lets remove all the nils.
if(getFeatureNames() != null && getFeatureNames().size() > 0 && getNilTreatment().equals(NilTreatment.NIL_IS_ABSENT)) {
removeNilAnns(keySet);
removeNilAnns(responseSet);
if(referenceSet != null) {
removeNilAnns(referenceSet);
}
}
AnnotationDifferTagging docDiffer = new AnnotationDifferTagging(
keySet,
responseSet,
featureSet,
featureComparison
);
EvalStatsTagging es = docDiffer.getEvalStatsTagging();
if(doScoreEvaluation) {
ByThEvalStatsTagging bth = evalStatsByThreshold.get(type);
AnnotationDifferTagging.calculateByThEvalStatsTagging(
keySet, responseSet, featureSet, featureComparison, expandedScoreFeatureName, bth.getWhichThresholds(), bth);
}
if(doListEvaluation) {
ByThEvalStatsTagging bth = evalStatsByThreshold.get(type);
AnnotationDifferTagging.calculateListByThEvalStatsTagging(
keySet,
document.getAnnotations(expandedResponseSetName),
candList, featureSet, featureComparison,
expandedListIdFeatureName, expandedScoreFeatureName, bth.getWhichThresholds(), bth);
}
// Store the counts and measures as document feature values
FeatureMap docFm = document.getFeatures();
String featurePrefixResponseT = featurePrefixResponse;
if(type.isEmpty()) {
featurePrefixResponseT += "[ALL].";
} else {
featurePrefixResponseT += (type + ".");
}
docFm.put(featurePrefixResponseT+"FMeasureStrict", es.getFMeasureStrict(1.0));
docFm.put(featurePrefixResponseT+"FMeasureLenient", es.getFMeasureLenient(1.0));
docFm.put(featurePrefixResponseT+"PrecisionStrict", es.getPrecisionStrict());
docFm.put(featurePrefixResponseT+"PrecisionLenient", es.getPrecisionLenient());
docFm.put(featurePrefixResponseT+"RecallStrict", es.getRecallStrict());
docFm.put(featurePrefixResponseT+"RecallLenient", es.getRecallLenient());
docFm.put(featurePrefixResponseT+"SingleCorrectAccuracyStrict", es.getSingleCorrectAccuracyStrict());
docFm.put(featurePrefixResponseT+"SingleCorrectAccuracyLenient", es.getSingleCorrectAccuracyLenient());
docFm.put(featurePrefixResponseT+"CorrectStrict", es.getCorrectStrict());
docFm.put(featurePrefixResponseT+"CorrectPartial", es.getCorrectPartial());
docFm.put(featurePrefixResponseT+"IncorrectStrict", es.getIncorrectStrict());
docFm.put(featurePrefixResponseT+"IncorrectPartial", es.getIncorrectPartial());
docFm.put(featurePrefixResponseT+"TrueMissingStrict", es.getTrueMissingStrict());
docFm.put(featurePrefixResponseT+"TrueMissingLenient", es.getTrueMissingLenient());
docFm.put(featurePrefixResponseT+"TrueSpuriousStrict", es.getTrueSpuriousStrict());
docFm.put(featurePrefixResponseT+"TrueSpuriousLenient", es.getTrueSpuriousLenient());
docFm.put(featurePrefixResponseT+"Targets", es.getTargets());
docFm.put(featurePrefixResponseT+"Responses", es.getResponses());
logger.debug("DEBUG: type is "+type);
logger.debug("DEBUG: all document stats types "+allDocumentsStats.keySet());
allDocumentsStats.get(type).add(es);
// Now if we have parameters to record the matchings, get the information from the docDiffer
// and create the appropriate annotations.
AnnotationSet outputAnnotationSet = null;
if(!outputASResName.isEmpty()) {
outputAnnotationSet = document.getAnnotations(outputASResName);
docDiffer.addIndicatorAnnotations(outputAnnotationSet,"");
}
// If we have a reference set, also calculate the stats for the reference set
EvalStatsTagging res = null;
if(referenceSet != null) {
AnnotationDifferTagging docRefDiffer = new AnnotationDifferTagging(
keySet,
referenceSet,
featureSet,
featureComparison
);
res = docRefDiffer.getEvalStatsTagging();
allDocumentsReferenceStats.get(type).add(res);
// if we need to record the matchings, also add the annotations for how things changed
// between the reference set and the response set.
if(!outputASRefName.isEmpty()) {
outputAnnotationSet = document.getAnnotations(outputASRefName);
docRefDiffer.addIndicatorAnnotations(outputAnnotationSet,"");
// Now add also the annotations that indicate the changes between the reference set and
// the response set
outputAnnotationSet = document.getAnnotations(outputASDiffName);
AnnotationDifferTagging.addChangesIndicatorAnnotations(docDiffer, docRefDiffer, outputAnnotationSet);
}
// TODO: increment the overall counts of how things changed
AnnotationDifferTagging.addChangesToContingenyTables(docDiffer, docRefDiffer, correctnessTableStrict, correctnessTableLenient);
// add document features for the reference set
String featurePrefixReferenceT = featurePrefixReference;
if(type.isEmpty()) {
featurePrefixReferenceT += "[ALL].";
} else {
featurePrefixReferenceT += type;
}
docFm.put(featurePrefixReferenceT + "FMeasureStrict", res.getFMeasureStrict(1.0));
docFm.put(featurePrefixReferenceT + "FMeasureLenient", res.getFMeasureLenient(1.0));
docFm.put(featurePrefixReferenceT + "PrecisionStrict", res.getPrecisionStrict());
docFm.put(featurePrefixReferenceT + "PrecisionLenient", res.getPrecisionLenient());
docFm.put(featurePrefixReferenceT + "RecallStrict", res.getRecallStrict());
docFm.put(featurePrefixReferenceT + "RecallLenient", res.getRecallLenient());
docFm.put(featurePrefixReferenceT + "SingleCorrectAccuracyStrict", res.getSingleCorrectAccuracyStrict());
docFm.put(featurePrefixReferenceT + "SingleCorrectAccuracyLenient", res.getSingleCorrectAccuracyLenient());
docFm.put(featurePrefixReferenceT + "CorrectStrict", res.getCorrectStrict());
docFm.put(featurePrefixReferenceT + "CorrectPartial", res.getCorrectPartial());
docFm.put(featurePrefixReferenceT + "IncorrectStrict", res.getIncorrectStrict());
docFm.put(featurePrefixReferenceT + "IncorrectPartial", res.getIncorrectPartial());
docFm.put(featurePrefixReferenceT + "TrueMissingStrict", res.getTrueMissingStrict());
docFm.put(featurePrefixReferenceT + "TrueMissingLenient", res.getTrueMissingLenient());
docFm.put(featurePrefixReferenceT + "TrueSpuriousStrict", res.getTrueSpuriousStrict());
docFm.put(featurePrefixReferenceT + "TrueSpuriousLenient", res.getTrueSpuriousLenient());
docFm.put(featurePrefixReferenceT + "Targets", res.getTargets());
docFm.put(featurePrefixReferenceT + "Responses", res.getResponses());
}
if(mainTsvPrintStream != null) {
// a line for the response stats for that document
mainTsvPrintStream.println(outputTsvLine(document.getName(), type, expandedResponseSetName, es));
if(res != null) {
mainTsvPrintStream.println(outputTsvLine(document.getName(), type, expandedReferenceSetName, res));
}
}
}
/**
 * Return the evaluation statistics for the given type, or over all types when
 * the empty string is passed.
 * If a type name is passed which was not used for the evaluation, null is returned.
 * @param type the annotation type, or "" for the overall statistics
 * @return the accumulated statistics, or null for an unknown type
 */
public EvalStatsTagging getEvalStatsTagging(String type) {
  // With exactly one annotation type no separate over-all ("") entry was
  // created during initialization; the per-type statistics double as the
  // overall statistics, so redirect the lookup to that single type.
  String lookupType = type;
  if(lookupType.equals("") && getAnnotationTypes().size() == 1) {
    lookupType = getAnnotationTypes().get(0);
  }
  return allDocumentsStats.get(lookupType);
}
/**
 * Return the evaluation statistics for the reference set for the given type,
 * or over all types when the empty string is passed.
 * If a type name is passed which was not used for the evaluation, null is
 * returned. If no reference set was specified, null is returned.
 * @param type the annotation type, or "" for the overall statistics
 * @return the accumulated reference-set statistics, or null
 */
public EvalStatsTagging getEvalStatsTaggingReference(String type) {
  // Without a configured reference set there are no reference statistics.
  String refSetName = getReferenceASName();
  if(refSetName == null || refSetName.isEmpty()) {
    return null;
  }
  // With exactly one annotation type no separate over-all ("") entry was
  // created; the per-type statistics double as the overall statistics.
  String lookupType = type;
  if(lookupType.equals("") && getAnnotationTypes().size() == 1) {
    lookupType = getAnnotationTypes().get(0);
  }
  return allDocumentsReferenceStats.get(lookupType);
}
/**
 * Get the evaluation statistics by threshold for the given type, or over all
 * types when the empty string is passed.
 * @param type the annotation type, or "" for the overall statistics
 * @return the by-threshold statistics object, or null for an unknown type
 */
public ByThEvalStatsTagging getByThEvalStatsTagging(String type) {
  // With exactly one annotation type no separate over-all ("") entry was
  // created; the per-type statistics double as the overall statistics.
  String lookupType = type;
  if(lookupType.equals("") && getAnnotationTypes().size() == 1) {
    lookupType = getAnnotationTypes().get(0);
  }
  return evalStatsByThreshold.get(lookupType);
}
/// HELPER METHODS
/**
 * Return a new annotation set with all NIL annotations removed.
 * An annotation counts as NIL when the value of the id feature (the first
 * entry of the feature names list), converted to a string, equals the
 * configured nil value (or the empty string when no nil value is set).
 * The passed set itself is not modified; the removal happens on a copy.
 * @param set the set to filter; must come from a context where getFeatureNames()
 *   is non-empty (checked by the caller)
 * @return a copy of the set with the NIL annotations removed
 */
private AnnotationSet removeNilAnns(AnnotationSet set) {
  // The string that marks an annotation as NIL; empty string when no
  // explicit nil value was configured.
  String nilStr = "";
  // FIX: the original code assigned getNilValue() back to the nilValue field
  // (a no-op), leaving nilStr permanently "" so the configured nil value was
  // never actually used for the comparison below.
  if(getNilValue() != null) { nilStr = getNilValue(); }
  // By convention the first feature name is the id feature carrying the label.
  String idFeature = getFeatureNames().get(0);
  Set<Annotation> nils = new HashSet<Annotation>();
  for (Annotation ann : set) {
    Object val = ann.getFeatures().get(idFeature);
    // A missing feature value is treated like the empty string.
    String valStr = val == null ? "" : val.toString();
    if (valStr.equals(nilStr)) {
      nils.add(ann);
    }
  }
  // Work on a copy so the caller's set is untouched.
  // NOTE(review): the call sites appear to discard this return value, in which
  // case the removal has no effect on the sets actually evaluated — confirm
  // against the callers.
  AnnotationSet newset = new AnnotationSetImpl(set);
  newset.removeAll(nils);
  return newset;
}
private PrintStream mainTsvPrintStream;
/**
 * Create and open a print stream to the file where the TSV rows should get
 * written to.
 * If no output directory was specified, this returns null.
 * Otherwise it returns a stream that writes to a file in the output directory
 * that has the name "EvaluateTagging-ID[-suffix].tsv" where "ID" is the value
 * of the evaluationId parameter. If the evaluationId parameter is not set,
 * the base file name is "EvaluateTagging".
 * @param suffix optional file-name suffix; ignored when null or empty
 * @return an open PrintStream, or null when no output directory is configured
 * @throws GateRuntimeException if the output directory is missing or not a directory,
 *   or if the file cannot be opened
 */
private PrintStream getOutputStream(String suffix) {
  // No output directory configured: TSV output is disabled.
  if(getOutputDirectoryUrl() == null) {
    return null;
  }
  File dir = Files.fileFromURL(getOutputDirectoryUrl());
  if(!dir.exists()) {
    // FIX: corrected grammar of the error message ("does not exists").
    throw new GateRuntimeException("Output directory does not exist: "+getOutputDirectoryUrl());
  }
  if(!dir.isDirectory()) {
    throw new GateRuntimeException("Not a directory: "+getOutputDirectoryUrl());
  }
  // Build the file name from the evaluation id and the optional suffix.
  String fname = getStringOrElse(getEvaluationId(), "").equals("")
          ? "EvaluateTagging.tsv" : "EvaluateTagging-"+getEvaluationId();
  if(suffix != null && !suffix.isEmpty()) {
    fname += "-"+suffix;
  }
  fname += ".tsv";
  File outFile = new File(dir,fname);
  FileOutputStream os = null;
  try {
    os = new FileOutputStream(outFile);
  } catch (FileNotFoundException ex) {
    // Preserve the cause so the failure reason is visible to the caller.
    throw new GateRuntimeException("Could not open output file "+outFile,ex);
  }
  return new PrintStream(os);
}
// True until initializeForRunning() has run for the current controller run;
// reset to true again by the controller callbacks so the next run re-initializes.
private boolean needInitialization = true;
// This needs to run as part of the first execute, since at the moment, the parametrization
// does not work correctly with the controller callbacks.
/**
 * Set up all derived settings and accumulation data structures before the
 * first document of a run is processed.
 * Normalizes the expanded parameters, validates the type/feature lists,
 * creates the per-type statistics maps (plus reference-set statistics and
 * contingency tables when a reference set is configured, and by-threshold
 * statistics when a score feature is configured), opens the main TSV output
 * stream and writes its header line.
 * @throws GateRuntimeException on invalid type/feature configuration or
 *   duplicate feature names
 */
private void initializeForRunning() {
  // Normalize all optional (possibly null) expanded parameters to "".
  expandedKeySetName = getStringOrElse(getExpandedKeyASName(), "");
  expandedResponseSetName = getStringOrElse(getExpandedResponseASName(),"");
  expandedReferenceSetName = getStringOrElse(getExpandedReferenceASName(),"");
  expandedContainingNameAndType = getStringOrElse(getExpandedContainingASNameAndType(),"");
  expandedEvaluationId = getStringOrElse(getExpandedEvaluationId(),"");
  expandedFeatureNameNilCluster = getStringOrElse(getExpandedFeatureNameNilCluster(),"");
  expandedListIdFeatureName = getStringOrElse(getExpandedListIdFeatureName(),"");
  expandedNilValue = getStringOrElse(getExpandedNilValue(),"");
  expandedOutputASPrefix = getStringOrElse(getExpandedOutputASPrefix(),"");
  // Derive the names of the output annotation sets from the configured prefix;
  // the reference/diff sets are only named when a reference set is in use.
  if(!expandedOutputASPrefix.isEmpty()) {
    outputASResName = expandedOutputASPrefix+"_Res";
    if(!expandedReferenceSetName.isEmpty()) {
      outputASRefName = expandedOutputASPrefix+"_Ref";
      outputASDiffName = expandedOutputASPrefix+"_Diff";
    }
  }
  expandedScoreFeatureName = getStringOrElse(getExpandedScoreFeatureName(),"");
  // Validate the annotation type and feature name lists.
  if(getAnnotationTypes() == null || getAnnotationTypes().isEmpty()) {
    throw new GateRuntimeException("List of annotation types to use is not specified or empty!");
  }
  for(String t : getAnnotationTypes()) {
    if(t == null || t.isEmpty()) {
      throw new GateRuntimeException("List of annotation types to use contains a null or empty type name!");
    }
  }
  if(getFeatureNames() != null) {
    for(String t : getFeatureNames()) {
      if(t == null || t.isEmpty()) {
        throw new GateRuntimeException("List of feature names to use contains a null or empty type name!");
      }
    }
  }
  // "" is the key for the over-all statistics; only needed when more than one
  // annotation type is evaluated (with one type, its stats serve as overall).
  List<String> typesPlusEmpty = new ArrayList<String>();
  if(getAnnotationTypes().size() > 1) {
    typesPlusEmpty.add("");
  }
  //create the data structure that hold an evalstats object over all documents for each type
  allDocumentsStats = new HashMap<String, EvalStatsTagging>();
  // if we also have a reference set, create the data structure that holds an evalstats object
  // over all documents for each type. This is left null if no reference set is specified!
  if(!expandedReferenceSetName.isEmpty()) {
    allDocumentsReferenceStats = new HashMap<String, EvalStatsTagging>();
    // 2x2 tables cross-tabulating reference correctness vs. response correctness.
    correctnessTableStrict = new ContingencyTableInteger(2, 2);
    correctnessTableLenient = new ContingencyTableInteger(2, 2);
    correctnessTableLenient.setName(expandedEvaluationId+"-"+expandedReferenceSetName+"/"+expandedResponseSetName+"(lenient)");
    correctnessTableLenient.setRowLabel(0, expandedReferenceSetName+":correct");
    correctnessTableLenient.setRowLabel(1, expandedReferenceSetName+":wrong");
    correctnessTableLenient.setColumnLabel(0, expandedResponseSetName+":correct");
    correctnessTableLenient.setColumnLabel(1, expandedResponseSetName+":wrong");
    correctnessTableStrict.setName(expandedEvaluationId+"-"+expandedReferenceSetName+"/"+expandedResponseSetName+"(strict)");
    correctnessTableStrict.setRowLabel(0, expandedReferenceSetName+":correct");
    correctnessTableStrict.setRowLabel(1, expandedReferenceSetName+":wrong");
    correctnessTableStrict.setColumnLabel(0, expandedResponseSetName+":correct");
    correctnessTableStrict.setColumnLabel(1, expandedResponseSetName+":wrong");
  }
  // If a score feature name is specified, we need to do either by score or list-based
  // evaluation. In both cases we need a data structure to hold one by-threshold-object per
  // type.
  if(!expandedScoreFeatureName.isEmpty()) {
    evalStatsByThreshold = new HashMap<String, ByThEvalStatsTagging>();
    // also figure out if we want to do list or score evaluation or none of the two
    if(!expandedListIdFeatureName.isEmpty()) {
      doListEvaluation = true;
    } else {
      doScoreEvaluation = true;
    }
  }
  // Create one accumulator per type (and one for "" when present).
  typesPlusEmpty.addAll(getAnnotationTypes());
  for(String t : typesPlusEmpty) {
    logger.debug("DEBUG: initializing alldocument stats for type "+t);
    allDocumentsStats.put(t,new EvalStatsTagging());
    if(evalStatsByThreshold != null) {
      evalStatsByThreshold.put(t,new ByThEvalStatsTagging(getWhichThresholds()));
    }
    if(allDocumentsReferenceStats != null) {
      allDocumentsReferenceStats.put(t,new EvalStatsTagging());
    }
  }
  // If the featureNames list is null, this has the special meaning that the features in
  // the key/target annotation should be used. In that case the featureNameSet will also
  // be left null. Otherwise the list will get converted to a set.
  // Convert the feature list into a set
  if(featureNames != null) {
    // NOTE(review): featureNameSet is a local variable discarded after the
    // duplicate check below, although the comment above implies it should be
    // stored in a field — confirm whether a field assignment is missing here.
    Set<String> featureNameSet = new HashSet<String>();
    featureNameSet.addAll(featureNames);
    // check if we have duplicate entries in the featureNames
    if(featureNameSet.size() != featureNames.size()) {
      throw new GateRuntimeException("Duplicate feature in the feature name list");
    }
  }
  // Establish the default containment type if it was not specified.
  if(getContainmentType() == null) {
    containmentType = ContainmentType.OVERLAPPING;
  }
  if(getNilTreatment() == null) {
    nilTreatment = NilTreatment.NO_NILS;
  }
  // Prefixes for the per-document feature names written into each document.
  featurePrefixResponse = initialFeaturePrefixResponse + getEvaluationId() + "." + getResponseASName();
  featurePrefixReference = initialFeaturePrefixReference + getEvaluationId() + "." + getReferenceASName();
  mainTsvPrintStream = getOutputStream(null);
  // Output the initial header line
  if(mainTsvPrintStream != null) {
    mainTsvPrintStream.print("evaluationId"); mainTsvPrintStream.print("\t");
    mainTsvPrintStream.print("docName"); mainTsvPrintStream.print("\t");
    mainTsvPrintStream.print("setName"); mainTsvPrintStream.print("\t");
    mainTsvPrintStream.print("annotationType"); mainTsvPrintStream.print("\t");
    mainTsvPrintStream.println(EvalStatsTagging.getTSVHeaders());
  }
  /** not used yet
  if(doListEvaluation || doScoreEvaluation) {
    scoreDistPrintStream = getOutputStream("-scores");
  }
  */
}
/** Return {@code value}, or {@code elseValue} when {@code value} is null. */
private String getStringOrElse(String value, String elseValue) {
  return (value == null) ? elseValue : value;
}
/**
 * Filter the annotations in the set toFilterSet and select only those which
 * overlap with / are contained in / are coextensive with (depending on the
 * containment type) any annotation in the set bySet.
 *
 * @param toFilterSet the set whose annotations get selected
 * @param bySet the set of constraining annotations
 * @param how which containment relation to apply
 * @return an immutable set holding the selected annotations
 */
private AnnotationSet selectOverlappingBy(AnnotationSet toFilterSet, AnnotationSet bySet, ContainmentType how) {
  // Nothing to filter: return the (empty) input unchanged.
  if(toFilterSet.isEmpty()) return toFilterSet;
  // No constraints: nothing can match, return an empty immutable set.
  if(bySet.isEmpty()) return new ImmutableAnnotationSetImpl(toFilterSet.getDocument(),null);
  // Preserve the original diagnostic for an unexpected (null) containment type.
  if(how == null) {
    throw new GateRuntimeException("Odd ContainmentType parameter value: "+how);
  }
  Set<Annotation> matched = new HashSet<Annotation>();
  for(Annotation constraint : bySet) {
    AnnotationSet hits;
    switch(how) {
      case OVERLAPPING:
        hits = gate.Utils.getOverlappingAnnotations(toFilterSet, constraint);
        break;
      case CONTAINING:
        hits = gate.Utils.getContainedAnnotations(toFilterSet, constraint);
        break;
      case COEXTENSIVE:
        hits = gate.Utils.getCoextensiveAnnotations(toFilterSet, constraint);
        break;
      default:
        throw new GateRuntimeException("Odd ContainmentType parameter value: "+how);
    }
    matched.addAll(hits);
  }
  return new ImmutableAnnotationSetImpl(document, matched);
}
/**
 * Build one tab-separated output row for the given statistics object.
 * Null/empty arguments select the aggregate placeholders: a null docName
 * yields "[doc:all:micro|macro]", an empty/null setName yields the expanded
 * response set name, and an empty/null annotationType yields
 * "[type:all:micro|macro]".
 * @param docName document name, or null for an all-documents row
 * @param annotationType annotation type, or null/empty for an all-types row
 * @param setName annotation set name, or null/empty for the response set
 * @param es the statistics to serialize
 * @return the complete TSV row (without trailing newline)
 */
protected String outputTsvLine(
        String docName,
        String annotationType,
        String setName,
        EvalStatsTagging es
) {
  // Macro-averaged statistics are marked differently in the placeholders.
  String avg = (es instanceof EvalStatsTaggingMacro) ? "macro" : "micro";
  String docField = (docName == null) ? "[doc:all:" + avg + "]" : docName;
  String setField = (setName == null || setName.isEmpty()) ? expandedResponseSetName : setName;
  String typeField = (annotationType == null || annotationType.isEmpty())
          ? "[type:all:" + avg + "]" : annotationType;
  StringBuilder row = new StringBuilder();
  row.append(expandedEvaluationId).append("\t")
     .append(docField).append("\t")
     .append(setField).append("\t")
     .append(typeField).append("\t")
     .append(es.getTSVLine());
  return row.toString();
}
/**
 * Print the final evaluation results and close the TSV output stream, if one
 * was opened. Invoked from the controller-finished and controller-aborted
 * callbacks after at least one execute() has run.
 */
public void finishRunning() {
  outputDefaultResults();
  if(mainTsvPrintStream != null) {
    mainTsvPrintStream.close();
  }
  /** not used yet
  if(scoreDistPrintStream != null) {
    scoreDistPrintStream.close();
  }
  */
}
/** Round a double to four decimal places (for display purposes). */
private static double r4(double x) {
  // Math.round returns a long; dividing by the double literal promotes the
  // result back to double, matching the original explicit cast.
  return Math.round(x * 10000.0) / 10000.0;
}
// Output the complete EvalStats object, but in a format that makes it easier to grep
// out the lines one is interested in based on threshold and type
/**
 * Convenience wrapper: delegates to the static variant, supplying this PR's
 * expanded evaluation id as the line prefix.
 */
public void outputEvalStatsForType(PrintStream out, EvalStatsTagging es, String type, String set) {
  EvaluateTagging.outputEvalStatsForType(out,es,type,set,expandedEvaluationId);
}
/**
 * Print all measures and counts of an EvalStatsTagging object, one line per
 * measure, each prefixed with the evaluation id, set, type and threshold so
 * the output can easily be filtered with grep.
 * The threshold is rendered as "none" for NaN, "inf" for an infinite value,
 * and rounded to four decimals otherwise.
 */
public static void outputEvalStatsForType(PrintStream out, EvalStatsTagging es, String type, String set, String expandedEvaluationId) {
  double th = es.getThreshold();
  String ts;
  if(Double.isNaN(th)) {
    ts = "none";
  } else if(Double.isInfinite(th)) {
    ts = "inf";
  } else {
    ts = "" + r4(th);
  }
  // All lines share the same prefix; build it once.
  String prefix = expandedEvaluationId+" set="+set+", type="+type+", th="+ts+", ";
  out.println(prefix+"Precision Strict: "+r4(es.getPrecisionStrict()));
  out.println(prefix+"Recall Strict: "+r4(es.getRecallStrict()));
  out.println(prefix+"F1.0 Strict: "+r4(es.getFMeasureStrict(1.0)));
  out.println(prefix+"Accuracy Strict: "+r4(es.getSingleCorrectAccuracyStrict()));
  out.println(prefix+"Precision Lenient: "+r4(es.getPrecisionLenient()));
  out.println(prefix+"Recall Lenient: "+r4(es.getRecallLenient()));
  out.println(prefix+"F1.0 Lenient: "+r4(es.getFMeasureLenient(1.0)));
  out.println(prefix+"Accuracy Lenient: "+r4(es.getSingleCorrectAccuracyLenient()));
  out.println(prefix+"Targets: "+es.getTargets());
  out.println(prefix+"Responses: "+es.getResponses());
  out.println(prefix+"Correct Strict: "+es.getCorrectStrict());
  out.println(prefix+"Correct Partial: "+es.getCorrectPartial());
  out.println(prefix+"Incorrect Strict: "+es.getIncorrectStrict());
  out.println(prefix+"Incorrect Partial: "+es.getIncorrectPartial());
  out.println(prefix+"Missing Strict: "+es.getMissingStrict());
  out.println(prefix+"True Missing Strict: "+es.getTrueMissingStrict());
  out.println(prefix+"Missing Lenient: "+es.getMissingLenient());
  out.println(prefix+"True Missing Lenient: "+es.getTrueMissingLenient());
  out.println(prefix+"Spurious Strict: "+es.getSpuriousStrict());
  out.println(prefix+"True Spurious Strict: "+es.getTrueSpuriousStrict());
  out.println(prefix+"Spurious Lenient: "+es.getSpuriousLenient());
  out.println(prefix+"True Spurious Lenient: "+es.getTrueSpuriousLenient());
  out.println(prefix+"Single Correct Strict: "+es.getSingleCorrectStrict());
  out.println(prefix+"Single Correct Lenient: "+es.getSingleCorrectLenient());
}
// TODO: make this work per type once we collect the tables per type!
/**
 * Print the four cells of a 2x2 correctness contingency table, one line per
 * cell, each prefixed with the evaluation id and the table name.
 */
public void outputContingencyTable(PrintStream out, ContingencyTableInteger table) {
  // FIX: insert a separating space after the table name; previously the name
  // and the cell label ran together, e.g. "...(lenient)correct/correct".
  out.println(expandedEvaluationId+" "+table.getName()+" correct/correct: "+table.get(0, 0));
  out.println(expandedEvaluationId+" "+table.getName()+" correct/wrong: "+table.get(0, 1));
  out.println(expandedEvaluationId+" "+table.getName()+" wrong/correct: "+table.get(1, 0));
  out.println(expandedEvaluationId+" "+table.getName()+" wrong/wrong: "+table.get(1, 1));
}
/**
 * Print the final summary statistics to System.out and, when configured, to
 * the main TSV output file: per-type statistics (response, reference and
 * by-threshold, where present), over-all micro and macro averages when more
 * than one type was evaluated, and the correctness contingency tables when a
 * reference set is in use.
 */
public void outputDefaultResults() {
  // TODO: think of a way of how to add the interpolated precision strict interpolated precision
  // lenient to the by thresholds lines!!!
  // output for each of the types ...
  for(String type : getAnnotationTypes()) {
    //System.out.println("DEBUG: alldocumentsStats="+allDocumentsStats+" type="+type+" expandedResponseSetName="+expandedResponseSetName);
    outputEvalStatsForType(System.out, allDocumentsStats.get(type), type, expandedResponseSetName);
    // NOTE(review): this line passes getResponseASName() where the surrounding
    // lines use expandedResponseSetName — confirm the asymmetry is intended.
    if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, type, getResponseASName(), allDocumentsStats.get(type))); }
    if(!expandedReferenceSetName.isEmpty()) {
      outputEvalStatsForType(System.out, allDocumentsReferenceStats.get(type), type, expandedReferenceSetName);
      if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, type, expandedReferenceSetName, allDocumentsReferenceStats.get(type))); }
    }
    // One line per threshold when by-score/list evaluation was active.
    if(evalStatsByThreshold != null) {
      ByThEvalStatsTagging bthes = evalStatsByThreshold.get(type);
      for(double th : bthes.getByThresholdEvalStats().navigableKeySet()) {
        outputEvalStatsForType(System.out, bthes.get(th), type, expandedResponseSetName);
        if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, type, expandedResponseSetName, bthes.get(th))); }
      }
    }
  }
  // If there was more than one type, also output the summary stats over all types
  if(getAnnotationTypes().size() > 1) {
    // Micro average: the "" accumulator collected counts over all types.
    outputEvalStatsForType(System.out, allDocumentsStats.get(""), "all(micro)", expandedResponseSetName);
    if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, "", expandedResponseSetName, allDocumentsStats.get(""))); }
    if(!getStringOrElse(getReferenceASName(), "").isEmpty()) {
      outputEvalStatsForType(System.out, allDocumentsReferenceStats.get(""), "all(micro)", expandedReferenceSetName);
      if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, "", expandedReferenceSetName, allDocumentsReferenceStats.get(""))); }
    }
    if(evalStatsByThreshold != null) {
      ByThEvalStatsTagging bthes = evalStatsByThreshold.get("");
      for(double th : bthes.getByThresholdEvalStats().navigableKeySet()) {
        outputEvalStatsForType(System.out, bthes.get(th), "all(micro)", expandedResponseSetName);
        if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, "", expandedResponseSetName, bthes.get(th))); }
      }
    }
    // Macro average: combine the per-type statistics objects.
    EvalStatsTaggingMacro esm = new EvalStatsTaggingMacro();
    for(String type : getAnnotationTypes()) {
      esm.add(allDocumentsStats.get(type));
    }
    outputEvalStatsForType(System.out, esm, "all(macro)", expandedResponseSetName);
    if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, "", expandedResponseSetName, esm)); }
    if(!getStringOrElse(getReferenceASName(), "").isEmpty()) {
      esm = new EvalStatsTaggingMacro();
      for(String type : getAnnotationTypes()) {
        esm.add(allDocumentsReferenceStats.get(type));
      }
      outputEvalStatsForType(System.out, esm, "all(macro)", expandedReferenceSetName);
      if(mainTsvPrintStream != null) { mainTsvPrintStream.println(outputTsvLine(null, "", expandedReferenceSetName, esm)); }
    }
  }
  if(!expandedReferenceSetName.isEmpty()) {
    outputContingencyTable(System.out, correctnessTableStrict);
    outputContingencyTable(System.out, correctnessTableLenient);
  }
}
/// CONTROLLER AWARE PR methods
/**
 * Controller callback: a new corpus run is starting, so force
 * (re-)initialization on the next execute().
 */
@Override
public void controllerExecutionStarted(Controller cntrlr) throws ExecutionException {
  needInitialization = true;
}
/**
 * Controller callback: the run has finished; emit the final results if this
 * PR actually processed anything during the run.
 */
@Override
public void controllerExecutionFinished(Controller cntrlr) throws ExecutionException {
  // only do anything at all if we had actually been executed once. The callback gets also
  // invoked if the PR was disabled, so we have to check ...
  // needInitialization is set in the started callback and reset in execute, so if it is still
  // on, we never were in execute.
  if(!needInitialization) {
    finishRunning();
    needInitialization = true;
  }
}
/**
 * Controller callback: the run was aborted. Report the abort reason and emit
 * the summary statistics for whatever was processed before the abort, but
 * only if this PR actually ran at least once.
 */
@Override
public void controllerExecutionAborted(Controller cntrlr, Throwable thrwbl) throws ExecutionException {
  if(!needInitialization) {
    System.err.println("Processing was aborted: "+thrwbl.getMessage());
    thrwbl.printStackTrace(System.err);
    System.err.println("Here are the summary stats for what was processed: ");
    finishRunning();
    needInitialization = true;
  }
}
/**
 * Custom duplication is not supported: the PR accumulates statistics in
 * instance state and writes to shared output files, so it must be run
 * single-threaded for now.
 * @throws UnsupportedOperationException always
 */
@Override
public Resource duplicate(Factory.DuplicationContext dc) throws ResourceInstantiationException {
  throw new UnsupportedOperationException("At the moment, this PR may not be duplicated and must be run single-threaded");
  // TODO: duplicate such that all duplicates get a flag set which indicates that they are
  // duplicates, and only the original has the flag not set.
  // Also, use a shared object to give all PRs access to everyone elses's statistics objects.
  // Also, use a shared obejcts to give all PRs access to a singleton object for writing to
  // the output files.
  // Finally, implement the controllerExecutionFinished() method such that only the original
  // will do the actual summarization: it will access all stats objects from all other PRs and
  // summarize them and
}
} |
package org.xins.server;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.util.Properties;
import javax.servlet.Servlet;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.apache.log4j.helpers.NullEnumeration;
import org.xins.util.MandatoryArgumentChecker;
import org.xins.util.io.FileWatcher;
import org.xins.util.servlet.ServletUtils;
import org.xins.util.text.Replacer;
/**
* Servlet that forwards requests to an <code>API</code>.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>)
*/
public final class APIServlet
extends Object
implements Servlet {
// Class fields

/**
 * The <em>uninitialized</em> state. See {@link #_state}.
 *
 * <p>The <code>State</code> constants below act as a typesafe enumeration of
 * the servlet life-cycle states; instances are compared by identity.
 * (NOTE(review): the <code>State</code> class itself is declared elsewhere.)
 */
private static final State UNINITIALIZED = new State("UNINITIALIZED");

/**
 * The <em>initializing</em> state. See {@link #_state}.
 */
private static final State INITIALIZING = new State("INITIALIZING");

/**
 * The <em>ready</em> state. See {@link #_state}.
 */
private static final State READY = new State("READY");

/**
 * The <em>disposing</em> state. See {@link #_state}.
 */
private static final State DISPOSING = new State("DISPOSING");

/**
 * The <em>disposed</em> state. See {@link #_state}.
 */
private static final State DISPOSED = new State("DISPOSED");

/**
 * The name of the system property that specifies the location of the
 * configuration directory.
 */
public static final String CONFIG_DIR_SYSTEM_PROPERTY = "confdir";

/**
 * The name of the configuration file to use. The system property
 * {@link #CONFIG_DIR_SYSTEM_PROPERTY} specifies the location where this
 * file should be found.
 */
public static final String CONFIG_FILE = "xins.properties";

/**
 * The name of the initialization property that specifies the name of the
 * API class to load. The named class is expected to expose a
 * <code>SINGLETON</code> field (see <code>configureAPI(ServletConfig)</code>).
 */
public static final String API_CLASS_PROPERTY = "org.xins.api.class";

/**
 * The name of the configuration property that specifies the interval
 * for the configuration file modification checks, in seconds.
 */
public static final String CONFIG_RELOAD_INTERVAL_PROPERTY = "org.xins.server.config.reload";

/**
 * The default configuration file modification check interval, in seconds.
 * Used when {@link #CONFIG_RELOAD_INTERVAL_PROPERTY} is not set.
 */
public static final int DEFAULT_CONFIG_RELOAD_INTERVAL = 60;
// Class functions

/**
 * Initializes an API instance based on the specified servlet
 * configuration.
 *
 * <p>The API class named by the {@link #API_CLASS_PROPERTY} init parameter
 * is loaded, its <code>SINGLETON</code> field is read, and the instance is
 * initialized with the servlet init parameters. If initialization fails, the
 * instance is destroyed again before the exception is propagated.
 *
 * @param config
 *    the servlet configuration, cannot be <code>null</code>.
 *
 * @return
 *    the initialized API instance, never <code>null</code>.
 *
 * @throws ServletException
 *    if an API instance could not be initialized.
 */
private static API configureAPI(ServletConfig config)
throws ServletException {

   API api;

   // Determine the API class
   String apiClassName = config.getInitParameter(API_CLASS_PROPERTY);
   if (apiClassName == null || apiClassName.trim().length() < 1) {
      final String message = "Invalid application package. API class name not set in initialization parameter \"" + API_CLASS_PROPERTY + "\".";
      Library.LIFESPAN_LOG.fatal(message);
      throw new ServletException(message);
   }

   // Load the API class
   Class apiClass;
   try {
      apiClass = Class.forName(apiClassName);
   } catch (Exception e) {
      String message = "Invalid application package. Failed to load API class set in initialization parameter \"" + API_CLASS_PROPERTY + "\": \"" + apiClassName + "\".";
      Library.LIFESPAN_LOG.fatal(message, e);
      // FIX: chain the cause so the container can report the full stack trace.
      throw new ServletException(message, e);
   }

   // TODO: Check that the API class is derived from org.xins.server.API

   // Get the SINGLETON field
   Field singletonField;
   try {
      singletonField = apiClass.getDeclaredField("SINGLETON");
   } catch (Exception e) {
      String message = "Invalid application package. Failed to lookup class field SINGLETON in API class \"" + apiClassName + "\".";
      Library.LIFESPAN_LOG.fatal(message, e);
      // FIX: chain the cause (was dropped).
      throw new ServletException(message, e);
   }

   // Get the value of the SINGLETON field
   try {
      api = (API) singletonField.get(null);
   } catch (Exception e) {
      String message = "Invalid application package. Failed to get value of SINGLETON field of API class \"" + apiClassName + "\".";
      Library.LIFESPAN_LOG.fatal(message, e);
      // FIX: chain the cause (was dropped).
      throw new ServletException(message, e);
   }

   // TODO: Make sure that the field is an instance of that same class

   if (Library.LIFESPAN_LOG.isDebugEnabled()) {
      Library.LIFESPAN_LOG.debug("Obtained API instance of class: \"" + apiClassName + "\".");
   }

   // Initialize the API with the servlet init parameters
   if (Library.LIFESPAN_LOG.isDebugEnabled()) {
      Library.LIFESPAN_LOG.debug("Initializing API.");
   }
   Properties settings = ServletUtils.settingsAsProperties(config);
   try {
      api.init(settings);
   } catch (Throwable e) {
      String message = "Failed to initialize API.";
      Library.LIFESPAN_LOG.fatal(message, e);
      // TODO: Let the API.init() rollback the initialization self
      // Best-effort cleanup; a destroy failure is logged and ignored so the
      // original initialization failure is still reported.
      try {
         api.destroy();
      } catch (Throwable e2) {
         Library.LIFESPAN_LOG.error("Caught " + e2.getClass().getName() + " while destroying API instance of class " + api.getClass().getName() + ". Ignoring.", e2);
      }
      // FIX: chain the cause (was dropped).
      throw new ServletException(message, e);
   }
   if (Library.LIFESPAN_LOG.isDebugEnabled()) {
      Library.LIFESPAN_LOG.debug("Initialized API.");
   }

   return api;
}
// Constructors

/**
 * Constructs a new <code>APIServlet</code> object.
 * The servlet starts in the <em>uninitialized</em> state; the lock object
 * guarding state transitions is created here as well.
 */
public APIServlet() {
   _stateLock = new Object();
   _state = UNINITIALIZED;
}
// Fields

/**
 * The current state. Reads and writes are expected to be guarded by
 * {@link #_stateLock}.
 */
private State _state;

/**
 * Lock object for <code>_state</code>.
 */
private Object _stateLock;

/**
 * The stored servlet configuration object, as passed to the servlet's
 * init method.
 */
private ServletConfig _servletConfig;

/**
 * The name of the configuration file.
 */
private String _configFile;

/**
 * The API that this servlet forwards requests to.
 */
private API _api;
// Methods
/**
* Initializes this servlet using the specified configuration. The
* (required) {@link ServletConfig} argument is stored internally and is
* returned from {@link #getServletConfig()}.
*
* <p>The initialization is performed as follows:
*
* <ol>
* <li>if this servlet is not currently <em>uninitialized</em>, then a
* {@link ServletException} is thrown;
* <li>if <code>config == null</code> then a {@link ServletException} is
* thrown;
* <li>the state is set to <em>initializing</em>;
* <li>the value of the required system property
* {@link #CONFIG_DIR_SYSTEM_PROPERTY} is determined; if it is not
* set then a {@link ServletException} is thrown;
* <li>the config file (the name should equals {@link #CONFIG_FILE}) is
* loaded from the directory denoted by the
* {@link #CONFIG_DIR_SYSTEM_PROPERTY} setting, and all
* configuration properties in this file are read according to
* {@link Properties#load(InputStream) the specifications for a property file};
* if this fails, then a {@link ServletException} is thrown;
* <li>the logging subsystem is initialized using the properties from
* the config file, see
* {@link PropertyConfigurator#doConfigure(String,org.apache.log4j.spi.LoggerRepository) the Log4J documentation};
* <li>the logging system is investigated to check if it is properly
* initialized, if it is not then it will be configured to log to
* the console using a simple output method, with no log level
* threshold; in this case a warning message is immediately logged;
* <li>at this point the logging subsystem is definitely initialized;
* the interval for the configuration file modification checks is
* determined by reading the
* {@link #CONFIG_RELOAD_INTERVAL_PROPERTY} configuration property;
* if this property is not set, then
* {@link #DEFAULT_CONFIG_RELOAD_INTERVAL} is assumed;
* if this property exists but has an invalid value, then a
* <em>warning</em> message is logged and
* {@link #DEFAULT_CONFIG_RELOAD_INTERVAL} is also assumed;
* <li>the configuration file watch thread is started;
* <li>the initialization property {@link #API_CLASS_PROPERTY} is read
* from the {@link ServletConfig servlet configuration} (not from
* the configuration file); if it is not set then a
* {@link ServletException} is thrown.
* <li>the API class, specified in the {@link #API_CLASS_PROPERTY}
* property, is loaded; it must be derived from the {@link API}
* class in the XINS/Java Server Framework; if this fails then a
* {@link ServletException} is thrown;
* <li>in the API class a static field called <code>SINGLETON</code> is
* looked up and the value is determined; the value must be an
* instance of that same class, and cannot be <code>null</code>;
* if this fails, then a {@link ServletException} is thrown;
* <li>the API instance will be initialized by calling
* {@link API#init(Properties)}; if this fails a
* {@link ServletException} is thrown;
* <li>the {@link ServletConfig config} object is stored internally, to
* be returned by {@link #getServletConfig()}.
* </ol>
*
* <p>Note that if a {@link ServletException} is thrown, the state is reset
* to <em>uninitialized</em>.
*
* <p>Also note that if the logging subsystem is already initialized and a
* {@link ServletException} is thrown, a <em>fatal</em> message is logged
* just before the exception is actually thrown.
*
* @param config
* the {@link ServletConfig} object which contains initialization and
* startup parameters for this servlet, cannot be <code>null</code>.
*
* @throws ServletException
* if <code>config == null</code>, if this servlet is not uninitialized
* or if the initialization failed for some other reason.
*/
public void init(ServletConfig config)
throws ServletException {

   // Hold the state lock so init/destroy/state checks cannot interleave
   synchronized (_stateLock) {

      // Check preconditions
      if (_state != UNINITIALIZED) {
         // TODO: throw new ServletException("State is " + _state + " instead of " + UNINITIALIZED + '.');
         throw new ServletException("Unable to initialize, state is not UNINITIALIZED.");
      } else if (config == null) {
         throw new ServletException("config == null");
      }

      // Set the state
      _state = INITIALIZING;

      try {
         // Determine configuration directory from the required system property
         String confdir = System.getProperty(CONFIG_DIR_SYSTEM_PROPERTY);
         if (confdir == null || confdir.length() < 1) {
            throw new ServletException("System property \"" + CONFIG_DIR_SYSTEM_PROPERTY + "\" is not set.");
         }

         // Store the config file name in the field. The previous code
         // declared a shadowing local variable here, so the field stayed
         // null and ConfigurationFileListener logged "null" as file name.
         // TODO: Determine absolute path name
         _configFile = confdir + System.getProperty("file.separator") + CONFIG_FILE;

         // Read properties from the config file, always releasing the
         // file handle afterwards (it was previously leaked)
         Properties properties = new Properties();
         FileInputStream in = null;
         try {
            in = new FileInputStream(_configFile);
            properties.load(in);
         } catch (FileNotFoundException exception) {
            throw new ServletException("Configuration file \"" + _configFile + "\" not found.");
         } catch (SecurityException exception) {
            throw new ServletException("Access denied while loading configuration file \"" + _configFile + "\".");
         } catch (IOException exception) {
            throw new ServletException("Unable to read configuration file \"" + _configFile + "\".");
         } finally {
            if (in != null) {
               try {
                  in.close();
               } catch (IOException exception) {
                  // ignore, closing is best-effort
               }
            }
         }

         // Initialize Log4J
         PropertyConfigurator.configure(properties);
         // TODO: If Log4J is not properly initialized, fallback to simple
         //       appender, console output

         // Watch the config file for modifications; pass the declared
         // delay constant (a literal 10 was passed before)
         FileWatcher.Listener listener = new ConfigurationFileListener();
         final int delay = 10; // TODO: Read from config file
         FileWatcher watcher = new FileWatcher(_configFile, delay, listener);
         watcher.start();
         Library.LIFESPAN_LOG.info("Using config file \"" + _configFile + "\". Checking for changes every " + delay + " seconds.");

         // Initialization starting
         String version = org.xins.server.Library.getVersion();
         if (Library.LIFESPAN_LOG.isDebugEnabled()) {
            Library.LIFESPAN_LOG.debug("XINS/Java Server Framework " + version + " is initializing.");
         }

         // Initialize API instance
         _api = configureAPI(config);

         // Initialization done
         if (Library.LIFESPAN_LOG.isInfoEnabled()) {
            Library.LIFESPAN_LOG.info("XINS/Java Server Framework " + version + " is initialized.");
         }

         // Finally enter the ready state
         _state = READY;

         // Store the ServletConfig object, per the Servlet API Spec, see:
         // http://java.sun.com/products/servlet/2.3/javadoc/javax/servlet/Servlet.html#getServletConfig()
         _servletConfig = config;

      // If an exception is thrown, then reset the state
      } finally {
         if (_state != READY) {
            _state = UNINITIALIZED;
         }
      }
   }
}
/**
* Returns the <code>ServletConfig</code> object which contains the
* initialization and startup parameters for this servlet. The returned
* {@link ServletConfig} object is the one passed to the
* {@link #init(ServletConfig)} method.
*
* @return
* the {@link ServletConfig} object that was used to initialize this
* servlet, not <code>null</code> if this servlet is indeed already
* initialized.
*/
public ServletConfig getServletConfig() {
// null until init(ServletConfig) has completed successfully
return _servletConfig;
}
public void service(ServletRequest request, ServletResponse response)
throws ServletException, IOException {

   // Determine current time, used as the start timestamp of the call
   long start = System.currentTimeMillis();

   // Check state
   // NOTE(review): _state is read without holding _stateLock here, so a
   // request racing with init()/destroy() may observe a stale value
   if (_state != READY) {
      if (_state == UNINITIALIZED) {
         throw new ServletException("This servlet is not yet initialized.");
      } else if (_state == DISPOSING) {
         throw new ServletException("This servlet is currently being disposed.");
      } else if (_state == DISPOSED) {
         throw new ServletException("This servlet is disposed.");
      } else {
         throw new Error("This servlet is not ready, the state is unknown.");
      }
   }

   // TODO: Support and use OutputStream instead of Writer, for improved
   //       performance

   // Call the API
   CallResult result = _api.handleCall(start, request);

   // Determine the XSLT to link to
   String xslt = request.getParameter("_xslt");

   // Set the content type *before* obtaining the writer: once getWriter()
   // has been called the response character encoding is committed, so a
   // later setContentType() call may no longer take effect
   response.setContentType("text/xml");

   // Send the XML output to the stream and flush
   PrintWriter out = response.getWriter();
   CallResultOutputter.output(out, result, xslt);
   out.flush();
}
/**
* Returns information about this servlet, as plain text.
*
* @return
* textual description of this servlet, not <code>null</code> and not an
* empty character string.
*/
public String getServletInfo() {
   // e.g. "XINS 0.121 API Servlet"
   String info = "XINS " + Library.getVersion() + " API Servlet";
   return info;
}
public void destroy() {
if (Library.LIFESPAN_LOG.isDebugEnabled()) {
Library.LIFESPAN_LOG.debug("XINS/Java Server Framework shutdown initiated.");
}
// Serialize with init() and other state transitions
synchronized (_stateLock) {
_state = DISPOSING;
// _api is null if init() never completed successfully
if (_api != null) {
_api.destroy();
}
Library.LIFESPAN_LOG.info("XINS/Java Server Framework shutdown completed.");
_state = DISPOSED;
}
}
/**
 * Re-initializes this servlet after the configuration file has changed.
 * Currently only logs the event; re-configuring Log4J and the API is
 * still a TODO.
 */
private void reinit() {
Library.RUNTIME_LOG.info("Re-initializing XINS/Java Server Framework.");
// TODO: PropertyConfigurator.configure(properties);
// TODO: reinit API
}
// Inner classes
/**
* State of an <code>APIServlet</code>.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>)
*
* @since XINS 0.121
*/
private static final class State extends Object {

   // Constructors

   /**
    * Constructs a new <code>State</code> with the specified name.
    *
    * @param name
    *    the name for the state, cannot be <code>null</code>.
    *
    * @throws IllegalArgumentException
    *    if <code>name == null</code>.
    */
   private State(String name) throws IllegalArgumentException {
      // Check preconditions
      MandatoryArgumentChecker.check("name", name);
      _name = name;
   }

   // Fields

   /**
    * The name of this state. Never <code>null</code>.
    */
   private final String _name;

   // Methods

   /**
    * Returns the name of this state.
    *
    * @return
    *    the name of this state, never <code>null</code>.
    */
   String getName() {
      return _name;
   }

   public String toString() {
      return getName();
   }
}
/**
* Listener that reloads the configuration file if it changes.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>)
*
* @since XINS 0.121
*/
private final class ConfigurationFileListener
extends Object
implements FileWatcher.Listener {

   // Constructors

   /**
    * Constructs a new <code>ConfigurationFileListener</code> object.
    */
   private ConfigurationFileListener() {
      // empty
   }

   // Methods

   /**
    * Called when the watched configuration file has changed; triggers a
    * re-initialization.
    */
   public void fileModified() {
      reinit();
   }

   /**
    * Called when the watched configuration file could not be found.
    */
   public void fileNotFound() {
      String message = "Configuration file \"" + _configFile + "\" not found.";
      Library.RUNTIME_LOG.warn(message);
   }

   /**
    * Called when the watched configuration file was checked but found
    * unchanged.
    */
   public void fileNotModified() {
      String message = "Configuration file \"" + _configFile + "\" is not modified.";
      Library.RUNTIME_LOG.debug(message);
   }

   /**
    * Called when checking the watched configuration file caused a
    * <code>SecurityException</code>.
    */
   public void securityException(SecurityException exception) {
      String message = "Access denied while reading file \"" + _configFile + "\".";
      Library.RUNTIME_LOG.warn(message);
   }
}
} |
package com.astoev.cave.survey.activity.map;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.util.Log;
import android.util.SparseArray;
import android.util.SparseIntArray;
import android.view.View;
import com.astoev.cave.survey.Constants;
import com.astoev.cave.survey.R;
import com.astoev.cave.survey.activity.UIUtilities;
import com.astoev.cave.survey.model.Gallery;
import com.astoev.cave.survey.model.Leg;
import com.astoev.cave.survey.model.Option;
import com.astoev.cave.survey.model.Vector;
import com.astoev.cave.survey.service.Options;
import com.astoev.cave.survey.service.Workspace;
import com.astoev.cave.survey.util.DaoUtil;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
public class MapView extends View {
public static final int POINT_RADIUS = 3;
public static final int MIDDLE_POINT_RADIUS = 2;
public static final int MEASURE_POINT_RADIUS = 2;
public static final int CURR_POINT_RADIUS = 8;
private final static int LABEL_DEVIATION_X = 10;
private final static int LABEL_DEVIATION_Y = 15;
private static final int [] GRID_STEPS = new int[] {20,10, 5, 5, 2, 2, 2, 2, 1, 1, 1};
private final int SPACING = 5;
private final Paint polygonPaint = new Paint();
private final Paint polygonWidthPaint = new Paint();
private final Paint overlayPaint = new Paint();
private final Paint youAreHerePaint = new Paint();
private final Paint gridPaint = new Paint();
private final Paint vectorsPaint = new Paint();
private final Paint vectorPointPaint = new Paint();
private int scale = 10;
private int mapCenterMoveX = 0;
private int mapCenterMoveY = 0;
private float initialMoveX = 0;
private float initialMoveY = 0;
private Point northCenter = new Point();
private List<Integer> processedLegs = new ArrayList<Integer>();
private SparseArray<Point2D> mapPoints = new SparseArray<Point2D>();
private SparseIntArray galleryColors = new SparseIntArray();
private SparseArray<String> galleryNames = new SparseArray<String>();
private boolean horizontalPlan = true;
// Configures the paints used by onDraw(); colors of the polygon/vector
// paints are overwritten per gallery while drawing.
public MapView(Context context, AttributeSet attrs) {
super(context, attrs);
// legs and station labels
polygonPaint.setColor(Color.RED);
polygonPaint.setStrokeWidth(2);
// side (left/right/top/down) measurement points
polygonWidthPaint.setColor(Color.RED);
polygonWidthPaint.setStrokeWidth(1);
// borders, arrows and the scale ruler
overlayPaint.setColor(Color.WHITE);
// "you are here" marker, semi transparent
youAreHerePaint.setColor(Color.WHITE);
youAreHerePaint.setAlpha(50);
// semi transparent white
gridPaint.setColor(Color.parseColor("#11FFFFFF"));
gridPaint.setStrokeWidth(1);
// dashed, semi transparent lines for the side vectors
vectorsPaint.setStrokeWidth(1);
vectorsPaint.setStyle(Paint.Style.STROKE);
vectorsPaint.setPathEffect(new DashPathEffect(new float[]{2, 6}, 0));
vectorsPaint.setAlpha(50);
vectorPointPaint.setStrokeWidth(1);
vectorPointPaint.setAlpha(50);
// need to instruct that changes to the canvas will be made, otherwise the screen might become blank
setWillNotDraw(false);
}
/**
 * Renders the survey map: background grid, all legs with their station
 * labels, side measurements and vectors, screen borders, a north/up
 * arrow and the scale ruler. Rebuilds the leg/point caches on every
 * draw. Any failure is logged and reported as a notification rather
 * than crashing the UI.
 */
@Override
public void onDraw(Canvas canvas) {
// need to call parent
super.onDraw(canvas);
try {
// reset the per-draw caches
processedLegs.clear();
mapPoints.clear();
galleryColors.clear();
galleryNames.clear();
// prepare map surface
int maxX = canvas.getWidth();
int maxY = canvas.getHeight();
int centerX;
int centerY;
if (horizontalPlan) {
// starting from the center of the screen
centerX = maxX / 2;
centerY = maxY / 2;
} else {
// slightly more left for the vertical plan (advancing to right)
centerX = maxX / 4;
centerY = maxY / 2;
}
String azimuthUnits = Options.getOptionValue(Option.CODE_AZIMUTH_UNITS);
String slopeUnits = Options.getOptionValue(Option.CODE_SLOPE_UNITS);
int gridStepIndex = scale/5;
// grid scale
int gridStep = GRID_STEPS[gridStepIndex] * scale;
// grid start
int gridStartX = mapCenterMoveX % gridStep - SPACING + centerX - (centerX / gridStep) * gridStep;
int gridStartY = mapCenterMoveY % gridStep - SPACING + centerY - (centerY / gridStep) * gridStep;
// grid vertical lines (constant x, varying y)
for (int x=0; x<maxX/gridStep; x++) {
canvas.drawLine(x*gridStep + SPACING + gridStartX, SPACING, x*gridStep + SPACING + gridStartX, maxY - SPACING, gridPaint);
}
// grid horizontal lines (constant y, varying x)
for (int y=0; y<maxY/gridStep; y++) {
canvas.drawLine(SPACING, y*gridStep + SPACING + gridStartY, maxX - SPACING, y*gridStep + SPACING + gridStartY, gridPaint);
}
// load the points
List<Leg> legs = DaoUtil.getCurrProjectLegs(true);
String pointLabel;
// keep sweeping the leg list until each leg has been drawn once
// NOTE(review): appears to assume each leg's from-point was placed by
// an earlier leg in list order — confirm with DaoUtil ordering
while (processedLegs.size() < legs.size()) {
for (Leg l : legs) {
if (processedLegs.size() == legs.size()) {
break;
}
if (!processedLegs.contains(l.getId())) {
// first leg ever
Point2D first;
if (processedLegs.size() == 0) {
if (horizontalPlan) {
first = new Point2D(Float.valueOf(centerX), Float.valueOf(centerY), l.getLeft(), l.getRight(), MapUtilities.getAzimuthInDegrees(l.getAzimuth(), azimuthUnits));
} else {
first = new Point2D(Float.valueOf(centerX), Float.valueOf(centerY), l.getTop(), l.getDown(), MapUtilities.getSlopeInDegrees(l.getSlope(), slopeUnits));
}
} else {
// update previously created point with the correct values for left/right/up/down
first = mapPoints.get(l.getFromPoint().getId());
if (horizontalPlan) {
first.setLeft(l.getLeft());
first.setRight(l.getRight());
if (l.getAzimuth() != null) {
first.setAngle(MapUtilities.getAzimuthInDegrees(l.getAzimuth(), azimuthUnits));
} else {
first.setAngle(null);
}
} else {
// in the profile view "left"/"right" hold top/down
first.setLeft(l.getTop());
first.setRight(l.getDown());
if (l.getSlope() != null) {
first.setAngle(MapUtilities.getSlopeInDegrees(l.getSlope(), slopeUnits));
} else {
first.setAngle(null);
}
}
}
if (mapPoints.get(l.getFromPoint().getId()) == null) {
if (!l.isMiddle()) {
mapPoints.put(l.getFromPoint().getId(), first);
}
// draw first point
if (!l.isMiddle()) {
//color
if (galleryColors.get(l.getGalleryId(), Constants.NOT_FOUND) == Constants.NOT_FOUND) {
galleryColors.put(l.getGalleryId(), MapUtilities.getNextGalleryColor(galleryColors.size()));
Gallery gallery = DaoUtil.getGallery(l.getGalleryId());
galleryNames.put(l.getGalleryId(), gallery.getName());
}
polygonPaint.setColor(galleryColors.get(l.getGalleryId()));
polygonWidthPaint.setColor(galleryColors.get(l.getGalleryId()));
vectorsPaint.setColor(galleryColors.get(l.getGalleryId()));
vectorPointPaint.setColor(galleryColors.get(l.getGalleryId()));
DaoUtil.refreshPoint(l.getFromPoint());
pointLabel = galleryNames.get(l.getGalleryId()) + l.getFromPoint().getName();
// labels only when zoomed in enough to be readable
if (scale >= 3) {
canvas.drawText(pointLabel, mapCenterMoveX + first.getX() + LABEL_DEVIATION_X, mapCenterMoveY + first.getY() + LABEL_DEVIATION_Y, polygonPaint);
}
canvas.drawCircle(mapCenterMoveX + first.getX(), mapCenterMoveY + first.getY(), POINT_RADIUS, polygonPaint);
}
}
// project the leg onto the screen plane
float deltaX;
float deltaY;
if (horizontalPlan) {
if (l.getDistance() == null || l.getAzimuth() == null) {
deltaX = 0;
deltaY = 0;
} else {
float legDistance;
if (l.isMiddle()) {
legDistance = MapUtilities.applySlopeToDistance(l.getMiddlePointDistance(), MapUtilities.getSlopeInDegrees(l.getSlope(), slopeUnits));
} else {
legDistance = MapUtilities.applySlopeToDistance(l.getDistance(), MapUtilities.getSlopeInDegrees(l.getSlope(), slopeUnits));
}
// screen y grows downwards, hence the negation
deltaY = -(float) (legDistance * Math.cos(Math.toRadians(MapUtilities.getAzimuthInDegrees(l.getAzimuth(), azimuthUnits)))) * scale;
deltaX = (float) (legDistance * Math.sin(Math.toRadians(MapUtilities.getAzimuthInDegrees(l.getAzimuth(), azimuthUnits)))) * scale;
}
} else {
if (l.getDistance() == null || l.getDistance() == 0) {
deltaX = 0;
deltaY = 0;
} else {
float legDistance;
if (l.isMiddle()) {
legDistance = l.getMiddlePointDistance();
} else {
legDistance = l.getDistance();
}
deltaY = (float) (legDistance * Math.cos(Math.toRadians(MapUtilities.add90Degrees(MapUtilities.getSlopeInDegrees(l.getSlope() == null ? 0 : l.getSlope(), slopeUnits))))) * scale;
deltaX = (float) (legDistance * Math.sin(Math.toRadians(MapUtilities.add90Degrees(MapUtilities.getSlopeInDegrees(l.getSlope() == null ? 0 : l.getSlope(), slopeUnits))))) * scale;
}
}
Point2D second = new Point2D(first.getX() + deltaX, first.getY() + deltaY);
if (mapPoints.get(l.getToPoint().getId()) == null || l.isMiddle()) {
if (!l.isMiddle()) {
mapPoints.put(l.getToPoint().getId(), second);
}
// color
if (galleryColors.get(l.getGalleryId(), Constants.NOT_FOUND) == Constants.NOT_FOUND) {
galleryColors.put(l.getGalleryId(), MapUtilities.getNextGalleryColor(galleryColors.size()));
Gallery gallery = DaoUtil.getGallery(l.getGalleryId());
galleryNames.put(l.getGalleryId(), gallery.getName());
}
polygonPaint.setColor(galleryColors.get(l.getGalleryId()));
polygonWidthPaint.setColor(galleryColors.get(l.getGalleryId()));
vectorsPaint.setColor(galleryColors.get(l.getGalleryId()));
vectorPointPaint.setColor(galleryColors.get(l.getGalleryId()));
// Log.i(Constants.LOG_TAG_UI, "Drawing leg " + l.getFromPoint().getName() + ":" + l.getToPoint().getName() + "-" + l.getGalleryId());
if (Workspace.getCurrentInstance().getActiveLegId().equals(l.getId())) {
// you are here
if (l.isMiddle()) {
canvas.drawCircle(mapCenterMoveX + second.getX(), mapCenterMoveY + second.getY(), CURR_POINT_RADIUS, youAreHerePaint);
} else {
canvas.drawCircle(mapCenterMoveX + first.getX(), mapCenterMoveY + first.getY(), CURR_POINT_RADIUS, youAreHerePaint);
}
}
DaoUtil.refreshPoint(l.getToPoint());
if (l.isMiddle()) {
canvas.drawCircle(mapCenterMoveX + second.getX(), mapCenterMoveY + second.getY(), MIDDLE_POINT_RADIUS, polygonPaint);
} else {
pointLabel = galleryNames.get(l.getGalleryId()) + l.getToPoint().getName();
if (scale >= 3) {
canvas.drawText(pointLabel, mapCenterMoveX + second.getX() + LABEL_DEVIATION_X, mapCenterMoveY + second.getY() + LABEL_DEVIATION_Y, polygonPaint);
}
canvas.drawCircle(mapCenterMoveX + second.getX(), mapCenterMoveY + second.getY(), POINT_RADIUS, polygonPaint);
}
}
// leg
if (!l.isMiddle()) {
canvas.drawLine(mapCenterMoveX + first.getX(), mapCenterMoveY + first.getY(), mapCenterMoveX + second.getX(), mapCenterMoveY + second.getY(), polygonPaint);
}
Leg prevLeg = DaoUtil.getLegByToPointId(l.getFromPoint().getId());
// side measurements only when zoomed in enough
if (scale >= 5) {
if (horizontalPlan) {
// left
calculateAndDrawSide(canvas, l, first, second, prevLeg, first.getLeft(), azimuthUnits, true);
// right
calculateAndDrawSide(canvas, l, first, second, prevLeg, first.getRight(), azimuthUnits, false);
} else {
// top
calculateAndDrawSide(canvas, l, first, second, prevLeg, first.getLeft(), slopeUnits, true);
// down
calculateAndDrawSide(canvas, l, first, second, prevLeg, first.getRight(), slopeUnits, false);
}
}
if (!l.isMiddle() && scale >= 10) {
// vectors
List<Vector> vectors = DaoUtil.getLegVectors(l);
if (vectors != null) {
for (Vector v : vectors) {
if (horizontalPlan) {
float legDistance = MapUtilities.applySlopeToDistance(v.getDistance(), MapUtilities.getSlopeInDegrees(v.getSlope(), slopeUnits));
deltaY = -(float) (legDistance * Math.cos(Math.toRadians(MapUtilities.getAzimuthInDegrees(v.getAzimuth(), azimuthUnits)))) * scale;
deltaX = (float) (legDistance * Math.sin(Math.toRadians(MapUtilities.getAzimuthInDegrees(v.getAzimuth(), azimuthUnits)))) * scale;
} else {
float legDistance = v.getDistance();
deltaY = (float) (legDistance * Math.cos(Math.toRadians(MapUtilities.add90Degrees(
MapUtilities.getSlopeInDegrees(MapUtilities.getSlopeOrHorizontallyIfMissing(v.getSlope()), slopeUnits))))) * scale;
deltaX = (float) (legDistance * Math.sin(Math.toRadians(MapUtilities.add90Degrees(
MapUtilities.getSlopeInDegrees(MapUtilities.getSlopeOrHorizontallyIfMissing(v.getSlope()), slopeUnits))))) * scale;
}
canvas.drawLine(mapCenterMoveX + first.getX(), mapCenterMoveY + first.getY(), mapCenterMoveX + first.getX() + deltaX, mapCenterMoveY + first.getY() + deltaY, vectorsPaint);
canvas.drawCircle(mapCenterMoveX + first.getX() + deltaX, mapCenterMoveY + first.getY() + deltaY, 2, vectorPointPaint);
}
}
}
processedLegs.add(l.getId());
}
}
}
// borders
//top
canvas.drawLine(SPACING, SPACING, maxX - SPACING, SPACING, overlayPaint);
//right
canvas.drawLine(maxX - SPACING, SPACING, maxX - SPACING, maxY - SPACING, overlayPaint);
// bottom
canvas.drawLine(SPACING, maxY - SPACING, maxX - SPACING, maxY - SPACING, overlayPaint);
//left
canvas.drawLine(SPACING, maxY - SPACING, SPACING, SPACING, overlayPaint);
if (horizontalPlan) {
// north arrow
northCenter.set(maxX - 20, 30);
canvas.drawLine(northCenter.x, northCenter.y, northCenter.x + 10, northCenter.y + 10, overlayPaint);
canvas.drawLine(northCenter.x + 10, northCenter.y + 10, northCenter.x, northCenter.y - 20, overlayPaint);
canvas.drawLine(northCenter.x, northCenter.y - 20, northCenter.x - 10, northCenter.y + 10, overlayPaint);
canvas.drawLine(northCenter.x - 10, northCenter.y + 10, northCenter.x, northCenter.y, overlayPaint);
canvas.drawText("N", northCenter.x + 5, northCenter.y - 10, overlayPaint);
} else {
// up arrow
northCenter.set(maxX - 15, 10);
canvas.drawLine(northCenter.x + 1, northCenter.y, northCenter.x + 6, northCenter.y + 10, overlayPaint);
canvas.drawLine(northCenter.x - 5, northCenter.y + 10, northCenter.x, northCenter.y, overlayPaint);
canvas.drawLine(northCenter.x, northCenter.y -1, northCenter.x, northCenter.y + 20, overlayPaint);
}
// scale ruler with zoom factor and real-world step length
canvas.drawText("x" + scale, 25 + gridStep/2, 45, overlayPaint);
canvas.drawLine(30, 25, 30, 35, overlayPaint);
canvas.drawLine(30, 30, 30 + gridStep, 30, overlayPaint);
canvas.drawLine(30 + gridStep, 25, 30 + gridStep, 35, overlayPaint);
canvas.drawText(GRID_STEPS[gridStepIndex] + "m" , 25 + gridStep/2, 25, overlayPaint);
} catch (Exception e) {
Log.e(Constants.LOG_TAG_UI, "Failed to draw map activity", e);
UIUtilities.showNotification(R.string.error);
}
}
public void zoomOut() {
scale
invalidate();
}
/**
 * Zooms the map in by a single step and triggers a redraw. Callers
 * should consult {@link #canZoomIn()} before invoking this.
 */
public void zoomIn() {
    scale = scale + 1;
    invalidate();
}
/**
 * @return true while one more zoom-out step keeps the scale positive
 */
public boolean canZoomOut() {
return scale > 1;
}
/**
 * @return true while the scale is below the maximum supported zoom level
 */
public boolean canZoomIn() {
return scale < 50;
}
/**
 * @return true when the horizontal plan is shown, false for the vertical profile
 */
public boolean isHorizontalPlan() {
return horizontalPlan;
}
/**
 * Switches between the horizontal plan and the vertical profile view,
 * resetting zoom and pan, and redraws.
 *
 * @param horizontalPlan true for the horizontal plan, false for the profile
 */
public void setHorizontalPlan(boolean horizontalPlan) {
    // reset the viewport whenever the projection changes
    scale = 10;
    mapCenterMoveX = 0;
    mapCenterMoveY = 0;
    this.horizontalPlan = horizontalPlan;
    invalidate();
}
/**
 * Remembers the screen coordinates where a drag gesture starts, so that
 * subsequent {@link #move(float, float)} calls can compute deltas.
 */
public void resetMove(float aX, float aY) {
    initialMoveY = aY;
    initialMoveX = aX;
}
/**
 * Pans the map by the distance dragged since the last event, remembers
 * the new drag position and redraws.
 */
public void move(float x, float y) {
    float deltaX = x - initialMoveX;
    float deltaY = y - initialMoveY;
    mapCenterMoveX += deltaX;
    mapCenterMoveY += deltaY;
    initialMoveX = x;
    initialMoveY = y;
    invalidate();
}
/**
 * Renders the current map into an off-screen bitmap, crops the screen
 * borders and returns the result encoded as PNG bytes.
 */
public byte[] getPngDump() {
    // render the view into an off-screen bitmap
    Bitmap bitmap = Bitmap.createBitmap(this.getWidth(), this.getHeight(), Bitmap.Config.ARGB_8888);
    Canvas renderCanvas = new Canvas(bitmap);
    Drawable background = this.getBackground();
    if (background != null) {
        background.draw(renderCanvas);
    }
    draw(renderCanvas);
    // crop borders etc
    bitmap = Bitmap.createBitmap(bitmap, 6, 6, this.getWidth() - 50, this.getHeight() - 70);
    // encode; the quality argument is ignored for the lossless PNG format
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.PNG, 50, buffer);
    return buffer.toByteArray();
}
/**
 * Computes the direction for one side measurement (left/right in plan
 * view, top/down in profile view) of a leg and draws its point.
 *
 * The direction is perpendicular (90 degrees) to the leg for the first
 * leg, for middle points and for the first leg of a new gallery; inside
 * a gallery it follows the bisector between the previous and current
 * leg. Nothing is drawn when the measurement is missing or non-positive.
 *
 * @param aMeasure side dimension to draw — presumably in meters; TODO confirm units
 * @param anUnits azimuth units (plan) or slope units (profile) for conversions
 * @param left true for the left/top side, false for the right/down side
 */
private void calculateAndDrawSide(Canvas canvas, Leg l, Point2D first, Point2D second, Leg prevLeg, Float aMeasure, String anUnits, boolean left) {
double galleryWidthAngle;
if (aMeasure != null && aMeasure > 0) {
// first or middle by 90'
if (prevLeg == null || l.isMiddle()) {
float angle = first.getAngle();
if (horizontalPlan) {
if (left) {
angle = MapUtilities.minus90Degrees(angle);
} else {
angle = MapUtilities.add90Degrees(angle);
}
}
galleryWidthAngle = Math.toRadians(angle);
} else {
float angle = first.getAngle();
// each next in the gallery by the bisector
if (l.getGalleryId().equals(prevLeg.getGalleryId())) {
if (horizontalPlan) {
angle = MapUtilities.getMiddleAngle(MapUtilities.getAzimuthInDegrees(prevLeg.getAzimuth(), anUnits), angle);
if (left) {
angle = MapUtilities.minus90Degrees(angle);
} else {
angle = MapUtilities.add90Degrees(angle);
}
} else {
// profile view: top measurements point straight up, down ones straight down
if (left) {
angle = Option.MIN_VALUE_AZIMUTH;
} else {
angle = Option.MAX_VALUE_AZIMUTH_DEGREES / 2;
}
}
} else { // new galleries again by 90'
if (horizontalPlan) {
if (left) {
angle = MapUtilities.minus90Degrees(angle);
} else {
angle = MapUtilities.add90Degrees(angle);
}
} else {
if (left) {
angle = Option.MIN_VALUE_AZIMUTH;
} else {
angle = Option.MAX_VALUE_AZIMUTH_DEGREES / 2;
}
}
}
galleryWidthAngle = Math.toRadians(angle);
}
// project onto the screen; y is negated because screen y grows downwards
float deltaY = -(float) (aMeasure * Math.cos(galleryWidthAngle) * scale);
float deltaX = (float) (aMeasure * Math.sin(galleryWidthAngle) * scale);
drawSideMeasurePoint(canvas, l.isMiddle(), first, second, deltaX, deltaY);
}
}
/**
 * Draws a single side-measurement point, offset by the given deltas from
 * the leg's anchor station.
 */
private void drawSideMeasurePoint(Canvas aCanvas, boolean isMiddle, Point2D aFirst, Point2D aSecond, float aDeltaX, float aDeltaY) {
    // middle legs anchor the measurement at their end point, regular legs at their start point
    Point2D anchor = isMiddle ? aSecond : aFirst;
    aCanvas.drawCircle(mapCenterMoveX + anchor.getX() + aDeltaX, mapCenterMoveY + anchor.getY() + aDeltaY, MEASURE_POINT_RADIUS, polygonWidthPaint);
}
} |
// Narya library - tools for developing networked games
//
// This library is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as
// published by the Free Software Foundation; either version 2.1 of the
// License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.threerings.presents.client;
import java.io.EOFException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import com.samskivert.util.LoopingThread;
import com.samskivert.util.Queue;
import com.samskivert.util.RuntimeAdjust;
import com.samskivert.util.StringUtil;
import com.threerings.io.FramedInputStream;
import com.threerings.io.FramingOutputStream;
import com.threerings.io.ObjectInputStream;
import com.threerings.io.ObjectOutputStream;
import com.threerings.presents.Log;
import com.threerings.presents.dobj.DObjectManager;
import com.threerings.presents.net.AuthRequest;
import com.threerings.presents.net.AuthResponse;
import com.threerings.presents.net.AuthResponseData;
import com.threerings.presents.net.DownstreamMessage;
import com.threerings.presents.net.LogoffRequest;
import com.threerings.presents.net.UpstreamMessage;
/**
* The client performs all network I/O on separate threads (one for
* reading and one for writing). The communicator class encapsulates that
* functionality.
*
* <pre>
* Logon synopsis:
*
* Client.logon():
* - Calls Communicator.start()
* Communicator.start():
* - spawn Reader thread
* Reader.run():
* { - connect
* - authenticate
* } if either fail, notify observers of failed logon
* - start writer thread
* - notify observers that we're logged on
* - read loop
* Writer.run():
* - write loop
* </pre>
*/
public class Communicator
{
/**
* Creates a new communicator instance which is associated with the
* supplied client.
*/
public Communicator (Client client)
{
// keep a reference to the client on whose behalf we communicate
_client = client;
}
/**
* Logs on to the server and initiates our full-duplex message
* exchange.
*/
public synchronized void logon ()
{
    // synchronized: logoff() and readerDidExit() already synchronize on
    // this instance when they touch _reader; without it, two concurrent
    // logon() calls could both pass the null check below and spawn two
    // reader threads

    // make sure things are copacetic
    if (_reader != null) {
        throw new RuntimeException("Communicator already started.");
    }

    // start up the reader thread. it will connect to the server and
    // start up the writer thread if everything went successfully
    _reader = new Reader();
    _reader.start();
}
/**
* Delivers a logoff notification to the server and shuts down the
* network connection. Also causes all communication threads to
* terminate.
*/
public synchronized void logoff ()
{
// if our socket is already closed, we've already taken care of
// this business
if (_channel == null) {
return;
}
// post a logoff message; it is queued before the writer is told to
// shut down, so it is still delivered
postMessage(new LogoffRequest());
// let our reader and writer know that it's time to go
if (_reader != null) {
// if logoff() is being called by the client as part of a
// normal shutdown, this will cause the reader thread to be
// interrupted and shutdown gracefully. if logoff is being
// called by the reader thread as a result of a failed socket,
// it won't interrupt itself as it is already shutting down
// gracefully. if the JVM is buggy and calling interrupt() on
// a thread that is blocked on a socket doesn't wake it up,
// then when we close() the socket a bit further down, we have
// another chance that the reader thread will wake up; this
// time slightly less gracefully because it will think there's
// a network error when in fact we're just shutting down, but
// at least it will cleanly exit
_reader.shutdown();
}
if (_writer != null) {
// shutting down the writer thread is simpler because we can
// post a termination message on the queue and be sure that it
// will receive it. when the writer thread has delivered our
// logoff request and exited, we will complete the logoff
// process by closing our socket and invoking the
// clientDidLogoff callback
_writer.shutdown();
}
}
/**
* Queues up the specified message for delivery upstream.
*/
public void postMessage (UpstreamMessage msg)
{
// simply append the message to the queue; the writer thread drains it
_msgq.append(msg);
}
/**
* Configures this communicator with a custom class loader to be used
* when reading and writing objects over the network.
*/
public void setClassLoader (ClassLoader loader)
{
    // remember the loader for object streams created later on
    _loader = loader;

    // and retrofit it onto the active input stream, if we have one
    if (_oin != null) {
        _oin.setClassLoader(loader);
    }
}
/**
 * Callback called by the reader when the authentication process
 * completes successfully. Here we extract the bootstrap information
 * for the client and start up the writer thread to manage the other
 * half of our bi-directional message stream.
 *
 * @param data the authentication response data from the server.
 */
protected synchronized void logonSucceeded (AuthResponseData data)
{
    Log.debug("Logon succeeded: " + data);

    // create our distributed object manager
    _omgr = new ClientDObjectMgr(this, _client);

    // create a new writer thread and start it up; a non-null _writer
    // here means logonSucceeded was somehow invoked twice, which is a
    // programming error worth failing loudly on
    if (_writer != null) {
        throw new RuntimeException("Writer already started!?");
    }
    _writer = new Writer();
    _writer.start();

    // fill the auth data into the client's local field so that it can
    // be requested by external entities
    _client._authData = data;

    // wait for the bootstrap notification before we claim that we're
    // actually logged on
}
/**
 * Callback called by the reader or writer thread when something goes
 * awry with our socket connection to the server.
 *
 * @param ioe the failure that was reported by the socket.
 */
protected synchronized void connectionFailed (IOException ioe)
{
    // a null channel means we've already dealt with a failed or
    // closed connection, so there's nothing more to do
    if (_channel != null) {
        Log.info("Connection failed: " + ioe);
        Log.logStackTrace(ioe);

        // let the client know that things went south
        _client.notifyObservers(Client.CLIENT_CONNECTION_FAILED, ioe);

        // and request that we go through the motions of logging off
        logoff();
    }
}
/**
 * Callback called by the reader if the server closes the other end of
 * the connection.
 */
protected synchronized void connectionClosed ()
{
    // a null channel means the closed connection was already handled
    if (_channel != null) {
        Log.debug("Connection closed.");
        // now do the whole logoff thing
        logoff();
    }
}
/**
 * Callback called by the reader thread when it goes away.
 */
protected synchronized void readerDidExit ()
{
    // drop our reference to the departed reader
    _reader = null;

    boolean noWriter = (_writer == null);
    if (noWriter) {
        // there's no writer during authentication, so closing the
        // socket channel falls to us
        closeChannel();
        // let the client know when we finally go away
        _client.cleanup(_logonError);
    }

    Log.debug("Reader thread exited.");
}
/**
 * Callback called by the writer thread when it goes away. Note that
 * the order here matters: observers are notified, then the socket is
 * closed, then (if the reader is also gone) the client is cleaned up.
 */
protected synchronized void writerDidExit ()
{
    // clear out our writer reference
    _writer = null;
    Log.debug("Writer thread exited.");

    // let the client observers know that we're logged off
    _client.notifyObservers(Client.CLIENT_DID_LOGOFF, null);

    // now that the writer thread has gone away, we can safely close
    // our socket and let the client know that the logoff process has
    // completed
    closeChannel();

    // let the client know when we finally go away; if _reader is
    // non-null, readerDidExit() will perform the cleanup instead
    if (_reader == null) {
        _client.cleanup(_logonError);
    }
}
/**
 * Closes the socket channel that we have open to the server. Called
 * by either {@link #readerDidExit} or {@link #writerDidExit}
 * whichever is called last.
 */
protected void closeChannel ()
{
    // nothing to do if the channel is already gone
    if (_channel == null) {
        return;
    }

    Log.debug("Closing socket channel.");
    try {
        _channel.close();
    } catch (IOException ioe) {
        Log.warning("Error closing failed socket: " + ioe);
    }
    _channel = null;

    // clear these out because they are probably large and in charge
    _oin = null;
    _oout = null;
}
/**
 * Writes the supplied message to the socket. The message is first
 * serialized into the framing buffer so its length can be measured,
 * then the complete length-prefixed frame is written to the channel.
 *
 * @param msg the message to deliver.
 * @throws IOException if serialization or the socket write fails.
 */
protected void sendMessage (UpstreamMessage msg)
    throws IOException
{
    if (_logMessages.getValue()) {
        Log.info("SEND " + msg);
    }

    // first we write the message so that we can measure its length
    _oout.writeObject(msg);
    _oout.flush();

    // then write the framed message to actual output stream
    try {
        ByteBuffer buffer = _fout.frameAndReturnBuffer();
        // flag unusually large messages; 4096 appears to be a
        // diagnostic threshold only, not a protocol limit
        if (buffer.limit() > 4096) {
            String txt = StringUtil.truncate(
                String.valueOf(msg), 80, "...");
            Log.info("Whoa, writin' a big one [msg=" + txt +
                     ", size=" + buffer.limit() + "].");
        }
        int wrote = _channel.write(buffer);
        // NOTE(review): a short write is only logged here, not
        // retried; on a blocking channel a short write appears to be
        // treated as an anomaly — confirm against FramingOutputStream
        if (wrote != buffer.limit()) {
            Log.warning("Aiya! Couldn't write entire message [msg=" + msg +
                        ", size=" + buffer.limit() +
                        ", wrote=" + wrote + "].");
            // } else {
            //     Log.info("Wrote " + wrote + " bytes.");
        }
    } finally {
        // always reset the frame so a failed write doesn't corrupt
        // the framing of the next message
        _fout.resetFrame();
    }

    // make a note of our most recent write time
    updateWriteStamp();
}
/**
 * Returns the time at which we last sent a packet to the server.
 *
 * @return the timestamp (epoch millis) recorded by
 * {@link #updateWriteStamp}.
 */
protected synchronized long getLastWrite ()
{
    return _lastWrite;
}
/**
 * Makes a note of the time at which we last communicated with the
 * server.
 */
protected synchronized void updateWriteStamp ()
{
    _lastWrite = System.currentTimeMillis();
}
/**
 * Reads a new message from the socket (blocking until a message has
 * arrived).
 *
 * @return the next downstream message.
 * @throws IOException on EOF, socket failure, or if the incoming
 * object's class cannot be resolved.
 */
protected DownstreamMessage receiveMessage ()
    throws IOException
{
    // read in the next message frame (readFrame() can return false
    // meaning it only read part of the frame from the network, in
    // which case we simply call it again because we can't do anything
    // until it has a whole frame; it will throw an exception if it
    // hits EOF or if something goes awry)
    while (!_fin.readFrame(_channel));

    try {
        DownstreamMessage msg = (DownstreamMessage)_oin.readObject();
        if (_logMessages.getValue()) {
            Log.info("RECEIVE " + msg);
        }
        return msg;
    } catch (ClassNotFoundException cnfe) {
        // surface class resolution failures as IOExceptions while
        // preserving the original cause for diagnosis
        throw (IOException) new IOException(
            "Unable to decode incoming message.").initCause(cnfe);
    }
}
/**
 * Callback called by the reader thread when it has parsed a new
 * message from the socket and wishes to have it processed.
 *
 * @param msg the freshly parsed downstream message.
 */
protected void processMessage (DownstreamMessage msg)
{
    // post this message to the dobjmgr queue
    _omgr.processMessage(msg);
}
/**
 * The reader encapsulates the authentication and message reading
 * process. It calls back to the <code>Communicator</code> class to do
 * things, but the general flow of the reader thread is encapsulated
 * in this class.
 */
protected class Reader extends LoopingThread
{
    // called once before the read loop starts: connect, then log on
    protected void willStart ()
    {
        // first we connect and authenticate with the server
        try {
            // connect to the server
            connect();
            // then authenticate
            logon();
        } catch (Exception e) {
            Log.debug("Logon failed: " + e);
            // Log.logStackTrace(e);
            // once we're shutdown we'll report this error
            _logonError = e;
            // terminate our communicator thread
            shutdown();
        }
    }

    // opens the socket channel and wires up the framed object streams
    protected void connect ()
        throws IOException
    {
        // if we're already connected, we freak out
        if (_channel != null) {
            throw new IOException("Already connected.");
        }

        // look up the address of the target server
        InetAddress host = InetAddress.getByName(_client.getHostname());
        int port = _client.getPort();

        // establish a socket connection to said server
        Log.debug("Connecting [host=" + host + ", port=" + port + "].");
        InetSocketAddress addr = new InetSocketAddress(host, port);
        try {
            _channel = SocketChannel.open(addr);
        } catch (IOException ioe) {
            Log.warning("Error opening [addr=" + addr + "].");
            throw ioe; // rethrow
        }
        _channel.configureBlocking(true);

        // our messages are framed (preceded by their length), so we
        // use these helper streams to manage the framing
        _fin = new FramedInputStream();
        _fout = new FramingOutputStream();

        // create our object input and output streams
        _oin = new ObjectInputStream(_fin);
        _oin.setClassLoader(_loader);
        _oout = new ObjectOutputStream(_fout);
    }

    // sends the auth request and validates the server's response
    protected void logon ()
        throws IOException, LogonException
    {
        // construct an auth request and send it
        AuthRequest req = new AuthRequest(_client.getCredentials(),
                                          _client.getVersion());
        sendMessage(req);

        // now wait for the auth response
        Log.debug("Waiting for auth response.");
        AuthResponse rsp = (AuthResponse)receiveMessage();
        AuthResponseData data = rsp.getData();
        Log.debug("Got auth response: " + data);

        // if the auth request failed, we want to let the communicator
        // know by throwing a logon exception
        if (!data.code.equals(AuthResponseData.SUCCESS)) {
            throw new LogonException(data.code);
        }

        // we're all clear. let the communicator know that we're in
        logonSucceeded(data);
    }

    // now that we're authenticated, we manage the reading
    // half of things by continuously reading messages from
    // the socket and processing them
    protected void iterate ()
    {
        DownstreamMessage msg = null;
        try {
            // read the next message from the socket
            msg = receiveMessage();
            // process the message
            processMessage(msg);

        } catch (InterruptedIOException iioe) {
            // somebody set up us the bomb! we've been interrupted
            // which means that we're being shut down, so we just
            // report it and return from iterate() like a good monkey
            Log.debug("Reader thread woken up in time to die.");

        } catch (EOFException eofe) {
            // let the communicator know that our connection was
            // closed
            connectionClosed();
            // and shut ourselves down
            shutdown();

        } catch (IOException ioe) {
            // let the communicator know that our connection failed
            connectionFailed(ioe);
            // and shut ourselves down
            shutdown();

        } catch (Exception e) {
            Log.warning("Error processing message [msg=" + msg +
                        ", error=" + e + "].");
        }
    }

    protected void handleIterateFailure (Exception e)
    {
        // fixed typo in the log message ("it" -> "in")
        Log.warning("Uncaught exception in reader thread.");
        Log.logStackTrace(e);
    }

    protected void didShutdown ()
    {
        // let the communicator know when we finally go away
        readerDidExit();
    }

    protected void kick ()
    {
        // we want to interrupt the reader thread as it may be blocked
        // listening to the socket; this is only called if the reader
        // thread doesn't shut itself down
        // interrupt();
    }
}
/**
 * The writer encapsulates the message writing process. It calls back
 * to the <code>Communicator</code> class to do things, but the
 * general flow of the writer thread is encapsulated in this class.
 */
protected class Writer extends LoopingThread
{
    // delivers one queued message per iteration, blocking on the queue
    protected void iterate ()
    {
        // fetch the next message from the queue
        UpstreamMessage msg = (UpstreamMessage)_msgq.get();

        // if this is a termination message, we're being
        // requested to exit, so we want to bail now rather
        // than continuing
        if (msg instanceof TerminationMessage) {
            return;
        }

        try {
            // write the message out the socket
            sendMessage(msg);
        } catch (IOException ioe) {
            // let the communicator know if we have any
            // problems
            connectionFailed(ioe);
            // and bail
            shutdown();
        }
    }

    protected void handleIterateFailure (Exception e)
    {
        // fixed typo in the log message ("it" -> "in")
        Log.warning("Uncaught exception in writer thread.");
        Log.logStackTrace(e);
    }

    protected void didShutdown ()
    {
        writerDidExit();
    }

    protected void kick ()
    {
        // post a bogus message to the outgoing queue to ensure that
        // the writer thread notices that it's time to go
        postMessage(new TerminationMessage());
    }
}
/** This is used to terminate the writer thread. Posted by
 * {@link Writer#kick} and recognized (and swallowed) in
 * {@link Writer#iterate}; it is never sent over the wire. */
protected static class TerminationMessage extends UpstreamMessage
{
}
/** The client for whom we're managing communications. */
protected Client _client;

/** The thread that connects, authenticates and reads messages. */
protected Reader _reader;

/** The thread that drains the queue and writes messages. */
protected Writer _writer;

/** Our socket connection to the server; null when not connected. */
protected SocketChannel _channel;

/** Messages posted for upstream delivery; drained by the writer. */
protected Queue _msgq = new Queue();

/** Time (epoch millis) of our most recent write to the server. */
protected long _lastWrite;

/** Any error that occurred during logon; reported at cleanup time. */
protected Exception _logonError;

/** We use this to frame our upstream messages. */
protected FramingOutputStream _fout;

/** Object stream layered over the framing output stream. */
protected ObjectOutputStream _oout;

/** We use this to frame our downstream messages. */
protected FramedInputStream _fin;

/** Object stream layered over the framed input stream. */
protected ObjectInputStream _oin;

/** Distributed object manager; created when authentication succeeds. */
protected ClientDObjectMgr _omgr;

/** Custom class loader used when unserializing objects, if any. */
protected ClassLoader _loader;

/** Used to control low-level message logging. */
protected static RuntimeAdjust.BooleanAdjust _logMessages =
    new RuntimeAdjust.BooleanAdjust(
        "Toggles whether or not all sent and received low-level " +
        "network events are logged.", "narya.presents.log_events",
        PresentsPrefs.config, false);
} |
//This library is free software; you can redistribute it and/or
//modify it under the terms of the GNU Lesser General Public
//License as published by the Free Software Foundation; either
//version 2.1 of the License, or (at your option) any later version.
//
//This library is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//GNU Lesser General Public License for more details.
//
//You should have received a copy of the GNU Lesser General Public
//License along with this program; if not, write to the Free Software
//Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
package opennlp.tools.namefind;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import opennlp.tools.util.BeamSearchContextGenerator;
import opennlp.tools.util.Cache;
import opennlp.tools.util.Sequence;
/**
 * Context generator for the name finder: produces the feature-string
 * context used by a beam search to tag each token of a sentence.
 * Static (outcome-independent) features are cached per token array.
 */
public class NameContextGenerator implements BeamSearchContextGenerator {

  // Cache of complete contexts, keyed by index + previous two outcomes;
  // valid only for the token array currently held in wordsKey.
  private Cache contextsCache;
  // Identity (==) of the token array the cache entries belong to.
  private Object wordsKey;
  // Index for which prevStaticFeatures was computed; -1 when unset.
  private int pi = -1;
  // Static features computed for token index pi of wordsKey.
  private List prevStaticFeatures;

  private FeatureGenerator mFeatureGenerators[];

  /**
   * Creates a name context generator.
   */
  public NameContextGenerator() {
    this(0, null);
  }

  /**
   * Creates a name context generator with the specified cache size
   * and the default feature generators.
   */
  public NameContextGenerator(int cacheSize) {
    this(cacheSize, null);
  }

  /**
   * Creates a name context generator with the specified cache size.
   *
   * @param cacheSize size of the contexts cache; 0 disables caching.
   * @param featureGenerators generators to use, or null for the
   * default window-of-2 token and token-class generators.
   */
  public NameContextGenerator(int cacheSize,
      FeatureGenerator featureGenerators[]) {

    if (featureGenerators != null) {
      mFeatureGenerators = featureGenerators;
    }
    else {
      // use defaults
      mFeatureGenerators = new FeatureGenerator[]
        {
          new WindowFeatureGenerator(new TokenFeatureGenerator(), 2, 2),
          new WindowFeatureGenerator(new TokenClassFeatureGenerator(), 2, 2)
        };
    }

    if (cacheSize > 0) {
      contextsCache = new Cache(cacheSize);
    }
  }

  // Appends a generator to the array (grow-by-one copy).
  void addFeatureGenerator(FeatureGenerator generator) {
    FeatureGenerator generators[] = mFeatureGenerators;

    mFeatureGenerators = new FeatureGenerator[mFeatureGenerators.length + 1];

    System.arraycopy(generators, 0, mFeatureGenerators, 0, generators.length);

    mFeatureGenerators[mFeatureGenerators.length - 1] = generator;
  }

  // BeamSearchContextGenerator entry point: unpacks the event tuple
  // {index, tokens, previous decisions, previous-tag map}.
  public String[] getContext(Object o) {
    Object[] data = (Object[]) o;
    return getContext(((Integer) data[0]).intValue(), (List) data[1], (List) data[2], (Map) data[3]);
  }

  public String[] getContext(int index, List sequence, Sequence s, Object[] additionalContext) {
    return getContext(index,sequence,s.getOutcomes(),(Map) additionalContext[0]);
  }

  public String[] getContext(int i, List toks, List preds, Map prevTags) {
    return getContext(i, toks.toArray(), (String[]) preds.toArray(new String[preds.size()]),prevTags);
  }

  public String[] getContext(int index, Object[] sequence, String[] priorDecisions, Object[] additionalContext) {
    return getContext(index,sequence,priorDecisions,(Map) additionalContext[0]);
  }

  /**
   * Return the context for finding names at the specified index.
   * @param i The index of the token in the specified toks array for which the context should be constructed.
   * @param toks The tokens of the sentence.  The <code>toString</code> methods of these objects should return the token text.
   * @param preds The previous decisions made in the tagging of this sequence.  Only indices less than i will be examined.
   * @param prevTags A mapping between tokens and the previous outcome for these tokens.
   * @return the context for finding names at the specified index.
   */
  public String[] getContext(int i, Object[] toks, String[] preds, Map prevTags) {
    // previous and previous-previous outcomes, defaulting to OTHER at
    // the start of the sequence
    String po=NameFinderME.OTHER;
    String ppo=NameFinderME.OTHER;

    if (i > 1){
      ppo = preds[i-2];
    }

    if (i > 0) {
      po = preds[i-1];
    }

    // cache key combines index and the two prior outcomes
    String cacheKey = i+po+ppo;
    if (contextsCache != null) {
      // the cache is only valid for the identical (==) token array;
      // a new sentence invalidates it wholesale
      if (wordsKey == toks){
        String[] cachedContexts = (String[]) contextsCache.get(cacheKey);
        if (cachedContexts != null) {
          return cachedContexts;
        }
      }
      else {
        contextsCache.clear();
        wordsKey = toks;
      }
    }

    List features;
    if (wordsKey == toks && i == pi) {
      // reuse the static features computed for this token on the
      // previous call (same sentence, same index)
      features =prevStaticFeatures;
    }
    else {
      features = getStaticFeatures(toks,i);
      // previous decision for this token text from an earlier pass;
      // "pd=null" is produced when the token was not seen before
      String pt = (String) prevTags.get(toks[i].toString());
      features.add("pd="+pt);
      if (i == 0) {
        features.add("df=it");
      }
      pi=i;
      prevStaticFeatures=features;
    }

    // append the four outcome-dependent features
    int fn = features.size();
    String[] contexts = new String[fn+4];
    for (int fi=0;fi<fn;fi++) {
      contexts[fi]=(String) features.get(fi);
    }
    contexts[fn]="po="+po;
    contexts[fn+1]="pow="+po+toks[i];
    contexts[fn+2]="powf="+po+FeatureGeneratorUtil.tokenFeature(toks[i].toString());
    contexts[fn+3]="ppo="+ppo;

    if (contextsCache != null) {
      contextsCache.put(cacheKey,contexts);
    }
    return contexts;
  }

  /**
    * Returns a list of the features for <code>toks[i]</code> that can
    * be safely cached.  In other words, return a list of all
    * features that do not depend on previous outcome or decision
    * features.  This method is called by <code>search</code>.
    *
    * @param toks The list of tokens being processed.
    * @param i The index of the token whose features should be returned.
    * @return a list of the features for <code>toks[i]</code> that can
    * be safely cached.
    */
  protected List getStaticFeatures(Object[] toks, int index) {
    List feats = new ArrayList();

    // run every configured generator over the string form of the tokens
    String tokens[] = new String[toks.length];
    for (int i = 0; i < toks.length; i++) {
      tokens[i] = toks[i].toString();
    }

    for (int i = 0; i < mFeatureGenerators.length; i++) {
      mFeatureGenerators[i].createFeatures(feats, tokens, index);
    }

    return feats;
  }
}
/*
* $Id: TestV3Poller.java,v 1.46 2012-03-20 17:15:28 barry409 Exp $
*/
package org.lockss.poller.v3;
import java.io.*;
import java.util.*;
import java.security.*;
import org.lockss.app.*;
import org.lockss.config.ConfigManager;
import org.lockss.daemon.*;
import org.lockss.plugin.*;
import org.lockss.protocol.*;
import org.lockss.protocol.IdentityManager.IdentityAgreement;
import org.lockss.protocol.psm.*;
import org.lockss.util.*;
import org.lockss.poller.*;
import org.lockss.poller.v3.FuncV3Poller.MyV3Poller;
import org.lockss.poller.v3.V3Serializer.*;
import org.lockss.test.*;
import org.lockss.hasher.*;
import org.lockss.repository.LockssRepositoryImpl;
import org.mortbay.util.B64Code;
import static org.lockss.util.Constants.*;
public class TestV3Poller extends LockssTestCase {
// Test fixtures shared across the V3 poller tests.
private MyIdentityManager idMgr;
private MockLockssDaemon theDaemon;

private PeerIdentity pollerId;

private String tempDirPath;
private ArchivalUnit testau;
private PollManager pollmanager;
private HashService hashService;
private PluginManager pluginMgr;

// One entry per initial peer, built in setUp().
private PeerIdentity[] voters;
private V3LcapMessage[] pollAcks;
private V3LcapMessage[] nominates;
private V3LcapMessage[] votes;
private V3LcapMessage[] repairs;

private byte[][] pollerNonces;
private byte[][] voterNonces;

/** Identity key of the local (poller) peer. */
private String localPeerKey = "TCP:[127.0.0.1]:9729";

private File tempDir;

// FIX: this literal was truncated to `"http:` in the source (the text
// from the double slash onward had been stripped, leaving an
// unterminated string constant); restored to the conventional LOCKSS
// test base URL.
private static final String BASE_URL = "http://www.test.org/";

private List initialPeers =
  ListUtil.list("TCP:[10.1.0.1]:9729", "TCP:[10.1.0.2]:9729",
                "TCP:[10.1.0.3]:9729", "TCP:[10.1.0.4]:9729",
                "TCP:[10.1.0.5]:9729", "TCP:[10.1.0.6]:9729");

// The simulated AU's URL universe, rooted at BASE_URL.
private static String[] urls = {
  "lockssau:",
  BASE_URL,
  BASE_URL + "index.html",
  BASE_URL + "file1.html",
  BASE_URL + "file2.html",
  BASE_URL + "branch1/",
  BASE_URL + "branch1/index.html",
  BASE_URL + "branch1/file1.html",
  BASE_URL + "branch1/file2.html",
  BASE_URL + "branch2/",
  BASE_URL + "branch2/index.html",
  BASE_URL + "branch2/file1.html",
  BASE_URL + "branch2/file2.html",
};

// One vote block per URL, shared by all vote messages.
private static List voteBlocks;
static {
  voteBlocks = new ArrayList();
  for (int ix = 0; ix < urls.length; ix++) {
    VoteBlock vb = V3TestUtils.makeVoteBlock(urls[ix]);
    voteBlocks.add(vb);
  }
}
/**
 * Builds the full test fixture: daemon, AU, repository, peer
 * identities and the canned protocol messages. Order matters:
 * services must exist before identities and messages are created.
 */
public void setUp() throws Exception {
  super.setUp();
  theDaemon = getMockLockssDaemon();
  TimeBase.setSimulated();
  this.tempDir = getTempDir();
  this.testau = setupAu();
  initRequiredServices();
  setupRepo(testau);
  this.pollerId = findPeerIdentity(localPeerKey);
  this.voters = makeVoters(initialPeers);
  this.pollerNonces = makeNonces();
  this.voterNonces = makeNonces();
  this.pollAcks = makePollAckMessages();
  this.nominates = makeNominateMessages();
  this.votes = makeVoteMessages();
  this.repairs = makeRepairMessages();
}
/**
 * Builds a mock archival unit populated with one cached URL (and a
 * line of content) for every entry in {@link #urls}.
 */
private MockArchivalUnit setupAu() {
  MockArchivalUnit mau = new MockArchivalUnit();
  mau.setAuId("mock");
  mau.setPlugin(new MockPlugin(theDaemon));
  MockCachedUrlSet cus = (MockCachedUrlSet)mau.getAuCachedUrlSet();
  cus.setEstimatedHashDuration(1000);
  List files = new ArrayList();
  for (int ix = 0; ix < urls.length; ix++) {
    MockCachedUrl cu = (MockCachedUrl)mau.addUrl(urls[ix], false, true);
    // give each file distinguishable mock content
    cu.setContent("This is content for CUS file " + ix);
    files.add(cu);
  }
  cus.setHashItSource(files);
  cus.setFlatItSource(files);
  return mau;
}
/** Installs a mock repository for the AU with a node per test URL. */
private void setupRepo(ArchivalUnit au) throws Exception {
  MockLockssRepository repo = new MockLockssRepository("/foo", au);
  for (String url : urls) {
    repo.createNewNode(url);
  }
  ((MockLockssDaemon)theDaemon).setLockssRepository(repo, au);
}
/**
 * Looks up (creating if necessary) the peer identity for a key.
 *
 * @param key a peer identity key such as "TCP:[1.2.3.4]:9729".
 */
PeerIdentity findPeerIdentity(String key) throws Exception {
  PeerIdentity pid = idMgr.findPeerIdentity(key);
  // hack to ensure it's created
  idMgr.findLcapIdentity(pid, pid.getIdString());
  return pid;
}
/**
 * Resolves each peer key into a PeerIdentity.
 *
 * @param keys list of peer identity key strings.
 * @return identities in the same order as the keys.
 */
private PeerIdentity[] makeVoters(List keys) throws Exception {
  PeerIdentity[] ids = new PeerIdentity[keys.size()];
  int idIndex = 0;
  for (Iterator it = keys.iterator(); it.hasNext(); ) {
    PeerIdentity pid = findPeerIdentity((String)it.next());
    // ensure a PeerIdentityStatus exists for this peer; the returned
    // status itself is not needed (previously held in an unused local)
    idMgr.getPeerIdentityStatus(pid);
    ids[idIndex++] = pid;
  }
  return ids;
}
/** Produces one 20-byte random nonce per voter. */
private byte[][] makeNonces() {
  int count = voters.length;
  byte[][] nonces = new byte[count][];
  for (int i = 0; i < count; i++) {
    nonces[i] = ByteArray.makeRandomBytes(20);
  }
  return nonces;
}
/** Builds one canned POLL_ACK message per voter. */
private V3LcapMessage[] makePollAckMessages() {
  V3LcapMessage[] msgs = new V3LcapMessage[voters.length];
  for (int ix = 0; ix < msgs.length; ix++) {
    msgs[ix] = new V3LcapMessage("auid", "key", "1",
                                 ByteArray.makeRandomBytes(20),
                                 ByteArray.makeRandomBytes(20),
                                 V3LcapMessage.MSG_POLL_ACK,
                                 987654321,
                                 voters[ix],
                                 tempDir, theDaemon);
  }
  return msgs;
}
/** Builds one NOMINATE message per voter, each nominating four peers. */
private V3LcapMessage[] makeNominateMessages() {
  V3LcapMessage[] msgs = new V3LcapMessage[voters.length];
  for (int ix = 0; ix < msgs.length; ix++) {
    V3LcapMessage nominate = new V3LcapMessage("auid", "key", "1",
                                               ByteArray.makeRandomBytes(20),
                                               ByteArray.makeRandomBytes(20),
                                               V3LcapMessage.MSG_NOMINATE,
                                               987654321,
                                               voters[ix],
                                               tempDir, theDaemon);
    // nominees live in a per-voter 10.0.<ix>.* subnet
    nominate.setNominees(ListUtil.list("TCP:[10.0." + ix + ".1]:9729",
                                       "TCP:[10.0." + ix + ".2]:9729",
                                       "TCP:[10.0." + ix + ".3]:9729",
                                       "TCP:[10.0." + ix + ".4]:9729"));
    msgs[ix] = nominate;
  }
  return msgs;
}
/** Builds one VOTE message per voter carrying every shared vote block. */
private V3LcapMessage[] makeVoteMessages() throws IOException {
  V3LcapMessage[] msgs = new V3LcapMessage[voters.length];
  for (int ix = 0; ix < msgs.length; ix++) {
    V3LcapMessage vote = new V3LcapMessage("auid", "key", "1",
                                           ByteArray.makeRandomBytes(20),
                                           ByteArray.makeRandomBytes(20),
                                           V3LcapMessage.MSG_VOTE,
                                           987654321,
                                           voters[ix],
                                           tempDir, theDaemon);
    for (Iterator it = voteBlocks.iterator(); it.hasNext(); ) {
      vote.addVoteBlock((VoteBlock)it.next());
    }
    msgs[ix] = vote;
  }
  return msgs;
}
/** Builds one canned REPAIR_REP message per voter. */
private V3LcapMessage[] makeRepairMessages() {
  V3LcapMessage[] msgs = new V3LcapMessage[voters.length];
  for (int ix = 0; ix < msgs.length; ix++) {
    msgs[ix] = new V3LcapMessage("auid", "key", "1",
                                 ByteArray.makeRandomBytes(20),
                                 ByteArray.makeRandomBytes(20),
                                 V3LcapMessage.MSG_REPAIR_REP,
                                 987654321,
                                 voters[ix],
                                 tempDir, theDaemon);
  }
  return msgs;
}
/** Stops every service started for the test and restores real time. */
public void tearDown() throws Exception {
  theDaemon.getLockssRepository(testau).stopService();
  theDaemon.getHashService().stopService();
  theDaemon.getDatagramRouterManager().stopService();
  theDaemon.getRouterManager().stopService();
  theDaemon.getSystemMetrics().stopService();
  theDaemon.getPollManager().stopService();
  TimeBase.setReal();
  super.tearDown();
}
/**
 * Computes the responsiveness weight a poller would assign to a peer
 * with the given last-invitation and last-message times.
 *
 * @param lastInvite time the peer was last invited into a poll.
 * @param lastMsg time a message was last received from the peer.
 */
double invitationWeight(long lastInvite, long lastMsg)
    throws Exception {
  String id = "tcp:[1.2.3.4]:4321";
  V3Poller poller = makeV3Poller("testing poll key");
  PeerIdentity pid = findPeerIdentity(id);
  idMgr.findLcapIdentity(pid, id);
  PeerIdentityStatus status = idMgr.getPeerIdentityStatus(pid);
  status.setLastMessageTime(lastMsg);
  status.setLastPollInvitationTime(lastInvite);
  return poller.weightResponsiveness(status);
}
/** Peer keys used by the invitation-weight tests; w2 is configured
 * as having this AU "at risk" when the weight is computed. */
String w1 = "tcp:[1.2.3.4]:4321";
String w2 = "tcp:[1.2.3.4]:4322";
/** Convenience overload: resolves the peer key then delegates. */
String atRiskEntry(ArchivalUnit au, String pidkey) throws Exception {
  return atRiskEntry(au, findPeerIdentity(pidkey));
}
/**
 * Formats an "auid,peer" entry for PARAM_AT_RISK_AU_INSTANCES.
 * Fixed to use the {@code au} parameter rather than the {@code testau}
 * field, which the original silently ignored; every visible caller
 * passes {@code testau}, so existing behavior is unchanged.
 */
String atRiskEntry(ArchivalUnit au, PeerIdentity pid) {
  return au.getAuId() + "," + pid.getIdString();
}
/** Returns the identity-agreement record for a peer on the test AU. */
private IdentityAgreement getIda(PeerIdentity pid) {
  return idMgr.findTestIdentityAgreement(pid, testau);
}
/** Delegate: resolves the peer key, then computes invitation weight. */
double invitationWeight(String pidkey, long lastInvite, long lastMsg)
    throws Exception {
  return invitationWeight(findPeerIdentity(pidkey), lastInvite, lastMsg);
}
/** Delegate: resolves the peer key, then computes invitation weight
 * with an explicit highest-agreement value. */
double invitationWeight(String pidkey, long lastInvite,
                        long lastMsg, float highestAgreement)
    throws Exception {
  return invitationWeight(findPeerIdentity(pidkey), lastInvite, lastMsg,
                          highestAgreement);
}
/** Delegate: uses a default highest-agreement of 0.1. */
double invitationWeight(PeerIdentity pid,
                        long lastInvite, long lastMsg)
    throws Exception {
  return invitationWeight(pid, lastInvite, lastMsg, 0.1f);
}
/**
 * Computes the invitation weight a poller would assign to the peer,
 * after configuring w2 as having this AU "at risk" (weight factor
 * 3.0) and optionally recording a highest-agreement value.
 */
double invitationWeight(PeerIdentity pid, long lastInvite,
                        long lastMsg, float highestAgreement)
    throws Exception {
  ConfigurationUtil.addFromArgs(V3Poller.PARAM_AT_RISK_AU_INSTANCES,
                                atRiskEntry(testau, w2),
                                V3Poller.PARAM_INVITATION_WEIGHT_AT_RISK,
                                "3.0");
  V3Poller poller = makeV3Poller("testing poll key");
  PeerIdentityStatus status = idMgr.getPeerIdentityStatus(pid);
  status.setLastMessageTime(lastMsg);
  status.setLastPollInvitationTime(lastInvite);
  // a negative highestAgreement means "leave agreement unset"
  if (highestAgreement >= 0) {
    IdentityAgreement ida = getIda(pid);
    ida.setPercentAgreement(highestAgreement);
  }
  return poller.invitationWeight(status);
}
/** Verifies the at-risk (3x) and already-repairable (0.5x) factors. */
public void testInvitationWeight() throws Exception {
  // default age curve: [10d,1.0],[30d,0.1],[40d,0.01]
  assertEquals(1.0, invitationWeight(w1, -1, -1));
  // w2 is listed as having this AU at risk
  assertEquals(3.0, invitationWeight(w2, -1, -1));
  // With high agreement, invitationWeightAlreadyRepairable kicks in (.5)
  assertEquals(0.5, invitationWeight(w1, -1, -1, .9f));
  assertEquals(1.5, invitationWeight(w2, -1, -1, .9f));
}
/** Exercises the responsiveness age curve at several (invite, msg)
 * offsets, then overrides the curve via configuration. */
public void testInvitationWeightAgeCurve() throws Exception {
  // default is [10d,1.0],[30d,0.1],[40d,0.01]
  double r1 = .01*90.0/16.0;
  double r2 = .01*9.0/20.0;
  assertEquals(1.0, invitationWeight(-1, -1));
  assertEquals(1.0, invitationWeight(-1, 0));
  assertEquals(1.0, invitationWeight(0, -1));
  assertEquals(1.0, invitationWeight(0, 0));
  assertEquals(1.0, invitationWeight(1, 1));
  assertEquals(1.0, invitationWeight(10, 1));
  assertEquals(1.0, invitationWeight(1, 10));
  assertEquals(1.0, invitationWeight(1*DAY, 0), .01);
  assertEquals(1.0, invitationWeight(4*DAY, 0), .01);
  assertEquals(1.0, invitationWeight(44*DAY, 40*DAY), .01);
  assertEquals(.94, invitationWeight(5*DAY, 0), .01);
  assertEquals(1.0-r1, invitationWeight(5*DAY, 0), .02);
  assertEquals(.94, invitationWeight(105*DAY, 100*DAY), .01);
  assertEquals(.55, invitationWeight(112*DAY, 100*DAY), .01);
  assertEquals(.10, invitationWeight(120*DAY, 100*DAY), .01);
  assertEquals(.01, invitationWeight(140*DAY, 100*DAY), .01);
  // a reconfigured curve takes effect immediately
  ConfigurationUtil.addFromArgs(V3Poller.PARAM_INVITATION_WEIGHT_AGE_CURVE,
                                "[1w,1.0],[20w,.1]");
  assertEquals(1.0, invitationWeight(1*WEEK, 0), .01);
  assertEquals(0.1, invitationWeight(20*WEEK, 0), .01);
}
/** Test for a specific bug fix: a null nominee list must not NPE. */
public void testNullNomineesShouldntThrow() throws Exception {
  V3Poller v3Poller = makeInittedV3Poller("foo");
  try {
    v3Poller.nominatePeers(voters[2], null);
  } catch (NullPointerException ex) {
    fail("Should not have caused NullPointerException", ex);
  }
}
/** Verifies initHasherByteArrays: one poller+voter nonce pair per
 * participant plus a trailing empty array for the plain hash. */
public void testInitHasherByteArrays() throws Exception {
  V3Poller v3Poller = makeInittedV3Poller("foo");
  Map innerCircle =
    (Map)PrivilegedAccessor.getValue(v3Poller, "theParticipants");
  assertEquals(innerCircle.size(), voters.length);
  byte[][] initBytes =
    (byte[][])PrivilegedAccessor.invokeMethod(v3Poller, "initHasherByteArrays");
  assertEquals(initBytes.length, innerCircle.size() + 1); // one for plain hash
  byte[][] compareBytes = new byte[innerCircle.size() + 1][];
  compareBytes[innerCircle.size()] =
    new byte[0]; // Plain hash last
  int ix = 0;
  for (Iterator it = innerCircle.values().iterator(); it.hasNext();) {
    ParticipantUserData proxy = (ParticipantUserData)it.next();
    compareBytes[ix++] =
      ByteArray.concat(proxy.getPollerNonce(), proxy.getVoterNonce());
  }
  for (int i = 0; i < initBytes.length; i++) {
    assertTrue(Arrays.equals(initBytes[i], compareBytes[i]));
  }
}
/** Verifies initHasherDigests: a SHA-1 digest per participant plus
 * one for the plain hash, none null. */
public void testInitHasherDigests() throws Exception {
  V3Poller v3Poller = makeInittedV3Poller("foo");
  Map innerCircle =
    (Map)PrivilegedAccessor.getValue(v3Poller, "theParticipants");
  assertEquals(innerCircle.size(), voters.length);
  MessageDigest[] digests =
    (MessageDigest[])PrivilegedAccessor.invokeMethod(v3Poller, "initHasherDigests");
  assertEquals(digests.length, innerCircle.size() + 1); // one for plain hash
  for (int i = 0; i < digests.length; i++) {
    assertNotNull("Digest " + i + " unexpectedly null.", digests[i]);
    assertEquals("SHA-1", digests[i].getAlgorithm());
  }
}
/** Creates an empty hash block for a mock cached URL. */
private HashBlock makeHashBlock(String url) {
  return new HashBlock(new MockCachedUrl(url));
}
/** Creates a hash block with a single version hashed from content. */
private HashBlock makeHashBlock(String url, String content)
    throws Exception {
  MockCachedUrl cu = new MockCachedUrl(url);
  HashBlock hb = new HashBlock(cu);
  addVersion(hb, content);
  return hb;
}
// Monotonically increasing version number shared by all hash blocks
// created via addVersion(HashBlock, String).
private static int hbVersionNum = 1;
/**
 * Adds a version to the hash block whose five digests (fake "nonced"
 * hashes for four voters plus the plain hash, in that order) are all
 * SHA-1 over the same content. The original spelled out five
 * identical stanzas; collapsed into a loop.
 *
 * @param block the block receiving the version.
 * @param content the content to hash.
 */
private void addVersion(HashBlock block, String content) throws Exception {
  final int digestCount = 5; // 4 voters plus plain hash
  MessageDigest[] digests = new MessageDigest[digestCount];
  for (int i = 0; i < digestCount; i++) {
    digests[i] = MessageDigest.getInstance("SHA1");
    digests[i].update(content.getBytes());
  }
  block.addVersion(0, content.length(),
                   0, content.length(),
                   digests, TestV3Poller.hbVersionNum++, null);
}
/** Creates an empty vote block for the given URL. */
private VoteBlock makeVoteBlock(String url) {
  return new VoteBlock(url);
}
/** Creates a vote block with one version hashed from content. */
private VoteBlock makeVoteBlock(String url, String content)
    throws Exception {
  VoteBlock vb = new VoteBlock(url);
  addVersion(vb, content);
  return vb;
}
/**
 * Adds a version to the vote block; the SHA-1 of the content serves
 * as both hash arguments.
 */
private void addVersion(VoteBlock block, String content) throws Exception {
  MessageDigest digester = MessageDigest.getInstance("SHA1");
  digester.update(content.getBytes());
  byte[] contentHash = digester.digest();
  block.addVersion(0, content.length(),
                   0, content.length(),
                   contentHash, contentHash, false);
}
/**
 * Builds a participant record for the poll with a fresh poller nonce
 * and the given vote blocks stored on disk.
 */
private ParticipantUserData makeParticipant(PeerIdentity id,
                                            V3Poller poller,
                                            VoteBlock [] votes)
    throws Exception {
  ParticipantUserData ud = new ParticipantUserData(id, poller, tempDir);
  ud.setPollerNonce(ByteArray.makeRandomBytes(20));
  VoteBlocks blocks = new DiskVoteBlocks(tempDir);
  for (VoteBlock vote : votes) {
    blocks.addVoteBlock(vote);
  }
  ud.setVoteBlocks(blocks);
  return ud;
}
/** Verifies isPeerEligible: self is never eligible, and the
 * no-invitation subnet config excludes matching peers. */
public void testIsPeerEligible() throws Exception {
  V3Poller v3Poller = makeV3Poller("testing poll key");
  // the poller's own identity is never eligible
  assertFalse(v3Poller.isPeerEligible(pollerId));
  PeerIdentity p1 = findPeerIdentity("TCP:[127.0.0.1]:5009");
  PeerIdentity p2 = findPeerIdentity("TCP:[1.2.3.4]:5009");
  PeerIdentity p3 = findPeerIdentity("TCP:[1.2.3.7]:1111");
  PeerIdentity p4 = findPeerIdentity("TCP:[1.2.3.8]:1111");
  PeerIdentity p5 = findPeerIdentity("TCP:[4.5.6.2]:1111");
  assertTrue(v3Poller.isPeerEligible(p1));
  assertTrue(v3Poller.isPeerEligible(p2));
  assertTrue(v3Poller.isPeerEligible(p3));
  assertTrue(v3Poller.isPeerEligible(p4));
  assertTrue(v3Poller.isPeerEligible(p5));
  // exclude 1.2.3.4/30 (covers p2, p3 but not p4) and 4.5.6.2 (p5)
  ConfigurationUtil.addFromArgs(V3Poller.PARAM_NO_INVITATION_SUBNETS,
                                "1.2.3.4/30;4.5.6.2");
  assertTrue(v3Poller.isPeerEligible(p1));
  assertFalse(v3Poller.isPeerEligible(p2));
  assertFalse(v3Poller.isPeerEligible(p3));
  assertTrue(v3Poller.isPeerEligible(p4));
  assertFalse(v3Poller.isPeerEligible(p5));
}
/** Returns just the peer identities from the poller's available map. */
Collection getAvailablePeers(V3Poller v3Poller) {
  return v3Poller.getAvailablePeers().keySet();
}
/** Verifies that peers in the AU's no-AU set are excluded from the
 * available-peers map while everyone else is included. */
public void testGetAvailablePeers() throws Exception {
  PeerIdentity p1 = findPeerIdentity("TCP:[10.1.0.100]:9729");
  PeerIdentity p2 = findPeerIdentity("TCP:[10.1.0.101]:9729");
  // p2 claims not to have this AU
  DatedPeerIdSet noAuSet = pollmanager.getNoAuPeerSet(testau);
  synchronized (noAuSet) {
    noAuSet.add(p2);
  }
  assertTrue(noAuSet.contains(p2));
  V3Poller v3Poller = makeV3Poller("testing poll key");
  Collection avail = getAvailablePeers(v3Poller);
  log.info("avail: " + avail);
  assertTrue(avail.contains(p1));
  assertFalse(avail.contains(p2));
  // expect exactly p1 plus the six initial voters
  Set exp = new HashSet();
  exp.add(p1);
  for (PeerIdentity pid : voters) {
    exp.add(pid);
  }
  assertEquals(exp, avail);
}
/** With discovery disabled, only the six initial peers are available. */
public void testGetAvailablePeersInitialPeersOnly() throws Exception {
  ConfigurationUtil.addFromArgs(V3Poller.PARAM_ENABLE_DISCOVERY, "false");
  findPeerIdentity("TCP:[10.1.0.100]:9729");
  findPeerIdentity("TCP:[10.1.0.101]:9729");
  V3Poller v3Poller = makeV3Poller("testing poll key");
  assertNotNull(getAvailablePeers(v3Poller));
  assertEquals(6, getAvailablePeers(v3Poller).size());
}
/** The poller's own identity must never appear in the available-peers
 * map, even when listed among the configured initial peers. */
public void testGetAvailablePeersDoesNotIncludeLocalIdentity() throws Exception {
  ConfigurationUtil.addFromArgs(V3Poller.PARAM_ENABLE_DISCOVERY, "false");
  // append our local config to the initial Peer List
  List initialPeersCopy = new ArrayList(initialPeers);
  initialPeersCopy.add(localPeerKey);
  ConfigurationUtil.addFromArgs(IdentityManagerImpl.PARAM_INITIAL_PEERS,
                                StringUtil.separatedString(initialPeersCopy, ";"));
  V3Poller v3Poller = makeV3Poller("testing poll key");
  assertNotNull(getAvailablePeers(v3Poller));
  // Sanity check
  assertTrue(findPeerIdentity(localPeerKey).isLocalIdentity());
  // Should NOT be included in reference list
  assertEquals(6, getAvailablePeers(v3Poller).size());
  assertFalse(getAvailablePeers(v3Poller).contains(findPeerIdentity(localPeerKey)));
}
public List<PeerIdentity> makeAdditionalPeers() throws Exception {
PeerIdentity[] morePeers = {
findPeerIdentity("TCP:[127.0.0.1]:5000"),
findPeerIdentity("TCP:[127.0.0.1]:5001"),
findPeerIdentity("TCP:[127.0.0.1]:5002"),
findPeerIdentity("TCP:[127.0.0.1]:5003"),
findPeerIdentity("TCP:[127.0.0.1]:5004"),
findPeerIdentity("TCP:[127.0.0.1]:5005"),
findPeerIdentity("TCP:[127.0.0.1]:5006"),
findPeerIdentity("TCP:[127.0.0.1]:5007"),
findPeerIdentity("TCP:[127.0.0.1]:5008"),
findPeerIdentity("TCP:[127.0.0.1]:5009"),
};
return ListUtil.fromArray(morePeers);
}
  // Adds three inner-circle voters flagged as having accepted the poll and
  // verifies countParticipatingPeers() reports exactly those three.
  public void testCountParticipatingPeers() throws Exception {
    MyV3Poller poller = makeV3Poller("testing poll key");
    List<String> somePeers =
    ListUtil.list(initialPeers.get(0),
    initialPeers.get(1),
    initialPeers.get(2));
    List<PeerIdentity> participatingPeers = pidsFromPeerNames(somePeers);
    for (PeerIdentity pid : participatingPeers) {
      ParticipantUserData participant = poller.addInnerCircleVoter(pid);
      // make it look like it's participating
      participant.setStatus(V3Poller.PEER_STATUS_ACCEPTED_POLL);
    }
    assertEquals(3, poller.countParticipatingPeers());
  }
  // Configures the given quorum and invitation-size multiplier, sets up a
  // poll with 3 already-participating peers, and asks the poller for
  // `quorum` more peers to invite. Asserts the result is disjoint from the
  // current participants and drawn only from known peers, then returns it
  // so callers can check its size.
  public Collection findMorePeersToInvite(int quorum,
  double invitationMult)
  throws Exception {
    Properties p = new Properties();
    p.setProperty(V3Poller.PARAM_QUORUM, ""+quorum);
    p.setProperty(V3Poller.PARAM_INVITATION_SIZE_TARGET_MULTIPLIER,
    ""+invitationMult);
    ConfigurationUtil.addFromProps(p);
    MyV3Poller poller = makeV3Poller("testing poll key");
    List<String> somePeers =
    ListUtil.list(initialPeers.get(0),
    initialPeers.get(1),
    initialPeers.get(2));
    // Universe of peers the invitations may be drawn from.
    List<PeerIdentity> allPeers = pidsFromPeerNames(initialPeers);
    allPeers.addAll(makeAdditionalPeers());
    List<PeerIdentity> participatingPeers = pidsFromPeerNames(somePeers);
    for (PeerIdentity pid : participatingPeers) {
      ParticipantUserData participant = poller.addInnerCircleVoter(pid);
      // make it look like it's participating
      participant.setStatus(V3Poller.PEER_STATUS_ACCEPTED_POLL);
    }
    Collection more = poller.findNPeersToInvite(quorum);
    assertTrue(more + " isn't disjoint with " + participatingPeers,
    CollectionUtil.isDisjoint(more, participatingPeers));
    assertTrue(allPeers + " doesn't contain all of " + more,
    allPeers.containsAll(more));
    return more;
  }
  // Expected sizes below depend on quorum, the multiplier, the 3 peers
  // already participating, and the 16 known peers (6 initial + 10 extra).
  public void testFindMore1() throws Exception {
    assertEquals(2, findMorePeersToInvite(2, 1).size());
  }
  public void testFindMore2() throws Exception {
    assertEquals(4, findMorePeersToInvite(2, 2).size());
  }
  public void testFindMore3() throws Exception {
    assertEquals(6, findMorePeersToInvite(3, 2).size());
  }
  public void testFindMore4() throws Exception {
    assertEquals(10, findMorePeersToInvite(10, 1).size());
  }
  public void testFindMore5() throws Exception {
    assertEquals(13, findMorePeersToInvite(10, 2).size());
  }
List<PeerIdentity> pidsFromPeerNames(Collection<String> names)
throws Exception {
List<PeerIdentity> res = new ArrayList();
for (String name : names) {
res.add(findPeerIdentity(name));
}
return res;
}
  // Tallies three URLs where all voters (but not the poller) have an extra
  // file "foo2a": foo1 and foo3 must WIN; foo2 must be tallied as a
  // poller-only block. Also checks the per-voter vote-count strings.
  public void testTallyBlocksSucceedsOnExtraFileEdgeCase() throws Exception {
    V3Poller v3Poller = makeV3Poller("testing poll key");
    PeerIdentity id1 = findPeerIdentity("TCP:[127.0.0.1]:8990");
    PeerIdentity id2 = findPeerIdentity("TCP:[127.0.0.1]:8991");
    PeerIdentity id3 = findPeerIdentity("TCP:[127.0.0.1]:8992");
    String [] urls_poller =
    {
    "http://test.com/foo1",
    "http://test.com/foo2",
    "http://test.com/foo3"
    };
    // What the poller hashed locally.
    HashBlock [] hashblocks =
    {
    makeHashBlock("http://test.com/foo1", "content for foo1"),
    makeHashBlock("http://test.com/foo2", "content for foo2"),
    makeHashBlock("http://test.com/foo3", "content for foo3")
    };
    // Voters 1 and 2 have foo2a instead of foo2.
    VoteBlock [] voter1_voteblocks =
    {
    makeVoteBlock("http://test.com/foo1", "content for foo1"),
    makeVoteBlock("http://test.com/foo2a", "content for foo2a"),
    makeVoteBlock("http://test.com/foo3", "content for foo3")
    };
    VoteBlock [] voter2_voteblocks =
    {
    makeVoteBlock("http://test.com/foo1", "content for foo1"),
    makeVoteBlock("http://test.com/foo2a", "content for foo2a"),
    makeVoteBlock("http://test.com/foo3", "content for foo3")
    };
    // Voter 3 has neither foo2 nor foo2a.
    VoteBlock [] voter3_voteblocks =
    {
    makeVoteBlock("http://test.com/foo1", "content for foo1"),
    makeVoteBlock("http://test.com/foo3", "content for foo3")
    };
    v3Poller.theParticipants.put(id1, makeParticipant(id1, v3Poller,
    voter1_voteblocks));
    v3Poller.theParticipants.put(id2, makeParticipant(id2, v3Poller,
    voter2_voteblocks));
    v3Poller.theParticipants.put(id3, makeParticipant(id3, v3Poller,
    voter3_voteblocks));
    v3Poller.lockParticipants();
    // Finally, let's test.
    BlockTally<ParticipantUserData> tally;
    // The results expected are based on a quorum of 3.
    assertEquals(3, v3Poller.getQuorum());
    assertEquals(75, v3Poller.getVoteMargin());
    // foo1: everyone agrees -> WON.
    tally = v3Poller.tallyBlock(hashblocks[0]);
    assertEquals(BlockTally.Result.WON, tally.getTallyResult(3, 75));
    assertContains(tally.getAgreeVoters(), v3Poller.theParticipants.get(id1));
    assertContains(tally.getAgreeVoters(), v3Poller.theParticipants.get(id2));
    assertContains(tally.getAgreeVoters(), v3Poller.theParticipants.get(id3));
    // foo2: only the poller has it -> LOST_POLLER_ONLY_BLOCK.
    tally = v3Poller.tallyBlock(hashblocks[1]);
    assertEquals(BlockTally.Result.LOST_POLLER_ONLY_BLOCK,
    tally.getTallyResult(3, 75));
    assertContains(tally.getPollerOnlyBlockVoters(),
    v3Poller.theParticipants.get(id1));
    assertContains(tally.getPollerOnlyBlockVoters(),
    v3Poller.theParticipants.get(id2));
    assertContains(tally.getPollerOnlyBlockVoters(),
    v3Poller.theParticipants.get(id3));
    // foo3: everyone agrees -> WON.
    tally = v3Poller.tallyBlock(hashblocks[2]);
    assertEquals(BlockTally.Result.WON, tally.getTallyResult(3, 75));
    assertContains(tally.getAgreeVoters(), v3Poller.theParticipants.get(id1));
    assertContains(tally.getAgreeVoters(), v3Poller.theParticipants.get(id2));
    assertContains(tally.getAgreeVoters(), v3Poller.theParticipants.get(id3));
    assertEquals("2/0/1/1/0/0",
    v3Poller.theParticipants.get(id1).getVoteCounts().votes());
    assertEquals("2/0/1/1/0/0",
    v3Poller.theParticipants.get(id2).getVoteCounts().votes());
    // This voter sees a "neither" URL, since neither it nor the
    // poller has foo2a.
    assertEquals("2/0/1/0/1/0",
    v3Poller.theParticipants.get(id3).getVoteCounts().votes());
  }
  // NOTE(review): this test's entire body is commented out, so it currently
  // asserts nothing and always passes. The disabled code also references an
  // undefined variable `url`, so it cannot be re-enabled as-is. Either
  // restore it (defining `url`) or delete the dead code.
  public void testBlockCompare() throws Exception {
    // V3Poller v3Poller = makeV3Poller("testing poll key");
    // PeerIdentity id1 = findPeerIdentity("TCP:[127.0.0.1]:8990");
    // PeerIdentity id2 = findPeerIdentity("TCP:[127.0.0.1]:8991");
    // PeerIdentity id3 = findPeerIdentity("TCP:[127.0.0.1]:8992");
    // PeerIdentity id4 = findPeerIdentity("TCP:[127.0.0.1]:8993");
    // String n1v1 = "This is node 1, version 1. It's the oldest.";
    // String n1v2 = "This is node 1, version 2. It's slightly older.";
    // String n1v3 = "This is node 1, version 3. This is the current version!";
    // // Our hash block only has v1 and v3, not v2
    // HashBlock hb1 = makeHashBlock(url);
    // addVersion(hb1, n1v1);
    // addVersion(hb1, n1v3);
    // UrlTallier.HashBlockComparer comparer =
    //   new UrlTallier.HashBlockComparer(hb1);
    // // Should agree on n1v1.
    // VoteBlock vb1 = makeVoteBlock(url);
    // addVersion(vb1, n1v1);
    // // NOTE: The participantIndex passed to compare is not relevent:
    // // All the nonces in the HashBlock are the same, so the expected
    // // hashes are the same for each participant.
    // assertTrue(comparer.compare(vb1, 0));
    // // Should agree on n1v1 and n1v3.
    // VoteBlock vb2 = makeVoteBlock(url);
    // addVersion(vb2, n1v1);
    // addVersion(vb2, n1v3);
    // assertTrue(comparer.compare(vb2, 1));
    // // Should agree on n1v3.
    // VoteBlock vb3 = makeVoteBlock(url);
    // addVersion(vb3, n1v2);
    // addVersion(vb3, n1v3);
    // assertTrue(comparer.compare(vb3, 2));
    // // Should not agree on any version, since the HashBlock doesn't
    // // have n1v2.
    // VoteBlock vb4 = makeVoteBlock(url);
    // addVersion(vb4, n1v2);
    // assertFalse(comparer.compare(vb4, 3));
  }
  // Verifies that signalAuEvent() fires exactly one AU content-changed
  // event of type Repair carrying the completed-repair URLs.
  public void testSignalAuEvent() throws Exception {
    MyV3Poller poller = makeV3Poller("testing poll key");
    pluginMgr.registerAuEventHandler(new MyAuEventHandler());
    List<String> urls = ListUtil.list("url1", "foo2");
    List<PollerStateBean.Repair> rep = new ArrayList<PollerStateBean.Repair>();
    for (String u : urls) {
      rep.add(new PollerStateBean.Repair(u));
    }
    poller.setCompletedRepairs(rep);
    assertEquals(0, changeEvents.size());
    poller.signalAuEvent();
    assertEquals(1, changeEvents.size());
    AuEventHandler.ChangeInfo ci = changeEvents.get(0);
    assertEquals(AuEventHandler.ChangeInfo.Type.Repair, ci.getType());
    assertTrue(ci.isComplete());
    assertEquals(2, ci.getNumUrls());
    assertNull(ci.getMimeCounts());
    assertEquals(urls, ci.getUrls());
  }
  // Collects every ChangeInfo delivered to MyAuEventHandler, for assertions.
  List<AuEventHandler.ChangeInfo> changeEvents = new ArrayList();
  // Test handler that records AU content-change notifications.
  class MyAuEventHandler extends AuEventHandler.Base {
    @Override public void auContentChanged(PluginManager.AuEvent event,
    ArchivalUnit au,
    AuEventHandler.ChangeInfo info) {
      changeEvents.add(info);
    }
  }
  // Builds an uninitialized test poller for this AU with a 20s duration
  // and SHA-1 hashing.
  private MyV3Poller makeV3Poller(String key) throws Exception {
    PollSpec ps = new MockPollSpec(testau.getAuCachedUrlSet(), null, null,
    Poll.V3_POLL);
    return new MyV3Poller(ps, theDaemon, pollerId, key, 20000, "SHA-1");
  }
  // Like makeV3Poller, but also constructs the inner circle from the test
  // voters and assigns each participant its voter nonce. Uses
  // PrivilegedAccessor to reach the private theParticipants map.
  private MyV3Poller makeInittedV3Poller(String key) throws Exception {
    PollSpec ps = new MockPollSpec(testau.getAuCachedUrlSet(), null, null,
    Poll.V3_POLL);
    MyV3Poller p = new MyV3Poller(ps, theDaemon, pollerId, key, 20000,
    "SHA-1");
    p.constructInnerCircle(voters.length);
    Map innerCircle = (Map)PrivilegedAccessor.getValue(p, "theParticipants");
    for (int ix = 0; ix < voters.length; ix++) {
      PeerIdentity pid = voters[ix];
      ParticipantUserData ud = (ParticipantUserData) innerCircle.get(pid);
      if (ud != null) {
        ud.setVoterNonce(voterNonces[ix]);
      }
    }
    return p;
  }
  // Test subclass of V3Poller that captures outgoing messages instead of
  // sending them, and lets tests inject a canned completed-repairs list.
  private class MyV3Poller extends V3Poller {
    // For testing: Hashmap of voter IDs to V3LcapMessages.
    private Map sentMsgs = Collections.synchronizedMap(new HashMap());
    // Per-peer semaphores given when a message for that peer is "sent".
    private Map semaphores = new HashMap();
    private List<PollerStateBean.Repair> repairs;
    MyV3Poller(PollSpec spec, LockssDaemon daemon, PeerIdentity id,
    String pollkey, long duration, String hashAlg)
    throws PollSerializerException {
      super(spec, daemon, id, pollkey, duration, hashAlg);
    }
    // Records the message and signals any waiter instead of using the net.
    public void sendMessageTo(V3LcapMessage msg, PeerIdentity to) {
      sentMsgs.put(to, msg);
      SimpleBinarySemaphore sem = (SimpleBinarySemaphore)semaphores.get(to);
      if (sem == null) {
        sem = new SimpleBinarySemaphore();
        semaphores.put(to, sem);
      }
      sem.give();
    }
    // Waits up to 5s for a message to the given voter, then returns it.
    // Fails the test if sendMessageTo was never invoked for that voter.
    public V3LcapMessage getSentMessage(PeerIdentity voter) {
      SimpleBinarySemaphore sem = (SimpleBinarySemaphore)semaphores.get(voter);
      if (sem == null) {
        fail ("Message never sent!");
      }
      sem.take(5000); // Really shouldn't take this long
      return (V3LcapMessage)sentMsgs.get(voter);
    }
    void setCompletedRepairs(List<PollerStateBean.Repair> repairs) {
      this.repairs = repairs;
    }
    // Returns the injected repairs list if one was set; otherwise defers
    // to the real poller state.
    @Override
    public List getCompletedRepairs() {
      if (repairs != null) {
        return repairs;
      }
      return super.getCompletedRepairs();
    }
  }
  // Boots the minimal set of daemon services a V3 poll needs: identity
  // manager, scheduler, hash service, routers, metrics, activity regulator,
  // node manager, and the poll manager itself, all rooted in a temp dir.
  private void initRequiredServices() throws Exception {
    pollmanager = theDaemon.getPollManager();
    hashService = theDaemon.getHashService();
    pluginMgr = theDaemon.getPluginManager();
    tempDir = getTempDir();
    tempDirPath = tempDir.getAbsolutePath();
    System.setProperty("java.io.tmpdir", tempDirPath);
    Properties p = new Properties();
    p.setProperty(IdentityManagerImpl.PARAM_ENABLE_V1, "false");
    p.setProperty(LcapDatagramComm.PARAM_ENABLED, "false");
    // NOTE(review): no path separator between tempDirPath and "iddb" —
    // this yields e.g. "/tmp/foo" + "iddb" = "/tmp/fooiddb". Confirm
    // whether that is intentional or a missing File.separator.
    p.setProperty(IdentityManager.PARAM_IDDB_DIR, tempDirPath + "iddb");
    p.setProperty(LockssRepositoryImpl.PARAM_CACHE_LOCATION, tempDirPath);
    p.setProperty(IdentityManager.PARAM_LOCAL_IP, "127.0.0.1");
    p.setProperty(IdentityManager.PARAM_LOCAL_V3_IDENTITY, localPeerKey);
    p.setProperty(ConfigManager.PARAM_NEW_SCHEDULER, "true");
    p.setProperty(IdentityManagerImpl.PARAM_INITIAL_PEERS,
    StringUtil.separatedString(initialPeers, ";"));
    p.setProperty(V3Poller.PARAM_QUORUM, "3");
    p.setProperty(ConfigManager.PARAM_PLATFORM_DISK_SPACE_LIST, tempDirPath);
    p.setProperty(V3Poller.PARAM_STATE_PATH, tempDirPath);
    ConfigurationUtil.setCurrentConfigFromProps(p);
    idMgr = new MyIdentityManager();
    theDaemon.setIdentityManager(idMgr);
    idMgr.initService(theDaemon);
    idMgr.startService();
    theDaemon.getSchedService().startService();
    hashService.startService();
    theDaemon.getDatagramRouterManager().startService();
    theDaemon.getRouterManager().startService();
    theDaemon.getSystemMetrics().startService();
    theDaemon.getActivityRegulator(testau).startService();
    theDaemon.setNodeManager(new MockNodeManager(), testau);
    pollmanager.startService();
  }
  // Test identity manager: exposes per-AU agreement lookup and disables
  // persisting identities to disk.
  static class MyIdentityManager extends IdentityManagerImpl {
    IdentityAgreement findTestIdentityAgreement(PeerIdentity pid,
    ArchivalUnit au) {
      Map map = findAuAgreeMap(au);
      synchronized (map) {
        return findPeerIdentityAgreement(map, pid);
      }
    }
    // No-op: tests don't need identities written to disk.
    public void storeIdentities() throws ProtocolException {
    }
  }
} |
package org.concord.otrunk.net;
import java.io.IOException;
/**
 * IOException subclass that carries the HTTP response code of a failed
 * request alongside the error message.
 */
public class HTTPRequestException extends IOException {
    private static final long serialVersionUID = 1L;

    /** HTTP status code returned by the failed request (e.g. 404, 500). */
    private int responseCode;

    /**
     * @param msg          description of the failure
     * @param responseCode HTTP status code of the failed request
     */
    public HTTPRequestException(String msg, int responseCode) {
        super(msg);
        this.responseCode = responseCode;
    }

    /**
     * Fix: this constructor was previously commented out pending a move to
     * Java 1.6; IOException(String, Throwable) has been available since
     * Java 6, so enable it to stop losing the underlying cause.
     *
     * @param msg          description of the failure
     * @param t            underlying cause of the failure
     * @param responseCode HTTP status code of the failed request
     */
    public HTTPRequestException(String msg, Throwable t, int responseCode) {
        super(msg, t);
        this.responseCode = responseCode;
    }

    /**
     * @return the responseCode
     */
    public int getResponseCode() {
        return responseCode;
    }
}
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.io.*;
import java.util.*;
import protocol.Tool.AcceptByteArry;
import protocol.Tool.SendData;
public class socketClient {
private Socket socket;
private SocketAddress address;
SendThread sendTh;
AcceptThread acceptTh;
public class SendThread extends Thread {
private Socket socketObj;
private Vector<SendData> sendeVector = new Vector<>(); // Vector
public SendThread(Socket obj)
{
socketObj = obj;
}
public void AddSendData(SendData obj)
{
sendeVector.add(obj);
}
public void run()
{
while(true)
{
if(sendeVector.isEmpty())
{
continue;
}
try{
int dataIndex = sendeVector.size() - 1;
SendData data = sendeVector.get(dataIndex);
DataOutputStream dos = new DataOutputStream(socketObj.getOutputStream());
dos.write(data.msgPackage);
dos.flush();
sendeVector.remove(dataIndex);
} catch (IOException e) {
e.printStackTrace();
break;
}
}
}
}
public class AcceptThread extends Thread {
private Socket socketObj;
private AcceptByteArry acceptBytesObj;
private byte[] acceptedData;
public AcceptThread(Socket obj)
{
socketObj = obj;
}
public void run()
{
while(true)
{
try {
BufferedInputStream bis = new BufferedInputStream(
socket.getInputStream());
DataInputStream dis = new DataInputStream(bis);
byte[] bytes = new byte[1]; // byte
boolean hasData = false;
while (dis.read(bytes) != -1) {
System.out.println(bytes[0]); // for test
acceptedData = acceptBytesObj.push(bytes[0]);
hasData = true;
}
if (hasData)
{
try
{
sleep(500);
}
catch (InterruptedException e)
{
e.printStackTrace();
}
}
} catch (IOException e) {
e.printStackTrace();
break;
}
}
}
}
public socketClient() {
try {
socket = new Socket();
address = new InetSocketAddress("127.0.0.1", 5020);
socket.connect(address, 1000);
} catch (IOException e) {
e.printStackTrace();
}
}
public void DoingSend()
{
sendTh = new SendThread(socket);
sendTh.start();
}
public void DoingAccept()
{
acceptTh = new AcceptThread(socket);
acceptTh.start();
}
public void Test() {
try {
DataInputStream input = new DataInputStream(socket.getInputStream());
//DataInputStream
InputStream os = new DataInputStream(System.in);
byte[] bytes = {0x03, 0x02, 0x01};
byte [] b = new byte[1];
DataOutputStream dos = new DataOutputStream(socket.getOutputStream());
dos.write(bytes);
while (-1 != os.read(b)) {
dos.write(b);
}
dos.flush();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
socket.close();
} catch (IOException e) {
}
}
}
public static void main(String[] args) {
socketClient client = new socketClient();
client.DoingSend();
client.DoingAccept();
}
} |
package biweekly.util;
import static biweekly.util.TestUtils.date;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.TimeZone;
import org.junit.Test;
import biweekly.component.VEvent;
import biweekly.property.ExceptionDates;
import biweekly.property.ExceptionRule;
import biweekly.property.RecurrenceDates;
import biweekly.util.Recurrence.Frequency;
/**
* @author Michael Angstadt
*/
public class Google2445UtilsTest {
  // No start date, no rules: the iterator must be empty.
  @Test
  public void getDateIterator_empty() {
    VEvent event = new VEvent();
    List<Date> expectedList = Arrays.asList();
    assertIteratorEquals(expectedList, Google2445Utils.getDateIterator(event, TimeZone.getTimeZone("UTC")));
  }
  // A lone DTSTART with no recurrence yields exactly that one date.
  @Test
  public void getDateIterator_start_date_only() {
    VEvent event = new VEvent();
    event.setDateStart(date("2016-03-25 14:00:00"));
    //@formatter:off
    List<Date> expectedList = Arrays.asList(
    date("2016-03-25 14:00:00")
    );
    //@formatter:on
    assertIteratorEquals(expectedList, Google2445Utils.getDateIterator(event, TimeZone.getTimeZone("UTC")));
  }
  // Date-only DTSTART (hasTime=false) with a daily RRULE: occurrences fall
  // at midnight; timezone argument is null here.
  @Test
  public void getDateIterator_start_date_has_no_time() {
    VEvent event = new VEvent();
    event.setDateStart(date("2016-03-25"), false);
    event.setRecurrenceRule(new Recurrence.Builder(Frequency.DAILY).count(3).build());
    //@formatter:off
    List<Date> expectedList = Arrays.asList(
    date("2016-03-25 00:00:00"),
    date("2016-03-26 00:00:00"),
    date("2016-03-27 00:00:00")
    );
    //@formatter:on
    assertIteratorEquals(expectedList, Google2445Utils.getDateIterator(event, null));
  }
  // Combines RRULE, RDATE, EXDATE and EXRULE: the expected list below omits
  // 2016-03-27 14:00 (EXDATE) and the dates removed by the weekly EXRULE
  // with count 2 (presumably 03-25 and 04-01 — the two weekly hits from
  // DTSTART; verify against Google2445Utils semantics).
  @Test
  public void getDateIterator() {
    TimeZone tz = TimeZone.getTimeZone("America/Los_Angeles");
    VEvent event = new VEvent();
    event.setDateStart(date("2016-03-25 14:00:00", tz));
    event.setRecurrenceRule(new Recurrence.Builder(Frequency.DAILY).count(10).build());
    RecurrenceDates rdate = new RecurrenceDates();
    rdate.getDates().add(new ICalDate(date("2016-03-26 20:00:00", tz)));
    rdate.getDates().add(new ICalDate(date("2016-03-27 20:00:00", tz)));
    event.addRecurrenceDates(rdate);
    ExceptionDates exdate = new ExceptionDates();
    exdate.getValues().add(new ICalDate(date("2016-03-27 14:00:00", tz)));
    event.addExceptionDates(exdate);
    ExceptionRule exrule = new ExceptionRule(new Recurrence.Builder(Frequency.WEEKLY).count(2).build());
    event.addProperty(exrule);
    //@formatter:off
    List<Date> expectedList = Arrays.asList(
    date("2016-03-26 14:00:00", tz),
    date("2016-03-26 20:00:00", tz),
    date("2016-03-27 20:00:00", tz),
    date("2016-03-28 14:00:00", tz),
    date("2016-03-29 14:00:00", tz),
    date("2016-03-30 14:00:00", tz),
    date("2016-03-31 14:00:00", tz),
    date("2016-04-02 14:00:00", tz),
    date("2016-04-03 14:00:00", tz)
    );
    //@formatter:on
    assertIteratorEquals(expectedList, Google2445Utils.getDateIterator(event, TimeZone.getTimeZone("UTC")));
  }
  // Asserts the iterator yields exactly the expected elements, in order,
  // and is then exhausted.
  private static <T> void assertIteratorEquals(List<T> expectedList, Iterator<T> actualIt) {
    Iterator<T> expectedIt = expectedList.iterator();
    while (expectedIt.hasNext()) {
      T actual = actualIt.next();
      T expected = expectedIt.next();
      assertEquals(expected, actual);
    }
    assertFalse(actualIt.hasNext());
  }
}
package com.dubravsky.arcache;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
public class ArcacheTest {
  private static final String ANY_STRING = "LoremIpsum";
  // Typed null used so assertThat picks the byte[] overload when asserting
  // that an evicted key resolves to null.
  private static final byte[] NULL_BYTE_ARRAY = null;
  private Arcache defaultArcache;
  // Builds a string of `size` repetitions of the decimal form of `value`.
  private static String fillStringWith(int size, int value) {
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < size; i++) {
      result.append(String.valueOf(value));
    }
    return result.toString();
  }
  @Before
  public void init(){
    defaultArcache = Arcache.createDefault();
  }
  @Test
  public void shouldPutAndGetString() {
    String key = "key_01";
    defaultArcache.put(key, ANY_STRING);
    assertThat(defaultArcache.get(key), is(ANY_STRING));
  }
  @Test(expected = NullPointerException.class)
  public void shouldThrowNpeIfNullKeyIsProvided() {
    defaultArcache.put(null, ANY_STRING);
  }
  // Fills a 100-byte cache with two 50-char values, then inserts a third;
  // the least recently inserted entry must be evicted.
  @Test
  public void shouldRemoveOldestItemIfLimitIsExceeded() {
    String stringFilledWithZeros = fillStringWith(50, 0);
    String stringFilledWithOnes = fillStringWith(50, 1);
    String stringFilledWithTwos = fillStringWith(50, 2);
    Arcache arcache = Arcache.builder()
    .limitSize(100)
    .build();
    arcache.put("key_01", stringFilledWithZeros);
    arcache.put("key_02", stringFilledWithOnes);
    arcache.put("key_03", stringFilledWithTwos); // it tries to insert element with size 50 but
    // the cache contains two elements with total size of 100. hence, the oldest element (key_01) should be removed
    assertThat(arcache.get("key_01"), is(NULL_BYTE_ARRAY));
    assertThat(arcache.get("key_02"), is(stringFilledWithOnes));
    assertThat(arcache.get("key_03"), is(stringFilledWithTwos));
  }
}
package com.github.apsk.hax;
import com.github.apsk.hax.parser.HAXEventReader;
import com.github.apsk.hax.parser.Parser;
import org.junit.Test;
import javax.xml.stream.XMLStreamException;
import java.util.function.Function;
import static com.github.apsk.hax.HAX.*;
import static junit.framework.Assert.assertEquals;
public class OpsParserTest {
public static class Op {
public enum Type { Sum, Sub, Mul, Div }
public final Type type;
public final int lhs, rhs;
public Op(Type type, int lhs, int rhs) {
this.type = type;
this.lhs = lhs;
this.rhs = rhs;
}
}
@Test
public void checkOps() throws XMLStreamException {
Function<String,String> capitalize = s ->
Character.toUpperCase(s.charAt(0)) + s.substring(1);
Parser<Op> op =
within("op", attr("name"),
elemText("lhs").and(elemText("rhs")))
.map(r -> new Op(
Op.Type.valueOf(capitalize.apply(r._1)),
Integer.parseInt(r._2._1),
Integer.parseInt(r._2._2)
));
HAXEventReader reader = new HAXEventReader(
getClass().getClassLoader().getResourceAsStream("ops.xml")
);
find("ops").run(reader);
manyWithin("ops", attr("class"), op)
.run(reader)
.unpack((cls, ops) -> {
assertEquals(cls, "arith");
Op opA = ops.get(0);
assertEquals(opA.type, Op.Type.Sum);
assertEquals(opA.lhs, 3);
assertEquals(opA.rhs, 6);
Op opB = ops.get(1);
assertEquals(opB.type, Op.Type.Mul);
assertEquals(opB.lhs, 3);
assertEquals(opB.rhs, 3);
Op opC = ops.get(2);
assertEquals(opC.type, Op.Type.Sub);
assertEquals(opC.lhs, 10);
assertEquals(opC.rhs, 1);
Op opD = ops.get(3);
assertEquals(opD.type, Op.Type.Div);
assertEquals(opD.lhs, 18);
assertEquals(opD.rhs, 2);
return null;
});
}
} |
package com.stripe.functional;
import com.stripe.BaseStripeTest;
import com.stripe.Stripe;
import com.stripe.exception.APIConnectionException;
import com.stripe.exception.StripeException;
import com.stripe.model.Balance;
import com.stripe.net.RequestOptions;
import java.io.IOException;
import java.net.Inet4Address;
import java.net.ServerSocket;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class TimeoutTest extends BaseStripeTest {
  @Rule
  public ExpectedException thrown = ExpectedException.none();
  // Points the API base at a local ServerSocket that accepts the TCP
  // connection but never responds, so a 1ms read timeout must fire as an
  // APIConnectionException. try-with-resources closes the socket afterwards.
  @Test
  public void testReadTimeout() throws IOException, StripeException {
    // Create a local server that does nothing to trigger a read timeout
    try (final ServerSocket serverSocket =
    new ServerSocket(0, 1, Inet4Address.getByName("localhost"))) {
      Stripe.overrideApiBase(String.format("http://localhost:%d", serverSocket.getLocalPort()));
      thrown.expect(APIConnectionException.class);
      thrown.expectMessage("Read timed out");
      final RequestOptions options = RequestOptions.builder().setReadTimeout(1).build();
      Balance.retrieve(options);
    }
  }
}
package net.alloyggp.tournament;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import net.alloyggp.tournament.api.Game;
import net.alloyggp.tournament.api.MatchResult;
import net.alloyggp.tournament.api.MatchSetup;
import net.alloyggp.tournament.api.Player;
import net.alloyggp.tournament.api.Seeding;
/**
* Contains utilities for writing fuzz tests that use a single random seed for all
* divergence in their behavior. To keep test failures reproducible, these tests
* should have deterministic behavior if their seed is kept constant.
*/
public class FuzzTests {
  private FuzzTests() {
    //Not instantiable
  }
  /**
   * Returns the combinations of player counts and tournament specification files that
   * are used for parameterized fuzz testing across different tournament types.
   */
  public static Iterable<Object[]> getParameters() {
    return ImmutableList.of(
    new Object[] {6, "singleElim"},
    new Object[] {8, "singleElim"},
    new Object[] {7, "singleElimTwoStages"},
    new Object[] {8, "singleElimTwoStages"},
    new Object[] {10, "swiss1test1"},
    new Object[] {9, "swiss1test1"},
    new Object[] {10, "swiss1test2"},
    new Object[] {9, "swiss1test2"},
    new Object[] {10, "swiss1test3"},
    new Object[] {9, "swiss1test3"},
    new Object[] {10, "swiss1test4"},
    new Object[] {9, "swiss1test4"},
    new Object[] {3, "swiss1test4"},
    new Object[] {10, "swiss1test5"},
    new Object[] {9, "swiss1test5"},
    new Object[] {3, "swiss1test5"},
    new Object[] {6, "swiss1test6"},
    new Object[] {7, "swiss1test6"},
    new Object[] {7, "swiss1test7"},
    new Object[] {8, "swiss1test7"}
    );
  }
  /**
   * Returns a randomized result for the given match, using randomness from the
   * supplied {@link Random} object.
   *
   * <p>The match result may be either aborted or successful. If the game is
   * zero-sum, the goal values will reflect that.
   */
  public static MatchResult getResult(Random random, MatchSetup match) {
    // ~30% of matches are aborted (nextDouble() > 0.7).
    if (random.nextDouble() > 0.7) {
      return MatchResult.getAbortedMatchResult(match);
    }
    List<Integer> goals = getGoals(random, match.getGame());
    return MatchResult.getSuccessfulMatchResult(match, goals);
  }
  // Produces a plausible goal vector for the game's role count; for
  // fixed-sum two-player games the goals sum to 100, and for fixed-sum
  // multi-player games one random winner scores 100 and the rest 0.
  private static List<Integer> getGoals(Random random, Game game) {
    if (game.getNumRoles() == 1) {
      return ImmutableList.of(getOneGoalValue(random));
    } else if (game.getNumRoles() == 2) {
      if (game.isFixedSum()) {
        int goal = getOneGoalValue(random);
        return ImmutableList.of(goal, 100 - goal);
      } else {
        return ImmutableList.of(getOneGoalValue(random), getOneGoalValue(random));
      }
    } else {
      int numRoles = game.getNumRoles();
      if (game.isFixedSum()) {
        int winner = random.nextInt(numRoles);
        List<Integer> goals = Lists.newArrayList(Collections.nCopies(numRoles, 0));
        goals.set(winner, 100);
        return goals;
      } else {
        List<Integer> goals = Lists.newArrayList();
        for (int i = 0; i < numRoles; i++) {
          goals.add(getOneGoalValue(random));
        }
        return goals;
      }
    }
  }
  // Goal distribution: 40% exactly 100, 40% exactly 0, 20% uniform 0-100.
  private static int getOneGoalValue(Random random) {
    double roll = random.nextDouble();
    if (roll > 0.6) {
      return 100;
    } else if (roll > 0.2) {
      return 0;
    } else {
      return random.nextInt(101);
    }
  }
  /**
   * Selects one match from the given set at random, using the given source of
   * randomness.
   *
   * <p>This is necessary to ensure that fuzz tests work the same way every time
   * when their PRNG seed is the same. If the next random value is the same, the
   * match chosen will be the same. By contrast, selecting the first match from
   * the set iterator may give different results on different runs.
   */
  public static MatchSetup pickMatchAtRandom(Random random, Set<MatchSetup> nextMatches) {
    // Sort by match ID first so the index chosen below is deterministic
    // regardless of the set's iteration order.
    List<MatchSetup> sortedMatches = Lists.newArrayList(nextMatches);
    sortedMatches.sort(Comparator.comparing(MatchSetup::getMatchId));
    int chosenIndex = random.nextInt(sortedMatches.size());
    return sortedMatches.get(chosenIndex);
  }
  /**
   * Creates a random seeding with the given number of players using the given
   * source of randomness.
   *
   * <p>Players are named "1", "2", "3", etc.
   */
  public static Seeding createRandomSeeding(Random random, int numPlayers) {
    List<Player> players = IntStream.range(1, numPlayers + 1)
    .mapToObj(Integer::toString)
    .map(Player::create)
    .collect(Collectors.toList());
    return Seeding.createRandomSeeding(random, players);
  }
}
package org.jboss.sasl.test;
import java.io.File;
import java.util.Collections;
import org.jboss.sasl.util.Charsets;
import org.junit.Test;
import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
/**
* Test for the local user SASL mechanism, this will test both the client and server side.
*
* @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a>
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
*/
public class LocalUserTest extends BaseTestCase {
private static final String LOCAL_USER = "JBOSS-LOCAL-USER";
/*
* Normal SASL Client/Server interaction.
*/
  /**
   * Test a successful exchange using the JBOSS-LOCAL-USER mechanism.
   */
  @Test
  public void testSuccessfulExchange() throws Exception {
    CallbackHandler serverCallback = new ServerCallbackHandler("George", (char[]) null);
    SaslServer server = Sasl.createSaslServer(LOCAL_USER, "TestProtocol", "TestServer", Collections.<String, Object>emptyMap(), serverCallback);
    CallbackHandler clientCallback = new ClientCallbackHandler("George", (char[]) null);
    SaslClient client = Sasl.createSaslClient(new String[]{ LOCAL_USER }, "George", "TestProtocol", "TestServer", Collections.<String, Object>emptyMap(), clientCallback);
    assertTrue(client.hasInitialResponse());
    // Two round trips: initial response -> challenge -> response; the
    // server's final null challenge signals completion.
    byte[] response = client.evaluateChallenge(new byte[0]);
    byte[] challenge = server.evaluateResponse(response);
    response = client.evaluateChallenge(challenge);
    challenge = server.evaluateResponse(response);
    assertNull(challenge);
    assertTrue(server.isComplete());
    assertTrue(client.isComplete());
    assertEquals("George", server.getAuthorizationID());
  }
  /**
   * Test an exchange where the client sends a bad response is correctly rejected.
   */
  @Test
  public void testBadExchange() throws Exception {
    CallbackHandler serverCallback = new ServerCallbackHandler("George", (char[]) null);
    SaslServer server = Sasl.createSaslServer(LOCAL_USER, "TestProtocol", "TestServer", Collections.<String, Object>emptyMap(), serverCallback);
    CallbackHandler clientCallback = new ClientCallbackHandler("George", (char[]) null);
    SaslClient client = Sasl.createSaslClient(new String[]{ LOCAL_USER }, "George", "TestProtocol", "TestServer", Collections.<String, Object>emptyMap(), clientCallback);
    assertTrue(client.hasInitialResponse());
    byte[] response = client.evaluateChallenge(new byte[0]);
    byte[] challenge = server.evaluateResponse(response);
    response = client.evaluateChallenge(challenge);
    // Corrupt the first 8 bytes of the client's final response so the
    // server must reject it.
    for (int i = 0; i < 8; i++) {
      response[i] = 0x00;
    }
    try {
      challenge = server.evaluateResponse(response);
      fail("Expected SaslException not thrown.");
    } catch (SaslException expected) {
    }
    assertFalse(server.isComplete());
    // An incomplete server must refuse to report an authorization ID.
    try {
      server.getAuthorizationID();
      fail("Expected IllegalStateException not thrown");
    } catch (IllegalStateException expected) {
    }
  }
/**
 * Test an exchange where the client is passed the path to a file that does
 * not exist: the client must reject the challenge with a SaslException and
 * the server must remain incomplete.
 */
@Test
public void testBadFile() throws Exception {
    CallbackHandler serverCallback = new ServerCallbackHandler("George", (char[]) null);
    SaslServer server = Sasl.createSaslServer(LOCAL_USER, "TestProtocol", "TestServer",
            Collections.<String, Object> emptyMap(), serverCallback);
    CallbackHandler clientCallback = new ClientCallbackHandler("George", (char[]) null);
    SaslClient client = Sasl.createSaslClient(new String[] { LOCAL_USER }, "George", "TestProtocol", "TestServer",
            Collections.<String, Object> emptyMap(), clientCallback);
    assertTrue(client.hasInitialResponse());
    byte[] response = client.evaluateChallenge(new byte[0]);
    byte[] challenge = server.evaluateResponse(response);
    // Replace the server's challenge with the path of a file that does not exist.
    File nonExistent = new File("nonExistant.txt");
    String path = nonExistent.getAbsolutePath();
    challenge = new byte[Charsets.encodedLengthOf(path)];
    Charsets.encodeTo(path, challenge, 0);
    try {
        response = client.evaluateChallenge(challenge);
        // BUG FIX: this fail() was missing, so the test passed silently even
        // when the client accepted the bogus file path.
        fail("Expected SaslException not thrown.");
    } catch (SaslException expected) {
        // the client must refuse a challenge naming a nonexistent file
    }
    assertFalse(server.isComplete());
    try {
        server.getAuthorizationID();
        fail("Expected IllegalStateException not thrown");
    } catch (IllegalStateException expected) {
        // no authorization ID is available before the exchange completes
    }
}
/**
 * Verifies a successful exchange when the client supplies no authorization
 * ID: the server must fall back to the authentication ID ("George").
 */
@Test
public void testNoAuthorizationId() throws Exception {
    CallbackHandler serverCallback = new ServerCallbackHandler("George", (char[]) null);
    SaslServer server = Sasl.createSaslServer(LOCAL_USER, "TestProtocol", "TestServer",
            Collections.<String, Object> emptyMap(), serverCallback);
    CallbackHandler clientCallback = new ClientCallbackHandler("George", (char[]) null);
    SaslClient client = Sasl.createSaslClient(new String[] { LOCAL_USER }, null, "TestProtocol", "TestServer",
            Collections.<String, Object> emptyMap(), clientCallback);
    assertTrue(client.hasInitialResponse());
    // Drive the exchange to completion: initial response plus one full round trip.
    byte[] message = client.evaluateChallenge(new byte[0]);
    message = server.evaluateResponse(message);
    message = client.evaluateChallenge(message);
    message = server.evaluateResponse(message);
    assertNull(message);
    assertTrue(server.isComplete());
    assertTrue(client.isComplete());
    assertEquals("George", server.getAuthorizationID());
}
} |
package org.redmine.ta;
import org.junit.*;
import org.redmine.ta.RedmineManager.INCLUDE;
import org.redmine.ta.beans.*;
import org.redmine.ta.internal.logging.Logger;
import org.redmine.ta.internal.logging.LoggerFactory;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
import static org.junit.Assert.*;
/**
 * Integration tests for RedmineManager. This class and its dependencies are
 * located in the org.redmine.ta.api project.
 */
public class RedmineManagerTest {
// TODO We don't know the activity IDs, so this one is hard-coded!
private static final Integer ACTIVITY_ID = 8;
private static Logger logger = LoggerFactory.getLogger(RedmineManagerTest.class);
private static RedmineManager mgr;
private static String projectKey;
private static TestConfig testConfig;
/**
 * Connects to the Redmine server configured in TestConfig and creates a
 * throw-away project that all tests in this class operate on.
 */
@BeforeClass
public static void oneTimeSetUp() {
    testConfig = new TestConfig();
    logger.info("Running redmine tests using: " + testConfig.getURI());
    mgr = new RedmineManager(testConfig.getURI());
    mgr.setLogin(testConfig.getLogin());
    mgr.setPassword(testConfig.getPassword());

    // A timestamp suffix keeps the project identifier unique across runs.
    Project junitTestProject = new Project();
    junitTestProject.setName("test project");
    junitTestProject.setIdentifier("test" + Calendar.getInstance().getTimeInMillis());
    try {
        projectKey = mgr.createProject(junitTestProject).getIdentifier();
    } catch (Exception e) {
        logger.error(e, "Exception while creating test project");
        Assert.fail("can't create a test project. " + e.getMessage());
    }
}
/** Deletes the shared test project created in oneTimeSetUp(). */
@AfterClass
public static void oneTimeTearDown() {
    try {
        // Only clean up when setup actually created something.
        if (mgr != null && projectKey != null) {
            mgr.deleteProject(projectKey);
        }
    } catch (Exception e) {
        logger.error(e, "Exception while deleting test project");
        Assert.fail("can't delete the test project '" + projectKey + ". reason: "
                + e.getMessage());
    }
}
/**
 * Creates an issue with start/due dates, assignee, multi-line description and
 * estimated hours, then verifies every field on the returned issue.
 */
@Test
public void testCreateIssue() {
    try {
        Issue issueToCreate = new Issue();
        issueToCreate.setSubject("test zzx");

        Calendar startCal = Calendar.getInstance();
        // have to clear them because they are ignored by Redmine and prevent from comparison later
        startCal.clear(Calendar.HOUR_OF_DAY);
        startCal.clear(Calendar.MINUTE);
        startCal.clear(Calendar.SECOND);
        startCal.clear(Calendar.MILLISECOND);
        startCal.add(Calendar.DATE, 5);
        issueToCreate.setStartDate(startCal.getTime());

        Calendar due = Calendar.getInstance();
        due.add(Calendar.MONTH, 1);
        issueToCreate.setDueDate(due.getTime());

        User assignee = getOurUser();
        issueToCreate.setAssignee(assignee);

        String description = "This is the description for the new task." +
                "\nIt has several lines." +
                "\nThis is the last line.";
        issueToCreate.setDescription(description);

        float estimatedHours = 44;
        issueToCreate.setEstimatedHours(estimatedHours);

        Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
        Assert.assertNotNull("Checking returned result", newIssue);
        Assert.assertNotNull("New issue must have some ID", newIssue.getId());

        // check startDate (only the date part is significant)
        Calendar returnedStartCal = Calendar.getInstance();
        returnedStartCal.setTime(newIssue.getStartDate());
        Assert.assertEquals(startCal.get(Calendar.YEAR), returnedStartCal.get(Calendar.YEAR));
        Assert.assertEquals(startCal.get(Calendar.MONTH), returnedStartCal.get(Calendar.MONTH));
        Assert.assertEquals(startCal.get(Calendar.DAY_OF_MONTH), returnedStartCal.get(Calendar.DAY_OF_MONTH));

        // check dueDate
        Calendar returnedDueCal = Calendar.getInstance();
        returnedDueCal.setTime(newIssue.getDueDate());
        Assert.assertEquals(due.get(Calendar.YEAR), returnedDueCal.get(Calendar.YEAR));
        Assert.assertEquals(due.get(Calendar.MONTH), returnedDueCal.get(Calendar.MONTH));
        Assert.assertEquals(due.get(Calendar.DAY_OF_MONTH), returnedDueCal.get(Calendar.DAY_OF_MONTH));

        // check ASSIGNEE
        User actualAssignee = newIssue.getAssignee();
        Assert.assertNotNull("Checking assignee not null", actualAssignee);
        Assert.assertEquals("Checking assignee id", assignee.getId(),
                actualAssignee.getId());

        // check AUTHOR
        Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
        Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());

        // check ESTIMATED TIME
        Assert.assertEquals((Float) estimatedHours, newIssue.getEstimatedHours());

        // check multi-line DESCRIPTION: strip whitespace and newlines because
        // the server may normalize line endings
        String regexpStripExtra = "\\r|\\n|\\s";
        description = description.replaceAll(regexpStripExtra, "");
        String actualDescription = newIssue.getDescription();
        actualDescription = actualDescription.replaceAll(regexpStripExtra, "");
        Assert.assertEquals(description, actualDescription);

        // PRIORITY must have been assigned a default by the server
        Assert.assertNotNull(newIssue.getPriorityId());
        Assert.assertTrue(newIssue.getPriorityId() > 0);
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: was a bare fail(); report the cause so the build log shows why
        Assert.fail(e.getMessage());
    }
}
/**
 * Creates a parent issue and a child referencing it, then checks that the
 * child's parent id matches.
 */
@Test
public void testCreateIssueWithParent() {
    try {
        Issue parentIssue = new Issue();
        parentIssue.setSubject("parent 1");
        Issue newParentIssue = mgr.createIssue(projectKey, parentIssue);
        logger.debug("created parent: " + newParentIssue);
        Assert.assertNotNull("Checking parent was created", newParentIssue);
        Assert.assertNotNull("Checking ID of parent issue is not null",
                newParentIssue.getId());

        Integer parentId = newParentIssue.getId();
        Issue childIssue = new Issue();
        childIssue.setSubject("child 1");
        childIssue.setParentId(parentId);
        Issue newChildIssue = mgr.createIssue(projectKey, childIssue);
        logger.debug("created child: " + newChildIssue);
        Assert.assertEquals("Checking parent ID of the child issue", parentId,
                newChildIssue.getParentId());
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: was a bare fail(); keep the cause visible
        Assert.fail(e.getMessage());
    }
}
/**
 * An explicitly null start date must stay null after the issue is persisted.
 */
@Test
public void testStartDateNull() {
    try {
        Issue issue = new Issue();
        issue.setSubject("test start date");
        issue.setStartDate(null);
        Issue newIssue = mgr.createIssue(projectKey, issue);
        Issue loadedIssue = mgr.getIssueById(newIssue.getId());
        Assert.assertNull(loadedIssue.getStartDate());
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: was a bare fail(); keep the cause visible
        Assert.fail(e.getMessage());
    }
}
/**
 * An issue can be found again by searching for its exact subject.
 */
@Test
public void testGetIssuesBySummary() {
    String summary = "issue with subject ABC";
    try {
        Issue issue = new Issue();
        issue.setSubject(summary);
        issue.setAssignee(getOurUser());
        Issue newIssue = mgr.createIssue(projectKey, issue);
        logger.debug("created: " + newIssue);
        Assert.assertNotNull("Checking returned result", newIssue);
        Assert.assertNotNull("New issue must have some ID", newIssue.getId());

        // try to find the issue by its subject
        List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey, summary);
        Assert.assertNotNull("Checking if search results is not NULL", foundIssues);
        Assert.assertTrue("Search results must be not empty",
                !(foundIssues.isEmpty()));
        Issue loadedIssue1 = RedmineTestUtils.findIssueInList(foundIssues, newIssue.getId());
        Assert.assertNotNull(loadedIssue1);
        Assert.assertEquals(summary, loadedIssue1.getSubject());
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: was a bare fail(); keep the cause visible (also removed a
        // stale block of commented-out assertions)
        Assert.fail(e.getMessage());
    }
}
/**
 * Searching for a summary no issue has must yield an empty, non-null list.
 */
@Test
public void testTryFindNonExistingIssue() {
    String summary = "some summary here for issue which does not exist";
    try {
        List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey, summary);
        Assert.assertNotNull("Search result must be not null", foundIssues);
        Assert.assertTrue("Search result list must be empty",
                foundIssues.isEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: was a bare fail(); keep the cause visible
        Assert.fail(e.getMessage());
    }
}
/**
 * Builds a User object describing the account the tests run under, using the
 * id, login, and names from the test configuration.
 */
private static User getOurUser() {
    Integer userId = Integer.parseInt(testConfig.getParam("createissue.userid"));
    User user = new User();
    user.setId(userId);
    user.setLogin(testConfig.getLogin());
    user.setFirstName(testConfig.getParam("userFName"));
    user.setLastName(testConfig.getParam("userLName"));
    return user;
}
/**
 * A null host URI must be rejected with an IllegalArgumentException.
 */
@Test(expected = IllegalArgumentException.class)
public void testNULLHostParameter() {
new RedmineManager(null);
}
/**
 * An empty host URI must be rejected with an IllegalArgumentException.
 * (Removed the redundant "throws RuntimeException" clause: RuntimeException
 * is unchecked and never needs declaring.)
 */
@Test(expected = IllegalArgumentException.class)
public void testEmptyHostParameter() {
    new RedmineManager("");
}
/**
 * Creating an issue through a manager with no API key and no credentials
 * must raise AuthenticationException.
 */
@Test(expected = AuthenticationException.class)
public void noAPIKeyOnCreateIssueThrowsAE() throws Exception {
    RedmineManager anonymousManager = new RedmineManager(testConfig.getURI());
    Issue issue = new Issue();
    issue.setSubject("test zzx");
    anonymousManager.createIssue(projectKey, issue);
}
/**
 * Creating an issue with an invalid API key must raise AuthenticationException.
 */
@Test(expected = AuthenticationException.class)
public void wrongAPIKeyOnCreateIssueThrowsAE() throws Exception {
    RedmineManager badKeyManager = new RedmineManager(testConfig.getURI(), "wrong_key");
    Issue issue = new Issue();
    issue.setSubject("test zzx");
    badKeyManager.createIssue(projectKey, issue);
}
/**
 * Changing an issue's subject through update() must be visible on reload.
 */
@Test
public void testUpdateIssue() {
    try {
        Issue issue = new Issue();
        String originalSubject = "Issue " + new Date();
        issue.setSubject(originalSubject);
        Issue newIssue = mgr.createIssue(projectKey, issue);

        String changedSubject = "changed subject";
        newIssue.setSubject(changedSubject);
        mgr.update(newIssue);

        Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
        Assert.assertEquals(
                "Checking if 'update issue' operation changed the 'subject' field",
                changedSubject, reloadedFromRedmineIssue.getSubject());
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: was a bare fail(); keep the cause visible
        Assert.fail(e.getMessage());
    }
}
/**
 * Retrieves an {@link Issue} by its ID and checks subject and tracker data.
 *
 * @throws RedmineException        thrown in case something went wrong in Redmine
 * @throws IOException             thrown in case something went wrong while performing I/O operations
 * @throws AuthenticationException thrown in case something went wrong while trying to login
 * @throws NotFoundException       thrown in case the objects requested for could not be found
 */
@Test
public void testGetIssueById() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    String originalSubject = "Issue " + new Date();
    Issue toCreate = new Issue();
    toCreate.setSubject(originalSubject);
    Issue created = mgr.createIssue(projectKey, toCreate);

    Issue reloaded = mgr.getIssueById(created.getId());
    Assert.assertEquals(
            "Checking if 'get issue by ID' operation returned issue with same 'subject' field",
            originalSubject, reloaded.getSubject());
    // The tracker must come back fully populated.
    Tracker tracker = reloaded.getTracker();
    Assert.assertNotNull("Tracker of issue should not be null", tracker);
    Assert.assertNotNull("ID of tracker of issue should not be null", tracker.getId());
    Assert.assertNotNull("Name of tracker of issue should not be null", tracker.getName());
}
/**
 * getProjects() must return a non-empty list containing our test project.
 */
@Test
public void testGetProjects() {
    try {
        List<Project> projects = mgr.getProjects();
        Assert.assertTrue(projects.size() > 0);
        // Scan for the shared test project by identifier.
        Project ours = null;
        for (Project candidate : projects) {
            if (candidate.getIdentifier().equals(projectKey)) {
                ours = candidate;
                break;
            }
        }
        if (ours == null) {
            Assert.fail("Our project with key '" + projectKey + "' is not found on the server");
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * getIssues() for the project must include a freshly created issue.
 */
@Test
public void testGetIssues() {
    try {
        // create at least 1 issue so the listing cannot be empty
        Issue template = new Issue();
        template.setSubject("testGetIssues: " + new Date());
        Issue newIssue = mgr.createIssue(projectKey, template);

        List<Issue> issues = mgr.getIssues(projectKey, null);
        logger.debug("getIssues() loaded " + issues.size() + " issues");
        Assert.assertTrue(issues.size() > 0);
        Issue match = null;
        for (Issue candidate : issues) {
            if (candidate.getId().equals(newIssue.getId())) {
                match = candidate;
                break;
            }
        }
        if (match == null) {
            Assert.fail("getIssues() didn't return the issue we just created. The query "
                    + " must have returned all issues created during the last 2 days");
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Requesting issues with a query id that does not exist must raise
 * NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testGetIssuesInvalidQueryId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Integer invalidQueryId = 9999999;
mgr.getIssues(projectKey, invalidQueryId);
}
/**
 * Creates a random project and verifies the server echoes back every field;
 * the project is removed again in the finally block.
 */
@Test
public void testCreateProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Project template = generateRandomProject();
    String key = null;
    try {
        Project created = mgr.createProject(template);
        key = created.getIdentifier(); // remembered for cleanup below
        Assert.assertNotNull("checking that a non-null project is returned", created);
        Assert.assertEquals(template.getIdentifier(), created.getIdentifier());
        Assert.assertEquals(template.getName(), created.getName());
        Assert.assertEquals(template.getDescription(), created.getDescription());
        Assert.assertEquals(template.getHomepage(), created.getHomepage());
        List<Tracker> trackers = created.getTrackers();
        Assert.assertNotNull("checking that project has some trackers", trackers);
        Assert.assertTrue("checking that project has some trackers", !(trackers.isEmpty()));
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        if (key != null) {
            mgr.deleteProject(key);
        }
    }
}
/**
 * Full CRUD cycle for a project: create, update name/description, reload and
 * verify, then delete in the finally block.
 */
@Test
public void testCreateGetUpdateDeleteProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Project template = generateRandomProject();
    String key = null;
    try {
        template.setIdentifier("id" + new Date().getTime());
        logger.debug("trying to create a project with id " + template.getIdentifier());
        Project created = mgr.createProject(template);
        key = created.getIdentifier();

        String freshDescription = "NEW123";
        String freshName = "new name here";
        created.setName(freshName);
        created.setDescription(freshDescription);
        mgr.update(created);

        Project reloaded = mgr.getProjectByKey(key);
        Assert.assertNotNull(reloaded);
        Assert.assertEquals(created.getIdentifier(), reloaded.getIdentifier());
        Assert.assertEquals(freshName, reloaded.getName());
        Assert.assertEquals(freshDescription, reloaded.getDescription());
        List<Tracker> trackerList = reloaded.getTrackers();
        Assert.assertNotNull("checking that project has some trackers", trackerList);
        Assert.assertTrue("checking that project has some trackers", !(trackerList.isEmpty()));
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (key != null) {
            mgr.deleteProject(key);
        }
    }
}
/**
 * "new" is a reserved project identifier in Redmine; creating a project with
 * it must fail with a validation error.
 */
@Test
public void testCreateProjectFailsWithReservedIdentifier() throws Exception {
    Project projectToCreate = new Project();
    projectToCreate.setName("new");
    projectToCreate.setIdentifier("new");
    String key = null;
    try {
        Project createdProject = mgr.createProject(projectToCreate);
        // Remember the key so the unexpected project is cleaned up in finally.
        key = createdProject.getIdentifier();
        // BUG FIX: the test used to pass silently when creation succeeded.
        Assert.fail("Must have failed with RedmineException: identifier 'new' is reserved");
    } catch (RedmineException e) {
        Assert.assertNotNull(e.getErrors());
        Assert.assertEquals(1, e.getErrors().size());
        Assert.assertEquals("Identifier is reserved", e.getErrors().get(0));
    } finally {
        if (key != null) {
            mgr.deleteProject(key);
        }
    }
}
/**
 * Builds an unsaved Project whose key and name are unique per invocation
 * (timestamp based).
 */
private static Project generateRandomProject() {
    Long timeStamp = Calendar.getInstance().getTimeInMillis();
    Project project = new Project();
    project.setIdentifier("projkey" + timeStamp);
    project.setName("project number " + timeStamp);
    project.setDescription("some description for the project");
    project.setHomepage("www.randompage" + timeStamp + ".com");
    return project;
}
/**
 * A subject containing accented characters must survive the round trip.
 */
@Test
public void testCreateIssueNonUnicodeSymbols() {
    try {
        String accentedSubject = "Example with accents Ao";
        Issue template = new Issue();
        template.setSubject(accentedSubject);
        Issue created = mgr.createIssue(projectKey, template);
        Assert.assertEquals(accentedSubject, created.getSubject());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
/**
 * An issue created with only a subject gets an ID and the current user as
 * its author.
 */
@Test
public void testCreateIssueSummaryOnly() {
    try {
        Issue issueToCreate = new Issue();
        issueToCreate.setSubject("This is the summary line 123");
        Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
        Assert.assertNotNull("Checking returned result", newIssue);
        Assert.assertNotNull("New issue must have some ID", newIssue.getId());
        // check AUTHOR
        Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
        Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: was a bare fail(); keep the cause visible
        Assert.fail(e.getMessage());
    }
}
/**
 * Creating an issue in a project that does not exist must raise
 * NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testCreateIssueInvalidProjectKey() throws IOException, AuthenticationException, RedmineException, NotFoundException {
Issue issueToCreate = new Issue();
issueToCreate.setSubject("Summary line 100");
mgr.createIssue("someNotExistingProjectKey", issueToCreate);
}
/**
 * Looking up a project by a key that does not exist must raise
 * NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testGetProjectNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getProjectByKey("some-non-existing-key");
}
/**
 * Deleting a project by a key that does not exist must raise
 * NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testDeleteNonExistingProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.deleteProject("some-non-existing-key");
}
/**
 * Looking up an issue by an id that does not exist must raise
 * NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testGetIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
int someNonExistingID = 999999;
mgr.getIssueById(someNonExistingID);
}
/**
 * Updating an issue whose id does not exist must raise NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testUpdateIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
int nonExistingId = 999999;
Issue issue = new Issue();
issue.setId(nonExistingId);
mgr.update(issue);
}
/**
 * Smoke test: the server must return a non-empty user list.
 * (Removed a stale block of commented-out code copied from testGetProjects.)
 */
@Test
public void testGetUsers() {
    try {
        List<User> users = mgr.getUsers();
        Assert.assertTrue(users.size() > 0);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/** The logged-in account reported by the server must match our configured user. */
@Test
public void testGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    User expected = getOurUser();
    User currentUser = mgr.getCurrentUser();
    Assert.assertEquals(expected.getId(), currentUser.getId());
    Assert.assertEquals(expected.getLogin(), currentUser.getLogin());
}
/** A user fetched by id must carry the expected id and login. */
@Test
public void testGetUserById() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    User expected = getOurUser();
    User loadedUser = mgr.getUserById(expected.getId());
    Assert.assertEquals(expected.getId(), loadedUser.getId());
    Assert.assertEquals(expected.getLogin(), loadedUser.getLogin());
}
/**
 * Looking up a user by an id that does not exist must raise
 * NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testGetUserNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.getUserById(999999);
}
/**
 * Asking for the current user against a bogus endpoint with bogus credentials
 * must end in NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void testInvalidGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    RedmineManager brokenManager = new RedmineManager(testConfig.getURI() + "/INVALID");
    brokenManager.setLogin("Invalid");
    brokenManager.setPassword("Invalid");
    brokenManager.getCurrentUser();
}
/**
 * Creates a random user, verifies the echoed fields, then deletes the
 * account again in the finally block.
 */
@Test
public void testCreateUser() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    User createdUser = null;
    try {
        User template = generateRandomUser();
        createdUser = mgr.createUser(template);
        Assert.assertNotNull("checking that a non-null project is returned", createdUser);
        Assert.assertEquals(template.getLogin(), createdUser.getLogin());
        Assert.assertEquals(template.getFirstName(), createdUser.getFirstName());
        Assert.assertEquals(template.getLastName(), createdUser.getLastName());
        Assert.assertNotNull(createdUser.getId());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        if (createdUser != null) {
            mgr.deleteUser(createdUser.getId());
        }
    }
}
/** Builds an unsaved User with a timestamp-unique login and mail address. */
private static User generateRandomUser() {
    long randomNumber = new Date().getTime();
    User user = new User();
    user.setFirstName("fname");
    user.setLastName("lname");
    user.setLogin("login" + randomNumber);
    user.setMail("somemail" + randomNumber + "@somedomain.com");
    user.setPassword("zzzz");
    return user;
}
/**
 * Creates a user, updates name and mail, and verifies the persisted values.
 * The account is removed afterwards so repeated runs don't accumulate users
 * (BUG FIX: the created user used to be leaked).
 */
@Test
public void testUpdateUser() throws IOException, AuthenticationException, NotFoundException {
    User userToCreate = new User();
    userToCreate.setFirstName("fname2");
    userToCreate.setLastName("lname2");
    long randomNumber = new Date().getTime();
    userToCreate.setLogin("login33" + randomNumber);
    userToCreate.setMail("email" + randomNumber + "@somedomain.com");
    userToCreate.setPassword("1234");
    User createdUser = null;
    try {
        createdUser = mgr.createUser(userToCreate);
        Integer userId = createdUser.getId();
        // BUG FIX: message said "project" for a user
        Assert.assertNotNull("checking that a non-null user is returned", createdUser);

        String newFirstName = "fnameNEW";
        String newLastName = "lnameNEW";
        String newMail = "newmail" + randomNumber + "@asd.com";
        createdUser.setFirstName(newFirstName);
        createdUser.setLastName(newLastName);
        createdUser.setMail(newMail);
        mgr.update(createdUser);

        User updatedUser = mgr.getUserById(userId);
        Assert.assertEquals(newFirstName, updatedUser.getFirstName());
        Assert.assertEquals(newLastName, updatedUser.getLastName());
        Assert.assertEquals(newMail, updatedUser.getMail());
        Assert.assertEquals(userId, updatedUser.getId());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        // best-effort cleanup of the account created above
        if (createdUser != null && createdUser.getId() != null) {
            try {
                mgr.deleteUser(createdUser.getId());
            } catch (Exception e) {
                logger.error(e, "Exception while deleting test user");
            }
        }
    }
}
/** A freshly created user can be deleted, after which lookups must fail. */
@Test
public void userCanBeDeleted() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    Integer newUserId = mgr.createUser(generateRandomUser()).getId();
    try {
        mgr.deleteUser(newUserId);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
    try {
        mgr.getUserById(newUserId);
        fail("Must have failed with NotFoundException because we tried to delete the user");
    } catch (NotFoundException e) {
        // expected: the user is gone
    }
}
/**
 * Deleting a user by an id that does not exist must raise NotFoundException.
 */
@Test(expected = NotFoundException.class)
public void deletingNonExistingUserThrowsNFE() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.deleteUser(999999);
}
/**
 * Creates more issues than the default page size (25) and checks that
 * getIssues transparently loads all pages without duplicates.
 */
@Test
public void testGetIssuesPaging() {
    try {
        createIssues(27); // more than a single default page can hold
        List<Issue> issues = mgr.getIssues(projectKey, null);
        logger.debug("testGetIssuesPaging() loaded " + issues.size() + " issues");
        Assert.assertTrue(issues.size() > 26);
        // duplicates would collapse in the set and shrink its size
        Set<Issue> issueSet = new HashSet<Issue>(issues);
        Assert.assertEquals(issues.size(), issueSet.size());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/** Creates the given number of issues in the shared test project. */
private List<Issue> createIssues(int issuesNumber) throws IOException, AuthenticationException, NotFoundException, RedmineException {
    List<Issue> created = new ArrayList<Issue>(issuesNumber);
    for (int i = 0; i < issuesNumber; i++) {
        Issue template = new Issue();
        template.setSubject("some issue " + i + " " + new Date());
        created.add(mgr.createIssue(projectKey, template));
    }
    return created;
}
/** Builds an unsaved Issue with a randomized subject. */
private Issue generateRandomIssue() {
    Issue issue = new Issue();
    issue.setSubject("some issue " + new Random().nextInt() + " " + new Date());
    return issue;
}
/**
 * Creates more projects than one result page holds and checks that
 * getProjects() returns them all. Cleanup now runs in a finally block so the
 * 27 projects are not leaked when the assertion fails (BUG FIX).
 */
@Test
public void testProjectsAllPagesLoaded() throws IOException, AuthenticationException, NotFoundException, URISyntaxException, RedmineException {
    int NUM = 27; // must be larger than 25, which is a default page size in Redmine
    List<Project> projects = createProjects(NUM);
    try {
        List<Project> loadedProjects = mgr.getProjects();
        Assert.assertTrue(
                "Number of projects loaded from the server must be bigger than "
                        + NUM + ", but it's " + loadedProjects.size(),
                loadedProjects.size() > NUM);
    } finally {
        deleteProjects(projects);
    }
}
/** Creates the given number of random projects on the server. */
private List<Project> createProjects(int num) throws IOException, AuthenticationException, RedmineException {
    List<Project> created = new ArrayList<Project>(num);
    for (int i = 0; i < num; i++) {
        created.add(mgr.createProject(generateRandomProject()));
    }
    return created;
}
/** Deletes every project in the given list from the server. */
private void deleteProjects(List<Project> projects) throws IOException, AuthenticationException, NotFoundException, RedmineException {
    for (Project doomed : projects) {
        mgr.deleteProject(doomed.getIdentifier());
    }
}
/**
 * Smoke test: fetching all time entries must return a non-null list.
 */
@Test
public void testGetTimeEntries() throws IOException, AuthenticationException, NotFoundException, RedmineException {
List<TimeEntry> list = mgr.getTimeEntries();
Assert.assertNotNull(list);
}
/**
 * Creates a time entry for a fresh issue, then updates its hours and
 * verifies the persisted value.
 */
@Test
public void testCreateGetTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Integer issueId = createIssues(1).get(0).getId();

    TimeEntry toCreate = new TimeEntry();
    Float hours = 11f;
    toCreate.setHours(hours);
    toCreate.setIssueId(issueId);
    // TODO We don't know activities IDs!
    toCreate.setActivityId(ACTIVITY_ID);

    TimeEntry createdEntry = mgr.createTimeEntry(toCreate);
    Assert.assertNotNull(createdEntry);
    logger.debug("Created time entry " + createdEntry);
    Assert.assertEquals(hours, createdEntry.getHours());

    Float newHours = 22f;
    createdEntry.setHours(newHours);
    mgr.update(createdEntry);

    TimeEntry updatedEntry = mgr.getTimeEntry(createdEntry.getId());
    Assert.assertEquals(newHours, updatedEntry.getHours());
}
/** Deleting a time entry makes subsequent lookups fail with NotFoundException. */
@Test(expected = NotFoundException.class)
public void testCreateDeleteTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Integer issueId = createIssues(1).get(0).getId();

    TimeEntry toCreate = new TimeEntry();
    Float hours = 4f;
    toCreate.setHours(hours);
    toCreate.setIssueId(issueId);
    toCreate.setActivityId(ACTIVITY_ID);

    TimeEntry createdEntry = mgr.createTimeEntry(toCreate);
    Assert.assertNotNull(createdEntry);

    mgr.deleteTimeEntry(createdEntry.getId());
    mgr.getTimeEntry(createdEntry.getId()); // must throw NotFoundException
}
/**
 * Two time entries logged against one issue must both be returned, and their
 * hours must sum up.
 */
@Test
public void testGetTimeEntriesForIssue() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Integer issueId = createIssues(1).get(0).getId();
    Float hours1 = 2f;
    Float hours2 = 7f;
    Float totalHoursExpected = hours1 + hours2;
    Assert.assertNotNull(createTimeEntry(issueId, hours1));
    Assert.assertNotNull(createTimeEntry(issueId, hours2));

    List<TimeEntry> entries = mgr.getTimeEntriesForIssue(issueId);
    Assert.assertEquals(2, entries.size());
    Float totalTime = 0f;
    for (TimeEntry logged : entries) {
        totalTime += logged.getHours();
    }
    Assert.assertEquals(totalHoursExpected, totalTime);
}
/** Logs the given number of hours against an issue and returns the new entry. */
private TimeEntry createTimeEntry(Integer issueId, float hours) throws IOException,
        AuthenticationException, NotFoundException, RedmineException {
    TimeEntry toCreate = new TimeEntry();
    toCreate.setHours(hours);
    toCreate.setIssueId(issueId);
    toCreate.setActivityId(ACTIVITY_ID);
    return mgr.createTimeEntry(toCreate);
}
/** A deleted issue can no longer be fetched: getIssueById must throw. */
@Test(expected = NotFoundException.class)
public void testDeleteIssue() throws IOException, AuthenticationException,
        NotFoundException, RedmineException {
    Issue issue = createIssues(1).get(0);
    Assert.assertEquals(issue, mgr.getIssueById(issue.getId()));
    mgr.deleteIssue(issue.getId());
    mgr.getIssueById(issue.getId()); // must throw NotFoundException
}
/** Quotes and XML-like tags in subject/description must survive a round trip. */
@Test
public void testUpdateIssueSpecialXMLtags() throws Exception {
    Issue issue = createIssues(1).get(0);
    String newSubject = "\"text in quotes\" and <xml> tags";
    String newDescription = "<taghere>\"abc\"</here>";
    issue.setSubject(newSubject);
    issue.setDescription(newDescription);
    mgr.update(issue);

    Issue reloaded = mgr.getIssueById(issue.getId());
    Assert.assertEquals(newSubject, reloaded.getSubject());
    Assert.assertEquals(newDescription, reloaded.getDescription());
}
/**
 * Sets two custom fields (text and boolean) on an issue and verifies the
 * values after an update round trip.
 */
@Test
public void testCustomFields() throws Exception {
    Issue issue = createIssues(1).get(0);
    // the server pre-populates issues with 2 empty custom fields
    Assert.assertEquals(2, issue.getCustomFields().size());

    // TODO this is pretty much a hack, we don't generally know these ids!
    int id1 = 1;
    String custom1FieldName = "my_custom_1";
    String custom1Value = "some value 123";
    int id2 = 2;
    String custom2FieldName = "custom_boolean_1";
    String custom2Value = "true";

    List<CustomField> fields = new ArrayList<CustomField>();
    fields.add(new CustomField(id1, custom1FieldName, custom1Value));
    fields.add(new CustomField(id2, custom2FieldName, custom2Value));
    issue.setCustomFields(fields);
    mgr.update(issue);

    Issue updatedIssue = mgr.getIssueById(issue.getId());
    Assert.assertEquals(2, updatedIssue.getCustomFields().size());
    Assert.assertEquals(custom1Value, updatedIssue.getCustomField(custom1FieldName));
    Assert.assertEquals(custom2Value, updatedIssue.getCustomField(custom2FieldName));
}
/**
 * Updating an issue that has no estimated hours must not introduce a value.
 */
@Test
public void testUpdateIssueDoesNotChangeEstimatedTime() {
    try {
        Issue issue = new Issue();
        String originalSubject = "Issue " + new Date();
        issue.setSubject(originalSubject);
        Issue newIssue = mgr.createIssue(projectKey, issue);
        Assert.assertEquals("Estimated hours must be NULL", null, newIssue.getEstimatedHours());

        mgr.update(newIssue);

        Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
        Assert.assertEquals("Estimated hours must be NULL", null, reloadedFromRedmineIssue.getEstimatedHours());
    } catch (Exception e) {
        // BUG FIX: the exception was swallowed with a bare fail() and no trace
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * A project created with a parent id must come back linked to that parent.
 * The main project (and its subproject with it) is deleted afterwards.
 */
@Test
public void subProjectIsCreatedWithCorrectParentId() {
    Project createdMainProject = null;
    try {
        createdMainProject = createProject();
        Project subProject = createSubProject(createdMainProject);
        Assert.assertEquals("Must have correct parent ID",
                createdMainProject.getId(), subProject.getParentId());
    } catch (Exception e) {
        // BUG FIX: was a bare fail(); report the cause
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        if (createdMainProject != null) {
            try {
                mgr.deleteProject(createdMainProject.getIdentifier());
            } catch (Exception e) {
                // BUG FIX: was a bare fail(); report which cleanup step failed
                Assert.fail("cleanup of project '" + createdMainProject.getIdentifier()
                        + "' failed: " + e.getMessage());
            }
        }
    }
}
/**
 * Creates a top-level project with a timestamp-based name and identifier
 * to keep it unique across runs.
 */
private Project createProject() throws IOException, AuthenticationException, RedmineException {
    long uniqueSuffix = new Date().getTime();
    Project mainProject = new Project();
    mainProject.setName("project" + uniqueSuffix);
    mainProject.setIdentifier("project" + uniqueSuffix);
    return mgr.createProject(mainProject);
}
/**
 * Creates a subproject of the given parent, with a timestamp-based unique
 * name and identifier.
 */
private Project createSubProject(Project parent) throws IOException, AuthenticationException, RedmineException {
    long uniqueSuffix = new Date().getTime();
    Project subProject = new Project();
    subProject.setName("sub_pr" + uniqueSuffix);
    subProject.setIdentifier("subpr" + uniqueSuffix);
    subProject.setParentId(parent.getId());
    return mgr.createProject(subProject);
}
/**
 * Verifies "done ratio" handling: defaults to 0 on creation, can be updated
 * to a valid value, and an out-of-range value (130) is rejected by the
 * server without clobbering the previously stored ratio.
 */
@Test
public void testIssueDoneRatio() {
try {
Issue issue = new Issue();
String subject = "Issue " + new Date();
issue.setSubject(subject);
Issue createdIssue = mgr.createIssue(projectKey, issue);
Assert.assertEquals("Initial 'done ratio' must be 0", (Integer) 0, createdIssue.getDoneRatio());
Integer doneRatio = 50;
createdIssue.setDoneRatio(doneRatio);
mgr.update(createdIssue);
Integer issueId = createdIssue.getId();
Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
Assert.assertEquals(
"Checking if 'update issue' operation changed 'done ratio' field",
doneRatio, reloadedFromRedmineIssue.getDoneRatio());
// ratio must be one of 0,10,...,100 on the server side, so 130 is invalid
Integer invalidDoneRatio = 130;
reloadedFromRedmineIssue.setDoneRatio(invalidDoneRatio);
try {
mgr.update(reloadedFromRedmineIssue);
} catch (RedmineException e) {
// the server reports exactly one validation error for the bad ratio
Assert.assertEquals("Must be 1 error", 1, e.getErrors().size());
Assert.assertEquals("Checking error text", "% Done is not included in the list", e.getErrors().get(0));
}
// even if the update was rejected, the stored ratio must still be 50
Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
Assert.assertEquals(
"'done ratio' must have remained unchanged after invalid value",
doneRatio, reloadedFromRedmineIssueUnchanged.getDoneRatio());
} catch (Exception e) {
fail(e.toString());
}
}
/**
 * Verifies description semantics on update: a NULL description means
 * "leave unchanged" (the stored text survives), while an empty string
 * explicitly erases it.
 */
@Test
public void testIssueNullDescriptionDoesNotEraseIt() {
    try {
        Issue issue = new Issue();
        String subject = "Issue " + new Date();
        String descr = "Some description";
        issue.setSubject(subject);
        issue.setDescription(descr);

        Issue createdIssue = mgr.createIssue(projectKey, issue);
        Assert.assertEquals("Checking description", descr, createdIssue.getDescription());

        // NULL => "do not touch this field"
        createdIssue.setDescription(null);
        mgr.update(createdIssue);

        Integer issueId = createdIssue.getId();
        Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
        Assert.assertEquals(
                "Description must not be erased",
                descr, reloadedFromRedmineIssue.getDescription());

        // empty string => explicitly clear the field
        reloadedFromRedmineIssue.setDescription("");
        mgr.update(reloadedFromRedmineIssue);

        Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
        Assert.assertEquals(
                "Description must be erased",
                "", reloadedFromRedmineIssueUnchanged.getDescription());
    } catch (Exception e) {
        // was a bare Assert.fail(), which hid the actual failure cause
        Assert.fail(e.toString());
    }
}
/**
 * Verifies issue journals: a fresh issue has none; an update with a note
 * produces exactly one journal entry carrying the note and the updating
 * user; loading without INCLUDE.journals returns an empty journal list.
 */
@Test
public void testIssueJournals() {
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssues: " + new Date());
Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
Issue loadedIssueWithJournals = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
Assert.assertTrue(loadedIssueWithJournals.getJournals().isEmpty());
String commentDescribingTheUpdate = "some comment describing the issue update";
loadedIssueWithJournals.setSubject("new subject");
loadedIssueWithJournals.setNotes(commentDescribingTheUpdate);
mgr.update(loadedIssueWithJournals);
// the update above must have produced exactly one journal entry
Issue loadedIssueWithJournals2 = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
Assert.assertEquals(1, loadedIssueWithJournals2.getJournals().size());
Journal journalItem = loadedIssueWithJournals2.getJournals().get(0);
Assert.assertEquals(commentDescribingTheUpdate, journalItem.getNotes());
User ourUser = getOurUser();
// can't compare User objects because either of them is not completely filled
Assert.assertEquals(ourUser.getId(), journalItem.getUser().getId());
Assert.assertEquals(ourUser.getFirstName(), journalItem.getUser().getFirstName());
Assert.assertEquals(ourUser.getLastName(), journalItem.getUser().getLastName());
// without INCLUDE.journals the journal list must come back empty
Issue loadedIssueWithoutJournals = mgr.getIssueById(newIssue.getId());
Assert.assertTrue(loadedIssueWithoutJournals.getJournals().isEmpty());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
/**
 * Creates two issues and a "precedes" relation between them, then checks
 * source, target and type on the returned relation bean.
 */
@Test
public void testCreateRelation() {
    try {
        List<Issue> issues = createIssues(2);
        Issue sourceIssue = issues.get(0);
        Issue targetIssue = issues.get(1);

        String relationType = IssueRelation.TYPE.precedes.toString();
        IssueRelation relation = mgr.createRelation(sourceIssue.getId(), targetIssue.getId(), relationType);

        Assert.assertEquals(sourceIssue.getId(), relation.getIssueId());
        Assert.assertEquals(targetIssue.getId(), relation.getIssueToId());
        Assert.assertEquals(relationType, relation.getType());
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
}
/**
 * Helper: creates two issues and links them with a "precedes" relation,
 * returning the relation bean reported by the server.
 */
private IssueRelation createTwoRelatedIssues() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    List<Issue> pair = createIssues(2);
    String relationType = IssueRelation.TYPE.precedes.toString();
    return mgr.createRelation(pair.get(0).getId(), pair.get(1).getId(), relationType);
}
/**
 * Verifies that a relation created between two issues is visible from both
 * sides when loading each issue with INCLUDE.relations, and that forward and
 * reverse views are the same relation object.
 */
@Test
public void issueRelationsAreCreatedAndLoadedOK() {
    try {
        IssueRelation relation = createTwoRelatedIssues();
        Issue sourceIssue = mgr.getIssueById(relation.getIssueId(), INCLUDE.relations);
        Issue targetIssue = mgr.getIssueById(relation.getIssueToId(), INCLUDE.relations);

        Assert.assertEquals(1, sourceIssue.getRelations().size());
        Assert.assertEquals(1, targetIssue.getRelations().size());

        IssueRelation forward = sourceIssue.getRelations().get(0);
        Assert.assertEquals(sourceIssue.getId(), forward.getIssueId());
        Assert.assertEquals(targetIssue.getId(), forward.getIssueToId());
        Assert.assertEquals("precedes", forward.getType());
        Assert.assertEquals((Integer) 0, forward.getDelay());

        // both forward and reverse relations are the same!
        IssueRelation reverse = targetIssue.getRelations().get(0);
        Assert.assertEquals(forward, reverse);
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
}
/**
 * Checks that a target ("fix") version set on a new issue is echoed back on
 * the created issue. Ignored: depends on a pre-existing project "test" and a
 * version with ID 1 on the server.
 */
@Ignore
@Test
public void issueFixVersionIsSet() throws Exception {
    String existingProjectKey = "test";
    Issue toCreate = generateRandomIssue();
    Version v = new Version();
    String versionName = "1.0";
    // was a duplicated "1.0" literal; use the declared variable so the
    // assertion below always compares against the value actually set
    v.setName(versionName);
    v.setId(1); // TODO hard-coded version ID; confirm it exists on the test server
    toCreate.setTargetVersion(v);
    Issue createdIssue = mgr.createIssue(existingProjectKey, toCreate);
    Assert.assertNotNull(createdIssue.getTargetVersion());
    Assert.assertEquals(createdIssue.getTargetVersion().getName(), versionName);
}
/**
 * Checks that projects returned by getProjects() carry their tracker lists.
 * Ignored: a project might legitimately have no trackers, so the test is
 * not deterministic until it creates its own project with known trackers.
 */
@Ignore
@Test
public void testGetProjectsIncludesTrackers() {
try {
List<Project> projects = mgr.getProjects();
Assert.assertTrue(projects.size() > 0);
Project p1 = projects.get(0);
Assert.assertNotNull(p1.getTrackers());
// XXX there could be a case when a project does not have any trackers
// need to create a project with some trackers to make this test deterministic
Assert.assertTrue(!p1.getTrackers().isEmpty());
logger.debug("Created trackers " + p1.getTrackers());
} catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage());
}
}
/**
 * Placeholder for spent-time verification. Ignored and empty: spent hours
 * must be checked through the Time Entries API, not issue fields.
 */
@Ignore
@Test
public void testSpentTime() {
// TODO need to use "Time Entries"
// float spentHours = 12.5f;
// issueToCreate.setSpentHours(spentHours);
// check SPENT TIME
// assertEquals((Float) spentHours, newIssue.getSpentHours());
}
// A time entry lacking both project ID and issue ID must be rejected
// client-side (IllegalArgumentException) before any HTTP call on create.
@Test (expected = IllegalArgumentException.class)
public void invalidTimeEntryFailsWithIAEOnCreate() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.createTimeEntry(createIncompleteTimeEntry());
}
// Same client-side validation as on create: updating an incomplete time
// entry must throw IllegalArgumentException.
@Test (expected = IllegalArgumentException.class)
public void invalidTimeEntryFailsWithIAEOnUpdate() throws IOException, AuthenticationException, RedmineException, NotFoundException {
mgr.update(createIncompleteTimeEntry());
}
/**
 * Builds a time entry that is deliberately invalid: it has activity, date
 * and hours, but neither a project ID nor an issue ID.
 */
private TimeEntry createIncompleteTimeEntry() {
    TimeEntry incompleteEntry = new TimeEntry();
    incompleteEntry.setActivityId(ACTIVITY_ID);
    incompleteEntry.setSpentOn(new Date());
    incompleteEntry.setHours(1.5f);
    return incompleteEntry;
}
/**
 * Regression test for issue 66: an otherwise-incomplete time entry becomes
 * valid once a project ID is set, so creation must succeed.
 */
@Test
public void testViolateTimeEntryConstraint_ProjectOrIssueID_issue66() throws IOException, AuthenticationException, RedmineException {
TimeEntry timeEntry = createIncompleteTimeEntry();
// Now can try to verify with project ID (only test with issue ID seems to be already covered)
int projectId = mgr.getProjects().get(0).getId();
timeEntry.setProjectId(projectId);
try {
TimeEntry created = mgr.createTimeEntry(timeEntry);
logger.debug("Created time entry " + created);
} catch (Exception e) {
e.printStackTrace();
fail("Unexpected " + e.getClass().getSimpleName() + ": " + e.getMessage());
}
}
/**
* tests the retrieval of statuses.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Fetches issue statuses and verifies each has an ID and a name.
 * Cannot seed statuses first: the API does not support creating them yet.
 */
@Test
public void testGetStatuses() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    // TODO we should create some statuses first, but the Redmine Java API does not support this presently
    List<IssueStatus> statuses = mgr.getStatuses();
    Assert.assertFalse("Expected list of statuses not to be empty", statuses.isEmpty());
    for (IssueStatus status : statuses) {
        Assert.assertNotNull("ID of status must not be null", status.getId());
        Assert.assertNotNull("Name of status must not be null", status.getName());
    }
}
/**
* tests the creation of an invalid {@link Version}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// A version without an owning project must be rejected client-side with an
// IllegalArgumentException before any request is sent.
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    mgr.createVersion(new Version(null, "Invalid test version " + UUID.randomUUID().toString()));
}
/**
* tests the deletion of an invalid {@link Version}. Expects a
* {@link NotFoundException} to be thrown.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// Deleting a version whose ID does not exist on the server must raise
// NotFoundException.
@Test(expected = NotFoundException.class)
public void testDeleteInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    Version bogusVersion = new Version(null, "Invalid test version " + UUID.randomUUID().toString());
    bogusVersion.setDescription("An invalid test version created by " + this.getClass());
    // a negative ID is guaranteed not to exist
    bogusVersion.setId(-1);
    mgr.deleteVersion(bogusVersion);
}
/**
* tests the deletion of a {@link Version}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Creates a version on the test project, deletes it again, and verifies
 * the project's version list is empty afterwards.
 */
@Test
public void testDeleteVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    Project project = mgr.getProjectByKey(projectKey);

    Version version = new Version(project, "Test version " + UUID.randomUUID().toString());
    version.setDescription("A test version created by " + this.getClass());
    version.setStatus("open");
    Version newVersion = mgr.createVersion(version);
    Assert.assertNotNull("Expected new version not to be null", newVersion);

    mgr.deleteVersion(newVersion);

    List<Version> versions = mgr.getVersions(project.getId());
    Assert.assertTrue("List of versions of test project must be empty now but is " + versions, versions.isEmpty());
}
/**
* tests the retrieval of {@link Version}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Creates two versions on the test project and checks getVersions() returns
 * exactly those two, each with ID, name and project populated. Both versions
 * are deleted in the finally block to keep the shared project clean.
 */
@Test
public void testGetVersions() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create some versions
Version testVersion1 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
Version testVersion2 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
try {
// assumes the shared test project holds no other versions at this point
List<Version> versions = mgr.getVersions(project.getId());
Assert.assertEquals("Wrong number of versions for project " + project.getName() + " delivered by Redmine Java API", 2, versions.size());
for (Version version : versions) {
// assert version
Assert.assertNotNull("ID of version must not be null", version.getId());
Assert.assertNotNull("Name of version must not be null", version.getName());
Assert.assertNotNull("Project of version must not be null", version.getProject());
}
} finally {
if (testVersion1 != null) {
mgr.deleteVersion(testVersion1);
}
if (testVersion2 != null) {
mgr.deleteVersion(testVersion2);
}
}
}
/**
* tests the creation and deletion of a {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Creates an issue category with an assignee on the test project, deletes
 * it again, and verifies the project's category list is empty afterwards.
 */
@Test
public void testCreateAndDeleteIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    Project project = mgr.getProjectByKey(projectKey);

    IssueCategory category = new IssueCategory(project, "Category" + new Date().getTime());
    category.setAssignee(getOurUser());
    IssueCategory newIssueCategory = mgr.createCategory(category);

    Assert.assertNotNull("Expected new category not to be null", newIssueCategory);
    Assert.assertNotNull("Expected project of new category not to be null", newIssueCategory.getProject());
    Assert.assertNotNull("Expected assignee of new category not to be null", newIssueCategory.getAssignee());

    mgr.deleteCategory(newIssueCategory);

    List<IssueCategory> categories = mgr.getCategories(project.getId());
    Assert.assertTrue("List of categories of test project must be empty now but is " + categories, categories.isEmpty());
}
/**
* tests the retrieval of {@link IssueCategory}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Creates two issue categories on the test project and checks getCategories()
 * returns exactly those two with ID, name, project and assignee populated.
 * Both categories are removed in the finally block.
 */
@Test
public void testGetIssueCategories() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Project project = mgr.getProjectByKey(projectKey);
// create some categories
IssueCategory testIssueCategory1 = new IssueCategory(project, "Category" + new Date().getTime());
testIssueCategory1.setAssignee(getOurUser());
IssueCategory newIssueCategory1 = mgr.createCategory(testIssueCategory1);
IssueCategory testIssueCategory2 = new IssueCategory(project, "Category" + new Date().getTime());
testIssueCategory2.setAssignee(getOurUser());
IssueCategory newIssueCategory2 = mgr.createCategory(testIssueCategory2);
try {
// assumes the shared test project holds no other categories at this point
List<IssueCategory> categories = mgr.getCategories(project.getId());
Assert.assertEquals("Wrong number of categories for project " + project.getName() + " delivered by Redmine Java API", 2, categories.size());
for (IssueCategory category : categories) {
// assert category
Assert.assertNotNull("ID of category must not be null", category.getId());
Assert.assertNotNull("Name of category must not be null", category.getName());
Assert.assertNotNull("Project of category must not be null", category.getProject());
Assert.assertNotNull("Assignee of category must not be null", category.getAssignee());
}
} finally {
// scrub test categories
// NOTE(review): if the first delete throws, the second category leaks
if (newIssueCategory1 != null) {
mgr.deleteCategory(newIssueCategory1);
}
if (newIssueCategory2 != null) {
mgr.deleteCategory(newIssueCategory2);
}
}
}
/**
* tests the creation of an invalid {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// An issue category without an owning project must be rejected client-side
// with an IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    mgr.createCategory(new IssueCategory(null, "InvalidCategory" + new Date().getTime()));
}
/**
* tests the deletion of an invalid {@link IssueCategory}. Expects a
* {@link NotFoundException} to be thrown.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// Deleting an issue category whose ID does not exist on the server must
// raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testDeleteInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    IssueCategory bogusCategory = new IssueCategory(null, "InvalidCategory" + new Date().getTime());
    // a negative ID is guaranteed not to exist
    bogusCategory.setId(-1);
    mgr.deleteCategory(bogusCategory);
}
/**
* Tests the retrieval of {@link Tracker}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Fetches the server's trackers and checks the list is non-empty and every
 * tracker carries an ID and a name.
 */
@Test
public void testGetTrackers() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    List<Tracker> trackers = mgr.getTrackers();
    assertNotNull("List of trackers returned should not be null", trackers);
    assertFalse("List of trackers returned should not be empty", trackers.isEmpty());
    for (Tracker currentTracker : trackers) {
        assertNotNull("Tracker returned should not be null", currentTracker);
        assertNotNull("ID of tracker returned should not be null", currentTracker.getId());
        assertNotNull("Name of tracker returned should not be null", currentTracker.getName());
    }
}
/**
* Tests the retrieval of an {@link Issue}, inlcuding the {@link org.redmine.ta.beans.Attachment}s.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Verifies that loading an issue with INCLUDE.attachments yields a non-null
 * attachment list. Content assertions are pending: the REST API cannot
 * create attachments yet. The test issue is deleted in the finally block.
 */
@Test
public void testGetIssueWithAttachments() throws RedmineException, IOException, AuthenticationException, NotFoundException {
Issue newIssue = null;
try {
// create at least 1 issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssueAttachment_" + UUID.randomUUID());
newIssue = mgr.createIssue(projectKey, issueToCreate);
// TODO create test attachments for the issue once the Redmine REST API allows for it
// retrieve issue attachments
Issue retrievedIssue = mgr.getIssueById(newIssue.getId(), INCLUDE.attachments);
Assert.assertNotNull("List of attachments retrieved for issue " + newIssue.getId() + " delivered by Redmine Java API should not be null", retrievedIssue.getAttachments());
// TODO assert attachments once we actually receive ones for our test issue
} finally {
// scrub test issue
if (newIssue != null) {
mgr.deleteIssue(newIssue.getId());
}
}
}
/**
* Tests the retrieval of an {@link org.redmine.ta.beans.Attachment} by its ID.
* TODO reactivate once the Redmine REST API allows for creating attachments
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// Disabled (@Test commented out): depends on attachment ID 1 existing on
// the server, which the test cannot arrange -- the REST API does not allow
// creating attachments yet.
// @Test
public void testGetAttachmentById() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO where do we get a valid attachment number from? We can't create an attachment by our own for the test as the Redmine REST API does not support that.
int attachmentID = 1;
Attachment attachment = mgr.getAttachmentById(attachmentID);
Assert.assertNotNull("Attachment retrieved by ID " + attachmentID + " should not be null", attachment);
Assert.assertNotNull("Content URL of attachment retrieved by ID " + attachmentID + " should not be null", attachment.getContentURL());
// TODO more asserts on the attachment once this delivers an attachment
}
/**
* Tests the download of the content of an {@link org.redmine.ta.beans.Attachment}.
* TODO reactivate once the Redmine REST API allows for creating attachments
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
// Disabled (@Test commented out): same precondition problem as
// testGetAttachmentById -- there is no way to create a test attachment.
// @Test
public void testDownloadAttachmentContent() throws RedmineException, IOException, AuthenticationException, NotFoundException {
// TODO where do we get a valid attachment number from? We can't create an attachment by our own for the test as the Redmine REST API does not support that.
int attachmentID = 1;
// retrieve issue attachment
Attachment attachment = mgr.getAttachmentById(attachmentID);
// download attachment content
byte[] attachmentContent = mgr.downloadAttachmentContent(attachment);
Assert.assertNotNull("Download of content of attachment with content URL " + attachment.getContentURL() + " should not be null", attachmentContent);
}
/**
* Tests the creation and retrieval of an {@link org.redmine.ta.beans.Issue} with a {@link IssueCategory}.
*
* @throws RedmineException thrown in case something went wrong in Redmine
* @throws IOException thrown in case something went wrong while performing I/O
* operations
* @throws AuthenticationException thrown in case something went wrong while trying to login
* @throws NotFoundException thrown in case the objects requested for could not be found
*/
/**
 * Creates a category, creates an issue assigned to it, reloads the issue,
 * and checks the category ID and name round-trip intact. Issue and category
 * are removed in the finally block (issue first, since it references the
 * category).
 */
@Test
public void testCreateAndGetIssueWithCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
IssueCategory newIssueCategory = null;
Issue newIssue = null;
try {
Project project = mgr.getProjectByKey(projectKey);
// create an issue category
IssueCategory category = new IssueCategory(project, "Category_" + new Date().getTime());
category.setAssignee(getOurUser());
newIssueCategory = mgr.createCategory(category);
// create an issue
Issue issueToCreate = new Issue();
issueToCreate.setSubject("testGetIssueWithCategory_" + UUID.randomUUID());
issueToCreate.setCategory(newIssueCategory);
newIssue = mgr.createIssue(projectKey, issueToCreate);
// retrieve issue
Issue retrievedIssue = mgr.getIssueById(newIssue.getId());
// assert retrieved category of issue
IssueCategory retrievedCategory = retrievedIssue.getCategory();
Assert.assertNotNull("Category retrieved for issue " + newIssue.getId() + " should not be null", retrievedCategory);
Assert.assertEquals("ID of category retrieved for issue " + newIssue.getId() + " is wrong", newIssueCategory.getId(), retrievedCategory.getId());
Assert.assertEquals("Name of category retrieved for issue " + newIssue.getId() + " is wrong", newIssueCategory.getName(), retrievedCategory.getName());
} finally {
if (newIssue != null) {
mgr.deleteIssue(newIssue.getId());
}
if (newIssueCategory != null) {
mgr.deleteCategory(newIssueCategory);
}
}
}
} |
package org.redmine.ta;
import org.junit.*;
import org.redmine.ta.RedmineManager.INCLUDE;
import org.redmine.ta.beans.*;
import org.redmine.ta.internal.logging.Logger;
import org.redmine.ta.internal.logging.LoggerFactory;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
import static org.junit.Assert.*;
/**
* This class and its dependencies are located in org.redmine.ta.api project.
*/
public class RedmineManagerTest {
// TODO We don't know activities IDs!
// Hard-coded time-entry activity ID used by the time-entry tests; depends on
// the server's enumeration configuration -- TODO confirm against test server.
private static final Integer ACTIVITY_ID = 8;
private static Logger logger = LoggerFactory.getLogger(RedmineManagerTest.class);
// Shared manager and per-run test project key, initialized in oneTimeSetUp().
private static RedmineManager mgr;
private static String projectKey;
// Connection/credential settings loaded once in oneTimeSetUp().
private static TestConfig testConfig;
/**
 * One-time fixture: connects to the Redmine server configured via
 * {@link TestConfig} (login/password auth) and creates a dedicated test
 * project whose identifier is stored in {@code projectKey} for all tests.
 */
@BeforeClass
public static void oneTimeSetUp() {
    testConfig = new TestConfig();
    logger.info("Running redmine tests using: " + testConfig.getURI());
    mgr = new RedmineManager(testConfig.getURI());
    mgr.setLogin(testConfig.getLogin());
    mgr.setPassword(testConfig.getPassword());

    // timestamp suffix keeps the project identifier unique across test runs
    Project junitTestProject = new Project();
    junitTestProject.setName("test project");
    junitTestProject.setIdentifier("test"
            + Calendar.getInstance().getTimeInMillis());
    try {
        Project createdProject = mgr.createProject(junitTestProject);
        projectKey = createdProject.getIdentifier();
    } catch (Exception e) {
        logger.error(e, "Exception while creating test project");
        Assert.fail("can't create a test project. " + e.getMessage());
    }
}
/**
 * One-time teardown: removes the test project created in oneTimeSetUp().
 */
@AfterClass
public static void oneTimeTearDown() {
    try {
        if (mgr != null && projectKey != null) {
            mgr.deleteProject(projectKey);
        }
    } catch (Exception e) {
        logger.error(e, "Exception while deleting test project");
        // fixed: message was missing the closing quote around the project key
        Assert.fail("can't delete the test project '" + projectKey + "'. reason: "
                + e.getMessage());
    }
}
@Before
// Is executed before each test method
// Intentionally empty: all shared state is created once in oneTimeSetUp().
public void setup() throws Exception {
}
/**
 * End-to-end issue creation check: sets start/due dates, assignee,
 * multi-line description and an estimate, then verifies each field on the
 * issue returned by the server (dates compared at day granularity).
 */
@Test
public void testCreateIssue() {
    try {
        Issue issueToCreate = new Issue();
        issueToCreate.setSubject("test zzx");

        Calendar startCal = Calendar.getInstance();
        // have to clear them because they are ignored by Redmine and prevent from comparison later
        startCal.clear(Calendar.HOUR_OF_DAY);
        startCal.clear(Calendar.MINUTE);
        startCal.clear(Calendar.SECOND);
        startCal.clear(Calendar.MILLISECOND);
        startCal.add(Calendar.DATE, 5);
        issueToCreate.setStartDate(startCal.getTime());

        Calendar due = Calendar.getInstance();
        due.add(Calendar.MONTH, 1);
        issueToCreate.setDueDate(due.getTime());

        User assignee = getOurUser();
        issueToCreate.setAssignee(assignee);

        String description = "This is the description for the new task." +
                "\nIt has several lines." +
                "\nThis is the last line.";
        issueToCreate.setDescription(description);

        float estimatedHours = 44;
        issueToCreate.setEstimatedHours(estimatedHours);

        Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
        Assert.assertNotNull("Checking returned result", newIssue);
        Assert.assertNotNull("New issue must have some ID", newIssue.getId());

        // check startDate (day granularity only -- time parts were cleared)
        Calendar returnedStartCal = Calendar.getInstance();
        returnedStartCal.setTime(newIssue.getStartDate());
        Assert.assertEquals(startCal.get(Calendar.YEAR), returnedStartCal.get(Calendar.YEAR));
        Assert.assertEquals(startCal.get(Calendar.MONTH), returnedStartCal.get(Calendar.MONTH));
        Assert.assertEquals(startCal.get(Calendar.DAY_OF_MONTH), returnedStartCal.get(Calendar.DAY_OF_MONTH));

        // check dueDate
        Calendar returnedDueCal = Calendar.getInstance();
        returnedDueCal.setTime(newIssue.getDueDate());
        Assert.assertEquals(due.get(Calendar.YEAR), returnedDueCal.get(Calendar.YEAR));
        Assert.assertEquals(due.get(Calendar.MONTH), returnedDueCal.get(Calendar.MONTH));
        Assert.assertEquals(due.get(Calendar.DAY_OF_MONTH), returnedDueCal.get(Calendar.DAY_OF_MONTH));

        // check ASSIGNEE
        User actualAssignee = newIssue.getAssignee();
        Assert.assertNotNull("Checking assignee not null", actualAssignee);
        Assert.assertEquals("Checking assignee id", assignee.getId(),
                actualAssignee.getId());

        // check AUTHOR: the creating user must be recorded as the author
        Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
        Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());

        // check ESTIMATED TIME
        Assert.assertEquals((Float) estimatedHours, newIssue.getEstimatedHours());

        // check multi-line DESCRIPTION: strip whitespace/newlines before
        // comparison since line endings may differ after the round trip
        String regexpStripExtra = "\\r|\\n|\\s";
        description = description.replaceAll(regexpStripExtra, "");
        String actualDescription = newIssue.getDescription();
        actualDescription = actualDescription.replaceAll(regexpStripExtra, "");
        Assert.assertEquals(description, actualDescription);

        // PRIORITY: server must assign a positive default priority
        Assert.assertNotNull(newIssue.getPriorityId());
        Assert.assertTrue(newIssue.getPriorityId() > 0);
    } catch (Exception e) {
        e.printStackTrace();
        // was a bare Assert.fail(), which hid the actual failure cause
        Assert.fail(e.toString());
    }
}
/**
 * Creates a parent issue and then a child referencing it by parent ID, and
 * verifies the child's parent ID round-trips intact.
 */
@Test
public void testCreateIssueWithParent() {
    try {
        Issue parentIssue = new Issue();
        parentIssue.setSubject("parent 1");
        Issue newParentIssue = mgr.createIssue(projectKey, parentIssue);
        logger.debug("created parent: " + newParentIssue);

        Assert.assertNotNull("Checking parent was created", newParentIssue);
        Assert.assertNotNull("Checking ID of parent issue is not null",
                newParentIssue.getId());

        Integer parentId = newParentIssue.getId();
        Issue childIssue = new Issue();
        childIssue.setSubject("child 1");
        childIssue.setParentId(parentId);

        Issue newChildIssue = mgr.createIssue(projectKey, childIssue);
        logger.debug("created child: " + newChildIssue);
        Assert.assertEquals("Checking parent ID of the child issue", parentId,
                newChildIssue.getParentId());
    } catch (Exception e) {
        e.printStackTrace();
        // was a bare Assert.fail(), which hid the actual failure cause
        Assert.fail(e.toString());
    }
}
/**
 * Verifies that an explicitly NULL start date survives a create/reload
 * round trip (the server must not substitute a default date).
 */
@Test
public void testStartDateNull() {
    try {
        Issue issue = new Issue();
        issue.setSubject("test start date");
        issue.setStartDate(null);
        Issue newIssue = mgr.createIssue(projectKey, issue);
        Issue loadedIssue = mgr.getIssueById(newIssue.getId());
        Assert.assertNull(loadedIssue.getStartDate());
    } catch (Exception e) {
        e.printStackTrace();
        // was a bare Assert.fail(), which hid the actual failure cause
        Assert.fail(e.toString());
    }
}
/**
 * Creates an issue with a known subject and verifies it is found again via
 * a summary search on the same project.
 */
@Test
public void testGetIssuesBySummary() {
    String summary = "issue with subject ABC";
    try {
        Issue issue = new Issue();
        issue.setSubject(summary);
        User assignee = getOurUser();
        issue.setAssignee(assignee);

        Issue newIssue = mgr.createIssue(projectKey, issue);
        logger.debug("created: " + newIssue);
        Assert.assertNotNull("Checking returned result", newIssue);
        Assert.assertNotNull("New issue must have some ID", newIssue.getId());

        // search by subject and make sure the freshly created issue is found
        List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey,
                summary);
        Assert.assertNotNull("Checking if search results is not NULL", foundIssues);
        Assert.assertTrue("Search results must be not empty",
                !(foundIssues.isEmpty()));

        Issue loadedIssue1 = RedmineTestUtils.findIssueInList(foundIssues, newIssue.getId());
        Assert.assertNotNull(loadedIssue1);
        Assert.assertEquals(summary, loadedIssue1.getSubject());
        // removed a block of commented-out assignee assertions (dead code)
    } catch (Exception e) {
        e.printStackTrace();
        // was a bare Assert.fail(), which hid the actual failure cause
        Assert.fail(e.toString());
    }
}
// A summary search for a subject that no issue has must return an empty (not null) list.
@Test
public void testTryFindNonExistingIssue() {
    String summary = "some summary here for issue which does not exist";
    try {
        // try to find the issue
        List<Issue> foundIssues = mgr.getIssuesBySummary(projectKey,
                summary);
        Assert.assertNotNull("Search result must be not null", foundIssues);
        Assert.assertTrue("Search result list must be empty",
                foundIssues.isEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    }
}
// Builds a User object describing the account the tests run as, from the test
// configuration ("createissue.userid", login, first/last name). The object is
// assembled locally and is NOT loaded from the server.
private static User getOurUser() {
    Integer userId = Integer
            .parseInt(testConfig.getParam("createissue.userid"));
    String login = testConfig.getLogin();
    String fName = testConfig.getParam("userFName");
    String lName = testConfig.getParam("userLName");
    User user = new User();
    user.setId(userId);
    user.setLogin(login);
    user.setFirstName(fName);
    user.setLastName(lName);
    return user;
}
// Constructing a RedmineManager with a NULL host must be rejected immediately.
@Test(expected = IllegalArgumentException.class)
public void testNULLHostParameter() {
    new RedmineManager(null);
}
// Constructing a RedmineManager with an empty host must be rejected immediately.
@Test(expected = IllegalArgumentException.class)
public void testEmptyHostParameter() throws RuntimeException {
    new RedmineManager("");
}
// Creating an issue without supplying an API key must fail with AuthenticationException.
@Test(expected = AuthenticationException.class)
public void noAPIKeyOnCreateIssueThrowsAE() throws Exception {
    RedmineManager redmineMgrEmpty = new RedmineManager(testConfig.getURI());
    Issue issue = new Issue();
    issue.setSubject("test zzx");
    redmineMgrEmpty.createIssue(projectKey, issue);
}
// Creating an issue with an invalid API key must fail with AuthenticationException.
@Test(expected = AuthenticationException.class)
public void wrongAPIKeyOnCreateIssueThrowsAE() throws Exception {
    RedmineManager redmineMgrInvalidKey = new RedmineManager(testConfig.getURI(), "wrong_key");
    Issue issue = new Issue();
    issue.setSubject("test zzx");
    redmineMgrInvalidKey.createIssue(projectKey, issue);
}
// Round-trip check: update an issue's subject, reload it, and verify the change stuck.
@Test
public void testUpdateIssue() {
    try {
        Issue issue = new Issue();
        String originalSubject = "Issue " + new Date();
        issue.setSubject(originalSubject);
        Issue newIssue = mgr.createIssue(projectKey, issue);
        String changedSubject = "changed subject";
        newIssue.setSubject(changedSubject);
        mgr.updateIssue(newIssue);
        Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
        Assert.assertEquals(
                "Checking if 'update issue' operation changed the 'subject' field",
                changedSubject, reloadedFromRedmineIssue.getSubject());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    }
}
// Creates an issue, then fetches it by ID and checks the subject survived the round trip.
@Test
public void testGetIssueById() {
    try {
        Issue issue = new Issue();
        String originalSubject = "Issue " + new Date();
        issue.setSubject(originalSubject);
        Issue newIssue = mgr.createIssue(projectKey, issue);
        Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
        Assert.assertEquals(
                "Checking if 'get issue by ID' operation returned issue with same 'subject' field",
                originalSubject, reloadedFromRedmineIssue.getSubject());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    }
}
// The server must return a non-empty project list that includes our test project.
@Test
public void testGetProjects() {
    try {
        List<Project> projects = mgr.getProjects();
        Assert.assertTrue(projects.size() > 0);
        boolean ourProjectPresent = false;
        for (Project candidate : projects) {
            if (candidate.getIdentifier().equals(projectKey)) {
                ourProjectPresent = true;
                break;
            }
        }
        if (!ourProjectPresent) {
            Assert.fail("Our project with key '" + projectKey + "' is not found on the server");
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
// getIssues() with a NULL query must return all issues, including one we just created.
@Test
public void testGetIssues() {
    try {
        // create at least 1 issue
        Issue issueToCreate = new Issue();
        issueToCreate.setSubject("testGetIssues: " + new Date());
        Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
        List<Issue> issues = mgr.getIssues(projectKey, null);
        logger.debug("getIssues() loaded " + issues.size() + " issues");//using query #" + queryIdIssuesCreatedLast2Days);
        Assert.assertTrue(issues.size() > 0);
        // scan the result for the issue created above
        boolean found = false;
        for (Issue issue : issues) {
            if (issue.getId().equals(newIssue.getId())) {
                found = true;
                break;
            }
        }
        if (!found) {
            Assert.fail("getIssues() didn't return the issue we just created. The query "
                    + " must have returned all issues created during the last 2 days");
        }
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
// Requesting issues with a non-existing saved-query ID must raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testGetIssuesInvalidQueryId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    Integer invalidQueryId = 9999999;
    mgr.getIssues(projectKey, invalidQueryId);
}
// Creates a random project and verifies every field round-trips; the project is
// always deleted in the finally block so test projects don't accumulate.
@Test
public void testCreateProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Project projectToCreate = generateRandomProject();
    String key = null;
    try {
        Project createdProject = mgr.createProject(projectToCreate);
        key = createdProject.getIdentifier();
        Assert.assertNotNull("checking that a non-null project is returned", createdProject);
        Assert.assertEquals(projectToCreate.getIdentifier(), createdProject.getIdentifier());
        Assert.assertEquals(projectToCreate.getName(), createdProject.getName());
        Assert.assertEquals(projectToCreate.getDescription(), createdProject.getDescription());
        Assert.assertEquals(projectToCreate.getHomepage(), createdProject.getHomepage());
        List<Tracker> trackers = createdProject.getTrackers();
        Assert.assertNotNull("checking that project has some trackers", trackers);
        Assert.assertTrue("checking that project has some trackers", !(trackers.isEmpty()));
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    } finally {
        if (key != null) {
            mgr.deleteProject(key);
        }
    }
}
// Full project lifecycle: create, update name/description, reload and verify, delete.
@Test
public void testCreateGetUpdateDeleteProject() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Project projectToCreate = generateRandomProject();
    String key = null;
    try {
        // identifier must be unique across test runs
        projectToCreate.setIdentifier("id" + new Date().getTime());
        logger.debug("trying to create a project with id " + projectToCreate.getIdentifier());
        Project createdProject = mgr.createProject(projectToCreate);
        key = createdProject.getIdentifier();
        String newDescr = "NEW123";
        String newName = "new name here";
        createdProject.setName(newName);
        createdProject.setDescription(newDescr);
        mgr.updateProject(createdProject);
        Project updatedProject = mgr.getProjectByKey(key);
        Assert.assertNotNull(updatedProject);
        Assert.assertEquals(createdProject.getIdentifier(), updatedProject.getIdentifier());
        Assert.assertEquals(newName, updatedProject.getName());
        Assert.assertEquals(newDescr, updatedProject.getDescription());
        List<Tracker> trackers = updatedProject.getTrackers();
        Assert.assertNotNull("checking that project has some trackers", trackers);
        Assert.assertTrue("checking that project has some trackers", !(trackers.isEmpty()));
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } finally {
        // clean up regardless of test outcome
        if (key != null) {
            mgr.deleteProject(key);
        }
    }
}
// "new" is a reserved project identifier in Redmine; creation must fail with a
// single "Identifier is reserved" error. If it unexpectedly succeeds, clean up.
@Test
public void testCreateProjectFailsWithReservedIdentifier() throws Exception {
    Project projectToCreate = new Project();
    projectToCreate.setName("new");
    projectToCreate.setIdentifier("new");
    String key = null;
    try {
        Project createdProject = mgr.createProject(projectToCreate);
        // in case if the creation haven't failed (although it should have had!),
        // need to cleanup - delete this project
        key = createdProject.getIdentifier();
    } catch (RedmineException e) {
        Assert.assertNotNull(e.getErrors());
        Assert.assertEquals(1, e.getErrors().size());
        Assert.assertEquals("Identifier is reserved", e.getErrors().get(0));
    } finally {
        if (key != null) {
            mgr.deleteProject(key);
        }
    }
}
// Builds a throwaway Project whose key, name and homepage are made unique by the
// current timestamp. The project is not persisted by this method.
private static Project generateRandomProject() {
    long stamp = Calendar.getInstance().getTimeInMillis();
    Project result = new Project();
    result.setIdentifier("projkey" + stamp);
    result.setName("project number " + stamp);
    result.setDescription("some description for the project");
    result.setHomepage("www.randompage" + stamp + ".com");
    return result;
}
// Verifies a subject with accented characters survives create unchanged.
// NOTE(review): the literal below contains only plain ASCII ("Ao") although the
// test name and comment talk about accents — the accented characters were
// presumably lost in an encoding conversion at some point; confirm against VCS history.
@Test
public void testCreateIssueNonUnicodeSymbols() {
    try {
        String nonLatinSymbols = "Example with accents Ao";
        Issue toCreate = new Issue();
        toCreate.setSubject(nonLatinSymbols);
        Issue created = mgr.createIssue(projectKey, toCreate);
        Assert.assertEquals(nonLatinSymbols, created.getSubject());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// An issue created with only a subject must get an ID and be authored by our test user.
@Test
public void testCreateIssueSummaryOnly() {
    try {
        Issue issueToCreate = new Issue();
        issueToCreate.setSubject("This is the summary line 123");
        Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
        Assert.assertNotNull("Checking returned result", newIssue);
        Assert.assertNotNull("New issue must have some ID", newIssue.getId());
        // check AUTHOR
        Integer EXPECTED_AUTHOR_ID = getOurUser().getId();
        Assert.assertEquals(EXPECTED_AUTHOR_ID, newIssue.getAuthor().getId());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    }
}
// Creating an issue in a non-existing project must raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testCreateIssueInvalidProjectKey() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    Issue issueToCreate = new Issue();
    issueToCreate.setSubject("Summary line 100");
    mgr.createIssue("someNotExistingProjectKey", issueToCreate);
}
// Looking up a project by a non-existing key must raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testGetProjectNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    mgr.getProjectByKey("some-non-existing-key");
}
// Deleting a project by a non-existing key must raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testDeleteNonExistingProject() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    mgr.deleteProject("some-non-existing-key");
}
// Fetching an issue by a non-existing ID must raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testGetIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    int someNonExistingID = 999999;
    mgr.getIssueById(someNonExistingID);
}
// Updating an issue whose ID does not exist must raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testUpdateIssueNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    int nonExistingId = 999999;
    Issue issue = new Issue();
    issue.setId(nonExistingId);
    mgr.updateIssue(issue);
}
// The server must return a non-empty user list (the test account itself always exists).
// Removed a stale block of commented-out project-scanning code copied from testGetProjects.
@Test
public void testGetUsers() {
    try {
        List<User> users = mgr.getUsers();
        Assert.assertTrue("expected at least one user on the server", users.size() > 0);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
// The authenticated user reported by the server must match the configured test account.
@Test
public void testGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    User currentUser = mgr.getCurrentUser();
    Assert.assertEquals(getOurUser().getId(), currentUser.getId());
    Assert.assertEquals(getOurUser().getLogin(), currentUser.getLogin());
}
// Loading our own user by ID must return matching ID and login.
@Test
public void testGetUserById() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    User loadedUser = mgr.getUserById(getOurUser().getId());
    Assert.assertEquals(getOurUser().getId(), loadedUser.getId());
    Assert.assertEquals(getOurUser().getLogin(), loadedUser.getLogin());
}
// Fetching a user by a non-existing ID must raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testGetUserNonExistingId() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    mgr.getUserById(999999);
}
// getCurrentUser() against an invalid URI with bad credentials must raise
// NotFoundException (the "/INVALID" path segment makes the endpoint unresolvable).
@Test(expected = NotFoundException.class)
public void testInvalidGetCurrentUser() throws IOException, AuthenticationException, RedmineException, NotFoundException {
    RedmineManager invalidManager = new RedmineManager(testConfig.getURI() + "/INVALID");
    invalidManager.setLogin("Invalid");
    invalidManager.setPassword("Invalid");
    invalidManager.getCurrentUser();
}
// Creates a random user and verifies login/first/last name round-trip and an ID is assigned.
// NOTE(review): the created user is never deleted — test users accumulate on the server.
@Test
public void testCreateUser() throws IOException, AuthenticationException, NotFoundException {
    try {
        User userToCreate = generateRandomUser();
        User createdUser = mgr.createUser(userToCreate);
        Assert.assertNotNull("checking that a non-null project is returned", createdUser);
        Assert.assertEquals(userToCreate.getLogin(), createdUser.getLogin());
        Assert.assertEquals(userToCreate.getFirstName(), createdUser.getFirstName());
        Assert.assertEquals(userToCreate.getLastName(), createdUser.getLastName());
        Integer id = createdUser.getId();
        Assert.assertNotNull(id);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// Builds a throwaway User whose login and mail are made unique by the current
// timestamp. The user is not persisted by this method.
private static User generateRandomUser() {
    long stamp = new Date().getTime();
    User result = new User();
    result.setFirstName("fname");
    result.setLastName("lname");
    result.setLogin("login" + stamp);
    result.setMail("somemail" + stamp + "@somedomain.com");
    result.setPassword("zzzz");
    return result;
}
// Creates a user, updates first/last name and mail, reloads and verifies the changes.
@Test
public void testUpdateUser() throws IOException, AuthenticationException, NotFoundException {
    User userToCreate = new User();
    userToCreate.setFirstName("fname2");
    userToCreate.setLastName("lname2");
    long randomNumber = new Date().getTime();
    userToCreate.setLogin("login33" + randomNumber);
    userToCreate.setMail("email" + randomNumber + "@somedomain.com");
    userToCreate.setPassword("1234");
    try {
        User createdUser = mgr.createUser(userToCreate);
        Integer userId = createdUser.getId();
        Assert.assertNotNull("checking that a non-null project is returned", createdUser);
        String newFirstName = "fnameNEW";
        String newLastName = "lnameNEW";
        String newMail = "newmail" + randomNumber + "@asd.com";
        createdUser.setFirstName(newFirstName);
        createdUser.setLastName(newLastName);
        createdUser.setMail(newMail);
        mgr.updateUser(createdUser);
        // reload from the server to confirm the update was persisted
        User updatedUser = mgr.getUserById(userId);
        Assert.assertEquals(newFirstName, updatedUser.getFirstName());
        Assert.assertEquals(newLastName, updatedUser.getLastName());
        Assert.assertEquals(newMail, updatedUser.getMail());
        Assert.assertEquals(userId, updatedUser.getId());
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
// Creates more issues than one server page holds (default page size 25) and
// verifies getIssues() transparently loads all pages with no duplicates.
@Test
public void testGetIssuesPaging() {
    try {
        // create 27 issues. default page size is 25.
        createIssues(27);
        // mgr.setObjectsPerPage(5); <-- does not work now
        List<Issue> issues = mgr.getIssues(projectKey, null);
        logger.debug("testGetIssuesPaging() loaded " + issues.size() + " issues");//using query #" + queryIdIssuesCreatedLast2Days);
        Assert.assertTrue(issues.size() > 26);
        // a Set has the same size only if there were no duplicate issues
        Set<Issue> issueSet = new HashSet<Issue>(issues);
        Assert.assertEquals(issues.size(), issueSet.size());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
// Creates the requested number of issues in the test project and returns them
// in creation order.
private List<Issue> createIssues(int issuesNumber) throws IOException, AuthenticationException, NotFoundException, RedmineException {
    List<Issue> created = new ArrayList<Issue>(issuesNumber);
    for (int i = 0; i < issuesNumber; i++) {
        Issue toCreate = new Issue();
        toCreate.setSubject("some issue " + i + " " + new Date());
        created.add(mgr.createIssue(projectKey, toCreate));
    }
    return created;
}
// Convenience wrapper: create exactly one issue in the test project.
private Issue createIssue() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    return createIssues(1).get(0);
}
// Builds an Issue with a randomized subject; the issue is not persisted.
private Issue generateRandomIssue() {
    Issue result = new Issue();
    result.setSubject("some issue " + new Random().nextInt() + " " + new Date());
    return result;
}
// Creates more projects than one server page holds and verifies getProjects()
// loads all pages. Cleanup now runs in a finally block: previously the created
// projects leaked whenever the assertion (or getProjects itself) failed.
@Test
public void testProjectsAllPagesLoaded() throws IOException, AuthenticationException, NotFoundException, URISyntaxException, RedmineException {
    int NUM = 27; // must be larger than 25, which is a default page size in Redmine
    List<Project> projects = createProjects(NUM);
    try {
        List<Project> loadedProjects = mgr.getProjects();
        Assert.assertTrue(
                "Number of projects loaded from the server must be bigger than "
                        + NUM + ", but it's " + loadedProjects.size(),
                loadedProjects.size() > NUM);
    } finally {
        deleteProjects(projects);
    }
}
// Creates `num` randomly-named projects on the server and returns them.
private List<Project> createProjects(int num) throws IOException, AuthenticationException, NotFoundException, RedmineException {
    List<Project> created = new ArrayList<Project>(num);
    for (int i = 0; i < num; i++) {
        created.add(mgr.createProject(generateRandomProject()));
    }
    return created;
}
// Deletes every given project from the server by its identifier.
private void deleteProjects(List<Project> projects) throws IOException, AuthenticationException, NotFoundException, RedmineException {
    for (Project project : projects) {
        mgr.deleteProject(project.getIdentifier());
    }
}
// Smoke test: the time-entries endpoint must respond with a non-null list.
// Removed a stale block of commented-out project-scanning code copied from testGetProjects.
@Test
public void testGetTimeEntries() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    List<TimeEntry> list = mgr.getTimeEntries();
    Assert.assertNotNull(list);
}
// Creates a time entry on a fresh issue, then updates its hours and verifies the
// update is persisted when the entry is reloaded.
@Test
public void testCreateGetTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Issue issue = createIssues(1).get(0);
    Integer issueId = issue.getId();
    TimeEntry entry = new TimeEntry();
    Float hours = 11f;
    entry.setHours(hours);
    entry.setIssueId(issueId);
    // TODO We don't know activities IDs!
    entry.setActivityId(ACTIVITY_ID);
    TimeEntry createdEntry = mgr.createTimeEntry(entry);
    Assert.assertNotNull(createdEntry);
    logger.debug("Created time entry " + createdEntry);
    Assert.assertEquals(hours, createdEntry.getHours());
    Float newHours = 22f;
    createdEntry.setHours(newHours);
    mgr.updateTimeEntry(createdEntry);
    TimeEntry updatedEntry = mgr.getTimeEntry(createdEntry.getId());
    Assert.assertEquals(newHours, updatedEntry.getHours());
}
// Creates then deletes a time entry; the final getTimeEntry() on the deleted ID
// is expected to raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testCreateDeleteTimeEntry() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Issue issue = createIssues(1).get(0);
    Integer issueId = issue.getId();
    TimeEntry entry = new TimeEntry();
    Float hours = 4f;
    entry.setHours(hours);
    entry.setIssueId(issueId);
    entry.setActivityId(ACTIVITY_ID);
    TimeEntry createdEntry = mgr.createTimeEntry(entry);
    Assert.assertNotNull(createdEntry);
    mgr.deleteTimeEntry(createdEntry.getId());
    mgr.getTimeEntry(createdEntry.getId()); // must throw NotFoundException
}
// Adds two time entries to one issue and verifies the per-issue query returns
// both, with hours summing to the expected total.
@Test
public void testGetTimeEntriesForIssue() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    Issue issue = createIssues(1).get(0);
    Integer issueId = issue.getId();
    Float hours1 = 2f;
    Float hours2 = 7f;
    Float totalHoursExpected = hours1 + hours2;
    TimeEntry createdEntry1 = createTimeEntry(issueId, hours1);
    TimeEntry createdEntry2 = createTimeEntry(issueId, hours2);
    Assert.assertNotNull(createdEntry1);
    Assert.assertNotNull(createdEntry2);
    List<TimeEntry> entries = mgr.getTimeEntriesForIssue(issueId);
    Assert.assertEquals(2, entries.size());
    Float totalTime = 0f;
    for (TimeEntry timeEntry : entries) {
        totalTime += timeEntry.getHours();
    }
    Assert.assertEquals(totalHoursExpected, totalTime);
}
// Creates a time entry for the given issue, using the shared test activity ID.
private TimeEntry createTimeEntry(Integer issueId, float hours) throws IOException,
        AuthenticationException, NotFoundException, RedmineException {
    TimeEntry toCreate = new TimeEntry();
    toCreate.setIssueId(issueId);
    toCreate.setHours(hours);
    toCreate.setActivityId(ACTIVITY_ID);
    return mgr.createTimeEntry(toCreate);
}
// Deletes an issue and expects the subsequent getIssueById() to raise NotFoundException.
@Test(expected = NotFoundException.class)
public void testDeleteIssue() throws IOException, AuthenticationException,
        NotFoundException, RedmineException {
    Issue issue = createIssues(1).get(0);
    Issue retrievedIssue = mgr.getIssueById(issue.getId());
    Assert.assertEquals(issue, retrievedIssue);
    mgr.deleteIssue(issue.getId());
    mgr.getIssueById(issue.getId()); // must throw NotFoundException
}
// Subject/description containing quotes and XML-like tags must survive an update
// round trip unescaped (guards against double-encoding in the transport layer).
@Test
public void testUpdateIssueSpecialXMLtags() throws Exception {
    Issue issue = createIssues(1).get(0);
    String newSubject = "\"text in quotes\" and <xml> tags";
    String newDescription = "<teghere>\"abc\"</here>";
    issue.setSubject(newSubject);
    issue.setDescription(newDescription);
    mgr.updateIssue(issue);
    Issue updatedIssue = mgr.getIssueById(issue.getId());
    Assert.assertEquals(newSubject, updatedIssue.getSubject());
    Assert.assertEquals(newDescription, updatedIssue.getDescription());
}
// Sets two custom fields on an issue and verifies their values after reload.
// Relies on the test server having custom fields with hard-coded IDs 1 and 2.
@Test
public void testCustomFields() throws Exception {
    Issue issue = createIssues(1).get(0);
    // default empty values
    Assert.assertEquals(2, issue.getCustomFields().size());
    // TODO update this!
    int id1 = 1; // TODO this is pretty much a hack, we don't generally know these ids!
    String custom1FieldName = "my_custom_1";
    String custom1Value = "some value 123";
    int id2 = 2;
    String custom2FieldName = "custom_boolean_1";
    String custom2Value = "true";
    issue.setCustomFields(new ArrayList<CustomField>());
    issue.getCustomFields().add(new CustomField(id1, custom1FieldName, custom1Value));
    issue.getCustomFields().add(new CustomField(id2, custom2FieldName, custom2Value));
    mgr.updateIssue(issue);
    Issue updatedIssue = mgr.getIssueById(issue.getId());
    Assert.assertEquals(2, updatedIssue.getCustomFields().size());
    Assert.assertEquals(custom1Value, updatedIssue.getCustomField(custom1FieldName));
    Assert.assertEquals(custom2Value, updatedIssue.getCustomField(custom2FieldName));
}
// Regression check: an update that never touched estimated hours must leave the
// field NULL after reload (the update must not invent a value).
@Test
public void testUpdateIssueDoesNotChangeEstimatedTime() {
    try {
        Issue issue = new Issue();
        String originalSubject = "Issue " + new Date();
        issue.setSubject(originalSubject);
        Issue newIssue = mgr.createIssue(projectKey, issue);
        Assert.assertEquals("Estimated hours must be NULL", null, newIssue.getEstimatedHours());
        mgr.updateIssue(newIssue);
        Issue reloadedFromRedmineIssue = mgr.getIssueById(newIssue.getId());
        Assert.assertEquals("Estimated hours must be NULL", null, reloadedFromRedmineIssue.getEstimatedHours());
    } catch (Exception e) {
        Assert.fail();
    }
}
// Creates a parent project plus a sub-project and checks the child's parent ID.
// The parent (and implicitly its children) is deleted in the finally block.
@Test
public void testCreateSubProject() {
    Project createdMainProject = null;
    try {
        createdMainProject = createProject();
        Project subProject = createSubProject(createdMainProject);
        Assert.assertEquals("Must have correct parent ID",
                createdMainProject.getId(), subProject.getParentId());
    } catch (Exception e) {
        Assert.fail();
    } finally {
        if (createdMainProject != null) {
            try {
                mgr.deleteProject(createdMainProject.getIdentifier());
            } catch (Exception e) {
                Assert.fail();
            }
        }
    }
}
// Creates a top-level project with a timestamp-unique name and identifier.
private Project createProject() throws IOException, AuthenticationException, RedmineException {
    long stamp = new Date().getTime();
    Project toCreate = new Project();
    toCreate.setName("project" + stamp);
    toCreate.setIdentifier("project" + stamp);
    return mgr.createProject(toCreate);
}
// Creates a child project of the given parent with a timestamp-unique name/identifier.
private Project createSubProject(Project parent) throws IOException, AuthenticationException, RedmineException {
    long stamp = new Date().getTime();
    Project toCreate = new Project();
    toCreate.setName("sub_pr" + stamp);
    toCreate.setIdentifier("subpr" + stamp);
    toCreate.setParentId(parent.getId());
    return mgr.createProject(toCreate);
}
// Verifies 'done ratio': defaults to 0, a valid update (50) persists, and an
// invalid value (130) is rejected by the server without changing the stored value.
@Test
public void testIssueDoneRatio() {
    try {
        Issue issue = new Issue();
        String subject = "Issue " + new Date();
        issue.setSubject(subject);
        Issue createdIssue = mgr.createIssue(projectKey, issue);
        Assert.assertEquals("Initial 'done ratio' must be 0", (Integer) 0, createdIssue.getDoneRatio());
        Integer doneRatio = 50;
        createdIssue.setDoneRatio(doneRatio);
        mgr.updateIssue(createdIssue);
        Integer issueId = createdIssue.getId();
        Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
        Assert.assertEquals(
                "Checking if 'update issue' operation changed 'done ratio' field",
                doneRatio, reloadedFromRedmineIssue.getDoneRatio());
        // out-of-range value: the server must respond with a single validation error
        Integer invalidDoneRatio = 130;
        reloadedFromRedmineIssue.setDoneRatio(invalidDoneRatio);
        try {
            mgr.updateIssue(reloadedFromRedmineIssue);
        } catch (RedmineException e) {
            Assert.assertEquals("Must be 1 error", 1, e.getErrors().size());
            Assert.assertEquals("Checking error text", "% Done is not included in the list", e.getErrors().get(0).toString());
        }
        Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
        Assert.assertEquals(
                "'done ratio' must have remained unchanged after invalid value",
                doneRatio, reloadedFromRedmineIssueUnchanged.getDoneRatio());
    } catch (Exception e) {
        Assert.fail();
    }
}
// Contract check: updating with description == null leaves the stored description
// intact, while an explicit empty string "" really erases it.
@Test
public void testIssueNullDescriptionDoesNotEraseIt() {
    try {
        Issue issue = new Issue();
        String subject = "Issue " + new Date();
        String descr = "Some description";
        issue.setSubject(subject);
        issue.setDescription(descr);
        Issue createdIssue = mgr.createIssue(projectKey, issue);
        Assert.assertEquals("Checking description", descr, createdIssue.getDescription());
        createdIssue.setDescription(null);
        mgr.updateIssue(createdIssue);
        Integer issueId = createdIssue.getId();
        Issue reloadedFromRedmineIssue = mgr.getIssueById(issueId);
        Assert.assertEquals(
                "Description must not be erased",
                descr, reloadedFromRedmineIssue.getDescription());
        reloadedFromRedmineIssue.setDescription("");
        mgr.updateIssue(reloadedFromRedmineIssue);
        Issue reloadedFromRedmineIssueUnchanged = mgr.getIssueById(issueId);
        Assert.assertEquals(
                "Description must be erased",
                "", reloadedFromRedmineIssueUnchanged.getDescription());
    } catch (Exception e) {
        Assert.fail();
    }
}
// Journals: a new issue has none; an update with notes creates exactly one journal
// entry attributed to our user; loading without INCLUDE.journals returns none.
@Test
public void testIssueJournals() {
    try {
        // create at least 1 issue
        Issue issueToCreate = new Issue();
        issueToCreate.setSubject("testGetIssues: " + new Date());
        Issue newIssue = mgr.createIssue(projectKey, issueToCreate);
        Issue loadedIssueWithJournals = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
        Assert.assertTrue(loadedIssueWithJournals.getJournals().isEmpty());
        String commentDescribingTheUpdate = "some comment describing the issue update";
        loadedIssueWithJournals.setSubject("new subject");
        loadedIssueWithJournals.setNotes(commentDescribingTheUpdate);
        mgr.updateIssue(loadedIssueWithJournals);
        Issue loadedIssueWithJournals2 = mgr.getIssueById(newIssue.getId(), INCLUDE.journals);
        Assert.assertEquals(1, loadedIssueWithJournals2.getJournals().size());
        Journal journalItem = loadedIssueWithJournals2.getJournals().get(0);
        Assert.assertEquals(commentDescribingTheUpdate, journalItem.getNotes());
        User ourUser = getOurUser();
        // can't compare User objects because either of them is not completely filled
        Assert.assertEquals(ourUser.getId(), journalItem.getUser().getId());
        Assert.assertEquals(ourUser.getFirstName(), journalItem.getUser().getFirstName());
        Assert.assertEquals(ourUser.getLastName(), journalItem.getUser().getLastName());
        Issue loadedIssueWithoutJournals = mgr.getIssueById(newIssue.getId());
        Assert.assertTrue(loadedIssueWithoutJournals.getJournals().isEmpty());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
// Creates a "precedes" relation between two fresh issues and verifies its endpoints
// and type. Switched the bare assertEquals call to Assert.assertEquals for
// consistency with the rest of this test class.
@Test
public void testCreateRelation() {
    try {
        List<Issue> issues = createIssues(2);
        Issue src = issues.get(0);
        Issue target = issues.get(1);
        String relationText = IssueRelation.TYPE.precedes.toString();
        IssueRelation r = mgr.createRelation(src.getId(), target.getId(), relationText);
        Assert.assertEquals(src.getId(), r.getIssueId());
        Assert.assertEquals(target.getId(), r.getIssueToId());
        Assert.assertEquals(relationText, r.getType());
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
}
// Creates two fresh issues and a "precedes" relation from the first to the second.
private IssueRelation createTwoRelatedIssues() throws IOException, AuthenticationException, NotFoundException, RedmineException {
    List<Issue> pair = createIssues(2);
    String relationText = IssueRelation.TYPE.precedes.toString();
    return mgr.createRelation(pair.get(0).getId(), pair.get(1).getId(), relationText);
}
// Loads both ends of a relation with INCLUDE.relations and verifies each side sees
// exactly one relation; the forward and reverse views must be the same object value.
@Test
public void issueRelationsAreCreatedAndLoadedOK() {
    try {
        IssueRelation relation = createTwoRelatedIssues();
        Issue issue = mgr.getIssueById(relation.getIssueId(), INCLUDE.relations);
        Issue issueTarget = mgr.getIssueById(relation.getIssueToId(), INCLUDE.relations);
        Assert.assertEquals(1, issue.getRelations().size());
        Assert.assertEquals(1, issueTarget.getRelations().size());
        IssueRelation relation1 = issue.getRelations().get(0);
        assertEquals(issue.getId(), relation1.getIssueId());
        assertEquals(issueTarget.getId(), relation1.getIssueToId());
        assertEquals("precedes", relation1.getType());
        assertEquals((Integer) 0, relation1.getDelay());
        IssueRelation reverseRelation = issueTarget.getRelations().get(0);
        // both forward and reverse relations are the same!
        Assert.assertEquals(relation1, reverseRelation);
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
}
// Ignored: depends on a pre-existing project "test" with a version id=1 named "1.0".
// Fixes: setName now uses the versionName variable (the literal was duplicated, so
// name and assertion could drift apart), and the assertEquals arguments are in the
// JUnit order (expected first, actual second) so failure messages read correctly.
@Ignore
@Test
public void issueFixVersionIsSet() throws Exception {
    String existingProjectKey = "test";
    Issue toCreate = generateRandomIssue();
    Version v = new Version();
    String versionName = "1.0";
    v.setName(versionName);
    v.setId(1); // hack: hard-coded version ID that must exist on the server
    toCreate.setTargetVersion(v);
    Issue createdIssue = mgr.createIssue(existingProjectKey, toCreate);
    Assert.assertNotNull(createdIssue.getTargetVersion());
    Assert.assertEquals(versionName, createdIssue.getTargetVersion().getName());
}
// Ignored: non-deterministic — see the XXX note; the first project returned is not
// guaranteed to have trackers.
@Ignore
@Test
public void testGetProjectsIncludesTrackers() {
    try {
        List<Project> projects = mgr.getProjects();
        Assert.assertTrue(projects.size() > 0);
        Project p1 = projects.get(0);
        Assert.assertNotNull(p1.getTrackers());
        // XXX there could be a case when a project does not have any trackers
        // need to create a project with some trackers to make this test deterministic
        Assert.assertTrue(!p1.getTrackers().isEmpty());
        logger.debug("Created trackers " + p1.getTrackers());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
// Ignored placeholder: spent time should be verified through the Time Entries API
// (see the TODO); the sketched assertions below were never activated.
@Ignore
@Test
public void testSpentTime() {
    // TODO need to use "Time Entries"
    // float spentHours = 12.5f;
    // issueToCreate.setSpentHours(spentHours);
    // check SPENT TIME
    // assertEquals((Float) spentHours, newIssue.getSpentHours());
}
@Test
public void testViolateTimeEntryConstraint_ProjectOrIssueID_issue66() throws IOException, AuthenticationException, RedmineException {
TimeEntry timeEntry = new TimeEntry();
timeEntry.setActivityId(ACTIVITY_ID);
timeEntry.setSpentOn(new Date());
timeEntry.setHours(1.5f);
try {
mgr.createTimeEntry(timeEntry);
} catch (IllegalArgumentException e) {
logger.debug("create: Got expected IllegalArgumentException for invalid Time Entry (issue
} catch (Exception e) {
e.printStackTrace();
fail("Got unexpected " + e.getClass().getSimpleName() + ": " + e.getMessage());
}
try {
mgr.updateTimeEntry(timeEntry);
} catch (IllegalArgumentException e) {
logger.debug("update: Got expected IllegalArgumentException for invalid Time Entry (issue
} catch (Exception e) {
e.printStackTrace();
fail("Got unexpected " + e.getClass().getSimpleName() + ": " + e.getMessage());
}
// Now can try to verify with project ID (only test with issue ID seems to be already covered)
int projectId = mgr.getProjects().get(0).getId();
timeEntry.setProjectId(projectId);
try {
TimeEntry created = mgr.createTimeEntry(timeEntry);
logger.debug("Created time entry " + created);
} catch (Exception e) {
e.printStackTrace();
fail("Unexpected " + e.getClass().getSimpleName() + ": " + e.getMessage());
}
}
/**
 * tests the retrieval of statuses.
 *
 * @throws RedmineException thrown in case something went wrong in Redmine
 * @throws IOException thrown in case something went wrong while performing I/O
 * operations
 * @throws AuthenticationException thrown in case something went wrong while trying to login
 * @throws NotFoundException thrown in case the objects requested for could not be found
 */
@Test
public void testGetStatuses() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    // TODO we should create some statuses first, but the Redmine Java API does not support this presently
    List<IssueStatus> statuses = mgr.getStatuses();
    Assert.assertFalse("Expected list of statuses not to be empty", statuses.isEmpty());
    for (IssueStatus issueStatus : statuses) {
        // asserts on status; use the explicit Assert.* form for consistency with this class
        Assert.assertNotNull("ID of status must not be null", issueStatus.getId());
        Assert.assertNotNull("Name of status must not be null", issueStatus.getName());
    }
}
/**
 * tests the creation of an invalid {@link Version}: a version without a project
 * must be rejected client-side with an IllegalArgumentException.
 *
 * @throws RedmineException thrown in case something went wrong in Redmine
 * @throws IOException thrown in case something went wrong while performing I/O
 * operations
 * @throws AuthenticationException thrown in case something went wrong while trying to login
 * @throws NotFoundException thrown in case the objects requested for could not be found
 */
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    Version version = new Version(null, "Invalid test version " + UUID.randomUUID().toString());
    mgr.createVersion(version);
}
/**
 * Tries to delete a {@link Version} whose id does not exist on the server
 * and expects a {@link NotFoundException}.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test(expected = NotFoundException.class)
public void testDeleteInvalidVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    final Version bogus = new Version(null, "Invalid test version " + UUID.randomUUID().toString());
    bogus.setDescription("An invalid test version created by " + this.getClass());
    // -1 can never be a valid server-side id
    bogus.setId(-1);
    mgr.deleteVersion(bogus);
}
/**
 * Creates a {@link Version} on the test project, deletes it again and
 * verifies that the project has no versions left afterwards.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test
public void testDeleteVersion() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    final Project project = mgr.getProjectByKey(projectKey);
    final Version draft = new Version(project, "Test version " + UUID.randomUUID().toString());
    draft.setDescription("A test version created by " + this.getClass());
    draft.setStatus("open");
    final Version created = mgr.createVersion(draft);
    Assert.assertNotNull("Expected new version not to be null", created);
    mgr.deleteVersion(created);
    // after deletion the test project must not have any versions
    final List<Version> versions = mgr.getVersions(project.getId());
    Assert.assertTrue("List of versions of test project must be empty now but is " + versions, versions.isEmpty());
}
/**
 * Creates two {@link Version}s, verifies that exactly these two are returned
 * for the project, and removes them again in a finally block.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test
public void testGetVersions() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    final Project project = mgr.getProjectByKey(projectKey);
    final Version created1 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
    final Version created2 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID()));
    try {
        final List<Version> versions = mgr.getVersions(project.getId());
        Assert.assertEquals("Wrong number of versions for project " + project.getName() + " delivered by Redmine Java API", 2, versions.size());
        for (final Version version : versions) {
            Assert.assertNotNull("ID of version must not be null", version.getId());
            Assert.assertNotNull("Name of version must not be null", version.getName());
            Assert.assertNotNull("Project of version must not be null", version.getProject());
        }
    } finally {
        // always remove the fixtures so later tests see a clean project
        if (created1 != null) {
            mgr.deleteVersion(created1);
        }
        if (created2 != null) {
            mgr.deleteVersion(created2);
        }
    }
}
/**
 * Creates an {@link IssueCategory} with an assignee, deletes it, and checks
 * that the project has no categories left afterwards.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test
public void testCreateAndDeleteIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    final Project project = mgr.getProjectByKey(projectKey);
    final IssueCategory draft = new IssueCategory(project, "Category" + new Date().getTime());
    draft.setAssignee(getOurUser());
    final IssueCategory created = mgr.createCategory(draft);
    Assert.assertNotNull("Expected new category not to be null", created);
    Assert.assertNotNull("Expected project of new category not to be null", created.getProject());
    Assert.assertNotNull("Expected assignee of new category not to be null", created.getAssignee());
    mgr.deleteCategory(created);
    // after deletion the project must not have any categories
    final List<IssueCategory> categories = mgr.getCategories(project.getId());
    Assert.assertTrue("List of categories of test project must be empty now but is " + categories, categories.isEmpty());
}
/**
 * Creates two {@link IssueCategory}s, verifies that exactly these two are
 * returned for the project, and removes them again in a finally block.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test
public void testGetIssueCategories() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    final Project project = mgr.getProjectByKey(projectKey);
    final IssueCategory draft1 = new IssueCategory(project, "Category" + new Date().getTime());
    draft1.setAssignee(getOurUser());
    final IssueCategory created1 = mgr.createCategory(draft1);
    final IssueCategory draft2 = new IssueCategory(project, "Category" + new Date().getTime());
    draft2.setAssignee(getOurUser());
    final IssueCategory created2 = mgr.createCategory(draft2);
    try {
        final List<IssueCategory> categories = mgr.getCategories(project.getId());
        Assert.assertEquals("Wrong number of categories for project " + project.getName() + " delivered by Redmine Java API", 2, categories.size());
        for (final IssueCategory category : categories) {
            Assert.assertNotNull("ID of category must not be null", category.getId());
            Assert.assertNotNull("Name of category must not be null", category.getName());
            Assert.assertNotNull("Project of category must not be null", category.getProject());
            Assert.assertNotNull("Assignee of category must not be null", category.getAssignee());
        }
    } finally {
        // always remove the fixtures so later tests see a clean project
        if (created1 != null) {
            mgr.deleteCategory(created1);
        }
        if (created2 != null) {
            mgr.deleteCategory(created2);
        }
    }
}
/**
 * Tries to create an {@link IssueCategory} without a project and expects the
 * client-side validation to throw an {@link IllegalArgumentException}.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test(expected = IllegalArgumentException.class)
public void testCreateInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    // project is null, so this must be rejected before any request is sent
    mgr.createCategory(new IssueCategory(null, "InvalidCategory" + new Date().getTime()));
}
/**
 * Tries to delete an {@link IssueCategory} whose id does not exist on the
 * server and expects a {@link NotFoundException}.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test(expected = NotFoundException.class)
public void testDeleteInvalidIssueCategory() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    final IssueCategory bogus = new IssueCategory(null, "InvalidCategory" + new Date().getTime());
    // -1 can never be a valid server-side id
    bogus.setId(-1);
    mgr.deleteCategory(bogus);
}
/**
 * Fetches the list of {@link Tracker}s and checks that it is non-null and
 * non-empty.
 *
 * @throws RedmineException        if the Redmine server reports an error
 * @throws IOException             if an I/O problem occurs while talking to the server
 * @throws AuthenticationException if the login is rejected
 * @throws NotFoundException       if a requested object does not exist
 */
@Test
public void testGetTrackers() throws RedmineException, IOException, AuthenticationException, NotFoundException {
    final List<Tracker> trackers = mgr.getTrackers();
    assertNotNull("List of trackers returned should not be null", trackers);
    assertFalse("List of trackers returned should not be empty", trackers.isEmpty());
}
} |
package seedu.address.testutil;
import seedu.address.model.tag.UniqueTagList;
import seedu.utask.model.task.Deadline;
import seedu.utask.model.task.Frequency;
import seedu.utask.model.task.Name;
import seedu.utask.model.task.ReadOnlyTask;
import seedu.utask.model.task.Timestamp;
/**
* A mutable person object. For testing only.
*/
public class TestPerson implements ReadOnlyTask {
private Name name;
private Frequency address;
private Timestamp email;
private Deadline phone;
private UniqueTagList tags;
public TestPerson() {
tags = new UniqueTagList();
}
/**
* Creates a copy of {@code personToCopy}.
*/
public TestPerson(TestPerson personToCopy) {
this.name = personToCopy.getName();
this.phone = personToCopy.getDeadline();
this.email = personToCopy.getTimestamp();
this.address = personToCopy.getFrequency();
this.tags = personToCopy.getTags();
}
public void setName(Name name) {
this.name = name;
}
public void setAddress(Frequency address) {
this.address = address;
}
public void setEmail(Timestamp email) {
this.email = email;
}
public void setPhone(Deadline phone) {
this.phone = phone;
}
public void setTags(UniqueTagList tags) {
this.tags = tags;
}
@Override
public Name getName() {
return name;
}
@Override
public Deadline getDeadline() {
return phone;
}
@Override
public Timestamp getTimestamp() {
return email;
}
@Override
public Frequency getFrequency() {
return address;
}
@Override
public UniqueTagList getTags() {
return tags;
}
@Override
public String toString() {
return getAsText();
}
public String getAddCommand() {
StringBuilder sb = new StringBuilder();
sb.append("create " + this.getName().fullName + " ");
sb.append("/by " + this.getDeadline().value + " ");
sb.append("/from " + this.getTimestamp().value + " ");
sb.append("/repeat " + this.getFrequency().value + " ");
this.getTags().asObservableList().stream().forEach(s -> sb.append("/tag " + s.tagName + " "));
return sb.toString();
}
} |
package toadmess.explosives;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import org.bukkit.util.config.Configuration;
import org.junit.Before;
import org.junit.Test;
public class ExplodingConfTest {
private Configuration conf;
@Before
public void setup() {
conf = new Configuration(null);
}
@Test
public void testEmptyConfig() {
checkEmptyConfigDefaults(conf, "");
}
@Test
public void testEmptyConfig_InvalidPath() {
checkEmptyConfigDefaults(conf, "someplace.overtherainbow");
}
private void checkEmptyConfigDefaults(final Configuration conf, final String confPath) {
final ExplodingConf ec = new ExplodingConf(conf, confPath);
// Check some defualt bounds are created
checkBoundsAreDefault(ec);
// Check the fire defaults to false even though it's not in the config.
checkFire(ec, false, false);
// Check the next radius multiplier defaults to 1.0 event though none was specified in the config
checkRadiusMultiplier(ec, 1.0F, false);
checkRadiusMultiplier(ec, 1.0F, false);
// Check the next player damage multiplier defaults to 1.0 even though none was specified in the config
checkPlayerDmgMultiplier(ec, 1.0F, false);
checkPlayerDmgMultiplier(ec, 1.0F, false);
// Check the next creature damage multiplier defaults to 1.0 even though none was specified in the config
checkCreatureDmgMultiplier(ec, 1.0F, false);
checkCreatureDmgMultiplier(ec, 1.0F, false);
}
@Test
public void testFire_False() { testFire(false); }
@Test
public void testFire_True() { testFire(true); }
private void testFire(final boolean confHasFireSetToTrue) {
conf.setProperty("someentity." + HEMain.CONF_ENTITY_FIRE, confHasFireSetToTrue);
checkFire(new ExplodingConf(conf, "someentity"), confHasFireSetToTrue, true);
}
@Test
public void testRadiusMultiplierSingle() {
conf.setProperty("someentity." + HEMain.CONF_ENTITY_RADIUSMULT, 0.23F);
final ExplodingConf ec = new ExplodingConf(conf, "someentity");
checkRadiusMultiplier(ec, 0.23F, true);
checkRadiusMultiplier(ec, 0.23F, true);
checkRadiusMultiplier(ec, 0.23F, true);
}
@Test
public void testRadiusMultiplierMulti() {
checkMultipliers(eMultiplierType.RADIUS);
}
private enum eMultiplierType {
RADIUS(HEMain.CONF_ENTITY_RADIUSMULT),
PLAYER_DMG(HEMain.CONF_ENTITY_PLAYER_DAMAGEMULT),
CREATURE_DMG(HEMain.CONF_ENTITY_CREATURE_DAMAGEMULT);
private final String confKey;
private eMultiplierType(final String confKey) { this.confKey = confKey;}
protected String getConfKey() { return this.confKey; }
};
private void checkMultipliers(final eMultiplierType multiplerType) {
final String confPrefix = "someentity";
/**
* Set up a configuration like:
*
* radiusMultiplier:
* - {chance: 0.2, value: 2.0}
* - {chance: 0.7, value: 1.0}
* - {chance: 0.1, value: 0.0}
*/
final List<HashMap<String,Object>> listOMultipliers = new ArrayList<HashMap<String,Object>>();
final HashMap<String,Object> firstMultiplier = new HashMap<String,Object>();
final HashMap<String,Object> secondMultiplier = new HashMap<String,Object>();
final HashMap<String,Object> thirdMultiplier = new HashMap<String,Object>();
listOMultipliers.add(firstMultiplier);
listOMultipliers.add(secondMultiplier);
listOMultipliers.add(thirdMultiplier);
firstMultiplier.put(HEMain.CONF_MULTIPLIER_CHANCE, 0.2D);
firstMultiplier.put(HEMain.CONF_MULTIPLIER_VALUE, 2.0D);
secondMultiplier.put(HEMain.CONF_MULTIPLIER_CHANCE, 0.7D);
secondMultiplier.put(HEMain.CONF_MULTIPLIER_VALUE, 1.0D);
thirdMultiplier.put(HEMain.CONF_MULTIPLIER_CHANCE, 0.1D);
thirdMultiplier.put(HEMain.CONF_MULTIPLIER_VALUE, 0.0D);
conf.setProperty(confPrefix + "." + multiplerType.getConfKey(), listOMultipliers);
final PredicatableNumGen rng = new PredicatableNumGen(0.01D, 1.0D);
final ExplodingConf ec = new ExplodingConf(conf, confPrefix, rng);
for(int i = 0; i < 5; i++) { // Make our fake RNG Cycle round a few times
final float expectedMultiplier;
if(rng.peekNextDouble() <= 0.7D) {
expectedMultiplier = 1.0F;
} else if (rng.peekNextDouble() <= 0.9D) {
expectedMultiplier = 2.0F;
} else {
expectedMultiplier = 0.0F;
}
switch(multiplerType) {
case RADIUS:
checkRadiusMultiplier(ec, expectedMultiplier, true);
break;
case PLAYER_DMG:
checkPlayerDmgMultiplier(ec, expectedMultiplier, true);
break;
case CREATURE_DMG:
checkCreatureDmgMultiplier(ec, expectedMultiplier, true);
break;
}
}
}
@Test
public void testPlayerDmgMultiplierSingle() {
conf.setProperty("someentity." + HEMain.CONF_ENTITY_PLAYER_DAMAGEMULT, 0.23F);
final ExplodingConf ec = new ExplodingConf(conf, "someentity");
checkPlayerDmgMultiplier(ec, 0.23F, true);
checkPlayerDmgMultiplier(ec, 0.23F, true);
checkPlayerDmgMultiplier(ec, 0.23F, true);
}
@Test
public void testPlayerDmgMultiplierMulti() {
checkMultipliers(eMultiplierType.PLAYER_DMG);
}
@Test
public void testCreatureDmgMultiplierSingle() {
conf.setProperty("someentity." + HEMain.CONF_ENTITY_CREATURE_DAMAGEMULT, 0.23F);
final ExplodingConf ec = new ExplodingConf(conf, "someentity");
checkCreatureDmgMultiplier(ec, 0.23F, true);
checkCreatureDmgMultiplier(ec, 0.23F, true);
checkCreatureDmgMultiplier(ec, 0.23F, true);
}
@Test
public void testCreatureDmgMultiplierMulti() {
checkMultipliers(eMultiplierType.CREATURE_DMG);
}
private void checkRadiusMultiplier(final ExplodingConf ec, final float expectMultiplier, final boolean expectRadiusConfig) {
assertEquals((Float) expectMultiplier, (Float) ec.getNextRadiusMultiplier());
assertEquals(expectRadiusConfig, ec.hasRadiusConfig());
}
private void checkPlayerDmgMultiplier(final ExplodingConf ec, final float expectMultiplier, final boolean expectPlayerDmgConfig) {
assertEquals((Float) expectMultiplier, (Float) ec.getNextPlayerDamageMultiplier());
assertEquals(expectPlayerDmgConfig, ec.hasPlayerDamageConfig());
}
private void checkCreatureDmgMultiplier(final ExplodingConf ec, final float expectMultiplier, final boolean expectCreatureDmgConfig) {
assertEquals((Float) expectMultiplier, (Float) ec.getNextCreatureDamageMultiplier());
assertEquals(expectCreatureDmgConfig, ec.hasCreatureDamageConfig());
}
private void checkFire(final ExplodingConf ec, final boolean expectFire, final boolean expectFireConfig) {
assertEquals(expectFire, ec.getFire());
assertEquals(expectFireConfig, ec.hasFireConfig());
}
private void checkBoundsAreDefault(final ExplodingConf ec) {
assertNull(ec.getActiveBounds().getMaxX());
assertNull(ec.getActiveBounds().getMaxY());
assertNull(ec.getActiveBounds().getMaxZ());
assertNull(ec.getActiveBounds().getMaxX());
assertNull(ec.getActiveBounds().getMaxY());
assertNull(ec.getActiveBounds().getMaxZ());
}
} |
package mondrian.rolap;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.List;
import java.util.ArrayList;
import junit.framework.Assert;
import mondrian.olap.Axis;
import mondrian.olap.Cell;
import mondrian.olap.Connection;
import mondrian.olap.Evaluator;
import mondrian.olap.Level;
import mondrian.olap.Member;
import mondrian.olap.MondrianProperties;
import mondrian.olap.Query;
import mondrian.olap.Result;
import mondrian.olap.NativeEvaluationUnsupportedException;
import mondrian.rolap.RolapConnection.NonEmptyResult;
import mondrian.rolap.RolapNative.Listener;
import mondrian.rolap.RolapNative.NativeEvent;
import mondrian.rolap.RolapNative.TupleEvent;
import mondrian.rolap.cache.CachePool;
import mondrian.rolap.cache.HardSmartCache;
import mondrian.rolap.sql.MemberChildrenConstraint;
import mondrian.rolap.sql.TupleConstraint;
import mondrian.test.FoodMartTestCase;
import mondrian.test.TestContext;
import mondrian.util.Bug;
import org.apache.log4j.*;
import org.apache.log4j.spi.*;
import org.eigenbase.util.property.*;
/**
* Tests for NON EMPTY Optimization, includes SqlConstraint type hierarchy and
* RolapNative classes.
*
* @author av
* @since Nov 21, 2005
* @version $Id$
*/
public class NonEmptyTest extends FoodMartTestCase {
private static Logger logger = Logger.getLogger(NonEmptyTest.class);
SqlConstraintFactory scf = SqlConstraintFactory.instance();
/** Creates the test case with the default JUnit name. */
public NonEmptyTest() {
super();
}
/**
 * Creates the test case for a single named test method.
 *
 * @param name name of the test method to run
 */
public NonEmptyTest(String name) {
super(name);
}
/**
 * Verifies that a string-typed measure (a max aggregator over a varchar
 * column) can be queried and rendered.
 */
public void testStrMeasure() {
    // Minimal cube: one hierarchy over the promotion table plus one string measure.
    final String cubeXml =
        "<Cube name=\"StrMeasure\"> \n" +
        " <Table name=\"promotion\"/> \n" +
        " <Dimension name=\"Promotions\"> \n" +
        " <Hierarchy hasAll=\"true\" > \n" +
        " <Level name=\"Promotion Name\" column=\"promotion_name\" uniqueMembers=\"true\"/> \n" +
        " </Hierarchy> \n" +
        " </Dimension> \n" +
        " <Measure name=\"Media\" column=\"media_type\" aggregator=\"max\" datatype=\"String\"/> \n" +
        "</Cube> \n";
    final TestContext ctx = TestContext.create(null, cubeXml, null, null, null);
    final String mdx =
        "select {[Measures].[Media]} on columns " +
        "from [StrMeasure]";
    ctx.assertQueryReturns(
        mdx,
        "Axis #0:" + nl +
        "{}" + nl +
        "Axis #1:" + nl +
        "{[Measures].[Media]}" + nl +
        "Row #0: TV" + nl
    );
}
/**
 * Regression test for bug 1515302: a NON EMPTY crossjoin of a single
 * promotion with Descendants(..., SELF_AND_BEFORE) on a cube whose
 * Promotions hierarchy has no All member must return the expected rows.
 */
public void testBug1515302() {
// Build a dedicated cube (hasAll=false on Promotions is the interesting part).
TestContext ctx = TestContext.create(
null,
"<Cube name=\"Bug1515302\"> \n" +
" <Table name=\"sales_fact_1997\"/> \n" +
" <Dimension name=\"Promotions\" foreignKey=\"promotion_id\"> \n" +
" <Hierarchy hasAll=\"false\" primaryKey=\"promotion_id\"> \n" +
" <Table name=\"promotion\"/> \n" +
" <Level name=\"Promotion Name\" column=\"promotion_name\" uniqueMembers=\"true\"/> \n" +
" </Hierarchy> \n" +
" </Dimension> \n" +
" <Dimension name=\"Customers\" foreignKey=\"customer_id\"> \n" +
" <Hierarchy hasAll=\"true\" allMemberName=\"All Customers\" primaryKey=\"customer_id\"> \n" +
" <Table name=\"customer\"/> \n" +
" <Level name=\"Country\" column=\"country\" uniqueMembers=\"true\"/> \n" +
" <Level name=\"State Province\" column=\"state_province\" uniqueMembers=\"true\"/> \n" +
" <Level name=\"City\" column=\"city\" uniqueMembers=\"false\"/> \n" +
" <Level name=\"Name\" column=\"customer_id\" type=\"Numeric\" uniqueMembers=\"true\"/> \n" +
" </Hierarchy> \n" +
" </Dimension> \n" +
" <Measure name=\"Unit Sales\" column=\"unit_sales\" aggregator=\"sum\"/> \n" +
"</Cube> \n",
null,null,null);
// Query the new cube and compare against the full expected result.
ctx.assertQueryReturns(
"select {[Measures].[Unit Sales]} on columns, " +
"non empty crossjoin({[Promotions].[Big Promo]}, " +
"Descendants([Customers].[USA], [City], " +
"SELF_AND_BEFORE)) on rows " +
"from [Bug1515302]",
"Axis #0:" + nl +
"{}" + nl +
"Axis #1:" + nl +
"{[Measures].[Unit Sales]}" + nl +
"Axis #2:" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Anacortes]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Ballard]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Bellingham]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Burien]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Everett]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Issaquah]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Kirkland]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Lynnwood]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Marysville]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Olympia]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Puyallup]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Redmond]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Renton]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Seattle]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Sedro Woolley]}" + nl +
"{[Promotions].[Big Promo], [Customers].[All Customers].[USA].[WA].[Tacoma]}" + nl +
"Row #0: 1,789" + nl +
"Row #1: 1,789" + nl +
"Row #2: 20" + nl +
"Row #3: 35" + nl +
"Row #4: 15" + nl +
"Row #5: 18" + nl +
"Row #6: 60" + nl +
"Row #7: 42" + nl +
"Row #8: 36" + nl +
"Row #9: 79" + nl +
"Row #10: 58" + nl +
"Row #11: 520" + nl +
"Row #12: 438" + nl +
"Row #13: 14" + nl +
"Row #14: 20" + nl +
"Row #15: 65" + nl +
"Row #16: 3" + nl +
"Row #17: 366" + nl
);
}
/**
 * must not use native sql optimization because it chooses the wrong RolapStar
 * in SqlContextConstraint/SqlConstraintUtils.
 * Test ensures that no exception is thrown.
 */
public void testVirtualCube() throws Exception {
    if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
        // Expression-dependency testing changes evaluation counts; skip.
        return;
    }
    new TestCase(99, 3,
        "select NON EMPTY {[Measures].[Unit Sales], [Measures].[Warehouse Sales]} ON COLUMNS, " +
        "NON EMPTY [Product].[All Products].Children ON ROWS " +
        "from [Warehouse and Sales]").run();
}
/**
 * Checks that native SQL optimization is used for {@code <Level>.Members}
 * on a virtual cube.
 */
public void testVirtualCubeMembers() throws Exception {
    if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
        // Expression-dependency testing changes evaluation counts; skip.
        return;
    }
    // ok to use native sql optimization for members on a virtual cube
    new TestCase(6, 3,
        "select NON EMPTY {[Measures].[Unit Sales], [Measures].[Warehouse Sales]} ON COLUMNS, " +
        "NON EMPTY {[Product].[Product Family].Members} ON ROWS " +
        "from [Warehouse and Sales]").run();
}
/**
 * Native SQL optimization must NOT be used when a non-conforming dimension
 * is involved, because that would produce a cartesian-product join.
 */
public void testVirtualCubeMembersNonConformingDim() throws Exception {
    final String query =
        "select non empty {[Customers].[Country].members} on columns, " +
        "{[Measures].[Units Ordered]} on rows from " +
        "[Warehouse and Sales]";
    checkNotNative(1, query);
}
/**
 * Verifies that Filter() over customer names is evaluated natively when
 * {@code EnableNativeFilter} is on. The property is restored in a finally
 * block so a failing check cannot leak the setting into later tests.
 */
public void testNativeFilter() {
    String query =
        "select {[Measures].[Store Sales]} ON COLUMNS, "
        + "Order(Filter(Descendants([Customers].[All Customers].[USA].[CA], [Customers].[Name]), ([Measures].[Store Sales] > 200.0)), [Measures].[Store Sales], DESC) ON ROWS "
        + "from [Sales] "
        + "where ([Time].[1997])";
    boolean origNativeFilter =
        MondrianProperties.instance().EnableNativeFilter.get();
    MondrianProperties.instance().EnableNativeFilter.set(true);
    try {
        // Get a fresh connection; otherwise the mondrian property setting
        // is not refreshed for this parameter.
        boolean requestFreshConnection = true;
        checkNative(32, 18, query, null, requestFreshConnection);
    } finally {
        // Restore the property even if the check fails.
        MondrianProperties.instance().EnableNativeFilter.set(origNativeFilter);
    }
}
/**
* Executes a Filter() whose condition contains a calculated member.
*/
public void testCmNativeFilter() {
String query =
"with member [Measures].[Rendite] as '([Measures].[Store Sales] - [Measures].[Store Cost]) / [Measures].[Store Cost]' "
+ "select NON EMPTY {[Measures].[Unit Sales], [Measures].[Store Cost], [Measures].[Rendite], [Measures].[Store Sales]} ON COLUMNS, "
+ "NON EMPTY Order(Filter([Product].[Product Name].Members, ([Measures].[Rendite] > 1.8)), [Measures].[Rendite], BDESC) ON ROWS "
+ "from [Sales] "
+ "where ([Store].[All Stores].[USA].[CA], [Time].[1997])";
String result =
"Axis
"{[Store].[All Stores].[USA].[CA], [Time].[1997]}\n" +
"Axis
"{[Measures].[Unit Sales]}\n" +
"{[Measures].[Store Cost]}\n" +
"{[Measures].[Rendite]}\n" +
"{[Measures].[Store Sales]}\n" +
"Axis
"{[Product].[All Products].[Food].[Baking Goods].[Jams and Jellies].[Peanut Butter].[Plato].[Plato Extra Chunky Peanut Butter]}\n" +
"{[Product].[All Products].[Food].[Snack Foods].[Snack Foods].[Popcorn].[Horatio].[Horatio Buttered Popcorn]}\n" +
"{[Product].[All Products].[Food].[Canned Foods].[Canned Tuna].[Tuna].[Better].[Better Canned Tuna in Oil]}\n" +
"{[Product].[All Products].[Food].[Produce].[Fruit].[Fresh Fruit].[High Top].[High Top Cantelope]}\n" +
"{[Product].[All Products].[Non-Consumable].[Household].[Electrical].[Lightbulbs].[Denny].[Denny 75 Watt Lightbulb]}\n" +
"{[Product].[All Products].[Food].[Breakfast Foods].[Breakfast Foods].[Cereal].[Johnson].[Johnson Oatmeal]}\n" +
"{[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Wine].[Portsmouth].[Portsmouth Light Wine]}\n" +
"{[Product].[All Products].[Food].[Produce].[Vegetables].[Fresh Vegetables].[Ebony].[Ebony Squash]}\n" +
"Row #0: 42\n" +
"Row #0: 24.06\n" +
"Row #0: 1.93\n" +
"Row #0: 70.56\n" +
"Row #1: 36\n" +
"Row #1: 29.02\n" +
"Row #1: 1.91\n" +
"Row #1: 84.60\n" +
"Row #2: 39\n" +
"Row #2: 20.55\n" +
"Row #2: 1.85\n" +
"Row #2: 58.50\n" +
"Row #3: 25\n" +
"Row #3: 21.76\n" +
"Row #3: 1.84\n" +
"Row #3: 61.75\n" +
"Row #4: 43\n" +
"Row #4: 59.62\n" +
"Row #4: 1.83\n" +
"Row #4: 168.99\n" +
"Row #5: 34\n" +
"Row #5: 7.20\n" +
"Row #5: 1.83\n" +
"Row #5: 20.40\n" +
"Row #6: 36\n" +
"Row #6: 33.10\n" +
"Row #6: 1.83\n" +
"Row #6: 93.60\n" +
"Row #7: 46\n" +
"Row #7: 28.34\n" +
"Row #7: 1.81\n" +
"Row #7: 79.58\n";
boolean origNativeFilter =
MondrianProperties.instance().EnableNativeFilter.get();
MondrianProperties.instance().EnableNativeFilter.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
boolean requestFreshConnection = true;
checkNative(0, 8, query, fold(result), requestFreshConnection);
MondrianProperties.instance().EnableNativeFilter.set(origNativeFilter);
}
public void testNonNativeFilterWithNullMeasure() {
String query =
"select Filter([Store].[Store Name].members, " +
" Not ([Measures].[Store Sqft] - [Measures].[Grocery Sqft] < 10000) ) on rows, " +
"{[Measures].[Store Sqft], [Measures].[Grocery Sqft]} on columns " +
"from [Store]";
String result =
"Axis
"{}\n" +
"Axis
"{[Measures].[Store Sqft]}\n" +
"{[Measures].[Grocery Sqft]}\n" +
"Axis
"{[Store].[All Stores].[Mexico].[DF].[Mexico City].[Store 9]}\n" +
"{[Store].[All Stores].[Mexico].[DF].[San Andres].[Store 21]}\n" +
"{[Store].[All Stores].[Mexico].[Yucatan].[Merida].[Store 8]}\n" +
"{[Store].[All Stores].[USA].[CA].[Alameda].[HQ]}\n" +
"{[Store].[All Stores].[USA].[CA].[San Diego].[Store 24]}\n" +
"{[Store].[All Stores].[USA].[WA].[Bremerton].[Store 3]}\n" +
"{[Store].[All Stores].[USA].[WA].[Tacoma].[Store 17]}\n" +
"{[Store].[All Stores].[USA].[WA].[Walla Walla].[Store 22]}\n" +
"{[Store].[All Stores].[USA].[WA].[Yakima].[Store 23]}\n" +
"Row #0: 36,509\n" +
"Row #0: 22,450\n" +
"Row #1: \n" +
"Row #1: \n" +
"Row #2: 30,797\n" +
"Row #2: 20,141\n" +
"Row #3: \n" +
"Row #3: \n" +
"Row #4: \n" +
"Row #4: \n" +
"Row #5: 39,696\n" +
"Row #5: 24,390\n" +
"Row #6: 33,858\n" +
"Row #6: 22,123\n" +
"Row #7: \n" +
"Row #7: \n" +
"Row #8: \n" +
"Row #8: \n";
boolean origNativeFilter =
MondrianProperties.instance().EnableNativeFilter.get();
MondrianProperties.instance().EnableNativeFilter.set(false);
checkNotNative(9, query, fold(result));
MondrianProperties.instance().EnableNativeFilter.set(origNativeFilter);
}
public void testNativeFilterWithNullMeasure() {
// Currently this behaves differently from the non-native evaluation.
String query =
"select Filter([Store].[Store Name].members, " +
" Not ([Measures].[Store Sqft] - [Measures].[Grocery Sqft] < 10000) ) on rows, " +
"{[Measures].[Store Sqft], [Measures].[Grocery Sqft]} on columns " +
"from [Store]";
String result =
"Axis
"{}\n" +
"Axis
"{[Measures].[Store Sqft]}\n" +
"{[Measures].[Grocery Sqft]}\n" +
"Axis
"{[Store].[All Stores].[Mexico].[DF].[Mexico City].[Store 9]}\n" +
"{[Store].[All Stores].[Mexico].[Yucatan].[Merida].[Store 8]}\n" +
"{[Store].[All Stores].[USA].[WA].[Bremerton].[Store 3]}\n" +
"{[Store].[All Stores].[USA].[WA].[Tacoma].[Store 17]}\n" +
"Row #0: 36,509\n" +
"Row #0: 22,450\n" +
"Row #1: 30,797\n" +
"Row #1: 20,141\n" +
"Row #2: 39,696\n" +
"Row #2: 24,390\n" +
"Row #3: 33,858\n" +
"Row #3: 22,123\n";
boolean origNativeFilter =
MondrianProperties.instance().EnableNativeFilter.get();
MondrianProperties.instance().EnableNativeFilter.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
Connection conn = getTestContext().getFoodMartConnection(false);
TestContext context = getTestContext(conn);
context.assertQueryReturns(query, fold(result));
MondrianProperties.instance().EnableNativeFilter.set(origNativeFilter);
}
public void testNonNativeFilterWithCalcMember() {
// Currently this query cannot run natively
String query =
"with\n" +
"member [Time].[Date Range] as 'Aggregate({[Time].[1997].[Q1]:[Time].[1997].[Q4]})'\n" +
"select\n" +
"{[Measures].[Unit Sales]} ON columns,\n" +
"Filter ([Store].[Store State].members, [Measures].[Store Cost] > 100) ON rows\n" +
"from [Sales]\n" +
"where [Time].[Date Range]\n";
String result =
"Axis
"{[Time].[Date Range]}\n" +
"Axis
"{[Measures].[Unit Sales]}\n" +
"Axis
"{[Store].[All Stores].[USA].[CA]}\n" +
"{[Store].[All Stores].[USA].[OR]}\n" +
"{[Store].[All Stores].[USA].[WA]}\n" +
"Row #0: 74,748\n" +
"Row #1: 67,659\n" +
"Row #2: 124,366\n";
boolean origNativeFilter =
MondrianProperties.instance().EnableNativeFilter.get();
MondrianProperties.instance().EnableNativeFilter.set(false);
checkNotNative(3, query, fold(result));
MondrianProperties.instance().EnableNativeFilter.set(origNativeFilter);
}
/**
 * Verify that filter with Not IsEmpty(storedMeasure) can be natively
 * evaluated. The property is restored in a finally block so a failing check
 * cannot leak the setting into later tests.
 */
public void testNativeFilterNonEmpty() {
    String query =
        "select Filter(CrossJoin([Store].[Store Name].members, " +
        " [Store Type].[Store Type].members), " +
        " Not IsEmpty([Measures].[Store Sqft]) ) on rows, " +
        "{[Measures].[Store Sqft]} on columns " +
        "from [Store]";
    boolean origNativeFilter =
        MondrianProperties.instance().EnableNativeFilter.get();
    MondrianProperties.instance().EnableNativeFilter.set(true);
    try {
        // Get a fresh connection; otherwise the mondrian property setting
        // is not refreshed for this parameter.
        boolean requestFreshConnection = true;
        checkNative(0, 20, query, null, requestFreshConnection);
    } finally {
        // Restore the property even if the check fails.
        MondrianProperties.instance().EnableNativeFilter.set(origNativeFilter);
    }
}
/**
* Verify that CrossJoins with two non native inputs can be natively evaluated.
*/
public void testExpandAllNonNativeInputs() {
// This query will not run natively unless the <Dimension>.Children
// expression is expanded to a member list.
// Note: Both dimensions only have one hierarchy, which has the All
// member. <Dimension>.Children is interpreted as the children of
// the All member.
String query =
"select " +
"NonEmptyCrossJoin([Gender].Children, [Store].Children) on rows " +
"from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Gender].[All Gender].[F], [Store].[All Stores].[USA]}\n" +
"{[Gender].[All Gender].[M], [Store].[All Stores].[USA]}\n" +
"Row #0: 131,558\n" +
"Row #0: 135,215\n";
boolean origExpandNonNative =
MondrianProperties.instance().ExpandNonNative.get();
boolean origNativeCrossJoin =
MondrianProperties.instance().EnableNativeCrossJoin.get();
MondrianProperties.instance().ExpandNonNative.set(true);
MondrianProperties.instance().EnableNativeCrossJoin.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
boolean requestFreshConnection = true;
checkNative(0, 2, query, fold(result), requestFreshConnection);
MondrianProperties.instance().ExpandNonNative.set(origExpandNonNative);
MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCrossJoin);
}
/**
* Verify that CrossJoins with one non native inputs can be natively evaluated.
*/
public void testExpandOneNonNativeInput() {
// This query will not be evaluated natively unless the Filter
// expression is expanded to a member list.
String query =
"With " +
"Set [*Filtered_Set] as Filter([Product].[Product Name].Members, [Product].CurrentMember IS [Product].[Product Name].[Fast Raisins]) " +
"Set [*NECJ_Set] as NonEmptyCrossJoin([Store].[Store Country].Members, [*Filtered_Set]) " +
"select [*NECJ_Set] on columns " +
"From [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Store].[All Stores].[USA], [Product].[All Products].[Food].[Snack Foods].[Snack Foods].[Dried Fruit].[Fast].[Fast Raisins]}\n" +
"Row #0: 152\n";
boolean origExpandNonNative =
MondrianProperties.instance().ExpandNonNative.get();
boolean origNativeCrossJoin =
MondrianProperties.instance().EnableNativeCrossJoin.get();
MondrianProperties.instance().ExpandNonNative.set(true);
MondrianProperties.instance().EnableNativeCrossJoin.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
boolean requestFreshConnection = true;
checkNative(0, 1, query, fold(result), requestFreshConnection);
MondrianProperties.instance().ExpandNonNative.set(origExpandNonNative);
MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCrossJoin);
}
/**
* Verify that the presence of All member in all the inputs disables native
* evaluation, even when ExpandNonNative is true.
*/
public void testExpandAllMembersInAllInputs() {
// This query will not be evaluated natively, even if the Hierarchize
// expression is expanded to a member list. The reason is that the
// expanded list contains ALL members.
String query =
"select NON EMPTY {[Time].[1997]} ON COLUMNS,\n" +
" NON EMPTY Crossjoin(Hierarchize(Union({[Store].[All Stores]},\n" +
" [Store].[USA].[CA].[San Francisco].[Store 14].Children)), {[Product].[All Products]}) \n" +
" ON ROWS\n" +
" from [Sales]\n" +
" where [Measures].[Unit Sales]";
String result =
"Axis
"{[Measures].[Unit Sales]}\n" +
"Axis
"{[Time].[1997]}\n" +
"Axis
"{[Store].[All Stores], [Product].[All Products]}\n" +
"Row #0: 266,773\n";
boolean origExpandNonNative =
MondrianProperties.instance().ExpandNonNative.get();
boolean origNativeCrossJoin =
MondrianProperties.instance().EnableNativeCrossJoin.get();
MondrianProperties.instance().ExpandNonNative.set(true);
MondrianProperties.instance().EnableNativeCrossJoin.set(true);
checkNotNative(1, query, fold(result));
MondrianProperties.instance().ExpandNonNative.set(origExpandNonNative);
MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCrossJoin);
}
/**
* Verify that evaluation is native for expressions with nested non native
* inputs that preduce MemberList results.
*/
public void testExpandNestedNonNativeInputs() {
String query =
"select " +
"NonEmptyCrossJoin(" +
" NonEmptyCrossJoin([Gender].Children, [Store].Children), " +
" [Product].Children) on rows " +
"from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Gender].[All Gender].[F], [Store].[All Stores].[USA], [Product].[All Products].[Drink]}\n" +
"{[Gender].[All Gender].[F], [Store].[All Stores].[USA], [Product].[All Products].[Food]}\n" +
"{[Gender].[All Gender].[F], [Store].[All Stores].[USA], [Product].[All Products].[Non-Consumable]}\n" +
"{[Gender].[All Gender].[M], [Store].[All Stores].[USA], [Product].[All Products].[Drink]}\n" +
"{[Gender].[All Gender].[M], [Store].[All Stores].[USA], [Product].[All Products].[Food]}\n" +
"{[Gender].[All Gender].[M], [Store].[All Stores].[USA], [Product].[All Products].[Non-Consumable]}\n" +
"Row #0: 12,202\n" +
"Row #0: 94,814\n" +
"Row #0: 24,542\n" +
"Row #0: 12,395\n" +
"Row #0: 97,126\n" +
"Row #0: 25,694\n";
boolean origExpandNonNative =
MondrianProperties.instance().ExpandNonNative.get();
boolean origNativeCrossJoin =
MondrianProperties.instance().EnableNativeCrossJoin.get();
MondrianProperties.instance().ExpandNonNative.set(true);
MondrianProperties.instance().EnableNativeCrossJoin.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
boolean requestFreshConnection = true;
checkNative(0, 6, query, fold(result), requestFreshConnection);
MondrianProperties.instance().ExpandNonNative.set(origExpandNonNative);
MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCrossJoin);
}
/**
* Verify that a low value for maxConstraints disables native evaluation,
* even when ExpandNonNative is true.
*/
public void testExpandLowMaxConstraints() {
String query =
"select NonEmptyCrossJoin(" +
" Filter([Store Type].Children, [Measures].[Unit Sales] > 10000), " +
" [Product].Children) on rows " +
"from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Store Type].[All Store Types].[Deluxe Supermarket], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Deluxe Supermarket], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Deluxe Supermarket], [Product].[All Products].[Non-Consumable]}\n" +
"{[Store Type].[All Store Types].[Gourmet Supermarket], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Gourmet Supermarket], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Gourmet Supermarket], [Product].[All Products].[Non-Consumable]}\n" +
"{[Store Type].[All Store Types].[Mid-Size Grocery], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Mid-Size Grocery], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Mid-Size Grocery], [Product].[All Products].[Non-Consumable]}\n" +
"{[Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Non-Consumable]}\n" +
"Row #0: 6,827\n" +
"Row #0: 55,358\n" +
"Row #0: 14,652\n" +
"Row #0: 1,945\n" +
"Row #0: 15,438\n" +
"Row #0: 3,950\n" +
"Row #0: 1,159\n" +
"Row #0: 8,192\n" +
"Row #0: 2,140\n" +
"Row #0: 14,092\n" +
"Row #0: 108,188\n" +
"Row #0: 28,275\n";
int origMaxConstraint =
MondrianProperties.instance().MaxConstraints.get();
boolean origExpandNonNative =
MondrianProperties.instance().ExpandNonNative.get();
MondrianProperties.instance().MaxConstraints.set(2);
MondrianProperties.instance().ExpandNonNative.set(true);
try {
checkNotNative(12, query, fold(result));
} finally {
MondrianProperties.instance().MaxConstraints.set(origMaxConstraint);
MondrianProperties.instance().ExpandNonNative.set(origExpandNonNative);
}
}
/**
 * Verify that native evaluation is not enabled if the expanded member list
 * would contain members from different levels, even if ExpandNonNative is
 * set.
 */
public void testExpandDifferentLevels() {
    final String mdx =
        "select NonEmptyCrossJoin(" +
        " Descendants([Customers].[All Customers].[USA].[WA].[Yakima]), " +
        " [Product].Children) on rows " +
        "from [Sales]";
    final boolean savedExpandNonNative =
        MondrianProperties.instance().ExpandNonNative.get();
    MondrianProperties.instance().ExpandNonNative.set(true);
    try {
        // Descendants yields members from several levels, so the NECJ
        // must fall back to non-native evaluation.
        checkNotNative(278, mdx, null);
    } finally {
        MondrianProperties.instance().ExpandNonNative.set(savedExpandNonNative);
    }
}
/**
* Verify that naitve evaluation is possible when calculated members are present
* expanded member list inputs to NECJ.
*
*/
public void testExpandCalcMembers() {
// Note there is a bug currently wrt Calc members in the inputs to native cross join.
// See testCjEnumCalcMembersBug() test.
// However, that bug doe snot affect this test as the empty cell is filtered out by
// the Filter, whose result is not affected by the calc members in its input.
String query =
"with " +
"member [Store Type].[All Store Types].[S] as sum({[Store Type].[All Store Types]}) " +
"set [Enum Store Types] as {" +
" [Store Type].[All Store Types].[Small Grocery], " +
" [Store Type].[All Store Types].[Supermarket], " +
" [Store Type].[All Store Types].[HeadQuarters], " +
" [Store Type].[All Store Types].[S]} " +
"set [Filtered Enum Store Types] as Filter([Enum Store Types], [Measures].[Unit Sales] > 0)" +
"select NonEmptyCrossJoin([Product].[All Products].Children, [Filtered Enum Store Types]) on rows from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Product].[All Products].[Drink], [Store Type].[All Store Types].[Small Grocery]}\n" +
"{[Product].[All Products].[Drink], [Store Type].[All Store Types].[Supermarket]}\n" +
"{[Product].[All Products].[Drink], [Store Type].[All Store Types].[S]}\n" +
"{[Product].[All Products].[Food], [Store Type].[All Store Types].[Small Grocery]}\n" +
"{[Product].[All Products].[Food], [Store Type].[All Store Types].[Supermarket]}\n" +
"{[Product].[All Products].[Food], [Store Type].[All Store Types].[S]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store Type].[All Store Types].[Small Grocery]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store Type].[All Store Types].[Supermarket]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store Type].[All Store Types].[S]}\n" +
"Row #0: 574\n" +
"Row #0: 14,092\n" +
"Row #0: 24,597\n" +
"Row #0: 4,764\n" +
"Row #0: 108,188\n" +
"Row #0: 191,940\n" +
"Row #0: 1,219\n" +
"Row #0: 28,275\n" +
"Row #0: 50,236\n";
boolean origExpandNonNative =
MondrianProperties.instance().ExpandNonNative.get();
MondrianProperties.instance().ExpandNonNative.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
boolean requestFreshConnection = true;
try {
checkNative(0, 9, query, fold(result), requestFreshConnection);
} finally {
MondrianProperties.instance().ExpandNonNative.set(origExpandNonNative);
}
}
/**
* Verify that native evaluation is turned off for tuple inputs, even if
* ExpandNonNative is set.
*/
public void testExpandTupleInputs() {
String query =
"with " +
"set [Tuple Set] as {([Store Type].[All Store Types].[HeadQuarters], [Product].[All Products].[Drink]), ([Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Food])} " +
"set [Filtered Tuple Set] as Filter([Tuple Set], 1=1) " +
"set [NECJ] as NonEmptyCrossJoin([Filtered Tuple Set], [Store].Children) " +
"select [NECJ] on rows from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Food], [Store].[All Stores].[USA]}\n" +
"Row #0: 108,188\n";
boolean origExpandNonNative =
MondrianProperties.instance().ExpandNonNative.get();
MondrianProperties.instance().ExpandNonNative.set(true);
try {
checkNotNative(1, query, fold(result));
} finally {
MondrianProperties.instance().ExpandNonNative.set(origExpandNonNative);
}
}
/**
* Verify that native MemberLists inputs are subject to SQL constriant
* limitation. If mondrian.rolap.maxConstraints is set too low, native
* evaluations will be turned off.
*/
public void testEnumLowMaxConstraints() {
String query =
"with " +
"set [All Store Types] as {" +
"[Store Type].[All Store Types].[Deluxe Supermarket], " +
"[Store Type].[All Store Types].[Gourmet Supermarket], " +
"[Store Type].[All Store Types].[Mid-Size Grocery], " +
"[Store Type].[All Store Types].[Small Grocery], " +
"[Store Type].[All Store Types].[Supermarket]} " +
"set [All Products] as {" +
"[Product].[All Products].[Drink], " +
"[Product].[All Products].[Food], " +
"[Product].[All Products].[Non-Consumable]} " +
"select " +
"NonEmptyCrossJoin( " +
"Filter([All Store Types], ([Measures].[Unit Sales] > 10000)), " +
"[All Products]) on rows " +
"from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Store Type].[All Store Types].[Deluxe Supermarket], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Deluxe Supermarket], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Deluxe Supermarket], [Product].[All Products].[Non-Consumable]}\n" +
"{[Store Type].[All Store Types].[Gourmet Supermarket], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Gourmet Supermarket], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Gourmet Supermarket], [Product].[All Products].[Non-Consumable]}\n" +
"{[Store Type].[All Store Types].[Mid-Size Grocery], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Mid-Size Grocery], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Mid-Size Grocery], [Product].[All Products].[Non-Consumable]}\n" +
"{[Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Drink]}\n" +
"{[Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Food]}\n" +
"{[Store Type].[All Store Types].[Supermarket], [Product].[All Products].[Non-Consumable]}\n" +
"Row #0: 6,827\n" +
"Row #0: 55,358\n" +
"Row #0: 14,652\n" +
"Row #0: 1,945\n" +
"Row #0: 15,438\n" +
"Row #0: 3,950\n" +
"Row #0: 1,159\n" +
"Row #0: 8,192\n" +
"Row #0: 2,140\n" +
"Row #0: 14,092\n" +
"Row #0: 108,188\n" +
"Row #0: 28,275\n";
int origMaxConstraint =
MondrianProperties.instance().MaxConstraints.get();
MondrianProperties.instance().MaxConstraints.set(2);
try {
checkNotNative(12, query, fold(result));
} finally {
MondrianProperties.instance().MaxConstraints.set(origMaxConstraint);
}
}
/**
* Verify that the presence of All member in all the inputs disables native
* evaluation.
*/
public void testAllMembersNECJ1() {
// This query cannot be evaluated natively because of the "All" member.
String query =
"select " +
"NonEmptyCrossJoin({[Store].[All Stores]}, {[Product].[All Products]}) on rows " +
"from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Store].[All Stores], [Product].[All Products]}\n" +
"Row #0: 266,773\n";
boolean origNativeCrossJoin =
MondrianProperties.instance().EnableNativeCrossJoin.get();
MondrianProperties.instance().EnableNativeCrossJoin.set(true);
checkNotNative(1, query, fold(result));
MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCrossJoin);
}
/**
* Verify that the native evaluation is possible if one input does not
* contain the All member.
*/
public void testAllMembersNECJ2() {
// This query can be evaluated natively because there is at least one
// non "All" member.
// It can also be rewritten to use
// Filter([Product].[All Products].Children, Is NotEmpty([Measures].[Unit Sales]))
// which can be natively evaluated
String query =
"select " +
"NonEmptyCrossJoin([Product].[All Products].Children, {[Store].[All Stores]}) on rows " +
"from [Sales]";
String result =
"Axis
"{}\n" +
"Axis
"{[Product].[All Products].[Drink], [Store].[All Stores]}\n" +
"{[Product].[All Products].[Food], [Store].[All Stores]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store].[All Stores]}\n" +
"Row #0: 24,597\n" +
"Row #0: 191,940\n" +
"Row #0: 50,236\n";
boolean origNativeCrossJoin =
MondrianProperties.instance().EnableNativeCrossJoin.get();
MondrianProperties.instance().EnableNativeCrossJoin.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
boolean requestFreshConnection = true;
checkNative(0, 3, query, fold(result), requestFreshConnection);
MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCrossJoin);
}
/**
 * getMembersInLevel where Level = (All).
 */
public void testAllLevelMembers() {
    // 14 positions are expected both before and after the non-empty filter.
    final String mdx =
        "select {[Measures].[Store Sales]} ON COLUMNS, "
        + "NON EMPTY Crossjoin([Product].[(All)].Members, [Promotion Media].[All Media].Children) ON ROWS "
        + "from [Sales]";
    checkNative(14, 14, mdx);
}
/**
 * Enum sets {} containing only the ALL member.
 */
public void testCjDescendantsEnumAllOnly() {
    // A one-element enum set holding the All member keeps the join native.
    final String mdx = "select {[Measures].[Unit Sales]} ON COLUMNS, " + "NON EMPTY Crossjoin("
        + " Descendants([Customers].[All Customers].[USA], [Customers].[City]), "
        + " {[Product].[All Products]}) ON ROWS " + "from [Sales] "
        + "where ([Promotions].[All Promotions].[Bag Stuffers])";
    checkNative(9, 9, mdx);
}
/**
 * Checks that crossjoin returns a modifiable copy from cache,
 * because the result is modified during sort.
 */
public void testResultIsModifyableCopy() {
    final String mdx =
        "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Order("
        + " CrossJoin([Customers].[All Customers].[USA].children, [Promotions].[Promotion Name].Members), "
        + " [Measures].[Store Sales]) ON ROWS" + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(3, 3, mdx);
}
/** Check that top count is executed natively unless disabled. */
public void testNativeTopCount() {
    String query =
        "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY TopCount("
        + " CrossJoin([Customers].[All Customers].[USA].children, [Promotions].[Promotion Name].Members), "
        + " 3, (3 * [Measures].[Store Sales]) - 100) ON ROWS"
        + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    boolean origNativeTopCount =
        MondrianProperties.instance().EnableNativeTopCount.get();
    MondrianProperties.instance().EnableNativeTopCount.set(true);
    try {
        // Get a fresh connection; otherwise the mondrian property setting
        // is not refreshed for this parameter.
        boolean requestFreshConnection = true;
        checkNative(3, 3, query, null, requestFreshConnection);
    } finally {
        // Restore the global property so later tests are unaffected.
        MondrianProperties.instance().EnableNativeTopCount.set(origNativeTopCount);
    }
}
/** check that top count is executed native with calculated member */
public void testCmNativeTopCount() {
String query =
"with member [Measures].[Store Profit Rate] as '([Measures].[Store Sales]-[Measures].[Store Cost])/[Measures].[Store Cost]', format = '
+ "select {[Measures].[Store Sales]} on columns,"
+ " NON EMPTY TopCount("
+ " [Customers].[All Customers].[USA].children, "
+ " 3, [Measures].[Store Profit Rate] / 2) ON ROWS"
+ " from [Sales]";
boolean origNativeTopCount =
MondrianProperties.instance().EnableNativeTopCount.get();
MondrianProperties.instance().EnableNativeTopCount.set(true);
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
boolean requestFreshConnection = true;
checkNative(3, 3, query, null, requestFreshConnection);
MondrianProperties.instance().EnableNativeTopCount.set(origNativeTopCount);
}
public void testMeasureAndAggregateInSlicer() {
String result = "Axis
+ nl
+ "{[Store Type].[All Store Types].[All Types], [Measures].[Unit Sales], [Customers].[All Customers].[USA], [Product].[All Products].[Drink]}"
+ nl + "Axis #1:" + nl + "{[Time].[1997]}" + nl + "Axis #2:" + nl
+ "{[Store].[All Stores].[USA].[CA].[Beverly Hills]}" + nl
+ "{[Store].[All Stores].[USA].[CA].[Los Angeles]}" + nl
+ "{[Store].[All Stores].[USA].[CA].[San Diego]}" + nl
+ "{[Store].[All Stores].[USA].[CA].[San Francisco]}" + nl + "Row #0: 1,945" + nl
+ "Row #1: 2,422" + nl + "Row #2: 2,560" + nl + "Row #3: 175" + nl;
assertQueryReturns(
"with member [Store Type].[All Store Types].[All Types] as 'Aggregate({[Store Type].[All Store Types].[Deluxe Supermarket], "
+ "[Store Type].[All Store Types].[Gourmet Supermarket], "
+ "[Store Type].[All Store Types].[HeadQuarters], "
+ "[Store Type].[All Store Types].[Mid-Size Grocery], "
+ "[Store Type].[All Store Types].[Small Grocery], "
+ "[Store Type].[All Store Types].[Supermarket]})' "
+ "select NON EMPTY {[Time].[1997]} ON COLUMNS, "
+ "NON EMPTY [Store].[All Stores].[USA].[CA].Children ON ROWS "
+ "from [Sales] "
+ "where ([Store Type].[All Store Types].[All Types], [Measures].[Unit Sales], [Customers].[All Customers].[USA], [Product].[All Products].[Drink]) ",
result);
}
public void testMeasureInSlicer() {
String result = "Axis
+ nl
+ "{[Measures].[Unit Sales], [Customers].[All Customers].[USA], [Product].[All Products].[Drink]}"
+ nl + "Axis #1:" + nl + "{[Time].[1997]}" + nl + "Axis #2:" + nl
+ "{[Store].[All Stores].[USA].[CA].[Beverly Hills]}" + nl
+ "{[Store].[All Stores].[USA].[CA].[Los Angeles]}" + nl
+ "{[Store].[All Stores].[USA].[CA].[San Diego]}" + nl
+ "{[Store].[All Stores].[USA].[CA].[San Francisco]}" + nl + "Row #0: 1,945" + nl
+ "Row #1: 2,422" + nl + "Row #2: 2,560" + nl + "Row #3: 175" + nl;
assertQueryReturns(
"select NON EMPTY {[Time].[1997]} ON COLUMNS, "
+ "NON EMPTY [Store].[All Stores].[USA].[CA].Children ON ROWS "
+ "from [Sales] "
+ "where ([Measures].[Unit Sales], [Customers].[All Customers].[USA], [Product].[All Products].[Drink])",
result);
}
/**
 * Calc member in TopCount: this topcount cannot be calculated natively
 * because its set contains calculated members.
 */
public void testCmInTopCount() {
    final String mdx = "with member [Time].[Jan] as "
        + "'Aggregate({[Time].[1998].[Q1].[1], [Time].[1997].[Q1].[1]})' "
        + "select NON EMPTY {[Measures].[Unit Sales]} ON columns, "
        + "NON EMPTY TopCount({[Time].[Jan]}, 2) ON rows from [Sales] ";
    checkNotNative(1, mdx);
}
/** A calc member in the slicer cannot be executed natively. */
public void testCmInSlicer() {
    final String mdx = "with member [Time].[Jan] as "
        + "'Aggregate({[Time].[1998].[Q1].[1], [Time].[1997].[Q1].[1]})' "
        + "select NON EMPTY {[Measures].[Unit Sales]} ON columns, "
        + "NON EMPTY [Product].[All Products].Children ON rows from [Sales] "
        + "where ([Time].[Jan]) ";
    checkNotNative(3, mdx);
}
/**
 * A nested crossjoin of three full member lists should stay native.
 */
public void testCjMembersMembersMembers() {
    final String mdx = "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Crossjoin("
        + " Crossjoin("
        + " [Customers].[Name].Members,"
        + " [Product].[Product Name].Members), "
        + " [Promotions].[Promotion Name].Members) ON rows "
        + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(67, 67, mdx);
}
/** Use SQL even when all members are known. */
public void testCjEnumEnum() {
    // Raise mondrian.rolap.maxConstraints if it is too low for the
    // two-member enum sets used below.
    final int origMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    final int minConstraints = 2;
    if (origMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        final String mdx =
            "select {[Measures].[Unit Sales]} ON COLUMNS, "
            + "NonEmptyCrossjoin({[Product].[All Products].[Drink].[Beverages], [Product].[All Products].[Drink].[Dairy]}, {[Customers].[All Customers].[USA].[OR].[Portland], [Customers].[All Customers].[USA].[OR].[Salem]}) ON ROWS "
            + "from [Sales] ";
        checkNative(4, 4, mdx);
    } finally {
        MondrianProperties.instance().MaxConstraints.set(origMaxConstraints);
    }
}
/** A set containing only a null member should not prevent native usage. */
public void testCjNullInEnum() {
    final MondrianProperties properties = MondrianProperties.instance();
    final boolean savedInvalidProp =
        properties.IgnoreInvalidMembersDuringQuery.get();
    try {
        // [emale] is an invalid member; ignore it rather than fail.
        properties.IgnoreInvalidMembersDuringQuery.set(true);
        final String mdx =
            "select {[Measures].[Unit Sales]} ON COLUMNS, "
            + "NON EMPTY Crossjoin({[Gender].[All Gender].[emale]}, [Customers].[All Customers].[USA].children) ON ROWS "
            + "from [Sales] ";
        checkNative(20, 0, mdx);
    } finally {
        properties.IgnoreInvalidMembersDuringQuery.set(savedInvalidProp);
    }
}
/**
 * Enum sets {} containing members from different levels cannot be computed
 * natively currently.
 */
public void testCjDescendantsEnumAll() {
    final String mdx =
        "select {[Measures].[Unit Sales]} ON COLUMNS, "
        + "NON EMPTY Crossjoin("
        + " Descendants([Customers].[All Customers].[USA], [Customers].[City]), "
        + " {[Product].[All Products], [Product].[All Products].[Drink].[Dairy]}) ON ROWS "
        + "from [Sales] "
        + "where ([Promotions].[All Promotions].[Bag Stuffers])";
    checkNotNative(13, mdx);
}
/**
 * Crossjoin of Descendants with a same-level enum set stays native.
 */
public void testCjDescendantsEnum() {
    // Raise mondrian.rolap.maxConstraints when it is below what the
    // two-member enum set requires.
    final int origMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    final int minConstraints = 2;
    if (origMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        final String mdx =
            "select {[Measures].[Unit Sales]} ON COLUMNS, "
            + "NON EMPTY Crossjoin("
            + " Descendants([Customers].[All Customers].[USA], [Customers].[City]), "
            + " {[Product].[All Products].[Drink].[Beverages], [Product].[All Products].[Drink].[Dairy]}) ON ROWS "
            + "from [Sales] "
            + "where ([Promotions].[All Promotions].[Bag Stuffers])";
        checkNative(11, 11, mdx);
    } finally {
        MondrianProperties.instance().MaxConstraints.set(origMaxConstraints);
    }
}
/**
 * Crossjoin of a same-level enum set with Children stays native.
 */
public void testCjEnumChildren() {
    // Raise mondrian.rolap.maxConstraints when it is below what the
    // two-member enum set requires.
    final int origMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    final int minConstraints = 2;
    if (origMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        final String mdx =
            "select {[Measures].[Unit Sales]} ON COLUMNS, "
            + "NON EMPTY Crossjoin("
            + " {[Product].[All Products].[Drink].[Beverages], [Product].[All Products].[Drink].[Dairy]}, "
            + " [Customers].[All Customers].[USA].[WA].Children) ON ROWS "
            + "from [Sales] " + "where ([Promotions].[All Promotions].[Bag Stuffers])";
        checkNative(3, 3, mdx);
    } finally {
        MondrianProperties.instance().MaxConstraints.set(origMaxConstraints);
    }
}
/**
 * {} contains members from different levels; this cannot be handled by
 * the current native crossjoin.
 */
public void testCjEnumDifferentLevelsChildren() {
    // Skip when expression-dependency testing is on: it issues spurious
    // interval calls to 'level.getMembers()' which create false negatives
    // in this test.
    if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
        return;
    }
    final String mdx = "select {[Measures].[Unit Sales]} ON COLUMNS, "
        + "NON EMPTY Crossjoin("
        + " {[Product].[All Products].[Food], [Product].[All Products].[Drink].[Dairy]}, "
        + " [Customers].[All Customers].[USA].[WA].Children) ON ROWS " + "from [Sales] "
        + "where ([Promotions].[All Promotions].[Bag Stuffers])";
    new TestCase(8, 5, mdx).run();
}
/**
 * Crossjoin of Descendants with a full member list stays native.
 */
public void testCjDescendantsMembers() {
    final String mdx = "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Crossjoin("
        + " Descendants([Customers].[All Customers].[USA].[CA], [Customers].[Name]),"
        + " [Product].[Product Name].Members) ON rows " + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(67, 67, mdx);
}
/**
 * Same as {@link #testCjDescendantsMembers} with the crossjoin operands
 * swapped; should still be native.
 */
public void testCjMembersDescendants() {
    final String mdx =
        "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Crossjoin("
        + " [Product].[Product Name].Members,"
        + " Descendants([Customers].[All Customers].[USA].[CA], [Customers].[Name])) ON rows "
        + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(67, 67, mdx);
}
/**
 * Crossjoin of Children with a full member list stays native.
 */
public void testCjChildrenMembers() {
    final String mdx = "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Crossjoin([Customers].[All Customers].[USA].[CA].children,"
        + " [Product].[Product Name].Members) ON rows " + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(67, 67, mdx);
}
/**
 * Crossjoin of a full member list with Children stays native.
 */
public void testCjMembersChildren() {
    final String mdx = "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Crossjoin([Product].[Product Name].Members,"
        + " [Customers].[All Customers].[USA].[CA].children) ON rows "
        + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(67, 67, mdx);
}
/**
 * Crossjoin of two full member lists stays native.
 */
public void testCjMembersMembers() {
    final String mdx = "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Crossjoin([Customers].[Name].Members,"
        + " [Product].[Product Name].Members) ON rows " + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(67, 67, mdx);
}
/**
 * Crossjoin of two Children expressions stays native.
 */
public void testCjChildrenChildren() {
    final String mdx =
        "select {[Measures].[Store Sales]} on columns, "
        + " NON EMPTY Crossjoin( "
        + " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Wine].children, "
        + " [Customers].[All Customers].[USA].[CA].CHILDREN) ON rows"
        + " from [Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(3, 3, mdx);
}
/**
 * A NON EMPTY Hierarchize(Union(...)) over nested crossjoins; expects
 * exactly 21 positions on the rows axis.
 */
public void testNonEmptyUnionQuery() {
    final Result result = executeQuery(
        "select {[Measures].[Unit Sales], [Measures].[Store Cost], [Measures].[Store Sales]} on columns,\n" +
        " NON EMPTY Hierarchize( \n" +
        " Union(\n" +
        " Crossjoin(\n" +
        " Crossjoin([Gender].[All Gender].children,\n" +
        " [Marital Status].[All Marital Status].children ),\n" +
        " Crossjoin([Customers].[All Customers].children,\n" +
        " [Product].[All Products].children ) ),\n" +
        " Crossjoin( {([Gender].[All Gender].[M], [Marital Status].[All Marital Status].[M] )},\n" +
        " Crossjoin(\n" + " [Customers].[All Customers].[USA].children,\n" +
        " [Product].[All Products].children ) ) )) on rows\n" +
        "from Sales where ([Time].[1997])");
    // The rows axis is the second axis of the result.
    final Axis rows = result.getAxes()[1];
    Assert.assertEquals(21, rows.getPositions().size());
}
/**
* when Mondrian parses a string like "[Store].[All Stores].[USA].[CA].[San Francisco]"
* it shall not lookup additional members.
*/
public void testLookupMemberCache() {
if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
// Dependency testing causes extra SQL reads, and screws up this
// test.
return;
}
// Install hard (non-evicting) caches on the member reader so that the
// cache contents can be inspected deterministically after the query.
SmartMemberReader smr = getSmartMemberReader("Store");
smr.mapLevelToMembers.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapLevel, Object>,
List<RolapMember>>());
smr.mapMemberToChildren.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapMember, Object>,
List<RolapMember>>());
// smr.mapKeyToMember = new HardSmartCache();
// Start from an empty member-key cache so the count below reflects only
// the reads caused by this query.
smr.mapKeyToMember.clear();
RolapResult result = (RolapResult) executeQuery(
"select {[Store].[All Stores].[USA].[CA].[San Francisco]} on columns from [Sales]");
// Resolving the 5-segment member path should read at most the members on
// that path — no siblings.
assertTrue("no additional members should be read:" + smr.mapKeyToMember.size(),
smr.mapKeyToMember.size() <= 5);
RolapMember sf = (RolapMember) result.getAxes()[0].getPositions().get(0).get(0);
RolapMember ca = sf.getParentMember();
// The full children list of [CA] must NOT have been faulted in...
// NOTE(review): 'scf' is presumably the sql-constraint factory defined
// elsewhere in this class — confirm.
List list = smr.mapMemberToChildren.get(ca, scf.getMemberChildrenConstraint(null));
assertNull("children of [CA] are not in cache", list);
// ...but the single child looked up by name must be cached.
list = smr.mapMemberToChildren.get(ca, scf.getChildByNameConstraint(ca, "San Francisco"));
assertNotNull("child [San Francisco] of [CA] is in cache", list);
assertEquals("[San Francisco] expected", sf, list.get(0));
}
/**
 * When looking for [Month] Mondrian generates SQL that tries to find 'Month'
 * as a member of the time dimension. This resulted in an SQLException because
 * the year level is numeric and the constant 'Month' in the WHERE condition
 * is not. It is probably a bug that Mondrian does not take into account
 * [Time].[1997] when looking up [Month].
 */
public void testLookupMember() {
    // Passes as long as no exception occurs.
    final String mdx =
        "SELECT DESCENDANTS([Time].[1997], [Month]) ON COLUMNS FROM [Sales]";
    executeQuery(mdx);
}
/**
 * Non Empty CrossJoin(A, B) gets turned into CrossJoin(Non Empty(A), Non Empty(B)).
 * Verifies that there is no crash when B has non-zero length before the
 * non-empty filtering and zero length after it.
 */
public void testNonEmptyCrossJoinList() {
    boolean oldEnableNativeCJ = MondrianProperties.instance().EnableNativeCrossJoin.get();
    MondrianProperties.instance().EnableNativeCrossJoin.set(false);
    boolean oldEnableNativeNonEmpty = MondrianProperties.instance().EnableNativeNonEmpty.get();
    MondrianProperties.instance().EnableNativeNonEmpty.set(false);
    try {
        executeQuery("select non empty CrossJoin([Customers].[Name].Members, {[Promotions].[All Promotions].[Fantastic Discounts]}) ON COLUMNS FROM [Sales]");
    } finally {
        // Restore the global properties even if the query throws, so a
        // failure here cannot leak the disabled-native settings into
        // subsequent tests.
        MondrianProperties.instance().EnableNativeCrossJoin.set(oldEnableNativeCJ);
        MondrianProperties.instance().EnableNativeNonEmpty.set(oldEnableNativeNonEmpty);
    }
}
/**
 * SQL optimization must be turned off in ragged hierarchies.
 */
public void testLookupMember2() {
    // The test passes as long as no exception is thrown.
    final String mdx =
        "select {[Store].[USA].[Washington]} on columns from [Sales Ragged]";
    executeQuery(mdx);
}
/**
 * Makes sure that the Crossjoin in [Measures].[CustomerCount]
 * is not evaluated in a NON EMPTY context.
 */
public void testCalcMemberWithNonEmptyCrossJoin() {
    CachePool.instance().flush();
    final String mdx = "with member [Measures].[CustomerCount] as \n"
        + "'Count(CrossJoin({[Product].[All Products]}, [Customers].[Name].Members))'\n"
        + "select \n"
        + "NON EMPTY{[Measures].[CustomerCount]} ON columns,\n"
        + "NON EMPTY{[Product].[All Products]} ON rows\n"
        + "from [Sales]\n"
        + "where ([Store].[All Stores].[USA].[CA].[San Francisco].[Store 14], [Time].[1997].[Q1].[1] )";
    Result result = executeQuery(mdx);
    Cell cell = result.getCell(new int[] {0, 0});
    // All 10,281 customers are counted, even though only 20 of them are
    // non-empty in this context.
    // @see #testLevelMembers
    assertEquals("10,281", cell.getFormattedValue());
}
/**
 * Runs an MDX query with a predefined result limit and checks the number of
 * positions on the row axis. The reduced result limit ensures that the
 * optimization is present: reading more rows than the limit from SQL fails
 * the query.
 */
class TestCase {
    /**
     * Maximum number of rows to be read from SQL. If more than this number
     * of rows are read, the test will fail.
     */
    int resultLimit;
    /**
     * MDX query to execute.
     */
    String query;
    /**
     * Number of positions we expect on the rows axis of the result.
     */
    int rowCount;
    /**
     * Mondrian connection.
     */
    Connection con;

    public TestCase(int resultLimit, int rowCount, String query) {
        this.con = getConnection();
        this.resultLimit = resultLimit;
        this.rowCount = rowCount;
        this.query = query;
    }

    public TestCase(Connection con, int resultLimit, int rowCount, String query) {
        this.con = con;
        this.resultLimit = resultLimit;
        this.rowCount = rowCount;
        this.query = query;
    }

    private Result run() {
        CachePool.instance().flush();
        IntegerProperty limitProperty = MondrianProperties.instance().ResultLimit;
        int savedLimit = limitProperty.get();
        try {
            limitProperty.set(this.resultLimit);
            Result result = executeQuery(query, con);
            // Rows are always on the last axis of the result.
            Axis[] axes = result.getAxes();
            Axis rowsAxis = axes[axes.length - 1];
            assertEquals(rowCount, rowsAxis.getPositions().size());
            return result;
        } finally {
            // Restore the global limit no matter what happened above.
            limitProperty.set(savedLimit);
        }
    }
}
/**
 * Verifies that NON EMPTY level-member lookups are fetched with a
 * constraint derived from the current evaluation context, and that the
 * constrained results (but not unconstrained ones) land in the caches.
 */
public void testLevelMembers() {
if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
// Dependency testing causes extra SQL reads, and screws up this
// test.
return;
}
SmartMemberReader smr = getSmartMemberReader("Customers");
// Install hard (non-evicting) caches so cache contents can be inspected.
smr.mapLevelToMembers.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapLevel, Object>,
List<RolapMember>>());
smr.mapMemberToChildren.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapMember, Object>,
List<RolapMember>>());
TestCase c = new TestCase(
50,
21,
"select \n"
+ "{[Measures].[Unit Sales]} ON columns,\n"
+ "NON EMPTY {[Customers].[All Customers], [Customers].[Name].Members} ON rows\n"
+ "from [Sales]\n"
+ "where ([Store].[All Stores].[USA].[CA].[San Francisco].[Store 14], [Time].[1997].[Q1].[1] )");
Result r = c.run();
Level[] levels = smr.getHierarchy().getLevels();
Level nameLevel = levels[levels.length - 1];
// evaluator for [All Customers], [Store 14], [1/1/1997]
Evaluator context = getEvaluator(r, new int[] { 0, 0});
// make sure that [Customers].[Name].Members is NOT in cache
TupleConstraint lmc = scf.getLevelMembersConstraint(null);
assertNull(smr.mapLevelToMembers.get((RolapLevel) nameLevel, lmc));
// make sure that NON EMPTY [Customers].[Name].Members IS in cache
lmc = scf.getLevelMembersConstraint(context);
List list = smr.mapLevelToMembers.get((RolapLevel) nameLevel, lmc);
assertNotNull(list);
// 20 non-empty customers for this slicer context
assertEquals(20, list.size());
// make sure that the parent/child for the context are cached
// [Customers].[All Customers].[USA].[CA].[Burlingame].[Peggy Justice]
Member member = r.getAxes()[1].getPositions().get(1).get(0);
Member parent = member.getParentMember();
// lookup all children of [Burlingame] -> not in cache
MemberChildrenConstraint mcc = scf.getMemberChildrenConstraint(null);
assertNull(smr.mapMemberToChildren.get((RolapMember) parent, mcc));
// lookup NON EMPTY children of [Burlingame] -> yes these are in cache
mcc = scf.getMemberChildrenConstraint(context);
list = smr.mapMemberToChildren.get((RolapMember) parent, mcc);
assertNotNull(list);
assertTrue(list.contains(member));
}
/**
 * Mirror of {@code testLevelMembers} without NON EMPTY: the unconstrained
 * level members must be cached, while context-constrained entries must not.
 */
public void testLevelMembersWithoutNonEmpty() {
SmartMemberReader smr = getSmartMemberReader("Customers");
// Install hard (non-evicting) caches so cache contents can be inspected.
smr.mapLevelToMembers.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapLevel, Object>,
List<RolapMember>>());
smr.mapMemberToChildren.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapMember, Object>,
List<RolapMember>>());
Result r = executeQuery("select \n"
+ "{[Measures].[Unit Sales]} ON columns,\n"
+ "{[Customers].[All Customers], [Customers].[Name].Members} ON rows\n"
+ "from [Sales]\n"
+ "where ([Store].[All Stores].[USA].[CA].[San Francisco].[Store 14], [Time].[1997].[Q1].[1] )");
Level[] levels = smr.getHierarchy().getLevels();
Level nameLevel = levels[levels.length - 1];
// evaluator for [All Customers], [Store 14], [1/1/1997]
Evaluator context = getEvaluator(r, new int[] { 0, 0});
// make sure that [Customers].[Name].Members IS in cache
TupleConstraint lmc = scf.getLevelMembersConstraint(null);
List list = smr.mapLevelToMembers.get((RolapLevel) nameLevel, lmc);
assertNotNull(list);
// all 10,281 customers, since no non-empty filtering happened
assertEquals(10281, list.size());
// make sure that NON EMPTY [Customers].[Name].Members is NOT in cache
lmc = scf.getLevelMembersConstraint(context);
assertNull(smr.mapLevelToMembers.get((RolapLevel) nameLevel, lmc));
// make sure that the parent/child for the context are cached
// [Customers].[All Customers].[Canada].[BC].[Burnaby]
Member member = r.getAxes()[1].getPositions().get(1).get(0);
Member parent = member.getParentMember();
// lookup all children of the parent -> yes, found in cache
// NOTE(review): the original comment said [Burlingame] here although the
// comment above names [Burnaby] -- verify which city position 1 holds.
MemberChildrenConstraint mcc = scf.getMemberChildrenConstraint(null);
list = smr.mapMemberToChildren.get((RolapMember) parent, mcc);
assertNotNull(list);
assertTrue(list.contains(member));
// lookup NON EMPTY children of the parent -> not in cache
mcc = scf.getMemberChildrenConstraint(context);
list = smr.mapMemberToChildren.get((RolapMember) parent, mcc);
assertNull(list);
}
/**
 * Tests that <Dimension>.Members exploits the same optimization as
 * <Level>.Members.
 */
public void testDimensionMembers() {
// No query should return more than 34 rows: 1 row at the 'all' level,
// 1 row at nation level, 1 at state level, 20 at city level, and 11
// at customers level = 34.
TestCase c = new TestCase(
34,
34,
"select \n"
+ "{[Measures].[Unit Sales]} ON columns,\n"
+ "NON EMPTY [Customers].Members ON rows\n"
+ "from [Sales]\n"
+ "where ([Store].[All Stores].[USA].[CA].[San Francisco].[Store 14], [Time].[1997].[Q1].[1] )");
c.run();
}
/**
 * Tests non empty children of a rolap member.
 */
public void testMemberChildrenOfRolapMember() {
    final String mdx = "select \n"
        + "{[Measures].[Unit Sales]} ON columns,\n"
        + "NON EMPTY [Customers].[All Customers].[USA].[CA].[Palo Alto].Children ON rows\n"
        + "from [Sales]\n"
        + "where ([Store].[All Stores].[USA].[CA].[San Francisco].[Store 14], [Time].[1997].[Q1].[1] )";
    // At most 50 rows may be read from SQL; 4 row positions are expected.
    new TestCase(50, 4, mdx).run();
}
/**
 * Tests non empty children of the All member.
 */
public void testMemberChildrenOfAllMember() {
    final String mdx = "select {[Measures].[Unit Sales]} ON columns,\n"
        + "NON EMPTY [Promotions].[All Promotions].Children ON rows from [Sales]\n"
        + "where ([Time].[1997].[Q1].[1] )";
    // At most 50 rows may be read from SQL; 14 row positions are expected.
    new TestCase(50, 14, mdx).run();
}
/**
 * Tests non empty children of the All member without a WHERE clause.
 */
public void testMemberChildrenNoWhere() {
    // The time dimension is joined in even without a WHERE clause, because
    // there is no (All) level in the Time hierarchy:
    //
    //   select `promotion`.`promotion_name` as `c0`
    //   from `time_by_day` as `time_by_day`,
    //        `sales_fact_1997` as `sales_fact_1997`,
    //        `promotion` as `promotion`
    //   where `sales_fact_1997`.`time_id` = `time_by_day`.`time_id`
    //     and `time_by_day`.`the_year` = 1997
    //     and `sales_fact_1997`.`promotion_id` = `promotion`.`promotion_id`
    //   group by `promotion`.`promotion_name`
    //   order by `promotion`.`promotion_name`
    final String mdx = "select {[Measures].[Unit Sales]} ON columns,\n"
        + "NON EMPTY [Promotions].[All Promotions].Children ON rows from [Sales]\n";
    new TestCase(50, 48, mdx).run();
}
/**
 * Testcase for bug 1379068, which causes no children of [Time].[1997].[Q2]
 * to be found, because it incorrectly constrains on the level's key column
 * rather than the name column.
 */
public void testMemberChildrenNameCol() {
    // Expression dependency testing causes false negatives.
    if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
        return;
    }
    final String mdx =
        "select " +
        " {[Measures].[Count]} ON columns," +
        " {[Time].[1997].[Q2].[April]} on rows " +
        "from [HR]";
    new TestCase(3, 1, mdx).run();
}
/**
 * When a member is expanded in JPivot with multiple hierarchies visible it
 * generates a
 * <code>CrossJoin({[member from left hierarchy]}, [member to expand].Children)</code>.
 * This should behave the same as if <code>[member from left hierarchy]</code>
 * had been put into the slicer.
 */
public void testCrossjoin() {
    final String mdx = "select \n"
        + "{[Measures].[Unit Sales]} ON columns,\n"
        + "NON EMPTY Crossjoin({[Store].[All Stores].[USA].[CA].[San Francisco].[Store 14]}, [Customers].[All Customers].[USA].[CA].[Palo Alto].Children) ON rows\n"
        + "from [Sales] where ([Time].[1997].[Q1].[1] )";
    // At most 45 rows may be read from SQL; 4 row positions are expected.
    new TestCase(45, 4, mdx).run();
}
/**
 * Ensures that NON EMPTY Descendants is optimized.
 * Ensures that Descendants as a side effect collects MemberChildren that
 * may be looked up in the cache.
 */
public void testNonEmptyDescendants() {
// Don't run the test if we're testing expression dependencies.
// Expression dependencies cause spurious interval calls to
// 'level.getMembers()' which create false negatives in this test.
if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
return;
}
Connection con = getTestContext().getFoodMartConnection(false);
SmartMemberReader smr = getSmartMemberReader(con, "Customers");
// Install hard (non-evicting) caches so cache contents can be inspected.
smr.mapLevelToMembers.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapLevel, Object>,
List<RolapMember>>());
smr.mapMemberToChildren.setCache(
new HardSmartCache<
SmartMemberListCache.Key2<RolapMember, Object>,
List<RolapMember>>());
TestCase c = new TestCase(
con,
45,
21,
"select \n"
+ "{[Measures].[Unit Sales]} ON columns, "
+ "NON EMPTY {[Customers].[All Customers], Descendants([Customers].[All Customers].[USA].[CA], [Customers].[Name])} on rows "
+ "from [Sales] "
+ "where ([Store].[All Stores].[USA].[CA].[San Francisco].[Store 14], [Time].[1997].[Q1].[1] )");
Result result = c.run();
// [Customers].[All Customers].[USA].[CA].[Burlingame].[Peggy Justice]
RolapMember peggy = (RolapMember) result.getAxes()[1].getPositions().get(1).get(0);
RolapMember burlingame = peggy.getParentMember();
// all children of burlingame are not in cache
MemberChildrenConstraint mcc = scf.getMemberChildrenConstraint(null);
assertNull(smr.mapMemberToChildren.get(burlingame, mcc));
// but non empty children is
Evaluator evaluator = getEvaluator(result, new int[] { 0, 0});
mcc = scf.getMemberChildrenConstraint(evaluator);
List list = smr.mapMemberToChildren.get(burlingame, mcc);
assertNotNull(list);
assertTrue(list.contains(peggy));
// now we run the same query again, this time everything must come out of the cache
RolapNativeRegistry reg = getRegistry(con);
reg.setListener(new Listener() {
public void foundEvaluator(NativeEvent e) {
}
public void foundInCache(TupleEvent e) {
}
// NOTE: 'excutingSql' (sic) is the Listener interface's method name;
// it fires when SQL is about to be executed, which must not happen
// on the cached second run.
public void excutingSql(TupleEvent e) {
fail("expected caching");
}
});
try {
c.run();
} finally {
// Always detach the listener so later tests are unaffected.
reg.setListener(null);
}
}
public void testBug1412384() {
// Bug 1412384 causes a NPE in SqlConstraintUtils.
assertQueryReturns("select NON EMPTY {[Time].[1997]} ON COLUMNS,\n" +
"NON EMPTY Hierarchize(Union({[Customers].[All Customers]},\n" +
"[Customers].[All Customers].Children)) ON ROWS\n" +
"from [Sales]\n" +
"where [Measures].[Profit]",
fold(
"Axis
"{[Measures].[Profit]}\n" +
"Axis
"{[Time].[1997]}\n" +
"Axis
"{[Customers].[All Customers]}\n" +
"{[Customers].[All Customers].[USA]}\n" +
"Row #0: $339,610.90\n" +
"Row #1: $339,610.90\n"));
}
/**
 * Tests an implicit non-empty cross join against the virtual cube;
 * checked via {@code checkNative}.
 */
public void testVirtualCubeCrossJoin()
{
    final String mdx =
        "select " +
        "{[Measures].[Units Ordered], [Measures].[Store Sales]} on columns, " +
        "non empty crossjoin([Product].[All Products].children, " +
        "[Store].[All Stores].children) on rows " +
        "from [Warehouse and Sales]";
    checkNative(18, 3, mdx);
}
/**
 * Tests an explicit NonEmptyCrossJoin against the virtual cube;
 * checked via {@code checkNative}.
 */
public void testVirtualCubeNonEmptyCrossJoin()
{
    final String mdx =
        "select " +
        "{[Measures].[Units Ordered], [Measures].[Store Sales]} on columns, " +
        "NonEmptyCrossJoin([Product].[All Products].children, " +
        "[Store].[All Stores].children) on rows " +
        "from [Warehouse and Sales]";
    checkNative(18, 3, mdx);
}
/**
 * Tests nested NonEmptyCrossJoin calls (three inputs overall) against the
 * virtual cube; checked via {@code checkNative}.
 */
public void testVirtualCubeNonEmptyCrossJoin3Args()
{
    final String mdx =
        "select " +
        "{[Measures].[Store Sales]} on columns, " +
        "nonEmptyCrossJoin([Product].[All Products].children, " +
        "nonEmptyCrossJoin([Customers].[All Customers].children," +
        "[Store].[All Stores].children)) on rows " +
        "from [Warehouse and Sales]";
    checkNative(3, 3, mdx);
}
/**
 * A cross join involving non-conforming dimensions should not use native
 * cross joins (that would result in a cartesian product join). Verifies
 * that no alert is raised even though native evaluation isn't supported,
 * because the lack of support is intentional.
 */
public void testVirtualCubeCrossJoinNonConformingDim()
{
    StringProperty alertProperty =
        MondrianProperties.instance().AlertNativeEvaluationUnsupported;
    String savedAlert = alertProperty.get();
    // At ERROR level an unexpected alert would make the query throw.
    alertProperty.set("ERROR");
    try {
        checkNotNative(0,
            "select " +
            "{[Measures].[Units Ordered], [Measures].[Store Sales]} on columns, " +
            "NonEmptyCrossJoin([Customers].[All Customers].children, " +
            "[Warehouse].[All Warehouses].children) on rows " +
            "from [Warehouse and Sales]");
    } finally {
        // Restore the alert level even on failure.
        alertProperty.set(savedAlert);
    }
}
/**
 * Verifies that no alert is raised even though native evaluation isn't
 * supported, because the query doesn't use an explicit NonEmptyCrossJoin.
 */
public void testNotNativeVirtualCubeCrossJoin1()
{
    StringProperty alertProperty =
        MondrianProperties.instance().AlertNativeEvaluationUnsupported;
    String savedAlert = alertProperty.get();
    // At ERROR level an unexpected alert would make the query throw.
    alertProperty.set("ERROR");
    try {
        // Native cross join cannot be used due to AllMembers.
        checkNotNative(3,
            "select " +
            "{[Measures].AllMembers} on columns, " +
            "non empty crossjoin([Product].[All Products].children, " +
            "[Store].[All Stores].children) on rows " +
            "from [Warehouse and Sales]");
    } finally {
        // Restore the alert level even on failure.
        alertProperty.set(savedAlert);
    }
}
/**
 * Native cross join cannot be used because the query uses the range
 * operator ':' on the measures axis.
 */
public void testNotNativeVirtualCubeCrossJoin2()
{
    final String mdx =
        "select " +
        "{[Measures].[Sales Count] : [Measures].[Unit Sales]} on columns, " +
        "non empty crossjoin([Product].[All Products].children, " +
        "[Store].[All Stores].children) on rows " +
        "from [Warehouse and Sales]";
    checkNotNative(3, mdx);
}
/**
 * Checks the alerting behaviour when a NonEmptyCrossJoin cannot be
 * evaluated natively: at ERROR level a
 * NativeEvaluationUnsupportedException is thrown and one ERROR event is
 * logged; at WARN level exactly one warning is logged and the query
 * succeeds; with native cross joins disabled no alert is produced at all.
 */
public void testNotNativeVirtualCubeCrossJoinUnsupported()
{
BooleanProperty enableProperty =
MondrianProperties.instance().EnableNativeCrossJoin;
if (!enableProperty.get()) {
// When native cross joins are explicitly disabled, no alerts
// are supposed to be raised.
return;
}
String mdx =
"select " +
"{[Measures].AllMembers} on columns, " +
"NonEmptyCrossJoin([Product].[All Products].children, " +
"[Store].[All Stores].children) on rows " +
"from [Warehouse and Sales]";
final List<LoggingEvent> events = new ArrayList<LoggingEvent>();
// set up log4j listener to detect alerts
Appender alertListener = new AppenderSkeleton() {
protected void append(LoggingEvent event) {
events.add(event);
}
public void close() {
}
public boolean requiresLayout() {
return false;
}
};
Logger rolapUtilLogger = Logger.getLogger(RolapUtil.class);
rolapUtilLogger.addAppender(alertListener);
String expectedMessage =
"Unable to use native SQL evaluation for 'NonEmptyCrossJoin'";
// verify that exception is thrown if alerting is set to ERROR
StringProperty alertProperty =
MondrianProperties.instance().AlertNativeEvaluationUnsupported;
String oldAlert = alertProperty.get();
alertProperty.set(org.apache.log4j.Level.ERROR.toString());
try {
checkNotNative(3, mdx);
fail("Expected NativeEvaluationUnsupportedException");
} catch (NativeEvaluationUnsupportedException ex) {
// Expected
} finally {
alertProperty.set(oldAlert);
}
// should have gotten one ERROR
int nEvents = countFilteredEvents(
events,
org.apache.log4j.Level.ERROR,
expectedMessage);
assertEquals(1, nEvents);
events.clear();
// verify that exactly one warning is posted but execution succeeds
// if alerting is set to WARN
alertProperty.set(org.apache.log4j.Level.WARN.toString());
try {
checkNotNative(3, mdx);
} finally {
alertProperty.set(oldAlert);
}
// should have gotten one WARN
nEvents = countFilteredEvents(
events,
org.apache.log4j.Level.WARN,
expectedMessage);
assertEquals(1, nEvents);
events.clear();
// verify that no warning is posted if native evaluation is
// explicitly disabled
alertProperty.set(org.apache.log4j.Level.WARN.toString());
enableProperty.set(false);
try {
checkNotNative(3, mdx);
} finally {
alertProperty.set(oldAlert);
// The property is known to have been true on entry (see the early
// return above), so restoring 'true' is equivalent to restoring
// the original value.
enableProperty.set(true);
}
// should have gotten no WARN
nEvents = countFilteredEvents(
events,
org.apache.log4j.Level.WARN,
expectedMessage);
assertEquals(0, nEvents);
events.clear();
// no biggie if we don't get here for some reason; just being
// half-heartedly clean
rolapUtilLogger.removeAppender(alertListener);
}
/**
 * Counts the logging events whose level equals {@code level} and whose
 * message contains {@code pattern}.
 *
 * @param events  captured log4j events
 * @param level   required logging level
 * @param pattern substring the event message must contain
 * @return number of matching events
 */
private int countFilteredEvents(
    List<LoggingEvent> events,
    org.apache.log4j.Level level,
    String pattern)
{
    int matches = 0;
    for (LoggingEvent event : events) {
        boolean levelMatches = event.getLevel().equals(level);
        boolean messageMatches =
            event.getMessage().toString().indexOf(pattern) >= 0;
        if (levelMatches && messageMatches) {
            matches++;
        }
    }
    return matches;
}
/**
 * Cross join on the virtual cube with a calculated member defined in the
 * query itself; checked via {@code checkNative}.
 */
public void testVirtualCubeCrossJoinCalculatedMember1()
{
    final String mdx =
        "WITH MEMBER [Measures].[Total Cost] as " +
        "'[Measures].[Store Cost] + [Measures].[Warehouse Cost]' " +
        "select " +
        "{[Measures].[Total Cost]} on columns, " +
        "non empty crossjoin([Product].[All Products].children, " +
        "[Store].[All Stores].children) on rows " +
        "from [Warehouse and Sales]";
    checkNative(18, 3, mdx);
}
/**
 * Cross join on the virtual cube with a calculated member that is defined
 * in the schema; checked via {@code checkNative}.
 */
public void testVirtualCubeCrossJoinCalculatedMember2()
{
    final String mdx =
        "select " +
        "{[Measures].[Profit Per Unit Shipped]} on columns, " +
        "non empty crossjoin([Product].[All Products].children, " +
        "[Store].[All Stores].children) on rows " +
        "from [Warehouse and Sales]";
    checkNative(18, 3, mdx);
}
/**
 * Native cross join cannot be used because the calculated member refers to
 * [Measures].CurrentMember.
 */
public void testNotNativeVirtualCubeCrossJoinCalculatedMember()
{
    final String mdx =
        "WITH MEMBER [Measures].[CurrMember] as " +
        "'[Measures].CurrentMember' " +
        "select " +
        "{[Measures].[CurrMember]} on columns, " +
        "non empty crossjoin([Product].[All Products].children, " +
        "[Store].[All Stores].children) on rows " +
        "from [Warehouse and Sales]";
    checkNotNative(3, mdx);
}
/**
 * Three nested cross joins where two of the four arguments are enumerated
 * sets containing calculated members; the query is exercised through
 * {@code checkNative}.
 */
public void testCjEnumCalcMembers()
{
// 3 cross joins -- 2 of the 4 arguments to the cross joins are
// enumerated sets with calculated members
checkNative(
30,
30,
"with " +
"member [Product].[All Products].[Drink].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Product].[All Products].[Drink]})' " +
"member [Product].[All Products].[Non-Consumable].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Product].[All Products].[Non-Consumable]})' " +
"member [Customers].[All Customers].[USA].[CA].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Customers].[All Customers].[USA].[CA]})' " +
"member [Customers].[All Customers].[USA].[OR].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Customers].[All Customers].[USA].[OR]})' " +
"member [Customers].[All Customers].[USA].[WA].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Customers].[All Customers].[USA].[WA]})' " +
"select " +
"{[Measures].[Unit Sales]} on columns, " +
"non empty " +
" crossjoin( " +
" crossjoin( " +
" crossjoin( " +
" {[Product].[All Products].[Drink].[*SUBTOTAL_MEMBER_SEL~SUM], " +
" [Product].[All Products].[Non-Consumable].[*SUBTOTAL_MEMBER_SEL~SUM]}, " +
" [Education Level].[Education Level].Members), " +
" {[Customers].[All Customers].[USA].[CA].[*SUBTOTAL_MEMBER_SEL~SUM], " +
" [Customers].[All Customers].[USA].[OR].[*SUBTOTAL_MEMBER_SEL~SUM], " +
" [Customers].[All Customers].[USA].[WA].[*SUBTOTAL_MEMBER_SEL~SUM]}), " +
" [Time].[Year].members)" +
" on rows " +
"from [Sales]");
}
public void testCjEnumCalcMembersBug() {
// TO be fixed:
// Native evaluation of NECJ is incorrect.
String query =
"with " +
"member [Store Type].[All Store Types].[S] as sum({[Store Type].[All Store Types]}) " +
"set [Enum Store Types] as {" +
" [Store Type].[All Store Types].[HeadQuarters], " +
" [Store Type].[All Store Types].[Small Grocery], " +
" [Store Type].[All Store Types].[Supermarket], " +
" [Store Type].[All Store Types].[S]}" +
"select " +
" NonEmptyCrossJoin([Product].[All Products].Children, [Enum Store Types]) on rows " +
"from [Sales]";
String wrongResult =
"Axis
"{}\n" +
"Axis
"{[Product].[All Products].[Drink], [Store Type].[All Store Types].[HeadQuarters]}\n" +
"{[Product].[All Products].[Drink], [Store Type].[All Store Types].[Small Grocery]}\n" +
"{[Product].[All Products].[Drink], [Store Type].[All Store Types].[Supermarket]}\n" +
"{[Product].[All Products].[Drink], [Store Type].[All Store Types].[S]}\n" +
"{[Product].[All Products].[Food], [Store Type].[All Store Types].[HeadQuarters]}\n" +
"{[Product].[All Products].[Food], [Store Type].[All Store Types].[Small Grocery]}\n" +
"{[Product].[All Products].[Food], [Store Type].[All Store Types].[Supermarket]}\n" +
"{[Product].[All Products].[Food], [Store Type].[All Store Types].[S]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store Type].[All Store Types].[HeadQuarters]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store Type].[All Store Types].[Small Grocery]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store Type].[All Store Types].[Supermarket]}\n" +
"{[Product].[All Products].[Non-Consumable], [Store Type].[All Store Types].[S]}\n" +
"Row #0: \n" +
"Row #0: 574\n" +
"Row #0: 14,092\n" +
"Row #0: 24,597\n" +
"Row #0: \n" +
"Row #0: 4,764\n" +
"Row #0: 108,188\n" +
"Row #0: 191,940\n" +
"Row #0: \n" +
"Row #0: 1,219\n" +
"Row #0: 28,275\n" +
"Row #0: 50,236\n";
// Get a fresh connection; Otherwise the mondrian property setting
// is not refreshed for this parameter.
Connection conn = getTestContext().getFoodMartConnection(false);
TestContext context = getTestContext(conn);
context.assertQueryReturns(query, fold(wrongResult));
}
/**
 * Cross join of an enumerated set of calculated members with level
 * members, where some of the resulting cells are empty. MaxConstraints is
 * raised temporarily (and restored in the finally block) so the generated
 * IN list fits.
 */
public void testCjEnumEmptyCalcMembers()
{
// Make sure maxConstraint settting is high enough
int origMaxConstraints =
MondrianProperties.instance().MaxConstraints.get();
int minConstraints = 3;
if (origMaxConstraints < minConstraints) {
MondrianProperties.instance().MaxConstraints.set(minConstraints);
}
try {
// enumerated list of calculated members results in some empty cells
checkNative(
15,
5,
"with " +
"member [Customers].[All Customers].[USA].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Customers].[All Customers].[USA]})' " +
"member [Customers].[All Customers].[Mexico].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Customers].[All Customers].[Mexico]})' " +
"member [Customers].[All Customers].[Canada].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Customers].[All Customers].[Canada]})' " +
"select " +
"{[Measures].[Unit Sales]} on columns, " +
"non empty " +
" crossjoin( " +
" {[Customers].[All Customers].[Mexico].[*SUBTOTAL_MEMBER_SEL~SUM], " +
" [Customers].[All Customers].[Canada].[*SUBTOTAL_MEMBER_SEL~SUM], " +
" [Customers].[All Customers].[USA].[*SUBTOTAL_MEMBER_SEL~SUM]}, " +
" [Education Level].[Education Level].Members) " +
" on rows " +
"from [Sales]");
} finally {
MondrianProperties.instance().MaxConstraints.set(origMaxConstraints);
}
}
/**
 * Union of two cross joins over the same level members combined with
 * different calculated members. Native SQL should be used to retrieve the
 * Product Department members, and the second cross join should reuse the
 * cached results of the first, since the SQL select excludes the
 * calculated members.
 */
public void testCjUnionEnumCalcMembers()
{
// native sql should be used to retrieve Product Department members
// and the second cross join should use the cached results from the
// first, since the sql select excludes the calculated members
checkNative(
46,
46,
"with " +
"member [Education Level].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
" 'sum({[Education Level].[All Education Levels]})' " +
"member [Education Level].[*SUBTOTAL_MEMBER_SEL~AVG] as " +
" 'avg([Education Level].[Education Level].Members)' select " +
"{[Measures].[Unit Sales]} on columns, " +
"non empty union (Crossjoin( " +
" [Product].[Product Department].Members, " +
" {[Education Level].[*SUBTOTAL_MEMBER_SEL~AVG]}), " +
"crossjoin( " +
" [Product].[Product Department].Members, " +
" {[Education Level].[*SUBTOTAL_MEMBER_SEL~SUM]})) on rows " +
"from [Sales]");
}
/**
* Tests the behavior if you have NON EMPTY on both axes, and the default
* member of a hierarchy is not 'all' or the first child.
*/
public void testNonEmptyWithWeirdDefaultMember() {
if (!Bug.Bug1574942Fixed) return;
TestContext testContext = TestContext.createSubstitutingCube(
"Sales",
" <Dimension name=\"Time\" type=\"TimeDimension\" foreignKey=\"time_id\">\n" +
" <Hierarchy hasAll=\"false\" primaryKey=\"time_id\" defaultMember=\"[Time].[1997].[Q1].[1]\" >\n" +
" <Table name=\"time_by_day\"/>\n" +
" <Level name=\"Year\" column=\"the_year\" type=\"Numeric\" uniqueMembers=\"true\"\n" +
" levelType=\"TimeYears\"/>\n" +
" <Level name=\"Quarter\" column=\"quarter\" uniqueMembers=\"false\"\n" +
" levelType=\"TimeQuarters\"/>\n" +
" <Level name=\"Month\" column=\"month_of_year\" uniqueMembers=\"false\" type=\"Numeric\"\n" +
" levelType=\"TimeMonths\"/>\n" +
" </Hierarchy>\n" +
" </Dimension>");
// Check that the grand total is different than when [Time].[1997] is
// the default member.
testContext.assertQueryReturns("select from [Sales]",
fold("Axis
"{}\n" +
"21,628"));
// Results of this query agree with MSAS 2000 SP1.
// The query gives the same results if the default member of [Time]
// is [Time].[1997] or [Time].[1997].[Q1].[1].
testContext.assertQueryReturns("select\n" +
"NON EMPTY Crossjoin({[Time].[1997].[Q2].[4]}, [Customers].[Country].members) on columns,\n" +
"NON EMPTY [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].children on rows\n" +
"from sales",
fold("Axis
"{}\n" +
"Axis
"{[Time].[1997].[Q2].[4], [Customers].[All Customers].[USA]}\n" +
"Axis
"{[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer]}\n" +
"{[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Light Beer]}\n" +
"Row #0: 3\n" +
"Row #1: 21\n"));
}
/**
 * Tests a non-empty cross join of two named sets; checked via
 * {@code checkNative}.
 */
public void testCrossJoinNamedSets1()
{
    final String mdx =
        "with " +
        "SET [ProductChildren] as '[Product].[All Products].children' " +
        "SET [StoreMembers] as '[Store].[Store Country].members' " +
        "select {[Measures].[Store Sales]} on columns, " +
        "non empty crossjoin([ProductChildren], [StoreMembers]) " +
        "on rows from [Sales]";
    checkNative(3, 3, mdx);
}
/**
 * Tests a non-empty cross join of named sets where one set enumerates
 * explicit members; checked via {@code checkNative}.
 */
public void testCrossJoinNamedSets2()
{
    // Make sure the MaxConstraints setting is high enough for the
    // three-member list below.
    int savedMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    int minConstraints = 3;
    if (savedMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        checkNative(
            3,
            3,
            "with " +
            "SET [ProductChildren] as '{[Product].[All Products].[Drink], " +
            "[Product].[All Products].[Food], " +
            "[Product].[All Products].[Non-Consumable]}' " +
            "SET [StoreChildren] as '[Store].[All Stores].children' " +
            "select {[Measures].[Store Sales]} on columns, " +
            "non empty crossjoin([ProductChildren], [StoreChildren]) on rows from " +
            "[Sales]");
    } finally {
        // Restore the original setting even on failure.
        MondrianProperties.instance().MaxConstraints.set(savedMaxConstraints);
    }
}
/**
 * Verifies that only the members explicitly referenced in the set are
 * returned. Note that different members are referenced at different
 * levels in the time dimension.
 */
public void testCrossJoinSetWithDifferentParents()
{
    final String mdx =
        "select " +
        "{[Measures].[Unit Sales]} on columns, " +
        "NonEmptyCrossJoin([Education Level].[Education Level].Members, " +
        "{[Time].[1997].[Q1], [Time].[1998].[Q2]}) on rows from Sales";
    checkNative(5, 5, mdx);
}
/**
 * Tests a non-empty cross join where the enumerated time members form a
 * cross product of (1997, 1998) and (Q1, Q2, Q3).
 */
public void testCrossJoinSetWithCrossProdMembers()
{
    // Make sure the MaxConstraints setting is high enough for the
    // six-member list below.
    int savedMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    int minConstraints = 6;
    if (savedMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        checkNative(
            15,
            15,
            "select " +
            "{[Measures].[Unit Sales]} on columns, " +
            "NonEmptyCrossJoin([Education Level].[Education Level].Members, " +
            "{[Time].[1997].[Q1], [Time].[1997].[Q2], [Time].[1997].[Q3], " +
            "[Time].[1998].[Q1], [Time].[1998].[Q2], [Time].[1998].[Q3]})" +
            "on rows from Sales");
    } finally {
        // Restore the original setting even on failure.
        MondrianProperties.instance().MaxConstraints.set(savedMaxConstraints);
    }
}
/**
 * Tests a non-empty cross join where the enumerated store members share
 * the same parent.
 */
public void testCrossJoinSetWithSameParent()
{
    // Make sure the MaxConstraints setting is high enough for the
    // two-member list below.
    int savedMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    int minConstraints = 2;
    if (savedMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        checkNative(
            10,
            10,
            "select " +
            "{[Measures].[Unit Sales]} on columns, " +
            "NonEmptyCrossJoin([Education Level].[Education Level].Members, " +
            "{[Store].[All Stores].[USA].[CA].[Beverly Hills], " +
            "[Store].[All Stores].[USA].[CA].[San Francisco]}) " +
            "on rows from Sales");
    } finally {
        // Restore the original setting even on failure.
        MondrianProperties.instance().MaxConstraints.set(savedMaxConstraints);
    }
}
/**
 * Tests a non-empty cross join where the enumerated store members have
 * different parents but belong to a level with unique members.
 */
public void testCrossJoinSetWithUniqueLevel()
{
    // Make sure the MaxConstraints setting is high enough for the
    // two-member list below.
    int savedMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    int minConstraints = 2;
    if (savedMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        checkNative(
            10,
            10,
            "select " +
            "{[Measures].[Unit Sales]} on columns, " +
            "NonEmptyCrossJoin([Education Level].[Education Level].Members, " +
            "{[Store].[All Stores].[USA].[CA].[Beverly Hills].[Store 6], "+
            "[Store].[All Stores].[USA].[WA].[Bellingham].[Store 2]}) " +
            "on rows from Sales");
    } finally {
        // Restore the original setting even on failure.
        MondrianProperties.instance().MaxConstraints.set(savedMaxConstraints);
    }
}
/**
 * Tests a non-empty cross join whose enumerated product members sit under
 * different parents below the All member.
 */
public void testCrossJoinMultiInExprAllMember()
{
    final String mdx =
        "select " +
        "{[Measures].[Unit Sales]} on columns, " +
        "NonEmptyCrossJoin([Education Level].[Education Level].Members, " +
        "{[Product].[All Products].[Drink].[Alcoholic Beverages], " +
        "[Product].[All Products].[Food].[Breakfast Foods]}) " +
        "on rows from Sales";
    checkNative(10, 10, mdx);
}
/**
 * Verifies the evaluator context used during non-empty cross join
 * evaluation: the calculated measure [*TOP_BOTTOM_SET] must be excluded,
 * otherwise evaluation recurses forever (see inline comment).
 */
public void testCrossJoinEvaluatorContext1()
{
    // This test ensures that the proper measure members context is
    // set when evaluating a non-empty cross join. The context should
    // not include the calculated measure [*TOP_BOTTOM_SET]. If it
    // does, the query will result in an infinite loop because the cross
    // join will try evaluating the calculated member (when it shouldn't)
    // and the calculated member references the cross join, resulting
    // in the loop
    assertQueryReturns(
        "With " +
        "Set [*NATIVE_CJ_SET] as " +
        "'NonEmptyCrossJoin([*BASE_MEMBERS_Store], [*BASE_MEMBERS_Products])' " +
        "Set [*TOP_BOTTOM_SET] as " +
        "'Order([*GENERATED_MEMBERS_Store], ([Measures].[Unit Sales], " +
        "[Product].[All Products].[*TOP_BOTTOM_MEMBER]), BDESC)' " +
        "Set [*BASE_MEMBERS_Store] as '[Store].members' " +
        "Set [*GENERATED_MEMBERS_Store] as 'Generate([*NATIVE_CJ_SET], {[Store].CurrentMember})' " +
        "Set [*BASE_MEMBERS_Products] as " +
        "'{[Product].[All Products].[Food], [Product].[All Products].[Drink], " +
        "[Product].[All Products].[Non-Consumable]}' " +
        "Set [*GENERATED_MEMBERS_Products] as " +
        "'Generate([*NATIVE_CJ_SET], {[Product].CurrentMember})' " +
        "Member [Product].[All Products].[*TOP_BOTTOM_MEMBER] as " +
        "'Aggregate([*GENERATED_MEMBERS_Products])'" +
        "Member [Measures].[*TOP_BOTTOM_MEMBER] as 'Rank([Store].CurrentMember,[*TOP_BOTTOM_SET])' " +
        "Member [Store].[All Stores].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
        "'sum(Filter([*GENERATED_MEMBERS_Store], [Measures].[*TOP_BOTTOM_MEMBER] <= 10))'" +
        "Select {[Measures].[Store Cost]} on columns, " +
        "Non Empty Filter(Generate([*NATIVE_CJ_SET], {([Store].CurrentMember)}), " +
        "[Measures].[*TOP_BOTTOM_MEMBER] <= 10) on rows From [Sales]",
        // NOTE(review): the bare "Axis literals below look truncated (likely
        // originally "Axis #n:\n" +); restore from version control.
        fold(
            "Axis
            "{}\n" +
            "Axis
            "{[Measures].[Store Cost]}\n" +
            "Axis
            "{[Store].[All Stores]}\n" +
            "{[Store].[All Stores].[USA]}\n" +
            "{[Store].[All Stores].[USA].[CA]}\n" +
            "{[Store].[All Stores].[USA].[OR]}\n" +
            "{[Store].[All Stores].[USA].[OR].[Portland]}\n" +
            "{[Store].[All Stores].[USA].[OR].[Salem]}\n" +
            "{[Store].[All Stores].[USA].[OR].[Salem].[Store 13]}\n" +
            "{[Store].[All Stores].[USA].[WA]}\n" +
            "{[Store].[All Stores].[USA].[WA].[Tacoma]}\n" +
            "{[Store].[All Stores].[USA].[WA].[Tacoma].[Store 17]}\n" +
            "Row #0: 225,627.23\n" +
            "Row #1: 225,627.23\n" +
            "Row #2: 63,530.43\n" +
            "Row #3: 56,772.50\n" +
            "Row #4: 21,948.94\n" +
            "Row #5: 34,823.56\n" +
            "Row #6: 34,823.56\n" +
            "Row #7: 105,324.31\n" +
            "Row #8: 29,959.28\n" +
            "Row #9: 29,959.28\n"));
}
/**
 * Verifies the evaluator context when a calculated measure itself
 * references another calculated member; requires MaxConstraints >= 2
 * so the IN-list for the two base members can be generated.
 */
public void testCrossJoinEvaluatorContext2()
{
    int origMaxConstraints =
        MondrianProperties.instance().MaxConstraints.get();
    int minConstraints = 2;
    if (origMaxConstraints < minConstraints) {
        MondrianProperties.instance().MaxConstraints.set(minConstraints);
    }
    try {
        // calculated measure contains a calculated member
        assertQueryReturns(
            "With Set [*NATIVE_CJ_SET] as " +
            "'NonEmptyCrossJoin([*BASE_MEMBERS_Dates], [*BASE_MEMBERS_Stores])' " +
            "Set [*BASE_MEMBERS_Dates] as '{[Time].[1997].[Q1], [Time].[1997].[Q2]}' " +
            "Set [*GENERATED_MEMBERS_Dates] as " +
            "'Generate([*NATIVE_CJ_SET], {[Time].CurrentMember})' " +
            "Set [*GENERATED_MEMBERS_Measures] as '{[Measures].[*SUMMARY_METRIC_0]}' " +
            "Set [*BASE_MEMBERS_Stores] as '{[Store].[USA].[CA], [Store].[USA].[WA]}' " +
            "Set [*GENERATED_MEMBERS_Stores] as " +
            "'Generate([*NATIVE_CJ_SET], {[Store].CurrentMember})' " +
            "Member [Time].[*SM_CTX_SEL] as 'Aggregate([*GENERATED_MEMBERS_Dates])' " +
            "Member [Measures].[*SUMMARY_METRIC_0] as " +
            "'[Measures].[Unit Sales]/([Measures].[Unit Sales],[Time].[*SM_CTX_SEL])', " +
            "FORMAT_STRING = '0.00%' " +
            "Member [Time].[*SUBTOTAL_MEMBER_SEL~SUM] as 'sum([*GENERATED_MEMBERS_Dates])' " +
            "Member [Store].[*SUBTOTAL_MEMBER_SEL~SUM] as " +
            "'sum(Filter([*GENERATED_MEMBERS_Stores], " +
            "([Measures].[Unit Sales], [Time].[*SUBTOTAL_MEMBER_SEL~SUM]) > 0.0))' " +
            "Select Union " +
            "(CrossJoin " +
            "(Filter " +
            "(Generate([*NATIVE_CJ_SET], {([Time].CurrentMember)}), " +
            "Not IsEmpty ([Measures].[Unit Sales])), " +
            "[*GENERATED_MEMBERS_Measures]), " +
            "CrossJoin " +
            "(Filter " +
            "({[Time].[*SUBTOTAL_MEMBER_SEL~SUM]}, " +
            "Not IsEmpty ([Measures].[Unit Sales])), " +
            "[*GENERATED_MEMBERS_Measures])) on columns, " +
            "Non Empty Union " +
            "(Filter " +
            "(Filter " +
            "(Generate([*NATIVE_CJ_SET], " +
            "{([Store].CurrentMember)}), " +
            "([Measures].[Unit Sales], " +
            "[Time].[*SUBTOTAL_MEMBER_SEL~SUM]) > 0.0), " +
            "Not IsEmpty ([Measures].[Unit Sales])), " +
            "Filter( " +
            "{[Store].[*SUBTOTAL_MEMBER_SEL~SUM]}, " +
            "Not IsEmpty ([Measures].[Unit Sales]))) on rows " +
            "From [Sales]",
            // NOTE(review): the bare "Axis literals below look truncated
            // (likely "Axis #n:\n" +); restore from version control.
            fold(
                "Axis
                "{}\n" +
                "Axis
                "{[Time].[1997].[Q1], [Measures].[*SUMMARY_METRIC_0]}\n" +
                "{[Time].[1997].[Q2], [Measures].[*SUMMARY_METRIC_0]}\n" +
                "{[Time].[*SUBTOTAL_MEMBER_SEL~SUM], [Measures].[*SUMMARY_METRIC_0]}\n" +
                "Axis
                "{[Store].[All Stores].[USA].[CA]}\n" +
                "{[Store].[All Stores].[USA].[WA]}\n" +
                "{[Store].[*SUBTOTAL_MEMBER_SEL~SUM]}\n" +
                "Row #0: 48.34%\n" +
                "Row #0: 51.66%\n" +
                "Row #0: 100.00%\n" +
                "Row #1: 50.53%\n" +
                "Row #1: 49.47%\n" +
                "Row #1: 100.00%\n" +
                "Row #2: 49.72%\n" +
                "Row #2: 50.28%\n" +
                "Row #2: 100.00%\n"));
    } finally {
        // Restore the property even if the assertion fails.
        MondrianProperties.instance().MaxConstraints.set(origMaxConstraints);
    }
}
/**
 * A measures member referenced inside IsEmpty() must not prevent native
 * cross join on a virtual cube. Uses assertQueryReturns directly because
 * the non-native path overflows the stack (see inline comment).
 */
public void testVCNativeCJWithIsEmptyOnMeasure()
{
    // Don't use checkNative method here because in the case where
    // native cross join isn't used, the query causes a stack overflow.
    // A measures member is referenced in the IsEmpty() function. This
    // shouldn't prevent native cross join from being used.
    assertQueryReturns(
        "with " +
        "set BM_PRODUCT as {[Product].[All Products].[Drink]} " +
        "set BM_EDU as [Education Level].[Education Level].Members " +
        "set BM_GENDER as {[Gender].[Gender].[M]} " +
        "set CJ as NonEmptyCrossJoin(BM_GENDER,NonEmptyCrossJoin(BM_EDU,BM_PRODUCT)) " +
        "set GM_PRODUCT as Generate(CJ, {[Product].CurrentMember}) " +
        "set GM_EDU as Generate(CJ, {[Education Level].CurrentMember}) " +
        "set GM_GENDER as Generate(CJ, {[Gender].CurrentMember}) " +
        "set GM_MEASURE as {[Measures].[Unit Sales]} " +
        "member [Education Level].FILTER1 as Aggregate(GM_EDU) " +
        "member [Gender].FILTER2 as Aggregate(GM_GENDER) " +
        "select " +
        "Filter(GM_PRODUCT, Not IsEmpty([Measures].[Unit Sales])) on rows, " +
        "GM_MEASURE on columns " +
        "from [Warehouse and Sales] " +
        "where ([Education Level].FILTER1, [Gender].FILTER2)",
        // NOTE(review): the bare "Axis literals below look truncated
        // (likely "Axis #n:\n" +); restore from version control.
        fold(
            "Axis
            "{[Education Level].[FILTER1], [Gender].[FILTER2]}\n" +
            "Axis
            "{[Measures].[Unit Sales]}\n" +
            "Axis
            "{[Product].[All Products].[Drink]}\n" +
            "Row #0: 12,395\n"));
}
/**
 * A [Store Sales] reference inside topPercent() must not prevent native
 * cross joins on the virtual cube.
 */
public void testVCNativeCJWithTopPercent()
{
    final String mdx =
        "select {topPercent(nonemptycrossjoin([Product].[Product Department].members, " +
        "[Time].[1997].children),10,[Measures].[Store Sales])} on columns, " +
        "{[Measures].[Store Sales]} on rows from " +
        "[Warehouse and Sales]";
    checkNative(92, 1, mdx);
}
/**
 * [Customers].[Name] is an ordinal expression; ordering must be done on
 * the column corresponding to that expression.
 */
public void testVCOrdinalExpression() {
    final String mdx =
        "select {[Measures].[Store Sales]} on columns,"
        + " NON EMPTY Crossjoin([Customers].[Name].Members,"
        + " [Product].[Product Name].Members) ON rows " +
        " from [Warehouse and Sales] where ("
        + " [Store].[All Stores].[USA].[CA].[San Francisco].[Store 14],"
        + " [Time].[1997].[Q1].[1])";
    checkNative(67, 67, mdx);
}
/**
 * Test for bug #1696772: non-empty joins combined with calculated
 * measures. Modified which calculations are tested for non native,
 * non empty joins.
 */
public void testNonEmptyWithCalcMeasure() {
    // The outer NonEmptyCrossJoin over [*METRIC_CJ_SET] filters by two
    // calculated measures; both must still evaluate natively.
    checkNative(15, 6,
        "With " +
        "Set [*NATIVE_CJ_SET] as 'NonEmptyCrossJoin([*BASE_MEMBERS_Store],NonEmptyCrossJoin([*BASE_MEMBERS_Education Level],[*BASE_MEMBERS_Product]))' " +
        "Set [*METRIC_CJ_SET] as 'Filter([*NATIVE_CJ_SET],[Measures].[*Store Sales_SEL~SUM] > 50000.0 And [Measures].[*Unit Sales_SEL~MAX] > 50000.0)' " +
        "Set [*BASE_MEMBERS_Store] as '[Store].[Store Country].Members' " +
        "Set [*NATIVE_MEMBERS_Store] as 'Generate([*NATIVE_CJ_SET], {[Store].CurrentMember})' " +
        "Set [*METRIC_MEMBERS_Store] as 'Generate([*METRIC_CJ_SET], {[Store].CurrentMember})' " +
        "Set [*BASE_MEMBERS_Measures] as '{[Measures].[Store Sales],[Measures].[Unit Sales]}' " +
        "Set [*BASE_MEMBERS_Education Level] as '[Education Level].[Education Level].Members' " +
        "Set [*NATIVE_MEMBERS_Education Level] as 'Generate([*NATIVE_CJ_SET], {[Education Level].CurrentMember})' " +
        "Set [*METRIC_MEMBERS_Education Level] as 'Generate([*METRIC_CJ_SET], {[Education Level].CurrentMember})' " +
        "Set [*BASE_MEMBERS_Product] as '[Product].[Product Family].Members' " +
        "Set [*NATIVE_MEMBERS_Product] as 'Generate([*NATIVE_CJ_SET], {[Product].CurrentMember})' " +
        "Set [*METRIC_MEMBERS_Product] as 'Generate([*METRIC_CJ_SET], {[Product].CurrentMember})' " +
        "Member [Product].[*CTX_METRIC_MEMBER_SEL~SUM] as 'Sum({[Product].[All Products]})' " +
        "Member [Store].[*CTX_METRIC_MEMBER_SEL~SUM] as 'Sum({[Store].[All Stores]})' " +
        "Member [Measures].[*Store Sales_SEL~SUM] as '([Measures].[Store Sales],[Education Level].CurrentMember,[Product].[*CTX_METRIC_MEMBER_SEL~SUM],[Store].[*CTX_METRIC_MEMBER_SEL~SUM])' " +
        "Member [Product].[*CTX_METRIC_MEMBER_SEL~MAX] as 'Max([*NATIVE_MEMBERS_Product])' " +
        "Member [Store].[*CTX_METRIC_MEMBER_SEL~MAX] as 'Max([*NATIVE_MEMBERS_Store])' " +
        "Member [Measures].[*Unit Sales_SEL~MAX] as '([Measures].[Unit Sales],[Education Level].CurrentMember,[Product].[*CTX_METRIC_MEMBER_SEL~MAX],[Store].[*CTX_METRIC_MEMBER_SEL~MAX])' " +
        "Select " +
        "CrossJoin(Generate([*METRIC_CJ_SET], {([Store].CurrentMember)}),[*BASE_MEMBERS_Measures]) on columns, " +
        "Non Empty Generate([*METRIC_CJ_SET], {([Education Level].CurrentMember,[Product].CurrentMember)}) on rows " +
        "From [Sales]"
    );
}
/**
 * Calculated members on the slicer (FILTER1/FILTER2 below) must be
 * ignored by the CrossJoin emptiness check. If they were not, evaluation
 * would recurse forever: the emptiness check would depend on a calculated
 * slicer member that itself references the non-empty set being computed.
 * Because native evaluation already ignores calculated slicer members,
 * native and non-native evaluation must return the same result.
 */
public void testCalculatedSlicerMember() {
    final String mdx =
        "With " +
        "Set BM_PRODUCT as '{[Product].[All Products].[Drink]}' " +
        "Set BM_EDU as '[Education Level].[Education Level].Members' " +
        "Set BM_GENDER as '{[Gender].[Gender].[M]}' " +
        "Set NECJ_SET as 'NonEmptyCrossJoin(BM_GENDER, NonEmptyCrossJoin(BM_EDU,BM_PRODUCT))' " +
        "Set GM_PRODUCT as 'Generate(NECJ_SET, {[Product].CurrentMember})' " +
        "Set GM_EDU as 'Generate(NECJ_SET, {[Education Level].CurrentMember})' " +
        "Set GM_GENDER as 'Generate(NECJ_SET, {[Gender].CurrentMember})' " +
        "Set GM_MEASURE as '{[Measures].[Unit Sales]}' " +
        "Member [Education Level].FILTER1 as 'Aggregate(GM_EDU)' " +
        "Member [Gender].FILTER2 as 'Aggregate(GM_GENDER)' " +
        "Select " +
        "GM_PRODUCT on rows, GM_MEASURE on columns " +
        "From [Sales] Where ([Education Level].FILTER1, [Gender].FILTER2)";
    checkNative(20, 1, mdx);
}
/**
 * Slicer member independent of the axes, evaluated with native cross
 * joins disabled. The expected result lists all product families even
 * though Mexico has no sales data.
 */
public void testIndependentSlicerMemberNonNative() {
    String query =
        "with set [p] as '[Product].[Product Family].members' " +
        "set [s] as '[Store].[Store Country].members' " +
        "set [ne] as 'nonemptycrossjoin([p],[s])' " +
        "set [nep] as 'Generate([ne],{[Product].CurrentMember})' " +
        "select [nep] on columns from sales " +
        "where ([Store].[Store Country].[Mexico])";
    // NOTE(review): the bare "Axis literals below look truncated (likely
    // "Axis #n:\n" +); restore from version control.
    String resultNonNative =
        "Axis
        "{[Store].[All Stores].[Mexico]}\n" +
        "Axis
        "{[Product].[All Products].[Drink]}\n" +
        "{[Product].[All Products].[Food]}\n" +
        "{[Product].[All Products].[Non-Consumable]}\n" +
        "Row #0: \n" +
        "Row #0: \n" +
        "Row #0: \n";
    boolean origNativeCJ =
        MondrianProperties.instance().EnableNativeCrossJoin.get();
    MondrianProperties.instance().EnableNativeCrossJoin.set(false);
    try {
        // Get a fresh connection; Otherwise the mondrian property setting
        // is not refreshed for this parameter.
        Connection conn = getTestContext().getFoodMartConnection(false);
        TestContext context = getTestContext(conn);
        context.assertQueryReturns(query, fold(resultNonNative));
    } finally {
        // BUGFIX: restore inside finally — previously a failed assertion
        // left EnableNativeCrossJoin=false for all subsequent tests.
        MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCJ);
    }
}
/**
 * Slicer member independent of the axes, evaluated with native cross
 * joins enabled. Currently this behaves differently from non-native
 * evaluation.
 */
public void testIndependentSlicerMemberNative() {
    String query =
        "with set [p] as '[Product].[Product Family].members' " +
        "set [s] as '[Store].[Store Country].members' " +
        "set [ne] as 'nonemptycrossjoin([p],[s])' " +
        "set [nep] as 'Generate([ne],{[Product].CurrentMember})' " +
        "select [nep] on columns from sales " +
        "where ([Store].[Store Country].[Mexico])";
    // NOTE(review): this literal is visibly truncated by extraction (the
    // statement never terminates); restore the full expected result from
    // version control.
    String resultNative =
        "Axis
        "{[Store].[All Stores].[Mexico]}\n" +
        "Axis
    boolean origNativeCJ =
        MondrianProperties.instance().EnableNativeCrossJoin.get();
    MondrianProperties.instance().EnableNativeCrossJoin.set(true);
    try {
        // Get a fresh connection; Otherwise the mondrian property setting
        // is not refreshed for this parameter.
        Connection conn = getTestContext().getFoodMartConnection(false);
        TestContext context = getTestContext(conn);
        context.assertQueryReturns(query, fold(resultNative));
    } finally {
        // BUGFIX: restore inside finally — previously a failed assertion
        // left EnableNativeCrossJoin=true for all subsequent tests.
        MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCJ);
    }
}
/**
 * Slicer member ([Time].[1998]) that the axes depend on, evaluated with
 * native cross joins disabled.
 */
public void testDependentSlicerMemberNonNative() {
    String query =
        "with set [p] as '[Product].[Product Family].members' " +
        "set [s] as '[Store].[Store Country].members' " +
        "set [ne] as 'nonemptycrossjoin([p],[s])' " +
        "set [nep] as 'Generate([ne],{[Product].CurrentMember})' " +
        "select [nep] on columns from sales " +
        "where ([Time].[1998])";
    // NOTE(review): this literal is visibly truncated by extraction (the
    // statement never terminates); restore the full expected result from
    // version control.
    String resultNonNative =
        "Axis
        "{[Time].[1998]}\n" +
        "Axis
    boolean origNativeCJ =
        MondrianProperties.instance().EnableNativeCrossJoin.get();
    MondrianProperties.instance().EnableNativeCrossJoin.set(false);
    try {
        // Get a fresh connection; Otherwise the mondrian property setting
        // is not refreshed for this parameter.
        Connection conn = getTestContext().getFoodMartConnection(false);
        TestContext context = getTestContext(conn);
        context.assertQueryReturns(query, fold(resultNonNative));
    } finally {
        // BUGFIX: restore inside finally — previously a failed assertion
        // left EnableNativeCrossJoin=false for all subsequent tests.
        MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCJ);
    }
}
/**
 * Slicer member ([Time].[1998]) that the axes depend on, evaluated with
 * native cross joins enabled.
 */
public void testDependentSlicerMemberNative() {
    String query =
        "with set [p] as '[Product].[Product Family].members' " +
        "set [s] as '[Store].[Store Country].members' " +
        "set [ne] as 'nonemptycrossjoin([p],[s])' " +
        "set [nep] as 'Generate([ne],{[Product].CurrentMember})' " +
        "select [nep] on columns from sales " +
        "where ([Time].[1998])";
    // NOTE(review): this literal is visibly truncated by extraction (the
    // statement never terminates); restore the full expected result from
    // version control.
    String resultNative =
        "Axis
        "{[Time].[1998]}\n" +
        "Axis
    boolean origNativeCJ =
        MondrianProperties.instance().EnableNativeCrossJoin.get();
    MondrianProperties.instance().EnableNativeCrossJoin.set(true);
    try {
        // Get a fresh connection; Otherwise the mondrian property setting
        // is not refreshed for this parameter.
        Connection conn = getTestContext().getFoodMartConnection(false);
        TestContext context = getTestContext(conn);
        context.assertQueryReturns(query, fold(resultNative));
    } finally {
        // BUGFIX: restore inside finally — previously a failed assertion
        // left EnableNativeCrossJoin=true for all subsequent tests.
        MondrianProperties.instance().EnableNativeCrossJoin.set(origNativeCJ);
    }
}
/**
 * Make sure the mdx runs correctly and not in native mode.
 * Delegates to the three-argument overload with no expected result, so
 * only the row count is verified.
 *
 * @param rowCount number of rows returned
 * @param mdx query
 */
private void checkNotNative(int rowCount, String mdx) {
    checkNotNative(rowCount, mdx, null);
}
/**
 * Runs the given MDX and fails if any native evaluator is used; when an
 * expected result string is supplied, also verifies the query output.
 *
 * @param rowCount number of rows returned
 * @param mdx query
 * @param expectedResult expected result string, or null to skip the check
 */
private void checkNotNative(int rowCount, String mdx, String expectedResult) {
    CachePool.instance().flush();
    Connection connection = getTestContext().getFoodMartConnection(false);
    RolapNativeRegistry registry = getRegistry(connection);
    // Fail the test the moment any native evaluator is selected.
    registry.setListener(
        new Listener() {
            public void foundEvaluator(NativeEvent e) {
                fail("should not be executed native");
            }
            public void foundInCache(TupleEvent e) {
            }
            public void excutingSql(TupleEvent e) {
            }
        });
    Result result = new TestCase(connection, 0, rowCount, mdx).run();
    if (expectedResult == null) {
        return;
    }
    String nonNativeResult = toString(result);
    if (!nonNativeResult.equals(expectedResult)) {
        TestContext.assertEqualsVerbose(
            nonNativeResult, expectedResult, false,
            "Non Native implementation returned different result than " +
            "expected; MDX=" + mdx);
    }
}
/** Returns the native-evaluation registry of the Sales cube's schema. */
RolapNativeRegistry getRegistry(Connection connection) {
    final RolapCube salesCube =
        (RolapCube) connection.getSchema().lookupCube("Sales", true);
    final RolapSchemaReader reader =
        (RolapSchemaReader) salesCube.getSchemaReader();
    return reader.getSchema().getNativeRegistry();
}
/**
 * Runs a query twice, with native crossjoin optimization enabled and
 * disabled. If both results are equal, its considered correct.
 * Delegates to the five-argument overload with no expected result and a
 * reused connection.
 *
 * @param resultLimit maximum result size of all the MDX operations in this
 * query. This might be hard to estimate as it is usually larger than the
 * rowCount of the final result. Setting it to 0 will cause this limit to
 * be ignored.
 * @param rowCount number of rows returned
 * @param mdx query
 */
private void checkNative(
    int resultLimit, int rowCount, String mdx) {
    checkNative(resultLimit, rowCount, mdx, null, false);
}
/**
 * Runs a query twice, with native crossjoin optimization enabled and
 * disabled. If both results are equal, and both agree with the expected
 * result, its considered correct. Optionally the query could be run with
 * fresh connection. This is useful if the test case sets its certain
 * mondrian properties, e.g. native properties like:
 * mondrian.native.filter.enable
 *
 * @param resultLimit maximum result size of all the MDX operations in this
 * query. This might be hard to estimate as it is usually larger than the
 * rowCount of the final result. Setting it to 0 will cause this limit to
 * be ignored.
 * @param rowCount number of rows returned
 * @param mdx query
 * @param expectedResult expected result string
 * @param freshConnection set to true if fresh connection is required
 */
private void checkNative(
    int resultLimit, int rowCount, String mdx, String expectedResult,
    boolean freshConnection) {
    // Don't run the test if we're testing expression dependencies.
    // Expression dependencies cause spurious interval calls to
    // 'level.getMembers()' which create false negatives in this test.
    if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
        return;
    }
    CachePool.instance().flush();
    try {
        // Pass 1: native evaluation, verifying that an evaluator was
        // found and that SQL was actually executed.
        logger.debug("*** Native: " + mdx);
        boolean reuseConnection = !freshConnection;
        Connection con =
            getTestContext().getFoodMartConnection(reuseConnection);
        RolapNativeRegistry reg = getRegistry(con);
        // Hard cache so the second run below must hit the cache.
        reg.useHardCache(true);
        TestListener listener = new TestListener();
        reg.setListener(listener);
        reg.setEnabled(true);
        TestCase c = new TestCase(con, resultLimit, rowCount, mdx);
        Result result = c.run();
        String nativeResult = toString(result);
        if (!listener.isFoundEvaluator()) {
            fail("expected native execution of " + mdx);
        }
        if (!listener.isExcecuteSql()) {
            fail("cache is empty: expected SQL query to be executed");
        }
        // run once more to make sure that the result comes from cache now
        listener.setExcecuteSql(false);
        c.run();
        if (listener.isExcecuteSql()) {
            fail("expected result from cache when query runs twice");
        }
        con.close();
        // Pass 2: interpreted evaluation with native sets disabled;
        // results from both passes must match.
        logger.debug("*** Interpreter: " + mdx);
        CachePool.instance().flush();
        con = getTestContext().getFoodMartConnection(false);
        reg = getRegistry(con);
        listener.setFoundEvaluator(false);
        reg.setListener(listener);
        // disable RolapNativeSet
        reg.setEnabled(false);
        result = executeQuery(mdx, con);
        String interpretedResult = toString(result);
        if (listener.isFoundEvaluator()) {
            fail("did not expect native executions of " + mdx);
        }
        if (expectedResult != null) {
            TestContext.assertEqualsVerbose(
                nativeResult, expectedResult, false,
                "Native implementation returned different result than expected; MDX=" + mdx);
            TestContext.assertEqualsVerbose(
                interpretedResult, expectedResult, false,
                "Interpreter implementation returned different result than expected; MDX=" + mdx);
        }
        if (!nativeResult.equals(interpretedResult)) {
            TestContext.assertEqualsVerbose(
                nativeResult, interpretedResult, false,
                "Native implementation returned different result than interpreter; MDX=" + mdx);
        }
    } finally {
        // Re-enable native evaluation and drop the hard cache so later
        // tests see the default registry configuration.
        Connection con = getConnection();
        RolapNativeRegistry reg = getRegistry(con);
        reg.setEnabled(true);
        reg.useHardCache(false);
    }
}
/** Parses and executes the given MDX on the given connection. */
Result executeQuery(String mdx, Connection connection) {
    final Query parsed = connection.parseQuery(mdx);
    return connection.execute(parsed);
}
/** Renders a query result to its textual form for comparison. */
private String toString(Result result) {
    final StringWriter buffer = new StringWriter();
    final PrintWriter out = new PrintWriter(buffer);
    result.print(out);
    out.close();
    return buffer.toString();
}
/**
 * Convenience overload: looks up the member reader for the named
 * hierarchy using the shared FoodMart test connection.
 */
SmartMemberReader getSmartMemberReader(String hierName) {
    Connection con = getTestContext().getFoodMartConnection();
    return getSmartMemberReader(con, hierName);
}
/**
 * Looks up the SmartMemberReader for the named hierarchy of the Sales
 * cube on the given connection; fails if the hierarchy does not exist.
 */
SmartMemberReader getSmartMemberReader(Connection con, String hierName) {
    final RolapCube salesCube =
        (RolapCube) con.getSchema().lookupCube("Sales", true);
    final RolapSchemaReader reader =
        (RolapSchemaReader) salesCube.getSchemaReader();
    final RolapHierarchy hierarchy =
        (RolapHierarchy) salesCube.lookupHierarchy(hierName, false);
    assertNotNull(hierarchy);
    return (SmartMemberReader)
        hierarchy.getMemberReader(reader.getRole());
}
/**
 * Unwraps any NonEmptyResult decorators and returns the evaluator at the
 * given axis position of the underlying RolapResult.
 */
RolapEvaluator getEvaluator(Result res, int[] pos) {
    Result current = res;
    while (current instanceof NonEmptyResult) {
        current = ((NonEmptyResult) current).underlying;
    }
    return (RolapEvaluator) ((RolapResult) current).getEvaluator(pos);
}
/**
 * gets notified
 * <ul>
 * <li>when a matching native evaluator was found
 * <li>when SQL is executed
 * <li>when result is found in the cache
 * </ul>
 * Records each event in a boolean flag that the checkNative/checkNotNative
 * helpers inspect and reset between query runs.
 * @author av
 * @since Nov 22, 2005
 */
static class TestListener implements Listener {
    // Set when a native evaluator was selected for some set expression.
    boolean foundEvaluator;
    // Set when a tuple result was served from the cache.
    boolean foundInCache;
    // Set when a SQL statement was actually executed.
    // NOTE(review): "excecute"/"excuting" misspellings are kept because
    // excutingSql(TupleEvent) overrides the Listener interface method.
    boolean excecuteSql;
    boolean isExcecuteSql() {
        return excecuteSql;
    }
    void setExcecuteSql(boolean excecuteSql) {
        this.excecuteSql = excecuteSql;
    }
    boolean isFoundEvaluator() {
        return foundEvaluator;
    }
    void setFoundEvaluator(boolean foundEvaluator) {
        this.foundEvaluator = foundEvaluator;
    }
    boolean isFoundInCache() {
        return foundInCache;
    }
    void setFoundInCache(boolean foundInCache) {
        this.foundInCache = foundInCache;
    }
    public void foundEvaluator(NativeEvent e) {
        this.foundEvaluator = true;
    }
    public void foundInCache(TupleEvent e) {
        this.foundInCache = true;
    }
    public void excutingSql(TupleEvent e) {
        this.excecuteSql = true;
    }
}
}
// End NonEmptyTest.java |
package application.controllers;
import application.fxobjects.cell.Cell;
import application.fxobjects.cell.Edge;
import application.fxobjects.cell.layout.CellLayout;
import application.fxobjects.cell.layout.TreeLayout;
import application.fxobjects.cell.tree.LeafCell;
import core.MetaParser;
import core.graph.PhylogeneticTree;
import javafx.scene.control.ScrollPane;
import javafx.scene.input.ScrollEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.paint.Color;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
import java.util.TreeMap;
public class TreeController extends Controller<ScrollPane> {
private PhylogeneticTree pt;
private List<Cell> selectedStrains;
private List<Cell> collectedStrains;
private TreeMouseHandling treeMouseHandling;
private TreeMap<String, Integer> metaData;
private static final Color LIN0 = Color.web("000000");
private static final Color LIN1 = Color.web("ed00c3");
private static final Color LIN2 = Color.web("0000ff");
private static final Color LIN3 = Color.web("500079");
private static final Color LIN4 = Color.web("ff0000");
private static final Color LIN5 = Color.web("4e2c00");
private static final Color LIN6 = Color.web("69ca00");
private static final Color LIN7 = Color.web("ff7e00");
private static final Color LIN8 = Color.web("00ff9c");
private static final Color LIN9 = Color.web("00ff9c");
private static final Color LIN10 = Color.web("00ffff");
/**
* Class constructor.
*
* @param pt A phylogenetic tree.
* @param m MainController.
* @param s InputStream for metaData.
*/
public TreeController(PhylogeneticTree pt, MainController m, InputStream s) {
super(new ScrollPane());
this.pt = pt;
this.metaData = MetaParser.parse(s);
this.selectedStrains = new ArrayList<>();
this.collectedStrains = new ArrayList<>();
this.treeMouseHandling = new TreeMouseHandling(m);
this.getRoot().setHbarPolicy(ScrollPane.ScrollBarPolicy.ALWAYS);
this.getRoot().setVbarPolicy(ScrollPane.ScrollBarPolicy.ALWAYS);
this.getRoot().addEventFilter(ScrollEvent.SCROLL, event -> {
if (event.getDeltaY() != 0) {
event.consume();
}
});
init();
}
/**
* Get the phylogenetic tree.
*
* @return The phylogenetic tree.
*/
public PhylogeneticTree getPT() {
return pt;
}
@Override
public void initialize(URL location, ResourceBundle resources) {
}
/**
* Add cells from the model to the gui.
*/
public void init() {
AnchorPane root = new AnchorPane();
CellLayout layout = new TreeLayout(pt.getModel(), 30);
layout.execute();
List<Cell> nodeList = pt.getModel().getAddedCells();
List<Edge> edgeList = pt.getModel().getAddedEdges();
nodeList.forEach(treeMouseHandling::setMouseHandling);
edgeList.forEach(treeMouseHandling::setMouseHandling);
// Add all cells and edges to the anchor pane
root.getChildren().addAll(pt.getModel().getAddedCells());
root.getChildren().addAll(pt.getModel().getAddedEdges());
this.getRoot().setContent(root);
}
/**
* Selects strains to keep them highlighted.
*/
public void selectStrains() {
collectedStrains.forEach(e -> {
if (selectedStrains.contains(e)) {
selectedStrains.remove(e);
} else {
selectedStrains.add(e);
}
});
}
/**
* Colors the selected strains after un-hover.
*/
public void colorSelectedStrains() {
selectedStrains.forEach(this::applyCellHighlight);
}
/**
* Applies the highlight in the phylogenetic tree on hovering over a leafNode.
*
* @param cell the Cell being hovered over.
*/
public void applyCellHighlight(Cell cell) {
if (cell instanceof LeafCell) {
String temp = ((LeafCell) cell).getName();
collectedStrains.clear();
if (temp.contains("TKK")) {
List<Cell> parentList = new ArrayList<>();
parentList.add(cell);
collectedStrains.add(cell);
applyColorUpwards(parentList,
determineLinColor(metaData.get(temp)),
4.0);
}
}
}
/**
* Reverts the highlight in the phylogenetic tree on losing hover over a leafNode.
*
* @param cell the Cell which is no longer being hovered over.
*/
public void revertCellHighlight(Cell cell) {
List<Cell> parentList = new ArrayList<>();
parentList.add(cell);
collectedStrains.clear();
collectedStrains.add(cell);
applyColorUpwards(parentList, Color.BLACK, 1.0);
}
/**
* Applies the highlight in the phylogenetic tree on hovering over an Edge.
*
* @param edge the Edge being hovered over.
*/
public void applyEdgeHighlight(Edge edge) {
List<Cell> parentList = new ArrayList<>();
List<Cell> childList = new ArrayList<>();
parentList.add(edge.getSource());
childList.add(edge.getTarget());
collectedStrains.clear();
applyColorOnSelf(edge, Color.YELLOW, 4.0);
applyColorUpwards(parentList, Color.YELLOW, 4.0);
applyColorDownwards(childList, Color.YELLOW, 4.0);
}
/**
* Reverts the highlight in the phylogenetic tree on losing hover over an Edge.
*
* @param edge the Edge which is no longer being hovered over.
*/
public void revertEdgeHighlight(Edge edge) {
List<Cell> parentList = new ArrayList<>();
List<Cell> childList = new ArrayList<>();
parentList.add(edge.getSource());
childList.add(edge.getTarget());
collectedStrains.clear();
applyColorOnSelf(edge, Color.BLACK, 1.0);
applyColorUpwards(parentList, Color.BLACK, 1.0);
applyColorDownwards(childList, Color.BLACK, 1.0);
}
/**
* Apply a certain color and stroke to the edge being hovered over.
*
* @param e the given Edge.
* @param c the given Color.
* @param s the given stroke.
*/
private void applyColorOnSelf(Edge e, Color c, double s) {
e.getLine().setStroke(c);
e.getLine().setStrokeWidth(s);
}
/**
* Apply a certain color and stroke to the edges upwards from the node in the list.
*
* @param l the given List of Edges.
* @param c the given Color.
* @param s the given stroke.
*/
private void applyColorUpwards(List<Cell> l, Color c, double s) {
while (!l.isEmpty()) {
Cell next = l.remove(0);
l.addAll(next.getCellParents());
if (next.getCellId() != 0) {
Edge e = pt.getModel().getEdgeFromChild(next);
e.getLine().setStroke(c);
e.getLine().setStrokeWidth(s);
}
}
}
/**
* Apply a certain color and stroke to the edges downwards from the node in the list.
*
* @param l the given List of Edges.
* @param c the given Color.
* @param s the given stroke.
*/
private void applyColorDownwards(List<Cell> l, Color c, double s) {
while (!l.isEmpty()) {
Cell next = l.remove(0);
l.addAll(next.getCellChildren());
if (!(next instanceof LeafCell)) {
List<Edge> edges = pt.getModel().getEdgeFromParent(next);
edges.forEach(e -> {
e.getLine().setStroke(c);
e.getLine().setStrokeWidth(s);
});
} else {
collectedStrains.add(next);
}
}
}
/**
* Getter method for the selected strains.
*
* @return a list with the selected strains.
*/
public List<Cell> getSelectedStrains() {
return selectedStrains;
}
/**
* Determines the color of the edges for the corresponding lineages in a highlighted situation.
*
* @param l the lineage code.
* @return the color.
*/
private Color determineLinColor(int l) {
switch (l) {
case 0:
return LIN0;
case 1:
return LIN1;
case 2:
return LIN2;
case 3:
return LIN3;
case 4:
return LIN4;
case 5:
return LIN5;
case 6:
return LIN6;
case 7:
return LIN7;
case 8:
return LIN8;
case 9:
return LIN9;
case 10:
return LIN10;
default:
break;
}
return LIN0;
}
} |
package at.jku.pervasive.ecg;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Semaphore;
import javax.bluetooth.BluetoothConnectionException;
import javax.bluetooth.BluetoothStateException;
import javax.bluetooth.DataElement;
import javax.bluetooth.DeviceClass;
import javax.bluetooth.DiscoveryAgent;
import javax.bluetooth.DiscoveryListener;
import javax.bluetooth.LocalDevice;
import javax.bluetooth.RemoteDevice;
import javax.bluetooth.ServiceRecord;
import javax.bluetooth.UUID;
import javax.microedition.io.Connection;
import javax.microedition.io.Connector;
import com.intel.bluetooth.BlueCoveImpl;
import com.intel.bluetooth.BluetoothConsts;
import com.intel.bluetooth.RemoteDeviceHelper;
public class HeartManDiscovery {
public static final UUID HEARTMAN_SERVICE_UUID = BluetoothConsts.RFCOMM_PROTOCOL_UUID;
public static Object STACK_ID;
private static final HeartManDiscovery INSTANCE = new HeartManDiscovery();
/**
 * Returns the process-wide singleton instance.
 */
public static final HeartManDiscovery getInstance() {
    return INSTANCE;
}
private final Semaphore deviceInquiry = new Semaphore(1);
private final List<HeartManDevice> discoveredHeartManDevices = new LinkedList<HeartManDevice>();
private final Map<String, List<ServiceRecord>> servicesDiscovered = new HashMap<String, List<ServiceRecord>>();
private final Map<String, RemoteDevice> devicesDiscovered = new HashMap<String, RemoteDevice>();
private final Map<String, ListeningTask> listeningTasks = new HashMap<String, ListeningTask>();
// flag, indicating whether a device inquiry has started
private boolean isDeviceInquiryStarted;
// check for new ecg values every UPDATE_RATE ms
private final long updateRate;
/**
 * Creates the singleton with the default ECG polling interval of 5 ms.
 */
private HeartManDiscovery() {
    this(5);
}
/**
 * Creates the singleton and initializes the BlueCove Bluetooth stack.
 *
 * @param updateRate polling interval in milliseconds for new ECG values
 * @throws RuntimeException wrapping any Bluetooth stack init failure
 */
private HeartManDiscovery(long updateRate) {
    super();
    this.updateRate = updateRate;
    try {
        // Bind the BlueCove stack to this thread and remember its id so
        // other threads can attach to the same stack later.
        BlueCoveImpl.useThreadLocalBluetoothStack();
        STACK_ID = BlueCoveImpl.getThreadBluetoothStackID();
        BlueCoveImpl.setDefaultThreadBluetoothStackID(STACK_ID);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Runs a Bluetooth device inquiry and returns the discovered HeartMan
 * devices (devices whose friendly name contains "heartman", case
 * insensitive). Blocks until the inquiry completes, then performs a
 * service search for each discovered device.
 *
 * @return the discovered devices, or {@code null} if the inquiry could
 *         not be started
 * @throws IOException if the local Bluetooth device is unavailable
 * @throws InterruptedException if interrupted while waiting for the
 *         inquiry to complete
 */
public List<HeartManDevice> discoverHeartManDevices() throws IOException, InterruptedException {
    BlueCoveImpl.setDefaultThreadBluetoothStackID(STACK_ID);
    final Object inquiryCompletedEvent = new Object();
    devicesDiscovered.clear();
    DiscoveryListener listener = new DiscoveryListener() {
        @Override
        public void deviceDiscovered(RemoteDevice btDevice, DeviceClass cod) {
            System.out.println("Device " + btDevice.getBluetoothAddress() + " found");
            try {
                String name = btDevice.getFriendlyName(true);
                // Only add devices which have "heartman" in their name
                if (name != null && name.toLowerCase().contains("heartman")) {
                    devicesDiscovered.put(btDevice.getBluetoothAddress(), btDevice);
                }
            } catch (IOException ignored) {
                // Best effort: a device whose name cannot be read is skipped.
            }
            try {
                System.out.println(" name " + btDevice.getFriendlyName(true));
                System.out.println(" adress: " + btDevice.getBluetoothAddress());
            } catch (IOException ignored) {
                // Logging only; ignore devices that refuse a name lookup.
            }
        }
        @Override
        public void inquiryCompleted(int discType) {
            System.out.println("Device Inquiry completed!");
            synchronized (inquiryCompletedEvent) {
                deviceInquiry.release();
                inquiryCompletedEvent.notifyAll();
            }
        }
        @Override
        public void servicesDiscovered(int transID, ServiceRecord[] servRecord) {
        }
        @Override
        public void serviceSearchCompleted(int transID, int respCode) {
        }
    };
    synchronized (inquiryCompletedEvent) {
        deviceInquiry.acquire();
        boolean started = LocalDevice.getLocalDevice().getDiscoveryAgent().startInquiry(DiscoveryAgent.GIAC, listener);
        if (!started) {
            // BUGFIX: release the permit acquired above. The listener will
            // never fire for an inquiry that did not start, so nobody else
            // releases it and every later inquiry would block forever.
            deviceInquiry.release();
            this.isDeviceInquiryStarted = false;
            return null;
        }
        this.isDeviceInquiryStarted = true;
        System.out.println("wait for device inquiry to complete...");
        // Released/notified by inquiryCompleted() on the listener.
        inquiryCompletedEvent.wait();
        System.out.println(devicesDiscovered.size() + " device(s) found");
        discoveredHeartManDevices.clear();
        for (RemoteDevice device : devicesDiscovered.values()) {
            String name = "UNKNOWN";
            try {
                name = device.getFriendlyName(false);
            } catch (Exception e) {
                e.printStackTrace();
            }
            HeartManDevice heartManDevice = new HeartManDevice(device.getBluetoothAddress(), name);
            discoveredHeartManDevices.add(heartManDevice);
        }
        for (HeartManDevice device : discoveredHeartManDevices) {
            searchServices(device.getAddress());
        }
        this.isDeviceInquiryStarted = false;
        return discoveredHeartManDevices;
    }
}
/** Returns the (live, internal) list filled by the last discoverHeartManDevices() run. */
public List<HeartManDevice> getDiscoveredHeartManDevices() {
    return discoveredHeartManDevices;
}

/**
 * Returns the listening task for the given address, lazily creating and
 * starting one with the given service record if none exists yet.
 */
protected ListeningTask getListeningTask(String address, ServiceRecord serviceRecord) {
    ListeningTask listeningTask = listeningTasks.get(address);
    if (listeningTask == null) {
        listeningTask = new ListeningTask(STACK_ID, updateRate, serviceRecord);
        listeningTasks.put(address, listeningTask);
        listeningTask.start();
    }
    return listeningTask;
}

/** @return true if the local Bluetooth adapter is powered on */
public boolean isBluetoothEnabled() {
    return LocalDevice.isPowerOn();
}

/** @return true while a device inquiry started by this class is in progress */
public boolean isDeviceInquiryStarted() {
    return isDeviceInquiryStarted;
}
/**
 * Probes whether the device with the given Bluetooth address is reachable by
 * opening a short-lived SPP connection to it.
 *
 * @param address Bluetooth address of the device to probe
 * @return the RemoteDevice if reachable, {@code null} if no such device exists
 * @throws IOException on any other connection failure
 */
public RemoteDevice pingDevice(String address) throws IOException {
    String urlPattern = "btspp://%1$s:1;authenticate=false;encrypt=false;master=false";
    String url = String.format(urlPattern, address);
    System.out.println("ping device");
    Connection connection = null;
    try {
        connection = Connector.open(url, Connector.READ);
        RemoteDevice remoteDevice = RemoteDevice.getRemoteDevice(connection);
        RemoteDeviceHelper.authenticate(remoteDevice, "Heartman");
        return remoteDevice;
    } catch (BluetoothConnectionException bce) {
        // getMessage() can be null; guard before startsWith to avoid an NPE
        if (bce.getMessage() != null && bce.getMessage().startsWith("No such device")) {
            return null;
        }
        throw bce;
    } finally {
        // The probe connection was previously leaked; close it in all paths.
        if (connection != null) {
            try {
                connection.close();
            } catch (IOException ignored) {
                // closing a probe connection is best effort
            }
        }
    }
}
/**
 * Synchronously searches the given remote device for services matching
 * HEARTMAN_SERVICE_UUID. Blocks until serviceSearchCompleted() fires
 * (via the searchServicesLock semaphore, created with zero permits).
 *
 * @return service records that expose a connection URL; never null
 */
public List<ServiceRecord> searchServices(RemoteDevice remoteDevice) throws BluetoothStateException {
    final List<ServiceRecord> serviceRecords = new LinkedList<ServiceRecord>();
    LocalDevice localDevice = LocalDevice.getLocalDevice();
    DiscoveryAgent discoveryAgent = localDevice.getDiscoveryAgent();
    // 0 permits: acquireUninterruptibly() below blocks until the search completes.
    final Semaphore searchServicesLock = new Semaphore(0);
    final int[] attrs = new int[] { 0x0100 }; // Service name
    UUID[] serviceUUIDs = new UUID[] { HeartManDiscovery.HEARTMAN_SERVICE_UUID };
    discoveryAgent.searchServices(attrs, serviceUUIDs, remoteDevice, new DiscoveryListener() {
        @Override
        public void deviceDiscovered(RemoteDevice btDevice, DeviceClass cod) {
        }
        @Override
        public void inquiryCompleted(int discType) {
        }
        @Override
        public void servicesDiscovered(int transID, ServiceRecord[] servRecord) {
            for (int i = 0; i < servRecord.length; i++) {
                // Skip records without a usable connection URL.
                String url = servRecord[i].getConnectionURL(ServiceRecord.NOAUTHENTICATE_NOENCRYPT, false);
                if (url == null) {
                    continue;
                }
                DataElement serviceName = servRecord[i].getAttributeValue(0x0100);
                if (serviceName != null) {
                    System.out.println("service " + serviceName.getValue() + " found " + url);
                    serviceRecords.add(servRecord[i]);
                } else {
                    // NOTE(review): records without a name attribute are logged
                    // but not collected — confirm this is intentional.
                    System.out.println("service found " + url);
                }
            }
        }
        @Override
        public void serviceSearchCompleted(int transID, int respCode) {
            searchServicesLock.release();
        }
    });
    searchServicesLock.acquireUninterruptibly();
    return serviceRecords;
}
/**
 * Looks up the service records for a previously discovered device address,
 * performing (and caching) a service search on first use.
 *
 * @return the cached or freshly searched records, or null for unknown addresses
 */
public List<ServiceRecord> searchServices(String address) throws BluetoothStateException {
    System.out.println("search for services for " + address);
    RemoteDevice device = devicesDiscovered.get(address);
    if (device == null) {
        return null;
    }
    List<ServiceRecord> cached = servicesDiscovered.get(address);
    if (cached == null) {
        cached = searchServices(device);
        servicesDiscovered.put(address, cached);
    }
    return cached;
}
/**
 * Starts (or joins) the listening task for the given device address, searching
 * for the device's HeartMan service first when no task exists yet.
 *
 * @throws IllegalStateException if no service can be found for the address
 *         (previously this crashed with an NPE or IndexOutOfBoundsException)
 */
public void startListening(String address, IHeartManListener listener) throws BluetoothStateException {
    ListeningTask listeningTask = listeningTasks.get(address);
    boolean start = false;
    if (listeningTask == null) {
        List<ServiceRecord> services = searchServices(address);
        // searchServices() returns null for unknown addresses and may return
        // an empty list; fail with a clear message instead of NPE/IOOBE.
        if (services == null || services.isEmpty()) {
            throw new IllegalStateException("No HeartMan service found for device " + address);
        }
        ServiceRecord serviceRecord = services.get(0);
        listeningTask = new ListeningTask(STACK_ID, updateRate, serviceRecord);
        listeningTasks.put(address, listeningTask);
        start = true;
    }
    listeningTask.addListener(listener);
    if (start) {
        listeningTask.start();
    }
}
/**
 * Starts (or joins) the listening task for the given device address using an
 * explicitly provided service record.
 */
public void startListening(String address, IHeartManListener listener, ServiceRecord serviceRecord)
        throws BluetoothStateException {
    ListeningTask task = listeningTasks.get(address);
    final boolean isNewTask = (task == null);
    if (isNewTask) {
        task = new ListeningTask(STACK_ID, updateRate, serviceRecord);
        listeningTasks.put(address, task);
    }
    task.addListener(listener);
    // Start only tasks created by this call; existing ones are already running.
    if (isNewTask) {
        task.start();
    }
}
/**
 * Stops the listening task for the given address, if any: clears its
 * listeners and interrupts it.
 */
public void stopListening(String address) {
    ListeningTask task = listeningTasks.get(address);
    if (task == null) {
        return;
    }
    task.clearListener();
    task.interrupt();
    // Keep the key but clear the value; tearDown() iterates the key set, so
    // removing the entry here could invalidate that iteration.
    listeningTasks.put(address, null);
}
/**
 * Stops all listening tasks. Iterates over a snapshot of the key set:
 * stopListening() mutates the map while we iterate, which is fragile and
 * would throw ConcurrentModificationException if it ever removed entries.
 */
public void tearDown() {
    for (String address : new LinkedList<String>(listeningTasks.keySet())) {
        stopListening(address);
    }
}
} |
package cgeo.geocaching.files;
import cgeo.geocaching.R;
import cgeo.geocaching.activity.ActivityMixin;
import org.apache.commons.lang3.StringUtils;
import android.app.ListActivity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Environment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Dialog for choosing a file or directory.
*/
public class SimpleDirChooser extends ListActivity {
public static final String EXTRA_CHOSEN_DIR = "chosenDir";
public static final String START_DIR = "start_dir";
private static final String PARENT_DIR = ".. ";
private File currentDir;
private FileArrayAdapter adapter;
private Button okButton = null;
private int lastPosition = -1;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final Bundle extras = getIntent().getExtras();
String startDir = extras.getString(START_DIR);
if (StringUtils.isBlank(startDir)) {
startDir = Environment.getExternalStorageDirectory().getPath();
} else {
startDir = startDir.substring(0, startDir.lastIndexOf(File.separatorChar));
}
currentDir = new File(startDir);
ActivityMixin.setTheme(this);
setContentView(R.layout.simple_dir_chooser);
setTitle(this.getResources().getString(R.string.simple_dir_chooser_title));
fill(currentDir);
okButton = (Button) findViewById(R.id.simple_dir_chooser_ok);
okButton.setEnabled(false);
okButton.setVisibility(View.INVISIBLE);
okButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent();
String chosenDirName = File.separator + adapter.getItem(lastPosition).getName();
intent.putExtra(EXTRA_CHOSEN_DIR, currentDir.getAbsolutePath() + chosenDirName);
setResult(RESULT_OK, intent);
finish();
}
});
Button cancelButton = (Button) findViewById(R.id.simple_dir_chooser_cancel);
cancelButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent();
setResult(RESULT_CANCELED, intent);
finish();
}
});
}
private void fill(File dir) {
EditText path = (EditText) findViewById(R.id.simple_dir_chooser_path);
path.setText(this.getResources().getString(R.string.simple_dir_chooser_current_path) + " " + dir.getAbsolutePath());
final File[] dirs = dir.listFiles(new DirOnlyFilenameFilter());
List<Option> listDirs = new ArrayList<Option>();
try {
for (File currentDir : dirs) {
listDirs.add(new Option(currentDir.getName(), currentDir.getAbsolutePath()));
}
} catch (Exception e) {
}
Collections.sort(listDirs);
if (dir.getParent() != null) {
listDirs.add(0, new Option(PARENT_DIR, dir.getParent()));
}
this.adapter = new FileArrayAdapter(this, R.layout.simple_dir_item, listDirs);
this.setListAdapter(adapter);
}
@Override
protected void onListItemClick(ListView l, View v, int position, long id) {
super.onListItemClick(l, v, position, id);
}
public class FileArrayAdapter extends ArrayAdapter<Option> {
private Context content;
private int id;
private List<Option> items;
public FileArrayAdapter(Context context, int simpleDirItemResId, List<Option> objects) {
super(context, simpleDirItemResId, objects);
this.content = context;
this.id = simpleDirItemResId;
this.items = objects;
}
@Override
public Option getItem(int index) {
return items.get(index);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View v = convertView;
if (v == null) {
LayoutInflater vi = (LayoutInflater) content.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
v = vi.inflate(id, null);
}
final Option option = items.get(position);
if (option != null) {
TextView t1 = (TextView) v.findViewById(R.id.TextView01);
if (t1 != null) {
t1.setOnClickListener(new OnTextViewClickListener(position));
t1.setText(option.getName());
}
CheckBox check = (CheckBox) v.findViewById(R.id.CheckBox);
if (check != null) {
check.setOnClickListener(new OnCheckBoxClickListener(position));
check.setChecked(option.isChecked());
}
}
return v;
}
}
public class OnTextViewClickListener implements OnClickListener {
private int position;
OnTextViewClickListener(int position) {
this.position = position;
}
@Override
public void onClick(View arg0) {
Option option = adapter.getItem(position);
if (option.getName().equals(PARENT_DIR)) {
currentDir = new File(option.getPath());
fill(currentDir);
} else {
File dir = new File(option.getPath());
if (dir.list(new DirOnlyFilenameFilter()).length > 0) {
currentDir = dir;
fill(currentDir);
}
}
}
}
public class OnCheckBoxClickListener implements OnClickListener {
private int position;
OnCheckBoxClickListener(int position) {
this.position = position;
}
@Override
public void onClick(View arg0) {
Option lastOption = (lastPosition > -1) ? adapter.getItem(lastPosition) : null;
Option currentOption = adapter.getItem(position);
if (lastOption != null) {
lastOption.setChecked(false);
}
if (currentOption != lastOption) {
currentOption.setChecked(true);
lastPosition = position;
okButton.setEnabled(true);
okButton.setVisibility(View.VISIBLE);
} else {
lastPosition = -1;
okButton.setEnabled(false);
okButton.setVisibility(View.INVISIBLE);
}
adapter.notifyDataSetChanged();
}
}
public class Option implements Comparable<Option> {
private final String name;
private final String path;
private boolean checked = false;
public Option(String name, String path) {
this.name = name;
this.path = path;
}
public String getName() {
return name;
}
public String getPath() {
return path;
}
public boolean isChecked() {
return this.checked;
}
public void setChecked(boolean checked) {
this.checked = checked;
}
@Override
public int compareTo(Option other) {
if (other != null && this.name != null) {
return String.CASE_INSENSITIVE_ORDER.compare(this.name, other.getName());
}
throw new IllegalArgumentException("");
}
}
public static class DirOnlyFilenameFilter implements FilenameFilter {
@Override
public boolean accept(File dir, String filename) {
File file = new File(dir, filename);
return file.isDirectory();
}
}
} |
package at.ngmpps.fjsstt.factory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import at.ngmpps.fjsstt.model.problem.FJSSTTproblem;
import at.ngmpps.fjsstt.model.problem.FJSSTTproblem.Objective;
public class ProblemParser {
static final Logger logger = LoggerFactory.getLogger(ProblemParser.class);
// First line of a .fjs file: "<jobs> <machines>".
static Pattern firstlinePattern = Pattern.compile("(\\d+)\\W(\\d+)");
// Per-job line: "<#operations> <operation data...> <release> <due> <weight>".
static Pattern operationsLinePattern = Pattern.compile("(\\d+)\\W(.+)(\\d+)\\W(\\d+)\\W(\\d+)");
// Individual integers inside the operation data.
static Pattern operationsProcessesPattern = Pattern.compile("(\\d+)");
// Individual integers per transport-matrix line.
static Pattern transportTimesLinePatter = Pattern.compile("(\\d+)");
public static String PROBLEM_FILE_EXTENSION = "fjs";
public static String TRANSPORT_FILE_EXTENSION = "transport";
public static String CONFIG_FILE_EXTENSION = "properties";
public static final String SEARCH_TYPE_KEY = "SubgradientSearch.SearchType";
public static final String SEARCH_NR_TIME_SLOTS_KEY = "SubgradientSearch.NrTimeSlots";
public static final String TRANSPORT_FILE_KEY = "SubgradientSearch.TransportFile";
// fjs file to parse
File problemFile;
Properties configuration = new Properties();
// Fields below are filled by parsing and used to create a FJSSTTproblem.
int jobs;
int machines;
// number of operations per job
int[] operations;
int maxOperations;
// make timeslots in the problem the max due date found in the files
int timeslotsMaxDueDate;
// The sets of alternative machines per operation. A key is an
// integer tuple (job,operation), the corresponding value is the set
// of alternative machines.
HashMap<String, List<Integer>> altMachines;
// The first index is the job, the second index is the operation,
// and the third index is the machine.
int[][][] processTimes;
int[][] travelTimes;
// per job
int[] dueDates;
// job priorities
int[] jobWeights;
// default objective function for parsed files
Objective objective = Objective.TARDINESS;

public ProblemParser() {
}
/**
 * Parses the given .fjs problem file; additionally looks for a .properties
 * configuration file with a similar name and the transport file configured
 * there.
 *
 * @param file the .fjs problem file (or a folder containing one)
 * @return the parsed problem, or null if the file could not be parsed
 */
public static FJSSTTproblem parseProblemWithProblemFile(final File file) throws URISyntaxException, IOException {
    final ProblemParser parse = new ProblemParser();
    return parse.parseProblemConfig(file);
}

/**
 * Parses ONLY the .fjs problem file itself — no configuration and no
 * transport times are read.
 *
 * @param file the .fjs problem file (or a folder containing one)
 */
public void parseProblemFileOnly(final File file) throws URISyntaxException, IOException {
    List<File> files = checkOrFindFile(file, PROBLEM_FILE_EXTENSION);
    if (files.size() > 0) {
        problemFile = files.get(0);
        parseProblemFile();
    }
}

/**
 * Convenience overload of {@link #parseProblemWithProblemFile(File)} taking a
 * file name.
 */
public static FJSSTTproblem parseProblemWithProblemFile(final String filename) throws URISyntaxException, IOException {
    return parseProblemWithProblemFile(new File(filename));
}

/**
 * Based on the properties file (= configuration) also finds a problem file
 * (.fjs) with a similar name and the transport file configured there.
 *
 * @param filename path to the .properties configuration file
 * @return the parsed problem, or null if parsing failed
 */
public static FJSSTTproblem parseProblemWithConfigFile(final String filename) throws URISyntaxException, IOException {
    final ProblemParser parse = new ProblemParser();
    return parse.parseConfigProblem(new File(filename));
}
/**
 * Checks whether the given file is readable; if not, tries to resolve it via
 * the class loader, and if it denotes a directory, collects all files with
 * the given extension from it.
 *
 * @param fileOrFolderPath may be the path to a file or to a folder; if the
 *        latter, the fileExtension is used to collect matching files
 * @param fileExtension extension to filter by when a folder is given
 * @return the resolved file(s); empty list if nothing readable was found
 */
public static List<File> checkOrFindFile(final File fileOrFolderPath, final String fileExtension) {
    List<File> files = new ArrayList<File>();
    if (fileOrFolderPath != null) {
        File found = fileOrFolderPath;
        if (!found.canRead()) {
            // :( ... use class loader to resolve that file
            final URL file = ProblemParser.class.getClassLoader().getResource(found.getAbsolutePath().toString());
            if (file != null) {
                try {
                    found = new File(file.toURI());
                    if (!found.canRead())
                        found = null;
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        if (found != null && found.isDirectory()) {
            // we received a directory. try to find the right file in this
            // directory
            try {
                for (File subfile : found.listFiles()) {
                    if (!subfile.isDirectory()) {
                        String problemName = subfile.toString();
                        if (problemName.endsWith(fileExtension)) {
                            files.add(subfile);
                        }
                    }
                }
            } catch (Exception e) {
                // best effort: an unlistable directory yields an empty result
                // (listFiles() may return null; the NPE is swallowed here)
            }
        } else if (found != null && found.isFile()) {
            files.add(found);
        }
    }
    return files;
}

/** Finds .properties configuration files next to the given problem file. */
public static List<File> findConfigurationFiles(File problemFilePath) throws IOException {
    return findFiles(problemFilePath, CONFIG_FILE_EXTENSION);
}
/**
* Takes a File or Folder as input and searches for files in the same folder
* with the given extensions for the first case (File) search for files with
* given extension and where the name starts equal to the given file
* WT1.fjs,properties -> WT1a.properties, WT1b.properties
*
* @param problemFilePath
* @param someOtherFileExtension
* @return
* @throws IOException
*/
public static List<File> findFiles(File fileOrPath, String someOtherFileExtension) throws IOException {
String FilePathName = null;
File folder = null;
ArrayList<File> resultFiles = new ArrayList<File>();
File problemFilePath = fileOrPath;
if (problemFilePath != null && !problemFilePath.canRead()) {
// this checks the filename and eventually uses the classpathloader
List<File> found = checkOrFindFile(problemFilePath, someOtherFileExtension);
if (found != null && found.size() > 0)
problemFilePath = found.get(0);
}
if (problemFilePath != null && problemFilePath.canRead()) {
// its a file using that format name.ext
if (problemFilePath.toString().lastIndexOf(".") > problemFilePath.toString().lastIndexOf(File.separatorChar)) {
// all transport files starting with the same name
FilePathName = problemFilePath.toString().substring(0, problemFilePath.toString().lastIndexOf('.'));
// search in this folder for files that start with FilePathName and
// end with .transport
folder = problemFilePath.getParentFile();
} else if (problemFilePath.isDirectory()) {
// its a Folder!
folder = problemFilePath;
// all transport files in this folder
FilePathName = problemFilePath.toString();
}
// we have selected WT1.fjs and want to find WT1a.properties &&
// WT1b.properties, but not WT2.properties
// alse
// we have selected WT1a.properties and want to find WT1.fjs but not
// WT2.fjs
if (folder != null) {
int substring = FilePathName.length();
int folderCharPos = Math.max(FilePathName.lastIndexOf(File.separatorChar), 0);
for (int s = substring; s > folderCharPos; s
for (File file : folder.listFiles()) {
String fn = file.toString();
if (fn.endsWith(someOtherFileExtension) && fn.startsWith(FilePathName))
resultFiles.add(file);
}
if (resultFiles.isEmpty())
FilePathName = FilePathName.substring(0, s - 1);
else
s = folderCharPos;
}
}
}
return resultFiles;
}
/** Finds .fjs problem files next to the given path (see {@link #findFiles}). */
public static List<File> findProblemFiles(File problemFilePath) throws IOException {
    return findFiles(problemFilePath, ProblemParser.PROBLEM_FILE_EXTENSION);
}

/** Finds .transport files next to the given path (see {@link #findFiles}). */
public static List<File> findTransportFiles(File problemFilePath) throws IOException {
    return findFiles(problemFilePath, TRANSPORT_FILE_EXTENSION);
}
/**
 * Reads the given key from the config as a boolean; returns false (and logs
 * an error) when the key is missing or empty.
 * Note: SLF4J uses {} placeholders — the previous {0}/{1} MessageFormat style
 * was never substituted into the log output.
 */
public static boolean getPropertyBool(Properties config, String key) {
    if (config.containsKey(key)) {
        String prop = config.getProperty(key);
        if (prop == null || prop.isEmpty()) {
            logger.error("Property Key {} is empty in Config {}.", key, config);
            return false;
        }
        return Boolean.parseBoolean(trimm(prop));
    }
    logger.error("Property Key {} not found in Config {}.", key, config);
    return false;
}

/** Reads the given key as a double; 0.0 (plus an error log) when missing/empty. */
public static double getPropertyDouble(Properties config, String key) {
    if (config.containsKey(key)) {
        String prop = config.getProperty(key);
        if (prop == null || prop.isEmpty()) {
            logger.error("Property Key {} is empty in Config {}.", key, config);
            return 0.0;
        }
        return Double.parseDouble(trimm(prop));
    }
    logger.error("Property Key {} not found in Config {}.", key, config);
    return 0.0;
}

/** Reads the given key as an int; 0 (plus an error log) when missing/empty. */
public static int getPropertyInt(Properties config, String key) {
    if (config.containsKey(key)) {
        String prop = config.getProperty(key);
        if (prop == null || prop.isEmpty()) {
            logger.error("Property Key {} is empty in Config {}.", key, config);
            return 0;
        }
        return Integer.parseInt(trimm(prop));
    }
    logger.error("Property Key {} not found in Config {}.", key, config);
    return 0;
}

/** Reads the given key as a trimmed string; "" (plus an error log) when missing/empty. */
public static String getPropertyString(Properties config, String key) {
    if (config.containsKey(key)) {
        String prop = config.getProperty(key);
        if (prop == null || prop.isEmpty()) {
            logger.error("Property Key {} is empty in Config {}.", key, config);
            return "";
        }
        return trimm(prop);
    }
    logger.error("Property Key {} not found in Config {}.", key, config);
    return "";
}
/**
 * Strips leading and trailing space characters (' ' only, unlike
 * String.trim which also strips control characters).
 * Fixes the previous implementation, which returned a single space for
 * all-space input because its scan loop never examined index 0.
 *
 * @param string the string to trim; must not be null
 * @return the string without leading/trailing spaces (possibly empty)
 */
public static String trimm(String string) {
    int end = string.length();
    while (end > 0 && string.charAt(end - 1) == ' ')
        end--;
    int start = 0;
    while (start < end && string.charAt(start) == ' ')
        start++;
    return string.substring(start, end);
}
/**
 * Builds a FJSSTTproblem from the state collected by the parse methods.
 * Only meaningful after parseProblemFile() (and optionally the config and
 * transport parsers) have run.
 */
public FJSSTTproblem getProblem() {
    return new FJSSTTproblem(jobs, operations, maxOperations, machines, timeslotsMaxDueDate, altMachines, processTimes, travelTimes,
            dueDates, objective, jobWeights, configuration);
}

/** Instance shortcut for {@link #getPropertyBool(Properties, String)} on the parsed config. */
public boolean getPropertyBool(String key) {
    return getPropertyBool(configuration, key);
}

/** Instance shortcut for {@link #getPropertyDouble(Properties, String)} on the parsed config. */
public double getPropertyDouble(String key) {
    return getPropertyDouble(configuration, key);
}

/** Instance shortcut for {@link #getPropertyInt(Properties, String)} on the parsed config. */
public int getPropertyInt(String key) {
    return getPropertyInt(configuration, key);
}

/** Instance shortcut for {@link #getPropertyString(Properties, String)} on the parsed config. */
public String getPropertyString(String key) {
    return getPropertyString(configuration, key);
}
/**
 * Parses the configuration file (.properties), then locates and parses a
 * similarly named .fjs problem file, and finally the transport times (either
 * from the file configured via SubgradientSearch.TransportFile or from a
 * .transport file with a similar name).
 *
 * @param configFile the .properties configuration file
 * @return the parsed problem, or null if anything went wrong
 */
public FJSSTTproblem parseConfigProblem(File configFile) {
    try {
        parseConfigurationFile(configFile);
        List<File> problemFiles = findProblemFiles(configFile);
        if (problemFiles != null && problemFiles.size() > 0) {
            problemFile = problemFiles.get(0);
            // Bug fix: the problem file was located but never parsed, so
            // getProblem() was built from uninitialized state.
            parseProblemFile();
        }
        if (configuration != null) {
            parseTransportTimes();
        } else {
            List<File> transportFiles = findTransportFiles(configFile);
            if (transportFiles != null && transportFiles.size() > 0) {
                parseTransportTimes(transportFiles.get(0));
            }
        }
        return getProblem();
    } catch (Exception io) {
        // its ok if something happens here. we still have the problem
        io.printStackTrace();
    }
    return null;
}
/**
 * Checks that the problem file is readable and parses it; additionally it
 * searches for .properties files (the config) and takes the first one found.
 * That file is used to check if it contains the link to the transport file
 * (SubgradientSearch.TransportFile); if so it is parsed too.
 *
 * @param problemFjsFile the .fjs problem file (or a folder containing one)
 * @return the parsed problem, or null if the problem file could not be parsed
 */
public FJSSTTproblem parseProblemConfig(File problemFjsFile) {
    // Bug fix: previously this looked up the (still null) field 'problemFile'
    // instead of the given argument, so the problem file was never found and
    // the method always returned null.
    List<File> files = checkOrFindFile(problemFjsFile, PROBLEM_FILE_EXTENSION);
    if (files.size() > 0) {
        problemFile = files.get(0);
        try {
            parseProblemFile();
            try {
                List<File> multiplePropertiesFiles = findConfigurationFiles(problemFile);
                if (multiplePropertiesFiles != null && multiplePropertiesFiles.size() > 0) {
                    parseConfigurationFile(multiplePropertiesFiles.get(0));
                    parseTransportTimes();
                }
            } catch (Exception io) {
                // its ok if something happens here. we still have the problem
                io.printStackTrace();
            }
            return getProblem();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
    }
    return null;
}
protected File parseProblemFile() throws IOException {
if (problemFile != null && problemFile.canRead()) {
// key == int[](job,operation)
altMachines = new HashMap<String, List<Integer>>();
// read files line per line
BufferedReader reader = new BufferedReader(new FileReader(problemFile));
String currentLine = reader.readLine();
// first line has 2 numbers: mJobs mMachines
final Matcher firstline = ProblemParser.firstlinePattern.matcher(currentLine);
firstline.matches();
jobs = Integer.parseInt(firstline.group(1));
machines = Integer.parseInt(firstline.group(2));
dueDates = new int[jobs];
operations = new int[jobs];
jobWeights = new int[jobs];
// to find max, we init this with 0
timeslotsMaxDueDate = 0;
maxOperations = 0;
processTimes = new int[jobs][][];
// one line per job
for (int j = 0; j < jobs; ++j) {
currentLine = reader.readLine();
final Matcher operationsLine = ProblemParser.operationsLinePattern.matcher(currentLine);
operationsLine.matches();
operations[j] = new Integer(operationsLine.group(1));
maxOperations = maxOperations > operations[j] ? maxOperations : operations[j];
// main stuff in the form of: #Machines/Op (machine,processtime)
// (machine,processtime)... #Machines/Op times
// e.g.: 3 1 2 2 3 4 2 : 3 machines for this operation: machine
// 1 needs 2, machine 2 needs 3, machine 4 needs 2
// !!! need to adjust machineID: id=1 here its 0
final String operationsForJob = operationsLine.group(2);
// TODO releaseTime is not used; keep this line to keep the
// semantics of this value
final int releaseTime = new Integer(operationsLine.group(3));
// (time point)
// vs our time-slot based point of view. Addition our first
// time-slot == 0.
// hence we reduce the dueDates parsed by 1
dueDates[j] = new Integer(operationsLine.group(4)) - 1;
timeslotsMaxDueDate = timeslotsMaxDueDate > dueDates[j] ? timeslotsMaxDueDate : dueDates[j];
// priorities
jobWeights[j] = new Integer(operationsLine.group(5));
// parse line
final Matcher operationsProcessTimes = ProblemParser.operationsProcessesPattern.matcher(operationsForJob);
// parse per job per operation
processTimes[j] = new int[operations[j]][];
for (int o = 0; o < operations[j]; ++o) {
// in last array we have all machines. but not all are
// possible (=0)
processTimes[j][o] = new int[machines];
// how many tuples (machine, processingtime) do we have for
// the
// operation o
operationsProcessTimes.find();
final int altMachinesForOpCount = new Integer(operationsProcessTimes.group());
final List<Integer> altMachinesForOp = new ArrayList<Integer>();
for (int machineIdx = 0; machineIdx < altMachinesForOpCount; machineIdx++) {
operationsProcessTimes.find();
int machine = new Integer(operationsProcessTimes.group());
// machine 1 in file is machine 0 here!
machine
altMachinesForOp.add(machine);
operationsProcessTimes.find();
final int time = new Integer(operationsProcessTimes.group());
processTimes[j][o][machine] = time;
}
final String key = j + "-" + o;
altMachines.put(key, altMachinesForOp);
}
}
reader.close();
}
return problemFile;
}
/**
 * Parses the .properties configuration file (searching for it via
 * {@link #checkOrFindFile} if the given file is not readable) into the
 * {@code configuration} field.
 *
 * @param properties the .properties file (or a folder containing one)
 * @return the loaded configuration, or null (and a null field) if unreadable
 */
public Properties parseConfigurationFile(File properties) throws IOException {
    List<File> found = checkOrFindFile(properties, CONFIG_FILE_EXTENSION);
    if (found.size() > 0)
        properties = found.get(0);
    if (properties != null && properties.canRead()) {
        configuration = new Properties();
        // the FileReader was previously never closed (resource leak)
        FileReader reader = new FileReader(properties);
        try {
            configuration.load(reader);
        } finally {
            reader.close();
        }
        return configuration;
    }
    configuration = null;
    return null;
}
/**
 * If a configuration is given, use the transport file referenced there; else
 * search for a transport file with a similar name as the problem file.
 *
 * @return the transport file that was parsed via the configuration, or null
 * @throws IOException
 */
public File parseTransportTimes() throws IOException {
    File file = null;
    if (configuration != null && configuration.containsKey(TRANSPORT_FILE_KEY)) {
        String tpfile = configuration.getProperty(TRANSPORT_FILE_KEY);
        // NOTE(review): the three equals(" ") checks render identically here —
        // they presumably tested different whitespace (space/tab/nbsp) before
        // the file's formatting was mangled; verify against history.
        if (tpfile != null && !tpfile.isEmpty() && !tpfile.equals(" ") && !tpfile.equals(" ") && !tpfile.equals(" ")) {
            List<File> transpFile = findFiles(new File(problemFile.getParentFile() + File.separator + tpfile), TRANSPORT_FILE_EXTENSION);
            if (transpFile != null && transpFile.size() > 0) {
                file = transpFile.get(0);
                parseTransportTimes(file);
            }
        }
    } else if (configuration == null) {
        // no config at all: fall back to a sibling .transport file
        List<File> transports = findTransportFiles(problemFile);
        if (transports != null && !transports.isEmpty())
            parseTransportTimes(transports.get(0));
    }
    return file;
}
/**
 * Parses the machine-to-machine transport time matrix from the given file
 * (one line per machine, one integer per target machine). If no file is
 * given or it is empty, the matrix is initialized with all zeros.
 *
 * @param transportFile the .transport file, may be null
 */
public void parseTransportTimes(File transportFile) throws IOException {
    boolean initTravelTimes = false;
    if (transportFile != null) {
        BufferedReader reader = new BufferedReader(new FileReader(transportFile));
        try {
            travelTimes = new int[machines][];
            String currentLine = reader.readLine();
            for (int machine = 0; currentLine != null && machine < machines; machine++) {
                initTravelTimes = true;
                // one line per machine
                travelTimes[machine] = new int[machines];
                final Matcher transportTimesLine = ProblemParser.transportTimesLinePatter.matcher(currentLine);
                for (int othermachine = 0; othermachine < machines; ++othermachine) {
                    if (transportTimesLine.find()) {
                        travelTimes[machine][othermachine] = Integer.parseInt(transportTimesLine.group());
                    }
                }
                currentLine = reader.readLine();
            }
        } finally {
            // the reader was previously leaked when parsing threw
            reader.close();
        }
    }
    if (!initTravelTimes) {
        // no file / empty file: Java zero-fills new int arrays, so a single
        // allocation replaces the previous manual nested zeroing loops
        travelTimes = new int[machines][machines];
    }
}
/**
 * Resolves the given path to a .transport file and parses it, if found.
 */
public void parseTransportTimes(String pathToFile) throws IOException {
    List<File> candidates = findFiles(new File(pathToFile), TRANSPORT_FILE_EXTENSION);
    if (candidates == null || candidates.isEmpty()) {
        return;
    }
    parseTransportTimes(candidates.get(0));
}
} |
package com.joelapenna.foursquared;
import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareException;
import com.joelapenna.foursquare.types.User;
import com.joelapenna.foursquared.app.FoursquaredService;
import com.joelapenna.foursquared.error.LocationException;
import com.joelapenna.foursquared.location.BestLocationListener;
import com.joelapenna.foursquared.location.LocationUtils;
import com.joelapenna.foursquared.preferences.Preferences;
import com.joelapenna.foursquared.util.DumpcatcherHelper;
import com.joelapenna.foursquared.util.JavaLoggingHandler;
import com.joelapenna.foursquared.util.NullDiskCache;
import com.joelapenna.foursquared.util.RemoteResourceManager;
import android.app.Application;
import android.appwidget.AppWidgetManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.Resources;
import android.location.Location;
import android.location.LocationManager;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.util.Log;
import java.io.IOException;
import java.util.Observer;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* @author Joe LaPenna (joe@joelapenna.com)
*/
public class Foursquared extends Application {
private static final String TAG = "Foursquared";
private static final boolean DEBUG = FoursquaredSettings.DEBUG;
static {
Logger.getLogger("com.joelapenna.foursquare").addHandler(new JavaLoggingHandler());
Logger.getLogger("com.joelapenna.foursquare").setLevel(Level.ALL);
}
public static final String PACKAGE_NAME = "com.joelapenna.foursquared";
public static final String INTENT_ACTION_LOGGED_OUT = "com.joelapenna.foursquared.intent.action.LOGGED_OUT";
public static final String INTENT_ACTION_LOGGED_IN = "com.joelapenna.foursquared.intent.action.LOGGED_IN";
public static final String EXTRA_VENUE_ID = "com.joelapenna.foursquared.VENUE_ID";
private String mVersion = null;
private TaskHandler mTaskHandler;
private HandlerThread mTaskThread;
private SharedPreferences mPrefs;
private RemoteResourceManager mRemoteResourceManager;
private Foursquare mFoursquare;
private BestLocationListener mBestLocationListener = new BestLocationListener();
@Override
public void onCreate() {
Log.i(TAG, "Using Debug Server:\t" + FoursquaredSettings.USE_DEBUG_SERVER);
Log.i(TAG, "Using Dumpcatcher:\t" + FoursquaredSettings.USE_DUMPCATCHER);
Log.i(TAG, "Using Debug Log:\t" + DEBUG);
// Get a version number for the app.
try {
PackageManager pm = getPackageManager();
PackageInfo pi = pm.getPackageInfo(PACKAGE_NAME, 0);
mVersion = PACKAGE_NAME + ":" + String.valueOf(pi.versionCode);
} catch (NameNotFoundException e) {
if (DEBUG) Log.d(TAG, "NameNotFoundException", e);
throw new RuntimeException(e);
}
// Setup Prefs (to load dumpcatcher)
mPrefs = PreferenceManager.getDefaultSharedPreferences(this);
// Setup Dumpcatcher
if (FoursquaredSettings.USE_DUMPCATCHER) {
Resources resources = getResources();
new DumpcatcherHelper(Preferences.createUniqueId(mPrefs), resources);
}
// Sometimes we want the application to do some work on behalf of the
// Activity. Lets do that
// asynchronously.
mTaskThread = new HandlerThread(TAG + "-AsyncThread");
mTaskThread.start();
mTaskHandler = new TaskHandler(mTaskThread.getLooper());
// Set up storage cache.
loadResourceManagers();
// Catch sdcard state changes
new MediaCardStateBroadcastReceiver().register();
// Catch logins or logouts.
new LoggedInOutBroadcastReceiver().register();
// Log into Foursquare, if we can.
loadFoursquare();
}
public boolean isReady() {
return getFoursquare().hasLoginAndPassword() && !TextUtils.isEmpty(getUserId());
}
public Foursquare getFoursquare() {
return mFoursquare;
}
public String getUserId() {
return Preferences.getUserId(mPrefs);
}
public String getVersion() {
if (mVersion != null) {
return mVersion;
} else {
return "";
}
}
public RemoteResourceManager getRemoteResourceManager() {
return mRemoteResourceManager;
}
public BestLocationListener requestLocationUpdates(boolean gps) {
mBestLocationListener.register(
(LocationManager) getSystemService(Context.LOCATION_SERVICE), gps);
return mBestLocationListener;
}
public BestLocationListener requestLocationUpdates(Observer observer) {
mBestLocationListener.addObserver(observer);
mBestLocationListener.register(
(LocationManager) getSystemService(Context.LOCATION_SERVICE), true);
return mBestLocationListener;
}
public void removeLocationUpdates() {
mBestLocationListener
.unregister((LocationManager) getSystemService(Context.LOCATION_SERVICE));
}
public void removeLocationUpdates(Observer observer) {
mBestLocationListener.deleteObserver(observer);
this.removeLocationUpdates();
}
public Location getLastKnownLocation() throws LocationException {
Location location = mBestLocationListener.getLastKnownLocation();
if (location == null) {
throw new LocationException();
}
return location;
}
public void requestStartService() {
mTaskHandler.sendMessage(
mTaskHandler.obtainMessage(TaskHandler.MESSAGE_START_SERVICE));
}
public void requestUpdateUser() {
mTaskHandler.sendEmptyMessage(TaskHandler.MESSAGE_UPDATE_USER);
}
private void loadFoursquare() {
// Try logging in and setting up foursquare oauth, then user
// credentials.
if (FoursquaredSettings.USE_DEBUG_SERVER) {
mFoursquare = new Foursquare(Foursquare.createHttpApi("10.0.2.2:8080", mVersion, false));
} else {
mFoursquare = new Foursquare(Foursquare.createHttpApi(mVersion, false));
}
if (FoursquaredSettings.DEBUG) Log.d(TAG, "loadCredentials()");
String phoneNumber = mPrefs.getString(Preferences.PREFERENCE_LOGIN, null);
String password = mPrefs.getString(Preferences.PREFERENCE_PASSWORD, null);
mFoursquare.setCredentials(phoneNumber, password);
if (mFoursquare.hasLoginAndPassword()) {
sendBroadcast(new Intent(INTENT_ACTION_LOGGED_IN));
} else {
sendBroadcast(new Intent(INTENT_ACTION_LOGGED_OUT));
}
}
private void loadResourceManagers() {
// We probably don't have SD card access if we get an
// at least have some sort of disk cache so that things don't npe when
// trying to access the
// resource managers.
try {
if (DEBUG) Log.d(TAG, "Attempting to load RemoteResourceManager(cache)");
mRemoteResourceManager = new RemoteResourceManager("cache");
} catch (IllegalStateException e) {
if (DEBUG) Log.d(TAG, "Falling back to NullDiskCache for RemoteResourceManager");
mRemoteResourceManager = new RemoteResourceManager(new NullDiskCache());
}
}
/**
* Set up resource managers on the application depending on SD card state.
*
* @author Joe LaPenna (joe@joelapenna.com)
*/
private class MediaCardStateBroadcastReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (DEBUG)
Log
.d(TAG, "Media state changed, reloading resource managers:"
+ intent.getAction());
if (Intent.ACTION_MEDIA_UNMOUNTED.equals(intent.getAction())) {
getRemoteResourceManager().shutdown();
loadResourceManagers();
} else if (Intent.ACTION_MEDIA_MOUNTED.equals(intent.getAction())) {
loadResourceManagers();
}
}
public void register() {
// Register our media card broadcast receiver so we can
// enable/disable the cache as
// appropriate.
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(Intent.ACTION_MEDIA_UNMOUNTED);
intentFilter.addAction(Intent.ACTION_MEDIA_MOUNTED);
// intentFilter.addAction(Intent.ACTION_MEDIA_REMOVED);
// intentFilter.addAction(Intent.ACTION_MEDIA_SHARED);
// intentFilter.addAction(Intent.ACTION_MEDIA_BAD_REMOVAL);
// intentFilter.addAction(Intent.ACTION_MEDIA_UNMOUNTABLE);
// intentFilter.addAction(Intent.ACTION_MEDIA_NOFS);
// intentFilter.addAction(Intent.ACTION_MEDIA_SCANNER_STARTED);
// intentFilter.addAction(Intent.ACTION_MEDIA_SCANNER_FINISHED);
intentFilter.addDataScheme("file");
registerReceiver(this, intentFilter);
}
}
private class LoggedInOutBroadcastReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (INTENT_ACTION_LOGGED_IN.equals(intent.getAction())) {
requestUpdateUser() {
}
}
public void register() {
// Register our media card broadcast receiver so we can
// enable/disable the cache as
// appropriate.
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(INTENT_ACTION_LOGGED_IN);
intentFilter.addAction(INTENT_ACTION_LOGGED_OUT);
registerReceiver(this, intentFilter);
}
}
private class TaskHandler extends Handler {
private static final int MESSAGE_UPDATE_USER = 1;
private static final int MESSAGE_START_SERVICE = 2;
public TaskHandler(Looper looper) {
super(looper);
}
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
if (DEBUG) Log.d(TAG, "handleMessage: " + msg.what);
switch (msg.what) {
case MESSAGE_UPDATE_USER:
try {
// Update user info
Log.d(TAG, "Updating user.");
// Use location when requesting user information, if we
// have it.
Foursquare.Location location = null;
try {
location = LocationUtils
.createFoursquareLocation(getLastKnownLocation());
} catch (LocationException e) {
// Best effort...
}
User user = getFoursquare().user(null, false, false, location);
Editor editor = mPrefs.edit();
Preferences.storeUser(editor, user);
editor.commit();
if (location == null) {
// Pump the location listener, we don't have a
// location in our listener yet.
Log.d(TAG, "Priming Location from user city.");
Location primeLocation = new Location("foursquare");
// Very inaccurate, right?
primeLocation.setAccuracy(10000);
primeLocation.setTime(System.currentTimeMillis());
mBestLocationListener.updateLocation(primeLocation);
}
} catch (FoursquareError e) {
if (DEBUG) Log.d(TAG, "FoursquareError", e);
// TODO Auto-generated catch block
} catch (FoursquareException e) {
if (DEBUG) Log.d(TAG, "FoursquareException", e);
// TODO Auto-generated catch block
} catch (IOException e) {
if (DEBUG) Log.d(TAG, "IOException", e);
// TODO Auto-generated catch block
}
return;
case MESSAGE_START_SERVICE:
Intent serviceIntent = new Intent(Foursquared.this, FoursquaredService.class);
serviceIntent.setAction(AppWidgetManager.ACTION_APPWIDGET_UPDATE);
startService(serviceIntent);
return;
}
}
}
} |
package br.edu.utfpr.recipes.dao;
import br.edu.utfpr.recipes.entidade.Receita;
import br.edu.utfpr.recipes.entidade.TagReceita;
import java.util.List;
/**
*
* @author mairieliw
*/
public class DaoTagReceita extends DaoGenerics<TagReceita> {
public DaoTagReceita() {
super.clazz = TagReceita.class;
}
public List<TagReceita> buscaPorReceita(Receita receita){
return getsession()
.createQuery(
"SELECT t FROM TagReceita t WHERE t.receita.id = "+receita.getId())
.list();
}
} |
package br.gov.servicos.editor.servicos;
import lombok.SneakyThrows;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.LogCommand;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.*;
import org.eclipse.jgit.merge.MergeStrategy;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.transport.RefSpec;
import org.jsoup.nodes.Document;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.core.userdetails.User;
import org.springframework.stereotype.Component;
import java.io.*;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import static java.lang.String.format;
import static java.nio.charset.Charset.defaultCharset;
import static java.util.Optional.*;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static lombok.AccessLevel.PRIVATE;
import static org.eclipse.jgit.api.CreateBranchCommand.SetupUpstreamMode.NOTRACK;
import static org.eclipse.jgit.lib.Constants.*;
@Slf4j
@Component
@FieldDefaults(level = PRIVATE, makeFinal = true)
public class Cartas {

    // Local clone of the service-charters git repository; all reads and writes go through it.
    File repositorioCartasLocal;
    // Feature flag (${flags.git.push}): when false, commits stay local and are never pushed.
    boolean fazerPush;

    @Autowired
    public Cartas(File repositorioCartasLocal, @Value("${flags.git.push}") boolean fazerPush) {
        this.repositorioCartasLocal = repositorioCartasLocal;
        this.fazerPush = fazerPush;
    }

    // Reads the v2 (XML) content of a service, from its branch when one exists.
    @SneakyThrows
    public Optional<String> conteudoServicoV2(String id) {
        return conteudoServico(id, leitorDeConteudo(id, "v2"));
    }

    // Reads the v3 content of a service, from its branch when one exists.
    @SneakyThrows
    public Optional<String> conteudoServicoV3(String id) {
        return conteudoServico(id, leitorDeConteudo(id, "v3"));
    }

    // Runs the given reader with the service's branch checked out.
    public Optional<String> conteudoServico(String id, Supplier<Optional<String>> leitor) {
        return executaNoBranchDoServico(id, leitor);
    }

    // Returns a deferred reader for the service file of the given version;
    // yields empty() when the file does not exist on the current branch.
    public Supplier<Optional<String>> leitorDeConteudo(String id, String versao) {
        return () -> {
            File arquivo = caminhoRelativo(id, versao).toFile();
            if (arquivo.exists()) {
                log.info("Arquivo {} encontrado", arquivo);
                return ler(arquivo);
            }
            log.info("Arquivo {} não encontrado", arquivo);
            return empty();
        };
    }

    public Optional<Metadados> ultimaRevisaoV2(String id) {
        return ultimaRevisao(id, "v2");
    }

    public Optional<Metadados> ultimaRevisaoV3(String id) {
        return ultimaRevisao(id, "v3");
    }

    // Latest revision metadata for a service: the tip of its branch when one
    // exists, otherwise the last commit on master that touched its file.
    private Optional<Metadados> ultimaRevisao(final String id, final String versao) {
        return comRepositorioAberto(new Function<Git, Optional<Metadados>>() {
            @Override
            @SneakyThrows
            public Optional<Metadados> apply(Git git) {
                LogCommand revs;
                Ref branchRef = git.getRepository().getRef(R_HEADS + id);
                if (branchRef != null) {
                    // we have a branch for the service
                    revs = git.log().add(branchRef.getObjectId());
                } else {
                    // take the latest commit on master
                    // (outer call converts the absolute Path into a repo-relative path string)
                    revs = git.log().addPath(caminhoRelativo(caminhoRelativo(id, versao)));
                }
                Iterator<RevCommit> commits = revs.setMaxCount(1).call().iterator();
                if (commits.hasNext()) {
                    RevCommit commit = commits.next();
                    return of(new Metadados()
                            .withRevisao(commit.getId().getName())
                            .withAutor(commit.getAuthorIdent().getName())
                            .withHorario(commit.getAuthorIdent().getWhen())
                    );
                }
                return empty();
            }
        });
    }

    // Reads the whole file as a single string joined with '\n', in the platform charset.
    @SneakyThrows
    private Optional<String> ler(File arquivo) {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(arquivo), defaultCharset()))) {
            return of(reader.lines().collect(joining("\n")));
        }
    }

    // Saves the v2 (XML Document) content of a service on its own branch and pushes it.
    // Note: push happens in 'finally', so it is attempted even when the save fails.
    @SneakyThrows
    public void salvarServicoV2(String id, Document doc, User usuario) {
        comRepositorioAberto(git -> {
            pull(git);
            try {
                return executaNoBranchDoServico(id, () -> {
                    Path caminho = caminhoRelativo(id, "v2");
                    Path dir = caminho.getParent();
                    if (dir.toFile().mkdirs()) {
                        log.debug("Diretório {} não existia e foi criado", dir);
                    } else {
                        log.debug("Diretório {} já existia e não precisou ser criado", dir);
                    }
                    String mensagem = format("%s '%s'", caminho.toFile().exists() ? "Altera" : "Cria", id);
                    escreve(doc, caminho);
                    add(git, caminho);
                    commit(git, mensagem, usuario, caminho);
                    return null;
                });
            } finally {
                push(git, id);
            }
        });
    }

    // Same as salvarServicoV2 but for raw v3 string content.
    @SneakyThrows
    public void salvarServicoV3(String id, String doc, User usuario) {
        comRepositorioAberto(git -> {
            pull(git);
            try {
                return executaNoBranchDoServico(id, () -> {
                    Path caminho = caminhoRelativo(id, "v3");
                    Path dir = caminho.getParent();
                    if (dir.toFile().mkdirs()) {
                        log.debug("Diretório {} não existia e foi criado", dir);
                    } else {
                        log.debug("Diretório {} já existia e não precisou ser criado", dir);
                    }
                    String mensagem = format("%s '%s'", caminho.toFile().exists() ? "Altera" : "Cria", id);
                    escreveV3(doc, caminho);
                    add(git, caminho);
                    commit(git, mensagem, usuario, caminho);
                    return null;
                });
            } finally {
                push(git, id);
            }
        });
    }

    // Pushes the service branch to the default remote, unless pushing is
    // disabled by the flag or the service is the placeholder id "novo".
    @SneakyThrows
    private void push(Git git, String id) {
        log.info("git push: {} ({})", git.getRepository().getBranch(), git.getRepository().getRepositoryState());
        if (fazerPush && !id.equals("novo")) {
            git.push()
                    .setRemote(DEFAULT_REMOTE_NAME)
                    .setRefSpecs(new RefSpec(id + ":" + id))
                    .setProgressMonitor(new TextProgressMonitor())
                    .call();
        } else {
            log.info("Envio de alterações ao Github desligado (FLAGS_GIT_PUSH=false)");
        }
    }

    // Rebase-pull with THEIRS strategy: remote wins on conflicts.
    @SneakyThrows
    private void pull(Git git) {
        log.info("git pull: {} ({})", git.getRepository().getBranch(), git.getRepository().getRepositoryState());
        git.pull()
                .setRebase(true)
                .setStrategy(MergeStrategy.THEIRS)
                .setProgressMonitor(new TextProgressMonitor())
                .call();
    }

    // Commits only the given path, authored by the logged-in editor user.
    // An empty commit (no changes) is detected via JGit's internal message
    // text and silently skipped; any other internal error is rethrown.
    @SneakyThrows
    private void commit(Git git, String mensagem, User usuario, Path caminho) {
        PersonIdent ident = new PersonIdent(usuario.getUsername(), "servicos@planejamento.gov.br");
        log.debug("git commit: {} ({}): '{}', {}, {}",
                git.getRepository().getBranch(),
                git.getRepository().getRepositoryState(),
                mensagem,
                ident,
                caminho
        );
        try {
            git.commit()
                    .setMessage(mensagem)
                    .setCommitter(ident)
                    .setAuthor(ident)
                    .setOnly(caminhoRelativo(caminho))
                    .call();
        } catch(JGitInternalException e) {
            // NOTE(review): comparing against JGitText.get().emptyCommit couples
            // this to a locale-dependent internal JGit string — verify on upgrade.
            if(e.getMessage().equals(JGitText.get().emptyCommit)) {
                log.info("{} não sofreu alterações", caminho);
            } else {
                throw e;
            };
        }
    }

    // Stages a single path (converted to a repo-relative pattern).
    @SneakyThrows
    private void add(Git git, Path path) {
        String pattern = caminhoRelativo(path);
        log.debug("git add: {} ({})", git.getRepository().getBranch(), git.getRepository().getRepositoryState(), pattern);
        git.add()
                .addFilepattern(pattern)
                .call();
    }

    // Converts an absolute path into a path relative to the repository root.
    private String caminhoRelativo(Path path) {
        return repositorioCartasLocal.toPath().relativize(path).toString();
    }

    // Opens the local repository, runs fn, and always closes the repo.
    // The class-wide lock serializes all git operations in this JVM.
    @SneakyThrows
    private <T> T comRepositorioAberto(Function<Git, T> fn) {
        try (Git git = Git.open(repositorioCartasLocal)) {
            synchronized (Cartas.class) {
                return fn.apply(git);
            }
        }
    }

    // Checks out the service's branch (creating it from master if needed),
    // runs the supplier, and always returns to master afterwards.
    @SneakyThrows
    private <T> T executaNoBranchDoServico(String id, Supplier<T> supplier) {
        return comRepositorioAberto(git -> {
            checkout(git, id);
            try {
                return supplier.get();
            } finally {
                checkoutMaster(git);
            }
        });
    }

    @SneakyThrows
    private void checkoutMaster(Git git) {
        log.debug("git checkout master: {} ({})", git.getRepository().getBranch(), git.getRepository().getRepositoryState());
        git.checkout().setName(MASTER).call();
    }

    // Checks out branch `id`, branching off master (no upstream tracking)
    // when the branch does not exist yet.
    @SneakyThrows
    private void checkout(Git git, String id) {
        log.debug("git checkout: {} ({})", git.getRepository().getBranch(), git.getRepository().getRepositoryState(), id);
        git.checkout()
                .setName(id)
                .setStartPoint(R_HEADS + MASTER)
                .setUpstreamMode(NOTRACK)
                .setCreateBranch(!branchExiste(git, id))
                .call();
    }

    @SneakyThrows
    private boolean branchExiste(Git git, String id) {
        boolean resultado = git
                .branchList()
                .call()
                .stream()
                .anyMatch(b -> b.getName().equals(R_HEADS + id));
        log.debug("git branch {} já existe? {}", id, resultado);
        return resultado;
    }

    // Absolute path of the service file: <repo>/cartas-servico/<versao>/servicos/<id>.xml
    private Path caminhoRelativo(String id, String versao) {
        return Paths.get(repositorioCartasLocal.getAbsolutePath(), "cartas-servico", versao, "servicos", id + ".xml");
    }

    // Writes the XML Document to the file as UTF-8.
    @SneakyThrows
    private void escreve(Document document, Path arquivo) {
        try (Writer writer = new OutputStreamWriter(new FileOutputStream(arquivo.toFile()), "UTF-8")) {
            writer.write(document.toString());
        }
        log.debug("Arquivo '{}' modificado", arquivo.getFileName());
    }

    // Writes the raw v3 string to the file as UTF-8.
    @SneakyThrows
    private void escreveV3(String document, Path arquivo) {
        try (Writer writer = new OutputStreamWriter(new FileOutputStream(arquivo.toFile()), "UTF-8")) {
            writer.write(document);
        }
        log.debug("Arquivo '{}' modificado", arquivo.getFileName());
    }

    // Most recent reflog entry for the given ref, if a reflog exists.
    @SneakyThrows
    private Optional<ReflogEntry> reflogMaisRecente(Git git, String id) {
        return ofNullable(git.getRepository()
                .getReflogReader(id))
                .map(new Function<ReflogReader, ReflogEntry>() {
                    @Override
                    @SneakyThrows
                    public ReflogEntry apply(ReflogReader reflogReader) {
                        return reflogReader.getLastEntry();
                    }
                });
    }

    // Lists the names of all v2 service XML files.
    // NOTE(review): File.list() returns null when the directory is missing or
    // unreadable, which would NPE here — confirm the directory always exists.
    public Iterable<String> listar() {
        FilenameFilter filter = (x, name) -> name.endsWith(".xml");
        File v2 = Paths.get(repositorioCartasLocal.getAbsolutePath(), "cartas-servico", "v2", "servicos").toFile();
        return Stream.of(v2.list(filter)).collect(toList());
    }
}
package br.ufpe.nti.controller;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import br.ufpe.nti.model.Clock;
@RestController
public class ClockController {
@RequestMapping(value = "/clock", method = RequestMethod.GET)
public ResponseEntity<String> GetAngleRequest() {
Clock clock = new Clock();
double angle = computeAngle(clock.getTime());
return getClockResponse(clock, angle);
}
@RequestMapping(value = "/clock", method = RequestMethod.POST)
public ResponseEntity<String> PostAngleRequest(@RequestBody String body) {
Clock clock = new Clock();
double angle = 0.0;
try {
JSONObject jsonBody = new JSONObject(body);
LocalTime lt = LocalTime.parse(jsonBody.getString("time"));
clock.setTime(lt);
angle = computeAngle(lt);
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return getClockResponse(clock, angle);
}
/**
* @param clock
* @param angle
* @return complete HTTP response
*/
private ResponseEntity<String> getClockResponse(Clock clock, double angle) {
HttpHeaders header = new HttpHeaders();
header.setContentType(MediaType.APPLICATION_JSON);
DateFormat createdAtFormatter = new SimpleDateFormat("YYYY-MM-DD HH:mm:ss");
JSONObject body = new JSONObject();
try {
body.put("id", "null");
body.put("time", clock.getTime().format(DateTimeFormatter.ofPattern("HH:mm")));
body.put("createdAt", createdAtFormatter.format(clock.getCreatedAt()));
body.put("angle", angle);
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return new ResponseEntity<String>(body.toString(), header, HttpStatus.OK);
}
/**
* @param time
* @return angle between clock hands
*/
private double computeAngle(LocalTime time) {
double angle = 0.0;
int hour = time.getHour();
int minute = time.getMinute();
hour = hour > 12 ? hour - 12 : hour;
double dh = (60.0*hour + minute)/2.0;
double dm = 6.0*minute;
//angle = Math.abs(0.5*(60*(hour-11)*minute));
angle = Math.abs(dh - dm);
angle = angle > 180.0 ? 360.0 - angle : angle;
return angle;
}
} |
package ca.concordia.cssanalyser.io;
import java.util.ArrayList;
import java.util.List;
/**
 * Builds the header line and the row format string for CSV-style output with
 * a configurable column separator (defaults to "|").
 */
public class CSVColumns {

    private String separator = "|";
    private final List<String> columns;

    /**
     * @param columns initial column names, in output order
     */
    public CSVColumns(String... columns) {
        List<String> initial = new ArrayList<>();
        for (int i = 0; i < columns.length; i++) {
            initial.add(columns[i]);
        }
        this.columns = initial;
    }

    public void setSeparator(String separator) {
        this.separator = separator;
    }

    public String getSeparator() {
        return this.separator;
    }

    /** Appends a column after the existing ones. */
    public void addColumn(String columnName) {
        this.columns.add(columnName);
    }

    /**
     * @param addLineSeparator whether to terminate the header with a line separator
     * @return the column names joined by the current separator
     */
    public String getHeader(boolean addLineSeparator) {
        StringBuilder header = new StringBuilder();
        boolean first = true;
        for (String column : columns) {
            if (!first) {
                header.append(this.separator);
            }
            header.append(column);
            first = false;
        }
        // Matches the loop-based variant: no trailing separator on empty column lists.
        if (addLineSeparator && !columns.isEmpty()) {
            header.append(System.lineSeparator());
        }
        return header.toString();
    }

    /**
     * @param addLineSeparator whether to terminate the format with a line separator
     * @return a String.format pattern with one "%s" per column
     */
    public String getRowFormat(boolean addLineSeparator) {
        StringBuilder format = new StringBuilder();
        boolean first = true;
        for (int i = 0; i < columns.size(); i++) {
            if (!first) {
                format.append(separator);
            }
            format.append("%s");
            first = false;
        }
        if (addLineSeparator && !columns.isEmpty()) {
            format.append(System.lineSeparator());
        }
        return format.toString();
    }
}
package org.pentaho.di.blackbox;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.Log4jStringAppender;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.i18n.GlobalMessages;
import org.pentaho.di.job.JobEntryLoader;
import org.pentaho.di.trans.StepLoader;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
public class BlackBoxTests extends TestCase {
protected int failures = 0;
protected File currentFile = null;
/** Entry point: runs every black-box test found under testfiles/blackbox/tests. */
public void testBlackBox() {
    // Comparisons against expected log files assume English messages.
    GlobalMessages.setLocale( new Locale("en-US") );

    // Only the tests folder is scanned; the output folder holds no test cases.
    File testRoot = new File("testfiles/blackbox/tests");
    assertTrue( testRoot.exists() );
    assertTrue( testRoot.isDirectory() );

    processDirectory( testRoot );

    // Individual failures were accumulated instead of aborting; report them here.
    assertEquals( 0, failures );
}
/** Records one failure but lets the remaining black-box tests keep running. */
protected void addFailure( String message ) {
    System.err.println("failure: "+message);
    failures = failures + 1;
}
/**
 * Recursively walks a test directory: sub-directories first, then every
 * transformation (.ktr) found at this level is run against its expected
 * output file. Jobs (.kjb) are only reported, not executed yet.
 *
 * @param dir directory to scan
 */
protected void processDirectory( File dir ) {
    File files[] = dir.listFiles();
    if( files == null ) {
        // FIX: listFiles() returns null for non-directories and on I/O errors,
        // which previously caused a NullPointerException here.
        addFailure( "Could not list directory: " + getPath( dir ) );
        return;
    }
    // recursively process every folder in testfiles/blackbox/tests
    for( File entry : files ) {
        if( entry.isDirectory() ) {
            processDirectory( entry );
        }
    }
    // now process any transformations or jobs we find
    for( File entry : files ) {
        if( entry.isFile() ) {
            String name = entry.getName();
            // Skip generated "-tmp.ktr" files from earlier runs.
            if( name.endsWith(".ktr") && !name.endsWith("-tmp.ktr") ) {
                // we found a transformation; see if we can find an output file
                File expected = getExpectedOutputFile( dir, name.substring(0, name.length()-4) );
                try {
                    runTrans( entry, expected );
                } catch ( AssertionFailedError failure ) {
                    // trap so that we can continue with the other black box tests
                    System.err.println( failure.getMessage() );
                }
            }
            else if( name.endsWith(".kjb") ) {
                // we found a job; jobs are not executed yet
                System.out.println(name);
            }
        }
    }
}
/**
 * Runs one transformation and compares its output against the expected file.
 * If the run or the compare fails, the captured log is written next to the
 * transformation and compared against the expected file instead — a matching
 * log means the failure itself was the expected outcome, and any failures
 * accumulated during this run are rolled back.
 *
 * @param transFile the .ktr transformation to run
 * @param expected  the expected-output file found by getExpectedOutputFile()
 */
protected void runTrans( File transFile, File expected ) {
    System.out.println("Running: "+getPath(transFile));
    LogWriter log;
    log=LogWriter.getInstance( LogWriter.LOG_LEVEL_ERROR );
    // Capture all log output in memory so it can be compared to an expected log.
    Log4jStringAppender stringAppender = LogWriter.createStringAppender();
    log.addAppender(stringAppender);
    boolean ok = false;
    // Remember the failure count so it can be restored when failure was expected.
    int failsIn = failures;
    // create a path to the expected output
    String actualFile = expected.getAbsolutePath();
    actualFile = actualFile.replaceFirst(".expected.", ".actual.");
    try {
        currentFile = transFile;
        if( !transFile.exists() )
        {
            log.logError( "BlackBoxTest", "Transformation does not exist: "+ getPath( transFile ) );
            addFailure( "Transformation does not exist: "+ getPath( transFile ) );
        }
        if( !expected.exists() )
        {
            // NOTE(review): fail() throws AssertionFailedError, so the
            // addFailure() below never executes — likely the two lines were
            // meant in the opposite order (as in the transFile branch above).
            fail( "Expected output file does not exist: "+ getPath( expected ) );
            addFailure("Expected output file does not exist: "+ getPath( expected ));
        }
        File actual = new File( actualFile );
        try {
            ok = runTrans( transFile.getAbsolutePath(), log );
            if( ok ) {
                fileCompare( expected, actual, log );
            }
        } catch (KettleException ke) {
            // this will get logged below
        } catch ( AssertionFailedError failure ) {
            // we're going to trap these so that we can continue with the other black box tests
        } catch (Throwable t) {
            // swallow: the log comparison below decides whether this failure was expected
        }
    } catch ( AssertionFailedError failure ) {
        // we're going to trap these so that we can continue with the other black box tests
        System.err.println( failure.getMessage() );
    }
    log.removeAppender(stringAppender);
    if( !ok ) {
        // The run failed: write the captured log and compare it to the expected
        // file — some tests intentionally exercise error paths.
        String logStr = stringAppender.toString();
        String tmpFileName = transFile.getAbsolutePath().substring(0, transFile.getAbsolutePath().length()-4)+"-log.txt";
        File logFile = new File( tmpFileName );
        writeLog( logFile, logStr );
        try {
            if( fileCompare( expected, logFile, log ) ) {
                // we were expecting this to fail, reset any accumulated failures
                failures = failsIn;
            }
        } catch (IOException e) {
            addFailure("Could not compare log files: " + getPath( logFile ) + "" +e.getMessage());
            fail( "Could not compare log files: " + getPath( logFile ) + "" +e.getMessage() );
        }
    }
}
/**
 * Writes a captured log string to a file, normalizing it so comparisons with
 * an expected log are stable: strips the leading date/time and version/build
 * fields from every line, drops stack-trace lines, and forces Windows (CRLF)
 * line endings. Lines without a " : " separator are dropped entirely.
 *
 * @param logFile destination file
 * @param logStr  raw captured log text
 */
public void writeLog( File logFile, String logStr )
{
    try {
        // document encoding will be important here
        OutputStream stream = new FileOutputStream( logFile );
        // parse the log file and remove things that will make comparisons hard
        int length = logStr.length();
        int pos = 0;
        String line;
        // Split on CRLF first, then LF, handling the final unterminated line.
        while( pos < length ) {
            line = null;
            int eol = logStr.indexOf("\r\n", pos);
            if( eol != -1 ) {
                line = logStr.substring(pos, eol);
                pos = eol+2;
            } else {
                eol = logStr.indexOf("\n", pos);
                if( eol != -1 ) {
                    line = logStr.substring(pos, eol);
                    pos = eol+1;
                } else {
                    // this must be the last line
                    line = logStr.substring(pos);
                    pos = length;
                }
            }
            if( line != null ) {
                // remove the date/time
                // NOTE(review): assumes a fixed-width 22-character timestamp
                // prefix on every line — confirm against the log format.
                line = line.substring(22);
                // find the subject
                String subject = "";
                int idx = line.indexOf(" - ");
                if( idx != -1 ) {
                    subject = line.substring(0, idx);
                }
                // skip the version and build numbers
                idx = line.indexOf(" : ", idx );
                if( idx != -1 ) {
                    String details = line.substring(idx+3);
                    // filter out stacktraces
                    if( details.startsWith( "\tat " ) ) {
                        continue;
                    }
                    if( details.startsWith( "\t... " ) ) {
                        continue;
                    }
                    // force the windows EOL characters
                    stream.write( (subject+" : "+details+"\r\n").getBytes("UTF-8") );
                }
            }
        }
        // NOTE(review): the stream is not closed when an exception is thrown
        // above (no finally), leaking the file handle on the error path.
        stream.close();
    } catch (Exception e)
    {
        addFailure("Could not write to log file: "+logFile.getAbsolutePath());
    }
}
/** Convenience overload: normalizes a File's absolute path for messages. */
public String getPath( File file ) {
    return getPath( file.getAbsolutePath() );
}
/**
 * Trims everything before the "testfiles" folder so messages are
 * machine-independent; paths without a testfiles segment pass through.
 */
public String getPath( String filepath ) {
    // Accept both Unix and Windows separators around "testfiles".
    int idx = filepath.indexOf( "/testfiles/" );
    if( idx == -1 ) {
        idx = filepath.indexOf( "\\testfiles\\" );
    }
    return idx == -1 ? filepath : filepath.substring( idx + 1 );
}
public boolean fileCompare( File expected, File actual, LogWriter log ) throws IOException {
int failsIn = failures;
InputStream expectedStream = new FileInputStream( expected );
InputStream actualStream = new FileInputStream( actual );
// compare the two files
int goldPos = 0;
int tmpPos = 0;
byte goldBuffer[] = new byte[2048];
byte tmpBuffer[] = new byte[2048];
try {
// read the start of both files
goldPos = expectedStream.read( goldBuffer );
tmpPos = actualStream.read( tmpBuffer );
// assume lock-step
// if( goldPos != tmpPos )
// addFailure("Test file pointers are out of step : "+getPath( actual ));
// assertEquals( "Test file pointers are out of step : "+getPath( actual ), goldPos, tmpPos );
int lineno = 1;
int charno = 0;
int indexGold = 0;
int indexTmp = 0;
int totalGold = goldPos;
int totalTmp = tmpPos;
while( goldPos > 0 && tmpPos > 0 ) {
if( indexGold == goldPos ) {
goldPos = expectedStream.read( goldBuffer );
if( goldPos > 0 ) {
totalGold += goldPos;
}
indexGold = 0;
}
if( indexTmp == tmpPos ) {
tmpPos = actualStream.read( tmpBuffer );
if( tmpPos > 0 ) {
totalTmp += tmpPos;
}
indexTmp = 0;
}
if( goldPos < 0 ) {
break;
}
if( tmpPos < 0 ) {
break;
}
charno++;
if( goldBuffer[indexGold] != tmpBuffer[indexTmp] )
{
int start = indexTmp > 10 ? indexTmp-10 : 0;
int end = indexTmp < tmpBuffer.length-11 ? indexTmp+10 : tmpBuffer.length-1;
int offset = indexTmp-start;
byte bytes[] = new byte[offset];
System.arraycopy(tmpBuffer, start, bytes, 0, bytes.length);
String frag = "-->"+new String(bytes);
frag += "[" + (char) tmpBuffer[indexTmp] + "]";
bytes = new byte[end-start-offset];
System.arraycopy(tmpBuffer, start+offset+1, bytes, 0, bytes.length);
frag += new String(bytes);
frag += "<
String exp = goldBuffer[indexGold] < 32 ? "\\"+ (char) (goldBuffer[indexGold]-'a') : ""+ (char) goldBuffer[indexGold] ;
String act = tmpBuffer[indexTmp] < 32 ? "\\"+ (char) (tmpBuffer[indexTmp]-'a') : ""+ (char) tmpBuffer[indexTmp] ;
String message = "Test files ("+getPath(actual)+") differ at: line " +lineno + " char " +charno + " expecting '"+ exp + "' but found '" + act + "' - "+frag;
addFailure(message);
log.logError("BlackBoxTest", message);
fail( message );
}
else if( tmpBuffer[indexTmp] == '\n' )
{
lineno++;
charno=0;
}
indexGold++;
indexTmp++;
}
if( totalGold != totalTmp ) {
addFailure( "Comparison files are not same length" );
}
} catch (Exception e) {
addFailure("Error trying to compare output files: " + getPath(actual));
e.printStackTrace();
fail( "Error trying to compare output files: " + getPath(actual) );
}
return failsIn == failures;
}
/**
 * Tries to find an output file to match a transformation or job file.
 * Extensions are probed in a fixed order: .expected.txt, .expected.csv,
 * .expected.xml.
 *
 * @param dir The directory to look in
 * @param baseName Name of the transformation or the job without the extension
 * @return the first existing expected-output file, or null when none exists
 */
protected File getExpectedOutputFile( File dir, String baseName ) {
    String[] extensions = { ".expected.txt", ".expected.csv", ".expected.xml" };
    for ( String extension : extensions ) {
        File candidate = new File( dir, baseName + extension );
        if ( candidate.exists() ) {
            return candidate;
        }
    }
    return null;
}
/**
 * Loads a transformation file, verifies its steps and executes it in safe
 * mode. Every problem is recorded through addFailure() and written to the
 * given log.
 *
 * @param fileName path of the .ktr transformation file to run
 * @param log      writer that receives error messages
 * @return true when the transformation executed and finished, false otherwise
 * @throws KettleException propagated from the Kettle engine
 */
public boolean runTrans(String fileName, LogWriter log) throws KettleException
{
    EnvUtil.environmentInit();

    /* Load the step plugins; without them no transformation can run. */
    StepLoader steploader = StepLoader.getInstance();
    if (!steploader.read())
    {
        addFailure("Error loading steps... halting!" + getPath(fileName));
        log.logError("BlackBoxTest", "Error loading steps... halting!" + getPath(fileName));
        return false;
    }

    /* Load the job entry plugins as well. */
    JobEntryLoader jeloader = JobEntryLoader.getInstance();
    if (!jeloader.read())
    {
        addFailure("Error loading job entries & plugins... halting!" + getPath(fileName));
        log.logError("BlackBoxTest", "Error loading job entries & plugins... halting!" + getPath(fileName));
        return false;
    }

    // Parse the transformation; an unparseable file fails the test right away.
    // (The previous version allocated a throw-away `new TransMeta()` that was
    // immediately overwritten and re-checked `trans` for null after the catch
    // block had already returned -- both were dead code and are removed.)
    Trans trans;
    try
    {
        TransMeta transMeta = new TransMeta(fileName);
        trans = new Trans(transMeta);
    }
    catch (Exception e)
    {
        addFailure("Processing has stopped because of an error: " + getPath(fileName));
        log.logError("BlackBoxTest", "Processing has stopped because of an error: " + getPath(fileName), e);
        return false;
    }

    try
    {
        trans.initializeVariablesFrom(null);
        trans.getTransMeta().setInternalKettleVariables(trans);
        // Safe mode enables extra row-level consistency checking while running.
        trans.setSafeModeEnabled(true);

        // See if the transformation checks ok; every error remark is a failure.
        List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();
        trans.getTransMeta().checkSteps(remarks, false, null);
        for( CheckResultInterface remark : remarks ) {
            if( remark.getType() == CheckResultInterface.TYPE_RESULT_ERROR ) {
                // add this to the log
                addFailure("Check error: " + getPath(fileName) + ", "+remark.getErrorCode());
                log.logError("BlackBoxTest", "Check error: " + getPath(fileName) + ", "+remark.getErrorCode() );
            }
        }

        // TODO move this code to a separate test
        /*
        // clone it and convert it back into XML and compare it with the one we started with
        // this tests that the clone and the conversion to and from XML are all consistent
        TransMeta clone = (TransMeta) trans.getTransMeta().clone();
        clone.setName( trans.getTransMeta().getName() );
        clone.setModifiedDate( trans.getTransMeta().getModifiedDate() );
        String xml = clone.getXML();
        String tmpFileName = fileName.substring(0, fileName.length()-4)+"-tmp.ktr";
        File tmpFile = new File( tmpFileName );
        try {
            // document encoding will be important here
            OutputStream stream = new FileOutputStream( tmpFile );
            stream.write( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n".getBytes() );
            stream.write( xml.getBytes("UTF-8") );
            stream.close();
            // now compare the two transformation XML files
            fileCompare( new File(fileName), tmpFile, log );
            // if that succeeded we can remove the tmp file
            tmpFile.delete();
        } catch (Exception e)
        {
            addFailure("Could not write to tmp file: " + getPath(tmpFileName));
            log.logError("BlackBoxTest", "Could not write to tmp file: " + getPath(tmpFileName), e);
        }
        */

        // allocate & run the required sub-threads
        boolean ok = trans.execute(null);
        if (!ok)
        {
            addFailure("Unable to prepare and initialize this transformation: " + getPath(fileName));
            log.logError("BlackBoxTest", "Unable to prepare and initialize this transformation: " + getPath(fileName));
            return false;
        }

        // Block until every step thread is done, then write the "end" log record.
        trans.waitUntilFinished();
        trans.endProcessing("end");
        return true;
    }
    catch(KettleException ke)
    {
        addFailure("Unexpected error occurred: " + getPath(fileName));
        log.logError("BlackBoxTest", "Unexpected error occurred: " + getPath(fileName), ke);
    }
    return false;
}
/**
 * Stand-alone entry point: runs the complete black-box suite exactly once
 * (set up, execute, tear down) and prints any failure to stderr.
 */
public static void main( String args[] ) {
    try {
        BlackBoxTests suite = new BlackBoxTests();
        suite.setUp();
        suite.testBlackBox();
        suite.tearDown();
    } catch (Exception problem) {
        problem.printStackTrace();
    }
}
} |
package ch.fhnw.jobannotations.utils;
import org.apache.log4j.Logger;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
/**
* This class covers recurring file handling tasks.
*
* @author Hoang Tran <hoang.tran@students.fhnw.ch>
* @author Kevin Kirn <kevin.kirn@students.fhnw.ch>
*/
public class FileUtils {
final static Logger LOG = Logger.getLogger(FileUtils.class);
private FileUtils() {
}
/**
* Gets a stream to a file stored in resources of the project.
*
* @param path where file is located
* @return InputStream to file
*/
public static InputStream getResourceInputStream(String path) {
ClassLoader classLoader = FileUtils.class.getClassLoader();
return classLoader.getResourceAsStream(path);
}
/**
* Gets a stream to a file path.
*
* @param path where file is located
* @return InputStream to file
* @throws IOException if file could not be read
*/
public static InputStream getFileAsInputStream(String path) throws IOException {
return Files.newInputStream(Paths.get(path));
}
/**
* Write new data to train file. Duplicates will be deleted.
*
* @param data to learn
*/
public static void addDataToTrainFile(String filename, String data) {
if(StringUtils.isEmpty(data))
return;
try {
// determine if quarantine is enabled
boolean quarantine = ConfigurationUtil.get("configuration.ml.quarantine").equalsIgnoreCase("true");
if (quarantine) {
filename += ".quarantine";
// make sure quarantine exists
if (!Files.exists(Paths.get(filename))) {
Files.createFile(Paths.get(filename));
}
}
// make sure duplicates get ignored
HashSet<String> newContents = new HashSet<>();
// reader to train or train.quarantine file
BufferedReader br = new BufferedReader(new FileReader(filename));
// read first line to process
String line = br.readLine();
// read content into set
while (line != null) {
newContents.add(line);
line = br.readLine();
}
br.close();
PrintWriter pw = new PrintWriter(filename);
// persist content to file
for (String entry : newContents) {
pw.println(entry);
}
// print new data line
pw.println(data);
pw.flush();
pw.close();
LOG.debug("Saved found data in file " + filename);
} catch (IOException e) {
LOG.error("Something went wrong while learning new data.", e);
}
}
/**
* Gets configuration of external StanfordCoreNLP library
*
* @return Properties read from configuration file or null if reading property file failed
*/
public static Properties getStanfordCoreNLPGermanConfiguration() {
try {
final String STANFORD_CONFIGURATION = ConfigurationUtil.get("external.StanfordCoreNLP.configuration");
BufferedReader reader = new BufferedReader(new InputStreamReader(FileUtils.getFileAsInputStream(STANFORD_CONFIGURATION), "UTF8"));
Properties props = new Properties();
props.load(reader);
return props;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
/**
* Gets a List containing any line the file contains.
*
* @param filename to parse
* @return List of lines or null if reading file failed
*/
public static List<String> getFileContentAsList(String filename) {
if (StringUtils.isEmpty(filename))
throw new IllegalArgumentException("Filename must not be empty!");
try {
// initialize reader to file
BufferedReader r = new BufferedReader(new InputStreamReader(getFileAsInputStream(filename)));
// prepare result list
List<String> result = new ArrayList<>();
String line;
while ((line = r.readLine()) != null) {
result.add(line);
}
return result;
} catch (IOException e) {
LOG.error("Something went wrong while reading the following file: " + filename);
}
return null;
}
} |
package ch.uzh.csg.reimbursement.model;
import static ch.uzh.csg.reimbursement.model.ExpenseState.ASSIGNED_TO_FINANCE_ADMIN;
import static ch.uzh.csg.reimbursement.model.ExpenseState.ASSIGNED_TO_MANAGER;
import static ch.uzh.csg.reimbursement.model.ExpenseState.DRAFT;
import static ch.uzh.csg.reimbursement.model.ExpenseState.PRINTED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.REJECTED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.SIGNED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_BE_ASSIGNED;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_FINANCE_ADMIN;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_MANAGER;
import static ch.uzh.csg.reimbursement.model.ExpenseState.TO_SIGN_BY_USER;
import static ch.uzh.csg.reimbursement.model.Role.DEPARTMENT_MANAGER;
import static java.util.UUID.randomUUID;
import static javax.persistence.CascadeType.ALL;
import static javax.persistence.EnumType.STRING;
import static javax.persistence.FetchType.EAGER;
import static javax.persistence.GenerationType.IDENTITY;
import java.io.IOException;
import java.util.Date;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.Transient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import ch.uzh.csg.reimbursement.model.exception.ServiceException;
import ch.uzh.csg.reimbursement.model.exception.UnexpectedStateException;
import ch.uzh.csg.reimbursement.serializer.UserSerializer;
import ch.uzh.csg.reimbursement.view.View;
import lombok.Getter;
import lombok.Setter;
@Entity
@Table(name = "Expense_")
@Transactional
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uid")
public class Expense {
@Transient
private final Logger LOG = LoggerFactory.getLogger(Expense.class);
@Id
@GeneratedValue(strategy = IDENTITY)
private int id;
@JsonView(View.SummaryWithUid.class)
@Getter
@Column(nullable = false, updatable = true, unique = true, name = "uid")
private String uid;
@JsonView(View.DashboardSummary.class)
@JsonSerialize(using = UserSerializer.class)
@Getter
@ManyToOne
@JoinColumn(name = "user_id")
private User user;
@JsonView(View.DashboardSummary.class)
public String getUserUid() {
User user = getUser();
if(user == null) {
return null;
}
return user.getUid();
}
@JsonView(View.DashboardSummary.class)
@Getter
@Column(nullable = false, updatable = true, unique = false, name = "date")
private Date date;
@JsonView(View.DashboardSummary.class)
@Getter
@Enumerated(STRING)
@Column(nullable = true, updatable = true, unique = false, name = "state")
private ExpenseState state;
@JsonView(View.Summary.class)
@JsonSerialize(using = UserSerializer.class)
@Getter
@Setter
@ManyToOne
@JoinColumn(name = "finance_admin_id")
private User financeAdmin;
@JsonView(View.DashboardSummary.class)
public String getFinanceAdminUid() {
User financeAdmin = getFinanceAdmin();
if(financeAdmin == null) {
return null;
}
return financeAdmin.getUid();
}
@JsonView(View.Summary.class)
@JsonSerialize(using = UserSerializer.class)
@Getter
@Setter
@ManyToOne
@JoinColumn(name = "assigned_manager_id")
private User assignedManager;
@JsonView(View.DashboardSummary.class)
public String getAssignedManagerUid() {
User assignedManager = getAssignedManager();
if(assignedManager == null) {
return null;
}
return assignedManager.getUid();
}
@JsonView(View.DashboardSummary.class)
@Getter
@Column(nullable = false, updatable = true, unique = false, name = "accounting")
private String accounting;
@JsonView(View.DashboardSummary.class)
@Column(nullable = true, updatable = true, unique = false, name = "total_amount")
private Double totalAmount = 0.0;
@JsonView(View.Summary.class)
@Getter
@Column(nullable = true, updatable = true, unique = false, name = "comment")
private String rejectComment;
@Getter
@OneToMany(mappedBy = "expense", fetch = EAGER, orphanRemoval = true)
private Set<ExpenseItem> expenseItems;
@OneToOne(cascade = ALL, orphanRemoval = true)
@JoinColumn(name = "document_id")
private Document expensePdf;
@JsonView(View.Summary.class)
@Getter
@Setter
@Column(nullable = false, updatable = true, columnDefinition = "boolean default true", name = "has_digital_signature")
private Boolean hasDigitalSignature = true;
public Expense(User user, User financeAdmin, String accounting) {
this.user = user;
this.date = new Date();
setState(DRAFT);
setFinanceAdmin(financeAdmin);
setAccounting(accounting);
this.uid = randomUUID().toString();
LOG.debug("Expense constructor: Expense created");
}
public void updateExpense() {
this.date = new Date();
setTotalAmount();
LOG.debug("Expense update method: Expense updated");
}
public Double getTotalAmount() {
Double totalAmount = 0.0;
for (ExpenseItem item : getExpenseItems()) {
totalAmount += item.getCalculatedAmount();
}
this.totalAmount = totalAmount;
return totalAmount;
}
public void setTotalAmount() {
Double totalAmount = 0.0;
if (getExpenseItems() != null) {
for (ExpenseItem item : getExpenseItems()) {
totalAmount += item.getCalculatedAmount();
}
}
this.totalAmount = totalAmount;
LOG.debug("Expense setTotalAmount method: Total amount set");
}
public Document setPdf(MultipartFile multipartFile) {
byte[] content = null;
try {
content = multipartFile.getBytes();
expensePdf.updateDocument(multipartFile.getContentType(), multipartFile.getSize(), content);
LOG.debug("The expensePdf has been updated with a signedPdf");
goToNextState();
} catch (IOException e) {
LOG.error("An IOException has been caught while creating a signature.", e);
throw new ServiceException();
}
return expensePdf;
}
public void setPdf(Document document) {
expensePdf = document;
if (!this.hasDigitalSignature) {
goToNextState();
}
LOG.debug("The expensePdf has been updated with a generatedPdf");
}
public Document getExpensePdf() {
return expensePdf;
}
public void goToNextState() {
if (state.equals(DRAFT) || state.equals(REJECTED)) {
if(this.assignedManager.getRoles().contains(DEPARTMENT_MANAGER)) {
setState(TO_BE_ASSIGNED);
} else {
setState(ASSIGNED_TO_MANAGER);
}
} else if (state.equals(ASSIGNED_TO_MANAGER)) {
setState(TO_BE_ASSIGNED);
} else if (state.equals(TO_BE_ASSIGNED)) {
setState(ASSIGNED_TO_FINANCE_ADMIN);
} else if (state.equals(ASSIGNED_TO_FINANCE_ADMIN)) {
setState(TO_SIGN_BY_USER);
} else if (state.equals(TO_SIGN_BY_USER)) {
setState(TO_SIGN_BY_MANAGER);
} else if (state.equals(TO_SIGN_BY_MANAGER)) {
setState(TO_SIGN_BY_FINANCE_ADMIN);
} else if (state.equals(TO_SIGN_BY_FINANCE_ADMIN)) {
setState(SIGNED);
} else if (state.equals(SIGNED)) {
setState(PRINTED);
} else if (state.equals(PRINTED)) {
setState(ASSIGNED_TO_FINANCE_ADMIN);
} else {
LOG.error("Unexpected State");
throw new UnexpectedStateException();
}
LOG.debug("Expense goToNextState method: State set to state: " + this.state);
}
public void setAccounting(String accounting) {
this.accounting = accounting;
updateExpense();
}
public void reject(String comment) {
setState(REJECTED);
rejectComment = comment;
LOG.debug("Expense reject method: Expense rejected");
}
private void setState(ExpenseState state) {
this.state = state;
updateExpense();
}
/*
* The default constructor is needed by Hibernate, but should not be used at
* all.
*/
protected Expense() {
}
} |
package chanjarster.weixin.api;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import chanjarster.weixin.bean.WxXmlMessage;
/**
* <pre>
* handler
*
*
* 1.
* 2. {@link Rule#next()}
* 3. {@link Rule#end()}{@link Rule#next()}
*
*
* WxMessageRouter router = new WxMessageRouter();
* router
* .rule()
* .msgType("MSG_TYPE").event("EVENT").eventKey("EVENT_KEY").content("CONTENT")
* .interceptor(interceptor, ...).handler(handler, ...)
* .end()
* .rule()
* //
* .end()
* ;
*
* // WxXmlMessage
* router.route(message);
*
* </pre>
* @author qianjia
*
*/
public class WxMessageRouter {
private List<Rule> rules = new ArrayList<Rule>();
/**
* Route
* @return
*/
public Rule rule() {
return new Rule(this);
}
/**
*
* @param wxMessage
*/
public void route(WxXmlMessage wxMessage) {
for (Rule rule : rules) {
if (rule.test(wxMessage)) {
rule.service(wxMessage);
if(!rule.reEnter) {
break;
}
}
}
}
public static class Rule {
private final WxMessageRouter routerBuilder;
private String msgType;
private String event;
private String eventKey;
private String content;
private boolean reEnter = false;
private List<WxMessageHandler> handlers = new ArrayList<WxMessageHandler>();
private List<WxMessageInterceptor> interceptors = new ArrayList<WxMessageInterceptor>();
protected Rule(WxMessageRouter routerBuilder) {
this.routerBuilder = routerBuilder;
}
/**
* msgType
* @param msgType
* @return
*/
public Rule msgType(String msgType) {
this.msgType = msgType;
return this;
}
/**
* event
* @param event
* @return
*/
public Rule event(String event) {
this.event = event;
return this;
}
/**
* eventKey
* @param eventKey
* @return
*/
public Rule eventKey(String eventKey) {
this.eventKey = eventKey;
return this;
}
/**
* content
* @param content
* @return
*/
public Rule content(String content) {
this.content = content;
return this;
}
/**
*
* @param interceptor
* @return
*/
public Rule interceptor(WxMessageInterceptor interceptor) {
return interceptor(interceptor, (WxMessageInterceptor[]) null);
}
/**
*
* @param interceptor
* @param otherInterceptors
* @return
*/
public Rule interceptor(WxMessageInterceptor interceptor, WxMessageInterceptor... otherInterceptors) {
this.interceptors.add(interceptor);
if (otherInterceptors != null && otherInterceptors.length > 0) {
for (WxMessageInterceptor i : otherInterceptors) {
this.interceptors.add(i);
}
}
return this;
}
/**
*
* @param handler
* @return
*/
public Rule handler(WxMessageHandler handler) {
return handler(handler, (WxMessageHandler[]) null);
}
/**
*
* @param handler
* @param otherHandlers
* @return
*/
public Rule handler(WxMessageHandler handler, WxMessageHandler... otherHandlers) {
this.handlers.add(handler);
if (otherHandlers != null && otherHandlers.length > 0) {
for (WxMessageHandler i : otherHandlers) {
this.handlers.add(i);
}
}
return this;
}
/**
*
* @return
*/
public WxMessageRouter end() {
this.routerBuilder.rules.add(this);
return this.routerBuilder;
}
/**
*
* @return
*/
public WxMessageRouter next() {
this.reEnter = true;
return end();
}
protected boolean test(WxXmlMessage wxMessage) {
return
(this.msgType == null || this.msgType.equals(wxMessage.getMsgType()))
&&
(this.event == null || this.event.equals(wxMessage.getEvent()))
&&
(this.eventKey == null || this.eventKey.equals(wxMessage.getEventKey()))
&&
(this.content == null || this.content.equals(wxMessage.getContent() == null ? null : wxMessage.getContent().trim()))
;
}
/**
*
* @param wxMessage
* @return true routerfalse router
*/
protected void service(WxXmlMessage wxMessage) {
Map<String, Object> context = new HashMap<String, Object>();
for (WxMessageInterceptor interceptor : this.interceptors) {
if (!interceptor.intercept(wxMessage, context)) {
return;
}
}
// handler
for (WxMessageHandler interceptor : this.handlers) {
interceptor.handle(wxMessage, context);
}
return;
}
}
} |
package com.akiban.ais.protobuf;
import com.akiban.ais.model.AkibanInformationSchema;
import com.akiban.ais.model.CharsetAndCollation;
import com.akiban.ais.model.Column;
import com.akiban.ais.model.DefaultNameGenerator;
import com.akiban.ais.model.Group;
import com.akiban.ais.model.GroupIndex;
import com.akiban.ais.model.GroupTable;
import com.akiban.ais.model.Index;
import com.akiban.ais.model.IndexColumn;
import com.akiban.ais.model.Join;
import com.akiban.ais.model.JoinColumn;
import com.akiban.ais.model.NameGenerator;
import com.akiban.ais.model.Table;
import com.akiban.ais.model.TableIndex;
import com.akiban.ais.model.TableName;
import com.akiban.ais.model.Type;
import com.akiban.ais.model.UserTable;
import com.akiban.server.error.ProtobufReadException;
import com.akiban.util.GrowableByteBuffer;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Descriptors;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class ProtobufReader {
private final AkibanInformationSchema destAIS;
private final AISProtobuf.AkibanInformationSchema.Builder pbAISBuilder = AISProtobuf.AkibanInformationSchema.newBuilder();
private final NameGenerator nameGenerator = new DefaultNameGenerator();
public ProtobufReader() {
this(new AkibanInformationSchema());
}
public ProtobufReader(AkibanInformationSchema destAIS) {
this.destAIS = destAIS;
}
public AkibanInformationSchema getAIS() {
return destAIS;
}
public ProtobufReader loadAIS() {
// AIS has two fields (types, schemas) and both are optional
AISProtobuf.AkibanInformationSchema pbAIS = pbAISBuilder.clone().build();
loadTypes(pbAIS.getTypesList());
loadSchemas(pbAIS.getSchemasList());
return this;
}
public ProtobufReader loadBuffer(GrowableByteBuffer buffer) {
loadFromBuffer(buffer);
return this;
}
public AkibanInformationSchema loadAndGetAIS(GrowableByteBuffer buffer) {
loadBuffer(buffer);
loadAIS();
return getAIS();
}
private void loadFromBuffer(GrowableByteBuffer buffer) {
checkBuffer(buffer);
final int serializedSize = buffer.getInt();
final int initialPos = buffer.position();
final int bufferSize = buffer.limit() - initialPos;
CodedInputStream codedInput = CodedInputStream.newInstance(buffer.array(), buffer.position(), Math.min(serializedSize, bufferSize));
try {
pbAISBuilder.mergeFrom(codedInput);
// Successfully consumed, update byte buffer
buffer.position(initialPos + serializedSize);
} catch(IOException e) {
throw new ProtobufReadException(
AISProtobuf.AkibanInformationSchema.getDescriptor().getFullName(),
String.format("Required size exceeded actual size: %d vs %d", serializedSize, bufferSize)
);
}
}
private void loadTypes(Collection<AISProtobuf.Type> pbTypes) {
for(AISProtobuf.Type pbType : pbTypes) {
hasRequiredFields(pbType);
Type.create(
destAIS,
pbType.getTypeName(),
pbType.getParameters(),
pbType.getFixedSize(),
pbType.getMaxSizeBytes(),
null,
null
);
}
}
private void loadSchemas(Collection<AISProtobuf.Schema> pbSchemas) {
List<List<NewGroupInfo>> allNewGroups = new ArrayList<List<NewGroupInfo>>();
for(AISProtobuf.Schema pbSchema : pbSchemas) {
hasRequiredFields(pbSchema);
List<NewGroupInfo> newGroups = loadGroups(pbSchema.getSchemaName(), pbSchema.getGroupsList());
allNewGroups.add(newGroups);
// Requires no tables, does not load indexes
loadTables(pbSchema.getSchemaName(), pbSchema.getTablesList());
}
// Assume no ordering of schemas or tables, load joins second
for(AISProtobuf.Schema pbSchema : pbSchemas) {
loadTableJoins(pbSchema.getSchemaName(), pbSchema.getTablesList());
}
// Hook up groups, create group tables and indexes after all in place
for(List<NewGroupInfo> newGroups : allNewGroups) {
createGroupTablesAndIndexes(newGroups);
}
}
private List<NewGroupInfo> loadGroups(String schema, Collection<AISProtobuf.Group> pbGroups) {
List<NewGroupInfo> newGroups = new ArrayList<NewGroupInfo>();
for(AISProtobuf.Group pbGroup : pbGroups) {
hasRequiredFields(pbGroup);
String rootTableName = pbGroup.getRootTableName();
Group group = Group.create(destAIS, nameGenerator.generateGroupName(rootTableName));
String treeName = pbGroup.hasTreeName() ? pbGroup.getTreeName() : null;
newGroups.add(new NewGroupInfo(schema, group, pbGroup, treeName));
}
return newGroups;
}
private void createGroupTablesAndIndexes(List<NewGroupInfo> newGroups) {
Set<Integer> currentIDs = new HashSet<Integer>();
// Cannot assert ID uniqueness here, no such restriction from proto (e.g. from adapter)
for(Table table : destAIS.getUserTables().values()) {
currentIDs.add(table.getTableId());
}
for(Table table : destAIS.getGroupTables().values()) {
currentIDs.add((table.getTableId()));
}
List<Join> joinsNeedingGroup = new ArrayList<Join>();
for(NewGroupInfo newGroupInfo : newGroups) {
String rootTableName = newGroupInfo.pbGroup.getRootTableName();
UserTable rootUserTable = destAIS.getUserTable(newGroupInfo.schema, rootTableName);
rootUserTable.setTreeName(newGroupInfo.pbGroup.getTreeName());
rootUserTable.setGroup(newGroupInfo.group);
joinsNeedingGroup.addAll(rootUserTable.getCandidateChildJoins());
GroupTable groupTable = GroupTable.create(
destAIS,
newGroupInfo.schema,
nameGenerator.generateGroupTableName(rootTableName),
computeNewTableID(currentIDs, rootUserTable.getTableId() + 1)
);
newGroupInfo.group.setGroupTable(groupTable);
groupTable.setGroup(newGroupInfo.group);
groupTable.setTreeName(newGroupInfo.treeName);
rootUserTable.setTreeName(newGroupInfo.treeName);
}
for(int i = 0; i < joinsNeedingGroup.size(); ++i) {
Join join = joinsNeedingGroup.get(i);
Group group = join.getParent().getGroup();
join.setGroup(group);
join.getChild().setGroup(group);
join.getChild().setTreeName(join.getParent().getTreeName());
joinsNeedingGroup.addAll(join.getChild().getCandidateChildJoins());
}
// Final pass (GI creation requires everything else be completed)
for(NewGroupInfo newGroupInfo : newGroups) {
loadGroupIndexes(newGroupInfo.group, newGroupInfo.pbGroup.getIndexesList());
}
}
private void loadTables(String schema, Collection<AISProtobuf.Table> pbTables) {
int generatedId = 1;
for(AISProtobuf.Table pbTable : pbTables) {
hasRequiredFields(pbTable);
UserTable userTable = UserTable.create(
destAIS,
schema,
pbTable.getTableName(),
pbTable.hasTableId() ? pbTable.getTableId() : generatedId++
);
userTable.setCharsetAndCollation(getCharColl(pbTable.hasCharColl(), pbTable.getCharColl()));
if(pbTable.hasVersion()) {
userTable.setVersion(pbTable.getVersion());
}
loadColumns(userTable, pbTable.getColumnsList());
loadTableIndexes(userTable, pbTable.getIndexesList());
}
}
private void loadTableJoins(String schema, Collection<AISProtobuf.Table> pbTables) {
for(AISProtobuf.Table pbTable : pbTables) {
if(pbTable.hasParentTable()) {
AISProtobuf.Join pbJoin = pbTable.getParentTable();
hasRequiredFields(pbJoin);
AISProtobuf.TableName pbParentName = pbJoin.getParentTable();
hasRequiredFields(pbParentName);
UserTable childTable = destAIS.getUserTable(schema, pbTable.getTableName());
UserTable parentTable = destAIS.getUserTable(pbParentName.getSchemaName(), pbParentName.getTableName());
if(parentTable == null) {
throw new ProtobufReadException(
pbTable.getDescriptorForType().getFullName(),
String.format("%s has unknown parentTable %s.%s", childTable.getName(),
pbParentName.getSchemaName(), pbParentName.getTableName())
);
}
String joinName = parentTable.getName() + "/" + childTable.getName();
Join join = Join.create(destAIS, joinName, parentTable, childTable);
for(AISProtobuf.JoinColumn pbJoinColumn : pbJoin.getColumnsList()) {
hasRequiredFields(pbJoinColumn);
JoinColumn.create(
join,
parentTable.getColumn(pbJoinColumn.getParentColumn()),
childTable.getColumn(pbJoinColumn.getChildColumn())
);
}
}
}
}
private void loadColumns(UserTable userTable, Collection<AISProtobuf.Column> pbColumns) {
for(AISProtobuf.Column pbColumn : pbColumns) {
hasRequiredFields(pbColumn);
Column.create(
userTable,
pbColumn.getColumnName(),
pbColumn.getPosition(),
destAIS.getType(pbColumn.getTypeName()),
pbColumn.getIsNullable(),
pbColumn.hasTypeParam1() ? pbColumn.getTypeParam1() : null,
pbColumn.hasTypeParam2() ? pbColumn.getTypeParam2() : null,
pbColumn.hasInitAutoInc() ? pbColumn.getInitAutoInc() : null,
getCharColl(pbColumn.hasCharColl(), pbColumn.getCharColl())
);
}
}
private void loadTableIndexes(UserTable userTable, Collection<AISProtobuf.Index> pbIndexes) {
for(AISProtobuf.Index pbIndex : pbIndexes) {
hasRequiredFields(pbIndex);
TableIndex tableIndex = TableIndex.create(
destAIS,
userTable,
pbIndex.getIndexName(),
pbIndex.getIndexId(),
pbIndex.getIsUnique(),
getIndexConstraint(pbIndex)
);
if(pbIndex.hasTreeName()) {
tableIndex.setTreeName(pbIndex.getTreeName());
}
loadIndexColumns(userTable, tableIndex, pbIndex.getColumnsList());
}
}
private void loadGroupIndexes(Group group, Collection<AISProtobuf.Index> pbIndexes) {
for(AISProtobuf.Index pbIndex : pbIndexes) {
hasRequiredFieldsGI(pbIndex);
GroupIndex groupIndex = GroupIndex.create(
destAIS,
group,
pbIndex.getIndexName(),
pbIndex.getIndexId(),
pbIndex.getIsUnique(),
getIndexConstraint(pbIndex),
convertJoinTypeOrNull(pbIndex.hasJoinType(), pbIndex.getJoinType())
);
if(pbIndex.hasTreeName()) {
groupIndex.setTreeName(pbIndex.getTreeName());
}
loadIndexColumns(null, groupIndex, pbIndex.getColumnsList());
}
}
private void loadIndexColumns(UserTable table, Index index, Collection<AISProtobuf.IndexColumn> pbIndexColumns) {
for(AISProtobuf.IndexColumn pbIndexColumn : pbIndexColumns) {
hasRequiredFields(pbIndexColumn);
if(pbIndexColumn.hasTableName()) {
hasRequiredFields(pbIndexColumn.getTableName());
table = destAIS.getUserTable(convertTableNameOrNull(true, pbIndexColumn.getTableName()));
}
IndexColumn.create(
index,
table != null ? table.getColumn(pbIndexColumn.getColumnName()) : null,
pbIndexColumn.getPosition(),
pbIndexColumn.getIsAscending(),
null /* indexedLength not in proto */
);
}
}
private static String getIndexConstraint(AISProtobuf.Index pbIndex) {
if(pbIndex.getIsPK()) {
return Index.PRIMARY_KEY_CONSTRAINT;
}
if(pbIndex.getIsAkFK()) {
return Index.FOREIGN_KEY_CONSTRAINT;
}
if(pbIndex.getIsUnique()) {
return Index.UNIQUE_KEY_CONSTRAINT;
}
return Index.KEY_CONSTRAINT;
}
private static CharsetAndCollation getCharColl(boolean isValid, AISProtobuf.CharCollation pbCharAndCol) {
if(isValid) {
hasRequiredFields(pbCharAndCol);
return CharsetAndCollation.intern(pbCharAndCol.getCharacterSetName(),
pbCharAndCol.getCollationOrderName());
}
return null;
}
private static Index.JoinType convertJoinTypeOrNull(boolean isValid, AISProtobuf.JoinType joinType) {
if(isValid) {
switch(joinType) {
case LEFT_OUTER_JOIN: return Index.JoinType.LEFT;
case RIGHT_OUTER_JOIN: return Index.JoinType.RIGHT;
}
throw new ProtobufReadException(AISProtobuf.JoinType.getDescriptor().getFullName(),
"Unsupported join type: " + joinType.name());
}
return null;
}
private static TableName convertTableNameOrNull(boolean isValid, AISProtobuf.TableName tableName) {
if(isValid) {
hasRequiredFields(tableName);
return new TableName(tableName.getSchemaName(), tableName.getTableName());
}
return null;
}
private static int computeNewTableID(Set<Integer> currentIDs, int starting) {
while(!currentIDs.add(starting)) {
++starting;
}
return starting;
}
/**
* Check that a given message instance has all (application) required fields.
* By default, this is all declared fields. See overloads for specific types.
* @param message Message to check
*/
private static void hasRequiredFields(AbstractMessage message) {
requireAllFieldsExcept(message);
}
private static void hasRequiredFields(AISProtobuf.Group pbGroup) {
requireAllFieldsExcept(
pbGroup,
AISProtobuf.Group.TREENAME_FIELD_NUMBER,
AISProtobuf.Group.INDEXES_FIELD_NUMBER
);
}
private static void hasRequiredFields(AISProtobuf.Schema pbSchema) {
requireAllFieldsExcept(
pbSchema,
AISProtobuf.Schema.TABLES_FIELD_NUMBER,
AISProtobuf.Schema.GROUPS_FIELD_NUMBER,
AISProtobuf.Schema.CHARCOLL_FIELD_NUMBER
);
}
private static void hasRequiredFields(AISProtobuf.Table pbTable) {
requireAllFieldsExcept(
pbTable,
AISProtobuf.Table.TABLEID_FIELD_NUMBER,
AISProtobuf.Table.ORDINAL_FIELD_NUMBER,
AISProtobuf.Table.CHARCOLL_FIELD_NUMBER,
AISProtobuf.Table.INDEXES_FIELD_NUMBER,
AISProtobuf.Table.PARENTTABLE_FIELD_NUMBER,
AISProtobuf.Table.DESCRIPTION_FIELD_NUMBER,
AISProtobuf.Table.PROTECTED_FIELD_NUMBER,
AISProtobuf.Table.VERSION_FIELD_NUMBER
);
}
private static void hasRequiredFields(AISProtobuf.Column pbColumn) {
requireAllFieldsExcept(
pbColumn,
AISProtobuf.Column.TYPEPARAM1_FIELD_NUMBER,
AISProtobuf.Column.TYPEPARAM2_FIELD_NUMBER,
AISProtobuf.Column.INITAUTOINC_FIELD_NUMBER,
AISProtobuf.Column.CHARCOLL_FIELD_NUMBER,
AISProtobuf.Column.DESCRIPTION_FIELD_NUMBER
);
}
private static void hasRequiredFields(AISProtobuf.Index pbIndex) {
requireAllFieldsExcept(
pbIndex,
AISProtobuf.Index.TREENAME_FIELD_NUMBER,
AISProtobuf.Index.DESCRIPTION_FIELD_NUMBER,
AISProtobuf.Index.JOINTYPE_FIELD_NUMBER
);
}
private static void hasRequiredFieldsGI(AISProtobuf.Index pbIndex) {
requireAllFieldsExcept(
pbIndex,
AISProtobuf.Index.TREENAME_FIELD_NUMBER,
AISProtobuf.Index.DESCRIPTION_FIELD_NUMBER
);
}
private static void hasRequiredFields(AISProtobuf.IndexColumn pbIndexColumn) {
requireAllFieldsExcept(
pbIndexColumn,
AISProtobuf.IndexColumn.TABLENAME_FIELD_NUMBER
);
}
private static void requireAllFieldsExcept(AbstractMessage message, int... fieldNumbersNotRequired) {
Collection<Descriptors.FieldDescriptor> required = new ArrayList<Descriptors.FieldDescriptor>(message.getDescriptorForType().getFields());
Collection<Descriptors.FieldDescriptor> actual = message.getAllFields().keySet();
required.removeAll(actual);
if(fieldNumbersNotRequired != null) {
for(int fieldNumber : fieldNumbersNotRequired) {
required.remove(message.getDescriptorForType().findFieldByNumber(fieldNumber));
}
}
if(!required.isEmpty()) {
Collection<String> names = new ArrayList<String>(required.size());
for(Descriptors.FieldDescriptor desc : required) {
names.add(desc.getName());
}
throw new ProtobufReadException(message.getDescriptorForType().getFullName(),
"Missing required fields: " + names.toString());
}
}
/**
 * Sanity-checks that the buffer is non-null and array-backed. Uses
 * assertions, so it is only enforced when the JVM runs with {@code -ea}.
 *
 * @param buffer buffer to check
 */
private static void checkBuffer(GrowableByteBuffer buffer) {
    assert buffer != null;
    assert buffer.hasArray() : "Array backed buffer required: " + buffer;
}
/**
 * Immutable holder pairing a newly created {@code Group} with the protobuf
 * message it was read from, its owning schema and its tree name.
 */
private static class NewGroupInfo {
    final String schema;              // schema the group belongs to
    final Group group;                // the in-memory group object
    final AISProtobuf.Group pbGroup;  // protobuf message the group was read from
    final String treeName;            // tree name associated with the group
    public NewGroupInfo(String schema, Group group, AISProtobuf.Group pbGroup, String treeName) {
        this.schema = schema;
        this.group = group;
        this.pbGroup = pbGroup;
        this.treeName = treeName;
    }
}
} |
package com.altran.iot.observation;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.JsonPath;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.SimpleDateFormat;
import java.util.*;
public class Observation {
private String radioGatewayId;
private String radioGatewayName;
private String radioGatewayDescription;
private String radioSensorId;
private String radioSensorName;
private String radioSensorDescription;
private String timestampCreated;
private String timestampReceived;
private Map<String, String> measurements;
private static SimpleDateFormat dateParser = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private String luceneJson;
private static final Logger logger = LoggerFactory.getLogger(Observation.class);
public String getRadioGatewayId() {
return radioGatewayId;
}
public void setRadioGatewayId(String radioGatewayId) {
this.radioGatewayId = radioGatewayId;
}
public String getRadioGatewayName() {
return radioGatewayName;
}
public void setRadioGatewayName(String radioGatewayName) {
this.radioGatewayName = radioGatewayName;
}
public String getRadioGatewayDescription() {
return radioGatewayDescription;
}
public void setRadioGatewayDescription(String radioGatewayDescription) {
this.radioGatewayDescription = radioGatewayDescription;
}
public String getRadioSensorId() {
return radioSensorId;
}
public void setRadioSensorId(String radioSensorId) {
this.radioSensorId = radioSensorId;
}
public String getRadioSensorName() {
return radioSensorName;
}
public void setRadioSensorName(String radioSensorName) {
this.radioSensorName = radioSensorName;
}
public String getRadioSensorDescription() {
return radioSensorDescription;
}
public void setRadioSensorDescription(String radioSensorDescription) {
this.radioSensorDescription = radioSensorDescription;
}
public String getTimestampCreated() {
return timestampCreated;
}
public void setTimestampCreated(String timestampCreated) {
this.timestampCreated = timestampCreated;
}
public String getTimestampReceived() {
return timestampReceived;
}
public void setTimestampReceived(String timestampReceived) {
this.timestampReceived = timestampReceived;
}
public Map<String, String> getMeasurements() {
return measurements;
}
public void setMeasurements(Map<String, String> measurements) {
this.measurements = measurements;
}
private Observation() {
}
public static Observation fromD7dataTemplate(String s) {
Observation o = new Observation();
o.radioGatewayId = "001BC50C7100001E";
o.radioGatewayName = "001BC50C7100001E";
o.radioGatewayDescription = "001BC50C7100001E";
o.radioSensorId = "001BC50C7100001E";
o.radioSensorDescription = "001BC50C7100001E";
o.radioSensorName = "001BC50C7100001E";
o.timestampCreated = "1412099476264.7";
o.timestampReceived = "1412099476264.7";
Map<String, String> measurementsReveived = new HashMap<>();
measurementsReveived.put("SensorId1", "value1");
measurementsReveived.put("SensorId2", "value2");
measurementsReveived.put("SensorId3", "value3");
measurementsReveived.put("SensorId4", "value4");
measurementsReveived.put("SensorId5", "value5");
measurementsReveived.put("SensorId6", "value6");
o.measurements = measurementsReveived;
return o;
}
public static List<Observation> fromD7Data(String inputData) {
List<Observation> robservations = new ArrayList<Observation>();
Object document = Configuration.defaultConfiguration().jsonProvider().parse(inputData);
Map observations = (Map) JsonPath.read(document, "$.data");
for (Object key : observations.keySet()) {
logger.trace("\n\nRadioSensor = " + key);
Observation o = new Observation();
o.timestampReceived = getStringDate(getString("ts", inputData));
logger.trace("Entry - timestampReceived:{}", o.timestampReceived);
o.timestampCreated = getStringDate(getString("ts", inputData));
logger.trace("Entry - timestampCreated:{}", o.timestampCreated);
o.radioSensorId = key.toString();
logger.trace("Entry - radioSensorId:{}", o.radioSensorId);
o.setRadioGatewayId(inputData.substring(inputData.lastIndexOf("}") + 1));
// System.out.println("Sensorvalues = " + observations.get(key));
Map sensorvalues = (Map) observations.get(key);
Map<String, String> measurementsReceived = new HashMap<>();
for (Object sensortype : sensorvalues.keySet()) {
logger.trace("SensorType =" + sensortype);
logger.trace(" SensorReading =" + sensorvalues.get(sensortype));
if ("ts".equalsIgnoreCase(sensortype.toString())) {
o.timestampCreated = getStringDate(sensorvalues.get(sensortype).toString());
measurementsReceived.put(sensortype.toString(), getStringDate(sensorvalues.get(sensortype).toString()));
} else if ("btn1".equalsIgnoreCase(sensortype.toString())) {
measurementsReceived.put(sensortype.toString(), getStringDate(sensorvalues.get(sensortype).toString()));
} else if ("btn2".equalsIgnoreCase(sensortype.toString())) {
measurementsReceived.put(sensortype.toString(), getStringDate(sensorvalues.get(sensortype).toString()));
} else {
measurementsReceived.put(sensortype.toString(), sensorvalues.get(sensortype).toString());
}
}
o.setMeasurements(measurementsReceived);
robservations.add(o);
}
//Observation observation = Observation.fromD7data(inputData);
return robservations;
}
private static String getString(String key, String inputData) {
Object document = Configuration.defaultConfiguration().jsonProvider().parse(inputData);
try {
Double v = (Double) JsonPath.read(document, "$." + key);
return Double.toString(v);
} catch (ClassCastException cce) {
Long v = (Long) JsonPath.read(document, "$." + key);
return Long.toString(v);
}
}
public static Observation fromLucene(String radioGatewayId, String radioSensorId, String jsondata) {
Observation o = new Observation();
o.setRadioGatewayId(radioGatewayId);
o.setRadioSensorId(radioSensorId);
logger.trace("Entry - fromLucene:{}", jsondata);
Object document = Configuration.defaultConfiguration().jsonProvider().parse(jsondata);
if (o.getRadioSensorId() == null || o.getRadioSensorId().length() < 4) {
o.setRadioSensorId((String) JsonPath.read(document, "$.observation.RadioSensorId"));
}
if (o.getRadioGatewayId() == null || o.getRadioGatewayId().length() < 4) {
o.setRadioGatewayId((String) JsonPath.read(document, "$.observation.RadioGatewayId"));
}
o.timestampReceived = getStringDate(JsonPath.read(document, "$.observation.TimestampReceived"));
o.timestampCreated = getStringDate(JsonPath.read(document, "$.observation.TimestampCreated"));
o.measurements = JsonPath.read(document, "$.observation.Measurements");
o.luceneJson = jsondata;
return o;
}
@Override
public String toString() {
return "Observation{" +
"radioGatewayId='" + radioGatewayId + '\'' +
", radioGatewayName='" + radioGatewayName + '\'' +
", radioGatewayDescription='" + radioGatewayDescription + '\'' +
", radioSensorId='" + radioSensorId + '\'' +
", radioSensorName='" + radioSensorName + '\'' +
", radioSensorDescription='" + radioSensorDescription + '\'' +
", timestampCreated='" + timestampCreated + '\'' +
", timestampReceived='" + timestampReceived + '\'' +
", measurements=" + measurements +
'}';
}
public String toJsonString() {
return "{\n" +
" \"observation\":{ \n" +
" \"RadioGatewayId\":\"" + radioGatewayId + "\",\n" +
" \"RadioGatewayName\":\"" + radioGatewayName + "\",\n" +
" \"RadioGatewayDescription\":\"" + radioGatewayDescription + "\",\n" +
" \"RadioSensorId\":\"" + radioSensorId + "\",\n" +
" \"RadioSensorName\":\"" + radioSensorName + "\",\n" +
" \"RadioSensorDescription\":\"" + radioSensorDescription + "\",\n" +
" \"TimestampCreated\":\"" + timestampCreated + "\",\n" +
" \"TimestampReceived\":\"" + timestampReceived + "\",\n" +
" \"Measurements\": \n" +
" " + JSONValue.toJSONString(measurements) +
" " +
" }\n" +
"}";
}
/**
* could probably be written more elegant...
*
* @param timestampstring
* @return
*/
public static String getStringDate(Object timestampstring) {
try {
Double d1 = Double.parseDouble(timestampstring.toString()) / 1000;
Date date = new Date((long) d1.intValue() * 1000L);
return dateParser.format(date);
} catch (NumberFormatException ne) {
// Already converted
return timestampstring.toString();
}
}
} |
package com.avairebot.orion.audio;
import com.avairebot.orion.factories.MessageFactory;
import com.sedmelluq.discord.lavaplayer.player.AudioLoadResultHandler;
import com.sedmelluq.discord.lavaplayer.player.AudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.player.DefaultAudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.source.AudioSourceManagers;
import com.sedmelluq.discord.lavaplayer.tools.FriendlyException;
import com.sedmelluq.discord.lavaplayer.track.AudioPlaylist;
import com.sedmelluq.discord.lavaplayer.track.AudioTrack;
import net.dv8tion.jda.core.entities.Guild;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.VoiceChannel;
import net.dv8tion.jda.core.managers.AudioManager;
import java.util.HashMap;
import java.util.Map;
public class AudioHandler {
private static final AudioPlayerManager AUDIO_PLAYER_MANAGER;
private static final Map<Long, GuildMusicManager> MUSIC_MANAGER;
static {
MUSIC_MANAGER = new HashMap<>();
AUDIO_PLAYER_MANAGER = new DefaultAudioPlayerManager();
AudioSourceManagers.registerRemoteSources(AUDIO_PLAYER_MANAGER);
AudioSourceManagers.registerLocalSource(AUDIO_PLAYER_MANAGER);
}
public static void loadAndPlay(final Message message, final String trackUrl) {
GuildMusicManager musicManager = getGuildAudioPlayer(message.getGuild());
musicManager.setLastActiveMessage(message);
AUDIO_PLAYER_MANAGER.loadItemOrdered(musicManager, trackUrl, new AudioLoadResultHandler() {
@Override
public void trackLoaded(AudioTrack track) {
if (musicManager.getPlayer().getPlayingTrack() != null) {
MessageFactory.makeSuccess(message, "<@%s> has added [%s](%s) to the queue. There are `%s` song(s) ahead of it in the queue.",
message.getAuthor().getId(),
track.getInfo().title,
track.getInfo().uri,
getQueueSize(musicManager)
).queue();
}
play(message, musicManager, track);
}
@Override
public void playlistLoaded(AudioPlaylist playlist) {
MessageFactory.makeSuccess(message, "<@%s> has added %s songs from the [%s](%s) playlist to the queue. There are `%s` song(s) ahead of it in the queue.",
message.getAuthor().getId(),
playlist.getTracks().size(),
playlist.getName(),
trackUrl,
getQueueSize(musicManager)
).queue();
for (AudioTrack track : playlist.getTracks()) {
play(message, musicManager, track);
}
}
@Override
public void noMatches() {
MessageFactory.makeWarning(message, "I found nothing with the given query `%s`", trackUrl).queue();
}
@Override
public void loadFailed(FriendlyException exception) {
MessageFactory.makeError(message, "I couldn't add that to the queue: %s", exception.getMessage()).queue();
}
});
}
public static void skipTrack(Message message) {
GuildMusicManager musicManager = getGuildAudioPlayer(message.getGuild());
musicManager.scheduler.nextTrack();
}
private static void play(Message message, GuildMusicManager musicManager, AudioTrack track) {
if (!connectToVoiceChannel(message)) {
MessageFactory.makeWarning(message, "You have to be connected to a voice channel.").queue();
return;
}
musicManager.scheduler.queue(track, message.getAuthor());
}
private static boolean connectToVoiceChannel(Message message) {
AudioManager audioManager = message.getGuild().getAudioManager();
if (!audioManager.isConnected() && !audioManager.isAttemptingToConnect()) {
VoiceChannel channel = message.getMember().getVoiceState().getChannel();
if (channel == null) {
return false;
}
audioManager.openAudioConnection(message.getMember().getVoiceState().getChannel());
}
return true;
}
private static synchronized GuildMusicManager getGuildAudioPlayer(Guild guild) {
long guildId = Long.parseLong(guild.getId());
GuildMusicManager musicManager = MUSIC_MANAGER.get(guildId);
if (musicManager == null) {
musicManager = new GuildMusicManager(AUDIO_PLAYER_MANAGER);
MUSIC_MANAGER.put(guildId, musicManager);
}
guild.getAudioManager().setSendingHandler(musicManager.getSendHandler());
return musicManager;
}
private static int getQueueSize(GuildMusicManager manager) {
return manager.getPlayer().getPlayingTrack() == null ?
manager.scheduler.getQueue().size() :
manager.scheduler.getQueue().size() + 1;
}
} |
package com.binatechnologies.varsim;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.log4j.Logger;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
/**
* Compare two VCF files, output the TPR and FDR for various bins and variant types
*
* @author johnmu
*/
// TODO this class does not yet compare the contents of the variants
// TODO it also ignores genotypes for now
public class VCFcompare {
private final static Logger log = Logger.getLogger(VCFcompare.class.getName());
public static final int SV_LEN_LIM = 50;
@Option(name = "-true_vcf", usage = "Truth VCF file generated by VarSim [Required]",metaVar = "file",required = true)
String true_vcf_filename;
@Option(name = "-new_vcf", usage = "New VCF file generated by VarSim [Required]",metaVar = "file",required = true)
String new_vcf_filename;
@Option(name = "-prefix", usage = "Prefix for output file [Required]",metaVar = "file",required = true)
String out_prefix;
static final double OVERLAP_ARG = 0.8;
@Option(name = "-over", usage = "Reciprocal overlap ratio allowance in validation ["+OVERLAP_ARG+"]")
double overlap_ratio = OVERLAP_ARG;
static final int WIGGLE_ARG = 20;
@Option(name = "-wig", usage = "Wiggle allowance in validation ["+WIGGLE_ARG+"]")
int wiggle = WIGGLE_ARG;
@Option(name = "-ignore_ins_len", usage = "Ignores insertion length when comparing")
boolean ignore_ins_len;
@Option(name = "-bed", usage = "BED file to restrict the analysis [Optional]",metaVar = "BED_file")
String bed_filename = "";
/**
 * Command-line entry point; delegates straight to the instance runner.
 *
 * @param args command-line arguments, parsed by args4j in run()
 */
public static void main(String[] args) {
    new VCFcompare().run(args);
}
/**
 * Appends the insertion/deletion component of a complex variant to var_list.
 * diff holds, per allele, (alt length - ref length): positive means an
 * insertion of that many bases, negative a deletion. When both alleles share
 * the same non-zero diff the indel is emitted as homozygous (or as two
 * phased het records when the inserted sequences differ); otherwise each
 * allele's indel is emitted separately with its own phase.
 *
 * @param var_list list the generated canonical indel variants are added to
 * @param diff     per-allele length difference alt - ref
 * @param ref      reference sequence of the original variant
 * @param alt      per-allele alternate sequences
 * @param var      original variant (source of chromosome, id, etc.)
 * @param curr_pos reference position the original variant starts at
 * @param end      if true the indel is anchored at the end of the reference
 *                 span; otherwise at the start (curr_pos)
 */
private void add_indels(ArrayList<Variant> var_list, int[] diff, byte[] ref, byte[][] alt,
                        Variant var, int curr_pos, boolean end) {
    // add insertions or deletions for complex variants
    if (diff[0] == diff[1] && diff[0] != 0) {
        // homozygous
        if (diff[0] > 0) {
            // insertion
            if (Arrays.equals(alt[0], alt[1])) {
                // identical inserted sequence on both alleles -> one hom record
                byte[] phase = {1, 1};
                if(end){
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + ref.length, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[0], 0, diff[0]))},
                            phase, true, var.getVar_id(), ".", ""));
                }else {
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[0], 0, diff[0]))},
                            phase, true, var.getVar_id(), ".", ""));
                }
            } else {
                // same length but different sequence -> one phased record per allele
                byte[] phase = {0, 0};
                if(end) {
                    phase[0] = 1;
                    phase[1] = 0;
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + ref.length, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[0], 0, diff[0]))},
                            phase, true, var.getVar_id(), ".", ""));
                    phase[0] = 0;
                    phase[1] = 1;
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + ref.length, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[1], 0, diff[1]))},
                            phase, true, var.getVar_id(), ".", ""));
                }else{
                    phase[0] = 1;
                    phase[1] = 0;
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[0], 0, diff[0]))},
                            phase, true, var.getVar_id(), ".", ""));
                    phase[0] = 0;
                    phase[1] = 1;
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[1], 0, diff[1]))},
                            phase, true, var.getVar_id(), ".", ""));
                }
            }
        } else if (diff[0] < 0) {
            // deletion
            byte[] phase = {1, 1};
            if(end) {
                var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + alt[0].length, -diff[0],
                        Arrays.copyOfRange(ref, alt[0].length, alt[0].length-diff[0]), new FlexSeq[]{new FlexSeq()},
                        phase, true, var.getVar_id(), ".", ""));
            }else {
                var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, -diff[0],
                        Arrays.copyOfRange(ref, 0, -diff[0]), new FlexSeq[]{new FlexSeq()},
                        phase, true, var.getVar_id(), ".", ""));
            }
        }
    } else {
        // alleles differ in length -> handle each allele's indel independently
        for (int a = 0; a < alt.length; a++) {
            if (diff[a] > 0) {
                // insertion
                byte[] phase = {0, 0};
                phase[a] = 1;
                if(end) {
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + ref.length, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[a], 0, diff[a]))},
                            phase, true, var.getVar_id(), ".", ""));
                }else{
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 0, new byte[0],
                            new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt[a], 0, diff[a]))},
                            phase, true, var.getVar_id(), ".", ""));
                }
            } else if (diff[a] < 0) {
                // deletion
                byte[] phase = {0, 0};
                phase[a] = 1;
                if(end) {
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + alt[a].length, -diff[a],
                            Arrays.copyOfRange(ref, alt[a].length, alt[a].length-diff[a]), new FlexSeq[]{new FlexSeq()},
                            phase, true, var.getVar_id(), ".", ""));
                }else {
                    var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, -diff[a],
                            Arrays.copyOfRange(ref, 0, -diff[a]), new FlexSeq[]{new FlexSeq()},
                            phase, true, var.getVar_id(), ".", ""));
                }
            }
        }
    }
}
/**
 * Canonicalizes a variant, trying both indel anchorings (start and end of
 * the reference span) and keeping whichever decomposition yields fewer
 * variants.
 *
 * @param var variant to decompose (copied before decomposition)
 * @return the shorter of the two canonical decompositions
 */
private ArrayList<Variant> convert_var_to_var_list(Variant var) {
    ArrayList<Variant> anchoredAtStart = convert_var_to_var_list(new Variant(var), false);
    ArrayList<Variant> anchoredAtEnd = convert_var_to_var_list(new Variant(var), true);
    return anchoredAtEnd.size() < anchoredAtStart.size() ? anchoredAtEnd : anchoredAtStart;
}
/**
 * Decomposes a variant into canonical form: a list of SNPs plus at most one
 * insertion or deletion per allele. SNPs, reference calls, non-sequence alts
 * and zero-length ref+alt pairs are returned unsplit. When both alleles are
 * non-reference the two alt sequences are walked together against the
 * reference; otherwise each allele is processed independently.
 * <p>
 * NOTE: the input variant is mutated — alleles that were fully decomposed
 * are reset to reference, and the (possibly modified) original is appended
 * only if it is still non-reference.
 *
 * @param var variant to decompose (mutated in place as described above)
 * @param end if true, indels are anchored at the end of the reference span;
 *            otherwise at the start
 * @return list of canonical variants equivalent to the input
 */
//if end = true, we add indels to the end
private ArrayList<Variant> convert_var_to_var_list(Variant var, boolean end) {
    ArrayList<Variant> var_list = new ArrayList<Variant>();
    //System.err.println("pat|mat: " + var.paternal() +"|"+ var.maternal());
    // if the variant is an MNP or SNP, break it dooooownnn
    // Decide first whether this variant is exempt from splitting.
    boolean no_split = false;
    if (var.getType() == Variant.OverallType.SNP) {
        no_split = true;
    }
    if (var.paternal() == 0 && var.maternal() == 0) {
        no_split = true;
    }
    if (var.paternal() > 0 && var.getAlt(var.paternal()).getType() != FlexSeq.Type.SEQ) {
        no_split = true;
    }
    if (var.maternal() > 0 && var.getAlt(var.maternal()).getType() != FlexSeq.Type.SEQ) {
        no_split = true;
    }
    if (var.paternal() > 0 && var.getAlt(var.paternal()).length() == 0 && var.getRef().length == 0) {
        no_split = true;
    }
    if (var.maternal() > 0 && var.getAlt(var.maternal()).length() == 0 && var.getRef().length == 0) {
        no_split = true;
    }
    if (no_split) {
        var_list.add(var);
        return var_list;
    }
    if (var.getType(var.paternal()) != Variant.Type.Reference
            && var.getType(var.maternal()) != Variant.Type.Reference) {
        // Both alleles are non-reference: walk both alt sequences against the
        // reference in lock-step.
        int[] allele = {var.get_allele(0), var.get_allele(1)};
        byte[][] alt = {var.getAlt(allele[0]).getSeq(), var.getAlt(allele[1]).getSeq()};
        byte[] ref = var.getRef();
        int curr_pos = var.position();
        // modify positions based on if ref matches alt
        int[] match_len = {0, 0};
        for (int i = 0; i < 2; i++) {
            for (int j = 0; j < Math.min(ref.length, alt[i].length); j++) {
                if (alt[i][j] == ref[j]) {
                    match_len[i]++;
                } else {
                    break;
                }
            }
        }
        int min_match_len = Math.min(match_len[0], match_len[1]);
        //System.err.println("min_match_len: " + min_match_len);
        if (min_match_len > 0) {
            // Trim the common prefix shared by ref and both alts.
            ref = Arrays.copyOfRange(ref, min_match_len, ref.length);
            for (int i = 0; i < 2; i++) {
                alt[i] = Arrays.copyOfRange(alt[i], min_match_len, alt[i].length);
            }
            curr_pos += min_match_len;
        }
        int[] diff = {alt[0].length - ref.length, alt[1].length - ref.length};
        add_indels(var_list, diff, ref, alt, var, curr_pos,end);
        // Emit one SNP per mismatching reference base.
        for (int i = 0; i < ref.length; i++, curr_pos++) {
            // idx[j]: index into alt[j] for reference base i; -1 = deleted base
            int[] idx = new int[2];
            if(end){
                for(int j = 0;j<2;j++) {
                    if(i < ref.length + diff[j]){
                        idx[j] = i;
                    }else{
                        idx[j] = -1; // we are into deleted bases
                    }
                }
            }else{
                for(int j = 0;j<2;j++) {
                    idx[j] = i + diff[j];
                }
            }
            if (idx[0] < 0 && idx[1] < 0) {
                // both deleted
            } else if (idx[0] >= 0 && idx[1] < 0 && alt[0][idx[0]] != ref[i]) {
                // one deleted, hence the other is homozygous
                byte[] phase = {1, 1};
                var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 1, new byte[]{ref[i]},
                        new FlexSeq[]{new FlexSeq(alt[0][idx[0]])}, phase, true, var.getVar_id(), ".", ""));
            } else if (idx[0] < 0 && idx[1] >= 0 && alt[1][idx[1]] != ref[i]) {
                // one deleted, hence the other is homozygous
                byte[] phase = {1, 1};
                var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 1, new byte[]{ref[i]},
                        new FlexSeq[]{new FlexSeq(alt[1][idx[1]])}, phase, true, var.getVar_id(), ".", ""));
            } else if (idx[0] >= 0 && idx[1] < 0 && alt[0][idx[0]] == ref[i]) {
                // ref call with del
            } else if (idx[0] < 0 && idx[1] >= 0 && alt[1][idx[1]] == ref[i]) {
                // ref call with del
            } else if (alt[0][idx[0]] == ref[i] && alt[1][idx[1]] == ref[i]) {
                // ref call
            } else if (alt[0][idx[0]] == alt[1][idx[1]]) {
                // homozygous
                byte[] phase = {1, 1};
                var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 1, new byte[]{ref[i]},
                        new FlexSeq[]{new FlexSeq(alt[0][idx[0]])}, phase, true, var.getVar_id(), ".", ""));
            } else if (alt[0][idx[0]] != ref[i] && alt[1][idx[1]] != ref[i]) {
                // het but both alt
                byte[] phase = {1, 2};
                var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 1, new byte[]{ref[i]},
                        new FlexSeq[]{new FlexSeq(alt[0][idx[0]]), new FlexSeq(alt[1][idx[1]])},
                        phase, true, var.getVar_id(), ".", ""));
            } else {
                // het with one ref
                for (int a = 0; a < 2; a++) {
                    if (alt[a][idx[a]] != ref[i]) {
                        byte[] phase = {0, 0};
                        phase[a] = 1;
                        var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 1, new byte[]{ref[i]},
                                new FlexSeq[]{new FlexSeq(alt[a][idx[a]])}, phase, true, var.getVar_id(), ".", ""));
                    }
                }
            }
        }
        var.set_allele(0, (byte) 0); // set to reference
        var.set_allele(1, (byte) 0); // set to reference
    } else {
        // At most one allele is non-reference: handle each allele on its own.
        for (int a = 0; a < 2; a++) {
            int allele = var.get_allele(a);
            if (var.getType(allele) == Variant.Type.Complex
                    || var.getType(allele) == Variant.Type.MNP
                    || var.getType(allele) == Variant.Type.SNP) {
                byte[] alt = var.getAlt(allele).getSeq();
                byte[] ref = var.getRef();
                int curr_pos = var.position();
                int diff = alt.length - ref.length;
                // add insertions or deletions for complex variants
                if (diff > 0) {
                    // insertion
                    byte[] phase = {0, 0};
                    phase[a] = 1;
                    if(end){
                        var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + ref.length, 0, new byte[0],
                                new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt, 0, diff))},
                                phase, true, var.getVar_id(), ".", ""));
                    }else {
                        var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 0, new byte[0],
                                new FlexSeq[]{new FlexSeq(Arrays.copyOfRange(alt, 0, diff))},
                                phase, true, var.getVar_id(), ".", ""));
                    }
                } else if (diff < 0) {
                    // deletion
                    byte[] phase = {0, 0};
                    phase[a] = 1;
                    if(end){
                        var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos + alt.length, -diff,
                                Arrays.copyOfRange(ref, alt.length, alt.length-diff),
                                new FlexSeq[]{new FlexSeq()},
                                phase, true, var.getVar_id(), ".", ""));
                    }else {
                        var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, -diff,
                                Arrays.copyOfRange(ref, 0, -diff),
                                new FlexSeq[]{new FlexSeq()},
                                phase, true, var.getVar_id(), ".", ""));
                    }
                }
                // Emit one het SNP per mismatching reference base.
                for (int i = 0; i < ref.length; i++) {
                    int idx;
                    if(end) {
                        if(i < ref.length + diff) {
                            idx = i;
                        }else{
                            idx = -1; // we are in a deleted region
                        }
                    }else{
                        idx = i + diff;
                    }
                    if (idx >= 0 && alt[idx] != ref[i]) {
                        byte[] phase = {0, 0};
                        phase[a] = 1;
                        var_list.add(new Variant(var.getChr_name(), var.chromosome(), curr_pos, 1, new byte[]{ref[i]},
                                new FlexSeq[]{new FlexSeq(alt[idx])}, phase, true, var.getVar_id(), ".", ""));
                    }
                    curr_pos++;
                }
                var.set_allele(a, (byte) 0); // set to reference
            }
        }
    }
    if (!var.isRef()) {
        var_list.add(var);
    }
    return var_list;
}
private void run(String[] args) {
String VERSION = "VarSim " + getClass().getPackage().getImplementationVersion();
String usage = "Generates a JSON with accuracy statistics of a VCF file relative to a truth\n";
boolean compare_genotypes = false;
// these are the statistics we "ideally" want to collect
// number of variants correct (either genotype) (for each type)
// number homozygous correct (for each type)
// number heterozygous correct (for each type)
// number homozygous genotype correct (for each type)
// number heterozyous genotype correct (for each type)
CmdLineParser parser = new CmdLineParser(this);
// if you have a wider console, you could increase the value;
// here 80 is also the default
parser.setUsageWidth(80);
try {
parser.parseArgument(args);
} catch (CmdLineException e) {
System.err.println(VERSION);
System.err.println(e.getMessage());
System.err.println("java -jar vcfcompare.jar [options...]");
// print the list of available options
parser.printUsage(System.err);
System.err.println(usage);
return;
}
BedFile intersector = null;
boolean bed_exists = false;
// check if the file exists
try{
File f = new File(bed_filename);
if(f.exists()){
bed_exists = true;
}
}catch (Exception e){
e.printStackTrace();
}
if(bed_exists) {
intersector = new BedFile(bed_filename);
}
// load true VCF into interval tree
log.info("Load True VCF");
/**
* This is just for outputting to JSON
*/
class output_class {
CompareParams params;
EnumStatsRatioCounter<Variant.OverallType> num_true_correct;
output_class(CompareParams params, EnumStatsRatioCounter<Variant.OverallType> num_true_correct) {
this.params = params;
this.num_true_correct = num_true_correct;
}
output_class() {
}
public CompareParams getParams() {
return params;
}
public void setParams(CompareParams params) {
this.params = params;
}
public EnumStatsRatioCounter<Variant.OverallType> getNum_true_correct() {
return num_true_correct;
}
public void setNum_true_correct(EnumStatsRatioCounter<Variant.OverallType> num_true_correct) {
this.num_true_correct = num_true_correct;
}
}
output_class output_blob = new output_class();
output_blob.setParams(new CompareParams());
output_blob.getParams().setBed_filename(bed_filename);
output_blob.getParams().setNew_vcf_filename(new_vcf_filename);
output_blob.getParams().setOverlap_percent(overlap_ratio);
output_blob.getParams().setTrue_vcf_filename(true_vcf_filename);
output_blob.getParams().setWiggle(wiggle);
VCFparser true_parser = new VCFparser(true_vcf_filename, null, false);
// allow duplicates, this is needed because insertions don't actually take up a location
chrST<Variant> true_store = new chrST<Variant>(true);
int num_read = 0;
int num_added = 0;
// this is for the original variants
// it stores the total length of the original variant in bases
// Still check for validation of canonical full variants
ArrayList<Integer> full_validated_total = new ArrayList<Integer>();
ArrayList<Variant> true_var_list = new ArrayList<Variant>();
// For each true variant, if the number of bases validated is over a certain threshold
// call it correct
output_blob.setNum_true_correct(new EnumStatsRatioCounter<Variant.OverallType>());
// For called variants, break down into canonical ones and count based on that
// if any called variant overlaps a complex variant or MNP, count it as "complex"
// otherwise, simple count them in their canonical forms
// store true variants as canonical ones, but remember original form
while (true_parser.hasMoreInput()) {
Variant var = true_parser.parseLine();
if (var == null) {
log.info("skip line");
continue;
}
Genotypes geno = var.getGeno();
if (!geno.isNonRef()) {
continue;
}
String chr_name = var.getChr_name();
Variant.OverallType orig_type = var.getType();
// determine max variant region
// when comparing genotypes, we need to individually compare
// to make sure they really overlap
ArrayList<Variant> var_list = convert_var_to_var_list(new Variant(var));
int total_len = 0;
double max_len = 0;
// add to interval tree
for (Variant curr_var : var_list) {
int curr_len = curr_var.max_len();
if (curr_len > max_len) {
max_len = curr_len;
}
total_len += curr_len;
Interval1D curr_var_reg = null;
try {
curr_var_reg = curr_var.get_geno_var_interval();
} catch (Exception e) {
e.printStackTrace();
log.error("Original variant: " + var);
log.error("Bad variant: " + curr_var);
System.exit(1);
}
curr_var.idx = num_added;
curr_var.full_idx = num_read;
curr_var.original_type = orig_type;
true_store.put(chr_name, curr_var_reg, curr_var);
num_added++;
}
if (total_len >= SV_LEN_LIM && max_len / total_len >= overlap_ratio && var_list.size() > 1) {
// in this case we break down the variant into canoical forms since
// the original variant was probably a large deletion with a small insertion
for (Variant curr_var : var_list) {
int curr_len = curr_var.max_len();
full_validated_total.add(curr_len);
true_var_list.add(curr_var);
num_read++;
}
} else {
full_validated_total.add(total_len);
true_var_list.add(var);
num_read++;
}
}
log.info("Num read: " + num_read);
log.info("Num added: " + num_added);
log.info("Num nodes: " + true_store.size());
// this is for the split variants
// set to true if the canonical original variant was validated true
BitSet validated_true = new BitSet(num_added);
// this is for the original variants
// count of the number of bases validated for the original variant
int[] full_validated_count = new int[num_read];
// generate the output files
PrintWriter TP_writer = null;
PrintWriter FP_writer = null;
PrintWriter FN_writer = null;
PrintWriter JSON_writer = null;
try {
TP_writer = new PrintWriter(out_prefix + "_TP.vcf", "UTF-8");
FP_writer = new PrintWriter(out_prefix + "_FP.vcf", "UTF-8");
FN_writer = new PrintWriter(out_prefix + "_FN.vcf", "UTF-8");
JSON_writer = new PrintWriter(out_prefix + "_report.json", "UTF-8");
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
// for this case we add to false positives if the variant is not validated.
// However, do don't add to true positives, those that computed later
log.info("Load New VCF");
int num_new_vars = 0;
// iterate over new VCF and collect stats
VCFparser new_parser = new VCFparser(new_vcf_filename, null, false);
while (new_parser.hasMoreInput()) {
Variant var = new_parser.parseLine();
if (var == null) {
// System.err.println("Bad variant or not a variant line");
continue;
}
Genotypes geno = var.getGeno();
String chr_name = var.getChr_name();
Interval1D var_reg = var.get_geno_interval();
if (!(intersector == null || intersector.contains(chr_name, var_reg))) {
continue;
}
// the overall type of the called variant
Variant.OverallType curr_var_type = var.getType();
// if called as complex variant convert to indel+snps
ArrayList<Variant> var_list = convert_var_to_var_list(new Variant(var));
double total_len = 0;
double validated_len = 0;
double max_len = 0;
for (Variant curr_var : var_list) {
total_len += curr_var.max_len();
if (max_len < curr_var.max_len()) {
max_len = curr_var.max_len();
}
}
// split up variants that are basically one big variant and one small one
boolean compute_as_split = false;
if (total_len >= SV_LEN_LIM && max_len / total_len >= overlap_ratio && var_list.size() > 1){
compute_as_split = true;
}
for (Variant curr_var : var_list) {
// get genotype
geno = curr_var.getGeno();
result_comparator comp = new result_comparator(true_store, overlap_ratio, wiggle,ignore_ins_len);
if (curr_var.isHom()) {
int max_true_len = comp.compare_variant(curr_var, geno.geno[0], validated_true);
dual_idx idx;
if (compare_genotypes) {
idx = comp.isHomMatch();
} else {
idx = comp.isMatch();
}
if (idx.idx >= 0) {
// validated
validated_true.set(idx.idx);
full_validated_count[idx.full_idx] += max_true_len;// this 'should' be overlap len
validated_len += curr_var.max_len();
}else if(compute_as_split){
output_blob.getNum_true_correct().addFP(curr_var.getType(), var.max_len());
FP_writer.println(var);
}
} else {
// het
boolean matched = false;
int max_true_len = 0;
for (int i = 0; i < 2; i++) {
byte allele = geno.geno[i];
if (allele > 0) {
int len = comp.compare_variant(curr_var, allele, validated_true);
if(len > max_true_len){
max_true_len = len;
}
}
}
dual_idx idx;
if (compare_genotypes) {
idx = comp.isHetMatch();
} else {
idx = comp.isMatch();
}
if (idx.idx >= 0) {
validated_true.set(idx.idx);
full_validated_count[idx.full_idx] += curr_var.max_len(); // this 'should' be overlap len
validated_len += curr_var.max_len();
}else if(compute_as_split){
output_blob.getNum_true_correct().addFP(curr_var.getType(), var.max_len());
FP_writer.println(var);
}
}
}
if (!compute_as_split && validated_len < (total_len*overlap_ratio)) {
// this is a false positive!
output_blob.getNum_true_correct().addFP(curr_var_type, var.max_len());
FP_writer.println(var);
}
num_new_vars++;
}
log.info("Num new variants read: " + num_new_vars);
// read through again and compute for the true variants
num_read = 0;
for (Variant var : true_var_list) {
String chr_name = var.getChr_name();
Interval1D curr_var_reg = var.get_geno_interval();
if (intersector == null || intersector.contains(chr_name, curr_var_reg)) {
int total_len = full_validated_total.get(num_read);
int validated_len = full_validated_count[num_read];
if (validated_len >= (overlap_ratio * total_len)) {
// validated
output_blob.getNum_true_correct().addTP(var.getType(), var.max_len());
TP_writer.println(var);
} else {
FN_writer.println(var);
}
output_blob.getNum_true_correct().addT(var.getType(), var.max_len());
}
num_read++;
}
// output the stats
System.err.println(output_blob.getNum_true_correct());
ObjectMapper mapper = new ObjectMapper();
mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
try {
mapper.writeValue(JSON_writer, output_blob);
} catch (Exception e) {
e.printStackTrace();
}
try {
TP_writer.close();
FP_writer.close();
FN_writer.close();
JSON_writer.close();
} catch (Exception e) {
e.printStackTrace();
}
log.info("Done!"); // used to record the time
}
/**
 * Parameter bag for a single VCF-comparison run.  Accessors follow the
 * JavaBean convention so the object can be (de)serialized (e.g. by Jackson);
 * note the historical mismatch: the {@code overlap_ratio} field is exposed as
 * {@code getOverlap_percent()/setOverlap_percent()} and must stay that way
 * for compatibility.
 */
class CompareParams {
    /** Path of the VCF file holding the truth variant set. */
    String true_vcf_filename;
    /** Path of the VCF file holding the called variants under evaluation. */
    String new_vcf_filename;
    /** Minimum reciprocal-overlap ratio required for two variants to match. */
    Double overlap_ratio;
    /** Positional slack, in bases, tolerated when comparing variant positions. */
    int wiggle;
    /** Optional BED file restricting the comparison to specific regions. */
    String bed_filename;

    /** No-argument constructor, required for bean-style deserialization. */
    public CompareParams() {
    }

    /** Fully-initializing constructor; delegates to the setters. */
    public CompareParams(String true_vcf_filename, String new_vcf_filename, Double overlap_ratio, int wiggle, String bed_filename) {
        setTrue_vcf_filename(true_vcf_filename);
        setNew_vcf_filename(new_vcf_filename);
        setOverlap_percent(overlap_ratio);
        setWiggle(wiggle);
        setBed_filename(bed_filename);
    }

    public String getTrue_vcf_filename() {
        return true_vcf_filename;
    }

    public void setTrue_vcf_filename(String true_vcf_filename) {
        this.true_vcf_filename = true_vcf_filename;
    }

    public String getNew_vcf_filename() {
        return new_vcf_filename;
    }

    public void setNew_vcf_filename(String new_vcf_filename) {
        this.new_vcf_filename = new_vcf_filename;
    }

    public Double getOverlap_percent() {
        return overlap_ratio;
    }

    public void setOverlap_percent(Double overlap_ratio) {
        this.overlap_ratio = overlap_ratio;
    }

    public int getWiggle() {
        return wiggle;
    }

    public void setWiggle(int wiggle) {
        this.wiggle = wiggle;
    }

    public String getBed_filename() {
        return bed_filename;
    }

    public void setBed_filename(String bed_filename) {
        this.bed_filename = bed_filename;
    }
}
class dual_idx {
public int idx;
public int full_idx;
dual_idx(int idx, int full_idx) {
this.idx = idx;
this.full_idx = full_idx;
}
dual_idx() {
idx = -1;
full_idx = -1;
}
public boolean equals(Object obj) {
if (obj == null)
return false;
if (obj == this)
return true;
if (!(obj instanceof dual_idx))
return false;
// use EqualsBuilder?
dual_idx temp = (dual_idx) obj;
if (idx != temp.idx) {
return false;
}
if (full_idx != temp.full_idx) {
return false;
}
return true;
}
@Override
public String toString() {
return "dual_idx{" +
"idx=" + idx +
", full_idx=" + full_idx +
'}';
}
}
/**
 * Compares one called variant against the store of true variants and records,
 * per haplotype, the indexes of the true variants it matches.
 * Usage pattern (see the calling loop): construct one instance per called
 * variant, invoke compare_variant() once per non-reference allele, then query
 * isHomMatch()/isHetMatch()/isMatch() for the accumulated result.
 */
class result_comparator {
chrST<Variant> _true_store; // true variants
double _overlap_ratio; // minimum reciprocal-overlap / length ratio required for a match
boolean _overlap_complex; // set when any overlapped true variant was originally Complex
int _wiggle; // positional slack in bases allowed when matching
boolean _ignore_ins_len; // if true, insertions match on start position only (length ignored)
// Results to store
// this stores the indexes of the true variants matched
ArrayList<dual_idx> matches_hom = new ArrayList<dual_idx>();
ArrayList<ArrayList<dual_idx>> matches_het = new ArrayList<ArrayList<dual_idx>>(2); // matches either parent
// Convenience constructor: insertion lengths are compared (not ignored).
public result_comparator(chrST<Variant> true_store, double overlap_ratio, int wiggle) {
this(true_store, overlap_ratio, wiggle,false);
}
public result_comparator(chrST<Variant> true_store, double overlap_ratio, int wiggle, boolean ignore_ins_len) {
_true_store = true_store;
_overlap_ratio = overlap_ratio;
_wiggle = wiggle;
// one match list per parent haplotype (index 0 and 1)
matches_het.add(new ArrayList<dual_idx>());
matches_het.add(new ArrayList<dual_idx>());
_overlap_complex = false;
_ignore_ins_len = ignore_ins_len;
}
/**
 * @return the first homozygous match recorded, or a sentinel dual_idx
 * (idx == -1) when there is none.
 */
public dual_idx isHomMatch() {
if (matches_hom.size() > 0) {
return matches_hom.get(0);
}
return new dual_idx();
}
/**
 * @return a heterozygous match: preferably one present on BOTH parent
 * haplotypes, otherwise the first match from whichever parent has more
 * matches; sentinel dual_idx (idx == -1) when neither parent matched.
 */
public dual_idx isHetMatch() {
// intersection of the two per-parent match lists (relies on dual_idx.equals)
ArrayList<dual_idx> temp = new ArrayList<dual_idx>(matches_het.get(0));
temp.retainAll(matches_het.get(1));
if (temp.size() > 0) {
return temp.get(0);
} else if (matches_het.get(0).size() > 0 || matches_het.get(1).size() > 0) {
if (matches_het.get(0).size() > matches_het.get(1).size()) {
return matches_het.get(0).get(0);
} else {
return matches_het.get(1).get(0);
}
}
return new dual_idx();
}
/**
 * Genotype-agnostic match: homozygous match first, then heterozygous;
 * returns a sentinel dual_idx (idx == -1) when neither exists.
 */
public dual_idx isMatch() {
dual_idx idx = isHomMatch();
if (idx.idx >= 0) {
return idx;
}
idx = isHetMatch();
if (idx.idx >= 0) {
return idx;
}
return idx;
}
/**
 * Only compares one allele at a time
 * - don't match variants in the bitset
 * - if match set the bitset
 *
 * @param var variant we want to compare
 * @param geno allele of the variant to compare
 * @param validated BitSet that records the true variants that have already been validated
 * @return The maximum length of all true variants
 */
public int compare_variant(Variant var, int geno, BitSet validated) {
double overlap_ratio = _overlap_ratio;
// consider type to change overlap percent
Variant.Type type = var.getType(geno);
String chr_name = var.getChr_name();
Interval1D orig_inter;
if(type == Variant.Type.Insertion && _ignore_ins_len){
// collapse the insertion to a point interval so only position is compared
orig_inter = new Interval1D(var.position(),var.position());
}else{
orig_inter = var.get_var_interval(geno);
}
int max_true_var_len = 0;
// sometimes MNPs are called as SNPs?
if (type == Variant.Type.SNP) {
// handle SNPs differently
// require SNP content to match
// exact-interval lookup (no wiggle) since SNPs must match positionally
Iterable<Variant> out = _true_store.getAll(chr_name, orig_inter, 0);
byte val = var.getAlt(geno).getSeq()[0];
int num_matches = 0;
if (out != null) {
for (Variant true_var : out) {
boolean has_snp = false;
int idx = true_var.idx;
int full_idx = true_var.full_idx;
if (true_var.original_type == Variant.OverallType.Complex) {
//System.err.println("Overlap complex SNP!");
_overlap_complex = true;
}
if (validated.get(idx)) {
// skip ones already validated
continue;
}
// check genotype
if (true_var.isHom()) {
// position is correct, check genotype
if (true_var.getType(true_var.paternal()) == Variant.Type.SNP
&& var.position() == true_var.position()) {
if (val == true_var.getAlt(true_var.paternal()).getSeq()[0]) {
matches_hom.add(new dual_idx(idx, full_idx));
}
has_snp = true;
}
} else {
// het true variant: check each parent haplotype separately
for (int parent = 0; parent < 2; parent++) {
int allele = true_var.get_allele(parent);
if (allele > 0) {
if (true_var.getType(allele) == Variant.Type.SNP
&& var.position() == true_var.position()) {
if (val == true_var.getAlt(allele).getSeq()[0]) {
matches_het.get(parent).add(new dual_idx(idx, full_idx));
}
has_snp = true;
}
}
}
}
if (has_snp) {
num_matches++;
// a SNP has length 1 by definition
max_true_var_len = 1;
}
}
if (num_matches > 1) {
log.info("Something strange, multiple SNP matches in true set: " + num_matches);
}
}
} else {
// the rest
// widen the query interval by the wiggle on both sides
Interval1D wiggle_inter = new Interval1D(orig_inter.low - _wiggle, orig_inter.high + _wiggle);
Iterable<Variant> out = _true_store.getAll(chr_name, wiggle_inter, 0);
if (out == null) {
// nothing found
return max_true_var_len;
}
for (Variant true_var : out) {
int idx = true_var.idx;
int full_idx = true_var.full_idx;
if (true_var.original_type == Variant.OverallType.Complex) {
_overlap_complex = true;
}
if (validated.get(idx)) {
// skip ones already validated
//System.err.println("Skip..." + idx);
continue;
}
for (int parent = 0; parent < 2; parent++) {
if (true_var.isHom() && parent == 1) {
// hom true variant: only one distinct allele to compare
break;
}
int allele = true_var.get_allele(parent);
if (allele == 0) {
// reference allele
continue;
}
if (type != true_var.getType(allele)) {
// need type to be the same
continue;
}
boolean matched = false;
if(type == Variant.Type.Insertion && _ignore_ins_len){
// this is the case where we want to ignore insertion lengths when comparing
// just do a check of the start position
if(Math.abs(true_var.position() - var.position()) <= _wiggle){
// Matches!
if (true_var.isHom()) {
matches_hom.add(new dual_idx(idx, full_idx));
} else {
matches_het.get(parent).add(new dual_idx(idx, full_idx));
}
matched = true;
}
}else {
// this is the normal case
// check if the variant interval matches
if (orig_inter.intersects(true_var.get_var_interval(allele), overlap_ratio, _wiggle)) {
// it matches an allele!
// now check alternate allele length
int alt_len = var.getAlt(geno).length(); // TODO ignore copy number for now
int true_alt_len = true_var.getAlt(allele).length();
// ratio of true/called alt-allele lengths, folded to <= 1
double ratio = (alt_len > 0) ? (true_alt_len / (double) alt_len) : 1.0;
double min_ratio = Math.min(ratio, 1 / ratio);
if (min_ratio >= overlap_ratio || Math.abs(alt_len - true_alt_len) < _wiggle) {
// yay, it is a match!
if (true_var.isHom()) {
matches_hom.add(new dual_idx(idx, full_idx));
} else {
matches_het.get(parent).add(new dual_idx(idx, full_idx));
}
matched = true;
}
}
}
if(matched){
// track the longest true variant matched so far (returned to caller)
int len = true_var.max_len(allele);
if(len > max_true_var_len){
max_true_var_len = len;
}
}
}
}
}
return max_true_var_len;
}
}
} |
package com.codeborne.selenide;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import java.io.File;
import java.io.FileWriter;
import java.net.MalformedURLException;
import java.net.URL;
import static org.apache.commons.io.FileUtils.copyFile;
public class WebDriverRunner {
public static final String CHROME = "chrome";
public static final String INTERNET_EXPLORER = "ie";
public static final String HTMLUNIT = "htmlunit";
public static final String FIREFOX = "firefox";
/**
* If holdBrowserOpen is true, browser window stays open after running tests. It may be useful for debugging.
* Can be configured either programmatically or by system property "-Dselenide.holdBrowserOpen=true".
* <p/>
* Default value: false.
*/
public static boolean holdBrowserOpen = Boolean.getBoolean("selenide.holdBrowserOpen");
/**
* Which browser to use.
* Can be configured either programmatically or by system property "-Dbrowser=ie".
* Supported values: "chrome", "firefox", "ie", "htmlunit"
* <p/>
* Default value: "firefox"
*/
public static String browser = System.getProperty("browser", "firefox");
/**
* URL of remote web driver (in case of using Selenium Grid).
* Can be configured either programmatically or by system property "-Dremote=true".
*
* Default value: null (Grid is not used).
*/
public static String remote = System.getProperty("remote");
/**
* Value of "chrome.switches" parameter (in case of using Chrome driver).
* Can be configured either programmatically or by system property "-Dchrome.switches=--start-maximized".
* Default value: "--start-maximized"
*/
public static String chromeSwitches = System.getProperty("chrome.switches", "--start-maximized");
/**
* Folder to store screenshots to.
* Can be configured either programmatically or by system property "-Dselenide.reports=true".
*
* Default value: "build/reports/tests" (this is default for Gradle projects)
*/
public static String reportsFolder = System.getProperty("selenide.reports", "build/reports/tests");
private static WebDriver webdriver;
static {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
closeWebDriver();
}
});
}
public static WebDriver getWebDriver() {
if (webdriver == null) {
webdriver = createDriver();
}
return webdriver;
}
public static void closeWebDriver() {
if (webdriver != null) {
if (!holdBrowserOpen) {
webdriver.close();
}
webdriver = null;
}
}
public static boolean ie() {
return webdriver != null && webdriver instanceof InternetExplorerDriver;
}
public static void clearBrowserCache() {
if (webdriver != null) {
webdriver.manage().deleteAllCookies();
}
}
public static String takeScreenShot(String fileName) {
if (webdriver == null) {
return null;
} else if (webdriver instanceof TakesScreenshot) {
try {
File scrFile = ((TakesScreenshot) webdriver).getScreenshotAs(OutputType.FILE);
String pageSource = webdriver.getPageSource();
File screenshotFileName = new File(reportsFolder, fileName + ".png");
File htmlFileName = new File(reportsFolder, fileName + ".html");
copyFile(scrFile, screenshotFileName);
IOUtils.write(pageSource, new FileWriter(htmlFileName));
return screenshotFileName.getAbsolutePath();
} catch (Exception e) {
System.err.println(e);
}
} else {
System.err.println("Cannot take screenshot, driver does not support it: " + webdriver);
}
return null;
}
private static WebDriver createDriver() {
if (remote != null) {
return createRemoteDriver(remote, browser);
} else if (CHROME.equalsIgnoreCase(browser)) {
ChromeOptions options = new ChromeOptions();
options.addArguments("chrome.switches", chromeSwitches);
return new ChromeDriver(options);
} else if (INTERNET_EXPLORER.equalsIgnoreCase(browser)) {
DesiredCapabilities ieCapabilities = DesiredCapabilities.internetExplorer();
ieCapabilities.setCapability(InternetExplorerDriver.INTRODUCE_FLAKINESS_BY_IGNORING_SECURITY_DOMAINS, true);
return new InternetExplorerDriver(ieCapabilities);
} else if (HTMLUNIT.equalsIgnoreCase(browser)) {
DesiredCapabilities desiredCapabilities = DesiredCapabilities.htmlUnit();
desiredCapabilities.setCapability(HtmlUnitDriver.INVALIDSELECTIONERROR, true);
desiredCapabilities.setCapability(HtmlUnitDriver.INVALIDXPATHERROR, false);
desiredCapabilities.setJavascriptEnabled(true);
return new HtmlUnitDriver(desiredCapabilities);
} else if (FIREFOX.equalsIgnoreCase(browser)) {
return new FirefoxDriver();
} else {
throw new IllegalArgumentException("Unknown 'browser' parameter: " + browser);
}
}
private static WebDriver createRemoteDriver(String remote, String browser) {
try {
DesiredCapabilities capabilities = new DesiredCapabilities();
capabilities.setBrowserName(browser);
return new RemoteWebDriver(new URL(remote), capabilities);
} catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid 'remote' parameter: " + remote, e);
}
}
static <T> T fail(String message) {
if (webdriver == null) {
Assert.fail(message);
} else {
Assert.fail(message +
", browser.currentUrl=" + webdriver.getCurrentUrl() +
", browser.title=" + webdriver.getTitle()
);
}
return null;
}
} |
package org.exist.xquery;
import junit.framework.TestCase;
import junit.textui.TestRunner;
import org.exist.EXistException;
import org.exist.collections.Collection;
import org.exist.collections.IndexInfo;
import org.exist.collections.triggers.TriggerException;
import org.exist.dom.DocumentImpl;
import org.exist.security.PermissionDeniedException;
import org.exist.security.SecurityManager;
import org.exist.security.xacml.AccessContext;
import org.exist.storage.BrokerPool;
import org.exist.storage.DBBroker;
import org.exist.storage.serializers.Serializer;
import org.exist.storage.txn.TransactionException;
import org.exist.storage.txn.TransactionManager;
import org.exist.storage.txn.Txn;
import org.exist.util.Configuration;
import org.exist.util.LockException;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.CompiledXQuery;
import org.exist.xquery.XQuery;
import org.exist.xquery.XQueryContext;
import org.exist.xquery.value.IntegerValue;
import org.exist.xquery.value.NodeValue;
import org.exist.xquery.value.Sequence;
import org.xml.sax.SAXException;
import org.apache.log4j.BasicConfigurator;
/**
 * JUnit 3 tests for eXist-db's XQuery "update" extension statements
 * (update insert / value / delete / rename / replace).  Each test obtains a
 * DBBroker from the pool started in setUp(), runs update queries against the
 * stored test document, and asserts on the query results.
 */
public class XQueryUpdateTest extends TestCase {
// Allow running the suite directly from the command line.
public static void main(String[] args) {
TestRunner.run(XQueryUpdateTest.class);
}
// Collection under which all test documents are stored.
protected static XmldbURI TEST_COLLECTION = XmldbURI.create(DBBroker.ROOT_COLLECTION + "/test");
// Minimal document the insert tests append <product> elements into.
protected static String TEST_XML =
"<?xml version=\"1.0\"?>" +
"<products/>";
// Document used by testAttrUpdate for repeated attribute-value updates.
protected static String UPDATE_XML =
"<progress total=\"100\" done=\"0\" failed=\"0\" passed=\"0\"/>";
// Number of <product> elements appended by testAppend and dependent tests.
protected final static int ITEMS_TO_APPEND = 500;
// Broker pool for the embedded database; created in setUp(), released in tearDown().
private BrokerPool pool;
// Appends ITEMS_TO_APPEND products via a once-compiled parameterized query,
// then verifies element count and a predicate query.
public void testAppend() {
DBBroker broker = null;
try {
System.out.println("testAppend() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
XQuery xquery = broker.getXQueryService();
String query =
"	declare variable $i external;\n" +
"	update insert\n" +
"		<product id='id{$i}' num='{$i}'>\n" +
"			<description>Description {$i}</description>\n" +
"			<price>{$i + 1.0}</price>\n" +
"			<stock>{$i * 10}</stock>\n" +
"		</product>\n" +
"	into /products";
XQueryContext context = xquery.newContext(AccessContext.TEST);
CompiledXQuery compiled = xquery.compile(context, query);
// re-declare $i and re-run the same compiled query for each item
for (int i = 0; i < ITEMS_TO_APPEND; i++) {
context.declareVariable("i", new Integer(i));
xquery.execute(compiled, null);
}
Sequence seq = xquery.execute("/products", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
Serializer serializer = broker.getSerializer();
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
seq = xquery.execute("//product", null, AccessContext.TEST);
assertEquals(ITEMS_TO_APPEND, seq.getLength());
seq = xquery.execute("//product[price > 0.0]", null, AccessContext.TEST);
assertEquals(ITEMS_TO_APPEND, seq.getLength());
System.out.println("testAppend: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Seeds data via testAppend(), then inserts attributes into existing elements
// and checks that inserting a duplicate attribute replaces the old value.
public void testAppendAttributes() {
testAppend();
DBBroker broker = null;
try {
System.out.println("testAppendAttributes() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
XQuery xquery = broker.getXQueryService();
String query =
"	declare variable $i external;\n" +
"	update insert\n" +
"		attribute name { concat('n', $i) }\n" +
"	into //product[@num = $i]";
XQueryContext context = xquery.newContext(AccessContext.TEST);
CompiledXQuery compiled = xquery.compile(context, query);
for (int i = 0; i < ITEMS_TO_APPEND; i++) {
context.declareVariable("i", new Integer(i));
xquery.execute(compiled, null);
}
Sequence seq = xquery.execute("/products", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
Serializer serializer = broker.getSerializer();
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
seq = xquery.execute("//product", null, AccessContext.TEST);
assertEquals(ITEMS_TO_APPEND, seq.getLength());
seq = xquery.execute("//product[@name = 'n20']", null, AccessContext.TEST);
assertEquals(1, seq.getLength());
// inserting an attribute that already exists should overwrite its value
store(broker, "attribs.xml", "<test attr1='aaa' attr2='bbb'>ccc</test>");
query = "update insert attribute attr1 { 'eee' } into /test";
System.out.println("testing duplicate attribute ...");
xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("/test[@attr1 = 'eee']", null, AccessContext.TEST);
assertEquals(1, seq.getLength());
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
System.out.println("testAppendAttributes: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Inserts one anchor product, then repeatedly inserts new products
// "preceding" it; verifies counts afterwards.
public void testInsertBefore() {
DBBroker broker = null;
try {
System.out.println("testInsertBefore() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
String query =
"	update insert\n" +
"		<product id='original'>\n" +
"			<description>Description</description>\n" +
"			<price>0</price>\n" +
"			<stock>10</stock>\n" +
"		</product>\n" +
"	into /products";
XQuery xquery = broker.getXQueryService();
xquery.execute(query, null, AccessContext.TEST);
query =
"	declare variable $i external;\n" +
"	update insert\n" +
"		<product id='id{$i}'>\n" +
"			<description>Description {$i}</description>\n" +
"			<price>{$i + 1.0}</price>\n" +
"			<stock>{$i * 10}</stock>\n" +
"		</product>\n" +
"	preceding /products/product[1]";
XQueryContext context = xquery.newContext(AccessContext.TEST);
CompiledXQuery compiled = xquery.compile(context, query);
for (int i = 0; i < ITEMS_TO_APPEND; i++) {
context.declareVariable("i", new Integer(i));
xquery.execute(compiled, null);
}
Sequence seq = xquery.execute("/products", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
Serializer serializer = broker.getSerializer();
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
seq = xquery.execute("//product", null, AccessContext.TEST);
// +1 accounts for the original anchor product
assertEquals(ITEMS_TO_APPEND + 1, seq.getLength());
seq = xquery.execute("//product[price > 0.0]", null, AccessContext.TEST);
assertEquals(ITEMS_TO_APPEND, seq.getLength());
System.out.println("testInsertBefore: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Mirror of testInsertBefore using the "following" axis for insertion.
public void testInsertAfter() {
DBBroker broker = null;
try {
System.out.println("testInsertAfter() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
String query =
"	update insert\n" +
"		<product id='original'>\n" +
"			<description>Description</description>\n" +
"			<price>0</price>\n" +
"			<stock>10</stock>\n" +
"		</product>\n" +
"	into /products";
XQuery xquery = broker.getXQueryService();
xquery.execute(query, null, AccessContext.TEST);
query =
"	declare variable $i external;\n" +
"	update insert\n" +
"		<product id='id{$i}'>\n" +
"			<description>Description {$i}</description>\n" +
"			<price>{$i + 1.0}</price>\n" +
"			<stock>{$i * 10}</stock>\n" +
"		</product>\n" +
"	following /products/product[1]";
XQueryContext context = xquery.newContext(AccessContext.TEST);
CompiledXQuery compiled = xquery.compile(context, query);
for (int i = 0; i < ITEMS_TO_APPEND; i++) {
context.declareVariable("i", new Integer(i));
xquery.execute(compiled, null);
}
Sequence seq = xquery.execute("/products", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
Serializer serializer = broker.getSerializer();
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
seq = xquery.execute("//product", null, AccessContext.TEST);
assertEquals(ITEMS_TO_APPEND + 1, seq.getLength());
seq = xquery.execute("//product[price > 0.0]", null, AccessContext.TEST);
assertEquals(ITEMS_TO_APPEND, seq.getLength());
System.out.println("testInsertAfter: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Exercises "update value" on elements, text nodes, attributes, and with a
// sequence replacement value.
public void testUpdate() {
testAppend();
DBBroker broker = null;
try {
System.out.println("testUpdate() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
XQuery xquery = broker.getXQueryService();
String query =
"for $prod in //product return\n" +
"	update value $prod/description\n" +
"	with 'Updated Description'";
Sequence seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product[starts-with(description, 'Updated')]", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
Serializer serializer = broker.getSerializer();
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
query =
"for $prod in //product return\n" +
"	update value $prod/stock/text()\n" +
"	with 400";
seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product[stock = 400]", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
query =
"for $prod in //product return\n" +
"	update value $prod/@num\n" +
"	with xs:int($prod/@num) * 3";
seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("/products", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
seq = xquery.execute("//product[@num = 3]", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
query =
"for $prod in //product return\n" +
"	update value $prod/stock\n" +
"	with (<local>10</local>,<external>1</external>)";
seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("/products", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
seq = xquery.execute("//product/stock/external[. = 1]", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
System.out.println("testUpdate: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Deletes all products with "update delete" and verifies none remain.
public void testRemove() {
testAppend();
DBBroker broker = null;
try {
broker = pool.get(SecurityManager.SYSTEM_USER);
XQuery xquery = broker.getXQueryService();
String query =
"for $prod in //product return\n" +
"	update delete $prod\n";
Sequence seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product", null, AccessContext.TEST);
assertEquals(seq.getLength(), 0);
System.out.println("testRemove: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Renames an element and an attribute with "update rename".
public void testRename() {
testAppend();
DBBroker broker = null;
try {
System.out.println("testUpdate() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
XQuery xquery = broker.getXQueryService();
String query =
"for $prod in //product return\n" +
"	update rename $prod/description as 'desc'\n";
Sequence seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product/desc", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
query =
"for $prod in //product return\n" +
"	update rename $prod/@num as 'count'\n";
seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product/@count", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
System.out.println("testUpdate: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Replaces elements, attributes and text nodes with "update replace".
public void testReplace() {
testAppend();
DBBroker broker = null;
try {
System.out.println("testReplace() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
XQuery xquery = broker.getXQueryService();
String query =
"for $prod in //product return\n" +
"	update replace $prod/description with <desc>An updated description.</desc>\n";
Sequence seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product/desc", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
query =
"for $prod in //product return\n" +
"	update replace $prod/@num with '1'\n";
seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product/@num", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
query =
"for $prod in //product return\n" +
"	update replace $prod/desc/text() with 'A new update'\n";
seq = xquery.execute(query, null, AccessContext.TEST);
seq = xquery.execute("//product[starts-with(desc, 'A new')]", null, AccessContext.TEST);
assertEquals(seq.getLength(), ITEMS_TO_APPEND);
System.out.println("testUpdate: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Repeatedly increments an attribute value via "update value" in a FLWOR
// expression; passes if no exception is thrown.
public void testAttrUpdate() {
DBBroker broker = null;
try {
System.out.println("testAttrUpdate() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
store(broker, "test.xml", UPDATE_XML);
String query =
"let $progress := /progress\n" +
"for $i in 1 to 100\n" +
"let $done := $progress/@done\n" +
"return (\n" +
"	update value $done with xs:int($done + 1),\n" +
"	xs:int(/progress/@done)\n" +
")";
XQuery xquery = broker.getXQueryService();
Sequence result = xquery.execute(query, null, AccessContext.TEST);
System.out.println("testAttrUpdate(): PASSED\n");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Verifies that CDATA content survives insertion (same query executed
// ITEMS_TO_APPEND times with no external variable).
public void testAppendCDATA() {
DBBroker broker = null;
try {
System.out.println("testAppendCDATA() ...\n");
broker = pool.get(SecurityManager.SYSTEM_USER);
XQuery xquery = broker.getXQueryService();
String query =
"	declare variable $i external;\n" +
"	update insert\n" +
"		<product>\n" +
"			<description><![CDATA[me & you <>]]></description>\n" +
"		</product>\n" +
"	into /products";
XQueryContext context = xquery.newContext(AccessContext.TEST);
CompiledXQuery compiled = xquery.compile(context, query);
for (int i = 0; i < ITEMS_TO_APPEND; i++) {
xquery.execute(compiled, null);
}
Sequence seq = xquery.execute("/products", null, AccessContext.TEST);
assertEquals(seq.getLength(), 1);
Serializer serializer = broker.getSerializer();
System.out.println(serializer.serialize((NodeValue) seq.itemAt(0)));
seq = xquery.execute("//product", null, AccessContext.TEST);
assertEquals(ITEMS_TO_APPEND, seq.getLength());
System.out.println("testAppendCDATA: PASS");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Starts the database and stores the base test document before every test.
protected void setUp() throws Exception {
this.pool = startDB();
DBBroker broker = null;
try {
broker = pool.get(SecurityManager.SYSTEM_USER);
store(broker, "test.xml", TEST_XML);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
} finally {
pool.release(broker);
}
}
// Stores a document into the test collection inside a fresh transaction and
// serializes it once as a sanity check.
// NOTE(review): if validation or storing throws, the transaction is neither
// committed nor aborted here - confirm cleanup is handled by the pool/recovery.
private void store(DBBroker broker, String docName, String data) throws PermissionDeniedException, EXistException, TriggerException, SAXException, LockException, TransactionException {
TransactionManager mgr = pool.getTransactionManager();
Txn transaction = mgr.beginTransaction();
System.out.println("Transaction started ...");
Collection root = broker.getOrCreateCollection(transaction, TEST_COLLECTION);
broker.saveCollection(transaction, root);
IndexInfo info = root.validateXMLResource(transaction, broker, XmldbURI.create(docName), data);
root.store(transaction, broker, info, data, false);
mgr.commit(transaction);
DocumentImpl doc = root.getDocument(broker, XmldbURI.create(docName));
broker.getSerializer().serialize(doc);
}
// Configures and starts the embedded broker pool from conf.xml, looked up
// relative to exist.home (falling back to the working directory).
// Returns null only after fail() has already been called.
protected BrokerPool startDB() {
String home, file = "conf.xml";
home = System.getProperty("exist.home");
if (home == null)
home = System.getProperty("user.dir");
try {
Configuration config = new Configuration(file, home);
BrokerPool.configure(1, 5, config);
return BrokerPool.getInstance();
} catch (Exception e) {
fail(e.getMessage());
}
return null;
}
// Shuts down all broker pools after each test.
protected void tearDown() {
pool = null;
try {
BrokerPool.stopAll(false);
} catch (Exception e) {
fail(e.getMessage());
}
}
}
package com.codeborne.selenide;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import static org.apache.commons.io.FileUtils.copyFile;
public class WebDriverRunner {
public static final String CHROME = "chrome";
public static final String INTERNET_EXPLORER = "ie";
public static final String HTMLUNIT = "htmlunit";
public static final String FIREFOX = "firefox";
/**
* If holdBrowserOpen is true, browser window stays open after running tests. It may be useful for debugging.
* Can be configured either programmatically or by system property "-Dselenide.holdBrowserOpen=true".
* <p/>
* Default value: false.
*/
public static boolean holdBrowserOpen = Boolean.getBoolean("selenide.holdBrowserOpen");
/**
* Which browser to use.
* Can be configured either programmatically or by system property "-Dbrowser=ie".
* Supported values: "chrome", "firefox", "ie", "htmlunit"
* <p/>
* Default value: "firefox"
*/
public static String browser = System.getProperty("browser", "firefox");
/**
* URL of remote web driver (in case of using Selenium Grid).
* Can be configured either programmatically or by system property "-Dremote=true".
*
* Default value: null (Grid is not used).
*/
public static String remote = System.getProperty("remote");
/**
* Value of "chrome.switches" parameter (in case of using Chrome driver).
* Can be configured either programmatically or by system property "-Dchrome.switches=--start-maximized".
* Default value: "--start-maximized"
*/
public static String chromeSwitches = System.getProperty("chrome.switches", "--start-maximized");
/**
* Folder to store screenshots to.
* Can be configured either programmatically or by system property "-Dselenide.reports=true".
*
* Default value: "build/reports/tests" (this is default for Gradle projects)
*/
public static String reportsFolder = System.getProperty("selenide.reports", "build/reports/tests");
private static WebDriver webdriver;
static {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
closeWebDriver();
}
});
}
public static WebDriver getWebDriver() {
if (webdriver == null) {
webdriver = createDriver();
}
return webdriver;
}
public static void closeWebDriver() {
if (webdriver != null) {
if (!holdBrowserOpen) {
webdriver.close();
}
webdriver = null;
}
}
public static boolean ie() {
return webdriver != null && webdriver instanceof InternetExplorerDriver;
}
public static void clearBrowserCache() {
if (webdriver != null) {
webdriver.manage().deleteAllCookies();
}
}
public static String takeScreenShot(String fileName) {
if (webdriver == null) {
return null;
} else if (webdriver instanceof TakesScreenshot) {
try {
File scrFile = ((TakesScreenshot) webdriver).getScreenshotAs(OutputType.FILE);
String pageSource = webdriver.getPageSource();
File screenshotFileName = new File(reportsFolder, fileName + ".png");
File htmlFileName = new File(reportsFolder, fileName + ".html");
copyFile(scrFile, screenshotFileName);
writeToFile(pageSource, htmlFileName);
return screenshotFileName.getAbsolutePath();
} catch (Exception e) {
System.err.println(e);
}
} else {
System.err.println("Cannot take screenshot, driver does not support it: " + webdriver);
}
return null;
}
private static void writeToFile(String content, File fileName) throws IOException {
FileWriter output = new FileWriter(fileName);
try {
IOUtils.write(content, output);
}
finally {
output.close();
}
}
private static WebDriver createDriver() {
if (remote != null) {
return createRemoteDriver(remote, browser);
} else if (CHROME.equalsIgnoreCase(browser)) {
ChromeOptions options = new ChromeOptions();
options.addArguments("chrome.switches", chromeSwitches);
return new ChromeDriver(options);
} else if (INTERNET_EXPLORER.equalsIgnoreCase(browser)) {
DesiredCapabilities ieCapabilities = DesiredCapabilities.internetExplorer();
ieCapabilities.setCapability(InternetExplorerDriver.INTRODUCE_FLAKINESS_BY_IGNORING_SECURITY_DOMAINS, true);
return new InternetExplorerDriver(ieCapabilities);
} else if (HTMLUNIT.equalsIgnoreCase(browser)) {
DesiredCapabilities desiredCapabilities = DesiredCapabilities.htmlUnit();
desiredCapabilities.setCapability(HtmlUnitDriver.INVALIDSELECTIONERROR, true);
desiredCapabilities.setCapability(HtmlUnitDriver.INVALIDXPATHERROR, false);
desiredCapabilities.setJavascriptEnabled(true);
return new HtmlUnitDriver(desiredCapabilities);
} else if (FIREFOX.equalsIgnoreCase(browser)) {
return new FirefoxDriver();
} else {
throw new IllegalArgumentException("Unknown 'browser' parameter: " + browser);
}
}
private static WebDriver createRemoteDriver(String remote, String browser) {
try {
DesiredCapabilities capabilities = new DesiredCapabilities();
capabilities.setBrowserName(browser);
return new RemoteWebDriver(new URL(remote), capabilities);
} catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid 'remote' parameter: " + remote, e);
}
}
static <T> T fail(String message) {
if (webdriver == null) {
Assert.fail(message);
} else {
Assert.fail(message +
", browser.currentUrl=" + webdriver.getCurrentUrl() +
", browser.title=" + webdriver.getTitle()
);
}
return null;
}
} |
/*
* $Id: TestPollManager.java,v 1.61 2004-01-31 22:56:09 tlipkis Exp $
*/
package org.lockss.poller;
import java.io.*;
import java.net.*;
import java.security.*;
import java.util.*;
import org.lockss.daemon.*;
import org.lockss.plugin.*;
import org.lockss.protocol.*;
import org.lockss.util.*;
import org.lockss.test.*;
import org.lockss.repository.*;
/** JUnitTest case for class: org.lockss.poller.PollManager */
public class TestPollManager extends LockssTestCase {

  // NOTE(review): the URL string literals below are truncated/unterminated in
  // this copy of the file (extraction damage) -- restore the full URLs from
  // the upstream source before compiling.
  private static String[] rooturls = {"http:
  "http:
  "http:
  private static String urlstr = "http:
  private static String lwrbnd = "test1.doc";
  private static String uprbnd = "test3.doc";
  private static long testduration = Constants.HOUR;

  // Name-list entries shared by every generated poll request message.
  private static ArrayList testentries = (ArrayList)ListUtil.list(
      new PollTally.NameListEntry(true,"test1.doc"),
      new PollTally.NameListEntry(true,"test2.doc"),
      new PollTally.NameListEntry(true,"test3.doc"));

  protected static ArchivalUnit testau;
  private MockLockssDaemon theDaemon;
  protected IPAddr testaddr;
  protected LcapIdentity testID;
  // One request message per poll type: [0]=name, [1]=content, [2]=verify
  // (see testMakePoll and the NAME_POLL_REQ + (i * 2) opcode in initTestMsg).
  protected LcapMessage[] testmsg;
  protected PollManager pollmanager;

  /** Starts simulated time, the required daemon services, and test fixtures. */
  protected void setUp() throws Exception {
    super.setUp();
    TimeBase.setSimulated();
    initRequiredServices();
    initTestAddr();
    initTestMsg();
  }

  /** Stops the services started in setUp (reverse of initRequiredServices). */
  public void tearDown() throws Exception {
    pollmanager.stopService();
    theDaemon.getLockssRepository(testau).stopService();
    theDaemon.getHashService().stopService();
    theDaemon.getRouterManager().stopService();
    TimeBase.setReal();
    super.tearDown();
  }

  /** test for method makePoll(..) */
  public void testMakePoll() throws Exception {
    // make a name poll
    BasePoll p1 = pollmanager.makePoll(testmsg[0]);
    // make sure we got the right type of poll here
    assertTrue(p1 instanceof V1NamePoll);
    // make a content poll
    BasePoll p2 = pollmanager.makePoll(testmsg[1]);
    // make sure we got the right type of poll here
    assertTrue(p2 instanceof V1ContentPoll);
    // make a verify poll
    BasePoll p3 = pollmanager.makePoll(testmsg[2]);
    // make sure we got the right type of poll here
    assertTrue(p3 instanceof V1VerifyPoll);
  }

  /** makePoll(..) must refuse requests whose plugin version doesn't match. */
  public void testMakePollDoesntIfPluginMismatch() throws Exception {
    // Make a string that's different from the plugin's version
    String bogus = testau.getPlugin().getVersion() + "cruft";
    // make a name poll witha bogus plugin version
    MockPollSpec spec =
      new MockPollSpec(testau, urlstr, lwrbnd, uprbnd);
    spec.setPluginVersion(bogus);
    LcapMessage msg1 =
      LcapMessage.makeRequestMsg(spec,
                                 testentries,
                                 pollmanager.generateRandomBytes(),
                                 pollmanager.generateRandomBytes(),
                                 LcapMessage.NAME_POLL_REQ,
                                 testduration,
                                 testID);
    BasePoll p1 = pollmanager.makePoll(msg1);
    assertNull("Shouldn't create poll with plugin version mismatch", p1);
    // make a content poll witha bogus plugin version
    LcapMessage msg2 =
      LcapMessage.makeRequestMsg(spec,
                                 testentries,
                                 pollmanager.generateRandomBytes(),
                                 pollmanager.generateRandomBytes(),
                                 LcapMessage.CONTENT_POLL_REQ,
                                 testduration,
                                 testID);
    BasePoll p2 = pollmanager.makePoll(msg2);
    assertNull("Shouldn't create poll with plugin version mismatch", p2);
  }

  /** test for method makePollRequest(..) */
  public void testMakePollRequest() throws Exception {
    try {
      CachedUrlSet cus = null;
      Plugin plugin = testau.getPlugin();
      cus = plugin.makeCachedUrlSet(testau,
                                    new RangeCachedUrlSetSpec(rooturls[1]));
      PollSpec spec = new PollSpec(cus, lwrbnd, uprbnd);
      pollmanager.sendPollRequest(LcapMessage.VERIFY_POLL_REQ, spec);
    }
    catch (IllegalStateException e) {
      // ignore this for now
    }
  }

  /** test for method findPoll(..) */
  public void testFindPoll() {
    // lets see if we can find our name poll
    try {
      BasePoll p1 = pollmanager.makePoll(testmsg[0]);
      BasePoll p2 = pollmanager.findPoll(testmsg[0]);
      assertEquals(p1, p2);
    }
    catch (IOException ex) {
      fail("name poll couldn't be found");
    }
  }

  /** test for method removePoll(..) */
  public void testRemovePoll() {
    try {
      BasePoll p1 = pollmanager.makePoll(testmsg[0]);
      assertNotNull(p1);
      BasePoll p2 = pollmanager.removePoll(p1.m_key);
      assertEquals(p1, p2);
    }
    catch (IOException ex) {
      fail("name poll couldn't be found");
    }
  }

  /** test for method checkForConflicts(..) */
  public void testCheckForConflicts() throws Exception {
    // lets try to run two content polls in the same location
    // sameroot[i] targets the same URL range as testmsg but via urlstr,
    // so it should conflict with a running poll on that range.
    LcapMessage[] sameroot = new LcapMessage[3];
    for(int i= 0; i<3; i++) {
      PollSpec spec =
        new MockPollSpec(testau, urlstr, lwrbnd, uprbnd);
      sameroot[i] =
        LcapMessage.makeRequestMsg(spec,
                                   testentries,
                                   pollmanager.generateRandomBytes(),
                                   pollmanager.generateRandomBytes(),
                                   LcapMessage.NAME_POLL_REQ + (i * 2),
                                   testduration,
                                   testID);
    }
    // check content poll conflicts
    BasePoll c1 = pollmanager.makePoll(sameroot[1]);
    // differnt content poll should be ok
    CachedUrlSet cus =
      pollmanager.checkForConflicts(testmsg[1],
                                    makeCachedUrlSet(testmsg[1]));
    assertNull("different content poll s/b ok", cus);
    // same content poll same range s/b a conflict
    cus = pollmanager.checkForConflicts(sameroot[1],
                                        makeCachedUrlSet(sameroot[1]));
    assertNotNull("same content poll root s/b conflict", cus);
    // different name poll should be ok
    cus = pollmanager.checkForConflicts(testmsg[0],
                                        makeCachedUrlSet(testmsg[0]));
    assertNull("name poll with different root s/b ok", cus);
    // same name poll s/b conflict
    cus = pollmanager.checkForConflicts(sameroot[0],
                                        makeCachedUrlSet(sameroot[0]));
    assertNotNull("same name poll root s/b conflict", cus);
    // verify poll should be ok
    cus = pollmanager.checkForConflicts(testmsg[2],
                                        makeCachedUrlSet(testmsg[2]));
    assertNull("verify poll s/b ok", cus);
    // remove the poll
    pollmanager.removePoll(c1.m_key);
  }

  /** test for method closeThePoll(..) */
  public void testCloseThePoll() throws Exception {
    BasePoll p1 = pollmanager.makePoll(testmsg[0]);
    // we should now be active
    assertTrue(pollmanager.isPollActive(p1.m_key));
    // we should not be closed
    assertFalse(pollmanager.isPollClosed(p1.m_key));
    pollmanager.closeThePoll(p1.m_key);
    // we should not be active
    assertFalse(pollmanager.isPollActive(p1.m_key));
    // we should now be closed
    assertTrue(pollmanager.isPollClosed(p1.m_key));
    // we should reject an attempt to handle a packet with this key
    pollmanager.handleIncomingMessage(testmsg[0]);
    assertTrue(pollmanager.isPollClosed(p1.m_key));
    assertFalse(pollmanager.isPollActive(p1.m_key));
    // closing an already-closed poll must be harmless
    pollmanager.closeThePoll(p1.m_key);
  }

  /** test for method suspendPoll(...) */
  public void testSuspendPoll() throws Exception {
    BasePoll p1 = null;
    p1 = TestPoll.createCompletedPoll(theDaemon, testau, testmsg[0], 7, 2);
    pollmanager.addPoll(p1);
    // give it a pointless lock to avoid a null pointer
    p1.getVoteTally().
      setActivityLock(theDaemon.getActivityRegulator(testau).
                      getAuActivityLock(-1, 123));
    // check our suspend
    pollmanager.suspendPoll(p1.m_key);
    assertTrue(pollmanager.isPollSuspended(p1.m_key));
    assertFalse(pollmanager.isPollClosed(p1.m_key));
    // now we resume...
    pollmanager.resumePoll(false, p1.m_key);
    assertFalse(pollmanager.isPollSuspended(p1.m_key));
  }

  /** test for method getHasher(..) */
  public void testGetHasher() {
    MessageDigest md = pollmanager.getHasher(null);
    assertNotNull(md);
  }

  /** test for method makeVerifier(..) */
  public void testMakeVerifier() {
    // test for make verifier - this will also store the verify/secret pair
    byte[] verifier = pollmanager.makeVerifier(10000);
    assertNotNull("unable to make and store a verifier", verifier);
    // retrieve our secret
    byte[] secret = pollmanager.getSecret(verifier);
    assertNotNull("unable to retrieve secret for verifier", secret);
    // confirm that the verifier is the hash of the secret
    MessageDigest md = pollmanager.getHasher(null);
    md.update(secret, 0, secret.length);
    byte[] verifier_check = md.digest();
    assertTrue("secret does not match verifier",
               Arrays.equals(verifier, verifier_check));
  }

  /** Sanity-check of the MockPollManager test double itself. */
  public void testMockPollManager() {
    // This ensures that MockPollManager.canHashBeScheduledBefore() does
    // what I intended
    MockPollManager mpm = new MockPollManager();
    mpm.setMinPollDeadline(Deadline.in(1000));
    assertFalse(mpm.canHashBeScheduledBefore(100, Deadline.in(0)));
    assertTrue(mpm.canHashBeScheduledBefore(100, Deadline.in(1000)));
    assertTrue(mpm.canHashBeScheduledBefore(100, Deadline.in(1001)));
  }

  public void testCanSchedulePoll() {
    MockPollManager mpm = new MockPollManager();
    // Accept polls that finish no earlier than this
    mpm.setMinPollDeadline(Deadline.in(1000));
    // this one can't
    assertFalse(mpm.canSchedulePoll(500, 100));
    // can
    assertTrue(mpm.canSchedulePoll(2000, 100));
    // neededTime > duration
    assertFalse(mpm.canSchedulePoll(500, 600));
  }

  /** Installs the poll-timing parameters assumed by testCalcDuration. */
  void configPollTimes() {
    Properties p = new Properties();
    addRequiredConfig(p);
    p.setProperty(PollManager.PARAM_NAMEPOLL_DEADLINE, "10000");
    p.setProperty(PollManager.PARAM_CONTENTPOLL_MIN, "1000");
    p.setProperty(PollManager.PARAM_CONTENTPOLL_MAX, "4100");
    p.setProperty(PollManager.PARAM_QUORUM, "5");
    p.setProperty(PollManager.PARAM_DURATION_MULTIPLIER_MIN, "3");
    p.setProperty(PollManager.PARAM_DURATION_MULTIPLIER_MAX, "7");
    p.setProperty(PollManager.PARAM_NAME_HASH_ESTIMATE, "1s");
    ConfigurationUtil.setCurrentConfigFromProps(p);
  }

  /** Checks calcDuration against the values configured in configPollTimes. */
  public void testCalcDuration() {
    MockCachedUrlSet mcus =
      new MockCachedUrlSet((MockArchivalUnit)testau,
                           new RangeCachedUrlSetSpec("", "", ""));
    PollSpec ps = new PollSpec(mcus);
    MockPollManager mpm = new MockPollManager();
    configPollTimes();
    mpm.setBytesPerMsHashEstimate(100);
    mpm.setSlowestHashSpeed(100);
    mcus.setEstimatedHashDuration(100);
    mpm.setMinPollDeadline(Deadline.in(1000));
    assertEquals(1800, mpm.calcDuration(LcapMessage.CONTENT_POLL_REQ, mcus));
    mpm.setMinPollDeadline(Deadline.in(2000));
    assertEquals(2400, mpm.calcDuration(LcapMessage.CONTENT_POLL_REQ, mcus));
    // this one should be limited by max content poll
    mpm.setMinPollDeadline(Deadline.in(4000));
    assertEquals(4100, mpm.calcDuration(LcapMessage.CONTENT_POLL_REQ, mcus));
    mpm.setMinPollDeadline(Deadline.in(5000));
    assertEquals(-1, mpm.calcDuration(LcapMessage.CONTENT_POLL_REQ, mcus));
    // calulated poll time will be less than min, should be adjusted up to min
    mcus.setEstimatedHashDuration(10);
    mpm.setMinPollDeadline(Deadline.in(100));
    assertEquals(1000, mpm.calcDuration(LcapMessage.CONTENT_POLL_REQ, mcus));
    // name poll duration is randomized so less predictable, but should
    // always be between min and max.
    long ndur = mpm.calcDuration(LcapMessage.NAME_POLL_REQ, mcus);
    assertTrue(ndur >= mpm.m_minNamePollDuration);
    assertTrue(ndur <= mpm.m_maxNamePollDuration);
  }

  /**
   * PollManager test double: hash-scheduling answers are driven by settable
   * fields instead of the real hash service.
   */
  static class MockPollManager extends PollManager {
    long bytesPerMsHashEstimate = 0;
    long slowestHashSpeed = 0;
    Deadline minPollDeadline = Deadline.EXPIRED;

    // A hash can be scheduled iff the requested deadline is not before
    // the configured minimum deadline.
    boolean canHashBeScheduledBefore(long duration, Deadline when) {
      return !when.before(minPollDeadline);
    }
    void setMinPollDeadline(Deadline when) {
      minPollDeadline = when;
    }
    long getSlowestHashSpeed() {
      return slowestHashSpeed;
    }
    void setSlowestHashSpeed(long speed) {
      slowestHashSpeed = speed;
    }
    long getBytesPerMsHashEstimate() {
      return bytesPerMsHashEstimate;
    }
    void setBytesPerMsHashEstimate(long est) {
      bytesPerMsHashEstimate = est;
    }
  }

  /** Creates the daemon, the test AU, the config, and starts all services. */
  private void initRequiredServices() {
    theDaemon = new MockLockssDaemon();
    pollmanager = theDaemon.getPollManager();
    theDaemon.getPluginManager();
    testau = PollTestPlugin.PTArchivalUnit.createFromListOfRootUrls(rooturls);
    ((MockArchivalUnit)testau).setPlugin(new MyMockPlugin());
    PluginUtil.registerArchivalUnit(testau);
    Properties p = new Properties();
    addRequiredConfig(p);
    ConfigurationUtil.setCurrentConfigFromProps(p);
    theDaemon.getSchedService().startService();
    theDaemon.getHashService().startService();
    theDaemon.getRouterManager().startService();
    theDaemon.getActivityRegulator(testau).startService();
    theDaemon.setNodeManager(new MockNodeManager(), testau);
    pollmanager.startService();
  }

  /** Adds the minimal config (temp dirs, local identity) every test needs. */
  private void addRequiredConfig(Properties p) {
    String tempDirPath = null;
    try {
      tempDirPath = getTempDir().getAbsolutePath() + File.separator;
    }
    catch (IOException ex) {
      fail("unable to create a temporary directory");
    }
    p.setProperty(IdentityManager.PARAM_IDDB_DIR, tempDirPath + "iddb");
    p.setProperty(LockssRepositoryImpl.PARAM_CACHE_LOCATION, tempDirPath);
    p.setProperty(IdentityManager.PARAM_LOCAL_IP, "127.0.0.1");
  }

  /** Resolves the loopback test identity used as the poll requester. */
  private void initTestAddr() {
    try {
      testaddr = IPAddr.getByName("127.0.0.1");
      testID = theDaemon.getIdentityManager().findIdentity(testaddr);
    }
    catch (UnknownHostException ex) {
      fail("can't open test host");
    }
  }

  /** Builds one request message per poll type (name/content/verify). */
  private void initTestMsg() throws Exception {
    testmsg = new LcapMessage[3];
    for(int i= 0; i<3; i++) {
      PollSpec spec = new MockPollSpec(testau, rooturls[i], lwrbnd, uprbnd);
      testmsg[i] =
        LcapMessage.makeRequestMsg(spec,
                                   testentries,
                                   pollmanager.generateRandomBytes(),
                                   pollmanager.generateRandomBytes(),
                                   // opcode stride of 2 selects name, content,
                                   // then verify poll requests
                                   LcapMessage.NAME_POLL_REQ + (i * 2),
                                   testduration,
                                   testID);
    }
  }

  /** Looks up the CachedUrlSet for a message's poll spec; null on any error. */
  private CachedUrlSet makeCachedUrlSet(LcapMessage msg) {
    try {
      PollSpec ps = new PollSpec(msg);
      return theDaemon.getPluginManager().findCachedUrlSet(ps);
    }
    catch (Exception ex) {
      return null;
    }
  }

  /** Plugin whose CachedUrlSets are the poll-test variants. */
  public class MyMockPlugin extends MockPlugin {
    public CachedUrlSet makeCachedUrlSet(ArchivalUnit owner,
                                         CachedUrlSetSpec cuss) {
      return new PollTestPlugin.PTCachedUrlSet((MockArchivalUnit)owner, cuss);
    }
  }

  /** Executes the test case
   * @param argv array of Strings containing command line arguments
   * */
  public static void main(String[] argv) {
    String[] testCaseList = {TestPollManager.class.getName()};
    junit.swingui.TestRunner.main(testCaseList);
  }
}
package org.lockss.test;
import java.io.IOException;
import java.util.*;
import org.lockss.config.*;
import org.lockss.util.*;
/** Utilities for Configuration and ConfigManager
*/
public class ConfigurationUtil {
public static Logger log = Logger.getLogger("ConfigUtil");
private static ConfigManager mgr() {
return ConfigManager.getConfigManager();
}
/** Read a Configuration from a file.
*/
public static Configuration fromFile(String f) throws IOException {
return mgr().readConfig(ListUtil.list(f));
}
/** Create a Configuration from the supplied string.
*/
public static Configuration fromString(String s)
throws IOException {
List l = ListUtil.list(FileTestUtil.urlOfString(s));
return mgr().readConfig(l);
}
/** Create a Configuration from the supplied Properties.
*/
public static Configuration fromProps(Properties props) {
PropertyTree tree = new PropertyTree(props);
try {
return (Configuration)PrivilegedAccessor.
invokeConstructor("org.lockss.config.ConfigurationPropTreeImpl", tree);
} catch (ClassNotFoundException e) {
// because I don't want to change all the callers of this
throw new RuntimeException(e.toString());
} catch (NoSuchMethodException e) {
// because I don't want to change all the callers of this
throw new RuntimeException(e.toString());
} catch (IllegalAccessException e) {
// because I don't want to change all the callers of this
throw new RuntimeException(e.toString());
} catch (java.lang.reflect.InvocationTargetException e) {
// because I don't want to change all the callers of this
throw new RuntimeException(e.toString());
} catch (InstantiationException e) {
// because I don't want to change all the callers of this
throw new RuntimeException(e.toString());
}
}
/** Return a Configuration that's the union of the two Configurations
*/
public static Configuration merge(Configuration c1, Configuration c2) {
Configuration res = c1.copy();
for (Iterator iter = c2.keyIterator(); iter.hasNext(); ) {
String key = (String)iter.next();
res.put(key, c2.get(key));
}
return res;
}
/** Create a Configuration from the contents of the URLs in the list
*/
public static Configuration fromUrlList(List l) throws IOException {
return mgr().readConfig(l);
}
/** Create a Configuration with a single param set to the specified
* value.
*/
public static Configuration fromArgs(String prop, String val) {
Properties props = new Properties();
props.put(prop, val);
return fromProps(props);
}
/** Create a Configuration with two params set to the specified
* values.
*/
public static Configuration fromArgs(String prop1, String val1,
String prop2, String val2) {
Properties props = new Properties();
props.put(prop1, val1);
props.put(prop2, val2);
return fromProps(props);
}
/** Create a Configuration with three params set to the specified
* values.
*/
public static Configuration fromArgs(String prop1, String val1,
String prop2, String val2,
String prop3, String val3) {
// JAVA5: merge fromArgs variants into fromArgs(String...) ?
Properties props = new Properties();
props.put(prop1, val1);
props.put(prop2, val2);
props.put(prop3, val3);
return fromProps(props);
}
/** Create a Configuration with four params set to the specified
* values.
*/
public static Configuration fromArgs(String prop1, String val1,
String prop2, String val2,
String prop3, String val3,
String prop4, String val4) {
// JAVA5: merge fromArgs variants into fromArgs(String...) ?
Properties props = new Properties();
props.put(prop1, val1);
props.put(prop2, val2);
props.put(prop3, val3);
props.put(prop4, val4);
return fromProps(props);
}
/** Create a Configuration from the supplied property list and install
* it as the current configuration.
*/
public static boolean setCurrentConfigFromProps(Properties props) {
return installConfig(fromProps(props));
}
/** Create a Configuration from the contents of the URLs in the list and
* install it as the current configuration.
*/
public static boolean setCurrentConfigFromUrlList(List l)
throws IOException {
return installConfig(fromUrlList(l));
}
/** Create a Configuration from the supplied string and install it as the
* current configuration.
*/
public static boolean setCurrentConfigFromString(String s)
throws IOException {
return installConfig(fromString(s));
}
/** Create a Configuration with a single param set to the specified
* value, and install it as the current configuration.
*/
public static boolean setFromArgs(String prop, String val) {
return installConfig(fromArgs(prop, val));
}
/** Create a Configuration with two params set to the specified
* values, and install it as the current configuration.
*/
public static boolean setFromArgs(String prop1, String val1,
String prop2, String val2) {
return installConfig(fromArgs(prop1, val1, prop2, val2));
}
/** Add the values to the current config
*/
public static boolean addFromProps(Properties props) {
return installConfig(merge(CurrentConfig.getCurrentConfig(),
fromProps(props)));
}
/** Add the value to the current config
*/
public static boolean addFromArgs(String prop, String val) {
return installConfig(merge(CurrentConfig.getCurrentConfig(),
fromArgs(prop, val)));
}
/** Add two values to the current config
*/
public static boolean addFromArgs(String prop1, String val1,
String prop2, String val2) {
return installConfig(merge(CurrentConfig.getCurrentConfig(),
fromArgs(prop1, val1, prop2, val2)));
}
/** Add three values to the current config
*/
public static boolean addFromArgs(String prop1, String val1,
String prop2, String val2,
String prop3, String val3) {
// JAVA5: merge addFromArgs variants into addFromArgs(String...) ?
return installConfig(merge(CurrentConfig.getCurrentConfig(),
fromArgs(prop1, val1, prop2, val2, prop3, val3)));
}
/** Add four values to the current config
*/
public static boolean addFromArgs(String prop1, String val1,
String prop2, String val2,
String prop3, String val3,
String prop4, String val4) {
// JAVA5: merge addFromArgs variants into addFromArgs(String...) ?
return installConfig(merge(CurrentConfig.getCurrentConfig(),
fromArgs(prop1, val1, prop2, val2,
prop3, val3, prop4, val4)));
}
/** Install the supplied Configuration as the current configuration.
*/
public static boolean installConfig(Configuration config) {
MemoryConfigFile cf = new MemoryConfigFile("foo", config, 1);
try {
PrivilegedAccessor.invokeMethod(mgr(), "updateConfig",
ListUtil.list(cf));
} catch (Exception e) {
// throw new RuntimeException(e.toString());
throw new RuntimeException(StringUtil.stackTraceString(e));
}
return true;
}
} |
package com.ecwid.consul.v1;
import com.ecwid.consul.ConsulException;
import com.ecwid.consul.transport.RawResponse;
/**
* @author Vasily Vasilkov (vgv@ecwid.com)
*/
public final class OperationException extends ConsulException {
private final int statusCode;
private final String statusMessage;
public OperationException(int statusCode, String statusMessage, String content) {
super(content);
this.statusCode = statusCode;
this.statusMessage = statusMessage;
}
public OperationException(RawResponse rawResponse) {
this(rawResponse.getStatusCode(), rawResponse.getStatusMessage(), rawResponse.getContent());
}
public int getStatusCode() {
return statusCode;
}
public String getStatusMessage() {
return statusMessage;
}
@Override
public String toString() {
return super.toString() + " {"
+ "statusCode=" + statusCode
+ ", statusMessage='" + statusMessage
+ "}";
}
} |
package com.enderio.core.common;
import java.awt.Point;
import java.util.Map;
import javax.annotation.Nullable;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.inventory.Container;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
import com.google.common.collect.Maps;
public class ContainerEnder<T extends IInventory> extends Container {

  // Screen position of every player-inventory slot, keyed by slot,
  // in insertion order (used by subclasses/GUIs to look up slot locations).
  protected Map<Slot, Point> playerSlotLocations = Maps.newLinkedHashMap();

  // Index ranges within inventorySlots for the player's main inventory and
  // hotbar. start* is inclusive, end* is exclusive.
  protected final int startPlayerSlot;
  protected final int endPlayerSlot;
  protected final int startHotBarSlot;
  protected final int endHotBarSlot;

  private T inv;
  private InventoryPlayer playerInv;

  /**
   * Builds the container: subclass slots first (via addSlots), then the
   * player's 3x9 main inventory grid, then the 9-slot hotbar, all laid out
   * from getPlayerInventoryOffset() with the standard 18px slot pitch.
   *
   * @param playerInv the player's inventory
   * @param inv       the backing (machine) inventory, may be null
   */
  public ContainerEnder(InventoryPlayer playerInv, @Nullable T inv) {
    this.inv = inv;
    this.playerInv = playerInv;
    // Subclass slots are added first, so they occupy the low slot indices;
    // the player slots appended below start after them.
    addSlots(playerInv);
    int x = getPlayerInventoryOffset().x;
    int y = getPlayerInventoryOffset().y;
    // add players inventory (3 rows x 9 cols; player inventory indices 9..35)
    startPlayerSlot = inventorySlots.size();
    for (int i = 0; i < 3; ++i) {
      for (int j = 0; j < 9; ++j) {
        Point loc = new Point(x + j * 18, y + i * 18);
        Slot slot = new Slot(playerInv, j + i * 9 + 9, loc.x, loc.y);
        addSlotToContainer(slot);
        playerSlotLocations.put(slot, loc);
      }
    }
    endPlayerSlot = inventorySlots.size();
    // hotbar row (player inventory indices 0..8), 58px below the grid origin
    startHotBarSlot = inventorySlots.size();
    for (int i = 0; i < 9; ++i) {
      Point loc = new Point(x + i * 18, y + 58);
      Slot slot = new Slot(playerInv, i, loc.x, loc.y);
      addSlotToContainer(slot);
      playerSlotLocations.put(slot, loc);
    }
    endHotBarSlot = inventorySlots.size();
  }

  /** Hook for subclasses to add their own slots before the player slots. */
  protected void addSlots(InventoryPlayer playerInv) {
  }

  /** Top-left corner of the player inventory grid, in GUI pixels. */
  public Point getPlayerInventoryOffset() {
    return new Point(8, 84);
  }

  /** Top-left corner for upgrade slots, in GUI pixels. */
  public Point getUpgradeOffset() {
    return new Point(12, 60);
  }

  /** @return the backing inventory, or null if this container has none */
  @Nullable
  public T getInv() {
    return inv;
  }

  @Override
  public boolean canInteractWith(EntityPlayer player) {
    // A container without a backing inventory is always usable.
    return getInv() == null ? true : getInv().isUseableByPlayer(player);
  }

  /**
   * Shift-click transfer: moves a stack from a container slot into the player
   * inventory, or from a player slot into the container slots.
   *
   * @return a copy of the original stack if anything moved, else null
   */
  @Override
  public ItemStack transferStackInSlot(EntityPlayer p_82846_1_, int p_82846_2_) {
    ItemStack itemstack = null;
    Slot slot = (Slot) this.inventorySlots.get(p_82846_2_);
    if (slot != null && slot.getHasStack()) {
      ItemStack itemstack1 = slot.getStack();
      itemstack = itemstack1.copy();
      // First index of the player slots: everything below it belongs to the container.
      int minPlayerSlot = inventorySlots.size() - playerInv.mainInventory.length;
      if (p_82846_2_ < minPlayerSlot) {
        // container slot -> player inventory (fill from the end, reversed)
        if (!this.mergeItemStack(itemstack1, minPlayerSlot, this.inventorySlots.size(), true)) {
          return null;
        }
      } else if (!this.mergeItemStack(itemstack1, 0, minPlayerSlot, false)) {
        // player slot -> container slots
        return null;
      }
      if (itemstack1.stackSize == 0) {
        slot.putStack((ItemStack) null);
      } else {
        slot.onSlotChanged();
      }
    }
    return itemstack;
  }

  /**
   * Added validation of slot input
   */
  // Two passes over [fromIndex, toIndex): first top up existing matching
  // stacks, then place the remainder into the first valid empty slot.
  // Unlike the vanilla version, this respects Slot.isItemValid and the
  // per-slot stack limit.
  @Override
  protected boolean mergeItemStack(ItemStack par1ItemStack, int fromIndex, int toIndex, boolean reversOrder) {
    boolean result = false;
    int checkIndex = fromIndex;
    if (reversOrder) {
      checkIndex = toIndex - 1;
    }
    Slot slot;
    ItemStack itemstack1;
    if (par1ItemStack.isStackable()) {
      // Pass 1: merge into existing stacks of the same item/damage/NBT.
      while (par1ItemStack.stackSize > 0 && (!reversOrder && checkIndex < toIndex || reversOrder && checkIndex >= fromIndex)) {
        slot = (Slot) this.inventorySlots.get(checkIndex);
        itemstack1 = slot.getStack();
        if (itemstack1 != null && itemstack1.getItem() == par1ItemStack.getItem()
            && (!par1ItemStack.getHasSubtypes() || par1ItemStack.getItemDamage() == itemstack1.getItemDamage())
            && ItemStack.areItemStackTagsEqual(par1ItemStack, itemstack1) && slot.isItemValid(par1ItemStack) && par1ItemStack != itemstack1) {
          int mergedSize = itemstack1.stackSize + par1ItemStack.stackSize;
          // honor both the item's max stack size and the slot's own limit
          int maxStackSize = Math.min(par1ItemStack.getMaxStackSize(), slot.getSlotStackLimit());
          if (mergedSize <= maxStackSize) {
            // everything fits into this slot
            par1ItemStack.stackSize = 0;
            itemstack1.stackSize = mergedSize;
            slot.onSlotChanged();
            result = true;
          } else if (itemstack1.stackSize < maxStackSize) {
            // partial fill up to the limit, keep going with the remainder
            par1ItemStack.stackSize -= maxStackSize - itemstack1.stackSize;
            itemstack1.stackSize = maxStackSize;
            slot.onSlotChanged();
            result = true;
          }
        }
        if (reversOrder) {
          --checkIndex;
        } else {
          ++checkIndex;
        }
      }
    }
    if (par1ItemStack.stackSize > 0) {
      // Pass 2: put the remainder into the first empty slot that accepts it.
      if (reversOrder) {
        checkIndex = toIndex - 1;
      } else {
        checkIndex = fromIndex;
      }
      while (!reversOrder && checkIndex < toIndex || reversOrder && checkIndex >= fromIndex) {
        slot = (Slot) this.inventorySlots.get(checkIndex);
        itemstack1 = slot.getStack();
        if (itemstack1 == null && slot.isItemValid(par1ItemStack)) {
          ItemStack in = par1ItemStack.copy();
          in.stackSize = Math.min(in.stackSize, slot.getSlotStackLimit());
          slot.putStack(in);
          slot.onSlotChanged();
          if (in.stackSize >= par1ItemStack.stackSize) {
            par1ItemStack.stackSize = 0;
          } else {
            par1ItemStack.stackSize -= in.stackSize;
          }
          result = true;
          // NOTE(review): breaks after the first empty slot even if the slot's
          // stack limit left a remainder -- presumably intentional; confirm.
          break;
        }
        if (reversOrder) {
          --checkIndex;
        } else {
          ++checkIndex;
        }
      }
    }
    return result;
  }
}
package com.github.anba.es6draft.parser;
import static com.github.anba.es6draft.semantics.StaticSemantics.*;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.github.anba.es6draft.ast.AbruptNode.Abrupt;
import com.github.anba.es6draft.ast.*;
import com.github.anba.es6draft.ast.MethodDefinition.MethodType;
import com.github.anba.es6draft.parser.ParserException.ExceptionType;
import com.github.anba.es6draft.runtime.internal.CompatibilityOption;
import com.github.anba.es6draft.runtime.internal.Messages;
import com.github.anba.es6draft.runtime.internal.SmallArrayList;
/**
* Parser for ECMAScript6 source code
* <ul>
* <li>11 Expressions
* <li>12 Statements and Declarations
* <li>13 Functions and Generators
* <li>14 Scripts and Modules
* </ul>
*/
public class Parser {
private static final boolean MODULES_ENABLED = false;
private static final boolean DEBUG = false;
private static final List<Binding> NO_INHERITED_BINDING = Collections.emptyList();
private static final Set<String> EMPTY_LABEL_SET = Collections.emptySet();
private final String sourceFile;
private final int sourceLine;
private final EnumSet<Option> options;
private TokenStream ts;
private ParseContext context;
// Tri-state strict-mode flag: Unknown until something (an explicit directive
// or the enclosing context) decides Strict or NonStrict.
private enum StrictMode {
    Unknown, Strict, NonStrict
}
// Statement classification; presumably used to resolve break/continue
// targets (Iteration vs. merely Breakable vs. plain Statement) -- the
// consuming code is outside this excerpt.
private enum StatementType {
    Iteration, Breakable, Statement
}
// Kind of code body a ParseContext represents (top-level script/module vs.
// the various function-like forms).
private enum ContextKind {
    Script, Module, Function, Generator, ArrowFunction, Method
}
    /**
     * Per-production parse state. Contexts form a parent chain mirroring the nesting of
     * script/module/function productions; they carry scope tracking, label resolution,
     * strict-mode propagation and deferred strictness decisions.
     */
    private static class ParseContext {
        final ParseContext parent;
        final ContextKind kind;
        // set via setReferencesSuper() when a `super` reference is seen
        boolean superReference = false;
        boolean yieldAllowed = false;
        boolean returnAllowed = false;
        StrictMode strictMode = StrictMode.Unknown;
        // strictness came from an explicit "use strict" directive (see applyStrictMode())
        boolean explicitStrict = false;
        // first pending strict-mode violation, raised once strictness is decided
        ParserException strictError = null;
        // functions whose strictness is still undecided (see inheritStrictness())
        List<FunctionNode> deferred = null;
        // stack of object literals tracked via addLiteral()/removeLiteral()
        ArrayDeque<ObjectLiteral> objectLiterals = null;
        // label name -> context, used to detect duplicate labels (see enterLabelled())
        Map<String, LabelContext> labelSet = null;
        // innermost label context chain
        LabelContext labels = null;
        ScopeContext scopeContext;
        final FunctionContext funContext;
        // Root context: no parent, no scope; only used as the initial sentinel.
        ParseContext() {
            this.parent = null;
            this.kind = null;
            this.funContext = null;
        }
        ParseContext(ParseContext parent, ContextKind kind) {
            this.parent = parent;
            this.kind = kind;
            this.funContext = new FunctionContext(this);
            this.scopeContext = funContext;
            this.returnAllowed = isFunction();
            // only Strict propagates downwards; Unknown/NonStrict are re-decided locally
            if (parent.strictMode == StrictMode.Strict) {
                this.strictMode = parent.strictMode;
            }
        }
        // Arrow functions have no own `super`; walk up to the nearest non-arrow context.
        ParseContext findSuperContext() {
            ParseContext cx = this;
            while (cx.kind == ContextKind.ArrowFunction) {
                cx = cx.parent;
            }
            return cx;
        }
        void setReferencesSuper() {
            superReference = true;
        }
        boolean hasSuperReference() {
            return superReference;
        }
        // Whether this context belongs to a function-like production (not Script/Module).
        boolean isFunction() {
            switch (kind) {
            case ArrowFunction:
            case Function:
            case Generator:
            case Method:
                return true;
            case Module:
            case Script:
            default:
                return false;
            }
        }
        int countLiterals() {
            return (objectLiterals != null ? objectLiterals.size() : 0);
        }
        void addLiteral(ObjectLiteral object) {
            if (objectLiterals == null) {
                objectLiterals = new ArrayDeque<>(4);
            }
            objectLiterals.push(object);
        }
        void removeLiteral(ObjectLiteral object) {
            objectLiterals.removeFirstOccurrence(object);
        }
    }
    // TODO: rename - not used exclusively for functions, also used for scripts and modules
    /**
     * Top-level scope of a function, script or module. Records parameter names,
     * var-/lexically-declared names and whether a direct eval makes the scope dynamic.
     */
    private static class FunctionContext extends ScopeContext implements FunctionScope {
        // lexically enclosing scope, taken from the parent parse context
        final ScopeContext enclosing;
        Set<String> parameterNames = null;
        // NOTE(review): presumably set elsewhere when a direct `eval` call is parsed
        // (not visible in this chunk); drives isDynamic() below.
        boolean directEval = false;
        FunctionContext(ParseContext context) {
            // no parent ScopeContext: this is the root scope of its context
            super(null);
            this.enclosing = context.parent.scopeContext;
        }
        private boolean isStrict() {
            if (node instanceof FunctionNode) {
                return IsStrict((FunctionNode) node);
            } else {
                assert node instanceof Script;
                return IsStrict((Script) node);
            }
        }
        @Override
        public ScopeContext getEnclosingScope() {
            return enclosing;
        }
        @Override
        public boolean isDynamic() {
            // direct eval in non-strict code can introduce new bindings at runtime
            return directEval && !isStrict();
        }
        @Override
        public Set<String> parameterNames() {
            return parameterNames;
        }
        @Override
        public Set<String> lexicallyDeclaredNames() {
            return lexDeclaredNames;
        }
        @Override
        public List<Declaration> lexicallyScopedDeclarations() {
            return lexScopedDeclarations;
        }
        @Override
        public Set<String> varDeclaredNames() {
            return varDeclaredNames;
        }
        @Override
        public List<StatementListItem> varScopedDeclarations() {
            return varScopedDeclarations;
        }
    }
    /**
     * Scope of a block statement. A dynamic block scope (created for `with`-like
     * constructs, see enterWithContext()) may have bindings added at runtime.
     */
    private static class BlockContext extends ScopeContext implements BlockScope {
        final boolean dynamic;
        BlockContext(ScopeContext parent, boolean dynamic) {
            super(parent);
            this.dynamic = dynamic;
        }
        @Override
        public Set<String> lexicallyDeclaredNames() {
            return lexDeclaredNames;
        }
        @Override
        public List<Declaration> lexicallyScopedDeclarations() {
            return lexScopedDeclarations;
        }
        @Override
        public boolean isDynamic() {
            return dynamic;
        }
    }
    /**
     * Base class for scope bookkeeping: tracks var- and lexically-declared names and
     * their declarations. All collections are created lazily to keep empty scopes cheap.
     */
    private abstract static class ScopeContext implements Scope {
        final ScopeContext parent;
        // AST node owning this scope; assigned after the node has been constructed
        ScopedNode node = null;
        HashSet<String> varDeclaredNames = null;
        HashSet<String> lexDeclaredNames = null;
        List<StatementListItem> varScopedDeclarations = null;
        List<Declaration> lexScopedDeclarations = null;
        ScopeContext(ScopeContext parent) {
            this.parent = parent;
        }
        @Override
        public Scope getParent() {
            return parent;
        }
        @Override
        public ScopedNode getNode() {
            return node;
        }
        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("var: ").append(varDeclaredNames != null ? varDeclaredNames : "<null>");
            sb.append("\t");
            sb.append("lex: ").append(lexDeclaredNames != null ? lexDeclaredNames : "<null>");
            return sb.toString();
        }
        boolean isTopLevel() {
            return (parent == null);
        }
        /**
         * Records {@code name} as var-declared. Returns false iff the name collides
         * with a lexically declared name (var/var duplicates are legal).
         */
        boolean addVarDeclaredName(String name) {
            if (varDeclaredNames == null) {
                varDeclaredNames = new HashSet<>();
            }
            varDeclaredNames.add(name);
            return (lexDeclaredNames == null || !lexDeclaredNames.contains(name));
        }
        /**
         * Records {@code name} as lexically declared. Returns false on a duplicate
         * lexical name or a collision with a var-declared name.
         */
        boolean addLexDeclaredName(String name) {
            if (lexDeclaredNames == null) {
                lexDeclaredNames = new HashSet<>();
            }
            return lexDeclaredNames.add(name)
                    && (varDeclaredNames == null || !varDeclaredNames.contains(name));
        }
        void addVarScopedDeclaration(StatementListItem decl) {
            if (varScopedDeclarations == null) {
                varScopedDeclarations = newSmallList();
            }
            varScopedDeclarations.add(decl);
        }
        void addLexScopedDeclaration(Declaration decl) {
            if (lexScopedDeclarations == null) {
                lexScopedDeclarations = newSmallList();
            }
            lexScopedDeclarations.add(decl);
        }
    }
    /**
     * One entry of the label chain: the statement type it wraps, the labels naming it,
     * and the abrupt completions (break/continue) observed for it.
     */
    private static class LabelContext {
        final LabelContext parent;
        final StatementType type;
        final Set<String> labelSet;
        final EnumSet<Abrupt> abrupts = EnumSet.noneOf(Abrupt.class);
        LabelContext(LabelContext parent, StatementType type, Set<String> labelSet) {
            this.parent = parent;
            this.type = type;
            this.labelSet = labelSet;
        }
        // Records that this target was reached by the given abrupt completion.
        void mark(Abrupt abrupt) {
            abrupts.add(abrupt);
        }
    }
    /**
     * Control-flow exception used to restart parsing; thrown when a construct turns
     * out to be a (legacy) generator after parsing has already begun.
     */
    @SuppressWarnings("serial")
    private static class RetryGenerator extends RuntimeException {
    }
public enum Option {
Strict, FunctionCode, LocalScope, DirectEval, EvalScript,
/** B.1.1 Numeric Literals */
LegacyOctalIntegerLiteral,
/** B.1.2 String Literals */
OctalEscapeSequence,
/** B.1.3 HTML-like Comments */
HTMLComments,
/** Moz-Extension: for-each statement */
ForEachStatement,
/** Moz-Extension: guarded catch */
GuardedCatch,
/** Moz-Extension: expression closure */
ExpressionClosure,
/** Moz-Extension: let statement */
LetStatement,
/** Moz-Extension: let expression */
LetExpression,
/** Moz-Extension: legacy (star-less) generators */
LegacyGenerator,
/** Moz-Extension: legacy comprehension forms */
LegacyComprehension;
public static EnumSet<Option> from(Set<CompatibilityOption> compatOptions) {
EnumSet<Option> options = EnumSet.noneOf(Option.class);
if (compatOptions.contains(CompatibilityOption.LegacyOctalIntegerLiteral)) {
options.add(Option.LegacyOctalIntegerLiteral);
}
if (compatOptions.contains(CompatibilityOption.OctalEscapeSequence)) {
options.add(Option.OctalEscapeSequence);
}
if (compatOptions.contains(CompatibilityOption.HTMLComments)) {
options.add(Option.HTMLComments);
}
if (compatOptions.contains(CompatibilityOption.ForEachStatement)) {
options.add(Option.ForEachStatement);
}
if (compatOptions.contains(CompatibilityOption.GuardedCatch)) {
options.add(Option.GuardedCatch);
}
if (compatOptions.contains(CompatibilityOption.ExpressionClosure)) {
options.add(Option.ExpressionClosure);
}
if (compatOptions.contains(CompatibilityOption.LetStatement)) {
options.add(Option.LetStatement);
}
if (compatOptions.contains(CompatibilityOption.LetExpression)) {
options.add(Option.LetExpression);
}
if (compatOptions.contains(CompatibilityOption.LegacyGenerator)) {
options.add(Option.LegacyGenerator);
}
if (compatOptions.contains(CompatibilityOption.LegacyComprehension)) {
options.add(Option.LegacyComprehension);
}
return options;
}
}
    /**
     * Creates a parser for the given source location. A Parser instance is single-use:
     * parseScript()/parseModule()/... throw IllegalStateException on reuse.
     *
     * @param sourceFile source file name, used in error messages and Script nodes
     * @param sourceLine first line number of the source, used for line bookkeeping
     * @param options parser options; copied defensively into an EnumSet
     */
    public Parser(String sourceFile, int sourceLine, Set<Option> options) {
        this.sourceFile = sourceFile;
        this.sourceLine = sourceLine;
        this.options = EnumSet.copyOf(options);
        context = new ParseContext();
        context.strictMode = this.options.contains(Option.Strict) ? StrictMode.Strict
                : StrictMode.NonStrict;
    }
    boolean isEnabled(Option option) {
        return options.contains(option);
    }
    // Pushes a new parse context for the given production kind.
    private ParseContext newContext(ContextKind kind) {
        return context = new ParseContext(context, kind);
    }
    // Pops the current parse context, propagating any pending strict-mode error upwards.
    private ParseContext restoreContext() {
        if (context.parent.strictError == null) {
            context.parent.strictError = context.strictError;
        }
        return context = context.parent;
    }
    // Enters a dynamic block scope (bindings may be introduced at runtime).
    private BlockContext enterWithContext() {
        BlockContext cx = new BlockContext(context.scopeContext, true);
        context.scopeContext = cx;
        return cx;
    }
    private ScopeContext exitWithContext() {
        return exitScopeContext();
    }
    // Enters an ordinary (static) block scope.
    private BlockContext enterBlockContext() {
        BlockContext cx = new BlockContext(context.scopeContext, false);
        context.scopeContext = cx;
        return cx;
    }
    // Re-activates a previously created block scope (used when re-parsing into it).
    private BlockContext reenterBlockContext(BlockContext cx) {
        context.scopeContext = cx;
        return cx;
    }
    private ScopeContext exitBlockContext() {
        return exitScopeContext();
    }
    /**
     * Leaves the current scope. Var-declared names hoist: they are re-registered in
     * the parent scope (where they may clash with lexical names there).
     */
    private ScopeContext exitScopeContext() {
        ScopeContext scope = context.scopeContext;
        ScopeContext parent = scope.parent;
        assert parent != null : "exitScopeContext() on top-level";
        HashSet<String> varDeclaredNames = scope.varDeclaredNames;
        if (varDeclaredNames != null) {
            scope.varDeclaredNames = null;
            for (String name : varDeclaredNames) {
                addVarDeclaredName(parent, name);
            }
        }
        return context.scopeContext = parent;
    }
    /**
     * Registers a function declaration in the enclosing scope. At top level the
     * declaration is var-scoped; inside a block it is lexically scoped. Either way a
     * name clash is a SyntaxError.
     */
    private void addFunctionDecl(FunctionDeclaration decl) {
        String name = BoundName(decl.getIdentifier());
        // parent context: the declaration's own function context is already on the stack
        ScopeContext parentScope = context.parent.scopeContext;
        if (parentScope.isTopLevel()) {
            // top-level function declaration
            parentScope.addVarScopedDeclaration(decl);
            if (!parentScope.addVarDeclaredName(name)) {
                reportSyntaxError(Messages.Key.VariableRedeclaration, name);
            }
        } else {
            // block-scoped function declaration
            parentScope.addLexScopedDeclaration(decl);
            if (!parentScope.addLexDeclaredName(name)) {
                reportSyntaxError(Messages.Key.VariableRedeclaration, name);
            }
        }
    }
    /**
     * Registers a generator declaration; generators are always lexically scoped,
     * regardless of nesting level.
     */
    private void addGeneratorDecl(GeneratorDeclaration decl) {
        String name = BoundName(decl.getIdentifier());
        ScopeContext parentScope = context.parent.scopeContext;
        parentScope.addLexScopedDeclaration(decl);
        if (!parentScope.addLexDeclaredName(name)) {
            reportSyntaxError(Messages.Key.VariableRedeclaration, name);
        }
    }
    private void addLexScopedDeclaration(Declaration decl) {
        context.scopeContext.addLexScopedDeclaration(decl);
    }
    // var statements always attach to the function-level scope, not the current block.
    private void addVarScopedDeclaration(VariableStatement decl) {
        context.funContext.addVarScopedDeclaration(decl);
    }
    // Adds a var-declared name to `scope`; SyntaxError on collision with a lexical name.
    private void addVarDeclaredName(ScopeContext scope, String name) {
        if (!scope.addVarDeclaredName(name)) {
            reportSyntaxError(Messages.Key.VariableRedeclaration, name);
        }
    }
    // Adds a lexically declared name to `scope`; SyntaxError on any collision.
    private void addLexDeclaredName(ScopeContext scope, String name) {
        if (!scope.addLexDeclaredName(name)) {
            reportSyntaxError(Messages.Key.VariableRedeclaration, name);
        }
    }
    /**
     * <strong>[12.1] Block</strong>
     * <p>
     * Static Semantics: Early Errors<br>
     * <ul>
     * <li>It is a Syntax Error if any element of the LexicallyDeclaredNames of StatementList also
     * occurs in the VarDeclaredNames of StatementList.
     * </ul>
     */
    @SuppressWarnings("unused")
    private void addVarDeclaredName(Binding binding) {
        if (binding instanceof BindingIdentifier) {
            addVarDeclaredName((BindingIdentifier) binding);
        } else {
            assert binding instanceof BindingPattern;
            addVarDeclaredName((BindingPattern) binding);
        }
    }
    // Registers the single name bound by a binding identifier in the current scope.
    private void addVarDeclaredName(BindingIdentifier bindingIdentifier) {
        String name = BoundName(bindingIdentifier);
        addVarDeclaredName(context.scopeContext, name);
    }
    // Registers every name bound by a destructuring pattern in the current scope.
    private void addVarDeclaredName(BindingPattern bindingPattern) {
        for (String name : BoundNames(bindingPattern)) {
            addVarDeclaredName(context.scopeContext, name);
        }
    }
    /**
     * <strong>[12.1] Block</strong>
     * <p>
     * Static Semantics: Early Errors<br>
     * <ul>
     * <li>It is a Syntax Error if the LexicallyDeclaredNames of StatementList contains any
     * duplicate entries.
     * <li>It is a Syntax Error if any element of the LexicallyDeclaredNames of StatementList also
     * occurs in the VarDeclaredNames of StatementList.
     * </ul>
     */
    private void addLexDeclaredName(Binding binding) {
        if (binding instanceof BindingIdentifier) {
            addLexDeclaredName((BindingIdentifier) binding);
        } else {
            assert binding instanceof BindingPattern;
            addLexDeclaredName((BindingPattern) binding);
        }
    }
    private void addLexDeclaredName(BindingIdentifier bindingIdentifier) {
        String name = BoundName(bindingIdentifier);
        addLexDeclaredName(context.scopeContext, name);
    }
    private void addLexDeclaredName(BindingPattern bindingPattern) {
        for (String name : BoundNames(bindingPattern)) {
            addLexDeclaredName(context.scopeContext, name);
        }
    }
    private void addLexDeclaredNames(List<Binding> bindings) {
        for (Binding binding : bindings) {
            addLexDeclaredName(binding);
        }
    }
    // Reverse of addLexDeclaredNames(); used to undo speculative registrations.
    private void removeLexDeclaredNames(List<Binding> bindings) {
        for (Binding binding : bindings) {
            removeLexDeclaredName(binding);
        }
    }
private void removeLexDeclaredName(Binding binding) {
HashSet<String> lexDeclaredNames = context.scopeContext.lexDeclaredNames;
if (binding instanceof BindingIdentifier) {
BindingIdentifier bindingIdentifier = (BindingIdentifier) binding;
String name = BoundName(bindingIdentifier);
lexDeclaredNames.remove(name);
} else {
assert binding instanceof BindingPattern;
BindingPattern bindingPattern = (BindingPattern) binding;
for (String name : BoundNames(bindingPattern)) {
lexDeclaredNames.remove(name);
}
}
}
    /**
     * Pushes a new label context for a statement of the given type. Every label in
     * {@code labelSet} is registered in the flat label map; a duplicate label is a
     * SyntaxError.
     */
    private LabelContext enterLabelled(StatementType type, Set<String> labelSet) {
        LabelContext cx = context.labels = new LabelContext(context.labels, type, labelSet);
        // lazily create the label map only when there are labels to register
        if (!labelSet.isEmpty() && context.labelSet == null) {
            context.labelSet = new HashMap<>();
        }
        for (String label : labelSet) {
            if (context.labelSet.containsKey(label)) {
                reportSyntaxError(Messages.Key.DuplicateLabel, label);
            }
            context.labelSet.put(label, cx);
        }
        return cx;
    }
    // Pops the current label context, unregistering its labels from the label map.
    private LabelContext exitLabelled() {
        for (String label : context.labels.labelSet) {
            context.labelSet.remove(label);
        }
        return context.labels = context.labels.parent;
    }
    // Iteration statements are targets for both `continue` and `break`.
    private LabelContext enterIteration(Set<String> labelSet) {
        return enterLabelled(StatementType.Iteration, labelSet);
    }
    private void exitIteration() {
        exitLabelled();
    }
    // Breakable statements (e.g. switch) are targets for `break` only.
    private LabelContext enterBreakable(Set<String> labelSet) {
        return enterLabelled(StatementType.Breakable, labelSet);
    }
    private void exitBreakable() {
        exitLabelled();
    }
private LabelContext findContinueTarget(String label) {
for (LabelContext cx = context.labels; cx != null; cx = cx.parent) {
if (label == null ? cx.type == StatementType.Iteration : cx.labelSet.contains(label)) {
return cx;
}
}
return null;
}
private LabelContext findBreakTarget(String label) {
for (LabelContext cx = context.labels; cx != null; cx = cx.parent) {
if (label == null ? cx.type != StatementType.Statement : cx.labelSet.contains(label)) {
return cx;
}
}
return null;
}
    // Allocates a list optimized for few elements (backed by SmallArrayList).
    private static <T> List<T> newSmallList() {
        return new SmallArrayList<>();
    }
    // General-purpose list factory; currently identical to newSmallList().
    private static <T> List<T> newList() {
        return new SmallArrayList<>();
    }
private static <T> List<T> merge(List<T> list1, List<T> list2) {
if (!(list1.isEmpty() || list2.isEmpty())) {
List<T> merged = new ArrayList<>();
merged.addAll(list1);
merged.addAll(list2);
return merged;
}
return list1.isEmpty() ? list2 : list1;
}
    // All report* helpers declare a ParserException return type so call sites can use
    // `throw reportXxx(...)`, but they always throw and never return.
    private ParserException reportException(ParserException exception) {
        throw exception;
    }
    // Throws a SyntaxError for an unexpected token; EOF gets a dedicated exception
    // type so callers can distinguish "incomplete input" from plain syntax errors.
    private ParserException reportTokenMismatch(Token expected, Token actual) {
        if (actual == Token.EOF) {
            throw new ParserEOFException(ts.getLine(), ts.getColumn(),
                    Messages.Key.UnexpectedToken, actual.toString(), expected.toString());
        }
        throw new ParserException(ExceptionType.SyntaxError, ts.getLine(), ts.getColumn(),
                Messages.Key.UnexpectedToken, actual.toString(), expected.toString());
    }
    private ParserException reportTokenMismatch(String expected, Token actual) {
        if (actual == Token.EOF) {
            throw new ParserEOFException(ts.getLine(), ts.getColumn(),
                    Messages.Key.UnexpectedToken, actual.toString(), expected);
        }
        throw new ParserException(ExceptionType.SyntaxError, ts.getLine(), ts.getColumn(),
                Messages.Key.UnexpectedToken, actual.toString(), expected);
    }
    private ParserException reportTokenMismatch(Token expected, String actual) {
        throw new ParserException(ExceptionType.SyntaxError, ts.getLine(), ts.getColumn(),
                Messages.Key.UnexpectedToken, actual, expected.toString());
    }
    private ParserException reportError(ExceptionType type, int line, int column,
            Messages.Key messageKey, String... args) {
        throw new ParserException(type, line, column, messageKey, args);
    }
    // Error located at an AST node rather than the current token-stream position.
    private ParserException reportSyntaxError(Messages.Key messageKey, Node node, String... args) {
        // TODO: store column info in Node
        throw reportError(ExceptionType.SyntaxError, node.getLine(), -1, messageKey, args);
    }
    private ParserException reportSyntaxError(Messages.Key messageKey, String... args) {
        throw reportError(ExceptionType.SyntaxError, ts.getLine(), ts.getColumn(), messageKey, args);
    }
    private ParserException reportReferenceError(Messages.Key messageKey, String... args) {
        throw reportError(ExceptionType.ReferenceError, ts.getLine(), ts.getColumn(), messageKey,
                args);
    }
    /**
     * Reports an error that is only an error in strict mode. While strictness is still
     * Unknown the first such error is recorded (and raised later once strictness is
     * decided, see applyStrictMode()); in Strict mode it is thrown immediately; in
     * NonStrict mode it is ignored.
     */
    private void reportStrictModeError(ExceptionType type, int line, int column,
            Messages.Key messageKey, String... args) {
        if (context.strictMode == StrictMode.Unknown) {
            if (context.strictError == null) {
                context.strictError = new ParserException(type, line, column, messageKey, args);
            }
        } else if (context.strictMode == StrictMode.Strict) {
            reportError(type, line, column, messageKey, args);
        }
    }
    private void reportStrictModeSyntaxError(Messages.Key messageKey, Node node, String... args) {
        // TODO: store column info in Node
        reportStrictModeError(ExceptionType.SyntaxError, node.getLine(), -1, messageKey, args);
    }
    void reportStrictModeSyntaxError(Messages.Key messageKey, String... args) {
        reportStrictModeError(ExceptionType.SyntaxError, ts.getLine(), ts.getColumn(), messageKey,
                args);
    }
    void reportStrictModeReferenceError(Messages.Key messageKey, String... args) {
        reportStrictModeError(ExceptionType.ReferenceError, ts.getLine(), ts.getColumn(),
                messageKey, args);
    }
    /**
     * Peeks the next token in the token-stream
     */
    private Token peek() {
        return ts.peekToken();
    }
    /**
     * Checks whether the next token in the token-stream is equal to the input token
     */
    private boolean LOOKAHEAD(Token token) {
        return ts.peekToken() == token;
    }
    /**
     * Returns the current token in the token-stream
     */
    private Token token() {
        return ts.currentToken();
    }
    /**
     * Consumes the current token in the token-stream and advances the stream to the next token.
     * Throws a SyntaxError if the current token is not {@code tok}.
     */
    private void consume(Token tok) {
        if (tok != token())
            reportTokenMismatch(tok, token());
        Token next = ts.nextToken();
        if (DEBUG)
            System.out.printf("consume(%s) -> %s\n", tok, next);
    }
    /**
     * Consumes the current token in the token-stream and advances the stream to the next token.
     * The current token must be a NAME token whose string value equals {@code name}.
     */
    private void consume(String name) {
        // read the string value before consume() advances the stream
        String string = ts.getString();
        consume(Token.NAME);
        if (!name.equals(string))
            reportSyntaxError(Messages.Key.UnexpectedName, string, name);
    }
    /** Parses {@code source} as a Script; alias for {@link #parseScript(CharSequence)}. */
    public Script parse(CharSequence source) throws ParserException {
        return parseScript(source);
    }
    /**
     * Parses {@code source} as an ECMAScript Script. A Parser instance is single-use;
     * calling this twice throws IllegalStateException.
     */
    public Script parseScript(CharSequence source) throws ParserException {
        if (ts != null)
            throw new IllegalStateException();
        ts = new TokenStream(this, new StringTokenStreamInput(source), sourceLine);
        return script();
    }
    /**
     * Parses {@code source} as a Module. A synthetic Script context wraps the Module
     * context so strictness and scope bookkeeping work as for regular scripts.
     */
    public ModuleDeclaration parseModule(CharSequence source) throws ParserException {
        if (ts != null)
            throw new IllegalStateException();
        newContext(ContextKind.Script);
        try {
            applyStrictMode(true); // defaults to strict?
            ModuleDeclaration module;
            newContext(ContextKind.Module);
            try {
                ts = new TokenStream(this, new StringTokenStreamInput(source), sourceLine);
                ts.init();
                String moduleName = sourceFile; // only basename(sourceFile)?
                List<StatementListItem> body = moduleBody(Token.EOF);
                FunctionContext scope = context.funContext;
                module = new ModuleDeclaration(moduleName, body, scope);
                scope.node = module;
            } finally {
                restoreContext();
            }
            createScript(module);
            return module;
        } finally {
            restoreContext();
        }
    }
    /**
     * Parses formal parameters and a function body into an anonymous function, as used
     * by the {@code Function(p..., body)} constructor. Parameters and body are parsed
     * from two separate token streams; both must be fully consumed (EOF) or a
     * SyntaxError is raised.
     */
    public FunctionDefinition parseFunction(CharSequence formals, CharSequence bodyText)
            throws ParserException {
        if (ts != null)
            throw new IllegalStateException();
        newContext(ContextKind.Script);
        try {
            applyStrictMode(false);
            FunctionExpression function;
            newContext(ContextKind.Function);
            try {
                ts = new TokenStream(this, new StringTokenStreamInput(formals), sourceLine);
                ts.init();
                FormalParameterList parameters = formalParameters(Token.EOF);
                if (token() != Token.EOF) {
                    reportSyntaxError(Messages.Key.InvalidFormalParameterList);
                }
                if (ts.position() != formals.length()) {
                    // more input after last token (whitespace, comments), add newlines to handle
                    // last token is single-line comment case
                    formals = "\n" + formals + "\n";
                }
                ts = new TokenStream(this, new StringTokenStreamInput(bodyText), sourceLine);
                ts.init();
                List<StatementListItem> statements = functionBody(Token.EOF);
                if (token() != Token.EOF) {
                    reportSyntaxError(Messages.Key.InvalidFunctionBody);
                }
                // reassemble a canonical source representation for toString()-style output
                String header = String.format("function anonymous (%s) ", formals);
                String body = String.format("\n%s\n", bodyText);
                FunctionContext scope = context.funContext;
                function = new FunctionExpression(scope, "anonymous", parameters, statements,
                        header, body);
                function.setLine(sourceLine);
                scope.node = function;
                function_StaticSemantics(function);
                function = inheritStrictness(function);
            } catch (RetryGenerator e) {
                // don't bother with legacy support here
                throw reportSyntaxError(Messages.Key.InvalidYieldStatement);
            } finally {
                restoreContext();
            }
            createScript(new ExpressionStatement(function));
            return function;
        } finally {
            restoreContext();
        }
    }
    /**
     * Parses formal parameters and a body into an anonymous generator, the generator
     * analogue of {@link #parseFunction(CharSequence, CharSequence)}.
     */
    public GeneratorDefinition parseGenerator(CharSequence formals, CharSequence bodyText)
            throws ParserException {
        if (ts != null)
            throw new IllegalStateException();
        newContext(ContextKind.Script);
        try {
            applyStrictMode(false);
            GeneratorExpression generator;
            newContext(ContextKind.Generator);
            try {
                ts = new TokenStream(this, new StringTokenStreamInput(formals), sourceLine);
                ts.init();
                FormalParameterList parameters = formalParameters(Token.EOF);
                if (token() != Token.EOF) {
                    reportSyntaxError(Messages.Key.InvalidFormalParameterList);
                }
                if (ts.position() != formals.length()) {
                    // more input after last token (whitespace, comments), add newlines to handle
                    // last token is single-line comment case
                    formals = "\n" + formals + "\n";
                }
                ts = new TokenStream(this, new StringTokenStreamInput(bodyText), sourceLine);
                ts.init();
                List<StatementListItem> statements = functionBody(Token.EOF);
                if (token() != Token.EOF) {
                    reportSyntaxError(Messages.Key.InvalidFunctionBody);
                }
                // reassemble a canonical source representation for toString()-style output
                String header = String.format("function* anonymous (%s) ", formals);
                String body = String.format("\n%s\n", bodyText);
                FunctionContext scope = context.funContext;
                generator = new GeneratorExpression(scope, "anonymous", parameters, statements,
                        header, body);
                generator.setLine(sourceLine);
                scope.node = generator;
                generator_StaticSemantics(generator);
                generator = inheritStrictness(generator);
            } finally {
                restoreContext();
            }
            createScript(new ExpressionStatement(generator));
            return generator;
        } finally {
            restoreContext();
        }
    }
private Script createScript(StatementListItem statement) {
List<StatementListItem> statements = singletonList(statement);
boolean strict = (context.strictMode == StrictMode.Strict);
FunctionContext scope = context.funContext;
Script script = new Script(sourceFile, scope, statements, options, strict);
script.setLine(sourceLine);
scope.node = script;
return script;
}
    /**
     * <strong>[14.1] Script</strong>
     *
     * <pre>
     * Script :
     *     ScriptBody<sub>opt</sub>
     * ScriptBody :
     *     OuterStatementList
     * </pre>
     */
    private Script script() {
        newContext(ContextKind.Script);
        try {
            ts.init();
            // the directive prologue decides strictness before the body is parsed
            List<StatementListItem> prologue = directivePrologue();
            List<StatementListItem> body = outerStatementList();
            boolean strict = (context.strictMode == StrictMode.Strict);
            FunctionContext scope = context.funContext;
            Script script = new Script(sourceFile, scope, merge(prologue, body), options, strict);
            script.setLine(sourceLine);
            scope.node = script;
            return script;
        } finally {
            restoreContext();
        }
    }
    /**
     * <strong>[14.1] Script</strong>
     *
     * <pre>
     * OuterStatementList :
     *     OuterItem
     *     OuterStatementList OuterItem
     * OuterItem :
     *     ModuleDeclaration
     *     ImportDeclaration
     *     StatementListItem
     * </pre>
     */
    private List<StatementListItem> outerStatementList() {
        List<StatementListItem> list = newList();
        while (token() != Token.EOF) {
            // MODULES_ENABLED is a compile-time constant (false); only the plain
            // statement path is currently active
            if (MODULES_ENABLED) {
                // TODO: implement modules
                if (token() == Token.IMPORT) {
                    list.add(importDeclaration());
                } else if (isName("module") && (peek() == Token.STRING || isIdentifier(peek()))
                        && !ts.hasNextLineTerminator()) {
                    list.add(moduleDeclaration());
                } else {
                    list.add(statementListItem());
                }
            } else {
                list.add(statementListItem());
            }
        }
        return list;
    }
    /**
     * <strong>[14.2] Modules</strong>
     *
     * <pre>
     * ModuleDeclaration ::= "module" [NoNewline] StringLiteral "{" ModuleBody "}"
     *                     | "module" Identifier "from" StringLiteral ";"
     * </pre>
     */
    private ModuleDeclaration moduleDeclaration() {
        newContext(ContextKind.Module);
        try {
            consume("module");
            if (token() == Token.STRING) {
                // inline module: "module" StringLiteral "{" ModuleBody "}"
                String moduleName = stringLiteral();
                consume(Token.LC);
                List<StatementListItem> body = moduleBody(Token.RC);
                consume(Token.RC);
                FunctionContext scope = context.funContext;
                ModuleDeclaration module = new ModuleDeclaration(moduleName, body, scope);
                scope.node = module;
                return module;
            } else {
                // module import binding: "module" Identifier "from" StringLiteral ";"
                String identifier = identifier();
                consume("from");
                String moduleName = stringLiteral();
                semicolon();
                FunctionContext scope = context.funContext;
                ModuleDeclaration module = new ModuleDeclaration(identifier, moduleName, scope);
                scope.node = module;
                return module;
            }
        } finally {
            restoreContext();
        }
    }
    /**
     * <strong>[14.2] Modules</strong>
     *
     * <pre>
     * ModuleBody ::= ModuleElement*
     * ModuleElement ::= ScriptElement
     *                 | ExportDeclaration
     * </pre>
     *
     * @param end terminator token (RC for inline modules, EOF for top-level parse)
     */
    private List<StatementListItem> moduleBody(Token end) {
        List<StatementListItem> list = newList();
        while (token() != end) {
            // actually: ExportDeclaration | ImportDeclaration | StatementListItem
            // TODO: are nested modules (still) allowed? (disabled for now)
            if (token() == Token.EXPORT) {
                list.add(exportDeclaration());
            } else if (token() == Token.IMPORT) {
                list.add(importDeclaration());
            } else {
                list.add(statementListItem());
            }
        }
        return list;
    }
    /**
     * <strong>[14.2] Modules</strong>
     *
     * <pre>
     * ExportDeclaration ::= "export" ExportSpecifierSet ";"
     *                     | "export" "default" AssignmentExpression ";"
     *                     | "export" VariableDeclaration
     *                     | "export" FunctionDeclaration
     *                     | "export" ClassDeclaration
     * </pre>
     */
    private ExportDeclaration exportDeclaration() {
        consume(Token.EXPORT);
        switch (token()) {
        case LC:
        case MUL: {
            // "export" ExportSpecifierSet ";"
            ExportSpecifierSet exportSpecifierSet = exportSpecifierSet();
            semicolon();
            return new ExportDeclaration(exportSpecifierSet);
        }
        case DEFAULT: {
            // "export" "default" AssignmentExpression ";"
            consume(Token.DEFAULT);
            Expression expression = assignmentExpression(true);
            semicolon();
            return new ExportDeclaration(expression);
        }
        case VAR: {
            // "export" VariableDeclaration
            VariableStatement variableStatement = variableStatement();
            return new ExportDeclaration(variableStatement);
        }
        case FUNCTION:
        case CLASS:
        case LET:
        case CONST: {
            // "export" FunctionDeclaration
            // "export" ClassDeclaration
            // (let/const are handled by declaration() as well)
            Declaration declaration = declaration();
            return new ExportDeclaration(declaration);
        }
        default:
            throw reportSyntaxError(Messages.Key.InvalidToken, token().toString());
        }
    }
    /**
     * <strong>[14.2] Modules</strong>
     *
     * <pre>
     * ExportSpecifierSet ::= "{" (ExportSpecifier ("," ExportSpecifier)* ","?)? "}"
     *                      | "*" ("from" ModuleSpecifier)?
     * </pre>
     */
    private ExportSpecifierSet exportSpecifierSet() {
        if (token() == Token.LC) {
            // named exports: "{" ExportSpecifier, ... "}" with optional trailing comma
            List<ExportSpecifier> exports = newSmallList();
            consume(Token.LC);
            while (token() != Token.RC) {
                exports.add(exportSpecifier());
                if (token() == Token.COMMA) {
                    consume(Token.COMMA);
                } else {
                    break;
                }
            }
            consume(Token.RC);
            // FIXME: re-export should also work with named exports
            String sourceModule = null;
            if (isName("from")) {
                consume("from");
                sourceModule = moduleSpecifier();
            }
            return new ExportSpecifierSet(exports, sourceModule);
        } else {
            // star export, optionally re-exported from another module
            consume(Token.MUL);
            String sourceModule = null;
            if (isName("from")) {
                consume("from");
                sourceModule = moduleSpecifier();
            }
            return new ExportSpecifierSet(sourceModule);
        }
    }
/**
* <strong>[14.2] Modules</strong>
*
* <pre>
* ExportSpecifier ::= Identifier ("as" IdentifierName)?
* </pre>
*/
private ExportSpecifier exportSpecifier() {
String localName = identifier();
String externalName;
if (isName("as")) {
consume("as");
externalName = identifierName();
} else {
externalName = localName;
}
return new ExportSpecifier(localName, externalName);
}
/**
* <strong>[14.2] Modules</strong>
*
* <pre>
* ModuleSpecifier ::= StringLiteral
* </pre>
*/
private String moduleSpecifier() {
return stringLiteral();
}
    /**
     * <strong>[14.2] Modules</strong>
     *
     * <pre>
     * ImportDeclaration ::= "import" ImportSpecifierSet "from" ModuleSpecifier ";"
     *                     | "import" ModuleSpecifier ";"
     * </pre>
     */
    private ImportDeclaration importDeclaration() {
        consume(Token.IMPORT);
        if (token() == Token.STRING) {
            // bare import for side effects: "import" ModuleSpecifier ";"
            String moduleSpecifier = moduleSpecifier();
            semicolon();
            return new ImportDeclaration(moduleSpecifier);
        } else {
            ImportSpecifierSet importSpecifierSet = importSpecifierSet();
            consume("from");
            String moduleSpecifier = moduleSpecifier();
            semicolon();
            return new ImportDeclaration(importSpecifierSet, moduleSpecifier);
        }
    }
    /**
     * <strong>[14.2] Modules</strong>
     *
     * <pre>
     * ImportSpecifierSet ::= Identifier
     *                      | "{" (ImportSpecifier ("," ImportSpecifier)* ","?)? "}"
     * </pre>
     */
    private ImportSpecifierSet importSpecifierSet() {
        if (isIdentifier(token())) {
            // default import binding
            String defaultImport = identifier();
            return new ImportSpecifierSet(defaultImport);
        } else {
            // named imports with optional trailing comma
            List<ImportSpecifier> imports = newSmallList();
            consume(Token.LC);
            while (token() != Token.RC) {
                imports.add(importSpecifier());
                if (token() == Token.COMMA) {
                    consume(Token.COMMA);
                } else {
                    break;
                }
            }
            consume(Token.RC);
            return new ImportSpecifierSet(imports);
        }
    }
/**
* <strong>[14.2] Modules</strong>
*
* <pre>
* ImportSpecifier ::= Identifier ("as" Identifier)?
* | ReservedWord "as" Identifier
* </pre>
*/
private ImportSpecifier importSpecifier() {
String externalName, localName;
if (isIdentifier(token())) {
externalName = identifier();
if (isName("as")) {
consume("as");
localName = identifier();
} else {
localName = externalName;
}
} else {
externalName = identifierName();
consume("as");
localName = identifier();
}
return new ImportSpecifier(externalName, localName);
}
    /**
     * <strong>[14.1] Directive Prologues and the Use Strict Directive</strong>
     *
     * <pre>
     * DirectivePrologue :
     *     Directive<sub>opt</sub>
     * Directive:
     *     StringLiteral ;
     *     Directive StringLiteral ;
     * </pre>
     *
     * Consumes the leading run of string-literal expression statements, detects an
     * unescaped {@code "use strict"} directive and applies the resulting strictness
     * to the current context before the body is parsed.
     */
    private List<StatementListItem> directivePrologue() {
        List<StatementListItem> statements = newSmallList();
        boolean strict = false;
        directive: while (token() == Token.STRING) {
            boolean hasEscape = ts.hasEscape(); // peek() may clear hasEscape flag
            Token next = peek();
            switch (next) {
            case SEMI:
            case RC:
            case EOF:
                break;
            default:
                // ASI: a line terminator ends the directive unless the next token
                // continues the expression (see isOperator())
                if (ts.hasNextLineTerminator() && !isOperator(next)) {
                    break;
                }
                break directive;
            }
            // got a directive
            String string = stringLiteral();
            // a "use strict" containing escape sequences is NOT a valid directive
            if (!hasEscape && "use strict".equals(string)) {
                strict = true;
            }
            semicolon();
            statements.add(new ExpressionStatement(new StringLiteral(string)));
        }
        applyStrictMode(strict);
        return statements;
    }
    /**
     * Returns true if {@code token} can continue an expression when it follows a
     * string literal; used by directivePrologue() to decide whether a line terminator
     * actually ends a directive.
     */
    private static boolean isOperator(Token token) {
        switch (token) {
        case DOT:
        case LB:
        case LP:
        case TEMPLATE:
        case COMMA:
        case HOOK:
        case ASSIGN:
        case ASSIGN_ADD:
        case ASSIGN_BITAND:
        case ASSIGN_BITOR:
        case ASSIGN_BITXOR:
        case ASSIGN_DIV:
        case ASSIGN_MOD:
        case ASSIGN_MUL:
        case ASSIGN_SHL:
        case ASSIGN_SHR:
        case ASSIGN_SUB:
        case ASSIGN_USHR:
        case OR:
        case AND:
        case BITAND:
        case BITOR:
        case BITXOR:
        case EQ:
        case NE:
        case SHEQ:
        case SHNE:
        case LT:
        case LE:
        case GT:
        case GE:
        case INSTANCEOF:
        case IN:
        case SHL:
        case SHR:
        case USHR:
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
            return true;
        default:
            return false;
        }
    }
    /**
     * Fixes the current context's strictness after the directive prologue. An explicit
     * "use strict" marks the context strict and raises any strict-mode error that was
     * deferred while strictness was unknown; otherwise strictness (if still Unknown)
     * is inherited from the parent context.
     */
    private void applyStrictMode(boolean strict) {
        if (strict) {
            context.strictMode = StrictMode.Strict;
            context.explicitStrict = true;
            if (context.strictError != null) {
                reportException(context.strictError);
            }
        } else {
            if (context.strictMode == StrictMode.Unknown) {
                context.strictMode = context.parent.strictMode;
            }
        }
    }
    // Maps (strict, explicit) to the FunctionNode strictness classification.
    private static FunctionNode.StrictMode toFunctionStrictness(boolean strict, boolean explicit) {
        if (strict) {
            if (explicit) {
                return FunctionNode.StrictMode.ExplicitStrict;
            }
            return FunctionNode.StrictMode.ImplicitStrict;
        }
        return FunctionNode.StrictMode.NonStrict;
    }
// Assigns the final strict-mode flag to `function` once the current context's
// strictness is known; otherwise defers the decision to the parent context.
private <FUNCTION extends FunctionNode> FUNCTION inheritStrictness(FUNCTION function) {
if (context.strictMode != StrictMode.Unknown) {
boolean strict = (context.strictMode == StrictMode.Strict);
function.setStrictMode(toFunctionStrictness(strict, context.explicitStrict));
// resolve nested functions whose strictness was deferred to this context;
// deferred functions are never explicitly strict themselves
if (context.deferred != null) {
for (FunctionNode func : context.deferred) {
func.setStrictMode(toFunctionStrictness(strict, false));
}
context.deferred = null;
}
} else {
// this case only applies for functions with default parameters
assert context.parent.strictMode == StrictMode.Unknown;
ParseContext parent = context.parent;
if (parent.deferred == null) {
parent.deferred = newSmallList();
}
parent.deferred.add(function);
// hand any already-deferred nested functions up to the parent as well
if (context.deferred != null) {
parent.deferred.addAll(context.deferred);
context.deferred = null;
}
}
return function;
}
/**
* <strong>[13.1] Function Definitions</strong>
*
* <pre>
* FunctionDeclaration :
* function BindingIdentifier ( FormalParameters ) { FunctionBody }
* </pre>
*
* Parses a function declaration, performs its early-error checks and registers
* the declaration in the enclosing scope.
*/
private FunctionDeclaration functionDeclaration() {
newContext(ContextKind.Function);
try {
int line = ts.getLine();
consume(Token.FUNCTION);
// ts.position() is past the keyword; back up to include "function" in the source
int startFunction = ts.position() - "function".length();
BindingIdentifier identifier = bindingIdentifier();
consume(Token.LP);
FormalParameterList parameters = formalParameters(Token.RP);
consume(Token.RP);
String header, body;
List<StatementListItem> statements;
if (token() != Token.LC && isEnabled(Option.ExpressionClosure)) {
// expression-closure extension: the body is a single implicit return
// need to call manually b/c functionBody() isn't used here
applyStrictMode(false);
int startBody = ts.position();
statements = Collections.<StatementListItem> singletonList(new ReturnStatement(
assignmentExpression(true)));
int endFunction = ts.position();
header = ts.range(startFunction, startBody);
body = "return " + ts.range(startBody, endFunction);
} else {
consume(Token.LC);
int startBody = ts.position();
statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
header = ts.range(startFunction, startBody - 1);
body = ts.range(startBody, endFunction);
}
FunctionContext scope = context.funContext;
FunctionDeclaration function = new FunctionDeclaration(scope, identifier, parameters,
statements, header, body);
function.setLine(line);
scope.node = function;
function_StaticSemantics(function);
addFunctionDecl(function);
return inheritStrictness(function);
} finally {
restoreContext();
}
}
/**
* <strong>[13.1] Function Definitions</strong>
*
* <pre>
* FunctionExpression :
* function BindingIdentifier<sub>opt</sub> ( FormalParameters ) { FunctionBody }
* </pre>
*
* Parses a function expression; unlike a declaration the binding identifier is
* optional and the function is not registered in the enclosing scope.
*/
private FunctionExpression functionExpression() {
newContext(ContextKind.Function);
try {
int line = ts.getLine();
consume(Token.FUNCTION);
// back up to include the "function" keyword in the recorded source
int startFunction = ts.position() - "function".length();
BindingIdentifier identifier = null;
if (token() != Token.LP) {
identifier = bindingIdentifier();
}
consume(Token.LP);
FormalParameterList parameters = formalParameters(Token.RP);
consume(Token.RP);
String header, body;
List<StatementListItem> statements;
if (token() != Token.LC && isEnabled(Option.ExpressionClosure)) {
// expression-closure extension: the body is a single implicit return
// need to call manually b/c functionBody() isn't used here
applyStrictMode(false);
int startBody = ts.position();
statements = Collections.<StatementListItem> singletonList(new ReturnStatement(
assignmentExpression(true)));
int endFunction = ts.position();
header = ts.range(startFunction, startBody);
body = "return " + ts.range(startBody, endFunction);
} else {
consume(Token.LC);
int startBody = ts.position();
statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
header = ts.range(startFunction, startBody - 1);
body = ts.range(startBody, endFunction);
}
FunctionContext scope = context.funContext;
FunctionExpression function = new FunctionExpression(scope, identifier, parameters,
statements, header, body);
function.setLine(line);
scope.node = function;
function_StaticSemantics(function);
return inheritStrictness(function);
} finally {
restoreContext();
}
}
/**
* <strong>[13.1] Function Definitions</strong>
*
* <pre>
* StrictFormalParameters :
* FormalParameters
* </pre>
*
* StrictFormalParameters is syntactically identical to FormalParameters; the
* additional early-error rules are applied separately in
* {@code strictFormalParameters_StaticSemantics}.
*/
private FormalParameterList strictFormalParameters(Token end) {
return formalParameters(end);
}
/**
 * <strong>[13.1] Function Definitions</strong>
 *
 * <pre>
 * FormalParameters :
 *     [empty]
 *     FormalParameterList
 * </pre>
 *
 * @param end the token terminating the parameter list, e.g. {@code Token.RP}
 * @return the parsed (possibly empty) formal parameter list
 */
private FormalParameterList formalParameters(Token end) {
    if (token() != end) {
        return formalParameterList();
    }
    // empty parameter list
    return new FormalParameterList(Collections.<FormalParameter> emptyList());
}
/**
 * <strong>[13.1] Function Definitions</strong>
 *
 * <pre>
 * FormalParameterList :
 *     FunctionRestParameter
 *     FormalsList
 *     FormalsList, FunctionRestParameter
 * FormalsList :
 *     FormalParameter
 *     FormalsList, FormalParameter
 * FunctionRestParameter :
 *     ... BindingIdentifier
 * FormalParameter :
 *     BindingElement
 * </pre>
 */
private FormalParameterList formalParameterList() {
    List<FormalParameter> parameterList = newSmallList();
    while (true) {
        if (token() == Token.TRIPLE_DOT) {
            // a rest parameter is always the last parameter
            consume(Token.TRIPLE_DOT);
            parameterList.add(new BindingRestElement(bindingIdentifierStrict()));
            break;
        }
        parameterList.add(bindingElement());
        if (token() != Token.COMMA) {
            break;
        }
        consume(Token.COMMA);
    }
    return new FormalParameterList(parameterList);
}
/**
 * Returns the first element of {@code list} that is also contained in
 * {@code set}, or {@code null} if the two collections are disjoint.
 */
private static <T> T containsAny(Set<T> set, List<T> list) {
    for (T candidate : list) {
        if (set.contains(candidate)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Reports a syntax error if any formal parameter name in {@code boundNames} is
 * also present in {@code declaredNames}, i.e. is redeclared as a var- or
 * lexically-declared name inside the function body.
 *
 * @param boundNames the bound names of the formal parameter list
 * @param declaredNames the declared names of the body; may be {@code null}
 */
private void checkFormalParameterRedeclaration(List<String> boundNames,
        Set<String> declaredNames) {
    // Accept any Set implementation instead of the concrete HashSet type;
    // existing callers passing HashSet remain source-compatible.
    if (declaredNames == null || declaredNames.isEmpty()) {
        return;
    }
    String redeclared = containsAny(declaredNames, boundNames);
    if (redeclared != null) {
        reportSyntaxError(Messages.Key.FormalParameterRedeclaration, redeclared);
    }
}
// Early-error checks for function declarations/expressions: records the
// parameter names in the function scope and validates redeclarations and
// strict-mode restrictions.
private void function_StaticSemantics(FunctionDefinition function) {
assert context.scopeContext == context.funContext;
FunctionContext scope = context.funContext;
FormalParameterList parameters = function.getParameters();
List<String> boundNames = BoundNames(parameters);
scope.parameterNames = new HashSet<>(boundNames);
boolean simple = IsSimpleParameterList(parameters);
// only non-simple parameter lists may not share names with var-declared names
if (!simple) {
checkFormalParameterRedeclaration(boundNames, scope.varDeclaredNames);
}
// parameters may never collide with lexically declared names
checkFormalParameterRedeclaration(boundNames, scope.lexDeclaredNames);
formalParameters_StaticSemantics(boundNames, scope.parameterNames, simple);
}
/**
 * Early errors for StrictFormalParameters: duplicate parameter names and the
 * restricted names {@code "eval"} and {@code "arguments"} are always syntax
 * errors here.
 *
 * @param boundNames the bound names of the parameter list (with duplicates)
 * @param names the same names as a set (duplicates collapsed)
 */
private void strictFormalParameters_StaticSemantics(List<String> boundNames, Set<String> names) {
    // a size mismatch between list and set means at least one name occurred twice
    if (boundNames.size() != names.size()) {
        reportSyntaxError(Messages.Key.StrictModeDuplicateFormalParameter);
    }
    if (names.contains("eval") || names.contains("arguments")) {
        reportSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
    }
}
// Early errors for FormalParameters: nothing is checked for a simple parameter
// list in known non-strict code; non-simple lists are always hard errors,
// while (potentially) strict code reports strict-mode errors.
private void formalParameters_StaticSemantics(List<String> boundNames, Set<String> names,
boolean simple) {
// StrictMode.Unknown counts as potentially strict here
boolean strict = (context.strictMode != StrictMode.NonStrict);
if (!strict && simple) {
return;
}
// duplicates collapse in the set, so a size mismatch means a duplicate name
boolean hasDuplicates = (boundNames.size() != names.size());
boolean hasEvalOrArguments = (names.contains("eval") || names.contains("arguments"));
if (!simple) {
if (hasDuplicates) {
reportSyntaxError(Messages.Key.StrictModeDuplicateFormalParameter);
}
if (hasEvalOrArguments) {
reportSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
}
}
if (strict) {
// NOTE(review): reportStrictModeSyntaxError appears to defer the error while
// strictness is still Unknown (cf. context.strictError in applyStrictMode)
if (hasDuplicates) {
reportStrictModeSyntaxError(Messages.Key.StrictModeDuplicateFormalParameter);
}
if (hasEvalOrArguments) {
reportStrictModeSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
}
}
}
/**
 * <strong>[13.1] Function Definitions</strong>
 *
 * <pre>
 * FunctionBody :
 *     FunctionStatementList
 * FunctionStatementList :
 *     StatementList<sub>opt</sub>
 * </pre>
 *
 * Parses a function body up to (but not consuming) {@code end}: the directive
 * prologue followed by the remaining statement list.
 */
private List<StatementListItem> functionBody(Token end) {
    // enable 'yield' if in generator
    context.yieldAllowed = (context.kind == ContextKind.Generator);
    List<StatementListItem> directives = directivePrologue();
    List<StatementListItem> statements = statementList(end);
    return merge(directives, statements);
}
/**
* <strong>[13.2] Arrow Function Definitions</strong>
*
* <pre>
* ArrowFunction :
* ArrowParameters => ConciseBody
* ArrowParameters :
* BindingIdentifier
* CoverParenthesisedExpressionAndArrowParameterList
* ConciseBody :
* [LA &#x2209; { <b>{</b> }] AssignmentExpression
* { FunctionBody }
* </pre>
*
* <h2>Supplemental Syntax</h2>
*
* <pre>
* ArrowFormalParameters :
* ( StrictFormalParameters )
* </pre>
*
* Parses an arrow function. A synthetic "function anonymous(...)" header is
* assembled for the recorded source text.
*/
private ArrowFunction arrowFunction() {
newContext(ContextKind.ArrowFunction);
try {
int line = ts.getLine();
StringBuilder source = new StringBuilder();
source.append("function anonymous");
FormalParameterList parameters;
if (token() == Token.LP) {
consume(Token.LP);
// back up to include the '(' in the recorded parameter source
int start = ts.position() - 1;
parameters = strictFormalParameters(Token.RP);
consume(Token.RP);
source.append(ts.range(start, ts.position()));
} else {
// single-identifier arrow parameter without parentheses
BindingIdentifier identifier = bindingIdentifierStrict();
FormalParameter parameter = new BindingElement(identifier, null);
parameters = new FormalParameterList(singletonList(parameter));
source.append('(').append(identifier.getName()).append(')');
}
consume(Token.ARROW);
if (token() == Token.LC) {
consume(Token.LC);
int startBody = ts.position();
List<StatementListItem> statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
String header = source.toString();
String body = ts.range(startBody, endFunction);
FunctionContext scope = context.funContext;
ArrowFunction function = new ArrowFunction(scope, parameters, statements, header,
body);
function.setLine(line);
scope.node = function;
arrowFunction_StaticSemantics(function);
return inheritStrictness(function);
} else {
// ConciseBody without braces: a single assignment expression
// need to call manually b/c functionBody() isn't used here
applyStrictMode(false);
int startBody = ts.position();
Expression expression = assignmentExpression(true);
int endFunction = ts.position();
String header = source.toString();
String body = "return " + ts.range(startBody, endFunction);
FunctionContext scope = context.funContext;
ArrowFunction function = new ArrowFunction(scope, parameters, expression, header,
body);
function.setLine(line);
scope.node = function;
arrowFunction_StaticSemantics(function);
return inheritStrictness(function);
}
} finally {
restoreContext();
}
}
/**
 * Early-error checks for arrow functions: records the parameter names in the
 * function scope and applies the StrictFormalParameters restrictions.
 */
private void arrowFunction_StaticSemantics(ArrowFunction function) {
    assert context.scopeContext == context.funContext;
    FunctionContext scope = context.funContext;
    List<String> names = BoundNames(function.getParameters());
    scope.parameterNames = new HashSet<>(names);
    // arrow parameters may not collide with var- or lexically-declared names
    checkFormalParameterRedeclaration(names, scope.varDeclaredNames);
    checkFormalParameterRedeclaration(names, scope.lexDeclaredNames);
    strictFormalParameters_StaticSemantics(names, scope.parameterNames);
}
/**
 * <strong>[13.3] Method Definitions</strong>
 *
 * <pre>
 * MethodDefinition :
 *     PropertyName ( StrictFormalParameters ) { FunctionBody }
 *     GeneratorMethod
 *     get PropertyName ( ) { FunctionBody }
 *     set PropertyName ( PropertySetParameterList ) { FunctionBody }
 * </pre>
 *
 * @param alwaysStrict whether the method is forced into strict mode
 */
private MethodDefinition methodDefinition(boolean alwaysStrict) {
    MethodType kind = methodType();
    if (kind == MethodType.Generator) {
        return generatorMethod(alwaysStrict);
    }
    if (kind == MethodType.Getter) {
        return getterMethod(alwaysStrict);
    }
    if (kind == MethodType.Setter) {
        return setterMethod(alwaysStrict);
    }
    return normalMethod(alwaysStrict);
}
/**
 * <strong>[13.3] Method Definitions</strong>
 *
 * <pre>
 * MethodDefinition :
 *     PropertyName ( StrictFormalParameters ) { FunctionBody }
 * </pre>
 */
private MethodDefinition normalMethod(boolean alwaysStrict) {
    // capture the line number before reading the property name
    int startLine = ts.getLine();
    PropertyName name = propertyName();
    return normalMethod(startLine, name, alwaysStrict);
}
// Parses the parameter list and body of a plain method; the property name and
// line number have already been parsed by the caller.
private MethodDefinition normalMethod(int line, PropertyName propertyName, boolean alwaysStrict) {
newContext(ContextKind.Method);
if (alwaysStrict) {
// e.g. class bodies force all methods into strict mode
context.strictMode = StrictMode.Strict;
}
try {
consume(Token.LP);
// back up to include the '(' in the recorded source
int startFunction = ts.position() - 1;
FormalParameterList parameters = strictFormalParameters(Token.RP);
consume(Token.RP);
consume(Token.LC);
int startBody = ts.position();
List<StatementListItem> statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
// prepend "function " so the recorded header reads like a function header
String header = "function " + ts.range(startFunction, startBody - 1);
String body = ts.range(startBody, endFunction);
FunctionContext scope = context.funContext;
MethodType type = MethodType.Function;
MethodDefinition method = new MethodDefinition(scope, type, propertyName, parameters,
statements, context.hasSuperReference(), header, body);
method.setLine(line);
scope.node = method;
methodDefinition_StaticSemantics(method);
return inheritStrictness(method);
} finally {
restoreContext();
}
}
/**
* <strong>[13.3] Method Definitions</strong>
*
* <pre>
* MethodDefinition :
* get PropertyName ( ) { FunctionBody }
* </pre>
*
* Parses a getter method. The leading "get" has been classified by
* {@code methodType()} and is consumed here as a plain NAME token.
*/
private MethodDefinition getterMethod(boolean alwaysStrict) {
int line = ts.getLine();
consume(Token.NAME);
PropertyName propertyName = propertyName();
newContext(ContextKind.Method);
if (alwaysStrict) {
context.strictMode = StrictMode.Strict;
}
try {
consume(Token.LP);
// back up to include the '(' in the recorded source
int startFunction = ts.position() - 1;
// getters take no parameters
FormalParameterList parameters = new FormalParameterList(
Collections.<FormalParameter> emptyList());
consume(Token.RP);
List<StatementListItem> statements;
String header, body;
if (token() != Token.LC && isEnabled(Option.ExpressionClosure)) {
// expression-closure extension: the body is a single implicit return
// need to call manually b/c functionBody() isn't used here
applyStrictMode(false);
int startBody = ts.position();
statements = Collections.<StatementListItem> singletonList(new ReturnStatement(
assignmentExpression(true)));
int endFunction = ts.position();
header = "function " + ts.range(startFunction, startBody);
body = "return " + ts.range(startBody, endFunction);
} else {
consume(Token.LC);
int startBody = ts.position();
statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
header = "function " + ts.range(startFunction, startBody - 1);
body = ts.range(startBody, endFunction);
}
FunctionContext scope = context.funContext;
MethodType type = MethodType.Getter;
MethodDefinition method = new MethodDefinition(scope, type, propertyName, parameters,
statements, context.hasSuperReference(), header, body);
method.setLine(line);
scope.node = method;
methodDefinition_StaticSemantics(method);
return inheritStrictness(method);
} finally {
restoreContext();
}
}
/**
* <strong>[13.3] Method Definitions</strong>
*
* <pre>
* MethodDefinition :
* set PropertyName ( PropertySetParameterList ) { FunctionBody }
* </pre>
*
* Parses a setter method. The leading "set" has been classified by
* {@code methodType()} and is consumed here as a plain NAME token.
*/
private MethodDefinition setterMethod(boolean alwaysStrict) {
int line = ts.getLine();
consume(Token.NAME);
PropertyName propertyName = propertyName();
newContext(ContextKind.Method);
if (alwaysStrict) {
context.strictMode = StrictMode.Strict;
}
try {
consume(Token.LP);
// back up to include the '(' in the recorded source
int startFunction = ts.position() - 1;
// setters take exactly one parameter
FormalParameterList parameters = propertySetParameterList();
consume(Token.RP);
List<StatementListItem> statements;
String header, body;
if (token() != Token.LC && isEnabled(Option.ExpressionClosure)) {
// expression-closure extension: the body is a single implicit return
// need to call manually b/c functionBody() isn't used here
applyStrictMode(false);
int startBody = ts.position();
statements = Collections.<StatementListItem> singletonList(new ReturnStatement(
assignmentExpression(true)));
int endFunction = ts.position();
header = "function " + ts.range(startFunction, startBody);
body = "return " + ts.range(startBody, endFunction);
} else {
consume(Token.LC);
int startBody = ts.position();
statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
header = "function " + ts.range(startFunction, startBody - 1);
body = ts.range(startBody, endFunction);
}
FunctionContext scope = context.funContext;
MethodType type = MethodType.Setter;
MethodDefinition method = new MethodDefinition(scope, type, propertyName, parameters,
statements, context.hasSuperReference(), header, body);
method.setLine(line);
scope.node = method;
methodDefinition_StaticSemantics(method);
return inheritStrictness(method);
} finally {
restoreContext();
}
}
/**
 * <strong>[13.3] Method Definitions</strong>
 *
 * <pre>
 * PropertySetParameterList :
 *     BindingIdentifier
 *     BindingPattern
 * </pre>
 *
 * @return a parameter list holding the setter's single parameter
 */
private FormalParameterList propertySetParameterList() {
    FormalParameter parameter = new BindingElement(binding(), null);
    return new FormalParameterList(singletonList(parameter));
}
/**
 * Peeks at the upcoming tokens to classify the method definition that starts
 * here: generator, getter, setter or plain function. Does not consume input.
 */
private MethodType methodType() {
    Token current = token();
    if (current == Token.MUL) {
        return MethodType.Generator;
    }
    if (current == Token.NAME) {
        String name = getName(Token.NAME);
        // "get"/"set" only denote an accessor if a property name follows
        boolean accessorKeyword = "get".equals(name) || "set".equals(name);
        if (accessorKeyword && isPropertyName(peek())) {
            return "get".equals(name) ? MethodType.Getter : MethodType.Setter;
        }
    }
    return MethodType.Function;
}
/**
 * Returns {@code true} if {@code token} can start a PropertyName: a string or
 * number literal, a '[' (computed property name), or any identifier name.
 */
private boolean isPropertyName(Token token) {
    switch (token) {
    case STRING:
    case NUMBER:
    case LB:
        return true;
    default:
        return isIdentifierName(token);
    }
}
// Early-error checks per method kind: normal and generator methods use the
// StrictFormalParameters rules, setters use the PropertySetParameterList
// rules, getters have an empty parameter list and need no checks.
private void methodDefinition_StaticSemantics(MethodDefinition method) {
assert context.scopeContext == context.funContext;
FunctionContext scope = context.funContext;
FormalParameterList parameters = method.getParameters();
List<String> boundNames = BoundNames(parameters);
scope.parameterNames = new HashSet<>(boundNames);
switch (method.getType()) {
case Function:
case Generator: {
checkFormalParameterRedeclaration(boundNames, scope.varDeclaredNames);
checkFormalParameterRedeclaration(boundNames, scope.lexDeclaredNames);
strictFormalParameters_StaticSemantics(boundNames, scope.parameterNames);
return;
}
case Setter: {
// var-redeclaration is only an error for non-simple parameter lists
boolean simple = IsSimpleParameterList(parameters);
if (!simple) {
checkFormalParameterRedeclaration(boundNames, scope.varDeclaredNames);
}
checkFormalParameterRedeclaration(boundNames, scope.lexDeclaredNames);
propertySetParameterList_StaticSemantics(boundNames, scope.parameterNames, simple);
return;
}
case Getter:
default:
// getters have no parameters, nothing to check
return;
}
}
// Early errors for a setter's PropertySetParameterList; see the FIXME notes
// below for known divergences from the spec draft this parser tracks.
private void propertySetParameterList_StaticSemantics(List<String> boundNames,
Set<String> names, boolean simple) {
// StrictMode.Unknown counts as potentially strict here
boolean strict = (context.strictMode != StrictMode.NonStrict);
// duplicates collapse in the set, so a size mismatch means a duplicate name
boolean hasDuplicates = (boundNames.size() != names.size());
boolean hasEvalOrArguments = (names.contains("eval") || names.contains("arguments"));
if (!simple) {
if (hasDuplicates) {
reportSyntaxError(Messages.Key.StrictModeDuplicateFormalParameter);
}
if (hasEvalOrArguments) {
reportSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
}
}
// FIXME: spec bug - duplicate check done twice
if (hasDuplicates) {
reportSyntaxError(Messages.Key.StrictModeDuplicateFormalParameter);
}
// FIXME: spec bug - not handled in draft
if (strict) {
if (hasEvalOrArguments) {
reportStrictModeSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
}
}
}
/**
* <strong>[13.4] Generator Function Definitions</strong>
*
* <pre>
* GeneratorMethod :
* * PropertyName ( StrictFormalParameters ) { FunctionBody }
* </pre>
*
* Parses a generator method; the leading '*' marks it as a generator.
*/
private MethodDefinition generatorMethod(boolean alwaysStrict) {
int line = ts.getLine();
consume(Token.MUL);
PropertyName propertyName = propertyName();
newContext(ContextKind.Generator);
if (alwaysStrict) {
context.strictMode = StrictMode.Strict;
}
try {
consume(Token.LP);
// back up to include the '(' in the recorded source
int startFunction = ts.position() - 1;
FormalParameterList parameters = strictFormalParameters(Token.RP);
consume(Token.RP);
consume(Token.LC);
int startBody = ts.position();
List<StatementListItem> statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
// synthesize a generator-style header for the recorded source
String header = "function* " + ts.range(startFunction, startBody - 1);
String body = ts.range(startBody, endFunction);
FunctionContext scope = context.funContext;
MethodType type = MethodType.Generator;
MethodDefinition method = new MethodDefinition(scope, type, propertyName, parameters,
statements, context.hasSuperReference(), header, body);
method.setLine(line);
scope.node = method;
methodDefinition_StaticSemantics(method);
return inheritStrictness(method);
} finally {
restoreContext();
}
}
/**
* <strong>[13.4] Generator Function Definitions</strong>
*
* <pre>
* GeneratorDeclaration :
* function * BindingIdentifier ( FormalParameters ) { FunctionBody }
* </pre>
*
* Parses a generator declaration. With {@code starless} set, a plain function
* that contained a legacy {@code yield} is re-parsed as a generator without
* the '*' marker (see {@code functionOrGeneratorDeclaration}).
*/
private GeneratorDeclaration generatorDeclaration(boolean starless) {
newContext(ContextKind.Generator);
try {
int line = ts.getLine();
consume(Token.FUNCTION);
// back up to include the "function" keyword in the recorded source
int startFunction = ts.position() - "function".length();
if (!starless) {
consume(Token.MUL);
}
BindingIdentifier identifier = bindingIdentifier();
consume(Token.LP);
FormalParameterList parameters = formalParameters(Token.RP);
consume(Token.RP);
consume(Token.LC);
int startBody = ts.position();
List<StatementListItem> statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
String header = ts.range(startFunction, startBody - 1);
String body = ts.range(startBody, endFunction);
FunctionContext scope = context.funContext;
GeneratorDeclaration generator = new GeneratorDeclaration(scope, identifier,
parameters, statements, header, body);
generator.setLine(line);
scope.node = generator;
generator_StaticSemantics(generator);
addGeneratorDecl(generator);
return inheritStrictness(generator);
} finally {
restoreContext();
}
}
/**
* <strong>[13.4] Generator Function Definitions</strong>
*
* <pre>
* GeneratorExpression :
* function * BindingIdentifier<sub>opt</sub> ( FormalParameters ) { FunctionBody }
* </pre>
*
* Parses a generator expression; the binding identifier is optional and, with
* {@code starless} set, the '*' marker is omitted (legacy generators).
*/
private GeneratorExpression generatorExpression(boolean starless) {
newContext(ContextKind.Generator);
try {
int line = ts.getLine();
consume(Token.FUNCTION);
// back up to include the "function" keyword in the recorded source
int startFunction = ts.position() - "function".length();
if (!starless) {
consume(Token.MUL);
}
BindingIdentifier identifier = null;
if (token() != Token.LP) {
identifier = bindingIdentifier();
}
consume(Token.LP);
FormalParameterList parameters = formalParameters(Token.RP);
consume(Token.RP);
consume(Token.LC);
int startBody = ts.position();
List<StatementListItem> statements = functionBody(Token.RC);
consume(Token.RC);
// exclude the closing '}' from the recorded source text
int endFunction = ts.position() - 1;
String header = ts.range(startFunction, startBody - 1);
String body = ts.range(startBody, endFunction);
FunctionContext scope = context.funContext;
GeneratorExpression generator = new GeneratorExpression(scope, identifier, parameters,
statements, header, body);
generator.setLine(line);
scope.node = generator;
generator_StaticSemantics(generator);
return inheritStrictness(generator);
} finally {
restoreContext();
}
}
/**
 * Early-error checks for generator definitions; mirrors
 * {@code function_StaticSemantics} for generator parameter lists.
 */
private void generator_StaticSemantics(GeneratorDefinition generator) {
    assert context.scopeContext == context.funContext;
    FunctionContext scope = context.funContext;
    FormalParameterList parameters = generator.getParameters();
    List<String> names = BoundNames(parameters);
    scope.parameterNames = new HashSet<>(names);
    boolean simpleList = IsSimpleParameterList(parameters);
    // only non-simple parameter lists may not share names with var-declared names
    if (!simpleList) {
        checkFormalParameterRedeclaration(names, scope.varDeclaredNames);
    }
    // parameters may never collide with lexically declared names
    checkFormalParameterRedeclaration(names, scope.lexDeclaredNames);
    formalParameters_StaticSemantics(names, scope.parameterNames, simpleList);
}
/**
* <strong>[13.4] Generator Function Definitions</strong>
*
* <pre>
* YieldExpression :
* yield YieldDelegator<sub>opt</sub> <font size="-1">[Lexical goal <i>InputElementRegExp</i>]</font> AssignmentExpression
* YieldDelegator :
* *
* </pre>
*
* Parses a YieldExpression. Outside a generator this is a syntax error, except
* that a legacy {@code yield} in a plain function triggers a re-parse as a
* star-less generator via {@code RetryGenerator}.
*/
private YieldExpression yieldExpression() {
if (!context.yieldAllowed) {
// unwound in functionOrGeneratorDeclaration(), which re-parses as generator
if (context.kind == ContextKind.Function && isEnabled(Option.LegacyGenerator)) {
throw new RetryGenerator();
}
reportSyntaxError(Messages.Key.InvalidYieldStatement);
}
consume(Token.YIELD);
boolean delegatedYield = false;
if (token() == Token.MUL) {
// `yield *` delegates to another generator
consume(Token.MUL);
delegatedYield = true;
}
if (token() == Token.YIELD) {
// disallow `yield yield x` but allow `yield (yield x)`
// TODO: track spec changes, syntax not yet settled
reportSyntaxError(Messages.Key.InvalidYieldStatement);
}
// TODO: NoLineTerminator() restriction or context dependent?
Expression expr;
if (delegatedYield) {
// delegated yield always requires an operand
expr = assignmentExpression(true);
} else if (assignmentExpressionForYield()) {
// TODO: make this an option
expr = assignmentExpression(true);
} else {
// extension: allow Spidermonkey syntax
expr = null;
}
return new YieldExpression(delegatedYield, expr);
}
/**
 * Decides whether a non-delegated {@code yield} is followed by an operand:
 * tokens that close the surrounding construct mean a bare yield; any other
 * token starts an operand unless a line terminator intervenes.
 */
private boolean assignmentExpressionForYield() {
    Token next = token();
    if (next == Token.COLON || next == Token.COMMA || next == Token.RB || next == Token.RC
            || next == Token.RP || next == Token.SEMI || next == Token.EOF) {
        return false;
    }
    return noLineTerminator();
}
/**
 * <strong>[13.5] Class Definitions</strong>
 *
 * <pre>
 * ClassDeclaration :
 *     class BindingIdentifier ClassTail
 * ClassTail :
 *     ClassHeritage<sub>opt</sub> { ClassBody<sub>opt</sub> }
 * ClassHeritage :
 *     extends AssignmentExpression
 * </pre>
 *
 * Parses a class declaration and registers its name as a lexically declared
 * name in the current scope.
 */
private ClassDeclaration classDeclaration() {
    consume(Token.CLASS);
    BindingIdentifier className = bindingIdentifierStrict();
    Expression superClass = null;
    if (token() == Token.EXTENDS) {
        consume(Token.EXTENDS);
        superClass = assignmentExpression(true);
    }
    consume(Token.LC);
    List<MethodDefinition> staticMethods = newList();
    List<MethodDefinition> prototypeMethods = newList();
    classBody(staticMethods, prototypeMethods);
    consume(Token.RC);
    ClassDeclaration declaration = new ClassDeclaration(className, superClass,
            staticMethods, prototypeMethods);
    addLexDeclaredName(className);
    addLexScopedDeclaration(declaration);
    return declaration;
}
/**
* <strong>[13.5] Class Definitions</strong>
*
* <pre>
* ClassExpression :
* class BindingIdentifier<sub>opt</sub> ClassTail
* ClassTail :
* ClassHeritage<sub>opt</sub> { ClassBody<sub>opt</sub> }
* ClassHeritage :
* extends AssignmentExpression
* </pre>
*
* Parses a class expression. A named class expression opens its own block
* scope so the class name is visible inside the class body only.
*/
private ClassExpression classExpression() {
consume(Token.CLASS);
BindingIdentifier name = null;
if (token() != Token.EXTENDS && token() != Token.LC) {
name = bindingIdentifierStrict();
}
Expression heritage = null;
if (token() == Token.EXTENDS) {
consume(Token.EXTENDS);
heritage = assignmentExpression(true);
}
consume(Token.LC);
if (name != null) {
// bind the class name in its own scope for the duration of the body
enterBlockContext();
addLexDeclaredName(name);
}
List<MethodDefinition> staticMethods = newList();
List<MethodDefinition> prototypeMethods = newList();
classBody(staticMethods, prototypeMethods);
if (name != null) {
exitBlockContext();
}
consume(Token.RC);
return new ClassExpression(name, heritage, staticMethods, prototypeMethods);
}
/**
 * <strong>[13.5] Class Definitions</strong>
 *
 * <pre>
 * ClassBody :
 *     ClassElementList
 * ClassElementList :
 *     ClassElement
 *     ClassElementList ClassElement
 * ClassElement :
 *     MethodDefinition
 *     static MethodDefinition
 *     ;
 * </pre>
 *
 * Collects static and prototype method definitions into the given lists; all
 * class methods are parsed in strict mode. The closing '}' is not consumed.
 */
private void classBody(List<MethodDefinition> staticMethods,
        List<MethodDefinition> prototypeMethods) {
    for (Token current = token(); current != Token.RC; current = token()) {
        if (current == Token.SEMI) {
            // stray semicolons are permitted between class elements
            consume(Token.SEMI);
            continue;
        }
        if (current == Token.STATIC && !LOOKAHEAD(Token.LP)) {
            // `static` directly followed by '(' is a method named "static"
            consume(Token.STATIC);
            staticMethods.add(methodDefinition(true));
        } else {
            prototypeMethods.add(methodDefinition(true));
        }
    }
    classBody_StaticSemantics(staticMethods, true);
    classBody_StaticSemantics(prototypeMethods, false);
}
// Early errors for a class body: static methods must not be named "prototype",
// a prototype "constructor" must not be a special method (cf. SpecialMethod),
// and duplicate property names are rejected except complementary get/set pairs.
private void classBody_StaticSemantics(List<MethodDefinition> defs, boolean isStatic) {
// GETTER and SETTER use distinct bits, so a get/set pair merges to GETTER|SETTER
final int VALUE = 0, GETTER = 1, SETTER = 2;
Map<String, Integer> values = new HashMap<>();
for (MethodDefinition def : defs) {
String key = PropName(def);
if (key == null) {
// computed property names cannot be checked statically
assert def.getPropertyName() instanceof ComputedPropertyName;
continue;
}
if (isStatic) {
if ("prototype".equals(key)) {
reportSyntaxError(Messages.Key.InvalidPrototypeMethod);
}
} else {
if ("constructor".equals(key) && SpecialMethod(def)) {
reportSyntaxError(Messages.Key.InvalidConstructorMethod);
}
}
MethodDefinition.MethodType type = def.getType();
final int kind = type == MethodType.Getter ? GETTER
: type == MethodType.Setter ? SETTER : VALUE;
if (values.containsKey(key)) {
int prev = values.get(key);
// a plain method never coexists with any previous definition
if (kind == VALUE) {
reportSyntaxError(Messages.Key.DuplicatePropertyDefinition, key);
}
// a getter may only follow a lone setter, and vice versa
if (kind == GETTER && prev != SETTER) {
reportSyntaxError(Messages.Key.DuplicatePropertyDefinition, key);
}
if (kind == SETTER && prev != GETTER) {
reportSyntaxError(Messages.Key.DuplicatePropertyDefinition, key);
}
values.put(key, prev | kind);
} else {
values.put(key, kind);
}
}
}
/**
* <strong>[12] Statements</strong>
*
* <pre>
* Statement :
* BlockStatement
* VariableStatement
* EmptyStatement
* ExpressionStatement
* IfStatement
* BreakableStatement
* ContinueStatement
* BreakStatement
* ReturnStatement
* WithStatement
* LabelledStatement
* ThrowStatement
* TryStatement
* DebuggerStatement
*
* BreakableStatement :
* IterationStatement
* SwitchStatement
* </pre>
*
* Parses a single statement by dispatching on the current token; declarations
* are handled by {@code statementListItem()}, not here.
*/
private Statement statement() {
switch (token()) {
case LC:
return block(NO_INHERITED_BINDING);
case VAR:
return variableStatement();
case SEMI:
return emptyStatement();
case IF:
return ifStatement();
case FOR:
return forStatement(EMPTY_LABEL_SET);
case WHILE:
return whileStatement(EMPTY_LABEL_SET);
case DO:
return doWhileStatement(EMPTY_LABEL_SET);
case CONTINUE:
return continueStatement();
case BREAK:
return breakStatement();
case RETURN:
return returnStatement();
case WITH:
return withStatement();
case SWITCH:
return switchStatement(EMPTY_LABEL_SET);
case THROW:
return throwStatement();
case TRY:
return tryStatement();
case DEBUGGER:
return debuggerStatement();
case LET:
// `let` is only a statement keyword under the LetStatement extension;
// otherwise break out of the switch to parse an expression statement
if (isEnabled(Option.LetStatement)) {
return letStatement();
}
break;
case NAME:
// `identifier :` introduces a labelled statement; any other identifier
// intentionally falls through to the expression-statement default
if (LOOKAHEAD(Token.COLON)) {
return labelledStatement();
}
default:
}
return expressionStatement();
}
/**
* <strong>[12.1] Block</strong>
*
* <pre>
* BlockStatement :
* Block
* Block :
* { StatementList<sub>opt</sub> }
* </pre>
*
* Parses a block in its own block scope. {@code inherited} carries bindings
* from the enclosing construct that are treated as lexically declared inside
* the block but removed again before the scope is exited.
*/
private BlockStatement block(List<Binding> inherited) {
consume(Token.LC);
BlockContext scope = enterBlockContext();
if (!inherited.isEmpty()) {
addLexDeclaredNames(inherited);
}
List<StatementListItem> list = statementList(Token.RC);
if (!inherited.isEmpty()) {
// the inherited names belong to the enclosing construct, not this block
removeLexDeclaredNames(inherited);
}
exitBlockContext();
consume(Token.RC);
BlockStatement block = new BlockStatement(scope, list);
scope.node = block;
return block;
}
/**
 * <strong>[12.1] Block</strong>
 *
 * <pre>
 * StatementList :
 *     StatementListItem
 *     StatementList StatementListItem
 * </pre>
 *
 * @param end the token terminating the list; it is not consumed
 */
private List<StatementListItem> statementList(Token end) {
    List<StatementListItem> items = newList();
    while (token() != end) {
        items.add(statementListItem());
    }
    return items;
}
/**
* <strong>[12.1] Block</strong>
*
* <pre>
* StatementListItem :
* Statement
* Declaration
* Declaration :
* FunctionDeclaration
* GeneratorDeclaration
* ClassDeclaration
* LexicalDeclaration
* </pre>
*
* Parses a StatementListItem: a declaration or a statement. Note the
* intentional fall-through from {@code case LET} into the declaration cases.
*/
private StatementListItem statementListItem() {
switch (token()) {
case LET:
// `let (` is a let-statement/expression when those extensions are enabled
if (LOOKAHEAD(Token.LP)
&& (isEnabled(Option.LetStatement) || isEnabled(Option.LetExpression))) {
return statement();
}
// fall through: `let` starts a lexical declaration
case FUNCTION:
case CLASS:
case CONST:
return declaration();
default:
return statement();
}
}
/**
 * <strong>[12.1] Block</strong>
 *
 * <pre>
 * Declaration :
 *     FunctionDeclaration
 *     GeneratorDeclaration
 *     ClassDeclaration
 *     LexicalDeclaration
 * </pre>
 *
 * Parses a Declaration production; reports a syntax error if the current
 * token cannot start a declaration.
 */
private Declaration declaration() {
    Token current = token();
    if (current == Token.FUNCTION) {
        return functionOrGeneratorDeclaration();
    }
    if (current == Token.CLASS) {
        return classDeclaration();
    }
    if (current == Token.LET || current == Token.CONST) {
        return lexicalDeclaration(true);
    }
    throw reportSyntaxError(Messages.Key.InvalidToken, current.toString());
}
// Distinguishes `function*` generators from plain functions. A plain function
// containing a legacy `yield` throws RetryGenerator (see yieldExpression), in
// which case the token stream is rewound and the function re-parsed as a
// star-less generator.
private Declaration functionOrGeneratorDeclaration() {
if (LOOKAHEAD(Token.MUL)) {
return generatorDeclaration(false);
} else {
// remember the stream state so we can rewind on RetryGenerator
long position = ts.position(), lineinfo = ts.lineinfo();
try {
return functionDeclaration();
} catch (RetryGenerator e) {
ts.reset(position, lineinfo);
return generatorDeclaration(true);
}
}
}
/**
 * <strong>[12.2.1] Let and Const Declarations</strong>
 *
 * <pre>
 * LexicalDeclaration :
 *     LetOrConst BindingList ;
 * LexicalDeclarationNoIn :
 *     LetOrConst BindingListNoIn
 * LetOrConst :
 *     let
 *     const
 * </pre>
 *
 * @param allowIn {@code false} when parsing a for-statement head, in which
 *            case no terminating semicolon is expected
 */
private LexicalDeclaration lexicalDeclaration(boolean allowIn) {
    LexicalDeclaration.Type kind;
    if (token() == Token.LET) {
        consume(Token.LET);
        kind = LexicalDeclaration.Type.Let;
    } else {
        consume(Token.CONST);
        kind = LexicalDeclaration.Type.Const;
    }
    boolean isConst = (kind == LexicalDeclaration.Type.Const);
    List<LexicalBinding> bindings = bindingList(isConst, allowIn);
    if (allowIn) {
        semicolon();
    }
    LexicalDeclaration declaration = new LexicalDeclaration(kind, bindings);
    addLexScopedDeclaration(declaration);
    return declaration;
}
/**
 * Parses a comma-separated, non-empty list of lexical bindings.
 *
 * <strong>[12.2.1] Let and Const Declarations</strong>
 *
 * <pre>
 * BindingList :
 *     LexicalBinding
 *     BindingList, LexicalBinding
 * BindingListNoIn :
 *     LexicalBindingNoIn
 *     BindingListNoIn, LexicalBindingNoIn
 * </pre>
 */
private List<LexicalBinding> bindingList(boolean isConst, boolean allowIn) {
    List<LexicalBinding> bindings = newSmallList();
    for (;;) {
        bindings.add(lexicalBinding(isConst, allowIn));
        if (token() != Token.COMMA) {
            break;
        }
        consume(Token.COMMA);
    }
    return bindings;
}
/**
 * Parses a single lexical binding: an identifier or destructuring pattern with
 * an optional initialiser.
 *
 * <strong>[12.2.1] Let and Const Declarations</strong>
 *
 * <pre>
 * LexicalBinding :
 *     BindingIdentifier Initialiser<sub>opt</sub>
 *     BindingPattern Initialiser
 * LexicalBindingNoIn :
 *     BindingIdentifier InitialiserNoIn<sub>opt</sub>
 *     BindingPattern InitialiserNoIn
 * </pre>
 *
 * @param isConst whether the enclosing declaration is a 'const' (requires an
 *            initialiser for identifier bindings outside for-heads)
 * @param allowIn {@code false} when parsing a for-statement head
 */
private LexicalBinding lexicalBinding(boolean isConst, boolean allowIn) {
Binding binding;
Expression initialiser = null;
if (token() == Token.LC || token() == Token.LB) {
// destructuring form: { ... } or [ ... ]
BindingPattern bindingPattern = bindingPattern();
addLexDeclaredName(bindingPattern);
if (allowIn) {
// a pattern initialiser is mandatory in statement position; a missing
// one is reported later by validateFor/validateForInOf
initialiser = initialiser(allowIn);
} else if (token() == Token.ASSIGN) {
// make initialiser optional if `allowIn == false`
initialiser = initialiser(allowIn);
}
binding = bindingPattern;
} else {
BindingIdentifier bindingIdentifier = bindingIdentifier();
addLexDeclaredName(bindingIdentifier);
if (token() == Token.ASSIGN) {
initialiser = initialiser(allowIn);
} else if (isConst && allowIn) {
// `allowIn == false` indicates for-loop, cf. validateFor{InOf}
reportSyntaxError(Messages.Key.ConstMissingInitialiser);
}
binding = bindingIdentifier;
}
return new LexicalBinding(binding, initialiser);
}
/**
 * Parses a binding identifier, rejecting 'eval'/'arguments' under strict mode.
 *
 * <strong>[12.2.1] Let and Const Declarations</strong>
 *
 * <pre>
 * BindingIdentifier :
 *     Identifier
 * </pre>
 */
private BindingIdentifier bindingIdentifier() {
    String name = identifier();
    boolean restricted = "arguments".equals(name) || "eval".equals(name);
    if (restricted && context.strictMode != StrictMode.NonStrict) {
        // strict (or possibly-strict) code may not bind these names
        reportStrictModeSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
    }
    return new BindingIdentifier(name);
}
/**
 * Parses a binding identifier, unconditionally rejecting 'eval'/'arguments'
 * (used where the restriction applies regardless of the current strict mode,
 * e.g. inside binding patterns).
 *
 * <pre>
 * BindingIdentifier :
 *     Identifier
 * </pre>
 */
private BindingIdentifier bindingIdentifierStrict() {
    String name = identifier();
    switch (name) {
    case "arguments":
    case "eval":
        reportSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
    }
    return new BindingIdentifier(name);
}
/**
 * Parses an initialiser: '=' followed by an assignment expression.
 *
 * <strong>[12.2.1] Let and Const Declarations</strong>
 *
 * <pre>
 * Initialiser :
 *     = AssignmentExpression
 * InitialiserNoIn :
 *     = AssignmentExpressionNoIn
 * </pre>
 *
 * @param allowIn {@code false} for the NoIn variant used in for-heads
 */
private Expression initialiser(boolean allowIn) {
consume(Token.ASSIGN);
return assignmentExpression(allowIn);
}
/**
 * Parses a 'var' statement and registers it as a var-scoped declaration.
 *
 * <strong>[12.2.2] Variable Statement</strong>
 *
 * <pre>
 * VariableStatement :
 *     var VariableDeclarationList ;
 * </pre>
 */
private VariableStatement variableStatement() {
    consume(Token.VAR);
    List<VariableDeclaration> declarations = variableDeclarationList(true);
    // automatic semicolon insertion applies here
    semicolon();
    VariableStatement statement = new VariableStatement(declarations);
    addVarScopedDeclaration(statement);
    return statement;
}
/**
 * Parses a comma-separated, non-empty list of variable declarations.
 *
 * <strong>[12.2.2] Variable Statement</strong>
 *
 * <pre>
 * VariableDeclarationList :
 *     VariableDeclaration
 *     VariableDeclarationList , VariableDeclaration
 * VariableDeclarationListNoIn :
 *     VariableDeclarationNoIn
 *     VariableDeclarationListNoIn , VariableDeclarationNoIn
 * </pre>
 */
private List<VariableDeclaration> variableDeclarationList(boolean allowIn) {
    List<VariableDeclaration> declarations = newSmallList();
    for (;;) {
        declarations.add(variableDeclaration(allowIn));
        if (token() != Token.COMMA) {
            break;
        }
        consume(Token.COMMA);
    }
    return declarations;
}
/**
 * Parses a single variable declaration: identifier or destructuring pattern
 * with an optional initialiser. Unlike const, a missing initialiser on an
 * identifier binding is always allowed.
 *
 * <strong>[12.2.2] Variable Statement</strong>
 *
 * <pre>
 * VariableDeclaration :
 *     BindingIdentifier Initialiser<sub>opt</sub>
 *     BindingPattern Initialiser
 * VariableDeclarationNoIn :
 *     BindingIdentifier InitialiserNoIn<sub>opt</sub>
 *     BindingPattern InitialiserNoIn
 * </pre>
 */
private VariableDeclaration variableDeclaration(boolean allowIn) {
Binding binding;
Expression initialiser = null;
if (token() == Token.LC || token() == Token.LB) {
// destructuring form: { ... } or [ ... ]
BindingPattern bindingPattern = bindingPattern();
addVarDeclaredName(bindingPattern);
if (allowIn) {
// pattern initialiser is required here; cf. validateFor{InOf}
initialiser = initialiser(allowIn);
} else if (token() == Token.ASSIGN) {
// make initialiser optional if `allowIn == false`
initialiser = initialiser(allowIn);
}
binding = bindingPattern;
} else {
BindingIdentifier bindingIdentifier = bindingIdentifier();
addVarDeclaredName(bindingIdentifier);
if (token() == Token.ASSIGN) {
initialiser = initialiser(allowIn);
}
binding = bindingIdentifier;
}
return new VariableDeclaration(binding, initialiser);
}
/**
 * Parses a destructuring binding pattern, dispatching on the opening bracket.
 *
 * <strong>[12.2.4] Destructuring Binding Patterns</strong>
 *
 * <pre>
 * BindingPattern :
 *     ObjectBindingPattern
 *     ArrayBindingPattern
 * </pre>
 */
private BindingPattern bindingPattern() {
    // '{' starts an object pattern, anything else must be '[' (array pattern)
    return token() == Token.LC ? objectBindingPattern() : arrayBindingPattern();
}
/**
 * Parses an object binding pattern, allowing a single trailing comma, and
 * runs the static-semantics checks on the collected properties.
 *
 * <strong>[12.2.4] Destructuring Binding Patterns</strong>
 *
 * <pre>
 * ObjectBindingPattern :
 *     { }
 *     { BindingPropertyList }
 *     { BindingPropertyList , }
 * BindingPropertyList :
 *     BindingProperty
 *     BindingPropertyList , BindingProperty
 * BindingProperty :
 *     SingleNameBinding
 *     PropertyName : BindingElement
 * BindingElement :
 *     SingleNameBinding
 *     BindingPattern Initialiser<sub>opt</sub>
 * SingleNameBinding :
 *     BindingIdentifier Initialiser<sub>opt</sub>
 * PropertyName :
 *     IdentifierName
 *     StringLiteral
 *     NumericLiteral
 * </pre>
 */
private ObjectBindingPattern objectBindingPattern() {
List<BindingProperty> list = newSmallList();
consume(Token.LC);
while (token() != Token.RC) {
list.add(bindingProperty());
if (token() == Token.COMMA) {
consume(Token.COMMA);
} else {
// no comma: next token must be '}' (enforced by consume below)
break;
}
}
consume(Token.RC);
objectBindingPattern_StaticSemantics(list);
return new ObjectBindingPattern(list);
}
/**
 * Parses one property of an object binding pattern: either the long form
 * {@code PropertyName : BindingElement} (detected by a ':' lookahead) or the
 * shorthand {@code BindingIdentifier Initialiser?}.
 *
 * <strong>[12.2.4] Destructuring Binding Patterns</strong>
 *
 * <pre>
 * BindingProperty :
 *     SingleNameBinding
 *     PropertyName : BindingElement
 * BindingElement :
 *     SingleNameBinding
 *     BindingPattern Initialiser<sub>opt</sub>
 * SingleNameBinding :
 *     BindingIdentifier Initialiser<sub>opt</sub>
 * BindingIdentifier :
 *     Identifier
 * </pre>
 */
private BindingProperty bindingProperty() {
if (LOOKAHEAD(Token.COLON)) {
// long form: PropertyName ':' BindingElement
PropertyName propertyName = propertyName();
consume(Token.COLON);
Binding binding;
if (token() == Token.LC) {
binding = objectBindingPattern();
} else if (token() == Token.LB) {
binding = arrayBindingPattern();
} else {
binding = bindingIdentifierStrict();
}
Expression initialiser = null;
if (token() == Token.ASSIGN) {
initialiser = initialiser(true);
}
return new BindingProperty(propertyName, binding, initialiser);
} else {
// shorthand: single name binding with optional default
BindingIdentifier binding = bindingIdentifierStrict();
Expression initialiser = null;
if (token() == Token.ASSIGN) {
initialiser = initialiser(true);
}
return new BindingProperty(binding, initialiser);
}
}
/**
 * Parses an array binding pattern with elisions and an optional trailing rest
 * element, then runs the static-semantics checks.
 *
 * <strong>[12.2.4] Destructuring Binding Patterns</strong>
 *
 * <pre>
 * ArrayBindingPattern :
 *     [ Elision<sub>opt</sub> BindingRestElement<sub>opt</sub> ]
 *     [ BindingElementList ]
 *     [ BindingElementList , Elision<sub>opt</sub> BindingRestElement<sub>opt</sub> ]
 * BindingElementList :
 *     Elision<sub>opt</sub> BindingElement
 *     BindingElementList , Elision<sub>opt</sub> BindingElement
 * BindingRestElement :
 *     ... BindingIdentifier
 * </pre>
 */
private ArrayBindingPattern arrayBindingPattern() {
List<BindingElementItem> list = newSmallList();
consume(Token.LB);
// needComma is set after an element so consecutive elements require a
// separating comma, while a bare comma records an elision
boolean needComma = false;
Token tok;
while ((tok = token()) != Token.RB) {
if (needComma) {
consume(Token.COMMA);
needComma = false;
} else if (tok == Token.COMMA) {
consume(Token.COMMA);
list.add(new BindingElision());
} else if (tok == Token.TRIPLE_DOT) {
// rest element must be the last item; loop exits unconditionally
consume(Token.TRIPLE_DOT);
list.add(new BindingRestElement(bindingIdentifierStrict()));
break;
} else {
list.add(bindingElementStrict());
needComma = true;
}
}
consume(Token.RB);
arrayBindingPattern_StaticSemantics(list);
return new ArrayBindingPattern(list);
}
/**
 * Parses a binding: an identifier or a destructuring pattern, with the
 * strict-mode name restriction applied only when the context requires it.
 *
 * <pre>
 * Binding :
 *     BindingIdentifier
 *     BindingPattern
 * </pre>
 */
private Binding binding() {
    Token tok = token();
    if (tok == Token.LC) {
        return objectBindingPattern();
    }
    if (tok == Token.LB) {
        return arrayBindingPattern();
    }
    return bindingIdentifier();
}
/**
 * Parses a binding like {@link #binding()}, but identifier bindings are
 * unconditionally checked against the restricted names 'eval'/'arguments'.
 *
 * <pre>
 * Binding :
 *     BindingIdentifier
 *     BindingPattern
 * </pre>
 */
private Binding bindingStrict() {
    Token tok = token();
    if (tok == Token.LC) {
        return objectBindingPattern();
    }
    if (tok == Token.LB) {
        return arrayBindingPattern();
    }
    return bindingIdentifierStrict();
}
/**
 * Parses a binding element: a binding plus an optional '=' default value.
 *
 * <strong>[12.2.4] Destructuring Binding Patterns</strong>
 *
 * <pre>
 * BindingElement :
 *     SingleNameBinding
 *     BindingPattern Initialiser<sub>opt</sub>
 * SingleNameBinding :
 *     BindingIdentifier Initialiser<sub>opt</sub>
 * </pre>
 */
private BindingElement bindingElement() {
    Binding target = binding();
    Expression defaultValue = (token() == Token.ASSIGN) ? initialiser(true) : null;
    return new BindingElement(target, defaultValue);
}
/**
 * Parses a binding element like {@link #bindingElement()}, but identifier
 * bindings are unconditionally checked against the restricted names.
 *
 * <strong>[12.2.4] Destructuring Binding Patterns</strong>
 *
 * <pre>
 * BindingElement :
 *     SingleNameBinding
 *     BindingPattern Initialiser<sub>opt</sub>
 * SingleNameBinding :
 *     BindingIdentifier Initialiser<sub>opt</sub>
 * </pre>
 */
private BindingElement bindingElementStrict() {
    Binding target = bindingStrict();
    Expression defaultValue = (token() == Token.ASSIGN) ? initialiser(true) : null;
    return new BindingElement(target, defaultValue);
}
// Static Semantics: BoundNames — name bound by an identifier binding
private static String BoundName(BindingIdentifier binding) {
return binding.getName();
}
// Static Semantics: BoundNames — name bound by a rest element's identifier
private static String BoundName(BindingRestElement element) {
return element.getBindingIdentifier().getName();
}
/**
 * Static-semantics checks for an object binding pattern: top-level identifier
 * bindings must not be named 'eval' or 'arguments'. Nested patterns were
 * already validated when they were parsed.
 */
private void objectBindingPattern_StaticSemantics(List<BindingProperty> list) {
for (BindingProperty property : list) {
// BindingProperty : PropertyName ':' BindingElement
// BindingProperty : BindingIdentifier Initialiser<opt>
Binding binding = property.getBinding();
if (binding instanceof BindingIdentifier) {
String name = BoundName(((BindingIdentifier) binding));
if ("arguments".equals(name) || "eval".equals(name)) {
reportSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
}
} else {
assert binding instanceof BindingPattern;
assert property.getPropertyName() != null;
// already done implicitly
// objectBindingPattern_StaticSemantics(((ObjectBindingPattern) binding).getList());
// arrayBindingPattern_StaticSemantics(((ArrayBindingPattern)
// binding).getElements());
}
}
}
/**
 * Static-semantics checks for an array binding pattern: identifier bindings of
 * elements and the rest element must not be named 'eval' or 'arguments'.
 * Nested patterns were already validated when they were parsed.
 */
private void arrayBindingPattern_StaticSemantics(List<BindingElementItem> list) {
for (BindingElementItem element : list) {
if (element instanceof BindingElement) {
Binding binding = ((BindingElement) element).getBinding();
if (binding instanceof ArrayBindingPattern) {
// already done implicitly
// arrayBindingPattern_StaticSemantics(((ArrayBindingPattern) binding)
// .getElements());
} else if (binding instanceof ObjectBindingPattern) {
// already done implicitly
// objectBindingPattern_StaticSemantics(((ObjectBindingPattern)
// binding).getList());
} else {
assert (binding instanceof BindingIdentifier);
String name = BoundName(((BindingIdentifier) binding));
if ("arguments".equals(name) || "eval".equals(name)) {
reportSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
}
}
} else if (element instanceof BindingRestElement) {
String name = BoundName(((BindingRestElement) element));
if ("arguments".equals(name) || "eval".equals(name)) {
reportSyntaxError(Messages.Key.StrictModeRestrictedIdentifier);
}
} else {
// elisions bind nothing, so there is nothing to check
assert element instanceof BindingElision;
}
}
}
/**
 * Parses an empty statement (a lone ';').
 *
 * <strong>[12.3] Empty Statement</strong>
 *
 * <pre>
 * EmptyStatement:
 * ;
 * </pre>
 */
private EmptyStatement emptyStatement() {
consume(Token.SEMI);
return new EmptyStatement();
}
/**
 * Parses an expression statement. A '{', 'function' or 'class' token cannot
 * begin an expression statement (those are blocks/declarations).
 *
 * <strong>[12.4] Expression Statement</strong>
 *
 * <pre>
 * ExpressionStatement :
 *     [LA &#x2209; { <b>{, function, class</b> }] Expression ;
 * </pre>
 */
private ExpressionStatement expressionStatement() {
switch (token()) {
case LC:
case FUNCTION:
case CLASS:
// NOTE: relies on reportSyntaxError throwing; otherwise control would
// fall through into the default branch
reportSyntaxError(Messages.Key.InvalidToken, token().toString());
default:
Expression expr = expression(true);
semicolon();
return new ExpressionStatement(expr);
}
}
/**
 * Parses an if statement with an optional else branch.
 *
 * <strong>[12.5] The <code>if</code> Statement</strong>
 *
 * <pre>
 * IfStatement :
 *     if ( Expression ) Statement else Statement
 *     if ( Expression ) Statement
 * </pre>
 */
private IfStatement ifStatement() {
    consume(Token.IF);
    consume(Token.LP);
    Expression condition = expression(true);
    consume(Token.RP);
    Statement thenBranch = statement();
    Statement elseBranch;
    if (token() != Token.ELSE) {
        // no else branch present
        elseBranch = null;
    } else {
        consume(Token.ELSE);
        elseBranch = statement();
    }
    return new IfStatement(condition, thenBranch, elseBranch);
}
/**
 * Parses a do-while statement. The body is parsed inside an iteration label
 * context so break/continue can target it.
 *
 * <strong>[12.6.1] The <code>do-while</code> Statement</strong>
 *
 * <pre>
 * IterationStatement :
 *     do Statement while ( Expression ) ;
 * </pre>
 */
private DoWhileStatement doWhileStatement(Set<String> labelSet) {
consume(Token.DO);
LabelContext labelCx = enterIteration(labelSet);
Statement stmt = statement();
exitIteration();
consume(Token.WHILE);
consume(Token.LP);
Expression test = expression(true);
consume(Token.RP);
// the trailing semicolon is optional here (not required by ASI rules)
if (token() == Token.SEMI) {
consume(Token.SEMI);
}
return new DoWhileStatement(labelCx.abrupts, labelCx.labelSet, test, stmt);
}
/**
 * Parses a while statement. The body is parsed inside an iteration label
 * context so break/continue can target it.
 *
 * <strong>[12.6.2] The <code>while</code> Statement</strong>
 *
 * <pre>
 * IterationStatement :
 *     while ( Expression ) Statement
 * </pre>
 */
private WhileStatement whileStatement(Set<String> labelSet) {
    consume(Token.WHILE);
    consume(Token.LP);
    Expression condition = expression(true);
    consume(Token.RP);
    // body is parsed in an iteration context for break/continue targeting
    LabelContext labelContext = enterIteration(labelSet);
    Statement body = statement();
    exitIteration();
    return new WhileStatement(labelContext.abrupts, labelContext.labelSet, condition, body);
}
/**
 * Parses every flavour of for loop: C-style for(;;), for-in, for-of and the
 * 'for each (... in ...)' extension. The head is parsed first (var declaration,
 * lexical declaration, empty, or expression) and then validated against the
 * loop form actually encountered.
 *
 * <strong>[12.6.3] The <code>for</code> Statement</strong> <br>
 * <strong>[12.6.4] The <code>for-in</code> and <code>for-of</code> Statements</strong>
 *
 * <pre>
 * IterationStatement :
 *     for ( ExpressionNoIn<sub>opt</sub> ; Expression<sub>opt</sub> ; Expression <sub>opt</sub> ) Statement
 *     for ( var VariableDeclarationListNoIn ; Expression<sub>opt</sub> ; Expression <sub>opt</sub> ) Statement
 *     for ( LexicalDeclarationNoIn ; Expression<sub>opt</sub> ; Expression <sub>opt</sub> ) Statement
 *     for ( LeftHandSideExpression in Expression ) Statement
 *     for ( var ForBinding in Expression ) Statement
 *     for ( ForDeclaration in Expression ) Statement
 *     for ( LeftHandSideExpression of Expression ) Statement
 *     for ( var ForBinding of Expression ) Statement
 *     for ( ForDeclaration of Expression ) Statement
 * ForDeclaration :
 *     LetOrConst ForBinding
 * ForBinding :
 *     BindingIdentifier
 *     BindingPattern
 * </pre>
 */
private IterationStatement forStatement(Set<String> labelSet) {
consume(Token.FOR);
boolean each = false;
// extension: 'for each', only valid together with the for-in form below
if (token() != Token.LP && isName("each") && isEnabled(Option.ForEachStatement)) {
consume("each");
each = true;
}
consume(Token.LP);
BlockContext lexBlockContext = null;
Node head;
switch (token()) {
case VAR:
consume(Token.VAR);
VariableStatement varStmt = new VariableStatement(variableDeclarationList(false));
addVarScopedDeclaration(varStmt);
head = varStmt;
break;
case LET:
case CONST:
// let/const bindings get a fresh block scope surrounding the loop
lexBlockContext = enterBlockContext();
head = lexicalDeclaration(false);
break;
case SEMI:
head = null;
break;
default:
head = expression(false);
break;
}
if (each && token() != Token.IN) {
// 'for each' only supports the for-in form
reportTokenMismatch(Token.LP, "each");
}
if (token() == Token.SEMI) {
// C-style for(init; test; step) loop
head = validateFor(head);
consume(Token.SEMI);
Expression test = null;
if (token() != Token.SEMI) {
test = expression(true);
}
consume(Token.SEMI);
Expression step = null;
if (token() != Token.RP) {
step = expression(true);
}
consume(Token.RP);
LabelContext labelCx = enterIteration(labelSet);
Statement stmt = statement();
exitIteration();
if (lexBlockContext != null) {
exitBlockContext();
}
ForStatement iteration = new ForStatement(lexBlockContext, labelCx.abrupts,
labelCx.labelSet, head, test, step, stmt);
if (lexBlockContext != null) {
lexBlockContext.node = iteration;
}
return iteration;
} else if (token() == Token.IN) {
// for-in (or for-each-in) loop
head = validateForInOf(head);
consume(Token.IN);
Expression expr;
if (lexBlockContext == null) {
expr = expression(true);
} else {
// the iterated expression must not see the loop's lexical bindings
exitBlockContext();
expr = expression(true);
reenterBlockContext(lexBlockContext);
}
consume(Token.RP);
LabelContext labelCx = enterIteration(labelSet);
Statement stmt = statement();
exitIteration();
if (lexBlockContext != null) {
exitBlockContext();
}
if (each) {
ForEachStatement iteration = new ForEachStatement(lexBlockContext, labelCx.abrupts,
labelCx.labelSet, head, expr, stmt);
if (lexBlockContext != null) {
lexBlockContext.node = iteration;
}
return iteration;
} else {
ForInStatement iteration = new ForInStatement(lexBlockContext, labelCx.abrupts,
labelCx.labelSet, head, expr, stmt);
if (lexBlockContext != null) {
lexBlockContext.node = iteration;
}
return iteration;
}
} else {
// for-of loop; 'of' is a contextual keyword, not a token
head = validateForInOf(head);
consume("of");
Expression expr;
if (lexBlockContext == null) {
expr = assignmentExpression(true);
} else {
// same scoping rule as for-in: hide lexical bindings from 'expr'
exitBlockContext();
expr = assignmentExpression(true);
reenterBlockContext(lexBlockContext);
}
consume(Token.RP);
LabelContext labelCx = enterIteration(labelSet);
Statement stmt = statement();
exitIteration();
if (lexBlockContext != null) {
exitBlockContext();
}
ForOfStatement iteration = new ForOfStatement(lexBlockContext, labelCx.abrupts,
labelCx.labelSet, head, expr, stmt);
if (lexBlockContext != null) {
lexBlockContext.node = iteration;
}
return iteration;
}
}
/**
 * Validates the head of a C-style for loop: destructuring bindings and const
 * bindings must carry an initialiser (those checks were deferred while the
 * NoIn head was being parsed).
 *
 * @see #forStatement(Set)
 */
private Node validateFor(Node head) {
if (head instanceof VariableStatement) {
for (VariableDeclaration decl : ((VariableStatement) head).getElements()) {
if (decl.getBinding() instanceof BindingPattern && decl.getInitialiser() == null) {
reportSyntaxError(Messages.Key.DestructuringMissingInitialiser);
}
}
} else if (head instanceof LexicalDeclaration) {
boolean isConst = ((LexicalDeclaration) head).getType() == LexicalDeclaration.Type.Const;
for (LexicalBinding decl : ((LexicalDeclaration) head).getElements()) {
if (decl.getBinding() instanceof BindingPattern && decl.getInitialiser() == null) {
reportSyntaxError(Messages.Key.DestructuringMissingInitialiser);
}
if (isConst && decl.getInitialiser() == null) {
reportSyntaxError(Messages.Key.ConstMissingInitialiser);
}
}
}
// null and plain expression heads need no extra validation here
return head;
}
/**
 * Validates the head of a for-in/for-of loop: exactly one declaration without
 * an initialiser, or an assignable left-hand side expression. Any other head
 * (including an empty one) is a syntax error.
 *
 * @see #forStatement(Set)
 */
private Node validateForInOf(Node head) {
if (head instanceof VariableStatement) {
// expected: single variable declaration with no initialiser
List<VariableDeclaration> elements = ((VariableStatement) head).getElements();
if (elements.size() == 1 && elements.get(0).getInitialiser() == null) {
return head;
}
} else if (head instanceof LexicalDeclaration) {
// expected: single lexical binding with no initialiser
List<LexicalBinding> elements = ((LexicalDeclaration) head).getElements();
if (elements.size() == 1 && elements.get(0).getInitialiser() == null) {
return head;
}
} else if (head instanceof Expression) {
// expected: left-hand side expression (may be rewritten to a pattern)
LeftHandSideExpression lhs = validateAssignment((Expression) head);
if (lhs == null) {
reportSyntaxError(Messages.Key.InvalidAssignmentTarget);
}
return lhs;
}
throw reportSyntaxError(Messages.Key.InvalidForInOfHead);
}
/**
 * Static Semantics: IsValidSimpleAssignmentTarget
 *
 * Returns the expression as a {@link LeftHandSideExpression} if it is a valid
 * simple assignment target (identifier, element/property access, or a
 * non-empty super expression), otherwise {@code null}.
 */
private LeftHandSideExpression validateSimpleAssignment(Expression lhs) {
if (lhs instanceof Identifier) {
// strict mode forbids assigning to 'eval' and 'arguments'
if (context.strictMode != StrictMode.NonStrict) {
String name = ((Identifier) lhs).getName();
if ("eval".equals(name) || "arguments".equals(name)) {
reportStrictModeSyntaxError(Messages.Key.StrictModeInvalidAssignmentTarget);
}
}
return (Identifier) lhs;
} else if (lhs instanceof ElementAccessor) {
return (ElementAccessor) lhs;
} else if (lhs instanceof PropertyAccessor) {
return (PropertyAccessor) lhs;
} else if (lhs instanceof SuperExpression) {
// only super[expr] / super.name forms are assignable
SuperExpression superExpr = (SuperExpression) lhs;
if (superExpr.getExpression() != null || superExpr.getName() != null) {
return superExpr;
}
}
// everything else => invalid lhs
return null;
}
/**
 * Static Semantics: IsValidSimpleAssignmentTarget
 *
 * Validates an assignment target. Object and array literals are rewritten into
 * destructuring assignment patterns (keeping their parenthesised flag); all
 * other expressions are checked as simple targets.
 */
private LeftHandSideExpression validateAssignment(Expression lhs) {
    if (lhs instanceof ObjectLiteral) {
        ObjectAssignmentPattern objectPattern = toDestructuring((ObjectLiteral) lhs);
        if (lhs.isParenthesised()) {
            objectPattern.addParentheses();
        }
        return objectPattern;
    }
    if (lhs instanceof ArrayLiteral) {
        ArrayAssignmentPattern arrayPattern = toDestructuring((ArrayLiteral) lhs);
        if (lhs.isParenthesised()) {
            arrayPattern.addParentheses();
        }
        return arrayPattern;
    }
    // not a literal: fall back to the simple-target check
    return validateSimpleAssignment(lhs);
}
/**
 * Rewrites an object literal (parsed as a cover grammar) into an object
 * assignment pattern. Method definitions are not valid destructuring targets;
 * the literal is removed from the pending-literal list on success.
 */
private ObjectAssignmentPattern toDestructuring(ObjectLiteral object) {
List<AssignmentProperty> list = newSmallList();
for (PropertyDefinition p : object.getProperties()) {
AssignmentProperty property;
if (p instanceof PropertyValueDefinition) {
// AssignmentProperty : PropertyName ':' AssignmentElement
// AssignmentElement : DestructuringAssignmentTarget Initialiser{opt}
// DestructuringAssignmentTarget : LeftHandSideExpression
PropertyValueDefinition def = (PropertyValueDefinition) p;
PropertyName propertyName = def.getPropertyName();
Expression propertyValue = def.getPropertyValue();
LeftHandSideExpression target;
Expression initialiser;
if (propertyValue instanceof AssignmentExpression) {
// split 'target = default' into target and initialiser
AssignmentExpression assignment = (AssignmentExpression) propertyValue;
if (assignment.getOperator() != AssignmentExpression.Operator.ASSIGN) {
// compound assignment (+=, etc.) cannot be a pattern
reportSyntaxError(Messages.Key.InvalidDestructuring, p);
}
target = destructuringAssignmentTarget(assignment.getLeft());
initialiser = assignment.getRight();
} else {
target = destructuringAssignmentTarget(propertyValue);
initialiser = null;
}
property = new AssignmentProperty(propertyName, target, initialiser);
} else if (p instanceof PropertyNameDefinition) {
// AssignmentProperty : Identifier
PropertyNameDefinition def = (PropertyNameDefinition) p;
property = assignmentProperty(def.getPropertyName(), null);
} else if (p instanceof CoverInitialisedName) {
// AssignmentProperty : Identifier Initialiser
CoverInitialisedName def = (CoverInitialisedName) p;
property = assignmentProperty(def.getPropertyName(), def.getInitialiser());
} else {
assert p instanceof MethodDefinition;
throw reportSyntaxError(Messages.Key.InvalidDestructuring, p);
}
list.add(property);
}
// the cover-grammar literal is resolved, drop it from the pending list
context.removeLiteral(object);
return new ObjectAssignmentPattern(list);
}
/**
 * Rewrites an array literal (parsed as a cover grammar) into an array
 * assignment pattern: elisions pass through, spread elements become rest
 * elements (simple targets only), everything else becomes an assignment
 * element with an optional default.
 */
private ArrayAssignmentPattern toDestructuring(ArrayLiteral array) {
List<AssignmentElementItem> list = newSmallList();
for (Expression e : array.getElements()) {
AssignmentElementItem element;
if (e instanceof Elision) {
// Elision
element = (Elision) e;
} else if (e instanceof SpreadElement) {
// AssignmentRestElement : ... DestructuringAssignmentTarget
// DestructuringAssignmentTarget : LeftHandSideExpression
// rest targets may not themselves be patterns (simple targets only)
Expression expression = ((SpreadElement) e).getExpression();
LeftHandSideExpression target = destructuringSimpleAssignmentTarget(expression);
element = new AssignmentRestElement(target);
} else {
// AssignmentElement : DestructuringAssignmentTarget Initialiser{opt}
// DestructuringAssignmentTarget : LeftHandSideExpression
LeftHandSideExpression target;
Expression initialiser;
if (e instanceof AssignmentExpression) {
// split 'target = default' into target and initialiser
AssignmentExpression assignment = (AssignmentExpression) e;
if (assignment.getOperator() != AssignmentExpression.Operator.ASSIGN) {
reportSyntaxError(Messages.Key.InvalidDestructuring, e);
}
target = destructuringAssignmentTarget(assignment.getLeft());
initialiser = assignment.getRight();
} else {
target = destructuringAssignmentTarget(e);
initialiser = null;
}
element = new AssignmentElement(target, initialiser);
}
list.add(element);
}
return new ArrayAssignmentPattern(list);
}
// target may itself be a nested pattern (extended form)
private LeftHandSideExpression destructuringAssignmentTarget(Expression lhs) {
return destructuringAssignmentTarget(lhs, true);
}
// target must be a simple assignment target (used for rest elements)
private LeftHandSideExpression destructuringSimpleAssignmentTarget(Expression lhs) {
return destructuringAssignmentTarget(lhs, false);
}
/**
 * Validates a destructuring assignment target. Simple targets (identifier,
 * element/property access, non-empty super expression) are always accepted;
 * with {@code extended} set, nested patterns and literals-to-be-rewritten are
 * accepted as well. Anything else throws a syntax error.
 *
 * @param extended whether nested object/array patterns are permitted
 */
private LeftHandSideExpression destructuringAssignmentTarget(Expression lhs, boolean extended) {
if (lhs instanceof Identifier) {
// note: rejected regardless of strict mode in destructuring position
String name = ((Identifier) lhs).getName();
if ("eval".equals(name) || "arguments".equals(name)) {
reportSyntaxError(Messages.Key.InvalidAssignmentTarget, lhs);
}
return (Identifier) lhs;
} else if (lhs instanceof ElementAccessor) {
return (ElementAccessor) lhs;
} else if (lhs instanceof PropertyAccessor) {
return (PropertyAccessor) lhs;
} else if (extended && lhs instanceof ObjectAssignmentPattern) {
return (ObjectAssignmentPattern) lhs;
} else if (extended && lhs instanceof ArrayAssignmentPattern) {
return (ArrayAssignmentPattern) lhs;
} else if (extended && lhs instanceof ObjectLiteral) {
// nested literal: rewrite it to a pattern, preserving parentheses
ObjectAssignmentPattern pattern = toDestructuring((ObjectLiteral) lhs);
if (lhs.isParenthesised()) {
pattern.addParentheses();
}
return pattern;
} else if (extended && lhs instanceof ArrayLiteral) {
ArrayAssignmentPattern pattern = toDestructuring((ArrayLiteral) lhs);
if (lhs.isParenthesised()) {
pattern.addParentheses();
}
return pattern;
} else if (lhs instanceof SuperExpression) {
// only super[expr] / super.name forms are assignable
SuperExpression superExpr = (SuperExpression) lhs;
if (superExpr.getExpression() != null || superExpr.getName() != null) {
return superExpr;
}
}
// FIXME: spec bug (IsInvalidAssignmentPattern not defined) (Bug 716)
// everything else => invalid lhs
throw reportSyntaxError(Messages.Key.InvalidDestructuring, lhs);
}
/**
 * Builds a shorthand assignment property, rejecting the names that are never
 * valid targets here. All listed cases fall through to the single report call;
 * NOTE: relies on reportSyntaxError throwing.
 */
private AssignmentProperty assignmentProperty(Identifier identifier, Expression initialiser) {
switch (identifier.getName()) {
case "eval":
case "arguments":
case "this":
case "super":
reportSyntaxError(Messages.Key.InvalidDestructuring, identifier);
}
return new AssignmentProperty(identifier, initialiser);
}
/**
 * Parses a continue statement with an optional same-line label, resolves the
 * target, and marks it as a continue target.
 *
 * <strong>[12.7] The <code>continue</code> Statement</strong>
 *
 * <pre>
 * ContinueStatement :
 *     continue ;
 *     continue [no <i>LineTerminator</i> here] Identifier ;
 * </pre>
 */
private ContinueStatement continueStatement() {
String label;
consume(Token.CONTINUE);
// a label only attaches if it starts on the same line (ASI rule)
if (noLineTerminator() && isIdentifier(token())) {
label = identifier();
} else {
label = null;
}
semicolon();
LabelContext target = findContinueTarget(label);
if (target == null) {
// NOTE: relies on reportSyntaxError throwing; otherwise the
// dereference of 'target' below would NPE
if (label == null) {
reportSyntaxError(Messages.Key.InvalidContinueTarget);
} else {
reportSyntaxError(Messages.Key.LabelTargetNotFound, label);
}
}
// continue may only target an iteration statement
if (target.type != StatementType.Iteration) {
reportSyntaxError(Messages.Key.InvalidContinueTarget);
}
target.mark(Abrupt.Continue);
return new ContinueStatement(label);
}
/**
 * Parses a break statement with an optional same-line label, resolves the
 * target, and marks it as a break target. Unlike continue, any breakable
 * statement (not just iterations) is a valid target.
 *
 * <strong>[12.8] The <code>break</code> Statement</strong>
 *
 * <pre>
 * BreakStatement :
 *     break ;
 *     break [no <i>LineTerminator</i> here] Identifier ;
 * </pre>
 */
private BreakStatement breakStatement() {
String label;
consume(Token.BREAK);
// a label only attaches if it starts on the same line (ASI rule)
if (noLineTerminator() && isIdentifier(token())) {
label = identifier();
} else {
label = null;
}
semicolon();
LabelContext target = findBreakTarget(label);
if (target == null) {
// NOTE: relies on reportSyntaxError throwing; otherwise the
// target.mark call below would NPE
if (label == null) {
reportSyntaxError(Messages.Key.InvalidBreakTarget);
} else {
reportSyntaxError(Messages.Key.LabelTargetNotFound, label);
}
}
target.mark(Abrupt.Break);
return new BreakStatement(label);
}
/**
 * Parses a return statement; only valid where the context allows return. The
 * return value must begin on the same line as the 'return' keyword.
 *
 * <strong>[12.9] The <code>return</code> Statement</strong>
 *
 * <pre>
 * ReturnStatement :
 *     return ;
 *     return [no <i>LineTerminator</i> here] Expression ;
 * </pre>
 */
private ReturnStatement returnStatement() {
    if (!context.returnAllowed) {
        // 'return' outside of a function body
        reportSyntaxError(Messages.Key.InvalidReturnStatement);
    }
    consume(Token.RETURN);
    Expression value = null;
    if (noLineTerminator()) {
        Token tok = token();
        // any token that can end the statement means "return undefined"
        if (tok != Token.SEMI && tok != Token.RC && tok != Token.EOF) {
            value = expression(true);
        }
    }
    semicolon();
    return new ReturnStatement(value);
}
/**
 * Parses a with statement (a strict-mode error). The body is parsed inside a
 * dedicated with-scope, and the scope node is wired back to the statement.
 *
 * <strong>[12.10] The <code>with</code> Statement</strong>
 *
 * <pre>
 * WithStatement :
 *     with ( Expression ) Statement
 * </pre>
 */
private WithStatement withStatement() {
// 'with' is forbidden in strict mode
reportStrictModeSyntaxError(Messages.Key.StrictModeWithStatement);
consume(Token.WITH);
consume(Token.LP);
Expression expr = expression(true);
consume(Token.RP);
BlockContext scope = enterWithContext();
Statement stmt = statement();
exitWithContext();
WithStatement withStatement = new WithStatement(scope, expr, stmt);
// back-link the scope to its owning AST node
scope.node = withStatement;
return withStatement;
}
/**
 * Parses a switch statement. The case block forms a single lexical block
 * scope; at most one default clause is accepted, and the whole statement is a
 * breakable label target.
 *
 * <strong>[12.11] The <code>switch</code> Statement</strong>
 *
 * <pre>
 * SwitchStatement :
 *     switch ( Expression ) CaseBlock
 * CaseBlock :
 *     { CaseClauses<sub>opt</sub> }
 *     { CaseClauses<sub>opt</sub> DefaultClause CaseClauses<sub>opt</sub> }
 * CaseClauses :
 *     CaseClause
 *     CaseClauses CaseClause
 * CaseClause :
 *     case Expression : StatementList<sub>opt</sub>
 * DefaultClause :
 *     default : StatementList<sub>opt</sub>
 * </pre>
 */
private SwitchStatement switchStatement(Set<String> labelSet) {
List<SwitchClause> clauses = newList();
consume(Token.SWITCH);
consume(Token.LP);
Expression expr = expression(true);
consume(Token.RP);
consume(Token.LC);
LabelContext labelCx = enterBreakable(labelSet);
BlockContext scope = enterBlockContext();
boolean hasDefault = false;
for (;;) {
Expression caseExpr;
Token tok = token();
if (tok == Token.CASE) {
consume(Token.CASE);
caseExpr = expression(true);
consume(Token.COLON);
} else if (tok == Token.DEFAULT && !hasDefault) {
// a second 'default' falls out of the loop and fails on consume(RC)
hasDefault = true;
consume(Token.DEFAULT);
consume(Token.COLON);
caseExpr = null;
} else {
break;
}
// collect the clause's statements up to the next case/default/'}'
List<StatementListItem> list = newList();
statementlist: for (;;) {
switch (token()) {
case CASE:
case DEFAULT:
case RC:
break statementlist;
default:
list.add(statementListItem());
}
}
clauses.add(new SwitchClause(caseExpr, list));
}
exitBlockContext();
exitBreakable();
consume(Token.RC);
SwitchStatement switchStatement = new SwitchStatement(scope, labelCx.abrupts,
labelCx.labelSet, expr, clauses);
// back-link the scope to its owning AST node
scope.node = switchStatement;
return switchStatement;
}
/**
 * Parses a labelled statement. All consecutive 'name:' prefixes are collected
 * into one label set; a directly labelled loop/switch receives the set itself,
 * any other statement is wrapped in a LabelledStatement node.
 *
 * <strong>[12.12] Labelled Statements</strong>
 *
 * <pre>
 * LabelledStatement :
 *     Identifier : Statement
 * </pre>
 */
private Statement labelledStatement() {
HashSet<String> labelSet = new HashSet<>(4);
labels: for (;;) {
switch (token()) {
case FOR:
return forStatement(labelSet);
case WHILE:
return whileStatement(labelSet);
case DO:
return doWhileStatement(labelSet);
case SWITCH:
return switchStatement(labelSet);
case NAME:
// 'name :' adds another label and continues collecting
if (LOOKAHEAD(Token.COLON)) {
String name = identifier();
consume(Token.COLON);
labelSet.add(name);
break;
}
// fall through: a plain name expression ends label collection
case LC:
case VAR:
case SEMI:
case IF:
case CONTINUE:
case BREAK:
case RETURN:
case WITH:
case THROW:
case TRY:
case DEBUGGER:
default:
break labels;
}
}
// only reached via 'break labels', i.e. after at least one 'name:' prefix
assert !labelSet.isEmpty();
LabelContext labelCx = enterLabelled(StatementType.Statement, labelSet);
Statement stmt = statement();
exitLabelled();
return new LabelledStatement(labelCx.abrupts, labelCx.labelSet, stmt);
}
/**
 * Parses a throw statement. The thrown expression must begin on the same line
 * as the 'throw' keyword (no ASI between them).
 *
 * <strong>[12.13] The <code>throw</code> Statement</strong>
 *
 * <pre>
 * ThrowStatement :
 *     throw [no <i>LineTerminator</i> here] Expression ;
 * </pre>
 */
private ThrowStatement throwStatement() {
    consume(Token.THROW);
    // a line break right after 'throw' is always a syntax error
    if (!noLineTerminator()) {
        reportSyntaxError(Messages.Key.UnexpectedEndOfLine);
    }
    Expression thrown = expression(true);
    semicolon();
    return new ThrowStatement(thrown);
}
/**
 * Parses a try statement with catch and/or finally. With the GuardedCatch
 * extension enabled, multiple 'catch (param if guard)' clauses are collected
 * until an unguarded catch (which terminates the chain) is seen; otherwise a
 * single plain catch is parsed. At least one of catch/finally must follow.
 *
 * <strong>[12.14] The <code>try</code> Statement</strong>
 *
 * <pre>
 * TryStatement :
 *     try Block Catch
 *     try Block Finally
 *     try Block Catch Finally
 * Catch :
 *     catch ( CatchParameter ) Block
 * Finally :
 *     finally Block
 * CatchParameter :
 *     BindingIdentifier
 *     BindingPattern
 * </pre>
 */
private TryStatement tryStatement() {
BlockStatement tryBlock, finallyBlock = null;
CatchNode catchNode = null;
List<GuardedCatchNode> guardedCatchNodes = emptyList();
consume(Token.TRY);
tryBlock = block(NO_INHERITED_BINDING);
Token tok = token();
if (tok == Token.CATCH) {
if (isEnabled(Option.GuardedCatch)) {
guardedCatchNodes = newSmallList();
// keep consuming catch clauses until an unguarded one is found
while (token() == Token.CATCH && catchNode == null) {
consume(Token.CATCH);
BlockContext catchScope = enterBlockContext();
consume(Token.LP);
Binding catchParameter = binding();
addLexDeclaredName(catchParameter);
Expression guard;
if (token() == Token.IF) {
consume(Token.IF);
guard = expression(true);
} else {
guard = null;
}
consume(Token.RP);
// catch-block receives a blacklist of forbidden lexical declarable names
BlockStatement catchBlock = block(singletonList(catchParameter));
exitBlockContext();
if (guard != null) {
GuardedCatchNode guardedCatchNode = new GuardedCatchNode(catchScope,
catchParameter, guard, catchBlock);
catchScope.node = guardedCatchNode;
guardedCatchNodes.add(guardedCatchNode);
} else {
// unguarded catch ends the chain (see loop condition)
catchNode = new CatchNode(catchScope, catchParameter, catchBlock);
catchScope.node = catchNode;
}
}
} else {
// standard single catch clause
consume(Token.CATCH);
BlockContext catchScope = enterBlockContext();
consume(Token.LP);
Binding catchParameter = binding();
addLexDeclaredName(catchParameter);
consume(Token.RP);
// catch-block receives a blacklist of forbidden lexical declarable names
BlockStatement catchBlock = block(singletonList(catchParameter));
exitBlockContext();
catchNode = new CatchNode(catchScope, catchParameter, catchBlock);
catchScope.node = catchNode;
}
if (token() == Token.FINALLY) {
consume(Token.FINALLY);
finallyBlock = block(NO_INHERITED_BINDING);
}
} else {
// no catch clause: a finally block is mandatory
consume(Token.FINALLY);
finallyBlock = block(NO_INHERITED_BINDING);
}
return new TryStatement(tryBlock, catchNode, guardedCatchNodes, finallyBlock);
}
/**
 * <strong>[12.15] The <code>debugger</code> Statement</strong>
 *
 * <pre>
 * DebuggerStatement :
 *     debugger ;
 * </pre>
 */
private DebuggerStatement debuggerStatement() {
    // capture the line before consuming the token
    int lineNumber = ts.getLine();
    consume(Token.DEBUGGER);
    semicolon();
    DebuggerStatement stmt = new DebuggerStatement();
    stmt.setLine(lineNumber);
    return stmt;
}
/**
 * <strong>[Extension] The <code>let</code> Statement</strong>
 *
 * <pre>
 * LetStatement :
 *     let ( BindingList ) BlockStatement
 * </pre>
 */
private Statement letStatement() {
    BlockContext scope = enterBlockContext();
    consume(Token.LET);
    consume(Token.LP);
    List<LexicalBinding> lexicalBindings = bindingList(false, true);
    consume(Token.RP);
    if (token() == Token.LC || !isEnabled(Option.LetExpression)) {
        // ordinary let statement with a block body
        BlockStatement body = block(toBindings(lexicalBindings));
        exitBlockContext();
        LetStatement letStatement = new LetStatement(scope, lexicalBindings, body);
        scope.node = letStatement;
        return letStatement;
    }
    // let expression disguised as let statement - also error in strict mode(!)
    reportStrictModeSyntaxError(Messages.Key.UnexpectedToken, token().toString());
    Expression expression = assignmentExpression(true);
    exitBlockContext();
    LetExpression letExpression = new LetExpression(scope, lexicalBindings, expression);
    scope.node = letExpression;
    return new ExpressionStatement(letExpression);
}
/**
 * Projects each lexical binding onto its underlying {@code Binding}, preserving order.
 */
private List<Binding> toBindings(List<LexicalBinding> lexicalBindings) {
    int size = lexicalBindings.size();
    ArrayList<Binding> result = new ArrayList<>(size);
    for (int i = 0; i < size; ++i) {
        result.add(lexicalBindings.get(i).getBinding());
    }
    return result;
}
/**
 * <strong>[11.1] Primary Expressions</strong>
 *
 * <pre>
 * PrimaryExpresion :
 *     this
 *     Identifier
 *     Literal
 *     ArrayInitialiser
 *     ObjectLiteral
 *     FunctionExpression
 *     ClassExpression
 *     GeneratorExpression
 *     GeneratorComprehension
 *     RegularExpressionLiteral
 *     TemplateLiteral
 *     CoverParenthesisedExpressionAndArrowParameterList
 * Literal :
 *     NullLiteral
 *     ValueLiteral
 * ValueLiteral :
 *     BooleanLiteral
 *     NumericLiteral
 *     StringLiteral
 * </pre>
 */
private Expression primaryExpression() {
    Token tok = token();
    switch (tok) {
    case THIS:
        consume(tok);
        return new ThisExpression();
    case NULL:
        consume(tok);
        return new NullLiteral();
    case FALSE:
    case TRUE:
        consume(tok);
        return new BooleanLiteral(tok == Token.TRUE);
    case NUMBER:
        return new NumericLiteral(numericLiteral());
    case STRING:
        return new StringLiteral(stringLiteral());
    case DIV:
    case ASSIGN_DIV:
        // '/' or '/=' in expression position starts a regular expression literal
        return regularExpressionLiteral(tok);
    case LB:
        return arrayInitialiser();
    case LC:
        return objectLiteral();
    case FUNCTION:
        return functionOrGeneratorExpression();
    case CLASS:
        return classExpression();
    case LP:
        if (LOOKAHEAD(Token.FOR)) {
            return generatorComprehension();
        } else {
            return coverParenthesisedExpressionAndArrowParameterList();
        }
    case TEMPLATE:
        return templateLiteral(false);
    case LET:
        if (isEnabled(Option.LetExpression)) {
            return letExpression();
        }
        // deliberate fall-through: 'let' is parsed as a plain identifier when the
        // LetExpression extension is disabled
    default:
        int line = ts.getLine();
        Identifier identifier = new Identifier(identifier());
        identifier.setLine(line);
        return identifier;
    }
}
/**
 * Dispatches between a function expression and a generator expression; a legacy
 * generator (function body containing {@code yield}) is detected via the
 * {@code RetryGenerator} control exception and re-parsed from a saved position.
 */
private Expression functionOrGeneratorExpression() {
    if (LOOKAHEAD(Token.MUL)) {
        // "function *" is unambiguously an ES6 generator expression
        return generatorExpression(false);
    }
    long position = ts.position(), lineinfo = ts.lineinfo();
    try {
        return functionExpression();
    } catch (RetryGenerator e) {
        // rewind the token stream and re-parse as a legacy generator
        ts.reset(position, lineinfo);
        return generatorExpression(true);
    }
}
/**
 * <strong>[11.1] Primary Expressions</strong>
 *
 * <pre>
 * CoverParenthesisedExpressionAndArrowParameterList :
 *     ( Expression )
 *     ( )
 *     ( ... Identifier )
 *     ( Expression , ... Identifier)
 * </pre>
 */
private Expression coverParenthesisedExpressionAndArrowParameterList() {
    // snapshot for possible backtracking into a legacy generator comprehension
    long position = ts.position(), lineinfo = ts.lineinfo();
    consume(Token.LP);
    Expression expr;
    if (token() == Token.RP) {
        // "()" - only valid as an empty arrow function parameter list
        expr = arrowFunctionEmptyParameters();
    } else if (token() == Token.TRIPLE_DOT) {
        // "(..." - rest parameter of an arrow function head
        expr = arrowFunctionRestParameter();
    } else {
        // inlined `expression(true)`
        expr = assignmentExpressionNoValidation(true);
        if (token() == Token.FOR && isEnabled(Option.LegacyComprehension)) {
            // "(expr for ..." - rewind and parse as legacy generator comprehension
            ts.reset(position, lineinfo);
            return legacyGeneratorComprehension();
        }
        if (token() == Token.COMMA) {
            List<Expression> list = new ArrayList<>();
            list.add(expr);
            while (token() == Token.COMMA) {
                consume(Token.COMMA);
                if (token() == Token.TRIPLE_DOT) {
                    // trailing rest parameter ends the list
                    list.add(arrowFunctionRestParameter());
                    break;
                }
                expr = assignmentExpression(true);
                list.add(expr);
            }
            expr = new CommaExpression(list);
        }
    }
    // mark as parenthesised so later validation can distinguish "(a)" from "a"
    expr.addParentheses();
    consume(Token.RP);
    return expr;
}
/**
 * Accepts an empty parenthesised form; valid only as an (empty) arrow function
 * parameter list, i.e. when followed by {@code ") =>"}.
 */
private EmptyExpression arrowFunctionEmptyParameters() {
    boolean isArrowHead = token() == Token.RP && LOOKAHEAD(Token.ARROW);
    if (!isArrowHead) {
        reportSyntaxError(Messages.Key.EmptyParenthesisedExpression);
    }
    return new EmptyExpression();
}
/**
 * Parses {@code ... Identifier}; valid only as the last arrow function
 * parameter, i.e. when followed by {@code ") =>"}.
 */
private SpreadElement arrowFunctionRestParameter() {
    consume(Token.TRIPLE_DOT);
    SpreadElement rest = new SpreadElement(new Identifier(identifier()));
    boolean isArrowHead = token() == Token.RP && LOOKAHEAD(Token.ARROW);
    if (!isArrowHead) {
        reportSyntaxError(Messages.Key.InvalidSpreadExpression);
    }
    return rest;
}
/**
 * <strong>[11.1.4] Array Initialiser</strong>
 *
 * <pre>
 * ArrayInitialiser :
 *     ArrayLiteral
 *     ArrayComprehension
 * </pre>
 */
private ArrayInitialiser arrayInitialiser() {
    if (LOOKAHEAD(Token.FOR)) {
        // "[for ..." - ES6 array comprehension
        return arrayComprehension();
    } else {
        if (isEnabled(Option.LegacyComprehension)) {
            // "[expr for ..." may be a legacy comprehension - requires parsing the
            // first expression before the distinction can be made
            switch (peek()) {
            case RB:
            case COMMA:
            case TRIPLE_DOT:
                // cannot start a legacy comprehension - plain array literal
                break;
            default:
                // TODO: report eclipse formatter bug
                long position = ts.position(),
                lineinfo = ts.lineinfo();
                consume(Token.LB);
                Expression expression = assignmentExpressionNoValidation(true);
                if (token() == Token.FOR) {
                    // legacy comprehension confirmed - rewind and re-parse
                    ts.reset(position, lineinfo);
                    return legacyArrayComprehension();
                }
                // array literal; hand over the already-parsed first element
                return arrayLiteral(expression);
            }
        }
        return arrayLiteral(null);
    }
}
/**
 * <strong>[11.1.4] Array Initialiser</strong>
 *
 * <pre>
 * ArrayLiteral :
 *     [ Elision<sub>opt</sub> ]
 *     [ ElementList ]
 *     [ ElementList , Elision<sub>opt</sub> ]
 * ElementList :
 *     Elision<sub>opt</sub> AssignmentExpression
 *     Elision<sub>opt</sub> SpreadElement
 *     ElementList , Elision<sub>opt</sub> AssignmentExpression
 *     ElementList , Elision<sub>opt</sub> SpreadElement
 * Elision :
 *     ,
 *     Elision ,
 * SpreadElement :
 *     ... AssignmentExpression
 * </pre>
 *
 * @param expr an already-parsed first element handed over by the caller, or
 *             {@code null} if the opening '[' has not been consumed yet
 */
private ArrayLiteral arrayLiteral(Expression expr) {
    List<Expression> list = newList();
    // needComma tracks whether a separator is required before the next element
    boolean needComma = false;
    if (expr == null) {
        consume(Token.LB);
    } else {
        // caller already consumed '[' and the first element
        list.add(expr);
        needComma = true;
    }
    for (Token tok; (tok = token()) != Token.RB;) {
        if (needComma) {
            consume(Token.COMMA);
            needComma = false;
        } else if (tok == Token.COMMA) {
            // a comma where an element is expected is an elision (hole)
            consume(Token.COMMA);
            list.add(new Elision());
        } else if (tok == Token.TRIPLE_DOT) {
            consume(Token.TRIPLE_DOT);
            list.add(new SpreadElement(assignmentExpression(true)));
            needComma = true;
        } else {
            list.add(assignmentExpressionNoValidation(true));
            needComma = true;
        }
    }
    consume(Token.RB);
    return new ArrayLiteral(list);
}
/**
 * <strong>[11.1.4.2] Array Comprehension</strong>
 *
 * <pre>
 * ArrayComprehension :
 *     [ Comprehension ]
 * </pre>
 */
private ArrayComprehension arrayComprehension() {
    consume(Token.LB);
    Comprehension body = comprehension();
    consume(Token.RB);
    return new ArrayComprehension(body);
}
/**
* <strong>[11.1.4.2] Array Comprehension</strong>
*
* <pre>
* Comprehension :
* ComprehensionFor ComprehensionQualifierTail
* ComprehensionQualifierTail :
* AssignmentExpression
* ComprehensionQualifier ComprehensionQualifierTail
* ComprehensionQualifier :
* ComprehensionFor
* ComprehensionIf
* </pre>
*/
private Comprehension comprehension() {
assert token() == Token.FOR;
List<ComprehensionQualifier> list = newSmallList();
int scopes = 0;
for (;;) {
ComprehensionQualifier qualifier;
if (token() == Token.FOR) {
scopes += 1;
qualifier = comprehensionFor();
} else if (token() == Token.IF) {
qualifier = comprehensionIf();
} else {
break;
}
list.add(qualifier);
}
Expression expression = assignmentExpression(true);
while (scopes
exitBlockContext();
}
return new Comprehension(list, expression);
}
/**
 * <strong>[11.1.4.2] Array Comprehension</strong>
 *
 * <pre>
 * ComprehensionFor :
 *     for ( ForBinding of AssignmentExpression )
 * ForBinding :
 *     BindingIdentifier
 *     BindingPattern
 * </pre>
 *
 * NOTE: the block context entered here is deliberately NOT exited in this
 * method - the binding must remain visible to later qualifiers and the tail
 * expression; comprehension() exits all such scopes at the end.
 */
private ComprehensionFor comprehensionFor() {
    consume(Token.FOR);
    consume(Token.LP);
    Binding b = binding();
    consume("of");
    // the iterated expression is parsed BEFORE the new scope is entered, so the
    // binding is not in scope inside its own initialiser
    Expression expression = assignmentExpression(true);
    consume(Token.RP);
    BlockContext scope = enterBlockContext();
    addLexDeclaredName(b);
    return new ComprehensionFor(scope, b, expression);
}
/**
 * <strong>[11.1.4.2] Array Comprehension</strong>
 *
 * <pre>
 * ComprehensionIf :
 *     if ( AssignmentExpression )
 * </pre>
 */
private ComprehensionIf comprehensionIf() {
    consume(Token.IF);
    consume(Token.LP);
    Expression test = assignmentExpression(true);
    consume(Token.RP);
    return new ComprehensionIf(test);
}
/**
 * <strong>[11.1.4.2] Array Comprehension</strong>
 *
 * <pre>
 * LegacyArrayComprehension :
 *     [ LegacyComprehension ]
 * </pre>
 */
private ArrayComprehension legacyArrayComprehension() {
    consume(Token.LB);
    LegacyComprehension body = legacyComprehension();
    consume(Token.RB);
    return new ArrayComprehension(body);
}
/**
 * <strong>[11.1.4.2] Array Comprehension</strong>
 *
 * <pre>
 * LegacyComprehension :
 *     AssignmentExpression LegacyComprehensionForList LegacyComprehensionIf<sub>opt</sub>
 * LegacyComprehensionForList :
 *     LegacyComprehensionFor LegacyComprehensionForList<sub>opt</sub>
 * LegacyComprehensionFor :
 *     for ( ForBinding of Expression )
 *     for ( ForBinding in Expression )
 *     for each ( ForBinding in Expression )
 * LegacyComprehensionIf :
 *     if ( Expression )
 * </pre>
 */
private LegacyComprehension legacyComprehension() {
    // unlike ES6 comprehensions, the legacy form uses one shared scope for all
    // for-bindings, entered here and exited at the end of this method
    BlockContext scope = enterBlockContext();
    // legacy order: the result expression comes FIRST, before the for-clauses
    Expression expr = assignmentExpression(true);
    assert token() == Token.FOR : "empty legacy comprehension";
    List<ComprehensionQualifier> list = newSmallList();
    while (token() == Token.FOR) {
        consume(Token.FOR);
        boolean each = false;
        // "for each (" - SpiderMonkey extension enumerating property values
        if (token() != Token.LP && isName("each")) {
            consume("each");
            each = true;
        }
        consume(Token.LP);
        Binding b = binding();
        addLexDeclaredName(b);
        LegacyComprehensionFor.IterationKind iterationKind;
        if (each) {
            // "for each" only combines with "in"
            consume(Token.IN);
            iterationKind = LegacyComprehensionFor.IterationKind.EnumerateValues;
        } else if (token() == Token.IN) {
            iterationKind = LegacyComprehensionFor.IterationKind.Enumerate;
        } else {
            consume("of");
            iterationKind = LegacyComprehensionFor.IterationKind.Iterate;
        }
        Expression expression = expression(true);
        consume(Token.RP);
        list.add(new LegacyComprehensionFor(iterationKind, b, expression));
    }
    // at most one trailing if-clause
    if (token() == Token.IF) {
        consume(Token.IF);
        consume(Token.LP);
        Expression expression = expression(true);
        consume(Token.RP);
        list.add(new ComprehensionIf(expression));
    }
    exitBlockContext();
    return new LegacyComprehension(scope, list, expr);
}
/**
 * <strong>[11.1.5] Object Initialiser</strong>
 *
 * <pre>
 * ObjectLiteral :
 *     { }
 *     { PropertyDefinitionList }
 *     { PropertyDefinitionList , }
 * PropertyDefinitionList :
 *     PropertyDefinition
 *     PropertyDefinitionList , PropertyDefinition
 * </pre>
 */
private ObjectLiteral objectLiteral() {
    List<PropertyDefinition> definitions = newList();
    consume(Token.LC);
    // comma-separated definitions; a trailing comma before '}' is permitted
    for (;;) {
        if (token() == Token.RC) {
            break;
        }
        definitions.add(propertyDefinition());
        if (token() != Token.COMMA) {
            break;
        }
        consume(Token.COMMA);
    }
    consume(Token.RC);
    ObjectLiteral object = new ObjectLiteral(definitions);
    // record for deferred duplicate-property validation
    context.addLiteral(object);
    return object;
}
/**
 * Validates and removes every object literal recorded after the
 * {@code oldCount} watermark on the context's literal stack.
 */
private void objectLiteral_StaticSemantics(int oldCount) {
    ArrayDeque<ObjectLiteral> literals = context.objectLiterals;
    // pop until the stack is back at the watermark
    while (literals.size() > oldCount) {
        objectLiteral_StaticSemantics(literals.pop());
    }
}
/**
 * Enforces the early errors for duplicate property definitions in an object
 * literal: a data property may not be mixed with an accessor of the same name,
 * duplicate getters or duplicate setters are errors, and duplicate data
 * properties are an error only in strict mode.
 */
private void objectLiteral_StaticSemantics(ObjectLiteral object) {
    // kinds are bit flags so values.put(key, prev | kind) can accumulate a
    // getter/setter pair under one key (VALUE is 0 and contributes no bit)
    final int VALUE = 0, GETTER = 1, SETTER = 2;
    Map<String, Integer> values = new HashMap<>();
    for (PropertyDefinition def : object.getProperties()) {
        PropertyName propertyName = def.getPropertyName();
        String key = propertyName.getName();
        if (key == null) {
            // computed property names cannot be checked statically
            assert propertyName instanceof ComputedPropertyName;
            continue;
        }
        final int kind;
        if (def instanceof PropertyValueDefinition || def instanceof PropertyNameDefinition) {
            kind = VALUE;
        } else if (def instanceof MethodDefinition) {
            MethodDefinition method = (MethodDefinition) def;
            if (method.hasSuperReference()) {
                reportSyntaxError(Messages.Key.SuperOutsideClass, def);
            }
            MethodDefinition.MethodType type = method.getType();
            kind = type == MethodType.Getter ? GETTER : type == MethodType.Setter ? SETTER
                    : VALUE;
        } else {
            assert def instanceof CoverInitialisedName;
            // Always throw a Syntax Error if this production is present
            throw reportSyntaxError(Messages.Key.MissingColonAfterPropertyId, def, key);
        }
        // It is a Syntax Error if PropertyNameList of PropertyDefinitionList contains any
        // duplicate entries [...]
        if (values.containsKey(key)) {
            int prev = values.get(key);
            // data property after an accessor: always an error
            if (kind == VALUE && prev != VALUE) {
                reportSyntaxError(Messages.Key.DuplicatePropertyDefinition, def, key);
            }
            // duplicate data property: error only in strict mode
            if (kind == VALUE && prev == VALUE) {
                reportStrictModeSyntaxError(Messages.Key.DuplicatePropertyDefinition, def, key);
            }
            // a getter may only join an existing lone setter (and vice versa)
            if (kind == GETTER && prev != SETTER) {
                reportSyntaxError(Messages.Key.DuplicatePropertyDefinition, def, key);
            }
            if (kind == SETTER && prev != GETTER) {
                reportSyntaxError(Messages.Key.DuplicatePropertyDefinition, def, key);
            }
            values.put(key, prev | kind);
        } else {
            values.put(key, kind);
        }
    }
}
/**
 * <strong>[11.1.5] Object Initialiser</strong>
 *
 * <pre>
 * PropertyDefinition :
 *     IdentifierName
 *     CoverInitialisedName
 *     PropertyName : AssignmentExpression
 *     MethodDefinition
 * CoverInitialisedName :
 *     IdentifierName Initialiser
 * </pre>
 */
private PropertyDefinition propertyDefinition() {
    if (token() == Token.LB) {
        // either `PropertyName : AssignmentExpression` or MethodDefinition (normal)
        int line = ts.getLine();
        PropertyName propertyName = computedPropertyName();
        if (token() == Token.COLON) {
            // it's the `PropertyName : AssignmentExpression` case
            consume(Token.COLON);
            Expression propertyValue = assignmentExpressionNoValidation(true);
            return new PropertyValueDefinition(propertyName, propertyValue);
        }
        // otherwise it's MethodDefinition (normal)
        return normalMethod(line, propertyName, false);
    }
    // one-token lookahead disambiguates the remaining productions
    if (LOOKAHEAD(Token.COLON)) {
        PropertyName propertyName = literalPropertyName();
        consume(Token.COLON);
        Expression propertyValue = assignmentExpressionNoValidation(true);
        return new PropertyValueDefinition(propertyName, propertyValue);
    }
    if (LOOKAHEAD(Token.COMMA) || LOOKAHEAD(Token.RC)) {
        // shorthand property: `{ name }` or `{ name, ... }`
        // Static Semantics: It is a Syntax Error if IdentifierName is a
        // ReservedWord.
        int line = ts.getLine();
        Identifier identifier = new Identifier(identifier());
        identifier.setLine(line);
        return new PropertyNameDefinition(identifier);
    }
    if (LOOKAHEAD(Token.ASSIGN)) {
        // CoverInitialisedName: only valid when the literal is later reinterpreted
        // as a destructuring pattern; rejected in objectLiteral_StaticSemantics
        int line = ts.getLine();
        Identifier identifier = new Identifier(identifier());
        identifier.setLine(line);
        consume(Token.ASSIGN);
        Expression initialiser = assignmentExpression(true);
        return new CoverInitialisedName(identifier, initialiser);
    }
    return methodDefinition(false);
}
/**
 * <strong>[11.1.5] Object Initialiser</strong>
 *
 * <pre>
 * PropertyName :
 *     LiteralPropertyName
 *     ComputedPropertyName
 * </pre>
 */
private PropertyName propertyName() {
    if (token() == Token.LB) {
        return computedPropertyName();
    }
    return literalPropertyName();
}
/**
 * <strong>[11.1.5] Object Initialiser</strong>
 *
 * <pre>
 * PropertyName :
 *     IdentifierName
 *     StringLiteral
 *     NumericLiteral
 * </pre>
 */
private PropertyName literalPropertyName() {
    Token tok = token();
    if (tok == Token.STRING) {
        return new StringLiteral(stringLiteral());
    }
    if (tok == Token.NUMBER) {
        return new NumericLiteral(numericLiteral());
    }
    // any IdentifierName (reserved words included) is a valid property name
    return new Identifier(identifierName());
}
/**
 * <strong>[11.1.5] Object Initialiser</strong>
 *
 * <pre>
 * ComputedPropertyName :
 *     [ AssignmentExpression ]
 * </pre>
 */
private PropertyName computedPropertyName() {
    consume(Token.LB);
    Expression nameExpr = assignmentExpression(true);
    consume(Token.RB);
    return new ComputedPropertyName(nameExpr);
}
/**
 * <strong>[11.1.7] Generator Comprehensions</strong>
 *
 * <pre>
 * GeneratorComprehension :
 *     ( Comprehension )
 * </pre>
 */
private GeneratorComprehension generatorComprehension() {
    // 'yield' is not permitted inside a generator comprehension body
    boolean savedYieldAllowed = context.yieldAllowed;
    context.yieldAllowed = false;
    try {
        consume(Token.LP);
        Comprehension body = comprehension();
        consume(Token.RP);
        return new GeneratorComprehension(body);
    } finally {
        context.yieldAllowed = savedYieldAllowed;
    }
}
/**
 * <strong>[11.1.7] Generator Comprehensions</strong>
 *
 * <pre>
 * LegacyGeneratorComprehension :
 *     ( LegacyComprehension )
 * </pre>
 */
private GeneratorComprehension legacyGeneratorComprehension() {
    // 'yield' is not permitted inside a generator comprehension body
    boolean savedYieldAllowed = context.yieldAllowed;
    context.yieldAllowed = false;
    try {
        consume(Token.LP);
        LegacyComprehension body = legacyComprehension();
        consume(Token.RP);
        return new GeneratorComprehension(body);
    } finally {
        context.yieldAllowed = savedYieldAllowed;
    }
}
/**
 * <strong>[11.1.8] Regular Expression Literals</strong>
 *
 * <pre>
 * RegularExpressionLiteral ::
 *     / RegularExpressionBody / RegularExpressionFlags
 * </pre>
 */
private Expression regularExpressionLiteral(Token tok) {
    String[] patternAndFlags = ts.readRegularExpression(tok);
    String pattern = patternAndFlags[0], flags = patternAndFlags[1];
    // validate before consuming so errors point at the literal
    regularExpressionLiteral_StaticSemantics(pattern, flags);
    consume(tok);
    return new RegularExpressionLiteral(pattern, flags);
}
/**
 * Validates a regular expression literal by parsing it; the parse result is
 * discarded - only the syntax check matters here.
 */
private void regularExpressionLiteral_StaticSemantics(String pattern, String flags) {
    // parse to validate regular expression, but ignore actual result
    RegExpParser.parse(pattern, flags, ts.getLine(), ts.getColumn());
}
/**
 * <strong>[11.1.9] Template Literals</strong>
 *
 * <pre>
 * TemplateLiteral :
 *     NoSubstitutionTemplate
 *     TemplateHead Expression [Lexical goal <i>InputElementTemplateTail</i>] TemplateSpans
 * TemplateSpans :
 *     TemplateTail
 *     TemplateMiddleList [Lexical goal <i>InputElementTemplateTail</i>] TemplateTail
 * TemplateMiddleList :
 *     TemplateMiddle Expression
 *     TemplateMiddleList [Lexical goal <i>InputElementTemplateTail</i>] TemplateMiddle Expression
 * </pre>
 *
 * @param tagged whether this literal follows a tag expression (tagged template)
 */
private TemplateLiteral templateLiteral(boolean tagged) {
    List<Expression> elements = newList();
    // readTemplateLiteral returns [cooked, raw] character data and stops at the
    // next substitution (`${`) or the end of the template
    String[] values = ts.readTemplateLiteral(Token.TEMPLATE);
    elements.add(new TemplateCharacters(values[0], values[1]));
    // while the lexer reports '{' (a `${` substitution), parse the embedded
    // expression, then re-enter the template-tail lexical goal at '}'
    while (token() == Token.LC) {
        consume(Token.LC);
        elements.add(expression(true));
        values = ts.readTemplateLiteral(Token.RC);
        elements.add(new TemplateCharacters(values[0], values[1]));
    }
    consume(Token.TEMPLATE);
    return new TemplateLiteral(tagged, elements);
}
/**
 * <strong>[Extension] The <code>let</code> Expression</strong>
 *
 * <pre>
 * LetExpression :
 *     let ( BindingList ) AssignmentExpression
 * </pre>
 */
private LetExpression letExpression() {
    BlockContext scope = enterBlockContext();
    consume(Token.LET);
    consume(Token.LP);
    List<LexicalBinding> lexicalBindings = bindingList(false, true);
    consume(Token.RP);
    Expression body = assignmentExpression(true);
    exitBlockContext();
    LetExpression letExpression = new LetExpression(scope, lexicalBindings, body);
    scope.node = letExpression;
    return letExpression;
}
/**
 * <strong>[11.2] Left-Hand-Side Expressions</strong>
 *
 * <pre>
 * MemberExpression :
 *     PrimaryExpression
 *     MemberExpression [ Expression ]
 *     MemberExpression . IdentifierName
 *     MemberExpression QuasiLiteral
 *     super [ Expression ]
 *     super . IdentifierName
 *     new MemberExpression Arguments
 * NewExpression :
 *     MemberExpression
 *     new NewExpression
 * CallExpression :
 *     MemberExpression Arguments
 *     super Arguments
 *     CallExpression Arguments
 *     CallExpression [ Expression ]
 *     CallExpression . IdentifierName
 *     CallExpression QuasiLiteral
 * LeftHandSideExpression :
 *     NewExpression
 *     CallExpression
 * </pre>
 *
 * @param allowCall {@code false} when parsing the operand of {@code new}, where
 *                  call expressions must not be consumed (arguments bind to
 *                  {@code new} instead)
 */
private Expression leftHandSideExpression(boolean allowCall) {
    int line = ts.getLine();
    Expression lhs;
    if (token() == Token.NEW) {
        consume(Token.NEW);
        // recurse with allowCall=false so a trailing "(...)" becomes the
        // constructor arguments rather than a call on the operand
        Expression expr = leftHandSideExpression(false);
        List<Expression> args = null;
        if (token() == Token.LP) {
            args = arguments();
        } else {
            // "new Foo" without an argument list
            args = emptyList();
        }
        lhs = new NewExpression(expr, args);
    } else if (token() == Token.SUPER) {
        // 'super' is only valid inside method-like code
        ParseContext cx = context.findSuperContext();
        if (cx.kind == ContextKind.Script && !isEnabled(Option.FunctionCode)
                || cx.kind == ContextKind.Module) {
            reportSyntaxError(Messages.Key.InvalidSuperExpression);
        }
        cx.setReferencesSuper();
        consume(Token.SUPER);
        switch (token()) {
        case DOT:
            consume(Token.DOT);
            String name = identifierName();
            lhs = new SuperExpression(name);
            break;
        case LB:
            consume(Token.LB);
            Expression expr = expression(true);
            consume(Token.RB);
            lhs = new SuperExpression(expr);
            break;
        case LP:
            if (!allowCall) {
                lhs = new SuperExpression();
            } else {
                List<Expression> args = arguments();
                lhs = new SuperExpression(args);
            }
            break;
        case TEMPLATE:
            // handle "new super``" case
            throw reportSyntaxError(Messages.Key.InvalidToken, token().toString());
        default:
            if (!allowCall) {
                lhs = new SuperExpression();
            } else {
                throw reportSyntaxError(Messages.Key.InvalidToken, token().toString());
            }
            break;
        }
    } else {
        lhs = primaryExpression();
    }
    lhs.setLine(line);
    // iteratively extend the expression with member access, calls, and
    // tagged templates (left-associative)
    for (;;) {
        switch (token()) {
        case DOT:
            line = ts.getLine();
            consume(Token.DOT);
            String name = identifierName();
            lhs = new PropertyAccessor(lhs, name);
            lhs.setLine(line);
            break;
        case LB:
            line = ts.getLine();
            consume(Token.LB);
            Expression expr = expression(true);
            consume(Token.RB);
            lhs = new ElementAccessor(lhs, expr);
            lhs.setLine(line);
            break;
        case LP:
            if (!allowCall) {
                return lhs;
            }
            // track direct `eval(...)` calls - they require special scoping
            if (lhs instanceof Identifier && "eval".equals(((Identifier) lhs).getName())) {
                context.funContext.directEval = true;
            }
            line = ts.getLine();
            List<Expression> args = arguments();
            lhs = new CallExpression(lhs, args);
            lhs.setLine(line);
            break;
        case TEMPLATE:
            line = ts.getLine();
            TemplateLiteral templ = templateLiteral(true);
            lhs = new TemplateCallExpression(lhs, templ);
            lhs.setLine(line);
            break;
        default:
            return lhs;
        }
    }
}
/**
 * <strong>[11.2] Left-Hand-Side Expressions</strong>
 *
 * <pre>
 * Arguments :
 *     ()
 *     ( ArgumentList )
 * ArgumentList :
 *     AssignmentExpression
 *     ... AssignmentExpression
 *     ArgumentList , AssignmentExpression
 *     ArgumentList , ... AssignmentExpression
 * </pre>
 */
private List<Expression> arguments() {
    List<Expression> args = newSmallList();
    // snapshot for possible backtracking into a legacy generator comprehension
    long position = ts.position(), lineinfo = ts.lineinfo();
    consume(Token.LP);
    if (token() != Token.RP) {
        if (token() != Token.TRIPLE_DOT && isEnabled(Option.LegacyComprehension)) {
            // "f(expr for ...)" - a legacy generator comprehension as the sole
            // argument; must parse the first expression before deciding
            Expression expr = assignmentExpression(true);
            if (token() == Token.FOR) {
                ts.reset(position, lineinfo);
                args.add(legacyGeneratorComprehension());
                return args;
            }
            args.add(expr);
            if (token() == Token.COMMA) {
                consume(Token.COMMA);
            } else {
                consume(Token.RP);
                return args;
            }
        }
        for (;;) {
            Expression expr;
            if (token() == Token.TRIPLE_DOT) {
                // spread argument: "...expr"
                consume(Token.TRIPLE_DOT);
                expr = new CallSpreadElement(assignmentExpression(true));
            } else {
                expr = assignmentExpression(true);
            }
            args.add(expr);
            if (token() == Token.COMMA) {
                consume(Token.COMMA);
            } else {
                break;
            }
        }
    }
    consume(Token.RP);
    return args;
}
/**
 * <strong>[11.3] Postfix Expressions</strong><br>
 * <strong>[11.4] Unary Operators</strong>
 *
 * <pre>
 * PostfixExpression :
 *     LeftHandSideExpression
 *     LeftHandSideExpression [no <i>LineTerminator</i> here] ++
 *     LeftHandSideExpression [no <i>LineTerminator</i> here] --
 * UnaryExpression :
 *     PostfixExpression
 *     delete UnaryExpression
 *     void UnaryExpression
 *     typeof UnaryExpression
 *     ++ UnaryExpression
 *     -- UnaryExpression
 *     + UnaryExpression
 *     - UnaryExpression
 *     ~ UnaryExpression
 *     ! UnaryExpression
 * </pre>
 */
private Expression unaryExpression() {
    Token tok = token();
    switch (tok) {
    case DELETE:
    case VOID:
    case TYPEOF:
    case INC:
    case DEC:
    case ADD:
    case SUB:
    case BITNOT:
    case NOT: {
        // prefix operator - recurse for the operand
        int line = ts.getLine();
        consume(tok);
        UnaryExpression unary = new UnaryExpression(unaryOp(tok, false), unaryExpression());
        unary.setLine(line);
        if (tok == Token.INC || tok == Token.DEC) {
            // ++/-- require a simple assignment target
            if (validateSimpleAssignment(unary.getOperand()) == null) {
                reportReferenceError(Messages.Key.InvalidIncDecTarget);
            }
        }
        if (tok == Token.DELETE) {
            // deleting a plain identifier is forbidden in strict mode
            Expression operand = unary.getOperand();
            if (operand instanceof Identifier) {
                reportStrictModeSyntaxError(Messages.Key.StrictModeInvalidDeleteOperand);
            }
        }
        return unary;
    }
    default: {
        Expression lhs = leftHandSideExpression(true);
        // postfix ++/-- only binds when no line terminator precedes it (ASI)
        if (noLineTerminator()) {
            tok = token();
            if (tok == Token.INC || tok == Token.DEC) {
                if (validateSimpleAssignment(lhs) == null) {
                    reportReferenceError(Messages.Key.InvalidIncDecTarget);
                }
                int line = ts.getLine();
                consume(tok);
                UnaryExpression unary = new UnaryExpression(unaryOp(tok, true), lhs);
                unary.setLine(line);
                return unary;
            }
        }
        return lhs;
    }
    }
}
/**
 * Maps a lexer token to the corresponding unary operator; for INC/DEC the
 * {@code postfix} flag selects the postfix or prefix variant. Returns
 * {@code null} for tokens that are not unary operators.
 */
private static UnaryExpression.Operator unaryOp(Token tok, boolean postfix) {
    switch (tok) {
    case DELETE:
        return UnaryExpression.Operator.DELETE;
    case VOID:
        return UnaryExpression.Operator.VOID;
    case TYPEOF:
        return UnaryExpression.Operator.TYPEOF;
    case INC:
        return postfix ? UnaryExpression.Operator.POST_INC : UnaryExpression.Operator.PRE_INC;
    case DEC:
        return postfix ? UnaryExpression.Operator.POST_DEC : UnaryExpression.Operator.PRE_DEC;
    case ADD:
        return UnaryExpression.Operator.POS;
    case SUB:
        return UnaryExpression.Operator.NEG;
    case BITNOT:
        return UnaryExpression.Operator.BITNOT;
    case NOT:
        return UnaryExpression.Operator.NOT;
    default:
        return null;
    }
}
/**
 * Parses a binary expression, starting precedence climbing from the lowest
 * binary precedence level (logical OR).
 */
private Expression binaryExpression(boolean allowIn) {
    Expression first = unaryExpression();
    int lowestPrecedence = BinaryExpression.Operator.OR.getPrecedence();
    return binaryExpression(allowIn, first, lowestPrecedence);
}
/**
 * Precedence-climbing parser for binary operators.
 *
 * @param allowIn {@code false} to exclude the 'in' operator (for-statement heads)
 * @param lhs     the already-parsed left operand
 * @param minpred minimum operator precedence to consume at this level
 */
private Expression binaryExpression(boolean allowIn, Expression lhs, int minpred) {
    // Recursive-descent parsers require multiple levels of recursion to
    // parse binary expressions, to avoid this we're using precedence
    // climbing here
    for (;;) {
        Token tok = token();
        if (tok == Token.IN && !allowIn) {
            break;
        }
        BinaryExpression.Operator op = binaryOp(tok);
        // non-operator tokens map to precedence -1 and terminate the loop
        int pred = (op != null ? op.getPrecedence() : -1);
        if (pred < minpred) {
            break;
        }
        consume(tok);
        Expression rhs = unaryExpression();
        for (;;) {
            // strictly-higher precedence (left-associative operators) binds to rhs
            BinaryExpression.Operator op2 = binaryOp(token());
            int pred2 = (op2 != null ? op2.getPrecedence() : -1);
            if (pred2 <= pred) {
                break;
            }
            rhs = binaryExpression(allowIn, rhs, pred2);
        }
        lhs = new BinaryExpression(op, lhs, rhs);
    }
    return lhs;
}
/**
 * Maps a lexer token to the corresponding binary operator, or {@code null}
 * when the token is not a binary operator (which terminates precedence
 * climbing in {@link #binaryExpression}).
 */
private static BinaryExpression.Operator binaryOp(Token token) {
    switch (token) {
    case OR:
        return BinaryExpression.Operator.OR;
    case AND:
        return BinaryExpression.Operator.AND;
    case BITOR:
        return BinaryExpression.Operator.BITOR;
    case BITXOR:
        return BinaryExpression.Operator.BITXOR;
    case BITAND:
        return BinaryExpression.Operator.BITAND;
    case EQ:
        return BinaryExpression.Operator.EQ;
    case NE:
        return BinaryExpression.Operator.NE;
    case SHEQ:
        return BinaryExpression.Operator.SHEQ;
    case SHNE:
        return BinaryExpression.Operator.SHNE;
    case LT:
        return BinaryExpression.Operator.LT;
    case LE:
        return BinaryExpression.Operator.LE;
    case GT:
        return BinaryExpression.Operator.GT;
    case GE:
        return BinaryExpression.Operator.GE;
    case IN:
        return BinaryExpression.Operator.IN;
    case INSTANCEOF:
        return BinaryExpression.Operator.INSTANCEOF;
    case SHL:
        return BinaryExpression.Operator.SHL;
    case SHR:
        return BinaryExpression.Operator.SHR;
    case USHR:
        return BinaryExpression.Operator.USHR;
    case ADD:
        return BinaryExpression.Operator.ADD;
    case SUB:
        return BinaryExpression.Operator.SUB;
    case MUL:
        return BinaryExpression.Operator.MUL;
    case DIV:
        return BinaryExpression.Operator.DIV;
    case MOD:
        return BinaryExpression.Operator.MOD;
    default:
        return null;
    }
}
/**
 * <strong>[11.12] Conditional Operator</strong><br>
 * <strong>[11.13] Assignment Operators</strong>
 *
 * <pre>
 * ConditionalExpression :
 *     LogicalORExpression
 *     LogicalORExpression ? AssignmentExpression : AssignmentExpression
 * ConditionalExpressionNoIn :
 *     LogicalORExpressionNoIn
 *     LogicalORExpressionNoIn ? AssignmentExpression : AssignmentExpressionNoIn
 * AssignmentExpression :
 *     ConditionalExpression
 *     YieldExpression
 *     ArrowFunction
 *     LeftHandSideExpression = AssignmentExpression
 *     LeftHandSideExpression AssignmentOperator AssignmentExpression
 * AssignmentExpressionNoIn :
 *     ConditionalExpressionNoIn
 *     YieldExpression
 *     ArrowFunction
 *     LeftHandSideExpression = AssignmentExpressionNoIn
 *     LeftHandSideExpression AssignmentOperator AssignmentExpressionNoIn
 * </pre>
 */
private Expression assignmentExpression(boolean allowIn) {
    // snapshot the object literal count so any literals parsed within this
    // expression can be validated for duplicate properties afterwards
    int oldCount = context.countLiterals();
    Expression expr = assignmentExpression(allowIn, oldCount);
    if (context.countLiterals() > oldCount) {
        objectLiteral_StaticSemantics(oldCount);
    }
    return expr;
}
/**
 * Parses an AssignmentExpression without running the deferred object literal
 * duplicate-property checks; any literals recorded after the current count are
 * left on the context stack for the caller to validate or discard.
 */
private Expression assignmentExpressionNoValidation(boolean allowIn) {
    return assignmentExpression(allowIn, context.countLiterals());
}
/**
 * Core AssignmentExpression parser.
 *
 * @param allowIn  {@code false} to exclude the 'in' operator
 * @param oldCount object literal count watermark; used to discard literals
 *                 when backtracking into an arrow function
 */
private Expression assignmentExpression(boolean allowIn, int oldCount) {
    // TODO: this may need to be changed...
    if (token() == Token.YIELD) {
        return yieldExpression();
    }
    // snapshot for possible backtracking when an arrow function is detected
    long position = ts.position(), lineinfo = ts.lineinfo();
    Expression left = binaryExpression(allowIn);
    Token tok = token();
    if (tok == Token.HOOK) {
        consume(Token.HOOK);
        // NB: 'in' is always allowed in the consequent, only the alternative
        // inherits the NoIn restriction
        Expression then = assignmentExpression(true);
        consume(Token.COLON);
        Expression otherwise = assignmentExpression(allowIn);
        return new ConditionalExpression(left, then, otherwise);
    } else if (tok == Token.ARROW) {
        // discard parsed object literals
        if (oldCount < context.countLiterals()) {
            ArrayDeque<ObjectLiteral> literals = context.objectLiterals;
            for (int i = oldCount, newCount = literals.size(); i < newCount; ++i) {
                literals.pop();
            }
        }
        // rewind: what was parsed as an expression is an arrow parameter list
        ts.reset(position, lineinfo);
        return arrowFunction();
    } else if (tok == Token.ASSIGN) {
        // plain '=' accepts destructuring patterns as targets
        LeftHandSideExpression lhs = validateAssignment(left);
        if (lhs == null) {
            reportReferenceError(Messages.Key.InvalidAssignmentTarget);
        }
        consume(Token.ASSIGN);
        Expression right = assignmentExpression(allowIn);
        return new AssignmentExpression(assignmentOp(tok), lhs, right);
    } else if (isAssignmentOperator(tok)) {
        // compound assignment requires a simple (non-pattern) target
        LeftHandSideExpression lhs = validateSimpleAssignment(left);
        if (lhs == null) {
            reportReferenceError(Messages.Key.InvalidAssignmentTarget);
        }
        consume(tok);
        Expression right = assignmentExpression(allowIn);
        return new AssignmentExpression(assignmentOp(tok), lhs, right);
    } else {
        return left;
    }
}
/**
 * Maps a lexer token to the corresponding assignment operator, or {@code null}
 * when the token is not an assignment operator.
 */
private static AssignmentExpression.Operator assignmentOp(Token token) {
    switch (token) {
    case ASSIGN:
        return AssignmentExpression.Operator.ASSIGN;
    case ASSIGN_ADD:
        return AssignmentExpression.Operator.ASSIGN_ADD;
    case ASSIGN_SUB:
        return AssignmentExpression.Operator.ASSIGN_SUB;
    case ASSIGN_MUL:
        return AssignmentExpression.Operator.ASSIGN_MUL;
    case ASSIGN_DIV:
        return AssignmentExpression.Operator.ASSIGN_DIV;
    case ASSIGN_MOD:
        return AssignmentExpression.Operator.ASSIGN_MOD;
    case ASSIGN_SHL:
        return AssignmentExpression.Operator.ASSIGN_SHL;
    case ASSIGN_SHR:
        return AssignmentExpression.Operator.ASSIGN_SHR;
    case ASSIGN_USHR:
        return AssignmentExpression.Operator.ASSIGN_USHR;
    case ASSIGN_BITAND:
        return AssignmentExpression.Operator.ASSIGN_BITAND;
    case ASSIGN_BITOR:
        return AssignmentExpression.Operator.ASSIGN_BITOR;
    case ASSIGN_BITXOR:
        return AssignmentExpression.Operator.ASSIGN_BITXOR;
    default:
        return null;
    }
}
/**
 * <strong>[11.13] Assignment Operators</strong>
 *
 * <pre>
 * AssignmentOperator : <b>one of</b>
 *     *= /= %= += -= <<= >>= >>>= &= ^= |=
 * </pre>
 *
 * Note: compound assignment operators only; plain '=' is handled separately.
 */
private boolean isAssignmentOperator(Token tok) {
    switch (tok) {
    // arithmetic
    case ASSIGN_ADD:
    case ASSIGN_SUB:
    case ASSIGN_MUL:
    case ASSIGN_DIV:
    case ASSIGN_MOD:
    // shifts
    case ASSIGN_SHL:
    case ASSIGN_SHR:
    case ASSIGN_USHR:
    // bitwise
    case ASSIGN_BITAND:
    case ASSIGN_BITOR:
    case ASSIGN_BITXOR:
        return true;
    default:
        return false;
    }
}
/**
 * <strong>[11.14] Comma Operator</strong>
 *
 * <pre>
 * Expression :
 *     AssignmentExpression
 *     Expression , AssignmentExpression
 * ExpressionNoIn :
 *     AssignmentExpressionNoIn
 *     ExpressionNoIn , AssignmentExpressionNoIn
 * </pre>
 *
 * Parses one assignment expression; when followed by commas, collects the
 * whole comma-separated sequence into a {@link CommaExpression}.
 */
private Expression expression(boolean allowIn) {
    Expression first = assignmentExpression(allowIn);
    if (token() != Token.COMMA) {
        // common case: a single assignment expression, no wrapper needed
        return first;
    }
    List<Expression> expressions = new ArrayList<>();
    expressions.add(first);
    do {
        consume(Token.COMMA);
        expressions.add(assignmentExpression(allowIn));
    } while (token() == Token.COMMA);
    return new CommaExpression(expressions);
}
/**
 * <strong>[7.9] Automatic Semicolon Insertion</strong>
 *
 * Consumes an explicit semicolon when present. Otherwise a semicolon is
 * inserted automatically before {@code }} and at end-of-input, or when the
 * next token is preceded by a line terminator; in every other case a
 * missing-semicolon syntax error is reported.
 */
private void semicolon() {
    Token tok = token();
    if (tok == Token.SEMI) {
        consume(Token.SEMI);
    } else if (tok != Token.RC && tok != Token.EOF && noLineTerminator()) {
        reportSyntaxError(Messages.Key.MissingSemicolon);
    }
}
/**
 * Peek next token and check for line-terminator. Returns {@code true} when
 * there is no line terminator before the current token (used by Automatic
 * Semicolon Insertion).
 */
private boolean noLineTerminator() {
    return !ts.hasCurrentLineTerminator();
}
/**
 * Returns {@code true} when the current token is a {@code NAME} token whose
 * text equals {@code name}.
 */
private boolean isName(String name) {
    Token tok = token();
    if (tok != Token.NAME) {
        return false;
    }
    return name.equals(getName(tok));
}
/**
 * Return token name: the scanned string for a {@code NAME} token, otherwise
 * the token's fixed spelling.
 */
private String getName(Token tok) {
    return tok == Token.NAME ? ts.getString() : tok.getName();
}
/**
 * <strong>[7.6] Identifier Names and Identifiers</strong>
 *
 * <pre>
 * Identifier ::
 *     IdentifierName but not ReservedWord
 * ReservedWord ::
 *     Keyword
 *     FutureReservedWord
 *     NullLiteral
 *     BooleanLiteral
 * </pre>
 *
 * Consumes and returns the current token as an Identifier, reporting a
 * token-mismatch error when it is not one.
 */
private String identifier() {
    Token current = token();
    if (!isIdentifier(current)) {
        reportTokenMismatch("<identifier>", current);
    }
    // read the text before consuming, since consume() advances the stream
    String value = getName(current);
    consume(current);
    return value;
}
/**
 * <strong>[7.6] Identifier Names and Identifiers</strong>
 *
 * Convenience overload that checks {@code tok} against the parser context's
 * current strict-mode setting.
 */
private boolean isIdentifier(Token tok) {
    return isIdentifier(tok, context.strictMode);
}
/**
 * <strong>[7.6] Identifier Names and Identifiers</strong>
 *
 * Returns {@code true} when {@code tok} may be used as an Identifier under
 * the given strict-mode setting. For the strict-mode future-reserved words
 * this also reports a strict-mode syntax error as a side effect unless the
 * code is definitely non-strict.
 */
private boolean isIdentifier(Token tok, StrictMode strictMode) {
    switch (tok) {
    case NAME:
        return true;
    // strict-mode future reserved words
    case IMPLEMENTS:
    case INTERFACE:
    case PACKAGE:
    case PRIVATE:
    case PROTECTED:
    case PUBLIC:
    case STATIC:
        // TODO: otherwise cannot parse YieldExpression, context dependent syntax restriction?
        // case YIELD:
        // report (possibly deferred) error unless definitely non-strict
        if (strictMode != StrictMode.NonStrict) {
            reportStrictModeSyntaxError(Messages.Key.StrictModeInvalidIdentifier, getName(tok));
        }
        // usable as an identifier only when not definitely strict
        return (strictMode != StrictMode.Strict);
    default:
        return false;
    }
}
/**
 * <strong>[7.6] Identifier Names and Identifiers</strong>
 *
 * Consumes and returns the current token as an IdentifierName (an identifier
 * or any reserved word), reporting a token-mismatch error otherwise.
 */
private String identifierName() {
    Token current = token();
    if (!isIdentifierName(current)) {
        reportTokenMismatch("<identifier-name>", current);
    }
    // capture the text first; consume() advances the token stream
    String value = getName(current);
    consume(current);
    return value;
}
/**
 * <strong>[7.6] Identifier Names and Identifiers</strong>
 *
 * Returns {@code true} when {@code tok} is a valid IdentifierName, i.e. a
 * plain name or any keyword / reserved word / literal keyword. Punctuators,
 * literals and EOF fall through to {@code false}.
 */
private static boolean isIdentifierName(Token tok) {
    switch (tok) {
    case BREAK:
    case CASE:
    case CATCH:
    case CLASS:
    case CONST:
    case CONTINUE:
    case DEBUGGER:
    case DEFAULT:
    case DELETE:
    case DO:
    case ELSE:
    case ENUM:
    case EXPORT:
    case EXTENDS:
    case FALSE:
    case FINALLY:
    case FOR:
    case FUNCTION:
    case IF:
    case IMPLEMENTS:
    case IMPORT:
    case IN:
    case INSTANCEOF:
    case INTERFACE:
    case LET:
    case NAME:
    case NEW:
    case NULL:
    case PACKAGE:
    case PRIVATE:
    case PROTECTED:
    case PUBLIC:
    case RETURN:
    case STATIC:
    case SUPER:
    case SWITCH:
    case THIS:
    case THROW:
    case TRUE:
    case TRY:
    case TYPEOF:
    case VAR:
    case VOID:
    case WHILE:
    case WITH:
    case YIELD:
        return true;
    default:
        return false;
    }
}
/**
 * <strong>[7.8.3] Numeric Literals</strong>
 *
 * Consumes the current {@code NUMBER} token and returns its numeric value.
 */
private double numericLiteral() {
    // read the value before consuming; consume() advances the stream
    final double value = ts.getNumber();
    consume(Token.NUMBER);
    return value;
}
/**
 * <strong>[7.8.4] String Literals</strong>
 *
 * Consumes the current {@code STRING} token and returns its text.
 */
private String stringLiteral() {
    // read the value before consuming; consume() advances the stream
    final String value = ts.getString();
    consume(Token.STRING);
    return value;
}
} |
package com.john;
/**
* Gene Class for the Genetic Algorithm used for Learning
*/
/**
 * Gene Class for the Genetic Algorithm used for Learning.
 *
 * A gene is a rows x cols matrix of float weights.
 */
public class Gene {
    // weight matrix; gene.length is the row count
    float[][] gene;

    /**
     * Creates a rows x cols gene with all weights set to zero.
     * (Java zero-initializes new float arrays, so no explicit fill loop is
     * needed.)
     */
    Gene(int rows, int cols) {
        gene = new float[rows][cols];
    }

    /**
     * Return length of gene (the number of rows of the weight matrix).
     */
    int getSize() {
        return gene.length;
    }

    /**
     * Copy weight matrix to gene matrix.
     *
     * Performs a deep copy so that later mutation of {@code weight} by the
     * caller cannot change this gene (the previous implementation stored the
     * caller's array reference directly, aliasing the two matrices).
     */
    void copyWeightMatrix(float[][] weight) {
        float[][] copy = new float[weight.length][];
        for (int i = 0; i < weight.length; i++) {
            copy[i] = weight[i].clone();
        }
        gene = copy;
    }
}
package com.github.davidmoten.rx.jdbc;
import static com.github.davidmoten.rx.RxUtil.constant;
import static com.github.davidmoten.rx.RxUtil.greaterThanZero;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rx.Observable;
import rx.Observable.Operator;
import rx.Scheduler;
import rx.functions.Func0;
import rx.functions.Func1;
import rx.functions.Func2;
import rx.observables.StringObservable;
import rx.schedulers.Schedulers;
import com.github.davidmoten.rx.RxUtil;
import com.github.davidmoten.rx.RxUtil.CountingAction;
/**
* Main entry point for manipulations of a database using rx-java-jdbc style
* queries.
*/
final public class Database {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(Database.class);
/**
* Provides access for queries to a limited subset of {@link Database}
* methods.
*/
private final QueryContext context;
/**
* ThreadLocal storage of the current {@link Scheduler} factory to use with
* queries.
*/
private final ThreadLocal<Func0<Scheduler>> currentSchedulerFactory = new ThreadLocal<Func0<Scheduler>>();
/**
* ThreadLocal storage of the current {@link ConnectionProvider} to use with
* queries.
*/
private final ThreadLocal<ConnectionProvider> currentConnectionProvider = new ThreadLocal<ConnectionProvider>();
private final ThreadLocal<Boolean> isTransactionOpen = new ThreadLocal<Boolean>();
/**
* Records the result of the last finished transaction (committed =
* <code>true</code> or rolled back = <code>false</code>).
*/
private final ThreadLocal<Observable<Boolean>> lastTransactionResult = new ThreadLocal<Observable<Boolean>>();
/**
* Connection provider.
*/
private final ConnectionProvider cp;
/**
* Schedules non transactional queries.
*/
private final Func0<Scheduler> nonTransactionalSchedulerFactory;
/**
* Constructor.
*
* @param cp
* provides connections
* @param nonTransactionalSchedulerFactory
* schedules non transactional queries
*/
public Database(final ConnectionProvider cp, Func0<Scheduler> nonTransactionalSchedulerFactory) {
Conditions.checkNotNull(cp);
this.cp = cp;
currentConnectionProvider.set(cp);
if (nonTransactionalSchedulerFactory != null)
this.nonTransactionalSchedulerFactory = nonTransactionalSchedulerFactory;
else
this.nonTransactionalSchedulerFactory = CURRENT_THREAD_SCHEDULER_FACTORY;
this.context = new QueryContext(this);
}
/**
* Returns the {@link ConnectionProvider}.
*
* @return
*/
public ConnectionProvider getConnectionProvider() {
return cp;
}
/**
* Schedules on {@link Schedulers#io()}.
*/
private final Func0<Scheduler> IO_SCHEDULER_FACTORY = new Func0<Scheduler>() {
@Override
public Scheduler call() {
return Schedulers.io();
}
};
/**
* Schedules using {@link Schedulers}.trampoline().
*/
private static final Func0<Scheduler> CURRENT_THREAD_SCHEDULER_FACTORY = new Func0<Scheduler>() {
@Override
public Scheduler call() {
return Schedulers.trampoline();
}
};
/**
* Constructor. Thread pool size defaults to
* <code>{@link Runtime#getRuntime()}.availableProcessors()+1</code>. This
* may be too conservative if the database is on another server. If that is
* the case then you may want to use a thread pool size equal to the
* available processors + 1 on the database server.
*
* @param cp
* provides connections
*/
public Database(ConnectionProvider cp) {
this(cp, null);
}
/**
* Constructor. Uses a {@link ConnectionProviderFromUrl} based on the given
* url.
*
* @param url
* jdbc url
* @param username
* username for connection
* @param password
* password for connection
*/
public Database(String url, String username, String password) {
this(new ConnectionProviderFromUrl(url, username, password));
}
/**
* Constructor. Uses the single connection provided and current thread
* scheduler (trampoline) to run all queries. The connection will not be
* closed in reality though the log may indicate it as having received a
* close call.
*
* @param con
* the connection
*/
public Database(Connection con) {
this(new ConnectionProviderNonClosing(con), CURRENT_THREAD_SCHEDULER_FACTORY);
}
/**
* Returns a {@link Database} based on a jdbc connection string.
*
* @param url
* jdbc connection url
* @return
*/
public static Database from(String url) {
return new Database(url, null, null);
}
/**
* Returns a {@link Database} based on a jdbc connection string.
*
* @param url
* jdbc connection url
* @return
*/
public static Database from(String url, String username, String password) {
return new Database(url, username, password);
}
/**
* Returns a {@link Database} based on connections obtained from a
* javax.activation.DataSource based on looking up the current
* javax.naming.Context.
*
* @param jndiResource
* @return
*/
public static Database fromContext(String jndiResource) {
return new Database(new ConnectionProviderFromContext(jndiResource));
}
/**
* Returns a {@link Database} that obtains {@link Connection}s on demand
* from the given {@link ConnectionProvider}. When {@link Database#close()}
* is called, {@link ConnectionProvider#close()} is called.
*
* @param cp
* @return
*/
public static Database from(ConnectionProvider cp) {
return new Database(cp);
}
/**
* Factory method. Uses the single connection provided and current thread
* scheduler (trampoline) to run all queries. The connection will not be
* closed in reality though the log may indicate it as having received a
* close call.
*
* @param con
* the connection
*/
public static Database from(Connection con) {
return new Database(con);
}
/**
* Returns a new {@link Builder}.
*
* @return
*/
public static Builder builder() {
return new Builder();
}
/**
* Builds a {@link Database}.
*/
public final static class Builder {
private ConnectionProvider cp;
private Func0<Scheduler> nonTransactionalSchedulerFactory = null;
private Pool pool = null;
private String url;
private String username;
private String password;
private static class Pool {
int minSize;
int maxSize;
Pool(int minSize, int maxSize) {
super();
this.minSize = minSize;
this.maxSize = maxSize;
}
}
/**
* Constructor.
*/
private Builder() {
}
/**
* Sets the connection provider.
*
* @param cp
* @return
*/
public Builder connectionProvider(ConnectionProvider cp) {
this.cp = cp;
return this;
}
/**
* Sets the jdbc url.
*
* @param url
* @return
*/
public Builder url(String url) {
this.url = url;
return this;
}
public Builder username(String username) {
this.username = username;
return this;
}
public Builder password(String password) {
this.password = password;
return this;
}
/**
* Sets the {@link ConnectionProvider} to use a connection pool with the
* given jdbc url and pool size.
*
* @param url
* @param minPoolSize
* @param maxPoolSize
* @return
*/
public Builder pool(int minPoolSize, int maxPoolSize) {
pool = new Pool(minPoolSize, maxPoolSize);
return this;
}
/**
* Sets the {@link ConnectionProvider} to use a connection pool with the
* given jdbc url and min pool size of 0, max pool size of 10.
*
* @param url
* @return
*/
public Builder pooled(String url) {
this.cp = new ConnectionProviderPooled(url, 0, 10);
return this;
}
/**
* Sets the non transactional scheduler.
*
* @param factory
* @return
*/
public Builder nonTransactionalScheduler(Func0<Scheduler> factory) {
nonTransactionalSchedulerFactory = factory;
return this;
}
/**
* Requests that the non transactional queries are run using
* {@link Schedulers#trampoline()}.
*
* @return
*/
public Builder nonTransactionalSchedulerOnCurrentThread() {
nonTransactionalSchedulerFactory = CURRENT_THREAD_SCHEDULER_FACTORY;
return this;
}
/**
* Returns a {@link Database}.
*
* @return
*/
public Database build() {
if (url != null && pool != null)
cp = new ConnectionProviderPooled(url, username, password, pool.minSize,
pool.maxSize);
else if (url != null)
cp = new ConnectionProviderFromUrl(url, username, password);
return new Database(cp, nonTransactionalSchedulerFactory);
}
}
/**
* Returns the thread local current query context (will not return null).
* Will return overriden context (for example using Database returned from
* {@link Database#beginTransaction()} if set.
*
* @return
*/
public QueryContext queryContext() {
return context;
}
/**
* Returns a {@link QuerySelect.Builder} builder based on the given select
* statement sql.
*
* @param sql
* a select statement.
* @return select query builder
*/
public QuerySelect.Builder select(String sql) {
return new QuerySelect.Builder(sql, this);
}
/**
* Returns a {@link QueryUpdate.Builder} builder based on the given
* update/insert/delete/DDL statement sql.
*
* @param sql
* an update/insert/delete/DDL statement.
* @return update/insert query builder
*/
public QueryUpdate.Builder update(String sql) {
return new QueryUpdate.Builder(sql, this);
}
/**
* Starts a transaction. Until commit() or rollback() is called on the
* source this will set the query context for all created queries to be a
* single threaded executor with one (new) connection.
*
* @param dependency
* @return
*/
public Observable<Boolean> beginTransaction(Observable<?> dependency) {
return update("begin").dependsOn(dependency).count().map(constant(true));
}
/**
* Starts a transaction. Until commit() or rollback() is called on the
* source this will set the query context for all created queries to be a
* single threaded executor with one (new) connection.
*
* @return
*/
public Observable<Boolean> beginTransaction() {
return beginTransaction(Observable.empty());
}
/**
* Returns true if and only if integer is non-zero.
*/
private static final Func1<Integer, Boolean> IS_NON_ZERO = new Func1<Integer, Boolean>() {
@Override
public Boolean call(Integer i) {
return i != 0;
}
};
/**
* Commits a transaction and resets the current query context so that
* further queries will use the asynchronous version by default. All
* Observable dependencies must be complete before commit is called.
*
* @param depends
* depdencies that must complete before commit occurs.
* @return
*/
public Observable<Boolean> commit(Observable<?>... depends) {
return commitOrRollback(true, depends);
}
/**
* Waits for the source to complete before returning the result of
* db.commit();
*
* @return commit operator
*/
public <T> Operator<Boolean, T> commitOperator() {
return commitOrRollbackOperator(true);
}
/**
* Waits for the source to complete before returning the result of
* db.rollback();
*
* @return rollback operator
*/
public <T> Operator<Boolean, T> rollbackOperator() {
return commitOrRollbackOperator(false);
}
private <T> Operator<Boolean, T> commitOrRollbackOperator(final boolean commit) {
final QueryUpdate.Builder updateBuilder = createCommitOrRollbackQuery(commit);
return RxUtil.toOperator(new Func1<Observable<T>, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(Observable<T> source) {
return updateBuilder.dependsOn(source).count().map(IS_NON_ZERO);
}
});
}
/**
* Commits or rolls back a transaction depending on the <code>commit</code>
* parameter and resets the current query context so that further queries
* will use the asynchronous version by default. All Observable dependencies
* must be complete before commit/rollback is called.
*
* @param commit
* @param depends
* @return
*/
private Observable<Boolean> commitOrRollback(boolean commit, Observable<?>... depends) {
QueryUpdate.Builder u = createCommitOrRollbackQuery(commit);
for (Observable<?> dep : depends)
u = u.dependsOn(dep);
Observable<Boolean> result = u.count().map(IS_NON_ZERO);
lastTransactionResult.set(result);
return result;
}
private QueryUpdate.Builder createCommitOrRollbackQuery(boolean commit) {
String action;
if (commit)
action = "commit";
else
action = "rollback";
QueryUpdate.Builder u = update(action);
return u;
}
/**
* Rolls back a transaction and resets the current query context so that
* further queries will use the asynchronous version by default. All
* Observable dependencies must be complete before rollback is called.
*
* @param depends
* depdencies that must complete before commit occurs.
* @return
*
**/
public Observable<Boolean> rollback(Observable<?>... depends) {
return commitOrRollback(false, depends);
}
/**
* Returns observable that emits true when last transaction committed or
* false when last transaction is rolled back.
*
* @return
*/
public Observable<Boolean> lastTransactionResult() {
Observable<Boolean> o = lastTransactionResult.get();
if (o == null)
return Observable.empty();
else
return o;
}
/**
* Close the database in particular closes the {@link ConnectionProvider}
* for the database. For a {@link ConnectionProviderPooled} this will be a
* required call for cleanup.
*
* @return
*/
public Database close() {
log.debug("closing connection provider");
cp.close();
log.debug("closed connection provider");
return this;
}
/**
* Returns the current thread local {@link Scheduler}.
*
* @return
*/
Scheduler currentScheduler() {
if (currentSchedulerFactory.get() == null)
return nonTransactionalSchedulerFactory.call();
else
return currentSchedulerFactory.get().call();
}
/**
* Returns the current thread local {@link ConnectionProvider}.
*
* @return
*/
ConnectionProvider connectionProvider() {
if (currentConnectionProvider.get() == null)
return cp;
else
return currentConnectionProvider.get();
}
/**
* Sets the current thread local {@link ConnectionProvider} to a singleton
* manual commit instance.
*/
void beginTransactionObserve() {
log.debug("beginTransactionObserve");
currentConnectionProvider.set(new ConnectionProviderSingletonManualCommit(cp));
if (isTransactionOpen.get() != null && isTransactionOpen.get())
throw new RuntimeException("cannot begin transaction as transaction open already");
isTransactionOpen.set(true);
}
/**
* Sets the current thread local {@link Scheduler} to be
* {@link Schedulers#trampoline()}.
*/
void beginTransactionSubscribe() {
log.debug("beginTransactionSubscribe");
currentSchedulerFactory.set(CURRENT_THREAD_SCHEDULER_FACTORY);
}
/**
* Resets the current thread local {@link Scheduler} to default.
*/
void endTransactionSubscribe() {
log.debug("endTransactionSubscribe");
currentSchedulerFactory.set(null);
}
/**
* Resets the current thread local {@link ConnectionProvider} to default.
*/
void endTransactionObserve() {
log.debug("endTransactionObserve");
currentConnectionProvider.set(cp);
isTransactionOpen.set(false);
}
/**
* Returns an {@link Operator} that performs commit or rollback of a
* transaction.
*
* @param isCommit
* @return
*/
private <T> Operator<Boolean, T> commitOrRollbackOnCompleteOperator(final boolean isCommit) {
return RxUtil.toOperator(new Func1<Observable<T>, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(Observable<T> source) {
return commitOrRollbackOnCompleteOperatorIfAtLeastOneValue(isCommit, Database.this,
source);
}
});
}
/**
* Commits current transaction on the completion of source if and only if
* the source sequence is non-empty.
*
* @return operator that commits on completion of source.
*/
public <T> Operator<Boolean, T> commitOnCompleteOperator() {
return commitOrRollbackOnCompleteOperator(true);
}
/**
* Rolls back current transaction on the completion of source if and only if
* the source sequence is non-empty.
*
* @return operator that rolls back on completion of source.
*/
public <T> Operator<Boolean, T> rollbackOnCompleteOperator() {
return commitOrRollbackOnCompleteOperator(false);
}
/**
* Starts a database transaction for each onNext call. Following database
* calls will be subscribed on current thread (Schedulers.trampoline()) and
* share the same {@link Connection} until transaction is rolled back or
* committed.
*
* @return begin transaction operator
*/
public <T> Operator<T, T> beginTransactionOnNextOperator() {
return RxUtil.toOperator(new Func1<Observable<T>, Observable<T>>() {
@Override
public Observable<T> call(Observable<T> source) {
return beginTransactionOnNext(Database.this, source);
}
});
}
/**
* Commits the currently open transaction. Emits true.
*
* @return
*/
public <T> Operator<Boolean, T> commitOnNextOperator() {
return commitOrRollbackOnNextOperator(true);
}
public <T> Operator<Boolean, Observable<T>> commitOnNextListOperator() {
return commitOrRollbackOnNextListOperator(true);
}
public <T> Operator<Boolean, Observable<T>> rollbackOnNextListOperator() {
return commitOrRollbackOnNextListOperator(false);
}
private <T> Operator<Boolean, Observable<T>> commitOrRollbackOnNextListOperator(
final boolean isCommit) {
return RxUtil.toOperator(new Func1<Observable<Observable<T>>, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(Observable<Observable<T>> source) {
return source.concatMap(new Func1<Observable<T>, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(Observable<T> source) {
if (isCommit)
return commit(source);
else
return rollback(source);
}
});
}
});
}
/**
* Rolls back the current transaction. Emits false.
*
* @return
*/
public Operator<Boolean, ?> rollbackOnNextOperator() {
return commitOrRollbackOnNextOperator(false);
}
private <T> Operator<Boolean, T> commitOrRollbackOnNextOperator(final boolean isCommit) {
return RxUtil.toOperator(new Func1<Observable<T>, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(Observable<T> source) {
return commitOrRollbackOnNext(isCommit, Database.this, source);
}
});
}
private static final <T> Observable<Boolean> commitOrRollbackOnCompleteOperatorIfAtLeastOneValue(
final boolean isCommit, final Database db, Observable<T> source) {
CountingAction<T> counter = RxUtil.counter();
Observable<Boolean> commit = counter
// get count
.count()
// greater than zero or empty
.filter(greaterThanZero())
// commit if at least one value
.lift(db.commitOrRollbackOperator(isCommit));
return Observable
// concatenate
.concat(source
// count emissions
.doOnNext(counter)
// ignore emissions
.ignoreElements()
// cast the empty sequence to type Boolean
.cast(Boolean.class),
// concat with commit
commit);
}
/**
* Emits true for commit and false for rollback.
*
* @param isCommit
* @param db
* @param source
* @return
*/
private static final <T> Observable<Boolean> commitOrRollbackOnNext(final boolean isCommit,
final Database db, Observable<T> source) {
return source.concatMap(new Func1<T, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(T t) {
if (isCommit)
return db.commit();
else
return db.rollback();
}
});
}
private static <T> Observable<T> beginTransactionOnNext(final Database db, Observable<T> source) {
return source.concatMap(new Func1<T, Observable<T>>() {
@Override
public Observable<T> call(T t) {
return db.beginTransaction().map(constant(t));
}
});
}
/**
* Returns an {@link Observable} that is the result of running a sequence of
* update commands (insert/update/delete, ddl) read from the given
* {@link Observable} sequence.
*
* @param commands
* @return
*/
public Observable<Integer> run(Observable<String> commands) {
return commands.reduce(Observable.<Integer> empty(),
new Func2<Observable<Integer>, String, Observable<Integer>>() {
@Override
public Observable<Integer> call(Observable<Integer> dep, String command) {
return update(command).dependsOn(dep).count();
}
}).lift(RxUtil.<Integer> flatten());
}
/**
* Returns an {@link Operator} version of {@link #run(Observable)}.
*
* @return
*/
public Operator<Integer, String> run() {
return RxUtil.toOperator(new Func1<Observable<String>, Observable<Integer>>() {
@Override
public Observable<Integer> call(Observable<String> commands) {
return run(commands);
}
});
}
/**
* Returns an {@link Observable} that is the result of running a sequence of
* update commands (insert/update/delete, ddl) commands read from an
* InputStream using the given delimiter as the statement delimiter (for
* example semicolon).
*
* @param is
* @param delimiter
* @return
*/
public Observable<Integer> run(InputStream is, String delimiter) {
return StringObservable.split(StringObservable.from(new InputStreamReader(is)), ";").lift(
run());
}
/**
* Returns a Database based on the current Database except all queries run
* {@link Schedulers#io}.
*
* @return
*/
public Database asynchronous() {
return new Database(cp, IO_SCHEDULER_FACTORY);
}
} |
package com.github.davidmoten.rx.jdbc;
import static com.github.davidmoten.rx.RxUtil.greaterThanZero;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.Types;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rx.Observable;
import rx.Observable.Operator;
import rx.Scheduler;
import rx.functions.Func0;
import rx.functions.Func1;
import rx.functions.Func2;
import rx.observables.StringObservable;
import rx.schedulers.Schedulers;
import com.github.davidmoten.rx.Functions;
import com.github.davidmoten.rx.RxUtil;
import com.github.davidmoten.rx.RxUtil.CountingAction;
/**
* Main entry point for manipulations of a database using rx-java-jdbc style
* queries.
*/
final public class Database {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(Database.class);
/**
* Provides access for queries to a limited subset of {@link Database}
* methods.
*/
private final QueryContext context;
/**
* ThreadLocal storage of the current {@link Scheduler} factory to use with
* queries.
*/
private final ThreadLocal<Func0<Scheduler>> currentSchedulerFactory = new ThreadLocal<Func0<Scheduler>>();
/**
* ThreadLocal storage of the current {@link ConnectionProvider} to use with
* queries.
*/
private final ThreadLocal<ConnectionProvider> currentConnectionProvider = new ThreadLocal<ConnectionProvider>();
private final ThreadLocal<Boolean> isTransactionOpen = new ThreadLocal<Boolean>();
/**
* Records the result of the last finished transaction (committed =
* <code>true</code> or rolled back = <code>false</code>).
*/
private final ThreadLocal<Observable<Boolean>> lastTransactionResult = new ThreadLocal<Observable<Boolean>>();
/**
* Connection provider.
*/
private final ConnectionProvider cp;
/**
* Schedules non transactional queries.
*/
private final Func0<Scheduler> nonTransactionalSchedulerFactory;
/**
* Constructor.
*
* @param cp
* provides connections
* @param nonTransactionalSchedulerFactory
* schedules non transactional queries
*/
public Database(final ConnectionProvider cp, Func0<Scheduler> nonTransactionalSchedulerFactory) {
Conditions.checkNotNull(cp);
this.cp = cp;
currentConnectionProvider.set(cp);
if (nonTransactionalSchedulerFactory != null)
this.nonTransactionalSchedulerFactory = nonTransactionalSchedulerFactory;
else
this.nonTransactionalSchedulerFactory = CURRENT_THREAD_SCHEDULER_FACTORY;
this.context = new QueryContext(this);
}
/**
* Returns the {@link ConnectionProvider}.
*
* @return
*/
public ConnectionProvider getConnectionProvider() {
return cp;
}
/**
* Schedules using {@link Schedulers}.trampoline().
*/
private static final Func0<Scheduler> CURRENT_THREAD_SCHEDULER_FACTORY = new Func0<Scheduler>() {
@Override
public Scheduler call() {
return Schedulers.trampoline();
}
};
/**
* Constructor. Thread pool size defaults to
* <code>{@link Runtime#getRuntime()}.availableProcessors()+1</code>. This
* may be too conservative if the database is on another server. If that is
* the case then you may want to use a thread pool size equal to the
* available processors + 1 on the database server.
*
* @param cp
* provides connections
*/
public Database(ConnectionProvider cp) {
this(cp, null);
}
/**
* Constructor. Uses a {@link ConnectionProviderFromUrl} based on the given
* url.
*
* @param url
* jdbc url
* @param username
* username for connection
* @param password
* password for connection
*/
public Database(String url, String username, String password) {
this(new ConnectionProviderFromUrl(url, username, password));
}
/**
* Constructor. Uses the single connection provided and current thread
* scheduler (trampoline) to run all queries. The connection will not be
* closed in reality though the log may indicate it as having received a
* close call.
*
* @param con
* the connection
*/
public Database(Connection con) {
this(new ConnectionProviderNonClosing(con), CURRENT_THREAD_SCHEDULER_FACTORY);
}
/**
* Returns a {@link Database} based on a jdbc connection string.
*
* @param url
* jdbc connection url
* @return
*/
public static Database from(String url) {
return new Database(url, null, null);
}
/**
* Returns a {@link Database} based on a jdbc connection string.
*
* @param url
* jdbc url
* @param username
* username for connection
* @param password
* password for connection
* @return the database object
*/
public static Database from(String url, String username, String password) {
return new Database(url, username, password);
}
/**
* Returns a {@link Database} based on connections obtained from a
* javax.activation.DataSource based on looking up the current
* javax.naming.Context.
*
* @param jndiResource
* @return
*/
public static Database fromContext(String jndiResource) {
return new Database(new ConnectionProviderFromContext(jndiResource));
}
/**
* Returns a {@link Database} that obtains {@link Connection}s on demand
* from the given {@link ConnectionProvider}. When {@link Database#close()}
* is called, {@link ConnectionProvider#close()} is called.
*
* @param cp
* @return
*/
public static Database from(ConnectionProvider cp) {
return new Database(cp);
}
/**
* Factory method. Uses the single connection provided and current thread
* scheduler (trampoline) to run all queries. The connection will not be
* closed in reality though the log may indicate it as having received a
* close call.
*
* @param con
* the connection
*/
public static Database from(Connection con) {
return new Database(con);
}
/**
* Returns a new {@link Builder}.
*
* @return
*/
public static Builder builder() {
return new Builder();
}
/**
 * Builds a {@link Database}. Configure either an explicit
 * {@link ConnectionProvider} or a jdbc url (with optional credentials and
 * pool sizes); see {@link #build()} for how the two interact.
 */
public final static class Builder {
// explicit connection provider; overridden by url-based settings in build()
private ConnectionProvider cp;
// factory for the scheduler used by non-transactional queries (null = default)
private Func0<Scheduler> nonTransactionalSchedulerFactory = null;
// pool sizes to use when building a pooled provider from the url (null = no pool)
private Pool pool = null;
private String url;
private String username;
private String password;
/** Value holder for connection pool bounds. */
private static class Pool {
// minimum number of pooled connections
int minSize;
// maximum number of pooled connections
int maxSize;
Pool(int minSize, int maxSize) {
super();
this.minSize = minSize;
this.maxSize = maxSize;
}
}
/**
 * Constructor. Private: obtain instances via {@link Database#builder()}.
 */
private Builder() {
}
/**
 * Sets the connection provider. Ignored by {@link #build()} if a jdbc url
 * has also been set.
 *
 * @param cp provider of jdbc connections
 * @return this builder
 */
public Builder connectionProvider(ConnectionProvider cp) {
this.cp = cp;
return this;
}
/**
 * Sets the jdbc url.
 *
 * @param url jdbc url, e.g. jdbc:h2:mem:test
 * @return this builder
 */
public Builder url(String url) {
this.url = url;
return this;
}
/**
 * Sets the username used when connecting via the jdbc url.
 *
 * @param username database user
 * @return this builder
 */
public Builder username(String username) {
this.username = username;
return this;
}
/**
 * Sets the password used when connecting via the jdbc url.
 *
 * @param password database password
 * @return this builder
 */
public Builder password(String password) {
this.password = password;
return this;
}
/**
 * Requests that {@link #build()} create a pooled
 * {@link ConnectionProvider} from the configured jdbc url with the given
 * pool bounds.
 *
 * @param minPoolSize minimum number of pooled connections
 * @param maxPoolSize maximum number of pooled connections
 * @return this builder
 */
public Builder pool(int minPoolSize, int maxPoolSize) {
pool = new Pool(minPoolSize, maxPoolSize);
return this;
}
/**
 * Sets the {@link ConnectionProvider} to use a connection pool with the
 * given jdbc url and min pool size of 0, max pool size of 10.
 * Note: unlike {@link #url(String)}, this sets the provider immediately
 * and does not use the username/password fields.
 *
 * @param url jdbc url
 * @return this builder
 */
public Builder pooled(String url) {
this.cp = new ConnectionProviderPooled(url, 0, 10);
return this;
}
/**
 * Sets the non transactional scheduler.
 *
 * @param factory factory producing the scheduler for non-transactional queries
 * @return this builder
 */
public Builder nonTransactionalScheduler(Func0<Scheduler> factory) {
nonTransactionalSchedulerFactory = factory;
return this;
}
/**
 * Requests that the non transactional queries are run using
 * {@link Schedulers#trampoline()} (i.e. on the current thread).
 *
 * @return this builder
 */
public Builder nonTransactionalSchedulerOnCurrentThread() {
nonTransactionalSchedulerFactory = CURRENT_THREAD_SCHEDULER_FACTORY;
return this;
}
/**
 * Returns a {@link Database}. A configured jdbc url takes precedence over
 * an explicitly set {@link ConnectionProvider}: url + pool builds a pooled
 * provider, url alone builds a per-connection provider.
 *
 * @return the configured Database
 */
public Database build() {
if (url != null && pool != null)
cp = new ConnectionProviderPooled(url, username, password, pool.minSize,
pool.maxSize);
else if (url != null)
cp = new ConnectionProviderFromUrl(url, username, password);
return new Database(cp, nonTransactionalSchedulerFactory);
}
}
/**
 * Returns the thread local current query context (will not return null).
 * Will return the overridden context (for example using the Database
 * returned from {@link Database#beginTransaction()}) if set.
 *
 * @return the query context for queries created by this Database
 */
public QueryContext queryContext() {
return context;
}
/**
 * Returns a {@link QuerySelect.Builder} builder based on the given select
 * statement sql. The query is not executed until subscribed to.
 *
 * @param sql
 *            a select statement.
 * @return select query builder
 */
public QuerySelect.Builder select(String sql) {
return new QuerySelect.Builder(sql, this);
}
/**
 * Returns a {@link QueryUpdate.Builder} builder based on the given
 * update/insert/delete/DDL statement sql. The statement is not executed
 * until subscribed to.
 *
 * @param sql
 *            an update/insert/delete/DDL statement.
 * @return update/insert query builder
 */
public QueryUpdate.Builder update(String sql) {
return new QueryUpdate.Builder(sql, this);
}
/**
 * Starts a transaction by issuing a "begin" statement. Until commit() or
 * rollback() is called on the source this will set the query context for
 * all created queries to be a single threaded executor with one (new)
 * connection.
 *
 * @param dependency observable that must complete before the transaction begins
 * @return observable emitting true once the transaction has started
 */
public Observable<Boolean> beginTransaction(Observable<?> dependency) {
return update("begin").dependsOn(dependency).count().map(Functions.constant(true));
}
/**
 * Starts a transaction with no upstream dependency. Until commit() or
 * rollback() is called on the source this will set the query context for
 * all created queries to be a single threaded executor with one (new)
 * connection.
 *
 * @return observable emitting true once the transaction has started
 */
public Observable<Boolean> beginTransaction() {
return beginTransaction(Observable.empty());
}
/**
 * Returns true if and only if the integer is non-zero. Used to translate
 * the update count of a commit/rollback statement into a Boolean outcome.
 */
private static final Func1<Integer, Boolean> IS_NON_ZERO = new Func1<Integer, Boolean>() {
@Override
public Boolean call(Integer i) {
return i != 0;
}
};
/**
 * Commits a transaction and resets the current query context so that
 * further queries will use the asynchronous version by default. All
 * Observable dependencies must be complete before commit is called.
 *
 * @param depends
 *            dependencies that must complete before commit occurs.
 * @return observable emitting true when the commit statement has run
 */
public Observable<Boolean> commit(Observable<?>... depends) {
return commitOrRollback(true, depends);
}
/**
 * Waits for the source to complete before returning the result of
 * db.commit();
 *
 * @return commit operator
 */
public <T> Operator<Boolean, T> commitOperator() {
return commitOrRollbackOperator(true);
}
/**
 * Waits for the source to complete before returning the result of
 * db.rollback();
 *
 * @return rollback operator
 */
public <T> Operator<Boolean, T> rollbackOperator() {
return commitOrRollbackOperator(false);
}
/**
 * Creates an {@link Operator} that waits for the upstream sequence to
 * complete, then runs either a commit or a rollback statement and emits
 * whether its update count was non-zero.
 *
 * @param commit true to commit, false to rollback
 * @return operator emitting a single Boolean outcome
 */
private <T> Operator<Boolean, T> commitOrRollbackOperator(final boolean commit) {
    final QueryUpdate.Builder query = createCommitOrRollbackQuery(commit);
    Func1<Observable<T>, Observable<Boolean>> operation = new Func1<Observable<T>, Observable<Boolean>>() {
        @Override
        public Observable<Boolean> call(Observable<T> upstream) {
            return query.dependsOn(upstream).count().map(IS_NON_ZERO);
        }
    };
    return RxUtil.toOperator(operation);
}
/**
 * Commits or rolls back a transaction depending on the <code>commit</code>
 * parameter and resets the current query context so that further queries
 * will use the asynchronous version by default. All Observable dependencies
 * must be complete before commit/rollback is called.
 *
 * @param commit true to commit, false to rollback
 * @param depends dependencies that must complete before the statement runs
 * @return observable emitting whether the statement reported a non-zero count
 */
private Observable<Boolean> commitOrRollback(boolean commit, Observable<?>... depends) {
QueryUpdate.Builder u = createCommitOrRollbackQuery(commit);
for (Observable<?> dep : depends)
u = u.dependsOn(dep);
Observable<Boolean> result = u.count().map(IS_NON_ZERO);
// remember the outcome so lastTransactionResult() can replay it
lastTransactionResult.set(result);
return result;
}
/**
 * Builds the update query that finishes the current transaction.
 *
 * @param commit true for a "commit" statement, false for "rollback"
 * @return update query builder for the chosen statement
 */
private QueryUpdate.Builder createCommitOrRollbackQuery(boolean commit) {
    final String statement = commit ? "commit" : "rollback";
    return update(statement);
}
/**
 * Rolls back a transaction and resets the current query context so that
 * further queries will use the asynchronous version by default. All
 * Observable dependencies must be complete before rollback is called.
 *
 * @param depends
 *            dependencies that must complete before rollback occurs.
 * @return observable emitting true when the rollback statement has run
 *
 **/
public Observable<Boolean> rollback(Observable<?>... depends) {
return commitOrRollback(false, depends);
}
/**
 * Returns an observable that emits true if the last transaction committed
 * or false if it rolled back. Emits nothing when no transaction has been
 * finished yet on this Database.
 *
 * @return the last transaction's outcome, or an empty observable
 */
public Observable<Boolean> lastTransactionResult() {
    final Observable<Boolean> result = lastTransactionResult.get();
    return result != null ? result : Observable.<Boolean> empty();
}
/**
 * Closes the database, in particular closes the {@link ConnectionProvider}
 * for the database. For a {@link ConnectionProviderPooled} this will be a
 * required call for cleanup.
 *
 * @return this Database, for chaining
 */
public Database close() {
log.debug("closing connection provider");
cp.close();
log.debug("closed connection provider");
return this;
}
/**
 * Returns the current thread local {@link Scheduler}: the transactional
 * override if one is installed, otherwise the scheduler produced by the
 * non-transactional factory.
 *
 * @return scheduler to run the next query on
 */
Scheduler currentScheduler() {
    final Func0<Scheduler> factory = currentSchedulerFactory.get();
    return factory != null ? factory.call() : nonTransactionalSchedulerFactory.call();
}
/**
 * Returns the current thread local {@link ConnectionProvider}: the
 * transactional override if one is installed, otherwise the Database's
 * default provider.
 *
 * @return connection provider for the next query
 */
ConnectionProvider connectionProvider() {
    final ConnectionProvider override = currentConnectionProvider.get();
    return override != null ? override : cp;
}
/**
 * Marks a transaction as open for the current thread and installs a thread
 * local {@link ConnectionProvider} that hands out a single manual-commit
 * connection.
 *
 * @throws RuntimeException if a transaction is already open on this thread
 */
void beginTransactionObserve() {
    log.debug("beginTransactionObserve");
    // Validate BEFORE mutating thread local state. Previously the
    // connection provider was replaced first, so a nested begin left the
    // thread pointing at a fresh manual-commit provider even though the
    // call failed with an exception.
    if (isTransactionOpen.get() != null && isTransactionOpen.get())
        throw new RuntimeException("cannot begin transaction as transaction open already");
    currentConnectionProvider.set(new ConnectionProviderSingletonManualCommit(cp));
    isTransactionOpen.set(true);
}
/**
 * Sets the current thread local {@link Scheduler} to be
 * {@link Schedulers#trampoline()} so transactional queries share the
 * subscribing thread.
 */
void beginTransactionSubscribe() {
log.debug("beginTransactionSubscribe");
currentSchedulerFactory.set(CURRENT_THREAD_SCHEDULER_FACTORY);
}
/**
 * Resets the current thread local {@link Scheduler} to default
 * (non-transactional) behaviour.
 */
void endTransactionSubscribe() {
log.debug("endTransactionSubscribe");
currentSchedulerFactory.set(null);
}
/**
 * Resets the current thread local {@link ConnectionProvider} to default
 * and marks the transaction as closed for this thread.
 */
void endTransactionObserve() {
log.debug("endTransactionObserve");
currentConnectionProvider.set(cp);
isTransactionOpen.set(false);
}
/**
 * Returns an {@link Operator} that performs commit or rollback of a
 * transaction when the source completes, but only if the source emitted at
 * least one value (delegates to
 * commitOrRollbackOnCompleteOperatorIfAtLeastOneValue).
 *
 * @param isCommit true to commit, false to rollback
 * @return operator emitting the transaction outcome
 */
private <T> Operator<Boolean, T> commitOrRollbackOnCompleteOperator(final boolean isCommit) {
return RxUtil.toOperator(new Func1<Observable<T>, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(Observable<T> source) {
return commitOrRollbackOnCompleteOperatorIfAtLeastOneValue(isCommit, Database.this,
source);
}
});
}
/**
 * Commits current transaction on the completion of source if and only if
 * the source sequence is non-empty.
 *
 * @return operator that commits on completion of source.
 */
public <T> Operator<Boolean, T> commitOnCompleteOperator() {
return commitOrRollbackOnCompleteOperator(true);
}
/**
 * Rolls back current transaction on the completion of source if and only if
 * the source sequence is non-empty.
 *
 * @return operator that rolls back on completion of source.
 */
public <T> Operator<Boolean, T> rollbackOnCompleteOperator() {
return commitOrRollbackOnCompleteOperator(false);
}
/**
 * Starts a database transaction for each onNext call. Following database
 * calls will be subscribed on current thread (Schedulers.trampoline()) and
 * share the same {@link Connection} until transaction is rolled back or
 * committed.
 *
 * @return begin transaction operator (passes source items through unchanged)
 */
public <T> Operator<T, T> beginTransactionOnNextOperator() {
return RxUtil.toOperator(new Func1<Observable<T>, Observable<T>>() {
@Override
public Observable<T> call(Observable<T> source) {
return beginTransactionOnNext(Database.this, source);
}
});
}
/**
 * Commits the currently open transaction for each item emitted by the
 * source. Emits true per commit.
 *
 * @return commit-on-next operator
 */
public <T> Operator<Boolean, T> commitOnNextOperator() {
return commitOrRollbackOnNextOperator(true);
}
/**
 * Commits the currently open transaction for each inner Observable emitted
 * by the source, once that inner sequence completes.
 *
 * @return commit-on-next list operator
 */
public <T> Operator<Boolean, Observable<T>> commitOnNextListOperator() {
return commitOrRollbackOnNextListOperator(true);
}
/**
 * Rolls back the currently open transaction for each inner Observable
 * emitted by the source, once that inner sequence completes.
 *
 * @return rollback-on-next list operator
 */
public <T> Operator<Boolean, Observable<T>> rollbackOnNextListOperator() {
return commitOrRollbackOnNextListOperator(false);
}
/**
 * Returns an operator that, for every inner Observable emitted by the
 * source, commits (or rolls back) the current transaction once that inner
 * sequence completes. Inner sequences are processed strictly in order.
 *
 * @param isCommit true to commit, false to rollback
 * @return operator emitting one Boolean per inner Observable
 */
private <T> Operator<Boolean, Observable<T>> commitOrRollbackOnNextListOperator(
        final boolean isCommit) {
    return RxUtil.toOperator(new Func1<Observable<Observable<T>>, Observable<Boolean>>() {
        @Override
        public Observable<Boolean> call(Observable<Observable<T>> outer) {
            // renamed the inner parameter: it previously shadowed the
            // outer 'source' parameter
            return outer.concatMap(new Func1<Observable<T>, Observable<Boolean>>() {
                @Override
                public Observable<Boolean> call(Observable<T> inner) {
                    return isCommit ? commit(inner) : rollback(inner);
                }
            });
        }
    });
}
/**
 * Rolls back the current transaction for each item emitted by the source.
 * Emits false per rollback.
 *
 * @return rollback-on-next operator
 */
public Operator<Boolean, ?> rollbackOnNextOperator() {
return commitOrRollbackOnNextOperator(false);
}
/**
 * Shared implementation for {@link #commitOnNextOperator()} and
 * {@link #rollbackOnNextOperator()}.
 *
 * @param isCommit true to commit, false to rollback
 * @return operator emitting one Boolean per source item
 */
private <T> Operator<Boolean, T> commitOrRollbackOnNextOperator(final boolean isCommit) {
return RxUtil.toOperator(new Func1<Observable<T>, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(Observable<T> source) {
return commitOrRollbackOnNext(isCommit, Database.this, source);
}
});
}
/**
 * Runs the source, counting its emissions but not forwarding them, then
 * commits (or rolls back) the current transaction only when the source
 * emitted at least one value. Emits the transaction outcome.
 *
 * @param isCommit true to commit, false to rollback
 * @param db database whose transaction is finished
 * @param source sequence whose completion triggers the commit/rollback
 * @return observable of the transaction outcome (empty if source was empty)
 */
private static <T> Observable<Boolean> commitOrRollbackOnCompleteOperatorIfAtLeastOneValue(
final boolean isCommit, final Database db, Observable<T> source) {
CountingAction<T> counter = RxUtil.counter();
Observable<Boolean> commit = counter
// get count
.count()
// greater than zero or empty
.filter(greaterThanZero())
// commit if at least one value
.lift(db.commitOrRollbackOperator(isCommit));
return Observable
// concatenate
.concat(source
// count emissions
.doOnNext(counter)
// ignore emissions
.ignoreElements()
// cast the empty sequence to type Boolean
.cast(Boolean.class),
// concat with commit
commit);
}
/**
 * Emits true for commit and false for rollback, once per item emitted by
 * the source.
 *
 * @param isCommit true to commit, false to rollback
 * @param db database whose transaction is finished
 * @param source sequence whose items trigger commit/rollback
 * @return observable of transaction outcomes
 */
private static <T> Observable<Boolean> commitOrRollbackOnNext(final boolean isCommit,
final Database db, Observable<T> source) {
return source.concatMap(new Func1<T, Observable<Boolean>>() {
@Override
public Observable<Boolean> call(T t) {
if (isCommit)
return db.commit();
else
return db.rollback();
}
});
}
/**
 * Begins a new transaction for each item emitted by the source, passing
 * the item through unchanged once the transaction has started.
 */
private static <T> Observable<T> beginTransactionOnNext(final Database db, Observable<T> source) {
return source.concatMap(new Func1<T, Observable<T>>() {
@Override
public Observable<T> call(T t) {
return db.beginTransaction().map(Functions.constant(t));
}
});
}
/**
 * Returns an {@link Observable} that is the result of running a sequence of
 * update commands (insert/update/delete, ddl) read from the given
 * {@link Observable} sequence. Each command only runs after the previous
 * one has completed (chained via dependsOn).
 *
 * @param commands sql statements to execute in order
 * @return counts of statements executed
 */
public Observable<Integer> run(Observable<String> commands) {
return commands.reduce(Observable.<Integer> empty(),
new Func2<Observable<Integer>, String, Observable<Integer>>() {
@Override
public Observable<Integer> call(Observable<Integer> dep, String command) {
return update(command).dependsOn(dep).count();
}
}).lift(RxUtil.<Integer> flatten());
}
/**
 * Returns an {@link Operator} version of {@link #run(Observable)}.
 *
 * @return operator mapping sql statements to execution counts
 */
public Operator<Integer, String> run() {
return RxUtil.toOperator(new Func1<Observable<String>, Observable<Integer>>() {
@Override
public Observable<Integer> call(Observable<String> commands) {
return run(commands);
}
});
}
/**
 * Returns an {@link Observable} that is the result of running a sequence of
 * update commands (insert/update/delete, ddl) commands read from an
 * InputStream using the given delimiter as the statement delimiter (for
 * example semicolon). The stream is decoded with the platform default
 * charset; prefer the {@link Charset} overload for deterministic decoding.
 *
 * @param is stream of sql statements
 * @param delimiter statement delimiter
 * @return counts of statements executed
 */
public Observable<Integer> run(InputStream is, String delimiter) {
return run(is, Charset.defaultCharset(), delimiter);
}
/**
 * Returns an {@link Observable} that is the result of running a sequence of
 * update commands (insert/update/delete, ddl) commands read from an
 * {@link InputStream} with the given {@link Charset} using the given
 * delimiter as the statement delimiter (for example semicolon).
 *
 * @param is stream of sql statements
 * @param charset charset used to decode the stream
 * @param delimiter statement delimiter
 *            (NOTE(review): passed to StringObservable.split, which
 *            presumably treats it as a regex pattern — verify for
 *            delimiters containing regex metacharacters)
 * @return counts of statements executed
 */
public Observable<Integer> run(InputStream is, Charset charset, String delimiter) {
    // Bug fix: the delimiter parameter was previously ignored and ";" was
    // hard-coded, contradicting the javadoc and the two-arg overload.
    return StringObservable.split(StringObservable.from(new InputStreamReader(is, charset)),
            delimiter).lift(run());
}
/**
 * Returns a Database based on the current Database except all non-transactional queries run
 * on {@link Schedulers#io()}.
 *
 * @return new Database instance
 */
public Database asynchronous() {
return asynchronous(Schedulers.io());
}
/**
 * Returns a Database based on the current Database except all non-transactional queries run
 * on the given scheduler.
 *
 * @param nonTransactionalScheduler scheduler for non-transactional queries
 * @return new Database instance
 */
public Database asynchronous(final Scheduler nonTransactionalScheduler) {
return asynchronous(new Func0<Scheduler>() {
@Override
public Scheduler call() {
return nonTransactionalScheduler;
}});
}
/**
 * Returns a Database based on the current Database except all non-transactional queries run
 * on the scheduler provided by the given factory. Shares this Database's
 * {@link ConnectionProvider}.
 *
 * @param nonTransactionalSchedulerFactory factory producing the scheduler
 * @return new Database instance
 */
public Database asynchronous(final Func0<Scheduler> nonTransactionalSchedulerFactory) {
return new Database(cp, nonTransactionalSchedulerFactory);
}
/**
 * Sentinel object used to indicate in parameters of a query that rather
 * than calling {@link PreparedStatement#setObject(int, Object)} with a null
 * we call {@link PreparedStatement#setNull(int, int)} with
 * {@link Types#CLOB}. This is required by many databases for setting CLOB
 * and BLOB fields to null. Compared by identity (==) at bind time.
 */
public static final Object NULL_CLOB = new Object();
// Sentinel for binding a SQL NULL of a numeric type; compared by identity.
public static final Object NULL_NUMBER = new Object();
/**
 * Maps a null String to the {@link #NULL_CLOB} sentinel, otherwise returns
 * the string unchanged.
 *
 * @param s possibly-null string
 * @return NULL_CLOB if s is null, else s
 */
public static Object toSentinelIfNull(String s) {
if (s == null)
return NULL_CLOB;
else
return s;
}
/**
 * Sentinel object used to indicate in parameters of a query that rather
 * than calling {@link PreparedStatement#setObject(int, Object)} with a null
 * we call {@link PreparedStatement#setNull(int, int)} with the BLOB sql
 * type (NOTE(review): the original comment said {@link Types#CLOB},
 * presumably a copy-paste error — verify the actual type at the bind
 * site). This is required by many databases for setting CLOB and BLOB
 * fields to null.
 */
public static final Object NULL_BLOB = new Object();
/**
 * Maps a null byte array to the {@link #NULL_BLOB} sentinel, otherwise
 * returns the array unchanged.
 *
 * @param bytes possibly-null byte array
 * @return NULL_BLOB if bytes is null, else bytes
 */
public static Object toSentinelIfNull(byte[] bytes) {
if (bytes == null)
return NULL_BLOB;
else
return bytes;
}
} |
package com.github.skjolberg.packing;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.lang.Math.max;
public class Container extends Box {
private int stackWeight = 0;
private int stackHeight = 0;
private ArrayList<Level> levels = new ArrayList<>();
public Container(Container container) {
super(container.getName(), container.getWidth(), container.getDepth(), container.getHeight(), container.getWeight());
}
/**
* Construct new instance.
*
* @param dimension maximum size the container can contain
* @param weight maximum weight the container can hold
*/
public Container(Dimension dimension, int weight) {
super(dimension.getName(), dimension.getWidth(), dimension.getDepth(), dimension.getHeight(), weight);
}
/**
* Construct new instance.
*
* @param w width
* @param d depth
* @param h height
* @param weight maximum weight the container can hold
*/
public Container(int w, int d, int h, int weight) {
super(w, d, h, weight);
}
/**
* Construct new instance.
*
* @param name container name
* @param w width
* @param d depth
* @param h height
* @param weight maximum weight the container can hold
*/
public Container(String name, int w, int d, int h, int weight) {
super(name, w, d, h, weight);
}
/**
* The 6 different possible rotations.
*
* It is sometimes useful to pass this list to the {@link LargestAreaFitFirstPackager}
* since it has a better chance to find a packaging than with a single container.
*/
public List<Container> rotations(){
return rotationsStream().collect(Collectors.toList());
}
Stream<Container> rotationsStream() {
return Stream.of(
new Container(width, height, depth, weight),
new Container(width, depth, height, weight),
new Container(height, width, depth, weight),
new Container(height, depth, width, weight),
new Container(depth, height, width, weight),
new Container(depth, width, height, weight));
}
public boolean add(Level element) {
if(!levels.isEmpty()) {
stackHeight += currentLevelStackHeight();
stackWeight += currentLevelStackWeight();
}
return levels.add(element);
}
public int getStackHeight() {
return stackHeight + currentLevelStackHeight();
}
public int getStackWeight() {
return stackWeight + currentLevelStackWeight();
}
private int currentLevelStackHeight() {
if(levels.isEmpty()) {
return 0;
}
return levels.get(levels.size() - 1).getHeight();
}
private int currentLevelStackWeight() {
if(levels.isEmpty()) {
return 0;
}
return levels.get(levels.size() - 1).getWeight();
}
public void add(Placement placement) {
levels.get(levels.size() - 1).add(placement);
}
public void addLevel() {
add(new Level());
}
public Dimension getFreeSpace() {
int remainder = height - getStackHeight();
if(remainder < 0) {
throw new IllegalArgumentException("Remaining free space is negative at " + remainder);
}
return new Dimension(width, depth, remainder);
}
public int getFreeWeight() {
int remainder = weight - getStackWeight();
if(remainder < 0) {
throw new IllegalArgumentException("Remaining weight is negative at " + remainder);
}
return remainder;
}
public List<Level> getLevels() {
return levels;
}
public Placement get(int level, int placement) {
return levels.get(level).get(placement);
}
// keep method for tests
public void validateCurrentLevel() {
levels.get(levels.size() - 1).validate();
}
public void clear() {
levels.clear();
stackHeight = 0;
stackWeight = 0;
}
int getBoxCount() {
int count = 0;
for(Level level : levels) {
count += level.size();
}
return count;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((levels == null) ? 0 : levels.hashCode());
result = prime * result + stackHeight;
result = prime * result + stackWeight;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
Container other = (Container) obj;
if (levels == null) {
if (other.levels != null)
return false;
} else if (!levels.equals(other.levels))
return false;
if (stackHeight != other.stackHeight)
return false;
if (stackWeight != other.stackWeight)
return false;
return true;
}
@Override
public String toString() {
return "Container [stackWeight=" + stackWeight + ", stackHeight=" + stackHeight + ", levels=" + levels
+ ", weight=" + weight + ", width=" + width + ", depth=" + depth + ", height=" + height + ", volume="
+ volume + ", name=" + name + "]";
}
public Dimension getUsedSpace() {
Dimension maxBox = Dimension.EMPTY;
int height = 0;
for (Level level : levels) {
maxBox = getUsedSpace(level, maxBox, height);
height += level.getHeight();
}
return maxBox;
}
private Dimension getUsedSpace(Level level, Dimension maxBox, int height) {
for (Placement placement : level) {
maxBox = boundingBox(maxBox, getUsedSpace(placement, height));
}
return maxBox;
}
private Dimension getUsedSpace(Placement placement, int height) {
final Box box = placement.getBox();
final Space space = placement.getSpace();
return new Dimension(
space.getX() + box.getWidth(),
space.getY() + box.getDepth(),
height + box.getHeight());
}
private Dimension boundingBox(final Dimension b1, final Dimension b2) {
return new Dimension(
max(b1.getWidth(), b2.getWidth()),
max(b1.getDepth(), b2.getDepth()),
max(b1.getHeight(), b2.getHeight()));
}
} |
package com.google.sps.servlets;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.lang.Process;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
import com.google.gson.Gson;
/** Servlet that actions items.*/
@WebServlet("/actions")
public class ActionsServlet extends HttpServlet {
private static final List<String> terms = Arrays.asList("Black Lives Matter", "COVID-19");
private static final String API_KEY = "API_KEY"; //Insert the API_KEY here for testing
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
List<String> jsonResultList = new ArrayList<>();
for (String term : terms) {
String queryTerm = encodeTerm(term);
String jsonResult = curlProjects(API_KEY, queryTerm);
jsonResultList.add(jsonResult);
}
String jsonResultString = "{\"results\": ["+String.join(",", jsonResultList)+"]}";
response.setContentType("application/json;");
response.getWriter().println(jsonResultString);
}
private String encodeTerm(String term) throws RuntimeException {
try {
return URLEncoder.encode(term, StandardCharsets.UTF_8.toString());
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException(ex.getCause());
}
}
/**
* Returns the a json string with the API response given a queryTerm and the API key.
*/
private String curlProjects(String apiKey, String queryTerm) throws IOException{
String path = "https://api.globalgiving.org/api/public/services/search/projects";
String queryString = String.format("?api_key=%s&q=%s", apiKey, queryTerm);
String[] curlCommand = { "curl", "-H", "Accept: application/json", "-H", "Content-Type: application/json", "-X", "GET", path+queryString};
ProcessBuilder process = new ProcessBuilder(curlCommand);
Process p = process.start();
BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
StringBuilder builder = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
builder.append(line);
builder.append(System.getProperty("line.separator"));
}
return builder.toString();
}
} |
package com.growthbeat.analytics.model;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.codehaus.jackson.type.TypeReference;
import com.growthbeat.Context;
import com.growthbeat.http.JsonUtils;
public class Segment {
private String id;
private String originId;
private String originName;
private String description;
private String query;
private Date created;
public static Segment findById(String id, Context context) {
String json = context.getGrowthbeatHttpClient().get("/1/segments/" + id, new HashMap<String, Object>());
return JsonUtils.deserialize(json, Segment.class);
}
public static List<Segment> findByParentSegmentId(String parentSegmentId, Integer depth, Context context) {
Map<String, Object> params = new HashMap<String, Object>();
params.put("parentSegmentId", parentSegmentId);
if (depth != null)
params.put("depth", depth);
String json = context.getGrowthbeatHttpClient().get("/1/segments", params);
return JsonUtils.deserialize(json, new TypeReference<List<Segment>>() {
});
}
public static Segment create(String parentSegmentId, String name, String description, String query, Context context) {
Map<String, Object> params = new HashMap<String, Object>();
params.put("parentSegmentId", parentSegmentId);
params.put("name", name);
params.put("description", description);
params.put("query", query);
String json = context.getGrowthbeatHttpClient().post("/1/segments", params);
return JsonUtils.deserialize(json, Segment.class);
}
public static Segment update(String segmentId, String description, String query, Context context) {
Map<String, Object> params = new HashMap<String, Object>();
params.put("description", description);
params.put("query", query);
String json = context.getGrowthbeatHttpClient().put("/1/segments/" + segmentId, params);
return JsonUtils.deserialize(json, Segment.class);
}
public static void delete(String segmentId, Context context) {
Map<String, Object> params = new HashMap<String, Object>();
context.getGrowthbeatHttpClient().delete("/1/segments/" + segmentId, params);
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getOriginId() {
return originId;
}
public void setOriginId(String originId) {
this.originId = originId;
}
public String getOriginName() {
return originName;
}
public void setOriginName(String originName) {
this.originName = originName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getQuery() {
return query;
}
public void setQuery(String query) {
this.query = query;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.