package peergos.shared.storage;
import peergos.shared.cbor.*;
import peergos.shared.crypto.hash.*;
import peergos.shared.io.ipfs.multiaddr.*;
import peergos.shared.io.ipfs.multihash.*;
import peergos.shared.util.*;
import java.util.*;
import java.util.concurrent.*;
public class CachingStorage implements ContentAddressedStorage {
private final ContentAddressedStorage target;
private final LRUCache<Multihash, byte[]> cache;
private final LRUCache<Multihash, CompletableFuture<Optional<CborObject>>> pending;
private final LRUCache<Multihash, CompletableFuture<Optional<byte[]>>> pendingRaw;
private final int maxValueSize;
public CachingStorage(ContentAddressedStorage target, int cacheSize, int maxValueSize) {
this.target = target;
this.cache = new LRUCache<>(cacheSize);
this.maxValueSize = maxValueSize;
this.pending = new LRUCache<>(100);
this.pendingRaw = new LRUCache<>(100);
}
@Override
public CompletableFuture<Multihash> id() {
return target.id();
}
@Override
public CompletableFuture<TransactionId> startTransaction(PublicKeyHash owner) {
return target.startTransaction(owner);
}
@Override
public CompletableFuture<Boolean> closeTransaction(PublicKeyHash owner, TransactionId tid) {
return target.closeTransaction(owner, tid);
}
@Override
public CompletableFuture<List<Multihash>> put(PublicKeyHash owner,
PublicKeyHash writer,
List<byte[]> signatures,
List<byte[]> blocks,
TransactionId tid) {
return target.put(owner, writer, signatures, blocks, tid)
.thenApply(res -> {
for (int i=0; i < blocks.size(); i++)
cache.put(res.get(i), Arrays.copyOf(blocks.get(i), blocks.get(i).length));
return res;
});
}
@Override
public CompletableFuture<Optional<CborObject>> get(Multihash key) {
if (cache.containsKey(key))
return CompletableFuture.completedFuture(Optional.of(CborObject.fromByteArray(cache.get(key))));
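// De-duplicate concurrent fetches: if another caller is already retrieving this
// block from the target, return the same in-flight future instead of issuing a
// second request.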
if (pending.containsKey(key))
return pending.get(key);
CompletableFuture<Optional<CborObject>> pipe = new CompletableFuture<>();
pending.put(key, pipe);
CompletableFuture<Optional<CborObject>> result = new CompletableFuture<>();
target.get(key).thenAccept(cborOpt -> {
if (cborOpt.isPresent()) {
byte[] value = cborOpt.get().toByteArray();
if (value.length > 0 && value.length < maxValueSize)
cache.put(key, value);
}
pending.remove(key);
pipe.complete(cborOpt);
result.complete(cborOpt);
}).exceptionally(t -> {
pending.remove(key);
pipe.completeExceptionally(t);
result.completeExceptionally(t);
return null;
});
return result;
}
@Override
public CompletableFuture<List<Multihash>> putRaw(PublicKeyHash owner,
PublicKeyHash writer,
List<byte[]> signatures,
List<byte[]> blocks,
TransactionId tid) {
return target.putRaw(owner, writer, signatures, blocks, tid)
.thenApply(res -> {
for (int i=0; i < blocks.size(); i++)
cache.put(res.get(i), Arrays.copyOf(blocks.get(i), blocks.get(i).length));
return res;
});
}
@Override
public CompletableFuture<Optional<byte[]>> getRaw(Multihash key) {
if (cache.containsKey(key))
return CompletableFuture.completedFuture(Optional.of(cache.get(key)));
if (pendingRaw.containsKey(key))
return pendingRaw.get(key);
CompletableFuture<Optional<byte[]>> pipe = new CompletableFuture<>();
pendingRaw.put(key, pipe);
return target.getRaw(key).thenApply(rawOpt -> {
if (rawOpt.isPresent()) {
byte[] value = rawOpt.get();
if (value.length > 0 && value.length < maxValueSize)
cache.put(key, value);
}
pendingRaw.remove(key);
pipe.complete(rawOpt);
return rawOpt;
}).exceptionally(t -> {
pendingRaw.remove(key);
pipe.completeExceptionally(t);
return null;
});
}
@Override
public CompletableFuture<List<Multihash>> recursivePin(PublicKeyHash owner, Multihash h) {
return target.recursivePin(owner, h);
}
@Override
public CompletableFuture<List<Multihash>> recursiveUnpin(PublicKeyHash owner, Multihash h) {
return target.recursiveUnpin(owner, h);
}
@Override
public CompletableFuture<List<Multihash>> pinUpdate(PublicKeyHash owner, Multihash existing, Multihash updated) {
return target.pinUpdate(owner, existing, updated);
}
@Override
public CompletableFuture<List<Multihash>> getLinks(Multihash root) {
return target.getLinks(root);
}
@Override
public CompletableFuture<Optional<Integer>> getSize(Multihash block) {
return target.getSize(block);
}
}
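// A minimal usage sketch (not part of this class): wrap any ContentAddressedStorage
// implementation with a cache. The backing store "dht", the cache size and the
// maximum cached value size below are illustrative assumptions only.
//
//   ContentAddressedStorage dht = ...; // some existing ContentAddressedStorage
//   ContentAddressedStorage cached = new CachingStorage(dht, 10_000, 50 * 1024);
//   cached.get(someHash).thenAccept(cborOpt ->
//           cborOpt.ifPresent(cbor -> System.out.println("Got block: " + cbor)));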
package org.aksw.limes.core.evaluation;
import static org.junit.Assert.*;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.aksw.limes.core.datastrutures.TaskAlgorithm;
import org.aksw.limes.core.datastrutures.TaskData;
import org.aksw.limes.core.evaluation.evaluator.Evaluator;
import org.aksw.limes.core.evaluation.evaluator.EvaluatorType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.Test;
import com.google.common.collect.Table;
public class EvaluatorsTest {
static Logger logger = LoggerFactory.getLogger(EvaluatorsTest.class);
final private String[] datasetsList = {"RESTAURANTS"/*,"PERSON1", "PERSON2", "PERSON1_CSV", "PERSON2_CSV", "OAEI2014BOOKS"*/};
final private String[] algorithmsListData = {"UNSUPERVISED:WOMBATSIMPLE","SUPERVISED_BATCH:WOMBATSIMPLE"/*,"SUPERVISED_ACTIVE:WOMBATSIMPLE"*/};
private static final int folds=5;
private static final boolean crossValidate=false;
@Test
public void test() {
/* if(crossValidate)
testCrossValidate();
else*/
testEvaluator();
}
public void testEvaluator() {
try {
DatasetsInitTest ds = new DatasetsInitTest();
EvaluatorsInitTest ev = new EvaluatorsInitTest();
AlgorithmsInitTest al = new AlgorithmsInitTest();
Evaluator evaluator = new Evaluator();
Set<TaskData> tasks = ds.initializeDataSets(datasetsList);
Set<EvaluatorType> evaluators = ev.initializeEvaluators();
List<TaskAlgorithm> algorithms = al.initializeMLAlgorithms(algorithmsListData,datasetsList.length);
Table<String, String, Map<EvaluatorType, Double>> results = evaluator.evaluate(algorithms, tasks, evaluators, null);
for (String mlAlgorithm : results.rowKeySet()) {
for (String dataset : results.columnKeySet()) {
for (EvaluatorType measure : results.get(mlAlgorithm, dataset).keySet()) {
System.out.println(mlAlgorithm+"\t"+dataset+"\t"+measure+"\t"+results.get(mlAlgorithm, dataset).get(measure));
}
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
fail(e.getMessage());
}
}
public void testCrossValidate() {
try {
DatasetsInitTest ds = new DatasetsInitTest();
EvaluatorsInitTest ev = new EvaluatorsInitTest();
AlgorithmsInitTest al = new AlgorithmsInitTest();
Evaluator evaluator = new Evaluator();
Set<TaskData> tasks = ds.initializeDataSets(datasetsList);
Set<EvaluatorType> evaluators = ev.initializeEvaluators();
List<TaskAlgorithm> algorithms = al.initializeMLAlgorithms(algorithmsListData,datasetsList.length);
for (TaskAlgorithm tAlgorithm : algorithms) {
Table<String, String, Map<EvaluatorType, Double>> results = evaluator.crossValidate(tAlgorithm.getMlAlgorithm(), tasks,folds, evaluators, null);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
fail(e.getMessage());
}
}
}
package net.sf.jabref.model;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Collectors;
import net.sf.jabref.model.database.BibDatabaseMode;
import net.sf.jabref.model.entry.BibLatexEntryTypes;
import net.sf.jabref.model.entry.BibtexEntryTypes;
import net.sf.jabref.model.entry.CustomEntryType;
import net.sf.jabref.model.entry.EntryType;
import net.sf.jabref.model.entry.IEEETranEntryTypes;
public class EntryTypes {
/**
* This class is used to specify entry types for either BIBTEX or BIBLATEX.
*/
private static class InternalEntryTypes {
private final Map<String, EntryType> ALL_TYPES = new TreeMap<>();
private final Map<String, EntryType> STANDARD_TYPES;
private final EntryType defaultType;
public InternalEntryTypes(EntryType defaultType, List<List<EntryType>> entryTypes) {
this.defaultType = defaultType;
for (List<EntryType> list : entryTypes) {
for (EntryType type : list) {
ALL_TYPES.put(type.getName().toLowerCase(), type);
}
}
STANDARD_TYPES = new TreeMap<>(ALL_TYPES);
}
/**
* This method returns the EntryType for the given type name,
* or an empty Optional if it does not exist.
*/
public Optional<EntryType> getType(String name) {
return Optional.ofNullable(ALL_TYPES.get(name.toLowerCase()));
}
/**
* This method returns the EntryType for the name of a type,
* or the default type (*.MISC) if it does not exist.
*/
// Get an entry type defined in BibtexEntryType
public EntryType getTypeOrDefault(String type) {
return getType(type).orElse(defaultType);
}
/**
* This method returns the standard EntryType for the
* name of a type, or an empty Optional if it does not exist.
*/
public Optional<EntryType> getStandardType(String name) {
return Optional.ofNullable(STANDARD_TYPES.get(name.toLowerCase()));
}
private void addOrModifyEntryType(EntryType type) {
ALL_TYPES.put(type.getName().toLowerCase(), type);
}
public Set<String> getAllTypes() {
return ALL_TYPES.keySet();
}
public Collection<EntryType> getAllValues() {
return ALL_TYPES.values();
}
/**
* Removes a customized entry type from the type map. If this type
* overrode a standard type, we reinstate the standard one.
*
* @param name The customized entry type to remove.
*/
public void removeType(String name) {
String toLowerCase = name.toLowerCase();
if (!ALL_TYPES.get(toLowerCase).equals(STANDARD_TYPES.get(toLowerCase))) {
ALL_TYPES.remove(toLowerCase);
if (STANDARD_TYPES.containsKey(toLowerCase)) {
// In this case the user has removed a customized version
// of a standard type. We reinstate the standard type.
addOrModifyEntryType(STANDARD_TYPES.get(toLowerCase));
}
}
}
}
public static final InternalEntryTypes BIBTEX = new InternalEntryTypes(BibtexEntryTypes.MISC,
Arrays.asList(BibtexEntryTypes.ALL, IEEETranEntryTypes.ALL));
public static final InternalEntryTypes BIBLATEX = new InternalEntryTypes(BibLatexEntryTypes.MISC,
Arrays.asList(BibLatexEntryTypes.ALL));
/**
* This method returns the EntryType for the given type name and database mode,
* or an empty Optional if it does not exist.
*/
public static Optional<EntryType> getType(String name, BibDatabaseMode type) {
return type == BibDatabaseMode.BIBLATEX ? BIBLATEX.getType(name) : BIBTEX.getType(name);
}
/**
* This method returns the EntryType for the name of a type,
* or the default type (*.MISC) if it does not exist.
*/
// Get an entry type defined in BibtexEntryType
public static EntryType getTypeOrDefault(String name, BibDatabaseMode mode) {
return mode == BibDatabaseMode.BIBLATEX ? BIBLATEX.getTypeOrDefault(name) : BIBTEX.getTypeOrDefault(name);
}
/**
* This method returns the standard EntryType for the
* name of a type, or an empty Optional if it does not exist.
*/
public static Optional<EntryType> getStandardType(String name, BibDatabaseMode mode) {
return mode == BibDatabaseMode.BIBLATEX ? BIBLATEX.getStandardType(name) : BIBTEX.getStandardType(name);
}
public static void addOrModifyCustomEntryType(CustomEntryType customEntryType, BibDatabaseMode mode) {
if(BibDatabaseMode.BIBLATEX == mode) {
BIBLATEX.addOrModifyEntryType(customEntryType);
} else if (BibDatabaseMode.BIBTEX == mode) {
BIBTEX.addOrModifyEntryType(customEntryType);
}
}
public static Set<String> getAllTypes(BibDatabaseMode type) {
return type == BibDatabaseMode.BIBLATEX ? BIBLATEX.getAllTypes() : BIBTEX.getAllTypes();
}
public static Collection<EntryType> getAllValues(BibDatabaseMode type) {
return type == BibDatabaseMode.BIBLATEX ? BIBLATEX.getAllValues() : BIBTEX.getAllValues();
}
/**
* Determines all custom types for a given BibDatabaseMode that are genuinely new types rather than overridden standard types.
*
* I.e., a modified "article" type will not be included in the list, but an EntryType like "MyCustomType" will be included.
*
* @param mode the BibDatabaseMode to be checked
* @return the list of all found custom types
*/
public static List<EntryType> getAllCustomTypes(BibDatabaseMode mode) {
Collection<EntryType> allTypes = getAllValues(mode);
if(mode == BibDatabaseMode.BIBTEX) {
return allTypes.stream().filter(entryType -> !BibtexEntryTypes.getType(entryType.getName()).isPresent())
.filter(entryType -> !IEEETranEntryTypes.getType(entryType.getName()).isPresent())
.collect(Collectors.toList());
} else {
return allTypes.stream().filter(entryType -> !BibLatexEntryTypes.getType(entryType.getName()).isPresent()).collect(Collectors.toList());
}
}
public static List<EntryType> getAllModifiedStandardTypes(BibDatabaseMode mode) {
if (mode == BibDatabaseMode.BIBTEX) {
return BIBTEX.getAllValues().stream().filter(type -> type instanceof CustomEntryType)
.filter(type -> BIBTEX.getStandardType(type.getName()).isPresent())
.collect(Collectors.toList());
} else {
return BIBLATEX.getAllValues().stream().filter(type -> type instanceof CustomEntryType)
.filter(type -> BIBLATEX.getStandardType(type.getName()).isPresent())
.collect(Collectors.toList());
}
}
/**
* Removes a customized entry type from the type map. If this type
* overrode a standard type, we reinstate the standard one.
*
* @param name The customized entry type to remove.
*/
public static void removeType(String name, BibDatabaseMode type) {
if (type == BibDatabaseMode.BIBLATEX) {
BIBLATEX.removeType(name);
} else {
BIBTEX.removeType(name);
}
}
public static void removeAllCustomEntryTypes() {
for(BibDatabaseMode type : BibDatabaseMode.values()) {
for(String typeName : new HashSet<>(getAllTypes(type))) {
getType(typeName, type).ifPresent(entryType -> {
if (entryType instanceof CustomEntryType) {
removeType(typeName, type);
}
});
}
}
}
/**
* Checks whether two EntryTypes are equal or not based on the equality of the type names and on the equality of
* the required and optional field lists
*
* @param type1 the first EntryType to compare
* @param type2 the second EntryType to compare
* @return returns true if the two compared entry types have the same name and equal required and optional fields
*/
public static boolean isEqualNameAndFieldBased(EntryType type1, EntryType type2) {
if (type1 == null && type2 == null) {
return true;
} else if (type1 == null || type2 == null) {
return false;
} else
return type1.getName().equals(type2.getName())
&& type1.getRequiredFields().equals(type2.getRequiredFields())
&& type1.getOptionalFields().equals(type2.getOptionalFields())
&& type1.getSecondaryOptionalFields().equals(type2.getSecondaryOptionalFields());
}
}
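// A minimal usage sketch (assumed call site, not part of this class): resolve an
// entry type for a given database mode, falling back to the default (MISC) type.
//
//   EntryType article = EntryTypes.getTypeOrDefault("article", BibDatabaseMode.BIBTEX);
//   Optional<EntryType> custom = EntryTypes.getType("mycustomtype", BibDatabaseMode.BIBLATEX);
//   custom.ifPresent(t -> System.out.println("Known type: " + t.getName()));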
package io.freefair.gradle.plugins.lombok;
import lombok.Getter;
import lombok.Setter;
import org.gradle.api.file.ConfigurableFileCollection;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.FileTree;
import org.gradle.api.internal.file.FileTreeInternal;
import org.gradle.api.internal.file.UnionFileTree;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.*;
import org.gradle.util.GUtil;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Getter
@Setter
public class Delombok extends SourceTask {
/**
* Print the name of each file as it is being delombok-ed.
*/
@Console
private final Property<Boolean> verbose = getProject().getObjects().property(Boolean.class);
/**
* Sets formatting rules.
* Use --format-help to list all available rules.
* Unset format rules are inferred by scanning the source for usages.
*/
@Input
private Map<String, String> format = new HashMap<>();
/**
* No warnings or errors will be emitted to standard error.
*/
@Console
private final Property<Boolean> quiet = getProject().getObjects().property(Boolean.class);
/**
* Sets the encoding of your source files.
* Defaults to the system default charset.
* Example: "UTF-8"
*/
@Input
@Optional
private final Property<String> encoding = getProject().getObjects().property(String.class);
/**
* Print delombok-ed code to standard output instead of saving it in target directory.
*/
@Input
@Optional
private final Property<Boolean> print = getProject().getObjects().property(Boolean.class);
/**
* Directory to save delomboked files to.
*/
@OutputDirectory
private final DirectoryProperty target = getProject().getObjects().directoryProperty();
/**
* Classpath (analogous to javac -cp option).
*/
@Classpath
@Optional
private final ConfigurableFileCollection classpath = getProject().files();
/**
* Sourcepath (analogous to javac -sourcepath option).
*/
@InputFiles
@Optional
private final ConfigurableFileCollection sourcepath = getProject().files();
/**
* Overrides the bootclasspath (analogous to javac -bootclasspath option).
*/
@Classpath
@Optional
private final ConfigurableFileCollection bootclasspath = getProject().files();
/**
* Lombok will only delombok source files.
* Without this option, non-java, non-class files are copied to the target directory.
*/
@Input
@Optional
private final Property<Boolean> nocopy = getProject().getObjects().property(Boolean.class);
@Classpath
private final ConfigurableFileCollection lombokClasspath = getProject().files();
@Internal
private final ConfigurableFileCollection input = getProject().files();
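/**
 * The source directories registered via {@code input}, expanded into a single
 * file tree. Only entries that are directories are considered; plain files in
 * {@code input} are ignored.
 */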
@InputFiles
@SkipWhenEmpty
protected FileTree getFilteredInput() {
List<FileTreeInternal> collect = input.getFiles().stream()
.filter(File::isDirectory)
.map(dir -> getProject().fileTree(dir))
.map(FileTreeInternal.class::cast)
.collect(Collectors.toList());
return new UnionFileTree("actual " + getName() + " input", collect);
}
@TaskAction
public void delombok() throws IOException {
getProject().delete(getTarget().getAsFile().get());
List<String> args = new LinkedList<>();
if (verbose.getOrElse(false)) {
args.add("--verbose");
}
getFormat().forEach((key, value) -> {
String formatValue = key + (GUtil.isTrue(value) ? ":" + value : "");
args.add("--format=" + formatValue);
});
if (quiet.getOrElse(false)) {
args.add("--quiet");
}
if (getEncoding().isPresent()) {
args.add("--encoding=" + getEncoding().get());
}
if (print.getOrElse(false)) {
args.add("--print");
}
if (target.isPresent()) {
args.add("--target=" + target.getAsFile().get().toString().replaceAll("\\\\", "\\\\\\\\"));
}
if (!classpath.isEmpty()) {
args.add("--classpath=" + getClasspath().getAsPath().replaceAll("\\\\", "\\\\\\\\"));
}
if (!sourcepath.isEmpty()) {
args.add("--sourcepath=" + getSourcepath().getAsPath().replaceAll("\\\\", "\\\\\\\\"));
}
if (!bootclasspath.isEmpty()) {
args.add("--bootclasspath=" + getBootclasspath().getAsPath().replaceAll("\\\\", "\\\\\\\\"));
}
if (nocopy.getOrElse(false)) {
args.add("--nocopy");
}
File optionsFile = new File(getTemporaryDir(), "delombok.options");
Files.write(optionsFile.toPath(), args);
getProject().javaexec(delombok -> {
delombok.setClasspath(getLombokClasspath());
delombok.setMain("lombok.launch.Main");
delombok.args("delombok");
delombok.args("@" + optionsFile);
delombok.args(input.getFiles().stream()
.filter(File::isDirectory)
.collect(Collectors.toList())
);
});
}
}
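// A minimal registration sketch (assuming a Gradle Project named "project" is in
// scope and a "lombok" configuration exists; both are assumptions, not provided
// by this task class):
//
//   project.getTasks().register("delombok", Delombok.class, task -> {
//       task.getTarget().set(project.getLayout().getBuildDirectory().dir("delombok"));
//       task.getLombokClasspath().setFrom(project.getConfigurations().getByName("lombok"));
//       task.getInput().from("src/main/java");
//   });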
package com.digi.xbee.api;
import java.io.IOException;
import java.util.Arrays;
import com.digi.xbee.api.connection.DataReader;
import com.digi.xbee.api.connection.IConnectionInterface;
import com.digi.xbee.api.connection.serial.SerialPortParameters;
import com.digi.xbee.api.exceptions.InterfaceAlreadyOpenException;
import com.digi.xbee.api.exceptions.InterfaceNotOpenException;
import com.digi.xbee.api.exceptions.InvalidOperatingModeException;
import com.digi.xbee.api.exceptions.OperationNotSupportedException;
import com.digi.xbee.api.exceptions.TimeoutException;
import com.digi.xbee.api.exceptions.XBeeException;
import com.digi.xbee.api.listeners.IPacketReceiveListener;
import com.digi.xbee.api.listeners.ISerialDataReceiveListener;
import com.digi.xbee.api.models.ATCommand;
import com.digi.xbee.api.models.ATCommandResponse;
import com.digi.xbee.api.models.OperatingMode;
import com.digi.xbee.api.models.XBee16BitAddress;
import com.digi.xbee.api.models.XBee64BitAddress;
import com.digi.xbee.api.models.XBeeMessage;
import com.digi.xbee.api.models.XBeePacketsQueue;
import com.digi.xbee.api.models.XBeeTransmitOptions;
import com.digi.xbee.api.packet.APIFrameType;
import com.digi.xbee.api.packet.XBeeAPIPacket;
import com.digi.xbee.api.packet.XBeePacket;
import com.digi.xbee.api.packet.common.ReceivePacket;
import com.digi.xbee.api.packet.common.TransmitPacket;
import com.digi.xbee.api.packet.raw.RX16Packet;
import com.digi.xbee.api.packet.raw.RX64Packet;
import com.digi.xbee.api.packet.raw.TX64Packet;
import com.digi.xbee.api.utils.HexUtils;
public class XBeeDevice extends AbstractXBeeDevice {
// Constants.
private static final int TIMEOUT_RESET = 5000;
private static final int TIMEOUT_READ_PACKET = 3000;
private static final String COMMAND_MODE_CHAR = "+";
private static final String COMMAND_MODE_OK = "OK\r";
// Variables.
private Object resetLock = new Object();
private boolean modemStatusReceived = false;
public XBeeDevice(String port, int baudRate) {
super(port, baudRate);
}
public XBeeDevice(String port, int baudRate, int dataBits, int stopBits, int parity, int flowControl) {
super(port, baudRate, dataBits, stopBits, parity, flowControl);
}
/**
* Class constructor. Instantiates a new {@code XBeeDevice} object in the
* given serial port name and parameters.
*
* @param port Serial port name where XBee device is attached to.
* @param serialPortParameters Object containing the serial port parameters.
*
* @throws NullPointerException if {@code port == null} or
* if {@code serialPortParameters == null}.
*
* @see SerialPortParameters
*/
public XBeeDevice(String port, SerialPortParameters serialPortParameters) {
super(port, serialPortParameters);
}
/**
* Class constructor. Instantiates a new {@code XBeeDevice} object with the
* given connection interface.
*
* @param connectionInterface The connection interface with the physical
* XBee device.
*
* @throws NullPointerException if {@code connectionInterface == null}.
*
* @see IConnectionInterface
*/
public XBeeDevice(IConnectionInterface connectionInterface) {
super(connectionInterface);
}
/**
* Opens the connection interface associated with this XBee device.
*
* @throws XBeeException if there is any problem opening the device.
* @throws InterfaceAlreadyOpenException if the device is already open.
*
* @see #isOpen()
* @see #close()
*/
public void open() throws XBeeException {
logger.info(toString() + "Opening the connection interface...");
// First, verify that the connection is not already open.
if (connectionInterface.isOpen())
throw new InterfaceAlreadyOpenException();
// Connect the interface.
connectionInterface.open();
logger.info(toString() + "Connection interface open.");
// Initialize the data reader.
dataReader = new DataReader(connectionInterface, operatingMode, this);
dataReader.start();
// Wait 10 milliseconds until the dataReader thread is started.
// This is because when the connection is opened immediately after
// closing it, there is sometimes a concurrency problem and the
// dataReader thread never dies.
try {
Thread.sleep(10);
} catch (InterruptedException e) {}
// Determine the operating mode of the XBee device if it is unknown.
if (operatingMode == OperatingMode.UNKNOWN)
operatingMode = determineOperatingMode();
// Check if the operating mode is a valid and supported one.
if (operatingMode == OperatingMode.UNKNOWN) {
close();
throw new InvalidOperatingModeException("Could not determine operating mode.");
} else if (operatingMode == OperatingMode.AT) {
close();
throw new InvalidOperatingModeException(operatingMode);
}
// Always initialize the device (obtain its parameters and protocol). It doesn't matter
// if it is local or remote.
initializeDevice();
}
/**
* Closes the connection interface associated with this XBee device.
*
* @see #isOpen()
* @see #open()
*/
public void close() {
// Stop XBee reader.
if (dataReader != null && dataReader.isRunning())
dataReader.stopReader();
// Close interface.
connectionInterface.close();
logger.info(toString() + "Connection interface closed.");
}
/**
* Retrieves whether or not the connection interface associated to the
* device is open.
*
* @return {@code true} if the interface is open, {@code false} otherwise.
*
* @see #open()
* @see #close()
*/
public boolean isOpen() {
if (connectionInterface != null)
return connectionInterface.isOpen();
return false;
}
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#isRemote()
*/
@Override
public boolean isRemote() {
return false;
}
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#getOperatingMode()
*/
@Override
public OperatingMode getOperatingMode() {
return super.getOperatingMode();
}
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#getNextFrameID()
*/
@Override
protected int getNextFrameID() {
return super.getNextFrameID();
}
/**
* Retrieves the configured timeout for receiving packets in synchronous
* operations.
*
* @return The current receive timeout in milliseconds.
*
* @see #setReceiveTimeout(int)
*/
public int getReceiveTimeout() {
return receiveTimeout;
}
public void setReceiveTimeout(int receiveTimeout) {
if (receiveTimeout < 0)
throw new IllegalArgumentException("Receive timeout cannot be less than 0.");
this.receiveTimeout = receiveTimeout;
}
/**
* Determines the operating mode of the XBee device.
*
* @return The operating mode of the XBee device.
*
* @throws OperationNotSupportedException if the packet is being sent from
* a remote device.
* @throws InterfaceNotOpenException if the device is not open.
*
* @see OperatingMode
*/
protected OperatingMode determineOperatingMode() throws OperationNotSupportedException {
try {
// Check if device is in API or API Escaped operating modes.
operatingMode = OperatingMode.API;
dataReader.setXBeeReaderMode(operatingMode);
ATCommandResponse response = sendATCommand(new ATCommand("AP"));
if (response.getResponse() != null && response.getResponse().length > 0) {
if (response.getResponse()[0] != OperatingMode.API.getID())
operatingMode = OperatingMode.API_ESCAPE;
logger.debug(toString() + "Using {}.", operatingMode.getName());
return operatingMode;
}
} catch (TimeoutException e) {
// Check if device is in AT operating mode.
operatingMode = OperatingMode.AT;
dataReader.setXBeeReaderMode(operatingMode);
try {
// It is necessary to wait at least 1 second to enter in command mode after
// sending any data to the device.
Thread.sleep(TIMEOUT_BEFORE_COMMAND_MODE);
// Try to enter in AT command mode, if so the module is in AT mode.
boolean success = enterATCommandMode();
if (success)
return OperatingMode.AT;
} catch (TimeoutException e1) {
logger.error(e1.getMessage(), e1);
} catch (InvalidOperatingModeException e1) {
logger.error(e1.getMessage(), e1);
} catch (InterruptedException e1) {
logger.error(e1.getMessage(), e1);
}
} catch (InvalidOperatingModeException e) {
logger.error("Invalid operating mode", e);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return OperatingMode.UNKNOWN;
}
/**
* Attempts to put the device in AT Command mode. Only valid if device is
* working in AT mode.
*
* @return {@code true} if the device entered in AT command mode,
* {@code false} otherwise.
*
* @throws InvalidOperatingModeException if the operating mode cannot be
* determined or is not supported.
* @throws TimeoutException if the configured time expires.
* @throws InterfaceNotOpenException if the device is not open.
*/
private boolean enterATCommandMode() throws InvalidOperatingModeException, TimeoutException {
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
if (operatingMode != OperatingMode.AT)
throw new InvalidOperatingModeException("Invalid mode. Command mode can be only accessed while in AT mode.");
// Enter AT command mode (send '+++'). The process waits up to 1.5 seconds for the 'OK\r' answer.
byte[] readData = new byte[256];
try {
// Send the command mode sequence.
connectionInterface.writeData(COMMAND_MODE_CHAR.getBytes());
connectionInterface.writeData(COMMAND_MODE_CHAR.getBytes());
connectionInterface.writeData(COMMAND_MODE_CHAR.getBytes());
// Wait some time to let the module generate a response.
Thread.sleep(TIMEOUT_ENTER_COMMAND_MODE);
// Read data from the device (it should answer with 'OK\r').
int readBytes = connectionInterface.readData(readData);
if (readBytes < COMMAND_MODE_OK.length())
throw new TimeoutException();
// Check if the read data is 'OK\r'.
String readString = new String(readData, 0, readBytes);
if (!readString.contains(COMMAND_MODE_OK))
return false;
// Read data was 'OK\r'.
return true;
} catch (IOException e) {
logger.error(e.getMessage(), e);
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
return false;
}
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#startListeningForPackets(com.digi.xbee.api.listeners.IPacketReceiveListener)
*/
@Override
public void startListeningForPackets(IPacketReceiveListener listener) {
super.startListeningForPackets(listener);
}
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#stopListeningForPackets(com.digi.xbee.api.listeners.IPacketReceiveListener)
*/
@Override
public void stopListeningForPackets(IPacketReceiveListener listener) {
super.stopListeningForPackets(listener);
}
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#startListeningForSerialData(com.digi.xbee.api.listeners.ISerialDataReceiveListener)
*/
@Override
public void startListeningForSerialData(ISerialDataReceiveListener listener) {
super.startListeningForSerialData(listener);
}
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#stopListeningForSerialData(com.digi.xbee.api.listeners.ISerialDataReceiveListener)
*/
@Override
public void stopListeningForSerialData(ISerialDataReceiveListener listener) {
super.stopListeningForSerialData(listener);
}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-bit address asynchronously.
*
* <p>Asynchronous transmissions do not wait for answer from the remote
* device or for transmit status packet.</p>
*
* @param address The 64-bit address of the XBee that will receive the data.
* @param data Byte array containing data to be sent.
*
* @throws XBeeException if there is any XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
*/
protected void sendSerialDataAsync(XBee64BitAddress address, byte[] data) throws XBeeException {
// Verify the parameters are not null, if they are null, throw an exception.
if (address == null)
throw new NullPointerException("Address cannot be null");
if (data == null)
throw new NullPointerException("Data cannot be null");
// Check connection.
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
// Check if device is remote.
if (isRemote())
throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
logger.info(toString() + "Sending serial data asynchronously to {} >> {}.", address, HexUtils.prettyHexString(data));
XBeePacket xbeePacket;
switch (getXBeeProtocol()) {
case RAW_802_15_4:
xbeePacket = new TX64Packet(getNextFrameID(), address, XBeeTransmitOptions.NONE, data);
break;
default:
xbeePacket = new TransmitPacket(getNextFrameID(), address, XBee16BitAddress.UNKNOWN_ADDRESS, 0, XBeeTransmitOptions.NONE, data);
}
sendAndCheckXBeePacket(xbeePacket, true);
}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-Bit/16-Bit address asynchronously.
*
* <p>Asynchronous transmissions do not wait for answer from the remote
* device or for transmit status packet.</p>
*
* @param address64Bit The 64-bit address of the XBee that will receive the
* data.
* @param address16bit The 16-bit address of the XBee that will receive the
* data. If it is unknown the
* {@code XBee16BitAddress.UNKNOWN_ADDRESS} must be used.
* @param data Byte array containing data to be sent.
*
* @throws XBeeException if a remote device is trying to send serial data or
* if there is any other XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address64Bit == null} or
* if {@code address16bit == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see XBee16BitAddress
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(XBee64BitAddress, XBee16BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
*/
protected void sendSerialDataAsync(XBee64BitAddress address64Bit, XBee16BitAddress address16bit, byte[] data) throws XBeeException {
// Verify the parameters are not null, if they are null, throw an exception.
if (address64Bit == null)
throw new NullPointerException("64-bit address cannot be null");
if (address16bit == null)
throw new NullPointerException("16-bit address cannot be null");
if (data == null)
throw new NullPointerException("Data cannot be null");
// Check connection.
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
// Check if device is remote.
if (isRemote())
throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
logger.info(toString() + "Sending serial data asynchronously to {}[{}] >> {}.",
address64Bit, address16bit, HexUtils.prettyHexString(data));
XBeePacket xbeePacket = new TransmitPacket(getNextFrameID(), address64Bit, address16bit, 0, XBeeTransmitOptions.NONE, data);
sendAndCheckXBeePacket(xbeePacket, true);
}
/**
* Sends the provided data to the provided XBee device asynchronously.
*
* <p>Asynchronous transmissions do not wait for answer from the remote
* device or for transmit status packet.</p>
*
* @param xbeeDevice The XBee device of the network that will receive the data.
* @param data Byte array containing data to be sent.
*
* @throws XBeeException if there is any XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code xbeeDevice == null} or
* if {@code data == null}.
*
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
*/
public void sendSerialDataAsync(RemoteXBeeDevice xbeeDevice, byte[] data) throws XBeeException {
if (xbeeDevice == null)
throw new NullPointerException("Remote XBee device cannot be null");
sendSerialDataAsync(xbeeDevice.get64BitAddress(), data);
}
/**
* Sends the provided data to all the XBee nodes of the network (broadcast)
* asynchronously.
*
* <p>Asynchronous transmissions do not wait for answer from the remote
* device or for transmit status packet.</p>
*
* @param data Byte array containing data to be sent.
*
* @throws XBeeException if there is any XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code data == null}.
*
* @see #sendBroadcastSerialData(byte[])
*/
public void sendBroadcastSerialDataAsync(byte[] data) throws XBeeException {
sendSerialDataAsync(XBee64BitAddress.BROADCAST_ADDRESS, data);
}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-bit address.
*
* <p>This method blocks until a success or error response arrives or the
* configured receive timeout expires.</p>
*
* <p>The receive timeout is configured using the {@code setReceiveTimeout}
* method and can be consulted with the {@code getReceiveTimeout} method.</p>
*
* <p>For non-blocking operations use the method
* {@link #sendSerialDataAsync(XBee64BitAddress, byte[])}.</p>
*
* @param address The 64-bit address of the XBee that will receive the data.
* @param data Byte array containing data to be sent.
*
* @throws TimeoutException if there is a timeout sending the serial data.
* @throws XBeeException if there is any other XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
*/
protected void sendSerialData(XBee64BitAddress address, byte[] data) throws TimeoutException, XBeeException {
// Verify the parameters are not null, if they are null, throw an exception.
if (address == null)
throw new NullPointerException("Address cannot be null");
if (data == null)
throw new NullPointerException("Data cannot be null");
// Check connection.
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
// Check if device is remote.
if (isRemote())
throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
logger.info(toString() + "Sending serial data to {} >> {}.", address, HexUtils.prettyHexString(data));
XBeePacket xbeePacket;
switch (getXBeeProtocol()) {
case RAW_802_15_4:
xbeePacket = new TX64Packet(getNextFrameID(), address, XBeeTransmitOptions.NONE, data);
break;
default:
xbeePacket = new TransmitPacket(getNextFrameID(), address, XBee16BitAddress.UNKNOWN_ADDRESS, 0, XBeeTransmitOptions.NONE, data);
}
sendAndCheckXBeePacket(xbeePacket, false);
}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-Bit/16-Bit address.
*
* <p>This method blocks until a success or error response arrives or the
* configured receive timeout expires.</p>
*
* <p>The receive timeout is configured using the {@code setReceiveTimeout}
* method and can be consulted with the {@code getReceiveTimeout} method.</p>
*
* <p>For non-blocking operations use the method
* {@link #sendSerialDataAsync(XBee64BitAddress, XBee16BitAddress, byte[])}.</p>
*
* @param address64Bit The 64-bit address of the XBee that will receive the
* data.
* @param address16bit The 16-bit address of the XBee that will receive the
* data. If it is unknown the
* {@code XBee16BitAddress.UNKNOWN_ADDRESS} must be used.
* @param data Byte array containing data to be sent.
*
* @throws TimeoutException if there is a timeout sending the serial data.
* @throws XBeeException if a remote device is trying to send serial data or
* if there is any other XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address64Bit == null} or
* if {@code address16bit == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see XBee16BitAddress
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
*/
protected void sendSerialData(XBee64BitAddress address64Bit, XBee16BitAddress address16bit, byte[] data) throws TimeoutException, XBeeException {
// Verify the parameters are not null, if they are null, throw an exception.
if (address64Bit == null)
throw new NullPointerException("64-bit address cannot be null");
if (address16bit == null)
throw new NullPointerException("16-bit address cannot be null");
if (data == null)
throw new NullPointerException("Data cannot be null");
// Check connection.
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
// Check if device is remote.
if (isRemote())
throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
logger.info(toString() + "Sending serial data to {}[{}] >> {}.",
address64Bit, address16bit, HexUtils.prettyHexString(data));
XBeePacket xbeePacket = new TransmitPacket(getNextFrameID(), address64Bit, address16bit, 0, XBeeTransmitOptions.NONE, data);
sendAndCheckXBeePacket(xbeePacket, false);
}
/**
* Sends the provided data to the given XBee device choosing the optimal send method
* depending on the protocol of the local XBee device.
*
* <p>This method blocks until a success or error response arrives or the
* configured receive timeout expires.</p>
*
* <p>The receive timeout is configured using the {@code setReceiveTimeout}
* method and can be consulted with the {@code getReceiveTimeout} method.</p>
*
* <p>For non-blocking operations use the method
* {@link #sendSerialDataAsync(AbstractXBeeDevice, byte[])}.</p>
*
* @param xbeeDevice The XBee device of the network that will receive the data.
* @param data Byte array containing data to be sent.
*
* @throws TimeoutException if there is a timeout sending the serial data.
* @throws XBeeException if there is any other XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code xbeeDevice == null} or
* if {@code data == null}.
*
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
*/
public void sendSerialData(RemoteXBeeDevice xbeeDevice, byte[] data) throws TimeoutException, XBeeException {
if (xbeeDevice == null)
throw new NullPointerException("Remote XBee device cannot be null");
switch (getXBeeProtocol()) {
case ZIGBEE:
case DIGI_POINT:
if (xbeeDevice.get16BitAddress() != null)
sendSerialData(xbeeDevice.get64BitAddress(), xbeeDevice.get16BitAddress(), data);
else
sendSerialData(xbeeDevice.get64BitAddress(), data);
break;
case RAW_802_15_4:
if (this instanceof Raw802Device) {
if (xbeeDevice.get64BitAddress() != null && !xbeeDevice.get64BitAddress().equals(XBee64BitAddress.UNKNOWN_ADDRESS))
((Raw802Device)this).sendSerialData(xbeeDevice.get64BitAddress(), data);
else
((Raw802Device)this).sendSerialData(xbeeDevice.get16BitAddress(), data);
} else
sendSerialData(xbeeDevice.get64BitAddress(), data);
break;
case DIGI_MESH:
default:
sendSerialData(xbeeDevice.get64BitAddress(), data);
}
}
/**
* Sends the provided data to all the XBee nodes of the network (broadcast).
*
* <p>This method blocks until a success or error transmit status arrives or
* the configured receive timeout expires.</p>
*
* <p>The receive timeout is configured using the {@code setReceiveTimeout}
* method and can be consulted with the {@code getReceiveTimeout} method.</p>
*
* <p>For non-blocking operations use the method
* {@link #sendBroadcastSerialDataAsync(byte[])}.</p>
*
* @param data Byte array containing data to be sent.
*
* @throws NullPointerException if {@code data == null}.
* @throws InterfaceNotOpenException if the device is not open.
* @throws TimeoutException if there is a timeout sending the serial data.
* @throws XBeeException if there is any other XBee related exception.
*
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendBroadcastSerialDataAsync(byte[])
*/
public void sendBroadcastSerialData(byte[] data) throws TimeoutException, XBeeException {
sendSerialData(XBee64BitAddress.BROADCAST_ADDRESS, data);
}
/**
* Sends the given XBee packet and registers the given packet listener
* (if not {@code null}) to wait for an answer.
*
* @param packet XBee packet to be sent.
* @param packetReceiveListener Listener for the operation, {@code null}
* not to be notified when the answer arrives.
*
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code packet == null}.
* @throws XBeeException if there is any other XBee related exception.
*
* @see XBeePacket
* @see IPacketReceiveListener
* @see #sendXBeePacket(XBeePacket)
* @see #sendXBeePacketAsync(XBeePacket)
*/
public void sendPacket(XBeePacket packet, IPacketReceiveListener packetReceiveListener) throws XBeeException {
try {
sendXBeePacket(packet, packetReceiveListener);
} catch (IOException e) {
throw new XBeeException("Error writing in the communication interface.", e);
}
}
/**
* Sends the given XBee packet asynchronously.
*
* <p>To be notified when the answer is received, use
* {@link #sendXBeePacket(XBeePacket, IPacketReceiveListener)}.</p>
*
* @param packet XBee packet to be sent asynchronously.
*
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code packet == null}.
* @throws XBeeException if there is any other XBee related exception.
*
* @see XBeePacket
* @see #sendXBeePacket(XBeePacket)
* @see #sendXBeePacket(XBeePacket, IPacketReceiveListener)
*/
public void sendPacketAsync(XBeePacket packet) throws XBeeException {
try {
super.sendXBeePacket(packet, null);
} catch (IOException e) {
throw new XBeeException("Error writing in the communication interface.", e);
}
}
/**
* Sends the given XBee packet synchronously and blocks until the response
* is received or the configured receive timeout expires.
*
* <p>The receive timeout is configured using the {@code setReceiveTimeout}
* method and can be consulted with the {@code getReceiveTimeout} method.</p>
*
* <p>Use {@link #sendXBeePacketAsync(XBeePacket)} for non-blocking
* operations.</p>
*
* @param packet XBee packet to be sent.
*
* @return An {@code XBeePacket} object containing the response of the sent
* packet or {@code null} if there is no response.
*
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code packet == null}.
* @throws TimeoutException if there is a timeout sending the XBee packet.
* @throws XBeeException if there is any other XBee related exception.
*
* @see XBeePacket
* @see #sendXBeePacket(XBeePacket, IPacketReceiveListener)
* @see #sendXBeePacketAsync(XBeePacket)
* @see #setReceiveTimeout(int)
* @see #getReceiveTimeout()
*/
public XBeePacket sendPacket(XBeePacket packet) throws TimeoutException, XBeeException {
try {
return super.sendXBeePacket(packet);
} catch (IOException e) {
throw new XBeeException("Error writing in the communication interface.", e);
}
}
/**
* Waits until a Modem Status packet with status 0x00 (hardware reset) or
* 0x01 (Watchdog timer reset) is received or the timeout is reached.
*
* @return True if the Modem Status packet is received, false otherwise.
*/
private boolean waitForModemStatusPacket() {
modemStatusReceived = false;
startListeningForPackets(modemStatusListener);
synchronized (resetLock) {
try {
resetLock.wait(TIMEOUT_RESET);
} catch (InterruptedException e) { }
}
stopListeningForPackets(modemStatusListener);
return modemStatusReceived;
}
/**
* Custom listener for Modem Status packets.
*
* <p>When a Modem Status packet is received with status 0x00 or 0x01, it
* notifies the object that was waiting for the reception.</p>
*/
private IPacketReceiveListener modemStatusListener = new IPacketReceiveListener() {
/*
* (non-Javadoc)
* @see com.digi.xbee.api.listeners.IPacketReceiveListener#packetReceived(com.digi.xbee.api.packet.XBeePacket)
*/
public void packetReceived(XBeePacket receivedPacket) {
// Discard non API packets.
if (!(receivedPacket instanceof XBeeAPIPacket))
return;
byte[] hardwareReset = new byte[] {(byte) 0x8A, 0x00};
byte[] watchdogTimerReset = new byte[] {(byte) 0x8A, 0x01};
if (Arrays.equals(receivedPacket.getPacketData(), hardwareReset) ||
Arrays.equals(receivedPacket.getPacketData(), watchdogTimerReset)) {
modemStatusReceived = true;
// Continue execution by notifying the lock object.
synchronized (resetLock) {
resetLock.notify();
}
}
}
};
/*
* (non-Javadoc)
* @see com.digi.xbee.api.AbstractXBeeDevice#reset()
*/
@Override
public void reset() throws TimeoutException, XBeeException {
// Check connection.
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
logger.info(toString() + "Resetting the local module...");
ATCommandResponse response = null;
try {
response = sendATCommand(new ATCommand("FR"));
} catch (IOException e) {
throw new XBeeException("Error writing in the communication interface.", e);
}
// Check if AT Command response is valid.
checkATCommandResponseIsValid(response);
// Wait for a Modem Status packet.
if (!waitForModemStatusPacket())
throw new TimeoutException("Timeout waiting for the Modem Status packet.");
logger.info(toString() + "Module reset successfully.");
}
/**
* Retrieves an XBee Message object received by the local XBee device and
* containing the data and the source address of the node that sent the
* data.
*
* <p>The method will try to read (receive) a data packet during the configured
* receive timeout.</p>
*
* @return An XBee Message object containing the data and the source address
* of the node that sent the data. Null if the local device didn't
* receive a data packet during the configured receive timeout.
*
* @throws InterfaceNotOpenException if the device is not open.
*
* @see XBeeMessage
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
*/
public XBeeMessage readData() {
return readDataPacket(null, TIMEOUT_READ_PACKET);
}
public XBeeMessage readData(int timeout) {
if (timeout < 0)
throw new IllegalArgumentException("Read timeout must be 0 or greater.");
return readDataPacket(null, timeout);
}
/**
* Retrieves an XBee Message object received by the local XBee device that was
* sent by the provided remote XBee device. The XBee Message contains the data
* and the source address of the node that sent the data.
*
* <p>The method will try to read (receive) a data packet from the provided
* remote device during the configured receive timeout.</p>
*
* @param remoteXBeeDevice The remote device to get a data packet from.
* @return An XBee Message object containing the data and the source address
* of the node that sent the data. Null if the local device didn't
* receive a data packet from the remote XBee device during the
* configured receive timeout.
*
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code remoteXBeeDevice == null}.
*
* @see XBeeMessage
* @see RemoteXBeeDevice
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
*/
public XBeeMessage readDataFrom(RemoteXBeeDevice remoteXBeeDevice) {
if (remoteXBeeDevice == null)
throw new NullPointerException("Remote XBee device cannot be null.");
return readDataPacket(remoteXBeeDevice, TIMEOUT_READ_PACKET);
}
public XBeeMessage readDataFrom(RemoteXBeeDevice remoteXBeeDevice, int timeout) {
if (remoteXBeeDevice == null)
throw new NullPointerException("Remote XBee device cannot be null.");
if (timeout < 0)
throw new IllegalArgumentException("Read timeout must be 0 or greater.");
return readDataPacket(remoteXBeeDevice, timeout);
}
/**
* Retrieves an XBee Message object received by the local XBee device. The
* XBee Message contains the data and the source address of the node that
* sent the data. Depending on if the provided remote XBee device is null
* or not, the method will get the first data packet read from any remote
* XBee device or from the provided one.
*
* <p>The method will try to read (receive) a data packet from the provided
* remote device or any other device during the provided timeout.</p>
*
* @param remoteXBeeDevice The remote device to get a data packet from. Null to
* read a data packet sent by any remote XBee device.
* @param timeout The time to wait for a data packet in milliseconds.
* @return An XBee Message object containing the data and the source address
* of the node that sent the data.
*
* @throws InterfaceNotOpenException if the device is not open.
*
* @see XBeeMessage
* @see RemoteXBeeDevice
*/
private XBeeMessage readDataPacket(RemoteXBeeDevice remoteXBeeDevice, int timeout) {
// Check connection.
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
XBeePacketsQueue xbeePacketsQueue = dataReader.getXBeePacketsQueue();
XBeePacket xbeePacket = null;
if (remoteXBeeDevice != null)
xbeePacket = xbeePacketsQueue.getFirstDataPacketFrom(remoteXBeeDevice, timeout);
else
xbeePacket = xbeePacketsQueue.getFirstDataPacket(timeout);
if (xbeePacket == null)
return null;
// Obtain the source address and data from the packet.
RemoteXBeeDevice remoteDevice;
byte[] data;
APIFrameType packetType = ((XBeeAPIPacket)xbeePacket).getFrameType();
switch (packetType) {
case RECEIVE_PACKET:
remoteDevice = new RemoteXBeeDevice(this, ((ReceivePacket)xbeePacket).get64bitSourceAddress());
data = ((ReceivePacket)xbeePacket).getRFData();
break;
case RX_16:
remoteDevice = new RemoteRaw802Device(this, ((RX16Packet)xbeePacket).get16bitSourceAddress());
data = ((RX16Packet)xbeePacket).getRFData();
break;
case RX_64:
remoteDevice = new RemoteXBeeDevice(this, ((RX64Packet)xbeePacket).get64bitSourceAddress());
data = ((RX64Packet)xbeePacket).getRFData();
break;
default:
return null;
}
// TODO: The remote XBee device should be retrieved from the XBee Network (contained
// in the xbeeDevice variable). If the network does not contain such remote device,
// then it should be instantiated and added there.
// Create and return the XBee message.
return new XBeeMessage(remoteDevice, data, ((XBeeAPIPacket)xbeePacket).isBroadcast());
}
}
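// A minimal usage sketch (the serial port name, baud rate and payload below are
// illustrative assumptions only):
//
//   XBeeDevice device = new XBeeDevice("/dev/ttyUSB0", 9600);
//   device.open();
//   try {
//       device.sendBroadcastSerialData("Hello XBee network".getBytes());
//       XBeeMessage message = device.readData(5000);
//       if (message != null)
//           System.out.println("Received: " + new String(message.getData()));
//   } finally {
//       device.close();
//   }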
package nl.mpi.kinnate.svg;
import nl.mpi.kinnate.kindata.EntityData;
import java.util.ArrayList;
import nl.mpi.kinnate.kindata.DataTypes;
import nl.mpi.kinnate.kindata.EntityRelation;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.Text;
import org.w3c.dom.svg.SVGDocument;
public class RelationSvg {
private void addUseNode(SVGDocument doc, String svgNameSpace, Element targetGroup, String targetDefId) {
String useNodeId = targetDefId + "use";
Node useNodeOld = doc.getElementById(useNodeId);
if (useNodeOld != null) {
useNodeOld.getParentNode().removeChild(useNodeOld);
}
Element useNode = doc.createElementNS(svgNameSpace, "use");
useNode.setAttributeNS("http://www.w3.org/1999/xlink", "xlink:href", "#" + targetDefId); // the xlink: of "xlink:href" is required for some svg viewers to render correctly
// useNode.setAttribute("href", "#" + lineIdString);
useNode.setAttribute("id", useNodeId);
targetGroup.appendChild(useNode);
}
private void updateLabelNode(SVGDocument doc, String svgNameSpace, String lineIdString, String targetRelationId) {
// remove and re-add the text-on-path label so that it updates with the new path
String labelNodeId = targetRelationId + "label";
Node useNodeOld = doc.getElementById(labelNodeId);
if (useNodeOld != null) {
Node textParentNode = useNodeOld.getParentNode();
String labelText = useNodeOld.getTextContent();
useNodeOld.getParentNode().removeChild(useNodeOld);
Element textPath = doc.createElementNS(svgNameSpace, "textPath");
textPath.setAttributeNS("http://www.w3.org/1999/xlink", "xlink:href", "#" + lineIdString); // the xlink: of "xlink:href" is required for some svg viewers to render correctly
textPath.setAttribute("startOffset", "50%");
textPath.setAttribute("id", labelNodeId);
Text textNode = doc.createTextNode(labelText);
textPath.appendChild(textNode);
textParentNode.appendChild(textPath);
}
}
private void setPolylinePointsAttribute(Element targetNode, DataTypes.RelationType relationType, float vSpacing, float egoX, float egoY, float alterX, float alterY) {
float midY = (egoY + alterY) / 2;
if (alterY == egoY) {
// make sure that union lines go below the entities and sibling lines go above
if (relationType == DataTypes.RelationType.sibling) {
midY = alterY - vSpacing / 2;
} else if (relationType == DataTypes.RelationType.union) {
midY = alterY + vSpacing / 2;
}
}
targetNode.setAttribute("points",
egoX + "," + egoY + " "
+ egoX + "," + midY + " "
+ alterX + "," + midY + " "
+ alterX + "," + alterY);
}
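/**
 * Sets the "d" attribute of the target path element to a cubic bezier curve
 * between the ego and alter positions, choosing control points (and, if needed,
 * swapping the end points) so that the curve and its text-on-path label stay
 * readable.
 */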
private void setPathPointsAttribute(Element targetNode, DataTypes.RelationType relationType, DataTypes.RelationLineType relationLineType, float hSpacing, float vSpacing, float egoX, float egoY, float alterX, float alterY) {
float fromBezX;
float fromBezY;
float toBezX;
float toBezY;
if ((egoX > alterX && egoY < alterY) || (egoX > alterX && egoY > alterY)) {
// prevent the label on the line from rendering upside down
float tempX = alterX;
float tempY = alterY;
alterX = egoX;
alterY = egoY;
egoX = tempX;
egoY = tempY;
}
if (relationLineType == DataTypes.RelationLineType.verticalCurve) {
fromBezX = egoX;
fromBezY = alterY;
toBezX = alterX;
toBezY = egoY;
// todo: update the bezier positions in a way similar to the following else statement
if (1 / (egoY - alterY) < vSpacing) {
fromBezX = egoX;
fromBezY = alterY - vSpacing / 2;
toBezX = alterX;
toBezY = egoY - vSpacing / 2;
}
} else {
fromBezX = alterX;
fromBezY = egoY;
toBezX = egoX;
toBezY = alterY;
// todo: if the nodes are almost aligned then this test fails; it should instead check for proximity, not equality
// System.out.println(1 / (egoX - alterX));
// if (1 / (egoX - alterX) < vSpacing) {
if (egoX > alterX) {
if (egoX - alterX < hSpacing / 4) {
fromBezX = egoX - hSpacing / 4;
toBezX = alterX - hSpacing / 4;
} else {
fromBezX = (egoX - alterX) / 2 + alterX;
toBezX = (egoX - alterX) / 2 + alterX;
}
} else {
if (alterX - egoX < hSpacing / 4) {
fromBezX = egoX + hSpacing / 4;
toBezX = alterX + hSpacing / 4;
} else {
fromBezX = (alterX - egoX) / 2 + egoX;
toBezX = (alterX - egoX) / 2 + egoX;
}
}
}
targetNode.setAttribute("d", "M " + egoX + "," + egoY + " C " + fromBezX + "," + fromBezY + " " + toBezX + "," + toBezY + " " + alterX + "," + alterY);
}
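// creates a relation group node containing a defs entry for the line or polyline, a use node that
// references it and an optional text-on-path label, then appends the group to relationGroupNode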
protected void insertRelation(GraphPanel graphPanel, String svgNameSpace, Element relationGroupNode, EntityData currentNode, EntityRelation graphLinkNode, int hSpacing, int vSpacing) {
int relationLineIndex = relationGroupNode.getChildNodes().getLength();
Element groupNode = graphPanel.doc.createElementNS(svgNameSpace, "g");
groupNode.setAttribute("id", "relation" + relationLineIndex);
Element defsNode = graphPanel.doc.createElementNS(svgNameSpace, "defs");
String lineIdString = "relation" + relationLineIndex + "Line";
new DataStoreSvg().storeRelationParameters(graphPanel.doc, groupNode, graphLinkNode.relationType, graphLinkNode.relationLineType, currentNode.getUniqueIdentifier(), graphLinkNode.getAlterNode().getUniqueIdentifier());
// set the line end points
Float[] egoSymbolPoint = graphPanel.entitySvg.getEntityLocation(currentNode.getUniqueIdentifier());
Float[] alterSymbolPoint = graphPanel.entitySvg.getEntityLocation(graphLinkNode.getAlterNode().getUniqueIdentifier());
float fromX = (egoSymbolPoint[0]); // * hSpacing + hSpacing
float fromY = (egoSymbolPoint[1]); // * vSpacing + vSpacing
float toX = (alterSymbolPoint[0]); // * hSpacing + hSpacing
float toY = (alterSymbolPoint[1]); // * vSpacing + vSpacing
switch (graphLinkNode.relationLineType) {
case kinTermLine:
// this case falls through to the following case
case verticalCurve:
// todo: groupNode.setAttribute("id", );
// System.out.println("link: " + graphLinkNode.linkedNode.xPos + ":" + graphLinkNode.linkedNode.yPos);
//// <line id="_15" transform="translate(146.0,112.0)" x1="0" y1="0" x2="100" y2="100" stroke="black" stroke-width="1"/>
// Element linkLine = doc.createElementNS(svgNS, "line");
// linkLine.setAttribute("x1", Integer.toString(currentNode.xPos * hSpacing + hSpacing));
// linkLine.setAttribute("y1", Integer.toString(currentNode.yPos * vSpacing + vSpacing));
// linkLine.setAttribute("x2", Integer.toString(graphLinkNode.linkedNode.xPos * hSpacing + hSpacing));
// linkLine.setAttribute("y2", Integer.toString(graphLinkNode.linkedNode.yPos * vSpacing + vSpacing));
// linkLine.setAttribute("stroke", "black");
// linkLine.setAttribute("stroke-width", "1");
// // Attach the rectangle to the root 'svg' element.
// svgRoot.appendChild(linkLine);
//System.out.println("link: " + graphLinkNode.getAlterNode().xPos + ":" + graphLinkNode.getAlterNode().yPos);
// <line id="_15" transform="translate(146.0,112.0)" x1="0" y1="0" x2="100" y2="100" stroke="black" stroke-width="1"/>
Element linkLine = graphPanel.doc.createElementNS(svgNameSpace, "path");
setPathPointsAttribute(linkLine, graphLinkNode.relationType, graphLinkNode.relationLineType, hSpacing, vSpacing, fromX, fromY, toX, toY);
// linkLine.setAttribute("x1", );
// linkLine.setAttribute("y1", );
// linkLine.setAttribute("x2", );
linkLine.setAttribute("fill", "none");
if (graphLinkNode.lineColour != null) {
linkLine.setAttribute("stroke", graphLinkNode.lineColour);
} else {
linkLine.setAttribute("stroke", "blue");
}
linkLine.setAttribute("stroke-width", Integer.toString(EntitySvg.strokeWidth));
linkLine.setAttribute("id", lineIdString);
defsNode.appendChild(linkLine);
break;
case sanguineLine:
// Element squareLinkLine = doc.createElement("line");
// squareLinkLine.setAttribute("x1", Integer.toString(currentNode.xPos * hSpacing + hSpacing));
// squareLinkLine.setAttribute("y1", Integer.toString(currentNode.yPos * vSpacing + vSpacing));
// squareLinkLine.setAttribute("x2", Integer.toString(graphLinkNode.linkedNode.xPos * hSpacing + hSpacing));
// squareLinkLine.setAttribute("y2", Integer.toString(graphLinkNode.linkedNode.yPos * vSpacing + vSpacing));
// squareLinkLine.setAttribute("stroke", "grey");
// squareLinkLine.setAttribute("stroke-width", Integer.toString(strokeWidth));
Element squareLinkLine = graphPanel.doc.createElementNS(svgNameSpace, "polyline");
setPolylinePointsAttribute(squareLinkLine, graphLinkNode.relationType, vSpacing, fromX, fromY, toX, toY);
squareLinkLine.setAttribute("fill", "none");
squareLinkLine.setAttribute("stroke", "grey");
squareLinkLine.setAttribute("stroke-width", Integer.toString(EntitySvg.strokeWidth));
squareLinkLine.setAttribute("id", lineIdString);
defsNode.appendChild(squareLinkLine);
break;
}
groupNode.appendChild(defsNode);
// insert the node that uses the above definition
addUseNode(graphPanel.doc, svgNameSpace, groupNode, lineIdString);
// add the relation label
if (graphLinkNode.labelString != null) {
Element labelText = graphPanel.doc.createElementNS(svgNameSpace, "text");
labelText.setAttribute("text-anchor", "middle");
// labelText.setAttribute("x", Integer.toString(labelX));
// labelText.setAttribute("y", Integer.toString(labelY));
if (graphLinkNode.lineColour != null) {
labelText.setAttribute("fill", graphLinkNode.lineColour);
} else {
labelText.setAttribute("fill", "blue");
}
labelText.setAttribute("stroke-width", "0");
labelText.setAttribute("font-size", "14");
// labelText.setAttribute("transform", "rotate(45)");
Element textPath = graphPanel.doc.createElementNS(svgNameSpace, "textPath");
textPath.setAttributeNS("http://www.w3.org/1999/xlink", "xlink:href", "#" + lineIdString); // the xlink: prefix on "xlink:href" is required for some SVG viewers to render correctly
textPath.setAttribute("startOffset", "50%");
textPath.setAttribute("id", "relation" + relationLineIndex + "label");
Text textNode = graphPanel.doc.createTextNode(graphLinkNode.labelString);
textPath.appendChild(textNode);
labelText.appendChild(textPath);
groupNode.appendChild(labelText);
}
relationGroupNode.appendChild(groupNode);
}
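// recalculates the line geometry and label of every relation group that involves one of the dragged entities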
public void updateRelationLines(GraphPanel graphPanel, ArrayList<String> draggedNodeIds, String svgNameSpace, int hSpacing, int vSpacing) {
// todo: if an entity is above its ancestor then this must be corrected, if the ancestor data is stored in the relationLine attributes then this would be a good place to correct this
Element relationGroup = graphPanel.doc.getElementById("RelationGroup");
for (Node currentChild = relationGroup.getFirstChild(); currentChild != null; currentChild = currentChild.getNextSibling()) {
if ("g".equals(currentChild.getLocalName())) {
Node idAttrubite = currentChild.getAttributes().getNamedItem("id");
//System.out.println("idAttrubite: " + idAttrubite.getNodeValue());
DataStoreSvg.GraphRelationData graphRelationData = new DataStoreSvg().getEntitiesForRelations(currentChild);
if (graphRelationData != null) {
if (draggedNodeIds.contains(graphRelationData.egoNodeId) || draggedNodeIds.contains(graphRelationData.alterNodeId)) {
// todo: update the relation lines
//System.out.println("needs update on: " + idAttrubite.getNodeValue());
String lineElementId = idAttrubite.getNodeValue() + "Line";
Element relationLineElement = graphPanel.doc.getElementById(lineElementId);
//System.out.println("type: " + relationLineElement.getLocalName());
Float[] egoSymbolPoint = graphPanel.entitySvg.getEntityLocation(graphRelationData.egoNodeId);
Float[] alterSymbolPoint = graphPanel.entitySvg.getEntityLocation(graphRelationData.alterNodeId);
float egoX = egoSymbolPoint[0];
float egoY = egoSymbolPoint[1];
float alterX = alterSymbolPoint[0];
float alterY = alterSymbolPoint[1];
// SVGRect egoSymbolRect = new EntitySvg().getEntityLocation(doc, graphRelationData.egoNodeId);
// SVGRect alterSymbolRect = new EntitySvg().getEntityLocation(doc, graphRelationData.alterNodeId);
// float egoX = egoSymbolRect.getX() + egoSymbolRect.getWidth() / 2;
// float egoY = egoSymbolRect.getY() + egoSymbolRect.getHeight() / 2;
// float alterX = alterSymbolRect.getX() + alterSymbolRect.getWidth() / 2;
// float alterY = alterSymbolRect.getY() + alterSymbolRect.getHeight() / 2;
if ("polyline".equals(relationLineElement.getLocalName())) {
setPolylinePointsAttribute(relationLineElement, graphRelationData.relationType, vSpacing, egoX, egoY, alterX, alterY);
}
if ("path".equals(relationLineElement.getLocalName())) {
setPathPointsAttribute(relationLineElement, graphRelationData.relationType, graphRelationData.relationLineType, hSpacing, vSpacing, egoX, egoY, alterX, alterY);
}
addUseNode(graphPanel.doc, svgNameSpace, (Element) currentChild, lineElementId);
updateLabelNode(graphPanel.doc, svgNameSpace, lineElementId, idAttrubite.getNodeValue());
}
}
}
}
}
// new RelationSvg().addTestNode(doc, (Element) relationLineElement.getParentNode().getParentNode(), svgNameSpace);
// public void addTestNode(SVGDocument doc, Element addTarget, String svgNameSpace) {
// Element squareNode = doc.createElementNS(svgNameSpace, "rect");
// squareNode.setAttribute("x", "100");
// squareNode.setAttribute("y", "100");
// squareNode.setAttribute("width", "20");
// squareNode.setAttribute("height", "20");
// squareNode.setAttribute("fill", "green");
// squareNode.setAttribute("stroke", "black");
// squareNode.setAttribute("stroke-width", "2");
// addTarget.appendChild(squareNode);
}
|
package de.plushnikov.lombok;
import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.collect.Collections2;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.pom.PomNamedTarget;
import com.intellij.psi.*;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.LocalTimeCounter;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.regex.Pattern;
/**
* Base test case for testing that the Lombok plugin parses the Lombok annotations correctly.
*/
public abstract class LombokParsingTestCase extends LightCodeInsightFixtureTestCase {
private static final Set<String> modifiers = new HashSet<String>(Arrays.asList(
PsiModifier.PUBLIC, PsiModifier.PACKAGE_LOCAL, PsiModifier.PROTECTED, PsiModifier.PRIVATE, PsiModifier.FINAL, PsiModifier.STATIC,
PsiModifier.ABSTRACT, PsiModifier.SYNCHRONIZED, PsiModifier.TRANSIENT, PsiModifier.VOLATILE, PsiModifier.NATIVE));
public static final String PACKAGE_LOMBOK = "package lombok;\n";
public static final String ANNOTATION_TYPE = "@java.lang.annotation.Target(java.lang.annotation.ElementType.TYPE)\n" +
"@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE)\n";
private static final String LOMBOK_SRC_PATH = "./lombok-api/target/generated-sources";
@Override
public void setUp() throws Exception {
super.setUp();
addLombokClassesToFixture();
}
private void addLombokClassesToFixture() {
// add java.lang.Object to the 'classpath'
myFixture.addClass("package java.lang; public class Object {}");
// add some classes used by the tests to the 'classpath'
myFixture.addClass("package java.util; public class Timer {}");
List<File> filesByMask = FileUtil.findFilesByMask(Pattern.compile(".*\\.java"), new File(LOMBOK_SRC_PATH));
for (File javaFile : filesByMask) {
myFixture.configureByFile(javaFile.getPath().replace("\\", "/"));
}
}
public void doTest() throws IOException {
doTest(getTestName(true).replace('$', '/') + ".java");
}
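/**
* Loads the Lombok annotated source (from /before/) and the delomboked source (from /after/)
* for the given file name and compares the resulting classes, fields and methods.
*/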
protected void doTest(String fileName) throws IOException {
// final PsiFile psiDelombokFile = myFixture.configureByText(StdFileTypes.JAVA, loadDeLombokFile(fileName));//createPseudoPhysicalFile(getProject(), fileName, loadDeLombokFile(fileName));
// final PsiFile psiLombokFile = myFixture.configureByText(StdFileTypes.JAVA, loadLombokFile(fileName));//createPseudoPhysicalFile(getProject(), fileName, loadLombokFile(fileName));
final PsiFile psiDelombokFile = createPseudoPhysicalFile(getProject(), fileName, loadDeLombokFile(fileName));
final PsiFile psiLombokFile = createPseudoPhysicalFile(getProject(), fileName, loadLombokFile(fileName));
if (!(psiLombokFile instanceof PsiJavaFile) || !(psiDelombokFile instanceof PsiJavaFile)) {
fail("The test file type is not supported");
}
final PsiJavaFile intellij = (PsiJavaFile) psiLombokFile;
final PsiJavaFile theirs = (PsiJavaFile) psiDelombokFile;
PsiClass[] intellijClasses = intellij.getClasses();
PsiClass[] theirsClasses = theirs.getClasses();
assertEquals("Class counts are different", theirsClasses.length, intellijClasses.length);
for (PsiClass theirsClass : theirsClasses) {
boolean compared = false;
for (PsiClass intellijClass : intellijClasses) {
if (theirsClass.getName().equals(intellijClass.getName())) {
compareFields(intellijClass, theirsClass);
compareMethods(intellijClass, theirsClass);
compared = true;
}
}
assertTrue("Classnames are not equal, class (" + theirsClass.getName() + ") not found", compared);
}
}
private void compareFields(PsiClass intellij, PsiClass theirs) {
PsiField[] intellijFields = intellij.getFields();
PsiField[] theirsFields = theirs.getFields();
assertEquals("Field counts are different for Class " + intellij.getName(), theirsFields.length, intellijFields.length);
for (PsiField theirsField : theirsFields) {
boolean compared = false;
final PsiModifierList theirsFieldModifierList = theirsField.getModifierList();
for (PsiField intellijField : intellijFields) {
if (theirsField.getName().equals(intellijField.getName())) {
final PsiModifierList intellijFieldModifierList = intellijField.getModifierList();
compareModifiers(intellijFieldModifierList, theirsFieldModifierList);
compareType(intellijField.getType(), theirsField.getType(), theirsField);
compared = true;
}
}
assertTrue("Fieldnames are not equal, Field (" + theirsField.getName() + ") not found", compared);
}
}
private void compareType(PsiType intellij, PsiType theirs, PomNamedTarget whereTarget) {
if (null != intellij && null != theirs) {
final String theirsCanonicalText = stripJavaLang(theirs.getCanonicalText());
final String intellijCanonicalText = stripJavaLang(intellij.getCanonicalText());
assertEquals("Types are not equal for: " + whereTarget.getName(), theirsCanonicalText, intellijCanonicalText);
}
}
private String stripJavaLang(String theirsCanonicalText) {
final String prefix = "java.lang.";
if (theirsCanonicalText.startsWith(prefix)) {
theirsCanonicalText = theirsCanonicalText.substring(prefix.length());
}
return theirsCanonicalText;
}
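/**
* Compares the presence of each basic modifier on both modifier lists; Lombok annotations are
* filtered out of the IntelliJ side before the (currently disabled) annotation count check.
*/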
private void compareModifiers(PsiModifierList intellij, PsiModifierList theirs) {
assertNotNull(intellij);
assertNotNull(theirs);
for (String modifier : modifiers) {
assertEquals(modifier + " Modifier is not equal; ", theirs.hasModifierProperty(modifier), intellij.hasModifierProperty(modifier));
}
Collection<String> intellijAnnotations = Lists.newArrayList(Collections2.transform(Arrays.asList(intellij.getAnnotations()), new QualifiedNameFunction()));
Collection<String> theirsAnnotations = Lists.newArrayList(Collections2.transform(Arrays.asList(theirs.getAnnotations()), new QualifiedNameFunction()));
Iterables.removeIf(intellijAnnotations, Predicates.containsPattern("lombok.*"));
//TODO assertEquals("Annotationcounts are different ", theirsAnnotations.size(), intellijAnnotations.size());
}
private void compareMethods(PsiClass intellij, PsiClass theirs) {
PsiMethod[] intellijMethods = intellij.getMethods();
PsiMethod[] theirsMethods = theirs.getMethods();
assertEquals("Methodscounts are different for Class " + intellij.getName(), theirsMethods.length, intellijMethods.length);
for (PsiMethod theirsMethod : theirsMethods) {
boolean compared = false;
final PsiModifierList theirsFieldModifierList = theirsMethod.getModifierList();
for (PsiMethod intellijMethod : intellijMethods) {
if (theirsMethod.getName().equals(intellijMethod.getName()) &&
theirsMethod.getParameterList().getParametersCount() == intellijMethod.getParameterList().getParametersCount()) {
PsiModifierList intellijFieldModifierList = intellijMethod.getModifierList();
compareModifiers(intellijFieldModifierList, theirsFieldModifierList);
compareType(intellijMethod.getReturnType(), theirsMethod.getReturnType(), theirsMethod);
compareParams(intellijMethod.getParameterList(), theirsMethod.getParameterList());
compared = true;
}
}
assertTrue("Methodnames are not equal, Method (" + theirsMethod.getName() + ") not found : " + intellij.getName(), compared);
}
}
private void compareParams(PsiParameterList intellij, PsiParameterList theirs) {
assertEquals(theirs.getParametersCount(), intellij.getParametersCount());
PsiParameter[] intellijParameters = intellij.getParameters();
PsiParameter[] theirsParameters = theirs.getParameters();
for (int i = 0; i < intellijParameters.length; i++) {
PsiParameter intellijParameter = intellijParameters[i];
PsiParameter theirsParameter = theirsParameters[i];
compareType(intellijParameter.getType(), theirsParameter.getType(), theirsParameter);
}
}
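/**
* Creates an in-memory PSI file from the given text, using the file type derived from the file name.
*/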
protected PsiFile createPseudoPhysicalFile(final Project project, final String fileName, final String text) throws IncorrectOperationException {
return PsiFileFactory.getInstance(project).createFileFromText(
fileName,
FileTypeManager.getInstance().getFileTypeByFileName(fileName),
text,
LocalTimeCounter.currentTime(),
true);
}
protected String loadLombokFile(String fileName) throws IOException {
return loadFileContent("/before/", fileName);
}
protected String loadDeLombokFile(String fileName) throws IOException {
return loadFileContent("/after/", fileName);
}
protected String getLombokTestDataDirectory() {
return "./lombok-plugin/src/test/data";
}
@Override
protected String getTestDataPath() {
return "";
}
private String loadFileContent(String subDir, String fileName) throws IOException {
final File fromFile = new File(getLombokTestDataDirectory(), subDir);
String text = FileUtil.loadFile(new File(fromFile, fileName), CharsetToolkit.UTF8).trim();
text = StringUtil.convertLineSeparators(text);
return text;
}
private static class QualifiedNameFunction implements Function<PsiAnnotation, String> {
@Override
public String apply(PsiAnnotation psiAnnotation) {
return psiAnnotation.getQualifiedName();
}
}
}
|
package org.jasig.portal.groups;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Random;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jasig.portal.EntityIdentifier;
import org.jasig.portal.EntityTypes;
import org.jasig.portal.IBasicEntity;
import org.jasig.portal.concurrency.CachingException;
import org.jasig.portal.concurrency.caching.ReferenceEntityCachingService;
import org.jasig.portal.services.GroupService;
public class GroupsTester extends TestCase {
private static Class GROUP_CLASS;
private static Class IPERSON_CLASS;
private static Class TEST_ENTITY_CLASS;
private static String CR = "\n";
private IEntity[] testEntities;
private String[] testEntityKeys;
private int numTestEntities = 0;
private Random random = new Random();
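/**
* Minimal IBasicEntity implementation used as the member type for the test groups.
*/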
private class TestEntity implements IBasicEntity
{
private EntityIdentifier entityIdentifier;
private TestEntity(String entityKey) {
super();
entityIdentifier = new EntityIdentifier(entityKey, this.getClass());
}
public EntityIdentifier getEntityIdentifier() {
return entityIdentifier;
}
public boolean equals(Object o) {
if ( o == null )
return false;
if ( ! (o instanceof IBasicEntity) )
return false;
IBasicEntity ent = (IBasicEntity) o;
return ent.getEntityIdentifier().equals(getEntityIdentifier());
}
public String toString() {
return "TestEntity(" + getEntityIdentifier().getKey() + ")";
}
}
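/**
* Runnable that repeatedly iterates over a group's members, entities and containing groups;
* used by testConcurrentAccess() to exercise concurrent reads while the group is being updated.
*/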
private class GroupsReadTester implements Runnable
{
protected IEntityGroup group = null;
protected int numTests = 0;
protected String testerID = null;
protected String printID = null;
protected GroupsReadTester(String id, IEntityGroup g, int tests)
{
super();
group = g;
numTests = tests;
testerID = id;
}
public void run() {
printID = "Tester " + testerID;
print(printID + " starting.");
for (int i=0; i<numTests; i++)
{
// print(printID + " running test " + i);
try { runTest(); }
catch (GroupsException ge) {}
int sleepMillis = random.nextInt(20);
// print(printID + " will now sleep for " + sleepMillis + " ms.");
try { Thread.sleep(sleepMillis); }
catch (Exception ex) {}
}
}
private void runTest() throws GroupsException {
int numMembers = 0, numEntities = 0, numContainingGroups = 0;
Iterator itr = null;
for (itr = group.getMembers(); itr.hasNext(); itr.next() )
{ numMembers++; }
for (itr = group.getEntities(); itr.hasNext(); itr.next() )
{ numEntities++; }
for (itr = group.getContainingGroups(); itr.hasNext(); itr.next() )
{ numContainingGroups++; }
// print (printID + " members: " + numMembers + " entities: " + numEntities + " containing groups: " + numContainingGroups);
}
}
/**
* GroupsTester constructor.
*/
public GroupsTester(String name) {
super(name);
}
protected void addTestEntityType()
{
try
{
org.jasig.portal.EntityTypes.singleton().
addEntityType(TEST_ENTITY_CLASS, "Test Entity Type");
}
catch (Exception ex) { print("GroupsTester.addTestEntityType(): " + ex.getMessage());}
}
private void clearGroupCache() throws CachingException
{
((ReferenceEntityCachingService) ReferenceEntityCachingService.singleton())
.getCache(GROUP_CLASS).clearCache();
}
protected void deleteTestEntityType()
{
try
{
org.jasig.portal.EntityTypes.singleton().deleteEntityType(TEST_ENTITY_CLASS);
}
catch (Exception ex) { print("EntityCacheTester.deleteTestEntityType(): " + ex.getMessage());}
}
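/**
* Deletes the test groups and their membership rows directly from the UP_GROUP and UP_GROUP_MEMBERSHIP tables.
*/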
protected void deleteTestGroups()
{
String sql = " FROM UP_GROUP WHERE ENTITY_TYPE_ID = " +
EntityTypes.getEntityTypeID(TEST_ENTITY_CLASS);
String selectSql = "SELECT GROUP_ID" + sql;
String deleteSql = "DELETE" + sql;
String deleteMemberSql = "DELETE FROM UP_GROUP_MEMBERSHIP WHERE GROUP_ID = ";
Connection conn = null;
try
{
conn = org.jasig.portal.RDBMServices.getConnection();
Statement selectStmnt = conn.createStatement();
ResultSet rs = selectStmnt.executeQuery( selectSql );
while ( rs.next() )
{
String key = rs.getString(1);
Statement deleteMemberStmnt = conn.createStatement();
int memberRC = deleteMemberStmnt.executeUpdate( deleteMemberSql + key );
print("Test member rows deleted: " + memberRC);
}
Statement deleteGroupStmnt = conn.createStatement();
int rc = deleteGroupStmnt.executeUpdate( deleteSql );
print("Test group rows deleted: " + rc);
}
catch (Exception ex) { print("GroupsTester.deleteTestGroups(): " + ex.getMessage());}
finally { org.jasig.portal.RDBMServices.releaseConnection(conn); }
}
/**
* @return org.jasig.portal.groups.IEntityGroup
*/
private IEntityGroup findGroup(String key) throws GroupsException
{
IEntityGroup group = GroupService.findGroup(key);
return group;
}
/**
* @return org.jasig.portal.groups.ILockableEntityGroup
*/
private ILockableEntityGroup findLockableGroup(String key) throws GroupsException
{
String owner = "de3";
ILockableEntityGroup group = GroupService.findLockableGroup(key, owner);
return group;
}
/**
* @return java.util.Collection of all (direct and indirect) members of the group member
*/
private Collection getAllGroupMembers(IGroupMember gm) throws GroupsException
{
Collection list = new ArrayList();
for( Iterator itr=gm.getAllMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
return list;
}
/**
* @return org.jasig.portal.groups.IEntityStore (the RDBMEntityStore singleton)
*/
private IEntityStore getEntityStore() throws GroupsException
{
return RDBMEntityStore.singleton();
}
/**
* @return java.util.Collection of the direct members of the group member
*/
private Collection getGroupMembers(IGroupMember gm) throws GroupsException
{
Collection list = new ArrayList();
for( Iterator itr=gm.getMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
return list;
}
/**
* @return RDBMEntityGroupStore
*/
private RDBMEntityGroupStore getGroupStore() throws GroupsException
{
return RDBMEntityGroupStore.singleton();
}
/**
* @return org.jasig.portal.groups.IEntity
*/
private IEntity getNewEntity(String key) throws GroupsException
{
return GroupService.getEntity(key, TEST_ENTITY_CLASS);
}
/**
* @return org.jasig.portal.groups.IEntityGroup
*/
private IEntityGroup getNewGroup() throws GroupsException
{
IEntityGroup group = GroupService.newGroup(TEST_ENTITY_CLASS);
group.setName("name_" + group.getKey());
group.setCreatorID("de3");
return group;
}
/**
* @return java.lang.String a random string of upper-case letters
* @param r java.util.Random
* @param length int
*/
private String getRandomString(java.util.Random r, int length) {
char[] chars = new char[length];
for(int i=0; i<length; i++)
{
int diff = ( r.nextInt(25) );
int charValue = (int)'A' + diff;
chars[i] = (char) charValue;
}
return new String(chars);
}
/**
* @return org.jasig.portal.services.GroupService
*/
private GroupService getService() throws GroupsException
{
return GroupService.instance();
}
/**
* Starts the application.
* @param args an array of command-line arguments
*/
public static void main(java.lang.String[] args) throws Exception
{
String[] mainArgs = {"org.jasig.portal.groups.GroupsTester"};
print("START TESTING GROUPS");
printBlankLine();
junit.swingui.TestRunner.main(mainArgs);
printBlankLine();
print("END TESTING GROUPS");
}
/**
* @param entities IEntity[] the entities to print
*/
private static void print (IEntity[] entities)
{
for ( int i=0; i<entities.length; i++ )
{
print("(" + (i+1) + ") " + entities[i]);
}
print(" Total: " + entities.length);
}
/**
* @param msg java.lang.String
*/
private static void print(String msg)
{
java.sql.Timestamp ts = new java.sql.Timestamp(System.currentTimeMillis());
System.out.println(ts + " : " + msg);
}
/**
* Prints a blank line to System.out.
*/
private static void printBlankLine()
{
System.out.println("");
}
protected void setUp()
{
try {
if ( GROUP_CLASS == null )
{ GROUP_CLASS = Class.forName("org.jasig.portal.groups.IEntityGroup"); }
if ( IPERSON_CLASS == null )
{ IPERSON_CLASS = Class.forName("org.jasig.portal.security.IPerson"); }
if ( TEST_ENTITY_CLASS == null )
{ TEST_ENTITY_CLASS = TestEntity.class; }
addTestEntityType();
numTestEntities = 100;
// Entities and their keys:
testEntityKeys = new String[numTestEntities];
testEntities = new IEntity[numTestEntities];
for (int i=0; i<numTestEntities; i++)
{
testEntityKeys[i] = (getRandomString(random, 3) + i);
testEntities[i] = getNewEntity(testEntityKeys[i]);
}
}
catch (Exception ex) { print("GroupsTester.setUp(): " + ex.getMessage());}
}
/**
* @return junit.framework.Test
*/
public static junit.framework.Test suite() {
TestSuite suite = new TestSuite();
suite.addTest(new GroupsTester("testAddAndDeleteGroups"));
suite.addTest(new GroupsTester("testAddAndDeleteMembers"));
suite.addTest(new GroupsTester("testGroupMemberValidation"));
suite.addTest(new GroupsTester("testGroupMemberUpdate"));
suite.addTest(new GroupsTester("testRetrieveParentGroups"));
suite.addTest(new GroupsTester("testUpdateMembersVisibility"));
suite.addTest(new GroupsTester("testUpdateLockableGroups"));
suite.addTest(new GroupsTester("testUpdateLockableGroupsWithRenewableLock"));
suite.addTest(new GroupsTester("testContains"));
suite.addTest(new GroupsTester("testDeleteChildGroup"));
suite.addTest(new GroupsTester("testMixLockableAndNonLockableGroups"));
suite.addTest(new GroupsTester("testConcurrentAccess"));
suite.addTest(new GroupsTester("testParseCompoundKeys"));
// Add more tests here.
// NB: Order of tests is not guaranteed.
return suite;
}
protected void tearDown()
{
try
{
testEntityKeys = null;
testEntities = null;
deleteTestGroups();
deleteTestEntityType();
clearGroupCache();
}
catch (Exception ex) { print("GroupTester.tearDown(): " + ex.getMessage());}
}
public void testAddAndDeleteGroups() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testAddAndDeleteGroups() *****" + CR);
String msg = null;
msg = "Creating a new IEntityGroup.";
print(msg);
IEntityGroup newGroup = getNewGroup();
assertNotNull(msg, newGroup);
print("Now updating " + newGroup);
newGroup.setName("Test");
newGroup.setCreatorID("de3");
newGroup.update();
print("Now retrieving group just created from the store.");
String key = newGroup.getKey();
IEntityGroup retrievedGroup = GroupService.findGroup(key);
msg = "Testing retrieved group.";
print(msg);
assertEquals(msg, newGroup, retrievedGroup);
print("Now deleting group just created from the store.");
retrievedGroup.delete();
print("Attempting to retrieve deleted group from the store.");
retrievedGroup = GroupService.findGroup(key);
assertNull(msg, retrievedGroup);
print(CR + "***** LEAVING GroupsTester.testAddAndDeleteGroups() *****" + CR);
}
public void testAddAndDeleteMembers() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testAddAndDeleteMembers() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
int totNumGroups = 3;
int totNumEntities = 5;
IEntityGroup[] groups = new IEntityGroup[totNumGroups];
IEntity[] entities = new IEntity[totNumEntities];
IGroupMember[] groupMembers = null;
Iterator itr = null;
ArrayList list = null;
int idx = 0;
msg = "Creating " + totNumGroups + " new groups.";
print(msg);
for (idx=0; idx<totNumGroups; idx++)
{
groups[idx] = getNewGroup();
assertNotNull(msg, groups[idx]);
}
IEntityGroup rootGroup = groups[0];
IEntityGroup childGroup = groups[1];
msg = "Adding " + (totNumGroups - 1) + " to root group.";
print(msg);
for(idx=1; idx<totNumGroups; idx++)
{ rootGroup.addMember(groups[idx]); }
msg = "Retrieving members from root group.";
print(msg);
list = new ArrayList();
for( itr=rootGroup.getMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 1), list.size());
msg = "Adding " + (totNumEntities - 2) + " to root group.";
print(msg);
for(idx=0; idx<(totNumEntities - 2) ; idx++)
{ rootGroup.addMember(testEntities[idx]); }
msg = "Retrieving members from root group.";
print(msg);
list = new ArrayList();
for( itr=rootGroup.getMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 1 + totNumEntities - 2), list.size());
msg = "Adding 2 entities to child group.";
print(msg);
childGroup.addMember(testEntities[totNumEntities - 1]);
childGroup.addMember(testEntities[totNumEntities]);
msg = "Retrieving ALL members from root group.";
print(msg);
list = new ArrayList();
for( itr=rootGroup.getAllMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 1 + totNumEntities), list.size());
msg = "Deleting child group from root group.";
print(msg);
rootGroup.removeMember(childGroup);
msg = "Retrieving ALL members from root group.";
print(msg);
list = new ArrayList();
for( itr=rootGroup.getAllMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 2 + totNumEntities - 2 ), list.size());
print(CR + "***** LEAVING GroupsTester.testAddAndDeleteMembers() *****" + CR);
}
public void testContains() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testContains() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
int totNumEntities = 1;
IEntityGroup containingGroup, childGroup, dupContainingGroup = null;
IEntity[] entities = new IEntity[totNumEntities];
IGroupMember[] groupMembers = null;
Iterator itr = null;
ArrayList list = null;
int idx = 0;
boolean testValue = false;
msg = "Creating new parent group.";
print(msg);
containingGroup = getNewGroup();
assertNotNull(msg, containingGroup);
msg = "Creating new child group.";
print(msg);
childGroup = getNewGroup();
assertNotNull(msg, childGroup);
msg = "Creating " + totNumEntities + " new entities.";
print(msg);
for(idx=0; idx<totNumEntities; idx++)
{ entities[idx] = getNewEntity("E" + idx); }
msg = "Adding " + (totNumEntities) + " to containing group.";
print(msg);
for(idx=0; idx<totNumEntities; idx++)
{ containingGroup.addMember(entities[idx]); }
msg = "Testing if containing group contains entities.";
print(msg);
for(idx=0; idx<totNumEntities; idx++)
{
testValue = containingGroup.contains(entities[idx]);
assertTrue(msg, testValue);
}
msg = "Adding child group to containing group.";
print(msg);
containingGroup.addMember(childGroup);
msg = "Testing if containing group contains child group.";
print(msg);
testValue = containingGroup.contains(childGroup);
assertTrue(msg, testValue);
msg = "Updating containing group.";
print(msg);
containingGroup.update();
msg = "Getting duplicate containing group.";
print(msg);
dupContainingGroup = findGroup(containingGroup.getKey());
assertNotNull(msg,dupContainingGroup);
msg = "Testing if RETRIEVED containing group contains entities.";
print(msg);
for(idx=0; idx<totNumEntities; idx++)
{
testValue = dupContainingGroup.contains(entities[idx]);
assertTrue(msg, testValue);
}
msg = "Testing if RETRIEVED containing group contains child group.";
print(msg);
testValue = dupContainingGroup.contains(childGroup);
assertTrue(msg, testValue);
msg = "Deleting containing group from db.";
print(msg);
containingGroup.delete();
print(CR + "***** LEAVING GroupsTester.testContains() *****" + CR);
}
public void testDeleteChildGroup() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testDeleteChildGroup() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
int totNumGroups = 3;
int totNumEntities = 5;
IEntityGroup[] groups = new IEntityGroup[totNumGroups];
IEntity[] entities = new IEntity[totNumEntities];
IGroupMember[] groupMembers = null;
Iterator itr = null;
ArrayList list = null;
int idx = 0;
Exception e = null;
msg = "Creating 3 new groups; 2 parents and 1 child...";
print(msg);
for (idx=0; idx<totNumGroups; idx++)
{
groups[idx] = getNewGroup();
assertNotNull(msg, groups[idx]);
}
IEntityGroup child = groups[0];
msg = "Adding child to " + (totNumGroups - 1) + " parent groups.";
print(msg);
for(idx=1; idx<totNumGroups; idx++)
{
groups[idx].addMember(child);
groups[idx].update();
}
msg = "Retrieving containing groups from child.";
print(msg);
list = new ArrayList();
for( itr=child.getContainingGroups(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 1), list.size());
msg = "Adding " + (totNumEntities) + " to child group.";
print(msg);
for(idx=0; idx<(totNumEntities) ; idx++)
{ child.addMember(testEntities[idx]); }
msg = "Retrieving members from child group.";
print(msg);
list = new ArrayList();
for( itr=child.getMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumEntities), list.size());
msg = "Updating child.";
print(msg);
child.update();
msg = "Will now lock one of the parent groups. (Delete of child should fail.)";
print(msg);
ILockableEntityGroup lockedParent = findLockableGroup(groups[1].getKey());
assertNotNull(msg, lockedParent);
assertTrue(msg, lockedParent.getLock().isValid());
msg = "Deleting child. (Should FAIL).";
print(msg);
ILockableEntityGroup legDelete = findLockableGroup(child.getKey());
try
{
legDelete.delete();
}
catch (GroupsException ge) { e = ge; }
assertNotNull(msg, e);
msg = "Will now UN-lock the parent group.";
print(msg);
lockedParent.getLock().release();
assertTrue(msg, ! lockedParent.getLock().isValid());
msg = "Deleting child. (Should SUCCEED).";
print(msg);
ILockableEntityGroup legDeleteDup = findLockableGroup(child.getKey());
legDeleteDup.delete();
msg = "Retrieving members from parent groups (should be EMPTY).";
print(msg);
for(idx=1; idx<totNumGroups; idx++)
{
String groupKey = groups[idx].getKey();
IEntityGroup g = findGroup(groupKey);
list = new ArrayList();
for( itr=g.getMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, 0, list.size());
}
print(CR + "***** LEAVING GroupsTester.testDeleteChildGroup() *****" + CR);
}
public void testGroupMemberUpdate() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testGroupMemberUpdate() *****" + CR);
String msg = null;
Iterator itr;
Collection list;
int idx = 0;
Exception e = null;
int numAddedEntities = 10;
int numDeletedEntities = 5;
print("Creating 2 new groups.");
IEntityGroup parent = getNewGroup(); parent.setName("parent"); parent.setCreatorID("de3");
String parentKey = parent.getKey();
IEntityGroup child = getNewGroup(); child.setName("child"); child.setCreatorID("de3");
String childKey = child.getKey();
print("Adding " + child + " to " + parent);
parent.addMember(child);
print("Adding " + numAddedEntities + " members to " + child);
for(idx=0; idx<numAddedEntities; idx++)
{ child.addMember(testEntities[idx]); }
msg = "Retrieving members from " + child; // child should have numAddedEntities group members.
print(msg);
list = getGroupMembers(child);
assertEquals(msg, (numAddedEntities), list.size());
msg = "Retrieving members from " + parent; // parent should have numAddedEntities + 1 group members.
print(msg);
list = getAllGroupMembers(parent);
assertEquals(msg, (numAddedEntities + 1), list.size());
print("Now updating " + parent + " and " + child);
child.update();
parent.update();
msg = "Retrieving " + parent + " and " + child + " from db.";
print(msg);
IEntityGroup retrievedParent = GroupService.findGroup(parentKey);
IEntityGroup retrievedChild = GroupService.findGroup(childKey);
assertEquals(msg, parent, retrievedParent);
assertEquals(msg, child, retrievedChild);
// retrievedChild should have numAddedEntities group members.
msg = "Retrieving members from " + retrievedChild;
print(msg);
list = getAllGroupMembers(retrievedChild);
assertEquals(msg, numAddedEntities, list.size());
// retrievedParent should have numAddedEntities + 1 group members.
msg = "Retrieving members from " + retrievedParent;
print(msg);
list = getAllGroupMembers(retrievedParent);
assertEquals(msg, (numAddedEntities + 1), list.size());
print("Deleting " + numDeletedEntities + " members from " + retrievedChild);
for(idx=0; idx<numDeletedEntities; idx++)
{ retrievedChild.removeMember(testEntities[idx]); }
// retrievedChild should have (numAddedEntities - numDeletedEntities) members.
msg = "Retrieving members from " + retrievedChild;
print(msg);
list = getAllGroupMembers(retrievedChild);
assertEquals(msg, (numAddedEntities - numDeletedEntities), list.size());
msg = "Adding back one member to " + retrievedChild;
print(msg);
retrievedChild.addMember(testEntities[0]);
// retrievedChild should have (numAddedEntities - numDeletedEntities + 1) members.
msg = "Retrieving members from " + retrievedChild;
print(msg);
list = getAllGroupMembers(retrievedChild);
assertEquals(msg, (numAddedEntities - numDeletedEntities + 1), list.size());
int numChildMembers = list.size();
print("Now updating " + retrievedChild);
retrievedChild.update();
msg = "Re-Retrieving " + retrievedChild + " from db.";
print(msg);
IEntityGroup reRetrievedChild = GroupService.findGroup(childKey);
assertEquals(msg, retrievedChild, reRetrievedChild);
// re-RetrievedChild should have (numAddedEntities - numDeletedEntities + 1) members.
msg = "Retrieving members from " + reRetrievedChild;
print(msg);
list = getAllGroupMembers(reRetrievedChild);
assertEquals(msg, numChildMembers, list.size());
// Remove parent and child groups from db.
msg = "Deleting " + retrievedParent + " and " + reRetrievedChild + " from db.";
print(msg);
retrievedParent.delete();
reRetrievedChild.delete();
IEntityGroup deletedParent = GroupService.findGroup(parentKey);
IEntityGroup deletedChild = GroupService.findGroup(childKey);
assertNull(msg, deletedParent);
assertNull(msg, deletedChild);
print(CR + "***** LEAVING GroupsTester.testGroupMemberUpdate() *****" + CR);
}
public void testGroupMemberValidation() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testGroupMemberValidation() *****" + CR);
String msg = null;
Iterator itr;
Collection list;
int idx = 0;
Exception e = null;
IEntityGroup parent = getNewGroup(); parent.setName("parent"); parent.setCreatorID("de3");
IEntityGroup child = getNewGroup(); child.setName("child"); child.setCreatorID("de3");
IEntityGroup child2 = getNewGroup(); child2.setName("child"); child2.setCreatorID("de3");
IEntity entity1 = getNewEntity("child");
IEntity entity2 = getNewEntity("child");
IEntity ipersonEntity = GroupService.getEntity("00000", IPERSON_CLASS);
msg = "Adding " + child + " to " + parent;
print(msg);
parent.addMember(child);
msg = "Retrieving members from " + parent; // parent should have 1 group member.
print(msg);
list = getGroupMembers(parent);
assertEquals(msg, 1, list.size());
// Test adding a group with a duplicate name.
msg = "Adding " + child2 + " to " + parent + " (should fail).";
print(msg);
try { parent.addMember(child2); }
catch (GroupsException ge) {e = ge;}
assertNotNull(msg, e);
msg = "Retrieving members from " + parent; // parent should STILL have 1 group member.
print(msg);
list = getGroupMembers(parent);
assertEquals(msg, 1, list.size());
msg = "Adding renamed " + child2 + " to " + parent + " (should succeed).";
print(msg);
child2.setName("child2");
try { parent.addMember(child2); e=null;}
catch (GroupsException ge) {e=ge;}
assertNull(msg, e);
msg = "Retrieving members from " + parent; // parent should now have 2 group members.
print(msg);
list = getGroupMembers(parent);
assertEquals(msg, 2, list.size());
// Test adding an ENTITY with the same name as a member GROUP.
msg = "Adding entity w/same name as child group to " + parent;
print(msg);
parent.addMember(entity1);
msg = "Retrieving members from " + parent; // parent should now have 3 group members.
print(msg);
list = getGroupMembers(parent);
assertEquals(msg, 3, list.size());
// Test adding a group member with a duplicate key.
msg = "Adding another entity w/same name as child group to " + parent + " (should fail).";
print(msg);
try { parent.addMember(entity2); e = null;}
catch (GroupsException ge) {e = ge;}
assertNotNull(msg, e);
msg = "Retrieving members from " + parent; // parent should still have 3 group members.
print(msg);
list = getGroupMembers(parent);
assertEquals(msg, 3, list.size());
// Test adding a group member with a different type:
msg = "Adding an entity of different type to " + parent;
print(msg);
try { parent.addMember(ipersonEntity); e = null; }
catch (GroupsException ge) {e = ge;}
assertNotNull(msg, e);
msg = "Retrieving members from " + parent; // parent should still have 3 group members.
print(msg);
list = getGroupMembers(parent);
assertEquals(msg, 3, list.size());
// Test adding a circular reference.
msg = "Adding " + parent + " to " + child + " (circular reference, should fail).";
print(msg);
try { child.addMember(parent); e = null; }
catch (GroupsException ge) { e = ge; }
assertNotNull(msg, e);
msg = "Retrieving members from " + child; // child should have 0 members.
print(msg);
list = getGroupMembers(child);
assertEquals(msg, 0, list.size());
print(CR + "***** LEAVING GroupsTester.testGroupMemberValidation() *****" + CR);
}
public void testRetrieveParentGroups() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testRetrieveParentGroups() *****" + CR);
String msg = null;
int numAllGroups = 10;
int numContainingGroups = 8;
IEntityGroup[] allGroups = new IEntityGroup[numAllGroups];
IEntity testEntity = testEntities[0];
Iterator it = null;
Collection list = null;
int idx = 0;
msg = "Creating " + numAllGroups + " new groups...";
print(msg);
for (idx=0; idx < numAllGroups; idx++)
{
allGroups[idx] = getNewGroup();
assertNotNull(msg, allGroups[idx]);
allGroups[idx].setName("Parent Group " + idx);
allGroups[idx].setCreatorID("de3");
allGroups[idx].update();
print("Group " + allGroups[idx].getName() + " created.");
}
msg = numAllGroups + " new groups created";
print(msg);
msg = "Adding " + testEntity + " to " + numContainingGroups + " containing groups.";
print(msg);
for (idx=0; idx<numContainingGroups; idx++)
{
allGroups[idx].addMember(testEntity);
allGroups[idx].update();
}
msg = "Getting containing groups for " + testEntity;
print(msg);
list = new ArrayList();
for (it = testEntity.getContainingGroups(); it.hasNext();)
{ list.add(it.next()); }
assertEquals(msg, numContainingGroups, list.size());
msg = "Adding parents to the immediate containing groups.";
print(msg);
for (idx=numContainingGroups; idx<numAllGroups; idx++)
{
IEntityGroup parent = allGroups[idx];
IEntityGroup child = allGroups[idx - 1];
msg = "Adding " + child + " to " + parent;
print(msg);
parent.addMember(child);
parent.update();
}
msg = "Getting ALL containing groups for " + testEntity;
print(msg);
list = new ArrayList();
for (it = testEntity.getAllContainingGroups(); it.hasNext();)
{ list.add(it.next()); }
assertEquals(msg, numAllGroups, list.size());
IEntity duplicateTestEntity = GroupService.getEntity(testEntity.getKey(), testEntity.getType());
msg = "Getting ALL containing groups for DUPLICATE entity:" + testEntity;
print(msg);
list = new ArrayList();
for (it = duplicateTestEntity.getAllContainingGroups(); it.hasNext();)
{ list.add(it.next()); }
assertEquals(msg, numAllGroups, list.size());
print(CR + "***** LEAVING GroupsTester.testRetrieveParentGroups() *****" + CR);
}
public void testUpdateLockableGroups() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testUpdateLockableGroups() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
int totNumGroups = 3;
int totNumEntities = 5;
IEntityGroup[] groups = new IEntityGroup[totNumGroups];
IEntity[] entities = new IEntity[totNumEntities];
IGroupMember[] groupMembers = null;
Iterator itr = null;
ArrayList list = null;
int idx = 0;
boolean testValue = false;
Exception e = null;
msg = "Creating " + totNumGroups + " new groups.";
print(msg);
for (idx=0; idx<totNumGroups; idx++)
{
groups[idx] = getNewGroup();
groups[idx].update();
assertNotNull(msg, groups[idx]);
groups[idx].update();
}
msg = "Getting group keys.";
print(msg);
String[] groupKeys = new String[totNumGroups];
for (idx=0; idx<totNumGroups; idx++)
{
groupKeys[idx] = groups[idx].getKey();
}
msg = "Retrieving lockable group for key " + groupKeys[0];
print(msg);
ILockableEntityGroup lockableGroup1 = findLockableGroup(groupKeys[0]);
testValue = lockableGroup1.getLock().isValid();
assertTrue(msg, testValue);
msg = "Retrieving a duplicate lockable group for key " + groupKeys[0] + " (should FAIL)";
print(msg);
try
{
ILockableEntityGroup lockableGroup2 = findLockableGroup(groupKeys[0]);
}
catch (GroupsException ge) {e = ge;}
assertNotNull(msg, e);
e = null;
msg = "Checking lock of first group";
print(msg);
testValue = lockableGroup1.getLock().isValid();
assertTrue(msg, testValue);
String oldName = lockableGroup1.getName();
String newName = "NEW GROUP NAME";
msg = "Update name of lockable group but do not commit.";
print(msg);
lockableGroup1.setName(newName);
assertEquals(msg, newName, lockableGroup1.getName());
msg = "Checking lock of first group";
print(msg);
testValue = lockableGroup1.getLock().isValid();
assertTrue(msg, testValue);
msg = "Retrieving duplicate group from service; change should NOT be visible.";
print(msg);
IEntityGroup nonLockableGroup = findGroup(groupKeys[0]);
assertEquals(msg, oldName, nonLockableGroup.getName());
msg = "Checking lock of first group";
print(msg);
testValue = lockableGroup1.getLock().isValid();
assertTrue(msg, testValue);
msg = "Committing change to lockable group";
print(msg);
lockableGroup1.update();
testValue = lockableGroup1.getLock().isValid();
assertTrue(msg, ! testValue);
msg = "Retrieving duplicate group from service; change should be visible now.";
print(msg);
nonLockableGroup = findGroup(groupKeys[0]);
assertEquals(msg, newName, nonLockableGroup.getName());
msg = "Attempting to delete old version of group " + groupKeys[0] + " (should FAIL.)";
print(msg);
try
{
lockableGroup1.delete();
}
catch (GroupsException ge) {e = ge;}
assertNotNull(msg, e);
e = null;
msg = "Attempting to delete NEW version of group " + groupKeys[0];
print(msg);
ILockableEntityGroup lockableGroup3 = findLockableGroup(groupKeys[0]);
lockableGroup3.delete();
nonLockableGroup = findGroup(groupKeys[0]);
assertNull(msg, nonLockableGroup);
print(CR + "***** LEAVING GroupsTester.testUpdateLockableGroups() *****" + CR);
}
public void testUpdateLockableGroupsWithRenewableLock() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testUpdateLockableGroupsWithRenewableLock() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
IEntityGroup group = null;
boolean testValue = false;
Exception e = null;
String groupKey = null;
msg = "Creating new group.";
print(msg);
group = getNewGroup();
group.update();
assertNotNull(msg, group);
msg = "Getting group key.";
print(msg);
groupKey = group.getKey();
msg = "Retrieving lockable group for key " + groupKey;
print(msg);
ILockableEntityGroup lockableGroup = findLockableGroup(groupKey);
assertNotNull(msg, lockableGroup);
msg = "Checking lock of first group";
print(msg);
testValue = lockableGroup.getLock().isValid();
assertTrue(msg, testValue);
String oldName = lockableGroup.getName();
String newName = "NEW GROUP NAME";
msg = "Updating name of lockable group but not committing.";
print(msg);
lockableGroup.setName(newName);
assertEquals(msg, newName, lockableGroup.getName());
msg = "Checking lock of first group";
print(msg);
testValue = lockableGroup.getLock().isValid();
assertTrue(msg, testValue);
msg = "Committing change to lockable group and renewing lock.";
print(msg);
lockableGroup.updateAndRenewLock();
testValue = lockableGroup.getLock().isValid();
assertTrue(msg, testValue);
msg = "Retrieving duplicate group from service; change should be visible now.";
print(msg);
IEntityGroup nonLockableGroup = findGroup(groupKey);
assertEquals(msg, newName, nonLockableGroup.getName());
msg = "Update name of lockable group again.";
print(msg);
lockableGroup.setName(oldName);
assertEquals(msg, oldName, lockableGroup.getName());
msg = "Committing change to lockable group and renewing lock.";
print(msg);
lockableGroup.updateAndRenewLock();
testValue = lockableGroup.getLock().isValid();
assertTrue(msg, testValue);
msg = "Attempting to delete lockable group " + groupKey;
print(msg);
lockableGroup.delete();
nonLockableGroup = findGroup(groupKey);
assertNull(msg, nonLockableGroup);
print(CR + "***** LEAVING GroupsTester.testUpdateLockableGroupsWithRenewableLock() *****" + CR);
}
public void testUpdateMembersVisibility() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testUpdateMembersVisibility() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
int totNumGroups = 3;
int totNumEntities = 5;
IEntityGroup[] groups = new IEntityGroup[totNumGroups];
IEntity[] entities = new IEntity[totNumEntities];
IGroupMember[] groupMembers = null;
Iterator itr = null;
ArrayList list = null;
int idx = 0;
boolean testValue = false;
msg = "Creating " + totNumGroups + " new groups.";
print(msg);
for (idx=0; idx<totNumGroups; idx++)
{
groups[idx] = getNewGroup();
assertNotNull(msg, groups[idx]);
}
IEntityGroup rootGroup = groups[0];
IEntityGroup childGroup = groups[1];
msg = "Adding " + (totNumGroups - 1) + " to root group.";
print(msg);
for(idx=1; idx<totNumGroups; idx++)
{ rootGroup.addMember(groups[idx]); }
msg = "Retrieving members from root group.";
print(msg);
list = new ArrayList();
for( itr=rootGroup.getMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 1), list.size());
msg = "Adding " + (totNumEntities - 2) + " to root group.";
print(msg);
for(idx=0; idx<(totNumEntities - 2) ; idx++)
{ rootGroup.addMember(testEntities[idx]); }
msg = "Retrieving members from root group.";
print(msg);
list = new ArrayList();
for( itr=rootGroup.getMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 1 + totNumEntities - 2), list.size());
msg = "Adding 2 entities to child group.";
print(msg);
childGroup.addMember(testEntities[totNumEntities - 1]);
childGroup.addMember(testEntities[totNumEntities]);
msg = "Retrieving ALL members from root group.";
print(msg);
list = new ArrayList();
for( itr=rootGroup.getAllMembers(); itr.hasNext(); )
{ list.add(itr.next()); }
assertEquals(msg, (totNumGroups - 1 + totNumEntities), list.size());
// At this point, the child group members should not yet be aware of their parents.
msg = "Checking child groups for parents (should be none).";
print(msg);
list = new ArrayList();
for(idx=1; idx<totNumGroups; idx++)
{
for (itr = groups[idx].getContainingGroups(); itr.hasNext();)
{ list.add(itr.next()); }
assertEquals(msg, 0, list.size());
}
testValue = testEntities[0].isMemberOf(rootGroup);
assertEquals(msg, false, testValue);
// Update the parent group. Its children should now be aware of it.
msg = "Updating parent group.";
print(msg);
rootGroup.update();
msg = "Checking child entity for membership in parent.";
print(msg);
testValue = testEntities[0].isMemberOf(rootGroup);
assertEquals(msg, true, testValue);
// Child group not yet updated. Its child should still be unaware of it.
msg = "Checking child entity for membership in child group.";
print(msg);
testValue = testEntities[totNumEntities].isMemberOf(childGroup);
assertEquals(msg, false, testValue);
// Update the child group. Its children should now be aware of it.
msg = "Updating child group.";
print(msg);
childGroup.update();
msg = "Checking child entity for membership in child group.";
print(msg);
testValue = testEntities[totNumEntities].isMemberOf(childGroup);
assertEquals(msg, true, testValue);
msg = "Getting child entity thru the service (should be cached copy).";
print(msg);
EntityIdentifier entID = testEntities[totNumEntities].getUnderlyingEntityIdentifier();
IGroupMember ent = GroupService.getGroupMember(entID);
msg = "Checking child entity for membership in child group.";
print(msg);
testValue = ent.isMemberOf(childGroup);
assertEquals(msg, true, testValue);
// Child entity should now be aware of both of its parents.
msg = "Checking child entity for ALL containing groups.";
print(msg);
list = new ArrayList();
for (itr = ent.getAllContainingGroups(); itr.hasNext();)
{ list.add(itr.next()); }
assertEquals(msg, 2, list.size());
print(CR + "***** LEAVING GroupsTester.testUpdateMembersVisibility() *****" + CR);
}
public void testMixLockableAndNonLockableGroups() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testMixLockableAndNonLockableGroups() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
int totNumGroups = 3;
IEntityGroup[] groups = new IEntityGroup[totNumGroups];
boolean testValue = false;
Exception e = null;
int idx = 0;
msg = "Creating " + totNumGroups + " new groups.";
print(msg);
for (idx=0; idx<totNumGroups; idx++)
{
groups[idx] = getNewGroup();
groups[idx].update();
assertNotNull(msg, groups[idx]);
groups[idx].update();
}
msg = "Getting group keys.";
print(msg);
String[] groupKeys = new String[totNumGroups];
for (idx=0; idx<totNumGroups; idx++)
{
groupKeys[idx] = groups[idx].getKey();
}
msg = "Retrieving nonLockable group " + groupKeys[0];
print(msg);
IEntityGroup group1 = findGroup(groupKeys[0]);
assertNotNull(msg, group1);
msg = "Retrieving lockable group for key " + groupKeys[0];
print(msg);
ILockableEntityGroup lockableGroup = findLockableGroup(groupKeys[0]);
testValue = lockableGroup.getLock().isValid();
assertTrue(msg, testValue);
msg = "Updating lockable group.";
print(msg);
String oldName = lockableGroup.getName();
String newName = "NEW GROUP NAME";
print(msg);
lockableGroup.setName(newName);
lockableGroup.update();
msg = "Retrieving a second nonLockable group for " + groupKeys[0];
print(msg);
IEntityGroup group2 = findGroup(groupKeys[0]);
assertNotNull(msg, group2);
assertEquals(msg, newName, group2.getName());
msg = "Updating second nonLockable group.";
print(msg);
group2.setName(oldName);
group2.update();
msg = "Retrieving a second lockable group for key " + groupKeys[0];
print(msg);
ILockableEntityGroup lockableGroup2 = findLockableGroup(groupKeys[0]);
testValue = lockableGroup2.getLock().isValid();
assertTrue(msg, testValue);
msg = "Updating second lockable group.";
print(msg);
lockableGroup2.setName(newName);
lockableGroup2.update();
print(CR + "***** LEAVING GroupsTester.testMixLockableAndNonLockableGroups() *****" + CR);
}
public void testConcurrentAccess() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testConcurrentAccess() *****" + CR);
String msg = null;
Class type = TEST_ENTITY_CLASS;
int totNumGroups = 3;
int numContainingGroups = totNumGroups - 1;
IEntityGroup[] groups = new IEntityGroup[totNumGroups];
int idx = 0;
int numReadTests = 50;
int numThreads = 10;
msg = "Creating " + totNumGroups + " new groups.";
print(msg);
for (idx=0; idx<totNumGroups; idx++)
{
groups[idx] = getNewGroup();
groups[idx].update();
assertNotNull(msg, groups[idx]);
groups[idx].update();
}
IEntityGroup child = groups[0];
msg = "Adding parents to child group " + child.getName();
print(msg);
for (idx=1; idx<totNumGroups; idx++)
{
IEntityGroup parent = groups[idx];
parent.addMember(child);
groups[idx].update();
}
print("Starting testing Threads.");
Thread[] testers = new Thread[numThreads];
for (idx=0; idx<numThreads; idx++)
{
String id = "" + idx;
GroupsReadTester grt = new GroupsReadTester(id, child, numReadTests);
testers[idx] = new Thread(grt);
testers[idx].start();
}
msg = "Adding members to " + child;
print(msg);
for (idx=0; idx<numTestEntities; idx++)
{
IEntity childEntity = testEntities[idx];
child.addMember(childEntity);
if ( idx % 10 == 0 ) // update once for every 10 adds
{ child.update(); }
assertTrue(msg,child.contains(childEntity));
// print("added entity # " + (idx + 1) + " to " + child);
}
msg = "Updating " + child;
print(msg);
child.update();
msg = "Removing members from " + child;
print(msg);
for (idx=0; idx<numTestEntities; idx++)
{
IEntity childEntity = testEntities[idx];
child.removeMember(childEntity);
assertTrue(msg,! child.contains(childEntity));
}
msg = "Updating " + child;
print(msg);
child.update();
Thread.sleep(numReadTests * 20); // let them die.
print(CR + "***** LEAVING GroupsTester.testConcurrentAccess() *****" + CR);
}
public void testParseCompoundKeys() throws Exception
{
print(CR + "***** ENTERING GroupsTester.testParseCompoundKeys() *****" + CR);
String msg = null;
int maxNodes=5;
int idx=0;
String[] keys = new String[maxNodes];
String[] nodes = new String[maxNodes];
String key = null;
String sep = GroupServiceConfiguration.getConfiguration().getNodeSeparator();
print("GroupServiceConfiguration node separator: " + sep);
print("Creating random node strings.");
for (idx=0; idx<maxNodes; idx++)
{ nodes[idx] = (getRandomString(random, 3) + idx); }
print ("Creating keys.");
for (idx=0; idx<maxNodes; idx++)
{
key = nodes[0];
for (int i = 1; i<=idx; i++)
{ key = key + sep + nodes[i]; }
keys[idx] = key;
print("key " + idx + " : " + key);
}
for (idx=1; idx<maxNodes; idx++)
{
CompositeEntityIdentifier cei = null;
msg = "Creating CompositeEntityIdentifier for " + keys[idx];
print(msg);
cei = new CompositeEntityIdentifier(keys[idx], GROUP_CLASS);
assertNotNull(msg, cei);
msg = "Testing COMPOUND key of " + cei;
assertEquals(msg, keys[idx], cei.getKey());
msg = "Testing LOCAL key of " + cei;
assertEquals(msg, nodes[idx], cei.getLocalKey());
msg = "Testing SERVICE NAME of " + cei;
assertEquals(msg, idx, cei.getServiceName().size());
}
print(CR + "***** LEAVING GroupsTester.testParseCompoundKeys() *****" + CR);
}
}
|
package nova.sample.block;
import nova.core.block.Block;
import nova.core.block.components.Stateful;
import nova.core.entity.Entity;
import nova.core.game.Game;
import nova.core.network.NetworkTarget;
import nova.core.network.Packet;
import nova.core.network.PacketHandler;
import nova.core.network.Sync;
import nova.core.render.model.Model;
import nova.core.retention.Storable;
import nova.core.retention.Stored;
import nova.core.util.Category;
import nova.core.util.transform.Quaternion;
import nova.core.util.transform.Vector3d;
import nova.sample.NovaTest;
/**
* This is a test block that has state.
* @author Calclavia
*/
public class BlockGrinder extends Block implements Storable, Stateful, PacketHandler, Category {
/**
* Angle to rotate around
*/
@Stored
@Sync
private double angle;
@Override
public boolean onRightClick(Entity entity, int side, Vector3d hit) {
if (NetworkTarget.Side.get() == NetworkTarget.Side.SERVER) {
angle = (angle + Math.PI / 12) % (Math.PI * 2);
Game.instance.networkManager.sync(this);
}
return true;
}
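// Sketch of the flow above: the rotation only changes on the server (PI / 12, i.e. 15 degrees
// per click, wrapping at a full turn), and networkManager.sync(this) pushes the @Sync-annotated
// 'angle' field to clients; read(Packet) then marks the block for a static re-render.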
@Override
public void renderStatic(Model model) {
Model grinderModel = NovaTest.grinderModel.getModel();
grinderModel
.combineChildren("crank", "crank1", "crank2", "crank3")
.rotate(Quaternion.fromEuler(0, angle, 0));
model.children.add(grinderModel);
model.bindAll(NovaTest.grinderTexture);
}
@Override
public void read(Packet packet) {
PacketHandler.super.read(packet);
world().markStaticRender(position());
}
@Override
public String getID() {
return "stateful";
}
@Override
public String getCategory() {
return "buildingBlocks";
}
@Override
public boolean isOpaqueCube() {
return false;
}
}
|
package pl.pwr.hiervis.core;
import java.awt.Window;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import basic_hierarchy.interfaces.Hierarchy;
import basic_hierarchy.interfaces.Node;
import basic_hierarchy.reader.GeneratedCSVReader;
import pl.pwr.hiervis.ui.OperationProgressFrame;
import pl.pwr.hiervis.util.Event;
import pl.pwr.hiervis.util.HierarchyUtils;
public class HKPlusPlusWrapper
{
private static final File hkBaseDir = new File( "./hk" );
private static final File hkOutDir = new File( hkBaseDir, "out" );
private static final File hkJarFile = new File( hkBaseDir, "hk.jar" );
private static final File hkInputFile = new File( hkBaseDir, "in.csv" );
private static final File hkOutputFile = new File( hkOutDir, "finalHierarchyOfGroups.csv.csv" );
/**
* Sent when the subprocess terminates by itself.
*
* @param first
* the exit code of the subprocess
*/
public final Event<Integer> subprocessFinished = new Event<>();
/** Sent when the subprocess is terminated by the main process. */
public final Event<Void> subprocessAborted = new Event<>();
private volatile Process process;
private InputStreamObserverThread outObserver;
private OperationProgressFrame waitFrame;
public HKPlusPlusWrapper()
{
}
/**
* Creates and starts the subprocess with the specified arguments, and creates a wait dialog
* at the specified owner window.
*
* @param owner
* frame at which the wait dialog will be created.
* @param trueClassAttribute
* indicates that FIRST column of data is true class attribute, class should be indicated by string.
* @param instanceNames
* indicates that SECOND (if class attribute is present) or FIRST (otherwise) column is the name of every instance.
* @param diagonalMatrix
* use simple diagonal matrix instead of full matrix as a covariance matrix
* @param disableStaticCenter
* disable feature of placing static (background) center while going down in hierarchy
* @param generateImages
* store clusterisation results also as images (ONLY first two dimensions are visualized!).
* The dimension of each image is set to 800x800.
* @param epsilon
* epsilon value expressed as 10^-epsilon, used in comparing values to 0.0, reducing round-off error.
* Default value is 10.
* @param littleValue
* value of diagonal matrix elements expressed as 10^-littleValue, used in forcing covariance matrix to be non-singular.
* Default value is 5.
* @param clusters
* number of clusters generated by clusterisation algorithm
* @param iterations
* number of maximum iterations made by clusterisation algorithm
* @param repeats
* number of algorithm repeats (new initialization of clusters)
* @param dendrogramSize
* max dendrogram height
* @param maxNodeCount
* maximum number of created nodes
* @throws IOException
* if an I/O error occurs while starting the subprocess
*/
public void start(
Window owner,
boolean trueClassAttribute, boolean instanceNames,
boolean diagonalMatrix, boolean disableStaticCenter,
boolean generateImages,
int epsilon, int littleValue,
int clusters, int iterations, int repeats,
int dendrogramSize, int maxNodeCount ) throws IOException
{
hkOutDir.mkdirs();
// Clear the output dir so as not to litter
Arrays.stream( hkOutDir.listFiles() ).forEach( file -> file.delete() );
// Set HK's working dir to the output directory, so that we keep all output files in one place
process = new ProcessBuilder(
buildArgsList(
trueClassAttribute, instanceNames, diagonalMatrix, disableStaticCenter, generateImages,
epsilon, littleValue, clusters, iterations, repeats, dendrogramSize, maxNodeCount
)
).redirectErrorStream( true ).directory( hkOutDir ).start();
// Create a separate thread to wait for HK to terminate
Thread subprocessObserver = new Thread(
() -> {
try {
int exitCode = process.waitFor();
if ( process == null ) {
subprocessAborted.broadcast( null );
}
else {
destroy();
subprocessFinished.broadcast( exitCode );
}
}
catch ( InterruptedException e ) {
// Ignore.
}
}
);
subprocessObserver.setDaemon( true );
subprocessObserver.start();
outObserver = new InputStreamObserverThread( process.getInputStream() );
outObserver.start();
waitFrame = new OperationProgressFrame( owner, "HK++ subprocess" );
waitFrame.setAbortOperation( e -> destroy() );
waitFrame.setStatusUpdateCallback( this::getLatestMessage );
waitFrame.setProgressPollInterval( 100 );
waitFrame.setModal( true );
waitFrame.setSize( 300, 150 );
waitFrame.setLocationRelativeTo( owner );
waitFrame.setVisible( true );
}
/**
* Prepares the input file that the HK subprocess will load by serializing the specified hierarchy
* and saving it in the file that HK is configured to load.
*
* @param hierarchy
* the source hierarchy
* @param selectedNode
* the node in the specified hierarchy denoting the subtree that is to be
* serialized to the file
* @param withTrueClass
* whether the input file should include true class column
* @param withInstanceNames
* whether the input file should include instance name column
* @throws IOException
* if an I/O error occurs
*/
public void prepareInputFile(
Hierarchy hierarchy, Node selectedNode,
boolean withTrueClass, boolean withInstanceNames ) throws IOException
{
Hierarchy subHierarchy = HierarchyUtils.subHierarchyShallow( hierarchy, selectedNode.getId() );
HierarchyUtils.save( hkInputFile.getAbsolutePath(), subHierarchy, false, withTrueClass, withInstanceNames, true );
}
public Hierarchy getOutputHierarchy( boolean withTrueClass, boolean withInstanceNames, boolean useSubtree ) throws IOException
{
return new GeneratedCSVReader( false ).load(
hkOutputFile.getAbsolutePath(),
withInstanceNames, withTrueClass, true, false, useSubtree
);
}
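/*
* Usage sketch (hypothetical caller; only the methods and fields of this class are assumed):
* HKPlusPlusWrapper hk = new HKPlusPlusWrapper();
* hk.prepareInputFile(hierarchy, selectedNode, true, false); // writes ./hk/in.csv
* // subscribe to hk.subprocessFinished / hk.subprocessAborted here
* hk.start(owner, true, false, false, false, false, 10, 5, 2, 10, 10, 2, 1000);
* Hierarchy result = hk.getOutputHierarchy(true, false, false); // reads the HK output file
*/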
/**
* Attempts to terminate the HK++ subprocess, and clean up.
*/
public void destroy()
{
waitFrame.dispose();
outObserver.interrupt();
process.destroy();
outObserver = null;
waitFrame = null;
process = null;
}
private String getLatestMessage()
{
return outObserver == null ? "" : outObserver.getLatestMessage();
}
/**
* Build the args list based on options selected by the user.
*
* @param trueClassAttribute
* indicates that FIRST column of data is class attribute, class should be indicated by string.
* @param instanceNames
* indicates that SECOND (if class attribute is present) or FIRST (otherwise) column is the name of every instance.
* @param diagonalMatrix
* use simple diagonal matrix instead of full matrix as a covariance matrix
* @param disableStaticCenter
* disable feature of placing static (background) center while going down in hierarchy
* @param generateImages
* store clusterisation results also as images (ONLY first two dimensions are visualized!).
* The dimension of each image is set to 800x800.
* @param epsilon
* epsilon value expressed as 10^-epsilon, used in comparing values to 0.0, reducing round-off error.
* Default value is 10.
* @param littleValue
* value of diagonal matrix elements expressed as 10^-littleValue, used in forcing covariance matrix to be non-singular.
* Default value is 5.
* @param clusters
* number of clusters generated by clusterisation algorithm
* @param iterations
* number of maximum iterations made by clusterisation algorithm
* @param repeats
* number of algorithm repeats (new initialization of clusters)
* @param dendrogramSize
* max dendrogram height
* @param maxNodeCount
* maximum number of created nodes
* @return list of arguments passed to ProcessBuilder to create the subprocess
*/
private List<String> buildArgsList(
boolean trueClassAttribute, boolean instanceNames,
boolean diagonalMatrix, boolean disableStaticCenter,
boolean generateImages,
int epsilon, int littleValue,
int clusters, int iterations, int repeats,
int dendrogramSize, int maxNodeCount )
{
List<String> args = new ArrayList<>();
args.add( "java" );
// Set encoding to UTF-8 so that files are loaded correctly
args.add( "-Dfile.encoding=utf8" );
args.add( "-jar" );
args.add( hkJarFile.getAbsolutePath() );
// Hardcode several parameters.
args.add( "-lgmm" );
// args.add( "-v" ); // verbose mode
args.add( "-cf" );
args.add( "1.0" );
args.add( "-rf" );
args.add( "1.0" );
if ( trueClassAttribute )
args.add( "-c" );
if ( instanceNames )
args.add( "-in" );
if ( diagonalMatrix )
args.add( "-dm" );
if ( disableStaticCenter )
args.add( "-ds" );
if ( generateImages ) {
args.add( "-gi" );
args.add( "800" );
}
args.add( "-e" );
args.add( Integer.toString( epsilon ) );
args.add( "-l" );
args.add( Integer.toString( littleValue ) );
args.add( "-k" );
args.add( Integer.toString( clusters ) );
args.add( "-n" );
args.add( Integer.toString( iterations ) );
args.add( "-r" );
args.add( Integer.toString( repeats ) );
args.add( "-s" );
args.add( Integer.toString( dendrogramSize ) );
args.add( "-w" );
args.add( Integer.toString( maxNodeCount ) );
args.add( "-i" );
args.add( hkInputFile.getAbsolutePath() );
args.add( "-o" );
args.add( hkOutDir.getAbsolutePath() );
return args;
}
}
|
package com.macro.mall.dao;
import com.macro.mall.model.PmsProductVertifyRecord;
import org.apache.ibatis.annotations.Param;
import java.util.List;
public interface PmsProductVertifyRecordDao {
int insertList(@Param("list") List<PmsProductVertifyRecord> list);
}
|
package org.apdplat.word;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apdplat.word.segmentation.SegmentationAlgorithm;
import org.apdplat.word.segmentation.SegmentationFactory;
import org.apdplat.word.recognition.StopWord;
import org.apdplat.word.segmentation.Word;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Entry point for word segmentation: segments plain text or whole files,
* with or without stop-word filtering.
*
* @author
*/
public class WordSegmenter {
private static final Logger LOGGER = LoggerFactory.getLogger(WordSegmenter.class);
/**
* Segments text with the given segmentation algorithm, keeping stop words.
*
* @param text text to segment
* @param segmentationAlgorithm segmentation algorithm to use
* @return list of segmented words
*/
public static List<Word> segWithStopWords(String text, SegmentationAlgorithm segmentationAlgorithm){
return SegmentationFactory.getSegmentation(segmentationAlgorithm).seg(text);
}
/**
* Segments text with the default algorithm (bidirectional maximum matching), keeping stop words.
*
* @param text text to segment
* @return list of segmented words
*/
public static List<Word> segWithStopWords(String text){
return SegmentationFactory.getSegmentation(SegmentationAlgorithm.BidirectionalMaximumMatching).seg(text);
}
/**
* Segments text with the given segmentation algorithm and removes stop words.
*
* @param text text to segment
* @param segmentationAlgorithm segmentation algorithm to use
* @return list of segmented words with stop words removed
*/
public static List<Word> seg(String text, SegmentationAlgorithm segmentationAlgorithm){
List<Word> words = SegmentationFactory.getSegmentation(segmentationAlgorithm).seg(text);
return filterStopWords(words);
}
/**
* Segments text with the default algorithm (bidirectional maximum matching) and removes stop words.
*
* @param text text to segment
* @return list of segmented words with stop words removed
*/
public static List<Word> seg(String text){
List<Word> words = SegmentationFactory.getSegmentation(SegmentationAlgorithm.BidirectionalMaximumMatching).seg(text);
return filterStopWords(words);
}
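/*
* Usage sketch, relying only on the methods defined in this class:
* List<Word> withStops = WordSegmenter.segWithStopWords("APDPlat");
* List<Word> withoutStops = WordSegmenter.seg("APDPlat");
*/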
/**
* Removes stop words from the given word list in place.
*
* @param words words to filter
* @return the same list with stop words removed
*/
public static List<Word> filterStopWords(List<Word> words){
Iterator<Word> iter = words.iterator();
while(iter.hasNext()){
Word word = iter.next();
if(StopWord.is(word.getText())){
LOGGER.debug(""+word.getText());
iter.remove();
}
}
return words;
}
/**
* Segments a file line by line with the given algorithm, keeping stop words.
*
* @param input input file
* @param output output file
* @param segmentationAlgorithm segmentation algorithm to use
* @throws Exception if an I/O error occurs
*/
public static void segWithStopWords(File input, File output, SegmentationAlgorithm segmentationAlgorithm) throws Exception{
seg(input, output, false, segmentationAlgorithm);
}
/**
* Segments a file line by line with the default algorithm, keeping stop words.
*
* @param input input file
* @param output output file
* @throws Exception if an I/O error occurs
*/
public static void segWithStopWords(File input, File output) throws Exception{
seg(input, output, false, SegmentationAlgorithm.BidirectionalMaximumMatching);
}
/**
* Segments a file line by line with the given algorithm and removes stop words.
*
* @param input input file
* @param output output file
* @param segmentationAlgorithm segmentation algorithm to use
* @throws Exception if an I/O error occurs
*/
public static void seg(File input, File output, SegmentationAlgorithm segmentationAlgorithm) throws Exception{
seg(input, output, true, segmentationAlgorithm);
}
/**
* Segments a file line by line with the default algorithm and removes stop words.
*
* @param input input file
* @param output output file
* @throws Exception if an I/O error occurs
*/
public static void seg(File input, File output) throws Exception{
seg(input, output, true, SegmentationAlgorithm.BidirectionalMaximumMatching);
}
/**
* Segments a file line by line, optionally removing stop words, and writes the result to the output file.
*
* @param input input file
* @param output output file
* @param removeStopWords whether to remove stop words
* @param segmentationAlgorithm segmentation algorithm to use
* @throws Exception if an I/O error occurs
*/
private static void seg(File input, File output, boolean removeStopWords, SegmentationAlgorithm segmentationAlgorithm) throws Exception{
LOGGER.info(""+input.toString());
float max=(float)Runtime.getRuntime().maxMemory()/1000000;
float total=(float)Runtime.getRuntime().totalMemory()/1000000;
float free=(float)Runtime.getRuntime().freeMemory()/1000000;
String pre=":"+max+"-"+total+"+"+free+"="+(max-total+free);
try(BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(input),"utf-8"));
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(output),"utf-8"))){
long size = Files.size(input.toPath());
LOGGER.info("size:"+size);
LOGGER.info(""+(float)size/1024/1024+" MB");
int textLength=0;
int progress=0;
long start = System.currentTimeMillis();
String line = null;
while((line = reader.readLine()) != null){
if("".equals(line.trim())){
writer.write("\n");
continue;
}
textLength += line.length();
List<Word> words = null;
if(removeStopWords){
words = seg(line, segmentationAlgorithm);
}else{
words = segWithStopWords(line, segmentationAlgorithm);
}
if(words == null){
continue;
}
for(Word word : words){
writer.write(word.getText()+" ");
}
writer.write("\n");
progress += line.length();
if( progress > 500000){
progress = 0;
LOGGER.info(""+(int)((float)textLength*2/size*100)+"%");
}
}
long cost = System.currentTimeMillis() - start;
float rate = textLength/cost;
LOGGER.info(""+textLength);
LOGGER.info(""+cost+" ");
LOGGER.info(""+rate+" /");
}
max=(float)Runtime.getRuntime().maxMemory()/1000000;
total=(float)Runtime.getRuntime().totalMemory()/1000000;
free=(float)Runtime.getRuntime().freeMemory()/1000000;
String post=":"+max+"-"+total+"+"+free+"="+(max-total+free);
LOGGER.info(pre);
LOGGER.info(post);
LOGGER.info(" "+input.toString()+" "+output);
}
private static void demo(){
long start = System.currentTimeMillis();
List<String> sentences = new ArrayList<>();
sentences.add("APDPlat");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add(",");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("wordysc");
sentences.add(",,,, google,");
sentences.add("24");
sentences.add("");
sentences.add("");
sentences.add("");
sentences.add("");
int i=1;
for(String sentence : sentences){
List<Word> words = segWithStopWords(sentence);
LOGGER.info((i++)+": "+sentence);
LOGGER.info(" "+words);
}
long cost = System.currentTimeMillis() - start;
LOGGER.info(": "+cost+" ");
}
public static void processCommand(String... args) {
if(args == null || args.length < 1){
LOGGER.info("");
return;
}
try{
switch(args[0].trim().charAt(0)){
case 'd':
demo();
break;
case 't':
if(args.length < 2){
showUsage();
}else{
StringBuilder str = new StringBuilder();
for(int i=1; i<args.length; i++){
str.append(args[i]).append(" ");
}
List<Word> words = segWithStopWords(str.toString());
LOGGER.info(""+str.toString());
LOGGER.info(""+words.toString());
}
break;
case 'f':
if(args.length != 3){
showUsage();
}else{
segWithStopWords(new File(args[1]), new File(args[2]));
}
break;
default:
StringBuilder str = new StringBuilder();
for(String a : args){
str.append(a).append(" ");
}
List<Word> words = segWithStopWords(str.toString());
LOGGER.info(""+str.toString());
LOGGER.info(""+words.toString());
break;
}
}catch(Exception e){
showUsage();
}
}
private static void run(String encoding) {
try(BufferedReader reader = new BufferedReader(new InputStreamReader(System.in, encoding))){
String line = null;
while((line = reader.readLine()) != null){
if("exit".equals(line)){
System.exit(0);
LOGGER.info("");
return;
}
if(line.trim().equals("")){
continue;
}
processCommand(line.split(" "));
showUsage();
}
} catch (IOException ex) {
LOGGER.error("", ex);
}
}
private static void showUsage(){
LOGGER.info("");
LOGGER.info("********************************************");
LOGGER.info(": command [text] [input] [output]");
LOGGER.info("commanddemotextfile");
LOGGER.info("d t ftext");
LOGGER.info("demo");
LOGGER.info("text APDPlat");
LOGGER.info("file d:/text.txt d:/word.txt");
LOGGER.info("exit");
LOGGER.info("********************************************");
LOGGER.info("");
}
public static void main(String[] args) {
String encoding = "utf-8";
if(args==null || args.length == 0){
showUsage();
run(encoding);
}else if(Charset.isSupported(args[0])){
showUsage();
run(args[0]);
}else{
processCommand(args);
// exit the JVM
System.exit(0);
}
}
}
|
package org.jnosql.artemis.graph;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.jnosql.artemis.Page;
import org.jnosql.artemis.Pagination;
import java.util.Collection;
import java.util.List;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toCollection;
public final class GraphPage<T> implements Page<T> {
private final Pagination pagination;
private final GraphConverter converter;
private final GraphTraversal<?, ?> graphTraversal;
private final List<T> entities;
GraphPage(Pagination pagination, GraphConverter converter, GraphTraversal<?, ?> graphTraversal) {
this.pagination = pagination;
this.converter = converter;
this.graphTraversal = graphTraversal;
this.entities = (List<T>) graphTraversal
.next((int) pagination.getLimit()).stream()
.map(converter::toVertex)
.collect(Collectors.toList());
}
@Override
public Pagination getPagination() {
return pagination.unmodifiable();
}
@Override
public Page<T> next() {
return new GraphPage<>(pagination, converter, graphTraversal);
}
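// Note on the implementation above: next() reuses the same underlying traversal, so each
// page constructed from it consumes the following pagination.getLimit() results.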
@Override
public List<T> getContent() {
return entities;
}
@Override
public <C extends Collection<T>> C getContent(Supplier<C> collectionFactory) {
requireNonNull(collectionFactory, "collectionFactory is required");
return entities.stream().collect(toCollection(collectionFactory));
}
@Override
public Stream<T> get() {
return entities.stream();
}
}
|
package org.basex.server;
import static org.basex.core.Text.*;
import static org.basex.util.Token.*;
import java.io.IOException;
import java.io.OutputStream;
import java.net.Socket;
import org.basex.BaseXServer;
import org.basex.core.CommandParser;
import org.basex.core.Context;
import org.basex.core.Main;
import org.basex.core.Proc;
import org.basex.core.Prop;
import org.basex.core.proc.Close;
import org.basex.core.proc.Exit;
import org.basex.data.Data;
import org.basex.data.XMLSerializer;
import org.basex.io.BufferInput;
import org.basex.io.BufferedOutput;
import org.basex.io.PrintOutput;
import org.basex.query.QueryException;
import org.basex.query.QueryProcessor;
import org.basex.query.item.Item;
import org.basex.query.iter.Iter;
import org.basex.util.Performance;
public final class ServerProcess extends Thread {
/** Database context. */
public final Context context;
/** Socket reference. */
private final Socket socket;
/** Input stream. */
private BufferInput in;
/** Output stream. */
private PrintOutput out;
/** Current process. */
private Proc proc;
/** Timeout thread. */
private Thread timeout;
/** Log. */
private final Log log;
/**
* Constructor.
* @param s socket
* @param b server reference
*/
public ServerProcess(final Socket s, final BaseXServer b) {
context = new Context(b.context);
log = b.log;
socket = s;
}
/**
* Initializes the session via cram-md5 authentication.
* @return success flag
*/
public boolean init() {
try {
final String ts = Long.toString(System.nanoTime());
// send timestamp (cram-md5)
out = new PrintOutput(new BufferedOutput(socket.getOutputStream()));
out.print(ts);
send(true);
// evaluate login data
in = new BufferInput(socket.getInputStream());
final String us = in.readString();
final String pw = in.readString();
context.user = context.users.get(us);
final boolean ok = context.user != null &&
md5(string(context.user.pw) + ts).equals(pw);
send(ok);
if(ok) start();
else if(!us.isEmpty()) log.write(this, "LOGIN " + us, "failed");
return ok;
} catch(final IOException ex) {
ex.printStackTrace();
log.write(ex.getMessage());
return false;
}
}
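// Client-side sketch of the handshake above: read the timestamp ts, then send the user name
// followed by md5(storedPassword + ts), where storedPassword is the value the server keeps in
// context.user.pw; the login succeeds when the two digests match.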
@Override
public void run() {
log.write(this, "LOGIN " + context.user.name, "OK");
String input = null;
try {
while(true) {
try {
byte b = in.readByte();
if(b == 0) {
iterate();
return;
}
input = in.readString(b);
} catch(final IOException ex) {
// this exception is thrown for each session if the server is stopped
exit();
break;
}
// parse input and create process instance
final Performance perf = new Performance();
proc = null;
try {
final Proc[] procs = new CommandParser(input, context, true).parse();
if(procs.length != 1)
throw new QueryException(SERVERPROC, procs.length);
proc = procs[0];
} catch(final QueryException ex) {
// invalid command was sent by a client; create error feedback
log.write(this, input, perf, INFOERROR + ex.extended());
out.write(0);
out.print(ex.extended());
out.write(0);
send(false);
continue;
}
// stop console
if(proc instanceof Exit) {
exit();
break;
}
// process command and send results
startTimer(proc);
final boolean ok = proc.exec(context, out);
out.write(0);
final String inf = proc.info();
out.print(inf.equals(PROGERR) ? SERVERTIME : inf);
out.write(0);
send(ok);
stopTimer();
final String pr = proc.toString().replaceAll("\\r|\\n", " ");
log.write(this, pr, ok ? "OK" : INFOERROR + inf, perf);
}
log.write(this, "LOGOUT " + context.user.name, "OK");
} catch(final IOException ex) {
log.write(this, input, INFOERROR + ex.getMessage());
ex.printStackTrace();
exit();
}
}
/**
* Query is executed in iterate mode.
* @throws IOException Exception
*/
private void iterate() throws IOException {
String input = in.readString();
OutputStream o = socket.getOutputStream();
QueryProcessor processor = new QueryProcessor(input, context);
try {
Iter iter = processor.iter();
XMLSerializer serializer = new XMLSerializer(o);
o.write(0);
Item item;
while(in.read() == 0) {
if((item = iter.next()) != null) {
o.write(0);
item.serialize(serializer);
o.write(0);
} else {
o.write(1);
}
}
serializer.close();
processor.close();
} catch(QueryException ex) {
// invalid command was sent by a client; create error feedback
log.write(this, input, INFOERROR + ex.extended());
o.write(1);
out.print(ex.extended());
out.flush();
o.write(0);
}
}
/**
* Sends the success flag to the client.
* @param ok success flag
* @throws IOException I/O exception
*/
private void send(final boolean ok) throws IOException {
out.write(ok ? 0 : 1);
out.flush();
}
/**
* Starts a timeout thread for the specified process.
* @param p process reference
*/
private void startTimer(final Proc p) {
final long to = context.prop.num(Prop.TIMEOUT);
if(to == 0) return;
timeout = new Thread() {
@Override
public void run() {
Performance.sleep(to * 1000);
p.stop();
}
};
timeout.start();
}
/**
* Stops the current timeout thread.
*/
private void stopTimer() {
if(timeout != null) timeout.interrupt();
}
/**
* Exits the session.
*/
public void exit() {
new Close().exec(context);
if(proc != null) proc.stop();
stopTimer();
context.delete(this);
try {
socket.close();
} catch(final IOException ex) {
log.write(ex.getMessage());
ex.printStackTrace();
}
}
/**
* Returns session information.
* @return database information
*/
String info() {
final Data data = context.data;
return this + (data != null ? ": " + data.meta.name : "");
}
@Override
public String toString() {
final String host = socket.getInetAddress().getHostAddress();
final int port = socket.getPort();
return Main.info("[%:%]", host, port);
}
}
|
package com.alexstyl.specialdates.ui.base;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.view.MenuItem;
import com.alexstyl.specialdates.util.Utils;
import com.novoda.notils.exception.DeveloperError;
import java.util.List;
public class MementoActivity extends AppCompatActivity {
/**
* Override this method in order to let the activity handle the up button.
* When pressed it will navigate the user to the parent of the activity
*/
protected boolean shouldUseHomeAsUp() {
return false;
}
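// Typical subclass override (sketch):
// @Override
// protected boolean shouldUseHomeAsUp() { return true; }
// The parent activity must also be declared in AndroidManifest.xml, otherwise
// getSupportParentActivityIntent() returns null and handleUp() throws a DeveloperError.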
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (shouldUseHomeAsUp()) {
ActionBar bar = getSupportActionBar();
if (bar != null) {
bar.setHomeButtonEnabled(true);
bar.setDisplayHomeAsUpEnabled(true);
}
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
return handleUp();
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
List<Fragment> fragments = getSupportFragmentManager().getFragments();
if (fragments != null) {
for (Fragment fragment : fragments) {
if (fragment != null) {
fragment.onActivityResult(requestCode, resultCode, data);
}
}
}
}
private boolean handleUp() {
if (!shouldUseHomeAsUp()) {
return false;
}
Intent parent = getSupportParentActivityIntent();
complainForNoSetParent(parent);
parent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(parent);
return true;
}
private void complainForNoSetParent(Intent parent) {
if (parent == null) {
throw new DeveloperError("Make sure to set parent Activity through the AndroidManifest if you want to use shouldUseHomeAsUp()");
}
}
protected Context context() {
return this;
}
protected boolean supportsTransitions() {
return Utils.hasKitKat();
}
}
|
package org.databene.commons;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.databene.commons.iterator.ArrayIterator;
/**
* Provides array-related operations.<br/>
* <br/>
* Created: 09.06.2006 21:31:49
* @since 0.1
* @author Volker Bergmann
*/
public final class ArrayUtil {
public static <T> T[] copyOfRange(T[] array, int offset, int length) {
return copyOfRange(array, offset, length, componentType(array));
}
@SuppressWarnings("unchecked")
public static <T> Class<T> componentType(T[] array) {
Class<T[]> resultType = (Class<T[]>) array.getClass();
Class<T> componentType = (Class<T>) resultType.getComponentType();
return componentType;
}
@SuppressWarnings("unchecked")
public static <T> T[] copyOfRange(Object[] array, int offset, int length, Class<T> componentType) {
T[] result = (T[]) Array.newInstance(componentType, length);
System.arraycopy(array, offset, result, 0, length);
return result;
}
public static <T> T[] removeElement(T item, T[] array) {
int index = indexOf(item, array);
return remove(index, array);
}
@SuppressWarnings("unchecked")
public static <T> T[] remove(int indexToRemove, T[] array) {
Class<T> componentType = componentType(array);
T[] result = (T[]) Array.newInstance(componentType, array.length - 1);
if (indexToRemove > 0)
System.arraycopy(array, 0, result, 0, indexToRemove);
System.arraycopy(array, indexToRemove + 1, result, indexToRemove, array.length - indexToRemove - 1);
return result;
}
public static <T> T[] removeAll(T[] toRemove, T[] target) {
Class<T> componentType = componentType(target);
ArrayBuilder<T> builder = new ArrayBuilder<T>(componentType);
for (T element : target)
if (!contains(element, toRemove))
builder.add(element);
return builder.toArray();
}
/**
* Tells if an array contains a specific element
* @param element the element to search
* @param array the array to scan
* @return true if the element was found, else false
*/
public static boolean contains(Object element, Object array) {
int length = Array.getLength(array);
for (int i = 0; i < length; i++) {
Object o = Array.get(array, i);
if (NullSafeComparator.equals(o, element))
return true;
}
return false;
}
public static <T> boolean containsAll(T[] subArray, T[] superArray) {
for (T t : subArray)
if (!contains(t, superArray))
return false;
return true;
}
public static int indexOf(byte[] subArray, byte[] array) {
return indexOf(subArray, 0, array);
}
public static int indexOf(byte[] subArray, int fromIndex, byte[] array) {
for (int i = fromIndex; i <= array.length - subArray.length; i++) {
boolean match = true;
for (int j = 0; j < subArray.length; j++) {
if (array[i + j] != subArray[j])
match = false;
}
if (match)
return i;
}
return -1;
}
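// e.g. indexOf(new byte[] { 3, 4 }, new byte[] { 1, 2, 3, 4, 5 }) returns 2,
// and -1 when the sub-array does not occur.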
/**
* Tells if an array ends with a specified sub array
* @param candidates the array to scan
* @param searched the sub array that is searched
* @return true if the array ends with or equals the searched sub array
*/
public static <T> boolean endsWithSequence(T[] candidates, T[] searched) {
if (searched.length > candidates.length)
return false;
for (int i = 0; i < searched.length; i++) {
if (!candidates[candidates.length - searched.length + i].equals(searched[i]))
return false;
}
return true;
}
@SuppressWarnings("unchecked")
public static <T> T[] commonElements(T[]... sources) {
Class<T> componentType = null;
for (int arrayNumber = 0; arrayNumber < sources.length && componentType == null; arrayNumber++) {
T[] source = sources[arrayNumber];
for (int index = 0; index < source.length && componentType == null; index++)
if (source[index] != null)
componentType = (Class<T>) source[index].getClass();
}
return commonElements(componentType, sources);
}
public static <T> T[] commonElements(Class<T> componentType, T[]... sources) {
ArrayBuilder<T> builder = new ArrayBuilder<T>(componentType);
T[] firstArray = sources[0];
for (T element : firstArray) {
boolean common = true;
for (int i = 1; i < sources.length; i++)
if (!ArrayUtil.contains(element, sources[i])) {
common = false;
break;
}
if (common)
builder.add(element);
}
return builder.toArray();
}
/**
* Tells if two arrays have the same content, independent of the ordering
* @param a1 the first array to compare
* @param a2 the second array to compare
* @return true if the array have the same content, independent of the ordering
*/
public static <T> boolean equalsIgnoreOrder(T[] a1, T[] a2) {
if (a1 == a2)
return true;
if (a1 == null)
return false;
if (a1.length != a2.length)
return false;
List<T> l1 = new ArrayList<T>(a1.length);
for (T item : a1)
l1.add(item);
for (int i = a1.length - 1; i >= 0; i--)
if (contains(a1[i], a2))
l1.remove(i);
else
return false;
return l1.size() == 0;
}
public static boolean equals(Object a1, Object a2) {
if (a1 == a2)
return true;
if (a1 == null || !(a1.getClass().isArray()) || !(a2.getClass().isArray()))
return false;
int length = Array.getLength(a1);
if (length != Array.getLength(a2))
return false;
List<Object> l1 = new ArrayList<Object>(length);
for (int i = 0; i < length ; i++)
l1.add(Array.get(a1, i));
for (int i = length - 1; i >= 0; i--) {
if (contains(Array.get(a1, i), a2))
l1.remove(i);
else
return false;
}
return l1.size() == 0;
}
/**
* Tells the first index under which an item is found in an array.
* @param searchedItem
* @param array
* @return the index of the searched item
*/
public static <T> int indexOf(T searchedItem, T[] array) {
for (int i = 0; i < array.length; i++) {
T candidate = array[i];
if (NullSafeComparator.equals(candidate, searchedItem))
return i;
}
return -1;
}
@SuppressWarnings("unchecked")
public static <T> T[] toArray(T... values) {
Class<T> componentType = (Class<T>) (values.length > 0 ? values[0].getClass() : Object.class);
return buildObjectArrayOfType(componentType, values);
}
public static int[] toIntArray(int... values) {
int[] array = new int[values.length];
System.arraycopy(values, 0, array, 0, values.length);
return array;
}
public static char[] toCharArray(char... values) {
char[] array = new char[values.length];
System.arraycopy(values, 0, array, 0, values.length);
return array;
}
public static Object buildArrayOfType(Class<?> componentType, Object ... values) {
Object array = Array.newInstance(componentType, values.length);
for (int i = 0; i < values.length; i++)
Array.set(array, i, values[i]); // explicit assignment since System.arraycopy() does not perform autoboxing
return array;
}
@SuppressWarnings("unchecked")
public static <T> T[] buildObjectArrayOfType(Class<T> componentType, T ... values) {
T[] array = (T[]) Array.newInstance(componentType, values.length);
System.arraycopy(values, 0, array, 0, values.length);
return array;
}
public static <T> Iterator<T> iterator(T[] array) {
return new ArrayIterator<T>(array);
}
public static <T> T[] revert(T[] array) {
for (int i = (array.length >> 1) - 1; i >= 0; i--) {
T tmp = array[i];
array[i] = array[array.length - 1 - i];
array[array.length - 1 - i] = tmp;
}
return array;
}
public static char[] revert(char[] array) {
for (int i = (array.length >> 1) - 1; i >= 0; i--) {
char tmp = array[i];
array[i] = array[array.length - 1 - i];
array[array.length - 1 - i] = tmp;
}
return array;
}
@SuppressWarnings("rawtypes")
public static Class arrayType(Class componentType) {
if (componentType == byte.class)
return byte[].class;
else if (componentType == char.class)
return char[].class;
else if (componentType == int.class)
return int[].class;
else if (componentType == long.class)
return long[].class;
else if (componentType == short.class)
return short[].class;
else if (componentType == double.class)
return double[].class;
else if (componentType == float.class)
return float[].class;
else if (componentType == boolean.class)
return boolean[].class;
Object[] array = (Object[]) Array.newInstance(componentType, 0);
return array.getClass();
}
@SuppressWarnings("unchecked")
public static <T> T[] newInstance(Class<T> componentType, int length) {
return (T[]) Array.newInstance(componentType, length);
}
public static <T> T[] append(T[] newValues, T[] array) {
if (array == null) {
return newValues.clone();
} else {
T[] newArray = newInstance(componentType(array), array.length + newValues.length);
System.arraycopy(array, 0, newArray, 0, array.length);
for (int i = 0; i < newValues.length; i++)
newArray[array.length + i] = newValues[i];
return newArray;
}
}
@SuppressWarnings("unchecked")
public static <T> T[] append(T value, T[] array) {
if (array == null) {
return toArray(value);
} else {
T[] newArray = newInstance(componentType(array), array.length + 1);
System.arraycopy(array, 0, newArray, 0, array.length);
newArray[array.length] = value;
return newArray;
}
}
public static byte[] append(byte value, byte[] array) {
if (array == null) {
return new byte[] { value };
} else {
byte[] newArray = new byte[array.length + 1];
System.arraycopy(array, 0, newArray, 0, array.length);
newArray[array.length] = value;
return newArray;
}
}
public static boolean isEmpty(Object values) {
return (values == null || Array.getLength(values) == 0);
}
public static <T> T lastElementOf(T[] array) {
if (isEmpty(array))
return null;
return array[array.length - 1];
}
public static Integer lastElementOf(int[] array) {
if (array == null || array.length == 0)
return -1;
return array[array.length - 1];
}
public static boolean allNull(Object[] values) {
if (values == null)
return true;
for (Object value : values)
if (value != null)
return false;
return true;
}
}
|
package com.autonomy.abc.selenium.keywords;
import com.autonomy.abc.selenium.actions.ServiceBase;
import com.autonomy.abc.selenium.config.Application;
import com.autonomy.abc.selenium.config.ApplicationType;
import com.autonomy.abc.selenium.element.GritterNotice;
import com.autonomy.abc.selenium.menu.NavBarTabId;
import com.autonomy.abc.selenium.page.ElementFactory;
import com.autonomy.abc.selenium.page.keywords.CreateNewKeywordsPage;
import com.autonomy.abc.selenium.page.keywords.KeywordsPage;
import com.autonomy.abc.selenium.page.search.SearchPage;
import com.autonomy.abc.selenium.util.Language;
import org.openqa.selenium.*;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.List;
public class KeywordService extends ServiceBase {
private final static Logger LOGGER = LoggerFactory.getLogger(KeywordService.class);
private KeywordsPage keywordsPage;
private CreateNewKeywordsPage newKeywordsPage;
public KeywordService(Application application, ElementFactory elementFactory) {
super(application, elementFactory);
}
public KeywordsPage goToKeywords() {
getBody().getSideNavBar().switchPage(NavBarTabId.KEYWORDS);
keywordsPage = getElementFactory().getKeywordsPage();
return keywordsPage;
}
public CreateNewKeywordsPage goToKeywordsWizard() {
goToKeywords();
keywordsPage.createNewKeywordsButton().click();
newKeywordsPage = getElementFactory().getCreateNewKeywordsPage();
return newKeywordsPage;
}
public SearchPage addSynonymGroup(String... synonyms) {
return addSynonymGroup(Arrays.asList(synonyms));
}
public SearchPage addSynonymGroup(Iterable<String> synonyms) {
return addSynonymGroup(Language.ENGLISH, synonyms);
}
public SearchPage addSynonymGroup(Language language, String... synonyms) {
return addSynonymGroup(language, Arrays.asList(synonyms));
}
public SearchPage addSynonymGroup(Language language, Iterable<String> synonyms) {
addKeywords(KeywordWizardType.SYNONYMS, language, synonyms);
SearchPage searchPage = getElementFactory().getSearchPage();
searchPage.waitForSearchLoadIndicatorToDisappear();
return searchPage;
}
public KeywordsPage addBlacklistTerms(String... blacklists) {
return addBlacklistTerms(Arrays.asList(blacklists));
}
public KeywordsPage addBlacklistTerms(Iterable<String> blacklists) {
return addBlacklistTerms(Language.ENGLISH, blacklists);
}
public KeywordsPage addBlacklistTerms(Language language, String... blacklists) {
return addBlacklistTerms(language, Arrays.asList(blacklists));
}
public KeywordsPage addBlacklistTerms(Language language, Iterable<String> blacklists) {
addKeywords(KeywordWizardType.BLACKLIST, language, blacklists);
new WebDriverWait(getDriver(), 30).until(GritterNotice.notificationContaining("to the blacklist"));
// terms appear asynchronously - must wait until they have ALL been added
new WebDriverWait(getDriver(), 20).until(GritterNotice.notificationsDisappear());
return getElementFactory().getKeywordsPage();
}
private void addKeywords(KeywordWizardType type, Language language, Iterable<String> keywords) {
goToKeywordsWizard();
if (getApplication().getType().equals(ApplicationType.HOSTED) && !language.equals(Language.ENGLISH)) {
LOGGER.warn("hosted mode does not support foreign keywords, using English instead");
language = Language.ENGLISH;
}
new KeywordGroup(type, language, keywords).makeWizard(newKeywordsPage).apply();
}
public KeywordsPage deleteAll(KeywordFilter type) {
goToKeywords();
keywordsPage.filterView(type);
int count = 0;
for (final String language : keywordsPage.getLanguageList()) {
count += keywordsPage.countKeywords();
try {
tryDeleteAll(language);
} catch (StaleElementReferenceException e) {
return deleteAll(type);
}
}
new WebDriverWait(getDriver(), 10 * (count + 1)).withMessage("deleting keywords").until(ExpectedConditions.textToBePresentInElement(keywordsPage, "No keywords found"));
return keywordsPage;
}
private void tryDeleteAll(String language) throws StaleElementReferenceException {
try {
keywordsPage.selectLanguage(language);
} catch (WebDriverException e) {
/* language dropdown disabled */
}
List<WebElement> keywordGroups = keywordsPage.allKeywordGroups();
for (WebElement group : keywordGroups) {
removeKeywordGroupAsync(group);
}
}
private void removeKeywordGroupAsync(WebElement group) {
List<WebElement> removeButtons = keywordsPage.removeButtons(group);
if (removeButtons.size() > 1) {
removeButtons.remove(0);
}
for (WebElement removeButton : removeButtons) {
removeButton.click();
}
}
public void removeKeywordGroup(WebElement group) {
removeKeywordGroupAsync(group);
keywordsPage.waitForRefreshIconToDisappear();
}
public KeywordsPage deleteKeyword(String term) {
goToKeywords();
keywordsPage.deleteKeyword(term);
return keywordsPage;
}
}
|
package org.dynmap.hdmap;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.LineNumberReader;
import java.util.ArrayList;
import java.util.HashMap;
import org.bukkit.Material;
import org.dynmap.Log;
/**
* Custom block models - used for non-cube blocks to represent the physical volume associated with the block
* Used by perspectives to determine if rays have intersected a block that doesn't occupy its whole block
*/
public class HDBlockModels {
private int blockid;
private int databits;
private long blockflags[];
private int nativeres;
private HashMap<Integer, short[]> scaledblocks;
private static HashMap<Integer, HDBlockModels> models_by_id_data = new HashMap<Integer, HDBlockModels>();
public static class HDScaledBlockModels {
private short[][][] modelvectors;
public final short[] getScaledModel(int blocktype, int blockdata) {
if(modelvectors[blocktype] == null) {
return null;
}
return modelvectors[blocktype][blockdata];
}
}
private static HashMap<Integer, HDScaledBlockModels> scaled_models_by_scale = new HashMap<Integer, HDScaledBlockModels>();
/**
* Block definition - copy from other
*/
public HDBlockModels(Material blocktype, int databits, HDBlockModels m) {
this.blockid = blocktype.getId();
this.databits = databits;
this.nativeres = m.nativeres;
this.blockflags = m.blockflags;
for(int i = 0; i < 16; i++) {
if((databits & (1<<i)) != 0)
models_by_id_data.put((blockid<<4)+i, this);
}
}
/**
* Block definition - positions correspond to Bukkit coordinates (+X is south, +Y is up, +Z is west)
* @param blocktype - block type
* @param databits - bitmap of block data bits matching this model (bit N is set if data=N would match)
* @param nativeres - native subblocks per edge of cube (up to 64)
* @param blockflags - array of native^2 long integers representing volume of block (bit X of element (nativeres*Y+Z) is set if that subblock is filled)
* if array is short, other elements are assumed to be zero (fills from bottom of block up)
*/
public HDBlockModels(Material blocktype, int databits, int nativeres, long[] blockflags) {
this(blocktype.getId(), databits, nativeres, blockflags);
}
/**
* Block definition - positions correspond to Bukkit coordinates (+X is south, +Y is up, +Z is west)
* @param blockid - block ID
* @param databits - bitmap of block data bits matching this model (bit N is set if data=N would match)
* @param nativeres - native subblocks per edge of cube (up to 64)
* @param blockflags - array of native^2 long integers representing volume of block (bit X of element (nativeres*Y+Z) is set if that subblock is filled)
* if array is short, other elements are assumed to be zero (fills from bottom of block up)
*/
public HDBlockModels(int blockid, int databits, int nativeres, long[] blockflags) {
this.blockid = blockid;
this.databits = databits;
this.nativeres = nativeres;
this.blockflags = new long[nativeres * nativeres];
System.arraycopy(blockflags, 0, this.blockflags, 0, blockflags.length);
for(int i = 0; i < 16; i++) {
if((databits & (1<<i)) != 0) {
models_by_id_data.put((blockid<<4)+i, this);
}
}
}
/**
* Test if given native block is filled
*/
public final boolean isSubblockSet(int x, int y, int z) {
return ((blockflags[nativeres*y+z] & (1 << x)) != 0);
}
/**
* Set subblock value
*/
public final void setSubblock(int x, int y, int z, boolean isset) {
if(isset)
blockflags[nativeres*y+z] |= (1 << x);
else
blockflags[nativeres*y+z] &= ~(1 << x);
}
/**
* Get scaled map of block: will return array of alpha levels, corresponding to how much of the
* scaled subblocks are occupied by the original blocks (indexed by Y*res*res + Z*res + X)
* @param res - requested scale (res subblocks per edge of block)
* @return array of alpha values (0-255), corresponding to resXresXres subcubes of block
*/
public short[] getScaledMap(int res) {
if(scaledblocks == null) { scaledblocks = new HashMap<Integer, short[]>(); }
short[] map = scaledblocks.get(Integer.valueOf(res));
if(map == null) {
map = new short[res*res*res];
if(res == nativeres) {
for(int i = 0; i < blockflags.length; i++) {
for(int j = 0; j < nativeres; j++) {
if((blockflags[i] & (1 << j)) != 0)
map[res*i+j] = 255;
}
}
}
/* If scaling from smaller sub-blocks to larger, each subblock contributes to 1-2 blocks
* on each axis: need to calculate crossovers for each, and iterate through smaller
* blocks to accumulate contributions
*/
else if(res > nativeres) {
int weights[] = new int[res];
int offsets[] = new int[res];
/* LCM of resolutions is used as length of line (res * nativeres)
* Each native block is (res) long, each scaled block is (nativeres) long
* Each scaled block overlaps 1 or 2 native blocks: starting with native block 'offsets[]' with
* 'weights[]' of its (res) width in the first, and the rest in the second
*/
for(int v = 0, idx = 0; v < res*nativeres; v += nativeres, idx++) {
offsets[idx] = (v/res); /* Get index of the first native block we draw from */
if((v+nativeres-1)/res == offsets[idx]) { /* If scaled block ends in same native block */
weights[idx] = nativeres;
}
else { /* Else, see how much is in first one */
weights[idx] = (offsets[idx]*res + res) - v;
}
}
/* Now, use weights and indices to fill in scaled map */
for(int y = 0, off = 0; y < res; y++) {
int ind_y = offsets[y];
int wgt_y = weights[y];
for(int z = 0; z < res; z++) {
int ind_z = offsets[z];
int wgt_z = weights[z];
for(int x = 0; x < res; x++, off++) {
int ind_x = offsets[x];
int wgt_x = weights[x];
int raw_w = 0;
for(int xx = 0; xx < 2; xx++) {
int wx = (xx==0)?wgt_x:(nativeres-wgt_x);
if(wx == 0) continue;
for(int yy = 0; yy < 2; yy++) {
int wy = (yy==0)?wgt_y:(nativeres-wgt_y);
if(wy == 0) continue;
for(int zz = 0; zz < 2; zz++) {
int wz = (zz==0)?wgt_z:(nativeres-wgt_z);
if(wz == 0) continue;
if(isSubblockSet(ind_x+xx, ind_y+yy, ind_z+zz)) {
raw_w += wx*wy*wz;
}
}
}
}
map[off] = (short)((255*raw_w) / (nativeres*nativeres*nativeres));
if(map[off] > 255) map[off] = 255;
if(map[off] < 0) map[off] = 0;
}
}
}
}
else { /* nativeres > res */
int weights[] = new int[nativeres];
int offsets[] = new int[nativeres];
/* LCM of resolutions is used as length of line (res * nativeres)
* Each native block is (res) long, each scaled block is (nativeres) long
* Each native block overlaps 1 or 2 scaled blocks: starting with scaled block 'offsets[]' with
* 'weights[]' of its (res) width in the first, and the rest in the second
*/
for(int v = 0, idx = 0; v < res*nativeres; v += res, idx++) {
offsets[idx] = (v/nativeres); /* Get index of the first scaled block we draw to */
if((v+res-1)/nativeres == offsets[idx]) { /* If native block ends in same scaled block */
weights[idx] = res;
}
else { /* Else, see how much is in first one */
weights[idx] = (offsets[idx]*nativeres + nativeres) - v;
}
}
/* Now, use weights and indices to fill in scaled map */
long accum[] = new long[map.length];
for(int y = 0; y < nativeres; y++) {
int ind_y = offsets[y];
int wgt_y = weights[y];
for(int z = 0; z < nativeres; z++) {
int ind_z = offsets[z];
int wgt_z = weights[z];
for(int x = 0; x < nativeres; x++) {
if(isSubblockSet(x, y, z)) {
int ind_x = offsets[x];
int wgt_x = weights[x];
for(int xx = 0; xx < 2; xx++) {
int wx = (xx==0)?wgt_x:(res-wgt_x);
if(wx == 0) continue;
for(int yy = 0; yy < 2; yy++) {
int wy = (yy==0)?wgt_y:(res-wgt_y);
if(wy == 0) continue;
for(int zz = 0; zz < 2; zz++) {
int wz = (zz==0)?wgt_z:(res-wgt_z);
if(wz == 0) continue;
accum[(ind_y+yy)*res*res + (ind_z+zz)*res + (ind_x+xx)] +=
wx*wy*wz;
}
}
}
}
}
}
}
for(int i = 0; i < map.length; i++) {
map[i] = (short)(accum[i]*255/nativeres/nativeres/nativeres);
if(map[i] > 255) map[i] = 255;
if(map[i] < 0) map[i] = 0;
}
}
scaledblocks.put(Integer.valueOf(res), map);
}
return map;
}
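/*
* Worked example of the weight/offset computation above for res=3, nativeres=2:
* the shared line length is res*nativeres=6, v takes 0, 2, 4 and yields
* offsets = {0, 0, 1} and weights = {2, 1, 2}, i.e. scaled cell 1 draws half of its
* extent from native cell 0 and the rest from native cell 1 on that axis.
*/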
/**
* Get scaled set of models for all modelled blocks
* @param scale - requested scale (subblocks per block edge)
* @return scaled block models for the given scale
*/
public static HDScaledBlockModels getModelsForScale(int scale) {
HDScaledBlockModels model = scaled_models_by_scale.get(Integer.valueOf(scale));
if(model == null) {
model = new HDScaledBlockModels();
short[][][] blockmodels = new short[256][][];
for(HDBlockModels m : models_by_id_data.values()) {
short[][] row = blockmodels[m.blockid];
if(row == null) {
row = new short[16][];
blockmodels[m.blockid] = row;
}
short[] smod = null;
for(int i = 0; i < 16; i++) {
if((m.databits & (1 << i)) != 0) {
if(smod == null) smod = m.getScaledMap(scale);
row[i] = smod;
/* if((m.blockid == 50) && (i == 5)) {
String v0 = "";
String v = "";
for(int x = 0; x < m.blockflags.length; x++)
v0 = v0 + " " + m.blockflags[x];
for(int x = 0; x < smod.length; x++) {
v = v + " " + smod[x];
if((x%scale) == (scale-1)) v += "|";
}
Log.info("src=" + v0);
Log.info("scaled=" + v);
} */
}
}
}
model.modelvectors = blockmodels;
scaled_models_by_scale.put(scale, model);
}
return model;
}
/**
* Load models
*/
public static void loadModels(File datadir) {
/* Load block models */
loadModelFile(new File(datadir, "models.txt"));
File custom = new File(datadir, "custom-models.txt");
if(custom.canRead()) {
loadModelFile(custom);
}
else {
try {
FileWriter fw = new FileWriter(custom);
fw.write("# The user is free to add new and custom models here - Dynmap's install will not overwrite it\n");
fw.close();
} catch (IOException iox) {
}
}
}
/**
* Load models from file
*/
private static void loadModelFile(File modelfile) {
LineNumberReader rdr = null;
int cnt = 0;
try {
String line;
ArrayList<HDBlockModels> modlist = new ArrayList<HDBlockModels>();
int layerbits = 0;
int rownum = 0;
int scale = 0;
rdr = new LineNumberReader(new FileReader(modelfile));
while((line = rdr.readLine()) != null) {
if(line.startsWith("block:")) {
ArrayList<Integer> blkids = new ArrayList<Integer>();
int databits = 0;
scale = 0;
line = line.substring(6);
String[] args = line.split(",");
for(String a : args) {
String[] av = a.split("=");
if(av.length < 2) continue;
if(av[0].equals("id")) {
blkids.add(Integer.parseInt(av[1]));
}
else if(av[0].equals("data")) {
if(av[1].equals("*"))
databits = 0xFFFF;
else
databits |= (1 << Integer.parseInt(av[1]));
}
else if(av[0].equals("scale")) {
scale = Integer.parseInt(av[1]);
}
}
/* If we have everything, build block */
if((blkids.size() > 0) && (databits != 0) && (scale > 0)) {
modlist.clear();
for(Integer id : blkids) {
modlist.add(new HDBlockModels(id.intValue(), databits, scale, new long[0]));
cnt++;
}
}
else {
Log.severe("Block model missing required parameters = line " + rdr.getLineNumber() + " of " + modelfile.getPath());
}
layerbits = 0;
}
else if(line.startsWith("layer:")) {
line = line.substring(6);
String args[] = line.split(",");
layerbits = 0;
rownum = 0;
for(String a: args) {
layerbits |= (1 << Integer.parseInt(a));
}
}
else if(line.startsWith("rotate:")) {
line = line.substring(7);
String args[] = line.split(",");
int id = -1;
int data = -1;
int rot = -1;
for(String a : args) {
String[] av = a.split("=");
if(av.length < 2) continue;
if(av[0].equals("id")) { id = Integer.parseInt(av[1]); }
if(av[0].equals("data")) { data = Integer.parseInt(av[1]); }
if(av[0].equals("rot")) { rot = Integer.parseInt(av[1]); }
}
/* get old model to be rotated */
HDBlockModels mod = models_by_id_data.get((id<<4)+data);
if((mod != null) && ((rot%90) == 0)) {
for(int x = 0; x < scale; x++) {
for(int y = 0; y < scale; y++) {
for(int z = 0; z < scale; z++) {
if(mod.isSubblockSet(x, y, z) == false) continue;
switch(rot) {
case 0:
for(HDBlockModels bm : modlist)
bm.setSubblock(x, y, z, true);
break;
case 90:
for(HDBlockModels bm : modlist)
bm.setSubblock(scale-z-1, y, x, true);
break;
case 180:
for(HDBlockModels bm : modlist)
bm.setSubblock(scale-x-1, y, scale-z-1, true);
break;
case 270:
for(HDBlockModels bm : modlist)
bm.setSubblock(z, y, scale-x-1, true);
break;
}
}
}
}
}
}
else if(line.startsWith("#") || line.startsWith(";")) {
}
else if(layerbits != 0) { /* If we're working pattern lines */
/* Layerbits determine Y, rows count from North to South (X=0 to X=N-1), columns Z are West to East (N-1 to 0) */
for(int i = 0; (i < scale) && (i < line.length()); i++) {
if(line.charAt(i) == '*') { /* If an asterisk, set flag */
for(int y = 0; y < scale; y++) {
if((layerbits & (1<<y)) != 0) {
for(HDBlockModels mod : modlist) {
mod.setSubblock(rownum, y, scale-i-1, true);
}
}
}
}
}
/* See if we're done with layer */
rownum++;
if(rownum >= scale) {
rownum = 0;
layerbits = 0;
}
}
}
Log.verboseinfo("Loaded " + cnt + " block models from " + modelfile.getPath());
} catch (IOException iox) {
Log.severe("Error reading models.txt - " + iox.toString());
} catch (NumberFormatException nfx) {
Log.severe("Format error - line " + rdr.getLineNumber() + " of " + modelfile.getPath());
} finally {
if(rdr != null) {
try {
rdr.close();
rdr = null;
} catch (IOException e) {
/* Ignore errors while closing the reader */
}
}
}
}
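/*
 * A hypothetical models.txt fragment illustrating the syntax parsed above;
 * the ids, data values and pattern are invented for illustration only.
 * A "block:" line declares the block ids, data bits and scale; "layer:"
 * selects the Y layers that the following pattern rows apply to; each row of
 * '*' characters fills sub-blocks (rows run X=0..N-1, columns run Z=N-1..0);
 * and "rotate:" copies an already defined model (looked up by id/data) into
 * the current block, rotated by 90/180/270 degrees.
 *
 *   block:id=53,data=0,scale=2
 *   layer:0,1
 *   **
 *   *-
 *   block:id=53,data=1,scale=2
 *   rotate:id=53,data=0,rot=90
 */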
}
|
package org.gitlab4j.api;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.List;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.Response;
import org.gitlab4j.api.GitLabApi.ApiVersion;
import org.gitlab4j.api.models.Branch;
import org.gitlab4j.api.models.Tag;
import org.gitlab4j.api.models.TreeItem;
import org.gitlab4j.api.utils.FileUtils;
/**
* This class provides an entry point to all the GitLab API repository calls.
*/
public class RepositoryApi extends AbstractApi {
public RepositoryApi(GitLabApi gitLabApi) {
super(gitLabApi);
}
/**
* Get a list of repository branches from a project, sorted by name alphabetically.
*
* GET /projects/:id/repository/branches
*
* @param projectId the project to get the list of branches for
* @return the list of repository branches for the specified project ID
* @throws GitLabApiException if any exception occurs
*/
public List<Branch> getBranches(Integer projectId) throws GitLabApiException {
Form formData = new GitLabApiForm().withParam("per_page", getDefaultPerPage());
Response response = get(Response.Status.OK, formData.asMap(), "projects", projectId, "repository", "branches");
return (response.readEntity(new GenericType<List<Branch>>() {
}));
}
/**
* Get a single project repository branch.
*
* GET /projects/:id/repository/branches/:branch
*
* @param projectId the project to get the branch for
* @param branchName the name of the branch to get
* @return the branch info for the specified project ID/branch name pair
* @throws GitLabApiException if any exception occurs
*/
public Branch getBranch(Integer projectId, String branchName) throws GitLabApiException {
Response response = get(Response.Status.OK, null, "projects", projectId, "repository", "branches", branchName);
return (response.readEntity(Branch.class));
}
/**
* Creates a branch for the project. Supported as of version 6.8.x.
*
* POST /projects/:id/repository/branches
*
* @param projectId the project to create the branch for
* @param branchName the name of the branch to create
* @param ref Source to create the branch from, can be an existing branch, tag or commit SHA
* @return the branch info for the created branch
* @throws GitLabApiException if any exception occurs
*/
public Branch createBranch(Integer projectId, String branchName, String ref) throws GitLabApiException {
Form formData = new GitLabApiForm()
.withParam(isApiVersion(ApiVersion.V3) ? "branch_name" : "branch", branchName, true)
.withParam("ref", ref, true);
Response response = post(Response.Status.CREATED, formData.asMap(), "projects", projectId, "repository", "branches");
return (response.readEntity(Branch.class));
}
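/*
 * Example (a minimal sketch, not part of the original class): creating a
 * feature branch from master. "gitLabApi" is assumed to be an already
 * configured GitLabApi instance, and the project id and branch names are
 * invented values.
 *
 *   RepositoryApi repoApi = gitLabApi.getRepositoryApi();
 *   Branch branch = repoApi.createBranch(1234, "feature/example", "master");
 */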
/**
* Delete a single project repository branch.
*
* DELETE /projects/:id/repository/branches/:branch
*
* @param projectId the project that the branch belongs to
* @param branchName the name of the branch to delete
* @throws GitLabApiException if any exception occurs
*/
public void deleteBranch(Integer projectId, String branchName) throws GitLabApiException {
Response.Status expectedStatus = (isApiVersion(ApiVersion.V3) ? Response.Status.OK : Response.Status.NO_CONTENT);
delete(expectedStatus, null, "projects", projectId, "repository", "branches", urlEncode(branchName));
}
/**
* Protects a single project repository branch. This is an idempotent function,
* protecting an already protected repository branch will not produce an error.
*
* PUT /projects/:id/repository/branches/:branch/protect
*
* @param projectId the ID of the project to protect
* @param branchName the name of the branch to protect
* @return the branch info for the protected branch
* @throws GitLabApiException if any exception occurs
*/
public Branch protectBranch(Integer projectId, String branchName) throws GitLabApiException {
Response response = put(Response.Status.OK, null, "projects", projectId, "repository", "branches", branchName, "protect");
return (response.readEntity(Branch.class));
}
/**
* Unprotects a single project repository branch. This is an idempotent function, unprotecting an
* already unprotected repository branch will not produce an error.
*
* PUT /projects/:id/repository/branches/:branch/unprotect
*
* @param projectId the ID of the project to un-protect
* @param branchName the name of the branch to un-protect
* @return the branch info for the unprotected branch
* @throws GitLabApiException if any exception occurs
*/
public Branch unprotectBranch(Integer projectId, String branchName) throws GitLabApiException {
Response response = put(Response.Status.OK, null, "projects", projectId, "repository", "branches", branchName, "unprotect");
return (response.readEntity(Branch.class));
}
/**
* Get a list of repository tags from a project, sorted by name in reverse alphabetical order.
*
* GET /projects/:id/repository/tags
*
* @param projectId the ID of the project to get the tags for
* @return the list of tags for the specified project ID
* @throws GitLabApiException if any exception occurs
*/
public List<Tag> getTags(Integer projectId) throws GitLabApiException {
Response response = get(Response.Status.OK, null, "projects", projectId, "repository", "tags");
return (response.readEntity(new GenericType<List<Tag>>() {
}));
}
/**
* Creates a tag on a particular ref of the given project. A message and release notes are optional.
*
* POST /projects/:id/repository/tags
*
* @param projectId the ID of the project
* @param tagName the name of the tag, must be unique for the project
* @param ref the git ref to place the tag on
* @param message the message to include with the tag (optional)
* @param releaseNotes the release notes for the tag (optional)
* @return a Tag instance containing info on the newly created tag
* @throws GitLabApiException if any exception occurs
*/
public Tag createTag(Integer projectId, String tagName, String ref, String message, String releaseNotes) throws GitLabApiException {
Form formData = new GitLabApiForm()
.withParam("tag_name", tagName, true)
.withParam("ref", ref, true)
.withParam("message", message, false)
.withParam("release_description", releaseNotes, false);
Response response = post(Response.Status.CREATED, formData.asMap(), "projects", projectId, "repository", "tags");
return (response.readEntity(Tag.class));
}
/**
* Creates a tag on a particular ref of a given project. A message and a File instance containing the
* release notes are optional. This method is the same as {@link #createTag(Integer, String, String, String, String)},
* but instead allows the release notes to be supplied in a file.
*
* POST /projects/:id/repository/tags
*
* @param projectId the ID of the project
* @param tagName the name of the tag, must be unique for the project
* @param ref the git ref to place the tag on
* @param message the message to include with the tag (optional)
* @param releaseNotesFile a File instance whose contents are the release notes (optional)
* @return a Tag instance containing info on the newly created tag
* @throws GitLabApiException if any exception occurs
*/
public Tag createTag(Integer projectId, String tagName, String ref, String message, File releaseNotesFile) throws GitLabApiException {
String releaseNotes;
if (releaseNotesFile != null) {
try {
releaseNotes = FileUtils.readFileContents(releaseNotesFile);
} catch (IOException ioe) {
throw (new GitLabApiException(ioe));
}
} else {
releaseNotes = null;
}
return (createTag(projectId, tagName, ref, message, releaseNotes));
}
/**
* Deletes the tag from a project with the specified tag name.
*
* DELETE /projects/:id/repository/tags/:tag_name
*
* @param projectId the ID of the project
* @param tagName The name of the tag to delete
* @throws GitLabApiException if any exception occurs
*/
public void deleteTag(Integer projectId, String tagName) throws GitLabApiException {
Response.Status expectedStatus = (isApiVersion(ApiVersion.V3) ? Response.Status.OK : Response.Status.NO_CONTENT);
delete(expectedStatus, null, "projects", projectId, "repository", "tags", tagName);
}
/**
* Get a list of repository files and directories in a project.
*
* GET /projects/:id/repository/tree
*
* @param projectId the ID of the project to get the files for
* @return a tree with the root directories and files of a project
* @throws GitLabApiException if any exception occurs
*/
public List<TreeItem> getTree(Integer projectId) throws GitLabApiException {
return this.getTree(projectId, "/", "master");
}
/**
* Get a list of repository files and directories in a project.
*
* GET /projects/:id/repository/tree
*
* id (required) - The ID of a project
* path (optional) - The path inside repository. Used to get content of subdirectories
* ref_name (optional) - The name of a repository branch or tag or if not given the default branch
*
* @param projectId the ID of the project to get the files for
* @param filePath the path inside repository, used to get content of subdirectories
* @param refName the name of a repository branch or tag or if not given the default branch
* @return a tree with the directories and files of a project
* @throws GitLabApiException if any exception occurs
*/
public List<TreeItem> getTree(Integer projectId, String filePath, String refName) throws GitLabApiException {
return (getTree(projectId, filePath, refName, false));
}
/**
* Get a list of repository files and directories in a project.
*
* GET /projects/:id/repository/tree
*
* id (required) - The ID of a project
* path (optional) - The path inside repository. Used to get content of subdirectories
* ref_name (optional) - The name of a repository branch or tag or if not given the default branch
* recursive (optional) - Boolean value used to get a recursive tree (false by default)
*
* @param projectId the ID of the project to get the files for
* @param filePath the path inside repository, used to get content of subdirectories
* @param refName the name of a repository branch or tag or if not given the default branch
* @param recursive flag to get a recursive tree or not
* @return a tree with the directories and files of a project
* @throws GitLabApiException if any exception occurs
*/
public List<TreeItem> getTree(Integer projectId, String filePath, String refName, Boolean recursive) throws GitLabApiException {
Form formData = new GitLabApiForm()
.withParam("id", projectId, true)
.withParam("path", filePath, false)
.withParam("ref_name", refName, false)
.withParam("recursive", recursive, false);
Response response = get(Response.Status.OK, formData.asMap(), "projects", projectId, "repository", "tree");
return (response.readEntity(new GenericType<List<TreeItem>>() {
}));
}
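/*
 * Example (hypothetical values): recursively listing the repository tree of
 * the "src" directory on the "master" branch, assuming a configured
 * "gitLabApi" instance.
 *
 *   List<TreeItem> items = gitLabApi.getRepositoryApi()
 *           .getTree(1234, "src", "master", true);
 */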
/**
* Get the raw file contents for a file by commit sha and path.
*
* GET /projects/:id/repository/blobs/:sha
*
* @param projectId the ID of the project
* @param commitOrBranchName the commit or branch name to get the file contents for
* @param filepath the path of the file to get
* @return a string with the file content for the specified file
* @throws GitLabApiException if any exception occurs
*/
public String getRawFileContent(Integer projectId, String commitOrBranchName, String filepath) throws GitLabApiException {
Form formData = new GitLabApiForm().withParam("filepath", filepath, true);
Response response = get(Response.Status.OK, formData.asMap(), "projects", projectId, "repository", "blobs", commitOrBranchName);
return (response.readEntity(String.class));
}
/**
* Get the raw file contents for a blob by blob SHA.
*
* GET /projects/:id/repository/raw_blobs/:sha
*
* @param projectId the ID of the project
* @param sha the SHA of the file to get the contents for
* @return the raw file contents for the blob
* @throws GitLabApiException if any exception occurs
*/
public String getRawBlobCotent(Integer projectId, String sha) throws GitLabApiException {
Response response = get(Response.Status.OK, null, "projects", projectId, "repository", "raw_blobs", sha);
return (response.readEntity(String.class));
}
/**
* Get an archive of the complete repository by SHA (optional).
*
* GET /projects/:id/repository/archive
*
* @param projectId the ID of the project
* @param sha the SHA of the archive to get
* @return an input stream that can be used to save as a file
* or to read the content of the archive
* @throws GitLabApiException if any exception occurs
*/
public InputStream getRepositoryArchive(Integer projectId, String sha) throws GitLabApiException {
Form formData = new GitLabApiForm().withParam("sha", sha);
Response response = get(Response.Status.OK, formData.asMap(), "projects", projectId, "repository", "archive");
return (response.readEntity(InputStream.class));
}
/**
* Get an archive of the complete repository by SHA (optional) and save it to the specified directory.
* If the archive already exists in the directory it will be overwritten.
*
* GET /projects/:id/repository/archive
*
* @param projectId the ID of the project
* @param sha the SHA of the archive to get
* @param directory the File instance of the directory to save the archive to, if null will use "java.io.tmpdir"
* @return a File instance pointing to the downloaded archive file
* @throws GitLabApiException if any exception occurs
*/
public File getRepositoryArchive(Integer projectId, String sha, File directory) throws GitLabApiException {
Form formData = new GitLabApiForm().withParam("sha", sha);
Response response = get(Response.Status.OK, formData.asMap(), "projects", projectId, "repository", "archive");
try {
if (directory == null)
directory = new File(System.getProperty("java.io.tmpdir"));
String filename = FileUtils.getFilenameFromContentDisposition(response);
File file = new File(directory, filename);
InputStream in = response.readEntity(InputStream.class);
Files.copy(in, file.toPath(), StandardCopyOption.REPLACE_EXISTING);
return (file);
} catch (IOException ioe) {
throw new GitLabApiException(ioe);
}
}
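/*
 * Example (hypothetical values): downloading the archive of the default
 * branch (sha == null) into the system temp directory (directory == null),
 * again assuming a configured "gitLabApi" instance.
 *
 *   File archive = gitLabApi.getRepositoryApi()
 *           .getRepositoryArchive(1234, null, null);
 */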
}
|
package org.jboss.apiviz;
import static org.jboss.apiviz.Constant.*;
import static org.jboss.apiviz.EdgeType.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.regex.Pattern;
import jdepend.framework.JDepend;
import jdepend.framework.JavaPackage;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.Doc;
import com.sun.javadoc.MethodDoc;
import com.sun.javadoc.PackageDoc;
import com.sun.javadoc.RootDoc;
import com.sun.javadoc.SeeTag;
import com.sun.javadoc.Tag;
/**
* @author The APIviz Project (apiviz-dev@lists.jboss.org)
* @author Trustin Lee (tlee@redhat.com)
*
* @version $Rev$, $Date$
*
*/
public class ClassDocGraph {
final RootDoc root;
private final Map<String, ClassDoc> nodes = new TreeMap<String, ClassDoc>();
private final Map<ClassDoc, Set<Edge>> edges = new HashMap<ClassDoc, Set<Edge>>();
private final Map<ClassDoc, Set<Edge>> reversedEdges = new HashMap<ClassDoc, Set<Edge>>();
private int nonconfiguredCategoryCount = 0;
/**
* Key = category name<br>
* Value = color
*/
private final Map<String, CategoryOptions> categories = new HashMap<String, CategoryOptions>();
public ClassDocGraph(RootDoc root) {
this.root = root;
//get the colors for the categories
for (final String[] option : root.options()) {
if (OPTION_CATEGORY_FILL_COLOR.equals(option[0])) {
if (option.length == 2 && option[1].split(":").length >= 2) {
final String[] split = option[1].split(":");
String lineColor = null;
if (split.length > 2) {
lineColor = split[2];
}
addCategory(split[0], split[1], lineColor);
} else {
root.printWarning("Bad " + OPTION_CATEGORY_FILL_COLOR +
", Ignoring. Use format '" + OPTION_CATEGORY_FILL_COLOR +
" <category>[:<fillcolor>[:linecolor]]");
}
}
}
root.printNotice("Building graph for all classes...");
for (ClassDoc node: root.classes()) {
addNode(node, true);
}
}
private void addCategory(final String categoryName, final String fillColor, final String lineColor) {
if (categories.containsKey(categoryName)) {
root.printWarning("Category defined multiple times: " + categoryName);
}
categories.put(categoryName, new CategoryOptions(fillColor, lineColor));
}
private void addNode(ClassDoc node, boolean addRelatedClasses) {
String key = node.qualifiedName();
if (!nodes.containsKey(key)) {
nodes.put(key, node);
edges.put(node, new TreeSet<Edge>());
}
if (addRelatedClasses) {
addRelatedClasses(node);
}
}
private void addRelatedClasses(ClassDoc type) {
// Generalization
ClassDoc superType = type.superclass();
if (superType != null &&
!superType.qualifiedName().equals("java.lang.Object") &&
!superType.qualifiedName().equals("java.lang.Annotation") &&
!superType.qualifiedName().equals("java.lang.Enum")) {
addNode(superType, false);
addEdge(new Edge(GENERALIZATION, type, superType));
}
// Realization
for (ClassDoc i: type.interfaces()) {
if (i.qualifiedName().equals("java.lang.annotation.Annotation")) {
continue;
}
addNode(i, false);
addEdge(new Edge(REALIZATION, type, i));
}
// Apply custom doclet tags.
for (Tag t: type.tags()) {
if (t.name().equals(TAG_USES)) {
addEdge(new Edge(root, DEPENDENCY, type, t.text()));
} else if (t.name().equals(TAG_HAS)) {
addEdge(new Edge(root, NAVIGABILITY, type, t.text()));
} else if (t.name().equals(TAG_OWNS)) {
addEdge(new Edge(root, AGGREGATION, type, t.text()));
} else if (t.name().equals(TAG_COMPOSED_OF)) {
addEdge(new Edge(root, COMPOSITION, type, t.text()));
}
}
// Add an edge with '<<see also>>' label for the classes with @see
// tags, but avoid duplication.
for (SeeTag t: type.seeTags()) {
try {
if (t.referencedClass() == null) {
continue;
}
} catch (Exception e) {
continue;
}
String a = type.qualifiedName();
String b = t.referencedClass().qualifiedName();
addNode(t.referencedClass(), false);
if (a.compareTo(b) != 0) {
if (a.compareTo(b) < 0) {
addEdge(new Edge(
root, SEE_ALSO, type,
b + " - - «see also»"));
} else {
addEdge(new Edge(
root, SEE_ALSO, t.referencedClass(),
a + " - - «see also»"));
}
}
}
}
private void addEdge(Edge edge) {
edges.get(edge.getSource()).add(edge);
Set<Edge> reversedEdgeSubset = reversedEdges.get(edge.getTarget());
if (reversedEdgeSubset == null) {
reversedEdgeSubset = new TreeSet<Edge>();
reversedEdges.put((ClassDoc) edge.getTarget(), reversedEdgeSubset);
}
reversedEdgeSubset.add(edge);
}
public String getOverviewSummaryDiagram(JDepend jdepend) {
Map<String, PackageDoc> packages = new TreeMap<String, PackageDoc>(new Comparator<String>() {
public int compare(String o1, String o2) {
return o2.compareTo(o1);
}
});
Set<Edge> edgesToRender = new TreeSet<Edge>();
addPackageDependencies(jdepend, packages, edgesToRender);
// Replace direct dependencies with transitive dependencies
// if possible to simplify the diagram.
//// Build the matrix first.
Map<Doc, Set<Doc>> dependencies = new HashMap<Doc, Set<Doc>>();
for (Edge edge: edgesToRender) {
Set<Doc> nextDependencies = dependencies.get(edge.getSource());
if (nextDependencies == null) {
nextDependencies = new HashSet<Doc>();
dependencies.put(edge.getSource(), nextDependencies);
}
nextDependencies.add(edge.getTarget());
}
//// Remove the edges which don't change the effective relationship,
//// i.e. those which can be recovered by indirect (transitive) dependency resolution.
for (int i = edgesToRender.size(); i > 0 ; i --) {
for (Edge edge: edgesToRender) {
if (isIndirectlyReachable(dependencies, edge.getSource(), edge.getTarget())) {
edgesToRender.remove(edge);
Set<Doc> targets = dependencies.get(edge.getSource());
if (targets != null) {
targets.remove(edge.getTarget());
}
break;
}
}
}
// Get the least common prefix to compact the diagram even further.
int minPackageNameLen = Integer.MAX_VALUE;
int maxPackageNameLen = Integer.MIN_VALUE;
for (String pname: packages.keySet()) {
if (pname.length() > maxPackageNameLen) {
maxPackageNameLen = pname.length();
}
if (pname.length() < minPackageNameLen) {
minPackageNameLen = pname.length();
}
}
if (minPackageNameLen == 0) {
throw new IllegalStateException("Unexpected empty package name");
}
int prefixLen = 0;
if (!packages.keySet().isEmpty()) {
String firstPackageName = packages.keySet().iterator().next();
for (prefixLen = minPackageNameLen; prefixLen > 0; prefixLen --) {
if (firstPackageName.charAt(prefixLen - 1) != '.') {
continue;
}
String candidatePrefix = firstPackageName.substring(0, prefixLen);
boolean found = true;
for (String pname: packages.keySet()) {
if (!pname.startsWith(candidatePrefix)) {
found = false;
break;
}
}
if (found) {
break;
}
}
}
StringBuilder buf = new StringBuilder(16384);
buf.append(
"digraph APIVIZ {" + NEWLINE +
"rankdir=LR;" + NEWLINE +
"ranksep=0.3;" + NEWLINE +
"nodesep=0.3;" + NEWLINE +
"mclimit=128;" + NEWLINE +
"outputorder=edgesfirst;" + NEWLINE +
"center=1;" + NEWLINE +
"remincross=true;" + NEWLINE +
"searchsize=65536;" + NEWLINE +
"edge [fontsize=10, fontname=\"" + NORMAL_FONT + "\", " +
"style=\"setlinewidth(0.6)\"]; " + NEWLINE +
"node [shape=box, fontsize=10, fontname=\"" + NORMAL_FONT + "\", " +
"width=0.1, height=0.1, style=\"setlinewidth(0.6)\"]; " + NEWLINE);
for (PackageDoc pkg: packages.values()) {
renderPackage(buf, pkg, prefixLen);
}
for (Edge edge: edgesToRender) {
renderEdge(null, buf, edge);
}
buf.append("}" + NEWLINE);
return buf.toString();
}
@SuppressWarnings("unchecked")
private void addPackageDependencies(
JDepend jdepend, Map<String, PackageDoc> packages, Set<Edge> edgesToRender) {
Map<String, PackageDoc> allPackages = APIviz.getPackages(root);
for (String pname: allPackages.keySet()) {
if (isHidden(allPackages.get(pname))) {
continue;
}
JavaPackage pkg = jdepend.getPackage(pname);
if (pkg == null) {
continue;
}
packages.put(pname, allPackages.get(pname));
Collection<JavaPackage> epkgs = pkg.getEfferents();
if (epkgs == null) {
continue;
}
for (JavaPackage epkg: epkgs) {
if (isHidden(allPackages.get(epkg.getName()))) {
continue;
}
addPackageDependency(edgesToRender, allPackages.get(pname), allPackages.get(epkg.getName()));
}
}
}
static boolean isHidden(Doc node) {
if (node.tags(TAG_HIDDEN).length > 0) {
return true;
}
Tag[] tags = node.tags(TAG_EXCLUDE);
if (tags == null) {
return false;
}
for (Tag t: tags) {
if (t.text() == null || t.text().trim().length() == 0) {
return true;
}
}
return false;
}
private static void addPackageDependency(
Set<Edge> edgesToRender, PackageDoc source, PackageDoc target) {
if (source != target && source.isIncluded() && target.isIncluded()) {
edgesToRender.add(
new Edge(EdgeType.DEPENDENCY, source, target));
}
}
private static boolean isIndirectlyReachable(Map<Doc, Set<Doc>> dependencyGraph, Doc source, Doc target) {
Set<Doc> intermediaryTargets = dependencyGraph.get(source);
if (intermediaryTargets == null || intermediaryTargets.isEmpty()) {
return false;
}
Set<Doc> visited = new HashSet<Doc>();
visited.add(source);
for (Doc t: intermediaryTargets) {
if (t == target) {
continue;
}
if (isIndirectlyReachable(dependencyGraph, t, target, visited)) {
return true;
}
}
return false;
}
private static boolean isIndirectlyReachable(Map<Doc, Set<Doc>> dependencyGraph, Doc source, Doc target, Set<Doc> visited) {
if (visited.contains(source)) {
// Evade cyclic dependency.
return false;
}
visited.add(source);
Set<Doc> intermediaryTargets = dependencyGraph.get(source);
if (intermediaryTargets == null || intermediaryTargets.isEmpty()) {
return false;
}
for (Doc t: intermediaryTargets) {
if (t == target) {
return true;
}
if (isIndirectlyReachable(dependencyGraph, t, target, visited)) {
return true;
}
}
return false;
}
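/*
 * Worked example (hypothetical docs a, b, c): with
 *
 *   dependencies = { a -> {b, c}, b -> {c} }
 *
 * isIndirectlyReachable(dependencies, a, c) returns true (a reaches c via b),
 * so the transitive-reduction loop in getOverviewSummaryDiagram() removes the
 * direct a -> c edge while keeping a -> b and b -> c.
 */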
public String getPackageSummaryDiagram(PackageDoc pkg) {
StringBuilder buf = new StringBuilder(16384);
buf.append(
"digraph APIVIZ {" + NEWLINE +
"rankdir=LR;" + NEWLINE +
"ranksep=0.3;" + NEWLINE +
"nodesep=0.3;" + NEWLINE +
"mclimit=1024;" + NEWLINE +
"outputorder=edgesfirst;" + NEWLINE +
"center=1;" + NEWLINE +
"remincross=true;" + NEWLINE +
"searchsize=65536;" + NEWLINE +
"edge [fontsize=10, fontname=\"" + NORMAL_FONT + "\", " +
"style=\"setlinewidth(0.6)\"]; " + NEWLINE +
"node [shape=box, fontsize=10, fontname=\"" + NORMAL_FONT + "\", " +
"width=0.1, height=0.1, style=\"setlinewidth(0.6)\"]; " + NEWLINE);
Map<String, ClassDoc> nodesToRender = new TreeMap<String, ClassDoc>();
Set<Edge> edgesToRender = new TreeSet<Edge>();
for (ClassDoc node: nodes.values()) {
fetchSubgraph(pkg, node, nodesToRender, edgesToRender, true, false, true);
}
renderSubgraph(pkg, null, buf, nodesToRender, edgesToRender, true);
buf.append("}" + NEWLINE);
return buf.toString();
}
private void checkCategoryExistence(Doc node) {
//check if the category for this class exists
if (node.tags(TAG_CATEGORY).length > 0 && !categories.containsKey(node.tags(TAG_CATEGORY)[0].text())) {
final String categoryName = node.tags(TAG_CATEGORY)[0].text();
if (ColorCombination.values().length > nonconfiguredCategoryCount) {
categories.put(categoryName, new CategoryOptions(ColorCombination.values()[nonconfiguredCategoryCount]));
} else {
categories.put(categoryName, new CategoryOptions("#FFFFFF", null));
}
nonconfiguredCategoryCount++;
}
}
private void fetchSubgraph(
PackageDoc pkg, ClassDoc cls,
Map<String, ClassDoc> nodesToRender, Set<Edge> edgesToRender,
boolean useHidden, boolean useSee, boolean forceInherit) {
if (useHidden && isHidden(cls)) {
return;
}
if (forceInherit) {
for (Tag t: pkg.tags(TAG_EXCLUDE)) {
if (t.text() == null || t.text().trim().length() == 0) {
continue;
}
if (Pattern.compile(t.text().trim()).matcher(cls.qualifiedName()).find()) {
return;
}
}
}
if (cls.containingPackage() == pkg) {
Set<Edge> directEdges = edges.get(cls);
nodesToRender.put(cls.qualifiedName(), cls);
for (Edge edge: directEdges) {
if (!useSee && edge.getType() == SEE_ALSO) {
continue;
}
ClassDoc source = (ClassDoc) edge.getSource();
ClassDoc target = (ClassDoc) edge.getTarget();
boolean excluded = false;
if (forceInherit || cls.tags(TAG_INHERIT).length > 0) {
for (Tag t: pkg.tags(TAG_EXCLUDE)) {
if (t.text() == null || t.text().trim().length() == 0) {
continue;
}
Pattern p = Pattern.compile(t.text().trim());
if (p.matcher(source.qualifiedName()).find()) {
excluded = true;
break;
}
if (p.matcher(target.qualifiedName()).find()) {
excluded = true;
break;
}
}
if (excluded) {
continue;
}
}
for (Tag t: cls.tags(TAG_EXCLUDE)) {
if (t.text() == null || t.text().trim().length() == 0) {
continue;
}
Pattern p = Pattern.compile(t.text().trim());
if (p.matcher(source.qualifiedName()).find()) {
excluded = true;
break;
}
if (p.matcher(target.qualifiedName()).find()) {
excluded = true;
break;
}
}
if (excluded) {
continue;
}
if (!useHidden || !isHidden(source) && !isHidden(target)) {
edgesToRender.add(edge);
}
if (!useHidden || !isHidden(source)) {
nodesToRender.put(source.qualifiedName(), source);
}
if (!useHidden || !isHidden(target)) {
nodesToRender.put(target.qualifiedName(), target);
}
}
Set<Edge> reversedDirectEdges = reversedEdges.get(cls);
if (reversedDirectEdges != null) {
for (Edge edge: reversedDirectEdges) {
if (!useSee && edge.getType() == SEE_ALSO) {
continue;
}
if (cls.tags(TAG_EXCLUDE_SUBTYPES).length > 0 &&
(edge.getType() == EdgeType.GENERALIZATION ||
edge.getType() == EdgeType.REALIZATION)) {
continue;
}
ClassDoc source = (ClassDoc) edge.getSource();
ClassDoc target = (ClassDoc) edge.getTarget();
boolean excluded = false;
if (forceInherit || cls.tags(TAG_INHERIT).length > 0) {
for (Tag t: pkg.tags(TAG_EXCLUDE)) {
if (t.text() == null || t.text().trim().length() == 0) {
continue;
}
Pattern p = Pattern.compile(t.text().trim());
if (p.matcher(source.qualifiedName()).find()) {
excluded = true;
break;
}
if (p.matcher(target.qualifiedName()).find()) {
excluded = true;
break;
}
}
if (excluded) {
continue;
}
}
for (Tag t: cls.tags(TAG_EXCLUDE)) {
if (t.text() == null || t.text().trim().length() == 0) {
continue;
}
Pattern p = Pattern.compile(t.text().trim());
if (p.matcher(source.qualifiedName()).find()) {
excluded = true;
break;
}
if (p.matcher(target.qualifiedName()).find()) {
excluded = true;
break;
}
}
if (excluded) {
continue;
}
if (!useHidden || !isHidden(source) && !isHidden(target)) {
edgesToRender.add(edge);
}
if (!useHidden || !isHidden(source)) {
nodesToRender.put(source.qualifiedName(), source);
}
if (!useHidden || !isHidden(target)) {
nodesToRender.put(target.qualifiedName(), target);
}
}
}
}
}
public String getClassDiagram(ClassDoc cls) {
PackageDoc pkg = cls.containingPackage();
StringBuilder buf = new StringBuilder(16384);
Map<String, ClassDoc> nodesToRender = new TreeMap<String, ClassDoc>();
Set<Edge> edgesToRender = new TreeSet<Edge>();
fetchSubgraph(pkg, cls, nodesToRender, edgesToRender, false, true, false);
buf.append("digraph APIVIZ {" + NEWLINE);
// Determine the graph orientation automatically.
int nodesAbove = 0;
int nodesBelow = 0;
for (Edge e: edgesToRender) {
if (e.getType().isReversed()) {
if (e.getSource() == cls) {
nodesAbove ++;
} else {
nodesBelow ++;
}
} else {
if (e.getSource() == cls) {
nodesBelow ++;
} else {
nodesAbove ++;
}
}
}
boolean portrait;
if (Math.max(nodesAbove, nodesBelow) <= 5) {
// Landscape looks better usually up to 5.
// There are just a few subtypes and supertypes.
buf.append(
"rankdir=TB;" + NEWLINE +
"ranksep=0.4;" + NEWLINE +
"nodesep=0.3;" + NEWLINE);
portrait = false;
} else {
// Portrait looks better.
// There are too many subtypes or supertypes.
buf.append(
"rankdir=LR;" + NEWLINE +
"ranksep=1.0;" + NEWLINE +
"nodesep=0.2;" + NEWLINE);
portrait = true;
}
buf.append(
"mclimit=128;" + NEWLINE +
"outputorder=edgesfirst;" + NEWLINE +
"center=1;" + NEWLINE +
"remincross=true;" + NEWLINE +
"searchsize=65536;" + NEWLINE +
"edge [fontsize=10, fontname=\"" + NORMAL_FONT + "\", " +
"style=\"setlinewidth(0.6)\"]; " + NEWLINE +
"node [shape=box, fontsize=10, fontname=\"" + NORMAL_FONT + "\", " +
"width=0.1, height=0.1, style=\"setlinewidth(0.6)\"]; " + NEWLINE);
renderSubgraph(pkg, cls, buf, nodesToRender, edgesToRender, portrait);
buf.append("}" + NEWLINE);
return buf.toString();
}
private void renderSubgraph(PackageDoc pkg, ClassDoc cls,
StringBuilder buf, Map<String, ClassDoc> nodesToRender,
Set<Edge> edgesToRender, boolean portrait) {
List<ClassDoc> nodesToRenderCopy = new ArrayList<ClassDoc>(nodesToRender.values());
Collections.sort(nodesToRenderCopy, new ClassDocComparator(portrait));
for (ClassDoc node: nodesToRenderCopy) {
renderClass(pkg, cls, buf, node);
}
for (Edge edge: edgesToRender) {
renderEdge(pkg, buf, edge);
}
}
private void renderPackage(
StringBuilder buf, PackageDoc pkg, int prefixLen) {
checkCategoryExistence(pkg);
String href = pkg.name().replace('.', '/') + "/package-summary.html";
buf.append(getNodeId(pkg));
buf.append(" [label=\"");
buf.append(pkg.name().substring(prefixLen));
buf.append("\", style=\"filled");
if (pkg.tags("@deprecated").length > 0) {
buf.append(",dotted");
}
buf.append("\", fillcolor=\"");
buf.append(getFillColor(pkg));
buf.append("\", href=\"");
buf.append(href);
buf.append("\"];");
buf.append(NEWLINE);
}
private void renderClass(PackageDoc pkg, ClassDoc cls, StringBuilder buf, ClassDoc node) {
checkCategoryExistence(node);
String fillColor = getFillColor(pkg, cls, node);
String lineColor = getLineColor(pkg, cls, node);
String fontColor = getFontColor(pkg, node);
String href = getPath(pkg, node);
buf.append(getNodeId(node));
buf.append(" [label=\"");
buf.append(getNodeLabel(pkg, node));
buf.append("\", tooltip=\"");
buf.append(escape(getNodeLabel(pkg, node)));
buf.append("\"");
if (node.isAbstract() && !node.isInterface()) {
buf.append(", fontname=\"");
buf.append(ITALIC_FONT);
buf.append("\"");
}
buf.append(", style=\"filled");
if (node.tags("@deprecated").length > 0) {
buf.append(",dotted");
}
buf.append("\", color=\"");
buf.append(lineColor);
buf.append("\", fontcolor=\"");
buf.append(fontColor);
buf.append("\", fillcolor=\"");
buf.append(fillColor);
if (href != null) {
buf.append("\", href=\"");
buf.append(href);
}
buf.append("\"];");
buf.append(NEWLINE);
}
private void renderEdge(PackageDoc pkg, StringBuilder buf, Edge edge) {
EdgeType type = edge.getType();
String lineColor = getLineColor(pkg, edge);
String fontColor = getFontColor(pkg, edge);
// Graphviz lays out nodes upside down - adjust for
// important relationships.
boolean reverse = edge.getType().isReversed();
if (reverse) {
buf.append(getNodeId(edge.getTarget()));
buf.append(" -> ");
buf.append(getNodeId(edge.getSource()));
buf.append(" [arrowhead=\"");
buf.append(type.getArrowTail());
buf.append("\", arrowtail=\"");
buf.append(type.getArrowHead() == null? (edge.isOneway()? "open" : "none") : type.getArrowHead());
} else {
buf.append(getNodeId(edge.getSource()));
buf.append(" -> ");
buf.append(getNodeId(edge.getTarget()));
buf.append(" [arrowhead=\"");
buf.append(type.getArrowHead() == null? (edge.isOneway()? "open" : "none") : type.getArrowHead());
buf.append("\", arrowtail=\"");
buf.append(type.getArrowTail());
}
buf.append("\", style=\"" + type.getStyle());
buf.append("\", color=\"");
buf.append(lineColor);
buf.append("\", fontcolor=\"");
buf.append(fontColor);
buf.append("\", label=\"");
buf.append(escape(edge.getEdgeLabel()));
buf.append("\", headlabel=\"");
buf.append(escape(edge.getTargetLabel()));
buf.append("\", taillabel=\"");
buf.append(escape(edge.getSourceLabel()));
buf.append("\" ];");
buf.append(NEWLINE);
}
private static String getStereotype(ClassDoc node) {
String stereotype = node.isInterface()? "interface" : null;
if (node.isException() || node.isError()) {
stereotype = "exception";
} else if (node.isAnnotationType()) {
stereotype = "annotation";
} else if (node.isEnum()) {
stereotype = "enum";
} else if (isStaticType(node)) {
stereotype = "static";
}
if (node.tags(TAG_STEREOTYPE).length > 0) {
stereotype = node.tags(TAG_STEREOTYPE)[0].text();
}
return escape(stereotype);
}
static boolean isStaticType(ClassDoc node) {
boolean staticType = true;
int methods = 0;
for (MethodDoc m: node.methods()) {
if (m.isConstructor()) {
continue;
}
methods ++;
if (!m.isStatic()) {
staticType = false;
break;
}
}
return staticType && methods > 0;
}
private String getFillColor(PackageDoc pkg) {
String color = "white";
if (pkg.tags(TAG_CATEGORY).length > 0 && categories.containsKey(pkg.tags(TAG_CATEGORY)[0].text())) {
color = categories.get(pkg.tags(TAG_CATEGORY)[0].text()).getFillColor();
}
if (pkg.tags(TAG_LANDMARK).length > 0) {
color = "khaki1";
}
return color;
}
private String getFillColor(PackageDoc pkg, ClassDoc cls, ClassDoc node) {
String color = "white";
if (cls == null) {
//we are rendering for a package summary since there is no cls
//see if the node has a fill color
if (node.tags(TAG_CATEGORY).length > 0 && categories.containsKey(node.tags(TAG_CATEGORY)[0].text())) {
color = categories.get(node.tags(TAG_CATEGORY)[0].text()).getFillColor();
}
//override any previous values if a landmark is set
if (node.containingPackage() == pkg && node.tags(TAG_LANDMARK).length > 0) {
color = "khaki1";
}
} else if (cls == node) {
//this is the class we are rendering the class diagram for
color = "khaki1";
} else if (node.tags(TAG_CATEGORY).length > 0 && categories.containsKey(node.tags(TAG_CATEGORY)[0].text())) {
//not the class for the class diagram so use its fill color
color = categories.get(node.tags(TAG_CATEGORY)[0].text()).getFillColor();
if (node.containingPackage() != pkg && color.startsWith("#")) {
//grey out the fill color
final StringBuffer sb = new StringBuffer("#");
sb.append(shiftColor(color.substring(1,3)));
sb.append(shiftColor(color.substring(3,5)));
sb.append(shiftColor(color.substring(5,7)));
color = sb.toString();
}
}
return color;
}
/**
* Applies the greying-out effect to a single two-digit hex colour component,
* shifting it towards white.
* @param number a two-digit hexadecimal colour component, e.g. "9a"
* @return the lightened component as a hexadecimal string
*/
private static String shiftColor(String number) {
Integer colorValue = Integer.parseInt(number, 16);
colorValue = colorValue + 0x4D; //approach white
if (colorValue > 0xFF) {
colorValue = 0xFF;
}
return Integer.toHexString(colorValue);
}
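/*
 * Worked example: shiftColor("9a") parses 0x9A (154), adds 0x4D (77) to get
 * 0xE7 and returns "e7"; components that would exceed 0xFF are clamped, so
 * shiftColor("cc") returns "ff".
 */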
private String getLineColor(PackageDoc pkg, ClassDoc cls, ClassDoc node) {
String color = "#000000";
if (cls != node && node.tags(TAG_LANDMARK).length <= 0 && node.tags(TAG_CATEGORY).length > 0 && categories.containsKey(node.tags(TAG_CATEGORY)[0].text())) {
color = categories.get(node.tags(TAG_CATEGORY)[0].text()).getLineColor();
}
if (node.containingPackage() != pkg) {
//grey out the line color
final StringBuffer sb = new StringBuffer("#");
sb.append(shiftColor(color.substring(1,3)));
sb.append(shiftColor(color.substring(3,5)));
sb.append(shiftColor(color.substring(5,7)));
color = sb.toString();
}
return color;
}
private String getLineColor(PackageDoc pkg, Edge edge) {
if (edge.getTarget() instanceof ClassDoc) {
//we have a class
return getLineColor(
pkg,
(ClassDoc) edge.getSource(),
(ClassDoc) edge.getTarget());
} else {
//not a class (a package or something)
String color = "#000000";
if (pkg != null &&
pkg.tags(TAG_CATEGORY).length > 0 &&
categories.containsKey(pkg.tags(TAG_CATEGORY)[0].text())) {
color = categories.get(pkg.tags(TAG_CATEGORY)[0].text()).getLineColor();
}
return color;
}
}
private static String getFontColor(PackageDoc pkg, ClassDoc doc) {
String color = "black";
if (!(doc.containingPackage() == pkg)) {
color = "gray30";
}
return color;
}
private static String getFontColor(PackageDoc pkg, Edge edge) {
if (edge.getTarget() instanceof ClassDoc) {
return getFontColor(pkg, (ClassDoc) edge.getTarget());
} else {
return "black";
}
}
private static String getNodeId(Doc node) {
String name;
if (node instanceof ClassDoc) {
name = ((ClassDoc) node).qualifiedName();
} else {
name = node.name();
}
return name.replace('.', '_');
}
private static String getNodeLabel(PackageDoc pkg, ClassDoc node) {
StringBuilder buf = new StringBuilder(256);
String stereotype = getStereotype(node);
if (stereotype != null) {
buf.append("«");
buf.append(stereotype);
buf.append("»\\n");
}
if (node.containingPackage() == pkg) {
buf.append(node.name());
} else {
if (node.containingPackage() == null) {
buf.append(node.name());
} else {
buf.append(node.name());
buf.append("\\n(");
buf.append(node.containingPackage().name());
buf.append(')');
}
}
return buf.toString();
}
private static String escape(String text) {
// Escape some characters to prevent syntax errors.
if (text != null) {
text = text.replaceAll("" +
"(\"|'|\\\\.?|\\s)+", " ");
}
return text;
}
private static String getPath(PackageDoc pkg, ClassDoc node) {
if (!node.isIncluded()) {
return null;
}
String sourcePath = pkg.name().replace('.', '/');
String targetPath =
node.containingPackage().name().replace('.', '/') + '/' +
node.name() + ".html";
String[] sourcePathElements = sourcePath.split("[\\/\\\\]+");
String[] targetPathElements = targetPath.split("[\\/\\\\]+");
int maxCommonLength = Math.min(sourcePathElements.length, targetPathElements.length);
int commonLength;
for (commonLength = 0; commonLength < maxCommonLength; commonLength ++) {
if (!sourcePathElements[commonLength].equals(targetPathElements[commonLength])) {
break;
}
}
StringBuilder buf = new StringBuilder();
for (int i = 0; i < sourcePathElements.length - commonLength; i ++) {
buf.append("/..");
}
for (int i = commonLength; i < targetPathElements.length; i ++) {
buf.append('/');
buf.append(targetPathElements[i]);
}
return buf.substring(1);
}
protected class CategoryOptions {
private String fillColor = "#FFFFFF";
private String lineColor = "#000000";
protected CategoryOptions(final String fillColor, final String lineColor) {
this.fillColor = Color.resolveColor(fillColor);
if (lineColor != null) {
this.lineColor = Color.resolveColor(lineColor);
}
root.printNotice("Category Options: " + this.fillColor + " - " + this.lineColor);
}
protected CategoryOptions(final ColorCombination combination) {
fillColor = combination.getFillColor().getRgbValue();
lineColor = combination.getLineColor().getRgbValue();
root.printNotice("Category Options: " + fillColor + " - " + lineColor);
}
public String getFillColor() {
return fillColor;
}
public String getLineColor() {
return lineColor;
}
}
}
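/*
 * Usage sketch (assumptions only, not part of APIviz itself): inside a
 * doclet's start(RootDoc) method one could build the graph and emit Graphviz
 * DOT sources, which are then rendered externally (e.g. with the dot tool).
 *
 *   ClassDocGraph graph = new ClassDocGraph(root);
 *   for (ClassDoc cd : root.classes()) {
 *       String dot = graph.getClassDiagram(cd);
 *       // feed "dot" to Graphviz to produce the per-class diagram image
 *   }
 */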
|
package org.jtrfp.trcl.core;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import javax.media.opengl.GL3;
import org.jtrfp.trcl.ObjectListWindow;
import org.jtrfp.trcl.Submitter;
import org.jtrfp.trcl.gpu.GLFrameBuffer;
import org.jtrfp.trcl.gpu.GLProgram;
import org.jtrfp.trcl.gpu.GLTexture;
import org.jtrfp.trcl.gpu.GLUniform;
import org.jtrfp.trcl.gpu.GPU;
import org.jtrfp.trcl.mem.PagedByteBuffer;
import org.jtrfp.trcl.obj.PositionedRenderable;
import org.jtrfp.trcl.obj.WorldObject;
public class RenderList {
public static final int NUM_SUBPASSES = 4;
public static final int NUM_BLOCKS_PER_SUBPASS = 1024 * 4;
public static final int NUM_BLOCKS_PER_PASS = NUM_BLOCKS_PER_SUBPASS
* NUM_SUBPASSES;
public static final int NUM_RENDER_PASSES = 2;// Opaque + transparent
private static final int OPAQUE_PASS = 0;
private static final int BLEND_PASS = 1;
private final TR tr;
private int[] hostRenderListPageTable;
private int dummyBufferID;
private int numOpaqueBlocks;
private int numTransparentBlocks;
private int modulusUintOffset;
private int opaqueIndex = 0, blendIndex = 0;
private GLUniform renderListOffsetUniform,
renderListPageTable,
useTextureMap,
cameraMatrixUniform,
rootBuffer;
private final GLFrameBuffer intermediateFrameBuffer,
depthQueueFrameBuffer;
private final GLTexture intermediateDepthTexture,
intermediateColorTexture,
intermediateNormTexture,
intermediateTextureIDTexture,
depthQueueTexture;
private final GLProgram depthQueueProgram;
private final ArrayList<WorldObject> nearbyWorldObjects = new ArrayList<WorldObject>();
private final Submitter<PositionedRenderable>
submitter = new Submitter<PositionedRenderable>() {
@Override
public void submit(PositionedRenderable item) {
if (item instanceof WorldObject) {
final WorldObject wo = (WorldObject)item;
if (!wo.isActive()) {
return;
}
synchronized(nearbyWorldObjects)
{nearbyWorldObjects.add(wo);}
if(!wo.isVisible())return;
}//end if(WorldObject)
final ByteBuffer opOD = item.getOpaqueObjectDefinitionAddresses();
final ByteBuffer trOD = item
.getTransparentObjectDefinitionAddresses();
numOpaqueBlocks += opOD.capacity() / 4;
numTransparentBlocks += trOD.capacity() / 4;
tr.objectListWindow.get().opaqueIDs.set(0, opaqueIndex, opOD);
opaqueIndex += opOD.capacity();
tr.objectListWindow.get().blendIDs.set(0, blendIndex, trOD);
blendIndex += trOD.capacity();
}// end submit(...)
@Override
public void submit(Collection<PositionedRenderable> items) {
synchronized(items){
for(PositionedRenderable r:items){submit(r);}
}//end for(items)
}//end submit(...)
};
public RenderList(final GL3 gl,final GLProgram primaryProgram,
final GLProgram deferredProgram, final GLProgram depthQueueProgram,
final GLFrameBuffer intermediateFrameBuffer,
final GLTexture intermediateColorTexture, final GLTexture intermediateDepthTexture,
final GLTexture intermediateNormTexture, final GLTexture intermediateTextureIDTexture,
final GLFrameBuffer depthQueueFrameBuffer, final GLTexture depthQueueTexture,
final TR tr) {
// Build VAO
final IntBuffer ib = IntBuffer.allocate(1);
this.tr = tr;
this.intermediateColorTexture=intermediateColorTexture;
this.intermediateDepthTexture=intermediateDepthTexture;
this.intermediateFrameBuffer=intermediateFrameBuffer;
this.intermediateNormTexture=intermediateNormTexture;
this.intermediateTextureIDTexture=intermediateTextureIDTexture;
this.depthQueueFrameBuffer=depthQueueFrameBuffer;
this.depthQueueProgram=depthQueueProgram;
this.depthQueueTexture=depthQueueTexture;
final TRFuture<Void> task0 = tr.getThreadManager().submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
gl.glGenBuffers(1, ib);
ib.clear();
dummyBufferID = ib.get();
gl.glBindBuffer(GL3.GL_ARRAY_BUFFER, dummyBufferID);
gl.glBufferData(GL3.GL_ARRAY_BUFFER, 1, null, GL3.GL_DYNAMIC_DRAW);
gl.glEnableVertexAttribArray(0);
gl.glVertexAttribPointer(0, 1, GL3.GL_BYTE, false, 0, 0);
renderListOffsetUniform = primaryProgram.getUniform("renderListOffset");
renderListPageTable = primaryProgram.getUniform("renderListPageTable");
useTextureMap = primaryProgram.getUniform("useTextureMap");
cameraMatrixUniform = primaryProgram.getUniform("cameraMatrix");
rootBuffer = deferredProgram.getUniform("rootBuffer");
return null;
}
});//end task0
hostRenderListPageTable = new int[ObjectListWindow.OBJECT_LIST_SIZE_BYTES_PER_PASS
* RenderList.NUM_RENDER_PASSES
/ PagedByteBuffer.PAGE_SIZE_BYTES];
final ObjectListWindow olWindow = RenderList.this.tr
.objectListWindow.get();
final Renderer renderer = tr.renderer.get();
task0.get();
tr.getThreadManager().submitToGL(new Callable<Void>(){
@Override
public Void call() throws Exception {
for (int i = 0; i < hostRenderListPageTable.length; i++) {
hostRenderListPageTable[i] = olWindow.logicalPage2PhysicalPage(i);
}// end for(hostRenderListPageTable.length)
depthQueueProgram.use();
depthQueueProgram.getUniform("renderListPageTable").setArrayui(hostRenderListPageTable);//TODO: Cache or consolidate
renderer.getPrimaryProgram().use();
renderListPageTable.setArrayui(hostRenderListPageTable);
modulusUintOffset = (olWindow
.getPhysicalAddressInBytes(0) % PagedByteBuffer.PAGE_SIZE_BYTES) / 4;
return null;
}
}).get();
}// end constructor
private static int frameCounter = 0;
private void updateStatesToGPU() {
synchronized(nearbyWorldObjects){
final int size=nearbyWorldObjects.size();
for (int i=0; i<size; i++) {
nearbyWorldObjects.get(i).updateStateToGPU();
}}
}//end updateStatesToGPU
public void sendToGPU(GL3 gl) {
frameCounter++;
frameCounter %= 100;
updateStatesToGPU();
}//end sendToGPU
public void render(GL3 gl) {
// OPAQUE STAGE
tr.renderer.get().getPrimaryProgram().use();
final float [] matrixAsFlatArray = tr.renderer.get().getCamera().getMatrixAsFlatArray();
cameraMatrixUniform.set4x4Matrix(matrixAsFlatArray,true);
useTextureMap.set((int)0);
intermediateFrameBuffer.bindToDraw();
gl.glBindBuffer(GL3.GL_ARRAY_BUFFER, dummyBufferID);
gl.glClear(GL3.GL_COLOR_BUFFER_BIT | GL3.GL_DEPTH_BUFFER_BIT);
final int numOpaqueVertices = numOpaqueBlocks
* GPU.GPU_VERTICES_PER_BLOCK;
final int numTransparentVertices = numTransparentBlocks
* GPU.GPU_VERTICES_PER_BLOCK;
// Turn on depth write, turn off transparency
gl.glDisable(GL3.GL_BLEND);
gl.glDepthFunc(GL3.GL_LESS);
if(tr.renderer.get().isBackfaceCulling())gl.glEnable(GL3.GL_CULL_FACE);
// renderModeUniform.set(OPAQUE_PASS);
final int verticesPerSubPass = (NUM_BLOCKS_PER_SUBPASS * GPU.GPU_VERTICES_PER_BLOCK);
final int numSubPasses = (numOpaqueVertices / verticesPerSubPass) + 1;
int remainingVerts = numOpaqueVertices;
if (frameCounter == 0) {
tr.getReporter().report(
"org.jtrfp.trcl.core.RenderList.numOpaqueBlocks",
"" + numOpaqueBlocks);
tr.getReporter().report(
"org.jtrfp.trcl.core.RenderList.numTransparentBlocks",
"" + numTransparentBlocks);
tr.getReporter().report(
"org.jtrfp.trcl.core.RenderList.approxNumSceneTriangles",
"" + ((numOpaqueBlocks+numTransparentBlocks)*GPU.GPU_VERTICES_PER_BLOCK)/3);
}
for (int sp = 0; sp < numSubPasses; sp++) {
final int numVerts = remainingVerts <= verticesPerSubPass ? remainingVerts
: verticesPerSubPass;
remainingVerts -= numVerts;
final int newOffset = modulusUintOffset + sp
* NUM_BLOCKS_PER_SUBPASS;// newOffset is in uints
renderListOffsetUniform.setui(newOffset);
gl.glDrawArrays(GL3.GL_TRIANGLES, 0, numVerts);
}// end for(subpasses)
// DEPTH QUEUE STAGE
// ERASE
tr.renderer.get().depthErasureProgram.use();
gl.glDisable(GL3.GL_CULL_FACE);
depthQueueFrameBuffer.bindToDraw();
gl.glEnable(GL3.GL_MULTISAMPLE);
gl.glEnable(GL3.GL_SAMPLE_MASK);
gl.glDepthFunc(GL3.GL_ALWAYS);
gl.glDepthMask(false);
gl.glEnable(GL3.GL_STENCIL_TEST);
for (int i = 0; i < Renderer.DEPTH_QUEUE_SIZE; i++) {
gl.glStencilFunc(GL3.GL_ALWAYS, i + 1, 0xff);
gl.glStencilOp(GL3.GL_REPLACE, GL3.GL_REPLACE, GL3.GL_REPLACE);
gl.glSampleMaski(0, 0x1 << i);
gl.glDrawArrays(GL3.GL_TRIANGLES, 0, 6);
}
gl.glSampleMaski(0, 0xFF);
// DRAW
depthQueueProgram.use();
gl.glDisable(GL3.GL_MULTISAMPLE);
gl.glStencilFunc(GL3.GL_EQUAL, 0x1, 0xFF);
gl.glStencilOp(GL3.GL_DECR, GL3.GL_DECR, GL3.GL_DECR);
gl.glSampleMaski(0, 0xFF);
GLTexture.specifyTextureUnit(gl, 0);
intermediateDepthTexture.bind(gl);
depthQueueProgram.getUniform("cameraMatrix").set4x4Matrix(
matrixAsFlatArray, true);// TODO: Consolidate or abbreviate
depthQueueProgram.getUniform("renderListOffset").setui(
modulusUintOffset + NUM_BLOCKS_PER_PASS);
tr.gpu.get().memoryManager.get().bindToUniform(4, depthQueueProgram,
depthQueueProgram.getUniform("rootBuffer"));
gl.glDrawArrays(GL3.GL_TRIANGLES, 0, numTransparentVertices);
gl.glEnable(GL3.GL_MULTISAMPLE);
gl.glStencilFunc(GL3.GL_ALWAYS, 0xFF, 0xFF);//NEW
gl.glDisable(GL3.GL_STENCIL_TEST);
// DEFERRED STAGE
gl.glDepthMask(true);
gl.glDepthFunc(GL3.GL_ALWAYS);
if(tr.renderer.get().isBackfaceCulling())gl.glDisable(GL3.GL_CULL_FACE);
final GLProgram deferredProgram = tr.renderer.get().getDeferredProgram();
deferredProgram.use();
gl.glBindFramebuffer(GL3.GL_FRAMEBUFFER, 0);// Zero means
// "Draw to screen"
GLTexture.specifyTextureUnit(gl, 1);
intermediateColorTexture.bind(gl);
GLTexture.specifyTextureUnit(gl, 2);
intermediateDepthTexture.bind(gl);
GLTexture.specifyTextureUnit(gl, 3);
intermediateNormTexture.bind(gl);
tr.gpu.get().memoryManager.get().bindToUniform(4, deferredProgram,
rootBuffer);
GLTexture.specifyTextureUnit(gl, 5);
tr.gpu.get().textureManager.get().vqCodebookManager.get().getRGBATexture().bind();
GLTexture.specifyTextureUnit(gl, 6);
intermediateTextureIDTexture.bind();
GLTexture.specifyTextureUnit(gl, 7);
depthQueueTexture.bind();
//Execute the draw to a screen quad
gl.glDrawArrays(GL3.GL_TRIANGLES, 0, 6);
}// end render()
public Submitter<PositionedRenderable> getSubmitter() {
return submitter;
}
public void reset() {
numOpaqueBlocks = 0;
numTransparentBlocks = 0;
blendIndex = 0;
opaqueIndex = 0;
synchronized(nearbyWorldObjects)
{nearbyWorldObjects.clear();}
}//end reset()
public List<WorldObject> getVisibleWorldObjectList(){
return nearbyWorldObjects;
}
}// end RenderList
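/*
 * Per-frame usage sketch (hypothetical driver code; the real caller lives in
 * the renderer / thread manager, which is not shown here). One plausible
 * ordering, using only the public methods defined above: push the currently
 * relevant renderables through the Submitter, upload object state, run the
 * three-stage render (opaque, depth-queued transparency, deferred composite)
 * on the GL thread, then reset for the next frame.
 *
 *   Submitter<PositionedRenderable> s = renderList.getSubmitter();
 *   // ... submit the PositionedRenderables that are currently in range ...
 *   renderList.sendToGPU(gl);
 *   renderList.render(gl);
 *   renderList.reset();
 */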
|
package it.unibz.inf.ontop.utils;
import com.google.common.collect.*;
import java.util.*;
import java.util.function.*;
import java.util.stream.Collector;
public class ImmutableCollectors {
private static abstract class ImmutableCollectionCollector<T, A extends ImmutableCollection.Builder, R extends ImmutableCollection<T>>
implements Collector<T, A, R> {
@Override
public BiConsumer<A, T> accumulator() {
return (c, v) -> c.add(v);
}
@Override
public BinaryOperator<A> combiner() {
return (c1, c2) -> (A) c1.addAll(c2.build().iterator());
}
@Override
public Function<A, R> finisher() {
return (bl -> (R) bl.build());
}
@Override
public Set<Characteristics> characteristics() {
return Sets.newHashSet(Collector.Characteristics.CONCURRENT);
}
}
private static class ImmutableSetCollector<T> extends ImmutableCollectionCollector<T, ImmutableSet.Builder<T>,
ImmutableSet<T>> {
@Override
public Supplier<ImmutableSet.Builder<T>> supplier() {
return ImmutableSet::builder;
}
@Override
public Set<Characteristics> characteristics() {
return Sets.newHashSet(Characteristics.CONCURRENT, Characteristics.UNORDERED);
}
}
private static class ImmutableMultisetCollector<T> extends ImmutableCollectionCollector<T, ImmutableMultiset.Builder<T>,
ImmutableMultiset<T>> {
@Override
public Supplier<ImmutableMultiset.Builder<T>> supplier() {
return ImmutableMultiset::builder;
}
@Override
public Set<Characteristics> characteristics() {
return Sets.newHashSet(Characteristics.CONCURRENT, Characteristics.UNORDERED);
}
}
private static class ImmutableListCollector<T> extends ImmutableCollectionCollector<T, ImmutableList.Builder<T>,
ImmutableList<T>> {
@Override
public Supplier<ImmutableList.Builder<T>> supplier() {
return ImmutableList::builder;
}
}
private static final class Partition<T> extends AbstractMap<Boolean, T> implements Map<Boolean, T> {
final T forTrue;
final T forFalse;
Partition(T forTrue, T forFalse) {
this.forTrue = forTrue;
this.forFalse = forFalse;
}
@Override
public Set<Map.Entry<Boolean, T>> entrySet() {
return ImmutableSet.of(
new AbstractMap.SimpleImmutableEntry<>(false, forFalse),
new AbstractMap.SimpleImmutableEntry<>(true, forTrue)
);
}
}
public static <E> ImmutableListCollector<E> toList() {
return new ImmutableListCollector<>();
}
public static <E> ImmutableSetCollector<E> toSet() {
return new ImmutableSetCollector<>();
}
public static <E> ImmutableMultisetCollector<E> toMultiset() {
return new ImmutableMultisetCollector<>();
}
public static <T, K, U> Collector<T, ? ,ImmutableMap<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper) {
return Collector.of(
// Supplier
ImmutableMap::<K,U>builder,
// Accumulator
(builder, e) -> builder.put(keyMapper.apply(e), valueMapper.apply(e)),
// Merger
(builder1, builder2) -> builder1.putAll(builder2.build()),
// Finisher
ImmutableMap.Builder::<K,U>build,
Collector.Characteristics.UNORDERED);
}
public static <T, K, U> Collector<T, ? ,ImmutableMap<K,U>> toMap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper,
BinaryOperator<U> mergeFunction) {
return Collector.of(
// Supplier
Maps::<K,U>newHashMap,
// Accumulator
(m, e) -> m.merge(keyMapper.apply(e), valueMapper.apply(e), mergeFunction),
// Merger
mapMerger(mergeFunction),
// Finisher
ImmutableMap::copyOf,
Collector.Characteristics.UNORDERED);
}
private static <K, U>
BinaryOperator<Map<K,U>> mapMerger(BinaryOperator<U> mergeFunction) {
return (m1, m2) -> {
for (Map.Entry<K,U> e : m2.entrySet())
m1.merge(e.getKey(), e.getValue(), mergeFunction);
return m1;
};
}
public static <T extends Map.Entry<K,U>, K, U> Collector<T, ? ,ImmutableMap<K,U>> toMap() {
return Collector.of(
// Supplier
ImmutableMap::<K,U>builder,
// Accumulator
ImmutableMap.Builder::<K,U>put,
// Merger
(builder1, builder2) -> builder1.putAll(builder2.build()),
// Finisher
ImmutableMap.Builder::<K,U>build,
Collector.Characteristics.UNORDERED);
}
public static <T, K, U> Collector<T, ? ,ImmutableMultimap<K,U>> toMultimap(Function<? super T, ? extends K> keyMapper,
Function<? super T, ? extends U> valueMapper) {
return Collector.of(
// Supplier
ImmutableMultimap::<K,U>builder,
// Accumulator
(builder, e) -> builder.put(keyMapper.apply(e), valueMapper.apply(e)),
// Merger
(builder1, builder2) -> builder1.putAll(builder2.build()),
// Finisher
ImmutableMultimap.Builder::<K,U>build,
Collector.Characteristics.UNORDERED);
}
public static <T extends Map.Entry<K,U>, K, U> Collector<T, ? ,ImmutableMultimap<K,U>> toMultimap() {
return Collector.of(
// Supplier
ImmutableMultimap::<K,U>builder,
// Accumulator
ImmutableMultimap.Builder::<K,U>put,
// Merger
(builder1, builder2) -> builder1.putAll(builder2.build()),
// Finisher
ImmutableMultimap.Builder::<K,U>build,
Collector.Characteristics.UNORDERED);
}
public static <T> Collector<T, ?, ImmutableMap<Boolean, ImmutableList<T>>> partitioningBy(Predicate<? super T> predicate) {
return partitioningBy(predicate, toList());
}
public static <T, A , D> Collector<T, ?, ImmutableMap<Boolean, D>> partitioningBy (Predicate<? super T> predicate,
Collector<T, A , D> innerCollector){
//Supplier (stores a binary Partition, i.e. a (two entries) map from Boolean to the supplier type A of the
// innerCollector)
Supplier<Partition<A>> supplier = () -> new Partition<>(
innerCollector.supplier().get(),
innerCollector.supplier().get()
);
//Accumulator:
BiConsumer<A, ? super T> downstreamAccumulator = innerCollector.accumulator();
BiConsumer<Partition<A>, T> accumulator = (result, t) ->
downstreamAccumulator.accept(predicate.test(t) ? result.forTrue : result.forFalse, t);
//Merger
BinaryOperator<A> op = innerCollector.combiner();
BinaryOperator<Partition<A>> combiner = (left, right) ->
new Partition<>(op.apply(left.forTrue, right.forTrue),
op.apply(left.forFalse, right.forFalse));
//Finisher
Function<Partition<A>, ImmutableMap<Boolean, D>> finisher = par -> ImmutableMap.of(
true, innerCollector.finisher().apply(par.forTrue),
false, innerCollector.finisher().apply(par.forFalse)
);
return Collector.of(supplier, accumulator, combiner, finisher, Collector.Characteristics.UNORDERED);
}
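    // Illustrative usage sketch (hypothetical values): splitting a stream by a predicate,
    // with a downstream collector applied to each side (java.util.stream.Collectors.counting()
    // here).
    //
    //   ImmutableMap<Boolean, Long> evenOdd =
    //       Stream.of(1, 2, 3, 4, 5)
    //             .collect(partitioningBy(i -> i % 2 == 0, Collectors.counting()));
    //   // evenOdd: {true=2, false=3}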
}
|
package org.lantern;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicReference;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import javax.security.auth.login.CredentialException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.ByteArrayBody;
import org.apache.http.entity.mime.content.ContentBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jivesoftware.smack.Chat;
import org.jivesoftware.smack.MessageListener;
import org.jivesoftware.smack.PacketListener;
import org.jivesoftware.smack.Roster;
import org.jivesoftware.smack.RosterEntry;
import org.jivesoftware.smack.SmackConfiguration;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.filter.IQTypeFilter;
import org.jivesoftware.smack.filter.PacketFilter;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.Presence;
import org.jivesoftware.smack.packet.Presence.Type;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.lantern.event.ClosedBetaEvent;
import org.lantern.event.Events;
import org.lantern.event.FriendStatusChangedEvent;
import org.lantern.event.GoogleTalkStateEvent;
import org.lantern.event.ResetEvent;
import org.lantern.event.UpdateEvent;
import org.lantern.event.UpdatePresenceEvent;
import org.lantern.kscope.KscopeAdHandler;
import org.lantern.kscope.LanternKscopeAdvertisement;
import org.lantern.state.Connectivity;
import org.lantern.state.Friend;
import org.lantern.state.Friend.Status;
import org.lantern.state.Friends;
import org.lantern.state.Model;
import org.lantern.state.ModelUtils;
import org.lantern.state.Notification.MessageType;
import org.lantern.state.Settings;
import org.lantern.state.SyncPath;
import org.lantern.udtrelay.UdtRelayServerFiveTupleListener;
import org.lantern.ui.FriendNotificationDialog;
import org.lantern.ui.NotificationManager;
import org.lantern.util.HttpClientFactory;
import org.lantern.util.Threads;
import org.lastbamboo.common.ice.MappedServerSocket;
import org.lastbamboo.common.p2p.P2PConnectionEvent;
import org.lastbamboo.common.p2p.P2PConnectionListener;
import org.lastbamboo.common.p2p.P2PConstants;
import org.lastbamboo.common.portmapping.NatPmpService;
import org.lastbamboo.common.portmapping.UpnpService;
import org.lastbamboo.common.stun.client.StunServerRepository;
import org.littleshoot.commom.xmpp.XmppCredentials;
import org.littleshoot.commom.xmpp.XmppP2PClient;
import org.littleshoot.commom.xmpp.XmppUtils;
import org.littleshoot.p2p.P2PEndpoints;
import org.littleshoot.util.FiveTuple;
import org.littleshoot.util.SessionSocketListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Charsets;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
* Handles logging in to the XMPP server and processing trusted users through
* the roster.
*/
@Singleton
public class DefaultXmppHandler implements XmppHandler {
private static final Logger LOG =
LoggerFactory.getLogger(DefaultXmppHandler.class);
private final AtomicReference<XmppP2PClient<FiveTuple>> client =
new AtomicReference<XmppP2PClient<FiveTuple>>();
static {
SmackConfiguration.setPacketReplyTimeout(30 * 1000);
}
private volatile long lastInfoMessageScheduled = 0L;
private final MessageListener typedListener = new MessageListener() {
@Override
public void processMessage(final Chat ch, final Message msg) {
// Note the Chat will always be null here. We try to avoid using
// actual Chat instances due to Smack's strange and inconsistent
// behavior with message listeners on chats.
final String from = msg.getFrom();
LOG.debug("Got chat participant: {} with message:\n {}", from,
msg.toXML());
if (msg.getType() == org.jivesoftware.smack.packet.Message.Type.error) {
LOG.warn("Received error message!! {}", msg.toXML());
return;
}
if (LanternUtils.isLanternHub(from)) {
processLanternHubMessage(msg);
}
final Integer type =
(Integer) msg.getProperty(P2PConstants.MESSAGE_TYPE);
if (type != null) {
LOG.debug("Processing typed message");
processTypedMessage(msg, type);
}
}
};
private String lastJson = "";
private String hubAddress;
private GoogleTalkState state;
private final NatPmpService natPmpService;
private final UpnpService upnpService;
private ClosedBetaEvent closedBetaEvent;
private final Object closedBetaLock = new Object();
private MappedServerSocket mappedServer;
//private final PeerProxyManager trustedPeerProxyManager;
private final Timer timer;
private final ClientStats stats;
private final LanternKeyStoreManager keyStoreManager;
private final LanternSocketsUtil socketsUtil;
private final LanternXmppUtil xmppUtil;
private final Model model;
private volatile boolean started;
private final ModelUtils modelUtils;
private final org.lantern.Roster roster;
private final ProxyTracker proxyTracker;
private final KscopeAdHandler kscopeAdHandler;
private TimerTask reconnectIfNoPong;
/**
* The XMPP message id that we are waiting for a pong on
*/
private String waitingForPong;
private long pingTimeout = 15 * 1000;
protected XMPPConnection previousConnection;
private final ExecutorService xmppProcessors =
Threads.newCachedThreadPool("Smack-XMPP-Message-Processing-");
private final NotificationManager notificationManager;
private HttpClientFactory httpClientFactory;
/**
* Creates a new XMPP handler.
*/
@Inject
public DefaultXmppHandler(final Model model,
final Timer updateTimer, final ClientStats stats,
final LanternKeyStoreManager keyStoreManager,
final LanternSocketsUtil socketsUtil,
final LanternXmppUtil xmppUtil,
final ModelUtils modelUtils,
final org.lantern.Roster roster,
final ProxyTracker proxyTracker,
final KscopeAdHandler kscopeAdHandler,
final NatPmpService natPmpService,
final UpnpService upnpService,
final NotificationManager notificationManager,
final HttpClientFactory httpClientFactory) {
this.model = model;
this.timer = updateTimer;
this.stats = stats;
this.keyStoreManager = keyStoreManager;
this.socketsUtil = socketsUtil;
this.xmppUtil = xmppUtil;
this.modelUtils = modelUtils;
this.roster = roster;
this.proxyTracker = proxyTracker;
this.kscopeAdHandler = kscopeAdHandler;
this.natPmpService = natPmpService;
this.upnpService = upnpService;
this.notificationManager = notificationManager;
this.httpClientFactory = httpClientFactory;
Events.register(this);
//setupJmx();
}
@Override
public MappedServerSocket getMappedServer() {
return mappedServer;
}
@Override
public void start() {
this.modelUtils.loadClientSecrets();
XmppUtils.setGlobalConfig(this.xmppUtil.xmppConfig());
XmppUtils.setGlobalProxyConfig(this.xmppUtil.xmppProxyConfig());
this.mappedServer = new LanternMappedTcpAnswererServer(natPmpService,
upnpService, new InetSocketAddress(this.model.getSettings().getServerPort()));
this.started = true;
}
@Override
public void stop() {
LOG.debug("Stopping XMPP handler...");
disconnect();
if (upnpService != null) {
upnpService.shutdown();
}
if (natPmpService != null) {
natPmpService.shutdown();
}
LOG.debug("Stopped XMPP handler...");
}
@Subscribe
public void onAuthStatus(final GoogleTalkStateEvent ase) {
this.state = ase.getState();
switch (state) {
case connected:
// We wait until we're logged in before creating our roster.
final XmppP2PClient<FiveTuple> cl = client.get();
if (cl == null) {
LOG.error("Null client for instance: "+hashCode());
return;
}
this.roster.onRoster(this);
break;
case notConnected:
this.roster.reset();
break;
case connecting:
break;
case LOGIN_FAILED:
this.roster.reset();
break;
}
}
@Subscribe
public void onConnectivityChanged(final ConnectivityChangedEvent e) {
if (!e.isConnected()) {
// send a ping message to determine if we need to reconnect; failed
// STUN connectivity is not necessarily a death sentence for the
// XMPP connection.
// If the ping fails, then XmppP2PClient will retry that connection
// in a loop.
ping();
return;
}
LOG.info("Connected to internet: {}", e);
LOG.info("Logged in? {}", this.isLoggedIn());
XmppP2PClient<FiveTuple> client = this.client.get();
if (client == null) {
LOG.debug("No client?");
return; //this is probably at startup
}
final XMPPConnection conn = client.getXmppConnection();
if (e.isIpChanged()) {
//definitely need to reconnect here
reconnect();
} else {
if (conn == null || !conn.isConnected()) {
//definitely need to reconnect here
reconnect();
} else {
ping();
}
}
}
private void ping() {
//if we are already pinging, cancel the existing ping
//and retry
if (reconnectIfNoPong != null) {
reconnectIfNoPong.cancel();
}
XmppP2PClient<FiveTuple> client = this.client.get();
if (client == null) {
//no connection yet, so we'll just return; the connection
//will be established when we can
return;
}
XMPPConnection connection = client.getXmppConnection();
IQ ping = new IQ() {
@Override
public String getChildElementXML() {
return "<ping xmlns='urn:xmpp:ping'/>";
}
};
waitingForPong = ping.getPacketID();
//set up timer to reconnect if we don't hear a pong
reconnectIfNoPong = new Reconnector();
timer.schedule(reconnectIfNoPong, getPingTimeout());
//and send the ping
connection.sendPacket(ping);
}
    /**
     * How long we wait for a pong before triggering a reconnect.
     *
     * @return the ping timeout in milliseconds
     */
public long getPingTimeout() {
return pingTimeout;
}
public void setPingTimeout(long pingTimeout) {
this.pingTimeout = pingTimeout;
}
/**
* This will be cancelled if a pong is received,
* indicating that we have already successfully
* reconnected
*/
private class Reconnector extends TimerTask {
@Override
public void run() {
reconnect();
}
}
@Override
public synchronized void connect() throws IOException, CredentialException,
NotInClosedBetaException {
if (!this.started) {
LOG.warn("Can't connect when not started!!");
throw new Error("Can't connect when not started!!");
}
if (!this.modelUtils.isConfigured()) {
if (this.model.getSettings().isUiEnabled()) {
LOG.debug("Not connecting when not configured and UI enabled");
return;
}
}
if (isLoggedIn()) {
LOG.warn("Already logged in!! Not connecting");
return;
}
LOG.debug("Connecting to XMPP servers...");
if (this.modelUtils.isOauthConfigured()) {
connectViaOAuth2();
} else {
throw new Error("Oauth not configured properly?");
}
}
private void connectViaOAuth2() throws IOException,
CredentialException, NotInClosedBetaException {
final XmppCredentials credentials =
this.modelUtils.newGoogleOauthCreds(getResource());
LOG.debug("Logging in with credentials: {}", credentials);
connect(credentials);
}
@Override
public void connect(final String email, final String pass)
throws IOException, CredentialException, NotInClosedBetaException {
//connect(new PasswordCredentials(email, pass, getResource()));
}
private String getResource() {
return LanternConstants.UNCENSORED_ID;
}
    /**
     * Listens for responses to our XMPP pings and, if one arrives,
     * cancels the pending reconnect.
     */
private class PingListener implements PacketListener {
@Override
public void processPacket(Packet packet) {
IQ iq = (IQ) packet;
if (iq.getPacketID().equals(waitingForPong)) {
LOG.debug("Got pong, cancelling pending reconnect");
reconnectIfNoPong.cancel();
}
}
}
private class DefaultP2PConnectionListener implements P2PConnectionListener {
@Override
public void onConnectivityEvent(final P2PConnectionEvent event) {
LOG.debug("Got connectivity event: {}", event);
Events.asyncEventBus().post(event);
XMPPConnection connection = client.get().getXmppConnection();
if (connection == previousConnection) {
//only add packet listener once
return;
}
previousConnection = connection;
connection.addPacketListener(new PingListener(),
new IQTypeFilter(org.jivesoftware.smack.packet.IQ.Type.RESULT));
}
}
/**
* Connect to Google Talk's XMPP servers using the supplied XmppCredentials
*/
private void connect(final XmppCredentials credentials)
throws IOException, CredentialException, NotInClosedBetaException {
LOG.debug("Connecting to XMPP servers with credentials...");
this.closedBetaEvent = null;
final InetSocketAddress plainTextProxyRelayAddress =
LanternUtils.isa("127.0.0.1",
LanternUtils.PLAINTEXT_LOCALHOST_PROXY_PORT);
if (this.client.get() == null) {
makeClient(plainTextProxyRelayAddress);
} else {
LOG.debug("Using existing client for xmpp handler: "+hashCode());
}
// This is a global, backup listener added to the client. We might
// get notifications of messages twice in some cases, but that's
// better than the alternative of sometimes not being notified
// at all.
LOG.debug("Adding message listener...");
this.client.get().addMessageListener(typedListener);
Events.eventBus().post(
new GoogleTalkStateEvent("", GoogleTalkState.connecting));
login(credentials);
// Note we don't consider ourselves connected in get mode until we
// actually get proxies to work with.
final XMPPConnection connection = this.client.get().getXmppConnection();
getStunServers(connection);
        // Make sure all chats between us and the server are off-the-record
        // (OTR) so they aren't stored in chat history.
modelUtils.syncConnectingStatus("Activating Google Talk pseudo-OTR...");
LanternUtils.activateOtr(connection);
LOG.debug("Connection ID: {}", connection.getConnectionID());
modelUtils.syncConnectingStatus("Waiting for message from Lantern...");
DefaultPacketListener listener = new DefaultPacketListener();
connection.addPacketListener(listener, listener);
gTalkSharedStatus();
updatePresence();
waitForClosedBetaStatus(credentials.getUsername());
modelUtils.syncConnectingStatus("Lantern message received...");
}
private void makeClient(final InetSocketAddress plainTextProxyRelayAddress)
throws IOException {
final SessionSocketListener sessionListener = new SessionSocketListener() {
@Override
public void reconnected() {
// We need to send a new presence message each time we
// reconnect to the XMPP server, as otherwise peers won't
// know we're available and we won't get data from the bot.
updatePresence();
}
@Override
public void onSocket(String arg0, Socket arg1) throws IOException {
}
};
client.set(makeXmppP2PHttpClient(plainTextProxyRelayAddress,
sessionListener));
LOG.debug("Set client for xmpp handler: "+hashCode());
client.get().addConnectionListener(new DefaultP2PConnectionListener());
}
private void getStunServers(final XMPPConnection connection) {
modelUtils.syncConnectingStatus("Gathering servers...");
final Collection<InetSocketAddress> googleStunServers =
XmppUtils.googleStunServers(connection);
StunServerRepository.setStunServers(googleStunServers);
this.model.getSettings().setStunServers(
new HashSet<String>(toStringServers(googleStunServers)));
}
private void login(final XmppCredentials credentials) throws IOException,
CredentialException {
try {
this.client.get().login(credentials);
modelUtils.syncConnectingStatus("Logged in to Google Talk...");
// Preemptively create our key.
this.keyStoreManager.getBase64Cert(getJid());
LOG.debug("Sending connected event");
Events.eventBus().post(
new GoogleTalkStateEvent(getJid(), GoogleTalkState.connected));
} catch (final IOException e) {
// Note that the XMPP library will internally attempt to connect
// to our backup proxy if it can.
handleConnectionFailure();
throw e;
} catch (final IllegalStateException e) {
handleConnectionFailure();
throw e;
} catch (final CredentialException e) {
handleConnectionFailure();
throw e;
}
}
private XmppP2PClient<FiveTuple> makeXmppP2PHttpClient(
final InetSocketAddress plainTextProxyRelayAddress,
final SessionSocketListener sessionListener) throws IOException {
return P2PEndpoints.newXmppP2PHttpClient(
"shoot", natPmpService,
this.upnpService, this.mappedServer,
this.socketsUtil.newTlsSocketFactory(),
this.socketsUtil.newTlsServerSocketFactory(),
plainTextProxyRelayAddress, sessionListener, false,
new UdtRelayServerFiveTupleListener());
}
private void handleConnectionFailure() {
Events.eventBus().post(
new GoogleTalkStateEvent("", GoogleTalkState.LOGIN_FAILED));
}
private void waitForClosedBetaStatus(final String email)
throws NotInClosedBetaException {
if (this.modelUtils.isInClosedBeta(email)) {
LOG.debug("Already in closed beta...");
return;
}
// The following is necessary because the call to login needs to either
// succeed or fail for the UI to work properly, but we don't know if
// a user is able to log in until we get an asynchronous XMPP message
// back from the server.
synchronized (this.closedBetaLock) {
if (this.closedBetaEvent == null) {
try {
this.closedBetaLock.wait(80 * 1000);
} catch (final InterruptedException e) {
LOG.info("Interrupted? Maybe on shutdown?", e);
}
}
}
if (this.closedBetaEvent != null) {
if(!this.closedBetaEvent.isInClosedBeta()) {
LOG.debug("Not in closed beta...");
notInClosedBeta("Not in closed beta");
} else {
LOG.debug("Server notified us we're in the closed beta!");
return;
}
} else {
LOG.warn("No closed beta event -- timed out!!");
notInClosedBeta("No closed beta event!!");
}
}
    /**
     * The default packet listener. It accepts presence packets and dispatches them
     * on a worker thread to either the hub handler or the peer handler.
     */
private class DefaultPacketListener implements PacketListener, PacketFilter {
@Override
public void processPacket(final Packet pack) {
final Runnable runner = new Runnable() {
@Override
public void run() {
final Presence pres = (Presence) pack;
LOG.debug("Processing packet!! {}", pres.toXML());
final String from = pres.getFrom();
LOG.debug("Responding to presence from '{}' and to '{}'",
from, pack.getTo());
final Type type = pres.getType();
// Allow subscription requests from the lantern bot.
if (LanternUtils.isLanternHub(from)) {
handleHubMessage(pack, pres, from, type);
} else {
handlePeerMessage(pack, pres, from, type);
}
}
};
xmppProcessors.execute(runner);
}
private void handlePeerMessage(final Packet pack,
final Presence pres, final String from, final Type type) {
switch (type) {
case available:
peerAvailable(from);
return;
case error:
LOG.warn("Got error packet!! {}", pack.toXML());
return;
case subscribe:
LOG.debug("Adding subscription request from: {}", from);
// Did we originally invite them and they're
// subscribing back? Auto-allow if so.
if (roster.autoAcceptSubscription(from)) {
subscribed(from);
} else {
LOG.debug("We didn't invite " + from);
}
                // The XMPP spec says that we MUST reply to this request with
                // either 'subscribed' or 'unsubscribed'. But we don't even know
                // if this is a Lantern request yet, so we can't reply yet.
                // Fortunately there's no deadline for responding. We mark that
                // we owe this user a reply, so that if we do decide to friend
                // the user, we can approve the request.
roster.addIncomingSubscriptionRequest(pres);
break;
case subscribed:
break;
case unavailable:
// TODO: We should remove the peer from our proxy
// lists!!
return;
case unsubscribe:
// The user is unsubscribing from us, so we will no longer be
// able to send them messages. However, we still trust them
// so there is no reason to remove them from the friends list.
// If they later resubscribe to us, we don't need to go
// through the whole friending process again.
return;
case unsubscribed:
break;
}
}
/** Allow the hub to subscribe to messages from us. */
private void handleHubMessage(final Packet pack,
final Presence pres, final String from, final Type type) {
if (type == Type.subscribe) {
final Presence packet =
new Presence(Presence.Type.subscribed);
packet.setTo(from);
packet.setFrom(pack.getTo());
XMPPConnection connection = client.get().getXmppConnection();
connection.sendPacket(packet);
} else {
LOG.debug("Non-subscribe packet from hub? {}",
pres.toXML());
}
}
@Override
public boolean accept(final Packet packet) {
if (packet instanceof Presence) {
return true;
} else {
LOG.debug("Not a presence packet: {}", packet.toXML());
}
return false;
}
};
private void notInClosedBeta(final String msg)
throws NotInClosedBetaException {
LOG.debug("Not in closed beta!");
disconnect();
throw new NotInClosedBetaException(msg);
}
private Set<String> toStringServers(
final Collection<InetSocketAddress> googleStunServers) {
final Set<String> strings = new HashSet<String>();
for (final InetSocketAddress isa : googleStunServers) {
strings.add(isa.getAddress().getHostAddress()+":"+isa.getPort());
}
return strings;
}
@Override
public void disconnect() {
LOG.debug("Disconnecting!!");
lastJson = "";
/*
LanternHub.eventBus().post(
new GoogleTalkStateEvent(GoogleTalkState.LOGGING_OUT));
*/
final XmppP2PClient<FiveTuple> cl = this.client.get();
if (cl != null) {
this.client.get().logout();
//this.client.set(null);
}
Events.eventBus().post(
new GoogleTalkStateEvent("", GoogleTalkState.notConnected));
proxyTracker.clearPeerProxySet();
this.closedBetaEvent = null;
// This is mostly logged for debugging thorny shutdown issues...
LOG.debug("Finished disconnecting XMPP...");
}
private void processLanternHubMessage(final Message msg) {
Connectivity connectivity = model.getConnectivity();
if (!connectivity.getLanternController()) {
connectivity.setLanternController(true);
Events.sync(SyncPath.CONNECTIVITY_LANTERN_CONTROLLER, true);
}
LOG.debug("Lantern controlling agent response");
this.hubAddress = msg.getFrom();
final String to = XmppUtils.jidToUser(msg.getTo());
LOG.debug("Set hub address to: {}", hubAddress);
final String body = msg.getBody();
LOG.debug("Hub message body: {}", body);
final Object obj = JSONValue.parse(body);
final JSONObject json = (JSONObject) obj;
boolean handled = false;
handled |= handleSetDelay(json);
handled |= handleUpdate(json);
handled |= handleProcessedInvites(json);
handled |= handleFailedInvites(json);
handled |= handleFriends(json);
final Boolean inClosedBeta =
(Boolean) json.get(LanternConstants.INVITED);
if (inClosedBeta != null) {
Events.asyncEventBus().post(new ClosedBetaEvent(to, inClosedBeta));
} else {
if (!handled) {
// assume closed beta, because server replied with unhandled
// message
Events.asyncEventBus().post(new ClosedBetaEvent(to, false));
}
}
if ((Boolean) json.get(LanternConstants.NEED_REFRESH_TOKEN)
== Boolean.TRUE) {
sendToken();
}
}
private boolean handleFriends(JSONObject json) {
@SuppressWarnings("unchecked")
final List<Object> friendUpdates = (List<Object>) json.get(LanternConstants.FRIENDS);
Friends friends = model.getFriends();
if (friendUpdates == null) {
return false;
}
LOG.info("Handling friends update from server");
for (Object friendObj : friendUpdates) {
JSONObject friendJson = (JSONObject) friendObj;
String email = (String) friendJson.get("email");
Status status = Status.valueOf((String) friendJson.get("status"));
String name = (String) friendJson.get("name");
Long nextQuery = (Long) friendJson.get("nextQuery");
Long lastUpdated = (Long) friendJson.get("lastUpdated");
Friend friend = new Friend(email, status, name, nextQuery,
lastUpdated);
// we need to check if we have had a more-recent update of this
// friend.
// that could happen if we had made some local changes while waiting
// to hear back from the XMPP server. It's not very likely.
Friend old = friends.get(email);
if (old != null && old.getLastUpdated() > lastUpdated &&
old.getStatus() != Status.pending) {
friends.setNeedsSync(true);
} else {
if (old == null || old.getStatus() != friend.getStatus()) {
Events.asyncEventBus().post(new FriendStatusChangedEvent(friend));
}
friends.add(friend);
}
}
Events.sync(SyncPath.FRIENDS, friends.getFriends());
return true;
}
private boolean handleFailedInvites(final JSONObject json) {
//list of invites that the server has given up on processing
//perhaps because you are out of invites.
@SuppressWarnings("unchecked")
final List<Object> failedInvites = (List<Object>) json.get(LanternConstants.FAILED_INVITES_KEY);
LOG.info("Failed invites: " + failedInvites);
if (failedInvites == null) {
return false;
}
for (Object inviteObj : failedInvites) {
JSONObject invite = (JSONObject) inviteObj;
String invitee = (String) invite.get(LanternConstants.INVITED_EMAIL);
if (!model.getPendingInvites().contains(invitee)) {
// we already notified about this one
continue;
}
String reason = (String) invite
.get(LanternConstants.INVITE_FAILED_REASON);
LOG.info("Failed invite to " + invitee + " because " + reason);
model.removePendingInvite(invitee);
String message = "Invite to " + invitee + " failed: " + reason;
model.addNotification(message, MessageType.error);
Events.sync(SyncPath.NOTIFICATIONS, model.getNotifications());
}
return true;
}
private boolean handleProcessedInvites(final JSONObject json) {
//list of invites that the server has processed
@SuppressWarnings("unchecked")
final List<Object> invited = (List<Object>) json.get(LanternConstants.INVITED_KEY);
if (invited == null) {
return false;
}
for (Object invite : invited) {
model.removePendingInvite((String) invite);
}
return true;
}
@SuppressWarnings("unchecked")
private boolean handleUpdate(final JSONObject json) {
// This is really a JSONObject, but that itself is a map.
final JSONObject update =
(JSONObject) json.get(LanternConstants.UPDATE_KEY);
if (update == null) {
return false;
}
LOG.info("About to propagate update...");
final Map<String, Object> event = new HashMap<String, Object>();
event.putAll(update);
Events.asyncEventBus().post(new UpdateEvent(event));
        return true;
}
private boolean handleSetDelay(final JSONObject json) {
final Long delay =
(Long) json.get(LanternConstants.UPDATE_TIME);
LOG.debug("Server sent delay of: "+delay);
if (delay == null) {
return false;
}
final long now = System.currentTimeMillis();
final long elapsed = now - lastInfoMessageScheduled;
if (elapsed > 10000 && delay != 0L) {
lastInfoMessageScheduled = now;
timer.schedule(new TimerTask() {
@Override
public void run() {
updatePresence();
}
}, delay);
LOG.debug("Scheduled next info request in {} milliseconds", delay);
} else {
LOG.debug("Ignoring duplicate info request scheduling- "
+ "scheduled request {} milliseconds ago.", elapsed);
}
return true;
}
@Subscribe
public void onClosedBetaEvent(final ClosedBetaEvent cbe) {
LOG.debug("Got closed beta event!!");
this.closedBetaEvent = cbe;
if (this.closedBetaEvent.isInClosedBeta()) {
this.modelUtils.addToClosedBeta(cbe.getTo());
}
synchronized (this.closedBetaLock) {
// We have to make sure that this event is actually intended for
// the user we're currently logged in as!
final String to = this.closedBetaEvent.getTo();
LOG.debug("Analyzing closed beta event for: {}", to);
if (isLoggedIn()) {
final String user = LanternUtils.toEmail(
this.client.get().getXmppConnection());
if (user.equals(to)) {
LOG.debug("Users match!");
this.closedBetaLock.notifyAll();
} else {
LOG.debug("Users don't match {}, {}", user, to);
}
}
}
}
private void gTalkSharedStatus() {
        // This is for Google Talk compatibility. Surprisingly, all we need to
// do is grab our Google Talk shared status, signifying support for
// their protocol, and then we don't interfere with GChat visibility.
final Packet status = XmppUtils.getSharedStatus(
this.client.get().getXmppConnection());
LOG.info("Status:\n{}", status.toXML());
}
@Subscribe
public void onFriendsStatusChanged(final FriendStatusChangedEvent e) {
updatePresence();
}
/**
* Updates the user's presence. We also include any stats and friends
* updates in this message. Note that periodic presence updates are also
* used on the server side to verify which clients are actually available.
*
* We in part send presence updates instead of typical chat messages to get
* around these messages showing up in the user's gchat window.
*/
private void updatePresence() {
if (!isLoggedIn()) {
LOG.debug("Not updating presence when we're not connected");
return;
}
final XMPPConnection conn = this.client.get().getXmppConnection();
if (conn == null || !conn.isConnected()) {
return;
}
LOG.debug("Sending presence available");
// OK, this is bizarre. For whatever reason, we **have** to send the
// following packet in order to get presence events from our peers.
// DO NOT REMOVE THIS MESSAGE!! See XMPP spec.
final Presence pres = new Presence(Presence.Type.available);
conn.sendPacket(pres);
final Presence forHub = new Presence(Presence.Type.available);
forHub.setTo(LanternClientConstants.LANTERN_JID);
forHub.setProperty("language", SystemUtils.USER_LANGUAGE);
forHub.setProperty("instanceId", model.getInstanceId());
forHub.setProperty("mode", model.getSettings().getMode().toString());
final String str = JsonUtils.jsonify(stats);
LOG.debug("Reporting data: {}", str);
if (!this.lastJson.equals(str)) {
this.lastJson = str;
forHub.setProperty("stats", str);
stats.resetCumulativeStats();
} else {
LOG.info("No new stats to report");
}
final Friends friends = model.getFriends();
if (friends.needsSync()) {
syncFriends(friends);
}
conn.sendPacket(forHub);
}
private void syncFriends(final Friends friends) {
LOG.info("Syncing friends: {}", friends);
final String friendsJson = JsonUtils.jsonify(friends);
final HttpClient httpClient = this.httpClientFactory.newClient();
final String endpoint =
LanternClientConstants.CONTROLLER_URL+"/_ah/api/friends/v1/friends";
LOG.debug("Posting to endpoint: "+endpoint);
final String accessToken = this.model.getSettings().getAccessToken();
final HttpPost post = new HttpPost(endpoint);
post.setHeader(HttpHeaders.Names.AUTHORIZATION, "Bearer "+accessToken);
final byte[] raw = LanternUtils.compress(friendsJson);
final String base64 = Base64.encodeBase64String(raw);
final List<? extends NameValuePair> nvps = Arrays.asList(
new BasicNameValuePair("friendsJson", friendsJson)
);
final HttpEntity requestEntity =
new UrlEncodedFormEntity(nvps, Charsets.UTF_8);
post.setEntity(requestEntity);
try {
LOG.debug("About to execute get!");
final HttpResponse response = httpClient.execute(post);
final StatusLine line = response.getStatusLine();
LOG.debug("Got response status: {}", line);
final HttpEntity entity = response.getEntity();
final String body = IOUtils.toString(entity.getContent());
EntityUtils.consume(entity);
LOG.debug("GOT RESPONSE BODY FOR FRIENDS!!!:\n"+body);
friends.setNeedsSync(false);
final int code = line.getStatusCode();
if (code < 200 || code > 299) {
LOG.error("OAuth error?\n"+line);
return;
}
return;
} catch (final IOException e) {
LOG.warn("Could not connect to Google?", e);
} finally {
post.reset();
}
}
@Subscribe
public void onUpdatePresenceEvent(final UpdatePresenceEvent upe) {
// This was originally added to decouple the roster from this class.
final Presence pres = upe.getPresence();
addOrRemovePeer(pres, pres.getFrom());
}
@Override
public void addOrRemovePeer(final Presence p, final String from) {
LOG.info("Processing peer: {}", from);
final URI uri;
try {
uri = new URI(from);
} catch (final URISyntaxException e) {
LOG.error("Could not create URI from: {}", from);
return;
}
if (p.isAvailable()) {
LOG.info("Processing available peer");
// Only exchange certs with peers based on kscope ads.
// OK, we just request a certificate every time we get a present
// peer. If we get a response, this peer will be added to active
// peer URIs.
//sendAndRequestCert(uri);
}
else {
LOG.info("Removing JID for peer '"+from);
this.proxyTracker.removePeer(uri);
}
}
private void processTypedMessage(final Message msg, final Integer type) {
final String from = msg.getFrom();
LOG.info("Processing typed message from {}", from);
switch (type) {
case (XmppMessageConstants.INFO_REQUEST_TYPE):
LOG.debug("Handling INFO request from {}", from);
if (!model.isRejected(from)) {
processInfoData(msg);
} else {
LOG.debug("Not processing message from rejected friend {}",
from);
}
sendInfoResponse(from);
break;
case (XmppMessageConstants.INFO_RESPONSE_TYPE):
LOG.debug("Handling INFO response from {}", from);
if (!model.isRejected(from)) {
processInfoData(msg);
}
break;
case (LanternConstants.KSCOPE_ADVERTISEMENT):
//only process kscope ads delivered by friends
if (model.isFriend(from)) {
LOG.debug("Handling KSCOPE ADVERTISEMENT");
final String payload =
(String) msg.getProperty(
LanternConstants.KSCOPE_ADVERTISEMENT_KEY);
if (StringUtils.isNotBlank(payload)) {
processKscopePayload(from, payload);
} else {
LOG.error("kscope ad with no payload? "+msg.toXML());
}
} else {
LOG.warn("kscope ad from non-friend");
}
break;
default:
LOG.warn("Did not understand type: "+type);
break;
}
}
private void processKscopePayload(final String from, final String payload) {
LOG.debug("Processing payload: {}", payload);
final ObjectMapper mapper = new ObjectMapper();
try {
final LanternKscopeAdvertisement ad =
mapper.readValue(payload, LanternKscopeAdvertisement.class);
final String jid = ad.getJid();
if (this.kscopeAdHandler.handleAd(jid, ad)) {
sendAndRequestCert(jid);
} else {
LOG.debug("Not requesting cert -- duplicate kscope ad?");
}
} catch (final JsonParseException e) {
LOG.warn("Could not parse JSON", e);
} catch (final JsonMappingException e) {
LOG.warn("Could not map JSON", e);
} catch (final IOException e) {
LOG.warn("IO error parsing JSON", e);
}
}
private void sendInfoResponse(final String from) {
final Message msg = new Message();
// The from becomes the to when we're responding.
msg.setTo(from);
msg.setProperty(P2PConstants.MESSAGE_TYPE,
XmppMessageConstants.INFO_RESPONSE_TYPE);
//msg.setProperty(P2PConstants.MAC, this.model.getNodeId());
msg.setProperty(P2PConstants.CERT,
this.keyStoreManager.getBase64Cert(getJid()));
this.client.get().getXmppConnection().sendPacket(msg);
}
private void processInfoData(final Message msg) {
LOG.debug("Processing INFO data from request or response.");
// This just makes sure it's a valid URI!!
final URI uri;
try {
uri = new URI(msg.getFrom());
} catch (final URISyntaxException e) {
LOG.error("Could not create URI from: {}", msg.getFrom());
return;
}
//final String mac = (String) msg.getProperty(P2PConstants.MAC);
final String base64Cert = (String) msg.getProperty(P2PConstants.CERT);
LOG.debug("Base 64 cert: {}", base64Cert);
if (StringUtils.isNotBlank(base64Cert)) {
LOG.debug("Got certificate:\n"+
new String(Base64.decodeBase64(base64Cert),
LanternConstants.UTF8).replaceAll("\u0007", "[bell]")); // don't ring any bells
// Add the peer if we're able to add the cert.
this.kscopeAdHandler.onBase64Cert(uri, base64Cert);
} else {
LOG.error("No cert for peer?");
}
}
@Override
public String getJid() {
// We may have already disconnected on shutdown, for example, so check
// for null.
if (this.client.get() != null &&
this.client.get().getXmppConnection() != null &&
this.client.get().getXmppConnection().getUser() != null) {
return this.client.get().getXmppConnection().getUser().trim();
}
return "";
}
private void sendAndRequestCert(final String peer) {
LOG.debug("Requesting cert from {}", peer);
final Message msg = new Message();
msg.setProperty(P2PConstants.MESSAGE_TYPE,
XmppMessageConstants.INFO_REQUEST_TYPE);
msg.setTo(peer);
// Set our certificate in the request as well -- we want to make
// extra sure these get through!
//msg.setProperty(P2PConstants.MAC, this.model.getNodeId());
msg.setProperty(P2PConstants.CERT,
this.keyStoreManager.getBase64Cert(getJid()));
if (isLoggedIn()) {
this.client.get().getXmppConnection().sendPacket(msg);
} else {
LOG.debug("No longer logged in? Not sending cert");
}
}
@Override
public XmppP2PClient<FiveTuple> getP2PClient() {
return client.get();
}
@Override
public boolean isLoggedIn() {
if (this.client.get() == null) {
return false;
}
final XMPPConnection conn = client.get().getXmppConnection();
if (conn == null) {
return false;
}
return conn.isAuthenticated();
}
@Override
public boolean sendInvite(final Friend friend, boolean redo,
final boolean addToRoster) {
LOG.info("Sending invite");
String email = friend.getEmail();
if (StringUtils.isBlank(this.hubAddress)) {
LOG.info("Blank hub address when sending invite?");
return false;
}
final Set<String> invited = roster.getInvited();
if ((!redo) && invited.contains(email)) {
LOG.info("Already invited");
return false;
}
final XMPPConnection conn = this.client.get().getXmppConnection();
final Roster rost = conn.getRoster();
final Presence pres = new Presence(Presence.Type.available);
pres.setTo(LanternClientConstants.LANTERN_JID);
// "emails" of the form xxx@public.talk.google.com aren't really
// e-mail addresses at all, so don't send 'em.
// In theory we might be able to use the Google Plus API to get
// actual e-mail addresses -- see:
if (LanternUtils.isNotJid(email)) {
pres.setProperty(LanternConstants.INVITED_EMAIL, email);
} else {
pres.setProperty(LanternConstants.INVITED_EMAIL, "");
}
pres.setProperty(LanternConstants.REFRESH_TOKEN,
this.model.getSettings().getRefreshToken());
final RosterEntry entry = rost.getEntry(email);
if (entry != null) {
final String name = entry.getName();
if (StringUtils.isNotBlank(name)) {
pres.setProperty(LanternConstants.INVITEE_NAME, name);
}
}
invited.add(email);
sendPresence(pres, "Invite-Thread");
addToRoster(email);
return true;
}
private void sendPresence(final Presence pres, final String threadName) {
final XMPPConnection conn = this.client.get().getXmppConnection();
final Runnable runner = new Runnable() {
@Override
public void run() {
conn.sendPacket(pres);
}
};
final Thread t = new Thread(runner, threadName);
t.setDaemon(true);
t.start();
}
/** Try to reconnect to the xmpp server */
private void reconnect() {
//this will trigger XmppP2PClient's internal reconnection logic
if (hasConnection()) {
client.get().getXmppConnection().disconnect();
}
// Otherwise the client should already be trying to connect.
}
private boolean hasConnection() {
return client.get() != null && client.get().getXmppConnection() != null;
}
@Override
public void subscribe(final String jid) {
LOG.debug("Sending subscribe message to: {}", jid);
final Presence packet = new Presence(Presence.Type.subscribe);
packet.setTo(jid);
//final String json = JsonUtils.jsonify(this.model.getProfile());
//packet.setProperty(XmppMessageConstants.PROFILE, json);
final XMPPConnection conn = this.client.get().getXmppConnection();
conn.sendPacket(packet);
}
@Override
public void subscribed(final String jid) {
LOG.debug("Sending subscribed message to: {}", jid);
sendTypedPacket(jid, Presence.Type.subscribed);
}
@Override
public void unsubscribe(final String jid) {
LOG.debug("Sending unsubscribe message to: {}", jid);
sendTypedPacket(jid, Presence.Type.unsubscribe);
}
@Override
public void unsubscribed(final String jid) {
LOG.debug("Sending unsubscribed message to: {}", jid);
sendTypedPacket(jid, Presence.Type.unsubscribed);
}
private void sendTypedPacket(final String jid, final Type type) {
final Presence packet = new Presence(type);
packet.setTo(jid);
XmppP2PClient<FiveTuple> xmppP2PClient = this.client.get();
if (xmppP2PClient == null) {
throw new IllegalStateException("Can't send packets without a client");
}
final XMPPConnection conn = xmppP2PClient.getXmppConnection();
if (conn == null) {
throw new IllegalStateException("Can't send packets while offline");
}
conn.sendPacket(packet);
}
@Override
public void addToRoster(final String email) {
// If the user is not already on our roster, we want to make sure to
// send them an invite. If the e-mail address specified does not
// correspond with a Jabber ID, then we're out of luck. If it does,
// then this will send the roster invite.
final XMPPConnection conn = this.client.get().getXmppConnection();
final Roster rost = conn.getRoster();
final RosterEntry entry = rost.getEntry(email);
if (entry == null) {
LOG.debug("Inviting user to join roster: {}", email);
try {
// Note this also sends a subscription request!!
rost.createEntry(email,
StringUtils.substringBefore(email, "@"), new String[]{});
} catch (final XMPPException e) {
LOG.error("Could not create entry?", e);
}
} else {
LOG.debug("User already on roster...");
}
}
@Override
public void removeFromRoster(final String email) {
final XMPPConnection conn = this.client.get().getXmppConnection();
final Roster rost = conn.getRoster();
final RosterEntry entry = rost.getEntry(email);
if (entry != null) {
LOG.debug("Removing user from roster: {}", email);
try {
rost.removeEntry(entry);
} catch (final XMPPException e) {
LOG.error("Could not create entry?", e);
}
}
}
private void setupJmx() {
final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
try {
final Class<? extends Object> clazz = getClass();
final String pack = clazz.getPackage().getName();
final String oName =
pack+":type="+clazz.getSimpleName()+"-"+clazz.getSimpleName();
LOG.debug("Registering MBean with name: {}", oName);
final ObjectName mxBeanName = new ObjectName(oName);
if(!mbs.isRegistered(mxBeanName)) {
mbs.registerMBean(this, mxBeanName);
}
} catch (final MalformedObjectNameException e) {
LOG.error("Could not set up JMX", e);
} catch (final InstanceAlreadyExistsException e) {
LOG.error("Could not set up JMX", e);
} catch (final MBeanRegistrationException e) {
LOG.error("Could not set up JMX", e);
} catch (final NotCompliantMBeanException e) {
LOG.error("Could not set up JMX", e);
}
}
@Subscribe
public void onReset(final ResetEvent event) {
disconnect();
}
@Override
public void sendPacket(final Packet packet) {
this.client.get().getXmppConnection().sendPacket(packet);
}
private void peerAvailable(final String from) {
if (!LanternXmppUtils.isLanternJid(from)) {
return;
}
String email = XmppUtils.jidToUser(from);
Friends friends = model.getFriends();
Friend friend = modelUtils.makeFriend(email);
if (email.equals(model.getProfile().getEmail())) {
//we'll assume that a user already trusts themselves
if (friend.getStatus() != Status.friend) {
subscribe(email);
subscribed(email);
friend.setStatus(Status.friend);
Events.asyncEventBus().post(new FriendStatusChangedEvent(friend));
friends.setNeedsSync(true);
Events.sync(SyncPath.FRIENDS, friends.getFriends());
}
return;
} else {
//sync this new friend so it appears in the friends modal
Events.sync(SyncPath.FRIENDS, friends.getFriends());
}
Settings settings = model.getSettings();
if (friend.shouldNotifyAgain() && settings.isShowFriendPrompts()
&& model.isSetupComplete()) {
FriendNotificationDialog notification;
notification = new FriendNotificationDialog(notificationManager, friends, friend);
notificationManager.notify(notification);
}
}
private void sendToken() {
LOG.info("Sending refresh token to controller.");
final Presence pres = new Presence(Presence.Type.available);
pres.setTo(LanternClientConstants.LANTERN_JID);
pres.setProperty(LanternConstants.REFRESH_TOKEN,
this.model.getSettings().getRefreshToken());
sendPresence(pres, "SendToken-Thread");
}
}
|
// $Id: QueryProcessor.java,v 1.93 2009/04/08 11:33:22 geir.gronmo Exp $
package net.ontopia.topicmaps.query.impl.basic;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.text.Collator;
import net.ontopia.utils.CompactHashSet;
import net.ontopia.utils.OntopiaRuntimeException;
import net.ontopia.utils.StringifierIF;
import net.ontopia.utils.StringUtils;
import net.ontopia.utils.ObjectUtils;
import net.ontopia.infoset.core.LocatorIF;
import net.ontopia.topicmaps.core.TMObjectIF;
import net.ontopia.topicmaps.core.TopicIF;
import net.ontopia.topicmaps.core.TopicMapIF;
import net.ontopia.topicmaps.core.TopicMapStoreIF;
import net.ontopia.topicmaps.core.TopicNameIF;
import net.ontopia.topicmaps.core.VariantNameIF;
import net.ontopia.topicmaps.core.index.IndexIF;
import net.ontopia.topicmaps.impl.rdbms.RDBMSTopicMapStore;
import net.ontopia.topicmaps.query.core.DeclarationContextIF;
import net.ontopia.topicmaps.query.core.InvalidQueryException;
import net.ontopia.topicmaps.query.core.ParsedQueryIF;
import net.ontopia.topicmaps.query.core.QueryProcessorIF;
import net.ontopia.topicmaps.query.core.QueryResultIF;
import net.ontopia.topicmaps.query.core.ParsedModificationStatementIF;
import net.ontopia.topicmaps.query.impl.utils.Prefetcher;
import net.ontopia.topicmaps.query.impl.utils.QueryAnalyzer;
import net.ontopia.topicmaps.query.impl.utils.QueryOptimizer;
import net.ontopia.topicmaps.query.impl.utils.QueryMatchesUtils;
import net.ontopia.topicmaps.query.utils.TologSpy;
import net.ontopia.topicmaps.query.parser.GlobalParseContext;
import net.ontopia.topicmaps.query.parser.LocalParseContext;
import net.ontopia.topicmaps.query.parser.ParseContextIF;
import net.ontopia.topicmaps.query.parser.TologParser;
import net.ontopia.topicmaps.query.parser.TologOptions;
import net.ontopia.topicmaps.query.parser.TologQuery;
import net.ontopia.topicmaps.query.parser.ModificationStatement;
import net.ontopia.topicmaps.query.parser.Variable;
import net.ontopia.topicmaps.utils.PSI;
import net.ontopia.topicmaps.utils.TopicStringifiers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* INTERNAL: This query processor implementation can be used to query any topic
* map implementation; it makes no assumptions about the stored form of the
* topic map.
*/
public class QueryProcessor extends AbstractQueryProcessor implements
QueryProcessorIF, IndexIF {
protected TopicMapIF topicmap; // the topic map to query
protected Collator collator;
protected TologOptions options;
protected TologParser parser; // the default parser (may have state)
static Logger logger = LoggerFactory.getLogger(QueryProcessor.class.getName());
public QueryProcessor(TopicMapIF topicmap) {
this(topicmap, topicmap.getStore().getBaseAddress());
}
public QueryProcessor(TopicMapIF topicmap, LocatorIF base) {
this.topicmap = topicmap;
this.collator = getCollator(topicmap);
this.options = new TologOptions(TologOptions.defaults);
options.setOption("optimizer.role-player-type",
"" + !(topicmap instanceof RDBMSTopicMapStore));
options.setOption("optimizer.next-previous",
"" + !(topicmap instanceof RDBMSTopicMapStore));
options.loadProperties(); // loads tolog.properties from classpath
ParseContextIF context = new GlobalParseContext(new PredicateFactory(
topicmap, base), topicmap, base);
context = new LocalParseContext(context);
parser = new TologParser(context, options);
}
public TologOptions getOptions() {
return options;
}
// / query processor implementation
public QueryResultIF execute(String query) throws InvalidQueryException {
return execute(parseQuery(query, null));
}
public QueryResultIF execute(String query, DeclarationContextIF context)
throws InvalidQueryException {
return execute(parseQuery(query, context));
}
public QueryResultIF execute(String query, Map arguments)
throws InvalidQueryException {
return execute(parseQuery(query, null), arguments);
}
public QueryResultIF execute(String query, Map arguments,
DeclarationContextIF context) throws InvalidQueryException {
return execute(parseQuery(query, context), arguments);
}
public ParsedQueryIF parse(String query) throws InvalidQueryException {
return new ParsedQuery(this, parseQuery(query, null));
}
public ParsedQueryIF parse(String query, DeclarationContextIF context)
throws InvalidQueryException {
return new ParsedQuery(this, parseQuery(query, context));
}
protected TologQuery parseQuery(String query, DeclarationContextIF context)
throws InvalidQueryException {
if (context == null)
// there is no context, so we just use the default parser
return optimize(parser.parseQuery(query));
// there is a context, so we have to use a new parser for this
TologParser localparser = new TologParser((ParseContextIF) context, options);
return optimize(localparser.parseQuery(query));
}
public void load(String ruleset) throws InvalidQueryException {
parser.load(ruleset);
}
public void load(Reader ruleset) throws InvalidQueryException, IOException {
parser.load(ruleset);
}
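  // Typical usage sketch (the topic map reference and the tolog query are hypothetical;
  // see QueryProcessorIF/QueryResultIF for the full contract):
  //
  //   QueryProcessorIF processor = new QueryProcessor(topicmap);
  //   QueryResultIF result = processor.execute("instance-of($T, person)?");
  //   try {
  //     while (result.next())
  //       System.out.println(result.getValue("T"));
  //   } finally {
  //     result.close();
  //   }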
// / query execution code
public QueryResultIF execute(TologQuery query) throws InvalidQueryException {
return execute(query, null);
}
public QueryResultIF execute(TologQuery query, Map arguments)
throws InvalidQueryException {
long start = System.currentTimeMillis();
QueryAnalyzer.verifyParameters(query, arguments);
if (logger.isDebugEnabled())
logger.debug("Parsed query: " + query);
QueryMatches matches;
QueryTracer.startQuery();
try {
matches = createInitialMatches(query, arguments);
List clauses = query.getClauses();
matches = satisfy(clauses, matches);
matches = reduce(query, matches);
matches = count(query, matches);
sort(query, matches);
} finally {
QueryTracer.endQuery();
}
TologSpy.recordExecute(query, start, System.currentTimeMillis());
return new QueryResult(matches, query.getLimit(), query.getOffset());
}
public int update(String query) throws InvalidQueryException {
return update(query, null, null);
}
public int update(String query, DeclarationContextIF context)
throws InvalidQueryException {
return update(query, null, context);
}
public int update(String query, Map<String, ?> params)
throws InvalidQueryException {
return update(query, params, null);
}
public int update(String query, Map<String, ?> params,
DeclarationContextIF context)
throws InvalidQueryException {
return runUpdate(parseUpdateStatement(query, context), params);
}
public ParsedModificationStatementIF parseUpdate(String statement)
throws InvalidQueryException {
return parseUpdate(statement, null);
}
public ParsedModificationStatementIF parseUpdate(String statement,
DeclarationContextIF context)
throws InvalidQueryException {
return new ParsedModificationStatement(parseUpdateStatement(statement,
context));
}
protected ModificationStatement parseUpdateStatement(String statement,
DeclarationContextIF ctx)
throws InvalidQueryException {
if (ctx == null)
return (ModificationStatement) parser.parseStatement(statement);
else {
// there is a context, so we have to use a new parser for this
TologParser localparser = new TologParser((ParseContextIF) ctx, options);
return (ModificationStatement) localparser.parseStatement(statement);
}
}
protected int runUpdate(ModificationStatement statement, Map<String, ?> params)
throws InvalidQueryException {
if (statement.getEmbeddedQuery() != null) {
TologQuery subquery = optimize(statement.getEmbeddedQuery());
QueryTracer.startQuery();
try {
QueryMatches matches = createInitialMatches(subquery, params);
matches = satisfy(subquery.getClauses(), matches);
matches = reduce(subquery, matches);
return statement.doUpdates(matches);
} finally {
QueryTracer.endQuery();
}
} else
return statement.doStaticUpdates(topicmap, params);
}
// / actual query processor implementation
// satisfy lives in AbstractQueryProcessor
// takes the query and sets up the matches table with a single row
// ready for use
public QueryMatches createInitialMatches(TologQuery query, Map arguments) {
Collection items = findClauseItems(query.getClauses(), arguments);
return createInitialMatches(query, items, arguments);
}
public QueryMatches createInitialMatches(TologQuery query, Collection items,
Map arguments) {
QueryContext context = new QueryContext(topicmap, query, arguments, query.getOptions());
return QueryMatchesUtils.createInitialMatches(context, items);
}
/**
* INTERNAL: Projects the query results onto the set of variables specified in
* the 'select' clause. If there is no 'select' clause nothing is done.
*
* @param query The parsed query.
* @param matches The query result.
*/
public QueryMatches reduce(TologQuery query, QueryMatches matches) {
// WARNING: method used by rdbms tolog
if (!query.hasSelectClause() && !matches.hasLiteralColumns())
return matches; // only run if no select clause
QueryTracer.enterSelect(matches);
List projection = new ArrayList();
int[] varix = new int[query.getSelectedVariables().size()];
for (int ix = 0; ix < varix.length; ix++) {
Variable var = (Variable) query.getSelectedVariables().get(ix);
projection.add(var);
varix[ix] = matches.getIndex(var);
}
QueryMatches result = new QueryMatches(projection, matches
.getQueryContext());
Set alreadyAdded = new CompactHashSet();
Object[][] mdata = matches.data;
Object[][] rdata = result.data;
ArrayWrapper wrapper = new ArrayWrapper(); // for instance reuse...
result.last = 0; // we use one row too many all the way through
for (int row = 0; row <= matches.last; row++) {
for (int col = 0; col < varix.length; col++)
rdata[result.last][col] = mdata[row][varix[col]];
wrapper.setArray(rdata[result.last]); // reuse previous wrapper
if (!alreadyAdded.contains(wrapper)) {
alreadyAdded.add(wrapper);
wrapper = new ArrayWrapper(); // can't reuse, so make new wrapper
if (result.last + 1 == result.size) {
result.increaseCapacity();
rdata = result.data;
}
result.last++;
}
}
result.last--; // reclaim the temporary last row
QueryTracer.leaveSelect(result);
return result;
}
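  // For example (illustrative query): given
  //   select $A from composed-by($A : composer, $O : opera)?
  // reduce() drops the $O column and collapses the duplicate $A rows that the
  // projection produces.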
/**
* INTERNAL: Replaces count($A) variables by their relevant counts.
*
* @param query The parsed query.
* @param matches The query result.
*/
public QueryMatches count(TologQuery query, QueryMatches matches) {
// WARNING: method used by rdbms tolog
if (query.getCountedVariables().isEmpty())
return matches;
Collection countVars = query.getCountedVariables();
int[] countcols = new int[countVars.size()];
int ix = 0;
for (Iterator it = countVars.iterator(); it.hasNext();)
countcols[ix++] = matches.getIndex(it.next());
// fixes issue 80: return 0 if the query did not match anything, and
// the select clauses contain only counted variables
if (countVars.size() == matches.colcount &&
matches.last == -1 && matches.size == 1) {
Object[] row = matches.data[matches.size - 1];
for (int i = 0; i < matches.colcount; i++)
row[i] = new Integer(0);
matches.last = 0;
return matches;
}
ArrayWrapper wrapper = new ArrayWrapper(); // for instance reuse...
Map counters = new HashMap();
for (ix = 0; ix <= matches.last; ix++) {
Object[] row = matches.data[ix];
for (int i = 0; i < countcols.length; i++)
row[countcols[i]] = null;
wrapper.setArray(row);
if (counters.containsKey(wrapper))
((Counter) counters.get(wrapper)).counter++;
else {
counters.put(wrapper, new Counter());
wrapper = new ArrayWrapper();
}
}
int next = 0; // next row to use
Iterator it = counters.keySet().iterator();
while (it.hasNext()) {
wrapper = (ArrayWrapper) it.next();
Object[] row = wrapper.row;
Counter count = (Counter) counters.get(wrapper);
for (int i = 0; i < countcols.length; i++)
row[countcols[i]] = new Integer(count.counter);
matches.data[next++] = row; // no need to expand...
}
matches.last = next - 1;
return matches;
}
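  // For example (illustrative query): given
  //   select $TOPIC, count($OCC) from occurrence($TOPIC, $OCC)?
  // the $OCC column of each surviving row is replaced by the number of rows that
  // were collapsed into it, i.e. the occurrence count for that topic.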
/**
* INTERNAL: Sorts the query result as requested.
*
* @param query The parsed query.
* @param matches The query result.
*/
public void sort(TologQuery query, QueryMatches matches) {
// WARNING: method used by rdbms tolog
if (query.getOrderBy().isEmpty())
return;
if (matches.isEmpty()) // no use sorting an empty table
return;
QueryTracer.enterOrderBy();
java.util.Arrays.sort(matches.data, 0, matches.last + 1, new RowComparator(
query, matches));
QueryTracer.leaveOrderBy();
}
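  // For example (illustrative query): "... order by $DATE desc, $NAME?" sorts rows by
  // $DATE descending and then by the sort name of the $NAME topic, using the
  // topic map's collator when one is available.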
/**
* Optimizes the query before executing it.
*/
private TologQuery optimize(TologQuery query) throws InvalidQueryException {
return QueryOptimizer.getOptimizer(query).optimize(query);
}
class Counter {
public int counter = 1;
}
class RowComparator implements java.util.Comparator {
private int[] orderColumns;
private int[] orderType;
private boolean[] isAscending;
private TopicIF sort;
private final static int ORDER_UNKNOWN = -1;
private final static int ORDER_TOPIC = 0;
private final static int ORDER_STRING = 1;
private final static int ORDER_OBJECT = 2;
private final static int ORDER_INT = 3;
private final static int ORDER_FLOAT = 4;
public RowComparator(TologQuery query, QueryMatches result) {
Collection counted = query.getCountedVariables();
int orderVars = query.getOrderBy().size();
orderColumns = new int[orderVars];
orderType = new int[orderVars];
isAscending = new boolean[orderVars];
for (int ix = 0; ix < orderVars; ix++) {
Variable orderBy = (Variable) query.getOrderBy().get(ix);
orderColumns[ix] = result.getIndex(orderBy);
Object[] types = (Object[]) query.getVariableTypes().get(
orderBy.getName());
if (counted.contains(orderBy))
orderType[ix] = ORDER_INT;
else if (types == null) // we don't know the type of the variable
orderType[ix] = ORDER_UNKNOWN;
else if (types.length > 1) // multiple types (possibly TMObjectIFs)
orderType[ix] = ORDER_OBJECT;
else if (types[0].equals(String.class))
orderType[ix] = ORDER_STRING;
else if (types[0].equals(TopicIF.class)) {
orderType[ix] = ORDER_TOPIC;
Prefetcher.prefetch(topicmap, result, orderColumns[ix],
Prefetcher.TopicIF, Prefetcher.TopicIF_topicmap, false);
Prefetcher.prefetch(topicmap, result, orderColumns[ix],
Prefetcher.TopicIF, Prefetcher_OB_fields, Prefetcher_OB_traverse);
}
else if (types[0].equals(Integer.class))
orderType[ix] = ORDER_INT;
else if (types[0].equals(Float.class))
orderType[ix] = ORDER_FLOAT;
else
orderType[ix] = ORDER_OBJECT; // single type (possibly TMObjectIF)
isAscending[ix] = query.isOrderedAscending(orderBy.getName());
}
sort = result.getQueryContext().getTopicMap()
.getTopicBySubjectIdentifier(PSI.getXTMSort());
}
public int compare(Object o1, Object o2) {
Object[] row1 = (Object[]) o1;
Object[] row2 = (Object[]) o2;
int comp = 0;
int ix;
for (ix = 0; comp == 0 && ix < orderColumns.length; ix++) {
// null checks first
if (row1[orderColumns[ix]] == null) {
if (row2[orderColumns[ix]] == null)
comp = 0;
else
comp = -1;
continue;
} else if (row2[orderColumns[ix]] == null) {
comp = 1;
continue;
}
// no nulls, we can compare
switch (orderType[ix]) {
case ORDER_TOPIC:
if (row1[orderColumns[ix]] == row2[orderColumns[ix]])
comp = 0;
else {
String name1 = getSortName((TopicIF) row1[orderColumns[ix]], sort);
String name2 = getSortName((TopicIF) row2[orderColumns[ix]], sort);
if (name1 == null)
comp = name2 == null ? 0 : -1;
else if (name2 == null)
comp = 1;
else
comp = (collator != null ?
collator.compare(name1, name2) :
name1.compareTo(name2));
}
break;
case ORDER_INT:
comp = ((Integer) row1[orderColumns[ix]]).intValue()
- ((Integer) row2[orderColumns[ix]]).intValue();
break;
case ORDER_FLOAT:
Float f1 = (Float) row1[orderColumns[ix]];
Float f2 = (Float) row2[orderColumns[ix]];
comp = f1.compareTo(f2);
break;
case ORDER_STRING:
comp = (collator != null ?
collator.compare((String)row1[orderColumns[ix]], (String)row2[orderColumns[ix]]) :
((Comparable) row1[orderColumns[ix]]).compareTo(row2[orderColumns[ix]]));
break;
case ORDER_OBJECT:
// if both objects are topic then sort them as topics
if (row1[orderColumns[ix]] instanceof TopicIF &&
row2[orderColumns[ix]] instanceof TopicIF) {
if (row1[orderColumns[ix]] == row2[orderColumns[ix]])
comp = 0;
else {
String name1 = getSortName((TopicIF) row1[orderColumns[ix]], sort);
String name2 = getSortName((TopicIF) row2[orderColumns[ix]], sort);
if (name1 == null)
comp = name2 == null ? 0 : -1;
else if (name2 == null)
comp = 1;
else
comp = (collator != null ?
collator.compare(name1, name2) :
name1.compareTo(name2));
}
} else {
Object x1 = row1[orderColumns[ix]];
Object x2 = row2[orderColumns[ix]];
String id1 = (x1 instanceof TMObjectIF ? ((TMObjectIF) x1).getObjectId() : ObjectUtils.toString(x1));
String id2 = (x2 instanceof TMObjectIF ? ((TMObjectIF) x2).getObjectId() : ObjectUtils.toString(x2));
comp = id1.compareTo(id2);
}
break;
case ORDER_UNKNOWN:
throw new OntopiaRuntimeException(
"INTERNAL ERROR: Could not infer type " + "of column "
+ orderColumns[ix] + ". "
+ "Please report to <support@ontopia.net>.");
default:
// unknown kind of ordering. complain!
throw new OntopiaRuntimeException("INTERNAL ERROR: Unknown ordering"
+ " type " + orderType[ix] + " in position " + ix);
}
}
ix--; // get back to previous value
if (comp != 0 && !isAscending[ix])
comp *= -1;
return comp;
}
}
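// Illustrative worked example (not part of the original source): sorting the
// rows [["b", 2], ["a", 1]] on string column 0 ascending compares "a" with "b"
// (via the collator when one is configured, otherwise String.compareTo), so
// Arrays.sort() yields [["a", 1], ["b", 2]]; nulls always sort first, and when
// isAscending[ix] is false the non-zero comparison is negated to reverse the
// order.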
// We have to use this to get meaningful implementations of
// hashCode() and equals() for arrays. Arrays have these methods,
// but they are, stupidly, the same as for Object.
final class ArrayWrapper {
public Object[] row;
private int hashCode;
public void setArray(Object[] row) {
this.row = row;
hashCode = 0;
for (int ix = 0; ix < row.length; ix++)
if (row[ix] != null)
hashCode = (hashCode + row[ix].hashCode()) & 0x7FFFFFFF;
}
public int hashCode() {
return hashCode;
}
public boolean equals(Object o) {
// this class is only used here, so we are making some simplifying
// assumptions:
// - o is not null
// - o is an ArrayWrapper
// - o contains an Object[] array of the same length as row
Object[] orow = ((ArrayWrapper) o).row;
for (int ix = 0; ix < orow.length; ix++)
if (orow[ix] != null && !orow[ix].equals(row[ix]))
return false;
return true;
}
}
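// Usage sketch (illustrative, not part of the original source): inside the
// processor a result row can be used as a map key like this:
//   ArrayWrapper w1 = new ArrayWrapper();
//   w1.setArray(new Object[] { "a", topic });
//   ArrayWrapper w2 = new ArrayWrapper();
//   w2.setArray(new Object[] { "a", topic });
//   // w1.equals(w2) is true and the hash codes match, so rows can be keys in
//   // a HashMap/HashSet, which a plain Object[] does not support.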
// -- helper method
/**
* Returns the sort name used to sort the given topic.
*/
public static String getSortName(TopicIF topic, TopicIF sort) {
// 0: verify that we have a topic at all
if (topic == null)
return "[No name]";
// 1: pick base name with the fewest topics in scope
// (and avoid typed names)
TopicNameIF bn = null;
int least = 0xEFFF;
Collection bns = topic.getTopicNames();
if (!bns.isEmpty()) {
Iterator it = bns.iterator();
while (it.hasNext()) {
TopicNameIF candidate = (TopicNameIF) it.next();
int score = candidate.getScope().size() * 10;
if (candidate.getType() != null)
score++;
if (score < least) {
bn = candidate;
least = score;
}
}
}
if (bn == null)
return "[No name]";
// 2: if we have a sort name, pick variant with fewest topics in scope
// beyond sort name; penalty for no sort name = 0xFF topics
if (sort == null)
return bn.getValue();
VariantNameIF vn = null;
least = 0xEFFF;
Collection vns = bn.getVariants();
if (!vns.isEmpty()) {
Iterator it = vns.iterator();
while (it.hasNext()) {
VariantNameIF candidate = (VariantNameIF) it.next();
Collection scope = candidate.getScope();
int themes;
if (scope.contains(sort))
themes = scope.size() - 1;
else
themes = 0xFF + scope.size();
if (themes < least) {
vn = candidate;
least = themes;
}
}
}
if (vn == null || vn.getValue() == null)
return bn.getValue();
return vn.getValue();
}
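// Illustrative worked example (not part of the original source): a topic with
// the base names "Norway" (unscoped, untyped, score 0) and "Norge" (scoped by
// one theme, score 10) sorts by "Norway"; if that name carries a variant
// scoped by the XTM sort topic, e.g. "norway", the variant value is returned
// instead of the base name value.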
// -- Prefetcher constants
private final static int[] Prefetcher_OB_fields = new int[] {
Prefetcher.TopicIF_names, Prefetcher.TopicNameIF_variants };
private final static boolean[] Prefetcher_OB_traverse = new boolean[] {
false, false };
// -- Collation handling
private Collator getCollator(TopicMapIF tm) {
if (tm.getStore().getImplementation() == TopicMapStoreIF.RDBMS_IMPLEMENTATION) {
// look up locale settings in properties file
RDBMSTopicMapStore store = (RDBMSTopicMapStore) tm.getStore();
String locale = store.getProperty("net.ontopia.topicmaps.query.core.QueryProcessorIF.locale");
Collator c = getCollator(locale);
if (c != null) return c;
}
// fallback to using system property
try {
return getCollator(System.getProperty("net.ontopia.topicmaps.query.core.QueryProcessorIF.locale"));
} catch (SecurityException e) {
return null;
}
}
private Locale getLocale(String _locale) {
if (_locale == null) return null;
String language = null;
String country = null;
String variant = null;
String[] locale = StringUtils.split(_locale, "_");
if (locale.length >= 1)
language = locale[0];
if (locale.length >= 2)
country = locale[1];
if (locale.length >= 3)
variant = locale[2];
if (country == null) country = "";
if (variant == null) variant = "";
return new Locale(language, country, variant);
}
private Collator getCollator(String _locale) {
Locale locale = getLocale(_locale);
if (locale == null) return null;
return Collator.getInstance(locale);
}
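// Illustrative sketch (not part of the original source): a locale property
// value such as "no_NO_B" is split on '_' into language "no", country "NO"
// and variant "B", and Collator.getInstance(new Locale("no", "NO", "B")) then
// drives locale-aware string ordering; when no locale is configured the
// collator stays null and plain String.compareTo() is used instead.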
// -- ParsedModificationStatement
class ParsedModificationStatement implements ParsedModificationStatementIF {
private ModificationStatement stmt;
private ParsedModificationStatement(ModificationStatement stmt) {
this.stmt = stmt;
}
public int update() throws InvalidQueryException {
return runUpdate(stmt, null);
}
public int update(Map<String, ?> params) throws InvalidQueryException {
return runUpdate(stmt, params);
}
public String toString() {
return stmt.toString();
}
}
}
|
package org.lightmare.ejb;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import javax.ejb.Stateless;
import javax.persistence.EntityManagerFactory;
import org.lightmare.cache.ConnectionData;
import org.lightmare.cache.ConnectionSemaphore;
import org.lightmare.cache.MetaContainer;
import org.lightmare.cache.MetaData;
import org.lightmare.config.Configuration;
import org.lightmare.ejb.handlers.BeanHandler;
import org.lightmare.ejb.handlers.BeanLocalHandler;
import org.lightmare.jpa.JPAManager;
import org.lightmare.libraries.LibraryLoader;
import org.lightmare.remote.rpc.RPCall;
import org.lightmare.utils.ObjectUtils;
import org.lightmare.utils.reflect.MetaUtils;
/**
 * Connector class for getting EJB beans or calling remote procedures on an EJB
 * bean (RPC) by interface class
 *
 * @author Levan
 *
 */
public class EjbConnector {
private static final int RPC_ARGS_LENGTH = 2;
private void loadLibraries(MetaData metaData) {
ClassLoader loader = metaData.getLoader();
LibraryLoader.loadCurrentLibraries(loader);
}
/**
* Gets {@link MetaData} from {@link MetaContainer} and waits while
* {@link MetaData#isInProgress()}
*
* @param beanName
* @return {@link MetaData}
* @throws IOException
*/
private MetaData getMeta(String beanName) throws IOException {
MetaData metaData = MetaContainer.getSyncMetaData(beanName);
return metaData;
}
/**
 * Sets {@link EntityManagerFactory} connection for {@link Stateless} bean
 * {@link Class} from cache if it is not already set
 *
 * @param connection
 * @throws IOException
 */
private void getEntityManagerFactory(ConnectionData connection)
throws IOException {
if (connection.getEmf() == null) {
String unitName = connection.getUnitName();
if (ObjectUtils.available(unitName)) {
ConnectionSemaphore semaphore = JPAManager
.getConnection(unitName);
connection.setConnection(semaphore);
}
}
}
/**
 * Sets {@link EntityManagerFactory} connections for all {@link Stateless} bean
 * {@link Class} connections from cache
 *
 * @param metaData
 * @throws IOException
 */
private void getEntityManagerFactories(MetaData metaData)
throws IOException {
Collection<ConnectionData> connections = metaData.getConnections();
if (ObjectUtils.available(connections)) {
for (ConnectionData connection : connections) {
getEntityManagerFactory(connection);
}
}
}
/**
* Instantiates bean by class
*
* @param metaData
* @return Bean instance
* @throws IOException
*/
private <T> T getBeanInstance(MetaData metaData) throws IOException {
@SuppressWarnings("unchecked")
Class<? extends T> beanClass = (Class<? extends T>) metaData
.getBeanClass();
T beanInstance = MetaUtils.instantiate(beanClass);
return beanInstance;
}
/**
* Creates {@link InvocationHandler} implementation for server mode
*
* @param metaData
* @return {@link InvocationHandler}
* @throws IOException
*/
public <T> InvocationHandler getHandler(MetaData metaData)
throws IOException {
T beanInstance = getBeanInstance(metaData);
getEntityManagerFactories(metaData);
BeanHandler handler = new BeanHandler(metaData, beanInstance);
handler.configure();
return handler;
}
/**
* Instantiates bean with {@link Proxy} utility
*
* @param interfaces
* @param handler
* @return <code>T</code> implementation of bean interface
*/
private <T> T instatiateBean(Class<T>[] interfaces,
InvocationHandler handler, ClassLoader loader) {
if (loader == null) {
loader = LibraryLoader.getContextClassLoader();
}
@SuppressWarnings("unchecked")
T beanInstance = (T) Proxy
.newProxyInstance(loader, interfaces, handler);
return beanInstance;
}
/**
* Instantiates bean with {@link Proxy} utility
*
* @param interfaceClass
* @param handler
* @return <code>T</code> implementation of bean interface
*/
private <T> T instatiateBean(Class<T> interfaceClass,
InvocationHandler handler, ClassLoader loader) {
Class<?>[] interfaceArray = { interfaceClass };
if (loader == null) {
loader = LibraryLoader.getContextClassLoader();
}
@SuppressWarnings("unchecked")
T beanInstance = (T) Proxy.newProxyInstance(loader, interfaceArray,
handler);
return beanInstance;
}
private Class<?>[] setInterfaces(MetaData metaData) {
Class<?>[] interfaceClasses = metaData.getInterfaceClasses();
if (ObjectUtils.notAvailable(interfaceClasses)) {
List<Class<?>> interfacesList = new ArrayList<Class<?>>();
Class<?>[] interfaces = metaData.getLocalInterfaces();
if (ObjectUtils.available(interfaces)) {
interfacesList.addAll(Arrays.asList(interfaces));
}
interfaces = metaData.getRemoteInterfaces();
if (ObjectUtils.available(interfaces)) {
interfacesList.addAll(Arrays.asList(interfaces));
}
int size = interfacesList.size();
interfaceClasses = interfacesList.toArray(new Class[size]);
}
return interfaceClasses;
}
/**
 * Creates proxy implementation of bean interfaces for the passed
 * {@link MetaData} instance (server mode)
 *
 * @param metaData
 * @param rpcArgs
 * @return <code>T</code> implementation of bean interface
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public <T> T connectToBean(MetaData metaData, Object... rpcArgs)
throws IOException {
loadLibraries(metaData);
InvocationHandler handler = getHandler(metaData);
Class<?>[] interfaces = setInterfaces(metaData);
ClassLoader loader = metaData.getLoader();
loadLibraries(metaData);
T beanInstance = (T) instatiateBean((Class<T>[]) interfaces, handler,
loader);
return beanInstance;
}
/**
 * Creates custom implementation of bean {@link Class} by bean name and its
 * proxy interface {@link Class} instance
 *
 * @param beanName
 * @param interfaceClass
 * @param rpcArgs
 * @return <code>T</code> implementation of bean interface
 * @throws IOException
 */
public <T> T connectToBean(String beanName, Class<T> interfaceClass,
Object... rpcArgs) throws IOException {
InvocationHandler handler;
ClassLoader loader;
if (Configuration.isServer()) {
MetaData metaData = getMeta(beanName);
setInterfaces(metaData);
handler = getHandler(metaData);
loader = metaData.getLoader();
loadLibraries(metaData);
} else {
if (rpcArgs.length != RPC_ARGS_LENGTH) {
throw new IOException(
"Could not resolve host and port arguments");
}
String host = (String) rpcArgs[0];
int port = (Integer) rpcArgs[1];
handler = new BeanLocalHandler(new RPCall(host, port));
loader = null;
}
T beanInstance = (T) instatiateBean(interfaceClass, handler, loader);
return beanInstance;
}
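// Usage sketch (illustrative, not from the original source; the bean and
// interface names are made up):
//   EjbConnector connector = new EjbConnector();
//   // server mode - proxy backed by the locally deployed bean
//   UserService local = connector.connectToBean("UserBean", UserService.class);
//   // client mode - proxy backed by an RPC call, requires host and port
//   UserService remote = connector.connectToBean("UserBean", UserService.class,
//       "localhost", 1199);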
/**
* Creates custom implementation of bean {@link Class} by class name and its
* proxy interface name
*
* @param beanName
* @param interfaceName
* @param rpcArgs
* @return <code>T</code> implementation of bean interface
* @throws IOException
*/
public <T> T connectToBean(String beanName, String interfaceName,
Object... rpcArgs) throws IOException {
MetaData metaData = getMeta(beanName);
ClassLoader loader = metaData.getLoader();
@SuppressWarnings("unchecked")
Class<T> interfaceClass = (Class<T>) MetaUtils.classForName(
interfaceName, Boolean.FALSE, loader);
T beanInstance = connectToBean(beanName, interfaceClass, rpcArgs);
return beanInstance;
}
}
|
package org.myrobotlab.service;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.io.FilenameUtils;
import org.myrobotlab.codec.CodecUtils;
import org.myrobotlab.framework.Platform;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.ServiceType;
import org.myrobotlab.framework.Status;
import org.myrobotlab.framework.interfaces.Attachable;
import org.myrobotlab.inmoov.Utils;
import org.myrobotlab.inmoov.Vision;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.opencv.OpenCVData;
import org.myrobotlab.service.abstracts.AbstractSpeechSynthesis.Voice;
import org.myrobotlab.service.data.JoystickData;
import org.myrobotlab.service.data.Locale;
import org.myrobotlab.service.data.Pin;
import org.myrobotlab.service.interfaces.JoystickListener;
import org.myrobotlab.service.interfaces.PinArrayControl;
import org.myrobotlab.service.interfaces.LocaleProvider;
import org.myrobotlab.service.interfaces.ServoControl;
import org.myrobotlab.service.interfaces.Simulator;
import org.myrobotlab.service.interfaces.SpeechRecognizer;
import org.myrobotlab.service.interfaces.SpeechSynthesis;
import org.myrobotlab.service.interfaces.TextListener;
import org.myrobotlab.service.interfaces.TextPublisher;
import org.slf4j.Logger;
public class InMoov2 extends Service implements TextListener, TextPublisher, JoystickListener, LocaleProvider {
public final static Logger log = LoggerFactory.getLogger(InMoov2.class);
public static LinkedHashMap<String, String> lpVars = new LinkedHashMap<String, String>();
// FIXME - why
static boolean RobotCanMoveRandom = true;
private static final long serialVersionUID = 1L;
static String speechRecognizer = "WebkitSpeechRecognition";
/**
* This static method returns all the details of the class without it having to
* be constructed. It has description, categories, dependencies, and peer
* definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(InMoov2.class);
meta.addDescription("InMoov2 Service");
meta.addCategory("robot");
meta.sharePeer("mouthControl.mouth", "mouth", "MarySpeech", "shared Speech");
meta.addPeer("eye", "OpenCV", "eye");
meta.addPeer("servomixer", "ServoMixer", "for making gestures");
meta.addPeer("ultraSonicRight", "UltrasonicSensor", "measure distance");
meta.addPeer("ultraSonicLeft", "UltrasonicSensor", "measure distance");
meta.addPeer("pir", "Pir", "infrared sensor");
// the two legacy controllers .. :(
meta.addPeer("left", "Arduino", "legacy controller");
meta.addPeer("right", "Arduino", "legacy controller");
meta.addPeer("controller3", "Arduino", "legacy controller");
meta.addPeer("controller4", "Arduino", "legacy controller");
meta.addPeer("htmlFilter", "HtmlFilter", "filter speaking html");
meta.addPeer("brain", "ProgramAB", "brain");
meta.addPeer("simulator", "JMonkeyEngine", "simulator");
meta.addPeer("head", "InMoov2Head", "head");
meta.addPeer("torso", "InMoov2Torso", "torso");
// meta.addPeer("eyelids", "InMoovEyelids", "eyelids");
meta.addPeer("leftArm", "InMoov2Arm", "left arm");
meta.addPeer("leftHand", "InMoov2Hand", "left hand");
meta.addPeer("rightArm", "InMoov2Arm", "right arm");
meta.addPeer("rightHand", "InMoov2Hand", "right hand");
meta.addPeer("mouthControl", "MouthControl", "MouthControl");
// meta.addPeer("imageDisplay", "ImageDisplay", "image display service");
meta.addPeer("mouth", "MarySpeech", "InMoov speech service");
meta.addPeer("ear", speechRecognizer, "InMoov webkit speech recognition service");
meta.addPeer("headTracking", "Tracking", "Head tracking system");
meta.sharePeer("headTracking.opencv", "eye", "OpenCV", "shared head OpenCV");
// meta.sharePeer("headTracking.controller", "left", "Arduino", "shared head
// Arduino"); NO !!!!
meta.sharePeer("headTracking.x", "head.rothead", "Servo", "shared servo");
meta.sharePeer("headTracking.y", "head.neck", "Servo", "shared servo");
// Global - undecorated by self name
meta.addRootPeer("python", "Python", "shared Python service");
// latest - not ready until repo is ready
meta.addDependency("fr.inmoov", "inmoov2", null, "zip");
return meta;
}
/**
* This method will load a python file into the python interpreter.
*/
public static boolean loadFile(String file) {
File f = new File(file);
Python p = (Python) Runtime.getService("python");
log.info("Loading Python file {}", f.getAbsolutePath());
if (p == null) {
log.error("Python instance not found");
return false;
}
String script = null;
try {
script = FileIO.toString(f.getAbsolutePath());
} catch (IOException e) {
log.error("IO Error loading file : ", e);
return false;
}
// evaluate the scripts in a blocking way.
boolean result = p.exec(script, true);
if (!result) {
log.error("Error while loading file {}", f.getAbsolutePath());
return false;
} else {
log.debug("Successfully loaded {}", f.getAbsolutePath());
}
return true;
}
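// Usage sketch (illustrative, not from the original source; the path is made
// up): requires a running service named "python" in the Runtime.
//   boolean ok = InMoov2.loadFile("gestures/wave.py");
//   // ok is false when the Python service is missing, the file cannot be
//   // read, or the blocking exec of the script fails.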
public static void main(String[] args) {
try {
LoggingFactory.init(Level.INFO);
Platform.setVirtual(true);
// Runtime.main(new String[] { "--install", "InMoov2" });
// Runtime.main(new String[] { "--interactive", "--id", "inmoov",
// "--install-dependency","fr.inmoov", "inmoov2", "latest", "zip"});
Runtime.main(new String[] {
"--resource-override",
"InMoov2=/lhome/grperry/github/mrl.develop/myrobotlab/src/main/resources/resource/InMoov2/resource/InMoov2",
"WebGui=/lhome/grperry/github/mrl.develop/myrobotlab/src/main/resources/resource/InMoov2/resource/WebGui",
"ProgramAB=/lhome/grperry/github/mrl.develop/myrobotlab/src/main/resources/resource/InMoov2/resource/ProgramAB",
"--interactive", "--id", "inmoov" });
String[] langs = java.util.Locale.getISOLanguages();
java.util.Locale[] locales = java.util.Locale.getAvailableLocales();
log.info("{}", locales.length);
for (java.util.Locale l : locales) {
log.info("
log.info(CodecUtils.toJson(l));
log.info(l.getDisplayLanguage());
log.info(l.getLanguage());
log.info(l.getCountry());
log.info(l.getDisplayCountry());
log.info(CodecUtils.toJson(new Locale(l)));
if (l.getLanguage().equals("en")) {
log.info("here");
}
}
InMoov2 i01 = (InMoov2) Runtime.start("i01", "InMoov2");
WebGui webgui = (WebGui) Runtime.create("webgui", "WebGui");
webgui.autoStartBrowser(false);
webgui.startService();
boolean done = true;
if (done) {
return;
}
i01.startBrain();
i01.startAll("COM3", "COM4");
Runtime.start("python", "Python");
// Runtime.start("log", "Log");
/*
* OpenCV cv = (OpenCV) Runtime.start("cv", "OpenCV"); cv.setCameraIndex(2);
*/
// i01.startSimulator();
/*
* Arduino mega = (Arduino) Runtime.start("mega", "Arduino");
* mega.connect("/dev/ttyACM0");
*/
} catch (Exception e) {
log.error("main threw", e);
}
}
boolean autoStartBrowser = false;
transient ProgramAB brain;
Set<String> configs = null;
String currentConfigurationName = "default";
transient SpeechRecognizer ear;
transient OpenCV eye;
transient Tracking eyesTracking;
// if a controlled threaded gesture is still running we warn the user
boolean gestureAlreadyStarted = false;
// FIXME - what the hell is this for ?
Set<String> gestures = new TreeSet<String>();
transient InMoov2Head head;
transient Tracking headTracking;
transient HtmlFilter htmlFilter;
transient UltrasonicSensor ultraSonicRight;
transient UltrasonicSensor ultraSonicLeft;
transient Pir pir;
private PinArrayControl pirArduino;
public Integer pirPin = null;
// transient ImageDisplay imageDisplay;
/**
* simple booleans to determine peer state of existence FIXME - should be an
* auto-peer variable
*/
boolean isBrainActivated = false;
boolean isEarActivated = false;
boolean isEyeActivated = false;
boolean isEyeLidsActivated = false;
boolean isHeadActivated = false;
boolean isLeftArmActivated = false;
boolean isLeftHandActivated = false;
boolean isMouthActivated = false;
boolean isRightArmActivated = false;
boolean isRightHandActivated = false;
boolean isSimulatorActivated = false;
boolean isTorsoActivated = false;
boolean isNeopixelActivated = false;
boolean isPirActivated = false;
boolean isUltraSonicRightActivated = false;
boolean isUltraSonicLeftActivated = false;
boolean isServoMixerActivated = false;
// TODO - refactor into a Simulator interface when more simulators are borgd
transient JMonkeyEngine jme;
String lastGestureExecuted;
Long lastPirActivityTime;
transient InMoov2Arm leftArm;
// transient LanguagePack languagePack = new LanguagePack();
// transient InMoovEyelids eyelids; eyelids are in the head
transient InMoov2Hand leftHand;
Locale locale;
/**
* supported locales
*/
Map<String, Locale> locales = null;
int maxInactivityTimeSeconds = 120;
transient SpeechSynthesis mouth;
// FIXME ugh - new MouthControl service that uses AudioFile output
transient public MouthControl mouthControl;
boolean mute = false;
transient NeoPixel neopixel;
transient ServoMixer servomixer;
transient Python python;
transient InMoov2Arm rightArm;
transient InMoov2Hand rightHand;
/**
 * used to remember/serialize the user's desired speech synthesis type
 */
String speechService = "MarySpeech";
transient InMoov2Torso torso;
@Deprecated
public Vision vision;
// FIXME - remove all direct references
// transient private HashMap<String, InMoov2Arm> arms = new HashMap<>();
protected List<Voice> voices = null;
protected String voiceSelected;
transient WebGui webgui;
public InMoov2(String n, String id) {
super(n, id);
// by default all servos will auto-disable
Servo.setAutoDisableDefault(true);
locales = Locale.getLocaleMap("en-US", "fr-FR", "es-ES", "de-DE", "nl-NL", "ru-RU", "hi-IN", "it-IT", "fi-FI",
"pt-PT");
locale = Runtime.getInstance().getLocale();
python = (Python) startPeer("python");
load(locale.getTag());
// get events of new services and shutdown
Runtime r = Runtime.getInstance();
subscribe(r.getName(), "shutdown");
listConfigFiles();
// FIXME - Framework should auto-magically auto-start peers AFTER
// construction - unless explicitly told not to
// peers to start on construction
// imageDisplay = (ImageDisplay) startPeer("imageDisplay");
}
@Override /* local strong type - is to be avoided - use name string */
public void addTextListener(TextListener service) {
// CORRECT WAY ! - no direct reference - just use the name in a subscription
addListener("publishText", service.getName());
}
@Override
public void attachTextListener(TextListener service) {
addListener("publishText", service.getName());
}
public void attachTextPublisher(String name) {
subscribe(name, "publishText");
}
@Override
public void attachTextPublisher(TextPublisher service) {
subscribe(service.getName(), "publishText");
}
public void beginCheckingOnInactivity() {
beginCheckingOnInactivity(maxInactivityTimeSeconds);
}
public void beginCheckingOnInactivity(int maxInactivityTimeSeconds) {
this.maxInactivityTimeSeconds = maxInactivityTimeSeconds;
// speakBlocking("power down after %s seconds inactivity is on",
// this.maxInactivityTimeSeconds);
log.info("power down after %s seconds inactivity is on", this.maxInactivityTimeSeconds);
addTask("checkInactivity", 5 * 1000, 0, "checkInactivity");
}
public long checkInactivity() {
// speakBlocking("checking");
long lastActivityTime = getLastActivityTime();
long now = System.currentTimeMillis();
long inactivitySeconds = (now - lastActivityTime) / 1000;
if (inactivitySeconds > maxInactivityTimeSeconds) {
// speakBlocking("%d seconds have passed without activity",
// inactivitySeconds);
powerDown();
} else {
// speakBlocking("%d seconds have passed without activity",
// inactivitySeconds);
info("checking checkInactivity - %d seconds have passed without activity", inactivitySeconds);
}
return lastActivityTime;
}
public void closeAllImages() {
// imageDisplay.closeAll();
log.error("implement webgui.closeAllImages");
}
public void cycleGestures() {
// if not loaded load -
// FIXME - this needs a lot of "help" :P
// WHY IS THIS DONE ?
if (gestures.size() == 0) {
loadGestures();
}
for (String gesture : gestures) {
try {
String methodName = gesture.substring(0, gesture.length() - 2); // strip trailing "()"
speakBlocking(methodName);
log.info("executing gesture {}", methodName);
python.eval(methodName + "()");
// wait for finish - or timeout ?
} catch (Exception e) {
error(e);
}
}
}
public void disable() {
if (head != null) {
head.disable();
}
if (rightHand != null) {
rightHand.disable();
}
if (leftHand != null) {
leftHand.disable();
}
if (rightArm != null) {
rightArm.disable();
}
if (leftArm != null) {
leftArm.disable();
}
if (torso != null) {
torso.disable();
}
}
public void displayFullScreen(String src) {
try {
// imageDisplay.displayFullScreen(src);
log.error("implement webgui.displayFullScreen");
} catch (Exception e) {
error("could not display picture %s", src);
}
}
public void enable() {
if (head != null) {
head.enable();
}
if (rightHand != null) {
rightHand.enable();
}
if (leftHand != null) {
leftHand.enable();
}
if (rightArm != null) {
rightArm.enable();
}
if (leftArm != null) {
leftArm.enable();
}
if (torso != null) {
torso.enable();
}
}
/**
* This method will try to launch a python command with error handling
*/
public String execGesture(String gesture) {
lastGestureExecuted = gesture;
if (python == null) {
log.warn("execGesture : No jython engine...");
return null;
}
subscribe(python.getName(), "publishStatus", this.getName(), "onGestureStatus");
startedGesture(lastGestureExecuted);
return python.evalAndWait(gesture);
}
public void finishedGesture() {
finishedGesture("unknown");
}
public void finishedGesture(String nameOfGesture) {
if (gestureAlreadyStarted) {
waitTargetPos();
RobotCanMoveRandom = true;
gestureAlreadyStarted = false;
log.info("gesture : {} finished...", nameOfGesture);
}
}
public void fullSpeed() {
if (head != null) {
head.fullSpeed();
}
if (rightHand != null) {
rightHand.fullSpeed();
}
if (leftHand != null) {
leftHand.fullSpeed();
}
if (rightArm != null) {
rightArm.fullSpeed();
}
if (leftArm != null) {
leftArm.fullSpeed();
}
if (torso != null) {
torso.fullSpeed();
}
}
public String get(String param) {
if (lpVars.containsKey(param.toUpperCase())) {
return lpVars.get(param.toUpperCase());
}
return "not yet translated";
}
public InMoov2Arm getArm(String side) {
if ("left".equals(side)) {
return leftArm;
} else if ("right".equals(side)) {
return rightArm;
} else {
log.error("can not get arm {}", side);
}
return null;
}
public InMoov2Hand getHand(String side) {
if ("left".equals(side)) {
return leftHand;
} else if ("right".equals(side)) {
return rightHand;
} else {
log.error("can not get arm {}", side);
}
return null;
}
public InMoov2Head getHead() {
return head;
}
/**
* get current language
*/
public String getLanguage() {
return locale.getLanguage();
}
/**
* finds most recent activity
*
* @return the timestamp of the last activity time.
*/
public long getLastActivityTime() {
long lastActivityTime = 0;
if (leftHand != null) {
lastActivityTime = Math.max(lastActivityTime, leftHand.getLastActivityTime());
}
if (leftArm != null) {
lastActivityTime = Math.max(lastActivityTime, leftArm.getLastActivityTime());
}
if (rightHand != null) {
lastActivityTime = Math.max(lastActivityTime, rightHand.getLastActivityTime());
}
if (rightArm != null) {
lastActivityTime = Math.max(lastActivityTime, rightArm.getLastActivityTime());
}
if (head != null) {
lastActivityTime = Math.max(lastActivityTime, head.getLastActivityTime());
}
if (torso != null) {
lastActivityTime = Math.max(lastActivityTime, torso.getLastActivityTime());
}
if (lastPirActivityTime != null) {
lastActivityTime = Math.max(lastActivityTime, lastPirActivityTime);
}
if (lastActivityTime == 0) {
error("invalid activity time - anything connected?");
lastActivityTime = System.currentTimeMillis();
}
return lastActivityTime;
}
public InMoov2Arm getLeftArm() {
return leftArm;
}
public InMoov2Hand getLeftHand() {
return leftHand;
}
@Override
public Locale getLocale() {
return locale;
}
@Override
public Map<String, Locale> getLocales() {
return locales;
}
public InMoov2Arm getRightArm() {
return rightArm;
}
public InMoov2Hand getRightHand() {
return rightHand;
}
public Simulator getSimulator() {
return jme;
}
public InMoov2Torso getTorso() {
return torso;
}
public void halfSpeed() {
if (head != null) {
head.setSpeed(25.0, 25.0, 25.0, 25.0, -1.0, 25.0);
}
if (rightHand != null) {
rightHand.setSpeed(30.0, 30.0, 30.0, 30.0, 30.0, 30.0);
}
if (leftHand != null) {
leftHand.setSpeed(30.0, 30.0, 30.0, 30.0, 30.0, 30.0);
}
if (rightArm != null) {
rightArm.setSpeed(25.0, 25.0, 25.0, 25.0);
}
if (leftArm != null) {
leftArm.setSpeed(25.0, 25.0, 25.0, 25.0);
}
if (torso != null) {
torso.setSpeed(20.0, 20.0, 20.0);
}
}
public boolean isCameraOn() {
if (eye != null) {
if (eye.isCapturing()) {
return true;
}
}
return false;
}
public boolean isEyeLidsActivated() {
return isEyeLidsActivated;
}
public boolean isHeadActivated() {
return isHeadActivated;
}
public boolean isLeftArmActivated() {
return isLeftArmActivated;
}
public boolean isLeftHandActivated() {
return isLeftHandActivated;
}
public boolean isMute() {
return mute;
}
public boolean isNeopixelActivated() {
return isNeopixelActivated;
}
public boolean isRightArmActivated() {
return isRightArmActivated;
}
public boolean isRightHandActivated() {
return isRightHandActivated;
}
public boolean isTorsoActivated() {
return isTorsoActivated;
}
public boolean isPirActivated() {
return isPirActivated;
}
public boolean isUltraSonicRightActivated() {
return isUltraSonicRightActivated;
}
public boolean isUltraSonicLeftActivated() {
return isUltraSonicLeftActivated;
}
public boolean isServoMixerActivated() {
return isServoMixerActivated;
}
public Set<String> listConfigFiles() {
configs = new HashSet<>();
// data list
String configDir = getResourceDir() + fs + "config";
File f = new File(configDir);
if (!f.exists()) {
f.mkdirs();
}
String[] files = f.list();
for (String config : files) {
configs.add(config);
}
// data list
configDir = getDataDir() + fs + "config";
f = new File(configDir);
if (!f.exists()) {
f.mkdirs();
}
files = f.list();
for (String config : files) {
configs.add(config);
}
return configs;
}
/*
* iterate over each txt files in the directory
*/
public void load(String locale) {
String extension = "lang";
File dir = Utils.makeDirectory(getResourceDir() + File.separator + "system" + File.separator + "languagePack"
+ File.separator + locale);
if (dir.exists()) {
lpVars.clear();
for (File f : dir.listFiles()) {
if (f.isDirectory()) {
continue;
}
if (FilenameUtils.getExtension(f.getAbsolutePath()).equalsIgnoreCase(extension)) {
log.info("Inmoov languagePack load : {}", f.getName());
try {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8"));
for (String line = br.readLine(); line != null; line = br.readLine()) {
String[] parts = line.split("::");
if (parts.length > 1) {
lpVars.put(parts[0].toUpperCase(), parts[1]);
}
}
} catch (IOException e) {
log.error("LanguagePack : {}", e);
}
} else {
log.warn("{} is not a {} file", f.getAbsolutePath(), extension);
}
}
}
}
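// Illustrative sketch (not from the original source; the values are made up):
// each *.lang file holds "KEY::value" lines, e.g.
//   STARTINGMOUTH::starting mouth
//   ALERT::attention please
// which load() stores upper-cased in lpVars, so get("startingmouth") returns
// the translated phrase for the active locale.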
// FIXME - what is this for ???
public void loadGestures() {
loadGestures(getResourceDir() + fs + "gestures");
}
/**
* This blocking method will look at all of the .py files in a directory. One by
* one it will load the files into the python interpreter. A gesture python file
* should contain 1 method definition that is the same as the filename.
*
* @param directory - the directory that contains the gesture python files.
*/
public boolean loadGestures(String directory) {
speakBlocking(get("STARTINGGESTURES"));
// iterate over each of the python files in the directory
// and load them into the python interpreter.
String extension = "py";
Integer totalLoaded = 0;
Integer totalError = 0;
File dir = new File(directory);
dir.mkdirs();
if (dir.exists()) {
for (File f : dir.listFiles()) {
if (FilenameUtils.getExtension(f.getAbsolutePath()).equalsIgnoreCase(extension)) {
if (loadFile(f.getAbsolutePath()) == true) {
totalLoaded += 1;
String methodName = f.getName().substring(0, f.getName().length() - 3) + "()";
gestures.add(methodName);
} else {
error("could not load %s", f.getName());
totalError += 1;
}
} else {
log.info("{} is not a {} file", f.getAbsolutePath(), extension);
}
}
}
info("%s Gestures loaded, %s Gestures with error", totalLoaded, totalError);
broadcastState();
if (totalError > 0) {
speakAlert(get("GESTURE_ERROR"));
return false;
}
return true;
}
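// Illustrative sketch (not from the original source; the file name is made
// up): a gesture file "wave.py" is expected to define exactly one method named
// after the file, e.g. "def wave(): ...", typically moving servos through the
// global i01 instance. loadGestures() executes each file once and records
// "wave()" so that execGesture("wave()") or cycleGestures() can evaluate it
// later.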
public void moveArm(String which, double bicep, double rotate, double shoulder, double omoplate) {
InMoov2Arm arm = getArm(which);
if (arm == null) {
info("%s arm not started", which);
return;
}
arm.moveTo(bicep, rotate, shoulder, omoplate);
}
public void moveEyelids(double eyelidleftPos, double eyelidrightPos) {
if (head != null) {
head.moveEyelidsTo(eyelidleftPos, eyelidrightPos);
} else {
log.warn("moveEyelids - I have a null head");
}
}
public void moveEyes(double eyeX, double eyeY) {
if (head != null) {
head.moveTo(null, null, eyeX, eyeY, null, null);
} else {
log.warn("moveEyes - I have a null head");
}
}
public void moveHand(String which, double thumb, double index, double majeure, double ringFinger, double pinky) {
moveHand(which, thumb, index, majeure, ringFinger, pinky, null);
}
public void moveHand(String which, Double thumb, Double index, Double majeure, Double ringFinger, Double pinky,
Double wrist) {
InMoov2Hand hand = getHand(which);
if (hand == null) {
log.warn("{} hand does not exist");
return;
}
hand.moveTo(thumb, index, majeure, ringFinger, pinky, wrist);
}
public void moveHead(double neck, double rothead) {
moveHead(neck, rothead, null);
}
public void moveHead(double neck, double rothead, double eyeX, double eyeY, double jaw) {
moveHead(neck, rothead, eyeX, eyeY, jaw, null);
}
public void moveHead(Double neck, Double rothead, Double rollNeck) {
moveHead(neck, rothead, null, null, null, rollNeck);
}
public void moveHead(Double neck, Double rothead, Double eyeX, Double eyeY, Double jaw, Double rollNeck) {
if (head != null) {
head.moveTo(neck, rothead, eyeX, eyeY, jaw, rollNeck);
} else {
log.error("I have a null head");
}
}
public void moveHeadBlocking(double neck, double rothead) {
moveHeadBlocking(neck, rothead, null);
}
public void moveHeadBlocking(double neck, double rothead, Double rollNeck) {
moveHeadBlocking(neck, rothead, null, null, null, rollNeck);
}
public void moveHeadBlocking(double neck, double rothead, Double eyeX, Double eyeY, Double jaw) {
moveHeadBlocking(neck, rothead, eyeX, eyeY, jaw, null);
}
public void moveHeadBlocking(Double neck, Double rothead, Double eyeX, Double eyeY, Double jaw, Double rollNeck) {
if (head != null) {
head.moveToBlocking(neck, rothead, eyeX, eyeY, jaw, rollNeck);
} else {
log.error("I have a null head");
}
}
public void moveTorso(double topStom, double midStom, double lowStom) {
if (torso != null) {
torso.moveTo(topStom, midStom, lowStom);
} else {
log.error("moveTorso - I have a null torso");
}
}
public void moveTorsoBlocking(double topStom, double midStom, double lowStom) {
if (torso != null) {
torso.moveToBlocking(topStom, midStom, lowStom);
} else {
log.error("moveTorsoBlocking - I have a null torso");
}
}
public void onGestureStatus(Status status) {
if (!status.equals(Status.success()) && !status.equals(Status.warn("Python process killed !"))) {
error("I cannot execute %s, please check logs", lastGestureExecuted);
}
finishedGesture(lastGestureExecuted);
unsubscribe(python.getName(), "publishStatus", this.getName(), "onGestureStatus");
}
@Override
public void onJoystickInput(JoystickData input) throws Exception {
// TODO Auto-generated method stub
}
public OpenCVData onOpenCVData(OpenCVData data) {
return data;
}
@Override
public void onText(String text) {
// FIXME - we should be able to "re"-publish text but text is coming from
// different sources
// some might be coming from the ear - some from the mouth ... - there has
// to be a distinction
log.info("onText - {}", text);
invoke("publishText", text);
}
// TODO FIX/CHECK this, migrate from python land
public void powerDown() {
rest();
purgeTasks();
disable();
if (ear != null) {
ear.lockOutAllGrammarExcept("power up");
}
python.execMethod("power_down");
}
// TODO FIX/CHECK this, migrate from python land
public void powerUp() {
enable();
rest();
if (ear != null) {
ear.clearLock();
}
beginCheckingOnInactivity();
python.execMethod("power_up");
}
/**
* all published text from InMoov2 - including ProgramAB
*/
@Override
public String publishText(String text) {
return text;
}
public void releaseService() {
try {
disable();
releasePeers();
super.releaseService();
} catch (Exception e) {
error(e);
}
}
// FIXME NO DIRECT REFERENCES - publishRest --> (onRest) --> rest
public void rest() {
log.info("InMoov2.rest()");
if (head != null) {
head.rest();
}
if (rightHand != null) {
rightHand.rest();
}
if (leftHand != null) {
leftHand.rest();
}
if (rightArm != null) {
rightArm.rest();
}
if (leftArm != null) {
leftArm.rest();
}
if (torso != null) {
torso.rest();
}
}
@Deprecated
public void setArmVelocity(String which, Double bicep, Double rotate, Double shoulder, Double omoplate) {
InMoov2Arm arm = getArm(which);
if (arm == null) {
warn("%s hand not started", which);
return;
}
arm.setSpeed(bicep, rotate, shoulder, omoplate);
}
public void setAutoDisable(Boolean param) {
if (head != null) {
head.setAutoDisable(param);
}
if (rightArm != null) {
rightArm.setAutoDisable(param);
}
if (leftArm != null) {
leftArm.setAutoDisable(param);
}
if (leftHand != null) {
leftHand.setAutoDisable(param);
}
if (rightHand != null) {
rightHand.setAutoDisable(param);
}
if (torso != null) {
torso.setAutoDisable(param);
}
/*
* if (eyelids != null) { eyelids.setAutoDisable(param); }
*/
}
public void setHandSpeed(String which, Double thumb, Double index, Double majeure, Double ringFinger,
Double pinky) {
setHandSpeed(which, thumb, index, majeure, ringFinger, pinky, null);
}
public void setHandSpeed(String which, Double thumb, Double index, Double majeure, Double ringFinger, Double pinky,
Double wrist) {
InMoov2Hand hand = getHand(which);
if (hand == null) {
warn("%s hand not started", which);
return;
}
hand.setSpeed(thumb, index, majeure, ringFinger, pinky, wrist);
}
@Deprecated
public void setHandVelocity(String which, Double thumb, Double index, Double majeure, Double ringFinger,
Double pinky) {
setHandSpeed(which, thumb, index, majeure, ringFinger, pinky, null);
}
@Deprecated
public void setHandVelocity(String which, Double thumb, Double index, Double majeure, Double ringFinger,
Double pinky, Double wrist) {
setHandSpeed(which, thumb, index, majeure, ringFinger, pinky, wrist);
}
public void setHeadSpeed(Double rothead, Double neck) {
setHeadSpeed(rothead, neck, null, null, null);
}
public void setHeadSpeed(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed) {
setHeadSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, null);
}
public void setHeadSpeed(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed,
Double rollNeckSpeed) {
if (head == null) {
warn("setHeadSpeed - head not started");
return;
}
head.setSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, rollNeckSpeed);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck) {
setHeadSpeed(rothead, neck, null, null, null, null);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck, Double rollNeck) {
setHeadSpeed(rothead, neck, null, null, null, rollNeck);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed) {
setHeadSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, null);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed,
Double rollNeckSpeed) {
setHeadSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, rollNeckSpeed);
}
/**
* TODO : use system locale set language for InMoov service used by chatbot +
*
* @param code
* @return
*/
@Deprecated /* use setLocale */
public String setLanguage(String code) {
setLocale(code);
return code;
}
@Override
public void setLocale(String code) {
if (code == null) {
log.warn("setLocale null");
return;
}
// filter of the set of supported locales
if (!locales.containsKey(code)) {
error("InMooov does not support %s only %s", code, locales.keySet());
return;
}
locale = new Locale(code);
speakBlocking("setting language to %s", locale.getDisplayLanguage());
// attempt to set all other language providers to the same language as me
List<String> providers = Runtime.getServiceNamesFromInterface(LocaleProvider.class);
for (String provider : providers) {
if (!provider.equals(getName())) {
log.info("{} setting locale to %s", provider, code);
send(provider, "setLocale", code);
send(provider, "broadcastState");
}
}
load(locale.getTag());
}
public void setMute(boolean mute) {
info("Set mute to %s", mute);
this.mute = mute;
sendToPeer("mouth", "setMute", mute);
broadcastState();
}
public void setNeopixelAnimation(String animation, Integer red, Integer green, Integer blue, Integer speed) {
if (neopixel != null /* && neopixelArduino != null */) {
neopixel.setAnimation(animation, red, green, blue, speed);
} else {
warn("No Neopixel attached");
}
}
public String setSpeechType(String speechType) {
speechService = speechType;
setPeer("mouth", speechType);
return speechType;
}
public void setTorsoSpeed(Double topStom, Double midStom, Double lowStom) {
if (torso != null) {
torso.setSpeed(topStom, midStom, lowStom);
} else {
log.warn("setTorsoSpeed - I have no torso");
}
}
@Deprecated
public void setTorsoVelocity(Double topStom, Double midStom, Double lowStom) {
if (torso != null) {
torso.setVelocity(topStom, midStom, lowStom);
} else {
log.warn("setTorsoVelocity - I have no torso");
}
}
/**
* overridden setVirtual for InMoov sets "all" services to virtual
*/
public boolean setVirtual(boolean virtual) {
super.setVirtual(virtual);
Platform.setVirtual(virtual);
return virtual;
}
public void setVoice(String name) {
if (mouth != null) {
mouth.setVoice(name);
voiceSelected = name;
speakBlocking("setting voice to %s", name);
}
}
public void speak(String toSpeak) {
sendToPeer("mouth", "speak", toSpeak);
}
public void speakAlert(String toSpeak) {
speakBlocking(get("ALERT"));
speakBlocking(toSpeak);
}
public void speakBlocking(String speak) {
speakBlocking(speak, null);
}
// FIXME - publish text regardless if mouth exists ...
public void speakBlocking(String format, Object... args) {
if (format == null) {
return;
}
String toSpeak = format;
if (args != null) {
toSpeak = String.format(format, args);
}
// FIXME - publish onText when listening
invoke("publishText", toSpeak);
if (!mute) {
// sendToPeer("mouth", "speakBlocking", toSpeak);
invokePeer("mouth", "speakBlocking", toSpeak);
}
}
public void startAll() throws Exception {
startAll(null, null);
}
public void startAll(String leftPort, String rightPort) throws Exception {
startMouth();
startBrain();
startHeadTracking();
// startEyesTracking();
// startOpenCV();
startEar();
startServos(leftPort, rightPort);
// startMouthControl(head.jaw, mouth);
speakBlocking("startup sequence completed");
}
public ProgramAB startBrain() {
try {
brain = (ProgramAB) startPeer("brain");
isBrainActivated = true;
speakBlocking(get("CHATBOTACTIVATED"));
// GOOD EXAMPLE ! - no type, uses name - does a set of subscriptions !
// attachTextPublisher(brain.getName());
/*
* not necessary - ear needs to be attached to mouth not brain if (ear != null)
* { ear.attachTextListener(brain); }
*/
brain.attachTextPublisher(ear);
// this.attach(brain); FIXME - attach as a TextPublisher - then re-publish
// FIXME - deal with language
// speakBlocking(get("CHATBOTACTIVATED"));
brain.repetitionCount(10);
brain.setPath(getResourceDir() + fs + "chatbot");
brain.startSession("default", locale.getTag());
// reset some parameters to default...
brain.setPredicate("topic", "default");
brain.setPredicate("questionfirstinit", "");
brain.setPredicate("tmpname", "");
brain.setPredicate("null", "");
// load last user session
if (!brain.getPredicate("name").isEmpty()) {
if (brain.getPredicate("lastUsername").isEmpty()
|| brain.getPredicate("lastUsername").equals("unknown")) {
brain.setPredicate("lastUsername", brain.getPredicate("name"));
}
}
brain.setPredicate("parameterHowDoYouDo", "");
try {
brain.savePredicates();
} catch (IOException e) {
log.error("saving predicates threw", e);
}
// start session based on last recognized person
if (!brain.getPredicate("default", "lastUsername").isEmpty()
&& !brain.getPredicate("default", "lastUsername").equals("unknown")) {
brain.startSession(brain.getPredicate("lastUsername"));
}
htmlFilter = (HtmlFilter) startPeer("htmlFilter");// Runtime.start("htmlFilter",
// "HtmlFilter");
brain.attachTextListener(htmlFilter);
htmlFilter.attachTextListener((TextListener) getPeer("mouth"));
brain.attachTextListener(this);
} catch (Exception e) {
speak("could not load brain");
error(e.getMessage());
speak(e.getMessage());
}
broadcastState();
return brain;
}
public SpeechRecognizer startEar() {
ear = (SpeechRecognizer) startPeer("ear");
isEarActivated = true;
ear.attachSpeechSynthesis((SpeechSynthesis) getPeer("mouth"));
ear.attachTextListener(brain);
speakBlocking(get("STARTINGEAR"));
broadcastState();
return ear;
}
public void startedGesture() {
startedGesture("unknown");
}
public void startedGesture(String nameOfGesture) {
if (gestureAlreadyStarted) {
warn("Warning 1 gesture already running, this can break spacetime and lot of things");
} else {
log.info("Starting gesture : {}", nameOfGesture);
gestureAlreadyStarted = true;
RobotCanMoveRandom = false;
}
}
// FIXME - universal (good) way of handling all exceptions - ie - reporting
// back to the user the problem in a short concise way but have
// expandable detail in appropriate places
public OpenCV startEye() throws Exception {
speakBlocking(get("STARTINGOPENCV"));
eye = (OpenCV) startPeer("eye", "OpenCV");
subscribeTo(eye.getName(), "publishOpenCVData");
isEyeActivated = true;
return eye;
}
public Tracking startEyesTracking() throws Exception {
if (head == null) {
startHead();
}
return startEyesTracking(head.eyeX, head.eyeY);
}
public Tracking startEyesTracking(ServoControl eyeX, ServoControl eyeY) throws Exception {
if (eye == null) {
startEye();
}
speakBlocking(get("TRACKINGSTARTED"));
eyesTracking = (Tracking) this.startPeer("eyesTracking");
eyesTracking.connect(eye, eyeX, eyeY);
return eyesTracking;
}
public InMoov2Head startHead() throws Exception {
return startHead(null, null, null, null, null, null, null, null);
}
public InMoov2Head startHead(String port) throws Exception {
return startHead(port, null, null, null, null, null, null, null);
}
// legacy inmoov head exposed pins
public InMoov2Head startHead(String port, String type, Integer headYPin, Integer headXPin, Integer eyeXPin,
Integer eyeYPin, Integer jawPin, Integer rollNeckPin) {
// log.warn(InMoov.buildDNA(myKey, serviceClass))
// speakBlocking(get("STARTINGHEAD") + " " + port);
// ??? SHOULD THERE BE REFERENCES AT ALL ??? ... probably not
speakBlocking(get("STARTINGHEAD"));
head = (InMoov2Head) startPeer("head");
isHeadActivated = true;
if (headYPin != null) {
head.setPins(headYPin, headXPin, eyeXPin, eyeYPin, jawPin, rollNeckPin);
}
// lame assumption - port is specified - it must be an Arduino :(
if (port != null) {
try {
speakBlocking(get(port));
Arduino arduino = (Arduino) startPeer("left", "Arduino");
arduino.connect(port);
arduino.attach(head.neck);
arduino.attach(head.rothead);
arduino.attach(head.eyeX);
arduino.attach(head.eyeY);
arduino.attach(head.jaw);
arduino.attach(head.rollNeck);
} catch (Exception e) {
error(e);
}
}
speakBlocking(get("STARTINGMOUTHCONTROL"));
mouthControl = (MouthControl) startPeer("mouthControl");
mouthControl.attach(head.jaw);
mouthControl.attach((Attachable) getPeer("mouth"));
mouthControl.setmouth(10, 50);// <-- FIXME - not the right place for
// config !!!
return head;
}
public void startHeadTracking() throws Exception {
if (eye == null) {
startEye();
}
if (head == null) {
startHead();
}
if (headTracking == null) {
speakBlocking(get("TRACKINGSTARTED"));
headTracking = (Tracking) this.startPeer("headTracking");
headTracking.connect(this.eye, head.rothead, head.neck);
}
}
public Tracking startHeadTracking(ServoControl rothead, ServoControl neck) throws Exception {
if (eye == null) {
startEye();
}
if (headTracking == null) {
speakBlocking(get("TRACKINGSTARTED"));
headTracking = (Tracking) this.startPeer("headTracking");
headTracking.connect(this.eye, rothead, neck);
}
return headTracking;
}
public InMoov2Arm startLeftArm() {
return startLeftArm(null);
}
public InMoov2Arm startLeftArm(String port) {
// log.warn(InMoov.buildDNA(myKey, serviceClass))
// speakBlocking(get("STARTINGHEAD") + " " + port);
// ??? SHOULD THERE BE REFERENCES AT ALL ??? ... probably not
speakBlocking(get("STARTINGLEFTARM"));
leftArm = (InMoov2Arm) startPeer("leftArm");
isLeftArmActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("left", "Arduino");
arduino.connect(port);
arduino.attach(leftArm.bicep);
arduino.attach(leftArm.omoplate);
arduino.attach(leftArm.rotate);
arduino.attach(leftArm.shoulder);
} catch (Exception e) {
error(e);
}
}
return leftArm;
}
public InMoov2Hand startLeftHand() {
return startLeftHand(null);
}
public InMoov2Hand startLeftHand(String port) {
speakBlocking(get("STARTINGLEFTHAND"));
leftHand = (InMoov2Hand) startPeer("leftHand");
isLeftHandActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("left", "Arduino");
arduino.connect(port);
arduino.attach(leftHand.thumb);
arduino.attach(leftHand.index);
arduino.attach(leftHand.majeure);
arduino.attach(leftHand.ringFinger);
arduino.attach(leftHand.pinky);
arduino.attach(leftHand.wrist);
} catch (Exception e) {
error(e);
}
}
return leftHand;
}
// TODO - general objective "might" be to reduce peers down to something
// that does not need a reference - where type can be switched before creation
// and the only thing needed is pubs/subs that are not handled in abstracts
public SpeechSynthesis startMouth() {
mouth = (SpeechSynthesis) startPeer("mouth");
voices = mouth.getVoices();
Voice voice = mouth.getVoice();
if (voice != null) {
voiceSelected = voice.getName();
}
isMouthActivated = true;
if (mute) {
mouth.setMute(true);
}
mouth.attachSpeechRecognizer(ear);
// mouth.attach(htmlFilter); // same as brain not needed
// this.attach((Attachable) mouth);
// if (ear != null) ....
broadcastState();
speakBlocking(get("STARTINGMOUTH"));
if (Platform.isVirtual()) {
speakBlocking("in virtual hardware mode");
}
speakBlocking(get("WHATISTHISLANGUAGE"));
return mouth;
}
@Deprecated /* use start eye */
public void startOpenCV() throws Exception {
startEye();
}
public InMoov2Arm startRightArm() {
return startRightArm(null);
}
public InMoov2Arm startRightArm(String port) {
speakBlocking(get("STARTINGRIGHTARM"));
rightArm = (InMoov2Arm) startPeer("rightArm");
isRightArmActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("right", "Arduino");
arduino.connect(port);
arduino.attach(rightArm.bicep);
arduino.attach(rightArm.omoplate);
arduino.attach(rightArm.rotate);
arduino.attach(rightArm.shoulder);
} catch (Exception e) {
error(e);
}
}
return rightArm;
}
public InMoov2Hand startRightHand() {
return startRightHand(null);
}
public InMoov2Hand startRightHand(String port) {
speakBlocking(get("STARTINGRIGHTHAND"));
rightHand = (InMoov2Hand) startPeer("rightHand");
isRightHandActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("right", "Arduino");
arduino.connect(port);
arduino.attach(rightHand.thumb);
arduino.attach(rightHand.index);
arduino.attach(rightHand.majeure);
arduino.attach(rightHand.ringFinger);
arduino.attach(rightHand.pinky);
arduino.attach(rightHand.wrist);
} catch (Exception e) {
error(e);
}
}
return rightHand;
}
public Double getUltraSonicRightDistance() {
if (ultraSonicRight != null) {
return ultraSonicRight.range();
} else {
warn("No UltraSonicRight attached");
return 0.0;
}
}
public Double getUltraSonicLeftDistance() {
if (ultraSonicLeft != null) {
return ultraSonicLeft.range();
} else {
warn("No UltraSonicLeft attached");
return 0.0;
}
}
public void publishPin(Pin pin) {
log.info("{} - {}", pin.pin, pin.value);
if (pin.value == 1) {
lastPirActivityTime = System.currentTimeMillis();
}
// if its PIR & PIR is active & was sleeping - then wake up !
if (pirPin == pin.pin && startSleep != null && pin.value == 1) {
// attach(); // good morning / evening / night... asleep for % hours
powerUp();
}
}
public void startServos(String leftPort, String rightPort) throws Exception {
startHead(leftPort);
startLeftArm(leftPort);
startLeftHand(leftPort);
startRightArm(rightPort);
startRightHand(rightPort);
startTorso(leftPort);
}
// FIXME .. externalize in a json file included in InMoov2
public Simulator startSimulator() throws Exception {
speakBlocking(get("STARTINGVIRTUAL"));
if (jme != null) {
log.info("start called twice - starting simulator is reentrant");
return jme;
}
jme = (JMonkeyEngine) startPeer("simulator");
isSimulatorActivated = true;
// adding InMoov2 asset path to the jmonkey simulator
String assetPath = getResourceDir() + fs + JMonkeyEngine.class.getSimpleName();
File check = new File(assetPath);
log.info("loading assets from {}", assetPath);
if (!check.exists()) {
log.warn("%s does not exist");
}
// disable the frustrating servo events ...
// Servo.eventsEnabledDefault(false);
// jme.loadModels(assetPath); not needed - as InMoov2 unzips the model into
// /resource/JMonkeyEngine/assets
jme.setRotation(getName() + ".head.jaw", "x");
jme.setRotation(getName() + ".head.neck", "x");
jme.setRotation(getName() + ".head.rothead", "y");
jme.setRotation(getName() + ".head.rollNeck", "z");
jme.setRotation(getName() + ".head.eyeY", "x");
jme.setRotation(getName() + ".head.eyeX", "y");
jme.setRotation(getName() + ".torso.topStom", "z");
jme.setRotation(getName() + ".torso.midStom", "y");
jme.setRotation(getName() + ".torso.lowStom", "x");
jme.setRotation(getName() + ".rightArm.bicep", "x");
jme.setRotation(getName() + ".leftArm.bicep", "x");
jme.setRotation(getName() + ".rightArm.shoulder", "x");
jme.setRotation(getName() + ".leftArm.shoulder", "x");
jme.setRotation(getName() + ".rightArm.rotate", "y");
jme.setRotation(getName() + ".leftArm.rotate", "y");
jme.setRotation(getName() + ".rightArm.omoplate", "z");
jme.setRotation(getName() + ".leftArm.omoplate", "z");
jme.setRotation(getName() + ".rightHand.wrist", "y");
jme.setRotation(getName() + ".leftHand.wrist", "y");
jme.setMapper(getName() + ".head.jaw", 0, 180, -5, 80);
jme.setMapper(getName() + ".head.neck", 0, 180, 20, -20);
jme.setMapper(getName() + ".head.rollNeck", 0, 180, 30, -30);
jme.setMapper(getName() + ".head.eyeY", 0, 180, 40, 140);
jme.setMapper(getName() + ".head.eyeX", 0, 180, -10, 70); // HERE there may need to be two eyeX (left and right?)
jme.setMapper(getName() + ".rightArm.bicep", 0, 180, 0, -150);
jme.setMapper(getName() + ".leftArm.bicep", 0, 180, 0, -150);
jme.setMapper(getName() + ".rightArm.shoulder", 0, 180, 30, -150);
jme.setMapper(getName() + ".leftArm.shoulder", 0, 180, 30, -150);
jme.setMapper(getName() + ".rightArm.rotate", 0, 180, 80, -80);
jme.setMapper(getName() + ".leftArm.rotate", 0, 180, -80, 80);
jme.setMapper(getName() + ".rightArm.omoplate", 0, 180, 10, -180);
jme.setMapper(getName() + ".leftArm.omoplate", 0, 180, -10, 180);
jme.setMapper(getName() + ".rightHand.wrist", 0, 180, -20, 60);
jme.setMapper(getName() + ".leftHand.wrist", 0, 180, 20, -60);
jme.setMapper(getName() + ".torso.topStom", 0, 180, -30, 30);
jme.setMapper(getName() + ".torso.midStom", 0, 180, 50, 130);
jme.setMapper(getName() + ".torso.lowStom", 0, 180, -30, 30);
jme.attach(getName() + ".leftHand.thumb", getName() + ".leftHand.thumb1", getName() + ".leftHand.thumb2",
getName() + ".leftHand.thumb3");
jme.setRotation(getName() + ".leftHand.thumb1", "y");
jme.setRotation(getName() + ".leftHand.thumb2", "x");
jme.setRotation(getName() + ".leftHand.thumb3", "x");
jme.attach(getName() + ".leftHand.index", getName() + ".leftHand.index", getName() + ".leftHand.index2",
getName() + ".leftHand.index3");
jme.setRotation(getName() + ".leftHand.index", "x");
jme.setRotation(getName() + ".leftHand.index2", "x");
jme.setRotation(getName() + ".leftHand.index3", "x");
jme.attach(getName() + ".leftHand.majeure", getName() + ".leftHand.majeure", getName() + ".leftHand.majeure2",
getName() + ".leftHand.majeure3");
jme.setRotation(getName() + ".leftHand.majeure", "x");
jme.setRotation(getName() + ".leftHand.majeure2", "x");
jme.setRotation(getName() + ".leftHand.majeure3", "x");
jme.attach(getName() + ".leftHand.ringFinger", getName() + ".leftHand.ringFinger",
getName() + ".leftHand.ringFinger2", getName() + ".leftHand.ringFinger3");
jme.setRotation(getName() + ".leftHand.ringFinger", "x");
jme.setRotation(getName() + ".leftHand.ringFinger2", "x");
jme.setRotation(getName() + ".leftHand.ringFinger3", "x");
jme.attach(getName() + ".leftHand.pinky", getName() + ".leftHand.pinky", getName() + ".leftHand.pinky2",
getName() + ".leftHand.pinky3");
jme.setRotation(getName() + ".leftHand.pinky", "x");
jme.setRotation(getName() + ".leftHand.pinky2", "x");
jme.setRotation(getName() + ".leftHand.pinky3", "x");
// left hand mapping complexities of the fingers
jme.setMapper(getName() + ".leftHand.index", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.index2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.index3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.majeure", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.majeure2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.majeure3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.ringFinger", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.ringFinger2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.ringFinger3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.pinky", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.pinky2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.pinky3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.thumb1", 0, 180, -30, -100);
jme.setMapper(getName() + ".leftHand.thumb2", 0, 180, 80, 20);
jme.setMapper(getName() + ".leftHand.thumb3", 0, 180, 80, 20);
// right hand
jme.attach(getName() + ".rightHand.thumb", getName() + ".rightHand.thumb1", getName() + ".rightHand.thumb2",
getName() + ".rightHand.thumb3");
jme.setRotation(getName() + ".rightHand.thumb1", "y");
jme.setRotation(getName() + ".rightHand.thumb2", "x");
jme.setRotation(getName() + ".rightHand.thumb3", "x");
jme.attach(getName() + ".rightHand.index", getName() + ".rightHand.index", getName() + ".rightHand.index2",
getName() + ".rightHand.index3");
jme.setRotation(getName() + ".rightHand.index", "x");
jme.setRotation(getName() + ".rightHand.index2", "x");
jme.setRotation(getName() + ".rightHand.index3", "x");
jme.attach(getName() + ".rightHand.majeure", getName() + ".rightHand.majeure",
getName() + ".rightHand.majeure2", getName() + ".rightHand.majeure3");
jme.setRotation(getName() + ".rightHand.majeure", "x");
jme.setRotation(getName() + ".rightHand.majeure2", "x");
jme.setRotation(getName() + ".rightHand.majeure3", "x");
jme.attach(getName() + ".rightHand.ringFinger", getName() + ".rightHand.ringFinger",
getName() + ".rightHand.ringFinger2", getName() + ".rightHand.ringFinger3");
jme.setRotation(getName() + ".rightHand.ringFinger", "x");
jme.setRotation(getName() + ".rightHand.ringFinger2", "x");
jme.setRotation(getName() + ".rightHand.ringFinger3", "x");
jme.attach(getName() + ".rightHand.pinky", getName() + ".rightHand.pinky", getName() + ".rightHand.pinky2",
getName() + ".rightHand.pinky3");
jme.setRotation(getName() + ".rightHand.pinky", "x");
jme.setRotation(getName() + ".rightHand.pinky2", "x");
jme.setRotation(getName() + ".rightHand.pinky3", "x");
jme.setMapper(getName() + ".rightHand.index", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.index2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.index3", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.majeure", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.majeure2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.majeure3", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.ringFinger", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.ringFinger2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.ringFinger3", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.pinky", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.pinky2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.pinky3", 0, 180, 60, -10);
jme.setMapper(getName() + ".rightHand.thumb1", 0, 180, 30, 110);
jme.setMapper(getName() + ".rightHand.thumb2", 0, 180, -100, -150);
jme.setMapper(getName() + ".rightHand.thumb3", 0, 180, -100, -160);
// additional experimental mappings
/*
* simulator.attach(getName() + ".leftHand.pinky", getName() +
* ".leftHand.index2"); simulator.attach(getName() + ".leftHand.thumb",
* getName() + ".leftHand.index3"); simulator.setRotation(getName() +
* ".leftHand.index2", "x"); simulator.setRotation(getName() +
* ".leftHand.index3", "x"); simulator.setMapper(getName() + ".leftHand.index",
* 0, 180, -90, -270); simulator.setMapper(getName() + ".leftHand.index2", 0,
* 180, -90, -270); simulator.setMapper(getName() + ".leftHand.index3", 0, 180,
* -90, -270);
*/
return jme;
}
public InMoov2Torso startTorso() {
return startTorso(null);
}
public InMoov2Torso startTorso(String port) {
if (torso == null) {
speakBlocking(get("STARTINGTORSO"));
isTorsoActivated = true;
torso = (InMoov2Torso) startPeer("torso");
if (port != null) {
try {
speakBlocking(port);
Arduino left = (Arduino) startPeer("left");
left.connect(port);
left.attach(torso.lowStom);
left.attach(torso.midStom);
left.attach(torso.topStom);
} catch (Exception e) {
error(e);
}
}
}
return torso;
}
/**
* called with only a port - uses the default trigger and echo pins (64, 63)
* @param port
* @return
*/
public UltrasonicSensor startUltraSonicRight(String port) {
return startUltraSonicRight(port, 64, 63);
}
/**
* called explicitly with pin values
* @param port
* @param trigPin
* @param echoPin
* @return
*/
public UltrasonicSensor startUltraSonicRight(String port, int trigPin, int echoPin) {
if (ultraSonicRight == null) {
speakBlocking(get("STARTINGULTRASONIC"));
isUltraSonicRightActivated = true;
ultraSonicRight = (UltrasonicSensor) startPeer("ultraSonicRight");
if (port != null) {
try {
speakBlocking(port);
Arduino right = (Arduino) startPeer("right");
right.connect(port);
right.attach(ultraSonicRight, trigPin, echoPin);
} catch (Exception e) {
error(e);
}
}
}
return ultraSonicRight;
}
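/*
 * Usage sketch (hypothetical, not part of the original source - "i01" and "COM4"
 * are illustrative names): the one-argument overload above falls back to the
 * default trigger/echo pins (64, 63); the three-argument overload sets them
 * explicitly.
 *
 *   i01.startUltraSonicRight("COM4");          // default trigPin 64, echoPin 63
 *   i01.startUltraSonicRight("COM4", 30, 31);  // explicit pins
 *   Double distance = i01.getUltraSonicRightDistance();
 */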
public UltrasonicSensor startUltraSonicLeft(String port) {
return startUltraSonicLeft(port, 64, 63);
}
public UltrasonicSensor startUltraSonicLeft(String port, int trigPin, int echoPin) {
if (ultraSonicLeft == null) {
speakBlocking(get("STARTINGULTRASONIC"));
isUltraSonicLeftActivated = true;
ultraSonicLeft = (UltrasonicSensor) startPeer("ultraSonicLeft");
if (port != null) {
try {
speakBlocking(port);
Arduino left = (Arduino) startPeer("left");
left.connect(port);
left.attach(ultraSonicLeft, trigPin, echoPin);
} catch (Exception e) {
error(e);
}
}
}
return ultraSonicLeft;
}
public Pir startPir(String port) {
return startPir(port, 23);
}
public Pir startPir(String port, int pin) {
if (pir == null) {
speakBlocking(get("STARTINGPIR"));
isPirActivated = true;
pir = (Pir) startPeer("pir");
if (port != null) {
try {
speakBlocking(port);
Arduino right = (Arduino) startPeer("right");
right.connect(port);
right.enablePin(pin, pirPin);
pirArduino = right;
pirPin = pin;
right.addListener("publishPin", this.getName(), "publishPin");
} catch (Exception e) {
error(e);
}
}
}
return pir;
}
public ServoMixer startServoMixer() {
servomixer = (ServoMixer) startPeer("servomixer");
isServoMixerActivated = true;
speakBlocking(get("STARTINGSERVOMIXER"));
broadcastState();
return servomixer;
}
public void stop() {
if (head != null) {
head.stop();
}
if (rightHand != null) {
rightHand.stop();
}
if (leftHand != null) {
leftHand.stop();
}
if (rightArm != null) {
rightArm.stop();
}
if (leftArm != null) {
leftArm.stop();
}
if (torso != null) {
torso.stop();
}
}
public void stopBrain() {
speakBlocking(get("STOPCHATBOT"));
releasePeer("brain");
isBrainActivated = false;
}
public void stopHead() {
speakBlocking(get("STOPHEAD"));
releasePeer("head");
isHeadActivated = false;
}
public void stopEar() {
speakBlocking(get("STOPEAR"));
releasePeer("ear");
isEarActivated = false;
broadcastState();
}
public void stopEye() {
speakBlocking(get("STOPOPENCV"));
isEyeActivated = false;
releasePeer("eye");
}
public void stopGesture() {
Python p = (Python) Runtime.getService("python");
p.stop();
}
public void stopLeftArm() {
speakBlocking(get("STOPLEFTARM"));
releasePeer("leftArm");
isLeftArmActivated = false;
}
public void stopLeftHand() {
speakBlocking(get("STOPLEFTHAND"));
releasePeer("leftHand");
isLeftHandActivated = false;
}
public void stopMouth() {
speakBlocking(get("STOPMOUTH"));
releasePeer("mouth");
// TODO - potentially you could set the field to null in releasePeer
mouth = null;
isMouthActivated = false;
}
public void stopRightArm() {
speakBlocking(get("STOPRIGHTARM"));
releasePeer("rightArm");
isRightArmActivated = false;
}
public void stopRightHand() {
speakBlocking(get("STOPRIGHTHAND"));
releasePeer("rightHand");
isRightHandActivated = false;
}
public void stopTorso() {
speakBlocking(get("STOPTORSO"));
releasePeer("torso");
isTorsoActivated = false;
}
public void stopSimulator() {
speakBlocking(get("STOPVIRTUAL"));
releasePeer("simulator");
jme = null;
isSimulatorActivated = false;
}
public void stopUltraSonicRight() {
speakBlocking(get("STOPULTRASONIC"));
releasePeer("ultraSonicRight");
isUltraSonicRightActivated = false;
}
public void stopUltraSonicLeft() {
speakBlocking(get("STOPULTRASONIC"));
releasePeer("ultraSonicLeft");
isUltraSonicLeftActivated = false;
}
public void stopPir() {
speakBlocking(get("STOPPIR"));
releasePeer("pir");
isPirActivated = false;
if (pirArduino != null && pirPin != null) {
pirArduino.disablePin(pirPin);
pirPin = null;
pirArduino = null;
}
}
public void stopServoMixer() {
speakBlocking(get("STOPSERVOMIXER"));
releasePeer("servomixer");
isServoMixerActivated = false;
}
public void waitTargetPos() {
if (head != null) {
head.waitTargetPos();
}
if (leftArm != null) {
leftArm.waitTargetPos();
}
if (rightArm != null) {
rightArm.waitTargetPos();
}
if (leftHand != null) {
leftHand.waitTargetPos();
}
if (rightHand != null) {
rightHand.waitTargetPos();
}
if (torso != null) {
torso.waitTargetPos();
}
}
}
|
package prefuse.data.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import prefuse.data.Tuple;
public class SortedTupleIterator implements Iterator {
private ArrayList m_tuples;
private Comparator m_cmp;
private Iterator m_iter;
/**
* Create a new SortedTupleIterator that sorts tuples in the given
* iterator using the given comparator.
* @param iter the source iterator of tuples
* @param c the comparator to use for sorting
*/
public SortedTupleIterator(Iterator iter, Comparator c) {
this(iter, 128, c);
}
/**
* Create a new SortedTupleIterator that sorts tuples in the given
* iterator using the given comparator.
* @param iter the source iterator of tuples
* @param size the expected number of tuples in the iterator
* @param c the comparator to use for sorting
*/
public SortedTupleIterator(Iterator iter, int size, Comparator c) {
m_tuples = new ArrayList(size);
init(iter, c);
}
/**
* Initialize this iterator for the given source iterator and
* comparator.
* @param iter the source iterator of tuples
* @param c the comparator to use for sorting
*/
public void init(Iterator iter, Comparator c) {
m_tuples.clear();
m_cmp = c;
// populate tuple list
while ( iter.hasNext() ) {
Tuple t = (Tuple)iter.next();
m_tuples.add(t);
}
// sort tuple list
Collections.sort(m_tuples, m_cmp);
// create sorted iterator
m_iter = m_tuples.iterator();
}
/**
* @see java.util.Iterator#hasNext()
*/
public boolean hasNext() {
return m_iter.hasNext();
}
/**
* @see java.util.Iterator#next()
*/
public Object next() {
return m_iter.next();
}
/**
* Throws an UnsupportedOperationException
* @see java.util.Iterator#remove()
* @throws UnsupportedOperationException
*/
public void remove() {
throw new UnsupportedOperationException();
}
} // end of class SortedTupleIterator
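/*
 * Usage sketch (hypothetical, not part of the original prefuse source): sorting the
 * tuples of a populated prefuse Table by an assumed "weight" column before iteration.
 * The Table instance and the "weight" column name are illustrative assumptions; the
 * raw Comparator matches the raw types used by this class.
 *
 *   Comparator cmp = new Comparator() {
 *     public int compare(Object a, Object b) {
 *       return Double.compare(((Tuple) a).getDouble("weight"),
 *                             ((Tuple) b).getDouble("weight"));
 *     }
 *   };
 *   Iterator sorted = new SortedTupleIterator(table.tuples(), table.getTupleCount(), cmp);
 *   while (sorted.hasNext()) {
 *     Tuple t = (Tuple) sorted.next();
 *     // t is visited in ascending "weight" order
 *   }
 */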
|
package org.myrobotlab.service;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.io.FilenameUtils;
import org.myrobotlab.framework.Platform;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.ServiceType;
import org.myrobotlab.framework.Status;
import org.myrobotlab.framework.interfaces.Attachable;
import org.myrobotlab.framework.interfaces.ServiceInterface;
import org.myrobotlab.inmoov.Vision;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.opencv.OpenCVData;
import org.myrobotlab.service.abstracts.AbstractSpeechSynthesis.Voice;
import org.myrobotlab.service.data.JoystickData;
import org.myrobotlab.service.data.Locale;
import org.myrobotlab.service.interfaces.JoystickListener;
import org.myrobotlab.service.interfaces.LocaleProvider;
import org.myrobotlab.service.interfaces.ServoControl;
import org.myrobotlab.service.interfaces.Simulator;
import org.myrobotlab.service.interfaces.SpeechRecognizer;
import org.myrobotlab.service.interfaces.SpeechSynthesis;
import org.myrobotlab.service.interfaces.TextListener;
import org.myrobotlab.service.interfaces.TextPublisher;
import org.slf4j.Logger;
public class InMoov2 extends Service implements TextListener, TextPublisher, JoystickListener, LocaleProvider {
public final static Logger log = LoggerFactory.getLogger(InMoov2.class);
public static LinkedHashMap<String, String> lpVars = new LinkedHashMap<String, String>();
// FIXME - why
@Deprecated
static boolean RobotCanMoveRandom = true;
private static final long serialVersionUID = 1L;
public Arduino neopixelArduino = null;
static String speechRecognizer = "WebkitSpeechRecognition";
/**
* This static method returns all the details of the class without it having
* to be constructed. It has description, categories, dependencies, and peer
* definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(InMoov2.class);
meta.addDescription("InMoov2 Service");
meta.addCategory("robot");
meta.sharePeer("mouthControl.mouth", "mouth", "MarySpeech", "shared Speech");
meta.addPeer("opencv", "OpenCV", "opencv");
meta.addPeer("servomixer", "ServoMixer", "for making gestures");
meta.addPeer("ultraSonicRight", "UltrasonicSensor", "measure distance");
meta.addPeer("ultraSonicLeft", "UltrasonicSensor", "measure distance");
meta.addPeer("pir", "Pir", "infrared sensor");
// the four legacy controllers .. :(
meta.addPeer("left", "Arduino", "legacy controller");
meta.addPeer("right", "Arduino", "legacy controller");
meta.addPeer("controller3", "Arduino", "legacy controller");
meta.addPeer("controller4", "Arduino", "legacy controller");
meta.addPeer("htmlFilter", "HtmlFilter", "filter speaking html");
meta.addPeer("chatBot", "ProgramAB", "chatBot");
meta.addPeer("simulator", "JMonkeyEngine", "simulator");
meta.addPeer("head", "InMoov2Head", "head");
meta.addPeer("torso", "InMoov2Torso", "torso");
// meta.addPeer("eyelids", "InMoovEyelids", "eyelids");
meta.addPeer("leftArm", "InMoov2Arm", "left arm");
meta.addPeer("leftHand", "InMoov2Hand", "left hand");
meta.addPeer("rightArm", "InMoov2Arm", "right arm");
meta.addPeer("rightHand", "InMoov2Hand", "right hand");
meta.addPeer("mouthControl", "MouthControl", "MouthControl");
// meta.addPeer("imageDisplay", "ImageDisplay", "image display service");
meta.addPeer("mouth", "MarySpeech", "InMoov speech service");
meta.addPeer("ear", speechRecognizer, "InMoov webkit speech recognition service");
meta.addPeer("headTracking", "Tracking", "Head tracking system");
meta.sharePeer("headTracking.opencv", "opencv", "OpenCV", "shared head OpenCV");
// meta.sharePeer("headTracking.controller", "left", "Arduino", "shared head
// Arduino"); NO !!!!
meta.sharePeer("headTracking.x", "head.rothead", "Servo", "shared servo");
meta.sharePeer("headTracking.y", "head.neck", "Servo", "shared servo");
// Global - undecorated by self name
meta.addRootPeer("python", "Python", "shared Python service");
// latest - not ready until repo is ready
meta.addDependency("fr.inmoov", "inmoov2", null, "zip");
return meta;
}
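/*
 * Usage sketch (illustrative, not part of the original source): because
 * getMetaData() is static, the peer and dependency description above can be
 * inspected without constructing the service.
 *
 *   ServiceType meta = InMoov2.getMetaData();
 *   // meta now describes the peers declared above (head, torso, mouth, ear, ...)
 */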
/**
* execute a resource script
* @param someScriptName
*/
public void execScript(String someScriptName) {
try {
Python p = (Python)Runtime.start("python", "Python");
String script = getResourceAsString(someScriptName);
p.exec(script, true);
} catch (Exception e) {
error("unable to execute script %s", someScriptName);
}
}
/**
* This method will load a python file into the python interpreter.
*/
public static boolean loadFile(String file) {
File f = new File(file);
Python p = (Python) Runtime.getService("python");
log.info("Loading Python file {}", f.getAbsolutePath());
if (p == null) {
log.error("Python instance not found");
return false;
}
String script = null;
try {
script = FileIO.toString(f.getAbsolutePath());
} catch (IOException e) {
log.error("IO Error loading file : ", e);
return false;
}
// evaluate the scripts in a blocking way.
boolean result = p.exec(script, true);
if (!result) {
log.error("Error while loading file {}", f.getAbsolutePath());
return false;
} else {
log.debug("Successfully loaded {}", f.getAbsolutePath());
}
return true;
}
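/*
 * Usage sketch (the path is hypothetical, for illustration only): loadFile()
 * blocks until the Python interpreter has evaluated the whole file and returns
 * false when the python service is missing or the script fails.
 *
 *   if (!InMoov2.loadFile("data/InMoov2/gestures/wave.py")) {
 *     log.error("gesture file failed to load");
 *   }
 */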
public static void main(String[] args) {
try {
LoggingFactory.init(Level.INFO);
Platform.setVirtual(true);
Runtime.main(new String[] { "--interactive", "--id", "inmoov" });
InMoov2 i01 = (InMoov2) Runtime.start("i01", "InMoov2");
WebGui webgui = (WebGui) Runtime.create("webgui", "WebGui");
webgui.autoStartBrowser(false);
webgui.startService();
boolean done = true;
if (done) {
return;
}
i01.startChatBot();
i01.startAll("COM3", "COM4");
Runtime.start("python", "Python");
// Runtime.start("log", "Log");
/*
* OpenCV cv = (OpenCV) Runtime.start("cv", "OpenCV");
* cv.setCameraIndex(2);
*/
// i01.startSimulator();
/*
* Arduino mega = (Arduino) Runtime.start("mega", "Arduino");
* mega.connect("/dev/ttyACM0");
*/
} catch (Exception e) {
log.error("main threw", e);
}
}
boolean autoStartBrowser = false;
transient ProgramAB chatBot;
Set<String> configs = null;
String currentConfigurationName = "default";
transient SpeechRecognizer ear;
transient OpenCV opencv;
transient Tracking eyesTracking;
// when a controllable threaded gesture is already running we warn the user
boolean gestureAlreadyStarted = false;
// FIXME - what the hell is this for ?
Set<String> gestures = new TreeSet<String>();
transient InMoov2Head head;
transient Tracking headTracking;
transient HtmlFilter htmlFilter;
transient UltrasonicSensor ultraSonicRight;
transient UltrasonicSensor ultraSonicLeft;
transient Pir pir;
// transient ImageDisplay imageDisplay;
/**
* simple booleans to determine peer state of existence FIXME - should be an
* auto-peer variable
*/
boolean isChatBotActivated = false;
boolean isEarActivated = false;
boolean isOpenCvActivated = false;
boolean isEyeLidsActivated = false;
boolean isHeadActivated = false;
boolean isLeftArmActivated = false;
boolean isLeftHandActivated = false;
boolean isMouthActivated = false;
boolean isRightArmActivated = false;
boolean isRightHandActivated = false;
boolean isSimulatorActivated = false;
boolean isTorsoActivated = false;
boolean isNeopixelActivated = false;
boolean isPirActivated = false;
boolean isUltraSonicRightActivated = false;
boolean isUltraSonicLeftActivated = false;
boolean isServoMixerActivated = false;
// TODO - refactor into a Simulator interface when more simulators are borged
transient JMonkeyEngine jme;
String lastGestureExecuted;
Long lastPirActivityTime;
transient InMoov2Arm leftArm;
// transient LanguagePack languagePack = new LanguagePack();
// transient InMoovEyelids eyelids; eyelids are in the head
transient InMoov2Hand leftHand;
/**
* supported locales
*/
Map<String, Locale> locales = null;
int maxInactivityTimeSeconds = 120;
transient SpeechSynthesis mouth;
// FIXME ugh - new MouthControl service that uses AudioFile output
transient public MouthControl mouthControl;
boolean mute = false;
transient NeoPixel neopixel;
transient ServoMixer servomixer;
transient Python python;
transient InMoov2Arm rightArm;
transient InMoov2Hand rightHand;
/**
* used to remember/serialize the user's desired speech type configuration
*/
String speechService = "MarySpeech";
transient InMoov2Torso torso;
@Deprecated
public Vision vision;
// FIXME - remove all direct references
// transient private HashMap<String, InMoov2Arm> arms = new HashMap<>();
protected List<Voice> voices = null;
protected String voiceSelected;
transient WebGui webgui;
public InMoov2(String n, String id) {
super(n, id);
// by default all servos will auto-disable
Servo.setAutoDisableDefault(true);
locales = Locale.getLocaleMap("en-US", "fr-FR", "es-ES", "de-DE", "nl-NL", "ru-RU", "hi-IN", "it-IT", "fi-FI", "pt-PT");
locale = Runtime.getInstance().getLocale();
python = (Python) startPeer("python");
load(locale.getTag());
// get events of new services and shutdown
Runtime r = Runtime.getInstance();
subscribe(r.getName(), "shutdown");
listConfigFiles();
// FIXME - Framework should auto-magically auto-start peers AFTER
// construction - unless explicitly told not to
// peers to start on construction
// imageDisplay = (ImageDisplay) startPeer("imageDisplay");
}
@Override /* local strong type - is to be avoided - use name string */
public void addTextListener(TextListener service) {
// CORRECT WAY ! - no direct reference - just use the name in a subscription
addListener("publishText", service.getName());
}
@Override
public void attachTextListener(TextListener service) {
addListener("publishText", service.getName());
}
public void attachTextPublisher(String name) {
subscribe(name, "publishText");
}
@Override
public void attachTextPublisher(TextPublisher service) {
subscribe(service.getName(), "publishText");
}
public void beginCheckingOnInactivity() {
beginCheckingOnInactivity(maxInactivityTimeSeconds);
}
public void beginCheckingOnInactivity(int maxInactivityTimeSeconds) {
this.maxInactivityTimeSeconds = maxInactivityTimeSeconds;
// speakBlocking("power down after %s seconds inactivity is on",
// this.maxInactivityTimeSeconds);
log.info("power down after {} seconds inactivity is on", this.maxInactivityTimeSeconds);
addTask("checkInactivity", 5 * 1000, 0, "checkInactivity");
}
public long checkInactivity() {
// speakBlocking("checking");
long lastActivityTime = getLastActivityTime();
long now = System.currentTimeMillis();
long inactivitySeconds = (now - lastActivityTime) / 1000;
if (inactivitySeconds > maxInactivityTimeSeconds) {
// speakBlocking("%d seconds have passed without activity",
// inactivitySeconds);
powerDown();
} else {
// speakBlocking("%d seconds have passed without activity",
// inactivitySeconds);
info("checking checkInactivity - %d seconds have passed without activity", inactivitySeconds);
}
return lastActivityTime;
}
public void closeAllImages() {
// imageDisplay.closeAll();
log.error("implement webgui.closeAllImages");
}
public void cycleGestures() {
// if not loaded load -
// FIXME - this needs a lot of "help" :P
// WHY IS THIS DONE ?
if (gestures.size() == 0) {
loadGestures();
}
for (String gesture : gestures) {
try {
// gestures are stored as "name()" (see loadGestures) - strip the parens when speaking
String methodName = gesture.substring(0, gesture.length() - 2);
speakBlocking(methodName);
log.info("executing gesture {}", methodName);
python.eval(gesture);
// wait for finish - or timeout ?
} catch (Exception e) {
error(e);
}
}
}
public void disable() {
if (head != null) {
head.disable();
}
if (rightHand != null) {
rightHand.disable();
}
if (leftHand != null) {
leftHand.disable();
}
if (rightArm != null) {
rightArm.disable();
}
if (leftArm != null) {
leftArm.disable();
}
if (torso != null) {
torso.disable();
}
}
public void displayFullScreen(String src) {
try {
// imageDisplay.displayFullScreen(src);
log.error("implement webgui.displayFullScreen");
} catch (Exception e) {
error("could not display picture %s", src);
}
}
public void enable() {
if (head != null) {
head.enable();
}
if (rightHand != null) {
rightHand.enable();
}
if (leftHand != null) {
leftHand.enable();
}
if (rightArm != null) {
rightArm.enable();
}
if (leftArm != null) {
leftArm.enable();
}
if (torso != null) {
torso.enable();
}
}
/**
* This method will try to launch a python command with error handling
*/
public String execGesture(String gesture) {
lastGestureExecuted = gesture;
if (python == null) {
log.warn("execGesture : No jython engine...");
return null;
}
subscribe(python.getName(), "publishStatus", this.getName(), "onGestureStatus");
startedGesture(lastGestureExecuted);
return python.evalAndWait(gesture);
}
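/*
 * Usage sketch (gesture name and service name are hypothetical): execGesture()
 * takes the python call as a string, flags the gesture as started, and blocks
 * until Python finishes evaluating it.
 *
 *   i01.loadGestures();          // register gestures from the resource dir first
 *   i01.execGesture("wave()");   // assumes a wave() function was loaded
 */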
public void finishedGesture() {
finishedGesture("unknown");
}
public void finishedGesture(String nameOfGesture) {
if (gestureAlreadyStarted) {
waitTargetPos();
RobotCanMoveRandom = true;
gestureAlreadyStarted = false;
log.info("gesture : {} finished...", nameOfGesture);
}
}
public void fullSpeed() {
if (head != null) {
head.fullSpeed();
}
if (rightHand != null) {
rightHand.fullSpeed();
}
if (leftHand != null) {
leftHand.fullSpeed();
}
if (rightArm != null) {
rightArm.fullSpeed();
}
if (leftArm != null) {
leftArm.fullSpeed();
}
if (torso != null) {
torso.fullSpeed();
}
}
public String get(String key) {
String ret = localize(key);
if (ret != null) {
return ret;
}
return "not yet translated";
}
public InMoov2Arm getArm(String side) {
if ("left".equals(side)) {
return leftArm;
} else if ("right".equals(side)) {
return rightArm;
} else {
log.error("can not get arm {}", side);
}
return null;
}
public InMoov2Hand getHand(String side) {
if ("left".equals(side)) {
return leftHand;
} else if ("right".equals(side)) {
return rightHand;
} else {
log.error("can not get hand {}", side);
}
return null;
}
public InMoov2Head getHead() {
return head;
}
/**
* finds most recent activity
*
* @return the timestamp of the last activity time.
*/
public long getLastActivityTime() {
long lastActivityTime = 0;
if (leftHand != null) {
lastActivityTime = Math.max(lastActivityTime, leftHand.getLastActivityTime());
}
if (leftArm != null) {
lastActivityTime = Math.max(lastActivityTime, leftArm.getLastActivityTime());
}
if (rightHand != null) {
lastActivityTime = Math.max(lastActivityTime, rightHand.getLastActivityTime());
}
if (rightArm != null) {
lastActivityTime = Math.max(lastActivityTime, rightArm.getLastActivityTime());
}
if (head != null) {
lastActivityTime = Math.max(lastActivityTime, head.getLastActivityTime());
}
if (torso != null) {
lastActivityTime = Math.max(lastActivityTime, torso.getLastActivityTime());
}
if (lastPirActivityTime != null) {
lastActivityTime = Math.max(lastActivityTime, lastPirActivityTime);
}
if (lastActivityTime == 0) {
error("invalid activity time - anything connected?");
lastActivityTime = System.currentTimeMillis();
}
return lastActivityTime;
}
public InMoov2Arm getLeftArm() {
return leftArm;
}
public InMoov2Hand getLeftHand() {
return leftHand;
}
@Override
public Map<String, Locale> getLocales() {
return locales;
}
public InMoov2Arm getRightArm() {
return rightArm;
}
public InMoov2Hand getRightHand() {
return rightHand;
}
public Simulator getSimulator() {
return jme;
}
public InMoov2Torso getTorso() {
return torso;
}
public void halfSpeed() {
if (head != null) {
head.setSpeed(25.0, 25.0, 25.0, 25.0, -1.0, 25.0);
}
if (rightHand != null) {
rightHand.setSpeed(30.0, 30.0, 30.0, 30.0, 30.0, 30.0);
}
if (leftHand != null) {
leftHand.setSpeed(30.0, 30.0, 30.0, 30.0, 30.0, 30.0);
}
if (rightArm != null) {
rightArm.setSpeed(25.0, 25.0, 25.0, 25.0);
}
if (leftArm != null) {
leftArm.setSpeed(25.0, 25.0, 25.0, 25.0);
}
if (torso != null) {
torso.setSpeed(20.0, 20.0, 20.0);
}
}
public boolean isCameraOn() {
if (opencv != null) {
if (opencv.isCapturing()) {
return true;
}
}
return false;
}
public boolean isEyeLidsActivated() {
return isEyeLidsActivated;
}
public boolean isHeadActivated() {
return isHeadActivated;
}
public boolean isLeftArmActivated() {
return isLeftArmActivated;
}
public boolean isLeftHandActivated() {
return isLeftHandActivated;
}
public boolean isMute() {
return mute;
}
public boolean isNeopixelActivated() {
return isNeopixelActivated;
}
public boolean isRightArmActivated() {
return isRightArmActivated;
}
public boolean isRightHandActivated() {
return isRightHandActivated;
}
public boolean isTorsoActivated() {
return isTorsoActivated;
}
public boolean isPirActivated() {
return isPirActivated;
}
public boolean isUltraSonicRightActivated() {
return isUltraSonicRightActivated;
}
public boolean isUltraSonicLeftActivated() {
return isUltraSonicLeftActivated;
}
public boolean isServoMixerActivated() {
return isServoMixerActivated;
}
public Set<String> listConfigFiles() {
configs = new HashSet<>();
// data list
String configDir = getResourceDir() + fs + "config";
File f = new File(configDir);
if (!f.exists()) {
f.mkdirs();
}
String[] files = f.list();
for (String config : files) {
configs.add(config);
}
// data list
configDir = getDataDir() + fs + "config";
f = new File(configDir);
if (!f.exists()) {
f.mkdirs();
}
files = f.list();
for (String config : files) {
configs.add(config);
}
return configs;
}
/*
* iterate over each txt files in the directory
*/
public void load(String locale) {
setLocale(locale);
}
// FIXME - what is this for ???
public void loadGestures() {
loadGestures(getResourceDir() + fs + "gestures");
}
/**
* This blocking method will look at all of the .py files in a directory. One
* by one it will load the files into the python interpreter. A gesture python
* file should contain 1 method definition that is the same as the filename.
*
* @param directory
* - the directory that contains the gesture python files.
*/
public boolean loadGestures(String directory) {
speakBlocking(get("STARTINGGESTURES"));
// iterate over each of the python files in the directory
// and load them into the python interpreter.
String extension = "py";
Integer totalLoaded = 0;
Integer totalError = 0;
File dir = new File(directory);
dir.mkdirs();
if (dir.exists()) {
for (File f : dir.listFiles()) {
if (FilenameUtils.getExtension(f.getAbsolutePath()).equalsIgnoreCase(extension)) {
if (loadFile(f.getAbsolutePath()) == true) {
totalLoaded += 1;
String methodName = f.getName().substring(0, f.getName().length() - 3) + "()";
gestures.add(methodName);
} else {
error("could not load %s", f.getName());
totalError += 1;
}
} else {
log.info("{} is not a {} file", f.getAbsolutePath(), extension);
}
}
}
info("%s Gestures loaded, %s Gestures with error", totalLoaded, totalError);
broadcastState();
if (totalError > 0) {
speakAlert(get("GESTURE_ERROR"));
return false;
}
return true;
}
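/*
 * Gesture file convention sketch (names and servo positions are hypothetical):
 * a file named wave.py placed in the gesture directory is expected to define
 * exactly one function with the same name as the file, for example
 *
 *   # wave.py - assumes the InMoov2 service is exposed to Python as i01
 *   def wave():
 *       i01.moveArm("right", 80, 90, 30, 15)
 *       i01.moveHand("right", 0, 0, 0, 0, 0)
 *
 * loadGestures() then registers it as "wave()", which can be run with
 * execGesture("wave()").
 */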
public void moveArm(String which, double bicep, double rotate, double shoulder, double omoplate) {
InMoov2Arm arm = getArm(which);
if (arm == null) {
info("%s arm not started", which);
return;
}
arm.moveTo(bicep, rotate, shoulder, omoplate);
}
public void moveEyelids(double eyelidleftPos, double eyelidrightPos) {
if (head != null) {
head.moveEyelidsTo(eyelidleftPos, eyelidrightPos);
} else {
log.warn("moveEyelids - I have a null head");
}
}
public void moveEyes(double eyeX, double eyeY) {
if (head != null) {
head.moveTo(null, null, eyeX, eyeY, null, null);
} else {
log.warn("moveEyes - I have a null head");
}
}
public void moveHand(String which, double thumb, double index, double majeure, double ringFinger, double pinky) {
moveHand(which, thumb, index, majeure, ringFinger, pinky, null);
}
public void moveHand(String which, Double thumb, Double index, Double majeure, Double ringFinger, Double pinky, Double wrist) {
InMoov2Hand hand = getHand(which);
if (hand == null) {
log.warn("{} hand does not exist", which);
return;
}
hand.moveTo(thumb, index, majeure, ringFinger, pinky, wrist);
}
public void moveHead(double neck, double rothead) {
moveHead(neck, rothead, null);
}
public void moveHead(double neck, double rothead, double eyeX, double eyeY, double jaw) {
moveHead(neck, rothead, eyeX, eyeY, jaw, null);
}
public void moveHead(Double neck, Double rothead, Double rollNeck) {
moveHead(neck, rothead, null, null, null, rollNeck);
}
public void moveHead(Double neck, Double rothead, Double eyeX, Double eyeY, Double jaw, Double rollNeck) {
if (head != null) {
head.moveTo(neck, rothead, eyeX, eyeY, jaw, rollNeck);
} else {
log.error("I have a null head");
}
}
public void moveHeadBlocking(double neck, double rothead) {
moveHeadBlocking(neck, rothead, null);
}
public void moveHeadBlocking(double neck, double rothead, Double rollNeck) {
moveHeadBlocking(neck, rothead, null, null, null, rollNeck);
}
public void moveHeadBlocking(double neck, double rothead, Double eyeX, Double eyeY, Double jaw) {
moveHeadBlocking(neck, rothead, eyeX, eyeY, jaw, null);
}
public void moveHeadBlocking(Double neck, Double rothead, Double eyeX, Double eyeY, Double jaw, Double rollNeck) {
if (head != null) {
head.moveToBlocking(neck, rothead, eyeX, eyeY, jaw, rollNeck);
} else {
log.error("I have a null head");
}
}
public void moveTorso(double topStom, double midStom, double lowStom) {
if (torso != null) {
torso.moveTo(topStom, midStom, lowStom);
} else {
log.error("moveTorso - I have a null torso");
}
}
public void moveTorsoBlocking(double topStom, double midStom, double lowStom) {
if (torso != null) {
torso.moveToBlocking(topStom, midStom, lowStom);
} else {
log.error("moveTorsoBlocking - I have a null torso");
}
}
public void onGestureStatus(Status status) {
if (!status.equals(Status.success()) && !status.equals(Status.warn("Python process killed !"))) {
error("I cannot execute %s, please check logs", lastGestureExecuted);
}
finishedGesture(lastGestureExecuted);
unsubscribe(python.getName(), "publishStatus", this.getName(), "onGestureStatus");
}
@Override
public void onJoystickInput(JoystickData input) throws Exception {
// TODO Auto-generated method stub
}
public OpenCVData onOpenCVData(OpenCVData data) {
return data;
}
@Override
public void onText(String text) {
// FIXME - we should be able to "re"-publish text but text is coming from
// different sources
// some might be coming from the ear - some from the mouth ... - there has
// to be a distinction
log.info("onText - {}", text);
invoke("publishText", text);
}
// TODO FIX/CHECK this, migrate from python land
public void powerDown() {
rest();
purgeTasks();
disable();
if (ear != null) {
ear.lockOutAllGrammarExcept("power up");
}
python.execMethod("power_down");
}
// TODO FIX/CHECK this, migrate from python land
public void powerUp() {
enable();
rest();
if (ear != null) {
ear.clearLock();
}
beginCheckingOnInactivity();
python.execMethod("power_up");
}
/**
* all published text from InMoov2 - including ProgramAB
*/
@Override
public String publishText(String text) {
return text;
}
public void releaseService() {
try {
disable();
releasePeers();
super.releaseService();
} catch (Exception e) {
error(e);
}
}
// FIXME NO DIRECT REFERENCES - publishRest --> (onRest) --> rest
public void rest() {
log.info("InMoov2.rest()");
if (head != null) {
head.rest();
}
if (rightHand != null) {
rightHand.rest();
}
if (leftHand != null) {
leftHand.rest();
}
if (rightArm != null) {
rightArm.rest();
}
if (leftArm != null) {
leftArm.rest();
}
if (torso != null) {
torso.rest();
}
}
@Deprecated
public void setArmVelocity(String which, Double bicep, Double rotate, Double shoulder, Double omoplate) {
InMoov2Arm arm = getArm(which);
if (arm == null) {
warn("%s arm not started", which);
return;
}
arm.setSpeed(bicep, rotate, shoulder, omoplate);
}
public void setAutoDisable(Boolean param) {
if (head != null) {
head.setAutoDisable(param);
}
if (rightArm != null) {
rightArm.setAutoDisable(param);
}
if (leftArm != null) {
leftArm.setAutoDisable(param);
}
if (leftHand != null) {
leftHand.setAutoDisable(param);
}
if (rightHand != null) {
rightHand.setAutoDisable(param);
}
if (torso != null) {
torso.setAutoDisable(param);
}
/*
* if (eyelids != null) { eyelids.setAutoDisable(param); }
*/
}
public void setHandSpeed(String which, Double thumb, Double index, Double majeure, Double ringFinger, Double pinky) {
setHandSpeed(which, thumb, index, majeure, ringFinger, pinky, null);
}
public void setHandSpeed(String which, Double thumb, Double index, Double majeure, Double ringFinger, Double pinky, Double wrist) {
InMoov2Hand hand = getHand(which);
if (hand == null) {
warn("%s hand not started", which);
return;
}
hand.setSpeed(thumb, index, majeure, ringFinger, pinky, wrist);
}
@Deprecated
public void setHandVelocity(String which, Double thumb, Double index, Double majeure, Double ringFinger, Double pinky) {
setHandSpeed(which, thumb, index, majeure, ringFinger, pinky, null);
}
@Deprecated
public void setHandVelocity(String which, Double thumb, Double index, Double majeure, Double ringFinger, Double pinky, Double wrist) {
setHandSpeed(which, thumb, index, majeure, ringFinger, pinky, wrist);
}
public void setHeadSpeed(Double rothead, Double neck) {
setHeadSpeed(rothead, neck, null, null, null);
}
public void setHeadSpeed(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed) {
setHeadSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, null);
}
public void setHeadSpeed(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed, Double rollNeckSpeed) {
if (head == null) {
warn("setHeadSpeed - head not started");
return;
}
head.setSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, rollNeckSpeed);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck) {
setHeadSpeed(rothead, neck, null, null, null, null);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck, Double rollNeck) {
setHeadSpeed(rothead, neck, null, null, null, rollNeck);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed) {
setHeadSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, null);
}
@Deprecated
public void setHeadVelocity(Double rothead, Double neck, Double eyeXSpeed, Double eyeYSpeed, Double jawSpeed, Double rollNeckSpeed) {
setHeadSpeed(rothead, neck, eyeXSpeed, eyeYSpeed, jawSpeed, rollNeckSpeed);
}
@Override
public void setLocale(String code) {
if (code == null) {
log.warn("setLocale null");
return;
}
// filter of the set of supported locales
if (!Locale.hasLanguage(locales, code)) {
error("InMoov does not support %s only %s", code, locales.keySet());
return;
}
super.setLocale(code);
for (ServiceInterface si : Runtime.getLocalServices().values()) {
if (!si.equals(this)) {
si.setLocale(code);
}
}
}
public void setMute(boolean mute) {
info("Set mute to %s", mute);
this.mute = mute;
sendToPeer("mouth", "setMute", mute);
broadcastState();
}
public void setNeopixelAnimation(String animation, Integer red, Integer green, Integer blue, Integer speed) {
if (neopixel != null /* && neopixelArduino != null */) {
neopixel.setAnimation(animation, red, green, blue, speed);
} else {
warn("No Neopixel attached");
}
}
public String setSpeechType(String speechType) {
speechService = speechType;
setPeer("mouth", speechType);
return speechType;
}
public void setTorsoSpeed(Double topStom, Double midStom, Double lowStom) {
if (torso != null) {
torso.setSpeed(topStom, midStom, lowStom);
} else {
log.warn("setTorsoSpeed - I have no torso");
}
}
@Deprecated
public void setTorsoVelocity(Double topStom, Double midStom, Double lowStom) {
if (torso != null) {
torso.setVelocity(topStom, midStom, lowStom);
} else {
log.warn("setTorsoVelocity - I have no torso");
}
}
/**
* overridden setVirtual for InMoov sets "all" services to virtual
*/
public boolean setVirtual(boolean virtual) {
super.setVirtual(virtual);
Platform.setVirtual(virtual);
speakBlocking(get("STARTINGVIRTUALHARD"));
return virtual;
}
public void setVoice(String name) {
if (mouth != null) {
mouth.setVoice(name);
voiceSelected = name;
speakBlocking(get("SETLANG"), "%s", name);
}
}
public void speak(String toSpeak) {
sendToPeer("mouth", "speak", toSpeak);
}
public void speakAlert(String toSpeak) {
speakBlocking(get("ALERT"));
speakBlocking(toSpeak);
}
public void speakBlocking(String speak) {
speakBlocking(speak, (Object[]) null);
}
// FIXME - publish text regardless if mouth exists ...
public void speakBlocking(String format, Object... args) {
if (format == null) {
return;
}
String toSpeak = format;
if (args != null) {
toSpeak = String.format(format, args);
}
// FIXME - publish onText when listening
invoke("publishText", toSpeak);
if (!mute) {
// sendToPeer("mouth", "speakBlocking", toSpeak);
invokePeer("mouth", "speakBlocking", toSpeak);
}
}
public void startAll() throws Exception {
startAll(null, null);
}
public void startAll(String leftPort, String rightPort) throws Exception {
startMouth();
startChatBot();
// startHeadTracking();
// startEyesTracking();
// startOpenCV();
startEar();
startServos(leftPort, rightPort);
// startMouthControl(head.jaw, mouth);
speakBlocking(get("STARTINGSEQUENCE"));
}
public ProgramAB startChatBot() {
try {
chatBot = (ProgramAB) startPeer("chatBot");
isChatBotActivated = true;
speakBlocking(get("CHATBOTACTIVATED"));
// GOOD EXAMPLE ! - no type, uses name - does a set of subscriptions !
// attachTextPublisher(chatBot.getName());
/*
* not necessary - ear needs to be attached to mouth not chatBot if (ear
* != null) { ear.attachTextListener(chatBot); }
*/
chatBot.attachTextPublisher(ear);
// this.attach(chatBot); FIXME - attach as a TextPublisher - then
// re-publish
// FIXME - deal with language
// speakBlocking(get("CHATBOTACTIVATED"));
chatBot.repetitionCount(10);
chatBot.setPath(getResourceDir() + fs + "chatbot");
chatBot.startSession("default", locale.getTag());
// reset some parameters to default...
chatBot.setPredicate("topic", "default");
chatBot.setPredicate("questionfirstinit", "");
chatBot.setPredicate("tmpname", "");
chatBot.setPredicate("null", "");
// load last user session
if (!chatBot.getPredicate("name").isEmpty()) {
if (chatBot.getPredicate("lastUsername").isEmpty() || chatBot.getPredicate("lastUsername").equals("unknown")) {
chatBot.setPredicate("lastUsername", chatBot.getPredicate("name"));
}
}
chatBot.setPredicate("parameterHowDoYouDo", "");
try {
chatBot.savePredicates();
} catch (IOException e) {
log.error("saving predicates threw", e);
}
// start session based on last recognized person
if (!chatBot.getPredicate("default", "lastUsername").isEmpty() && !chatBot.getPredicate("default", "lastUsername").equals("unknown")) {
chatBot.startSession(chatBot.getPredicate("lastUsername"));
}
htmlFilter = (HtmlFilter) startPeer("htmlFilter");// Runtime.start("htmlFilter",
// "HtmlFilter");
chatBot.attachTextListener(htmlFilter);
htmlFilter.attachTextListener((TextListener) getPeer("mouth"));
chatBot.attachTextListener(this);
} catch (Exception e) {
speak("could not load chatBot");
error(e.getMessage());
speak(e.getMessage());
}
broadcastState();
return chatBot;
}
public SpeechRecognizer startEar() {
ear = (SpeechRecognizer) startPeer("ear");
isEarActivated = true;
ear.attachSpeechSynthesis((SpeechSynthesis) getPeer("mouth"));
ear.attachTextListener(chatBot);
speakBlocking(get("STARTINGEAR"));
broadcastState();
return ear;
}
public void startedGesture() {
startedGesture("unknown");
}
public void startedGesture(String nameOfGesture) {
if (gestureAlreadyStarted) {
warn("Warning 1 gesture already running, this can break spacetime and a lot of things");
} else {
log.info("Starting gesture : {}", nameOfGesture);
gestureAlreadyStarted = true;
RobotCanMoveRandom = false;
}
}
// FIXME - universal (good) way of handling all exceptions - ie - reporting
// back to the user the problem in a short concise way but have
// expandable detail in appropriate places
public OpenCV startOpenCV() throws Exception {
speakBlocking(get("STARTINGOPENCV"));
opencv = (OpenCV) startPeer("opencv", "OpenCV");
subscribeTo(opencv.getName(), "publishOpenCVData");
isOpenCvActivated = true;
return opencv;
}
public Tracking startEyesTracking() throws Exception {
if (head == null) {
startHead();
}
return startEyesTracking(head.eyeX, head.eyeY);
}
public Tracking startEyesTracking(ServoControl eyeX, ServoControl eyeY) throws Exception {
if (opencv == null) {
startOpenCV();
}
speakBlocking(get("TRACKINGSTARTED"));
eyesTracking = (Tracking) this.startPeer("eyesTracking");
eyesTracking.connect(opencv, eyeX, eyeY);
return eyesTracking;
}
public InMoov2Head startHead() throws Exception {
return startHead(null, null, null, null, null, null, null, null);
}
public InMoov2Head startHead(String port) throws Exception {
return startHead(port, null, null, null, null, null, null, null);
}
// legacy inmoov head exposed pins
public InMoov2Head startHead(String port, String type, Integer headYPin, Integer headXPin, Integer eyeXPin, Integer eyeYPin, Integer jawPin, Integer rollNeckPin) {
// log.warn(InMoov.buildDNA(myKey, serviceClass))
// speakBlocking(get("STARTINGHEAD") + " " + port);
// ??? SHOULD THERE BE REFERENCES AT ALL ??? ... probably not
speakBlocking(get("STARTINGHEAD"));
head = (InMoov2Head) startPeer("head");
isHeadActivated = true;
if (headYPin != null) {
head.setPins(headYPin, headXPin, eyeXPin, eyeYPin, jawPin, rollNeckPin);
}
// lame assumption - port is specified - it must be an Arduino :(
if (port != null) {
try {
speakBlocking(get(port));
Arduino arduino = (Arduino) startPeer("left", "Arduino");
arduino.connect(port);
arduino.attach(head.neck);
arduino.attach(head.rothead);
arduino.attach(head.eyeX);
arduino.attach(head.eyeY);
arduino.attach(head.jaw);
// FIXME rollNeck and eyelids must be connected to right controller
// arduino.attach(head.rollNeck);
// arduino.attach(head.eyelidLeft);
// arduino.attach(head.eyelidRight);
} catch (Exception e) {
error(e);
}
}
speakBlocking(get("STARTINGMOUTHCONTROL"));
mouthControl = (MouthControl) startPeer("mouthControl");
mouthControl.attach(head.jaw);
mouthControl.attach((Attachable) getPeer("mouth"));
mouthControl.setmouth(10, 50);// <-- FIXME - not the right place for
// config !!!
return head;
}
public void startHeadTracking() throws Exception {
if (opencv == null) {
startOpenCV();
}
if (head == null) {
startHead();
}
if (headTracking == null) {
speakBlocking(get("TRACKINGSTARTED"));
headTracking = (Tracking) this.startPeer("headTracking");
headTracking.connect(this.opencv, head.rothead, head.neck);
}
}
public Tracking startHeadTracking(ServoControl rothead, ServoControl neck) throws Exception {
if (opencv == null) {
startOpenCV();
}
if (headTracking == null) {
speakBlocking(get("TRACKINGSTARTED"));
headTracking = (Tracking) this.startPeer("headTracking");
headTracking.connect(this.opencv, rothead, neck);
}
return headTracking;
}
public InMoov2Arm startLeftArm() {
return startLeftArm(null);
}
public InMoov2Arm startLeftArm(String port) {
// log.warn(InMoov.buildDNA(myKey, serviceClass))
// speakBlocking(get("STARTINGHEAD") + " " + port);
// ??? SHOULD THERE BE REFERENCES AT ALL ??? ... probably not
speakBlocking(get("STARTINGLEFTARM"));
leftArm = (InMoov2Arm) startPeer("leftArm");
isLeftArmActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("left", "Arduino");
arduino.connect(port);
arduino.attach(leftArm.bicep);
arduino.attach(leftArm.omoplate);
arduino.attach(leftArm.rotate);
arduino.attach(leftArm.shoulder);
} catch (Exception e) {
error(e);
}
}
return leftArm;
}
public InMoov2Hand startLeftHand() {
return startLeftHand(null);
}
public InMoov2Hand startLeftHand(String port) {
speakBlocking(get("STARTINGLEFTHAND"));
leftHand = (InMoov2Hand) startPeer("leftHand");
isLeftHandActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("left", "Arduino");
arduino.connect(port);
arduino.attach(leftHand.thumb);
arduino.attach(leftHand.index);
arduino.attach(leftHand.majeure);
arduino.attach(leftHand.ringFinger);
arduino.attach(leftHand.pinky);
arduino.attach(leftHand.wrist);
} catch (Exception e) {
error(e);
}
}
return leftHand;
}
// TODO - general objective "might" be to reduce peers down to something
// that does not need a reference - where type can be switched before creation
// and the only thing needed is pubs/subs that are not handled in abstracts
public SpeechSynthesis startMouth() {
mouth = (SpeechSynthesis) startPeer("mouth");
voices = mouth.getVoices();
Voice voice = mouth.getVoice();
if (voice != null) {
voiceSelected = voice.getName();
}
isMouthActivated = true;
if (mute) {
mouth.setMute(true);
}
mouth.attachSpeechRecognizer(ear);
// mouth.attach(htmlFilter); // same as chatBot not needed
// this.attach((Attachable) mouth);
// if (ear != null) ....
broadcastState();
speakBlocking(get("STARTINGMOUTH"));
if (Platform.isVirtual()) {
speakBlocking(get("STARTINGVIRTUALHARD"));
}
speakBlocking(get("WHATISTHISLANGUAGE"));
return mouth;
}
public InMoov2Arm startRightArm() {
return startRightArm(null);
}
public InMoov2Arm startRightArm(String port) {
speakBlocking(get("STARTINGRIGHTARM"));
rightArm = (InMoov2Arm) startPeer("rightArm");
isRightArmActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("right", "Arduino");
arduino.connect(port);
arduino.attach(rightArm.bicep);
arduino.attach(rightArm.omoplate);
arduino.attach(rightArm.rotate);
arduino.attach(rightArm.shoulder);
} catch (Exception e) {
error(e);
}
}
return rightArm;
}
public InMoov2Hand startRightHand() {
return startRightHand(null);
}
public InMoov2Hand startRightHand(String port) {
speakBlocking(get("STARTINGRIGHTHAND"));
rightHand = (InMoov2Hand) startPeer("rightHand");
isRightHandActivated = true;
if (port != null) {
try {
speakBlocking(port);
Arduino arduino = (Arduino) startPeer("right", "Arduino");
arduino.connect(port);
arduino.attach(rightHand.thumb);
arduino.attach(rightHand.index);
arduino.attach(rightHand.majeure);
arduino.attach(rightHand.ringFinger);
arduino.attach(rightHand.pinky);
arduino.attach(rightHand.wrist);
} catch (Exception e) {
error(e);
}
}
return rightHand;
}
public Double getUltraSonicRightDistance() {
if (ultraSonicRight != null) {
return ultraSonicRight.range();
} else {
warn("No UltraSonicRight attached");
return 0.0;
}
}
public Double getUltraSonicLeftDistance() {
if (ultraSonicLeft != null) {
return ultraSonicLeft.range();
} else {
warn("No UltraSonicLeft attached");
return 0.0;
}
}
// public void publishPin(Pin pin) {
// log.info("{} - {}", pin.pin, pin.value);
// if (pin.value == 1) {
// lastPIRActivityTime = System.currentTimeMillis();
/// if its PIR & PIR is active & was sleeping - then wake up !
// if (pin == pin.pin && startSleep != null && pin.value == 1) {
// powerUp();
public void startServos(String leftPort, String rightPort) throws Exception {
startHead(leftPort);
startLeftArm(leftPort);
startLeftHand(leftPort);
startRightArm(rightPort);
startRightHand(rightPort);
startTorso(leftPort);
}
// FIXME .. externalize in a json file included in InMoov2
public Simulator startSimulator() throws Exception {
speakBlocking(get("STARTINGVIRTUAL"));
if (jme != null) {
log.info("start called twice - starting simulator is reentrant");
return jme;
}
jme = (JMonkeyEngine) startPeer("simulator");
isSimulatorActivated = true;
// adding InMoov2 asset path to the jMonkey simulator
String assetPath = getResourceDir() + fs + JMonkeyEngine.class.getSimpleName();
File check = new File(assetPath);
log.info("loading assets from {}", assetPath);
if (!check.exists()) {
log.warn("{} does not exist", assetPath);
}
// disable the frustrating servo events ...
// Servo.eventsEnabledDefault(false);
// jme.loadModels(assetPath); not needed - as InMoov2 unzips the model into
// /resource/JMonkeyEngine/assets
jme.setRotation(getName() + ".head.jaw", "x");
jme.setRotation(getName() + ".head.neck", "x");
jme.setRotation(getName() + ".head.rothead", "y");
jme.setRotation(getName() + ".head.rollNeck", "z");
jme.setRotation(getName() + ".head.eyeY", "x");
jme.setRotation(getName() + ".head.eyeX", "y");
jme.setRotation(getName() + ".torso.topStom", "z");
jme.setRotation(getName() + ".torso.midStom", "y");
jme.setRotation(getName() + ".torso.lowStom", "x");
jme.setRotation(getName() + ".rightArm.bicep", "x");
jme.setRotation(getName() + ".leftArm.bicep", "x");
jme.setRotation(getName() + ".rightArm.shoulder", "x");
jme.setRotation(getName() + ".leftArm.shoulder", "x");
jme.setRotation(getName() + ".rightArm.rotate", "y");
jme.setRotation(getName() + ".leftArm.rotate", "y");
jme.setRotation(getName() + ".rightArm.omoplate", "z");
jme.setRotation(getName() + ".leftArm.omoplate", "z");
jme.setRotation(getName() + ".rightHand.wrist", "y");
jme.setRotation(getName() + ".leftHand.wrist", "y");
jme.setMapper(getName() + ".head.jaw", 0, 180, -5, 80);
jme.setMapper(getName() + ".head.neck", 0, 180, 20, -20);
jme.setMapper(getName() + ".head.rollNeck", 0, 180, 30, -30);
jme.setMapper(getName() + ".head.eyeY", 0, 180, 40, 140);
jme.setMapper(getName() + ".head.eyeX", 0, 180, -10, 70); // HERE there may need to be two eyeX (left and right?)
jme.setMapper(getName() + ".rightArm.bicep", 0, 180, 0, -150);
jme.setMapper(getName() + ".leftArm.bicep", 0, 180, 0, -150);
jme.setMapper(getName() + ".rightArm.shoulder", 0, 180, 30, -150);
jme.setMapper(getName() + ".leftArm.shoulder", 0, 180, 30, -150);
jme.setMapper(getName() + ".rightArm.rotate", 0, 180, 80, -80);
jme.setMapper(getName() + ".leftArm.rotate", 0, 180, -80, 80);
jme.setMapper(getName() + ".rightArm.omoplate", 0, 180, 10, -180);
jme.setMapper(getName() + ".leftArm.omoplate", 0, 180, -10, 180);
jme.setMapper(getName() + ".rightHand.wrist", 0, 180, -20, 60);
jme.setMapper(getName() + ".leftHand.wrist", 0, 180, 20, -60);
jme.setMapper(getName() + ".torso.topStom", 0, 180, -30, 30);
jme.setMapper(getName() + ".torso.midStom", 0, 180, 50, 130);
jme.setMapper(getName() + ".torso.lowStom", 0, 180, -30, 30);
jme.attach(getName() + ".leftHand.thumb", getName() + ".leftHand.thumb1", getName() + ".leftHand.thumb2", getName() + ".leftHand.thumb3");
jme.setRotation(getName() + ".leftHand.thumb1", "y");
jme.setRotation(getName() + ".leftHand.thumb2", "x");
jme.setRotation(getName() + ".leftHand.thumb3", "x");
jme.attach(getName() + ".leftHand.index", getName() + ".leftHand.index", getName() + ".leftHand.index2", getName() + ".leftHand.index3");
jme.setRotation(getName() + ".leftHand.index", "x");
jme.setRotation(getName() + ".leftHand.index2", "x");
jme.setRotation(getName() + ".leftHand.index3", "x");
jme.attach(getName() + ".leftHand.majeure", getName() + ".leftHand.majeure", getName() + ".leftHand.majeure2", getName() + ".leftHand.majeure3");
jme.setRotation(getName() + ".leftHand.majeure", "x");
jme.setRotation(getName() + ".leftHand.majeure2", "x");
jme.setRotation(getName() + ".leftHand.majeure3", "x");
jme.attach(getName() + ".leftHand.ringFinger", getName() + ".leftHand.ringFinger", getName() + ".leftHand.ringFinger2", getName() + ".leftHand.ringFinger3");
jme.setRotation(getName() + ".leftHand.ringFinger", "x");
jme.setRotation(getName() + ".leftHand.ringFinger2", "x");
jme.setRotation(getName() + ".leftHand.ringFinger3", "x");
jme.attach(getName() + ".leftHand.pinky", getName() + ".leftHand.pinky", getName() + ".leftHand.pinky2", getName() + ".leftHand.pinky3");
jme.setRotation(getName() + ".leftHand.pinky", "x");
jme.setRotation(getName() + ".leftHand.pinky2", "x");
jme.setRotation(getName() + ".leftHand.pinky3", "x");
// left hand - mapping of the finger segments
jme.setMapper(getName() + ".leftHand.index", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.index2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.index3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.majeure", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.majeure2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.majeure3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.ringFinger", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.ringFinger2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.ringFinger3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.pinky", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.pinky2", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.pinky3", 0, 180, -110, -179);
jme.setMapper(getName() + ".leftHand.thumb1", 0, 180, -30, -100);
jme.setMapper(getName() + ".leftHand.thumb2", 0, 180, 80, 20);
jme.setMapper(getName() + ".leftHand.thumb3", 0, 180, 80, 20);
// right hand
jme.attach(getName() + ".rightHand.thumb", getName() + ".rightHand.thumb1", getName() + ".rightHand.thumb2", getName() + ".rightHand.thumb3");
jme.setRotation(getName() + ".rightHand.thumb1", "y");
jme.setRotation(getName() + ".rightHand.thumb2", "x");
jme.setRotation(getName() + ".rightHand.thumb3", "x");
jme.attach(getName() + ".rightHand.index", getName() + ".rightHand.index", getName() + ".rightHand.index2", getName() + ".rightHand.index3");
jme.setRotation(getName() + ".rightHand.index", "x");
jme.setRotation(getName() + ".rightHand.index2", "x");
jme.setRotation(getName() + ".rightHand.index3", "x");
jme.attach(getName() + ".rightHand.majeure", getName() + ".rightHand.majeure", getName() + ".rightHand.majeure2", getName() + ".rightHand.majeure3");
jme.setRotation(getName() + ".rightHand.majeure", "x");
jme.setRotation(getName() + ".rightHand.majeure2", "x");
jme.setRotation(getName() + ".rightHand.majeure3", "x");
jme.attach(getName() + ".rightHand.ringFinger", getName() + ".rightHand.ringFinger", getName() + ".rightHand.ringFinger2", getName() + ".rightHand.ringFinger3");
jme.setRotation(getName() + ".rightHand.ringFinger", "x");
jme.setRotation(getName() + ".rightHand.ringFinger2", "x");
jme.setRotation(getName() + ".rightHand.ringFinger3", "x");
jme.attach(getName() + ".rightHand.pinky", getName() + ".rightHand.pinky", getName() + ".rightHand.pinky2", getName() + ".rightHand.pinky3");
jme.setRotation(getName() + ".rightHand.pinky", "x");
jme.setRotation(getName() + ".rightHand.pinky2", "x");
jme.setRotation(getName() + ".rightHand.pinky3", "x");
jme.setMapper(getName() + ".rightHand.index", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.index2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.index3", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.majeure", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.majeure2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.majeure3", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.ringFinger", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.ringFinger2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.ringFinger3", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.pinky", 0, 180, 65, -10);
jme.setMapper(getName() + ".rightHand.pinky2", 0, 180, 70, -10);
jme.setMapper(getName() + ".rightHand.pinky3", 0, 180, 60, -10);
jme.setMapper(getName() + ".rightHand.thumb1", 0, 180, 30, 110);
jme.setMapper(getName() + ".rightHand.thumb2", 0, 180, -100, -150);
jme.setMapper(getName() + ".rightHand.thumb3", 0, 180, -100, -160);
// We set the correct location view
jme.cameraLookAt(getName() + ".torso.lowStom");
// additional experimental mappings
/*
* simulator.attach(getName() + ".leftHand.pinky", getName() +
* ".leftHand.index2"); simulator.attach(getName() + ".leftHand.thumb",
* getName() + ".leftHand.index3"); simulator.setRotation(getName() +
* ".leftHand.index2", "x"); simulator.setRotation(getName() +
* ".leftHand.index3", "x"); simulator.setMapper(getName() +
* ".leftHand.index", 0, 180, -90, -270); simulator.setMapper(getName() +
* ".leftHand.index2", 0, 180, -90, -270); simulator.setMapper(getName() +
* ".leftHand.index3", 0, 180, -90, -270);
*/
return jme;
}
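// Minimal usage sketch (service name and ports are illustrative; assumes an InMoov2
// instance started through the MyRobotLab Runtime):
//   InMoov2 i01 = (InMoov2) Runtime.start("i01", "InMoov2");
//   i01.startServos("/dev/ttyUSB0", "/dev/ttyUSB1"); // left and right controller ports
//   Simulator sim = i01.startSimulator();            // attaches and configures the JMonkeyEngine peer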
public InMoov2Torso startTorso() {
return startTorso(null);
}
public InMoov2Torso startTorso(String port) {
if (torso == null) {
speakBlocking(get("STARTINGTORSO"));
isTorsoActivated = true;
torso = (InMoov2Torso) startPeer("torso");
if (port != null) {
try {
speakBlocking(port);
Arduino left = (Arduino) startPeer("left");
left.connect(port);
left.attach(torso.lowStom);
left.attach(torso.midStom);
left.attach(torso.topStom);
} catch (Exception e) {
error(e);
}
}
}
return torso;
}
/**
* called with only a port - the default trigger and echo pins are used
*
* @param port serial port of the controlling Arduino
* @return the started UltrasonicSensor peer
*/
public UltrasonicSensor startUltraSonicRight(String port) {
return startUltraSonicRight(port, 64, 63);
}
/**
* called explicitly with pin values
*
* @param port serial port of the controlling Arduino
* @param trigPin trigger pin of the sensor
* @param echoPin echo pin of the sensor
* @return the started UltrasonicSensor peer
*/
public UltrasonicSensor startUltraSonicRight(String port, int trigPin, int echoPin) {
if (ultraSonicRight == null) {
speakBlocking(get("STARTINGULTRASONIC"));
isUltraSonicRightActivated = true;
ultraSonicRight = (UltrasonicSensor) startPeer("ultraSonicRight");
if (port != null) {
try {
speakBlocking(port);
Arduino right = (Arduino) startPeer("right");
right.connect(port);
right.attach(ultraSonicRight, trigPin, echoPin);
} catch (Exception e) {
error(e);
}
}
}
return ultraSonicRight;
}
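// Usage sketch for the two overloads above (port and pins are illustrative; i01 is an
// InMoov2 instance as in the earlier sketch):
//   UltrasonicSensor right = i01.startUltraSonicRight("/dev/ttyUSB1");        // defaults: trigPin=64, echoPin=63
//   UltrasonicSensor custom = i01.startUltraSonicRight("/dev/ttyUSB1", 7, 8); // explicit trigger/echo pins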
public UltrasonicSensor startUltraSonicLeft(String port) {
return startUltraSonicLeft(port, 64, 63);
}
public UltrasonicSensor startUltraSonicLeft(String port, int trigPin, int echoPin) {
if (ultraSonicLeft == null) {
speakBlocking(get("STARTINGULTRASONIC"));
isUltraSonicLeftActivated = true;
ultraSonicLeft = (UltrasonicSensor) startPeer("ultraSonicLeft");
if (port != null) {
try {
speakBlocking(port);
Arduino left = (Arduino) startPeer("left");
left.connect(port);
left.attach(ultraSonicLeft, trigPin, echoPin);
} catch (Exception e) {
error(e);
}
}
}
return ultraSonicLeft;
}
public Pir startPir(String port) {
return startPir(port, 23);
}
public Pir startPir(String port, int pin) {
if (pir == null) {
speakBlocking(get("STARTINGPIR"));
isPirActivated = true;
pir = (Pir) startPeer("pir");
if (port != null) {
try {
speakBlocking(port);
Arduino right = (Arduino) startPeer("right");
right.connect(port);
right.attach(pir, pin);
} catch (Exception e) {
error(e);
}
}
}
return pir;
}
public ServoMixer startServoMixer() {
servomixer = (ServoMixer) startPeer("servomixer");
isServoMixerActivated = true;
speakBlocking(get("STARTINGSERVOMIXER"));
broadcastState();
return servomixer;
}
public void stop() {
if (head != null) {
head.stop();
}
if (rightHand != null) {
rightHand.stop();
}
if (leftHand != null) {
leftHand.stop();
}
if (rightArm != null) {
rightArm.stop();
}
if (leftArm != null) {
leftArm.stop();
}
if (torso != null) {
torso.stop();
}
}
public void stopChatBot() {
speakBlocking(get("STOPCHATBOT"));
releasePeer("chatBot");
isChatBotActivated = false;
}
public void stopHead() {
speakBlocking(get("STOPHEAD"));
releasePeer("head");
releasePeer("mouthControl");
isHeadActivated = false;
}
public void stopEar() {
speakBlocking(get("STOPEAR"));
releasePeer("ear");
isEarActivated = false;
broadcastState();
}
public void stopOpenCV() {
speakBlocking(get("STOPOPENCV"));
isOpenCvActivated = false;
releasePeer("opencv");
}
public void stopGesture() {
Python p = (Python) Runtime.getService("python");
p.stop();
}
public void stopLeftArm() {
speakBlocking(get("STOPLEFTARM"));
releasePeer("leftArm");
isLeftArmActivated = false;
}
public void stopLeftHand() {
speakBlocking(get("STOPLEFTHAND"));
releasePeer("leftHand");
isLeftHandActivated = false;
}
public void stopMouth() {
speakBlocking(get("STOPMOUTH"));
releasePeer("mouth");
// TODO - potentially you could set the field to null in releasePeer
mouth = null;
isMouthActivated = false;
}
public void stopRightArm() {
speakBlocking(get("STOPRIGHTARM"));
releasePeer("rightArm");
isRightArmActivated = false;
}
public void stopRightHand() {
speakBlocking(get("STOPRIGHTHAND"));
releasePeer("rightHand");
isRightHandActivated = false;
}
public void stopTorso() {
speakBlocking(get("STOPTORSO"));
releasePeer("torso");
isTorsoActivated = false;
}
public void stopSimulator() {
speakBlocking(get("STOPVIRTUAL"));
releasePeer("simulator");
jme = null;
isSimulatorActivated = false;
}
public void stopUltraSonicRight() {
speakBlocking(get("STOPULTRASONIC"));
releasePeer("ultraSonicRight");
isUltraSonicRightActivated = false;
}
public void stopUltraSonicLeft() {
speakBlocking(get("STOPULTRASONIC"));
releasePeer("ultraSonicLeft");
isUltraSonicLeftActivated = false;
}
public void stopPir() {
speakBlocking(get("STOPPIR"));
releasePeer("pir");
isPirActivated = false;
}
public void stopNeopixelAnimation() {
if (neopixel != null && neopixelArduino != null) {
neopixel.animationStop();
} else {
warn("No Neopixel attached");
}
}
public void stopServoMixer() {
speakBlocking(get("STOPSERVOMIXER"));
releasePeer("servomixer");
isServoMixerActivated = false;
}
public void waitTargetPos() {
if (head != null) {
head.waitTargetPos();
}
if (leftArm != null) {
leftArm.waitTargetPos();
}
if (rightArm != null) {
rightArm.waitTargetPos();
}
if (leftHand != null) {
leftHand.waitTargetPos();
}
if (rightHand != null) {
rightHand.waitTargetPos();
}
if (torso != null) {
torso.waitTargetPos();
}
}
}
|
package org.opennms.netmgt.linkd;
import static org.opennms.core.utils.InetAddressUtils.str;
import java.lang.reflect.Constructor;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.opennms.core.utils.LogUtils;
import org.opennms.netmgt.capsd.snmp.SnmpStore;
import org.opennms.netmgt.capsd.snmp.SnmpTable;
import org.opennms.netmgt.linkd.scheduler.ReadyRunnable;
import org.opennms.netmgt.linkd.scheduler.Scheduler;
import org.opennms.netmgt.linkd.snmp.CdpCacheTable;
import org.opennms.netmgt.linkd.snmp.IpNetToMediaTable;
import org.opennms.netmgt.linkd.snmp.LldpLocTable;
import org.opennms.netmgt.linkd.snmp.LldpLocalGroup;
import org.opennms.netmgt.linkd.snmp.LldpRemTable;
import org.opennms.netmgt.linkd.snmp.OspfGeneralGroup;
import org.opennms.netmgt.linkd.snmp.OspfNbrTable;
import org.opennms.netmgt.linkd.snmp.VlanTable;
import org.opennms.netmgt.linkd.snmp.VlanTableBasic;
import org.opennms.netmgt.model.OnmsVlan;
import org.opennms.netmgt.model.events.EventBuilder;
import org.opennms.netmgt.snmp.CollectionTracker;
import org.opennms.netmgt.snmp.SnmpAgentConfig;
import org.opennms.netmgt.snmp.SnmpUtils;
import org.opennms.netmgt.snmp.SnmpWalker;
/**
* This class is designed to collect the necessary SNMP information from the
* target address and store the collected information. When the class is
* initially constructed no information is collected. The SNMP session
* creation and collection occur in the main run method of the instance. This
* allows the collection to occur in a thread if necessary.
*/
public final class SnmpCollection implements ReadyRunnable {
/**
* The SnmpAgentConfig object used to communicate via SNMP with the remote host.
*/
private final SnmpAgentConfig m_agentConfig;
/**
* The node ID of the system used to collect the SNMP information
*/
private final int m_nodeid;
/**
* The IP address used to collect the SNMP information
*/
private final InetAddress m_address;
/**
* The Class used to collect the VLAN IDs
*/
private String m_vlanClass = null;
/**
* The Class used to collect the ipRoute IDs
*/
private String m_ipRouteClass = null;
/**
* A boolean used to decide if you can collect VLAN Table and Bridge Data
*/
private boolean m_collectVlan = false;
/**
* A boolean used to decide if you can collect Route Table
*/
private boolean m_collectIpRoute = false;
/**
* A boolean used to decide if you can collect STP Base Info
*/
private boolean m_collectStp = false;
/**
* A boolean used to decide if you can collect Bridge Forwarding Table
*/
private boolean m_collectBridge = false;
/**
* A boolean used to decide if you can collect CDP Table
*/
private boolean m_collectCdp = false;
/**
* A boolean used to decide if you can collect LLDP Table
*/
private boolean m_collectLldp = false;
/**
* A boolean used to decide if you can collect OSPF Table
*/
private boolean m_collectOspf = false;
public LldpLocalGroup m_lldpLocalGroup;
public LldpLocTable m_lldpLocTable;
public LldpRemTable m_lldpRemTable;
public OspfGeneralGroup m_ospfGeneralGroup;
public OspfNbrTable m_osNbrTable;
/**
* The ipnettomedia table information
*/
public IpNetToMediaTable m_ipNetToMedia;
/**
* The ipRoute table information
*/
public SnmpTable<SnmpStore> m_ipRoute;
/**
* The CdpCache table information
*/
public CdpCacheTable m_CdpCache;
/**
* The VLAN Table information
*/
public SnmpTable<SnmpStore> m_vlanTable;
/**
* The list of VLAN SNMP collection object
*/
public final Map<OnmsVlan, SnmpVlanCollection> m_snmpVlanCollection = new HashMap<OnmsVlan, SnmpVlanCollection>();
/**
* The scheduler object
*/
private Scheduler m_scheduler;
/**
* The interval, default value 30 minutes
*/
private long poll_interval = 1800000;
/**
* The initial sleep time, default value 10 minutes
*/
private long initial_sleep_time = 600000;
private boolean suspendCollection = false;
private boolean runned = false;
private String packageName;
private final Linkd m_linkd;
/**
* Constructs a new SNMP collector for a node using the passed interface
* as the collection point. The collection does not occur until the
* <code>run</code> method is invoked.
*
* @param linkd the owning Linkd daemon
* @param nodeid the node ID of the system to collect from
* @param config the SnmpAgentConfig used to communicate with the agent
*/
public SnmpCollection(final Linkd linkd, final int nodeid,
final SnmpAgentConfig config) {
m_linkd = linkd;
m_agentConfig = config;
m_nodeid = nodeid;
m_address = m_agentConfig.getEffectiveAddress();
}
boolean hasOspfGeneralGroup() {
return (m_ospfGeneralGroup != null && !m_ospfGeneralGroup.failed() && m_ospfGeneralGroup.getOspfRouterId() != null);
}
OspfGeneralGroup getOspfGeneralGroup() {
return m_ospfGeneralGroup;
}
public boolean hasOspfNbrTable() {
return (m_osNbrTable != null && !m_osNbrTable.failed() && !m_osNbrTable.isEmpty());
}
OspfNbrTable getOspfNbrTable() {
return m_osNbrTable;
}
boolean hasLldpLocalGroup() {
return (m_lldpLocalGroup != null && !m_lldpLocalGroup.failed() && m_lldpLocalGroup.getLldpLocChassisid() != null);
}
LldpLocalGroup getLldpLocalGroup() {
return m_lldpLocalGroup;
}
boolean hasLldpRemTable() {
return (m_lldpRemTable != null && !m_lldpRemTable.failed() && !m_lldpRemTable.isEmpty());
}
LldpRemTable getLldpRemTable() {
return m_lldpRemTable;
}
boolean hasLldpLocTable() {
return (m_lldpLocTable != null && !m_lldpLocTable.failed() && !m_lldpLocTable.isEmpty());
}
LldpLocTable getLldpLocTable() {
return m_lldpLocTable;
}
/**
* Returns true if the IP net to media table was collected.
*/
boolean hasIpNetToMediaTable() {
return (m_ipNetToMedia != null && !m_ipNetToMedia.failed() && !m_ipNetToMedia.isEmpty());
}
/**
* Returns the collected IP net to media table.
*/
IpNetToMediaTable getIpNetToMediaTable() {
return m_ipNetToMedia;
}
/**
* Returns true if the IP route table was collected.
*/
boolean hasRouteTable() {
return (m_ipRoute != null && !m_ipRoute.failed() && !m_ipRoute.isEmpty());
}
/**
* Returns the collected IP route table.
*/
SnmpTable<SnmpStore> getIpRouteTable() {
return m_ipRoute;
}
/**
* Returns true if the CDP Cache table was collected.
*/
boolean hasCdpCacheTable() {
return (m_CdpCache != null && !m_CdpCache.failed() && !m_CdpCache.isEmpty());
}
/**
* Returns the collected CDP cache table.
*/
CdpCacheTable getCdpCacheTable() {
return m_CdpCache;
}
/**
* Returns true if the VLAN table was collected.
*/
boolean hasVlanTable() {
return (m_vlanTable != null && !m_vlanTable.failed() && !m_vlanTable.isEmpty());
}
/**
* Returns the collected VLAN table.
*/
SnmpTable<SnmpStore> getVlanTable() {
return m_vlanTable;
}
Map<OnmsVlan, SnmpVlanCollection> getSnmpVlanCollections() {
return m_snmpVlanCollection;
}
/**
* <p>
* Performs the collection for the targeted IP address. The success or
* failure of the collection should be tested via the <code>failed</code>
* method.
* </p>
* <p>
* No synchronization is performed, so if this is used in a separate
* thread context synchronization must be added.
* </p>
*/
public void run() {
if (suspendCollection) {
LogUtils.debugf(this, "run: address: %s Suspended!",
str(m_address));
} else {
runCollection();
}
}
private class TrackerBuilder {
private final CollectionTracker[] OF_TRACKERS = new CollectionTracker[0];
private String m_msg = null;
private List<CollectionTracker> m_trackerList = new ArrayList<CollectionTracker>();
public void add(String label, CollectionTracker... trackers) {
if (m_msg == null) {
m_msg = label;
} else {
m_msg += "/" + label;
}
m_trackerList.addAll(Arrays.asList(trackers));
}
public String getMessage() { return m_msg; }
public CollectionTracker[] getTrackers() { return m_trackerList.toArray(OF_TRACKERS); }
public boolean isEmpty() { return m_trackerList.isEmpty(); }
}
private void runCollection() {
EventBuilder builder = new EventBuilder(
"uei.opennms.org/internal/linkd/nodeLinkDiscoveryStarted",
"Linkd");
builder.setNodeid(m_nodeid);
builder.setInterface(m_address);
m_linkd.getEventForwarder().sendNow(builder.getEvent());
final String hostAddress = str(m_address);
m_ipNetToMedia = new IpNetToMediaTable(m_address);
m_CdpCache = new CdpCacheTable(m_address);
m_lldpLocalGroup = new LldpLocalGroup(m_address);
m_lldpRemTable = new LldpRemTable(m_address);
m_lldpLocTable = new LldpLocTable(m_address);
m_ospfGeneralGroup = new OspfGeneralGroup(m_address);
m_osNbrTable = new OspfNbrTable(m_address);
if (m_collectIpRoute) {
m_ipRoute = createClass(m_ipRouteClass, m_address);
}
if (m_collectVlan) {
m_vlanTable = createClass(m_vlanClass, m_address);
}
LogUtils.debugf(this, "run: collecting : %s", m_agentConfig);
LogUtils.debugf(this, "run: collectVlan/collectIpRoute/collectStp/m_collectBridge/m_collectCdp/m_collectLldp/m_collectOspf: %b/%b/%b/%b/%b/%b/%b",
m_collectVlan, m_collectIpRoute,
m_collectStp, m_collectBridge,
m_collectCdp,m_collectLldp,m_collectOspf);
SnmpWalker walker = null;
TrackerBuilder bldr = new TrackerBuilder();
if (m_collectBridge) {
bldr.add("ipNetToMediaTable", m_ipNetToMedia);
}
if (m_collectOspf) {
bldr.add("ospfGeneralGroup/ospfNbrTable", m_ospfGeneralGroup, m_osNbrTable);
}
if (m_collectLldp) {
bldr.add("lldpLocalGroup/lldpLocTable/lldpRemTable", m_lldpLocalGroup, m_lldpLocTable, m_lldpRemTable);
}
if (m_collectIpRoute && m_ipRoute != null) {
bldr.add("ipRouteTable", m_ipRoute);
}
if (m_collectCdp) {
bldr.add("cdpCacheTable", m_CdpCache);
}
if (m_collectVlan && m_vlanTable != null) {
bldr.add("vlanTable", m_vlanTable);
}
LogUtils.infof(this, "run: Collecting %s from %s", bldr.getMessage(),
str(m_agentConfig.getEffectiveAddress()));
if (!bldr.isEmpty()) {
walker = SnmpUtils.createWalker(m_agentConfig, bldr.getMessage(), bldr.getTrackers());
walker.start();
try {
walker.waitFor();
} catch (final InterruptedException e) {
LogUtils.errorf(this, e, "run: collection interrupted, exiting");
return;
}
}
// Log any failures
if (m_collectOspf && !this.hasOspfGeneralGroup())
LogUtils.infof(this,
"run: failed to collect ospfGeneralGroup for %s",
hostAddress);
if (m_collectOspf && !this.hasOspfNbrTable())
LogUtils.infof(this,
"run: failed to collect ospfNbrTable for %s",
hostAddress);
if (m_collectLldp && !this.hasLldpLocalGroup())
LogUtils.infof(this,
"run: failed to collect lldpLocalGroup for %s",
hostAddress);
if (m_collectLldp && !this.hasLldpLocTable())
LogUtils.infof(this,
"run: failed to collect lldpLocTable for %s",
hostAddress);
if (m_collectLldp && !this.hasLldpRemTable())
LogUtils.infof(this,
"run: failed to collect lldpRemTable for %s",
hostAddress);
if (m_collectBridge && !this.hasIpNetToMediaTable())
LogUtils.infof(this,
"run: failed to collect ipNetToMediaTable for %s",
hostAddress);
if (m_collectIpRoute && m_ipRoute != null && !this.hasRouteTable())
LogUtils.infof(this,
"run: failed to collect ipRouteTable for %s",
hostAddress);
if (m_collectCdp && !this.hasCdpCacheTable())
LogUtils.infof(this,
"run: failed to collect dpCacheTable for %s",
hostAddress);
if (m_collectVlan && m_vlanTable != null && !this.hasVlanTable())
LogUtils.infof(this, "run: failed to collect VLAN for %s",
hostAddress);
// Schedule SNMP VLAN collection only when a VLAN table was collected.
// If no VLAN table is available, a single collection is run against the default VLAN.
// OnmsVlan vlan = null;
if (this.hasVlanTable()) {
VlanTableBasic basicvlans = (VlanTableBasic) m_vlanTable;
LogUtils.debugf(this,
"run: start snmp collection for %d VLAN entries",
basicvlans.size());
for (OnmsVlan vlan: basicvlans.getVlansForSnmpCollection()) {
String community = m_agentConfig.getReadCommunity();
Integer vlanindex = vlan.getVlanId();
LogUtils.debugf(this,
"run: peer community: %s with VLAN %s",
community, vlanindex);
if (vlanindex != 1)
m_agentConfig.setReadCommunity(community + "@"
+ vlanindex);
runAndSaveSnmpVlanCollection(vlan);
m_agentConfig.setReadCommunity(community);
}
} else {
runAndSaveSnmpVlanCollection(new OnmsVlan(VlanTable.DEFAULT_VLAN_INDEX, VlanTable.DEFAULT_VLAN_NAME, VlanTable.DEFAULT_VLAN_STATUS));
}
// update info in linkd so that it is used correctly by {@link DiscoveryLink}
LogUtils.debugf(this, "run: saving collection into database for %s",
str(m_agentConfig.getEffectiveAddress()));
m_linkd.updateNodeSnmpCollection(this);
// clean memory
// first make everything clean
m_ipNetToMedia = null;
m_ipRoute = null;
m_CdpCache = null;
m_vlanTable = null;
m_lldpLocalGroup = null;
m_lldpLocTable = null;
m_lldpRemTable = null;
m_ospfGeneralGroup = null;
m_osNbrTable = null;
m_snmpVlanCollection.clear();
builder = new EventBuilder(
"uei.opennms.org/internal/linkd/nodeLinkDiscoveryCompleted",
"Linkd");
builder.setNodeid(m_nodeid);
builder.setInterface(m_address);
m_linkd.getEventForwarder().sendNow(builder.getEvent());
// reschedule itself
reschedule();
runned = true;
}
@SuppressWarnings("unchecked")
private SnmpTable<SnmpStore> createClass(String className, InetAddress address) {
SnmpTable<SnmpStore> vlanTable = null;
Class<SnmpTable<SnmpStore>> getter = null;
try {
getter = (Class<SnmpTable<SnmpStore>>) Class.forName(className);
} catch (ClassNotFoundException e) {
LogUtils.warnf(this, e, "run: %s class not found", className);
return null;
}
Class<?>[] classes = { InetAddress.class };
Constructor<SnmpTable<SnmpStore>> constr = null;
try {
constr = getter.getConstructor(classes);
} catch (NoSuchMethodException e) {
LogUtils.warnf(this, e, "run: %s class has no such method",
className);
return null;
} catch (SecurityException s) {
LogUtils.warnf(this, s, "run: %s class security violation",
className);
return null;
}
Object[] argum = { address };
try {
vlanTable = (SnmpTable<SnmpStore>) constr.newInstance(argum);
} catch (Throwable e) {
LogUtils.warnf(this, e,
"run: unable to instantiate class %s",
className);
}
return vlanTable;
}
private void runAndSaveSnmpVlanCollection(OnmsVlan vlan) {
SnmpVlanCollection snmpvlancollection = new SnmpVlanCollection(
m_agentConfig,
m_collectStp,
m_collectBridge);
snmpvlancollection.setPackageName(getPackageName());
snmpvlancollection.run();
if (snmpvlancollection.failed()) {
LogUtils.debugf(this,
"runAndSaveSnmpVlanCollection: no bridge info found for %s",
m_agentConfig);
} else {
LogUtils.debugf(this,
"runAndSaveSnmpVlanCollection: adding bridge info to snmpcollection, VLAN = %s, SnmpVlanCollection = %s",
vlan, snmpvlancollection);
m_snmpVlanCollection.put(vlan, snmpvlancollection);
}
}
/**
* <p>
* getScheduler
* </p>
*
* @return a {@link org.opennms.netmgt.linkd.scheduler.Scheduler} object.
*/
public Scheduler getScheduler() {
return m_scheduler;
}
/**
* <p>
* setScheduler
* </p>
*
* @param scheduler
* a {@link org.opennms.netmgt.linkd.scheduler.Scheduler}
* object.
*/
public void setScheduler(Scheduler scheduler) {
m_scheduler = scheduler;
}
/**
* <p>
* getInitialSleepTime
* </p>
*
* @return Returns the initial_sleep_time.
*/
public long getInitialSleepTime() {
return initial_sleep_time;
}
/**
* <p>
* setInitialSleepTime
* </p>
*
* @param initial_sleep_time
* The initial_sleep_time to set.
*/
public void setInitialSleepTime(long initial_sleep_time) {
this.initial_sleep_time = initial_sleep_time;
}
/**
* <p>
* getPollInterval
* </p>
*
* @return Returns the poll_interval.
*/
public long getPollInterval() {
return poll_interval;
}
/**
* <p>
* setPollInterval
* </p>
*
* @param interval
* a long.
*/
public void setPollInterval(long interval) {
this.poll_interval = interval;
}
/**
* <p>
* schedule
* </p>
*/
public void schedule() {
if (m_scheduler == null)
throw new IllegalStateException(
"Cannot schedule a service whose scheduler is set to null");
m_scheduler.schedule(initial_sleep_time, this);
}
private void reschedule() {
if (m_scheduler == null)
throw new IllegalStateException(
"Cannot schedule a service whose scheduler is set to null");
m_scheduler.schedule(poll_interval, this);
}
/**
* <p>
* isReady
* </p>
*
* @return a boolean.
*/
public boolean isReady() {
return true;
}
/**
* <p>
* isSuspended
* </p>
*
* @return Returns the suspendCollection.
*/
public boolean isSuspended() {
return suspendCollection;
}
/**
* <p>
* suspend
* </p>
*/
public void suspend() {
this.suspendCollection = true;
}
/**
* <p>
* wakeUp
* </p>
*/
public void wakeUp() {
this.suspendCollection = false;
}
/**
* <p>
* unschedule
* </p>
*/
public void unschedule() {
if (m_scheduler == null)
throw new IllegalStateException(
"rescedule: Cannot schedule a service whose scheduler is set to null");
if (runned) {
m_scheduler.unschedule(this, poll_interval);
} else {
m_scheduler.unschedule(this, poll_interval + initial_sleep_time);
}
}
public String getIpRouteClass() {
return m_ipRouteClass;
}
public void setIpRouteClass(String className) {
if (className == null || className.equals(""))
return;
m_ipRouteClass = className;
m_collectIpRoute = true;
}
/**
* <p>
* getVlanClass
* </p>
*
* @return Returns the m_vlanClass.
*/
public String getVlanClass() {
return m_vlanClass;
}
/**
* <p>
* setVlanClass
* </p>
*
* @param className
* a {@link java.lang.String} object.
*/
public void setVlanClass(String className) {
if (className == null || className.equals(""))
return;
m_vlanClass = className;
m_collectVlan = true;
}
/**
* Returns the target address that the collection occurred for.
*
* @return a {@link java.net.InetAddress} object.
*/
public InetAddress getTarget() {
return m_address;
}
/**
* <p>
* collectVlanTable
* </p>
*
* @return Returns the m_collectVlanTable.
*/
public boolean collectVlanTable() {
return m_collectVlan;
}
/**
* <p>
* getReadCommunity
* </p>
*
* @return a {@link java.lang.String} object.
*/
public String getReadCommunity() {
return m_agentConfig.getReadCommunity();
}
/**
* <p>
* getPeer
* </p>
*
* @return a {@link org.opennms.netmgt.snmp.SnmpAgentConfig} object.
*/
public SnmpAgentConfig getPeer() {
return m_agentConfig;
}
/**
* <p>
* getPort
* </p>
*
* @return a int.
*/
public int getPort() {
return m_agentConfig.getPort();
}
/** {@inheritDoc} */
public boolean equals(ReadyRunnable run) {
if (run instanceof SnmpCollection
&& this.getPackageName().equals(run.getPackageName())) {
SnmpCollection c = (SnmpCollection) run;
if (c.getTarget().equals(m_address) && c.getPort() == getPort()
&& c.getReadCommunity().equals(getReadCommunity()))
return true;
}
return false;
}
/**
* <p>
* getInfo
* </p>
*
* @return a {@link java.lang.String} object.
*/
public String getInfo() {
return "ReadyRunnable SnmpCollection" + " ip=" + str(getTarget())
+ " port=" + getPort() + " community=" + getReadCommunity()
+ " package=" + getPackageName()
+ " collectBridge="
+ getCollectBridge() + " collectStpNode="
+ getCollectStp() + " collectCdp="
+ getCollectCdp() + " collectIpRoute="
+ getCollectIpRoute();
}
public boolean getCollectLldpTable() {
return m_collectLldp;
}
public void collectLldp(boolean collectLldpTable) {
m_collectLldp = collectLldpTable;
}
/**
* <p>
* getCollectBridgeForwardingTable
* </p>
*
* @return a boolean.
*/
public boolean getCollectBridge() {
return m_collectBridge;
}
/**
* <p>
* collectBridgeForwardingTable
* </p>
*
* @param bridgeForwardingTable
* a boolean.
*/
public void collectBridge(boolean bridgeForwardingTable) {
m_collectBridge = bridgeForwardingTable;
}
/**
* <p>
* getCollectCdpTable
* </p>
*
* @return a boolean.
*/
public boolean getCollectCdp() {
return m_collectCdp;
}
/**
* <p>
* collectCdpTable
* </p>
*
* @param cdpTable
* a boolean.
*/
public void collectCdp(boolean cdpTable) {
m_collectCdp = cdpTable;
}
/**
* <p>
* getCollectIpRouteTable
* </p>
*
* @return a boolean.
*/
public boolean getCollectIpRoute() {
return m_collectIpRoute;
}
/**
* <p>
* collectIpRouteTable
* </p>
*
* @param ipRouteTable
* a boolean.
*/
public void collectIpRoute(boolean ipRouteTable) {
m_collectIpRoute = ipRouteTable;
}
/**
* <p>
* getCollectStpNode
* </p>
*
* @return a boolean.
*/
public boolean getCollectStp() {
return m_collectStp;
}
/**
* <p>
* collectStpNode
* </p>
*
* @param stpNode
* a boolean.
*/
public void collectStp(boolean stpNode) {
m_collectStp = stpNode;
}
/**
* <p>
* Getter for the field <code>packageName</code>.
* </p>
*
* @return a {@link java.lang.String} object.
*/
public String getPackageName() {
return packageName;
}
/** {@inheritDoc} */
public void setPackageName(String packageName) {
this.packageName = packageName;
}
public void collectOspf(boolean collectOspfTable) {
m_collectOspf = collectOspfTable;
}
public boolean getCollectOspfTable() {
return m_collectOspf;
}
}
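// Minimal scheduling sketch (linkd, nodeId, agentConfig and scheduler are illustrative;
// only methods defined in this class are used):
//   SnmpCollection coll = new SnmpCollection(linkd, nodeId, agentConfig);
//   coll.setPackageName("example-package");
//   coll.collectCdp(true);
//   coll.collectLldp(true);
//   coll.setScheduler(scheduler);
//   coll.schedule(); // first run after initial_sleep_time, then rescheduled every poll_interval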
|
package org.scribe.utils;
import java.util.regex.Pattern;
import org.scribe.model.OAuthConstants;
/**
* Utils for checking preconditions and invariants
*
* @author Pablo Fernandez
*/
public class Preconditions
{
private static final String DEFAULT_MESSAGE = "Received an invalid parameter";
// scheme = alpha *( alpha | digit | "+" | "-" | "." )
private static final Pattern URL_PATTERN = Pattern.compile("^[a-zA-Z][a-zA-Z0-9+.-]*://\\S+");
private Preconditions(){}
public static void checkNotNull(Object object, String errorMsg)
{
check(object != null, errorMsg);
}
public static void checkEmptyString(String string, String errorMsg)
{
check(string != null && !string.trim().equals(""), errorMsg);
}
/**
* Checks that a URL is valid
*
* @param url any string
* @param errorMsg error message
*/
public static void checkValidUrl(String url, String errorMsg)
{
checkEmptyString(url, errorMsg);
check(isUrl(url), errorMsg);
}
/**
* Checks that a URL is a valid OAuth callback
*
* @param url any string
* @param errorMsg error message
*/
public static void checkValidOAuthCallback(String url, String errorMsg)
{
checkEmptyString(url, errorMsg);
if(url.compareToIgnoreCase(OAuthConstants.OUT_OF_BAND) != 0)
{
check(isUrl(url), errorMsg);
}
}
private static boolean isUrl(String url)
{
return URL_PATTERN.matcher(url).matches();
}
private static void check(boolean requirements, String error)
{
String message = (error == null || error.trim().length() <= 0) ? DEFAULT_MESSAGE : error;
if (!requirements)
{
throw new IllegalArgumentException(message);
}
}
}
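// Minimal usage sketch (values are illustrative; a failed check throws IllegalArgumentException):
//   Preconditions.checkNotNull(token, "token can't be null");
//   Preconditions.checkValidUrl("https://example.org/callback", "callback must be a valid URL");
//   Preconditions.checkValidOAuthCallback(OAuthConstants.OUT_OF_BAND, "invalid callback"); // out-of-band callbacks skip URL validation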
|
package prm4j.indexing;
import prm4j.api.BaseEvent;
import prm4j.api.Event;
import prm4j.api.MatchHandler;
/**
* A base monitor holding a {@link BaseMonitorState} which is updated when processing {@link BaseEvent}s.
*
*/
public class StatefulMonitor extends BaseMonitor {
protected BaseMonitorState state;
public StatefulMonitor(BaseMonitorState state) {
this.state = state;
}
@Override
public boolean processEvent(Event event) {
if (state == null) {
return false;
}
state = state.getSuccessor(event.getBaseEvent());
if (state == null) {
return false;
}
MatchHandler matchHandler = state.getMatchHandler();
if (matchHandler != null) {
matchHandler.handleMatch(getBindings());
// when a state is a final state, it is still possible we will reach another final state (or loop on a
// final state), so we don't return false here
}
return true;
}
@Override
public BaseMonitor copy() {
return new StatefulMonitor(state);
}
@Override
public boolean isAcceptingStateReachable() {
// TODO co-enable set calculation or similar
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((state == null) ? 0 : state.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
StatefulMonitor other = (StatefulMonitor) obj;
if (state == null) {
if (other.state != null)
return false;
} else if (!state.equals(other.state))
return false;
return true;
}
}
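// Minimal processing-loop sketch (initialState and trace are illustrative; only the API
// shown in this class is used):
//   StatefulMonitor monitor = new StatefulMonitor(initialState);
//   for (Event event : trace) {
//       if (!monitor.processEvent(event)) {
//           break; // dead state reached, no further match is possible
//       }
//   }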
|
package question1;
import org.apache.commons.collections4.comparators.ComparatorChain;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
public class Q1GroupingComparator extends WritableComparator
{
public static final ComparatorChain<Q1IntermediateKey> comparator;
static {
comparator = new ComparatorChain<>();
comparator.addComparator((a,b) -> Integer.compare(a.getUser1(), b.getUser1()));
comparator.addComparator((a,b) -> Integer.compare(a.getUser2(), b.getUser2()));
}
public Q1GroupingComparator()
{
super(Q1IntermediateKey.class, true);
}
@Override
public int compare(WritableComparable a, WritableComparable b)
{
Q1IntermediateKey key1 = (Q1IntermediateKey) a;
Q1IntermediateKey key2 = (Q1IntermediateKey) b;
return comparator.compare(key1, key2);
}
}
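// Minimal job-wiring sketch (job setup values are illustrative; the grouping comparator is
// registered on the Hadoop Job to control which intermediate keys share a reduce call):
//   Job job = Job.getInstance(conf, "question1");
//   job.setMapOutputKeyClass(Q1IntermediateKey.class);
//   job.setGroupingComparatorClass(Q1GroupingComparator.class);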
|
import java.util.Iterator;
import java.util.List;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import org.gluu.persist.model.base.BaseEntry;
import org.gluu.site.ldap.persistence.annotation.LdapAttribute;
import org.gluu.site.ldap.persistence.annotation.LdapEntry;
import org.gluu.site.ldap.persistence.annotation.LdapJsonObject;
import org.gluu.site.ldap.persistence.annotation.LdapObjectClass;
import org.xdi.model.ProgrammingLanguage;
import org.xdi.model.ScriptLocationType;
import org.xdi.model.SimpleCustomProperty;
import org.xdi.model.SimpleExtendedCustomProperty;
import org.xdi.model.custom.script.CustomScriptType;
import org.xdi.util.StringHelper;
@LdapEntry(sortBy = "level")
@LdapObjectClass(values = { "top", "oxCustomScript" })
public class CustomScript extends BaseEntry {
public static final String LOCATION_TYPE_MODEL_PROPERTY = "location_type";
public static final String LOCATION_PATH_MODEL_PROPERTY = "location_path";
@LdapAttribute(ignoreDuringUpdate = true)
private String inum;
@LdapAttribute(name = "displayName")
@Pattern(regexp = "^[a-zA-Z0-9_]+$", message = "Name should contain only letters, digits and underscores")
@Size(min = 1, max = 30, message = "Length of the Name should be between 1 and 30")
private String name;
@LdapAttribute(name = "description")
private String description;
@LdapAttribute(name = "oxScript")
private String script;
@LdapAttribute(name = "oxScriptType")
private CustomScriptType scriptType;
@LdapAttribute(name = "programmingLanguage")
private ProgrammingLanguage programmingLanguage;
@LdapJsonObject
@LdapAttribute(name = "oxModuleProperty")
private List<SimpleCustomProperty> moduleProperties;
@LdapJsonObject
@LdapAttribute(name = "oxConfigurationProperty")
private List<SimpleExtendedCustomProperty> configurationProperties;
@LdapAttribute(name = "oxLevel")
private int level;
@LdapAttribute(name = "oxRevision")
private long revision;
@LdapAttribute(name = "gluuStatus")
private boolean enabled;
@LdapJsonObject
@LdapAttribute(name = "oxScriptError")
private ScriptError scriptError;
@Transient
private boolean modified;
@Transient
private boolean internal;
public CustomScript() {
}
public CustomScript(String dn, String inum, String name) {
super(dn);
this.inum = inum;
this.name = name;
}
public CustomScript(CustomScript customScript) {
super(customScript.getDn());
this.inum = customScript.inum;
this.name = customScript.name;
this.description = customScript.description;
this.script = customScript.script;
this.scriptType = customScript.scriptType;
this.programmingLanguage = customScript.programmingLanguage;
this.moduleProperties = customScript.moduleProperties;
this.configurationProperties = customScript.configurationProperties;
this.level = customScript.level;
this.revision = customScript.revision;
this.enabled = customScript.enabled;
this.modified = customScript.modified;
this.internal = customScript.internal;
}
public String getInum() {
return inum;
}
public void setInum(String inum) {
this.inum = inum;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getScript() {
return script;
}
public void setScript(String script) {
this.script = script;
}
public CustomScriptType getScriptType() {
return scriptType;
}
public void setScriptType(CustomScriptType scriptType) {
this.scriptType = scriptType;
}
public ProgrammingLanguage getProgrammingLanguage() {
return programmingLanguage;
}
public void setProgrammingLanguage(ProgrammingLanguage programmingLanguage) {
this.programmingLanguage = programmingLanguage;
}
public List<SimpleCustomProperty> getModuleProperties() {
return moduleProperties;
}
public void setModuleProperties(List<SimpleCustomProperty> moduleProperties) {
this.moduleProperties = moduleProperties;
}
public List<SimpleExtendedCustomProperty> getConfigurationProperties() {
return configurationProperties;
}
public void setConfigurationProperties(List<SimpleExtendedCustomProperty> properties) {
this.configurationProperties = properties;
}
public int getLevel() {
return level;
}
public void setLevel(int level) {
this.level = level;
}
public long getRevision() {
return revision;
}
public void setRevision(long revision) {
this.revision = revision;
}
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public boolean isModified() {
return modified;
}
public void setModified(boolean modified) {
this.modified = modified;
}
public boolean isInternal() {
return internal;
}
public void setInternal(boolean internal) {
this.internal = internal;
}
public ScriptLocationType getLocationType() {
SimpleCustomProperty moduleProperty = getModuleProperty(LOCATION_TYPE_MODEL_PROPERTY);
if (moduleProperty == null) {
return null;
}
return ScriptLocationType.getByValue(moduleProperty.getValue2());
}
public void setLocationType(ScriptLocationType locationType) {
if (locationType != null) {
setModuleProperty(LOCATION_TYPE_MODEL_PROPERTY, locationType.getValue());
}
}
public String getLocationPath() {
SimpleCustomProperty moduleProperty = getModuleProperty(LOCATION_PATH_MODEL_PROPERTY);
if (moduleProperty == null) {
return null;
}
return moduleProperty.getValue2();
}
public void setLocationPath(String locationPath) {
setModuleProperty(LOCATION_PATH_MODEL_PROPERTY, locationPath);
}
protected SimpleCustomProperty getModuleProperty(final String modulePropertyName) {
SimpleCustomProperty result = null;
List<SimpleCustomProperty> moduleProperties = getModuleProperties();
if (moduleProperties == null) {
return result;
}
for (SimpleCustomProperty moduleProperty : getModuleProperties()) {
if (StringHelper.equalsIgnoreCase(moduleProperty.getValue1(), modulePropertyName)) {
result = moduleProperty;
break;
}
}
return result;
}
protected void setModuleProperty(String name, String value) {
SimpleCustomProperty moduleProperty = getModuleProperty(name);
if (moduleProperty == null) {
addModuleProperty(name, value);
} else {
moduleProperty.setValue2(value);
}
}
public void addModuleProperty(final String name, final String value) {
SimpleCustomProperty usageTypeModuleProperties = new SimpleCustomProperty(name, value);
getModuleProperties().add(usageTypeModuleProperties);
}
public void removeModuleProperty(final String modulePropertyName) {
List<SimpleCustomProperty> moduleProperties = getModuleProperties();
if (moduleProperties == null) {
return;
}
for (Iterator<SimpleCustomProperty> it = moduleProperties.iterator(); it.hasNext();) {
SimpleCustomProperty moduleProperty = (SimpleCustomProperty) it.next();
if (StringHelper.equalsIgnoreCase(moduleProperty.getValue1(), modulePropertyName)) {
it.remove();
break;
}
}
}
public final ScriptError getScriptError() {
return scriptError;
}
public final void setScriptError(ScriptError scriptError) {
this.scriptError = scriptError;
}
}
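// Minimal usage sketch (names and paths are illustrative; the module-property list is
// initialized first because addModuleProperty appends to it):
//   CustomScript script = new CustomScript();
//   script.setName("sample_script");
//   script.setModuleProperties(new ArrayList<SimpleCustomProperty>());
//   script.setLocationPath("/opt/gluu/python/libs/sample_script.py"); // stored as the "location_path" module property
//   String path = script.getLocationPath();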
|
package sample.java.project;
import java.util.Timer;
import java.util.TimerTask;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.Setter;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
/**
* The main class of the application. It contains the main() method,
* the first method called.
*/
@NoArgsConstructor
@AllArgsConstructor
public class SampleJavaProject extends TimerTask {
/** The delay between printed messages. */
private static final long PRINT_DELAY = 1000L;
/** The name to be printed in the output message. */
@Getter @Setter @NonNull
private String name = "world";
/**
* Print the "Hello, world!" string.
* @param args application input arguments
*/
public static void main(final String[] args) {
/* Set up the command line arguments. */
Options options = new Options();
options.addOption(new Option("name", true, "set the user's name"));
options.addOption(new Option("loop", "print endlessly, hotswap demo"));
options.addOption(new Option("help", "print this help message"));
CommandLine line = null;
try {
line = new GnuParser().parse(options, args);
} catch (org.apache.commons.cli.ParseException e) {
System.err.println(e.getMessage());
System.exit(1);
}
/* Handle each argument. */
SampleJavaProject sjp;
if (line.hasOption("help")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("SampleJavaProject [options]", options);
System.exit(0);
}
if (line.hasOption("name")) {
sjp = new SampleJavaProject(line.getOptionValue("name"));
} else {
sjp = new SampleJavaProject();
}
if (line.hasOption("loop")) {
new Timer().schedule(sjp, 0L, PRINT_DELAY);
} else {
sjp.run();
}
}
@Override
public final void run() {
System.out.printf("Hello, %s!\n", name);
}
}
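// Example invocations (jar name is illustrative; GnuParser accepts the single-dash options defined above):
//   java -cp sample-java-project.jar sample.java.project.SampleJavaProject -name Alice
//   java -cp sample-java-project.jar sample.java.project.SampleJavaProject -name Alice -loop
//   java -cp sample-java-project.jar sample.java.project.SampleJavaProject -help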
|
package ru.r2cloud.loraat;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fazecast.jSerialComm.SerialPort;
import ru.r2cloud.model.DeviceConnectionStatus;
import ru.r2cloud.util.Util;
public class LoraAtClient {
private static final Logger LOG = LoggerFactory.getLogger(LoraAtClient.class);
private static final Pattern COMMA = Pattern.compile(",");
private final String portDescriptor;
private final int timeout;
public LoraAtClient(String portDescriptor, int timeout) {
this.portDescriptor = portDescriptor;
this.timeout = timeout;
}
public LoraAtStatus getStatus() {
LoraAtStatus result = new LoraAtStatus();
List<String> response;
try {
response = sendRequest("AT+CHIP?\r\n");
} catch (LoraAtException e) {
LOG.info(e.getMessage());
result.setDeviceStatus(DeviceConnectionStatus.FAILED);
return result;
}
result.setStatus("IDLE");
result.setDeviceStatus(DeviceConnectionStatus.CONNECTED);
List<ModulationConfig> configs = new ArrayList<>();
for (String cur : response) {
// interested only in lora parameters
if (!cur.startsWith("LORA")) {
continue;
}
String[] parts = COMMA.split(cur);
if (parts.length != 3) {
LOG.error("malformed response from lora: {}", cur);
continue;
}
ModulationConfig loraConfig = new ModulationConfig();
loraConfig.setName(parts[0].toLowerCase());
loraConfig.setMinFrequency(Float.parseFloat(parts[1]));
loraConfig.setMaxFrequency(Float.parseFloat(parts[2]));
configs.add(loraConfig);
}
result.setConfigs(configs);
return result;
}
public LoraAtResponse startObservation(LoraAtObservationRequest loraRequest) {
// make sure lora internal clock is OK
try {
sendRequest("AT+TIME=" + (System.currentTimeMillis() / 1000));
} catch (LoraAtException e) {
return new LoraAtResponse(e.getMessage());
}
LoraAtResponse result = startObservationImpl(loraRequest);
if (result.getStatus().equals(ResponseStatus.RECEIVING)) {
LOG.info("lora-at is already receiving. stopping previous and starting again");
LoraAtResponse response = stopObservation();
if (response.getFrames() != null && response.getFrames().size() > 0) {
for (LoraAtFrame cur : response.getFrames()) {
LOG.info("previous unknown observation got some data. Logging it here for manual recovery: {}", Arrays.toString(cur.getData()));
}
}
result = startObservation(loraRequest);
}
return result;
}
private LoraAtResponse startObservationImpl(LoraAtObservationRequest loraRequest) {
LoraAtResponse result = new LoraAtResponse();
String request = "AT+LORARX=" + loraRequest.getFrequency() + "," + loraRequest.getBw() + "," + loraRequest.getSf() + "," + loraRequest.getCr() + "," + loraRequest.getSyncword() + ",10," + loraRequest.getPreambleLength() + "," + loraRequest.getGain() + "," + loraRequest.getLdro() + "\r\n";
try {
sendRequest(request);
} catch (LoraAtException e) {
String failure = e.getMessage();
if (failure.contains("already receiving")) {
result.setStatus(ResponseStatus.RECEIVING);
return result;
}
return new LoraAtResponse(e.getMessage());
}
result.setStatus(ResponseStatus.SUCCESS);
return result;
}
public LoraAtResponse stopObservation() {
List<String> response;
try {
response = sendRequest("AT+STOPRX\r\n");
} catch (LoraAtException e) {
return new LoraAtResponse(e.getMessage());
}
LoraAtResponse result = new LoraAtResponse();
result.setStatus(ResponseStatus.SUCCESS);
if (!response.isEmpty()) {
List<LoraAtFrame> frames = new ArrayList<>(response.size());
for (String cur : response) {
String[] parts = COMMA.split(cur);
if (parts.length != 5) {
LOG.error("malformed response from lora: {}", cur);
continue;
}
LoraAtFrame curFrame = new LoraAtFrame();
curFrame.setData(Util.hexStringToByteArray(parts[0]));
curFrame.setRssi(Float.parseFloat(parts[1]));
curFrame.setSnr(Float.parseFloat(parts[2]));
curFrame.setFrequencyError(Float.parseFloat(parts[3]));
curFrame.setTimestamp(Long.parseLong(parts[4]));
frames.add(curFrame);
}
result.setFrames(frames);
}
return result;
}
private List<String> sendRequest(String request) throws LoraAtException {
SerialPort port = SerialPort.getCommPort(portDescriptor);
// this is important: configure read timeouts so response reads terminate after the configured timeout
port.setComPortTimeouts(SerialPort.TIMEOUT_READ_SEMI_BLOCKING, timeout, timeout);
// some defaults
port.setBaudRate(115200);
port.setParity(SerialPort.NO_PARITY);
port.setNumDataBits(8);
port.setNumStopBits(SerialPort.ONE_STOP_BIT);
if (!port.openPort()) {
throw new LoraAtException("can't open port: " + portDescriptor);
}
try {
port.getOutputStream().write(request.getBytes(StandardCharsets.ISO_8859_1));
} catch (IOException e) {
if (!port.closePort()) {
LOG.info("can't close the port");
}
throw new LoraAtException("unable to get status: " + e.getMessage());
}
try {
return readResponse(port);
} catch (IOException e) {
throw new LoraAtException("unable to read status: " + e.getMessage());
} finally {
if (!port.closePort()) {
LOG.info("can't close the port");
}
}
}
private static List<String> readResponse(SerialPort port) throws IOException, LoraAtException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(port.getInputStream(), StandardCharsets.ISO_8859_1))) {
String curLine = null;
List<String> result = new ArrayList<>();
StringBuilder errorMessage = new StringBuilder();
while ((curLine = reader.readLine()) != null) {
curLine = curLine.trim();
LOG.info("response: {}", curLine);
// skip logging
if (curLine.isEmpty() || curLine.charAt(0) == '[') {
continue;
}
if (curLine.equalsIgnoreCase("ERROR")) {
throw new LoraAtException(errorMessage.toString());
}
if (curLine.equalsIgnoreCase("OK")) {
return result;
}
// not clear yet if the response is valid or error message
// update both
if (errorMessage.length() > 0) {
errorMessage.append(": ");
}
errorMessage.append(curLine);
result.add(curLine);
}
}
return Collections.emptyList();
}
}
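// Minimal usage sketch (port descriptor and timeout are illustrative):
//   LoraAtClient client = new LoraAtClient("/dev/ttyUSB0", 10000);
//   LoraAtStatus status = client.getStatus();          // sends "AT+CHIP?" and parses the LORA capability lines
//   LoraAtResponse stopped = client.stopObservation(); // sends "AT+STOPRX" and returns any buffered frames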
|
//@author A0144939R
package seedu.task.model.task;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.time.temporal.ChronoUnit;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import org.ocpsoft.prettytime.PrettyTime;
import org.ocpsoft.prettytime.nlp.PrettyTimeParser;
import seedu.task.commons.exceptions.IllegalValueException;
/**
* Represents a Date and Time in the task list
* Guarantees: immutable; is valid as declared in {@link #isValidDateTime(String)}
*/
public class DateTime {
public static final String MESSAGE_DATETIME_CONSTRAINTS = "You have entered an invalid Date/Time format. For a complete list of all acceptable formats, please view our user guide.";
//@@author A0141052Y
private static final String DATE_TIME_DISPLAY_FORMAT = "%s (%s)";
//@@author
public final Optional<Instant> value;
private static PrettyTime p = new PrettyTime();
public DateTime(String dateTime) throws IllegalValueException {
if (dateTime == null || dateTime.equals("")) {
this.value = Optional.empty();
return;
}
if (!isValidDateTime(dateTime)) {
throw new IllegalValueException(MESSAGE_DATETIME_CONSTRAINTS);
}
List<Date> possibleDates = new PrettyTimeParser().parse(dateTime);
this.value = Optional.of(possibleDates.get(0).toInstant().truncatedTo(ChronoUnit.MINUTES));
}
public DateTime(Long epochMilli, boolean isEpoch) {
if (epochMilli == null || !isEpoch) {
this.value = Optional.empty();
return;
}
this.value = Optional.of(Instant.ofEpochMilli(epochMilli).truncatedTo(ChronoUnit.MINUTES));
}
/**
* Returns true if a given string is a valid date/time that can be parsed
*
* @param dateTime the candidate date/time string
*/
public static boolean isValidDateTime(String dateTime) {
List<Date> possibleDates = new PrettyTimeParser().parse(dateTime);
return possibleDates.size() == 1;
}
@Override
public String toString() {
if(value.isPresent()) {
DateTimeFormatter formatter =
DateTimeFormatter.ofLocalizedDateTime( FormatStyle.SHORT )
.withLocale( Locale.UK )
.withZone( ZoneId.systemDefault() );
return formatter.format( value.get() );
} else {
return "";
}
}
public String toPrettyString() {
if(value.isPresent()) {
return p.format(Date.from(this.value.get()));
} else {
return "";
}
}
//@@author A0141052Y
/**
* Gets a display friendly representation of the DateTime
*/
public String toDisplayString() {
if (this.toString().isEmpty()) {
return "";
} else {
return String.format(DATE_TIME_DISPLAY_FORMAT, this.toString(), this.toPrettyString());
}
}
//@@author
public Long getSaveableValue() {
if(value.isPresent()) {
return this.value.get().toEpochMilli();
} else {
return null;
}
}
@Override
public boolean equals(Object other) {
return other == this // short circuit if same object
|| (other instanceof DateTime // instanceof handles nulls
&& this.value.equals(((DateTime) other).value)); // state check
}
@Override
public int hashCode() {
return value.hashCode();
}
/**
* Returns an optional corresponding to the value of the DateTime object
* @return value of DateTime object
*/
public Optional<Instant> getDateTimeValue() {
return this.value;
}
/**
* Checks if there is a DateTime specified
*
* @return true if a DateTime is specified, else false
*/
public boolean isEmpty() {
return !this.value.isPresent();
}
}
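// Minimal usage sketch (input strings are illustrative; parsing is delegated to PrettyTimeParser
// and the constructor throws IllegalValueException for unparseable input):
//   DateTime due = new DateTime("tomorrow 5pm");
//   String shown = due.toDisplayString();                            // "<short date/time> (<relative time>)"
//   DateTime restored = new DateTime(due.getSaveableValue(), true);  // round-trip via epoch millis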
|
package org.prevayler.foundation;
import org.prevayler.foundation.serialization.JavaSerializationStrategy;
/**
* @deprecated Use an appropriate SerializationStrategy instead.
*/
public class DeepCopier {
public static Object deepCopy(Object original, String errorMessage) {
try {
return new JavaSerializationStrategy().deepCopy(original);
} catch (Exception ex) {
ex.printStackTrace();
throw new RuntimeException(errorMessage);
}
}
}
|
package org.openspaces.jpa;
import java.lang.reflect.Method;
import java.rmi.RemoteException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Stack;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import net.jini.core.entry.UnusableEntryException;
import net.jini.core.lease.Lease;
import net.jini.core.transaction.Transaction;
import net.jini.core.transaction.TransactionException;
import net.jini.core.transaction.TransactionFactory;
import org.apache.openjpa.abstractstore.AbstractStoreManager;
import org.apache.openjpa.conf.OpenJPAConfiguration;
import org.apache.openjpa.enhance.PersistenceCapable;
import org.apache.openjpa.kernel.FetchConfiguration;
import org.apache.openjpa.kernel.OpenJPAStateManager;
import org.apache.openjpa.kernel.PCState;
import org.apache.openjpa.kernel.QueryLanguages;
import org.apache.openjpa.kernel.StateManager;
import org.apache.openjpa.kernel.StoreQuery;
import org.apache.openjpa.kernel.exps.ExpressionParser;
import org.apache.openjpa.lib.rop.ResultObjectProvider;
import org.apache.openjpa.meta.ClassMetaData;
import org.apache.openjpa.meta.FieldMetaData;
import org.apache.openjpa.util.ApplicationIds;
import org.openspaces.jpa.openjpa.SpaceConfiguration;
import org.openspaces.jpa.openjpa.StoreManagerQuery;
import org.openspaces.jpa.openjpa.StoreManagerSQLQuery;
import com.gigaspaces.annotation.pojo.SpaceId;
import com.gigaspaces.internal.client.QueryResultTypeInternal;
import com.gigaspaces.internal.client.spaceproxy.ISpaceProxy;
import com.gigaspaces.internal.client.spaceproxy.metadata.ObjectType;
import com.gigaspaces.internal.metadata.ITypeDesc;
import com.gigaspaces.internal.metadata.SpaceTypeInfo;
import com.gigaspaces.internal.metadata.SpaceTypeInfoRepository;
import com.gigaspaces.internal.transport.IEntryPacket;
import com.gigaspaces.internal.transport.ITemplatePacket;
import com.gigaspaces.internal.transport.TemplatePacketFactory;
import com.gigaspaces.internal.transport.TransportPacketType;
import com.j_spaces.core.IJSpace;
import com.j_spaces.core.client.ReadModifiers;
import com.j_spaces.core.client.UpdateModifiers;
import com.j_spaces.jdbc.QueryProcessorFactory;
import com.j_spaces.jdbc.driver.GConnection;
/**
* A GigaSpaces back-end implementation for OpenJPA.
* Responsible for storing and fetching data from GigaSpaces using space API.
*
* @author idan
* @since 8.0
*
*/
@SuppressWarnings("unchecked")
public class StoreManager extends AbstractStoreManager {
private Transaction _transaction = null;
private static final Map<Class<?>, Integer> _classesRelationStatus = new HashMap<Class<?>, Integer>();
private static final HashSet<Class<?>> _processedClasses = new HashSet<Class<?>>();
private GConnection _connection;
private RelationsManager _relationsManager;
public StoreManager() {
_relationsManager = new RelationsManager();
}
@Override
protected void open() {
// Specific gigaspaces initialization (space proxy)
getConfiguration().initialize();
}
@Override
protected Collection<String> getUnsupportedOptions() {
Collection<String> unsupportedOptions = (Collection<String>) super.getUnsupportedOptions();
unsupportedOptions.remove(OpenJPAConfiguration.OPTION_ID_DATASTORE);
unsupportedOptions.remove(OpenJPAConfiguration.OPTION_OPTIMISTIC);
unsupportedOptions.remove(OpenJPAConfiguration.OPTION_INC_FLUSH);
return unsupportedOptions;
}
@Override
public boolean syncVersion(OpenJPAStateManager sm, Object edata) {
try {
// Read object from space
IEntryPacket result = readObjectFromSpace(sm);
if (result == null)
return false;
// Populate fields
loadFields(sm, result, sm.getMetaData().getFields());
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
return true;
}
@Override
public void begin() {
try {
if (_transaction != null)
{
if(getConfiguration().getOptimistic())
return;
throw new TransactionException("Attempted to start a new transaction when there's already an active transaction.");
}
long timeout = (getConfiguration().getLockTimeout() == 0)?
Lease.FOREVER : getConfiguration().getLockTimeout();
_transaction = (TransactionFactory.create(getConfiguration().getTransactionManager(),
timeout)).transaction;
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
@Override
public void commit() {
try {
_transaction.commit(Long.MAX_VALUE);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
_transaction = null;
}
}
@Override
public void rollback() {
try {
_transaction.abort(Long.MAX_VALUE);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
_transaction = null;
}
}
@Override
public void beginOptimistic() {
begin();
}
@Override
public void rollbackOptimistic() {
rollback();
}
@Override
public StoreQuery newQuery(String language) {
ExpressionParser ep = QueryLanguages.parserForLanguage(language);
if(ep != null)
return new StoreManagerQuery(ep, this);
if (QueryLanguages.LANG_SQL.equals(language)) {
return new StoreManagerSQLQuery(this);
}
return null;
}
@Override
protected OpenJPAConfiguration newConfiguration() {
return new SpaceConfiguration();
}
public SpaceConfiguration getConfiguration() {
return (SpaceConfiguration) getContext().getConfiguration();
}
/**
* Returns whether the state manager's managed object exists in space.
*/
public boolean exists(OpenJPAStateManager sm, Object edata) {
ClassMetaData cm = sm.getMetaData();
final Object[] ids = ApplicationIds.toPKValues(sm.getObjectId(), cm);
ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
try {
Object result = proxy.readById(cm.getDescribedType().getName(), ids[0], null, _transaction,
0, ReadModifiers.DIRTY_READ, false, QueryResultTypeInternal.EXTERNAL_ENTRY);
return result != null;
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public boolean isCached(List<Object> oids, BitSet edata) {
return false;
}
@SuppressWarnings({ "rawtypes" })
@Override
public Collection loadAll(Collection sms, PCState state, int load, FetchConfiguration fetch, Object edata) {
return super.loadAll(sms, state, load, fetch, edata);
}
@Override
public boolean initialize(OpenJPAStateManager sm, PCState state,
FetchConfiguration fetchConfiguration, Object edata) {
final ClassMetaData cm = sm.getMetaData();
try {
// If we already have the result and only need to initialize.. (relevant for nested objects & JPQL)
IEntryPacket result =
(edata == null) ? readObjectFromSpace(sm) : (IEntryPacket) edata;
if (result == null)
return false;
// Initialize
sm.initialize(cm.getDescribedType(), state);
loadFields(sm, result, cm.getFields());
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
return true;
}
/**
* Loads the provided IEntryPacket field values to the provided StateManager.
* Note that, for better performance, OneToOne & OneToMany relationships are loaded but
* not initialized (lazy initialization).
*
* @param sm The state manager.
* @param entry The IEntryPacket containing the field values.
* @param fms The fields meta data.
*/
private void loadFields(OpenJPAStateManager sm, IEntryPacket entry, FieldMetaData[] fms) {
int spacePropertyIndex = -1;
for (int i = 0; i < fms.length; i++) {
//ignore version which is not part of the entry packet
if(fms[i].isVersion())
continue;
spacePropertyIndex++;
// Skip primary keys and fields that are already loaded
if (fms[i].isPrimaryKey() || sm.getLoaded().get(fms[i].getIndex()))
continue;
Integer associationType = _classesRelationStatus.get(fms[i].getElement().getDeclaredType());
if (associationType != null)
fms[i].setAssociationType(associationType);
// Handle one-to-one
if (fms[i].getAssociationType() == FieldMetaData.ONE_TO_ONE) {
sm.store(i, entry.getFieldValue(fms[i].getIndex()));
sm.getLoaded().set(fms[i].getIndex(), false);
// Handle one-to-many
} else if (fms[i].getAssociationType() == FieldMetaData.ONE_TO_MANY) {
sm.store(i, entry.getFieldValue(fms[i].getIndex()));
sm.getLoaded().set(fms[i].getIndex(), false);
// Handle embedded property
} else if (fms[i].isEmbeddedPC()) {
loadEmbeddedObject(fms[i], sm, entry.getFieldValue(spacePropertyIndex));
// Otherwise, store the value as is
} else {
sm.store(i, entry.getFieldValue(spacePropertyIndex));
}
}
sm.setVersion(entry.getVersion());
((StateManager) sm).resetClearedState();
}
/**
* Loads a One-to-one relationship object to the provided owner's state manager.
* @param fmd The owner's field meta data.
* @param sm The owner's state manager.
* @param fieldValue The One-to-one field value to load into the owner's state manager.
*/
private void loadOneToOneObject(FieldMetaData fmd, OpenJPAStateManager sm, Object fieldValue) {
if (fieldValue == null) {
sm.storeObject(fmd.getIndex(), null);
} else {
final ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
final IEntryPacket entry = proxy.getDirectProxy().getTypeManager().getEntryPacketFromObject(
fieldValue, ObjectType.POJO, proxy);
final ClassMetaData cmd = fmd.getDeclaredTypeMetaData();
final Object oid = ApplicationIds.fromPKValues(new Object[] { entry.getID() }, cmd);
final BitSet exclude = new BitSet(cmd.getFields().length);
final Object managedObject = getContext().find(oid, null, exclude, entry, 0);
_relationsManager.setOwnerStateManagerForPersistentInstance(managedObject, sm);
sm.storeObject(fmd.getIndex(), managedObject);
}
}
/**
* Loads an embedded object field.
* @param fmd The embedded field meta data.
* @param sm The parent object state manager.
* @param fieldValue The value to load for the embedded field.
*/
private void loadEmbeddedObject(FieldMetaData fmd, OpenJPAStateManager sm, Object fieldValue) {
if (fieldValue == null) {
sm.storeObject(fmd.getIndex(), null);
} else {
final OpenJPAStateManager em = ctx.embed(null, null, sm, fmd);
((StateManager) em).setOwnerStateManager((StateManager) sm);
sm.storeObject(fmd.getIndex(), em.getManagedInstance());
final ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
final IEntryPacket entry = proxy.getDirectProxy().getTypeManager().getEntryPacketFromObject(
fieldValue, ObjectType.POJO, proxy);
loadFields(em, entry, fmd.getDeclaredTypeMetaData().getFields());
}
}
/**
* Loads One-to-many relationship objects to the owner's state manager.
*
* @param fmd The One-to-many field's meta data.
* @param sm The owner's state manager.
* @param fieldValue The value to be stored for the current field.
*/
private void loadOneToManyObjects(FieldMetaData fmd, OpenJPAStateManager sm, Object fieldValue) {
final Object collection = sm.newProxy(fmd.getIndex());
if (fieldValue != null) {
final ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
final ClassMetaData cmd = fmd.getElement().getDeclaredTypeMetaData();
final BitSet exclude = new BitSet(cmd.getFields().length);
// Initialize each of the collection's items
for (Object item : (Collection<?>) fieldValue) {
final IEntryPacket entry = proxy.getDirectProxy().getTypeManager().getEntryPacketFromObject(
item, ObjectType.POJO, proxy);
final Object oid = ApplicationIds.fromPKValues(new Object[] { entry.getID() }, cmd);
// Initialize a state manager for the current item
final Object managedObject = getContext().find(oid, null, exclude, entry, 0);
_relationsManager.setOwnerStateManagerForPersistentInstance(managedObject, sm);
((Collection<Object>) collection).add(managedObject);
}
}
sm.storeObject(fmd.getIndex(), collection);
}
/**
* Reads an IEntryPacket implementation from space according to the provided StateManager.
* @param sm The state manager.
* @return The IEntryPacket implementation for the provided StateManager.
*/
private IEntryPacket readObjectFromSpace(OpenJPAStateManager sm)
throws UnusableEntryException, TransactionException, InterruptedException, RemoteException {
IEntryPacket result;
final ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
final ITypeDesc typeDescriptor = proxy.getDirectProxy().getTypeManager().getTypeDescByName(
sm.getMetaData().getDescribedType().getName());
final Object[] ids = ApplicationIds.toPKValues(sm.getObjectId(), sm.getMetaData());
final int readModifier = (_transaction != null)? getConfiguration().getReadModifier()
: ReadModifiers.REPEATABLE_READ;
ITemplatePacket template;
if (typeDescriptor.isAutoGenerateId())
template = TemplatePacketFactory.createUidPacket((String) ids[0], null, 0, TransportPacketType.ENTRY_PACKET);
else
template = TemplatePacketFactory.createIdPacket(ids[0], null, 0, typeDescriptor, TransportPacketType.ENTRY_PACKET);
result = (IEntryPacket) proxy.read(template, _transaction, 0, readModifier);
return result;
}
/**
* This method loads specific fields from the data store for updating them.
* Note: The state manager's fields are cleared.
*/
@Override
public boolean load(OpenJPAStateManager sm, BitSet fields, FetchConfiguration fetch, int lockLevel, Object context) {
final ClassMetaData cm = (ClassMetaData) sm.getMetaData();
final StateManager stateManager = (StateManager) sm;
final SpaceTypeInfo typeInfo = SpaceTypeInfoRepository.getTypeInfo(cm.getDescribedType());
final StateManager gsm = (StateManager) sm;
try {
if (!gsm.isCleared()) {
loadSpecificFields(sm, fields, typeInfo);
return true;
} else {
// If this is a relationship owner, read object from space and lazy initialize its fields
// And initialize the fields specified in the provided 'fields' BitSet
if (stateManager.getOwnerStateManager() == null) {
final IEntryPacket entry = readObjectFromSpace(sm);
if (entry == null)
return false;
loadFields(sm, entry, cm.getFields());
loadSpecificFields(sm, fields, typeInfo);
return true;
// If this is an owned instance (Owner->Pet), read owner from space and find
// the instance according to its Id and load & initialize its fields.
} else {
// Save route to owner state manager
Stack<StateManager> sms = new Stack<StateManager>();
StateManager stateManagerToRead = stateManager;
while (stateManagerToRead.getOwnerStateManager() != null) {
sms.push(stateManagerToRead);
stateManagerToRead = stateManagerToRead.getOwnerStateManager();
}
final IEntryPacket entry = readObjectFromSpace(stateManagerToRead);
if (entry == null)
return false;
// Find the desired instance
final IEntryPacket foundEntryPacket = _relationsManager.findObjectInEntry(stateManagerToRead, entry, sms);
if (foundEntryPacket == null)
return false;
loadFields(sm, foundEntryPacket, sm.getMetaData().getFields());
loadSpecificFields(sm, fields, typeInfo);
}
return true;
}
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* Load the fields specified in the provided 'fields' BitSet.
*
* @param sm The state manager to load fields for.
* @param fields The fields to load.
* @param typeInfo {@link SpaceTypeInfo} used for reflection.
*/
private void loadSpecificFields(OpenJPAStateManager sm, BitSet fields, final SpaceTypeInfo typeInfo) {
for (FieldMetaData fmd : sm.getMetaData().getFields()) {
if (fields.get(fmd.getIndex())) {
Object instance = sm.getManagedInstance();
// Remove state manager before using reflection
((PersistenceCapable) instance).pcReplaceStateManager(null);
Object value = typeInfo.getProperty(fmd.getName()).getValue(sm.getManagedInstance());
((PersistenceCapable) instance).pcReplaceStateManager(sm);
if (fmd.getAssociationType() == FieldMetaData.ONE_TO_MANY) {
loadOneToManyObjects(fmd, sm, value);
} else if (fmd.getAssociationType() == FieldMetaData.ONE_TO_ONE) {
loadOneToOneObject(fmd, sm, value);
} else if (fmd.isEmbeddedPC()) {
loadEmbeddedObject(fmd, sm, value);
}
}
}
}
@Override
public ResultObjectProvider executeExtent(ClassMetaData classmetadata, boolean flag,
FetchConfiguration fetchconfiguration) {
return null;
}
/**
* Flushes changes to GigaSpaces.
* Returns a list of exceptions that occurred.
*/
@SuppressWarnings({ "rawtypes" })
@Override
protected Collection flush(Collection pNew, Collection pNewUpdated, Collection pNewFlushedDeleted,
Collection pDirty, Collection pDeleted) {
IJSpace space = getConfiguration().getSpace();
ArrayList<Exception> exceptions = new ArrayList<Exception>();
if (_relationsManager.shouldInitializeClassesRelationStatus())
_relationsManager.initializeClassesRelationStatus();
if (pNew.size() > 0)
handleNewObjects(pNew, space);
if (pDirty.size() > 0)
handleUpdatedObjects(pDirty, exceptions, space);
if (pDeleted.size() > 0)
handleDeletedObjects(pDeleted, exceptions, space);
return exceptions;
}
/**
* Clears the removed objects from the space.
*/
private void handleDeletedObjects(Collection<OpenJPAStateManager> sms, ArrayList<Exception> exceptions, IJSpace space) {
for (OpenJPAStateManager sm : sms) {
ClassMetaData cm = sm.getMetaData();
if (_classesRelationStatus.containsKey(cm.getDescribedType()))
continue;
try {
// Remove object from space
final Object[] ids = ApplicationIds.toPKValues(sm.getObjectId(), cm);
final ISpaceProxy proxy = (ISpaceProxy) space;
final ITypeDesc typeDescriptor = proxy.getDirectProxy().getTypeManager().getTypeDescByName(sm.getMetaData().getDescribedType().getName());
final Object routing = sm.fetch(typeDescriptor.getRoutingPropertyId());
ITemplatePacket template;
if (typeDescriptor.isAutoGenerateId())
template = TemplatePacketFactory.createUidPacket((String) ids[0], routing, 0, TransportPacketType.ENTRY_PACKET);
else
template = TemplatePacketFactory.createIdPacket(ids[0], routing, 0, typeDescriptor, TransportPacketType.ENTRY_PACKET);
int result = proxy.clear(template, _transaction, 0);
if (result != 1)
throw new Exception("Unable to clear object from space.");
} catch (Exception e) {
exceptions.add(e);
}
}
}
/**
* Partially updates dirty fields to the space.
*/
private void handleUpdatedObjects(Collection<OpenJPAStateManager> sms, ArrayList<Exception> exceptions, IJSpace space) {
// Generate a template for each state manager and use partial update for updating..
HashSet<OpenJPAStateManager> stateManagersToRestore = new HashSet<OpenJPAStateManager>();
for (OpenJPAStateManager sm : sms) {
final ClassMetaData cm = sm.getMetaData();
try {
// Find relationship owner and flush it to space
if (_classesRelationStatus.containsKey(cm.getDescribedType())) {
final FieldOwnerInformation ownerInformation = _relationsManager.getStateManagerToUpdate((StateManager) sm);
final IEntryPacket entry = getEntryPacketFromStateManager(space, ownerInformation.getStateManager());
// Write changes to the space
for (FieldMetaData fmd : cm.getFields()) {
_relationsManager.initializeOwnerReferencesForField((StateManager) sm, fmd);
}
_relationsManager.removeOwnedEntitiesStateManagers(stateManagersToRestore, ownerInformation.getStateManager());
if (ownerInformation.getStateManager().getVersion() != null)
entry.setVersion((Integer) ownerInformation.getStateManager().getVersion());
final FieldMetaData[] fmds = ownerInformation.getStateManager().getMetaData().getFields();
int spacePropertyIndex = -1;
for (int i = 0; i < fmds.length; i++) {
//ignore version which is not part of the entry packet
if(fmds[i].isVersion())
continue;
spacePropertyIndex++;
if (i != ownerInformation.getMetaData().getIndex() && !fmds[i].isPrimaryKey()) {
entry.setFieldValue(spacePropertyIndex, null);
}
}
space.write(entry, _transaction, Lease.FOREVER, 0, UpdateModifiers.PARTIAL_UPDATE);
//update the version
ownerInformation.getStateManager().setVersion(entry.getVersion());
} else {
// Create an entry packet from the updated POJO and set all the fields
// but the updated & primary key to null.
final IEntryPacket entry = getEntryPacketFromStateManager(space, sm);
final FieldMetaData[] fmds = cm.getFields();
int spacePropertyIndex = -1;
for (int i = 0; i < fmds.length; i++) {
//ignore version which is not part of the entry packet
if(fmds[i].isVersion())
continue;
spacePropertyIndex++;
if (!sm.getDirty().get(i) && !fmds[i].isPrimaryKey()) {
entry.setFieldValue(spacePropertyIndex, null);
} else {
_relationsManager.initializeOwnerReferencesForField((StateManager) sm, fmds[i]);
}
}
if(sm.getVersion() != null)
entry.setVersion((Integer) sm.getVersion());
// Write changes to the space
space.write(entry, _transaction, Lease.FOREVER, 0, UpdateModifiers.PARTIAL_UPDATE);
//update the version
sm.setVersion(entry.getVersion());
}
} catch (Exception e) {
exceptions.add(e);
} finally {
_relationsManager.restoreRemovedStateManagers(stateManagersToRestore);
}
}
}
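// Partial-update sketch (illustrative; field names are hypothetical): for an entity with properties
// {id, name, age} where only 'name' is dirty, the entry packet written above carries
// {id, "newName", null}. With UpdateModifiers.PARTIAL_UPDATE the space ignores null-valued
// properties, so 'age' keeps its stored value instead of being overwritten with null.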
/**
* Gets an {@link IEntryPacket} instance from the provided {@link OpenJPAStateManager}'s managed instance.
* The conversion is performed after removing the state manager from the managed object, because its fields
* are accessed using reflection and this might cause problems due to OpenJPA's entity enhancement.
*
* @param space Space instance the conversion will be called for (using its type manager).
* @param sm The state manager whose managed object will be converted.
* @return An {@link IEntryPacket} instance representing the managed object.
*/
private IEntryPacket getEntryPacketFromStateManager(IJSpace space, OpenJPAStateManager sm) {
try {
final ISpaceProxy proxy = (ISpaceProxy) space;
sm.getPersistenceCapable().pcReplaceStateManager(null);
IEntryPacket entry = proxy.getDirectProxy().getTypeManager().getEntryPacketFromObject(
sm.getManagedInstance(), ObjectType.POJO, proxy);
return entry;
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
sm.getPersistenceCapable().pcReplaceStateManager(sm);
}
}
/**
* Converts the provided {@link Entity} to an {@link IEntryPacket} instance.
* The entity's state manager (if it exists) is removed before the conversion and restored afterwards,
* due to conflicts between reflection and OpenJPA's enhancement.
*
* @param entity The {@link Entity} to convert.
* @return An {@link IEntryPacket} instance representing the provided entity.
*/
private IEntryPacket getEntryPacketFromEntity(Object entity) {
PersistenceCapable pc = (PersistenceCapable) entity;
final ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
IEntryPacket entry;
if (pc.pcGetStateManager() != null) {
StateManager sm = (StateManager) pc.pcGetStateManager();
try {
pc.pcReplaceStateManager(null);
entry = proxy.getDirectProxy().getTypeManager().getEntryPacketFromObject(
entity, ObjectType.POJO, proxy);
} finally {
pc.pcReplaceStateManager(sm);
}
} else {
entry = proxy.getDirectProxy().getTypeManager().getEntryPacketFromObject(
entity, ObjectType.POJO, proxy);
}
return entry;
}
/**
* Writes new persistent objects to the space.
*/
private void handleNewObjects(Collection<OpenJPAStateManager> sms, IJSpace space) {
final HashMap<Class<?>, ArrayList<Object>> objectsToWriteByType = new HashMap<Class<?>, ArrayList<Object>>();
final ArrayList<OpenJPAStateManager> stateManagersToRestore = new ArrayList<OpenJPAStateManager>();
Class<?> previousType = null;
ArrayList<Object> currentList = null;
for (OpenJPAStateManager sm : sms) {
// If the current object is in a relation skip it
if (_classesRelationStatus.containsKey(sm.getMetaData().getDescribedType())) {
continue;
}
// If the object has managed instances in its fields we need to remove the state manager from these instances
// since they are serialized when written to space and can cause a deadlock when written
// by writeMultiple.
_relationsManager.removeOwnedEntitiesStateManagers(stateManagersToRestore, sm);
// In order to use writeMultiple we need to gather each type's instances to its own list
if (!sm.getMetaData().getDescribedType().equals(previousType)) {
currentList = objectsToWriteByType.get(sm.getMetaData().getDescribedType());
if (currentList == null) {
currentList = new ArrayList<Object>();
objectsToWriteByType.put(sm.getMetaData().getDescribedType(), currentList);
}
previousType = sm.getMetaData().getDescribedType();
}
// Each persisted instance should have its state manager removed
// before being written to space since GigaSpaces reflection conflicts with
// OpenJPA's class monitoring.
sm.getPersistenceCapable().pcReplaceStateManager(null);
stateManagersToRestore.add(sm);
currentList.add(sm.getManagedInstance());
}
// Write objects to space in batches by type
try {
for (Map.Entry<Class<?>, ArrayList<Object>> entry : objectsToWriteByType.entrySet()) {
space.writeMultiple(entry.getValue().toArray(), _transaction, Lease.FOREVER, UpdateModifiers.WRITE_ONLY);
}
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
// Restore the removed state managers.
_relationsManager.restoreRemovedStateManagers(stateManagersToRestore);
}
}
/**
* Validates the provided class' annotations.
* Currently the only validation performed is for @Id & @SpaceId annotations
* that must be declared on the same getter.
*/
private void validateClassAnnotations(Class<?> type) {
// Validation is only relevant for Entities
if (type.getAnnotation(Entity.class) == null)
return;
for (Method getter : type.getMethods()) {
if (!getter.getName().startsWith("get"))
continue;
SpaceId spaceId = getter.getAnnotation(SpaceId.class);
boolean hasJpaId = getter.getAnnotation(Id.class) != null || getter.getAnnotation(EmbeddedId.class) != null;
if (spaceId != null || hasJpaId) {
if (!hasJpaId || spaceId == null)
throw new IllegalArgumentException("SpaceId and Id annotations must both be declared on the same property in JPA entities in type: " + type.getName());
if (spaceId.autoGenerate()) {
GeneratedValue generatedValue = getter.getAnnotation(GeneratedValue.class);
if (generatedValue == null)
throw new IllegalArgumentException("SpaceId with autoGenerate=true annotated property should also have a JPA GeneratedValue annotation.");
}
break;
}
}
}
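// Illustrative (hypothetical) entity that passes the validation above: @Id/@EmbeddedId and
// @SpaceId are declared on the same getter, and autoGenerate=true is paired with @GeneratedValue.
//
// @Entity
// public class Author {
// private String id;
// @Id
// @SpaceId(autoGenerate = true)
// @GeneratedValue
// public String getId() { return id; }
// public void setId(String id) { this.id = id; }
// }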
/**
* Initializes an ExternalEntry result as a state-managed POJO.
* (used by JPQL's query executor)
*/
public Object loadObject(ClassMetaData classMetaData, IEntryPacket entry) {
// Get object id
Object[] ids = new Object[1];
ids[0] = entry.getID();
Object objectId = ApplicationIds.fromPKValues(ids, classMetaData);
return getContext().find(objectId, null, null, entry, 0);
}
/**
* Gets the current active transaction.
*/
public Transaction getCurrentTransaction() {
return _transaction;
}
/**
* Gets a JDBC connection using the configuration's space instance.
* Each store manager has its own connection, for multithreading reasons.
*/
public GConnection getJdbcConnection() throws SQLException {
if (_connection == null) {
Properties connectionProperties = new Properties();
connectionProperties.put(
QueryProcessorFactory.COM_GIGASPACES_EMBEDDED_QP_ENABLED, "true");
_connection = GConnection.getInstance(getConfiguration().getSpace(), connectionProperties);
if (_connection.getAutoCommit())
_connection.setAutoCommit(false);
}
return _connection;
}
/**
* Gets the class relation status (one-to-one etc..) for the provided type.
*/
public synchronized int getClassRelationStatus(Class<?> type) {
// In case relations status was not initialized already..
if (_relationsManager.shouldInitializeClassesRelationStatus())
_relationsManager.initializeClassesRelationStatus();
// Get relation status..
Integer relationStatus = _classesRelationStatus.get(type);
return (relationStatus == null) ? FieldMetaData.MANAGE_NONE : relationStatus;
}
/**
* Keeps information for a field's owner.
*/
private static class FieldOwnerInformation {
private StateManager stateManager;
private FieldMetaData metaData;
public FieldOwnerInformation() {
}
public FieldOwnerInformation(StateManager stateManager, FieldMetaData metaData) {
this.stateManager = stateManager;
this.metaData = metaData;
}
public StateManager getStateManager() {
return stateManager;
}
public void setStateManager(StateManager stateManager) {
this.stateManager = stateManager;
}
public FieldMetaData getMetaData() {
return metaData;
}
public void setMetaData(FieldMetaData metaData) {
this.metaData = metaData;
}
}
/**
* StoreManager's relationships manager.
* Provides methods for handling relationships in GigaSpaces' owned-relationships model.
*/
private class RelationsManager {
public RelationsManager() {
}
/**
* Removes owned entities' state managers (before writing them to space, due to a serialization deadlock problem).
* The removed state managers are kept in the provided collection for restoring them later.
*
* @param stateManagersToRestore The collection for storing the removed state managers.
* @param sm The owning entity's state manager.
*/
public void removeOwnedEntitiesStateManagers(Collection<OpenJPAStateManager> stateManagersToRestore,
OpenJPAStateManager sm) {
// Remove the state manager from objects in a relation so that their serialization is not
// handled by OpenJPA, which can cause a deadlock when writing to space.
// The deadlock occurs because, when serializing a monitored instance, OpenJPA takes over
// and attempts to access an already locked layer in OpenJPA's hierarchy.
for (FieldMetaData fmd : sm.getMetaData().getFields()) {
if (!sm.getLoaded().get(fmd.getIndex()))
continue;
if (fmd.isEmbeddedPC()) {
Object value = sm.fetch(fmd.getDeclaredIndex());
if (value != null) {
PersistenceCapable pc = (PersistenceCapable) value;
OpenJPAStateManager stateManager = (OpenJPAStateManager) pc.pcGetStateManager();
removeOwnedEntitiesStateManagers(stateManagersToRestore, stateManager);
pc.pcReplaceStateManager(null);
stateManagersToRestore.add(stateManager);
}
} else if (fmd.getAssociationType() == FieldMetaData.ONE_TO_MANY) {
Collection<?> collection = (Collection<?>) sm.fetch(fmd.getIndex());
if (collection != null) {
for (Object item : collection) {
// Set relationship owner
setOwnerStateManagerForPersistentInstance(item, sm);
PersistenceCapable pc = (PersistenceCapable) item;
OpenJPAStateManager stateManager = (OpenJPAStateManager) pc.pcGetStateManager();
removeOwnedEntitiesStateManagers(stateManagersToRestore, stateManager);
stateManagersToRestore.add(stateManager);
pc.pcReplaceStateManager(null);
}
}
} else if (fmd.getAssociationType() == FieldMetaData.ONE_TO_ONE) {
Object value = sm.fetch(fmd.getIndex());
if (value != null) {
setOwnerStateManagerForPersistentInstance(value, sm);
PersistenceCapable pc = (PersistenceCapable) value;
OpenJPAStateManager stateManager = (OpenJPAStateManager) pc.pcGetStateManager();
removeOwnedEntitiesStateManagers(stateManagersToRestore, stateManager);
stateManagersToRestore.add(stateManager);
pc.pcReplaceStateManager(null);
}
}
}
}
/**
* Sets the provided state manager as the managed object's owner.
* @param managedObject The managed object to set the owner for.
* @param sm The owner's state manager.
*/
public void setOwnerStateManagerForPersistentInstance(Object managedObject, OpenJPAStateManager sm) {
StateManager stateManager = (StateManager)((PersistenceCapable) managedObject).pcGetStateManager();
if (stateManager == null)
throw new IllegalStateException("Attempted to set an Owner back-reference for an unmanaged instance: "
+ managedObject.toString() + " of type: " + managedObject.getClass().getName());
stateManager.setOwnerStateManager((StateManager) sm);
}
/**
* Sets the provided state manager as the owner for the provided field value.
* @param sm The owner's state manager.
* @param fmd The field whose value the owner will be set for.
*/
public void initializeOwnerReferencesForField(StateManager sm, FieldMetaData fmd) {
if (fmd.getAssociationType() == FieldMetaData.ONE_TO_MANY) {
Collection<?> collection = (Collection<?>) sm.fetch(fmd.getIndex());
if (collection != null) {
for (Object item : collection) {
if (item != null) {
_relationsManager.setOwnerStateManagerForPersistentInstance(item, sm);
}
}
}
} else if (fmd.getAssociationType() == FieldMetaData.ONE_TO_ONE || fmd.isEmbeddedPC()) {
Object value = sm.fetch(fmd.getIndex());
if (value != null) {
_relationsManager.setOwnerStateManagerForPersistentInstance(value, sm);
}
}
}
/**
* Attempts to find the super-owner of the provided state manager in a relationship to update.
* Throws an exception if such a state manager doesn't exist.
* @param sm The owned relationship state manager.
* @return Owner information (the owner's state manager and the owning field's meta data) for the relationship.
*/
public FieldOwnerInformation getStateManagerToUpdate(StateManager sm) {
final Integer associationType = _classesRelationStatus.get(sm.getMetaData().getDescribedType());
if (associationType == null)
throw new IllegalStateException("Error updating: " + sm.getMetaData().getClass().getName()
+ " with id: " + sm.getId());
final StateManager ownerStateManager = sm.getOwnerStateManager();
if (ownerStateManager != null) {
if (associationType == FieldMetaData.ONE_TO_MANY) {
for (FieldMetaData fmd : ownerStateManager.getMetaData().getFields()) {
if (fmd.getElement().getDeclaredType().equals(sm.getMetaData().getDescribedType())) {
Collection<?> collection = (Collection<?>) ownerStateManager.fetch(fmd.getIndex());
if (collection == null || !collection.contains(sm.getManagedInstance()))
break;
if (ownerStateManager.getOwnerStateManager() != null)
return getStateManagerToUpdate(ownerStateManager);
return new FieldOwnerInformation(ownerStateManager, fmd);
}
}
} else if (associationType == FieldMetaData.ONE_TO_ONE) {
for (FieldMetaData fmd : ownerStateManager.getMetaData().getFields()) {
if (fmd.getDeclaredType().equals(sm.getMetaData().getDescribedType())) {
Object value = ownerStateManager.fetch(fmd.getIndex());
if (value == null || !value.equals(sm.getManagedInstance()))
break;
if (ownerStateManager.getOwnerStateManager() != null)
return getStateManagerToUpdate(ownerStateManager);
return new FieldOwnerInformation(ownerStateManager, fmd);
}
}
}
}
throw new IllegalStateException("Attempted to update an owned entity: "
+ sm.getMetaData().getClass().getName() + " with Id: " + sm.getId() + " which has no owner.");
}
/**
* Restores state managers for the provided collection of state managers.
* @param stateManagersToRestore State managers collection to restore.
*/
public void restoreRemovedStateManagers(Collection<OpenJPAStateManager> stateManagersToRestore) {
for (OpenJPAStateManager sm : stateManagersToRestore) {
sm.getPersistenceCapable().pcReplaceStateManager(sm);
}
}
/**
* Collects relationship information from OpenJPA's currently listed class meta data.
* This method is called on every flush() and checks whether there are new classes to initialize.
*/
public synchronized void initializeClassesRelationStatus() {
if (!shouldInitializeClassesRelationStatus())
return;
// Collect information regarding relationships.
// Eventually classes which are in a relation should not be saved to the space
// since we only support owned relationships and these instances will be saved as nested instances
// of their owning instance.
ClassMetaData[] cms = getConfiguration().getMetaDataRepositoryInstance().getMetaDatas();
for (ClassMetaData cm : cms) {
// Process class
if (!_processedClasses.contains(cm.getDescribedType())) {
for (FieldMetaData fmd : cm.getFields()) {
if (fmd.getAssociationType() == FieldMetaData.ONE_TO_ONE) {
if (!_classesRelationStatus.containsKey(fmd.getDeclaredType())) {
_classesRelationStatus.put(fmd.getDeclaredType(), FieldMetaData.ONE_TO_ONE);
}
} else if (fmd.getAssociationType() == FieldMetaData.ONE_TO_MANY) {
if (!_classesRelationStatus.containsKey(fmd.getElement().getDeclaredType())) {
_classesRelationStatus.put(fmd.getElement().getDeclaredType(), FieldMetaData.ONE_TO_MANY);
}
} else if (fmd.getAssociationType() == FieldMetaData.MANY_TO_MANY) {
throw new IllegalArgumentException("Many-to-many is not supported.");
}
}
validateClassAnnotations(cm.getDescribedType());
_processedClasses.add(cm.getDescribedType());
}
}
}
/**
* Returns whether the classes' relation status is incomplete and should be synchronized.
* OpenJPA creates class meta data only after an entity is persisted for the first time.
*/
public boolean shouldInitializeClassesRelationStatus() {
return getConfiguration().getMetaDataRepositoryInstance().getMetaDatas().length != _processedClasses.size();
}
/**
* Attempts to find the instance represented by the provided state manager in the provided
* relationships tree.
* @param sm The state manager holding the instance to find.
* @param entry The entry packet to search within.
* @param sms The state managers which potentially hold the instance to find.
* @return The found instance's entry packet, otherwise null.
*/
public IEntryPacket findObjectInEntry(StateManager sm, IEntryPacket entry, Stack<StateManager> sms) {
if (!sms.isEmpty()) {
final StateManager tempStateManager = sms.pop();
int ownerIndex = tempStateManager.getOwnerIndex();
final SpaceTypeInfo ownerTypeInfo = SpaceTypeInfoRepository.getTypeInfo(sm.getMetaData().getDescribedType());
final SpaceTypeInfo ownedTypeInfo = SpaceTypeInfoRepository.getTypeInfo(tempStateManager.getMetaData().getDescribedType());
FieldMetaData[] fms = sm.getMetaData().getFields();
Integer associationType = getClassRelationStatus(tempStateManager.getMetaData().getDescribedType());
int spacePropertyIndex = -1;
for (int i = 0; i < fms.length; i++) {
if (fms[i].isVersion())
continue;
spacePropertyIndex++;
if (fms[i].getAssociationType() == associationType) {
// One-to-many
if (associationType == FieldMetaData.ONE_TO_MANY
&& fms[i].getElement().getDeclaredType().equals(tempStateManager.getMetaData().getDescribedType())) {
final Object id = ApplicationIds.toPKValues(tempStateManager.getId(), tempStateManager.getMetaData())[0];
final Collection<?> values = (Collection<?>) entry.getFieldValue(spacePropertyIndex);
if (values != null) {
for (Object item : values) {
Object itemId = ownedTypeInfo.getIdProperty().getValue(item);
if (id.equals(itemId)) {
final IEntryPacket entryPacket = getEntryPacketFromEntity(item);
return (sms.isEmpty()) ? entryPacket : findObjectInEntry(tempStateManager, entryPacket, sms);
}
}
}
// One-to-one
} else if (associationType == FieldMetaData.ONE_TO_ONE
&& fms[i].getDeclaredType().equals(tempStateManager.getMetaData().getDescribedType())) {
final Object id = ApplicationIds.toPKValues(tempStateManager.getId(), tempStateManager.getMetaData())[0];
final Object value = entry.getFieldValue(spacePropertyIndex);
if (value != null) {
Object objectId = ownedTypeInfo.getIdProperty().getValue(value);
if (id.equals(objectId)) {
final IEntryPacket entryPacket = getEntryPacketFromEntity(value);
return (sms.isEmpty()) ? entryPacket : findObjectInEntry(tempStateManager, entryPacket, sms);
}
}
// Embedded
} else if (fms[i].isEmbeddedPC()
&& fms[i].getDeclaredType().equals(tempStateManager.getMetaData().getDescribedType())) {
final Object value = entry.getFieldValue(spacePropertyIndex);
final IEntryPacket entryPacket = getEntryPacketFromEntity(value);
return (sms.isEmpty()) ? entryPacket : findObjectInEntry(tempStateManager, entryPacket, sms);
}
}
}
}
// Object not found..
return null;
}
}
}
|
package org.openspaces.jpa;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import net.jini.core.lease.Lease;
import net.jini.core.transaction.Transaction;
import net.jini.core.transaction.TransactionException;
import net.jini.core.transaction.TransactionFactory;
import org.apache.openjpa.abstractstore.AbstractStoreManager;
import org.apache.openjpa.conf.OpenJPAConfiguration;
import org.apache.openjpa.enhance.PersistenceCapable;
import org.apache.openjpa.kernel.FetchConfiguration;
import org.apache.openjpa.kernel.OpenJPAStateManager;
import org.apache.openjpa.kernel.PCState;
import org.apache.openjpa.kernel.QueryLanguages;
import org.apache.openjpa.kernel.StoreQuery;
import org.apache.openjpa.kernel.exps.ExpressionParser;
import org.apache.openjpa.lib.rop.ResultObjectProvider;
import org.apache.openjpa.meta.ClassMetaData;
import org.apache.openjpa.meta.FieldMetaData;
import org.apache.openjpa.util.ApplicationIds;
import org.openspaces.jpa.openjpa.SpaceConfiguration;
import org.openspaces.jpa.openjpa.StoreManagerQuery;
import com.gigaspaces.internal.client.QueryResultTypeInternal;
import com.gigaspaces.internal.client.spaceproxy.ISpaceProxy;
import com.gigaspaces.internal.client.spaceproxy.metadata.ObjectType;
import com.gigaspaces.internal.metadata.ITypeDesc;
import com.gigaspaces.internal.transport.IEntryPacket;
import com.gigaspaces.internal.transport.ITemplatePacket;
import com.gigaspaces.internal.transport.TemplatePacketFactory;
import com.gigaspaces.internal.transport.TransportPacketType;
import com.j_spaces.core.IJSpace;
import com.j_spaces.core.client.ReadModifiers;
import com.j_spaces.core.client.UpdateModifiers;
import com.j_spaces.jdbc.QueryProcessorFactory;
import com.j_spaces.jdbc.driver.GConnection;
/**
* A GigaSpaces back-end implementation for OpenJPA.
* Responsible for storing and fetching data from GigaSpaces using the space API.
*
* @author idan
* @since 8.0
*
*/
@SuppressWarnings("unchecked")
public class StoreManager extends AbstractStoreManager {
private Transaction _transaction = null;
private static final Map<Class<?>, Integer> _classesRelationStatus = new HashMap<Class<?>, Integer>();
private static final HashSet<Class<?>> _processedClasses = new HashSet<Class<?>>();
private GConnection _connection;
@Override
protected void open() {
// Specific gigaspaces initialization (space proxy)
getConfiguration().initialize();
}
@Override
protected Collection<String> getUnsupportedOptions() {
Collection<String> unsupportedOptions = (Collection<String>) super.getUnsupportedOptions();
unsupportedOptions.remove(OpenJPAConfiguration.OPTION_ID_DATASTORE);
return unsupportedOptions;
}
@Override
public boolean syncVersion(OpenJPAStateManager sm, Object edata) {
return true;
}
@Override
public void begin() {
try {
if (_transaction != null)
throw new TransactionException("Attempted to start a new transaction when there's already an active transaction.");
long timeout = (getConfiguration().getLockTimeout() == 0)?
Lease.FOREVER : getConfiguration().getLockTimeout();
_transaction = (TransactionFactory.create(getConfiguration().getTransactionManager(),
timeout)).transaction;
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
@Override
public void commit() {
try {
_transaction.commit(Long.MAX_VALUE);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
_transaction = null;
}
}
@Override
public void rollback() {
try {
_transaction.abort(Long.MAX_VALUE);
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
_transaction = null;
}
}
@Override
public StoreQuery newQuery(String language) {
ExpressionParser ep = QueryLanguages.parserForLanguage(language);
return new StoreManagerQuery(ep, this);
}
@Override
protected OpenJPAConfiguration newConfiguration() {
return new SpaceConfiguration();
}
public SpaceConfiguration getConfiguration() {
return (SpaceConfiguration) getContext().getConfiguration();
}
/**
* Returns whether the state manager's managed object exists in space.
*/
public boolean exists(OpenJPAStateManager sm, Object edata) {
ClassMetaData cm = sm.getMetaData();
final Object[] ids = ApplicationIds.toPKValues(sm.getObjectId(), cm);
ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
try {
Object result = proxy.readById(cm.getDescribedType().getName(), ids[0], null, _transaction,
0, ReadModifiers.DIRTY_READ, false, QueryResultTypeInternal.EXTERNAL_ENTRY);
return result != null;
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
public boolean isCached(List<Object> oids, BitSet edata) {
return false;
}
@SuppressWarnings({ "rawtypes" })
@Override
public Collection loadAll(Collection sms, PCState state, int load, FetchConfiguration fetch, Object edata) {
return super.loadAll(sms, state, load, fetch, edata);
}
@Override
public boolean initialize(OpenJPAStateManager sm, PCState state,
FetchConfiguration fetchConfiguration, Object edata) {
final ClassMetaData cm = sm.getMetaData();
int readModifier = (_transaction != null)? getConfiguration().getReadModifier()
: ReadModifiers.REPEATABLE_READ;
try {
IEntryPacket result = null;
// If we already have the result and only need to initialize.. (relevant for JPQL)
if (edata != null) {
result = (IEntryPacket) edata;
} else {
final ISpaceProxy proxy = (ISpaceProxy) getConfiguration().getSpace();
final ITypeDesc typeDescriptor = proxy.getDirectProxy().getTypeManager().getTypeDescByName(cm.getDescribedType().getName());
final Object[] ids = ApplicationIds.toPKValues(sm.getObjectId(), cm);
ITemplatePacket template = TemplatePacketFactory.createIdPacket(ids[0], null, 0, typeDescriptor, TransportPacketType.ENTRY_PACKET);
result = (IEntryPacket) proxy.read(template, _transaction, 0, readModifier);
if (result == null)
return false;
}
// TODO: Handle sub-classes etc...
sm.initialize(cm.getDescribedType(), state);
FieldMetaData[] fms = cm.getFields();
for (int i = 0; i < fms.length; i++) {
// Skip primary keys and fields that are already loaded
if (fms[i].isPrimaryKey() || sm.getLoaded().get(fms[i].getIndex()))
continue;
sm.store(i, result.getFieldValue(i));
}
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
return true;
}
/**
* This method loads specific fields from the data store for updating them.
*/
@Override
public boolean load(OpenJPAStateManager sm, BitSet fields, FetchConfiguration fetch, int lockLevel, Object context) {
ClassMetaData cm = (ClassMetaData)sm.getMetaData();
Object[] ids = ApplicationIds.toPKValues(sm.getObjectId(), cm);
final IJSpace space = getConfiguration().getSpace();
final ITypeDesc typeDescriptor = ((ISpaceProxy) space).getDirectProxy().getTypeManager().getTypeDescByName(cm.getDescribedType().getName());
final ITemplatePacket template = TemplatePacketFactory.createIdPacket(ids[0], null, 0, typeDescriptor, TransportPacketType.ENTRY_PACKET);
try {
// Read object from space
IEntryPacket result = (IEntryPacket) space.read(template, _transaction, 0);
if (result == null)
return false;
// Process result - store only the relevant fields in the state manager
for (int i = 0; i < cm.getDeclaredFields().length; i++) {
if (fields.get(i))
sm.store(i, result.getFieldValue(i));
}
return true;
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
@Override
public ResultObjectProvider executeExtent(ClassMetaData classmetadata, boolean flag,
FetchConfiguration fetchconfiguration) {
return null;
}
/**
* Flushes changes to GigaSpaces.
* Returns a list of exceptions that occurred.
*/
@SuppressWarnings({ "rawtypes" })
@Override
protected Collection flush(Collection pNew, Collection pNewUpdated, Collection pNewFlushedDeleted,
Collection pDirty, Collection pDeleted) {
IJSpace space = getConfiguration().getSpace();
ArrayList<Exception> exceptions = new ArrayList<Exception>();
if (shouldInitializeClassesRelationStatus())
initializeClassesRelationStatus();
handleNewObjects(pNew, space);
handleUpdatedObjects(pDirty, exceptions, space);
handleDeletedObjects(pDeleted, exceptions, space);
return exceptions;
}
/**
* Clears the removed objects from the space.
*/
private void handleDeletedObjects(Collection<OpenJPAStateManager> sms, ArrayList<Exception> exceptions, IJSpace space) {
for (OpenJPAStateManager sm : sms) {
ClassMetaData cm = sm.getMetaData();
if (_classesRelationStatus.containsKey(cm.getDescribedType()))
continue;
try {
// Remove object from space
final Object[] ids = ApplicationIds.toPKValues(sm.getObjectId(), cm);
final ISpaceProxy proxy = (ISpaceProxy) space;
final ITypeDesc typeDescriptor = proxy.getDirectProxy().getTypeManager().getTypeDescByName(sm.getMetaData().getDescribedType().getName());
final Object routing = sm.fetch(typeDescriptor.getRoutingPropertyId());
final ITemplatePacket template = TemplatePacketFactory.createIdPacket(ids[0], routing, 0, typeDescriptor, TransportPacketType.ENTRY_PACKET);
int result = proxy.clear(template, _transaction, 0);
if (result != 1)
throw new Exception("Unable to clear object from space.");
} catch (Exception e) {
exceptions.add(e);
}
}
}
/**
* Partially updates dirty fields to the space.
*/
private void handleUpdatedObjects(Collection<OpenJPAStateManager> sms, ArrayList<Exception> exceptions, IJSpace space) {
// Generate a template for each state manager and use partial update for updating..
for (OpenJPAStateManager sm : sms) {
ClassMetaData cm = sm.getMetaData();
if (_classesRelationStatus.containsKey(cm.getDescribedType()))
throw new RuntimeException("Updating an instance which is a part of a relation is not supported.");
try {
// Create an entry packet from the updated pojo and set all the fields but the updated & primary key to null.
final ISpaceProxy proxy = (ISpaceProxy) space;
final IEntryPacket entry = proxy.getDirectProxy().getTypeManager().getEntryPacketFromObject(
sm.getManagedInstance(), ObjectType.POJO, proxy);
for (int i = 0; i < cm.getDeclaredFields().length; i++) {
if (!sm.getDirty().get(i) && !cm.getFields()[i].isPrimaryKey()) {
entry.setFieldValue(i, null);
}
}
// Write changes to the space
space.write(entry, _transaction, Lease.FOREVER, 0, UpdateModifiers.PARTIAL_UPDATE);
} catch (Exception e) {
exceptions.add(e);
}
}
}
/**
* Writes new persistent objects to the space.
*/
private void handleNewObjects(Collection<OpenJPAStateManager> sms, IJSpace space) {
final HashMap<Class<?>, ArrayList<Object>> objectsToWriteByType = new HashMap<Class<?>, ArrayList<Object>>();
final ArrayList<OpenJPAStateManager> stateManagersToRestore = new ArrayList<OpenJPAStateManager>();
Class<?> previousType = null;
ArrayList<Object> currentList = null;
for (OpenJPAStateManager sm : sms) {
// If the current object is in a relation skip it
if (_classesRelationStatus.containsKey(sm.getMetaData().getDescribedType())) {
// Remove the state manager from objects in a relation so that their serialization is not
// handled by OpenJPA, which can cause a deadlock.
sm.getPersistenceCapable().pcReplaceStateManager(null);
stateManagersToRestore.add(sm);
continue;
}
// If the object has embedded relations we need to remove the state manager from them too,
// since they are also serialized.
for (FieldMetaData fmd : sm.getMetaData().getFields()) {
if (fmd.isEmbeddedPC()) {
Object value = sm.fetch(fmd.getDeclaredIndex());
if (value != null) {
PersistenceCapable pc = (PersistenceCapable) value;
OpenJPAStateManager stateManager = (OpenJPAStateManager) pc.pcGetStateManager();
pc.pcReplaceStateManager(null);
stateManagersToRestore.add(stateManager);
}
}
}
if (!sm.getMetaData().getDescribedType().equals(previousType)) {
currentList = objectsToWriteByType.get(sm.getMetaData().getDescribedType());
if (currentList == null) {
currentList = new ArrayList<Object>();
objectsToWriteByType.put(sm.getMetaData().getDescribedType(), currentList);
}
}
currentList.add(sm.getManagedInstance());
}
try {
for (Map.Entry<Class<?>, ArrayList<Object>> entry : objectsToWriteByType.entrySet()) {
space.writeMultiple(entry.getValue().toArray(), _transaction, Lease.FOREVER, UpdateModifiers.WRITE_ONLY);
}
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
// Restore the removed state managers.
for (OpenJPAStateManager sm : stateManagersToRestore) {
sm.getPersistenceCapable().pcReplaceStateManager(sm);
}
}
}
/**
* Collects relationship information from OpenJPA's currently listed class meta data.
* This method is called on every flush() and checks whether there are new classes to initialize.
*/
private synchronized void initializeClassesRelationStatus() {
if (!shouldInitializeClassesRelationStatus())
return;
// Collect information regarding relationships.
// Eventually classes which are in a relation should not be saved to the space
// since we only support owned relationships and these instances will be saved as nested instances
// of their owning instance.
ClassMetaData[] cms = getConfiguration().getMetaDataRepositoryInstance().getMetaDatas();
for (ClassMetaData cm : cms) {
// Process class
if (!_processedClasses.contains(cm.getDescribedType())) {
for (FieldMetaData fmd : cm.getFields()) {
if (fmd.getAssociationType() == FieldMetaData.ONE_TO_ONE) {
if (!_classesRelationStatus.containsKey(fmd.getDeclaredType())) {
_classesRelationStatus.put(fmd.getDeclaredType(), FieldMetaData.ONE_TO_ONE);
}
} else if (fmd.getAssociationType() == FieldMetaData.ONE_TO_MANY) {
if (!_classesRelationStatus.containsKey(fmd.getElement().getDeclaredType())) {
_classesRelationStatus.put(fmd.getElement().getDeclaredType(), FieldMetaData.ONE_TO_MANY);
}
} else if (fmd.getAssociationType() == FieldMetaData.MANY_TO_MANY) {
throw new RuntimeException("Many-to-many is not supported.");
}
}
_processedClasses.add(cm.getDescribedType());
}
}
}
/**
* Initializes an ExternalEntry result as a state-managed POJO.
* (used by JPQL's query executor)
*/
public Object loadObject(ClassMetaData classMetaData, IEntryPacket entry) {
// Get object id
Object[] ids = new Object[1];
ids[0] = entry.getID();
Object objectId = ApplicationIds.fromPKValues(ids, classMetaData);
return getContext().find(objectId, null, null, entry, 0);
}
/**
* Gets the current active transaction.
*/
public Transaction getCurrentTransaction() {
return _transaction;
}
/**
* Returns whether the classes' relation status is incomplete and should be synchronized.
* OpenJPA creates class meta data only after an entity is persisted for the first time.
*/
private boolean shouldInitializeClassesRelationStatus() {
return getConfiguration().getMetaDataRepositoryInstance().getMetaDatas().length != _processedClasses.size();
}
/**
* Gets a JDBC connection using the configuration's space instance.
* Each store manager has its own connection, for multithreading reasons.
*/
public GConnection getJdbcConnection() throws SQLException {
if (_connection == null) {
Properties connectionProperties = new Properties();
connectionProperties.put(
QueryProcessorFactory.COM_GIGASPACES_EMBEDDED_QP_ENABLED, "true");
_connection = GConnection.getInstance(getConfiguration().getSpace(), connectionProperties);
if (_connection.getAutoCommit())
_connection.setAutoCommit(false);
}
return _connection;
}
/**
* Gets the class relation status (one-to-one etc..) for the provided type.
*/
public synchronized int getClassRelationStatus(Class<?> type) {
Integer relationStatus = _classesRelationStatus.get(type);
return (relationStatus == null) ? FieldMetaData.MANAGE_NONE : relationStatus;
}
}
|
package team1100.season2010.robot;
import edu.wpi.first.wpilibj.AnalogChannel;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.PIDSource;
import edu.wpi.first.wpilibj.PIDOutput;
import edu.wpi.first.wpilibj.PIDController;
/**
*
* @author mark
*/
public class SteeringPID {
final double kLinearPct = 2.0;
final double kPidI = 0.0;
final double kPidD = 0.0;
final int kOperatingRangePct = 20;
final int kCenterPct = 50;
final double kInChannelMin = 0.0;
final double kInChannelMax = 1000.0;
final double kInWidth = kInChannelMax - kInChannelMin;
final double kOutChannelMin = -1.0;
final double kOutChannelMax = 1.0;
PIDSource m_in;
PIDOutput m_out;
PIDController m_pid;
PIDOutput m_scaledOut;
int m_opRangePct = 20;
double m_operatingRange;
double m_rangeCenterPct = 50.0;
double m_rangeCenter;
double m_linearPct;
double m_PidP;
boolean m_running = false;
double m_PidI = kPidI;
double m_PidD = kPidD;
double m_initialPotVal;
/**
* Construct a SteeringPID using default module slots
* @param inputChannel - channel on the default analog input module
* connected to the sensing potentiometer for this device.
* @param outputChannel - channel on the default digital output module
* connected to the motor controller for this device.
* @param invertOutput - invert the polarity of the output value.
*/
public SteeringPID(int inputChannel, int outputChannel, boolean invertOutput)
{
this(AnalogChannel.getDefaultAnalogModule(), inputChannel,
Jaguar.getDefaultDigitalModule(), outputChannel,
invertOutput);
}
/**
* Construct a SteeringPID
* @param inputSlot - cRIO slot hosting the analog input module used for
* this device.
* @param inputChannel - channel on the selected analog module connected to
* the sensing potentiometer for this device.
* @param outputSlot - cRIO slot hosting the digital output module used for
* this device.
* @param outputChannel - channel on the selected digital module connected
* to the motor controller for this device.
* @param invertOutput - invert the polarity of the output value.
*/
public SteeringPID(int inputSlot, int inputChannel,
int outputSlot, int outputChannel, boolean invertOutput)
{
m_in = new AnalogChannel(inputSlot, inputChannel);
m_out = new Jaguar(outputSlot, outputChannel);
m_scaledOut = new PIDOutputInverter(m_out, invertOutput);
m_pid = new PIDController(m_PidP, m_PidI, m_PidD, m_in, m_scaledOut);
m_pid.setOutputRange(kOutChannelMin, kOutChannelMax);
m_initialPotVal = m_in.pidGet();
setOperatingRangePct(kOperatingRangePct);
setCenterPct(kCenterPct);
setLinearPct(kLinearPct);
}
public double getInitialPositionPct()
{
return 100 * (m_initialPotVal - kInChannelMin)/(kInChannelMax - kInChannelMin);
}
public double getPot()
{
return m_in.pidGet();
}
public double getCtr()
{
return m_rangeCenterPct;
}
/**
* Specify the portion of the input range where the PIDController operates
* in a linear (output not clipped) fashion. This is a percentage of the
* entire range of input values; it is not affected by changes to the
* operating range.
* @param pct - percent of the input range over which the output remains linear (not clipped)
*/
public void setLinearPct(double pct)
{
m_linearPct = pct/100;
// initially compute P so the motor input is linear over the whole input range
m_PidP = (kOutChannelMax - kOutChannelMin)/(m_operatingRange);
// increase P so the motor input reaches its limit at m_linearPct/2 from the center
m_PidP /= (pct/m_opRangePct);
m_PidI = m_PidP/20;
// update the PIDController
m_pid.setPID(m_PidP, m_PidI, m_PidD);
}
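// Worked example with the defaults above (illustrative): kOutChannelMax - kOutChannelMin = 2.0 and
// m_operatingRange = 20% of 1000 counts = 200, so the base gain is 2.0 / 200 = 0.01.
// With pct = kLinearPct = 2.0 and m_opRangePct = 20, the gain is divided by 2.0 / 20 = 0.1,
// giving m_PidP = 0.1 and m_PidI = 0.1 / 20 = 0.005.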
/**
* Specify the portion of the input range in use.
* @param widthPct - percent of input range to use
*/
public void setOperatingRangePct(int widthPct)
{
// ignore invalid input
if (widthPct > 100 || widthPct < 0) return;
if (m_rangeCenterPct + widthPct/2 > 100) return;
if (m_rangeCenterPct - widthPct/2 < 0) return;
m_opRangePct = widthPct;
m_operatingRange = kInWidth * widthPct / 100;
}
/**
* Specify the midpoint of the operating range as a percent of the input range.
* @param centerPct - the target value, as a percent of the input range, when
* the direction is centered (direction is 0);
*
*/
public void setCenterPct(double centerPct)
{
// ignore invalid input
if (centerPct > 100 || centerPct < 0) return;
if (centerPct + m_opRangePct/2 > 100) return;
if (centerPct - m_opRangePct/2 < 0) return;
m_rangeCenterPct = centerPct;
m_rangeCenter = kInWidth * centerPct / 100.0;
}
public void setI(double i)
{
m_PidI = i;
m_pid.setPID(m_PidP, m_PidI, m_PidD);
}
public void setD(double d)
{
m_PidD = d;
m_pid.setPID(m_PidP, m_PidI, m_PidD);
}
/**
* Set the steering direction. The direction input is mapped onto the
* operating range of the steering device. When the specified direction
* is zero, the steering device will find the specified center.
* @param direction Desired steering direction, range is -1.0 to +1.0
*/
public void setDirection(double direction)
{
m_pid.setSetpoint((direction * m_operatingRange/2) + m_rangeCenter);
if (!m_running)
{
m_pid.enable();
m_running = true;
}
/* System.out.println("PID Error: " + m_pid.getError() +
"; Result: " + m_pid.get() +
"; Setpoint: " + m_pid.getSetpoint() +
"; Joystick: " + direction +
"; Input: " + m_in.pidGet() +
"; P: " + m_pid.getP() +
"; I: " + m_pid.getI() +
"; D: " + m_pid.getD() +
"; width: " + m_operatingRange +
"; center: " + m_rangeCenter);
*/
}
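// Mapping sketch (illustrative, using the defaults above): with an input range of 0..1000 counts,
// a 20% operating range (200 counts) and a 50% center (500 counts), setDirection(+1.0) targets a
// setpoint of 600, setDirection(0.0) targets 500, and setDirection(-1.0) targets 400.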
}
|
package BlueTurtle.TSE;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import org.junit.Before;
import org.junit.Test;
import BlueTurtle.commandbuilders.CheckStyleCommandBuilder;
import BlueTurtle.commandbuilders.CommandBuilder;
import BlueTurtle.commandbuilders.PMDCommandBuilder;
import BlueTurtle.settings.CheckStyleSettings;
import BlueTurtle.settings.PMDSettings;
/**
* Unit test for simple Analyser.
*/
public class AnalyserTest {
/**
* Set up a command to run PMD and a command to run CheckStyle. These commands are handed to the analyser, which runs them.
* @throws IOException if building or running the commands fails
*/
@Before
public void initialize() throws IOException {
ArrayList<AnalyserCommand> commands = new ArrayList<AnalyserCommand>();
CommandBuilder commandBuilder;
PMDSettings pmdSettings = new PMDSettings();
CheckStyleSettings checkStyleSettings = new CheckStyleSettings(new File("CheckStyle_Settings.xml"));
commandBuilder = new PMDCommandBuilder(pmdSettings);
String[] pmdCommands = commandBuilder.buildCommand();
AnalyserCommand c1 = new AnalyserCommand(pmdSettings.getDefaultOutputFilePath(), pmdCommands);
commands.add(c1);
commandBuilder = new CheckStyleCommandBuilder(checkStyleSettings);
String[] checkStyleCommands = commandBuilder.buildCommand();
AnalyserCommand c2 = new AnalyserCommand(checkStyleSettings.getDefaultOutputFilePath(), checkStyleCommands);
commands.add(c2);
Analyser analyser = new Analyser(commands);
analyser.analyse();
}
/**
* Simple test to check if running the analyser actually produces output for checkstyle.
* @throws IOException
*/
@Test
public void testCheckStyleOutput() throws IOException {
File file = new File(JavaController.getUserDir() + "/Runnables/Testcode/checkstyle.xml");
assertTrue(file.length() > 0);
}
/**
* Simple test to check if running the analyser actually produces output for PMD.
* @throws IOException
*/
@Test
public void testPMDOutput() throws IOException {
File file = new File(JavaController.getUserDir() + "/Runnables/Testcode/PMD.xml");
assertTrue(file.length() > 0);
}
}
|
package com.jcabi.github;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface OAuthScope {
/**
* The Github OAuth scopes required.
*/
Scope[] value();
/**
* The enum represents the available OAuth scopes.
*/
public enum Scope {
/**
* Represents "no scope" scope.
*/
NO_SCOPE,
/**
* Represents "user" scope.
*/
USER,
/**
* Represents "user:email" scope.
*/
USER_EMAIL,
/**
* Represents "user:follow" scope.
*/
USER_FOLLOW,
/**
* Represents "public_repo" scope.
*/
PUBLIC_REPO,
/**
* Represents "repo" scope.
*/
REPO,
/**
* Represents "repo_deployment" scope.
*/
REPO_DEPLOYMENT,
/**
* Represents "repo_status" scope.
*/
REPO_STATUS,
/**
* Represents "delete_repo" scope.
*/
DELETE_REPO,
/**
* Represents "notifications" scope.
*/
NOTIFICATIONS,
/**
* Represents "gist" scope.
*/
GIST,
/**
* Represents "read:repo_hook" scope.
*/
READ_REPO_HOOK,
/**
* Represents "write:repo_hook" scope.
*/
WRITE_REPO_HOOK,
/**
* Represents "admin:repo_hook" scope.
*/
ADMIN_REPO_HOOK,
/**
* Represents "admin:org_hook" scope.
*/
ADMIN_ORG_HOOK,
/**
* Represents "read:org" scope.
*/
READ_ORG,
/**
* Represents "write:org" scope.
*/
WRITE_ORG,
/**
* Represents "admin:org" scope.
*/
ADMIN_ORG,
/**
* Represents "read:public_key" scope.
*/
READ_PUBLIC_KEY,
/**
* Represents "write:public_key" scope.
*/
WRITE_PUBLIC_KEY,
/**
* Represents "admin:public_key" scope.
*/
ADMIN_PUBLIC_KEY;
}
}
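// Usage sketch (the annotated type is hypothetical; the annotation itself only
// declares which OAuth scopes an API binding requires):
//
// @OAuthScope({ OAuthScope.Scope.REPO, OAuthScope.Scope.USER })
// public interface Repos { ... }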
|
package com.jcabi.github;
import com.jcabi.http.Request;
import com.jcabi.http.mock.MkAnswer;
import com.jcabi.http.mock.MkContainer;
import com.jcabi.http.mock.MkGrizzlyContainer;
import com.jcabi.http.request.ApacheRequest;
import com.jcabi.http.request.FakeRequest;
import java.net.HttpURLConnection;
import java.util.Random;
import javax.json.Json;
import org.apache.commons.lang3.RandomStringUtils;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.mockito.Mockito;
@SuppressWarnings({"PMD.AvoidDuplicateLiterals", "PMD.TooManyMethods" })
public final class RtUserTest {
/**
* RtUser can understand who I am.
* @throws Exception If some problem inside
*/
@Test
public void checksWhoAmI() throws Exception {
final String login = "monalia";
final RtUser user = new RtUser(
Mockito.mock(Github.class),
new FakeRequest().withBody(
Json.createObjectBuilder()
.add("login", login)
.build().toString()
)
);
MatcherAssert.assertThat(
user.login(),
Matchers.equalTo(login)
);
}
/**
* RtUser can check if he has a name.
* @throws Exception If some problem inside
*/
@Test
public void checksIfHeHasAName() throws Exception {
final User.Smart smart = new User.Smart(
new RtUser(
Mockito.mock(Github.class),
new FakeRequest().withBody(
Json.createObjectBuilder()
.add("name", "octoc")
.build()
.toString()
),
"octoc"
)
);
MatcherAssert.assertThat(
smart.hasName(),
Matchers.equalTo(true)
);
}
/**
* RtUser can check if he has NO name.
* @throws Exception If some problem inside
*/
@Test
public void checksIfHeHasNoName() throws Exception {
final User.Smart smart = new User.Smart(
new RtUser(
Mockito.mock(Github.class),
new FakeRequest().withBody(
Json.createObjectBuilder()
.build()
.toString()
),
"octoc"
)
);
MatcherAssert.assertThat(
smart.hasName(),
Matchers.equalTo(false)
);
}
/**
* RtUser can describe itself as a JSON object.
*
* @throws Exception if there is any problem
*/
@Test
public void describeAsJson() throws Exception {
final RtUser user = new RtUser(
Mockito.mock(Github.class),
new FakeRequest().withBody(
Json.createObjectBuilder()
.add("name", "monalisa")
.add("email", "octocat@github.com")
.build()
.toString()
),
"octoc"
);
MatcherAssert.assertThat(
user.json().toString(),
Matchers.equalTo(
"{\"name\":\"monalisa\",\"email\":\"octocat@github.com\"}"
)
);
}
/**
* RtUser can execute PATCH request.
*
* @throws Exception if there is any problem
*/
@Test
public void executePatchRequest() throws Exception {
final MkContainer container = new MkGrizzlyContainer().next(
new MkAnswer.Simple(
HttpURLConnection.HTTP_OK,
"{\"login\":\"octocate\"}"
)
).start();
final RtUser json = new RtUser(
Mockito.mock(Github.class),
new ApacheRequest(container.home())
);
json.patch(
Json.createObjectBuilder()
.add("location", "San Francisco")
.build()
);
MatcherAssert.assertThat(
container.take().method(),
Matchers.equalTo(Request.PATCH)
);
container.stop();
}
/**
* RtUser can fetch emails.
*/
@Test
public void fetchesEmails() {
final Github github = Mockito.mock(Github.class);
Mockito.when(github.entry()).thenReturn(new FakeRequest());
final User user = new RtUser(github, new FakeRequest());
MatcherAssert.assertThat(user.emails(), Matchers.notNullValue());
}
/**
* Tests if a User.Smart object handles gravatar_id JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasGravatar() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("gravatar_id", value);
MatcherAssert.assertThat(smart.gravatar(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles html_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasHtmlUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("html_url", value);
MatcherAssert.assertThat(smart.htmlUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles followers_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasFollwersUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("followers_url", value);
MatcherAssert.assertThat(smart.follwersUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles following_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasFollowingUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("following_url", value);
MatcherAssert.assertThat(smart.followingUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles gists_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasGistsUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("gists_url", value);
MatcherAssert.assertThat(smart.gistsUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles starred_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasStarredUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("starred_url", value);
MatcherAssert.assertThat(smart.starredUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles subscriptions_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasSubscriptionsUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("subscriptions_url", value);
MatcherAssert.assertThat(smart.subscriptionsUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles organizations_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasOrganizationsUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("organizations_url", value);
MatcherAssert.assertThat(smart.organizationsUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles repos_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasReposUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("repos_url", value);
MatcherAssert.assertThat(smart.reposUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles events_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasEventsUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("events_url", value);
MatcherAssert.assertThat(smart.eventsUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles received_events_url JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasReceivedEventsUrl() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("received_events_url", value);
MatcherAssert.assertThat(smart.receivedEventsUrl(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles type JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasType() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("type", value);
MatcherAssert.assertThat(smart.type(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles site_admin JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasSiteAdmin() throws Exception {
final User.Smart smart = this.userWith("site_admin", "true");
MatcherAssert.assertThat(smart.siteAdmin(), Matchers.is(true));
}
/**
* Tests if a User.Smart object handles blog JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasBlog() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("blog", value);
MatcherAssert.assertThat(smart.blog(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles hireable JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasHireable() throws Exception {
final User.Smart smart = this.userWith("hireable", "true");
MatcherAssert.assertThat(smart.hireable(), Matchers.is(true));
}
/**
* Tests if a User.Smart object handles bio JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasBio() throws Exception {
final String value = RandomStringUtils.random(256);
final User.Smart smart = this.userWith("bio", value);
MatcherAssert.assertThat(smart.bio(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles public_repos JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasPublicRepos() throws Exception {
final int value = new Random().nextInt();
final User.Smart smart = this.userWith(
"public_repos",
String.valueOf(value)
);
MatcherAssert.assertThat(smart.publicRepos(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles public_gists JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasPublicGists() throws Exception {
final int value = new Random().nextInt();
final User.Smart smart = this.userWith(
"public_gists",
String.valueOf(value)
);
MatcherAssert.assertThat(smart.publicGists(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles followers JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasFollowersCount() throws Exception {
final int value = new Random().nextInt();
final User.Smart smart = this.userWith(
"followers",
String.valueOf(value)
);
MatcherAssert.assertThat(smart.followersCount(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles following JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasFollowingCount() throws Exception {
final int value = new Random().nextInt();
final User.Smart smart = this.userWith(
"following",
String.valueOf(value)
);
MatcherAssert.assertThat(smart.followingCount(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles created_at JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasCreated() throws Exception {
final Github.Time value = new Github.Time(this.currentTimeInSeconds());
final User.Smart smart = this.userWith("created_at", value.toString());
MatcherAssert.assertThat(smart.created(), Matchers.is(value));
}
/**
* Tests if a User.Smart object handles updated_at JSON property.
* @throws Exception if any problem occurs.
*/
@Test
public void hasUpdated() throws Exception {
final Github.Time value = new Github.Time(this.currentTimeInSeconds());
final User.Smart smart = this.userWith("updated_at", value.toString());
MatcherAssert.assertThat(smart.updated(), Matchers.is(value));
}
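/**
* Create a User.Smart backed by a fake request whose JSON body carries the
* single given property; helper for the property tests above.
* @param property JSON property name
* @param value JSON property value
* @return Smart user wrapping the fake response
*/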
private User.Smart userWith(final String property, final String value) {
return new User.Smart(
new RtUser(
Mockito.mock(Github.class),
new FakeRequest().withBody(
Json.createObjectBuilder()
.add(property, value)
.build()
.toString()
),
"octoc"
)
);
}
/**
* Get the current time in milliseconds, truncated to whole seconds
* (for example 1234567891 ms becomes 1234567000 ms).
* @return Current time in milliseconds, truncated to a whole second.
*/
private long currentTimeInSeconds() {
// @checkstyle MagicNumberCheck (1 line)
return System.currentTimeMillis() / 1000 * 1000;
}
}
|
package cz.hobrasoft.pdfmu;
import com.tngtech.java.junit.dataprovider.DataProvider;
import com.tngtech.java.junit.dataprovider.DataProviderRunner;
import com.tngtech.java.junit.dataprovider.UseDataProvider;
import cz.hobrasoft.pdfmu.error.ErrorType;
import cz.hobrasoft.pdfmu.jackson.Inspect;
import cz.hobrasoft.pdfmu.operation.OperationException;
import cz.hobrasoft.pdfmu.operation.OperationInspect;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.Rule;
import org.junit.contrib.java.lang.system.Assertion;
import org.junit.contrib.java.lang.system.ExpectedSystemExit;
import org.junit.contrib.java.lang.system.SystemErrRule;
import org.junit.contrib.java.lang.system.SystemOutRule;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
/**
* @author Filip Bartek
*/
@RunWith(DataProviderRunner.class)
public class MainTest {
@Rule
public final ExpectedSystemExit exit = ExpectedSystemExit.none();
@Rule
public final SystemOutRule systemOutRule = new SystemOutRule().mute().enableLog();
@Rule
public final SystemErrRule systemErrRule = new SystemErrRule().mute().enableLog();
@Rule
public TemporaryFolder folder = new TemporaryFolder();
@Test
public void testTooFewArguments() {
exit.expectSystemExitWithStatus(ErrorType.PARSER_TOO_FEW_ARGUMENTS.getCode());
Main.main(new String[]{});
assert false;
}
@Test
public void testVersion() {
exit.expectSystemExitWithStatus(0);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() {
// The terminating line break is introduced by Argparse4j
Assert.assertEquals(String.format("%1$s\n", Main.getProjectVersion()),
systemOutRule.getLogWithNormalizedLineSeparator());
}
});
Main.main(new String[]{"--version"});
assert false;
}
@Test
public void testHelp() {
exit.expectSystemExitWithStatus(0);
Main.main(new String[]{"--help"});
assert false;
}
@Test
public void testLegalNotice() {
exit.expectSystemExitWithStatus(0);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() {
Assert.assertEquals(Main.getLegalNotice(),
systemOutRule.getLogWithNormalizedLineSeparator());
}
});
Main.main(new String[]{"--legal-notice"});
assert false;
}
@Ignore
@Test
public void testInspect() {
Assert.fail();
}
public enum PdfVersion {
V12, V13, V14, V15, V16, V17;
static PdfVersion DEFAULT = V16;
private char toChar() {
switch (this) {
case V12:
return '2';
case V13:
return '3';
case V14:
return '4';
case V15:
return '5';
case V16:
return '6';
case V17:
return '7';
}
assert false;
return 0;
}
@Override
public String toString() {
return String.format("1.%1$c", toChar());
}
public String resourceName() {
return String.format("blank-1%1$c.pdf", toChar());
}
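// For example (derived from the format string above), V14.resourceName()
// yields "blank-14.pdf" and V17.resourceName() yields "blank-17.pdf"; the
// blank PDFs are expected on the test classpath.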
public File getFile(TemporaryFolder folder) throws IOException {
String resourceName = resourceName();
assert resourceName != null;
File document = folder.newFile(resourceName);
assert document.exists();
ClassLoader classLoader = this.getClass().getClassLoader();
InputStream in = classLoader.getResourceAsStream(resourceName);
assert in != null;
OutputStream out = new FileOutputStream(document);
assert out != null;
IOUtils.copy(in, out);
out.close();
in.close();
return document;
}
}
public enum OnlyIfLower {
No,
Yes;
public boolean toBoolean() {
return this == Yes;
}
}
public enum Force {
No,
Yes;
public boolean toBoolean() {
return this == Yes;
}
}
public static class UpdateVersionInput {
public UpdateVersionInput(Force force, PdfVersion inputVersion,
PdfVersion requestedVersion, OnlyIfLower onlyIfLower) {
this.force = force;
this.inputVersion = inputVersion;
this.requestedVersion = requestedVersion;
this.onlyIfLower = onlyIfLower;
}
public Force force;
public PdfVersion inputVersion;
public PdfVersion requestedVersion;
public OnlyIfLower onlyIfLower;
@Override
public String toString() {
List<String> argsList = new ArrayList<>();
argsList.add("update-version");
argsList.add(inputVersion.resourceName());
if (force.toBoolean()) {
argsList.add("--force");
} else {
argsList.add("--out");
final String outFileName = "out.pdf";
assert !outFileName.equals(inputVersion.resourceName());
argsList.add(outFileName);
}
if (requestedVersion != null) {
argsList.add("--version");
argsList.add(requestedVersion.toString());
}
if (onlyIfLower.toBoolean()) {
argsList.add("--only-if-lower");
}
return String.join(" ", argsList);
}
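// For example, an input of V14 without --force, a requested version of V16
// and --only-if-lower renders as
// "update-version blank-14.pdf --out out.pdf --version 1.6 --only-if-lower".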
}
public static List<UpdateVersionInput> updateVersionInputs() {
List<UpdateVersionInput> result = new ArrayList<>();
for (Force force : Force.values()) {
for (PdfVersion inputVersion : PdfVersion.values()) {
for (OnlyIfLower onlyIfLower : OnlyIfLower.values()) {
for (PdfVersion requestedVersion : PdfVersion.values()) {
result.add(new UpdateVersionInput(force, inputVersion, requestedVersion, onlyIfLower));
}
}
}
}
return result;
}
@DataProvider
public static Object[][] dataProviderUpdateVersion() {
List<Object[]> result = new ArrayList<>();
for (final UpdateVersionInput updateVersionInput : updateVersionInputs()) {
final Force force = updateVersionInput.force;
assert force != null;
final PdfVersion inputVersion = updateVersionInput.inputVersion;
assert inputVersion != null;
final PdfVersion requestedVersion = updateVersionInput.requestedVersion;
final OnlyIfLower onlyIfLower = updateVersionInput.onlyIfLower;
assert onlyIfLower != null;
PdfVersion expectedVersion = requestedVersion;
if (expectedVersion == null) {
expectedVersion = PdfVersion.DEFAULT;
}
if (!force.toBoolean() && onlyIfLower.toBoolean() && inputVersion.compareTo(expectedVersion) >= 0) {
// Discard combinations that do not create an output file
continue;
}
if (onlyIfLower.toBoolean() && inputVersion.compareTo(expectedVersion) > 0) {
expectedVersion = inputVersion;
}
result.add(new Object[]{updateVersionInput, expectedVersion});
}
return result.toArray(new Object[][]{});
}
private File outFile;
@Test
@UseDataProvider
public void testUpdateVersion(final UpdateVersionInput updateVersionInput,
final PdfVersion expectedVersion) throws IOException {
final Force force = updateVersionInput.force;
final PdfVersion inputVersion = updateVersionInput.inputVersion;
final PdfVersion requestedVersion = updateVersionInput.requestedVersion;
final OnlyIfLower onlyIfLower = updateVersionInput.onlyIfLower;
final File document = inputVersion.getFile(folder);
outFile = document;
List<String> argsList = new ArrayList<>();
argsList.add("update-version");
argsList.add(document.getAbsolutePath());
if (force.toBoolean()) {
argsList.add("--force");
} else {
argsList.add("--out");
final String outFileName = "out.pdf";
assert !outFileName.equals(inputVersion.resourceName());
outFile = folder.newFile(outFileName);
{ // success
final boolean success = outFile.delete();
assert success;
}
assert !outFile.exists();
argsList.add(outFile.getAbsolutePath());
}
if (requestedVersion != null) {
argsList.add("--version");
argsList.add(requestedVersion.toString());
}
if (onlyIfLower.toBoolean()) {
argsList.add("--only-if-lower");
}
exit.expectSystemExitWithStatus(0);
exit.checkAssertionAfterwards(new Assertion() {
@Override
public void checkAssertion() throws OperationException, IOException {
Inspect inspect = OperationInspect.getInstance().execute(outFile);
Assert.assertEquals(expectedVersion.toString(), inspect.version);
}
});
Main.main(argsList.toArray(new String[]{}));
assert false;
}
@Ignore
@Test
public void testUpdateProperties() {
Assert.fail();
}
@Ignore
@Test
public void testAttach() {
Assert.fail();
}
@Ignore
@Test
public void testSign() {
Assert.fail();
}
}
|
package integration;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.server.*;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.logging.Logger;
import static com.google.common.base.Joiner.on;
import static java.lang.Thread.currentThread;
import static java.util.logging.Level.SEVERE;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import static org.junit.Assert.assertTrue;
public class LocalHttpServer {
private static final Logger log = Logger.getLogger(LocalHttpServer.class.getName());
private static final String CONTENT_TYPE_HTML_TEXT = "text/html";
private static final String CONTENT_TYPE_IMAGE_PNG = "image/png";
private final Server server;
/**
* @param port the port to listen on
* @param ssl whether to serve HTTPS instead of plain HTTP
*/
public LocalHttpServer(int port, boolean ssl) {
server = new Server();
if (ssl) {
configureHttps(port);
} else {
ServerConnector connector = new ServerConnector(server);
connector.setPort(port);
server.setConnectors(new Connector[]{connector});
}
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
server.setHandler(context);
}
/**
* May be used to run the test server locally; it is used by Selenide's own tests.
*
* @param args not used
*/
public static void main(String[] args) throws Exception {
LocalHttpServer server = new LocalHttpServer(8080, false).start();
Thread.currentThread().join();
}
}
|
package org.cactoos.list;
import java.util.List;
import org.hamcrest.core.IsEqual;
import org.hamcrest.core.IsNot;
import org.junit.Test;
import org.llorllale.cactoos.matchers.Assertion;
import org.llorllale.cactoos.matchers.IsTrue;
/**
* Test case for {@link org.cactoos.collection.Joined}.
*
* @since 0.20
* @checkstyle JavadocMethodCheck (500 lines)
* @checkstyle MagicNumber (500 lines)
* @checkstyle DiamondOperatorCheck (500 lines)
*/
@SuppressWarnings("PMD.TooManyMethods")
public final class JoinedTest {
/**
* Literal ONE value.
*/
private static final String LITERAL_ONE = "ONE";
/**
* Literal TWO value.
*/
private static final String LITERAL_TWO = "TWO";
/**
* Literal THREE value.
*/
private static final String LITERAL_THREE = "THREE";
/**
* Literal FOUR value.
*/
private static final String LITERAL_FOUR = "FOUR";
@Test
public void behavesAsCollection() {
new Assertion<>(
"Can't behave as a list",
new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_ONE, JoinedTest.LITERAL_TWO
),
new ListOf<>(
JoinedTest.LITERAL_THREE, JoinedTest.LITERAL_FOUR
)
),
new BehavesAsList<>(JoinedTest.LITERAL_TWO)
).affirm();
}
@Test
public void size() {
new Assertion<>(
"must evaluate the size of the joined list",
new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_ONE, JoinedTest.LITERAL_TWO
),
new ListOf<>(
JoinedTest.LITERAL_THREE, JoinedTest.LITERAL_FOUR
)
).size(),
new IsEqual<>(4)
).affirm();
}
@Test
public void isEmpty() {
new Assertion<>(
"must be evaluated as an empty list",
new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_ONE, JoinedTest.LITERAL_TWO
),
new ListOf<>(
JoinedTest.LITERAL_THREE, JoinedTest.LITERAL_FOUR
)
).isEmpty(),
new IsNot<>(new IsTrue())
).affirm();
}
@Test
public void contains() {
new Assertion<>(
"must contain element specified",
new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_ONE, JoinedTest.LITERAL_TWO
),
new ListOf<>(
JoinedTest.LITERAL_THREE, JoinedTest.LITERAL_FOUR
)
).contains(JoinedTest.LITERAL_THREE),
new IsTrue()
).affirm();
}
@Test
public void iterator() {
new Assertion<>(
"Joined Iterator must return next element equal to the first added",
new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_ONE, JoinedTest.LITERAL_TWO
),
new ListOf<>(
JoinedTest.LITERAL_THREE, JoinedTest.LITERAL_FOUR
)
).iterator().next(),
new IsEqual<>(
JoinedTest.LITERAL_ONE
)
).affirm();
}
@Test
public void add() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.add(JoinedTest.LITERAL_THREE);
new Assertion<>(
"must be able to add element specified",
joined,
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_ONE,
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
)
)
).affirm();
}
@Test
public void remove() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.remove(JoinedTest.LITERAL_TWO);
new Assertion<>(
"must be able to remove element specified",
joined,
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_ONE
)
)
).affirm();
}
@Test
public void containsAll() {
new Assertion<>(
"must contain all elements",
new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE, JoinedTest.LITERAL_THREE),
new ListOf<>(JoinedTest.LITERAL_TWO, JoinedTest.LITERAL_FOUR)
).containsAll(
new ListOf<>(
JoinedTest.LITERAL_ONE,
JoinedTest.LITERAL_TWO
)
),
new IsTrue()
).affirm();
}
@Test
public void addAll() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.addAll(
new ListOf<>(
JoinedTest.LITERAL_THREE,
JoinedTest.LITERAL_FOUR
)
);
new Assertion<>(
"must be able to addAll elements specified",
joined,
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_ONE,
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE,
JoinedTest.LITERAL_FOUR
)
)
).affirm();
}
@Test
public void addAllInFront() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.addAll(
0,
new ListOf<>(
JoinedTest.LITERAL_THREE,
JoinedTest.LITERAL_FOUR
)
);
new Assertion<>(
"must be able to addAll elements in front",
joined,
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_THREE,
JoinedTest.LITERAL_FOUR,
JoinedTest.LITERAL_ONE,
JoinedTest.LITERAL_TWO
)
)
).affirm();
}
@Test
public void removeAll() {
final List<String> joined = new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_ONE,
JoinedTest.LITERAL_TWO
),
new ListOf<>(JoinedTest.LITERAL_THREE)
);
joined.removeAll(
new ListOf<>(
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
)
);
new Assertion<>(
"must be able to removeAll elements specified",
joined,
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_ONE
)
)
).affirm();
}
@Test
public void retainAll() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
)
);
joined.retainAll(
new ListOf<>(
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
)
);
new Assertion<>(
"must be able to retain all",
joined,
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
)
)
).affirm();
}
@Test
public void clear() {
final List<String> joined = new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
),
new ListOf<>(
JoinedTest.LITERAL_ONE
)
);
joined.clear();
new Assertion<>(
"must be able to clear",
joined.size(),
new IsEqual<>(0)
).affirm();
}
@Test
public void get() {
new Assertion<>(
"must get element",
new Joined<String>(
new ListOf<>(
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
),
new ListOf<>(
JoinedTest.LITERAL_ONE
)
).get(1),
new IsEqual<>(JoinedTest.LITERAL_THREE)
).affirm();
}
@Test
public void set() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.set(0, JoinedTest.LITERAL_THREE);
new Assertion<>(
"must be able to set element by specified index",
joined.get(0),
new IsEqual<>(JoinedTest.LITERAL_THREE)
).affirm();
}
@Test
public void addByIndex() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.add(0, JoinedTest.LITERAL_THREE);
new Assertion<>(
"must be able to add element by specified index",
joined.get(0),
new IsEqual<>(JoinedTest.LITERAL_THREE)
).affirm();
}
@Test
public void removeByIndex() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.remove(0);
new Assertion<>(
"must be able to remove element by specified index",
joined.get(0),
new IsEqual<>(JoinedTest.LITERAL_TWO)
).affirm();
}
@Test
public void removeByElement() {
final List<String> joined = new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO)
);
joined.remove(JoinedTest.LITERAL_ONE);
new Assertion<>(
"must be able to remove element by specified element",
joined.get(0),
new IsEqual<>(JoinedTest.LITERAL_TWO)
).affirm();
}
@Test(expected = IndexOutOfBoundsException.class)
public void listIteratorSecond() {
new Joined<Integer>().listIterator(66);
}
@Test
public void subList() {
new Assertion<>(
"must be able to to get sub list",
new Joined<String>(
new ListOf<>(JoinedTest.LITERAL_ONE),
new ListOf<>(JoinedTest.LITERAL_TWO, JoinedTest.LITERAL_THREE)
).subList(1, 3),
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
)
)
).affirm();
}
@Test
public void itemAndList() {
new Assertion<>(
"must be able to join element with a list",
new Joined<>(
JoinedTest.LITERAL_ONE,
new ListOf<>(JoinedTest.LITERAL_TWO, JoinedTest.LITERAL_THREE)
),
new IsEqual<>(
new ListOf<>(
JoinedTest.LITERAL_ONE,
JoinedTest.LITERAL_TWO,
JoinedTest.LITERAL_THREE
)
)
).affirm();
}
}
|
/**
* @file 2016/12/08
*/
package quiz.model;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import org.dbunit.database.DatabaseConfig;
import org.dbunit.database.DatabaseConnection;
import org.dbunit.database.IDatabaseConnection;
import org.dbunit.ext.mysql.MySqlDataTypeFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
* Unit tests for the MySQL-backed {@link SqlDataStore}.
*
* @author Yuka Yoshikawa
*
*/
public class SqlDataStoreTest {
// /** SqlDataStoreconnection */
// private java.lang.reflect.Field jdbc;
// /** SqlDataStorepassword */
// private java.lang.reflect.Field pass;
/** Reflective handle to SqlDataStore's Connection field. */
private java.lang.reflect.Field con;
// /** DBUnitConnection */
// private IDatabaseConnection dbconn;
//
// private File file;
/** MySQL-backed data store under test. */
private SqlDataStore sds;
/**
* Create the data store and inject a test DB connection via reflection.
*
* @throws java.lang.Exception if the fixture cannot be set up
*/
@Before
public void setUp() throws Exception {
sds = new SqlDataStore();
java.sql.Connection connection = getConnection();
// /** SqlDataStoreConnection */
// jdbc = SqlDataStore.class.getDeclaredField("jdbc");
// jdbc.setAccessible(true);
// jdbc.set(sds, "jdbc:mysql://localhost/test");
// /** SqlDataStorepassword */
// pass = SqlDataStore.class.getDeclaredField("pass");
// pass.setAccessible(true);
// pass.set(sds, "");
/** Inject the test connection into SqlDataStore's private "con" field. */
con = SqlDataStore.class.getDeclaredField("con");
con.setAccessible(true);
con.set(sds, connection);
IDatabaseConnection dbconn = new DatabaseConnection(connection);
DatabaseConfig config = dbconn.getConfig();
config.setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, new MySqlDataTypeFactory());
//
// QueryDataSet partialDataSet = new QueryDataSet(dbconn);
// partialDataSet.addTable("englishword");
// file = File.createTempFile("escape", ".xml");
// FlatXmlDataSet.write(partialDataSet, new FileOutputStream(file));
// /** XmlDB */
// IDataSet dataset = new FlatXmlDataSetBuilder().build(new
// File("testData.xml"));
// DatabaseOperation.CLEAN_INSERT.execute(dbconn, dataset);
}
/**
* Open a connection to the local test database.
*
* @return connection to the test DB
* @throws SQLException if the connection cannot be opened
* @throws ClassNotFoundException if the MySQL driver is not on the classpath
*/
public static java.sql.Connection getConnection() throws SQLException, ClassNotFoundException {
Class.forName("com.mysql.jdbc.Driver");
return DriverManager.getConnection("jdbc:mysql://localhost/test?useUnicode=true&characterEncoding=utf8", "root", "");
}
/**
* Close the data store after each test.
*
* @throws java.lang.Exception if cleanup fails
*/
@After
public void tearDown() throws Exception {
// dbconn = new DatabaseConnection(getConnection());
//
// IDataSet dataset = new FlatXmlDataSetBuilder().build(file);
// DatabaseOperation.CLEAN_INSERT.execute(dbconn, dataset);
if (con != null) {
sds.close();
}
}
/**
* Test for {@link quiz.model.SqlDataStore#open()}.
*/
@Ignore
public void testOpen() {
try {
sds.open();
} catch (Exception e) {
fail(e.getMessage());
}
}
/**
* Test for {@link quiz.model.SqlDataStore#close()}.
*/
@Test
public void testClose() {
try {
sds.close();
} catch (Exception e) {
fail(e.getMessage());
}
}
/**
* Test for {@link quiz.model.SqlDataStore#getAll()}.
*
* @note reads the englishword table of the test DB (expects 4 rows)
*/
@Test
public void testGetAll() {
try {
ArrayList<EnglishWordBean> list = sds.getAll();
assertThat(list.size(), is(4));
assertThat(list.get(3).getId(), is(4));
assertThat(list.get(0).getWord(), is("apple"));
assertThat(list.get(2).getPart(), is(Part.getPart("")));
} catch (Exception e) {
fail(e.getMessage());
}
}
/**
* Test for {@link quiz.model.SqlDataStore#insert(quiz.model.EnglishWordBean)}.
*
* @note inserts one row into the englishword table of the test DB
*/
@Test
public void testInsert() {
try {
EnglishWordBean bean = new EnglishWordBean();
bean.setWord("soccer");
bean.setPart(Part.getPart(""));
bean.setMean("");
sds.insert(bean);
} catch (Exception e) {
fail(e.getMessage());
}
}
/**
* {@link quiz.model.SqlDataStore#update(quiz.model.EnglishWordBean)}
*
*/
@Ignore
public void testUpdate() {
fail(""); // TODO
}
/**
* {@link quiz.model.SqlDataStore#delete(quiz.model.EnglishWordBean)}
*
*/
@Ignore
public void testDelete() {
fail(""); // TODO
}
/**
* Test for {@link quiz.model.SqlDataStore#searchWord(quiz.model.EnglishWordBean)}.
*
* @note looks up the word "cat" in the englishword table of the test DB
*/
@Test
public void testSearchWord() {
try {
EnglishWordBean bean = new EnglishWordBean();
bean.setWord("cat");
bean.setMean("");
assertNotNull(sds.searchWord(bean));
} catch (Exception e) {
fail(e.getMessage());
}
}
/**
* Test for {@link quiz.model.SqlDataStore#getRandom()}.
*
* @note fetches one random row from the englishword table of the test DB
*/
@Test
public void testGetRandom() {
try {
EnglishWordBean resultBean = sds.getRandom();
assertNotNull(resultBean);
} catch (Exception e) {
fail(e.getMessage());
}
}
}
|
// jTDS JDBC Driver for Microsoft SQL Server and Sybase
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package net.sourceforge.jtds.jdbc;
import java.math.BigDecimal;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import junit.framework.TestSuite;
import net.sourceforge.jtds.util.Logger;
/**
* @author Alin Sinpalean
* @version $Id: AsTest.java,v 1.9.2.1 2009-08-04 10:33:54 ickzon Exp $
*/
public class AsTest extends DatabaseTestCase {
public AsTest(String name) {
super(name);
}
public static void main(String args[]) {
Logger.setActive(true);
if (args.length > 0) {
junit.framework.TestSuite s = new TestSuite();
for (int i = 0; i < args.length; i++) {
s.addTest(new AsTest(args[i]));
}
junit.textui.TestRunner.run(s);
} else
junit.textui.TestRunner.run(AsTest.class);
}
/**
* Test for missing ResultSet, reported in support request #35.
*/
public void testProc0()
throws Exception
{
Statement stmt = con.createStatement();
dropProcedure( "#spTestProc0" );
stmt.executeUpdate( "create table #tableTestProc0 ( A varchar( 10 ) )" );
stmt.executeUpdate( "insert into #tableTestProc0 values( 'testval' )" );
stmt.executeUpdate( "create procedure #spTestProc0 as set nocount off select * into #tmp from #tableTestProc0 select * from #tmp" );
stmt.close();
CallableStatement cstmt = con.prepareCall( "#spTestProc0" );
assertFalse( cstmt.execute() );
assertEquals( 1, cstmt.getUpdateCount() );
// The JDBC-ODBC driver does not return update counts from stored
// procedures so we won't, either.
// SAfe Yes, we will. It seems like that's how it should work. The idea
// however is to only return valid update counts (e.g. not from
// SET, EXEC or such).
assertTrue( cstmt.getMoreResults() );
boolean passed = false;
ResultSet rs = cstmt.getResultSet();
while( rs.next() )
{
passed = true;
}
assertTrue( "Expecting at least one result row", passed );
assertTrue( !cstmt.getMoreResults() && cstmt.getUpdateCount() == -1 );
cstmt.close();
// stmt.executeQuery("execute spTestExec");
}
public void testProc1() throws Exception {
Statement stmt = con.createStatement();
dropProcedure("#spTestExec");
dropProcedure("#spTestExec2");
stmt.executeUpdate(" create procedure #spTestExec2 as " +
"select 'Did it work?' as Result");
stmt.executeUpdate("create procedure #spTestExec as " +
"set nocount off " +
"create table #tmp ( Result varchar(50) ) " +
"insert #tmp execute #spTestExec2 " +
"select * from #tmp");
stmt.close();
CallableStatement cstmt = con.prepareCall("#spTestExec");
assertFalse(cstmt.execute());
assertEquals(1, cstmt.getUpdateCount());
// The JDBC-ODBC driver does not return update counts from stored
// procedures so we won't, either.
// SAfe Yes, we will. It seems like that's how it should work. The idea
// however is to only return valid update counts (e.g. not from
// SET, EXEC or such).
assertTrue(cstmt.getMoreResults());
boolean passed = false;
ResultSet rs = cstmt.getResultSet();
while (rs.next()) {
passed = true;
}
assertTrue("Expecting at least one result row", passed);
assertTrue(!cstmt.getMoreResults() && cstmt.getUpdateCount() == -1);
cstmt.close();
// stmt.executeQuery("execute spTestExec");
}
public void testProc2() throws Exception {
Statement stmt = con.createStatement();
String sqlwithcount =
"create procedure #multi1withcount as " +
" set nocount off " +
" select 'a' " +
" select 'b' " +
" create table #multi1withcountt (A VARCHAR(20)) " +
" insert into #multi1withcountt VALUES ('a') " +
" insert into #multi1withcountt VALUES ('a') " +
" insert into #multi1withcountt VALUES ('a') " +
" select 'a' " +
" select 'b' ";
String sqlnocount =
"create procedure #multi1nocount as " +
" set nocount on " +
" select 'a' " +
" select 'b' " +
" create table #multi1nocountt (A VARCHAR(20)) " +
" insert into #multi1nocountt VALUES ('a') " +
" insert into #multi1nocountt VALUES ('a') " +
" insert into #multi1nocountt VALUES ('a') " +
" select 'a' " +
" select 'b' ";
dropProcedure("#multi1withcount");
dropProcedure("#multi1nocount");
stmt.executeUpdate(sqlwithcount);
stmt.executeUpdate(sqlnocount);
stmt.close();
CallableStatement cstmt = con.prepareCall("#multi1nocount");
assertTrue(cstmt.execute());
ResultSet rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("a"));
assertTrue(!rs.next());
assertTrue(cstmt.getMoreResults());
rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("b"));
assertTrue(!rs.next());
assertTrue(cstmt.getMoreResults());
rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(cstmt.getMoreResults());
rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(!cstmt.getMoreResults() && cstmt.getUpdateCount() == -1);
cstmt.close();
cstmt = con.prepareCall("#multi1withcount");
// The JDBC-ODBC driver does not return update counts from stored
// procedures so we won't, either.
// SAfe Yes, we will. It seems like that's how it should work. The idea
// however is to only return valid update counts (e.g. not from
// SET, EXEC or such).
assertTrue(cstmt.execute());
rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("a"));
assertTrue(!rs.next());
assertTrue(cstmt.getMoreResults());
rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("b"));
assertTrue(!rs.next());
assertTrue(!cstmt.getMoreResults() && cstmt.getUpdateCount() == 1); // insert
assertTrue(!cstmt.getMoreResults() && cstmt.getUpdateCount() == 1); // insert
assertTrue(!cstmt.getMoreResults() && cstmt.getUpdateCount() == 1); // insert
assertTrue(cstmt.getMoreResults()); // select
rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(cstmt.getMoreResults());
rs = cstmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(!cstmt.getMoreResults() && cstmt.getUpdateCount() == -1);
cstmt.close();
}
public void testBatch1() throws Exception {
Statement stmt = con.createStatement();
String sqlwithcount1 =
" set nocount off " +
" select 'a' " +
" select 'b' " +
" create table #multi2withcountt (A VARCHAR(20)) " +
" insert into #multi2withcountt VALUES ('a') " +
" insert into #multi2withcountt VALUES ('a') " +
" insert into #multi2withcountt VALUES ('a') " +
" select 'a' " +
" select 'b' " +
" drop table #multi2withcountt";
String sqlnocount1 =
" set nocount on " +
" select 'a' " +
" select 'b' " +
" create table #multi2nocountt (A VARCHAR(20)) " +
" insert into #multi2nocountt VALUES ('a') " +
" insert into #multi2nocountt VALUES ('a') " +
" insert into #multi2nocountt VALUES ('a') " +
" select 'a' " +
" select 'b' " +
" drop table #multi2nocountt";
assertTrue(stmt.execute(sqlwithcount1)); // set
ResultSet rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("a"));
assertTrue(!rs.next());
assertTrue(stmt.getMoreResults());
rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("b"));
assertTrue(!rs.next());
assertTrue(!stmt.getMoreResults() && stmt.getUpdateCount() == 1);
assertTrue(!stmt.getMoreResults() && stmt.getUpdateCount() == 1);
assertTrue(!stmt.getMoreResults() && stmt.getUpdateCount() == 1);
assertTrue(stmt.getMoreResults());
rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(stmt.getMoreResults());
rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(!stmt.getMoreResults() && stmt.getUpdateCount() == -1);
assertTrue(stmt.execute(sqlnocount1)); // set
rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("a"));
assertTrue(!rs.next());
assertTrue(stmt.getMoreResults());
rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(rs.getString(1).equals("b"));
assertTrue(!rs.next());
assertTrue(stmt.getMoreResults()); // select
rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(stmt.getMoreResults());
rs = stmt.getResultSet();
assertTrue(rs.next());
assertTrue(!rs.next());
assertTrue(!stmt.getMoreResults() && stmt.getUpdateCount() == -1);
stmt.close();
}
public void testBug457955() throws Exception {
Statement stmt = con.createStatement();
dropProcedure("#Bug457955");
stmt.executeUpdate(" create procedure #Bug457955 (@par1 VARCHAR(10)) as select @par1");
stmt.close();
String param = "123456789";
CallableStatement cstmt = con.prepareCall("exec #Bug457955 ?");
cstmt.setString(1, param);
cstmt.executeQuery();
cstmt.close();
}
public void testBugAttTest2() throws Exception {
String tabdef =
"CREATE TABLE #ICEributeTest_AttributeTest2( " +
" ICEobjectId NUMERIC(19) " +
" /*CONSTRAINT ICEributeTest_AttributeTest2_PKICEobjectId PRIMARY KEY */ " +
" , " +
" ICEtestShort INTEGER " +
" NULL, " +
" ICEtestFloat NUMERIC(28,10) " +
" NULL, " +
" ICEtestDecimal NUMERIC(28,10) " +
" NULL, " +
" ICEtestCharacter INTEGER " +
" NULL, " +
" ICEtestInteger INTEGER " +
" NULL, " +
" ICEtestString VARCHAR(20) " +
" NULL, " +
" ICEtestBoolean BIT " +
" NULL, " +
" ICEtestByte INTEGER " +
" NULL, " +
" ICEtestDouble NUMERIC(28,10) " +
" NULL, " +
" ICEtestLong NUMERIC(19) " +
" NULL, " +
" ICEtestCombined1 VARBINARY(8000) " +
" NULL, " +
" ICEtestDate DATETIME " +
" NULL, " +
" testCombined_testFloat NUMERIC(28,10) " +
" NULL, " +
" testCombined_testShort INTEGER " +
" NULL, " +
" testCombined_testDecimal NUMERIC(28,10) " +
" NULL, " +
" testCombined_testCharacter INTEGER " +
" NULL, " +
" testCombined_testInteger INTEGER " +
" NULL, " +
" testCombined_testString VARCHAR(50) " +
" NULL, " +
" testCombined_testBoolean BIT " +
" NULL, " +
" testCombined_testByte INTEGER " +
" NULL, " +
" testCombined_testDouble NUMERIC(28,10) " +
" NULL, " +
" testCombined_testLong NUMERIC(19) " +
" NULL, " +
" testCombined_testDate DATETIME " +
" NULL, " +
" ICEtestContainedArrays VARBINARY(8000) " +
" NULL, " +
" BSF_FILTER_ATTRIBUTE_NAME INTEGER " +
" NOT NULL, " +
" updateCount INTEGER " +
" NOT NULL " +
" ) ";
Statement stmt = con.createStatement();
dropTable("#ICEributeTest_AttributeTest2");
stmt.executeUpdate(tabdef);
stmt.close();
PreparedStatement istmt = con.prepareStatement(
"INSERT INTO #ICEributeTest_AttributeTest2 ("
+ "ICEobjectId,BSF_FILTER_ATTRIBUTE_NAME,ICEtestShort,ICEtestFloat,ICEtestDecimal,"
+ "ICEtestCharacter,ICEtestInteger,ICEtestString,ICEtestBoolean,ICEtestByte,"
+ "ICEtestDouble,ICEtestLong,ICEtestCombined1,ICEtestDate,testCombined_testFloat,"
+ "testCombined_testShort,testCombined_testDecimal,testCombined_testCharacter,testCombined_testInteger,testCombined_testString,"
+ "testCombined_testBoolean,testCombined_testByte,testCombined_testDouble,testCombined_testLong"
+ ",testCombined_testDate,ICEtestContainedArrays,updateCount ) "
+ "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
istmt.setLong(1, 650002);
istmt.setInt(2, -1461101755);
istmt.setNull(3, java.sql.Types.INTEGER);
istmt.setNull(4, java.sql.Types.REAL);
try {
istmt.setNull(5, java.sql.Types.NUMERIC);
} catch (java.sql.SQLException e) {
istmt.setNull(5, java.sql.Types.DECIMAL);
}
istmt.setNull(6, java.sql.Types.INTEGER);
istmt.setNull(7, java.sql.Types.INTEGER);
istmt.setNull(8, java.sql.Types.VARCHAR);
istmt.setNull(9, java.sql.Types.BIT);
istmt.setNull(10, java.sql.Types.INTEGER);
istmt.setNull(11, java.sql.Types.DOUBLE);
istmt.setNull(12, java.sql.Types.BIGINT);
istmt.setNull(13, java.sql.Types.LONGVARBINARY);
istmt.setNull(14, java.sql.Types.TIMESTAMP);
istmt.setNull(15, java.sql.Types.REAL);
istmt.setNull(16, java.sql.Types.INTEGER);
try {
istmt.setNull(17, java.sql.Types.NUMERIC);
} catch (java.sql.SQLException e) {
istmt.setNull(17, java.sql.Types.DECIMAL);
}
istmt.setNull(18, java.sql.Types.INTEGER);
istmt.setNull(19, java.sql.Types.INTEGER);
istmt.setNull(20, java.sql.Types.VARCHAR);
istmt.setNull(21, java.sql.Types.BIT);
istmt.setNull(22, java.sql.Types.INTEGER);
istmt.setNull(23, java.sql.Types.DOUBLE);
istmt.setNull(24, java.sql.Types.BIGINT);
istmt.setNull(25, java.sql.Types.TIMESTAMP);
istmt.setNull(26, java.sql.Types.LONGVARBINARY);
istmt.setInt(27, 1);
assertEquals(1, istmt.executeUpdate());
istmt.close();
}
public void testBigInt() throws Throwable {
// String crtab = "create table #testBigInt (a bigint)";
String crtab = "create table #testBigInt (a NUMERIC(19) NULL)";
dropTable("#testBigInt");
Statement stmt = con.createStatement();
stmt.executeUpdate(crtab);
stmt.close();
PreparedStatement pstmt = con.prepareStatement("insert into #testBigInt values (?)");
pstmt.setNull(1, java.sql.Types.BIGINT);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.setLong(1, 99999999999L);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.setLong(1, -99999999999L);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.setLong(1, 9999999999999L);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.setLong(1, -9999999999999L);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.setLong(1, 99999999999L);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.close();
}
public void testBoolean() throws Throwable {
// String crtab = "create table #testBigInt (a bigint)";
String crtab = "create table #testBit (a BIT NULL)";
dropTable("#testBit");
Statement stmt = con.createStatement();
stmt.executeUpdate(crtab);
stmt.executeUpdate("insert into #testBit values (NULL)");
stmt.executeUpdate("insert into #testBit values (0)");
stmt.executeUpdate("insert into #testBit values (1)");
ResultSet rs = stmt.executeQuery("select * from #testBit where a is NULL");
rs.next();
rs.getBoolean(1);
rs = stmt.executeQuery("select * from #testBit where a = 0");
rs.next();
rs.getBoolean(1);
rs = stmt.executeQuery("select * from #testBit where a = 1");
rs.next();
rs.getBoolean(1);
stmt.close();
PreparedStatement pstmt = con.prepareStatement("insert into #testBit values (?)");
pstmt.setBoolean(1, true);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.setBoolean(1, false);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.setNull(1, java.sql.Types.BIT);
assertTrue(!pstmt.execute());
assertTrue(pstmt.getUpdateCount() == 1);
pstmt.close();
}
public void testBinary() throws Throwable {
String crtab = "create table #testBinary (a varbinary(8000))";
dropTable("#testBinary");
byte[] ba = new byte[8000];
for (int i = 0; i < ba.length; i++) {
ba[i] = (byte) (i % 256);
}
Statement stmt = con.createStatement();
stmt.executeUpdate(crtab);
stmt.close();
PreparedStatement pstmt = con.prepareStatement("insert into #testBinary values (?)");
pstmt.setObject(1, ba);
pstmt.execute();
pstmt.close();
}
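/**
* Round-trip the given epoch-millisecond value through the #testTimestamp
* DATETIME column and assert that the stored timestamp equals the original.
*/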
private void checkTime(long time) throws Throwable {
PreparedStatement pstmt = con.prepareStatement("insert into #testTimestamp values (?)");
java.sql.Timestamp ts = new java.sql.Timestamp(time);
pstmt.setTimestamp(1, ts);
pstmt.executeUpdate();
pstmt.close();
Statement stmt = con.createStatement();
ResultSet rs = stmt.executeQuery("select * from #testTimestamp");
rs.next();
java.sql.Timestamp tsres = rs.getTimestamp(1);
assertTrue(ts.equals(tsres));
stmt.executeUpdate("truncate table #testTimestamp");
stmt.close();
}
public void testSpecTime() throws Throwable {
String crtab = "create table #testTimestamp (a datetime)";
dropTable("#testTimestamp");
Statement stmt = con.createStatement();
stmt.executeUpdate(crtab);
stmt.close();
checkTime(92001000);
checkTime(4200000); // sent in 4 Bytes
checkTime(4201000);
checkTime(1234567000);
checkTime(420000000000L); // sent in 4 Bytes
checkTime(840000000000L);
}
public void testBigDecimal() throws Throwable {
String crtab = "create table #testBigDecimal (a decimal(28,10) NULL)";
dropTable("#testBigDecimal");
Statement stmt = con.createStatement();
stmt.executeUpdate(crtab);
stmt.close();
PreparedStatement pstmt = con.prepareStatement("insert into #testBigDecimal values (?)");
pstmt.setObject(1, new BigDecimal("10.200"));
pstmt.execute();
// FIXME With Sybase this should probably throw a DataTruncation, not just a plain SQLException
pstmt.setObject(1, new BigDecimal(10.200));
pstmt.execute();
pstmt.setObject(1, null);
pstmt.execute();
pstmt.setObject(1, new Integer(20));
pstmt.execute();
pstmt.setObject(1, new Double(2.10));
pstmt.execute();
pstmt.setObject(1, new BigDecimal(-10.200));
pstmt.execute();
pstmt.setObject(1, new Long(200));
pstmt.execute();
pstmt.setByte(1, (byte) 1);
pstmt.execute();
pstmt.setInt(1, 200);
pstmt.execute();
pstmt.setLong(1, 200L);
pstmt.execute();
pstmt.setFloat(1, (float) 1.1);
pstmt.execute();
pstmt.setDouble(1, 1.1);
pstmt.execute();
pstmt.close();
}
}
|
package wycs.transforms;
import static wycc.lang.SyntaxError.*;
import static wycs.solver.Solver.*;
import java.io.IOException;
import java.math.BigInteger;
import java.util.*;
import wyautl.core.*;
import wyautl.io.PrettyAutomataWriter;
import wyautl.rw.*;
import wyautl.util.BigRational;
import wybs.lang.Builder;
import wycc.lang.SyntacticElement;
import wycc.lang.Transform;
import wycc.util.Logger;
import wycc.util.Pair;
import wycc.util.Triple;
import wycs.builders.Wyal2WycsBuilder;
import wycs.core.Code;
import wycs.core.NormalForms;
import wycs.core.SemanticType;
import wycs.core.Types;
import wycs.core.Value;
import wycs.core.WycsFile;
import wycs.io.WycsFilePrinter;
import wycs.solver.Solver;
import wycs.solver.SolverUtil;
import wyfs.util.Trie;
/**
* Responsible for converting a <code>WycsFile</code> into an automaton that can
* then be simplified to test for satisfiability. The key challenge here is to
* break down the rich language of expressions described by the
* <code>WycsFile</code> format, such that they can be handled effectively by
* the <code>Solver</code>.
*
* @author David J. Pearce
*
*/
public class VerificationCheck implements Transform<WycsFile> {
private enum RewriteMode { SIMPLE, STATICDISPATCH, GLOBALDISPATCH, RANDOM };
/**
* Determines whether this transform is enabled or not.
*/
private boolean enabled = getEnable();
/**
* Determines whether debugging is enabled or not
*/
private boolean debug = getDebug();
/**
* Determine what rewriter to use.
*/
private RewriteMode rwMode = RewriteMode.STATICDISPATCH;
/**
* Determine the maximum number of rewrite steps.
*/
private int maxSteps = getMaxsteps();
/**
* The rewrite engine used to actually check assertions are true or false.
*/
private Rewriter rewriter;
private final Wyal2WycsBuilder builder;
private String filename;
// Constructor(s)
public VerificationCheck(Builder builder) {
this.builder = (Wyal2WycsBuilder) builder;
}
// Configuration Methods
public static String describeEnable() {
return "Enable/disable verification";
}
public static boolean getEnable() {
return true; // default value
}
public void setEnable(boolean flag) {
this.enabled = flag;
}
public static String describeDebug() {
return "Enable/disable debugging information";
}
public static boolean getDebug() {
return false; // default value
}
public void setDebug(boolean flag) {
this.debug = flag;
}
public static String describeRwMode() {
return "Set the rewrite mode to use (simple or static-dispatch)";
}
public static String getRwmode() {
return "staticdispatch"; // default value
}
public void setRwmode(String mode) {
for(RewriteMode rw : RewriteMode.values()) {
if(mode.equals(rw.name().toLowerCase())) {
this.rwMode = rw;
return;
}
}
throw new RuntimeException("unknown rewrite mode: " + mode);
}
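// For example, setRwmode("globaldispatch") selects RewriteMode.GLOBALDISPATCH;
// any string that does not match a mode name (compared case-insensitively)
// results in a RuntimeException.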
public static String describeMaxSteps() {
return "Limits the number of rewrite steps permitted";
}
public static int getMaxsteps() {
return 100000; // default value
}
public void setMaxsteps(int limit) {
this.maxSteps = limit;
}
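// Illustrative configuration sketch (not part of the original transform): a
// client holding a Wyal2WycsBuilder could configure and run this check as
// follows. The "builder" and "wycsFile" variable names are assumptions made
// purely for illustration.
//
// VerificationCheck check = new VerificationCheck(builder);
// check.setEnable(true);
// check.setRwmode("staticdispatch");
// check.setMaxsteps(50000);
// check.apply(wycsFile);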
// Apply Method
/**
* Verify the given WycsFile, checking each assertion it contains. An
* AssertionFailure is thrown for the first assertion which cannot be
* shown to hold.
*
* @param wf the file whose assertions should be verified
*/
public void apply(WycsFile wf) {
if (enabled) {
this.filename = wf.filename();
// First, construct a fresh rewriter for this file.
switch(rwMode) {
case STATICDISPATCH:
this.rewriter = new StaticDispatchRewriter(Solver.inferences,Solver.reductions,Solver.SCHEMA, maxSteps);
break;
case GLOBALDISPATCH:
// NOTE: I don't supply a max steps value here because the
// default value would be way too small for the simple rewriter.
this.rewriter = new GlobalDispatchRewriter(Solver.inferences,Solver.reductions,Solver.SCHEMA);
break;
case RANDOM:
// NOTE: I don't supply a max steps value here because the
// default value would be way too small for the simple rewriter.
this.rewriter = new RandomRewriter(Solver.inferences,Solver.reductions,Solver.SCHEMA);
break;
default:
// NOTE: I don't supply a max steps value here because the
// default value would be way too small for the simple rewriter.
this.rewriter = new SimpleRewriter(Solver.inferences,Solver.reductions,Solver.SCHEMA);
break;
}
// Second, traverse each statement and verify any assertions we
// encounter.
List<WycsFile.Declaration> statements = wf.declarations();
int count = 0;
for (int i = 0; i != statements.size(); ++i) {
WycsFile.Declaration stmt = statements.get(i);
if (stmt instanceof WycsFile.Assert) {
checkValid((WycsFile.Assert) stmt, ++count);
} else if (stmt instanceof WycsFile.Function
|| stmt instanceof WycsFile.Macro) {
// TODO: we could try to verify that the function makes
// sense (i.e. that its specification is satisfiable for at
// least one input).
} else {
internalFailure("unknown statement encountered " + stmt,
filename, stmt);
}
}
}
}
private void checkValid(WycsFile.Assert stmt, int number) {
Runtime runtime = Runtime.getRuntime();
long startTime = System.currentTimeMillis();
long startMemory = runtime.freeMemory();
Automaton automaton = new Automaton();
Automaton original = null;
Code neg = Code.Unary(SemanticType.Bool,
Code.Op.NOT, stmt.condition);
// The following conversion is potentially very expensive, but is
// currently necessary for the instantiate axioms phase.
Code nnf = NormalForms.negationNormalForm(neg);
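// For example, negation normal form pushes negations inwards, so an
// asserted condition (A && B) is negated to !(A && B) and then rewritten
// to (!A || !B) before the axiom instantiation phase below.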
Code vc = instantiateAxioms(nnf);
int assertion = translate(vc,automaton,new HashMap<String,Integer>());
automaton.setRoot(0, assertion);
automaton.minimise();
automaton.compact();
if (debug) {
ArrayList<WycsFile.Declaration> tmpDecls = new ArrayList<WycsFile.Declaration>();
tmpDecls.add(new WycsFile.Assert("", neg));
WycsFile tmp = new WycsFile(Trie.ROOT,filename, tmpDecls);
try {
new WycsFilePrinter(System.err).write(tmp);
} catch(IOException e) {}
original = new Automaton(automaton);
//debug(original);
}
rewriter.resetStats();
rewriter.apply(automaton);
if(!automaton.get(automaton.getRoot(0)).equals(Solver.False)) {
String msg = stmt.message;
msg = msg == null ? "assertion failure" : msg;
throw new AssertionFailure(msg,stmt,rewriter,automaton,original);
}
long endTime = System.currentTimeMillis();
builder.logTimedMessage("[" + filename + "] Verified assertion #" + number,
endTime - startTime, startMemory - runtime.freeMemory());
}
private int translate(Code expr, Automaton automaton, HashMap<String,Integer> environment) {
int r;
if(expr instanceof Code.Constant) {
r = translate((Code.Constant) expr,automaton,environment);
} else if(expr instanceof Code.Variable) {
r = translate((Code.Variable) expr,automaton,environment);
} else if(expr instanceof Code.Binary) {
r = translate((Code.Binary) expr,automaton,environment);
} else if(expr instanceof Code.Unary) {
r = translate((Code.Unary) expr,automaton,environment);
} else if(expr instanceof Code.Nary) {
r = translate((Code.Nary) expr,automaton,environment);
} else if(expr instanceof Code.Load) {
r = translate((Code.Load) expr,automaton,environment);
} else if(expr instanceof Code.Quantifier) {
r = translate((Code.Quantifier) expr,automaton,environment);
} else if(expr instanceof Code.FunCall) {
r = translate((Code.FunCall) expr,automaton,environment);
} else {
internalFailure("unknown: " + expr.getClass().getName(),
filename, expr);
return -1; // dead code
}
//debug(automaton,r);
return r;
}
private int translate(Code.Constant expr, Automaton automaton, HashMap<String,Integer> environment) {
return convert(expr.value,expr,automaton);
}
private int translate(Code.Variable code, Automaton automaton, HashMap<String,Integer> environment) {
if(code.operands.length > 0) {
throw new RuntimeException("need to add support for variables with sub-components");
}
// TODO: just use an integer for variables directly
String name = "r" + code.index;
Integer idx = environment.get(name);
// FIXME: need to handle code.operands as well!
if(idx == null) {
// FIXME: this is a hack to work around modified operands after a
// loop.
return Var(automaton,name);
} else {
return idx;
}
}
private int translate(Code.Binary code, Automaton automaton, HashMap<String,Integer> environment) {
int lhs = translate(code.operands[0],automaton,environment);
int rhs = translate(code.operands[1],automaton,environment);
int type = convert(automaton,code.type);
switch(code.opcode) {
case ADD:
return SolverUtil.Add(automaton,lhs,rhs);
case SUB:
return SolverUtil.Sub(automaton,lhs,rhs);
case MUL:
return SolverUtil.Mul(automaton, lhs, rhs);
case DIV:
return SolverUtil.Div(automaton, lhs, rhs);
case REM:
return automaton.add(False);
case EQ:
return SolverUtil.Equals(automaton, type, lhs, rhs);
case NEQ:
return Not(automaton, SolverUtil.Equals(automaton, type, lhs, rhs));
case LT:
return SolverUtil.LessThan(automaton, type, lhs, rhs);
case LTEQ:
return SolverUtil.LessThanEq(automaton, type, lhs, rhs);
case IN:
return SubsetEq(automaton, type, Set(automaton, lhs), rhs);
case SUBSET:
return And(automaton,
SubsetEq(automaton, type, lhs, rhs),
Not(automaton, SolverUtil.Equals(automaton, type, lhs, rhs)));
case SUBSETEQ:
return SubsetEq(automaton, type, lhs, rhs);
}
internalFailure("unknown binary bytecode encountered (" + code + ")",
filename, code);
return -1;
}
private int translate(Code.Unary code, Automaton automaton, HashMap<String,Integer> environment) {
int e = translate(code.operands[0],automaton,environment);
switch(code.opcode) {
case NOT:
return Not(automaton, e);
case NEG:
return SolverUtil.Neg(automaton, e);
case LENGTH:
return LengthOf(automaton, e);
}
internalFailure("unknown unary bytecode encountered (" + code + ")",
filename, code);
return -1;
}
private int translate(Code.Nary code, Automaton automaton, HashMap<String,Integer> environment) {
Code[] operands = code.operands;
int[] es = new int[operands.length];
for(int i=0;i!=es.length;++i) {
es[i] = translate(operands[i],automaton,environment);
}
switch(code.opcode) {
case AND:
return And(automaton,es);
case OR:
return Or(automaton,es);
case SET:
return Set(automaton,es);
case TUPLE:
return Tuple(automaton,es);
}
internalFailure("unknown nary expression encountered (" + code + ")",
filename, code);
return -1;
}
private int translate(Code.Load code, Automaton automaton, HashMap<String,Integer> environment) {
int e = translate(code.operands[0],automaton,environment);
int i = automaton.add(new Automaton.Int(code.index));
return Solver.Load(automaton,e,i);
}
private int translate(Code.FunCall code, Automaton automaton,
HashMap<String, Integer> environment) {
// uninterpreted function call
int argument = translate(code.operands[0], automaton, environment);
int[] es = new int[] {
automaton.add(new Automaton.Strung(code.nid.toString())),
argument };
return Fn(automaton, es);
}
private int translate(Code.Quantifier code, Automaton automaton, HashMap<String,Integer> environment) {
HashMap<String,Integer> nEnvironment = new HashMap<String,Integer>(environment);
Pair<SemanticType,Integer>[] variables = code.types;
int[] vars = new int[variables.length];
for (int i = 0; i != variables.length; ++i) {
Pair<SemanticType,Integer> p = variables[i];
SemanticType type = p.first();
String var = "r" + p.second();
int varIdx = Var(automaton, var);
nEnvironment.put(var, varIdx);
int srcIdx;
// FIXME: generate actual type of variable here
srcIdx = automaton.add(AnyT);
vars[i] = automaton.add(new Automaton.List(varIdx, srcIdx));
}
int avars = automaton.add(new Automaton.Set(vars));
if(code.opcode == Code.Op.FORALL) {
return ForAll(automaton, avars, translate(code.operands[0], automaton, nEnvironment));
} else {
return Exists(automaton, avars, translate(code.operands[0], automaton, nEnvironment));
}
}
/**
* Convert between a WYIL value and a WYRL value. Ideally the two
* representations would be unified, which would make this conversion
* unnecessary.
*
* @param value the value to convert
* @return the automaton state representing the converted value
*/
private int convert(Value value, SyntacticElement element, Automaton automaton) {
if (value instanceof Value.Bool) {
Value.Bool b = (Value.Bool) value;
return b.value ? automaton.add(True) : automaton.add(False);
} else if (value instanceof Value.Integer) {
Value.Integer v = (Value.Integer) value;
return Num(automaton , BigRational.valueOf(v.value));
} else if (value instanceof Value.Decimal) {
Value.Decimal v = (Value.Decimal) value;
return Num(automaton, new BigRational(v.value));
} else if (value instanceof Value.String) {
Value.String v = (Value.String) value;
return Solver.String(automaton,v.value);
} else if (value instanceof Value.Set) {
Value.Set vs = (Value.Set) value;
int[] vals = new int[vs.values.size()];
int i = 0;
for (Value c : vs.values) {
vals[i++] = convert(c,element,automaton);
}
return Set(automaton , vals);
} else if (value instanceof Value.Tuple) {
Value.Tuple vt = (Value.Tuple) value;
int[] vals = new int[vt.values.size()];
for (int i = 0; i != vals.length; ++i) {
vals[i] = convert(vt.values.get(i),element,automaton);
}
return Tuple(automaton , vals);
} else {
internalFailure("unknown value encountered (" + value + ", " + value.getClass().getName() + ")",
filename,element);
return -1;
}
}
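// For illustration: under this conversion a Value.Bool maps to the solver's
// True/False states, a Value.Integer or Value.Decimal maps to a Num state,
// and composite values (sets and tuples) are converted element-wise into
// Set and Tuple states respectively.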
public static int convert(Automaton automaton, SemanticType type) {
Automaton type_automaton = type.automaton();
// The following is important to make sure that the type is in minimised
// form before verification begins. This firstly reduces the amount of
// work during verification, and also allows the functions in
// SolverUtils to work properly.
StaticDispatchRewriter rewriter = new StaticDispatchRewriter(
Types.inferences, Types.reductions, Types.SCHEMA);
rewriter.apply(type_automaton);
return automaton.addAll(type_automaton.getRoot(0), type_automaton);
}
public static void debug(Automaton automaton) {
try {
// System.out.println(automaton);
PrettyAutomataWriter writer = new PrettyAutomataWriter(System.out,
SCHEMA, "Or", "And");
writer.write(automaton);
writer.flush();
} catch(IOException e) {
System.out.println("I/O Exception - " + e);
}
}
public static class AssertionFailure extends RuntimeException {
private final WycsFile.Assert assertion;
private final Rewriter rewriter;
private final Automaton reduced;
private final Automaton original;
public AssertionFailure(String msg, WycsFile.Assert assertion,
Rewriter rewriter, Automaton reduced, Automaton original) {
super(msg);
this.assertion = assertion;
this.rewriter = rewriter;
this.reduced = reduced;
this.original = original;
}
public WycsFile.Assert assertion() {
return assertion;
}
public Rewriter rewriter() {
return rewriter;
}
public Automaton reduction() {
return reduced;
}
public Automaton original() {
return original;
}
}
// Axiom Instantiation
/**
* Blindly instantiate all axioms. Note that this function assumes the
* verification condition has already been negated for
* proof-by-contradiction and converted into Negation Normal Form.
*
* @param condition
* Condition over which all axioms should be instantiated.
* @return the condition conjoined with any instantiated axioms.
*/
public Code instantiateAxioms(Code condition) {
if (condition instanceof Code.Variable || condition instanceof Code.Constant) {
// do nothing
return condition;
} else if (condition instanceof Code.Unary) {
return instantiateAxioms((Code.Unary)condition);
} else if (condition instanceof Code.Binary) {
return instantiateAxioms((Code.Binary)condition);
} else if (condition instanceof Code.Nary) {
return instantiateAxioms((Code.Nary)condition);
} else if (condition instanceof Code.Quantifier) {
return instantiateAxioms((Code.Quantifier)condition);
} else if (condition instanceof Code.FunCall) {
return instantiateAxioms((Code.FunCall)condition);
} else if (condition instanceof Code.Load) {
return instantiateAxioms((Code.Load)condition);
} else {
internalFailure("invalid boolean expression encountered (" + condition
+ ")", filename, condition);
return null;
}
}
private Code instantiateAxioms(Code.Unary condition) {
switch(condition.opcode) {
case NOT:
return Code.Unary(condition.type, condition.opcode,
instantiateAxioms(condition.operands[0]), condition.attributes());
default:
internalFailure("invalid boolean expression encountered (" + condition
+ ")", filename, condition);
return null;
}
}
private Code instantiateAxioms(Code.Binary condition) {
switch (condition.opcode) {
case EQ:
case NEQ:
case LT:
case LTEQ:
case IN:
case SUBSET:
case SUBSETEQ: {
ArrayList<Code> axioms = new ArrayList<Code>();
instantiateFromExpression(condition, axioms);
return and(axioms,condition);
}
default:
internalFailure("invalid boolean expression encountered (" + condition
+ ")", filename, condition);
return null;
}
}
private Code instantiateAxioms(Code.Nary condition) {
switch(condition.opcode) {
case AND:
case OR: {
Code[] e_operands = new Code[condition.operands.length];
for(int i=0;i!=e_operands.length;++i) {
e_operands[i] = instantiateAxioms(condition.operands[i]);
}
return Code.Nary(condition.type, condition.opcode, e_operands, condition.attributes());
}
default:
internalFailure("invalid boolean expression encountered (" + condition
+ ")", filename, condition);
return null;
}
}
private Code instantiateAxioms(Code.Quantifier condition) {
return Code.Quantifier(condition.type, condition.opcode,
instantiateAxioms(condition.operands[0]), condition.types, condition.attributes());
}
private Code instantiateAxioms(Code.FunCall condition) {
ArrayList<Code> axioms = new ArrayList<Code>();
try {
WycsFile module = builder.getModule(condition.nid.module());
// module should not be null if TypePropagation has already passed.
Object d = module.declaration(condition.nid.name());
if(d instanceof WycsFile.Function) {
WycsFile.Function fn = (WycsFile.Function) d;
if(fn.constraint != null) {
// There are some axioms we can instantiate. First, we need to
// construct the generic binding for this function.
HashMap<String,SemanticType> generics = buildGenericBinding(fn.type.generics(),condition.type.generics());
HashMap<Integer,Code> binding = new HashMap<Integer,Code>();
binding.put(1, condition.operands[0]);
binding.put(0, condition);
axioms.add(fn.constraint.substitute(binding).instantiate(generics));
}
} else if(d instanceof WycsFile.Macro){
// we can ignore macros, because they are inlined separately by
// MacroExpansion.
} else {
internalFailure("cannot resolve as function or macro call",
filename, condition);
}
} catch(Exception ex) {
internalFailure(ex.getMessage(), filename, condition, ex);
}
instantiateFromExpression(condition.operands[0], axioms);
return and(axioms,condition);
}
private HashMap<String, SemanticType> buildGenericBinding(
SemanticType[] from, SemanticType[] to) {
HashMap<String, SemanticType> binding = new HashMap<String, SemanticType>();
for (int i = 0; i != to.length; ++i) {
SemanticType.Var v = (SemanticType.Var) from[i];
binding.put(v.name(), to[i]);
}
return binding;
}
private Code instantiateAxioms(Code.Load condition) {
return Code.Load(condition.type, instantiateAxioms(condition.operands[0]), condition.index,
condition.attributes());
}
private void instantiateFromExpression(Code expression, ArrayList<Code> axioms) {
if (expression instanceof Code.Variable || expression instanceof Code.Constant) {
// do nothing
} else if (expression instanceof Code.Unary) {
instantiateFromExpression((Code.Unary)expression,axioms);
} else if (expression instanceof Code.Binary) {
instantiateFromExpression((Code.Binary)expression,axioms);
} else if (expression instanceof Code.Nary) {
instantiateFromExpression((Code.Nary)expression,axioms);
} else if (expression instanceof Code.Load) {
instantiateFromExpression((Code.Load)expression,axioms);
} else if (expression instanceof Code.FunCall) {
instantiateFromExpression((Code.FunCall)expression,axioms);
} else {
internalFailure("invalid expression encountered (" + expression
+ ", " + expression.getClass().getName() + ")", filename, expression);
}
}
private void instantiateFromExpression(Code.Unary expression, ArrayList<Code> axioms) {
instantiateFromExpression(expression.operands[0],axioms);
if(expression.opcode == Code.Op.LENGTH) {
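// e.g. for an occurrence of |xs| this instantiates the axiom 0 <= |xs|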
Code lez = Code.Binary(SemanticType.Int, Code.Op.LTEQ,
Code.Constant(Value.Integer(BigInteger.ZERO)), expression);
axioms.add(lez);
}
}
private void instantiateFromExpression(Code.Binary expression, ArrayList<Code> axioms) {
instantiateFromExpression(expression.operands[0],axioms);
instantiateFromExpression(expression.operands[1],axioms);
}
private void instantiateFromExpression(Code.Nary expression, ArrayList<Code> axioms) {
Code[] e_operands = expression.operands;
for(int i=0;i!=e_operands.length;++i) {
instantiateFromExpression(e_operands[i],axioms);
}
}
private void instantiateFromExpression(Code.Load expression, ArrayList<Code> axioms) {
instantiateFromExpression(expression.operands[0],axioms);
}
private void instantiateFromExpression(Code.FunCall expression, ArrayList<Code> axioms) {
instantiateFromExpression(expression.operands[0], axioms);
try {
WycsFile module = builder.getModule(expression.nid.module());
// module should not be null if TypePropagation has already passed.
WycsFile.Function fn = module.declaration(expression.nid.name(),
WycsFile.Function.class);
if (fn.constraint != null) {
// There are some axioms we can instantiate. First, we need to
// construct the generic binding for this function.
HashMap<String, SemanticType> generics = buildGenericBinding(
fn.type.generics(), expression.type.generics());
HashMap<Integer, Code> binding = new HashMap<Integer, Code>();
binding.put(1, expression.operands[0]);
binding.put(0, expression);
axioms.add(fn.constraint.substitute(binding).instantiate(
generics));
}
} catch (Exception ex) {
internalFailure(ex.getMessage(), filename, expression, ex);
}
}
private Code and(ArrayList<Code> axioms, Code c) {
if(axioms.size() == 0) {
return c;
} else {
Code[] clauses = new Code[axioms.size()+1];
clauses[0] = c;
for(int i=0;i!=axioms.size();++i) {
clauses[i+1] = axioms.get(i);
}
return Code.Nary(SemanticType.Bool,Code.Op.AND,clauses);
}
}
}
|
package nars.util.java;
import com.gs.collections.api.map.MutableMap;
import com.gs.collections.impl.bimap.mutable.HashBiMap;
import com.gs.collections.impl.map.mutable.UnifiedMap;
import javassist.util.proxy.MethodHandler;
import javassist.util.proxy.ProxyFactory;
import javassist.util.proxy.ProxyObject;
import nars.Global;
import nars.NAR;
import nars.Symbols;
import nars.nal.nal1.Inheritance;
import nars.nal.nal2.Instance;
import nars.nal.nal2.Similarity;
import nars.nal.nal4.Product;
import nars.nal.nal8.Operation;
import nars.nal.nal8.Operator;
import nars.term.Atom;
import nars.term.Term;
import nars.term.Variable;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
public class NALObjects extends DefaultTermizer implements MethodHandler, Termizer {
private final NAR nar;
final MutableMap<Class, ProxyFactory> proxyCache = new UnifiedMap().asSynchronized();
// final Map<Object, Term> instances = new com.google.common.collect.MapMaker()
// .concurrencyLevel(4).weakKeys().makeMap();
final HashBiMap<Object,Term> instances = new HashBiMap<Object,Term>();
final Map<Method,MethodOperator> methodOps = Global.newHashMap();
/*
* Acceleration mechanism: we assume that Method.toString() is slower than Method.hashCode()
*
* Assumption: hashes of Object methods are always the same for all Classes
*/
public static Set<Integer> methodExclusionsHashes = null;
public static Set<String> methodExclusions = new HashSet<String>() {{
add("hashCode");
add("notify");
add("notifyAll");
add("wait");
add("finalize");
}};
private AtomicBoolean goalInvoke = new AtomicBoolean(true);
public NALObjects(NAR n) {
this.nar = n;
}
@Override
protected void onClassInPackage(Term classs, Atom packagge) {
nar.believe(Inheritance.make(classs, packagge));
}
@Override
protected void onInstanceOfClass(Term oterm, Term clas) {
nar.believe(Instance.make(oterm, clas));
}
@Override
protected void onInstanceChange(Term oterm, Term prevOterm) {
nar.believe(Similarity.make(oterm, prevOterm));
}
AtomicBoolean lock = new AtomicBoolean(false);
/** when a proxy wrapped instance method is called, this can
* parametrically intercept arguments and return value
* and input them to the NAL in narsese.
*/
@Override
public Object invoke(Object object, Method overridden, Method forwarder,
Object[] args) throws Throwable {
initializeExcludedMethodHashesIfNecessary();
Object result = forwarder.invoke(object, args);
if (methodExclusionsHashes.contains(overridden.hashCode()) && methodExclusions.contains(overridden.getName()))
return result;
if (!lock.compareAndSet(false,true)) {
return result;
}
final Term instance = term(object);
final Term[] argterm = Stream.of(args).map(x -> term(x)).toArray(n -> new Term[n]);
Term effect;
//String opName =
final Operator op = Operator.the(
overridden.getDeclaringClass().getSimpleName() + "_" + overridden.getName()
);
Term[] instancePlusArgs = new Term[argterm.length+2];
instancePlusArgs[0] = instance;
System.arraycopy(argterm, 0, instancePlusArgs, 1, argterm.length);
instancePlusArgs[instancePlusArgs.length-1] = Variable.the(Symbols.VAR_DEPENDENT + "1");
nar.input(nar.memory.newTask(
Operation.make(Product.make(instancePlusArgs), op )
).goal().present().truth(1f, 0.9f).get());
if (result!=null) {
effect = term(result);
}
else {
effect = VOID;
}
//TODO use task of callee as Parent task, if self-invoked
nar.input(nar.memory.newTask(
Operation.result(op, Product.make(instancePlusArgs), effect )
).belief().present().truth(1f, 0.9f).get());
lock.set(false);
return result;
}
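// Informal summary of the flow above: each intercepted call first inputs a
// goal task describing the invocation (an Operation over the instance, the
// arguments and a dependent variable standing for the result), and then a
// belief task recording the observed result via Operation.result. Both are
// given full frequency and 0.9 confidence, matching the truth(1f, 0.9f)
// calls above.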
// //TODO use a generic Consumer<Task> for recipient/recipients of these
// public final NAR nar;
// public NALProxyMethodHandler(NAR n /* options */) {
// private final List<NALObjMethodHandler> methodHandlers = Global.newArrayList();
// public NALObject() {
// public NALObject add(NALObjMethodHandler n) {
// methodHandlers.add(n);
// return this;
/** the id will be the atom term label for the created instance */
public <T> T build(String id, Class<T> classs) throws Exception {
initializeExcludedMethodHashesIfNecessary();
ProxyFactory factory = proxyCache.getIfAbsentPut(classs, () -> new ProxyFactory());
factory.setSuperclass(classs);
Class clazz = factory.createClass();
Object instance = clazz.newInstance();
((ProxyObject) instance).setHandler(this);
instances.put(instance, Atom.the(id));
objects.put(instance, Atom.the(id));
//add operators for public methods
for (Method m : classs.getMethods()) {
if (!(methodExclusionsHashes.contains(m.hashCode()) && methodExclusions.contains(m.getName())) && Modifier.isPublic(m.getModifiers())) {
MethodOperator op = methodOps.computeIfAbsent(m, _m -> {
MethodOperator mo = new MethodOperator(goalInvoke, this, m);
nar.on(mo);
return mo;
});
}
}
return (T) instance;
}
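// Usage sketch (illustrative only; the class and id names below are
// assumptions, and build() may throw):
//
// NALObjects objs = new NALObjects(nar);
// SomeClass proxied = objs.build("obj1", SomeClass.class);
// proxied.someMethod(42); // invocations are mirrored into the NAR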
public void setGoalInvoke(boolean b) {
this.goalInvoke.set(b);
}
// @Override
// public Term term(Object o) {
// Term i = instances.get(o);
// if (i!=null)
// return i;
// return super.term(o);
private static void initializeExcludedMethodHashesIfNecessary() {
if( methodExclusionsHashes != null ) {
return;
}
methodExclusionsHashes = new HashSet<>();
for( final Method iterationMethod : Object.class.getMethods() ) {
if( methodExclusions.contains(iterationMethod.getName()) ) {
methodExclusionsHashes.add(iterationMethod.hashCode());
}
}
}
}
|
package edu.msu.nscl.olog;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.io.UnsupportedEncodingException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.ws.rs.core.MultivaluedMap;
import javax.jcr.RepositoryException;
import javax.ws.rs.core.Response;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ibatis.exceptions.PersistenceException;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
/**
* Query to retrieve logs from the directory, executed via MyBatis.
*
* @author Eric Berryman taken from Ralph Lange <Ralph.Lange@bessy.de>
*/
public class FindLogsQuery {
private enum SearchType {
LOG, TAG
};
private Multimap<String, String> value_matches = ArrayListMultimap.create();
private Multimap<String, String> logPaginate_matches = ArrayListMultimap.create();
private Multimap<String, String> date_matches = ArrayListMultimap.create();
private List<String> log_matches = new ArrayList<String>();
private List<Long> logId_matches = new ArrayList<Long>();
private List<String> logbook_matches = new ArrayList<String>();
private List<String> tag_matches = new ArrayList<String>();
private List<String> tag_patterns = new ArrayList<String>();
private List<Long> jcr_search_ids = new ArrayList<Long>();
private static SqlSessionFactory ssf = MyBatisSession.getSessionFactory();
/**
* Creates a new instance of FindLogsQuery, sorting the query parameters.
* Logbook matches and tag string matches go to the first inner query,
* tag pattern matches are queried separately,
* name matches go to the outer query.
* Logbook and tag names are converted to lowercase before being matched.
*
* @param matches the map of matches to apply
*/
private FindLogsQuery(MultivaluedMap<String, String> matches) throws RepositoryException {
for (Map.Entry<String, List<String>> match : matches.entrySet()) {
String key = match.getKey().toLowerCase();
if (key.equals("search")) {
log_matches.addAll(match.getValue());
JcrSearch js = new JcrSearch();
jcr_search_ids = js.searchForIds(match.getValue().get(0));
} else if (key.equals("tag")) {
addTagMatches(match.getValue());
} else if (key.equals("logbook")) {
addLogbookMatches(match.getValue());
} else if (key.equals("page")) {
logPaginate_matches.putAll(key, match.getValue());
} else if (key.equals("limit")) {
logPaginate_matches.putAll(key, match.getValue());
} else if (key.equals("start")) {
date_matches.putAll(key, match.getValue());
} else if (key.equals("end")) {
date_matches.putAll(key, match.getValue());
} else {
value_matches.putAll(key, match.getValue());
}
}
}
private FindLogsQuery(SearchType type, Collection<String> matches) {
if (type == SearchType.LOG) {
log_matches.addAll(matches);
} else {
addTagMatches(matches);
}
}
private FindLogsQuery(SearchType type, String name) {
if (type == SearchType.LOG) {
log_matches.add(name);
} else {
addTagMatches(Collections.singleton(name));
}
}
private FindLogsQuery(SearchType type, Long logId) {
if (type == SearchType.LOG) {
logId_matches.add(logId);
}
}
private void addLogbookMatches(Collection<String> matches) {
for (String m : matches) {
logbook_matches.add(m);
}
if (logbook_matches.size() == 1) {
String match = logbook_matches.get(0);
logbook_matches.clear();
logbook_matches.addAll(Arrays.asList(match.split(",")));
}
}
private void addTagMatches(Collection<String> matches) {
for (String m : matches) {
if (m.contains("?") || m.contains("*")) {
tag_patterns.add(m);
} else {
tag_matches.add(m);
}
}
}
/**
* Creates and executes the logbook and tag string match subquery using GROUP BY.
*
* @return a set of log ids that match
*/
//TODO: need to add search params like olog; logs between dates, search all fields, files, etc.
private Set<Long> getIdsFromLogbookAndTagMatch() throws CFException {
SqlSession ss = ssf.openSession();
try {
Set<Long> ids = new HashSet<Long>(); // set of matching log ids
List<String> params = new ArrayList<String>(); // parameter list for this query
for (String tag : tag_matches) {
params.add(tag);
}
int size = tag_matches.size();
HashMap<String, Object> hm = new HashMap<String, Object>();
hm.put("list", params);
hm.put("size", size);
ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromLogbookAndTagMatch", hm);
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
XmlLog log = iterator.next();
ids.add(log.getId());
}
}
return ids;
} catch (PersistenceException e) {
throw new CFException(Response.Status.INTERNAL_SERVER_ERROR,
"MyBatis exception: " + e);
} finally {
ss.close();
}
}
/**
* Creates and executes the properties string match subquery using GROUP BY.
*
* @return a set of log ids that match
*/
private Set<Long> getIdsFromPropertiesMatch() throws CFException {
SqlSession ss = ssf.openSession();
try {
Set<Long> ids = new HashSet<Long>(); // set of matching log ids
List<String> values = new ArrayList<String>();
List<String> names = new ArrayList<String>();
for (Map.Entry<String, Collection<String>> match : value_matches.asMap().entrySet()) {
names.add(match.getKey().toLowerCase());
for (String value : match.getValue()) {
values.add(convertFileGlobToSQLPattern(value));
}
}
int size = value_matches.asMap().size();
HashMap<String, Object> hm = new HashMap<String, Object>();
hm.put("propNameList", names);
hm.put("propValueList", values);
hm.put("size", size);
ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromPropertiesMatch", hm);
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
XmlLog log = iterator.next();
ids.add(log.getId());
}
}
return ids;
} catch (PersistenceException e) {
throw new CFException(Response.Status.INTERNAL_SERVER_ERROR,
"MyBatis exception: " + e);
} finally {
ss.close();
}
}
/**
* Creates and executes the tag string match subquery using GROUP BY.
*
* @param match tag name or pattern to match
* @return a set of log ids that match
*/
private Set<Long> getIdsFromTagMatch(String match) throws CFException {
SqlSession ss = ssf.openSession();
try {
Set<Long> ids = new HashSet<Long>();
ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromTagMatch", match);
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
XmlLog log = iterator.next();
ids.add(log.getId());
}
}
return ids;
} catch (PersistenceException e) {
throw new CFException(Response.Status.INTERNAL_SERVER_ERROR,
"MyBatis exception: " + e);
} finally {
ss.close();
}
}
/**
* Creates and executes the logbook string match subquery using GROUP BY.
*
* @param match logbook name to match
* @return a set of log ids that match
*/
private Set<Long> getIdsFromLogbookMatch(String match) throws CFException {
SqlSession ss = ssf.openSession();
try {
Set<Long> ids = new HashSet<Long>();
ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromLogbookMatch", match);
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
XmlLog log = iterator.next();
ids.add(log.getId());
}
}
return ids;
} catch (PersistenceException e) {
throw new CFException(Response.Status.INTERNAL_SERVER_ERROR,
"MyBatis exception: " + e);
} finally {
ss.close();
}
}
/**
* Creates and executes the pagination subquery using GROUP BY.
*
* @return a set of log ids that match
*/
Set<Long> getIdsFromPagination() throws CFException {
SqlSession ss = ssf.openSession();
try {
Set<Long> idsList = new HashSet<Long>();
Set<Long> idsSearchList = new HashSet<Long>();
Set<String> valuesList = new HashSet<String>();
Set<Long> returnIds = new HashSet<Long>();
HashMap<String, Object> hm = new HashMap<String, Object>();
if (!tag_matches.isEmpty()) {
for (String tag : tag_matches) {
Set<Long> ids = getIdsFromTagMatch(tag);
if (ids.isEmpty()) {
return null;
}
idsList.addAll(ids);
}
}
if (!value_matches.isEmpty()) {
Set<Long> ids = getIdsFromPropertiesMatch();
if (ids.isEmpty()) {
return null;
}
idsList.addAll(ids);
}
if (!tag_patterns.isEmpty()) {
for (String p : tag_patterns) {
Set<Long> ids = getIdsFromTagMatch(p);
if (ids.isEmpty()) {
return null;
}
idsList.addAll(ids);
}
}
if (!logbook_matches.isEmpty()) {
if (idsList.isEmpty()) {
for (String logbook : logbook_matches) {
Set<Long> ids = getIdsFromLogbookMatch(logbook);
if (ids.isEmpty()) {
return null;
}
idsList.addAll(ids);
}
} else {
Set<Long> id_results = new HashSet<Long>();
for (String logbook : logbook_matches) {
Set<Long> ids = getIdsFromLogbookMatch(logbook);
if (ids.isEmpty()) {
return null;
}
id_results.addAll(ids);
}
Set<Long> temp_set = new HashSet<Long>();
for (Long id : idsList) {
if (id_results.contains(id)) {
temp_set.add(id);
}
}
idsList.clear();
idsList.addAll(temp_set);
}
}
if (!date_matches.isEmpty()) {
String start = null, end = null;
for (Map.Entry<String, Collection<String>> match : date_matches.asMap().entrySet()) {
if (match.getKey().toLowerCase().equals("start")) {
start = match.getValue().iterator().next();
}
if (match.getKey().toLowerCase().equals("end")) {
end = match.getValue().iterator().next();
}
}
if (start != null && end == null) {
hm.put("start", Long.valueOf(start));
hm.put("end", Long.valueOf(Calendar.getInstance().getTime().getTime() / 1000L));
} else if (start == null && end != null) {
hm.put("start", 0L);
hm.put("end", Long.valueOf(end));
} else {
hm.put("start", Long.valueOf(start));
hm.put("end", Long.valueOf(end));
}
}
if (!logId_matches.isEmpty()) {
for (long i : logId_matches) {
idsList.add(i);
}
}
if (!log_matches.isEmpty()) {
for (String value : log_matches) {
valuesList.add(convertFileGlobToSQLPattern(value));
}
}
if (!jcr_search_ids.isEmpty()) {
for (long i : jcr_search_ids) {
idsSearchList.add(i);
}
}
if (!logPaginate_matches.isEmpty()) {
String limit = null, offset = null;
for (Map.Entry<String, Collection<String>> match : logPaginate_matches.asMap().entrySet()) {
if (match.getKey().toLowerCase().equals("limit")) {
limit = match.getValue().iterator().next();
}
if (match.getKey().toLowerCase().equals("page")) {
offset = match.getValue().iterator().next();
}
}
if (limit != null && offset != null) {
Long longOffset = Long.valueOf(offset) * Long.valueOf(limit) - Long.valueOf(limit);
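// e.g. page=3 with limit=20 yields offset = 3*20 - 20 = 40, i.e. rows 41-60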
hm.put("limit", Long.valueOf(limit));
hm.put("offset", longOffset);
}
}
if (idsSearchList.size() > 0) {
hm.put("idsSearchList", idsSearchList);
}
if (idsList.size() > 0) {
hm.put("idsList", idsList);
}
if (valuesList.size() > 0) {
hm.put("valuesList", valuesList);
}
ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromPagination", hm);
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
XmlLog log = iterator.next();
returnIds.add(log.getId());
}
}
return returnIds;
} catch (PersistenceException e) {
throw new CFException(Response.Status.INTERNAL_SERVER_ERROR,
"MyBatis exception: " + e);
} finally {
ss.close();
}
}
/**
* Creates and executes the final log query, combining the results of the
* pagination, logbook, tag and property subqueries.
*
* @return list of matching logs with their property attributes populated,
* or null if no logs match
* @throws CFException wrapping a persistence exception
*/
private ArrayList<XmlLog> executeQuery() throws CFException {
SqlSession ss = ssf.openSession();
try {
List<Long> idsList = new ArrayList<Long>();
Set<Long> paginate_result = new HashSet<Long>();
Set<Long> ids = getIdsFromPagination();
if (ids == null || ids.isEmpty()) {
return null;
} else {
paginate_result.addAll(ids);
}
if (!paginate_result.isEmpty()) {
for (long i : paginate_result) {
idsList.add(i);
}
}
ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getLogsFromIds", idsList);
HashMap<String, Object> hm = new HashMap<String, Object>();
for (XmlLog log : logs) {
Collection<XmlProperty> props = log.getXmlProperties();
Map<String, String> attributes = new HashMap<String, String>();
for (XmlProperty prop : props) {
hm.clear();
hm.put("lid", log.getId());
hm.put("pid", prop.getId());
ArrayList<HashMap> attrs = (ArrayList<HashMap>) ss.selectList("mappings.PropertyMapping.attributesForLog", hm);
Iterator p = attrs.iterator();
for (HashMap hash : attrs) {
attributes.put(hash.get("name").toString(), hash.get("value").toString());
}
prop.setAttributes(attributes);
}
}
return logs;
} catch (PersistenceException e) {
throw new CFException(Response.Status.INTERNAL_SERVER_ERROR,
"MyBatis exception: " + e);
} finally {
ss.close();
}
}
/* Regexp for this pattern: "((\\\\)*)((\\\*)|(\*)|(\\\?)|(\?)|(%)|(_))"
* i.e. any number of "\\" (group 1) -> same number of "\\"
* then any of "\*" (group 4) -> "*"
* "*" (group 5) -> "%"
* "\?" (group 6) -> "?"
* "?" (group 7) -> "_"
* "%" (group 8) -> "\%"
* "_" (group 9) -> "\_"
*/
private static Pattern pat = Pattern.compile("((\\\\\\\\)*)((\\\\\\*)|(\\*)|(\\\\\\?)|(\\?)|(%)|(_))");
private static final int grp[] = {4, 5, 6, 7, 8, 9};
private static final String rpl[] = {"*", "%", "?", "_", "\\%", "\\_"};
/**
* Translates the specified file glob pattern <tt>in</tt>
* into the corresponding SQL pattern.
*
* @param in file glob pattern
* @return SQL pattern
*/
private static String convertFileGlobToSQLPattern(String in) {
StringBuffer out = new StringBuffer();
Matcher m = pat.matcher(in);
while (m.find()) {
StringBuilder rep = new StringBuilder();
if (m.group(1) != null) {
rep.append(m.group(1));
}
for (int i = 0; i < grp.length; i++) {
if (m.group(grp[i]) != null) {
rep.append(rpl[i]);
break;
}
}
m.appendReplacement(out, rep.toString());
}
m.appendTail(out);
return out.toString();
}
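// Examples of the translation performed above (derived from the grp/rpl
// tables): the glob "a*b?" becomes the SQL pattern "a%b_", literal "%" and
// "_" are escaped to "\%" and "\_", and the escaped glob characters "\*"
// and "\?" come through as literal "*" and "?".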
/**
* Finds logs by matching logbook/tag values and/or log and/or tag names.
*
* @param matches MultiMap of query parameters
* @return XmlLogs container with all found logs and their logbooks/tags
*/
public static XmlLogs findLogsByMultiMatch(MultivaluedMap<String, String> matches) throws CFException, RepositoryException, UnsupportedEncodingException, NoSuchAlgorithmException {
FindLogsQuery q = new FindLogsQuery(matches);
XmlLogs xmlLogs = new XmlLogs();
ArrayList<XmlLog> logs = q.executeQuery();
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
XmlLog log = iterator.next();
if (log.getMD5Entry().equals(CreateLogQuery.getmd5Entry(log.getId(), log))) {
xmlLogs.addXmlLog(log);
}
}
}
return xmlLogs;
}
/**
* Returns logs found by matching logbook/tag and/or log names.
*
* @param name query to be used for matching
* @return XmlLogs container with all found logs and their logbooks/tags
*/
public static XmlLogs findLogsByLogbookName(String name) throws CFException {
FindLogsQuery q = new FindLogsQuery(SearchType.TAG, name);
XmlLogs xmlLogs = new XmlLogs();
ArrayList<XmlLog> logs = q.executeQuery();
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
xmlLogs.addXmlLog(iterator.next());
}
}
return xmlLogs;
}
/**
* Return single log found by log id.
*
* @param logId id to look for
* @return XmlLog with found log and its logbooks
* @throws CFException on SQLException
*/
public static XmlLog findLogById(Long logId) throws CFException, UnsupportedEncodingException, NoSuchAlgorithmException {
FindLogsQuery q = new FindLogsQuery(SearchType.LOG, logId);
XmlLog xmlLog = null;
ArrayList<XmlLog> logs = q.executeQuery();
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
XmlLog log = iterator.next();
if (log.getMD5Entry().equals(CreateLogQuery.getmd5Entry(log.getId(), log))) {
xmlLog = log;
}
}
}
return xmlLog;
}
/**
* Return single log found by log id without checking for md5
*
* @param logId id to look for
* @return XmlLog with found log and its logbooks
* @throws CFException on SQLException
*/
public static XmlLog findLogByIdNoMD5(Long logId) throws CFException, UnsupportedEncodingException, NoSuchAlgorithmException {
FindLogsQuery q = new FindLogsQuery(SearchType.LOG, logId);
XmlLog xmlLog = null;
ArrayList<XmlLog> logs = q.executeQuery();
if (logs != null) {
Iterator<XmlLog> iterator = logs.iterator();
while (iterator.hasNext()) {
xmlLog = iterator.next();
}
}
return xmlLog;
}
}
|
package us.kbase.typedobj.db;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.codec.digest.DigestUtils;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fge.jsonschema.cfg.ValidationConfiguration;
import com.github.fge.jsonschema.main.JsonSchema;
import com.github.fge.jsonschema.main.JsonSchemaFactory;
import us.kbase.kidl.KbFuncdef;
import us.kbase.kidl.KbList;
import us.kbase.kidl.KbMapping;
import us.kbase.kidl.KbModule;
import us.kbase.kidl.KbModuleComp;
import us.kbase.kidl.KbParameter;
import us.kbase.kidl.KbScalar;
import us.kbase.kidl.KbService;
import us.kbase.kidl.KbStruct;
import us.kbase.kidl.KbStructItem;
import us.kbase.kidl.KbTuple;
import us.kbase.kidl.KbType;
import us.kbase.kidl.KbTypedef;
import us.kbase.kidl.KbUnspecifiedObject;
import us.kbase.kidl.KidlParser;
import us.kbase.typedobj.core.AbsoluteTypeDefId;
import us.kbase.typedobj.core.TypeDefId;
import us.kbase.typedobj.core.TypeDefName;
import us.kbase.typedobj.core.validatorconfig.ValidationConfigurationFactory;
import us.kbase.typedobj.exceptions.*;
/**
* This class is the primary interface for storing and retrieving versioned typed
* object definitions and association meta information.
*
* @author msneddon
* @author rsutormin
*
*/
public class TypeDefinitionDB {
/**
* This is the factory used to create a JsonSchema object from a Json Schema
* document stored in the DB.
*/
protected JsonSchemaFactory jsonSchemaFactory;
/**
* The Jackson ObjectMapper which can translate a raw Json Schema document to a JsonTree
*/
protected ObjectMapper mapper;
private static final SemanticVersion defaultVersion = new SemanticVersion(0, 1);
private static final SemanticVersion releaseVersion = new SemanticVersion(1, 0);
private static final long maxDeadLockWaitTime = 120000;
private final TypeStorage storage;
private final File parentTempDir;
private final UserInfoProvider uip;
private final Object tempDirLock = new Object();
private final Object moduleStateLock = new Object();
private final Map<String, ModuleState> moduleStates = new HashMap<String, ModuleState>();
private final ThreadLocal<Map<String,Integer>> localReadLocks = new ThreadLocal<Map<String,Integer>>();
private final String kbTopPath;
enum Change {
noChange, backwardCompatible, notCompatible;
public static Change joinChanges(Change c1, Change c2) {
return Change.values()[Math.max(c1.ordinal(), c2.ordinal())];
}
}
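// joinChanges takes the more severe of two changes by ordinal, so for
// example joinChanges(noChange, backwardCompatible) == backwardCompatible,
// and joining anything with notCompatible yields notCompatible.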
/**
* Set up a new DB pointing to the specified storage object
* @param storage
* @param uip
* @throws TypeStorageException
*/
public TypeDefinitionDB(TypeStorage storage, UserInfoProvider uip)
throws TypeStorageException {
this(storage, null, uip);
}
public TypeDefinitionDB(TypeStorage storage, File tempDir,
UserInfoProvider uip) throws TypeStorageException {
this(storage, tempDir, uip, null);
}
/**
* Set up a new DB handle pointing to the specified storage object, using the
* specified location when processing temporary type compiler files.
* @param storage
* @param tempDir
* @param uip
* @param kbTopPath
* @throws TypeStorageException
*/
public TypeDefinitionDB(TypeStorage storage, File tempDir,
UserInfoProvider uip, String kbTopPath) throws TypeStorageException {
this.mapper = new ObjectMapper();
// Create the custom json schema factory for KBase typed objects and use this
ValidationConfiguration kbcfg = ValidationConfigurationFactory.buildKBaseWorkspaceConfiguration();
this.jsonSchemaFactory = JsonSchemaFactory.newBuilder()
.setValidationConfiguration(kbcfg)
.freeze();
this.storage = storage;
if (tempDir == null) {
this.parentTempDir = new File(".");
} else {
this.parentTempDir = tempDir;
if (parentTempDir.exists()) {
if (!parentTempDir.isDirectory()) {
throw new TypeStorageException("Requested temp dir "
+ parentTempDir + " is not a directory");
}
} else {
boolean success = parentTempDir.mkdirs();
if (!success) {
if (!parentTempDir.isDirectory()) {
throw new TypeStorageException(
"Could not create requested temp dir "
+ parentTempDir);
}
}
}
}
this.uip = uip;
this.kbTopPath = kbTopPath;
}
/**
* Retrieve a Json Schema Document for the most recent version of the typed object specified
* @param typeDefName
* @return
* @throws NoSuchTypeException
* @throws NoSuchModuleException
* @throws TypeStorageException
*/
public String getJsonSchemaDocument(final TypeDefName typeDefName)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
return getJsonSchemaDocument(new TypeDefId(typeDefName));
}
private ModuleState getModuleState(String moduleName) {
synchronized (moduleStateLock) {
ModuleState ret = moduleStates.get(moduleName);
if (ret == null) {
ret = new ModuleState();
moduleStates.put(moduleName, ret);
}
return ret;
}
}
private int getLocalReadLocks(String moduleName) {
Map<String, Integer> map = localReadLocks.get();
if (map == null) {
map = new HashMap<String, Integer>();
localReadLocks.set(map);
}
Integer ret = map.get(moduleName);
if (ret == null)
return 0;
return ret;
}
private void setLocalReadLocks(String moduleName, int locks) {
Map<String, Integer> map = localReadLocks.get();
if (map == null) {
map = new HashMap<String, Integer>();
localReadLocks.set(map);
}
map.put(moduleName, locks);
}
private void requestReadLock(String moduleName) throws NoSuchModuleException, TypeStorageException {
if (!storage.checkModuleExist(moduleName))
throw new NoSuchModuleException(moduleName);
requestReadLockNM(moduleName);
}
private void requestReadLockNM(String moduleName) throws TypeStorageException {
int lrl = getLocalReadLocks(moduleName);
if (lrl == 0) {
final ModuleState ms = getModuleState(moduleName);
synchronized (ms) {
long startTime = System.currentTimeMillis();
while (ms.writerCount > 0) {
try {
ms.wait(10000);
} catch (InterruptedException ignore) {}
if (System.currentTimeMillis() - startTime > maxDeadLockWaitTime)
throw new IllegalStateException("Looks like deadlock");
}
ms.readerCount++;
//new Exception("moduleName=" + moduleName + ", readerCount=" + ms.readerCount).printStackTrace(System.out);
}
}
setLocalReadLocks(moduleName, lrl + 1);
}
private void releaseReadLock(String moduleName) {
final ModuleState ms = getModuleState(moduleName);
int lrl = getLocalReadLocks(moduleName);
lrl--;
setLocalReadLocks(moduleName, lrl);
if (lrl == 0) {
synchronized (ms) {
if (ms.readerCount == 0)
throw new IllegalStateException("Can not release empty read lock");
ms.readerCount--;
//new Exception("moduleName=" + moduleName + ", readerCount=" + ms.readerCount).printStackTrace(System.out);
ms.notifyAll();
}
}
}
private void requestWriteLock(String moduleName) {
final ModuleState ms = getModuleState(moduleName);
synchronized (ms) {
if (ms.writerCount > 0)
throw new IllegalStateException("Concurent changes of module " + moduleName);
ms.writerCount++;
//new Exception("moduleName=" + moduleName + ", writerCount=" + ms.writerCount).printStackTrace(System.out);
long startTime = System.currentTimeMillis();
while (ms.readerCount > 0) {
try {
ms.wait(10000);
} catch (InterruptedException ignore) {}
if (System.currentTimeMillis() - startTime > maxDeadLockWaitTime) {
ms.writerCount--;
throw new IllegalStateException("Looks like deadlock");
}
}
}
}
private void releaseWriteLock(String moduleName) {
final ModuleState ms = getModuleState(moduleName);
synchronized (ms) {
if (ms.writerCount == 0)
throw new IllegalStateException("Can not release empty write lock");
ms.writerCount--;
//new Exception("moduleName=" + moduleName + ", writerCount=" + ms.writerCount).printStackTrace(System.out);
ms.notifyAll();
}
}
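// Typical locking pattern used throughout this class (sketch): acquire the
// per-module read lock, do the work, and always release in a finally block.
//
// requestReadLock(moduleName);
// try {
// ... read module or type data ...
// } finally {
// releaseReadLock(moduleName);
// }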
/**
* Retrieve a Json Schema Document for the typed object specified. If no version numbers
* are indicated, the latest version is returned. If the major version only is specified,
* then the latest version that is backwards compatible with the major version is returned.
* If exact major/minor version numbers are given, that is the exact version that is returned.
* @param typeDefId
* @return
* @throws NoSuchTypeException
* @throws NoSuchModuleException
* @throws TypeStorageException
*/
public String getJsonSchemaDocument(final TypeDefId typeDefId)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
String moduleName = typeDefId.getType().getModule();
requestReadLock(moduleName);
try {
return getJsonSchemaDocumentNL(typeDefId);
} finally {
releaseReadLock(moduleName);
}
}
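// For illustration: a TypeDefId with no version resolves to the latest
// released version, one carrying only a major version (say 2) resolves to
// the newest released 2.x, and a fully qualified 2.1 resolves to exactly
// 2.1, matching the rules described in the javadoc above.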
private String getJsonSchemaDocumentNL(final TypeDefId typeDefId)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
// first make sure that the json schema document can be found
AbsoluteTypeDefId absTypeDefId = resolveTypeDefIdNL(typeDefId);
String typeName = absTypeDefId.getType().getName();
// second retrieve the document if it is available
SemanticVersion schemaDocumentVer = new SemanticVersion(absTypeDefId.getMajorVersion(), absTypeDefId.getMinorVersion());
String moduleName = typeDefId.getType().getModule();
String ret = storage.getTypeSchemaRecord(moduleName,typeName,schemaDocumentVer.toString());
if (ret == null)
throw new NoSuchTypeException("Unable to read type schema record: '"+moduleName+"."+typeName+"'");
return ret;
}
private long findModuleVersion(ModuleDefId moduleDef) throws NoSuchModuleException, TypeStorageException {
if (moduleDef.getVersion() == null)
return storage.getLastReleasedModuleVersion(moduleDef.getModuleName());
long version = moduleDef.getVersion();
if (!storage.checkModuleInfoRecordExist(moduleDef.getModuleName(), version))
throw new NoSuchModuleException("There is no information about module " + moduleDef.getModuleName() +
" for version " + version);
return version;
}
public Map<AbsoluteTypeDefId, String> getJsonSchemasForAllTypes(ModuleDefId moduleDef)
throws NoSuchModuleException, TypeStorageException {
String moduleName = moduleDef.getModuleName();
requestReadLock(moduleName);
try {
long moduleVersion = findModuleVersion(moduleDef);
ModuleInfo info = storage.getModuleInfoRecord(moduleName, moduleVersion);
Map<AbsoluteTypeDefId, String> ret = new HashMap<AbsoluteTypeDefId, String>();
for (TypeInfo ti : info.getTypes().values()) {
String typeVersionText = ti.getTypeVersion();
String jsonSchema = storage.getTypeSchemaRecord(moduleName, ti.getTypeName(), typeVersionText);
SemanticVersion typeVer = new SemanticVersion(typeVersionText);
ret.put(new AbsoluteTypeDefId(new TypeDefName(moduleName, ti.getTypeName()),
typeVer.getMajor(), typeVer.getMinor()), jsonSchema);
}
return ret;
} finally {
releaseReadLock(moduleName);
}
}
/**
* Given a typeDefId that may not be valid or have major/minor versions defined,
* attempt to lookup if a specific type definition can be resolved in the database.
* If a specific type definition is found, it is returned; else an exception is thrown.
* @param typeDefId
* @return
* @throws NoSuchTypeException
* @throws NoSuchModuleException
* @throws TypeStorageException
*/
public AbsoluteTypeDefId resolveTypeDefId(final TypeDefId typeDefId)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
String moduleName = typeDefId.getType().getModule();
requestReadLock(moduleName);
try {
return resolveTypeDefIdNL(typeDefId);
} finally {
releaseReadLock(moduleName);
}
}
private AbsoluteTypeDefId resolveTypeDefIdNL(final TypeDefId typeDefId)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
String moduleName = typeDefId.getType().getModule();
checkModuleRegistered(moduleName);
SemanticVersion schemaDocumentVer = findTypeVersion(typeDefId);
if (schemaDocumentVer == null)
throwNoSuchTypeException(typeDefId);
String typeName = typeDefId.getType().getName();
String ret = storage.getTypeSchemaRecord(moduleName,typeName,schemaDocumentVer.toString());
if (ret == null)
throw new NoSuchTypeException("Unable to read type schema record: '"+moduleName+"."+typeName+"'");
// TODO: use this instead, but not yet supported with Mongo Storage backend
//if(!storage.checkTypeSchemaRecordExists(moduleName, typeName, schemaDocumentVer.toString()))
// throw new NoSuchTypeException("Unable to read type schema record: '"+moduleName+"."+typeName+"'");
return new AbsoluteTypeDefId(new TypeDefName(moduleName,typeName),schemaDocumentVer.getMajor(),schemaDocumentVer.getMinor());
}
/**
* Retrieve a Json Schema object that can be used for json validation for the most recent
* version of the typed object specified
* @param typeDefName
* @return
* @throws NoSuchTypeException
* @throws NoSuchModuleException
* @throws BadJsonSchemaDocumentException
* @throws TypeStorageException
*/
public JsonSchema getJsonSchema(TypeDefName typeDefName)
throws NoSuchTypeException, NoSuchModuleException, BadJsonSchemaDocumentException, TypeStorageException {
return getJsonSchema(new TypeDefId(typeDefName));
}
/**
* Retrieve a Json Schema object that can be used for json validation for the typed object specified.
* If no version numbers are indicated, the latest version is returned. If the major version only
* is specified, then the latest version that is backwards compatible with the major version is returned.
* If exact major/minor version numbers are given, that is the exact version that is returned.
* @param typeDefId
* @return
* @throws NoSuchTypeException
* @throws NoSuchModuleException
* @throws BadJsonSchemaDocumentException
* @throws TypeStorageException
*/
public JsonSchema getJsonSchema(final TypeDefId typeDefId)
throws NoSuchTypeException, NoSuchModuleException, BadJsonSchemaDocumentException, TypeStorageException {
String moduleName = typeDefId.getType().getModule();
requestReadLock(moduleName);
try {
String jsonSchemaDocument = getJsonSchemaDocumentNL(typeDefId);
try {
JsonNode schemaRootNode = mapper.readTree(jsonSchemaDocument);
return jsonSchemaFactory.getJsonSchema(schemaRootNode);
} catch (Exception e) {
throw new BadJsonSchemaDocumentException("schema for typed object '"+typeDefId.getTypeString()+"'" +
"was not a valid or readable JSON document",e);
}
} finally {
releaseReadLock(moduleName);
}
}
/**
* Convert a Json Schema Document into a Json Schema object that can be used for json validation.
* @param jsonSchemaDocument
* @return
* @throws BadJsonSchemaDocumentException
* @throws TypeStorageException
*/
protected JsonSchema jsonSchemaFromString(String jsonSchemaDocument)
throws BadJsonSchemaDocumentException, TypeStorageException {
try {
JsonNode schemaRootNode = mapper.readTree(jsonSchemaDocument);
return jsonSchemaFactory.getJsonSchema(schemaRootNode);
} catch (Exception e) {
throw new BadJsonSchemaDocumentException("string was not a valid or readable JSON Schema document",e);
}
}
/**
* Given a type name, return the parsed KIDL definition for the type. No version
* number is specified, so the latest version of the definition will be returned.
* @param type the type definition name to look up
* @return the parsed type definition
* @throws NoSuchTypeException
*/
public KbTypedef getTypeParsingDocument(TypeDefName type)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
return getTypeParsingDocument(new TypeDefId(type));
}
/**
* Check if module spec-file was registered at least once.
* @param moduleName
* @return true if module spec-file was registered at least once
* @throws TypeStorageException
*/
public boolean isValidModule(String moduleName) throws TypeStorageException {
try {
requestReadLock(moduleName);
} catch (NoSuchModuleException ignore) {
return false;
}
try {
return isValidModuleNL(moduleName, null);
} finally {
releaseReadLock(moduleName);
}
}
private boolean isValidModuleNL(String moduleName, Long version) throws TypeStorageException {
if (!storage.checkModuleExist(moduleName))
return false;
if (version == null)
version = storage.getLastReleasedModuleVersion(moduleName);
return storage.checkModuleInfoRecordExist(moduleName, version) &&
storage.checkModuleSpecRecordExist(moduleName, version);
}
private void checkModule(String moduleName, Long version) throws NoSuchModuleException, TypeStorageException {
if (!isValidModuleNL(moduleName, version))
throw new NoSuchModuleException("Module wasn't uploaded: " + moduleName);
}
private void checkModuleRegistered(String moduleName) throws NoSuchModuleException, TypeStorageException {
if ((!storage.checkModuleExist(moduleName)) || (!storage.checkModuleInfoRecordExist(moduleName,
storage.getLastReleasedModuleVersion(moduleName))))
throw new NoSuchModuleException("Module wasn't registered: " + moduleName);
}
/**
* Determine if the type is registered and valid.
* @param typeDefName
* @return true if valid, false otherwise
* @throws TypeStorageException
*/
public boolean isValidType(TypeDefName typeDefName) throws TypeStorageException {
return isValidType(new TypeDefId(typeDefName));
}
/**
 * Determine if the type is registered and valid. If version numbers are set, the specified
 * version must also resolve to a valid type definition.
 * @param typeDefId identifier of the type, optionally carrying version information
 * @return true if valid, false otherwise
 * @throws TypeStorageException
 */
public boolean isValidType(TypeDefId typeDefId) throws TypeStorageException {
String moduleName = typeDefId.getType().getModule();
try {
requestReadLock(moduleName);
} catch (NoSuchModuleException e) {
return false;
}
try {
String typeName = typeDefId.getType().getName();
if (!storage.checkModuleExist(moduleName))
return false;
if (!storage.checkModuleInfoRecordExist(moduleName,
storage.getLastReleasedModuleVersion(moduleName)))
return false;
SemanticVersion ver = findTypeVersion(typeDefId);
if (ver == null)
return false;
return storage.checkTypeSchemaRecordExists(moduleName, typeName, ver.toString());
} finally {
releaseReadLock(moduleName);
}
}
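// Validity-check sketch (names hypothetical): both calls return false rather than throwing when
// the module or type is unknown, so they are safe to use as guards.
//
//   boolean moduleOk = db.isValidModule("MyModule");
//   boolean typeOk   = db.isValidType(new TypeDefId(new TypeDefName("MyModule", "MyType")));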
private boolean isTypePresent(String moduleName, String typeName) throws TypeStorageException {
ModuleInfo mi;
try {
mi = getModuleInfoNL(moduleName);
} catch (NoSuchModuleException e) {
return false;
}
return mi.getTypes().get(typeName) != null;
}
private SemanticVersion findTypeVersion(TypeDefId typeDef) throws TypeStorageException {
if (typeDef.isAbsolute())
return new SemanticVersion(typeDef.getMajorVersion(), typeDef.getMinorVersion());
if (typeDef.getMajorVersion() != null) {
Map<String, Boolean> versions = storage.getAllTypeVersions(typeDef.getType().getModule(),
typeDef.getType().getName());
SemanticVersion ret = null;
for (String verText : versions.keySet()) {
if (!versions.get(verText))
continue;
SemanticVersion ver = new SemanticVersion(verText);
if (ver.getMajor() == typeDef.getMajorVersion() &&
(ret == null || ret.compareTo(ver) < 0))
ret = ver;
}
return ret;
}
return findLastTypeVersion(typeDef.getType().getModule(), typeDef.getType().getName(), false);
}
private SemanticVersion findLastTypeVersion(String moduleName, String typeName,
boolean withNoLongerSupported) throws TypeStorageException {
if (!isTypePresent(moduleName, typeName))
return null;
ModuleInfo mi;
try {
mi = getModuleInfoNL(moduleName);
} catch (NoSuchModuleException e) {
return null;
}
return findLastTypeVersion(mi, typeName, withNoLongerSupported);
}
private SemanticVersion findLastTypeVersion(ModuleInfo module, String typeName,
boolean withNoLongerSupported) {
TypeInfo ti = module.getTypes().get(typeName);
if (ti == null || !(ti.isSupported() || withNoLongerSupported) || ti.getTypeVersion() == null)
return null;
return new SemanticVersion(ti.getTypeVersion());
}
protected void throwNoSuchTypeException(String moduleName, String typeName,
String version) throws NoSuchTypeException {
throw new NoSuchTypeException("Unable to locate type: '"+moduleName+"."+typeName+"'" +
(version == null ? "" : (" for version " + version)));
}
protected void throwNoSuchTypeException(TypeDefId typeDef) throws NoSuchTypeException {
throw new NoSuchTypeException("Unable to locate type: " + typeDef.getTypeString());
}
protected void throwNoSuchFuncException(String moduleName, String funcName,
String version) throws NoSuchFuncException {
throw new NoSuchFuncException("Unable to locate function: '"+moduleName+"."+funcName+"'" +
(version == null ? "" : (" for version " + version)));
}
public List<String> getAllRegisteredTypes(String moduleName)
throws NoSuchModuleException, TypeStorageException {
return getAllRegisteredTypes(moduleName, getLastModuleVersion(moduleName));
}
public List<String> getAllRegisteredTypes(String moduleName, long moduleVersion)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
List<String> ret = new ArrayList<String>();
for (TypeInfo typeInfo : getModuleInfoNL(moduleName, moduleVersion).getTypes().values())
if (typeInfo.isSupported())
ret.add(typeInfo.getTypeName());
return ret;
} finally {
releaseReadLock(moduleName);
}
}
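// Listing sketch (module name hypothetical): enumerate the supported types of the latest released
// module version together with their latest type versions.
//
//   for (String typeName : db.getAllRegisteredTypes("MyModule"))
//       System.out.println(typeName + " -> " + db.getLatestTypeVersion(new TypeDefName("MyModule", typeName)));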
/**
 * Return the latest version of the specified type. A version has a two-level structure of
 * integers separated by a dot, i.e. {@code <major>.<minor>}.
 * @param type the name of the type
 * @return latest version of the specified type
 * @throws NoSuchTypeException
 * @throws NoSuchModuleException
 * @throws TypeStorageException
 */
public String getLatestTypeVersion(TypeDefName type)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
String moduleName = type.getModule();
requestReadLock(moduleName);
try {
checkModule(type.getModule(), null);
SemanticVersion ret = findLastTypeVersion(type.getModule(), type.getName(), false);
if (ret == null)
throwNoSuchTypeException(type.getModule(), type.getName(), null);
return ret.toString();
} finally {
releaseReadLock(moduleName);
}
}
private String saveType(ModuleInfo mi, String typeName, String jsonSchemaDocument,
KbTypedef specParsing, boolean notBackwardCompatible, Set<RefInfo> dependencies,
long newModuleVersion) throws NoSuchModuleException, TypeStorageException {
TypeInfo ti = mi.getTypes().get(typeName);
if (ti == null) {
ti = new TypeInfo();
ti.setTypeName(typeName);
mi.getTypes().put(typeName, ti);
}
ti.setSupported(true);
return saveType(mi, ti, jsonSchemaDocument, specParsing, notBackwardCompatible,
dependencies, newModuleVersion);
}
private String saveType(ModuleInfo mi, TypeInfo ti, String jsonSchemaDocument,
KbTypedef specParsing, boolean notBackwardCompatible, Set<RefInfo> dependencies,
long newModuleVersion) throws NoSuchModuleException, TypeStorageException {
SemanticVersion version = getIncrementedVersion(mi, ti.getTypeName(),
notBackwardCompatible);
ti.setTypeVersion(version.toString());
return saveType(mi, ti, jsonSchemaDocument, specParsing, dependencies, newModuleVersion);
}
protected SemanticVersion getIncrementedVersion(ModuleInfo mi, String typeName,
boolean notBackwardCompatible) {
SemanticVersion version = findLastTypeVersion(mi, typeName, true);
if (version == null) {
version = defaultVersion;
} else {
int major = version.getMajor();
int minor = version.getMinor();
if (major > 0 && notBackwardCompatible) {
major++;
minor = 0;
} else {
minor++;
}
version = new SemanticVersion(major, minor);
}
return version;
}
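// Version-increment policy illustrated (derived from the logic above; the starting point is the
// configured defaultVersion):
//   first registration                   -> defaultVersion
//   compatible change                    -> minor is incremented (e.g. 1.2 -> 1.3)
//   incompatible change with major > 0   -> major is incremented, minor reset (e.g. 1.2 -> 2.0)
//   incompatible change with major == 0  -> only minor is incremented (pre-release types stay at 0.x)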
private String saveType(ModuleInfo mi, TypeInfo ti, String jsonSchemaDocument,
KbTypedef specParsing, Set<RefInfo> dependencies, long newModuleVersion)
throws NoSuchModuleException, TypeStorageException {
if (dependencies != null)
for (RefInfo ri : dependencies) {
ri.setDepVersion(ti.getTypeVersion());
ri.setDepModuleVersion(newModuleVersion);
updateInternalRefVersion(ri, mi);
}
storage.writeTypeSchemaRecord(mi.getModuleName(), ti.getTypeName(), ti.getTypeVersion(),
newModuleVersion, jsonSchemaDocument);
writeTypeParsingFile(mi.getModuleName(), ti.getTypeName(), ti.getTypeVersion(),
specParsing, newModuleVersion);
return ti.getTypeVersion();
}
private void updateInternalRefVersion(RefInfo ri, ModuleInfo mi) {
if (ri.getRefVersion() == null) {
if (!ri.getRefModule().equals(mi.getModuleName()))
throw new IllegalStateException("Type reference has no refVersion but reference " +
"is not internal: " + ri);
}
if (ri.getRefModule().equals(mi.getModuleName())) {
TypeInfo ti = mi.getTypes().get(ri.getRefName());
if (ti == null)
throw new IllegalStateException("Type reference was not found: " + ri);
ri.setRefVersion(ti.getTypeVersion());
}
}
private void writeTypeParsingFile(String moduleName, String typeName, String version,
KbTypedef document, long newModuleVersion) throws TypeStorageException {
try {
StringWriter sw = new StringWriter();
mapper.writeValue(sw, document.getData());
sw.close();
storage.writeTypeParseRecord(moduleName, typeName, version, newModuleVersion, sw.toString());
} catch (IOException ex) {
throw new IllegalStateException("Unexpected internal error: " + ex.getMessage(), ex);
}
}
private boolean checkUserIsOwnerOrAdmin(String moduleName, String userId)
throws NoSuchPrivilegeException, TypeStorageException {
if (uip.isAdmin(userId))
return true;
Map<String, OwnerInfo> owners = storage.getOwnersForModule(moduleName);
if (!owners.containsKey(userId))
throw new NoSuchPrivilegeException("User " + userId + " is not in list of owners of module " +
moduleName);
return owners.get(userId).isWithChangeOwnersPrivilege();
}
public List<String> getModuleOwners(String moduleName) throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
checkModuleRegistered(moduleName);
return new ArrayList<String>(storage.getOwnersForModule(moduleName).keySet());
} finally {
releaseReadLock(moduleName);
}
}
/**
 * Release the module: bump the major version of every registered type and function still at
 * version 0.x to 1.0, or, if there is nothing to bump, simply mark the current module version
 * as released.
 * @param moduleName name of the module
 * @param userId user performing the release (must be an owner or admin)
 * @return the absolute versions of all registered types after the release
 */
public List<AbsoluteTypeDefId> releaseModule(String moduleName, String userId)
throws NoSuchModuleException, TypeStorageException, NoSuchPrivilegeException {
checkUserIsOwnerOrAdmin(moduleName, userId);
checkModuleRegistered(moduleName);
long version = storage.getLastModuleVersionWithUnreleased(moduleName);
checkModule(moduleName, version);
ModuleInfo info = storage.getModuleInfoRecord(moduleName, version);
//List<AbsoluteTypeDefId> ret = new ArrayList<AbsoluteTypeDefId>();
requestWriteLock(moduleName);
try {
List<String> typesTo10 = new ArrayList<String>();
for (String type : info.getTypes().keySet())
if (new SemanticVersion(info.getTypes().get(type).getTypeVersion()).getMajor() == 0)
typesTo10.add(type);
List<String> funcsTo10 = new ArrayList<String>();
for (String func : info.getFuncs().keySet())
if (new SemanticVersion(info.getFuncs().get(func).getFuncVersion()).getMajor() == 0)
funcsTo10.add(func);
if (typesTo10.size() > 0 || funcsTo10.size() > 0) {
info.setUploadUserId(userId);
info.setUploadMethod("releaseModule");
long transactionStartTime = storage.generateNewModuleVersion(moduleName);
try {
Set<RefInfo> newTypeRefs = new TreeSet<RefInfo>();
Set<RefInfo> newFuncRefs = new TreeSet<RefInfo>();
for (String type : typesTo10) {
String typeName = type;
TypeInfo ti = info.getTypes().get(typeName);
String jsonSchemaDocument = storage.getTypeSchemaRecord(moduleName, type, ti.getTypeVersion());
Set<RefInfo> deps = storage.getTypeRefsByDep(moduleName, typeName, ti.getTypeVersion());
try {
KbTypedef specParsing = getTypeParsingDocumentNL(new TypeDefId(moduleName + "." + type, ti.getTypeVersion()));
ti.setTypeVersion(releaseVersion.toString());
saveType(info, ti, jsonSchemaDocument, specParsing, deps, transactionStartTime);
newTypeRefs.addAll(deps);
} catch (NoSuchTypeException ex) {
throw new IllegalStateException(ex); // cannot happen: the type was just read from storage
}
//ret.add(new AbsoluteTypeDefId(new TypeDefName(moduleName, type), newVersion.getMajor(), newVersion.getMinor()));
}
for (String funcName : funcsTo10) {
FuncInfo fi = info.getFuncs().get(funcName);
Set<RefInfo> deps = storage.getFuncRefsByDep(moduleName, funcName, fi.getFuncVersion());
try {
KbFuncdef specParsing = getFuncParsingDocumentNL(moduleName, funcName, fi.getFuncVersion());
fi.setFuncVersion(releaseVersion.toString());
saveFunc(info, fi, specParsing, deps, transactionStartTime);
newFuncRefs.addAll(deps);
} catch (NoSuchFuncException ex) {
throw new IllegalStateException(ex); // cannot happen: the function was just read from storage
}
}
String specDocument = storage.getModuleSpecRecord(info.getModuleName(), version);
writeModuleInfoSpec(info, specDocument, transactionStartTime);
storage.addRefs(newTypeRefs, newFuncRefs);
storage.setModuleReleaseVersion(moduleName, transactionStartTime);
transactionStartTime = -1;
} finally {
if (transactionStartTime > 0)
rollbackModuleTransaction(moduleName, transactionStartTime);
}
} else {
storage.setModuleReleaseVersion(moduleName, version);
}
} finally {
releaseWriteLock(moduleName);
}
List<AbsoluteTypeDefId> ret = new ArrayList<AbsoluteTypeDefId>();
for (TypeInfo ti : info.getTypes().values()) {
SemanticVersion typeVersion = new SemanticVersion(ti.getTypeVersion());
ret.add(new AbsoluteTypeDefId(new TypeDefName(moduleName, ti.getTypeName()),
typeVersion.getMajor(), typeVersion.getMinor()));
}
return ret;
}
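// Release sketch (module name and user hypothetical): after a successful release every returned
// type id carries a major version of at least 1.
//
//   List<AbsoluteTypeDefId> released = db.releaseModule("MyModule", "someOwner");
//   System.out.println(released);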
/**
 * Given a type identifier, return the parsed SPEC typedef definition for the type. If no
 * version is specified, the latest version of the definition is returned; otherwise the
 * same version resolution rules as {@link #getJsonSchema(TypeDefId)} apply.
 * @param typeDef identifier of the type, optionally carrying version information
 * @return the parsed typedef as a KbTypedef
 * @throws NoSuchTypeException
 * @throws NoSuchModuleException
 * @throws TypeStorageException
 */
public KbTypedef getTypeParsingDocument(TypeDefId typeDef)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
String moduleName = typeDef.getType().getModule();
requestReadLock(moduleName);
try {
return getTypeParsingDocumentNL(typeDef);
} finally {
releaseReadLock(moduleName);
}
}
private KbTypedef getTypeParsingDocumentNL(TypeDefId typeDef)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
String moduleName = typeDef.getType().getModule();
String typeName = typeDef.getType().getName();
checkModuleRegistered(moduleName);
SemanticVersion documentVer = findTypeVersion(typeDef);
if (documentVer == null)
throwNoSuchTypeException(typeDef);
String ret = storage.getTypeParseRecord(moduleName, typeName, documentVer.toString());
if (ret == null)
throw new NoSuchTypeException("Unable to read type parse record: '"+moduleName+"."+typeName+"'");
try {
Map<?,?> data = mapper.readValue(ret, Map.class);
return new KbTypedef().loadFromMap(data);
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
private void rollbackModuleTransaction(String moduleName, long versionTime) {
try {
TreeSet<Long> allVers = new TreeSet<Long>(storage.getAllModuleVersions(moduleName).keySet());
if (allVers.last() == versionTime) {
allVers.remove(allVers.last());
}
storage.removeModuleVersionAndSwitchIfNotCurrent(moduleName, versionTime, allVers.last());
} catch (Throwable ignore) {
ignore.printStackTrace();
}
}
private void writeModuleInfoSpec(ModuleInfo info, String specDocument,
long backupTime) throws TypeStorageException {
storage.writeModuleRecords(info, specDocument, backupTime);
}
public String getModuleSpecDocument(String moduleName)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
return storage.getModuleSpecRecord(moduleName, storage.getLastReleasedModuleVersion(moduleName));
} finally {
releaseReadLock(moduleName);
}
}
public String getModuleSpecDocument(String moduleName, long version)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
checkModule(moduleName, version);
return storage.getModuleSpecRecord(moduleName, version);
} finally {
releaseReadLock(moduleName);
}
}
public String getModuleSpecDocument(ModuleDefId moduleDef)
throws NoSuchModuleException, TypeStorageException {
String moduleName = moduleDef.getModuleName();
requestReadLock(moduleName);
try {
checkModuleRegistered(moduleName);
long version = findModuleVersion(moduleDef);
checkModule(moduleName, version);
return storage.getModuleSpecRecord(moduleName, version);
} finally {
releaseReadLock(moduleName);
}
}
private ModuleInfo getModuleInfoNL(String moduleName)
throws NoSuchModuleException, TypeStorageException {
return getModuleInfoNL(moduleName, storage.getLastReleasedModuleVersion(moduleName));
}
public ModuleInfo getModuleInfo(String moduleName)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
return getModuleInfoNL(moduleName);
} finally {
releaseReadLock(moduleName);
}
}
private ModuleInfo getModuleInfoNL(String moduleName, long version)
throws NoSuchModuleException, TypeStorageException {
checkModuleRegistered(moduleName);
return storage.getModuleInfoRecord(moduleName, version);
}
public ModuleInfo getModuleInfo(String moduleName, long version)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
return getModuleInfoNL(moduleName, version);
} finally {
releaseReadLock(moduleName);
}
}
public ModuleInfo getModuleInfo(ModuleDefId moduleDef)
throws NoSuchModuleException, TypeStorageException {
String moduleName = moduleDef.getModuleName();
requestReadLock(moduleName);
try {
return getModuleInfoNL(moduleName, findModuleVersion(moduleDef));
} finally {
releaseReadLock(moduleName);
}
}
public long getLastModuleVersion(String moduleName)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
checkModuleRegistered(moduleName);
return storage.getLastReleasedModuleVersion(moduleName);
} finally {
releaseReadLock(moduleName);
}
}
public long getLastModuleVersionWithUnreleased(String moduleName, String userId)
throws NoSuchModuleException, TypeStorageException, NoSuchPrivilegeException {
checkUserIsOwnerOrAdmin(moduleName, userId);
requestReadLock(moduleName);
try {
checkModuleRegistered(moduleName);
return storage.getLastModuleVersionWithUnreleased(moduleName);
} finally {
releaseReadLock(moduleName);
}
}
public List<Long> getAllModuleVersions(String moduleName)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
checkModuleRegistered(moduleName);
TreeMap<Long, Boolean> map = storage.getAllModuleVersions(moduleName);
List<Long> ret = new ArrayList<Long>();
for (Map.Entry<Long, Boolean> entry : map.entrySet())
if (entry.getValue() && !entry.getKey().equals(map.firstKey()))
ret.add(entry.getKey());
return ret;
} finally {
releaseReadLock(moduleName);
}
}
public List<String> getAllRegisteredFuncs(String moduleName)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
List<String> ret = new ArrayList<String>();
for (FuncInfo info : getModuleInfoNL(moduleName).getFuncs().values())
if (info.isSupported())
ret.add(info.getFuncName());
return ret;
} finally {
releaseReadLock(moduleName);
}
}
private SemanticVersion findLastFuncVersion(String moduleName, String funcName) throws TypeStorageException {
try {
return findLastFuncVersion(getModuleInfoNL(moduleName), funcName, false);
} catch (NoSuchModuleException e) {
return null;
}
}
private SemanticVersion findLastFuncVersion(ModuleInfo mi, String funcName,
boolean withNotSupported) {
FuncInfo fi = mi.getFuncs().get(funcName);
if (fi == null || !(fi.isSupported() || withNotSupported) || fi.getFuncVersion() == null)
return null;
return new SemanticVersion(fi.getFuncVersion());
}
/**
 * Return the latest version of the specified function. A version has a two-level structure of
 * integers separated by a dot, i.e. {@code <major>.<minor>}.
 * @param moduleName name of the module
 * @param funcName name of the function
 * @return latest version of the specified function
 */
public String getLatestFuncVersion(String moduleName, String funcName)
throws NoSuchFuncException, NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
checkModule(moduleName, null);
SemanticVersion ret = findLastFuncVersion(moduleName, funcName);
if (ret == null)
throwNoSuchFuncException(moduleName, funcName, null);
return ret.toString();
} finally {
releaseReadLock(moduleName);
}
}
private String saveFunc(ModuleInfo mi, String funcName, KbFuncdef specParsingDocument,
boolean notBackwardCompatible, Set<RefInfo> dependencies, long newModuleVersion)
throws NoSuchModuleException, TypeStorageException {
FuncInfo fi = mi.getFuncs().get(funcName);
if (fi == null) {
fi = new FuncInfo();
fi.setFuncName(funcName);
mi.getFuncs().put(funcName, fi);
}
fi.setSupported(true);
return saveFunc(mi, fi, specParsingDocument, notBackwardCompatible, dependencies, newModuleVersion);
}
private String saveFunc(ModuleInfo mi, FuncInfo fi, KbFuncdef specParsingDocument,
boolean notBackwardCompatible, Set<RefInfo> dependencies, long newModuleVersion)
throws NoSuchModuleException, TypeStorageException {
SemanticVersion version = findLastFuncVersion(mi, fi.getFuncName(), true);
if (version == null) {
version = defaultVersion;
} else {
int major = version.getMajor();
int minor = version.getMinor();
if (major > 0 && notBackwardCompatible) {
major++;
minor = 0;
} else {
minor++;
}
version = new SemanticVersion(major, minor);
}
fi.setFuncVersion(version.toString());
return saveFunc(mi, fi, specParsingDocument, dependencies, newModuleVersion);
}
private String saveFunc(ModuleInfo mi, FuncInfo fi, KbFuncdef specParsingDocument,
Set<RefInfo> dependencies, long newModuleVersion)
throws NoSuchModuleException, TypeStorageException {
if (dependencies != null)
for (RefInfo dep : dependencies) {
dep.setDepVersion(fi.getFuncVersion());
dep.setDepModuleVersion(newModuleVersion);
updateInternalRefVersion(dep, mi);
}
writeFuncParsingFile(mi.getModuleName(), fi.getFuncName(), fi.getFuncVersion(),
specParsingDocument, newModuleVersion);
return fi.getFuncVersion();
}
private void writeFuncParsingFile(String moduleName, String funcName, String version,
KbFuncdef document, long newModuleVersion)
throws TypeStorageException {
try {
StringWriter sw = new StringWriter();
mapper.writeValue(sw, document.getData());
sw.close();
storage.writeFuncParseRecord(moduleName, funcName, version.toString(),
newModuleVersion, sw.toString());
} catch (TypeStorageException ex) {
throw ex;
} catch (IOException ex) {
throw new IllegalStateException("Unexpected internal error: " + ex.getMessage(), ex);
}
}
public KbFuncdef getFuncParsingDocument(String moduleName, String funcName)
throws NoSuchFuncException, NoSuchModuleException, TypeStorageException {
return getFuncParsingDocument(moduleName, funcName, null);
}
public KbFuncdef getFuncParsingDocument(String moduleName, String funcName,
String version) throws NoSuchFuncException, NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
return getFuncParsingDocumentNL(moduleName, funcName, version);
} finally {
releaseReadLock(moduleName);
}
}
private KbFuncdef getFuncParsingDocumentNL(String moduleName, String funcName,
String version) throws NoSuchFuncException, NoSuchModuleException, TypeStorageException {
checkModuleRegistered(moduleName);
SemanticVersion curVersion = version == null ? findLastFuncVersion(moduleName, funcName) :
new SemanticVersion(version);
if (curVersion == null)
throwNoSuchFuncException(moduleName, funcName, null);
String ret = storage.getFuncParseRecord(moduleName, funcName, curVersion.toString());
if (ret == null)
throwNoSuchFuncException(moduleName, funcName, version);
try {
Map<?,?> data = mapper.readValue(ret, Map.class);
return new KbFuncdef().loadFromMap(data, null);
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
private void stopTypeSupport(ModuleInfo mi, String typeName, long newModuleVersion)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
TypeInfo ti = mi.getTypes().get(typeName);
if (ti == null)
throwNoSuchTypeException(mi.getModuleName(), typeName, null);
ti.setSupported(false);
}
public void stopTypeSupport(TypeDefName type, String userId)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException,
NoSuchPrivilegeException, SpecParseException {
String moduleName = type.getModule();
String typeName = type.getName();
saveModule(getModuleSpecDocument(moduleName), Collections.<String>emptySet(),
new HashSet<String>(Arrays.asList(typeName)), userId, false,
Collections.<String,Long>emptyMap(), null, "stopTypeSupport");
}
private void stopFuncSupport(ModuleInfo info, String funcName, long newModuleVersion)
throws NoSuchFuncException, NoSuchModuleException, TypeStorageException {
FuncInfo fi = info.getFuncs().get(funcName);
if (fi == null)
throwNoSuchFuncException(info.getModuleName(), funcName, null);
fi.setSupported(false);
}
public void removeModule(String moduleName, String userId)
throws NoSuchModuleException, TypeStorageException, NoSuchPrivilegeException {
requestWriteLock(moduleName);
try {
checkAdmin(userId);
checkModuleRegistered(moduleName);
storage.removeModule(moduleName);
} finally {
releaseWriteLock(moduleName);
}
}
/**
* @return all names of registered modules
*/
public List<String> getAllRegisteredModules() throws TypeStorageException {
return storage.getAllRegisteredModules();
}
private String getTypeVersion(TypeDefId typeDef)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
checkModuleRegistered(typeDef.getType().getModule());
SemanticVersion ret = findTypeVersion(typeDef);
if (ret == null)
throwNoSuchTypeException(typeDef);
return ret.toString();
}
public Set<RefInfo> getTypeRefsByDep(TypeDefId depTypeDef)
throws TypeStorageException, NoSuchTypeException, NoSuchModuleException {
String depModule = depTypeDef.getType().getModule();
requestReadLock(depModule);
try {
String depType = depTypeDef.getType().getName();
String version = getTypeVersion(depTypeDef);
return storage.getTypeRefsByDep(depModule, depType, version);
} finally {
releaseReadLock(depModule);
}
}
public Set<RefInfo> getTypeRefsByRef(TypeDefId refTypeDef)
throws TypeStorageException, NoSuchTypeException, NoSuchModuleException {
String refModule = refTypeDef.getType().getModule();
requestReadLock(refModule);
try {
String refType = refTypeDef.getType().getName();
String version = getTypeVersion(refTypeDef);
return storage.getTypeRefsByRef(refModule, refType, version);
} finally {
releaseReadLock(refModule);
}
}
public Set<RefInfo> getFuncRefsByDep(String depModule, String depFunc)
throws TypeStorageException, NoSuchModuleException {
requestReadLock(depModule);
try {
return storage.getFuncRefsByDep(depModule, depFunc, null);
} finally {
releaseReadLock(depModule);
}
}
public Set<RefInfo> getFuncRefsByDep(String depModule, String depFunc,
String version) throws TypeStorageException, NoSuchModuleException, NoSuchFuncException {
requestReadLock(depModule);
try {
checkModuleRegistered(depModule);
if (version == null) {
SemanticVersion sVer = findLastFuncVersion(depModule, depFunc);
if (sVer == null)
throwNoSuchFuncException(depModule, depFunc, version);
version = sVer.toString();
}
return storage.getFuncRefsByDep(depModule, depFunc, version);
} finally {
releaseReadLock(depModule);
}
}
public Set<RefInfo> getFuncRefsByRef(TypeDefId refTypeDef)
throws TypeStorageException, NoSuchTypeException, NoSuchModuleException {
String refModule = refTypeDef.getType().getModule();
requestReadLock(refModule);
try {
String refType = refTypeDef.getType().getName();
String version = getTypeVersion(refTypeDef);
return storage.getFuncRefsByRef(refModule, refType, version);
} finally {
releaseReadLock(refModule);
}
}
private File createTempDir() {
synchronized (tempDirLock) {
long suffix = System.currentTimeMillis();
File ret;
while (true) {
ret = new File(parentTempDir, "temp_" + suffix);
if (!ret.exists())
break;
suffix++;
}
ret.mkdirs();
return ret;
}
}
public void requestModuleRegistration(String moduleName, String ownerUserId)
throws TypeStorageException {
requestReadLockNM(moduleName);
try {
storage.addNewModuleRegistrationRequest(moduleName, ownerUserId);
} finally {
releaseReadLock(moduleName);
}
}
public List<OwnerInfo> getNewModuleRegistrationRequests(String adminUserId)
throws NoSuchPrivilegeException, TypeStorageException {
checkAdmin(adminUserId);
return storage.getNewModuleRegistrationRequests();
}
private void checkAdmin(String adminUserId)
throws NoSuchPrivilegeException {
if (!uip.isAdmin(adminUserId))
throw new NoSuchPrivilegeException("User " + adminUserId + " must be an administrator");
}
public void approveModuleRegistrationRequest(String adminUserId, String newModuleName)
throws TypeStorageException, NoSuchPrivilegeException {
checkAdmin(adminUserId);
requestWriteLock(newModuleName);
try {
String newOwnerUserId = storage.getOwnerForNewModuleRegistrationRequest(newModuleName);
autoGenerateModuleInfo(newModuleName, newOwnerUserId);
storage.removeNewModuleRegistrationRequest(newModuleName, newOwnerUserId);
// TODO: send notification to e-mail of requesting user
} finally {
releaseWriteLock(newModuleName);
}
}
public void refuseModuleRegistrationRequest(String adminUserId, String newModuleName)
throws TypeStorageException, NoSuchPrivilegeException {
checkAdmin(adminUserId);
requestWriteLock(newModuleName);
try {
String newOwnerUserId = storage.getOwnerForNewModuleRegistrationRequest(newModuleName);
storage.removeNewModuleRegistrationRequest(newModuleName, newOwnerUserId);
// TODO: send notification to e-mail of requesting user
} finally {
releaseWriteLock(newModuleName);
}
}
private void autoGenerateModuleInfo(String moduleName, String ownerUserId) throws TypeStorageException {
if (storage.checkModuleExist(moduleName))
throw new IllegalStateException("Module " + moduleName + " was already registered");
ModuleInfo info = new ModuleInfo();
info.setModuleName(moduleName);
storage.initModuleInfoRecord(info);
storage.addOwnerToModule(moduleName, ownerUserId, true);
storage.setModuleReleaseVersion(moduleName, info.getVersionTime());
}
public Map<TypeDefName, TypeChange> registerModule(String specDocument,
String userId) throws SpecParseException,
TypeStorageException, NoSuchPrivilegeException, NoSuchModuleException {
return registerModule(specDocument, Collections.<String>emptyList(), userId);
}
public Map<TypeDefName, TypeChange> registerModule(String specDocument,
List<String> typesToSave, String userId) throws SpecParseException,
TypeStorageException, NoSuchPrivilegeException, NoSuchModuleException {
return registerModule(specDocument, typesToSave, Collections.<String>emptyList(), userId);
}
public Map<TypeDefName, TypeChange> registerModule(String specDocument,
List<String> typesToSave, List<String> typesToUnregister, String userId)
throws SpecParseException, TypeStorageException, NoSuchPrivilegeException,
NoSuchModuleException {
return registerModule(specDocument, typesToSave, typesToUnregister, userId, false);
}
public Map<TypeDefName, TypeChange> registerModule(String specDocument,
List<String> typesToSave, List<String> typesToUnregister, String userId,
boolean dryMode) throws SpecParseException, TypeStorageException, NoSuchPrivilegeException,
NoSuchModuleException {
return registerModule(specDocument, typesToSave, typesToUnregister, userId, dryMode,
Collections.<String, Long>emptyMap());
}
public Map<TypeDefName, TypeChange> registerModule(String specDocument,
List<String> typesToSave, List<String> typesToUnregister, String userId,
boolean dryMode, Map<String, Long> moduleVersionRestrictions)
throws SpecParseException, TypeStorageException, NoSuchPrivilegeException,
NoSuchModuleException {
return registerModule(specDocument, typesToSave, typesToUnregister, userId, dryMode,
moduleVersionRestrictions, null);
}
public Map<TypeDefName, TypeChange> registerModule(String specDocument,
List<String> typesToSave, List<String> typesToUnregister, String userId,
boolean dryMode, Map<String, Long> moduleVersionRestrictions, Long prevModuleVersion)
throws SpecParseException, TypeStorageException, NoSuchPrivilegeException,
NoSuchModuleException {
return saveModule(specDocument, new HashSet<String>(typesToSave),
new HashSet<String>(typesToUnregister), userId, dryMode, moduleVersionRestrictions,
prevModuleVersion, "registerModule");
}
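// End-to-end registration sketch (illustrative; module name, users and spec content are hypothetical):
//
//   db.requestModuleRegistration("MyModule", "someOwner");                  // owner requests the module
//   db.approveModuleRegistrationRequest("adminUser", "MyModule");           // an admin approves the request
//   db.registerModule(specDocument, Arrays.asList("MyType"), "someOwner");  // upload the spec, registering "MyType"
//   db.releaseModule("MyModule", "someOwner");                              // bump 0.x types to 1.0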
public Map<TypeDefName, TypeChange> refreshModule(String moduleName,
String userId) throws SpecParseException,
TypeStorageException, NoSuchModuleException, NoSuchPrivilegeException {
return refreshModule(moduleName, Collections.<String>emptyList(), userId);
}
public Map<TypeDefName, TypeChange> refreshModule(String moduleName,
List<String> typesToSave, String userId) throws SpecParseException,
TypeStorageException, NoSuchModuleException, NoSuchPrivilegeException {
return refreshModule(moduleName, typesToSave, Collections.<String>emptyList(), userId);
}
public Map<TypeDefName, TypeChange> refreshModule(String moduleName,
List<String> typesToSave, List<String> typesToUnregister, String userId)
throws SpecParseException, TypeStorageException, NoSuchModuleException,
NoSuchPrivilegeException {
return refreshModule(moduleName, typesToSave, typesToUnregister, userId, false);
}
public Map<TypeDefName, TypeChange> refreshModule(String moduleName,
List<String> typesToSave, List<String> typesToUnregister, String userId,
boolean dryMode) throws SpecParseException, TypeStorageException, NoSuchModuleException,
NoSuchPrivilegeException {
return refreshModule(moduleName, typesToSave, typesToUnregister, userId, dryMode,
Collections.<String, Long>emptyMap());
}
public Map<TypeDefName, TypeChange> refreshModule(String moduleName,
List<String> typesToSave, List<String> typesToUnregister, String userId,
boolean dryMode, Map<String, Long> moduleVersionRestrictions)
throws SpecParseException, TypeStorageException, NoSuchModuleException,
NoSuchPrivilegeException {
String specDocument = getModuleSpecDocument(moduleName);
return saveModule(specDocument, new HashSet<String>(typesToSave),
new HashSet<String>(typesToUnregister), userId, dryMode, moduleVersionRestrictions,
null, "refreshModule");
}
private String correctSpecIncludes(String specDocument, List<String> includedModules)
throws SpecParseException {
try {
StringWriter withGoodImports = new StringWriter();
PrintWriter pw = null; // created lazily once the first non-blank, non-include line is reached
BufferedReader br = new BufferedReader(new StringReader(specDocument));
while (true) {
String l = br.readLine();
if (l == null)
break;
if (pw == null) {
if (l.trim().isEmpty())
continue;
if (l.startsWith("#include")) {
l = l.substring(8).trim();
if (!(l.startsWith("<") && l.endsWith(">")))
throw new IllegalStateException("Wrong include structure (expected form: <file_path>): " + l);
l = l.substring(1, l.length() - 1).trim();
if (l.indexOf('/') >= 0)
l = l.substring(l.lastIndexOf('/') + 1);
if (l.indexOf('.') >= 0)
l = l.substring(0, l.indexOf('.')).trim();
includedModules.add(l);
} else {
pw = new PrintWriter(withGoodImports);
for (String iModuleName : includedModules)
pw.println("#include <" + iModuleName + ".types>");
pw.println();
pw.println(l);
}
} else {
pw.println(l);
}
}
br.close();
pw.close();
return withGoodImports.toString();
} catch (Exception ex) {
throw new SpecParseException("Unexpected error during parsing of spec-file include declarations: " + ex.getMessage(), ex);
}
}
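// Include-rewriting example (hypothetical spec): an input header line such as
//   #include <some/path/CoreTypes.types>
// is normalized to
//   #include <CoreTypes.types>
// emitted above the module body, and "CoreTypes" is appended to the includedModules list.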
private KbModule compileSpecFile(String specDocument, List<String> includedModules,
Map<String, Map<String, String>> moduleToTypeToSchema,
Map<String, ModuleInfo> moduleToInfo, Map<String, Long> moduleVersionRestrictions)
throws SpecParseException {
File tempDir = createTempDir();
try {
File specFile = new File(tempDir, "currentlyCompiled.spec");
writeFile(specDocument, specFile);
Map<String, IncludeDependentPath> moduleToPath = new HashMap<String, IncludeDependentPath>();
for (String iModule : includedModules) {
Long iVersion = moduleVersionRestrictions.get(iModule);
if (iVersion == null)
iVersion = getLastModuleVersion(iModule);
saveIncludedModuleRecusive(tempDir, new IncludeDependentPath(), iModule, iVersion,
moduleToPath, moduleVersionRestrictions);
}
for (IncludeDependentPath path : moduleToPath.values())
moduleToInfo.put(path.info.getModuleName(), path.info);
List<KbService> services = KidlParser.parseSpec(specFile, tempDir, moduleToTypeToSchema, kbTopPath);
if (services.size() != 1)
throw new SpecParseException("Spec-file should consist of only one service");
if (services.get(0).getModules().size() != 1)
throw new SpecParseException("Spec-file should consist of only one module");
return services.get(0).getModules().get(0);
} catch (SpecParseException ex) {
throw ex;
} catch (Exception ex) {
throw new SpecParseException("Unexpected error during spec-file parsing: " + ex.getMessage(), ex);
} finally {
deleteTempDir(tempDir);
}
}
private Map<TypeDefName, TypeChange> saveModule(String specDocument,
Set<String> addedTypes, Set<String> unregisteredTypes, String userId,
boolean dryMode, Map<String, Long> moduleVersionRestrictions, Long prevModuleVersion,
String uploadMethod) throws SpecParseException, TypeStorageException, NoSuchPrivilegeException,
NoSuchModuleException {
List<String> includedModules = new ArrayList<String>();
specDocument = correctSpecIncludes(specDocument, includedModules);
String moduleName = null;
long transactionStartTime = -1;
Map<String, Map<String, String>> moduleToTypeToSchema = new HashMap<String, Map<String, String>>();
Map<String, ModuleInfo> moduleToInfo = new HashMap<String, ModuleInfo>();
KbModule module = compileSpecFile(specDocument, includedModules, moduleToTypeToSchema, moduleToInfo,
moduleVersionRestrictions);
moduleName = module.getModuleName();
checkModuleRegistered(moduleName);
checkUserIsOwnerOrAdmin(moduleName, userId);
long realPrevVersion = storage.getLastModuleVersionWithUnreleased(moduleName);
if (prevModuleVersion != null) {
if (realPrevVersion != prevModuleVersion)
throw new SpecParseException("Concurrent modification: previous module version is " +
realPrevVersion + " (but should be " + prevModuleVersion + ")");
}
requestWriteLock(moduleName);
try {
try {
ModuleInfo info = getModuleInfoNL(moduleName, realPrevVersion);
boolean isNew = !storage.checkModuleSpecRecordExist(moduleName, info.getVersionTime());
String prevMd5 = info.getMd5hash();
info.setMd5hash(DigestUtils.md5Hex(mapper.writeValueAsString(module.getData())));
info.setDescription(module.getComment());
Map<String, Long> includedModuleNameToVersion = new LinkedHashMap<String, Long>();
for (String iModule : includedModules)
includedModuleNameToVersion.put(iModule, moduleToInfo.get(iModule).getVersionTime());
Map<String, Long> prevIncludes = info.getIncludedModuleNameToVersion();
info.setIncludedModuleNameToVersion(includedModuleNameToVersion);
info.setUploadUserId(userId);
info.setUploadMethod(uploadMethod);
Map<String, String> typeToSchema = moduleToTypeToSchema.get(moduleName);
if (typeToSchema == null)
throw new SpecParseException("Json schema generation was missed for module: " + moduleName);
Set<String> oldRegisteredTypes = new HashSet<String>();
Set<String> oldRegisteredFuncs = new HashSet<String>();
if (!isNew) {
for (TypeInfo typeInfo : info.getTypes().values())
if (typeInfo.isSupported())
oldRegisteredTypes.add(typeInfo.getTypeName());
for (FuncInfo funcInfo : info.getFuncs().values())
if (funcInfo.isSupported())
oldRegisteredFuncs.add(funcInfo.getFuncName());
}
for (String type : unregisteredTypes) {
if (!oldRegisteredTypes.contains(type))
throw new SpecParseException("Type is in the unregister list but was not previously " +
"registered: " + type);
}
for (String type : addedTypes) {
if (oldRegisteredTypes.contains(type))
throw new SpecParseException("Type was already registered before: " + type);
if (unregisteredTypes.contains(type))
throw new SpecParseException("Type cannot be in both the add and unregister lists: " + type);
}
Set<String> newRegisteredTypes = new HashSet<String>();
newRegisteredTypes.addAll(oldRegisteredTypes);
newRegisteredTypes.removeAll(unregisteredTypes);
newRegisteredTypes.addAll(addedTypes);
Set<String> allNewTypes = new HashSet<String>();
Set<String> allNewFuncs = new HashSet<String>();
List<ComponentChange> comps = new ArrayList<ComponentChange>();
Map<TypeDefName, TypeChange> ret = new LinkedHashMap<TypeDefName, TypeChange>();
for (KbModuleComp comp : module.getModuleComponents()) {
if (comp instanceof KbTypedef) {
KbTypedef type = (KbTypedef)comp;
allNewTypes.add(type.getName());
if (newRegisteredTypes.contains(type.getName())) {
if (typeToSchema.get(type.getName()) == null)
throw new SpecParseException("Json schema wasn't generated for type: " + type.getName());
Change change = findTypeChange(info, type);
if (change == Change.noChange)
continue;
String jsonSchemaDocument = typeToSchema.get(type.getName());
Set<RefInfo> dependencies = extractTypeRefs(type, moduleToInfo, newRegisteredTypes);
jsonSchemaFromString(jsonSchemaDocument);
boolean notBackwardCompatible = (change == Change.notCompatible);
comps.add(new ComponentChange(true, false, type.getName(), jsonSchemaDocument, type, null,
notBackwardCompatible, dependencies));
TypeDefName typeDefName = new TypeDefName(info.getModuleName(), type.getName());
SemanticVersion newVer = getIncrementedVersion(info, type.getName(), notBackwardCompatible);
ret.put(typeDefName, new TypeChange(false, new AbsoluteTypeDefId(typeDefName, newVer.getMajor(),
newVer.getMinor()), jsonSchemaDocument));
}
} else if (comp instanceof KbFuncdef) {
KbFuncdef func = (KbFuncdef)comp;
allNewFuncs.add(func.getName());
Change change = findFuncChange(info, func);
if (change == Change.noChange)
continue;
Set<RefInfo> dependencies = new TreeSet<RefInfo>();
for (KbParameter param : func.getParameters())
dependencies.addAll(extractTypeRefs(moduleName, func.getName(), param, moduleToInfo, newRegisteredTypes));
for (KbParameter param : func.getReturnType())
dependencies.addAll(extractTypeRefs(moduleName, func.getName(), param, moduleToInfo, newRegisteredTypes));
boolean notBackwardCompatible = (change == Change.notCompatible);
comps.add(new ComponentChange(false, false, func.getName(), null, null, func, notBackwardCompatible,
dependencies));
}
}
for (String type : addedTypes) {
if (!allNewTypes.contains(type))
throw new SpecParseException("Type is in the add list but is not defined in the spec-file: " + type);
}
for (String type : newRegisteredTypes) {
if (!allNewTypes.contains(type))
unregisteredTypes.add(type);
}
for (String typeName : unregisteredTypes) {
comps.add(new ComponentChange(true, true, typeName, null, null, null, false, null));
TypeDefName typeDefName = new TypeDefName(info.getModuleName(), typeName);
ret.put(typeDefName, new TypeChange(true, null, null));
}
for (String funcName : oldRegisteredFuncs) {
if (!allNewFuncs.contains(funcName)) {
comps.add(new ComponentChange(false, true, funcName, null, null, null, false, null));
}
}
if (prevMd5 != null && prevMd5.equals(info.getMd5hash()) && prevIncludes.isEmpty() &&
info.getIncludedModuleNameToVersion().isEmpty() && comps.isEmpty()) {
String prevSpec = storage.getModuleSpecRecord(moduleName, info.getVersionTime());
if (prevSpec.equals(specDocument))
throw new SpecParseException("There is no difference between previous and current versions of " +
"module " + moduleName);
}
if (!dryMode) {
Set<RefInfo> createdTypeRefs = new TreeSet<RefInfo>();
Set<RefInfo> createdFuncRefs = new TreeSet<RefInfo>();
transactionStartTime = storage.generateNewModuleVersion(moduleName);
for (ComponentChange comp : comps) {
if (comp.isType) {
if (comp.isDeletion) {
stopTypeSupport(info, comp.name, transactionStartTime);
} else {
saveType(info, comp.name, comp.jsonSchemaDocument, comp.typeParsing, comp.notBackwardCompatible,
comp.dependencies, transactionStartTime);
createdTypeRefs.addAll(comp.dependencies);
}
} else {
if (comp.isDeletion) {
stopFuncSupport(info, comp.name, transactionStartTime);
} else {
saveFunc(info, comp.name, comp.funcParsing, comp.notBackwardCompatible, comp.dependencies,
transactionStartTime);
createdFuncRefs.addAll(comp.dependencies);
}
}
}
writeModuleInfoSpec(info, specDocument, transactionStartTime);
storage.addRefs(createdTypeRefs, createdFuncRefs);
transactionStartTime = -1;
}
return ret;
} catch (NoSuchModuleException ex) {
throw ex;
} catch (TypeStorageException ex) {
throw ex;
} catch (SpecParseException ex) {
throw ex;
} catch (Exception ex) {
throw new SpecParseException("Unexpected error during spec-file parsing: " + ex.getMessage(), ex);
} finally {
try {
if (transactionStartTime > 0) {
rollbackModuleTransaction(moduleName, transactionStartTime);
}
} catch (Exception ignore) {}
}
} finally {
releaseWriteLock(moduleName);
}
}
private Change findTypeChange(ModuleInfo info, KbTypedef newType)
throws SpecParseException, NoSuchTypeException, NoSuchModuleException, TypeStorageException {
if (!info.getTypes().containsKey(newType.getName()))
return Change.notCompatible;
TypeInfo ti = info.getTypes().get(newType.getName());
KbTypedef oldType = getTypeParsingDocumentNL(new TypeDefId(info.getModuleName() + "." + ti.getTypeName(),
ti.getTypeVersion()));
return findChange(oldType, newType);
}
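// Compatibility rules applied by findChange below (summarized from the implementation): changing a
// type's class, renaming a typedef, changing a tuple's arity, changing a scalar's kind or its id
// references, removing a struct field, or flipping a field's optionality is notCompatible; adding a
// new optional struct field is backwardCompatible; otherwise the result is noChange.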
private Change findChange(KbType oldType, KbType newType) throws SpecParseException {
if (!oldType.getClass().equals(newType.getClass()))
return Change.notCompatible;
if (newType instanceof KbTypedef) {
KbTypedef oldIType = (KbTypedef)oldType;
KbTypedef newIType = (KbTypedef)newType;
if (!newIType.getName().equals(oldIType.getName()))
return Change.notCompatible;
return findChange(oldIType.getAliasType(), newIType.getAliasType());
} else if (newType instanceof KbList) {
KbList oldIType = (KbList)oldType;
KbList newIType = (KbList)newType;
return findChange(oldIType.getElementType(), newIType.getElementType());
} else if (newType instanceof KbMapping) {
KbMapping oldIType = (KbMapping)oldType;
KbMapping newIType = (KbMapping)newType;
return findChange(oldIType.getValueType(), newIType.getValueType());
} else if (newType instanceof KbTuple) {
KbTuple oldIType = (KbTuple)oldType;
KbTuple newIType = (KbTuple)newType;
if (oldIType.getElementTypes().size() != newIType.getElementTypes().size())
return Change.notCompatible;
Change ret = Change.noChange;
for (int pos = 0; pos < oldIType.getElementTypes().size(); pos++) {
ret = Change.joinChanges(ret, findChange(oldIType.getElementTypes().get(pos),
newIType.getElementTypes().get(pos)));
if (ret == Change.notCompatible)
return ret;
}
return ret;
} else if (newType instanceof KbUnspecifiedObject) {
return Change.noChange;
} else if (newType instanceof KbScalar) {
KbScalar oldIType = (KbScalar)oldType;
KbScalar newIType = (KbScalar)newType;
if (oldIType.getScalarType() != newIType.getScalarType())
return Change.notCompatible;
String oldIdRefText = "" + oldIType.getIdReferences();
String newIdRefText = "" + newIType.getIdReferences();
return oldIdRefText.equals(newIdRefText) ? Change.noChange : Change.notCompatible;
} else if (newType instanceof KbStruct) {
KbStruct oldIType = (KbStruct)oldType;
KbStruct newIType = (KbStruct)newType;
Map<String, KbStructItem> newFields = new HashMap<String, KbStructItem>();
for (KbStructItem item : newIType.getItems())
newFields.put(item.getName(), item);
Change ret = Change.noChange;
for (KbStructItem oldItem : oldIType.getItems()) {
if (!newFields.containsKey(oldItem.getName()))
return Change.notCompatible;
ret = Change.joinChanges(ret, findChange(oldItem.getItemType(),
newFields.get(oldItem.getName()).getItemType()));
if (ret == Change.notCompatible)
return ret;
if (oldItem.isOptional() != newFields.get(oldItem.getName()).isOptional())
return Change.notCompatible;
newFields.remove(oldItem.getName());
}
for (KbStructItem newItem : newFields.values()) {
if (!newItem.isOptional())
return Change.notCompatible;
ret = Change.joinChanges(ret, Change.backwardCompatible);
}
return ret;
}
throw new SpecParseException("Unknown type class: " + newType.getClass().getSimpleName());
}
private Change findFuncChange(ModuleInfo info, KbFuncdef newFunc)
throws NoSuchFuncException, NoSuchModuleException, TypeStorageException, SpecParseException {
if (!info.getFuncs().containsKey(newFunc.getName())) {
return Change.notCompatible;
}
FuncInfo fi = info.getFuncs().get(newFunc.getName());
KbFuncdef oldFunc = getFuncParsingDocumentNL(info.getModuleName(), fi.getFuncName(), fi.getFuncVersion());
if (oldFunc.getParameters().size() != newFunc.getParameters().size() ||
oldFunc.getReturnType().size() != newFunc.getReturnType().size()) {
return Change.notCompatible;
}
Change ret = Change.noChange;
for (int pos = 0; pos < oldFunc.getParameters().size(); pos++) {
KbParameter oldParam = oldFunc.getParameters().get(pos);
KbParameter newParam = newFunc.getParameters().get(pos);
ret = Change.joinChanges(ret, findChange(oldParam.getType(), newParam.getType()));
if (ret == Change.notCompatible)
return ret;
}
for (int pos = 0; pos < oldFunc.getReturnType().size(); pos++) {
KbParameter oldRet = oldFunc.getReturnType().get(pos);
KbParameter newRet = newFunc.getReturnType().get(pos);
ret = Change.joinChanges(ret, findChange(oldRet.getType(), newRet.getType()));
if (ret == Change.notCompatible)
return ret;
}
return ret;
}
private Set<RefInfo> extractTypeRefs(KbTypedef main, Map<String, ModuleInfo> moduleToInfo,
Set<String> mainRegisteredTypes) throws SpecParseException {
Set<RefInfo> ret = new TreeSet<RefInfo>();
collectTypeRefs(ret, main.getModule(), main.getName(), main.getAliasType(), moduleToInfo, mainRegisteredTypes);
return ret;
}
private Set<RefInfo> extractTypeRefs(String module, String funcName, KbParameter main,
Map<String, ModuleInfo> moduleToInfo, Set<String> mainRegisteredTypes)
throws SpecParseException {
Set<RefInfo> ret = new TreeSet<RefInfo>();
collectTypeRefs(ret, module, funcName, main.getType(), moduleToInfo, mainRegisteredTypes);
return ret;
}
private void collectTypeRefs(Set<RefInfo> ret, String mainModule, String mainName, KbType internal,
Map<String, ModuleInfo> moduleToInfo, Set<String> mainRegisteredTypes)
throws SpecParseException {
if (internal instanceof KbTypedef) {
KbTypedef type = (KbTypedef)internal;
boolean isOuterModule = !type.getModule().equals(mainModule);
boolean terminal = isOuterModule || mainRegisteredTypes.contains(type.getName());
if (terminal) {
RefInfo ref = new RefInfo();
ref.setDepModule(mainModule);
ref.setDepName(mainName);
ref.setRefModule(type.getModule());
ref.setRefName(type.getName());
if (isOuterModule) {
ModuleInfo oModule = moduleToInfo.get(type.getModule());
TypeInfo oType = null;
if (oModule != null)
oType = oModule.getTypes().get(type.getName());
if (oType == null)
throw new SpecParseException("Reference to an unregistered external " +
"module/type: " + type.getModule() + "." + type.getName());
ref.setRefVersion(oType.getTypeVersion());
}
ret.add(ref);
} else {
collectTypeRefs(ret, mainModule, mainName, type.getAliasType(), moduleToInfo, mainRegisteredTypes);
}
} else if (internal instanceof KbList) {
KbList type = (KbList)internal;
collectTypeRefs(ret, mainModule, mainName, type.getElementType(), moduleToInfo, mainRegisteredTypes);
} else if (internal instanceof KbMapping) {
KbMapping type = (KbMapping)internal;
collectTypeRefs(ret, mainModule, mainName, type.getValueType(), moduleToInfo, mainRegisteredTypes);
} else if (internal instanceof KbStruct) {
KbStruct type = (KbStruct)internal;
for (KbStructItem item : type.getItems())
collectTypeRefs(ret, mainModule, mainName, item.getItemType(), moduleToInfo, mainRegisteredTypes);
} else if (internal instanceof KbTuple) {
KbTuple type = (KbTuple)internal;
for (KbType iType : type.getElementTypes())
collectTypeRefs(ret, mainModule, mainName, iType, moduleToInfo, mainRegisteredTypes);
}
}
private void saveIncludedModuleRecusive(File workDir, IncludeDependentPath parent,
String moduleName, long version, Map<String, IncludeDependentPath> savedModules,
Map<String, Long> moduleVersionRestrictions)
throws NoSuchModuleException, IOException, TypeStorageException, SpecParseException {
ModuleInfo info = getModuleInfoNL(moduleName, version);
IncludeDependentPath currentPath = new IncludeDependentPath(info, parent);
Long restriction = moduleVersionRestrictions.get(moduleName);
if (restriction != null && version != restriction)
throw new SpecParseException("Version of dependent module " + currentPath + " " +
"is not compatible with module version restriction: " + restriction);
if (savedModules.containsKey(moduleName)) {
IncludeDependentPath alreadyPath = savedModules.get(moduleName);
if (alreadyPath.info.getVersionTime() != currentPath.info.getVersionTime())
throw new SpecParseException("Incompatible module dependencies: " + alreadyPath +
" and " + currentPath);
return;
}
String spec = getModuleSpecDocument(moduleName);
writeFile(spec, new File(workDir, moduleName + ".types"));
savedModules.put(moduleName, currentPath);
for (Map.Entry<String, Long> entry : info.getIncludedModuleNameToVersion().entrySet()) {
String includedModule = entry.getKey();
long includedVersion = entry.getValue();
saveIncludedModuleRecusive(workDir, currentPath, includedModule, includedVersion,
savedModules, moduleVersionRestrictions);
}
}
private static void writeFile(String text, File f) throws IOException {
FileWriter fw = new FileWriter(f);
fw.write(text);
fw.close();
}
private void deleteTempDir(File dir) {
for (File f : dir.listFiles()) {
if (f.isFile()) {
f.delete();
} else {
deleteTempDir(f);
}
}
dir.delete();
}
public void addOwnerToModule(String knownOwnerUserId, String moduleName, String newOwnerUserId,
boolean withChangeOwnersPrivilege) throws TypeStorageException, NoSuchPrivilegeException {
checkUserCanChangePrivileges(knownOwnerUserId, moduleName);
storage.addOwnerToModule(moduleName, newOwnerUserId, withChangeOwnersPrivilege);
}
public void removeOwnerFromModule(String knownOwnerUserId, String moduleName, String removedOwnerUserId)
throws NoSuchPrivilegeException, TypeStorageException {
checkUserCanChangePrivileges(knownOwnerUserId, moduleName);
storage.removeOwnerFromModule(moduleName, removedOwnerUserId);
}
private void checkUserCanChangePrivileges(String knownOwnerUserId,
String moduleName) throws NoSuchPrivilegeException, TypeStorageException {
boolean canChangeOwnersPrivilege = checkUserIsOwnerOrAdmin(moduleName, knownOwnerUserId);
if (!canChangeOwnersPrivilege)
throw new NoSuchPrivilegeException("User " + knownOwnerUserId + " cannot change " +
"privileges for module " + moduleName);
}
public String getModuleDescription(String moduleName)
throws TypeStorageException, NoSuchModuleException {
return getModuleInfo(moduleName).getDescription();
}
public String getModuleDescription(String moduleName, long version)
throws TypeStorageException, NoSuchModuleException {
return getModuleInfo(moduleName, version).getDescription();
}
public String getTypeDescription(TypeDefId typeDef)
throws NoSuchTypeException, NoSuchModuleException, TypeStorageException {
return getTypeParsingDocument(typeDef).getComment();
}
public String getFuncDescription(String moduleName, String funcName, String version)
throws NoSuchFuncException, NoSuchModuleException, TypeStorageException {
return getFuncParsingDocument(moduleName, funcName, version).getComment();
}
public String getModuleMD5(String moduleName)
throws NoSuchModuleException, TypeStorageException {
return getModuleInfo(moduleName).getMd5hash();
}
public String getModuleMD5(String moduleName, long version)
throws TypeStorageException, NoSuchModuleException {
return getModuleInfo(moduleName, version).getMd5hash();
}
public Set<ModuleDefId> findModuleVersionsByMD5(String moduleName, String md5)
throws NoSuchModuleException, TypeStorageException {
requestReadLock(moduleName);
try {
Set<ModuleDefId> ret = new LinkedHashSet<ModuleDefId>();
for (long version : getAllModuleVersions(moduleName)) {
ModuleInfo info = getModuleInfoNL(moduleName, version);
if (md5.equals(info.getMd5hash()))
ret.add(new ModuleDefId(moduleName, version));
}
return ret;
} finally {
releaseReadLock(moduleName);
}
}
public List<ModuleDefId> findModuleVersionsByTypeVersion(TypeDefId typeDef)
throws NoSuchModuleException, TypeStorageException, NoSuchTypeException {
String moduleName = typeDef.getType().getModule();
requestReadLock(moduleName);
try {
typeDef = resolveTypeDefIdNL(typeDef);
List<ModuleDefId> ret = new ArrayList<ModuleDefId>();
Set<Long> moduleVersions = storage.getModuleVersionsForTypeVersion(moduleName,
typeDef.getType().getName(), typeDef.getVerString());
for (long moduleVersion : moduleVersions)
ret.add(new ModuleDefId(moduleName, moduleVersion));
return ret;
} finally {
releaseReadLock(moduleName);
}
}
public List<String> getModulesByOwner(String userId) throws TypeStorageException {
return new ArrayList<String>(storage.getModulesForOwner(userId).keySet());
}
private static class ComponentChange {
boolean isType;
boolean isDeletion;
String name;
String jsonSchemaDocument;
KbTypedef typeParsing;
KbFuncdef funcParsing;
boolean notBackwardCompatible;
Set<RefInfo> dependencies;
public ComponentChange(boolean isType, boolean isDeletion, String name,
String jsonSchemaDocument, KbTypedef typeParsing, KbFuncdef funcParsing,
boolean notBackwardCompatible, Set<RefInfo> dependencies) {
this.isType = isType;
this.isDeletion = isDeletion;
this.name = name;
this.jsonSchemaDocument = jsonSchemaDocument;
this.typeParsing = typeParsing;
this.funcParsing = funcParsing;
this.notBackwardCompatible = notBackwardCompatible;
this.dependencies = dependencies;
}
}
private static class IncludeDependentPath {
ModuleInfo info;
IncludeDependentPath parent;
public IncludeDependentPath() {
info = new ModuleInfo();
info.setModuleName("RootModule");
}
public IncludeDependentPath(ModuleInfo info, IncludeDependentPath parent) {
this.info = info;
this.parent = parent;
}
@Override
public String toString() {
StringBuilder ret = new StringBuilder();
for (IncludeDependentPath cur = this; cur != null; cur = cur.parent) {
if (ret.length() > 0)
ret.append("<-");
ret.append(cur.info.getModuleName());
if (cur.info.getVersionTime() > 0)
ret.append('(').append(cur.info.getVersionTime()).append(')');
}
return ret.toString();
}
}
private static class ModuleState {
int readerCount = 0;
int writerCount = 0;
}
}
|
package view.editor.entityeditor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.ResourceBundle;
import view.editor.Editor;
import view.enums.DefaultStrings;
import view.utilities.ButtonFactory;
import view.utilities.TextFieldFactory;
import model.component.visual.AnimatedSprite;
import model.entity.Entity;
import javafx.beans.property.SimpleObjectProperty;
import javafx.scene.Node;
import javafx.scene.control.Button;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.TextField;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.scene.layout.VBox;
import api.IComponent;
import api.IEntity;
import guiObjects.GuiObject;
import guiObjects.GuiObjectFactory;
/**
*
* @author Melissa Zhang
* @author Cali Nelson
* @author Ben Zhang
*
*/
public class EditorEntity extends Editor{
private IEntity myEntity;
private String myLanguage;
private ObservableList<IEntity> entityList = FXCollections.observableArrayList();
private Button saveButton, addButton, removeButton;
private ResourceBundle myResources, myLocs, myComponentNames;
private TextField name;
private ScrollPane scrollPane;
private List<String> myComponents;
private VBox container;
private final GuiObjectFactory guiFactory = new GuiObjectFactory();
public EditorEntity(String language, IEntity toEdit){
scrollPane = new ScrollPane();
myLanguage = language;
myResources = ResourceBundle.getBundle(language);
myComponentNames = ResourceBundle.getBundle(language + DefaultStrings.COMPONENTS.getDefault());
myEntity = (Entity) toEdit;
}
public EditorEntity(String language, IEntity toEdit, ObservableList<IEntity> addToList) {
this(language, toEdit);
entityList = addToList;
}
private void getComponents() {
myLocs = ResourceBundle.getBundle(DefaultStrings.COMPONENT_LOC.getDefault());
Enumeration<String> iter = myLocs.getKeys();
while(iter.hasMoreElements()) {
myComponents.add(myComponentNames.getString(iter.nextElement()));
}
}
@Override
public ScrollPane getPane() {
return scrollPane;
}
@Override
public void populateLayout() {
container = new VBox();
scrollPane.setContent(container);
container.getStyleClass().add("vbox");
myComponents = new ArrayList<String>();
addName();
addComponentsToPane();
addButtons();
}
private void addButtons() {
saveButton = ButtonFactory.makeButton(myResources.getString("saveEntity"), e -> save());
addButton = ButtonFactory.makeButton(myResources.getString("addComponent"), e -> addComponent());
removeButton = ButtonFactory.makeButton(myResources.getString("removeComponent"), e->removeComponent());
container.getChildren().addAll(addButton, removeButton, saveButton);
}
private void addName() {
name = TextFieldFactory.makeTextArea(myResources.getString("enterName"));
name.setText(myEntity.getName());
container.getChildren().add(name);
}
private void addComponentsToPane() {
Collection<IComponent> componentList = myEntity.getAllComponents();
for (IComponent component: componentList){
addObject(component);
}
}
private void addObject(IComponent component) {
if(component.getClass().equals(AnimatedSprite.class)){
container.getChildren().add( (Node) guiFactory.createNewGuiObject("AnimatedSprite", DefaultStrings.GUI_RESOURCES.getDefault(), myLanguage, myEntity).getGuiNode());
}
component.getProperties().stream().forEach(e -> addVisualObject(e));
}
private void addVisualObject(SimpleObjectProperty<?> property) {
GuiObject object = guiFactory.createNewGuiObject(property.getName(), DefaultStrings.GUI_RESOURCES.getDefault(),myLanguage, property, property.getValue());
if (object != null && (property.getName().equals("ImagePath") && !myEntity.hasComponent(AnimatedSprite.class))){
container.getChildren().add((Node) object.getGuiNode());
}
}
private void removeComponent() {
(new ComponentRemover(myLanguage, myEntity)).modifyComponentList();
updateEditor();
}
private void addComponent() {
(new ComponentAdder(myLanguage, myEntity)).modifyComponentList();
updateEditor();
}
@Override
public void updateEditor() {
getComponents();
populateLayout();
}
private void save() {
myEntity.updateComponents();
myEntity.setName(name.getText());
myEntity.getAllComponents().stream().forEach(e -> removeBindings(e));
entityList.remove(myEntity);
entityList.add(myEntity);
container = new VBox();
container.getChildren().add(super.saveMessage(myResources.getString("saveMessage")));
scrollPane.setContent(container);
}
private void removeBindings(IComponent e) {
e.removeBindings();
}
}
|
package com.orm;
import android.database.Cursor;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import static com.orm.util.ThreadUtil.*;
/**
* SugarDataSource provides basic CRUD operations and simplifies SugarRecord by using callbacks and
* executing queries asynchronously.
*
* @author jonatan.salas
*/
@SuppressWarnings("all")
public final class SugarDataSource<T> {
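// Illustrative usage sketch (not part of the original source; "Note" is a hypothetical
// SugarRecord subclass used only to show the callback-based API):
//
//   SugarDataSource<Note> notes = SugarDataSource.getInstance(Note.class);
//   notes.insert(new Note("groceries"),
//           new SugarDataSource.SuccessCallback<Long>() {
//               @Override
//               public void onSuccess(Long id) { /* handle the generated id */ }
//           },
//           new SugarDataSource.ErrorCallback() {
//               @Override
//               public void onError(Exception e) { /* handle the failure */ }
//           });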
private final Class<T> sClass;
/**
* SugarDataSource constructor with params
*
* @param tClass class argument later used to run SugarRecord queries
*/
private SugarDataSource(Class<T> tClass) {
if (null == tClass) {
throw new IllegalArgumentException("sClass shouldn't be null!");
}
this.sClass = tClass;
}
/**
* SugarDataSource static method to construct an Instance of this class.
*
* @param sClass class argument later used to run SugarRecord queries
* @param <T> generic argument that must be a SugarRecord extended class or @Table annotated class
* @return an instance of SugarDataSource
*/
public static <T> SugarDataSource<T> getInstance(Class<T> sClass) {
return new SugarDataSource<>(sClass);
}
/**
* Method used to perform an asynchronous insert. It works on top of the SugarRecord class and executes the
* insert query using Futures.
*
* @param object the object you want to insert. It must be a SugarRecord extended class or @Table annotated class
* @param successCallback the callback for a successful insert operation
* @param errorCallback the callback for an error in insert operation
*/
public void insert(final T object, final SuccessCallback<Long> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
checkNotNull(object);
final Callable<Long> call = new Callable<Long>() {
@Override
public Long call() throws Exception {
return SugarRecord.save(object);
}
};
final Future<Long> future = doInBackground(call);
Long id;
try {
id = future.get();
if (null == id) {
errorCallback.onError(new Exception("Error when performing insert of " + object.toString()));
} else {
successCallback.onSuccess(id);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* Method that performs a bulk insert. It works on top of SugarRecord class, and executes the query
* asynchronously using Futures.
*
* @param objects the list of objects that you want to insert. They must be SugarRecord extended objects or @Table annotated objects.
* @param successCallback the callback for successful bulk insert operation
* @param errorCallback the callback for an error in bulk insert operation
*/
public void bulkInsert(final List<T> objects, final SuccessCallback<List<Long>> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
checkNotNull(objects);
final Callable<List<Long>> call = new Callable<List<Long>>() {
@Override
public List<Long> call() throws Exception {
List<Long> ids = new ArrayList<>(objects.size());
for (int i = 0; i < objects.size(); i++) {
Long id = SugarRecord.save(objects.get(i));
ids.add(i, id);
}
return ids;
}
};
final Future<List<Long>> future = doInBackground(call);
List<Long> ids;
try {
ids = future.get();
if (null == ids || ids.isEmpty()) {
errorCallback.onError(new Exception("Error when performing bulk insert"));
} else {
successCallback.onSuccess(ids);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* Method that performs a findById. It works on top of the SugarRecord class, providing asynchronous
* execution with the use of Futures.
*
* @param id the id of the object you want to retrieve
* @param successCallback the callback to execute when the operation is successful
* @param errorCallback the callback to execute when the operation runs into trouble
*/
public void findById(final Long id, final SuccessCallback<T> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
checkNotNull(id);
final Callable<T> call = new Callable<T>() {
@Override
public T call() throws Exception {
return SugarRecord.findById(getSugarClass(), id);
}
};
final Future<T> future = doInBackground(call);
T object;
try {
object = future.get();
if (null == object) {
errorCallback.onError(new Exception("The object with " + id.toString() + "doesn't exist in database"));
} else {
successCallback.onSuccess(object);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* Method that lets you perform a custom query and retrieve a cursor. It works on top of the SugarRecord class;
* all the code is executed asynchronously using Futures and callbacks.
*
* @param whereClause the clause of the search
* @param whereArgs the arguments for the search
* @param groupBy the grouping clause for the results
* @param orderBy the ordering clause for the results
* @param limit the maximum number of results to return
* @param successCallback the callback to be executed if the operation is successful
* @param errorCallback the callback to be executed if the operation has an error
*/
public void query(final String whereClause, final String[] whereArgs, final String groupBy, final String orderBy, final String limit, final SuccessCallback<Cursor> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
final Callable<Cursor> call = new Callable<Cursor>() {
@Override
public Cursor call() throws Exception {
return SugarRecord.getCursor(getSugarClass(), whereClause, whereArgs, groupBy, orderBy, limit);
}
};
final Future<Cursor> future = doInBackground(call);
Cursor cursor;
try {
cursor = future.get();
if (null == cursor) {
errorCallback.onError(new Exception("Problem when trying to get the cursor"));
} else {
successCallback.onSuccess(cursor);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
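// Hedged usage sketch for query(), reusing the hypothetical "notes" data source from the sketch at
// the top of the class (all argument values are illustrative):
//
//   notes.query("title = ?", new String[]{"groceries"}, null, "id DESC", "10",
//           new SugarDataSource.SuccessCallback<Cursor>() {
//               @Override
//               public void onSuccess(Cursor cursor) {
//                   try {
//                       // iterate over the cursor here
//                   } finally {
//                       cursor.close();
//                   }
//               }
//           },
//           new SugarDataSource.ErrorCallback() {
//               @Override
//               public void onError(Exception e) { /* handle the failure */ }
//           });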
/**
* Method that lists all elements. It runs SugarRecord.listAll, but its code is executed asynchronously
* using Futures and callbacks.
*
* @param orderBy the way you want to order the objects you get
* @param successCallback the callback that is performed if the operation is successful
* @param errorCallback the callback that is performed if your code has an error
*/
public void listAll(final String orderBy, final SuccessCallback<List<T>> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
final Callable<List<T>> call = new Callable<List<T>>() {
@Override
public List<T> call() throws Exception {
return SugarRecord.listAll(getSugarClass(), orderBy);
}
};
final Future<List<T>> future = doInBackground(call);
List<T> objects;
try {
objects = future.get();
if (null == objects || objects.isEmpty()) {
errorCallback.onError(new Exception("There are no objects in the database"));
} else {
successCallback.onSuccess(objects);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* Method that works on top of SugarRecord.update and runs the code asynchronously via Futures
* and callbacks.
*
* @param object the object you want to update
* @param successCallback the callback that will be performed if the update is successful
* @param errorCallback the callback that will be performed if the update has an error
*/
public void update(final T object, final SuccessCallback<Long> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
checkNotNull(object);
final Callable<Long> call = new Callable<Long>() {
@Override
public Long call() throws Exception {
return SugarRecord.update(object);
}
};
final Future<Long> future = doInBackground(call);
Long id;
try {
id = future.get();
if (null == id) {
errorCallback.onError(new Exception("Error when performing update of " + object.toString()));
} else {
successCallback.onSuccess(id);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* This method works on top of SugarRecord and provides asynchronous code execution via the usage of
* Futures and callbacks to handle success result and error.
*
* @param object the object you want to delete
* @param successCallback the callback to be performed when the operation is successful
* @param errorCallback the callback to be performed when the operation has an error
*/
public void delete(final T object, final SuccessCallback<Boolean> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
checkNotNull(object);
final Callable<Boolean> call = new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
return SugarRecord.delete(object);
}
};
final Future<Boolean> future = doInBackground(call);
Boolean isDeleted;
try {
isDeleted = future.get();
if (null == isDeleted || !isDeleted) {
errorCallback.onError(new Exception("Error when performing delete of " + object.toString()));
} else {
successCallback.onSuccess(isDeleted);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* Method that performs a selective delete. The code is executed asynchronously via the usage of Futures
* and result callbacks
*
* @param whereClause the clause for the search
* @param whereArgs the values
* @param successCallback the callback to be executed if there is no trouble
* @param errorCallback the callback to be executed if there is an error
*/
public void delete(final String whereClause, final String[] whereArgs, final SuccessCallback<Integer> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
final Callable<Integer> call = new Callable<Integer>() {
@Override
public Integer call() throws Exception {
return SugarRecord.deleteAll(getSugarClass(), whereClause, whereArgs);
}
};
final Future<Integer> future = doInBackground(call);
Integer count;
try {
count = future.get();
if (null == count) {
errorCallback.onError(new Exception("Error when performing delete of all elements"));
} else {
successCallback.onSuccess(count);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* Method that deletes all data in a SQLite table.
*
* @param successCallback the callback that is executed if the operation is successful
* @param errorCallback the callback that is executed if there is an error
*/
public void deleteAll(final SuccessCallback<Integer> successCallback, final ErrorCallback errorCallback) {
delete(null, null, successCallback, errorCallback);
}
/**
* Method that performs a count
*
* @param successCallback the callback that is executed if this is successful
* @param errorCallback the callback that is executed if there is an error
*/
public void count(final SuccessCallback<Long> successCallback, final ErrorCallback errorCallback) {
checkNotNull(successCallback);
checkNotNull(errorCallback);
final Callable<Long> call = new Callable<Long>() {
@Override
public Long call() throws Exception {
return SugarRecord.count(getSugarClass());
}
};
final Future<Long> future = doInBackground(call);
Long count;
try {
count = future.get();
if (null == count) {
errorCallback.onError(new Exception("Error when trying to get count"));
} else {
successCallback.onSuccess(count);
}
} catch (Exception e) {
errorCallback.onError(e);
}
}
/**
* Method that checks that an object is not null
*
* @param object the object to be checked
*/
protected void checkNotNull(Object object) {
if (null == object) {
throw new IllegalArgumentException("object shouldn't be null");
}
}
public Class<T> getSugarClass() {
return sClass;
}
/**
* The callback to be executed when some SugarDataSource operation is successful.
*
* @author jonatan.salas
* @param <S> the parameter of the result that is passed to onSuccess method
*/
public interface SuccessCallback<S> {
/**
* This code is executed when a SugarDataSource operation completes without trouble.
*
* @param result the result of some SugarDataSource operation
*/
void onSuccess(final S result);
}
/**
* The callback to be executed when some SugarDataSource operation has an error.
*
* @author jonatan.salas
*/
public interface ErrorCallback {
/**
* This method is executed if some trouble is detected when using some SugarDataSource method.
*
* @param e the exception thrown by the method of SugarDataSource you have invoked
*/
void onError(final Exception e);
}
}
|
package michael.leetcode;
import java.util.Comparator;
import java.util.PriorityQueue;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentSkipListSet;
public class KthLargest {
// define a min heap.
private PriorityQueue<Integer> heap = null;
/**
* Using a priority queue to solve the problem; time is O(n log K).
*
* @param nums
* @param k
* @return
*/
public int findKthLargest(int[] nums, int k) {
if (nums.length < k) {
return -1;
}
heap = new PriorityQueue<Integer>(k, new Comparator<Integer>() {
@Override
public int compare(Integer o1, Integer o2) {
// Integer.compare avoids the overflow that "o1 - o2" can suffer for extreme values.
return Integer.compare(o1, o2);
}
});
for (int num : nums) {
// store in a min heap with max size K.
if (heap.size() < k) {
heap.add(num);
} else {
// replace the head of the min heap, if the present element is
// larger than the head.
if (heap.peek() < num) {
heap.remove();
heap.add(num);
}
}
}
return heap.peek();
}
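// Worked example (illustrative): nums = {3, 2, 1, 5, 6, 4}, k = 2.
// The size-2 min heap evolves as {3} -> {2, 3} -> {3, 5} -> {5, 6}; 1 and 4 never displace the head,
// so peek() returns 5, the 2nd largest element.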
private int[] results = new int[100000];
/**
* Depending on the context, if the numbers are known to lie in a bounded range, we can use a
* counting array to solve the problem, and time is O(n).
*
* @param nums
* @param k
* @return
*/
public int findKthLargest2(int[] nums, int k) {
for (int num : nums) {
results[results.length - num - 1] += 1;
}
int result = 0;
for (int i = 0; i < results.length; i++) {
result += results[i];
if (result >= k) {
return results.length - i - 1;
}
}
return -1;
}
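// How the counting pass works: with results.length = 100000, a value v is counted at index
// 100000 - v - 1, so larger values land at smaller indices. Scanning from index 0 therefore
// accumulates counts from the largest value downwards, and the first index i where the running total
// reaches k yields the kth largest value, recovered as results.length - i - 1.
// Note this assumes all values lie in [0, 99999]; negative or larger values would index out of bounds.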
// Median-of-three pivot selection: pick the median of nums[start], nums[middle] and nums[end]
// and move it to nums[start].
private void findPivot(int[] nums, int start, int end) {
int max = nums[start];
int middle = nums[start + ((end - start) >>> 1)]; // parentheses needed: '+' binds tighter than '>>>'
int min = nums[end];
if (max < min) {
swap(nums, start, end);
}
if (middle > nums[start]) {
// nums[start] already holds the median of the three.
} else if (middle < nums[end]) {
swap(nums, start, end);
} else {
swap(nums, start, start + ((end - start) >>> 1));
}
}
private void swap(int[] nums, int a1, int a2) {
int temp = nums[a1];
nums[a1] = nums[a2];
nums[a2] = temp;
}
public int findKthLargest3(int[] nums, int k) {
return findKthLargest3(nums, 0, nums.length - 1, k);
}
/**
* Using quickselect to solve the problem; average time is O(n).
*
* @param nums
* @param start
* @param end
* @param k
* @return
*/
private int findKthLargest3(int[] nums, int start, int end, int k) {
if (start >= end) {
return nums[end];
}
findPivot(nums, start, end);
int left = start + 1;
int right = start + 1;
for (; right <= end; right++) {
if (nums[right] <= nums[start]) {
continue;
} else {
swap(nums, left, right);
// left = left < end ? left + 1 : left;
left++;
}
}
swap(nums, start, left - 1);
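// Invariant after the partition: indices [start, left - 2] hold elements strictly greater than the
// pivot and [left, end] hold elements <= the pivot, so the pivot now at index left - 1 has rank
// "left" counted from the largest (everything before start was placed there by earlier partitions
// of the larger side).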
if (left < k) {
return findKthLargest3(nums, left, end, k);
} else if (left > k) {
return findKthLargest3(nums, start, left - 1, k);
} else {
return nums[left - 1];
}
}
// define a tree.
private TreeSet<Integer> tree = null;
/**
* Using a red-black tree (via TreeSet) to solve the problem.
*
* @param nums
* @param k
* @return
*/
public int findKthLargest4(int[] nums, int k) {
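// Note: the comparator below deliberately never returns 0, so values that compare equal are still
// kept as separate entries instead of being collapsed by the set; the ConcurrentSkipListSet variant
// further down relies on the same trick.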
tree = new TreeSet<Integer>(new Comparator<Integer>(){
@Override
public int compare(Integer o1, Integer o2) {
if(o1 > o2){
return 1;
}else{
return -1;
}
}
});
for (int num : nums) {
// store in a R-B tree with max size K.
if (tree.size() < k) {
tree.add(num);
} else {
// replace the head of the R-B tree, if the present element is
// larger than the head.
if (tree.first() < num) {
tree.pollFirst();
tree.add(num);
}
}
}
return tree.first();
}
//define a skip list.
private ConcurrentSkipListSet<Integer> skipList = null;
/**
* Using a skip list (via ConcurrentSkipListSet) to solve the problem.
*
* @param nums
* @param k
* @return
*/
public int findKthLargest5(int[] nums, int k) {
skipList = new ConcurrentSkipListSet<Integer>(new Comparator<Integer>(){
@Override
public int compare(Integer o1, Integer o2) {
if(o1 > o2){
return 1;
}else{
return -1;
}
}
});
for (int num : nums) {
// store in a skip list with max size K.
if (skipList.size() < k) {
skipList.add(num);
} else {
// replace the head of the skip list, if the present element is
// larger than the head.
if (skipList.first() < num) {
skipList.pollFirst();
skipList.add(num);
}
}
}
return skipList.first();
}
public static void main(String[] args) {
int[] nums = new int[] { 3, 2, 1, 5, 6, 4, 7, 10, 8, 9, 22 };
System.out.println(new KthLargest().findKthLargest(nums, 2));
System.out.println(new KthLargest().findKthLargest2(nums, 2));
System.out.println(new KthLargest().findKthLargest3(nums, 2));
System.out.println(new KthLargest().findKthLargest4(nums, 2));
System.out.println(new KthLargest().findKthLargest5(nums, 2));
int[] nums2 = new int[] { 1, 2 };
System.out.println(new KthLargest().findKthLargest3(nums2, 1));
}
}
|
package cgeo.geocaching.connector.gc;
import cgeo.geocaching.CgeoApplication;
import cgeo.geocaching.R;
import cgeo.geocaching.connector.AbstractLogin;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.network.HtmlImage;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.settings.Credentials;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.MatcherWrapper;
import cgeo.geocaching.utils.TextUtils;
import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import io.reactivex.Observable;
import io.reactivex.Single;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import okhttp3.Response;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
public class GCLogin extends AbstractLogin {
private static final String LOGIN_URI = "https:
private static final String REQUEST_VERIFICATION_TOKEN = "__RequestVerificationToken";
private class StatusException extends RuntimeException {
private static final long serialVersionUID = -597420116705938433L;
final StatusCode statusCode;
StatusException(final StatusCode statusCode) {
super("Status code: " + statusCode);
this.statusCode = statusCode;
}
}
private GCLogin() {
// singleton
}
public static GCLogin getInstance() {
return SingletonHolder.INSTANCE;
}
private static class SingletonHolder {
private static final GCLogin INSTANCE = new GCLogin();
}
private static StatusCode resetGcCustomDate(final StatusCode statusCode) {
Settings.setGcCustomDate(GCConstants.DEFAULT_GC_DATE);
return statusCode;
}
@Override
@NonNull
protected StatusCode login(final boolean retry) {
return login(retry, Settings.getCredentials(GCConnector.getInstance()));
}
@Override
@NonNull
protected StatusCode login(final boolean retry, @NonNull final Credentials credentials) {
final StatusCode status = loginInternal(retry, credentials);
if (status != StatusCode.NO_ERROR) {
resetLoginStatus();
}
return status;
}
private StatusCode loginInternal(final boolean retry, @NonNull final Credentials credentials) {
if (credentials.isInvalid()) {
clearLoginInfo();
Log.w("Login.login: No login information stored");
return resetGcCustomDate(StatusCode.NO_LOGIN_INFO_STORED);
}
final String username = credentials.getUserName();
setActualStatus(CgeoApplication.getInstance().getString(R.string.init_login_popup_working));
try {
final String tryLoggedInData = getLoginPage();
if (StringUtils.isBlank(tryLoggedInData)) {
Log.w("Login.login: Failed to retrieve login page (1st)");
return StatusCode.CONNECTION_FAILED; // no login page
}
if (getLoginStatus(tryLoggedInData)) {
Log.i("Already logged in Geocaching.com as " + username + " (" + Settings.getGCMemberStatus() + ')');
if (switchToEnglish(tryLoggedInData) && retry) {
return login(false, credentials);
}
setHomeLocation();
refreshMemberStatus();
detectGcCustomDate();
return StatusCode.NO_ERROR; // logged in
}
final String requestVerificationToken = extractRequestVerificationToken(tryLoggedInData);
if (StringUtils.isEmpty(requestVerificationToken)) {
Log.w("GCLogin.login: failed to find request verification token");
return StatusCode.LOGIN_PARSE_ERROR;
}
final String loginData = postCredentials(credentials, requestVerificationToken);
if (StringUtils.isBlank(loginData)) {
Log.w("Login.login: Failed to retrieve login page (2nd)");
// FIXME: should it be CONNECTION_FAILED to match the first attempt?
return StatusCode.COMMUNICATION_ERROR; // no login page
}
assert loginData != null; // Caught above
if (getLoginStatus(loginData)) {
if (switchToEnglish(loginData) && retry) {
return login(false, credentials);
}
Log.i("Successfully logged in Geocaching.com as " + username + " (" + Settings.getGCMemberStatus() + ')');
setHomeLocation();
refreshMemberStatus();
detectGcCustomDate();
return StatusCode.NO_ERROR; // logged in
}
if (loginData.contains("your username or password is incorrect")) {
Log.i("Failed to log in Geocaching.com as " + username + " because of wrong username/password");
return resetGcCustomDate(StatusCode.WRONG_LOGIN_DATA); // wrong login
}
if (loginData.contains("You must validate your account before you can log in.")) {
Log.i("Failed to log in Geocaching.com as " + username + " because account needs to be validated first");
return resetGcCustomDate(StatusCode.UNVALIDATED_ACCOUNT);
}
Log.i("Failed to log in Geocaching.com as " + username + " for some unknown reason");
if (retry) {
switchToEnglish(loginData);
return login(false, credentials);
}
return resetGcCustomDate(StatusCode.UNKNOWN_ERROR); // can't login
} catch (final StatusException status) {
return status.statusCode;
} catch (final Exception ignored) {
Log.w("Login.login: communication error");
return StatusCode.CONNECTION_FAILED;
}
}
public StatusCode logout() {
try {
getResponseBodyOrStatus(Network.postRequest("https:
} catch (final StatusException status) {
return status.statusCode;
} catch (final Exception ignored) {
}
resetLoginStatus();
return StatusCode.NO_ERROR;
}
private String getResponseBodyOrStatus(final Response response) {
final String body;
try {
body = response.body().string();
} catch (final IOException ignore) {
throw new StatusException(StatusCode.COMMUNICATION_ERROR);
}
if (response.code() == 503 && TextUtils.matches(body, GCConstants.PATTERN_MAINTENANCE)) {
throw new StatusException(StatusCode.MAINTENANCE);
} else if (!response.isSuccessful()) {
throw new StatusException(StatusCode.COMMUNICATION_ERROR);
}
return body;
}
private String getLoginPage() {
return getResponseBodyOrStatus(Network.getRequest(LOGIN_URI).blockingGet());
}
@Nullable
private String extractRequestVerificationToken(final String page) {
final Document document = Jsoup.parse(page);
final String value = document.select(".login > form > input[name=\"" + REQUEST_VERIFICATION_TOKEN + "\"]").attr("value");
return StringUtils.isNotEmpty(value) ? value : null;
}
private String postCredentials(final Credentials credentials, final String requestVerificationToken) {
final Parameters params = new Parameters("Username", credentials.getUserName(),
"Password", credentials.getPassword(), REQUEST_VERIFICATION_TOKEN, requestVerificationToken);
return getResponseBodyOrStatus(Network.postRequest(LOGIN_URI, params).blockingGet());
}
private static String removeDotAndComma(final String str) {
return StringUtils.replaceChars(str, ".,", null);
}
/**
* Check if the user has been logged in when he retrieved the data.
*
* @return {@code true} if user is logged in, {@code false} otherwise
*/
boolean getLoginStatus(@Nullable final String page) {
if (StringUtils.isBlank(page)) {
Log.w("Login.checkLogin: No page given");
return false;
}
assert page != null;
if (TextUtils.matches(page, GCConstants.PATTERN_MAP_LOGGED_IN)) {
return true;
}
setActualStatus(CgeoApplication.getInstance().getString(R.string.init_login_popup_ok));
// on every page except login page
final String username = GCParser.getUsername(page);
setActualLoginStatus(StringUtils.isNotBlank(username));
if (isActualLoginStatus()) {
setActualUserName(username);
int cachesCount = 0;
try {
cachesCount = Integer.parseInt(removeDotAndComma(TextUtils.getMatch(page, GCConstants.PATTERN_CACHES_FOUND, true, "0")));
} catch (final NumberFormatException e) {
Log.e("getLoginStatus: bad cache count", e);
}
setActualCachesFound(cachesCount);
return true;
}
// login page
setActualLoginStatus(TextUtils.matches(page, GCConstants.PATTERN_LOGIN_NAME_LOGIN_PAGE));
if (isActualLoginStatus()) {
setActualUserName(Settings.getUserName());
// number of caches found is not part of this page
return true;
}
setActualStatus(CgeoApplication.getInstance().getString(R.string.init_login_popup_failed));
return false;
}
private boolean isLanguageEnglish(@NonNull final String page) {
final Element languageElement = Jsoup.parse(page).select("div.language-dropdown > select > option[selected=\"selected\"]").first();
return languageElement != null && StringUtils.equals(languageElement.text(), "English");
}
/**
* Ensure that the web site is in English.
*
* @param previousPage
* the content of the last loaded page
* @return {@code true} if a switch was necessary and successfully performed (non-English -> English)
*/
private boolean switchToEnglish(final String previousPage) {
if (previousPage != null && isLanguageEnglish(previousPage)) {
Log.i("Geocaching.com language already set to English");
// get find count
getLoginStatus(previousPage);
} else {
try {
final String page = Network.getResponseData(Network.getRequest("https:
Log.i("changed language on geocaching.com to English");
getLoginStatus(page);
return true;
} catch (final Exception ignored) {
Log.e("Failed to set geocaching.com language to English");
}
}
return false;
}
/**
* Retrieve avatar url from GC
*
* @return the avatar url
*/
public String getAvatarUrl() {
try {
final String responseData = StringUtils.defaultString(Network.getResponseData(Network.getRequest("https:
final String profile = TextUtils.replaceWhitespace(responseData);
setActualCachesFound(Integer.parseInt(removeDotAndComma(TextUtils.getMatch(profile, GCConstants.PATTERN_CACHES_FOUND, true, "-1"))));
final String avatarURL = TextUtils.getMatch(profile, GCConstants.PATTERN_AVATAR_IMAGE_PROFILE_PAGE, false, null);
if (avatarURL != null) {
return avatarURL.replace("avatar", "user/large");
}
// No match? There may be no avatar set by user.
Log.d("No avatar set for user");
} catch (final Exception e) {
Log.w("Error when retrieving user avatar url", e);
}
return StringUtils.EMPTY;
}
/**
* Download the avatar
*
* @return the avatar drawable
*/
public Observable<Drawable> downloadAvatar() {
try {
final String avatarURL = getAvatarUrl();
if (!avatarURL.isEmpty()) {
final HtmlImage imgGetter = new HtmlImage(HtmlImage.SHARED, false, false, false);
return imgGetter.fetchDrawable(avatarURL).cast(Drawable.class);
}
} catch (final Exception e) {
Log.w("Error when retrieving user avatar", e);
}
return null;
}
/**
* Retrieve the home location
*
* @return a Single containing the home location, or IOException
*/
static Single<String> retrieveHomeLocation() {
return Network.getResponseDocument(Network.getRequest("https:
.map(new Function<Document, String>() {
@Override
public String apply(final Document document) {
final Document innerHtml = Jsoup.parse(document.getElementById("tplSearchCoords").html());
return innerHtml.select("input.search-coordinates").attr("value");
}
});
}
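// Note: the mapping above takes the inner HTML of the element with id "tplSearchCoords" and returns
// the value of its "input.search-coordinates" field, i.e. the stored home location string.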
private static void setHomeLocation() {
retrieveHomeLocation().subscribe(new Consumer<String>() {
@Override
public void accept(final String homeLocationStr) throws Exception {
if (StringUtils.isNotBlank(homeLocationStr) && !StringUtils.equals(homeLocationStr, Settings.getHomeLocation())) {
assert homeLocationStr != null;
Log.i("Setting home location to " + homeLocationStr);
Settings.setHomeLocation(homeLocationStr);
}
}
}, new Consumer<Throwable>() {
@Override
public void accept(final Throwable throwable) throws Exception {
Log.w("Unable to retrieve the home location");
}
});
}
private static void refreshMemberStatus() {
Network.getResponseDocument(Network.getRequest("https:
.subscribe(new Consumer<Document>() {
@Override
public void accept(final Document document) throws Exception {
final Element membership = document.select("dl.membership-details > dd:eq(3)").first();
if (membership != null) {
final GCMemberState memberState = GCMemberState.fromString(membership.text());
Log.d("Setting member status to " + memberState);
Settings.setGCMemberStatus(memberState);
} else {
Log.w("Cannot determine member status");
}
}
},
new Consumer<Throwable>() {
@Override
public void accept(final Throwable throwable) throws Exception {
Log.w("Unable to retrieve member status", throwable);
}
});
}
/**
* Detect user date settings on geocaching.com
*/
private static void detectGcCustomDate() {
try {
final Document document = Network.getResponseDocument(Network.getRequest("https:
final String customDate = document.select("select#SelectedDateFormat option[selected]").attr("value");
if (StringUtils.isNotBlank(customDate)) {
Log.d("Setting GC custom date to " + customDate);
Settings.setGcCustomDate(customDate);
} else {
Settings.setGcCustomDate(GCConstants.DEFAULT_GC_DATE);
Log.w("cannot find custom date format in geocaching.com preferences page, using default");
}
} catch (final Exception e) {
Settings.setGcCustomDate(GCConstants.DEFAULT_GC_DATE);
Log.w("cannot set custom date from geocaching.com preferences page, using default", e);
}
}
public static Date parseGcCustomDate(final String input, final String format) throws ParseException {
return new SimpleDateFormat(format, Locale.ENGLISH).parse(input.trim());
}
static Date parseGcCustomDate(final String input) throws ParseException {
return parseGcCustomDate(input, Settings.getGcCustomDate());
}
static String formatGcCustomDate(final int year, final int month, final int day) {
return new SimpleDateFormat(Settings.getGcCustomDate(), Locale.ENGLISH).format(new GregorianCalendar(year, month - 1, day).getTime());
}
/**
* checks if an Array of Strings is empty or not. Empty means:
* - Array is null
* - or all elements are null or empty strings
*/
public static boolean isEmpty(final String[] a) {
if (a == null) {
return true;
}
for (final String s : a) {
if (StringUtils.isNotEmpty(s)) {
return false;
}
}
return true;
}
/**
* read all viewstates from page
*
* @return String[] with all view states
*/
public static String[] getViewstates(final String page) {
// Get the number of viewstates.
// If there is only one viewstate, __VIEWSTATEFIELDCOUNT is not present
if (page == null) { // no network access
return null;
}
int count = 1;
final MatcherWrapper matcherViewstateCount = new MatcherWrapper(GCConstants.PATTERN_VIEWSTATEFIELDCOUNT, page);
if (matcherViewstateCount.find()) {
try {
count = Integer.parseInt(matcherViewstateCount.group(1));
} catch (final NumberFormatException e) {
Log.e("getViewStates", e);
}
}
final String[] viewstates = new String[count];
// Get the viewstates
final MatcherWrapper matcherViewstates = new MatcherWrapper(GCConstants.PATTERN_VIEWSTATES, page);
while (matcherViewstates.find()) {
final String sno = matcherViewstates.group(1); // number of viewstate
int no;
if (StringUtils.isEmpty(sno)) {
no = 0;
} else {
try {
no = Integer.parseInt(sno);
} catch (final NumberFormatException e) {
Log.e("getViewStates", e);
no = 0;
}
}
viewstates[no] = matcherViewstates.group(2);
}
if (viewstates.length != 1 || viewstates[0] != null) {
return viewstates;
}
// no viewstates were present
return null;
}
/**
* put viewstates into request parameters
*/
static void putViewstates(final Parameters params, final String[] viewstates) {
if (ArrayUtils.isEmpty(viewstates)) {
return;
}
params.put("__VIEWSTATE", viewstates[0]);
if (viewstates.length > 1) {
for (int i = 1; i < viewstates.length; i++) {
params.put("__VIEWSTATE" + i, viewstates[i]);
}
params.put("__VIEWSTATEFIELDCOUNT", String.valueOf(viewstates.length));
}
}
/**
* transfers the viewstates variables from a page (response) to parameters
* (next request)
*/
static void transferViewstates(final String page, final Parameters params) {
putViewstates(params, getViewstates(page));
}
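// Illustrative viewstate flow (names and values below are placeholders, not from the original code):
//
//   final String page = getRequestLogged(someUri, null);            // page carrying __VIEWSTATE* fields
//   final Parameters params = new Parameters("someField", "someValue");
//   transferViewstates(page, params);                               // copy __VIEWSTATE* into the next request
//   final String next = Network.getResponseData(Network.postRequest(someUri, params));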
/**
* POST HTTP request. Do the request a second time if the user is not logged in
*
*/
String postRequestLogged(final String uri, final Parameters params) {
final String data = Network.getResponseData(Network.postRequest(uri, params));
if (getLoginStatus(data)) {
return data;
}
if (login() == StatusCode.NO_ERROR) {
return Network.getResponseData(Network.postRequest(uri, params));
}
Log.i("Working as guest.");
return data;
}
/**
* GET HTTP request. Do the request a second time if the user is not logged in
*
*/
@Nullable
String getRequestLogged(@NonNull final String uri, @Nullable final Parameters params) {
try {
final Response response = Network.getRequest(uri, params).blockingGet();
final String data = Network.getResponseData(response, canRemoveWhitespace(uri));
// A page not found will not be found if the user logs in either
if (response.code() == 404 || getLoginStatus(data)) {
return data;
}
if (login() == StatusCode.NO_ERROR) {
return Network.getResponseData(Network.getRequest(uri, params), canRemoveWhitespace(uri));
}
Log.w("Working as guest.");
return data;
} catch (final Exception ignored) {
// FIXME: propagate the exception instead
return null;
}
}
/**
* Unfortunately, the cache details page contains user-generated whitespace in the personal note, therefore we cannot
* remove the whitespace from cache details pages.
*
*/
private static boolean canRemoveWhitespace(final String uri) {
return !StringUtils.contains(uri, "cache_details");
}
/**
* Get user session & session token from the Live Map. Needed for following requests.
*
* @return first is user session, second is session token
*/
@NonNull
public MapTokens getMapTokens() {
final String data = getRequestLogged(GCConstants.URL_LIVE_MAP, null);
final String userSession = TextUtils.getMatch(data, GCConstants.PATTERN_USERSESSION, "");
final String sessionToken = TextUtils.getMatch(data, GCConstants.PATTERN_SESSIONTOKEN, "");
return new MapTokens(userSession, sessionToken);
}
}
|
package cgeo.geocaching.connector.gc;
import cgeo.geocaching.CgeoApplication;
import cgeo.geocaching.R;
import cgeo.geocaching.connector.AbstractLogin;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.network.Cookies;
import cgeo.geocaching.network.HtmlImage;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Parameters;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.MatcherWrapper;
import cgeo.geocaching.utils.TextUtils;
import ch.boye.httpclientandroidlib.HttpResponse;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import rx.Observable;
import android.graphics.drawable.Drawable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
public class GCLogin extends AbstractLogin {
private final static String ENGLISH = "<a href=\"#\">English ▼</a>";
public static final String LANGUAGE_CHANGE_URI = "http:
private GCLogin() {
// singleton
}
public static GCLogin getInstance() {
return SingletonHolder.INSTANCE;
}
private static class SingletonHolder {
private static final GCLogin INSTANCE = new GCLogin();
}
private static StatusCode resetGcCustomDate(final StatusCode statusCode) {
Settings.setGcCustomDate("MM/dd/yyyy");
return statusCode;
}
@Override
protected StatusCode login(boolean retry) {
final ImmutablePair<String, String> credentials = Settings.getGcCredentials();
final String username = credentials.left;
final String password = credentials.right;
if (StringUtils.isEmpty(username) || StringUtils.isEmpty(password)) {
clearLoginInfo();
Log.e("Login.login: No login information stored");
return resetGcCustomDate(StatusCode.NO_LOGIN_INFO_STORED);
}
setActualStatus(CgeoApplication.getInstance().getString(R.string.init_login_popup_working));
HttpResponse loginResponse = Network.getRequest("https:
String loginData = Network.getResponseData(loginResponse);
if (loginResponse != null && loginResponse.getStatusLine().getStatusCode() == 503 && TextUtils.matches(loginData, GCConstants.PATTERN_MAINTENANCE)) {
return StatusCode.MAINTENANCE;
}
if (StringUtils.isBlank(loginData)) {
Log.e("Login.login: Failed to retrieve login page (1st)");
return StatusCode.CONNECTION_FAILED; // no login page
}
if (getLoginStatus(loginData)) {
Log.i("Already logged in Geocaching.com as " + username + " (" + Settings.getGCMemberStatus() + ')');
if (switchToEnglish(loginData) && retry) {
return login(false);
}
detectGcCustomDate();
return StatusCode.NO_ERROR; // logged in
}
Cookies.clearCookies();
Settings.setCookieStore(null);
final Parameters params = new Parameters(
"__EVENTTARGET", "",
"__EVENTARGUMENT", "",
"ctl00$ContentBody$tbUsername", username,
"ctl00$ContentBody$tbPassword", password,
"ctl00$ContentBody$cbRememberMe", "on",
"ctl00$ContentBody$btnSignIn", "Login");
final String[] viewstates = GCLogin.getViewstates(loginData);
if (isEmpty(viewstates)) {
Log.e("Login.login: Failed to find viewstates");
return StatusCode.LOGIN_PARSE_ERROR; // no viewstates
}
GCLogin.putViewstates(params, viewstates);
loginResponse = Network.postRequest("https:
loginData = Network.getResponseData(loginResponse);
if (StringUtils.isBlank(loginData)) {
Log.e("Login.login: Failed to retrieve login page (2nd)");
// FIXME: should it be CONNECTION_FAILED to match the first attempt?
return StatusCode.COMMUNICATION_ERROR; // no login page
}
assert loginData != null; // Caught above
if (getLoginStatus(loginData)) {
if (switchToEnglish(loginData) && retry) {
return login(false);
}
Log.i("Successfully logged in Geocaching.com as " + username + " (" + Settings.getGCMemberStatus() + ')');
Settings.setCookieStore(Cookies.dumpCookieStore());
detectGcCustomDate();
return StatusCode.NO_ERROR; // logged in
}
if (loginData.contains("Your username/password combination does not match.")) {
Log.i("Failed to log in Geocaching.com as " + username + " because of wrong username/password");
return resetGcCustomDate(StatusCode.WRONG_LOGIN_DATA); // wrong login
}
if (loginData.contains("You must validate your account before you can log in.")) {
Log.i("Failed to log in Geocaching.com as " + username + " because account needs to be validated first");
return resetGcCustomDate(StatusCode.UNVALIDATED_ACCOUNT);
}
Log.i("Failed to log in Geocaching.com as " + username + " for some unknown reason");
if (retry) {
switchToEnglish(loginData);
return login(false);
}
return resetGcCustomDate(StatusCode.UNKNOWN_ERROR); // can't login
}
public StatusCode logout() {
final HttpResponse logoutResponse = Network.getRequest("https:
final String logoutData = Network.getResponseData(logoutResponse);
if (logoutResponse != null && logoutResponse.getStatusLine().getStatusCode() == 503 && TextUtils.matches(logoutData, GCConstants.PATTERN_MAINTENANCE)) {
return StatusCode.MAINTENANCE;
}
resetLoginStatus();
return StatusCode.NO_ERROR;
}
/**
* Check if the user has been logged in when he retrieved the data.
*
* @param page
* @return <code>true</code> if user is logged in, <code>false</code> otherwise
*/
public boolean getLoginStatus(@Nullable final String page) {
if (StringUtils.isBlank(page)) {
Log.e("Login.checkLogin: No page given");
return false;
}
assert page != null;
if (TextUtils.matches(page, GCConstants.PATTERN_MAP_LOGGED_IN)) {
return true;
}
setActualStatus(CgeoApplication.getInstance().getString(R.string.init_login_popup_ok));
// on every page except login page
setActualLoginStatus(TextUtils.matches(page, GCConstants.PATTERN_LOGIN_NAME));
if (isActualLoginStatus()) {
setActualUserName(TextUtils.getMatch(page, GCConstants.PATTERN_LOGIN_NAME, true, "???"));
int cachesCount = 0;
try {
cachesCount = Integer.parseInt(TextUtils.getMatch(page, GCConstants.PATTERN_CACHES_FOUND, true, "0").replaceAll("[,.]", ""));
} catch (final NumberFormatException e) {
Log.e("getLoginStatus: bad cache count", e);
}
setActualCachesFound(cachesCount);
Settings.setGCMemberStatus(TextUtils.getMatch(page, GCConstants.PATTERN_MEMBER_STATUS, true, null));
if (page.contains(GCConstants.MEMBER_STATUS_RENEW)) {
Settings.setGCMemberStatus(GCConstants.MEMBER_STATUS_PM);
}
return true;
}
// login page
setActualLoginStatus(TextUtils.matches(page, GCConstants.PATTERN_LOGIN_NAME_LOGIN_PAGE));
if (isActualLoginStatus()) {
setActualUserName(Settings.getUsername());
// number of caches found is not part of this page
return true;
}
setActualStatus(CgeoApplication.getInstance().getString(R.string.init_login_popup_failed));
return false;
}
/**
* Ensure that the web site is in English.
*
* @param previousPage the content of the last loaded page
* @return <code>true</code> if a switch was necessary and successfully performed (non-English -> English)
*/
private boolean switchToEnglish(String previousPage) {
if (previousPage != null && previousPage.contains(ENGLISH)) {
Log.i("Geocaching.com language already set to English");
// get find count
getLoginStatus(Network.getResponseData(Network.getRequest("http:
} else {
final String page = Network.getResponseData(Network.getRequest(LANGUAGE_CHANGE_URI));
getLoginStatus(page);
if (page == null) {
Log.e("Failed to read viewstates to set geocaching.com language");
}
final Parameters params = new Parameters(
"__EVENTTARGET", "ctl00$uxLocaleList$uxLocaleList$ctl00$uxLocaleItem", // switch to english
"__EVENTARGUMENT", "");
GCLogin.transferViewstates(page, params);
final HttpResponse response = Network.postRequest(LANGUAGE_CHANGE_URI, params, new Parameters("Referer", LANGUAGE_CHANGE_URI));
if (Network.isSuccess(response)) {
Log.i("changed language on geocaching.com to English");
return true;
}
Log.e("Failed to set geocaching.com language to English");
}
return false;
}
public Observable<Drawable> downloadAvatarAndGetMemberStatus() {
try {
final String responseData = StringUtils.defaultString(Network.getResponseData(Network.getRequest("http:
final String profile = TextUtils.replaceWhitespace(responseData);
Settings.setGCMemberStatus(TextUtils.getMatch(profile, GCConstants.PATTERN_MEMBER_STATUS, true, null));
if (profile.contains(GCConstants.MEMBER_STATUS_RENEW)) {
Settings.setGCMemberStatus(GCConstants.MEMBER_STATUS_PM);
}
setActualCachesFound(Integer.parseInt(TextUtils.getMatch(profile, GCConstants.PATTERN_CACHES_FOUND, true, "-1").replaceAll("[,.]", "")));
final String avatarURL = TextUtils.getMatch(profile, GCConstants.PATTERN_AVATAR_IMAGE_PROFILE_PAGE, false, null);
if (avatarURL != null) {
final HtmlImage imgGetter = new HtmlImage("", false, 0, false);
return imgGetter.fetchDrawable(avatarURL.replace("avatar", "user/large")).cast(Drawable.class);
}
// No match? There may be no avatar set by user.
Log.d("No avatar set for user");
} catch (final Exception e) {
Log.w("Error when retrieving user avatar", e);
}
return null;
}
/**
* Detect user date settings on geocaching.com
*/
private static void detectGcCustomDate() {
final String result = Network.getResponseData(Network.getRequest("https:
if (null == result) {
Log.w("Login.detectGcCustomDate: result is null");
return;
}
final String customDate = TextUtils.getMatch(result, GCConstants.PATTERN_CUSTOMDATE, true, null);
if (null != customDate) {
Settings.setGcCustomDate(customDate);
}
}
public static Date parseGcCustomDate(final String input, final String format) throws ParseException {
return new SimpleDateFormat(format, Locale.ENGLISH).parse(input.trim());
}
public static Date parseGcCustomDate(final String input) throws ParseException {
return parseGcCustomDate(input, Settings.getGcCustomDate());
}
public static String formatGcCustomDate(int year, int month, int day) {
return new SimpleDateFormat(Settings.getGcCustomDate(), Locale.ENGLISH).format(new GregorianCalendar(year, month - 1, day).getTime());
}
/**
* checks if an Array of Strings is empty or not. Empty means:
* - Array is null
* - or all elements are null or empty strings
*/
public static boolean isEmpty(String[] a) {
if (a == null) {
return true;
}
for (final String s : a) {
if (StringUtils.isNotEmpty(s)) {
return false;
}
}
return true;
}
/**
* read all viewstates from page
*
* @return String[] with all view states
*/
public static String[] getViewstates(String page) {
// Get the number of viewstates.
// If there is only one viewstate, __VIEWSTATEFIELDCOUNT is not present
if (page == null) { // no network access
return null;
}
int count = 1;
final MatcherWrapper matcherViewstateCount = new MatcherWrapper(GCConstants.PATTERN_VIEWSTATEFIELDCOUNT, page);
if (matcherViewstateCount.find()) {
try {
count = Integer.parseInt(matcherViewstateCount.group(1));
} catch (final NumberFormatException e) {
Log.e("getViewStates", e);
}
}
final String[] viewstates = new String[count];
// Get the viewstates
final MatcherWrapper matcherViewstates = new MatcherWrapper(GCConstants.PATTERN_VIEWSTATES, page);
while (matcherViewstates.find()) {
final String sno = matcherViewstates.group(1); // number of viewstate
int no;
if (StringUtils.isEmpty(sno)) {
no = 0;
} else {
try {
no = Integer.parseInt(sno);
} catch (final NumberFormatException e) {
Log.e("getViewStates", e);
no = 0;
}
}
viewstates[no] = matcherViewstates.group(2);
}
if (viewstates.length != 1 || viewstates[0] != null) {
return viewstates;
}
// no viewstates were present
return null;
}
/**
* put viewstates into request parameters
*/
public static void putViewstates(final Parameters params, final String[] viewstates) {
if (ArrayUtils.isEmpty(viewstates)) {
return;
}
params.put("__VIEWSTATE", viewstates[0]);
if (viewstates.length > 1) {
for (int i = 1; i < viewstates.length; i++) {
params.put("__VIEWSTATE" + i, viewstates[i]);
}
params.put("__VIEWSTATEFIELDCOUNT", String.valueOf(viewstates.length));
}
}
/**
* transfers the viewstates variables from a page (response) to parameters
* (next request)
*/
public static void transferViewstates(final String page, final Parameters params) {
putViewstates(params, getViewstates(page));
}
/**
* POST HTTP request. Do the request a second time if the user is not logged in
*
* @param uri
* @return
*/
public String postRequestLogged(final String uri, final Parameters params) {
final String data = Network.getResponseData(Network.postRequest(uri, params));
if (getLoginStatus(data)) {
return data;
}
if (login() == StatusCode.NO_ERROR) {
return Network.getResponseData(Network.postRequest(uri, params));
}
Log.i("Working as guest.");
return data;
}
/**
* GET HTTP request. Do the request a second time if the user is not logged in
*
* @param uri
* @param params
* @return
*/
@Nullable
public String getRequestLogged(@NonNull final String uri, @Nullable final Parameters params) {
final HttpResponse response = Network.getRequest(uri, params);
final String data = Network.getResponseData(response, canRemoveWhitespace(uri));
// A page that was not found will still not be found after the user logs in
if (Network.isPageNotFound(response) || getLoginStatus(data)) {
return data;
}
if (login() == StatusCode.NO_ERROR) {
return Network.getResponseData(Network.getRequest(uri, params), canRemoveWhitespace(uri));
}
Log.w("Working as guest.");
return data;
}
/**
* Unfortunately, the cache details page contains user-generated whitespace in the personal note, therefore we cannot
* remove the whitespace from cache details pages.
*
* @param uri
* @return
*/
private static boolean canRemoveWhitespace(final String uri) {
return !StringUtils.contains(uri, "cache_details");
}
/**
* Get user session & session token from the Live Map. Needed for following requests.
*
* @return first is user session, second is session token
*/
public @NonNull
MapTokens getMapTokens() {
final String data = getRequestLogged(GCConstants.URL_LIVE_MAP, null);
final String userSession = TextUtils.getMatch(data, GCConstants.PATTERN_USERSESSION, "");
final String sessionToken = TextUtils.getMatch(data, GCConstants.PATTERN_SESSIONTOKEN, "");
return new MapTokens(userSession, sessionToken);
}
}
|
package cgeo.geocaching.maps.brouter;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.utils.Log;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.util.Xml;
import java.util.LinkedList;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
public final class BRouter {
private static final double MAX_ROUTING_DISTANCE_KILOMETERS = 5.0;
private static BRouterServiceConnection brouter;
private static Geopoint lastDirectionUpdatePoint;
private static Geopoint[] lastRoutingPoints;
private static Geopoint lastDestination;
private BRouter() {
// utility class
}
public static void connect(final Context ctx) {
if (brouter != null && brouter.isConnected()) {
//already connected
return;
}
brouter = new BRouterServiceConnection();
final Intent intent = new Intent();
intent.setClassName("btools.routingapp", "btools.routingapp.BRouterService");
if (!ctx.bindService(intent, brouter, Context.BIND_AUTO_CREATE)) {
brouter = null;
}
}
public static void disconnect(final Context ctx) {
if (brouter != null && brouter.isConnected()) {
ctx.unbindService(brouter);
brouter = null;
}
}
@Nullable
public static Geopoint[] getTrack(final Geopoint start, final Geopoint destination) {
if (brouter == null) {
return null;
}
// Disable routing for huge distances
if (start.distanceTo(destination) > MAX_ROUTING_DISTANCE_KILOMETERS) {
return null;
}
// Use cached route if current position has not changed more than 5m
// TODO: Maybe adjust this to current zoomlevel
if (lastDirectionUpdatePoint != null && destination == lastDestination && start.distanceTo(lastDirectionUpdatePoint) < 0.005) {
return lastRoutingPoints;
}
// now really calculate a new route
lastDestination = destination;
lastRoutingPoints = calculateRouting(start, destination);
lastDirectionUpdatePoint = start;
return lastRoutingPoints;
}
private static Geopoint[] calculateRouting(final Geopoint start, final Geopoint dest) {
final Bundle params = new Bundle();
params.putString("trackFormat", "gpx");
params.putString("v", "foot");
params.putDoubleArray("lats", new double[]{start.getLatitude(), dest.getLatitude()});
params.putDoubleArray("lons", new double[]{start.getLongitude(), dest.getLongitude()});
params.putString("v", Settings.getRoutingMode().parameterValue);
final String gpx = brouter.getTrackFromParams(params);
return parseGpxTrack(gpx);
}
@Nullable
private static Geopoint[] parseGpxTrack(final String gpx) {
try {
final LinkedList<Geopoint> result = new LinkedList<>();
Xml.parse(gpx, new DefaultHandler() {
@Override
public void startElement(final String uri, final String localName, final String qName, final Attributes atts) throws SAXException {
if (qName.equalsIgnoreCase("trkpt")) {
final String lat = atts.getValue("lat");
if (lat != null) {
final String lon = atts.getValue("lon");
if (lon != null) {
result.add(new Geopoint(lat, lon));
}
}
}
}
});
return result.toArray(new Geopoint[result.size()]);
} catch (final SAXException e) {
Log.e("cannot parse brouter output", e);
}
return null;
}
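// parseGpxTrack expects BRouter's GPX output to contain track points of the form (coordinates are
// illustrative):
//   <trkpt lat="52.5170" lon="13.3889"/>
// Only the lat/lon attributes of <trkpt> elements are read; all other GPX content is ignored.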
public static void invalidateRouting() {
lastDirectionUpdatePoint = null;
}
public static boolean isAvailable() {
return brouter != null;
}
}
|
package arez.processor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
final class ProcessorUtil
{
private ProcessorUtil()
{
}
@SuppressWarnings( "unchecked" )
static boolean isWarningSuppressed( @Nonnull final Element element,
@Nonnull final String warning,
@Nullable final String alternativeSuppressWarnings )
{
if ( null != alternativeSuppressWarnings )
{
final AnnotationMirror suppress = AnnotationsUtil.findAnnotationByType( element, alternativeSuppressWarnings );
if ( null != suppress )
{
final AnnotationValue value = AnnotationsUtil.findAnnotationValueNoDefaults( suppress, "value" );
if ( null != value )
{
final List<AnnotationValue> warnings = (List<AnnotationValue>) value.getValue();
for ( final AnnotationValue suppression : warnings )
{
if ( warning.equals( suppression.getValue() ) )
{
return true;
}
}
}
}
}
final SuppressWarnings annotation = element.getAnnotation( SuppressWarnings.class );
if ( null != annotation )
{
for ( final String suppression : annotation.value() )
{
if ( warning.equals( suppression ) )
{
return true;
}
}
}
final Element enclosingElement = element.getEnclosingElement();
return null != enclosingElement && isWarningSuppressed( enclosingElement, warning, alternativeSuppressWarnings );
}
@Nonnull
static List<TypeElement> getSuperTypes( @Nonnull final TypeElement element )
{
final List<TypeElement> superTypes = new ArrayList<>();
enumerateSuperTypes( element, superTypes );
return superTypes;
}
private static void enumerateSuperTypes( @Nonnull final TypeElement element,
@Nonnull final List<TypeElement> superTypes )
{
final TypeMirror superclass = element.getSuperclass();
if ( TypeKind.NONE != superclass.getKind() )
{
final TypeElement superclassElement = (TypeElement) ( (DeclaredType) superclass ).asElement();
superTypes.add( superclassElement );
enumerateSuperTypes( superclassElement, superTypes );
}
for ( final TypeMirror interfaceType : element.getInterfaces() )
{
final TypeElement interfaceElement = (TypeElement) ( (DeclaredType) interfaceType ).asElement();
enumerateSuperTypes( interfaceElement, superTypes );
}
}
@Nonnull
static List<TypeElement> getInterfaces( @Nonnull final TypeElement element )
{
final List<TypeElement> superTypes = new ArrayList<>();
enumerateInterfaces( element, superTypes );
return superTypes;
}
private static void enumerateInterfaces( @Nonnull final TypeElement element,
@Nonnull final List<TypeElement> superTypes )
{
final TypeMirror superclass = element.getSuperclass();
if ( TypeKind.NONE != superclass.getKind() )
{
final TypeElement superclassElement = (TypeElement) ( (DeclaredType) superclass ).asElement();
enumerateInterfaces( superclassElement, superTypes );
}
for ( final TypeMirror interfaceType : element.getInterfaces() )
{
final TypeElement interfaceElement = (TypeElement) ( (DeclaredType) interfaceType ).asElement();
superTypes.add( interfaceElement );
enumerateInterfaces( interfaceElement, superTypes );
}
}
@Nonnull
static List<VariableElement> getFieldElements( @Nonnull final TypeElement element )
{
final Map<String, VariableElement> methodMap = new LinkedHashMap<>();
enumerateFieldElements( element, methodMap );
return new ArrayList<>( methodMap.values() );
}
private static void enumerateFieldElements( @Nonnull final TypeElement element,
@Nonnull final Map<String, VariableElement> fields )
{
final TypeMirror superclass = element.getSuperclass();
if ( TypeKind.NONE != superclass.getKind() )
{
enumerateFieldElements( (TypeElement) ( (DeclaredType) superclass ).asElement(), fields );
}
for ( final Element member : element.getEnclosedElements() )
{
if ( member.getKind() == ElementKind.FIELD )
{
fields.put( member.getSimpleName().toString(), (VariableElement) member );
}
}
}
@Nonnull
static List<ExecutableElement> getMethods( @Nonnull final TypeElement element,
@Nonnull final Elements elementUtils,
@Nonnull final Types typeUtils )
{
final Map<String, ArrayList<ExecutableElement>> methodMap = new LinkedHashMap<>();
enumerateMethods( element, elementUtils, typeUtils, element, methodMap );
return methodMap.values().stream().flatMap( Collection::stream ).collect( Collectors.toList() );
}
private static void enumerateMethods( @Nonnull final TypeElement scope,
@Nonnull final Elements elementUtils,
@Nonnull final Types typeUtils,
@Nonnull final TypeElement element,
@Nonnull final Map<String, ArrayList<ExecutableElement>> methods )
{
final TypeMirror superclass = element.getSuperclass();
if ( TypeKind.NONE != superclass.getKind() )
{
final TypeElement superclassElement = (TypeElement) ( (DeclaredType) superclass ).asElement();
enumerateMethods( scope, elementUtils, typeUtils, superclassElement, methods );
}
for ( final TypeMirror interfaceType : element.getInterfaces() )
{
final TypeElement interfaceElement = (TypeElement) ( (DeclaredType) interfaceType ).asElement();
enumerateMethods( scope, elementUtils, typeUtils, interfaceElement, methods );
}
for ( final Element member : element.getEnclosedElements() )
{
if ( member.getKind() == ElementKind.METHOD )
{
final ExecutableElement method = (ExecutableElement) member;
processMethod( elementUtils, typeUtils, scope, methods, method );
}
}
}
private static void processMethod( @Nonnull final Elements elementUtils,
@Nonnull final Types typeUtils,
@Nonnull final TypeElement typeElement,
@Nonnull final Map<String, ArrayList<ExecutableElement>> methods,
@Nonnull final ExecutableElement method )
{
final ExecutableType methodType =
(ExecutableType) typeUtils.asMemberOf( (DeclaredType) typeElement.asType(), method );
final String key = method.getSimpleName().toString();
final ArrayList<ExecutableElement> elements = methods.computeIfAbsent( key, k -> new ArrayList<>() );
boolean found = false;
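// check the methods already recorded under this simple name: keep the list unchanged if this method is
// already present, replace an entry whose signature it matches or that it overrides (preferring concrete
// implementations over abstract interface methods), otherwise append it as a new overload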
final int size = elements.size();
for ( int i = 0; i < size; i++ )
{
final ExecutableElement executableElement = elements.get( i );
if ( method.equals( executableElement ) )
{
found = true;
break;
}
else if ( isSubsignature( typeUtils, typeElement, methodType, executableElement ) )
{
if ( !isAbstractInterfaceMethod( method ) )
{
elements.set( i, method );
}
found = true;
break;
}
else if ( elementUtils.overrides( method, executableElement, typeElement ) )
{
elements.set( i, method );
found = true;
break;
}
}
if ( !found )
{
elements.add( method );
}
}
private static boolean isAbstractInterfaceMethod( @Nonnull final ExecutableElement method )
{
return method.getModifiers().contains( Modifier.ABSTRACT ) &&
ElementKind.INTERFACE == method.getEnclosingElement().getKind();
}
private static boolean isSubsignature( @Nonnull final Types typeUtils,
@Nonnull final TypeElement typeElement,
@Nonnull final ExecutableType methodType,
@Nonnull final ExecutableElement candidate )
{
final ExecutableType candidateType =
(ExecutableType) typeUtils.asMemberOf( (DeclaredType) typeElement.asType(), candidate );
final boolean isEqual = methodType.equals( candidateType );
final boolean isSubsignature = typeUtils.isSubsignature( methodType, candidateType );
return isSubsignature || isEqual;
}
@Nonnull
static List<ExecutableElement> getConstructors( @Nonnull final TypeElement element )
{
return element.getEnclosedElements().stream().
filter( m -> m.getKind() == ElementKind.CONSTRUCTOR ).
map( m -> (ExecutableElement) m ).
collect( Collectors.toList() );
}
@Nullable
static String deriveName( @Nonnull final ExecutableElement method,
@Nonnull final Pattern pattern,
@Nonnull final String name )
throws ProcessorException
{
if ( Constants.SENTINEL.equals( name ) )
{
final String methodName = method.getSimpleName().toString();
final Matcher matcher = pattern.matcher( methodName );
if ( matcher.find() )
{
final String candidate = matcher.group( 1 );
return firstCharacterToLowerCase( candidate );
}
else
{
return null;
}
}
else
{
return name;
}
}
@Nonnull
static String firstCharacterToLowerCase( @Nonnull final String name )
{
return Character.toLowerCase( name.charAt( 0 ) ) + name.substring( 1 );
}
static boolean hasNonnullAnnotation( @Nonnull final Element element )
{
return AnnotationsUtil.hasAnnotationOfType( element, Constants.NONNULL_ANNOTATION_CLASSNAME );
}
static boolean isDisposableTrackableRequired( @Nonnull final Element element )
{
final VariableElement variableElement = (VariableElement)
AnnotationsUtil.getAnnotationValue( element,
Constants.COMPONENT_ANNOTATION_CLASSNAME,
"disposeNotifier" ).getValue();
switch ( variableElement.getSimpleName().toString() )
{
case "ENABLE":
return true;
case "DISABLE":
return false;
default:
return !AnnotationsUtil.hasAnnotationOfType( element, Constants.SINGLETON_ANNOTATION_CLASSNAME );
}
}
static boolean doesMethodOverrideInterfaceMethod( @Nonnull final Types typeUtils,
@Nonnull final TypeElement typeElement,
@Nonnull final ExecutableElement method )
{
return getInterfaces( typeElement ).stream()
.flatMap( i -> i.getEnclosedElements().stream() )
.filter( e1 -> e1 instanceof ExecutableElement )
.map( e1 -> (ExecutableElement) e1 )
.anyMatch( e -> isSubsignature( typeUtils,
typeElement,
(ExecutableType) typeUtils.asMemberOf( (DeclaredType) typeElement.asType(), e ),
method ) );
}
}
|
package org.broadinstitute.sting;
import org.apache.commons.lang.StringUtils;
import org.broad.tribble.FeatureCodec;
import org.broad.tribble.Tribble;
import org.broad.tribble.index.Index;
import org.broad.tribble.index.IndexFactory;
import org.broadinstitute.sting.utils.codecs.vcf.VCFCodec;
import org.broadinstitute.sting.gatk.CommandLineExecutable;
import org.broadinstitute.sting.gatk.CommandLineGATK;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.exceptions.StingException;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import java.io.File;
import java.util.*;
public class WalkerTest extends BaseTest {
private static final boolean ENABLE_REPORTING = false;
@BeforeMethod
public void initializeRandomGenerator() {
GenomeAnalysisEngine.resetRandomGenerator();
}
public String assertMatchingMD5(final String name, final File resultsFile, final String expectedMD5) {
return assertMatchingMD5(name, resultsFile, expectedMD5, parameterize());
}
public void maybeValidateSupplementaryFile(final String name, final File resultFile) {
File indexFile = Tribble.indexFile(resultFile);
//System.out.println("Putative index file is " + indexFile);
if ( indexFile.exists() ) {
if ( ! resultFile.getAbsolutePath().contains(".vcf") ) {
// todo -- currently we only understand VCF files! Blow up since we can't test them
throw new StingException("Found an index created for file " + resultFile + " but we can only validate VCF files. Extend this code!");
}
System.out.println("Verifying on-the-fly index " + indexFile + " for test " + name + " using file " + resultFile);
Index indexFromOutputFile = IndexFactory.createIndex(resultFile, new VCFCodec());
Index dynamicIndex = IndexFactory.loadIndex(indexFile.getAbsolutePath());
if ( ! indexFromOutputFile.equals(dynamicIndex) ) {
Assert.fail(String.format("Index on disk from indexing on the fly not equal to the index created after the run completed. FileIndex %s vs. on-the-fly %s%n",
indexFromOutputFile.getProperties(),
dynamicIndex.getProperties()));
}
}
}
public List<String> assertMatchingMD5s(final String name, List<File> resultFiles, List<String> expectedMD5s) {
List<String> md5s = new ArrayList<String>();
for (int i = 0; i < resultFiles.size(); i++) {
String md5 = assertMatchingMD5(name, resultFiles.get(i), expectedMD5s.get(i));
maybeValidateSupplementaryFile(name, resultFiles.get(i));
md5s.add(i, md5);
}
return md5s;
}
public String buildCommandLine(String... arguments) {
String cmdline = "";
for ( int argIndex = 0; argIndex < arguments.length; argIndex++ ) {
cmdline += arguments[argIndex];
if (argIndex < arguments.length - 1) {
cmdline += " ";
}
}
return cmdline;
}
public class WalkerTestSpec {
String args = "";
int nOutputFiles = -1;
List<String> md5s = null;
List<String> exts = null;
Class expectedException = null;
// the default output path for the integration test
private File outputFileLocation = null;
protected Map<String, File> auxillaryFiles = new HashMap<String, File>();
public WalkerTestSpec(String args, List<String> md5s) {
this(args, -1, md5s);
}
public WalkerTestSpec(String args, int nOutputFiles, List<String> md5s) {
this.args = args;
this.nOutputFiles = md5s.size();
this.md5s = md5s;
}
public WalkerTestSpec(String args, List<String> exts, List<String> md5s) {
this(args, -1, exts, md5s);
}
public WalkerTestSpec(String args, int nOutputFiles, List<String> exts, List<String> md5s) {
this.args = args;
this.nOutputFiles = md5s.size();
this.md5s = md5s;
this.exts = exts;
}
public WalkerTestSpec(String args, int nOutputFiles, Class expectedException) {
this.args = args;
this.nOutputFiles = nOutputFiles;
this.expectedException = expectedException;
}
public void setOutputFileLocation(File outputFileLocation) {
this.outputFileLocation = outputFileLocation;
}
protected File getOutputFileLocation() {
return outputFileLocation;
}
public boolean expectsException() {
return expectedException != null;
}
public Class getExpectedException() {
if ( ! expectsException() ) throw new ReviewedStingException("Tried to get exception for walker test that doesn't expect one");
return expectedException;
}
public void addAuxFile(String expectedMD5sum, File outputfile) {
auxillaryFiles.put(expectedMD5sum, outputfile);
}
}
protected boolean parameterize() {
return false;
}
protected Pair<List<File>, List<String>> executeTestParallel(final String name, WalkerTestSpec spec) {
return executeTest(name, spec, Arrays.asList(1, 4));
}
protected Pair<List<File>, List<String>> executeTest(final String name, WalkerTestSpec spec, List<Integer> parallelThreads) {
String originalArgs = spec.args;
Pair<List<File>, List<String>> results = null;
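// run the identical spec once per requested thread count, appending the GATK -nt option for the
// multi-threaded runs; the results of the last run are returned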
for ( int nt : parallelThreads ) {
String extra = nt == 1 ? "" : (" -nt " + nt);
spec.args = originalArgs + extra;
results = executeTest(name + "-nt-" + nt, spec);
}
return results;
}
protected Pair<List<File>, List<String>> executeTest(final String name, WalkerTestSpec spec) {
ensureMd5DbDirectory(); // ensure the md5 directory exists
List<File> tmpFiles = new ArrayList<File>();
for (int i = 0; i < spec.nOutputFiles; i++) {
String ext = spec.exts == null ? ".tmp" : "." + spec.exts.get(i);
File fl = createTempFile(String.format("walktest.tmp_param.%d", i), ext);
tmpFiles.add(fl);
}
final String args = String.format(spec.args, tmpFiles.toArray());
System.out.println(Utils.dupString('-', 80));
if ( spec.expectsException() ) {
// this branch handles the case where we are testing that a walker will fail as expected
return executeTest(name, spec.getOutputFileLocation(), null, tmpFiles, args, spec.getExpectedException());
} else {
List<String> md5s = new LinkedList<String>();
md5s.addAll(spec.md5s);
// check to see if they included any auxiliary files, if so add them to the list
for (String md5 : spec.auxillaryFiles.keySet()) {
md5s.add(md5);
tmpFiles.add(spec.auxillaryFiles.get(md5));
}
return executeTest(name, spec.getOutputFileLocation(), md5s, tmpFiles, args, null);
}
}
private void qcMD5s(String name, List<String> md5s) {
final String exampleMD5 = "709a1f482cce68992c637da3cff824a8";
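// every expected MD5 must either be empty ("don't care") or be a 32-character alphanumeric string like the example above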
for (String md5 : md5s) {
if ( md5 == null )
throw new IllegalArgumentException("Null MD5 found in test " + name);
if ( md5.equals("") )
continue;
if ( ! StringUtils.isAlphanumeric(md5) )
throw new IllegalArgumentException("MD5 contains non-alphanumeric characters test " + name + " md5=" + md5);
if ( md5.length() != exampleMD5.length() )
throw new IllegalArgumentException("Non-empty MD5 of unexpected number of characters test " + name + " md5=" + md5);
}
}
/**
* execute the test, given the following:
* @param name the name of the test
* @param outputFileLocation the file to pass to GATK via -o, or null to use the default output
* @param md5s the list of md5s
* @param tmpFiles the temp files corresponding to the md5 list
* @param args the argument list
* @param expectedException the expected exception or null
* @return a pair of file and string lists
*/
private Pair<List<File>, List<String>> executeTest(String name, File outputFileLocation, List<String> md5s, List<File> tmpFiles, String args, Class expectedException) {
if ( md5s != null ) qcMD5s(name, md5s);
if (outputFileLocation != null)
args += " -o " + outputFileLocation.getAbsolutePath();
executeTest(name, args, expectedException);
if ( expectedException != null ) {
return null;
} else {
// we need to check MD5s
return new Pair<List<File>, List<String>>(tmpFiles, assertMatchingMD5s(name, tmpFiles, md5s));
}
}
/**
* execute the test, given the following:
* @param name the name of the test
* @param args the argument list
* @param expectedException the expected exception or null
*/
public static void executeTest(String name, String args, Class expectedException) {
CommandLineGATK instance = new CommandLineGATK();
String[] command = Utils.escapeExpressions(args);
// add the logging level to each of the integration test commands
command = Utils.appendArray(command, "-et", ENABLE_REPORTING ? "STANDARD" : "NO_ET");
// run the executable
boolean gotAnException = false;
try {
System.out.println(String.format("Executing test %s with GATK arguments: %s", name, Utils.join(" ",command)));
CommandLineExecutable.start(instance, command);
} catch (Exception e) {
gotAnException = true;
if ( expectedException != null ) {
// we expect an exception
System.out.println(String.format("Wanted exception %s, saw %s", expectedException, e.getClass()));
if ( expectedException.isInstance(e) ) {
// it's the type we expected
System.out.println(String.format(" => %s PASSED", name));
} else {
e.printStackTrace();
Assert.fail(String.format("Test %s expected exception %s but got %s instead",
name, expectedException, e.getClass()));
}
} else {
// we didn't expect an exception but we got one :-(
throw new RuntimeException(e);
}
}
// catch failures from the integration test
if ( expectedException != null ) {
if ( ! gotAnException )
// we expected an exception but didn't see it
Assert.fail(String.format("Test %s expected exception %s but none was thrown", name, expectedException.toString()));
} else {
if ( CommandLineExecutable.result != 0) {
throw new RuntimeException("Error running the GATK with arguments: " + args);
}
}
}
protected File createTempFileFromBase(String name) {
File fl = new File(name);
fl.deleteOnExit();
return fl;
}
}
|
package com.jetbrains.python.psi.impl;
import com.intellij.codeInsight.completion.CompletionUtil;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.stubs.IStubElementType;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.util.*;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.*;
import com.jetbrains.python.codeInsight.controlflow.ControlFlowCache;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.PyResolveUtil;
import com.jetbrains.python.psi.resolve.QualifiedNameFinder;
import com.jetbrains.python.psi.stubs.PropertyStubStorage;
import com.jetbrains.python.psi.stubs.PyClassStub;
import com.jetbrains.python.psi.stubs.PyFunctionStub;
import com.jetbrains.python.psi.stubs.PyTargetExpressionStub;
import com.jetbrains.python.psi.types.PyClassType;
import com.jetbrains.python.psi.types.PyClassTypeImpl;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import com.jetbrains.python.toolbox.Maybe;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
/**
* @author yole
*/
public class PyClassImpl extends PyPresentableElementImpl<PyClassStub> implements PyClass {
public static final PyClass[] EMPTY_ARRAY = new PyClassImpl[0];
private List<PyTargetExpression> myInstanceAttributes;
private final NotNullLazyValue<CachedValue<Boolean>> myNewStyle = new NotNullLazyValue<CachedValue<Boolean>>() {
@NotNull
@Override
protected CachedValue<Boolean> compute() {
return CachedValuesManager.getManager(getProject()).createCachedValue(new NewStyleCachedValueProvider(), false);
}
};
private volatile Map<String, Property> myPropertyCache;
@Override
public PyType getType(@NotNull TypeEvalContext context) {
return new PyClassTypeImpl(this, true);
}
private class NewStyleCachedValueProvider implements CachedValueProvider<Boolean> {
@Override
public Result<Boolean> compute() {
return new Result<Boolean>(calculateNewStyleClass(), PsiModificationTracker.OUT_OF_CODE_BLOCK_MODIFICATION_COUNT);
}
}
public PyClassImpl(@NotNull ASTNode astNode) {
super(astNode);
}
public PyClassImpl(@NotNull final PyClassStub stub) {
this(stub, PyElementTypes.CLASS_DECLARATION);
}
public PyClassImpl(@NotNull final PyClassStub stub, @NotNull IStubElementType nodeType) {
super(stub, nodeType);
}
public PsiElement setName(@NotNull String name) throws IncorrectOperationException {
final ASTNode nameElement = PyElementGenerator.getInstance(getProject()).createNameIdentifier(name);
final ASTNode node = getNameNode();
if (node != null) {
getNode().replaceChild(node, nameElement);
}
return this;
}
@Nullable
@Override
public String getName() {
final PyClassStub stub = getStub();
if (stub != null) {
return stub.getName();
}
else {
ASTNode node = getNameNode();
return node != null ? node.getText() : null;
}
}
public PsiElement getNameIdentifier() {
final ASTNode nameNode = getNameNode();
return nameNode != null ? nameNode.getPsi() : null;
}
public ASTNode getNameNode() {
return getNode().findChildByType(PyTokenTypes.IDENTIFIER);
}
@Override
public Icon getIcon(int flags) {
return PlatformIcons.CLASS_ICON;
}
@Override
protected void acceptPyVisitor(PyElementVisitor pyVisitor) {
pyVisitor.visitPyClass(this);
}
@NotNull
public PyStatementList getStatementList() {
final PyStatementList statementList = childToPsi(PyElementTypes.STATEMENT_LIST);
assert statementList != null : "Statement list missing for class " + getText();
return statementList;
}
@Override
public PyArgumentList getSuperClassExpressionList() {
final PyArgumentList argList = PsiTreeUtil.getChildOfType(this, PyArgumentList.class);
if (argList != null && argList.getFirstChild() != null) {
return argList;
}
return null;
}
@NotNull
public PyExpression[] getSuperClassExpressions() {
final PyArgumentList argList = getSuperClassExpressionList();
if (argList != null) {
return argList.getArguments();
}
return PyExpression.EMPTY_ARRAY;
}
@NotNull
public PsiElement[] getSuperClassElements() {
final PyExpression[] superExpressions = getSuperClassExpressions();
List<PsiElement> superClasses = new ArrayList<PsiElement>();
for (PyExpression expr : superExpressions) {
superClasses.add(classElementFromExpression(expr));
}
return PsiUtilCore.toPsiElementArray(superClasses);
}
@Nullable
public static PsiElement classElementFromExpression(@NotNull PyExpression expression) {
expression = unfoldClass(expression);
if (expression instanceof PyReferenceExpression) {
final PsiPolyVariantReference ref = ((PyReferenceExpression)expression).getReference(PyResolveContext.noProperties());
return ref.resolve();
}
return null;
}
public static PyExpression unfoldClass(PyExpression expression) {
if (expression instanceof PyCallExpression) {
PyCallExpression call = (PyCallExpression)expression;
//noinspection ConstantConditions
if (call.getCallee() != null && "with_metaclass".equals(call.getCallee().getName()) && call.getArguments().length > 1) {
expression = call.getArguments()[1];
}
}
return expression;
}
public Iterable<PyClassRef> iterateAncestors() {
// The implementation is manifestly lazy wrt psi scanning and uses stack rather sparingly.
// It must be more efficient on deep and wide hierarchies, but it was more fun than efficiency that produced it.
return new AncestorsIterable(this);
}
@Override
public Iterable<PyClass> iterateAncestorClasses() {
return new AncestorClassesIterable(this);
}
public boolean isSubclass(PyClass parent) {
if (this == parent) {
return true;
}
for (PyClass superclass : iterateAncestorClasses()) {
if (parent == superclass) return true;
}
return false;
}
@Override
public boolean isSubclass(@NotNull String superClassQName) {
if (superClassQName.equals(getQualifiedName())) {
return true;
}
for (PyClassRef superclass : iterateAncestors()) {
if (superClassQName.equals(superclass.getQualifiedName())) return true;
}
return false;
}
public PyDecoratorList getDecoratorList() {
return getStubOrPsiChild(PyElementTypes.DECORATOR_LIST);
}
@Nullable
public String getQualifiedName() {
String name = getName();
final PyClassStub stub = getStub();
PsiElement ancestor = stub != null ? stub.getParentStub().getPsi() : getParent();
while (!(ancestor instanceof PsiFile)) {
if (ancestor == null) return name; // can this happen?
if (ancestor instanceof PyClass) {
name = ((PyClass)ancestor).getName() + "." + name;
}
ancestor = stub != null ? ((StubBasedPsiElement)ancestor).getStub().getParentStub().getPsi() : ancestor.getParent();
}
PsiFile psiFile = ((PsiFile)ancestor).getOriginalFile();
final PyFile builtins = PyBuiltinCache.getInstance(this).getBuiltinsFile();
if (!psiFile.equals(builtins)) {
VirtualFile vFile = psiFile.getVirtualFile();
if (vFile != null) {
final String packageName = QualifiedNameFinder.findShortestImportableName(this, vFile);
return packageName + "." + name;
}
}
return name;
}
@Override
public List<String> getSlots() {
List<String> slots = getOwnSlots();
if (slots != null) {
return slots;
}
for (PyClass cls : iterateAncestorClasses()) {
slots = ((PyClassImpl)cls).getOwnSlots();
if (slots != null) {
return slots;
}
}
return null;
}
@Nullable
public List<String> getOwnSlots() {
final PyClassStub stub = getStub();
if (stub != null) {
return stub.getSlots();
}
return PyFileImpl.getStringListFromTargetExpression(PyNames.SLOTS, getClassAttributes());
}
protected List<PyClassRef> getSuperClassesList() {
if (PyNames.FAKE_OLD_BASE.equals(getName())) {
return Collections.emptyList();
}
List<PyClassRef> result = resolveSuperClassesFromStub();
if (result == null) {
result = new ArrayList<PyClassRef>();
final TypeEvalContext context = TypeEvalContext.fastStubOnly(null);
final PyExpression[] superClassExpressions = getSuperClassExpressions();
for (PyExpression expression : superClassExpressions) {
final PsiElement element = classElementFromExpression(expression);
if (element != null) {
result.add(new PyClassRef(element));
}
else {
final PyType type = expression.getType(context);
if (type instanceof PyClassType) {
result.add(new PyClassRef((PyClassType)type));
}
}
}
}
if (result.size() == 0 && isValid() && !PyBuiltinCache.getInstance(this).hasInBuiltins(this)) {
String implicitSuperclassName = LanguageLevel.forElement(this).isPy3K() ? PyNames.OBJECT : PyNames.FAKE_OLD_BASE;
PyClass implicitSuperclass = PyBuiltinCache.getInstance(this).getClass(implicitSuperclassName);
if (implicitSuperclass != null) {
result.add(new PyClassRef(implicitSuperclass));
}
}
return result;
}
@Nullable
private List<PyClassRef> resolveSuperClassesFromStub() {
final PyClassStub stub = getStub();
if (stub == null) {
return null;
}
// stub-based resolve currently works correctly only with classes in file level
final PsiElement parent = stub.getParentStub().getPsi();
if (!(parent instanceof PyFile)) {
// TODO[yole] handle this case
return null;
}
List<PyClassRef> result = new ArrayList<PyClassRef>();
for (PyQualifiedName qualifiedName : stub.getSuperClasses()) {
result.add(classRefFromQName((NameDefiner)parent, qualifiedName));
}
return result;
}
private static PyClassRef classRefFromQName(NameDefiner parent, PyQualifiedName qualifiedName) {
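// resolve the qualified name component by component via getElementNamed(), unwrapping package directories
// and import elements on the way; single-component names additionally fall back to the builtin classes,
// and anything unresolved is kept as a name-only reference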
if (qualifiedName == null) {
return new PyClassRef((String)null);
}
NameDefiner currentParent = parent;
for (String component : qualifiedName.getComponents()) {
PsiElement element = currentParent.getElementNamed(component);
element = PyUtil.turnDirIntoInit(element);
if (element instanceof PyImportElement) {
element = ((PyImportElement)element).resolve();
}
if (!(element instanceof NameDefiner)) {
currentParent = null;
break;
}
currentParent = (NameDefiner)element;
}
if (currentParent != null) {
return new PyClassRef(currentParent);
}
if (qualifiedName.getComponentCount() == 1) {
final PyClass builtinClass = PyBuiltinCache.getInstance(parent).getClass(qualifiedName.getComponents().get(0));
if (builtinClass != null) {
return new PyClassRef(builtinClass);
}
}
return new PyClassRef(qualifiedName.toString());
}
@NotNull
public PyClass[] getSuperClasses() {
final PyClassStub stub = getStub();
if (stub != null) {
final List<PyClassRef> pyClasses = resolveSuperClassesFromStub();
if (pyClasses == null) {
return EMPTY_ARRAY;
}
List<PyClass> result = new ArrayList<PyClass>();
for (PyClassRef clsRef : pyClasses) {
PyClass pyClass = clsRef.getPyClass();
if (pyClass != null) {
result.add(pyClass);
}
}
return result.toArray(new PyClass[result.size()]);
}
PsiElement[] superClassElements = getSuperClassElements();
if (superClassElements.length > 0) {
List<PyClass> result = new ArrayList<PyClass>();
for (PsiElement element : superClassElements) {
if (element instanceof PyClass) {
result.add((PyClass)element);
}
}
return result.toArray(new PyClass[result.size()]);
}
return EMPTY_ARRAY;
}
@NotNull
public List<PyClass> getMRO() {
// see http://hackage.haskell.org/packages/archive/MetaObject/latest/doc/html/src/MO-Util-C3.html#linearize for code to port from.
return mroLinearize(this, Collections.<PyClass>emptyList());
}
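// C3 merge step: repeatedly take a head that does not appear in the tail of any remaining sequence,
// append it to the result and strip it from all sequence heads; an inconsistent hierarchy trips the assertion below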
private static List<PyClass> mroMerge(List<List<PyClass>> sequences) {
List<PyClass> result = new LinkedList<PyClass>(); // need to insert to 0th position on linearize
while (true) {
// filter blank sequences
List<List<PyClass>> nonBlankSequences = new ArrayList<List<PyClass>>(sequences.size());
for (List<PyClass> item : sequences) {
if (item.size() > 0) nonBlankSequences.add(item);
}
if (nonBlankSequences.isEmpty()) return result;
// find a clean head
PyClass head = null; // to keep compiler happy; really head is assigned in the loop at least once.
for (List<PyClass> seq : nonBlankSequences) {
head = seq.get(0);
boolean head_in_tails = false;
for (List<PyClass> tail_seq : nonBlankSequences) {
if (tail_seq.indexOf(head) > 0) { // -1 is not found, 0 is head, >0 is tail.
head_in_tails = true;
break;
}
}
if (!head_in_tails) {
break;
}
else {
head = null; // as a signal
}
}
assert head != null : "Inconsistent hierarchy!"; // TODO: better diagnostics?
// our head is clean;
result.add(head);
// remove it from heads of other sequences
for (List<PyClass> seq : nonBlankSequences) {
if (seq.get(0) == head) seq.remove(0);
}
} // we either return inside the loop or die by assertion
}
private static List<PyClass> mroLinearize(PyClass cls, List<PyClass> seen) {
assert (seen.indexOf(cls) < 0) : "Circular import structure on " + PyUtil.nvl(cls);
PyClass[] bases = cls.getSuperClasses();
List<List<PyClass>> lins = new ArrayList<List<PyClass>>(bases.length * 2);
ArrayList<PyClass> new_seen = new ArrayList<PyClass>(seen.size() + 1);
new_seen.add(cls);
for (PyClass base : bases) {
List<PyClass> lin = mroLinearize(base, new_seen);
if (!lin.isEmpty()) lins.add(lin);
}
for (PyClass base : bases) {
lins.add(new SmartList<PyClass>(base));
}
List<PyClass> result = mroMerge(lins);
result.add(0, cls);
return result;
}
@NotNull
public PyFunction[] getMethods() {
return getClassChildren(PythonDialectsTokenSetProvider.INSTANCE.getFunctionDeclarationTokens(), PyFunction.ARRAY_FACTORY);
}
@Override
public PyClass[] getNestedClasses() {
return getClassChildren(TokenSet.create(PyElementTypes.CLASS_DECLARATION), PyClass.ARRAY_FACTORY);
}
protected <T extends PsiElement> T[] getClassChildren(TokenSet elementTypes, ArrayFactory<T> factory) {
// TODO: gather all top-level functions, maybe within control statements
final PyClassStub classStub = getStub();
if (classStub != null) {
return classStub.getChildrenByType(elementTypes, factory);
}
List<T> result = new ArrayList<T>();
final PyStatementList statementList = getStatementList();
for (PsiElement element : statementList.getChildren()) {
if (elementTypes.contains(element.getNode().getElementType())) {
//noinspection unchecked
result.add((T)element);
}
}
return result.toArray(factory.create(result.size()));
}
private static class NameFinder<T extends PyElement> implements Processor<T> {
private T myResult;
private final String[] myNames;
public NameFinder(String... names) {
myNames = names;
myResult = null;
}
public T getResult() {
return myResult;
}
public boolean process(T target) {
final String targetName = target.getName();
for (String name : myNames) {
if (name.equals(targetName)) {
myResult = target;
return false;
}
}
return true;
}
}
public PyFunction findMethodByName(@Nullable final String name, boolean inherited) {
if (name == null) return null;
NameFinder<PyFunction> proc = new NameFinder<PyFunction>(name);
visitMethods(proc, inherited);
return proc.getResult();
}
@Nullable
@Override
public PyClass findNestedClass(String name, boolean inherited) {
if (name == null) return null;
NameFinder<PyClass> proc = new NameFinder<PyClass>(name);
visitNestedClasses(proc, inherited);
return proc.getResult();
}
@Nullable
public PyFunction findInitOrNew(boolean inherited) {
NameFinder<PyFunction> proc;
if (isNewStyleClass()) {
proc = new NameFinder<PyFunction>(PyNames.INIT, PyNames.NEW);
}
else {
proc = new NameFinder<PyFunction>(PyNames.INIT);
}
visitMethods(proc, inherited, true);
return proc.getResult();
}
private final static Maybe<Callable> UNKNOWN_CALL = new Maybe<Callable>(); // denotes _not_ a PyFunction, actually
private final static Maybe<Callable> NONE = new Maybe<Callable>(null); // denotes an explicit None
/**
* @param name name of the property
* @param property_filter returns true if the property is acceptable
* @param advanced is @foo.setter syntax allowed
* @return the first property that both filters accepted.
*/
@Nullable
private Property processPropertiesInClass(@Nullable String name, @Nullable Processor<Property> property_filter, boolean advanced) {
// NOTE: fast enough to be rerun every time
Property prop = processDecoratedProperties(name, property_filter, advanced);
if (prop != null) return prop;
if (getStub() != null) {
prop = processStubProperties(name, property_filter);
if (prop != null) return prop;
}
else {
// name = property(...) assignments from PSI
for (PyTargetExpression target : getClassAttributes()) {
if (name == null || name.equals(target.getName())) {
prop = PropertyImpl.fromTarget(target);
if (prop != null) {
if (property_filter == null || property_filter.process(prop)) return prop;
}
}
}
}
return null;
}
@Nullable
private Property processDecoratedProperties(@Nullable String name, @Nullable Processor<Property> filter, boolean useAdvancedSyntax) {
// look at @property decorators
Map<String, List<PyFunction>> grouped = new HashMap<String, List<PyFunction>>();
// group suitable same-named methods, each group defines a property
for (PyFunction method : getMethods()) {
final String methodName = method.getName();
if (name == null || name.equals(methodName)) {
List<PyFunction> bucket = grouped.get(methodName);
if (bucket == null) {
bucket = new SmartList<PyFunction>();
grouped.put(methodName, bucket);
}
bucket.add(method);
}
}
for (Map.Entry<String, List<PyFunction>> entry : grouped.entrySet()) {
Maybe<Callable> getter = NONE;
Maybe<Callable> setter = NONE;
Maybe<Callable> deleter = NONE;
String doc = null;
final String decoratorName = entry.getKey();
for (PyFunction method : entry.getValue()) {
final PyDecoratorList decoratorList = method.getDecoratorList();
if (decoratorList != null) {
for (PyDecorator deco : decoratorList.getDecorators()) {
final PyQualifiedName qname = deco.getQualifiedName();
if (qname != null) {
if (qname.matches(PyNames.PROPERTY)) {
getter = new Maybe<Callable>(method);
}
else if (useAdvancedSyntax && qname.matches(decoratorName, PyNames.SETTER)) {
setter = new Maybe<Callable>(method);
}
else if (useAdvancedSyntax && qname.matches(decoratorName, PyNames.DELETER)) {
deleter = new Maybe<Callable>(method);
}
}
}
}
if (getter != NONE && setter != NONE && deleter != NONE) break; // can't improve
}
if (getter != NONE || setter != NONE || deleter != NONE) {
final PropertyImpl prop = new PropertyImpl(decoratorName, getter, setter, deleter, doc, null);
if (filter == null || filter.process(prop)) return prop;
}
}
return null;
}
private Maybe<Callable> fromPacked(Maybe<String> maybeName) {
if (maybeName.isDefined()) {
final String value = maybeName.value();
if (value == null || PyNames.NONE.equals(value)) {
return NONE;
}
PyFunction method = findMethodByName(value, true);
if (method != null) return new Maybe<Callable>(method);
}
return UNKNOWN_CALL;
}
@Nullable
private Property processStubProperties(@Nullable String name, @Nullable Processor<Property> propertyProcessor) {
final PyClassStub stub = getStub();
if (stub != null) {
for (StubElement subStub : stub.getChildrenStubs()) {
if (subStub.getStubType() == PyElementTypes.TARGET_EXPRESSION) {
final PyTargetExpressionStub targetStub = (PyTargetExpressionStub)subStub;
PropertyStubStorage prop = targetStub.getCustomStub(PropertyStubStorage.class);
if (prop != null && (name == null || name.equals(targetStub.getName()))) {
Maybe<Callable> getter = fromPacked(prop.getGetter());
Maybe<Callable> setter = fromPacked(prop.getSetter());
Maybe<Callable> deleter = fromPacked(prop.getDeleter());
String doc = prop.getDoc();
if (getter != NONE || setter != NONE || deleter != NONE) {
final PropertyImpl property = new PropertyImpl(targetStub.getName(), getter, setter, deleter, doc, targetStub.getPsi());
if (propertyProcessor == null || propertyProcessor.process(property)) return property;
}
}
}
}
}
return null;
}
@Nullable
@Override
public Property findProperty(@NotNull final String name) {
Property property = findLocalProperty(name);
if (property != null) {
return property;
}
if (findMethodByName(name, false) != null || findClassAttribute(name, false) != null) {
return null;
}
for (PyClass aClass : iterateAncestorClasses()) {
final Property ancestorProperty = ((PyClassImpl)aClass).findLocalProperty(name);
if (ancestorProperty != null) {
return ancestorProperty;
}
}
return null;
}
@Override
public Property findPropertyByFunction(PyFunction function) {
if (myPropertyCache == null) {
myPropertyCache = initializePropertyCache();
}
for (Property property : myPropertyCache.values()) {
if (property.getGetter().valueOrNull() == function ||
property.getSetter().valueOrNull() == function ||
property.getDeleter().valueOrNull() == function) {
return property;
}
}
return null;
}
private Property findLocalProperty(String name) {
if (myPropertyCache == null) {
myPropertyCache = initializePropertyCache();
}
return myPropertyCache.get(name);
}
private Map<String, Property> initializePropertyCache() {
final Map<String, Property> result = new HashMap<String, Property>();
processProperties(null, new Processor<Property>() {
@Override
public boolean process(Property property) {
result.put(property.getName(), property);
return false;
}
}, false);
return result;
}
@Nullable
@Override
public Property scanProperties(@Nullable Processor<Property> filter, boolean inherited) {
return processProperties(null, filter, inherited);
}
@Nullable
private Property processProperties(@Nullable String name, @Nullable Processor<Property> filter, boolean inherited) {
if (!isValid()) {
return null;
}
LanguageLevel level = LanguageLevel.getDefault();
// EA-32381: A tree-based instance may not have a parent element somehow, so getContainingFile() may be not appropriate
final PsiFile file = getParentByStub() != null ? getContainingFile() : null;
if (file != null) {
final VirtualFile vfile = file.getVirtualFile();
if (vfile != null) {
level = LanguageLevel.forFile(vfile);
}
}
final boolean useAdvancedSyntax = level.isAtLeast(LanguageLevel.PYTHON26);
final Property local = processPropertiesInClass(name, filter, useAdvancedSyntax);
if (local != null) {
return local;
}
if (inherited) {
if (name != null && (findMethodByName(name, false) != null || findClassAttribute(name, false) != null)) {
return null;
}
for (PyClass cls : iterateAncestorClasses()) {
final Property property = ((PyClassImpl)cls).processPropertiesInClass(name, filter, useAdvancedSyntax);
if (property != null) {
return property;
}
}
}
return null;
}
private static class PropertyImpl extends PropertyBunch<Callable> implements Property {
private final String myName;
private PropertyImpl(String name,
Maybe<Callable> getter,
Maybe<Callable> setter,
Maybe<Callable> deleter,
String doc,
PyTargetExpression site) {
myName = name;
myDeleter = deleter;
myGetter = getter;
mySetter = setter;
myDoc = doc;
mySite = site;
}
public String getName() {
return myName;
}
public PyTargetExpression getDefinitionSite() {
return mySite;
}
@NotNull
@Override
public Maybe<Callable> getByDirection(@NotNull AccessDirection direction) {
switch (direction) {
case READ:
return myGetter;
case WRITE:
return mySetter;
case DELETE:
return myDeleter;
}
throw new IllegalArgumentException("Unknown direction " + PyUtil.nvl(direction));
}
@NotNull
@Override
protected Maybe<Callable> translate(@Nullable PyExpression expr) {
if (expr == null) {
return NONE;
}
if (PyNames.NONE.equals(expr.getName())) return NONE; // short-circuit a common case
if (expr instanceof Callable) {
return new Maybe<Callable>((Callable)expr);
}
final PsiReference ref = expr.getReference();
if (ref != null) {
PsiElement something = ref.resolve();
if (something instanceof Callable) {
return new Maybe<Callable>((Callable)something);
}
}
return NONE;
}
public String toString() {
return "property(" + myGetter + ", " + mySetter + ", " + myDeleter + ", " + myDoc + ")";
}
@Nullable
public static PropertyImpl fromTarget(PyTargetExpression target) {
PyExpression expr = target.findAssignedValue();
final PropertyImpl prop = new PropertyImpl(target.getName(), null, null, null, null, target);
final boolean success = fillFromCall(expr, prop);
return success ? prop : null;
}
}
public boolean visitMethods(Processor<PyFunction> processor, boolean inherited) {
return visitMethods(processor, inherited, false);
}
public boolean visitMethods(Processor<PyFunction> processor,
boolean inherited,
boolean skipClassObj) {
PyFunction[] methods = getMethods();
if (!ContainerUtil.process(methods, processor)) return false;
if (inherited) {
for (PyClass ancestor : iterateAncestorClasses()) {
if (skipClassObj && PyNames.FAKE_OLD_BASE.equals(ancestor.getName())) {
continue;
}
if (!ancestor.visitMethods(processor, false)) {
return false;
}
}
}
return true;
}
public boolean visitNestedClasses(Processor<PyClass> processor, boolean inherited) {
PyClass[] nestedClasses = getNestedClasses();
if (!ContainerUtil.process(nestedClasses, processor)) return false;
if (inherited) {
for (PyClass ancestor : iterateAncestorClasses()) {
if (!((PyClassImpl)ancestor).visitNestedClasses(processor, false)) {
return false;
}
}
}
return true;
}
public boolean visitClassAttributes(Processor<PyTargetExpression> processor, boolean inherited) {
List<PyTargetExpression> methods = getClassAttributes();
if (!ContainerUtil.process(methods, processor)) return false;
if (inherited) {
for (PyClass ancestor : iterateAncestorClasses()) {
if (!ancestor.visitClassAttributes(processor, false)) {
return false;
}
}
}
return true;
// NOTE: sorry, not enough metaprogramming to generalize visitMethods and visitClassAttributes
}
public List<PyTargetExpression> getClassAttributes() {
PyClassStub stub = getStub();
if (stub != null) {
final PyTargetExpression[] children = stub.getChildrenByType(PyElementTypes.TARGET_EXPRESSION, PyTargetExpression.EMPTY_ARRAY);
return Arrays.asList(children);
}
List<PyTargetExpression> result = new ArrayList<PyTargetExpression>();
for (PsiElement psiElement : getStatementList().getChildren()) {
if (psiElement instanceof PyAssignmentStatement) {
final PyAssignmentStatement assignmentStatement = (PyAssignmentStatement)psiElement;
final PyExpression[] targets = assignmentStatement.getTargets();
for (PyExpression target : targets) {
if (target instanceof PyTargetExpression) {
result.add((PyTargetExpression)target);
}
}
}
}
return result;
}
@Override
public PyTargetExpression findClassAttribute(@NotNull String name, boolean inherited) {
final NameFinder<PyTargetExpression> processor = new NameFinder<PyTargetExpression>(name);
visitClassAttributes(processor, inherited);
return processor.getResult();
}
public List<PyTargetExpression> getInstanceAttributes() {
if (myInstanceAttributes == null) {
myInstanceAttributes = collectInstanceAttributes();
}
return myInstanceAttributes;
}
@Nullable
@Override
public PyTargetExpression findInstanceAttribute(String name, boolean inherited) {
final List<PyTargetExpression> instanceAttributes = getInstanceAttributes();
for (PyTargetExpression instanceAttribute : instanceAttributes) {
if (name.equals(instanceAttribute.getReferencedName())) {
return instanceAttribute;
}
}
if (inherited) {
for (PyClass ancestor : iterateAncestorClasses()) {
final PyTargetExpression attribute = ancestor.findInstanceAttribute(name, false);
if (attribute != null) {
return attribute;
}
}
}
return null;
}
private List<PyTargetExpression> collectInstanceAttributes() {
Map<String, PyTargetExpression> result = new HashMap<String, PyTargetExpression>();
// __init__ takes priority over all other methods
PyFunctionImpl initMethod = (PyFunctionImpl)findMethodByName(PyNames.INIT, false);
if (initMethod != null) {
collectInstanceAttributes(initMethod, result);
}
final PyFunction[] methods = getMethods();
for (PyFunction method : methods) {
if (!PyNames.INIT.equals(method.getName())) {
collectInstanceAttributes(method, result);
}
}
final Collection<PyTargetExpression> expressions = result.values();
return new ArrayList<PyTargetExpression>(expressions);
}
private static void collectInstanceAttributes(@NotNull PyFunction method, @NotNull final Map<String, PyTargetExpression> result) {
final PyParameter[] params = method.getParameterList().getParameters();
if (params.length == 0) {
return;
}
final PyFunctionStub methodStub = method.getStub();
if (methodStub != null) {
final PyTargetExpression[] targets = methodStub.getChildrenByType(PyElementTypes.TARGET_EXPRESSION, PyTargetExpression.EMPTY_ARRAY);
for (PyTargetExpression target : targets) {
if (!result.containsKey(target.getName())) {
result.put(target.getName(), target);
}
}
}
else {
final PyStatementList statementList = method.getStatementList();
if (statementList != null) {
statementList.accept(new PyRecursiveElementVisitor() {
public void visitPyAssignmentStatement(final PyAssignmentStatement node) {
super.visitPyAssignmentStatement(node);
collectNewTargets(result, node);
}
});
}
}
}
private static void collectNewTargets(Map<String, PyTargetExpression> collected, PyAssignmentStatement node) {
final PyExpression[] targets = node.getTargets();
for (PyExpression target : targets) {
if (target instanceof PyTargetExpression && PyUtil.isInstanceAttribute(target)) {
collected.put(target.getName(), (PyTargetExpression)target);
}
}
}
public boolean isNewStyleClass() {
return myNewStyle.getValue().getValue();
}
private boolean calculateNewStyleClass() {
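// a class counts as new-style if its file is Python 3, if it is (or inherits from) builtin 'object',
// if it or an ancestor declares a __metaclass__, or if any ancestor cannot be resolved (assumed new-style)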
final PsiFile containingFile = getContainingFile();
if (containingFile instanceof PyFile && ((PyFile)containingFile).getLanguageLevel().isPy3K()) {
return true;
}
final PyClass objClass = PyBuiltinCache.getInstance(this).getClass("object");
if (this == objClass) return true; // a rare but possible case
if (hasNewStyleMetaClass(this)) return true;
for (PyClassRef ancestor : iterateAncestors()) {
final PyClass pyClass = ancestor.getPyClass();
if (pyClass == null) {
// unknown, assume new-style class
return true;
}
if (pyClass == objClass) return true;
if (hasNewStyleMetaClass(pyClass)) {
return true;
}
}
return false;
}
private static boolean hasNewStyleMetaClass(PyClass pyClass) {
final PsiFile containingFile = pyClass.getContainingFile();
if (containingFile instanceof PyFile) {
final PsiElement element = ((PyFile)containingFile).getElementNamed(PyNames.DUNDER_METACLASS);
if (element instanceof PyTargetExpression) {
final PyQualifiedName qName = ((PyTargetExpression)element).getAssignedQName();
if (qName != null && qName.matches("type")) {
return true;
}
}
}
if (pyClass.findClassAttribute(PyNames.DUNDER_METACLASS, false) != null) {
return true;
}
return false;
}
@Override
public boolean processClassLevelDeclarations(@NotNull PsiScopeProcessor processor) {
final PyClassStub stub = getStub();
if (stub != null) {
final List<StubElement> children = stub.getChildrenStubs();
for (StubElement child : children) {
if (!processor.execute(child.getPsi(), ResolveState.initial())) {
return false;
}
}
}
else {
PyResolveUtil.scopeCrawlUp(processor, this, null, this);
}
return true;
}
@Override
public boolean processInstanceLevelDeclarations(@NotNull PsiScopeProcessor processor, @Nullable PyExpression location) {
Map<String, PyTargetExpression> declarationsInMethod = new HashMap<String, PyTargetExpression>();
PyFunction instanceMethod = PsiTreeUtil.getParentOfType(location, PyFunction.class);
final PyClass containingClass = instanceMethod != null ? instanceMethod.getContainingClass() : null;
if (instanceMethod != null && containingClass != null && CompletionUtil.getOriginalElement(containingClass) == this) {
collectInstanceAttributes(instanceMethod, declarationsInMethod);
for (PyTargetExpression targetExpression : declarationsInMethod.values()) {
if (!processor.execute(targetExpression, ResolveState.initial())) {
return false;
}
}
}
for (PyTargetExpression expr : getInstanceAttributes()) {
if (declarationsInMethod.containsKey(expr.getName())) {
continue;
}
if (!processor.execute(expr, ResolveState.initial())) return false;
}
return true;
}
public int getTextOffset() {
final ASTNode name = getNameNode();
return name != null ? name.getStartOffset() : super.getTextOffset();
}
public PyStringLiteralExpression getDocStringExpression() {
return PythonDocStringFinder.find(getStatementList());
}
@Override
public String getDocStringValue() {
final PyClassStub stub = getStub();
if (stub != null) {
return stub.getDocString();
}
return PyPsiUtils.strValue(getDocStringExpression());
}
public String toString() {
return "PyClass: " + getName();
}
@NotNull
public Iterable<PyElement> iterateNames() {
return Collections.<PyElement>singleton(this);
}
public PyElement getElementNamed(final String the_name) {
return the_name.equals(getName()) ? this : null;
}
public boolean mustResolveOutside() {
return false;
}
public void subtreeChanged() {
super.subtreeChanged();
ControlFlowCache.clear(this);
if (myInstanceAttributes != null) {
myInstanceAttributes = null;
}
myPropertyCache = null;
}
@NotNull
@Override
public SearchScope getUseScope() {
final ScopeOwner scopeOwner = ScopeUtil.getScopeOwner(this);
if (scopeOwner instanceof PyFunction) {
return new LocalSearchScope(scopeOwner);
}
return super.getUseScope();
}
private static class AncestorsIterable implements Iterable<PyClassRef> {
private final PyClassImpl myClass;
public AncestorsIterable(final PyClassImpl pyClass) {
myClass = pyClass;
}
public Iterator<PyClassRef> iterator() {
return new AncestorsIterator(myClass);
}
}
private static class AncestorsIterator implements Iterator<PyClassRef> {
List<PyClassRef> pending = new LinkedList<PyClassRef>();
private final Set<PyClassRef> seen;
Iterator<PyClassRef> percolator;
PyClassRef prefetch = null;
private final PyClassImpl myAClass;
public AncestorsIterator(PyClassImpl aClass) {
myAClass = aClass;
percolator = myAClass.getSuperClassesList().iterator();
seen = new HashSet<PyClassRef>();
}
private AncestorsIterator(PyClassImpl AClass, Set<PyClassRef> seen) {
myAClass = AClass;
this.seen = seen;
percolator = myAClass.getSuperClassesList().iterator();
}
public boolean hasNext() {
// due to already-seen filtering, there's no way but to try and see.
if (prefetch != null) return true;
prefetch = getNext();
return prefetch != null;
}
public PyClassRef next() {
final PyClassRef nextClass = getNext();
if (nextClass == null) throw new NoSuchElementException();
return nextClass;
}
@Nullable
private PyClassRef getNext() {
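// emit the direct superclasses first; once the percolator is exhausted, descend into each pending
// ancestor's own superclass list in turn, skipping any class reference that has already been seen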
iterations:
while (true) {
if (prefetch != null) {
PyClassRef ret = prefetch;
prefetch = null;
return ret;
}
if (percolator.hasNext()) {
PyClassRef it = percolator.next();
if (seen.contains(it)) {
continue iterations; // loop back is equivalent to return next();
}
pending.add(it);
seen.add(it);
return it;
}
else {
while (pending.size() > 0) {
PyClassRef it = pending.get(0);
pending.remove(0);
PyClass pyClass = it.getPyClass();
if (pyClass != null) {
percolator = new AncestorsIterator((PyClassImpl)pyClass, new HashSet<PyClassRef>(seen));
continue iterations;
}
}
return null;
}
}
}
public void remove() {
throw new UnsupportedOperationException();
}
}
private static class AncestorClassesIterable implements Iterable<PyClass> {
private final PyClassImpl myClass;
public AncestorClassesIterable(final PyClassImpl pyClass) {
myClass = pyClass;
}
public Iterator<PyClass> iterator() {
return new AncestorClassesIterator(new AncestorsIterator(myClass));
}
}
private static class AncestorClassesIterator implements Iterator<PyClass> {
private final AncestorsIterator myAncestorsIterator;
private PyClass myNext;
public AncestorClassesIterator(AncestorsIterator ancestorsIterator) {
myAncestorsIterator = ancestorsIterator;
}
@Override
public boolean hasNext() {
if (myNext != null) {
return true;
}
while (myAncestorsIterator.hasNext()) {
PyClassRef clsRef = myAncestorsIterator.getNext();
if (clsRef == null) {
return false;
}
myNext = clsRef.getPyClass();
if (myNext != null) {
return true;
}
}
return false;
}
@Nullable
@Override
public PyClass next() {
if (myNext == null) {
if (!hasNext()) return null;
}
PyClass next = myNext;
myNext = null;
return next;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
}
|
package com.bms.chemtris.models;
import com.badlogic.gdx.graphics.PerspectiveCamera;
import com.badlogic.gdx.graphics.g3d.ModelInstance;
public class ColumnTwo extends GameModel {
private enum X{
FRONT, SIDE
}
private enum Y{
UP, DOWN
}
private X x;
private Y y;
public ColumnTwo(PerspectiveCamera cam, boolean[][][] collision) {
super(cam, collision);
x = X.FRONT;
y = Y.UP;
for(float yPos = 10f; yPos < 13f; ++yPos){
ModelInstance p = new ModelInstance(white,2f,yPos,2f);
ModelInstance s = new ModelInstance(gray,2f,yPos,2f);
ModelInstance f = new ModelInstance(wire,2f,yPos,2f);
parts.add(p);
shadow.add(s);
frame.add(f);
}
traceShadow();
}
@Override
public void rotateX() {
switch (y){
case DOWN:
switch (x){
case FRONT:
if(canMovePart(1,0,0) && canMovePart(2,0,0)){
movePart(1,1f,0f,0f);
movePart(2,2f,0f,0f);
x = X.SIDE;
}
break;
case SIDE:
if(canMovePart(0,0,-1) && canMovePart(0,0,-2)){
movePart(1,0f,0f,-1f);
movePart(2,0f,0f,-2f);
x = X.FRONT;
}
break;
}
}
}
@Override
public void rotateY() {
switch (y){
case UP:
if(canMovePart(0,0,-1) && canMovePart(0,0,-2)){
movePart(1,0f,0f,-1f);
movePart(2,0f,0f,-2f);
y = Y.DOWN;
}
break;
case DOWN:
if(canMovePart(0,1,0) && canMovePart(0,2,0)){
movePart(1,0f,1f,0f);
movePart(2,0f,2f,0f);
y = Y.UP;
}
break;
}
}
@Override
public void rotateZ() {
rotateY();
}
}
|
package com.fteams.sstrain.objects;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Vector2;
import com.fteams.sstrain.assets.Assets;
import com.fteams.sstrain.config.GlobalConfiguration;
import com.fteams.sstrain.entities.Note;
import com.fteams.sstrain.entities.Results;
import com.fteams.sstrain.util.Accuracy;
import com.fteams.sstrain.util.SongUtils;
public class Circle implements Comparable<Circle> {
public Note note;
public Vector2 origin = new Vector2();
public Vector2 position = new Vector2();
Vector2 velocity = new Vector2();
public boolean hold;
public Long destination;
Double speed;
public Circle nextNote;
public Circle previousNote;
float spawnTime;
float despawnTime;
float startWaitTime;
float endWaitTime;
float size;
public float hitTime;
float previousTime;
public boolean visible;
public boolean holding;
public boolean waiting;
public boolean soundPlayed;
public boolean miss;
public float alpha = 1f;
public Accuracy accuracy;
public boolean processed;
// holds consist of 2 notes out of which the first one must have type 2
// the second one will either be a type 2 or a type 1 with some effect
// notes with type 1 can be linked to other notes IF the effect != 0 and the groupID is set.
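// Illustrative sketch (added comment, not from the original sources): a hold is two linked
// Circle instances; the setters below are real, the local names are invented.
//   Circle holdStart = new Circle(x, y, startNote, speed, delay); // startNote.type has NOTE_TYPE_HOLD set
//   Circle holdEnd   = new Circle(x, y, endNote, speed, delay);
//   holdStart.setNextNote(holdEnd);
//   holdEnd.setPreviousNote(holdStart); // may widen holdEnd's wait window, see setPreviousNote()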
public Circle(float x, float y, Note note, Double noteSpeed, float delay) {
float timing = (float) (delay + note.timing * 1f + GlobalConfiguration.offset * 1f / 1000f);
this.origin.x = x;
this.origin.y = y;
this.position.x = x;
this.position.y = y;
this.note = note;
this.hold = (note.type & SongUtils.NOTE_TYPE_HOLD) != 0;
// position goes 1-5
this.destination = note.endPos;
this.speed = noteSpeed;
this.spawnTime = (float) (timing - speed);
this.startWaitTime = (float) (timing - (hold || !note.status.equals(SongUtils.NOTE_NO_SWIPE) ? 2f : 1f) * SongUtils.overallDiffBad[GlobalConfiguration.overallDifficulty] / 1000f);
this.endWaitTime = (float) (timing + (hold || !note.status.equals(SongUtils.NOTE_NO_SWIPE) ? 2f : 1f) * SongUtils.overallDiffBad[GlobalConfiguration.overallDifficulty] / 1000f);
this.despawnTime = timing * 1.0f;
this.size = 1f;
hitTime = -9f;
previousTime = 0f;
initializeVelocity();
initializeStates();
}
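// Timeline summary derived from the assignments above (descriptive comment, times in seconds):
//   spawnTime     = timing - speed  -> note appears and starts falling
//   startWaitTime / endWaitTime     -> hit window around `timing`, 1x or 2x the overallDiffBad window
//   despawnTime   = timing          -> ideal hit moment; past endWaitTime the note becomes a miss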
private void initializeStates() {
visible = false;
holding = false;
soundPlayed = false;
miss = false;
}
public void setPreviousNote(Circle previousNote) {
this.previousNote = previousNote;
if (previousNote != null) {
if (previousNote.hold && previousNote.previousNote == null) {
this.startWaitTime = (float) (despawnTime - 2f * SongUtils.overallDiffBad[GlobalConfiguration.overallDifficulty] / 1000f);
this.endWaitTime = (float) (despawnTime + 2f * SongUtils.overallDiffBad[GlobalConfiguration.overallDifficulty] / 1000f);
}
}
}
public void setNextNote(Circle nextNote) {
this.nextNote = nextNote;
}
private void initializeVelocity() {
// unless the arc movement is implemented,
// the notes will simply fall from the top towards the tap zones
velocity.x = 0;
velocity.y = (float) (-249 / speed);
}
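// Worked example (added for illustration): with this straight-fall velocity, update() places a
// visible, non-held note at position = origin + velocity * (time - spawnTime), so the note covers
// the full 249-unit drop in exactly `speed` seconds and reaches the tap line (origin.y - 249)
// at despawnTime, whatever the configured speed.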
public void update(float time) {
if (miss || (accuracy != null && !holding)) {
if (visible) {
visible = false;
}
return;
}
if (spawnTime <= time && despawnTime > time && !visible) {
visible = true;
}
if (spawnTime >= time && visible)
visible = false;
if (visible && despawnTime <= time) {
if (GlobalConfiguration.playHintSounds && !soundPlayed) {
// hint sounds play at 50% of the volume
if (note.status.equals(SongUtils.NOTE_NO_SWIPE)) {
Assets.perfectTapSound.play(GlobalConfiguration.feedbackVolume / 200f);
} else {
Assets.perfectSwipeSound.play(GlobalConfiguration.feedbackVolume / 200f);
}
soundPlayed = true;
}
if (holding) {
alpha = 1f;
} else {
alpha = MathUtils.clamp((endWaitTime - time) / (endWaitTime - despawnTime), 0f, 1f);
if (alpha == 0f)
visible = false;
}
}
if (visible) {
// TODO: implement parabolic movement of the notes towards the player and use the origin spot instead of spawning from the same lane (more SS-like)
float scl = time - spawnTime;
if (holding) {
position.set(origin.cpy().x, origin.cpy().y - 249);
} else
position.set(origin.cpy().add(velocity.cpy().scl(scl)));
}
if (startWaitTime <= time && endWaitTime > time && !waiting && accuracy == null) {
waiting = true;
}
processMiss(time);
previousTime = time;
}
private void processMiss(float time) {
// miss if we miss the first note
if (nextNote != null && hold && !holding && endWaitTime <= time && accuracy == null && !miss) {
waiting = false;
miss = true;
accuracy = Accuracy.MISS;
nextNote.miss = true;
nextNote.accuracy = Accuracy.MISS;
nextNote.processed = true;
nextNote.waiting = false;
// System.out.println("MISS-001: didn't hit the note (" + note.id + ")");
} else if (nextNote == null && endWaitTime <= time && !miss && accuracy == null) {
waiting = false;
miss = true;
accuracy = Accuracy.MISS;
// System.out.println("MISS-002: didn't hit the note (" + note.id + ")");
} else if (nextNote != null && !hold && endWaitTime <= time && accuracy == null && !miss) {
waiting = false;
miss = true;
accuracy = Accuracy.MISS;
// System.out.println("MISS-003: didn't hit the note (" + note.id + ")");
}
if (hold && !miss) {
// miss if we hold for too long
if (nextNote != null && nextNote.endWaitTime <= time && nextNote.accuracy == null) {
miss = true;
holding = false;
waiting = false;
// System.out.println("MISS-004: held for too long (" + note.id + ")");
accuracy = Accuracy.MISS;
}
}
}
public Accuracy hit() {
// System.out.println("H>" + note.id);
// HIT DOESN'T COUNT FOR HOLD RELEASE!
if (previousNote != null && previousNote.hold)
return Accuracy.NONE;
Accuracy accuracy = hold ? Results.getAccuracyForSwipesAndHolds(previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f) : Results.getAccuracyFor(previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f);
// If the note was tapped too early, we ignore the tap
if (despawnTime > previousTime && accuracy == Accuracy.MISS) {
return Accuracy.NONE;
}
hitTime = previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f;
waiting = false;
if (hold) {
hitTime *= Results.SWIPE_HOLD_MULTIPLIER;
holding = true;
} else {
visible = false;
}
this.accuracy = accuracy;
return accuracy;
}
public Accuracy release() {
// System.out.println("R>" + note.id);
// if a non-hold is released, it counts as a miss.
if (!hold || !note.status.equals(SongUtils.NOTE_NO_SWIPE)) {
accuracy = Accuracy.MISS;
miss = true;
visible = false;
previousNote.release();
waiting = false;
// only type 2 can gain from a release.
// type 1 with status calls release on swipe
return accuracy;
}
// RELEASE DOESN'T COUNT FOR HOLD START
if (holding) {
holding = false;
visible = false;
}
if (nextNote != null)
return Accuracy.NONE;
accuracy = Results.getAccuracyForSwipesAndHolds(previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f);
previousNote.release();
waiting = false;
// miss if we release before we start waiting
if (accuracy == Accuracy.MISS) {
waiting = false;
visible = false;
miss = true;
processed = true;
// System.out.println("MISS-005: released hold too early (" + note.id + ")");
} else {
hitTime = previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f;
hitTime *= Results.SWIPE_HOLD_MULTIPLIER;
}
return accuracy;
}
public Accuracy swipeLeft() {
// some songs have notes with type 2 and status != 0
if (note.status.equals(SongUtils.NOTE_NO_SWIPE) || note.status.equals(SongUtils.NOTE_SWIPE_RIGHT)) {
return Accuracy.NONE;
}
if (previousNote != null && previousNote.hold) {
previousNote.release();
}
if (previousNote != null && previousNote.previousNote != null) {
if (previousNote.previousNote.note.status.equals(note.status) && !previousNote.isDone()) {
return Accuracy.NONE;
}
}
Accuracy accuracy = Results.getAccuracyForSwipesAndHolds(previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f);
// If the note was tapped too early, we ignore the tap
if (despawnTime > previousTime && accuracy == Accuracy.MISS) {
return Accuracy.NONE;
}
hitTime = previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f;
hitTime *= Results.SWIPE_HOLD_MULTIPLIER;
waiting = false;
this.accuracy = accuracy;
visible = false;
return accuracy;
}
public Accuracy swipeRight() {
// some songs have notes with type 2 and status != 0
// legne on pro, for instance.
if (note.status.equals(SongUtils.NOTE_NO_SWIPE) || note.status.equals(SongUtils.NOTE_SWIPE_LEFT)) {
return Accuracy.NONE;
}
if (previousNote != null && previousNote.hold) {
previousNote.release();
}
if (previousNote != null && previousNote.previousNote != null) {
if (previousNote.previousNote.note.status.equals(note.status) && !previousNote.isDone()) {
return Accuracy.NONE;
}
}
Accuracy accuracy = Results.getAccuracyForSwipesAndHolds(previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f);
// If the note was tapped too early, we ignore the tap
if (despawnTime > previousTime && accuracy == Accuracy.MISS) {
return Accuracy.NONE;
}
hitTime = previousTime - despawnTime - GlobalConfiguration.inputOffset / 1000f;
hitTime *= Results.SWIPE_HOLD_MULTIPLIER;
waiting = false;
this.accuracy = accuracy;
visible = false;
return accuracy;
}
public boolean isDone() {
return miss || (accuracy != null && !holding);
}
@Override
public int compareTo(Circle o) {
if (o == null)
return 1;
// if the notes have the same timing, sort them by destination
if (0 == Double.compare(note.timing, o.note.timing)) {
return Long.compare(destination, o.destination);
}
return Double.compare(note.timing, o.note.timing);
}
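// Ordering example (illustrative values): two circles with equal note.timing are ordered by
// destination lane, so destination 2 sorts before destination 4; otherwise the earlier timing wins.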
}
|
package hudson.maven;
import hudson.FilePath;
import hudson.FilePath.FileCallable;
import hudson.maven.PluginManagerInterceptor.AbortException;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.DependencyGraph;
import hudson.model.Hudson;
import hudson.model.Result;
import hudson.model.Run;
import hudson.remoting.Channel;
import hudson.remoting.VirtualChannel;
import hudson.scm.ChangeLogSet;
import hudson.scm.ChangeLogSet.Entry;
import hudson.tasks.test.AbstractTestResultAction;
import hudson.util.ArgumentListBuilder;
import org.apache.maven.BuildFailureException;
import org.apache.maven.embedder.MavenEmbedder;
import org.apache.maven.embedder.MavenEmbedderException;
import org.apache.maven.embedder.PlexusLoggerAdapter;
import org.apache.maven.lifecycle.LifecycleExecutionException;
import org.apache.maven.monitor.event.DefaultEventMonitor;
import org.apache.maven.monitor.event.EventMonitor;
import org.apache.maven.plugin.PluginManager;
import org.apache.maven.project.DuplicateProjectException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.ProjectBuildingException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.util.dag.CycleDetectedException;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
/**
* {@link Run} for {@link MavenModule}.
*
* @author Kohsuke Kawaguchi
*/
public class MavenBuild extends AbstractBuild<MavenModule,MavenBuild> {
/**
* {@link MavenReporter}s that will contribute project actions.
* Can be null if there's none.
*/
/*package*/ List<MavenReporter> projectActionReporters;
public MavenBuild(MavenModule job) throws IOException {
super(job);
}
public MavenBuild(MavenModule job, Calendar timestamp) {
super(job, timestamp);
}
public MavenBuild(MavenModule project, File buildDir) throws IOException {
super(project, buildDir);
}
/**
* Gets the {@link MavenModuleSetBuild} that has the same build number.
*
* @return
* null if no such build exists, which happens when the module build
* is manually triggered.
*/
public MavenModuleSetBuild getParentBuild() {
return getParent().getParent().getBuildByNumber(getNumber());
}
@Override
public ChangeLogSet<? extends Entry> getChangeSet() {
return new FilteredChangeLogSet(this);
}
/**
* We always get the changeset from {@link MavenModuleSetBuild}.
*/
@Override
public boolean hasChangeSetComputed() {
return true;
}
@Override
public AbstractTestResultAction getTestResultAction() {
return getAction(AbstractTestResultAction.class);
}
public void registerAsProjectAction(MavenReporter reporter) {
if(projectActionReporters==null)
projectActionReporters = new ArrayList<MavenReporter>();
projectActionReporters.add(reporter);
}
@Override
public void run() {
run(new RunnerImpl());
getProject().updateTransientActions();
}
/**
* Runs Maven and builds the project.
*
* This code is executed on the remote machine.
*/
private static final class Builder implements FileCallable<Result> {
private final BuildListener listener;
private final MavenBuildProxy buildProxy;
private final MavenReporter[] reporters;
private final List<String> goals;
public Builder(BuildListener listener,MavenBuildProxy buildProxy,MavenReporter[] reporters, List<String> goals) {
this.listener = listener;
this.buildProxy = buildProxy;
this.reporters = reporters;
this.goals = goals;
}
public Result invoke(File moduleRoot, VirtualChannel channel) throws IOException {
MavenProject p=null;
try {
MavenEmbedder embedder = MavenUtil.createEmbedder(listener);
File pom = new File(moduleRoot,"pom.xml").getAbsoluteFile(); // MavenEmbedder only works if it's absolute
if(!pom.exists()) {
listener.error("No POM: "+pom);
return Result.FAILURE;
}
// event monitor is mostly useless. It only provides a few strings
EventMonitor eventMonitor = new DefaultEventMonitor( new PlexusLoggerAdapter( new EmbedderLoggerImpl(listener) ) );
p = embedder.readProject(pom);
PluginManagerInterceptor interceptor;
try {
interceptor = (PluginManagerInterceptor)embedder.getContainer().lookup(PluginManager.class.getName());
interceptor.setBuilder(buildProxy,reporters,listener);
} catch (ComponentLookupException e) {
throw new Error(e); // impossible
}
for (MavenReporter r : reporters)
r.preBuild(buildProxy,p,listener);
embedder.execute(p, goals, eventMonitor,
new TransferListenerImpl(listener),
null, // TODO: allow additional properties to be specified
pom.getParentFile());
interceptor.fireLeaveModule();
return null;
} catch (MavenEmbedderException e) {
buildProxy.setResult(Result.FAILURE);
e.printStackTrace(listener.error(e.getMessage()));
} catch (ProjectBuildingException e) {
buildProxy.setResult(Result.FAILURE);
e.printStackTrace(listener.error(e.getMessage()));
} catch (CycleDetectedException e) {
buildProxy.setResult(Result.FAILURE);
e.printStackTrace(listener.error(e.getMessage()));
} catch (LifecycleExecutionException e) {
buildProxy.setResult(Result.FAILURE);
e.printStackTrace(listener.error(e.getMessage()));
} catch (BuildFailureException e) {
buildProxy.setResult(Result.FAILURE);
e.printStackTrace(listener.error(e.getMessage()));
} catch (DuplicateProjectException e) {
buildProxy.setResult(Result.FAILURE);
e.printStackTrace(listener.error(e.getMessage()));
} catch (AbortException e) {
listener.error("build aborted");
} catch (InterruptedException e) {
listener.error("build aborted");
} finally {
// this should happen after a build is marked as a failure
try {
if(p!=null)
for (MavenReporter r : reporters)
r.postBuild(buildProxy,p,listener);
} catch (InterruptedException e) {
buildProxy.setResult(Result.FAILURE);
}
}
return Result.FAILURE;
}
}
/**
* {@link MavenBuildProxy} implementation.
*/
private class ProxyImpl implements MavenBuildProxy, Serializable {
public <V, T extends Throwable> V execute(BuildCallable<V, T> program) throws T, IOException, InterruptedException {
return program.call(MavenBuild.this);
}
public FilePath getRootDir() {
return new FilePath(MavenBuild.this.getRootDir());
}
public FilePath getProjectRootDir() {
return new FilePath(MavenBuild.this.getParent().getRootDir());
}
public FilePath getArtifactsDir() {
return new FilePath(MavenBuild.this.getArtifactsDir());
}
public void setResult(Result result) {
MavenBuild.this.setResult(result);
}
public void registerAsProjectAction(MavenReporter reporter) {
MavenBuild.this.registerAsProjectAction(reporter);
}
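// Hedged note (added comment): writeReplace() below is the standard Java serialization hook; when
// this proxy is serialized to the Maven side it is replaced by a remoting stub exported via
// Channel.export(), so calls made remotely are forwarded back to this build's JVM.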
private Object writeReplace() {
return Channel.current().export(MavenBuildProxy.class, new ProxyImpl());
}
}
private class RunnerImpl extends AbstractRunner {
protected Result doRun(BuildListener listener) throws Exception {
// pick up a list of reporters to run
List<MavenReporter> reporters = new ArrayList<MavenReporter>();
getProject().getReporters().addAllTo(reporters);
for (MavenReporterDescriptor d : MavenReporters.LIST) {
if(getProject().getReporters().contains(d))
continue; // already configured
MavenReporter auto = d.newAutoInstance(getProject());
if(auto!=null)
reporters.add(auto);
}
ArgumentListBuilder args = new ArgumentListBuilder();
args.addTokenized(getProject().getGoals());
return getProject().getModuleRoot().act(new Builder(
listener,new ProxyImpl(),
reporters.toArray(new MavenReporter[0]), args.toList()));
}
public void post(BuildListener listener) {
if(!getResult().isWorseThan(Result.UNSTABLE)) {
// trigger dependency builds
DependencyGraph graph = Hudson.getInstance().getDependencyGraph();
for( AbstractProject down : getParent().getDownstreamProjects()) {
if(!graph.hasIndirectDependencies(getParent(),down)) {
// if there's a longer dependency path to this project,
// then scheduling the build now is going to be a waste,
// so don't do that.
listener.getLogger().println("Triggering a new build of "+down.getName());
down.scheduleBuild();
}
}
}
//// run all of them even if one of them failed
//try {
// for( Publisher bs : project.getPublishers().values() )
// bs.perform(Build.this, launcher, listener);
//} catch (InterruptedException e) {
// e.printStackTrace(listener.fatalError("aborted"));
// setResult(Result.FAILURE);
//} catch (IOException e) {
// e.printStackTrace(listener.fatalError("failed"));
//    setResult(Result.FAILURE);
//}
}
}
}
|
package com.net2plan.gui.plugins.networkDesign.viewEditTopolTables.rightPanelTabs;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.ScrollPaneLayout;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableColumn;
import com.net2plan.gui.plugins.GUINetworkDesign;
import com.net2plan.gui.plugins.networkDesign.viewEditTopolTables.controlTables.AdvancedJTable_networkElement;
import com.net2plan.gui.plugins.networkDesign.viewEditTopolTables.controlTables.specificTables.AdvancedJTable_layer;
import com.net2plan.gui.plugins.networkDesign.viewEditTopolTables.controlTables.specificTables.Niw_AdvancedJTable_layer;
import com.net2plan.gui.utils.AdvancedJTable;
import com.net2plan.gui.utils.ClassAwareTableModel;
import com.net2plan.gui.utils.ColumnHeaderToolTips;
import com.net2plan.gui.utils.FullScrollPaneLayout;
import com.net2plan.gui.utils.TableCursorNavigation;
import com.net2plan.interfaces.networkDesign.NetPlan;
import com.net2plan.internal.Constants.NetworkElementType;
import com.net2plan.utils.StringUtils;
import net.miginfocom.swing.MigLayout;
public class NetPlanViewTableComponent_network extends JPanel {
private final static String[] attributeTableHeader = StringUtils.arrayOf("Attribute", "Value");
private final static String[] attributeTableTips = attributeTableHeader;
private final static String[] tagTableHeader = StringUtils.arrayOf("Tag");
private final static String[] tagTableTip = StringUtils.arrayOf("Name of the tag");
private JTextField txt_networkName, txt_currentDate , txt_niwOpticalSlotSizeGHz;
private JButton updateDate , update_niwOpticalSlotSizeGHz;
private JTextArea txt_networkDescription;
private AdvancedJTable networkTagTable;
private AdvancedJTable networkAttributeTable;
private AdvancedJTable_networkElement layerTable;
private final GUINetworkDesign networkViewer;
public NetPlanViewTableComponent_network(final GUINetworkDesign networkViewer, AdvancedJTable_networkElement layerTable)
{
super(new MigLayout("", "[][grow]", "[][][grow][][][][][grow]"));
this.layerTable = layerTable;
this.networkViewer = networkViewer;
final boolean isNiwActive = networkViewer.getVisualizationState().isNiwDesignButtonActive() && networkViewer.isNiwValidCurrentDesign();
if (isNiwActive)
{
this.update_niwOpticalSlotSizeGHz = new JButton ("Update");
this.update_niwOpticalSlotSizeGHz.setEnabled(true);
this.update_niwOpticalSlotSizeGHz.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
try
{
final double val = Double.parseDouble(txt_niwOpticalSlotSizeGHz.getText());
if (val > 0)
networkViewer.getNiwInfo().getSecond().setWdmOpticalSlotSizeInGHz(val);
} catch (Exception ee) { }
txt_niwOpticalSlotSizeGHz.setText("" + networkViewer.getNiwInfo().getSecond().getWdmOpticalSlotSizeInGHz());
}
});
}
updateDate = new JButton ("Update");
updateDate.setEnabled(true);
updateDate.addActionListener(new ActionListener()
{
@Override
public void actionPerformed(ActionEvent e)
{
final SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
try
{
final Date date = df.parse(txt_currentDate.getText());
if (!date.equals(networkViewer.getDesign().getCurrentDate()))
{
networkViewer.getDesign().setCurrentDate(date);
networkViewer.updateVisualizationJustTables();
}
} catch (Exception ee) { txt_currentDate.setText(df.format(networkViewer.getDesign().getCurrentDate()));}
}
});
txt_networkName = new JTextField();
txt_networkDescription = new JTextArea();
txt_networkDescription.setFont(new JLabel().getFont());
txt_networkDescription.setLineWrap(true);
txt_networkDescription.setWrapStyleWord(true);
txt_networkName.setEditable(networkViewer.getVisualizationState().isNetPlanEditable());
txt_networkDescription.setEditable(networkViewer.getVisualizationState().isNetPlanEditable());
txt_currentDate = new JTextField();
txt_currentDate.setEditable(networkViewer.getVisualizationState().isNetPlanEditable());
if (isNiwActive)
{
txt_niwOpticalSlotSizeGHz = new JTextField();
txt_niwOpticalSlotSizeGHz.setEditable(true);
}
if (networkViewer.getVisualizationState().isNetPlanEditable()) {
txt_networkName.getDocument().addDocumentListener(new DocumentAdapter(networkViewer) {
@Override
protected void updateInfo(String text) {
networkViewer.getDesign().setName(text);
}
});
txt_networkDescription.getDocument().addDocumentListener(new DocumentAdapter(networkViewer) {
@Override
protected void updateInfo(String text) {
networkViewer.getDesign().setDescription(text);
}
});
}
networkTagTable = new AdvancedJTable(new ClassAwareTableModel(new Object[1][tagTableHeader.length], tagTableHeader));
ColumnHeaderToolTips tagTips = new ColumnHeaderToolTips();
for (int c = 0; c < tagTableHeader.length; c++) {
TableColumn col = networkTagTable.getColumnModel().getColumn(c);
tagTips.setToolTip(col, tagTableTip[c]);
}
networkTagTable.getTableHeader().addMouseMotionListener(tagTips);
networkTagTable.setAutoCreateRowSorter(true);
if (networkViewer.getVisualizationState().isNetPlanEditable())
{
networkTagTable.addMouseListener(new SingleElementTagEditor(networkViewer, NetworkElementType.NETWORK));
}
JScrollPane sp_tags = new JScrollPane(networkTagTable);
ScrollPaneLayout tagLayout = new FullScrollPaneLayout();
sp_tags.setLayout(tagLayout);
sp_tags.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
networkAttributeTable = new AdvancedJTable(new ClassAwareTableModel(new Object[1][attributeTableHeader.length], attributeTableHeader));
if (networkViewer.getVisualizationState().isNetPlanEditable()) {
networkAttributeTable.addMouseListener(new SingleElementAttributeEditor(networkViewer, NetworkElementType.NETWORK));
}
ColumnHeaderToolTips tips = new ColumnHeaderToolTips();
for (int c = 0; c < attributeTableHeader.length; c++) {
TableColumn col = networkAttributeTable.getColumnModel().getColumn(c);
tips.setToolTip(col, attributeTableTips[c]);
}
networkAttributeTable.getTableHeader().addMouseMotionListener(tips);
networkAttributeTable.setAutoCreateRowSorter(true);
JScrollPane scrollPane = new JScrollPane(networkAttributeTable);
ScrollPaneLayout layout = new FullScrollPaneLayout();
scrollPane.setLayout(layout);
scrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
this.add(new JLabel("Name"));
this.add(txt_networkName, "grow, wrap");
this.add(new JLabel("Description"), "aligny top");
this.add(new JScrollPane(txt_networkDescription), "grow, wrap, height 100::");
this.add(sp_tags, "grow, spanx, wrap");
this.add(scrollPane, "grow, spanx 2, wrap");
this.add(new JLabel("Current date (yyyy-MM-dd HH:mm:ss)"), "grow");
final JPanel auxPanel = new JPanel (new BorderLayout());
auxPanel.add(txt_currentDate , BorderLayout.CENTER);
auxPanel.add(updateDate, BorderLayout.EAST);
this.add(auxPanel, "grow, wrap");
if (isNiwActive)
{
this.add(new JLabel("WDM layer optical slot size (GHz)"), "grow");
final JPanel auxPanel2 = new JPanel (new BorderLayout());
auxPanel2.add(txt_niwOpticalSlotSizeGHz , BorderLayout.CENTER);
auxPanel2.add(update_niwOpticalSlotSizeGHz, BorderLayout.EAST);
this.add(auxPanel2, "grow, wrap");
}
// this.add(new JLabel("Click this button to update the tables"), "grow");
// this.add(updateTables, "grow, wrap");
this.add(new JLabel("Layer information"), "grow, spanx2, wrap");
this.add(layerTable.getTableScrollPane(), "grow, spanx 2");
networkAttributeTable.addKeyListener(new TableCursorNavigation());
}
// GETDECORATOR
public void updateNetPlanView(NetPlan currentState)
{
networkAttributeTable.setEnabled(false);
((DefaultTableModel) networkAttributeTable.getModel()).setDataVector(new Object[1][attributeTableHeader.length], attributeTableHeader);
networkTagTable.setEnabled(false);
((DefaultTableModel) networkTagTable.getModel()).setDataVector(new Object[1][tagTableHeader.length], tagTableHeader);
Map<String, String> networkAttributes = currentState.getAttributes();
if (!networkAttributes.isEmpty()) {
int networkAttributeId = 0;
Object[][] networkData = new Object[networkAttributes.size()][2];
for (Map.Entry<String, String> entry : networkAttributes.entrySet()) {
networkData[networkAttributeId][0] = entry.getKey();
networkData[networkAttributeId][1] = entry.getValue();
networkAttributeId++;
}
((DefaultTableModel) networkAttributeTable.getModel()).setDataVector(networkData, attributeTableHeader);
}
// Tag data
final Set<String> layerTags = currentState.getTags();
final String[] tagArray = layerTags.toArray(new String[layerTags.size()]);
if (!(tagArray.length == 0))
{
final Object[][] tagData = new Object[tagArray.length][1];
for (int i = 0; i < tagData.length; i++)
{
tagData[i][0] = tagArray[i];
}
((DefaultTableModel) networkTagTable.getModel()).setDataVector(tagData, tagTableHeader);
}
this.layerTable.updateView();
final SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
if (!txt_currentDate.getText().equals(df.format(currentState.getCurrentDate())))
txt_currentDate.setText(df.format(currentState.getCurrentDate()));
final boolean isNiwActive = networkViewer.getVisualizationState().isNiwDesignButtonActive() && networkViewer.isNiwValidCurrentDesign();
if (isNiwActive)
txt_niwOpticalSlotSizeGHz.setText("" + networkViewer.getNiwInfo().getSecond().getWdmOpticalSlotSizeInGHz());
txt_currentDate.setText(df.format(currentState.getCurrentDate()));
txt_networkName.setText(currentState.getName());
txt_networkDescription.setText(currentState.getDescription());
txt_networkDescription.setCaretPosition(0);
}
// GETTABLE
}
|
package fr.openwide.core.wicket.more.markup.html.template.js.jquery.plugins.fancybox;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxEventBehavior;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.odlabs.wiquery.core.behavior.WiQueryAbstractBehavior;
import org.odlabs.wiquery.core.commons.WiQueryResourceManager;
import org.odlabs.wiquery.core.javascript.JsStatement;
import org.odlabs.wiquery.core.javascript.JsUtils;
public class FancyboxPopupPanelBehavior extends WiQueryAbstractBehavior {
private static final long serialVersionUID = 6414097982857106898L;
private FancyboxPopupPanel fancyboxPopupPanel;
public FancyboxPopupPanelBehavior(FancyboxPopupPanel fancyboxPopupPanel) {
super();
this.fancyboxPopupPanel = fancyboxPopupPanel;
}
@Override
public void bind(Component link) {
super.bind(link);
link.add(new AjaxEventBehavior("onclick") {
private static final long serialVersionUID = -1194316821232521566L;
@Override
protected void onEvent(AjaxRequestTarget target) {
onPopupShow();
fancyboxPopupPanel.show(target);
DefaultTipsyFancybox fancybox = new FancyboxAnchor(fancyboxPopupPanel.getReplaceableContainer());
target.appendJavascript(new JsStatement().$().chain(fancybox).render().toString());
String markupId = "#" + fancyboxPopupPanel.getReplaceableContainer().getMarkupId();
target.appendJavascript(new JsStatement().append("document.lastPopupElement")
.append(" = ")
.append(JsUtils.quotes(markupId)).render().toString());
}
});
}
protected void onPopupShow() {
// override this to do things on popup show
}
@Override
public JsStatement statement() {
return null;
}
@Override
public void contribute(WiQueryResourceManager wiQueryResourceManager) {
super.contribute(wiQueryResourceManager);
wiQueryResourceManager.addJavaScriptResource(FancyboxJavaScriptResourceReference.get());
wiQueryResourceManager.addCssResource(FancyboxStyleSheetResourceReference.get());
}
}
|
package org.gemoc.sequential_addons.multidimensional.timeline.views.timeline;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.eclipse.ui.PlatformUI;
import org.gemoc.commons.eclipse.ui.ViewHelper;
import org.gemoc.executionframework.engine.mse.LogicalStep;
import org.gemoc.executionframework.engine.mse.MSEOccurrence;
import org.gemoc.xdsmlframework.api.core.EngineStatus.RunStatus;
import org.gemoc.xdsmlframework.api.core.IBasicExecutionEngine;
import org.gemoc.xdsmlframework.api.engine_addon.IEngineAddon;
import fr.inria.diverse.trace.gemoc.api.IMultiDimensionalTraceAddon;
public class MultidimentionalTimeLineOpenViewAddon implements IEngineAddon {
@Override
public void engineAboutToStart(IBasicExecutionEngine engine) {
// when selected in the addon from launch config, make sure to start the view
final IBasicExecutionEngine _engine = engine;
// make sure to have the view when starting the engine
PlatformUI.getWorkbench().getDisplay().syncExec(
new Runnable()
{
@Override
public void run() {
MultidimensionalTimeLineView timelineView;
timelineView = ViewHelper.showView(MultidimensionalTimeLineView.ID);
timelineView.configure(_engine);
}
});
}
@Override
public void engineStarted(IBasicExecutionEngine executionEngine) {
// TODO Auto-generated method stub
}
@Override
public void engineAboutToStop(IBasicExecutionEngine engine) {
// TODO Auto-generated method stub
}
@Override
public void engineStopped(IBasicExecutionEngine engine) {
// TODO Auto-generated method stub
}
@Override
public void engineAboutToDispose(IBasicExecutionEngine engine) {
// TODO Auto-generated method stub
}
@Override
public void engineStatusChanged(IBasicExecutionEngine engine,
RunStatus newStatus) {
// TODO Auto-generated method stub
}
@Override
public void aboutToSelectLogicalStep(IBasicExecutionEngine engine, Collection<LogicalStep> logicalSteps) {
// TODO Auto-generated method stub
}
@Override
public void proposedLogicalStepsChanged(IBasicExecutionEngine engine, Collection<LogicalStep> logicalSteps) {
// TODO Auto-generated method stub
}
@Override
public void logicalStepSelected(IBasicExecutionEngine engine, LogicalStep selectedLogicalStep) {
// TODO Auto-generated method stub
}
@Override
public void aboutToExecuteLogicalStep(IBasicExecutionEngine engine, LogicalStep logicalStepToExecute) {
// TODO Auto-generated method stub
}
@Override
public void logicalStepExecuted(IBasicExecutionEngine engine, LogicalStep logicalStepExecuted) {
// TODO Auto-generated method stub
}
@Override
public void aboutToExecuteMSEOccurrence(IBasicExecutionEngine engine, MSEOccurrence mseOccurrence) {
// TODO Auto-generated method stub
}
@Override
public void mseOccurrenceExecuted(IBasicExecutionEngine engine, MSEOccurrence mseOccurrence) {
// TODO Auto-generated method stub
}
@Override
public List<String> validate(List<IEngineAddon> otherAddons) {
ArrayList<String> errors = new ArrayList<String>();
boolean found = false;
for (IEngineAddon iEngineAddon : otherAddons) {
if( iEngineAddon instanceof IMultiDimensionalTraceAddon){
found = true;
break;
}
}
if(!found){
errors.add("MultidimentionalTimeLineOpenViewAddon can't run without IMultiDimensionalTraceAddon");
}
return errors;
}
}
|
package org.kie.workbench.common.services.datamodel.backend.server.cache;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import javax.inject.Named;
import org.drools.core.rule.TypeMetaInfo;
import org.drools.workbench.models.commons.shared.imports.Import;
import org.drools.workbench.models.commons.shared.imports.Imports;
import org.drools.workbench.models.commons.shared.oracle.model.TypeSource;
import org.drools.workbench.models.commons.shared.oracle.ProjectDataModelOracle;
import org.guvnor.common.services.backend.cache.LRUCache;
import org.guvnor.common.services.builder.Builder;
import org.guvnor.common.services.builder.LRUBuilderCache;
import org.guvnor.common.services.project.builder.events.InvalidateDMOProjectCacheEvent;
import org.guvnor.common.services.project.model.Project;
import org.guvnor.common.services.project.model.ProjectImports;
import org.guvnor.common.services.project.service.POMService;
import org.guvnor.common.services.project.service.ProjectService;
import org.kie.commons.io.IOService;
import org.kie.commons.java.nio.file.Files;
import org.kie.commons.validation.PortablePreconditions;
import org.kie.scanner.KieModuleMetaData;
import org.kie.workbench.common.services.datamodel.backend.server.builder.projects.ProjectDataModelOracleBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.uberfire.backend.server.util.Paths;
import org.uberfire.backend.vfs.Path;
/**
* A simple LRU cache for Project DataModelOracles
*/
@ApplicationScoped
@Named("ProjectDataModelOracleCache")
public class LRUProjectDataModelOracleCache extends LRUCache<Project, ProjectDataModelOracle> {
private static final Logger log = LoggerFactory.getLogger( LRUProjectDataModelOracleCache.class );
private final static String DEFAULTPKG = "defaultpkg";
@Inject
private Paths paths;
@Inject
private POMService pomService;
@Inject
@Named("ioStrategy")
private IOService ioService;
@Inject
private ProjectService projectService;
@Inject
private LRUBuilderCache cache;
public synchronized void invalidateProjectCache( @Observes final InvalidateDMOProjectCacheEvent event ) {
PortablePreconditions.checkNotNull( "event",
event );
final Path resourcePath = event.getResourcePath();
final Project project = projectService.resolveProject( resourcePath );
//If resource was not within a Project there's nothing to invalidate
if ( project != null ) {
invalidateCache( project );
}
}
//Check the ProjectOracle for the Project has been created, otherwise create one!
public synchronized ProjectDataModelOracle assertProjectDataModelOracle( final Project project ) {
ProjectDataModelOracle projectOracle = getEntry( project );
if ( projectOracle == null ) {
projectOracle = makeProjectOracle( project );
setEntry( project,
projectOracle );
}
return projectOracle;
}
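// Minimal usage sketch (hypothetical caller, variable name invented):
//   ProjectDataModelOracle oracle = dmoCache.assertProjectDataModelOracle(project);
// The method is synchronized and builds the oracle lazily on a cache miss, so repeated calls for
// the same Project reuse the cached instance until invalidateProjectCache() evicts it.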
private ProjectDataModelOracle makeProjectOracle( final Project project ) {
//Get a Builder for the project
final Builder builder = cache.assertBuilder( project );
//Create the ProjectOracle...
final KieModuleMetaData kieModuleMetaData = KieModuleMetaData.Factory.newKieModuleMetaData( builder.getKieModuleIgnoringErrors() );
final ProjectDataModelOracleBuilder pdBuilder = ProjectDataModelOracleBuilder.newProjectOracleBuilder();
// Add all packages
pdBuilder.addPackages(kieModuleMetaData.getPackages());
//Add all classes from the KieModule metaData
for ( final String packageName : kieModuleMetaData.getPackages() ) {
for ( final String className : kieModuleMetaData.getClasses( packageName ) ) {
final Class clazz = kieModuleMetaData.getClass( packageName,
className );
final TypeMetaInfo typeMetaInfo = kieModuleMetaData.getTypeMetaInfo( clazz );
final TypeSource typeSource = builder.getClassSource( kieModuleMetaData,
clazz );
try {
pdBuilder.addClass( clazz,
typeMetaInfo.isEvent(),
typeSource );
} catch ( IOException ioe ) {
log.error( ioe.getMessage() );
}
}
}
//Add external imports. The availability of these classes is checked in Builder and failed fast. Here we load them into the DMO
final org.kie.commons.java.nio.file.Path nioExternalImportsPath = paths.convert( project.getImportsPath() );
if ( Files.exists( nioExternalImportsPath ) ) {
final Path externalImportsPath = paths.convert( nioExternalImportsPath );
final ProjectImports projectImports = projectService.load( externalImportsPath );
final Imports imports = projectImports.getImports();
for ( final Import item : imports.getImports() ) {
try {
Class clazz = this.getClass().getClassLoader().loadClass( item.getType() );
pdBuilder.addClass( clazz );
} catch ( ClassNotFoundException cnfe ) {
//This should not happen as Builder would have failed to load them and failed fast.
log.error( cnfe.getMessage() );
} catch ( IOException ioe ) {
log.error( ioe.getMessage() );
}
}
}
addAllRuleNames( builder, pdBuilder, project );
return pdBuilder.build();
}
private void addAllRuleNames(
Builder builder,
ProjectDataModelOracleBuilder pdBuilder,
Project project) {
final KieModuleMetaData kieModuleMetaData = KieModuleMetaData.Factory.newKieModuleMetaData(builder.getKieModuleIgnoringErrors());
final List<String> ruleNames = new ArrayList<String>();
for (org.guvnor.common.services.project.model.Package pkg : projectService.resolvePackages(project)) {
String packageName = pkg.getPackageName();
if (packageName.isEmpty()) {
packageName = DEFAULTPKG;
}
ruleNames.addAll(kieModuleMetaData.getRuleNamesInPackage(packageName));
}
pdBuilder.addRuleNames( ruleNames );
}
}
|
package fr.pizzeria.admin.jaxrs;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import fr.pizzeria.admin.metier.PizzaService;
import fr.pizzeria.exception.DaoException;
import fr.pizzeria.model.Pizza;
@Path("/pizzas")
public class PizzaResource {
@Inject private PizzaService pizzaService;
public PizzaResource() {
// TODO Auto-generated constructor stub
}
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response findAllPizzas() throws DaoException {
ResponseBuilder respBuilder = Response.ok();
respBuilder.entity(pizzaService.findAllPizzas());
respBuilder.header("Access-Control-Allow-Origin", "http://localhost");
return respBuilder.build();
}
@POST
@Consumes(MediaType.APPLICATION_JSON)
public void savePizza(Pizza newPizza) throws DaoException {
pizzaService.savePizza(newPizza);
// return newPizza;
}
@PUT
@Consumes(MediaType.APPLICATION_JSON)
public void updatePizza(Pizza updatePizza) throws DaoException {
pizzaService.updatePizza(updatePizza.getCode(), updatePizza);
// return updatePizza;
}
@DELETE
@Path("/{code}")
public void deletePizza(@PathParam("code") String codePizza) throws DaoException {
pizzaService.deletePizza(codePizza);
}
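// Endpoint summary derived from the annotations above (the base URI depends on the JAX-RS
// application configuration and is not shown here):
//   GET    /pizzas        -> JSON list of all pizzas (adds an Access-Control-Allow-Origin header)
//   POST   /pizzas        -> create a pizza from a JSON body
//   PUT    /pizzas        -> update the pizza identified by the code in the JSON body
//   DELETE /pizzas/{code} -> delete the pizza with the given code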
}
|
/*
* @author max
*/
package com.intellij.openapi.vfs.newvfs.persistent;
import com.intellij.concurrency.JobScheduler;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.Forceable;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.IntArrayList;
import com.intellij.util.io.MappedFile;
import com.intellij.util.io.PersistentStringEnumerator;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.io.storage.Storage;
import gnu.trove.TObjectIntHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.*;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
@SuppressWarnings({"PointlessArithmeticExpression", "HardCodedStringLiteral"})
public class FSRecords implements Disposable, Forceable {
private static final Logger LOG = Logger.getInstance("#com.intellij.vfs.persistent.FSRecords");
private static final int VERSION = 7;
private static final int PARENT_OFFSET = 0;
private static final int PARENT_SIZE = 4;
private static final int NAME_OFFSET = PARENT_OFFSET + PARENT_SIZE;
private static final int NAME_SIZE = 4;
private static final int FLAGS_OFFSET = NAME_OFFSET + NAME_SIZE;
private static final int FLAGS_SIZE = 4;
private static final int ATTREF_OFFSET = FLAGS_OFFSET + FLAGS_SIZE;
private static final int ATTREF_SIZE = 4;
private static final int TIMESTAMP_OFFSET = ATTREF_OFFSET + ATTREF_SIZE;
private static final int TIMESTAMP_SIZE = 8;
private static final int MODCOUNT_OFFSET = TIMESTAMP_OFFSET + TIMESTAMP_SIZE;
private static final int MODCOUNT_SIZE = 4;
private static final int LENGTH_OFFSET = MODCOUNT_OFFSET + MODCOUNT_SIZE;
private static final int LENGTH_SIZE = 8;
private static final int RECORD_SIZE = LENGTH_OFFSET + LENGTH_SIZE;
private static final byte[] ZEROES = new byte[RECORD_SIZE];
private static final int HEADER_VERSION_OFFSET = 0;
private static final int HEADER_FREE_RECORD_OFFSET = 4;
private static final int HEADER_GLOBAL_MODCOUNT_OFFSET = 8;
private static final int HEADER_CONNECTION_STATUS_OFFSET = 12;
private static final int HEADER_SIZE = HEADER_CONNECTION_STATUS_OFFSET + 4;
private static final int CONNECTED_MAGIC = 0x12ad34e4;
private static final int SAFELY_CLOSED_MAGIC = 0x1f2f3f4f;
private static final int CORRUPTED_MAGIC = 0xabcf7f7f;
private static final String CHILDREN_ATT = "FsRecords.DIRECTORY_CHILDREN";
private static final Object lock = new Object();
private DbConnection myConnection;
private static int ourLocalModificationCount = 0;
private static final int FREE_RECORD_FLAG = 0x100;
private static final int ALL_VALID_FLAGS = PersistentFS.ALL_VALID_FLAGS | FREE_RECORD_FLAG;
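// On-disk record layout implied by the offsets above (one RECORD_SIZE slot per file id);
// descriptive comment, values follow directly from the constants:
//   [0..3]   parent id      [4..7]   name id (myNames enumerator)
//   [8..11]  flags          [12..15] attribute record id
//   [16..23] timestamp      [24..27] mod count
//   [28..35] length         -> RECORD_SIZE = 36 bytes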
static {
//noinspection ConstantConditions
assert HEADER_SIZE <= RECORD_SIZE;
}
private static class DbConnection {
private static int refCount = 0;
private static final Object LOCK = new Object();
private static final TObjectIntHashMap<String> myAttributeIds = new TObjectIntHashMap<String>();
private static PersistentStringEnumerator myNames;
private static Storage myAttributes;
private static MappedFile myRecords;
private static boolean myDirty = false;
private static ScheduledFuture<?> myFlushingFuture;
private static boolean myCorrupted = false;
public static DbConnection connect() {
synchronized (LOCK) {
if (refCount == 0) {
init();
setupFlushing();
}
refCount++;
}
return new DbConnection();
}
private static void createBrokenMarkerFile() {
File brokenMarker = getCorruptionMarkerFile();
try {
final FileWriter writer = new FileWriter(brokenMarker);
writer.write("These files are corrupted and must be rebuilt from the scratch on next startup");
writer.close();
}
catch (IOException e) {
// No luck.
}
}
private static File getCorruptionMarkerFile() {
File basePath = new File(PathManager.getSystemPath() + "/caches/");
File brokenMarker = new File(basePath, "corruption.marker");
return brokenMarker;
}
private static void init() {
File basePath = new File(PathManager.getSystemPath() + "/caches/");
basePath.mkdirs();
final File namesFile = new File(basePath, "names.dat");
final File attributesFile = new File(basePath, "attrib.dat");
final File recordsFile = new File(basePath, "records.dat");
try {
if (getCorruptionMarkerFile().exists()) {
throw new IOException("Corruption marker file found");
}
myNames = new PersistentStringEnumerator(namesFile);
myAttributes = Storage.create(attributesFile.getCanonicalPath());
myRecords = new MappedFile(recordsFile, 20 * 1024);
if (myRecords.length() == 0) {
cleanRecord(0); // Clean header
cleanRecord(1); // Create root record
setCurrentVersion();
}
if (getVersion() != VERSION) {
throw new IOException("FS repository version mismatch");
}
if (myRecords.getInt(HEADER_CONNECTION_STATUS_OFFSET) != SAFELY_CLOSED_MAGIC) {
throw new IOException("FS repostiory wasn't safely shut down");
}
markDirty();
}
catch (IOException e) {
LOG.info("Filesystem storage is corrupted or does not exist. [Re]Building. Reason: " + e.getMessage());
try {
closeFiles();
boolean deleted = FileUtil.delete(getCorruptionMarkerFile()) &&
FileUtil.delete(namesFile) &&
Storage.deleteFiles(attributesFile.getCanonicalPath()) &&
FileUtil.delete(recordsFile);
if (!deleted) {
throw new IOException("Cannot delete filesystem storage files");
}
}
catch (IOException e1) {
throw new RuntimeException("Can't rebuild filesystem storage ", e1);
}
init();
}
}
private static void markDirty() throws IOException {
if (!myDirty) {
myDirty = true;
myRecords.putInt(HEADER_CONNECTION_STATUS_OFFSET, CONNECTED_MAGIC);
}
}
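// Crash-detection summary (descriptive, derived from this class): the connection-status word is
// set to CONNECTED_MAGIC on the first mutation after startup (markDirty), back to
// SAFELY_CLOSED_MAGIC on a clean flush or close (markClean), and to CORRUPTED_MAGIC once an
// IOException has been handled (handleError). init() refuses storage whose status is not
// SAFELY_CLOSED_MAGIC and rebuilds it from scratch.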
private static void setupFlushing() {
myFlushingFuture = JobScheduler.getScheduler().scheduleAtFixedRate(new Runnable() {
int lastModCount = 0;
public void run() {
if (lastModCount == ourLocalModificationCount && !HeavyProcessLatch.INSTANCE.isRunning()) {
force();
}
lastModCount = ourLocalModificationCount;
}
}, 5000, 5000, TimeUnit.MILLISECONDS);
}
public static void force() {
synchronized (lock) {
try {
markClean();
}
catch (IOException e) {
// Ignore
}
myNames.force();
myAttributes.force();
myRecords.force();
}
}
public static boolean isDirty() {
return myDirty || myNames.isDirty() || myAttributes.isDirty() || myRecords.isDirty();
}
private static int getVersion() throws IOException {
final int storageVersion = myAttributes.getVersion();
final int recordsVersion = myRecords.getInt(HEADER_VERSION_OFFSET);
if (storageVersion != recordsVersion) return -1;
return recordsVersion;
}
private static void setCurrentVersion() throws IOException {
myRecords.putInt(HEADER_VERSION_OFFSET, VERSION);
myAttributes.setVersion(VERSION);
myRecords.putInt(HEADER_CONNECTION_STATUS_OFFSET, SAFELY_CLOSED_MAGIC);
}
public static void cleanRecord(final int id) throws IOException {
myRecords.put(id * RECORD_SIZE, ZEROES, 0, RECORD_SIZE);
}
public static PersistentStringEnumerator getNames() {
return myNames;
}
public static Storage getAttributes() {
return myAttributes;
}
public static MappedFile getRecords() {
return myRecords;
}
public void dispose() throws IOException {
synchronized (LOCK) {
refCount--;
if (refCount == 0) {
closeFiles();
}
}
}
private static void closeFiles() throws IOException {
if (myFlushingFuture != null) {
myFlushingFuture.cancel(false);
myFlushingFuture = null;
}
if (myNames != null) {
myNames.close();
myNames = null;
}
if (myAttributes != null) {
myAttributes.dispose();
myAttributes = null;
}
if (myRecords != null) {
markClean();
myRecords.close();
myRecords = null;
}
}
private static void markClean() throws IOException {
if (myDirty) {
myDirty = false;
myRecords.putInt(HEADER_CONNECTION_STATUS_OFFSET, myCorrupted ? CORRUPTED_MAGIC : SAFELY_CLOSED_MAGIC);
}
}
private static int getAttributeId(String attId) throws IOException {
if (myAttributeIds.containsKey(attId)) {
return myAttributeIds.get(attId);
}
int id = myNames.enumerate(attId);
myAttributeIds.put(attId, id);
return id;
}
private static RuntimeException handleError(final IOException e) {
if (!myCorrupted) {
createBrokenMarkerFile();
myCorrupted = true;
force();
}
return new RuntimeException(e);
}
}
public FSRecords() {
}
public void connect() {
myConnection = DbConnection.connect();
}
private static MappedFile getRecords() {
return DbConnection.getRecords();
}
private static Storage getAttributes() {
return DbConnection.getAttributes();
}
public static PersistentStringEnumerator getNames() {
return DbConnection.getNames();
}
public static int createRecord() {
synchronized (lock) {
try {
DbConnection.markDirty();
final int next = getRecords().getInt(HEADER_FREE_RECORD_OFFSET);
if (next == 0) {
final int filelength = (int)getRecords().length();
LOG.assertTrue(filelength % RECORD_SIZE == 0);
int result = filelength / RECORD_SIZE;
DbConnection.cleanRecord(result);
return result;
}
else {
getRecords().putInt(HEADER_FREE_RECORD_OFFSET, getNextFree(next));
setNextFree(next, 0);
return next;
}
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public void deleteRecordRecursively(int id) {
synchronized (lock) {
try {
DbConnection.markDirty();
incModCount(id);
doDeleteRecursively(id);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
private void doDeleteRecursively(final int id) {
for (int subrecord : list(id)) {
doDeleteRecursively(subrecord);
}
deleteRecord(id);
}
private void deleteRecord(final int id) {
synchronized (lock) {
try {
DbConnection.markDirty();
int att_page = getAttributeRecordId(id);
if (att_page != 0) {
final DataInputStream attStream = getAttributes().readStream(att_page);
while (attStream.available() > 0) {
attStream.readInt(); // Attribute ID;
int attAddress = attStream.readInt();
getAttributes().deleteRecord(attAddress);
}
attStream.close();
getAttributes().deleteRecord(att_page);
}
DbConnection.cleanRecord(id);
addToFreeRecordsList(id);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
private void addToFreeRecordsList(int id) throws IOException {
final int next = getRecords().getInt(HEADER_FREE_RECORD_OFFSET);
setNextFree(id, next);
setFlags(id, FREE_RECORD_FLAG, false);
getRecords().putInt(HEADER_FREE_RECORD_OFFSET, id);
}
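// Free-list note (descriptive, derived from this method and createRecord()): deleted record ids
// are chained through their PARENT field (getNextFree/setNextFree reuse it), with the list head
// stored at HEADER_FREE_RECORD_OFFSET; createRecord() pops from this list before growing the file.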
public int[] listRoots() throws IOException {
synchronized (lock) {
DbConnection.markDirty();
final DataInputStream input = readAttribute(1, CHILDREN_ATT);
if (input == null) return ArrayUtil.EMPTY_INT_ARRAY;
int[] result;
try {
final int count = input.readInt();
result = new int[count];
for (int i = 0; i < count; i++) {
input.readInt(); // Name
result[i] = input.readInt();
}
}
finally {
input.close();
}
return result;
}
}
public void force() {
DbConnection.force();
}
public boolean isDirty() {
return DbConnection.isDirty();
}
public int findRootRecord(String rootUrl) throws IOException {
synchronized (lock) {
DbConnection.markDirty();
final int root = getNames().enumerate(rootUrl);
final DataInputStream input = readAttribute(1, CHILDREN_ATT);
int[] names = ArrayUtil.EMPTY_INT_ARRAY;
int[] ids = ArrayUtil.EMPTY_INT_ARRAY;
if (input != null) {
try {
final int count = input.readInt();
names = new int[count];
ids = new int[count];
for (int i = 0; i < count; i++) {
final int name = input.readInt();
final int id = input.readInt();
if (name == root) {
return id;
}
names[i] = name;
ids[i] = id;
}
}
finally {
input.close();
}
}
final DataOutputStream output = writeAttribute(1, CHILDREN_ATT);
int id;
try {
id = createRecord();
output.writeInt(names.length + 1);
for (int i = 0; i < names.length; i++) {
output.writeInt(names[i]);
output.writeInt(ids[i]);
}
output.writeInt(root);
output.writeInt(id);
}
finally {
output.close();
}
return id;
}
}
public void deleteRootRecord(int id) throws IOException {
synchronized (lock) {
DbConnection.markDirty();
final DataInputStream input = readAttribute(1, CHILDREN_ATT);
assert input != null;
int count;
int[] names;
int[] ids;
try {
count = input.readInt();
names = new int[count];
ids = new int[count];
for (int i = 0; i < count; i++) {
names[i] = input.readInt();
ids[i] = input.readInt();
}
}
finally {
input.close();
}
final int index = ArrayUtil.find(ids, id);
assert index >= 0;
names = ArrayUtil.remove(names, index);
ids = ArrayUtil.remove(ids, index);
final DataOutputStream output = writeAttribute(1, CHILDREN_ATT);
try {
output.writeInt(count - 1);
for (int i = 0; i < names.length; i++) {
output.writeInt(names[i]);
output.writeInt(ids[i]);
}
}
finally {
output.close();
}
}
}
public int[] list(int id) {
synchronized (lock) {
try {
final DataInputStream input = readAttribute(id, CHILDREN_ATT);
if (input == null) return new int[0];
final int count = input.readInt();
final int[] result = new int[count];
for (int i = 0; i < count; i++) {
result[i] = input.readInt();
}
input.close();
return result;
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public boolean wereChildrenAccessed(int id) {
try {
synchronized (lock) {
int encodedAttId = DbConnection.getAttributeId(CHILDREN_ATT);
final int att = findAttributePage(id, encodedAttId, false);
return att != 0;
}
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
public void updateList(int id, int[] children) {
synchronized (lock) {
try {
DbConnection.markDirty();
final DataOutputStream record = writeAttribute(id, CHILDREN_ATT);
record.writeInt(children.length);
for (int child : children) {
if (child == id) {
LOG.error("Cyclic parent child relations");
}
else {
record.writeInt(child);
}
}
record.close();
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
private void incModCount(int id) throws IOException {
ourLocalModificationCount++;
final int count = getModCount() + 1;
getRecords().putInt(HEADER_GLOBAL_MODCOUNT_OFFSET, count);
int parent = id;
while (parent != 0) {
setModCount(parent, count);
parent = getParent(parent);
}
}
public static int getModCount() {
synchronized (lock) {
try {
return getRecords().getInt(HEADER_GLOBAL_MODCOUNT_OFFSET);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public static int getParent(int id) {
synchronized (lock) {
try {
final int parentId = getRecords().getInt(id * RECORD_SIZE + PARENT_OFFSET);
if (parentId == id) {
LOG.error("Cyclic parent child relations in the database. id = " + id);
return 0;
}
return parentId;
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public void setParent(int id, int parent) {
if (id == parent) {
LOG.error("Cyclic parent/child relations");
return;
}
synchronized (lock) {
try {
DbConnection.markDirty();
incModCount(id);
getRecords().putInt(id * RECORD_SIZE + PARENT_OFFSET, parent);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
private static int getNextFree(int id) {
return getParent(id);
}
private static void setNextFree(int id, int next) {
try {
getRecords().putInt(id * RECORD_SIZE + PARENT_OFFSET, next);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
public static String getName(int id) {
synchronized (lock) {
try {
final int nameId = getRecords().getInt(id * RECORD_SIZE + NAME_OFFSET);
return nameId != 0 ? getNames().valueOf(nameId) : "";
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public void setName(int id, String name) {
synchronized (lock) {
try {
DbConnection.markDirty();
incModCount(id);
getRecords().putInt(id * RECORD_SIZE + NAME_OFFSET, getNames().enumerate(name));
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public static int getFlags(int id) {
synchronized (lock) {
try {
return getRecords().getInt(id * RECORD_SIZE + FLAGS_OFFSET);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public void setFlags(int id, int flags, final boolean markAsChange) {
synchronized (lock) {
try {
if (markAsChange) {
DbConnection.markDirty();
incModCount(id);
}
getRecords().putInt(id * RECORD_SIZE + FLAGS_OFFSET, flags);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public static long getLength(int id) {
synchronized (lock) {
try {
return getRecords().getLong(id * RECORD_SIZE + LENGTH_OFFSET);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public void setLength(int id, long len) {
synchronized (lock) {
try {
DbConnection.markDirty();
incModCount(id);
getRecords().putLong(id * RECORD_SIZE + LENGTH_OFFSET, len);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public static long getTimestamp(int id) {
synchronized (lock) {
try {
return getRecords().getLong(id * RECORD_SIZE + TIMESTAMP_OFFSET);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public void setTimestamp(int id, long value) {
synchronized (lock) {
try {
DbConnection.markDirty();
incModCount(id);
getRecords().putLong(id * RECORD_SIZE + TIMESTAMP_OFFSET, value);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
public static int getModCount(int id) {
synchronized (lock) {
try {
return getRecords().getInt(id * RECORD_SIZE + MODCOUNT_OFFSET);
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
private static void setModCount(int id, int value) throws IOException {
getRecords().putInt(id * RECORD_SIZE + MODCOUNT_OFFSET, value);
}
private static int getAttributeRecordId(final int id) throws IOException {
return getRecords().getInt(id * RECORD_SIZE + ATTREF_OFFSET);
}
@Nullable
public DataInputStream readAttribute(int id, String attId) {
try {
synchronized (attId) {
final int att;
synchronized (lock) {
int encodedAttId = DbConnection.getAttributeId(attId);
att = findAttributePage(id, encodedAttId, false);
if (att == 0) return null;
}
return getAttributes().readStream(att);
}
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
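/**
* Resolves the attribute-storage record holding the value of the given attribute for the given file.
* The per-file attribute record is a sequence of (attributeId, valueRecordId) int pairs; when
* createIfNotFound is true, a missing pair (and, if needed, the attribute record itself) is created.
*/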
private int findAttributePage(int fileId, int attributeId, boolean createIfNotFound) throws IOException {
assert fileId > 0;
assert (getFlags(fileId) & FREE_RECORD_FLAG) == 0; // TODO: This assertion is a bit timing-sensitive; will remove when the bug is caught.
int attrsRecord = getAttributeRecordId(fileId);
if (attrsRecord == 0) {
if (!createIfNotFound) return 0;
attrsRecord = getAttributes().createNewRecord();
getRecords().putInt(fileId * RECORD_SIZE + ATTREF_OFFSET, attrsRecord);
}
else {
final DataInputStream attrRefs = getAttributes().readStream(attrsRecord);
try {
while (attrRefs.available() > 0) {
final int attIdOnPage = attrRefs.readInt();
final int attAddress = attrRefs.readInt();
if (attIdOnPage == attributeId) return attAddress;
}
}
finally {
attrRefs.close();
}
}
if (createIfNotFound) {
Storage.AppenderStream appender = getAttributes().appendStream(attrsRecord);
appender.writeInt(attributeId);
int attAddress = getAttributes().createNewRecord();
appender.writeInt(attAddress);
appender.close();
return attAddress;
}
return 0;
}
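/**
* Buffers attribute bytes in memory and copies them into the attribute storage page of the file
* when the stream is closed.
*/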
private class AttributeOutputStream extends DataOutputStream {
private final String myAttributeId;
private final int myFileId;
private AttributeOutputStream(final int fileId, final String attributeId) {
super(new ByteArrayOutputStream());
myFileId = fileId;
myAttributeId = attributeId;
}
public void close() throws IOException {
super.close();
try {
synchronized (myAttributeId) {
final int att;
synchronized (lock) {
DbConnection.markDirty();
incModCount(myFileId);
final int encodedAttId = DbConnection.getAttributeId(myAttributeId);
att = findAttributePage(myFileId, encodedAttId, true);
}
final DataOutputStream sinkStream = getAttributes().writeStream(att);
sinkStream.write(((ByteArrayOutputStream)out).toByteArray());
sinkStream.close();
}
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
@NotNull
public DataOutputStream writeAttribute(final int id, final String attId) {
return new AttributeOutputStream(id, attId);
}
public void dispose() {
synchronized (lock) {
try {
DbConnection.force();
DbConnection.closeFiles();
}
catch (IOException e) {
throw DbConnection.handleError(e);
}
}
}
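/**
* Walks every record and the free list, asserting the structural invariants of the storage:
* valid flags, parent links, names, attribute records and free-list consistency.
*/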
public static void checkSanity() {
long startTime = System.currentTimeMillis();
synchronized (lock) {
final int fileLength = (int)getRecords().length();
assert fileLength % RECORD_SIZE == 0;
int recordCount = fileLength / RECORD_SIZE;
IntArrayList freeRecordIds = new IntArrayList();
IntArrayList usedAttributeRecordIds = new IntArrayList();
IntArrayList validAttributeIds = new IntArrayList();
for(int id=2; id<recordCount; id++) {
int flags = getFlags(id);
assert (flags & ~ALL_VALID_FLAGS) == 0;
if ((flags & FREE_RECORD_FLAG) != 0) {
freeRecordIds.add(id);
}
else {
checkRecordSanity(id, recordCount, usedAttributeRecordIds, validAttributeIds);
}
}
try {
checkFreeListSanity(freeRecordIds);
}
catch (IOException ex) {
throw DbConnection.handleError(ex);
}
}
long endTime = System.currentTimeMillis();
System.out.println("Sanity check took " + (endTime-startTime) + " ms");
}
private static void checkRecordSanity(final int id, final int recordCount, final IntArrayList usedAttributeRecordIds,
final IntArrayList validAttributeIds) {
int parentId = getParent(id);
assert parentId >= 0 && parentId < recordCount;
if (parentId > 0) {
final int parentFlags = getFlags(parentId);
assert (parentFlags & FREE_RECORD_FLAG) == 0;
assert (parentFlags & PersistentFS.IS_DIRECTORY_FLAG) != 0;
}
String name = getName(id);
assert parentId > 0 || name.length() > 0: "File with empty name found under " + getName(parentId);
int attributeRecordId;
try {
attributeRecordId = getAttributeRecordId(id);
}
catch(IOException ex) {
throw DbConnection.handleError(ex);
}
assert attributeRecordId >= 0;
if (attributeRecordId > 0) {
try {
checkAttributesSanity(attributeRecordId, usedAttributeRecordIds, validAttributeIds);
}
catch (IOException ex) {
throw DbConnection.handleError(ex);
}
}
long length = getLength(id);
assert length >= -1: "Invalid file length found for " + name + ": " + length;
}
private static void checkAttributesSanity(final int attributeRecordId, final IntArrayList usedAttributeRecordIds,
final IntArrayList validAttributeIds) throws IOException {
assert !usedAttributeRecordIds.contains(attributeRecordId);
usedAttributeRecordIds.add(attributeRecordId);
final DataInputStream dataInputStream = getAttributes().readStream(attributeRecordId);
try {
final int streamSize = dataInputStream.available();
assert (streamSize % 8) == 0;
for(int i=0; i<streamSize / 8; i++) {
int attId = dataInputStream.readInt();
int attDataRecordId = dataInputStream.readInt();
assert !usedAttributeRecordIds.contains(attDataRecordId);
usedAttributeRecordIds.add(attDataRecordId);
if (!validAttributeIds.contains(attId)) {
assert getNames().valueOf(attId).length() > 0;
validAttributeIds.add(attId);
}
getAttributes().checkSanity(attDataRecordId);
}
}
finally {
dataInputStream.close();
}
}
private static void checkFreeListSanity(final IntArrayList freeRecordIds) throws IOException {
int freeRecordCount = 0;
int next = getRecords().getInt(HEADER_FREE_RECORD_OFFSET);
while(next > 0) {
freeRecordCount++;
assert freeRecordIds.contains(next);
next = getNextFree(next);
}
assert freeRecordCount == freeRecordIds.size(): "Found " + freeRecordIds.size() + " total free records and only " + freeRecordCount + " records in free list";
}
}
|
package com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1;
import com.bitdubai.fermat_api.CantStartPluginException;
import com.bitdubai.fermat_api.Plugin;
import com.bitdubai.fermat_api.Service;
import com.bitdubai.fermat_api.layer.all_definition.developer.DatabaseManagerForDevelopers;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperDatabase;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperDatabaseTable;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperDatabaseTableRecord;
import com.bitdubai.fermat_api.layer.all_definition.developer.DeveloperObjectFactory;
import com.bitdubai.fermat_api.layer.all_definition.developer.LogManagerForDevelopers;
import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
import com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus;
import com.bitdubai.fermat_api.layer.all_definition.money.CryptoAddress;
import com.bitdubai.fermat_api.layer.all_definition.transaction_transference_protocol.TransactionProtocolManager;
import com.bitdubai.fermat_api.layer.all_definition.transaction_transference_protocol.crypto_transactions.CryptoStatus;
import com.bitdubai.fermat_api.layer.all_definition.transaction_transference_protocol.crypto_transactions.CryptoTransaction;
import com.bitdubai.fermat_api.layer.dmp_world.wallet.exceptions.CantStartAgentException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.Database;
import com.bitdubai.fermat_api.layer.osa_android.database_system.DealsWithPluginDatabaseSystem;
import com.bitdubai.fermat_api.layer.osa_android.database_system.PluginDatabaseSystem;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantCreateDatabaseException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantOpenDatabaseException;
import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.DatabaseNotFoundException;
import com.bitdubai.fermat_api.layer.osa_android.file_system.DealsWithPluginFileSystem;
import com.bitdubai.fermat_api.layer.osa_android.file_system.PluginFileSystem;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.DealsWithLogger;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogLevel;
import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogManager;
import com.bitdubai.fermat_cry_api.layer.crypto_vault.exceptions.CoultNotCreateCryptoTransaction;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.DealsWithErrors;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedPluginExceptionSeverity;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.interfaces.DealsWithEvents;
import com.bitdubai.fermat_api.layer.all_definition.events.interfaces.FermatEventListener;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.interfaces.EventManager;
import com.bitdubai.fermat_pip_api.layer.pip_user.device_user.exceptions.CantGetLoggedInDeviceUserException;
import com.bitdubai.fermat_pip_api.layer.pip_user.device_user.interfaces.DealsWithDeviceUser;
import com.bitdubai.fermat_pip_api.layer.pip_user.device_user.interfaces.DeviceUserManager;
import com.bitdubai.fermat_cry_api.layer.crypto_network.bitcoin.BitcoinCryptoNetworkManager;
import com.bitdubai.fermat_cry_api.layer.crypto_network.bitcoin.DealsWithBitcoinCryptoNetwork;
import com.bitdubai.fermat_cry_api.layer.crypto_network.bitcoin.exceptions.CantConnectToBitcoinNetwork;
import com.bitdubai.fermat_cry_api.layer.crypto_network.bitcoin.exceptions.CantCreateCryptoWalletException;
import com.bitdubai.fermat_cry_api.layer.crypto_vault.CryptoVaultManager;
import com.bitdubai.fermat_cry_api.layer.crypto_vault.exceptions.CouldNotGetCryptoStatusException;
import com.bitdubai.fermat_cry_api.layer.crypto_vault.exceptions.CouldNotSendMoneyException;
import com.bitdubai.fermat_cry_api.layer.crypto_vault.exceptions.CryptoTransactionAlreadySentException;
import com.bitdubai.fermat_cry_api.layer.crypto_vault.exceptions.InvalidSendToAddressException;
import com.bitdubai.fermat_cry_api.layer.crypto_vault.exceptions.VaultNotConnectedToNetworkException;
import com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.exceptions.CantExecuteQueryException;
import com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.exceptions.UnexpectedResultReturnedFromDatabaseException;
import com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.BitcoinCryptoVault;
import com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.CryptoVaultDatabaseFactory;
import com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.developerUtils.DeveloperDatabaseFactory;
import com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.events.TransactionNotificationAgent;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Pattern;
public class BitcoinCryptoVaultPluginRoot implements CryptoVaultManager, DatabaseManagerForDevelopers, DealsWithBitcoinCryptoNetwork, DealsWithEvents, DealsWithErrors, DealsWithPluginDatabaseSystem, DealsWithDeviceUser, DealsWithLogger, DealsWithPluginFileSystem, LogManagerForDevelopers, Plugin, Service {
/**
* BitcoinCryptoVaultPluginRoot member variables
*/
BitcoinCryptoVault vault;
TransactionNotificationAgent transactionNotificationAgent;
/**
* DealsWithBitcoinCryptoNetwork interface member variable
*/
BitcoinCryptoNetworkManager bitcoinCryptoNetworkManager;
/**
* DealWithEvents Interface member variables.
*/
EventManager eventManager;
/**
* Plugin Interface member variables.
*/
UUID pluginId;
/**
* DealsWithErrors interface member variable
*/
ErrorManager errorManager;
/**
* DealsWithPluginDatabaseSystem interface member variable
*/
PluginDatabaseSystem pluginDatabaseSystem;
Database database;
/**
* DealsWithLogger interface member variable
*/
LogManager logManager;
static Map<String, LogLevel> newLoggingLevel = new HashMap<String, LogLevel>();
/**
* DealsWithDeviceUser interface member variable
*/
DeviceUserManager deviceUserManager;
/**
* LogManagerForDevelopers member variables
*/
public static LogLevel logLevel;
/**
* DealsWithPluginFileSystem interface member variable
*/
PluginFileSystem pluginFileSystem;
/**
* Service Interface member variables.
*/
ServiceStatus serviceStatus = ServiceStatus.CREATED;
List<FermatEventListener> listenersAdded = new ArrayList<>();
/**
* DealsWithBitcoinCryptoNetwork interface implementation
* @param bitcoinCryptoNetworkManager
*/
@Override
public void setBitcoinCryptoNetworkManager(BitcoinCryptoNetworkManager bitcoinCryptoNetworkManager) {
this.bitcoinCryptoNetworkManager = bitcoinCryptoNetworkManager;
}
@Override
public List<String> getClassesFullPath() {
List<String> returnedClasses = new ArrayList<String>();
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.BitcoinCryptoVaultPluginRoot");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.BitcoinNetworkConfiguration");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.CryptoVaultDatabaseFactory");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.BitcoinCryptoVault");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.CryptoVaultDatabaseActions");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.events.TransactionNotificationAgent");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.developerUtils.DeveloperDatabaseFactory");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.TransactionConfidenceCalculator");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.VaultEventListeners");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.CryptoVaultDatabaseConstants");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.BitcoinNetworkConfiguration");
returnedClasses.add("com.bitdubai.fermat_cry_plugin.layer.crypto_vault.developer.bitdubai.version_1.structure.TransactionConfidenceCalculator");
/**
* I return the values.
*/
return returnedClasses;
}
@Override
public void setLoggingLevelPerClass(Map<String, LogLevel> newLoggingLevel) {
/**
* I will check the current values and update the LogLevel for any entry that differs
*/
for (Map.Entry<String, LogLevel> pluginPair : newLoggingLevel.entrySet()) {
/**
* if this path already exists in the root's newLoggingLevel map I'll update the value, else I will put it as a new entry
*/
BitcoinCryptoVaultPluginRoot.newLoggingLevel.put(pluginPair.getKey(), pluginPair.getValue());
}
}
/**
* DatabaseManagerForDevelopers interface implementation
* Returns the list of databases implemented on this plug in.
*/
@Override
public List<DeveloperDatabase> getDatabaseList(DeveloperObjectFactory developerObjectFactory) {
try {
String userPublicKey = deviceUserManager.getLoggedInDeviceUser().getPublicKey();
DeveloperDatabaseFactory dbFactory = new DeveloperDatabaseFactory(userPublicKey, pluginId.toString());
return dbFactory.getDatabaseList(developerObjectFactory);
} catch (CantGetLoggedInDeviceUserException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_VAULT, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, e);
}
return new ArrayList<>();
}
/**
* returns the list of tables for the given database
* @param developerObjectFactory
* @param developerDatabase
* @return
*/
@Override
public List<DeveloperDatabaseTable> getDatabaseTableList(DeveloperObjectFactory developerObjectFactory, DeveloperDatabase developerDatabase) {
return DeveloperDatabaseFactory.getDatabaseTableList(developerObjectFactory);
}
/**
* returns the list of records for the passed table
* @param developerObjectFactory
* @param developerDatabase
* @param developerDatabaseTable
* @return
*/
@Override
public List<DeveloperDatabaseTableRecord> getDatabaseTableContent(DeveloperObjectFactory developerObjectFactory, DeveloperDatabase developerDatabase, DeveloperDatabaseTable developerDatabaseTable) {
return DeveloperDatabaseFactory.getDatabaseTableContent(developerObjectFactory, database, developerDatabaseTable);
}
@Override
public void setLogManager(LogManager logManager) {
this.logManager = logManager;
}
/**
* Service interface implementation
* @return
*/
@Override
public ServiceStatus getStatus() {
return this.serviceStatus;
}
@Override
public boolean isValidAddress(CryptoAddress addressTo) {
return vault.isValidAddress(addressTo);
}
/**
* DealWithEvents Interface implementation.
*/
@Override
public void setEventManager(EventManager eventManager) {
this.eventManager = eventManager;
}
/**
* DealsWithErrors Interface implementation.
*/
@Override
public void setErrorManager(ErrorManager errorManager) {
this.errorManager = errorManager;
}
/**
* DealsWithPluginDatabaseSystem interface implementation
* @param pluginDatabaseSystem
*/
@Override
public void setPluginDatabaseSystem(PluginDatabaseSystem pluginDatabaseSystem) {
this.pluginDatabaseSystem = pluginDatabaseSystem;
}
/**
* Plugin method implementation.
*/
@Override
public void setId(UUID pluginId) {
this.pluginId = pluginId;
}
@Override
public void setDeviceUserManager(DeviceUserManager deviceUserManager) {
this.deviceUserManager = deviceUserManager;
}
/**
* DealsWithPluginFileSystem interface implementation
* @param pluginFileSystem
*/
@Override
public void setPluginFileSystem(PluginFileSystem pluginFileSystem) {
this.pluginFileSystem = pluginFileSystem;
}
//TODO Franklin: generic exception handling is still missing here
@Override
public void start() throws CantStartPluginException {
//logManager.log(BitcoinCryptoVaultPluginRoot.getLogLevelByClass(this.getClass().getName()), "CryptoVault Starting...", null, null);
/**
* I get the userPublicKey from the deviceUserManager
*/
String userPublicKey;
try {
userPublicKey = deviceUserManager.getLoggedInDeviceUser().getPublicKey();
} catch (CantGetLoggedInDeviceUserException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_VAULT, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, e);
throw new CantStartPluginException(CantStartPluginException.DEFAULT_MESSAGE, e, "Cant get LoggedIn Device User", "");
}
/**
* I will try to open the database first; if it doesn't exist, then I create it
*/
try {
database = pluginDatabaseSystem.openDatabase(pluginId, userPublicKey);
} catch (CantOpenDatabaseException e) {
/**
* The database could not be opened, let's try to create it instead.
*/
try {
CryptoVaultDatabaseFactory cryptoVaultDatabaseFactory = new CryptoVaultDatabaseFactory();
cryptoVaultDatabaseFactory.setPluginDatabaseSystem(pluginDatabaseSystem);
cryptoVaultDatabaseFactory.setErrorManager(errorManager);
database = cryptoVaultDatabaseFactory.createDatabase(pluginId, userPublicKey);
} catch (CantCreateDatabaseException e1) {
/**
* something went wrong creating the db, I can't handle this.
*/
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_VAULT, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, e1);
}
} catch (DatabaseNotFoundException e) {
/**
* The database doesn't exist, let's create it.
*/
try {
CryptoVaultDatabaseFactory cryptoVaultDatabaseFactory = new CryptoVaultDatabaseFactory();
cryptoVaultDatabaseFactory.setPluginDatabaseSystem(pluginDatabaseSystem);
cryptoVaultDatabaseFactory.setErrorManager(errorManager);
database = cryptoVaultDatabaseFactory.createDatabase(pluginId, userPublicKey);
} catch (CantCreateDatabaseException e1) {
/**
* something went wrong creating the db, I can't handle this.
*/
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_VAULT, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, e1);
}
}
/**
* I will start loading or creating the vault from the user public key
*/
try {
vault = new BitcoinCryptoVault(userPublicKey);
vault.setLogManager(logManager);
vault.setErrorManager(errorManager);
vault.setPluginDatabaseSystem(pluginDatabaseSystem);
vault.setDatabase(this.database);
vault.setPluginFileSystem(this.pluginFileSystem);
vault.setBitcoinCryptoNetworkManager(bitcoinCryptoNetworkManager);
vault.setPluginId(pluginId);
vault.setEventManager(eventManager);
vault.loadOrCreateVault();
/**
* Once the vault is loaded or created, I will connect it to the Bitcoin network to receive pending transactions
*/
try {
vault.connectVault();
} catch (CantConnectToBitcoinNetwork cantConnectToBitcoinNetwork) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_VAULT, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, cantConnectToBitcoinNetwork);
throw new CantStartPluginException("Error trying to start CryptoVault plugin.", cantConnectToBitcoinNetwork, null, "I couldn't connect to the Bitcoin network.");
}
} catch (CantCreateCryptoWalletException cantCreateCryptoWalletException ) {
/**
* If I couldn't create the Vault, I can't go on.
*/
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_VAULT, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, cantCreateCryptoWalletException );
throw new CantStartPluginException("Error trying to start CryptoVault plugin.", cantCreateCryptoWalletException, null, "Probably not enought space available to save the vault.");
}
/**
* Now I will start the TransactionNotificationAgent to monitor transactions
*/
transactionNotificationAgent = new TransactionNotificationAgent(eventManager, pluginDatabaseSystem, errorManager, pluginId, userPublicKey);
transactionNotificationAgent.setLogManager(this.logManager);
try {
transactionNotificationAgent.start();
} catch (CantStartAgentException cantStartAgentException ) {
/**
* If I couldn't start the agent, I will still continue with the vault
*/
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_BITCOIN_CRYPTO_VAULT, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, cantStartAgentException );
}
/**
* the service is started.
*/
this.serviceStatus = ServiceStatus.STARTED;
logManager.log(BitcoinCryptoVaultPluginRoot.getLogLevelByClass(this.getClass().getName()), "CryptoVault started.", null, null);
}
/**
* Service interface implementation
*/
@Override
public void pause() {
this.serviceStatus = ServiceStatus.PAUSED;
}
/**
* Service interface implementation
*/
@Override
public void resume() {
this.serviceStatus = ServiceStatus.STARTED;
}
/**
* Service interface implementation
*/
@Override
public void stop() {
/**
* I will remove all the event listeners registered with the event manager.
*/
for (FermatEventListener fermatEventListener : listenersAdded) {
eventManager.removeListener(fermatEventListener);
}
listenersAdded.clear();
/**
* I will also stop the Notification Agent
*/
transactionNotificationAgent.stop();
this.serviceStatus = ServiceStatus.STOPPED;
}
//TODO Franklin: generic exception handling is still missing here
/**
* CryptoVaultManager interface implementation
*/
@Override
public void connectToBitcoin() throws VaultNotConnectedToNetworkException {
try {
vault.connectVault();
} catch (CantConnectToBitcoinNetwork cantConnectToBitcoinNetwork) {
throw new VaultNotConnectedToNetworkException();
}
}
//TODO Franklin: generic exception handling is still missing here, use the errorManager
/**
* CryptoVaultManager interface implementation
*/
@Override
public void disconnectFromBitcoin() {
try {
vault.disconnectVault();
} catch (Exception exception){
}
}
/**
* CryptoVaultManager interface implementation
*/
@Override
public CryptoAddress getAddress() {
return vault.getAddress();
}
/**
* CryptoVaultManager interface implementation
*/
@Override
public List<CryptoAddress> getAddresses(int amount) {
List<CryptoAddress> addresses = new ArrayList<CryptoAddress>();
for (int i=0; i < amount; i++){
addresses.add(getAddress());
}
return addresses;
}
// changed wallet id from UUID to String representing a public key
// Ezequiel Postan August 15th 2015
@Override
public String sendBitcoins(String walletPublicKey, UUID FermatTrId, CryptoAddress addressTo, long satoshis) throws com.bitdubai.fermat_cry_api.layer.crypto_vault.exceptions.InsufficientMoneyException, InvalidSendToAddressException, CouldNotSendMoneyException, CryptoTransactionAlreadySentException {
return vault.sendBitcoins(FermatTrId, addressTo, satoshis);
}
@Override
public TransactionProtocolManager<CryptoTransaction> getTransactionManager() {
return vault;
}
/**
* Static method to get the logging level from any class under root.
* @param className
* @return
*/
public static LogLevel getLogLevelByClass(String className){
try{
/**
* Sometimes the class name may be passed dynamically with a $moretext suffix;
* I need to ignore what comes after it.
*/
String[] correctedClass = className.split((Pattern.quote("$")));
return BitcoinCryptoVaultPluginRoot.newLoggingLevel.get(correctedClass[0]);
} catch (Exception e){
/**
* If I couldn't get the correct logging level, then I will set it to minimal.
*/
return DEFAULT_LOG_LEVEL;
}
}
//TODO Franklin: generic exception handling is still missing here
@Override
public CryptoStatus getCryptoStatus(UUID transactionId) throws CouldNotGetCryptoStatusException {
try {
return vault.getCryptoStatus(transactionId);
} catch (CantExecuteQueryException e) {
throw new CouldNotGetCryptoStatusException("There was an error accesing the database to get the CryptoStatus.", e, "TransactionId: " + transactionId.toString(), "An error in the database plugin.");
} catch (UnexpectedResultReturnedFromDatabaseException e) {
throw new CouldNotGetCryptoStatusException("There was an error getting the CryptoStatus of the transaction.", e, "TransactionId: " + transactionId.toString(), "Duplicated transaction Id in the database.");
}
}
@Override
public CryptoTransaction generateDraftCryptoTransaction(CryptoAddress addressTo, long cryptoAmount) throws CoultNotCreateCryptoTransaction {
//TODO rodrigo: implement.
return null;
}
}
|
package org.gemoc.gemoc_language_workbench.extensions.timesquare.moc.impl;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Platform;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature.Setting;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.gemoc.execution.engine.trace.gemoc_execution_trace.Gemoc_execution_traceFactory;
import org.gemoc.execution.engine.trace.gemoc_execution_trace.LogicalStep;
import org.gemoc.execution.engine.trace.gemoc_execution_trace.MSEOccurrence;
import org.gemoc.gemoc_language_workbench.api.core.IExecutionContext;
import org.gemoc.gemoc_language_workbench.api.core.IExecutionWorkspace;
import org.gemoc.gemoc_language_workbench.extensions.timesquare.Activator;
import org.gemoc.gemoc_language_workbench.utils.ccsl.QvtoTransformationPerformer;
import org.osgi.framework.Bundle;
import fr.inria.aoste.timesquare.ccslkernel.explorer.CCSLConstraintState;
import fr.inria.aoste.timesquare.ccslkernel.model.TimeModel.Event;
import fr.inria.aoste.timesquare.ccslkernel.modelunfolding.exception.UnfoldingException;
import fr.inria.aoste.timesquare.ccslkernel.runtime.exceptions.NoBooleanSolution;
import fr.inria.aoste.timesquare.ccslkernel.runtime.exceptions.SimulationException;
import fr.inria.aoste.timesquare.ccslkernel.solver.exception.SolverException;
import fr.inria.aoste.timesquare.ccslkernel.solver.launch.CCSLKernelSolverWrapper;
import fr.inria.aoste.timesquare.ecl.feedback.feedback.ActionModel;
import fr.inria.aoste.timesquare.ecl.feedback.feedback.ModelSpecificEvent;
import fr.inria.aoste.timesquare.simulationpolicy.maxcardpolicy.MaxCardSimulationPolicy;
import fr.inria.aoste.trace.EventOccurrence;
//import fr.inria.aoste.trace.LogicalStep;
import fr.inria.aoste.trace.ModelElementReference;
import fr.inria.aoste.trace.Reference;
/**
* Implementation of the ISolver dedicated to CCSL.
*
*/
public class CcslSolver implements org.gemoc.gemoc_language_workbench.api.moc.ISolver {
private CCSLKernelSolverWrapper solverWrapper = null;
private URI solverInputURI = null;
private ArrayList<LogicalStep> _lastLogicalSteps = new ArrayList<LogicalStep>();
private ActionModel _feedbackModel;
public CcslSolver()
{
}
@Override
public ArrayList<ModelElementReference> getAllDiscreteClocks(){
return solverWrapper.getClockList();
}
@Override
public void forbidEventOccurrence(EventOccurrence eventOccurrence) {
this.solverWrapper.forceClockAbsence(this
.getModelElementReferenceFromEventOccurrence(eventOccurrence));
}
@Override
public void forceEventOccurrence(EventOccurrence eventOccurrence) {
this.solverWrapper.forceClockPresence(this
.getModelElementReferenceFromEventOccurrence(eventOccurrence));
}
/**
* Returns the ModelElementReference referred to by this eventOccurrence (as
* originally sent by the CCSL Solver).
*
* @param eventOccurrence
* @return
*/
private ModelElementReference getModelElementReferenceFromEventOccurrence(
EventOccurrence eventOccurrence) {
Reference reference = eventOccurrence.getReferedElement();
if (reference instanceof ModelElementReference) {
ModelElementReference mer = (ModelElementReference) reference;
return mer;
// ModelElementReference merToForce = this.mappingEventToOriginalMer
// .get(mer.getElementRef().get(0));
// return merToForce;
} else {
throw new RuntimeException(
"Refered Element of eventOccurrence should be a ModelElementReference");
}
}
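/**
* Translates a TimeSquare logical step into a GEMOC trace LogicalStep: each ticked event is matched
* to a ModelSpecificEvent of the feedback model by name (ignoring the "MSE_"/"evt_" prefixes).
*/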
private LogicalStep createLogicalStep(fr.inria.aoste.trace.LogicalStep res)
{
LogicalStep ls = Gemoc_execution_traceFactory.eINSTANCE.createLogicalStep();
for (Event e : LogicalStepHelper.getTickedEvents(res))
{
MSEOccurrence mseOccurrence = Gemoc_execution_traceFactory.eINSTANCE.createMSEOccurrence();
for (ModelSpecificEvent mse : _feedbackModel.getEvents())
{
if (mse.getName().replace("MSE_", "").equals(e.getName().replace("evt_", "")))
{
mseOccurrence.setMse(mse);
break;
}
}
ls.getMseOccurrences().add(mseOccurrence);
}
return ls;
}
@Override
public String toString() {
return this.getClass().getName() + "@[modelOfExecutionURI="
+ this.solverInputURI + "]";
}
private void createSolver(IExecutionContext context)
{
this.solverInputURI = URI.createPlatformResourceURI(context.getWorkspace().getMoCPath().toString(), true);
URI feedbackURI = URI.createPlatformResourceURI(context.getWorkspace().getFeedbackModelPath().toString(), true);
try
{
ResourceSet resourceSet = context.getResourceModel().getResourceSet();
Resource ccslResource = resourceSet.getResource(this.solverInputURI, true);
EcoreUtil.resolveAll(resourceSet);
traceResources(resourceSet);
traceUnresolvedProxies(resourceSet, solverInputURI);
this.solverWrapper = new CCSLKernelSolverWrapper();
this.solverWrapper.getSolver().loadModel(ccslResource);
this.solverWrapper.getSolver().initSimulation();
this.solverWrapper.getSolver().setPolicy(new MaxCardSimulationPolicy());
Resource feedbackResource = resourceSet.getResource(feedbackURI, true);
_feedbackModel = (ActionModel)feedbackResource.getContents().get(0);
} catch (IOException e) {
String errorMessage = "IOException while instantiating the CcslSolver";
Activator.getDefault().error(errorMessage);
Activator.getDefault().error(errorMessage, e);
} catch (UnfoldingException e) {
String errorMessage = "UnfoldingException while instantiating the CcslSolver";
Activator.getDefault().error(errorMessage);
Activator.getDefault().error(errorMessage, e);
} catch (SolverException e) {
String errorMessage = "SolverException while instantiating the CcslSolver";
Activator.getDefault().error(errorMessage);
Activator.getDefault().error(errorMessage, e);
} catch (SimulationException e) {
String errorMessage = "SimulationException while instantiating the CcslSolver";
Activator.getDefault().error(errorMessage);
Activator.getDefault().error(errorMessage, e);
}
}
private void traceUnresolvedProxies(ResourceSet resourceSet,
URI solverInputURI) {
Map<EObject, Collection<Setting>> unresolvedProxies = EcoreUtil.UnresolvedProxyCrossReferencer.find(resourceSet);
if(unresolvedProxies.size() != 0){
Activator.getDefault().warn("There are unresolved proxies in "+solverInputURI+ ", the first is "+unresolvedProxies.entrySet().toArray()[0]);
Activator.getDefault().warn("Please verify that you don't have the modeling nature for your project so that the aird indexed all the resources, (it must not contain resolve warning).");
}
}
private void traceResources(ResourceSet resourceSet) {
Activator.getDefault().info("Input resources:");
for(Resource r : resourceSet.getResources())
{
Activator.getDefault().info(r.getURI().toString());
}
}
@Override
public List<LogicalStep> computeAndGetPossibleLogicalSteps() {
try {
List<fr.inria.aoste.trace.LogicalStep> intermediateResult = solverWrapper.computeAndGetPossibleLogicalSteps();
_lastLogicalSteps.clear();
for (fr.inria.aoste.trace.LogicalStep lsFromTimesquare : intermediateResult)
{
LogicalStep lsFromTrace = createLogicalStep(lsFromTimesquare);
_lastLogicalSteps.add(lsFromTrace);
}
return new ArrayList<LogicalStep>(_lastLogicalSteps);
} catch (NoBooleanSolution e) {
Activator.getDefault().error(e.getMessage(), e);
} catch (SolverException e) {
Activator.getDefault().error(e.getMessage(), e);
} catch (SimulationException e) {
Activator.getDefault().error(e.getMessage(), e);
}
return new ArrayList<LogicalStep>();
}
@Override
public List<LogicalStep> updatePossibleLogicalSteps() {
try {
List<fr.inria.aoste.trace.LogicalStep> intermediateResult = solverWrapper.updatePossibleLogicalSteps();
_lastLogicalSteps.clear();
for (fr.inria.aoste.trace.LogicalStep lsFromTimesquare : intermediateResult)
{
LogicalStep lsFromTrace = createLogicalStep(lsFromTimesquare);
_lastLogicalSteps.add(lsFromTrace);
}
return new ArrayList<LogicalStep>(_lastLogicalSteps);
} catch (NoBooleanSolution e) {
Activator.getDefault().error(e.getMessage(), e);
} catch (SolverException e) {
Activator.getDefault().error(e.getMessage(), e);
} catch (SimulationException e) {
Activator.getDefault().error(e.getMessage(), e);
}
return new ArrayList<LogicalStep>();
}
@Override
public LogicalStep proposeLogicalStep() {
int index = solverWrapper.proposeLogicalStepByIndex();
LogicalStep result = null;
if (_lastLogicalSteps.size() > index)
{
result = _lastLogicalSteps.get(index);
}
return result;
}
@Override
public void applyLogicalStep(LogicalStep logicalStep) {
try {
int index = _lastLogicalSteps.indexOf(logicalStep);
solverWrapper.applyLogicalStepByIndex(index);
} catch (SolverException e) {
Activator.getDefault().error(e.getMessage(), e);
} catch (SimulationException e) {
Activator.getDefault().error(e.getMessage(), e);
}
}
@Override
public byte[] getState() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
ObjectOutputStream objOut = new ObjectOutputStream(out);
objOut.writeObject(solverWrapper.getSolver().getCurrentState());
// Flush so the byte array contains the complete serialized state.
objOut.flush();
return out.toByteArray();
} catch (IOException e) {
Activator.getDefault().error(e.getMessage(), e);
}
return null;
}
@Override
public void setState(byte[] serializableModel) {
ByteArrayInputStream in = new ByteArrayInputStream(serializableModel);
try {
ObjectInputStream objIn = new ObjectInputStream(in);
Object o = objIn.readObject();
solverWrapper.getSolver().setCurrentState((CCSLConstraintState) o);
} catch (IOException e) {
Activator.getDefault().error(e.getMessage(), e);
} catch (ClassNotFoundException e) {
Activator.getDefault().error(e.getMessage(), e);
}
}
@Override
public void revertForceClockEffect() {
try {
solverWrapper.revertForceClockEffect();
} catch (SimulationException e) {
Activator.getDefault().error(e.getMessage(), e);
}
}
@Override
public void setUp(IExecutionContext context)
{
generateMoC(context);
createSolver(context);
}
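/**
* Regenerates the MoC and feedback models through the QVTo transformation whenever either file is
* missing or older than the executed model (or than the transformation itself).
*/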
private void generateMoC(IExecutionContext context)
{
IExecutionWorkspace workspace = context.getWorkspace();
String transformationPath = context.getLanguageDefinitionExtension().getQVTOPath();
boolean mustGenerate = false;
IFile mocFile = ResourcesPlugin.getWorkspace().getRoot().getFile(workspace.getMoCPath());
if (!mocFile.exists()
|| ResourcesPlugin.getWorkspace().getRoot().getFile(workspace.getModelPath()).getLocalTimeStamp() >
ResourcesPlugin.getWorkspace().getRoot().getFile(workspace.getMoCPath()).getLocalTimeStamp())
{
mustGenerate = true;
}
IFile feedbackFile = ResourcesPlugin.getWorkspace().getRoot().getFile(workspace.getFeedbackModelPath());
if (!feedbackFile.exists()
|| ResourcesPlugin.getWorkspace().getRoot().getFile(workspace.getModelPath()).getLocalTimeStamp() >
ResourcesPlugin.getWorkspace().getRoot().getFile(workspace.getFeedbackModelPath()).getLocalTimeStamp())
{
mustGenerate = true;
}
final int bundleNameEnd=transformationPath.indexOf('/', 1);
final String bundleName=transformationPath.substring(1,bundleNameEnd);
Bundle bundle=Platform.getBundle(bundleName);
if (bundle != null) {
final URL bundleFileURL=bundle.getEntry(transformationPath.substring(bundleNameEnd));
try {
URL fileURL = FileLocator.toFileURL(bundleFileURL);
File transformationFile =new File(fileURL.getFile());
if ( feedbackFile.exists() &&
transformationFile.lastModified() >
ResourcesPlugin.getWorkspace().getRoot().getFile(workspace.getFeedbackModelPath()).getLocalTimeStamp())
{
mustGenerate = true;
}
} catch (IOException e) {
Activator.getDefault().error(e.getMessage(), e);
}
}
if (mustGenerate)
{
QvtoTransformationPerformer performer = new QvtoTransformationPerformer();
performer.run(
context.getResourceModel().getResourceSet(),
"platform:/plugin" + transformationPath,
context.getRunConfiguration().getExecutedModelAsMelangeURI().toString(),
"platform:/resource" + workspace.getMoCPath().toString(),
"platform:/resource" + workspace.getFeedbackModelPath().toString());
}
}
@Override
public void dispose() {
// TODO Auto-generated method stub
}
}
|
package projectileMotionSim;
import java.awt.Graphics;
import javax.swing.*;
import java.awt.*;
public class PMSimDraw extends JPanel {
private int velocity;
private double angle;
private double xvel;
private double yvel;
private double x;
private double y;
private double time;
public PMSimDraw(){
setPreferredSize(new Dimension(550, 550));
setBackground(Color.WHITE);
}
public void paintComponent(Graphics g){
super.paintComponent(g);
g.drawRect(50, 50, 450, 450);
int xcor = 50;
int ycor = 500;
int tickcounter = 0;
for(int i = 0; i < 10; i++){
g.drawLine(xcor, 500, xcor, 505);
if(i == 0){
g.drawString("" + tickcounter + "", xcor, 525);
}
else if(i == 2){
g.drawString("" + tickcounter + "", xcor - 8, 525);
}
else{
g.drawString("" + tickcounter + "", xcor - 12, 525);
}
xcor += 50;
tickcounter += 50;
}
tickcounter = 0;
for(int i = 0; i < 10; i++){
g.drawLine(45, ycor, 50, ycor);
if(i == 0){
g.drawString("" + tickcounter + "", 25, ycor + 5);
}
else if(i == 1){
g.drawString("" + tickcounter + "", 20, ycor + 5);
}
else{
g.drawString("" + tickcounter + "", 15, ycor + 5);
}
ycor -= 50;
tickcounter += 50;
}
rocketFlight(g);
}
public void rocketFlight(Graphics g){
double x;
double y;
for(int i = 0; i <= time; i++) {
x = 50 + xvel * i;
y = 495 - yvel * i - 0.5 * (-9.8) * Math.pow(i, 2);
g.setColor(Color.blue);
g.fillOval((int)x, (int)y, 5, 5);
}
}
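// Derives the velocity components from the launch angle and computes the total flight time
// t = 2 * yvel / 9.8, i.e. the moment the projectile returns to the ground.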
public void setFlightParameters(int v, int a){
angle = a;
velocity = v;
xvel = velocity * Math.cos(Math.toRadians(angle));
yvel = velocity * Math.sin(Math.toRadians(angle));
time = -yvel / (0.5 * -9.8);
}
public double getTime(){
return time;
}
public double getAltitude(){
// Peak altitude: v_y * (t_flight / 2) / 2 = v_y^2 / (2 * 9.8).
return (yvel * time) / 4;
}
public double getDist(){
return xvel * time;
}
}
|
package com.intellij.util.indexing;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.vfs.newvfs.persistent.FSRecords;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFSImpl;
import com.intellij.util.indexing.flavor.FileIndexingFlavorProvider;
import com.intellij.util.indexing.flavor.HashBuilder;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
@ApiStatus.Internal
public final class IndexedHashesSupport {
// TODO replace with sha-256
private static final HashFunction INDEXED_FILE_CONTENT_HASHER = Hashing.sha1();
public static int getVersion() {
return 3;
}
public static byte @NotNull [] getOrInitIndexedHash(@NotNull FileContentImpl content) {
byte[] hash = content.getHash();
if (hash != null) return hash;
byte[] contentHash = PersistentFSImpl.getContentHashIfStored(content.getFile());
if (contentHash == null) {
contentHash = getBinaryContentHash(content.getContent());
// todo store content hash in FS
}
hash = calculateIndexedHash(content, contentHash, false);
content.setHashes(hash);
return hash;
}
public static byte @NotNull [] getBinaryContentHash(byte @NotNull [] content) {
//TODO: duplicate of com.intellij.openapi.vfs.newvfs.persistent.FSRecords.calculateHash
MessageDigest digest = FSRecords.getContentHashDigest();
digest.update(String.valueOf(content.length).getBytes(StandardCharsets.UTF_8));
digest.update("\u0000".getBytes(StandardCharsets.UTF_8));
digest.update(content);
return digest.digest();
}
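/**
* The indexed hash combines the raw content hash with everything else that can affect indexing:
* the charset (for non-binary files), the file name, the file type and, when available, the
* file's indexing flavor.
*/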
public static byte @NotNull [] calculateIndexedHash(@NotNull IndexedFile indexedFile, byte @NotNull [] contentHash, boolean isUtf8Forced) {
Hasher hasher = INDEXED_FILE_CONTENT_HASHER.newHasher();
hasher.putBytes(contentHash);
if (!FileContentImpl.getFileTypeWithoutSubstitution(indexedFile).isBinary()) {
Charset charset = isUtf8Forced ? StandardCharsets.UTF_8 :
indexedFile instanceof FileContentImpl
? ((FileContentImpl)indexedFile).getCharset()
: indexedFile.getFile().getCharset();
hasher.putString(charset.name(), StandardCharsets.UTF_8);
}
hasher.putString(indexedFile.getFileName(), StandardCharsets.UTF_8);
FileType fileType = indexedFile.getFileType();
hasher.putString(fileType.getName(), StandardCharsets.UTF_8);
@Nullable
FileIndexingFlavorProvider<?> provider = FileIndexingFlavorProvider.INSTANCE.forFileType(fileType);
if (provider != null) {
buildFlavorHash(indexedFile, provider, new HashBuilder() {
@Override
public @NotNull HashBuilder putInt(int val) {
hasher.putInt(val);
return this;
}
@Override
public @NotNull HashBuilder putBoolean(boolean val) {
hasher.putBoolean(val);
return this;
}
@Override
public @NotNull HashBuilder putString(@NotNull CharSequence charSequence) {
hasher.putString(charSequence, StandardCharsets.UTF_8);
return this;
}
});
}
return hasher.hash().asBytes();
}
private static <F> void buildFlavorHash(@NotNull IndexedFile indexedFile,
@NotNull FileIndexingFlavorProvider<F> flavorProvider,
@NotNull HashBuilder hashBuilder) {
F flavor = flavorProvider.getFlavor(indexedFile);
hashBuilder.putString(flavorProvider.getId());
hashBuilder.putInt(flavorProvider.getVersion());
if (flavor != null) {
flavorProvider.buildHash(flavor, hashBuilder);
}
}
}
|
package com.intellij.openapi.ui;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NlsUI;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.ui.mac.MacMessages;
import com.intellij.util.ObjectUtils;
import com.intellij.util.nls.NlsContexts;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
public abstract class MessageDialogBuilder<T extends MessageDialogBuilder> {
protected final String myMessage;
protected final String myTitle;
protected String myYesText;
protected String myNoText;
protected Project myProject;
protected Icon myIcon;
protected DialogWrapper.DoNotAskOption myDoNotAskOption;
private MessageDialogBuilder(@NotNull @Nls @NlsContexts.DialogTitle String title,
@NotNull @Nls @NlsContexts.DialogMessage String message) {
myTitle = title;
myMessage = message;
}
@NotNull
public static YesNo yesNo(@NotNull @Nls @NlsContexts.DialogTitle String title, @NotNull @Nls @NlsContexts.DialogMessage String message) {
return new YesNo(title, message).icon(Messages.getQuestionIcon());
}
public static YesNoCancel yesNoCancel(@NotNull @Nls @NlsContexts.DialogTitle String title,
@NotNull @Nls @NlsContexts.DialogMessage String message) {
return new YesNoCancel(title, message).icon(Messages.getQuestionIcon());
}
protected abstract T getThis();
@NotNull
public T project(@Nullable Project project) {
myProject = project;
return getThis();
}
/**
* @see Messages#getInformationIcon()
* @see Messages#getWarningIcon()
* @see Messages#getErrorIcon()
* @see Messages#getQuestionIcon()
*/
public T icon(@Nullable Icon icon) {
myIcon = icon;
return getThis();
}
@NotNull
public T doNotAsk(@NotNull DialogWrapper.DoNotAskOption doNotAskOption) {
myDoNotAskOption = doNotAskOption;
return getThis();
}
public T yesText(@NotNull @Nls @NlsUI.Button String yesText) {
myYesText = yesText;
return getThis();
}
public T noText(@NotNull @Nls @NlsUI.Button String noText) {
myNoText = noText;
return getThis();
}
public static final class YesNo extends MessageDialogBuilder<YesNo> {
private YesNo(@NotNull String title, @NotNull String message) {
super(title, message);
}
@Override
protected YesNo getThis() {
return this;
}
@Messages.YesNoResult
public int show() {
String yesText = ObjectUtils.chooseNotNull(myYesText, Messages.getYesButton());
String noText = ObjectUtils.chooseNotNull(myNoText, Messages.getNoButton());
try {
if (Messages.canShowMacSheetPanel() && !Messages.isApplicationInUnitTestOrHeadless()) {
Window window = WindowManager.getInstance().suggestParentWindow(myProject);
return MacMessages.getInstance().showYesNoDialog(myTitle, myMessage, yesText, noText, window, myDoNotAskOption);
}
}
catch (Exception ignored) { }
String[] options = {yesText, noText};
return Messages.showDialog(myProject, myMessage, myTitle, options, 0, myIcon, myDoNotAskOption) == 0 ? Messages.YES : Messages.NO;
}
public boolean isYes() {
return show() == Messages.YES;
}
}
public static final class YesNoCancel extends MessageDialogBuilder<YesNoCancel> {
private String myCancelText;
private YesNoCancel(@NotNull String title, @NotNull String message) {
super(title, message);
}
public YesNoCancel cancelText(@NotNull @Nls @NlsUI.Button String cancelText) {
myCancelText = cancelText;
return getThis();
}
@Override
protected YesNoCancel getThis() {
return this;
}
@Messages.YesNoCancelResult
public int show() {
String yesText = ObjectUtils.chooseNotNull(myYesText, Messages.getYesButton());
String noText = ObjectUtils.chooseNotNull(myNoText, Messages.getNoButton());
String cancelText = ObjectUtils.chooseNotNull(myCancelText, Messages.getCancelButton());
try {
if (Messages.canShowMacSheetPanel() && !Messages.isApplicationInUnitTestOrHeadless()) {
Window window = WindowManager.getInstance().suggestParentWindow(myProject);
return MacMessages.getInstance().showYesNoCancelDialog(myTitle, myMessage, yesText, noText, cancelText, window, myDoNotAskOption);
}
}
catch (Exception ignored) {}
String[] options = {yesText, noText, cancelText};
int buttonNumber = Messages.showDialog(myProject, myMessage, myTitle, options, 0, myIcon, myDoNotAskOption);
return buttonNumber == 0 ? Messages.YES : buttonNumber == 1 ? Messages.NO : Messages.CANCEL;
}
}
}
|
package org.xwiki.administration.test.po;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.xwiki.test.ui.po.BaseElement;
/**
* Page object used to interact with the Administration menu.
*
* @version $Id$
* @since 9.2RC1
*/
public class AdministrationMenu extends BaseElement
{
@FindBy(className = "admin-menu")
private WebElement container;
private By categoryByName(String categoryName)
{
return By.xpath(".//a[contains(@class, 'panel-heading') and . = '" + categoryName + "']");
}
private By categoryById(String categoryId)
{
return By.cssSelector("a[id='panel-heading-" + categoryId + "']");
}
public WebElement getCategoryByName(String categoryName)
{
return this.container.findElement(categoryByName(categoryName));
}
public WebElement getCategoryById(String categoryId)
{
return this.container.findElement(categoryById(categoryId));
}
public boolean hasCategoryWithId(String categoryId)
{
return getDriver().hasElement(this.container, categoryById(categoryId));
}
public boolean hasCategoryWithName(String categoryName)
{
return getDriver().hasElement(this.container, categoryByName(categoryName));
}
public boolean hasNotCategoryWithId(String categoryId)
{
return getDriver().findElementsWithoutWaiting(this.container, categoryById(categoryId)).size() == 0;
}
public boolean hasNotCategoryWithName(String categoryName)
{
return getDriver().findElementsWithoutWaiting(this.container, categoryByName(categoryName)).size() == 0;
}
public AdministrationMenu expandCategoryWithId(String categoryId)
{
return expandCategory(getCategoryById(categoryId));
}
public AdministrationMenu expandCategoryWithName(String categoryName)
{
return expandCategory(getCategoryByName(categoryName));
}
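/**
* Clicks the category heading if it is collapsed and waits until the corresponding panel body becomes visible.
*/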
private AdministrationMenu expandCategory(WebElement categoryLink)
{
if (categoryLink.getAttribute("class").contains("collapsed")) {
By categoryContent = By.cssSelector(categoryLink.getAttribute("data-target") + ".collapse.in");
categoryLink.click();
getDriver().waitUntilElementIsVisible(categoryContent);
}
return this;
}
private By sectionByName(String categoryName, String sectionName)
{
return By.xpath(".//a[contains(@class, 'panel-heading') and . = '" + categoryName
+ "']/following-sibling::*//a[contains(@class, 'list-group-item') and . = '" + sectionName + "']");
}
private By sectionById(String sectionId)
{
return By.cssSelector("a.list-group-item[data-id='" + sectionId + "']");
}
public WebElement getSectionByName(String categoryName, String sectionName)
{
return this.container.findElement(sectionByName(categoryName, sectionName));
}
public WebElement getSectionById(String sectionId)
{
return this.container.findElement(sectionById(sectionId));
}
public boolean hasSectionWithId(String sectionId)
{
return getDriver().hasElement(this.container, sectionById(sectionId));
}
public boolean hasSectionWithName(String categoryName, String sectionName)
{
return getDriver().hasElement(this.container, sectionByName(categoryName, sectionName));
}
public boolean hasNotSectionWithId(String sectionId)
{
return getDriver().findElementsWithoutWaiting(this.container, sectionById(sectionId)).size() == 0;
}
public boolean hasNotSectionWithName(String categoryName, String sectionName)
{
return getDriver().findElementsWithoutWaiting(this.container, sectionByName(categoryName, sectionName))
.size() == 0;
}
}
|
package org.xwiki.extension.xar.internal.handler.packager.xml;
import java.io.IOException;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.extension.xar.internal.handler.ConflictQuestion;
import org.xwiki.extension.xar.internal.handler.ConflictQuestion.GlobalAction;
import org.xwiki.extension.xar.internal.handler.packager.DefaultPackager;
import org.xwiki.extension.xar.internal.handler.packager.NotADocumentException;
import org.xwiki.extension.xar.internal.handler.packager.PackageConfiguration;
import org.xwiki.extension.xar.internal.handler.packager.XarEntry;
import org.xwiki.extension.xar.internal.handler.packager.XarEntryMergeResult;
import org.xwiki.extension.xar.internal.handler.packager.XarFile;
import org.xwiki.logging.LogLevel;
import org.xwiki.model.EntityType;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.EntityReferenceSerializer;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiAttachment;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.doc.merge.MergeResult;
/**
* @version $Id$
* @since 4.0M2
*/
public class DocumentImporterHandler extends DocumentHandler
{
/** Logging helper object. */
private static final Logger LOGGER = LoggerFactory.getLogger(DocumentImporterHandler.class);
private XarFile previousXarFile;
private DefaultPackager packager;
private XarEntryMergeResult mergeResult;
private PackageConfiguration configuration;
private EntityReferenceSerializer<String> compactWikiSerializer;
/**
* Attachments are imported before trying to merge a document, for memory handling reasons, so we need to know
* whether there really was an existing document before starting to import attachments.
*/
private Boolean hasCurrentDocument;
public DocumentImporterHandler(DefaultPackager packager, ComponentManager componentManager, String wiki)
throws ComponentLookupException
{
super(componentManager, wiki);
this.compactWikiSerializer =
getComponentManager().getInstance(EntityReferenceSerializer.TYPE_STRING, "compactwiki");
this.packager = packager;
}
public void setPreviousXarFile(XarFile previousXarFile)
{
this.previousXarFile = previousXarFile;
}
public void setConfiguration(PackageConfiguration configuration)
{
this.configuration = configuration;
}
public XarEntryMergeResult getMergeResult()
{
return this.mergeResult;
}
private String getUserString()
{
return this.compactWikiSerializer.serialize(this.configuration.getUserReference(), getDocument()
.getDocumentReference());
}
private void saveDocument(XWikiDocument document, String comment, XWikiContext context) throws Exception
{
XWikiDocument currentDocument = getDatabaseDocument();
DocumentReference userReference = this.configuration.getUserReference();
if (!currentDocument.isNew()) {
if (document != currentDocument) {
if (document.isNew()) {
currentDocument.apply(document);
currentDocument.setAuthorReference(document.getAuthorReference());
currentDocument.setContentAuthorReference(document.getContentAuthorReference());
} else {
currentDocument = document;
}
}
} else {
currentDocument = document;
if (userReference != null) {
currentDocument.setCreatorReference(userReference);
}
}
if (userReference != null) {
currentDocument.setAuthorReference(userReference);
currentDocument.setContentAuthorReference(userReference);
}
saveDocumentSetContextUser(currentDocument, comment, context);
}
private void saveDocumentSetContextUser(XWikiDocument document, String comment, XWikiContext context)
throws Exception
{
DocumentReference userReference = context.getUserReference();
try {
// Make sure the context user corresponds to the document author, for badly designed listeners expecting
// the document to actually be saved by the context user
context.setUserReference(document.getAuthorReference());
context.getWiki().saveDocument(document, comment, context);
} finally {
// Always restore the original context user and let any save exception propagate.
context.setUserReference(userReference);
}
}
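/**
* Asks, through the job status when one is available, which version of the document should be saved;
* falls back to the merged document, or to the next document when no merged document is available.
*/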
private XWikiDocument askDocumentToSave(XWikiDocument currentDocument, XWikiDocument previousDocument,
XWikiDocument nextDocument, XWikiDocument mergedDocument)
{
// Ask what to do
ConflictQuestion question =
new ConflictQuestion(currentDocument, previousDocument, nextDocument, mergedDocument);
if (mergedDocument == null) {
question.setGlobalAction(GlobalAction.NEXT);
}
if (this.configuration != null && this.configuration.getJobStatus() != null) {
try {
this.configuration.getJobStatus().ask(question);
} catch (InterruptedException e) {
// TODO: log something ?
}
}
XWikiDocument documentToSave;
switch (question.getGlobalAction()) {
case CURRENT:
documentToSave = currentDocument;
break;
case NEXT:
documentToSave = nextDocument;
break;
case PREVIOUS:
documentToSave = previousDocument;
break;
case CUSTOM:
documentToSave = question.getCustomDocument() != null ? question.getCustomDocument() : mergedDocument;
break;
default:
documentToSave = mergedDocument;
break;
}
return documentToSave;
}
private void saveDocument(String comment) throws SAXException
{
try {
XWikiContext context = getXWikiContext();
XWikiDocument currentDocument = getDatabaseDocument();
XWikiDocument nextDocument = getDocument();
if (this.configuration.isLogEnabled()) {
LOGGER.info("Importing document [{}] in language [{}]...", nextDocument.getDocumentReference(),
nextDocument.getRealLanguage());
}
// Merge and save
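            // Decision flow: if the document already exists and a previous XAR version is known, do a
            // 3-way merge (previous vs. imported, applied on the current document) and, in interactive
            // mode, ask the user when the merge reports errors; if it exists without a known previous
            // version, save the imported version (asking first in interactive mode) unless it is
            // identical to the current one; otherwise the document is new and is saved directly.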
if (currentDocument != null && this.hasCurrentDocument == Boolean.TRUE) {
XWikiDocument previousDocument = getPreviousDocument();
if (previousDocument != null) {
                    // 3-way merge
XWikiDocument mergedDocument = currentDocument.clone();
MergeResult documentMergeResult =
mergedDocument.merge(previousDocument, nextDocument,
this.configuration.getMergeConfiguration(), context);
if (documentMergeResult.isModified()) {
if (this.configuration.isInteractive()
&& !documentMergeResult.getLog().getLogs(LogLevel.ERROR).isEmpty()) {
// Indicate future author to whoever is going to answer the question
nextDocument.setCreatorReference(currentDocument.getCreatorReference());
mergedDocument.setCreatorReference(currentDocument.getCreatorReference());
DocumentReference userReference = this.configuration.getUserReference();
if (userReference != null) {
nextDocument.setAuthorReference(userReference);
nextDocument.setContentAuthorReference(userReference);
mergedDocument.setAuthorReference(userReference);
mergedDocument.setContentAuthorReference(userReference);
}
XWikiDocument documentToSave =
askDocumentToSave(currentDocument, previousDocument, nextDocument, mergedDocument);
if (documentToSave != currentDocument) {
saveDocument(documentToSave, comment, context);
}
} else {
saveDocument(mergedDocument, comment, context);
}
}
this.mergeResult =
new XarEntryMergeResult(new XarEntry(mergedDocument.getDocumentReference(),
mergedDocument.getLanguage()), documentMergeResult);
} else {
                    // Already existing document in the database but without a previous version
if (!currentDocument.equalsData(nextDocument)) {
XWikiDocument documentToSave;
if (this.configuration.isInteractive()) {
// Indicate future author to whoever is going to answer the question
nextDocument.setCreatorReference(currentDocument.getCreatorReference());
DocumentReference userReference = this.configuration.getUserReference();
nextDocument.setAuthorReference(userReference);
nextDocument.setContentAuthorReference(userReference);
documentToSave = askDocumentToSave(currentDocument, previousDocument, nextDocument, null);
} else {
documentToSave = nextDocument;
}
if (documentToSave != currentDocument) {
saveDocument(documentToSave, comment, context);
}
}
}
} else {
saveDocument(nextDocument, comment, context);
}
} catch (Exception e) {
throw new SAXException("Failed to save document", e);
}
}
private XWikiDocument getDatabaseDocument() throws ComponentLookupException, XWikiException
{
XWikiContext context = getXWikiContext();
XWikiDocument document = getDocument();
XWikiDocument existingDocument = context.getWiki().getDocument(document.getDocumentReference(), context);
if (StringUtils.isNotEmpty(document.getLanguage())) {
String defaultLanguage = existingDocument.getDefaultLanguage();
XWikiDocument translatedDocument = existingDocument.getTranslatedDocument(document.getLanguage(), context);
if (translatedDocument == existingDocument) {
translatedDocument = new XWikiDocument(document.getDocumentReference());
translatedDocument.setDefaultLanguage(defaultLanguage);
translatedDocument.setTranslation(1);
translatedDocument.setLanguage(document.getLanguage());
}
existingDocument = translatedDocument;
}
if (this.hasCurrentDocument == null) {
this.hasCurrentDocument = !existingDocument.isNew();
}
return existingDocument;
}
private XWikiDocument getPreviousDocument() throws NotADocumentException, ParserConfigurationException,
SAXException, IOException
{
XWikiDocument previousDocument = null;
if (this.previousXarFile != null) {
XWikiDocument document = getDocument();
DocumentHandler documentHandler = new DocumentHandler(getComponentManager(), document.getWikiName());
XarEntry realEntry =
this.previousXarFile.getEntry(new EntityReference(document.getName(), EntityType.DOCUMENT,
new EntityReference(document.getSpace(), EntityType.SPACE)), document.getRealLanguage());
if (realEntry != null) {
this.packager.parseDocument(this.previousXarFile.getInputStream(realEntry), documentHandler);
previousDocument = documentHandler.getDocument();
}
}
return previousDocument;
}
private void saveAttachment(XWikiAttachment attachment, String comment) throws SAXException
{
try {
XWikiContext context = getXWikiContext();
XWikiDocument document = getDocument();
// Set proper author
DocumentReference userReference = this.configuration.getUserReference();
if (userReference != null) {
document.setAuthorReference(userReference);
attachment.setAuthor(getUserString());
}
XWikiDocument dbDocument = getDatabaseDocument();
XWikiAttachment dbAttachment = dbDocument.getAttachment(attachment.getFilename());
if (dbAttachment == null) {
attachment.setDoc(dbDocument);
dbDocument.getAttachmentList().add(attachment);
} else {
dbAttachment.setContent(attachment.getContentInputStream(context));
dbAttachment.setFilename(attachment.getFilename());
dbAttachment.setAuthor(attachment.getAuthor());
}
saveDocumentSetContextUser(dbDocument, comment, context);
            // Reset the content since it could consume lots of memory and it's not used in the diff for now
attachment.setAttachment_content(null);
getDocument().getAttachmentList().add(attachment);
} catch (Exception e) {
throw new SAXException("Failed to save attachment [" + attachment + "]", e);
}
}
@Override
protected void endAttachment(String uri, String localName, String qName) throws SAXException
{
AttachmentHandler handler = (AttachmentHandler) getCurrentHandler();
saveAttachment(handler.getAttachment(), "Import: add attachment");
}
@Override
protected void endHandlerElement(String uri, String localName, String qName) throws SAXException
{
saveDocument(getDocument().getAttachmentList().isEmpty() ? "Import" : "Import: final save");
}
}
|
package org.xwiki.extension.xar.internal.handler.packager.xml;
import java.io.IOException;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.extension.xar.internal.handler.ConflictQuestion;
import org.xwiki.extension.xar.internal.handler.ConflictQuestion.GlobalAction;
import org.xwiki.extension.xar.internal.handler.packager.DefaultPackager;
import org.xwiki.extension.xar.internal.handler.packager.NotADocumentException;
import org.xwiki.extension.xar.internal.handler.packager.PackageConfiguration;
import org.xwiki.extension.xar.internal.handler.packager.XarEntry;
import org.xwiki.extension.xar.internal.handler.packager.XarEntryMergeResult;
import org.xwiki.extension.xar.internal.handler.packager.XarFile;
import org.xwiki.logging.LogLevel;
import org.xwiki.model.EntityType;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiAttachment;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.doc.merge.MergeResult;
/**
* @version $Id$
* @since 4.0M2
*/
public class DocumentImporterHandler extends DocumentHandler
{
/** Logging helper object. */
private static final Logger LOGGER = LoggerFactory.getLogger(DocumentImporterHandler.class);
private XarFile previousXarFile;
private DefaultPackager packager;
private XarEntryMergeResult mergeResult;
private PackageConfiguration configuration;
/**
     * Attachments are imported before trying to merge a document (for memory handling reasons), so we need to
     * know whether there really was an existing document before starting to import attachments.
*/
private Boolean hasCurrentDocument;
public DocumentImporterHandler(DefaultPackager packager, ComponentManager componentManager, String wiki)
{
super(componentManager, wiki);
this.packager = packager;
}
public void setPreviousXarFile(XarFile previousXarFile)
{
this.previousXarFile = previousXarFile;
}
public void setConfiguration(PackageConfiguration configuration)
{
this.configuration = configuration;
}
public XarEntryMergeResult getMergeResult()
{
return this.mergeResult;
}
private DocumentReference getUserReference(XWikiContext context)
{
DocumentReference userReference = this.configuration.getUserReference();
if (userReference == null) {
userReference = context.getUserReference();
}
return userReference;
}
private void saveDocument(XWikiDocument document, String comment, XWikiContext context) throws Exception
{
XWikiDocument currentDocument = getDatabaseDocument();
DocumentReference userReference = getUserReference(context);
if (!currentDocument.isNew()) {
if (document != currentDocument) {
if (document.isNew()) {
currentDocument.apply(document);
} else {
currentDocument = document;
}
}
} else {
currentDocument = document;
currentDocument.setCreatorReference(userReference);
}
currentDocument.setAuthorReference(userReference);
currentDocument.setContentAuthorReference(userReference);
context.getWiki().saveDocument(currentDocument, comment, context);
}
private XWikiDocument askDocumentToSave(XWikiDocument currentDocument, XWikiDocument previousDocument,
XWikiDocument nextDocument, XWikiDocument mergedDocument)
{
// Ask what to do
ConflictQuestion question =
new ConflictQuestion(currentDocument, previousDocument, nextDocument, mergedDocument);
if (mergedDocument == null) {
question.setGlobalAction(GlobalAction.NEXT);
}
if (this.configuration != null && this.configuration.getJobStatus() != null) {
try {
this.configuration.getJobStatus().ask(question);
} catch (InterruptedException e) {
// TODO: log something ?
}
}
XWikiDocument documentToSave;
switch (question.getGlobalAction()) {
case CURRENT:
documentToSave = currentDocument;
break;
case NEXT:
documentToSave = nextDocument;
break;
case PREVIOUS:
documentToSave = previousDocument;
break;
case CUSTOM:
documentToSave = question.getCustomDocument() != null ? question.getCustomDocument() : mergedDocument;
break;
default:
documentToSave = mergedDocument;
break;
}
return documentToSave;
}
private void saveDocument(String comment) throws SAXException
{
try {
XWikiContext context = getXWikiContext();
XWikiDocument currentDocument = getDatabaseDocument();
XWikiDocument nextDocument = getDocument();
if (this.configuration.isLogEnabled()) {
LOGGER.info("Importing document [{}] in language [{}]...", nextDocument.getDocumentReference(),
nextDocument.getRealLanguage());
}
// Merge and save
if (currentDocument != null && this.hasCurrentDocument == Boolean.TRUE) {
XWikiDocument previousDocument = getPreviousDocument();
if (previousDocument != null) {
                    // 3-way merge
XWikiDocument mergedDocument = currentDocument.clone();
MergeResult documentMergeResult =
mergedDocument.merge(previousDocument, nextDocument,
this.configuration.getMergeConfiguration(), context);
if (documentMergeResult.isModified()) {
if (this.configuration.isInteractive()
&& !documentMergeResult.getLog().getLogs(LogLevel.ERROR).isEmpty()) {
// Indicate future author to whoever is going to answer the question
nextDocument.setCreatorReference(currentDocument.getCreatorReference());
mergedDocument.setCreatorReference(currentDocument.getCreatorReference());
DocumentReference userReference = getUserReference(context);
nextDocument.setAuthorReference(userReference);
nextDocument.setContentAuthorReference(userReference);
mergedDocument.setAuthorReference(userReference);
mergedDocument.setContentAuthorReference(userReference);
XWikiDocument documentToSave =
askDocumentToSave(currentDocument, previousDocument, nextDocument, mergedDocument);
if (documentToSave != currentDocument) {
saveDocument(documentToSave, comment, context);
}
} else {
saveDocument(mergedDocument, comment, context);
}
}
this.mergeResult =
new XarEntryMergeResult(new XarEntry(mergedDocument.getDocumentReference(),
mergedDocument.getLanguage()), documentMergeResult);
} else {
                    // Already existing document in the database but without a previous version
if (!currentDocument.equalsData(nextDocument)) {
XWikiDocument documentToSave;
if (this.configuration.isInteractive()) {
// Indicate future author to whoever is going to answer the question
nextDocument.setCreatorReference(currentDocument.getCreatorReference());
DocumentReference userReference = getUserReference(context);
nextDocument.setAuthorReference(userReference);
nextDocument.setContentAuthorReference(userReference);
documentToSave = askDocumentToSave(currentDocument, previousDocument, nextDocument, null);
} else {
documentToSave = nextDocument;
}
if (documentToSave != currentDocument) {
saveDocument(documentToSave, comment, context);
}
}
}
} else {
saveDocument(nextDocument, comment, context);
}
} catch (Exception e) {
throw new SAXException("Failed to save document", e);
}
}
private XWikiDocument getDatabaseDocument() throws ComponentLookupException, XWikiException
{
XWikiContext context = getXWikiContext();
XWikiDocument document = getDocument();
XWikiDocument existingDocument = context.getWiki().getDocument(document.getDocumentReference(), context);
if (StringUtils.isNotEmpty(document.getLanguage())) {
String defaultLanguage = existingDocument.getDefaultLanguage();
XWikiDocument translatedDocument = existingDocument.getTranslatedDocument(document.getLanguage(), context);
if (translatedDocument == existingDocument) {
translatedDocument = new XWikiDocument(document.getDocumentReference());
translatedDocument.setDefaultLanguage(defaultLanguage);
translatedDocument.setTranslation(1);
translatedDocument.setLanguage(document.getLanguage());
}
existingDocument = translatedDocument;
}
if (this.hasCurrentDocument == null) {
this.hasCurrentDocument = !existingDocument.isNew();
}
return existingDocument;
}
private XWikiDocument getPreviousDocument() throws NotADocumentException, ParserConfigurationException,
SAXException, IOException
{
XWikiDocument previousDocument = null;
if (this.previousXarFile != null) {
XWikiDocument document = getDocument();
DocumentHandler documentHandler = new DocumentHandler(getComponentManager(), document.getWikiName());
XarEntry realEntry =
this.previousXarFile.getEntry(new EntityReference(document.getName(), EntityType.DOCUMENT,
new EntityReference(document.getSpace(), EntityType.SPACE)), document.getRealLanguage());
if (realEntry != null) {
this.packager.parseDocument(this.previousXarFile.getInputStream(realEntry), documentHandler);
previousDocument = documentHandler.getDocument();
}
}
return previousDocument;
}
private void saveAttachment(XWikiAttachment attachment, String comment) throws SAXException
{
try {
XWikiContext context = getXWikiContext();
// Set proper author
// TODO: add a setAuthorReference in XWikiAttachment
XWikiDocument document = getDocument();
document.setAuthorReference(context.getUserReference());
attachment.setAuthor(document.getAuthor());
XWikiDocument dbDocument = getDatabaseDocument();
XWikiAttachment dbAttachment = dbDocument.getAttachment(attachment.getFilename());
if (dbAttachment == null) {
dbDocument.getAttachmentList().add(attachment);
} else {
dbAttachment.setContent(attachment.getContentInputStream(context));
dbAttachment.setFilename(attachment.getFilename());
dbAttachment.setAuthor(attachment.getAuthor());
}
context.getWiki().saveDocument(dbDocument, comment, context);
            // Reset the content since it could consume lots of memory and it's not used in the diff for now
attachment.setAttachment_content(null);
getDocument().getAttachmentList().add(attachment);
} catch (Exception e) {
throw new SAXException("Failed to save attachment [" + attachment + "]", e);
}
}
@Override
protected void endAttachment(String uri, String localName, String qName) throws SAXException
{
AttachmentHandler handler = (AttachmentHandler) getCurrentHandler();
saveAttachment(handler.getAttachment(), "Import: add attachment");
}
@Override
protected void endHandlerElement(String uri, String localName, String qName) throws SAXException
{
saveDocument(getDocument().getAttachmentList().isEmpty() ? "Import" : "Import: final save");
}
}
|
package org.xwiki.localization.wiki.internal;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Provider;
import javax.inject.Singleton;
import org.apache.commons.lang3.EnumUtils;
import org.slf4j.Logger;
import org.xwiki.cache.Cache;
import org.xwiki.cache.CacheException;
import org.xwiki.cache.CacheManager;
import org.xwiki.cache.config.CacheConfiguration;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.descriptor.ComponentDescriptor;
import org.xwiki.component.descriptor.ComponentInstantiationStrategy;
import org.xwiki.component.descriptor.DefaultComponentDescriptor;
import org.xwiki.component.internal.multi.ComponentManagerManager;
import org.xwiki.component.manager.ComponentLifecycleException;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.component.manager.ComponentRepositoryException;
import org.xwiki.component.phase.Disposable;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.localization.TranslationBundle;
import org.xwiki.localization.TranslationBundleContext;
import org.xwiki.localization.TranslationBundleDoesNotExistsException;
import org.xwiki.localization.TranslationBundleFactory;
import org.xwiki.localization.message.TranslationMessageParser;
import org.xwiki.localization.wiki.internal.TranslationDocumentModel.Scope;
import org.xwiki.model.EntityType;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.DocumentReferenceResolver;
import org.xwiki.model.reference.EntityReferenceSerializer;
import org.xwiki.model.reference.RegexEntityReference;
import org.xwiki.model.reference.WikiReference;
import org.xwiki.observation.EventListener;
import org.xwiki.observation.ObservationManager;
import org.xwiki.observation.event.Event;
import org.xwiki.query.Query;
import org.xwiki.query.QueryManager;
import org.xwiki.security.authorization.AccessDeniedException;
import org.xwiki.security.authorization.AuthorizationManager;
import org.xwiki.security.authorization.Right;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.internal.event.XObjectAddedEvent;
import com.xpn.xwiki.internal.event.XObjectDeletedEvent;
import com.xpn.xwiki.internal.event.XObjectUpdatedEvent;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.objects.StringProperty;
/**
 * Generate and manage wiki-document-based translation bundles.
*
* @version $Id$
* @since 4.3M2
*/
@Component
@Named("document")
@Singleton
public class DocumentTranslationBundleFactory implements TranslationBundleFactory, Initializable, Disposable
{
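    // TRANSLATIONOBJET matches any XObject of the translation class in any wiki ("[^:]+:" matches the
    // wiki prefix), so the EVENTS below fire whenever such an object is added, updated or deleted.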
private static final RegexEntityReference TRANSLATIONOBJET = new RegexEntityReference(Pattern.compile("[^:]+:"
+ TranslationDocumentModel.TRANSLATIONCLASS_REFERENCE_STRING + "\\[\\d*\\]"), EntityType.OBJECT);
private static final List<Event> EVENTS = Arrays.<Event> asList(new XObjectAddedEvent(TRANSLATIONOBJET),
new XObjectUpdatedEvent(TRANSLATIONOBJET), new XObjectDeletedEvent(TRANSLATIONOBJET));
@Inject
@Named("context")
private Provider<ComponentManager> componentManagerProvider;
@Inject
@Named("uid")
private EntityReferenceSerializer<String> uidSerializer;
@Inject
private EntityReferenceSerializer<String> serializer;
@Inject
@Named("current")
private DocumentReferenceResolver<String> currentResolver;
@Inject
private CacheManager cacheManager;
@Inject
private ObservationManager observation;
@Inject
private Provider<XWikiContext> xcontextProvider;
@Inject
@Named("messagetool/1.0")
private TranslationMessageParser translationParser;
@Inject
private ComponentManagerManager cmManager;
@Inject
private Logger logger;
@Inject
private QueryManager queryManager;
@Inject
private AuthorizationManager authorizationManager;
/**
* Used to access the current bundles.
*/
@Inject
private TranslationBundleContext bundleContext;
private Cache<TranslationBundle> bundlesCache;
private EventListener listener = new EventListener()
{
@Override
public void onEvent(Event event, Object arg1, Object arg2)
{
XWikiDocument document = (XWikiDocument) arg1;
if (event instanceof XObjectAddedEvent) {
translationObjectAdded(document);
} else if (event instanceof XObjectDeletedEvent) {
translationObjectDeleted(document);
} else {
translationObjectUpdated(document);
}
}
@Override
public String getName()
{
return "localization.bundle.document";
}
@Override
public List<Event> getEvents()
{
return EVENTS;
}
};
@Override
public void initialize() throws InitializationException
{
// Cache
CacheConfiguration cacheConfiguration = new CacheConfiguration("localization.bundle.document");
try {
this.bundlesCache = this.cacheManager.createNewCache(cacheConfiguration);
} catch (CacheException e) {
this.logger.error("Failed to create cache [{}]", cacheConfiguration.getConfigurationId(), e);
}
// Load existing translations
XWikiContext xcontext = this.xcontextProvider.get();
Set<String> wikis;
try {
wikis = new HashSet<String>(xcontext.getWiki().getVirtualWikisDatabaseNames(xcontext));
} catch (XWikiException e) {
this.logger.error("Failed to list existing wikis", e);
wikis = new HashSet<String>();
}
if (!wikis.contains(xcontext.getMainXWiki())) {
wikis.add(xcontext.getMainXWiki());
}
for (String wiki : wikis) {
loadTranslations(wiki, xcontext);
}
// Listener
this.observation.addListener(this.listener);
}
private void loadTranslations(String wiki, XWikiContext xcontext)
{
try {
Query query =
this.queryManager.createQuery(String.format(
"select distinct doc.space, doc.name from Document doc, doc.object(%s) as translation",
TranslationDocumentModel.TRANSLATIONCLASS_REFERENCE_STRING), Query.XWQL);
query.setWiki(wiki);
List<Object[]> documents = query.execute();
for (Object[] documentName : documents) {
DocumentReference reference =
new DocumentReference(wiki, (String) documentName[0], (String) documentName[1]);
XWikiDocument document = xcontext.getWiki().getDocument(reference, xcontext);
registerTranslationBundle(document);
}
} catch (Exception e) {
this.logger.error("Failed to load eexisting translations", e);
}
}
@Override
public TranslationBundle getBundle(String bundleId) throws TranslationBundleDoesNotExistsException
{
String id = AbstractDocumentTranslationBundle.ID_PREFIX + bundleId;
if (this.componentManagerProvider.get().hasComponent(TranslationBundle.class, id)) {
try {
return this.componentManagerProvider.get().getInstance(TranslationBundle.class, id);
} catch (ComponentLookupException e) {
this.logger.debug("Failed to lookup component [{}] with hint [{}].", TranslationBundle.class, bundleId,
e);
}
}
return getDocumentBundle(this.currentResolver.resolve(bundleId));
}
private TranslationBundle getDocumentBundle(DocumentReference documentReference)
throws TranslationBundleDoesNotExistsException
{
String uid = this.uidSerializer.serialize(documentReference);
TranslationBundle bundle = this.bundlesCache.get(uid);
if (bundle == null) {
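            // Double-checked locking: re-check after acquiring the lock so that concurrent callers do not
            // create and cache the same bundle twice.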
synchronized (this.bundlesCache) {
bundle = this.bundlesCache.get(uid);
if (bundle == null) {
bundle = createDocumentBundle(documentReference);
this.bundlesCache.set(uid, bundle);
}
}
}
return bundle;
}
private DefaultDocumentTranslationBundle createDocumentBundle(DocumentReference documentReference)
throws TranslationBundleDoesNotExistsException
{
XWikiContext context = this.xcontextProvider.get();
XWikiDocument document;
try {
document = context.getWiki().getDocument(documentReference, context);
} catch (XWikiException e) {
throw new TranslationBundleDoesNotExistsException("Failed to get translation document", e);
}
if (document.isNew()) {
            throw new TranslationBundleDoesNotExistsException(String.format("Document [%s] does not exist",
documentReference));
}
return createDocumentBundle(document);
}
private DefaultDocumentTranslationBundle createDocumentBundle(XWikiDocument document)
throws TranslationBundleDoesNotExistsException
{
BaseObject translationObject = document.getXObject(TranslationDocumentModel.TRANSLATIONCLASS_REFERENCE);
if (translationObject == null) {
throw new TranslationBundleDoesNotExistsException(String.format("[%s] is not a translation document",
document));
}
DefaultDocumentTranslationBundle documentBundle;
try {
documentBundle =
new DefaultDocumentTranslationBundle(document.getDocumentReference(),
this.componentManagerProvider.get(), this.translationParser);
} catch (ComponentLookupException e) {
throw new TranslationBundleDoesNotExistsException("Failed to create document bundle", e);
}
return documentBundle;
}
/**
* @param document the translation document
*/
private void translationObjectUpdated(XWikiDocument document)
{
unregisterTranslationBundle(document.getOriginalDocument());
try {
registerTranslationBundle(document);
} catch (Exception e) {
this.logger.error("Failed to register translation bundle from document [{}]",
document.getDocumentReference(), e);
}
}
/**
* @param document the translation document
*/
private void translationObjectDeleted(XWikiDocument document)
{
unregisterTranslationBundle(document.getOriginalDocument());
}
/**
* @param document the translation document
*/
private void translationObjectAdded(XWikiDocument document)
{
try {
registerTranslationBundle(document);
} catch (Exception e) {
this.logger.error("Failed to register translation bundle from document [{}]",
document.getDocumentReference(), e);
}
}
/**
* @param obj the translation object
     * @return the {@link Scope} stored in the object, or null if not assigned or unknown
*/
private Scope getScope(BaseObject obj)
{
if (obj != null) {
StringProperty scopeProperty =
(StringProperty) obj.getField(TranslationDocumentModel.TRANSLATIONCLASS_PROP_SCOPE);
if (scopeProperty != null) {
String scopeString = scopeProperty.getValue();
return EnumUtils.getEnum(Scope.class, scopeString.toUpperCase());
}
}
return null;
}
/**
* @param document the translation document
*/
private void unregisterTranslationBundle(XWikiDocument document)
{
Scope scope = getScope(document.getXObject(TranslationDocumentModel.TRANSLATIONCLASS_REFERENCE));
// Unregister component
if (scope != null && scope != Scope.ON_DEMAND) {
ComponentDescriptor<TranslationBundle> descriptor =
createComponentDescriptor(document.getDocumentReference());
getComponentManager(document, scope, true).unregisterComponent(descriptor);
}
// Remove from cache
this.bundlesCache.remove(this.uidSerializer.serialize(document.getDocumentReference()));
}
/**
* @param document the translation document
* @throws TranslationBundleDoesNotExistsException when no translation bundle could be created from the provided
* document
* @throws ComponentRepositoryException when the actual registration of the document bundle failed
     * @throws AccessDeniedException when the document author does not have enough rights to register the translation
* bundle
*/
private void registerTranslationBundle(XWikiDocument document) throws TranslationBundleDoesNotExistsException,
ComponentRepositoryException, AccessDeniedException
{
Scope scope = getScope(document.getXObject(TranslationDocumentModel.TRANSLATIONCLASS_REFERENCE));
if (scope != null && scope != Scope.ON_DEMAND) {
checkRegistrationAuthorization(document, scope);
DefaultDocumentTranslationBundle bundle = createDocumentBundle(document);
ComponentDescriptor<TranslationBundle> descriptor =
createComponentDescriptor(document.getDocumentReference());
getComponentManager(document, scope, true).registerComponent(descriptor, bundle);
this.bundleContext.addBundle(bundle);
}
}
/**
* @param document the translation document
* @param scope the scope
     * @throws AccessDeniedException thrown when the document author does not have enough rights for the provided
* {@link Scope}
*/
private void checkRegistrationAuthorization(XWikiDocument document, Scope scope) throws AccessDeniedException
{
switch (scope) {
case GLOBAL:
this.authorizationManager.checkAccess(Right.PROGRAM, document.getAuthorReference(), new WikiReference(
this.xcontextProvider.get().getMainXWiki()));
break;
case WIKI:
this.authorizationManager.checkAccess(Right.ADMIN, document.getAuthorReference(), document
.getDocumentReference().getWikiReference());
break;
default:
break;
}
}
/**
* @param documentReference the translation document reference
* @return the component descriptor to use to register/unregister the translation bundle
*/
private ComponentDescriptor<TranslationBundle> createComponentDescriptor(DocumentReference documentReference)
{
DefaultComponentDescriptor<TranslationBundle> descriptor = new DefaultComponentDescriptor<TranslationBundle>();
descriptor.setImplementation(DefaultDocumentTranslationBundle.class);
descriptor.setInstantiationStrategy(ComponentInstantiationStrategy.SINGLETON);
descriptor.setRoleHint(AbstractDocumentTranslationBundle.ID_PREFIX
+ this.serializer.serialize(documentReference));
descriptor.setRoleType(TranslationBundle.class);
return descriptor;
}
/**
* Get the right component manager based on the scope.
*
* @param document the translation document
* @param scope the translation scope
     * @param create true if the component manager should be created if it does not exist
* @return the component manager corresponding to the provided {@link Scope}
*/
private ComponentManager getComponentManager(XWikiDocument document, Scope scope, boolean create)
{
String hint;
switch (scope) {
case WIKI:
hint = "wiki:" + document.getDocumentReference().getWikiReference().getName();
break;
case USER:
hint = "user:" + this.serializer.serialize(document.getAuthorReference());
break;
default:
hint = null;
break;
}
return this.cmManager.getComponentManager(hint, create);
}
@Override
public void dispose() throws ComponentLifecycleException
{
this.observation.removeListener(this.listener.getName());
}
}
|
package com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragmentFactory;
import android.app.Fragment;
import com.bitdubai.fermat_android_api.layer.definition.wallet.exceptions.FragmentNotFoundException;
import com.bitdubai.fermat_android_api.layer.definition.wallet.interfaces.WalletSession;
import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_settings.interfaces.WalletSettings;
import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_settings.interfaces.WalletSettingsManager;
import com.bitdubai.fermat_api.layer.dmp_network_service.wallet_resources.WalletResourcesProviderManager;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.BalanceFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.BlankFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.ContactDetailFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.ContactsFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.CreateContactFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.MoneyRequestFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.ReceiveFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.SendFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.TransactionsBookFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.TransactionsFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.wallet_v2.HomeFragment;
import com.bitdubai.reference_niche_wallet.bitcoin_wallet.session.ReferenceWalletSession;
public class ReferenceWalletFragmentFactory implements com.bitdubai.fermat_android_api.layer.definition.wallet.interfaces.WalletFragmentFactory {
/**
     * Create a new Fragment based on the fragment type.
     *
     * @param code code identifying the fragment to create (from the fragments enum)
     * @param walletSession reference to the wallet session
     * @param walletResourcesProviderManager provider of the wallet resources
     * @return the created Fragment
     * @throws FragmentNotFoundException if no fragment matches the given code
*/
@Override
public Fragment getFragment(String code,WalletSession walletSession,WalletResourcesProviderManager walletResourcesProviderManager) throws FragmentNotFoundException {
Fragment currentFragment = null;
try {
ReferenceWalletSession refereceWalletSession = (ReferenceWalletSession) walletSession;
ReferenceFragmentsEnumType fragment = ReferenceFragmentsEnumType.getValue(code);
switch (fragment) {
/**
* Executing fragments for BITCOIN REQUESTED.
*/
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_BALANCE:
currentFragment = /*BlankFragment.newInstance(null,null);*/BalanceFragment.newInstance(0, refereceWalletSession, walletResourcesProviderManager);
//currentFragment = HomeFragment.newInstance(0,refereceWalletSession);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_RECEIVE:
currentFragment = com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.wallet_v2.ReceiveFragment.newInstance(0);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_SEND:
currentFragment = com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.wallet_v2.SendFragment.newInstance(0,refereceWalletSession);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_TRANSACTIONS:
currentFragment = TransactionsFragment.newInstance(0, refereceWalletSession, walletResourcesProviderManager);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_CONTACTS:
currentFragment = ContactsFragment.newInstance(refereceWalletSession, walletResourcesProviderManager);
//currentFragment = com.bitdubai.reference_niche_wallet.bitcoin_wallet.fragments.wallet_v2.ContactsFragment.newInstance();
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_CREATE_CONTACTS:
currentFragment = CreateContactFragment.newInstance(0, refereceWalletSession, walletResourcesProviderManager);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_DETAIL_CONTACTS:
currentFragment = ContactDetailFragment.newInstance(refereceWalletSession, walletResourcesProviderManager);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_MONEY_REQUEST:
currentFragment = MoneyRequestFragment.newInstance(0, null, refereceWalletSession, walletResourcesProviderManager);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_TRANSACTIONS_BOOK:
currentFragment = TransactionsBookFragment.newInstance(0, refereceWalletSession, walletResourcesProviderManager, 0);
break;
case CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_TRANSACTIONS_AVAILABLE:
currentFragment = TransactionsBookFragment.newInstance(0, refereceWalletSession, walletResourcesProviderManager, 1);
break;
default:
                    throw new FragmentNotFoundException("Fragment not found", new Exception(), code, "Switch failed");
}
}catch (Exception e){
e.printStackTrace();
}
return currentFragment;
}
}
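// Hypothetical usage sketch (not part of the original sources); it assumes ReferenceFragmentsEnumType
// exposes a getCode() accessor matching the getValue(code) call above, and that walletSession and
// walletResourcesProviderManager are supplied by the surrounding runtime:
//
//   ReferenceWalletFragmentFactory factory = new ReferenceWalletFragmentFactory();
//   Fragment balance = factory.getFragment(
//       ReferenceFragmentsEnumType.CWP_WALLET_RUNTIME_WALLET_BITCOIN_ALL_BITDUBAI_BALANCE.getCode(),
//       walletSession, walletResourcesProviderManager);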
|
package com.bitdubai.fermat_dmp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1;
import com.bitdubai.fermat_api.Plugin;
import com.bitdubai.fermat_api.Service;
import com.bitdubai.fermat_api.layer.all_definition.enums.ReferenceWallet;
import com.bitdubai.fermat_api.layer.all_definition.enums.WalletCategory;
import com.bitdubai.fermat_api.layer.all_definition.money.CryptoAddress;
import com.bitdubai.fermat_api.layer.dmp_network_service.NetworkService;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.enums.AddressExchangeState;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.exceptions.CantAcceptAddressExchangeException;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.exceptions.CantGetCryptoAddessException;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.exceptions.CantGetCurrentStateException;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.exceptions.CantGetPendingContactRequestsListException;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.exceptions.CantRegisterCompatibleListException;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.exceptions.CantRejectAddressExchangeException;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.exceptions.CantStartAddressExchangeException;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.interfaces.CryptoAddressesManager;
import com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.interfaces.PendingContactRequest;
import com.bitdubai.fermat_api.layer.dmp_network_service.crypto_addressees.interfaces.RequestHandlerWallet;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.DealsWithErrors;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.DealsWithEvents;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventHandler;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventListener;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventManager;
import com.bitdubai.fermat_api.layer.osa_android.file_system.DealsWithPluginFileSystem;
import com.bitdubai.fermat_api.layer.osa_android.file_system.PluginFileSystem;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
public class CryptoAddressesNetworkServicePluginRoot implements Service, NetworkService, CryptoAddressesManager,DealsWithEvents, DealsWithErrors, DealsWithPluginFileSystem, Plugin {
/**
* Service Interface member variables.
*/
ServiceStatus serviceStatus = ServiceStatus.CREATED;
List<EventListener> listenersAdded = new ArrayList<>();
/**
* DealWithEvents Interface member variables.
*/
EventManager eventManager;
/**
* UsesFileSystem Interface member variables.
*/
PluginFileSystem pluginFileSystem;
/**
* DealsWithPluginIdentity Interface member variables.
*/
UUID pluginId;
/**
* Service Interface implementation.
*/
@Override
public void start() {
/**
* I will initialize the handling of com.bitdubai.platform events.
*/
EventListener eventListener;
EventHandler eventHandler;
this.serviceStatus = ServiceStatus.STARTED;
}
@Override
public void pause() {
this.serviceStatus = ServiceStatus.PAUSED;
}
@Override
public void resume() {
this.serviceStatus = ServiceStatus.STARTED;
}
@Override
public void stop() {
/**
* I will remove all the event listeners registered with the event manager.
*/
for (EventListener eventListener : listenersAdded) {
eventManager.removeListener(eventListener);
}
listenersAdded.clear();
this.serviceStatus = ServiceStatus.STOPPED;
}
@Override
public ServiceStatus getStatus() {
return this.serviceStatus;
}
/**
* NetworkService Interface implementation.
*/
@Override
public UUID getId() {
return null;
}
/**
* UsesFileSystem Interface implementation.
*/
@Override
public void setPluginFileSystem(PluginFileSystem pluginFileSystem) {
this.pluginFileSystem = pluginFileSystem;
}
/**
* DealWithEvents Interface implementation.
*/
@Override
public void setEventManager(EventManager eventManager) {
this.eventManager = eventManager;
}
/**
     * DealsWithErrors Interface implementation.
*/
@Override
public void setErrorManager(ErrorManager errorManager) {
}
/**
* DealsWithPluginIdentity methods implementation.
*/
@Override
public void setId(UUID pluginId) {
this.pluginId = pluginId;
}
/*
* CryptoAddressesManager interface method implementation
*/
@Override
public void exchangeAddressesAndAddContact(String walletPublicKey, ReferenceWallet referenceWallet, CryptoAddress cryptoAddressSent, String intraUserToContactPublicKey, String intraUserAskingAddressPublicKey, String intraUserAskingAddressName, byte[] intraUserAskingAddressProfileImage) throws CantStartAddressExchangeException {
}
@Override
public void exchangeAddresses(String walletPublicKey, ReferenceWallet referenceWallet, CryptoAddress cryptoAddressSent, String intraUserToContactPublicKey, String intraUserAskingAddressName) {
}
@Override
public void acceptAddressExchange(UUID exchangeId, String walletAcceptingTheRequestPublicKey, ReferenceWallet referenceWallet, CryptoAddress cryptoAddressSent, String intraUserAcceptingTheRequestPublicKey, String intraUserToInformAcceptancePublicKey) throws CantAcceptAddressExchangeException {
}
@Override
public void rejectAddressExchange(UUID exchangeId, String walletThatAskedTheExchangePublicKey, String intraUserThatSentTheRequestPublicKey, String intraUserRejectingTheRequest) throws CantRejectAddressExchangeException {
}
@Override
public void setCompatibleWallets(UUID requestId, List<RequestHandlerWallet> compatibleWallets) throws CantRegisterCompatibleListException {
}
@Override
public List<PendingContactRequest> getPendingRequests(String intraUserLoggedInPublicKey) throws CantGetPendingContactRequestsListException {
return null;
}
@Override
public AddressExchangeState getCurrentExchangeState(String walletPublicKey, String intraUserAskingAddressPublicKey, String intraUserToContactPublicKey) throws CantGetCurrentStateException {
return null;
}
@Override
public CryptoAddress getReceivedAddress(String walletPublicKey, String intraUserAskingAddressPublicKey, String intraUserToContactPublicKey) throws CantGetCryptoAddessException {
return null;
}
}
|
package mc3kit.types.partition;
import mc3kit.*;
import mc3kit.model.Distribution;
import mc3kit.model.Model;
import mc3kit.model.ModelNode;
import mc3kit.model.Variable;
import mc3kit.step.univariate.VariableProposer;
import mc3kit.util.*;
import java.util.*;
import com.google.gson.*;
import cern.jet.random.Uniform;
public class PartitionVariable extends Variable {
int n;
int k;
private boolean allowsEmptyGroups;
boolean useGibbs;
int[] assignment;
IterableBitSet[] groups;
List<IndexAssociator> indexAssociators;
List<Association> associations;
protected PartitionVariable() {
}
public PartitionVariable(Model model, String name, int n, int k)
throws MC3KitException {
this(model, name, n, k, false);
}
public PartitionVariable(Model model, String name, int n, int k,
boolean allowsEmptyGroups) throws MC3KitException {
this(model, name, n, k, allowsEmptyGroups, false);
}
public PartitionVariable(Model model, String name, int n, int k,
boolean allowsEmptyGroups, boolean useGibbs) throws MC3KitException {
super(model, name, false);
this.n = n;
this.k = k;
this.setAllowsEmptyGroups(allowsEmptyGroups);
this.useGibbs = useGibbs;
assignment = new int[n];
groups = new IterableBitSet[k];
for(int i = 0; i < k; i++)
groups[i] = new IterableBitSet(n);
indexAssociators = new ArrayList<IndexAssociator>();
associations = new ArrayList<Association>();
}
public boolean allowsEmptyGroups() {
return allowsEmptyGroups;
}
public void setAllowsEmptyGroups(boolean allowsEmptyGroups) {
this.allowsEmptyGroups = allowsEmptyGroups;
}
public void associate(IndexAssociator associator) {
indexAssociators.add(associator);
}
public void associate(ModelNode[] tails, ModelNode[] heads,
Associator associator) {
if(tails.length != n)
throw new IllegalArgumentException("Wrong number of tails");
if(heads.length != k)
throw new IllegalArgumentException("Wrong number of heads");
Association association = new Association(tails, heads, associator);
associations.add(association);
}
public void associateVariablesWithDistributions(ModelNode[] vars,
ModelNode[] dists) {
associate(vars, dists, new DistributionAssociator());
}
public IterableBitSet getGroup(int g) {
return groups[g];
}
public int getGroupId(int i) {
return assignment[i];
}
public void setGroups(int[] gs) throws MC3KitException {
assert gs.length == n;
// Reset group bitsets
for(int i = 0; i < k; i++) {
groups[i].clear();
}
for(int i = 0; i < gs.length; i++) {
int gi = gs[i];
assert gi < k;
assignment[i] = gi;
groups[gi].set(i);
}
// Call index associators
for(IndexAssociator asr : indexAssociators) {
for(int i = 0; i < n; i++) {
asr.associate(i, assignment[i]);
}
}
// Associate vars and priors
for(Association asn : associations) {
for(int i = 0; i < n; i++) {
asn.setGroup(i, assignment[i]);
}
}
setChanged();
notifyObservers();
}
public void setGroup(int i, int g) throws MC3KitException {
groups[assignment[i]].clear(i);
groups[g].set(i);
assignment[i] = g;
for(IndexAssociator asr : indexAssociators) {
asr.associate(i, g);
}
for(Association asn : associations) {
asn.setGroup(i, g);
}
setChanged();
notifyObservers();
}
@Override
public void sample() throws MC3KitException {
Distribution dist = getDistribution();
if(dist == null) {
Uniform unif = new Uniform(getRng());
// Choose group numbers uniformly randomly
// conditioned on all groups having at least one member
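      // Rejection sampling: draw all n assignments uniformly at random and retry until every one of the
      // k groups is non-empty.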
boolean done;
int[] gs = new int[n];
do {
int[] groupCounts = new int[k];
// Just choose group number uniformly randomly
for(int i = 0; i < n; i++) {
gs[i] = unif.nextIntFromTo(0, k - 1);
groupCounts[gs[i]]++;
}
done = true;
for(int g = 0; g < k; g++) {
if(groupCounts[g] == 0) {
done = false;
break;
}
}
} while(!done);
setGroups(gs);
}
else {
getDistribution().sample(this);
}
}
@Override
public boolean canManipulateGraph() {
return associations.size() > 0 || indexAssociators.size() > 0;
}
public int getElementCount() {
return n;
}
public int getGroupCount() {
return k;
}
public int getGroupSize(int g) {
return groups[g].cardinality();
}
public int getGroupSizeForItem(int i) {
return groups[assignment[i]].cardinality();
}
private class Association {
ModelNode[] tails;
ModelNode[] heads;
Associator associator;
public Association(ModelNode[] tails, ModelNode[] heads,
Associator associator) {
this.tails = tails;
this.heads = heads;
this.associator = associator;
}
public void setGroup(int i, int g) throws MC3KitException {
associator.associate(tails[i], heads[g]);
}
}
private class DistributionAssociator implements Associator {
@Override
public void associate(ModelNode tail, ModelNode head)
throws MC3KitException {
((Variable) tail).setDistribution((Distribution) head);
}
}
@Override
public VariableProposer makeProposer() {
return new PartitionProposer(getName());
}
@Override
public Object makeOutputObject() {
if(k == 1) {
return null;
}
return assignment.clone();
}
@Override
public String makeOutputString() {
return new Gson().toJson(assignment);
}
@Override
public Object toDbValue() {
return getGson().toJson(assignment);
}
@Override
public void loadFromDbValue(Object value) throws MC3KitException {
setGroups(getGson().fromJson((String) value, int[].class));
}
}
|
package me.kaes3kuch3n.lwjgltest.entities;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.util.vector.Vector3f;
public class Camera {
private float distanceFromPlayer = 50;
private float angleAroundPlayer = 0;
private Vector3f position = new Vector3f(100, 35, 50);
private float pitch = 10;
private float yaw = 0;
private float roll = 0;
private Player player;
private static final float Y_CORRECTION_FACTOR = 4;
public Camera(Player player) {
this.player = player;
}
public void move() {
calculateZoom();
calculatePitch();
calculateAngleAroundPlayer();
float horizontalDistance = calculateHorizontalDistance();
float verticalDistance = calculateVerticalDistance();
calculateCameraPosition(horizontalDistance, verticalDistance);
this.yaw = 180 - (player.getRotY() + angleAroundPlayer);
if(Keyboard.isKeyDown(Keyboard.KEY_T)) System.out.println(angleAroundPlayer);
}
public Vector3f getPosition() {
return position;
}
public float getPitch() {
return pitch;
}
public float getYaw() {
return yaw;
}
public float getRoll() {
return roll;
}
private void calculateCameraPosition(float horizDistance, float verticDistance) {
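        // The camera orbits the player: theta is the absolute horizontal angle (player rotation plus the
        // user-controlled offset), and the horizontal distance is projected onto the X/Z axes with
        // sin/cos to get the camera offset from the player's position.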
float theta = player.getRotY() + angleAroundPlayer;
float offsetX = (float) (horizDistance * Math.sin(Math.toRadians(theta)));
float offsetZ = (float) (horizDistance * Math.cos(Math.toRadians(theta)));
position.x = player.getPosition().x - offsetX;
position.z = player.getPosition().z - offsetZ;
position.y = player.getPosition().y + Y_CORRECTION_FACTOR + verticDistance;
}
private float calculateHorizontalDistance() {
return (float) (distanceFromPlayer * Math.cos(Math.toRadians(pitch)));
}
private float calculateVerticalDistance() {
return (float) (distanceFromPlayer * Math.sin(Math.toRadians(pitch)));
}
private void calculateZoom() {
float zoomLevel = Mouse.getDWheel() * 0.1f;
distanceFromPlayer -= zoomLevel;
if(distanceFromPlayer < 15) {
distanceFromPlayer = 15;
} else if(distanceFromPlayer > 150) {
distanceFromPlayer = 150;
}
}
private void calculatePitch() {
if(Mouse.isButtonDown(1)) {
float pitchChange = Mouse.getDY() * 0.1f;
pitch -= pitchChange;
if(pitch < 0) {
pitch = 0;
} else if(pitch > 60) {
pitch = 60;
}
}
}
private void calculateAngleAroundPlayer() {
if(Mouse.isButtonDown(1)) {
float angleChanger = Mouse.getDX() * 0.3f;
angleAroundPlayer -= angleChanger;
if(angleAroundPlayer >= 360) {
angleAroundPlayer -= 360;
} else if(angleAroundPlayer <= 0) {
angleAroundPlayer += 360;
}
}
}
}
|
package microTiPi.epifluorescence;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.jtransforms.fft.DoubleFFT_2D;
import org.jtransforms.fft.FloatFFT_2D;
import microTiPi.microUtils.Zernike;
import microTiPi.microscopy.MicroscopeModel;
import mitiv.array.Array3D;
import mitiv.array.Array4D;
import mitiv.array.Double1D;
import mitiv.array.Double2D;
import mitiv.array.Double3D;
import mitiv.array.Double4D;
import mitiv.array.Float2D;
import mitiv.array.Float3D;
import mitiv.array.Float4D;
import mitiv.base.Shape;
import mitiv.linalg.shaped.DoubleShapedVector;
import mitiv.linalg.shaped.DoubleShapedVectorSpace;
import mitiv.linalg.shaped.ShapedVector;
import mitiv.old.MathUtils;
public class WideFieldModel extends MicroscopeModel{
protected double deltaX=0; // position in X of the center of the defocus function inside the pupil
    protected double deltaY=0; // position in Y of the center of the defocus function inside the pupil
protected int Nzern; // number of Zernike modes
protected boolean radial=false; // when true, the PSF is radially symmetric
protected double lambda; // the emission wavelength in meters
protected double NA; // the numerical aperture
protected double ni; // the refractive index of the immersion medium
protected double lambda_ni; // (ni / \lambda)
protected double radius; // radius of the pupil in meter^-1
protected double pupil_area; // area of the pupil
protected double[] Z; // Zernike polynomials basis
    protected boolean[] maskPupil; // positions in the pupil plane where the pupil is non-null, including vignetting
    protected boolean[] mapPupil; // positions in the pupil plane where the pupil is non-null
protected double[] rho; // pupil modulus based on Zernike polynomials
protected double[] phi; // pupil phase based on Zernike polynomials
protected double[] psi; // defocus function
protected Array4D cpxPsf; // Fourier transform of the pupil function
protected Shape cpxPsfShape;
protected Shape aShape;
protected Shape psf2DShape;
// protected Object FFT2D;
protected int nModulus;
protected int nDefocus;
protected int nPhase;
private boolean para=true;
public WideFieldModel(Shape psfShape, double NA, double lambda, double ni, double dxy, double dz, boolean radial, boolean single){
this( psfShape,0, 1, NA, lambda, ni, dxy, dz, radial, single) ;
}
public WideFieldModel(Shape psfShape,int nPhase, int nModulus,
double NA, double lambda, double ni, double dxy, double dz, boolean radial, boolean single) {
super(psfShape, dxy, dz, single);
if(Nx != Ny){
throw new IllegalArgumentException("Nx should equal Ny");
}
this.lambda = lambda;
this.ni = ni;
this.Nzern = 4;
this.NA = NA;
this.radius = NA/lambda;
this.lambda_ni = ni/lambda;
this.phi = new double[Ny*Nx];
this.psi = new double[Ny*Nx];
this.radial = radial;
cpxPsfShape = new Shape(2,Nx, Ny, Nz);
aShape = new Shape(2,Nx, Ny);
psf2DShape = new Shape(Nx, Ny);
computeMaskPupil();
this.nModulus = nModulus;
if(this.nModulus<1){
this.nModulus = 1;
}
this.nPhase= nPhase;
setNModulus();
setNPhase();
setDefocus();
}
/**
* Compute the Zernike basis Z.
*/
protected void computeZernike(){
Z = Zernike.zernikeArray(Nzern, Nx, Ny, radius*dxy*Nx, NORMALIZED,radial);
Z = MathUtils.gram_schmidt_orthonormalization(Z, Nx, Ny, Nzern);
}
@Override
public void computePSF(){
if (PState>0)
return;
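        // The PSF is built slice by slice: for each axial index iz the complex pupil function
        // A = rho * exp(i * (phi + defoc_scale * psi)) is evaluated on the pupil grid (slices with
        // iz > Nz/2 map to negative defocus, FFT-style), Fourier transformed, and the normalized
        // intensity |FFT(A)|^2 / (Nx*Ny*Nz) gives the PSF slice; the conjugate transform is kept in
        // cpxPsf for later gradient computations.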
if(single){
// this.psf = new double[Nz*Ny*Nx];
cpxPsf = Float4D.create( cpxPsfShape);
psf = Float3D.create( psfShape);
final float PSFnorm = (float) (1.0/(Nx*Ny*Nz));
final int Npix = Nx*Ny;
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<GetPsfParaOut>> futures = new ArrayList<Future<GetPsfParaOut>>();
for ( int iz = 0; iz < Nz; iz++)
{
final int iz1 = iz;
Callable<GetPsfParaOut> callable = new Callable<GetPsfParaOut>() {
@Override
public GetPsfParaOut call() throws Exception {
GetPsfParaOut output = new GetPsfParaOut(Npix,iz1,single);
double defoc_scale;
double phasePupil;
float[] A = new float[2*Npix];
if (iz1 > Nz/2)
{
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz1*dz;
}
for (int in = 0; in < Npix; in++)
{
phasePupil = phi[in] + defoc_scale*psi[in];
A[2*in] = (float) (rho[in]*Math.cos(phasePupil));
A[2*in + 1] = (float) (rho[in]*Math.sin(phasePupil));
}
/* Fourier transform of the pupil function A(z) */
FloatFFT_2D FFT2D = new FloatFFT_2D(Nx, Ny);
FFT2D.complexForward(A);
for (int in = 0; in < Npix; in++)
{
((float[])output.outA)[2*in] = A[2*in];
((float[])output.outA)[2*in + 1] = -A[2*in + 1]; // store conjugate of A
((float[])output.outPsf)[in] = (A[2*in]*A[2*in] + A[2*in+1]*A[2*in+1])*PSFnorm ;
}
return output;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
for (Future<GetPsfParaOut> future : futures) {
GetPsfParaOut output;
try {
output = future.get();
cpxPsf.slice(output.idxz).assign(Float3D.wrap((float[])output.outA, aShape));
psf.slice(output.idxz).assign(Float2D.wrap((float[])output.outPsf, psf2DShape));
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
if(para){
cpxPsf = Double4D.create( cpxPsfShape);
psf = Double3D.create( psfShape);
final double PSFnorm = 1.0/(Nx*Ny*Nz);
final int Npix = Nx*Ny;
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<GetPsfParaOut>> futures = new ArrayList<Future<GetPsfParaOut>>();
for ( int iz = 0; iz < Nz; iz++)
{
final int iz1 = iz;
Callable<GetPsfParaOut> callable = new Callable<GetPsfParaOut>() {
@Override
public GetPsfParaOut call() throws Exception {
GetPsfParaOut output = new GetPsfParaOut(Npix,iz1,single);
double defoc_scale;
double phasePupil;
double[] A = new double[2*Npix];
if (iz1 > Nz/2)
{
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz1*dz;
}
for (int in = 0; in < Npix; in++)
{
phasePupil = phi[in] + defoc_scale*psi[in];
A[2*in] = rho[in]*Math.cos(phasePupil);
A[2*in + 1] = rho[in]*Math.sin(phasePupil);
}
/* Fourier transform of the pupil function A(z) */
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Nx, Ny);
FFT2D.complexForward(A);
for (int in = 0; in < Npix; in++)
{
((double[])output.outA)[2*in] = A[2*in];
((double[])output.outA)[2*in + 1] = -A[2*in + 1]; // store conjugate of A
((double[])output.outPsf)[in] = (A[2*in]*A[2*in] + A[2*in+1]*A[2*in+1])*PSFnorm ;
}
return output;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
for (Future<GetPsfParaOut> future : futures) {
GetPsfParaOut output;
try {
output = future.get();
cpxPsf.slice(output.idxz).assign(Double3D.wrap((double[])output.outA, aShape));
psf.slice(output.idxz).assign(Double2D.wrap((double[])output.outPsf, psf2DShape));
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
cpxPsf = Double4D.create( cpxPsfShape);
psf = Double3D.create( psfShape);
final double PSFnorm = 1.0/(Nx*Ny*Nz);
final int Npix = Nx*Ny;
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Nx, Ny);
for ( int iz = 0; iz < Nz; iz++)
{
double defoc_scale;
double phasePupil;
double[] A = new double[2*Npix];
if (iz > Nz/2)
{
defoc_scale = DEUXPI*(iz - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz*dz;
}
for (int in = 0; in < Npix; in++)
{
phasePupil =phi[in] + defoc_scale*psi[in];
A[2*in] = rho[in]*Math.cos(phasePupil);
A[2*in + 1] = rho[in]*Math.sin(phasePupil);
}
/* Fourier transform of the pupil function A(z) */
FFT2D.complexForward(A);
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
((Double4D) cpxPsf).set(0, ix, iy, iz, A[2*in]);
((Double4D) cpxPsf).set(1, ix, iy, iz, -A[2*in+1]);
((Double3D) psf).set(ix, iy, iz, (A[2*in]*A[2*in] + A[2*in+1]*A[2*in+1])*PSFnorm);
}
}
}
}
}
PState = 1;
}
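/*
 * For reference, computePSF() implements, for every axial plane z (FFT-ordered, so plane
 * indices above Nz/2 correspond to negative defocus):
 *   A_z(kx, ky) = rho(kx, ky) * exp( i*( phi(kx, ky) + 2*PI*z*dz*psi(kx, ky) ) )
 *   a_z         = FFT2D( A_z )                     (cpxPsf stores the conjugate of a_z)
 *   PSF_z       = |a_z|^2 / (Nx*Ny*Nz)
 * where rho and phi are the pupil modulus and phase and psi is the defocus function; the
 * single/para flags only select float vs double precision and threaded vs sequential code.
 */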
/**
* Apply the Jacobian matrix to go from the PSF space to the modulus coefficients space.
* @param q : the gradient of some criterion in the PSF space
* @return the gradient of this criterion in the modulus coefficients space.
*/
@Override
public DoubleShapedVector apply_J_modulus( ShapedVector q)
{
int Ci;
final int Npix = Nx*Ny;
double defoc_scale = 0.;
final double PSFNorm = 1.0/(Nx*Ny*Nz);
final double NBeta =1./modulus_coefs.norm2();
Double1D JRho = Double1D.create(modulusSpace.getShape());
JRho.fill(0.);
if(single){
if(para){
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<ApplyJPhaOut>> futures = new ArrayList<Future<ApplyJPhaOut>>();
for ( int iz = 0; iz < Nz; iz++)
{
final Float2D qz = ((Float3D) q.asShapedArray()).slice(iz);
final int iz1 = iz;
Callable<ApplyJPhaOut> callable = new Callable<ApplyJPhaOut>() {
@Override
public ApplyJPhaOut call() throws Exception {
double defoc_scale=0;
float Aq[] = new float[2*Npix];
ApplyJPhaOut pout = new ApplyJPhaOut( modulusSpace.getNumber());
if (iz1 > Nz/2)
{
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz1*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
float qin = qz.get(ix, iy);
Aq[2*in]= ((Float4D) cpxPsf).get(0, ix, iy, iz1 )*qin;
Aq[2*in+1]= ((Float4D) cpxPsf).get(1, ix, iy, iz1 )*qin;
}
}
/* Fourier transform of the pupil function A(z) */
FloatFFT_2D FFT2D = new FloatFFT_2D(Nx, Ny);
FFT2D.complexForward(Aq);
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
double ph = phi[in] + defoc_scale*psi[in];
double jin = rho[in]*(Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph));
for (int k = 0; k < modulusSpace.getNumber(); k++)
{
int Ci= k*Npix + in;
pout.grd[k] += 2*PSFNorm*jin*Z[Ci]*(1 - Math.pow(modulus_coefs.get(k)*NBeta,2))*NBeta;
}
}
}
}
return pout;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
for (Future<ApplyJPhaOut> future : futures) {
ApplyJPhaOut pout;
try {
pout = future.get();
for (int k = 0; k < modulusSpace.getNumber(); k++)
{
JRho.set(k,JRho.get(k)+ pout.grd[k]);
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
double J[] = new double[Ny*Nx];
FloatFFT_2D FFT2D = new FloatFFT_2D(Nx, Ny);
for (int iz = 0; iz < Nz; iz++)
{
float Aq[] = new float[2*Npix];
if (iz > Nz/2)
{
defoc_scale = DEUXPI*(iz - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
float qin = ((Float3D) q.asShapedArray()).get(ix, iy, iz);
Aq[2*in]= ((Float4D) cpxPsf).get(0, ix, iy, iz )*qin;
Aq[2*in+1]= ((Float4D) cpxPsf).get(1, ix, iy, iz )*qin;
}
}
FFT2D.complexForward(Aq);
for (int in = 0; in < Npix; in++)
{
Ci = iz*Npix + in;
double ph = phi[in] + defoc_scale*psi[in];
J[in] = J[in] + Aq[2*in]*Math.cos(ph) - Aq[2*in + 1]*Math.sin(ph);
}
}
for (int k = 0; k < modulusSpace.getNumber(); k++)
{
double tmp = 0;
for (int in = 0; in < Npix; in++)
{
Ci = k*Npix + in;
tmp += J[in]*Z[Ci];
}
JRho.set(k,2*PSFNorm*tmp*(1 - Math.pow(modulus_coefs.get(k)*NBeta,2))*NBeta);
}
}
}else{
if(para){
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<double[]>> futures = new ArrayList<Future<double[]>>();
for ( int iz = 0; iz < Nz; iz++)
{
final double[] qz = ((Double3D) q.asShapedArray()).slice(iz).flatten(); //FIXME Remove flatten
final double[] Az = ((Double4D) cpxPsf).slice(iz).flatten();
final int iz1 = iz;
Callable<double[]> callable = new Callable<double[]>() {
@Override
public double[] call() throws Exception {
double defoc_scale=0;
double[] Aq = new double[2*Npix];
double[] J = new double[Npix];
if (iz1 > Nz/2)
{
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz1*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
double qin = qz[in];
// Aq[2*in]= ((Double4D) cpxPsf).get(0, ix, iy, iz1 )*qin;
// Aq[2*in+1]= ((Double4D) cpxPsf).get(1, ix, iy, iz1 )*qin;
Aq[2*in]= Az[2*in]*qin;
Aq[2*in+1]= Az[2*in +1]*qin;
}
}
/* Fourier transform of the pupil function A(z) */
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Nx, Ny);
FFT2D.complexForward(Aq);
for (int in = 0; in < Npix; in++)
{
double ph = phi[in] + defoc_scale*psi[in];
J[in] = J[in] + Aq[2*in]*Math.cos(ph) - Aq[2*in + 1]*Math.sin(ph);
}
/*
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
double ph = phi[in] + defoc_scale*psi[in];
double jin = rho[in]*(Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph));
for (int k = 0; k < modulusSpace.getNumber(); k++)
{
int Ci= k*Npix + in;
pout.grd[k] += 2*PSFNorm*jin*Z[Ci]*(1 - Math.pow(modulus_coefs.get(k)*NBeta,2))*NBeta;
}
}
}
}
return pout;
*/
return J;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
/*
for (Future<ApplyJPhaOut> future : futures) {
ApplyJPhaOut pout;
try {
pout = future.get();
for (int k = 0; k < modulusSpace.getNumber(); k++)
{
JRho.set(k,JRho.get(k)+ pout.grd[k]);
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}*/
for (Future<double[]> future : futures) {
double[] jt;
try {
jt = future.get();
for (int k = 0; k < modulusSpace.getNumber(); k++)
{
double tmp = 0;
for (int in = 0; in < Npix; in++)
{
Ci = k*Npix + in;
tmp += jt[in]*Z[Ci];
}
JRho.set(k,JRho.get(k) + 2*PSFNorm*tmp*(1 - Math.pow(modulus_coefs.get(k)*NBeta,2))*NBeta); // accumulate the contribution of each z slice
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
// DoubleFFT_2D FFT2D = new DoubleFFT_2D(Ny, Nx);
double J[] = new double[Ny*Nx];
double Aq[] = new double[2*Npix];
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Nx, Ny);
for (int iz = 0; iz < Nz; iz++)
{
if (iz > Nz/2)
{
defoc_scale = DEUXPI*(iz - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
double qin = ((Double3D) q.asShapedArray()).get(ix, iy, iz);
Aq[2*in]= ((Double4D) cpxPsf).get(0, ix, iy, iz )*qin;
Aq[2*in+1]= ((Double4D) cpxPsf).get(1, ix, iy, iz )*qin;
}
}
FFT2D.complexForward(Aq);
for (int in = 0; in < Npix; in++)
{
Ci = iz*Npix + in;
double ph = phi[in] + defoc_scale*psi[in];
J[in] = J[in] + Aq[2*in]*Math.cos(ph) - Aq[2*in + 1]*Math.sin(ph);
}
}
for (int k = 0; k < modulusSpace.getNumber(); k++)
{
double tmp = 0;
for (int in = 0; in < Npix; in++)
{
Ci = k*Npix + in;
tmp += J[in]*Z[Ci];
}
JRho.set(k,2*PSFNorm*tmp*(1 - Math.pow(modulus_coefs.get(k)*NBeta,2))*NBeta);
}
}
}
return modulusSpace.create(JRho);
}
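/*
 * Usage sketch (model is an instance of this class; names and the step size are illustrative):
 * apply_J_modulus back-propagates a gradient expressed in PSF space to the modulus
 * (Zernike) coefficient space, so a generic gradient method can update the coefficients.
 *
 *   ShapedVector gradPsf = ...;                                   // d(cost)/d(PSF), from the data term
 *   DoubleShapedVector gradBeta = model.apply_J_modulus(gradPsf);
 *   DoubleShapedVector beta = model.getBeta();
 *   for (int k = 0; k < beta.getNumber(); k++)
 *       beta.set(k, beta.get(k) - step*gradBeta.get(k));          // plain gradient step
 *   model.setModulus(beta);
 */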
/**
* Apply the Jacobian matrix to go from the PSF space to the phase coefficients space.
* @param q : the gradient of some criterion in the PSF space
* @return the gradient of this criterion in the phase coefficients space.
*/
@Override
public DoubleShapedVector apply_J_phi(ShapedVector q)
{
int Ci;
final int Npix = Nx*Ny;
final double PSFNorm = 1.0/(Nx*Ny*Nz);
Double1D JPhi = Double1D.create(phaseSpace.getShape());
JPhi.fill(0.);
if(single){
if(para){
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<ApplyJPhaOut>> futures = new ArrayList<Future<ApplyJPhaOut>>();
for ( int iz = 0; iz < Nz; iz++)
{
final Float2D qz = ((Float3D) q.asShapedArray()).slice(iz);
final int iz1 = iz;
Callable<ApplyJPhaOut> callable = new Callable<ApplyJPhaOut>() {
@Override
public ApplyJPhaOut call() throws Exception {
double defoc_scale=0;
float Aq[] = new float[2*Npix];
ApplyJPhaOut pout = new ApplyJPhaOut( phaseSpace.getNumber());
if (iz1 > Nz/2)
{
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz1*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
float qin = qz.get(ix, iy);
Aq[2*in]= ((Float4D) cpxPsf).get(0, ix, iy, iz1 )*qin;
Aq[2*in+1]= ((Float4D) cpxPsf).get(1, ix, iy, iz1 )*qin;
}
}
/* Fourier transform of the pupil function A(z) */
FloatFFT_2D FFT2D = new FloatFFT_2D(Nx, Ny);
FFT2D.complexForward(Aq);
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
double ph = phi[in] + defoc_scale*psi[in];
double jin = rho[in]*(Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph));
for (int k = 0; k < phaseSpace.getNumber(); k++)
{
int Ci;
if(radial){
Ci= (k+1)*Npix + in;
}else{
Ci= (k+3)*Npix + in;
}
pout.grd[k] -= 2*PSFNorm*jin*Z[Ci];
}
}
}
}
return pout;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
for (Future<ApplyJPhaOut> future : futures) {
ApplyJPhaOut pout;
try {
pout = future.get();
for (int k = 0; k < phaseSpace.getNumber(); k++)
{
JPhi.set(k,JPhi.get(k)+ pout.grd[k]);
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
double J[] = new double[Ny*Nx];
float[] Aq = new float[2*Npix];
FloatFFT_2D FFT2D = new FloatFFT_2D(Nx, Ny);
for (int iz = 0; iz < Nz; iz++)
{
double defoc_scale=0.;
if (iz > Nz/2)
{
defoc_scale = DEUXPI*(iz - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
float qin = ((Float3D) q.asShapedArray()).get(ix, iy, iz);
Aq[2*in]= ((Float4D) cpxPsf).get(0, ix, iy, iz )*qin;
Aq[2*in+1]= ((Float4D) cpxPsf).get(1, ix, iy, iz )*qin;
}
}
FFT2D.complexForward(Aq);
for (int in = 0; in < Npix; in++)
{
Ci = iz*Npix + in;
double ph = phi[in] + defoc_scale*psi[in];
J[in] = J[in] + rho[in]*(Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph));
}
}
for (int k = 0; k < phaseSpace.getNumber(); k++)
{
double tmp = 0;
for (int in = 0; in < Npix; in++)
{
Ci = k*Npix + in;
if(radial){
tmp += J[in]*Z[Ci + 1*Npix];
}else{
tmp += J[in]*Z[Ci + 3*Npix];
}
}
JPhi.set(k, -2*PSFNorm*tmp);
}
}
}else{
if(para){
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<ApplyJPhaOut>> futures = new ArrayList<Future<ApplyJPhaOut>>();
for ( int iz = 0; iz < Nz; iz++)
{
final Double2D qz = ((Double3D) q.asShapedArray()).slice(iz);
final int iz1 = iz;
Callable<ApplyJPhaOut> callable = new Callable<ApplyJPhaOut>() {
@Override
public ApplyJPhaOut call() throws Exception {
double defoc_scale=0;
double Aq[] = new double[2*Npix];
ApplyJPhaOut pout = new ApplyJPhaOut( phaseSpace.getNumber());
if (iz1 > Nz/2)
{
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz1*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
double qin = qz.get(ix, iy);
Aq[2*in]= ((Double4D) cpxPsf).get(0, ix, iy, iz1 )*qin;
Aq[2*in+1]= ((Double4D) cpxPsf).get(1, ix, iy, iz1 )*qin;
}
}
/* Fourier transform of the pupil function A(z) */
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Nx, Ny);
FFT2D.complexForward(Aq);
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
double ph = phi[in] + defoc_scale*psi[in];
double jin = rho[in]*(Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph));
for (int k = 0; k < phaseSpace.getNumber(); k++)
{
int Ci;
if(radial){
Ci= (k+1)*Npix + in;
}else{
Ci= (k+3)*Npix + in;
}
pout.grd[k] -= 2*PSFNorm*jin*Z[Ci];
}
}
}
}
return pout;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
for (Future<ApplyJPhaOut> future : futures) {
ApplyJPhaOut pout;
try {
pout = future.get();
for (int k = 0; k < phaseSpace.getNumber(); k++)
{
JPhi.set(k,JPhi.get(k)+ pout.grd[k]);
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
double J[] = new double[Ny*Nx];
double[] Aq = new double[2*Npix];
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Ny, Nx);
for (int iz = 0; iz < Nz; iz++)
{
double defoc_scale=0.;
if (iz > Nz/2)
{
defoc_scale = DEUXPI*(iz - Nz)*dz;
}
else
{
defoc_scale = DEUXPI*iz*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
double qin = ((Double3D) q.asShapedArray()).get(ix, iy, iz);
Aq[2*in]= ((Double4D) cpxPsf).get(0, ix, iy, iz )*qin;
Aq[2*in+1]= ((Double4D) cpxPsf).get(1, ix, iy, iz )*qin;
}
}
FFT2D.complexForward(Aq);
for (int in = 0; in < Npix; in++)
{
Ci = iz*Npix + in;
double ph = phi[in] + defoc_scale*psi[in];
J[in] = J[in] + rho[in]*(Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph));
}
}
for (int k = 0; k < phaseSpace.getNumber(); k++)
{
double tmp = 0;
for (int in = 0; in < Npix; in++)
{
Ci = k*Npix + in;
if(radial){
tmp += J[in]*Z[Ci + 1*Npix];
}else{
tmp += J[in]*Z[Ci + 3*Npix];
}
}
JPhi.set(k, -2*PSFNorm*tmp);
}
}
}
return phaseSpace.create(JPhi );
}
/**
* Apply the Jacobian matrix to go from the PSF space to defocus coefficients space.
* @param q : the gradient of some criterion in the PSF space
* @return the gradient of this criterion in the defocus coefficients space.
*/
@Override
public DoubleShapedVector apply_J_defocus(ShapedVector q)
{
double scale_x = 1/(Nx*dxy);
double scale_y = 1/(Ny*dxy);
double d0 = 0, d1 = 0, d2 = 0;
final double[] rx = new double[Nx];
final double[] ry = new double[Ny];
final int Npix = Nx*Ny;
final double PSFNorm = 1.0/(Nx*Ny*Nz);
double[] grd = new double[defocusSpace.getNumber()];
for(int nx = 0; nx < Nx; nx++)
{
if(nx > Nx/2)
{
rx[nx] = (nx - Nx)*scale_x - deltaX;
}
else
{
rx[nx] = nx*scale_x - deltaX;
}
}
for(int ny = 0; ny < Ny; ny++)
{
if(ny > Ny/2)
{
ry[ny] = (ny - Ny)*scale_y - deltaY;
}else
{
ry[ny] = ny*scale_y - deltaY;
}
}
if(single){
if(para){
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<ApplyJDefOut>> futures = new ArrayList<Future<ApplyJDefOut>>();
for ( int iz = 0; iz < Nz; iz++)
{
final Float2D qz = ((Float3D) q.asShapedArray()).slice(iz);
final int iz1 = iz;
Callable<ApplyJDefOut> callable = new Callable<ApplyJDefOut>() {
@Override
public ApplyJDefOut call() throws Exception {
double defoc_scale=0;
float Aq[] = new float[2*Npix];
double defoc;
ApplyJDefOut dout = new ApplyJDefOut(0,0,0);
if (iz1 > Nz/2)
{
defoc = (iz1 - Nz)*dz;
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc = iz1*dz;
defoc_scale = DEUXPI*iz1*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
float qin = qz.get(ix, iy);
Aq[2*in]= ((Float4D) cpxPsf).get(0, ix, iy, iz1 )*qin;
Aq[2*in+1]= ((Float4D) cpxPsf).get(1, ix, iy, iz1 )*qin;
}
}
/* Fourier transform of the pupil function A(z) */
FloatFFT_2D FFT2D = new FloatFFT_2D(Nx, Ny);
FFT2D.complexForward(Aq);
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
double idef= 1./psi[in];
double ph = phi[in] + defoc_scale*psi[in];
double tmpvar = -DEUXPI*rho[in]*( Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph) )*PSFNorm;
{
dout.d1 -= tmpvar*( rx[i]*(defoc*idef ));
dout.d2 -= tmpvar*( ry[j]*(defoc*idef) );
dout.d0 += tmpvar*( idef*lambda_ni*defoc );
}
}
}
}
return dout;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
for (Future<ApplyJDefOut> future : futures) {
ApplyJDefOut output;
try {
output = future.get();
d0 += output.d0;
d1 += output.d1; // dout.d1/d2 already carry the minus sign applied in the callable,
d2 += output.d2; // so summing here matches the sequential branch below
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
FloatFFT_2D FFT2D = new FloatFFT_2D(Nx, Ny);
double defoc, idef, tmpvar;
float Aq[] = new float[2*Npix];
for (int iz = 0; iz < Nz; iz++)
{
double defoc_scale =0.;
if (iz > Nz/2)
{
defoc = (iz - Nz)*dz;
defoc_scale = DEUXPI*defoc;
}
else
{
defoc = iz*dz;
defoc_scale = DEUXPI*defoc;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
float qin = ((Float3D) q.asShapedArray()).get(ix, iy, iz);
Aq[2*in]= ((Float4D) cpxPsf).get(0, ix, iy, iz )*qin;
Aq[2*in+1]= ((Float4D) cpxPsf).get(1, ix, iy, iz )*qin;
}
}
FFT2D.complexForward(Aq);
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
idef= 1./psi[in];
double ph = phi[in] + defoc_scale*psi[in];
tmpvar = -DEUXPI*rho[in]*( Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph) )*PSFNorm;
{
d1 -= tmpvar*( rx[i]*(defoc*idef ));
d2 -= tmpvar*( ry[j]*(defoc*idef) );
d0 += tmpvar*( idef*lambda_ni*defoc );
}
}
}
}
}
}
}else{
if(para){
int threads = Runtime.getRuntime().availableProcessors();
ExecutorService service = Executors.newFixedThreadPool(threads);
List<Future<double[]>> futures = new ArrayList<Future<double[]>>();
for ( int iz = 0; iz < Nz; iz++)
{
final double[] qz = ((Double3D) q.asShapedArray()).slice(iz).flatten(); // FIXME remove flatten
final double[] Az = ((Double4D) cpxPsf).slice(iz).flatten();
final int iz1 = iz;
Callable<double[]> callable = new Callable<double[]>() {
@Override
public double[] call() throws Exception {
double defoc_scale=0;
double Aq[] = new double[2*Npix];
double defoc;
// ApplyJDefOut dout = new ApplyJDefOut(0,0,0);
double[] dout = new double[3];
if (iz1 > Nz/2)
{
defoc = (iz1 - Nz)*dz;
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc = iz1*dz;
defoc_scale = DEUXPI*iz1*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
// double qin = qz.get(ix, iy);
// Aq[2*in]= ((Double4D) cpxPsf).get(0, ix, iy, iz1 )*qin;
// Aq[2*in+1]= ((Double4D) cpxPsf).get(1, ix, iy, iz1 )*qin;
Aq[2*in]= Az[2*in]*qz[in];
Aq[2*in+1]= Az[2*in+1]*qz[in];
}
}
/* Fourier transform of the pupil function A(z) */
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Nx, Ny);
FFT2D.complexForward(Aq);
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
double idef= 1./psi[in];
double ph = phi[in] + defoc_scale*psi[in];
double tmpvar = -DEUXPI*rho[in]*( Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph) )*PSFNorm;
{
/* dout.d1 -= tmpvar*( rx[i]*(defoc*idef ));
dout.d2 -= tmpvar*( ry[j]*(defoc*idef) );
dout.d0 += tmpvar*( idef*lambda_ni*defoc );*/
dout[1] -= tmpvar*( rx[i]*(defoc*idef ));
dout[2] -= tmpvar*( ry[j]*(defoc*idef) );
dout[0] += tmpvar*( idef*lambda_ni*defoc );
}
}
}
}
return dout;
}
};
futures.add(service.submit(callable));
}
service.shutdown();
for (Future<double[]> future : futures) {
double[] output;
try {
output = future.get();
d0 += output[0];
d1 += output[1]; // output[1]/output[2] already carry the minus sign applied in the callable,
d2 += output[2]; // so summing here matches the sequential branch below
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}else{
DoubleFFT_2D FFT2D = new DoubleFFT_2D(Nx, Ny);
for ( int iz = 0; iz < Nz; iz++)
{
final double[] qz = ((Double3D) q.asShapedArray()).slice(iz).flatten(); // FIXME remove flatten
final double[] Az = ((Double4D) cpxPsf).slice(iz).flatten();
// final Double2D qz = ((Double3D) q.asShapedArray()).slice(iz);
final int iz1 = iz;
double defoc_scale=0;
double Aq[] = new double[2*Npix];
double defoc;
// ApplyJDefOut dout = new ApplyJDefOut(0,0,0);
// double[] dout = new double[3];
if (iz1 > Nz/2)
{
defoc = (iz1 - Nz)*dz;
defoc_scale = DEUXPI*(iz1 - Nz)*dz;
}
else
{
defoc = iz1*dz;
defoc_scale = DEUXPI*iz1*dz;
}
for (int iy = 0; iy < Ny; iy++){
for (int ix = 0; ix < Nx; ix++){
int in = (ix+Nx*iy);
// double qin = qz.get(ix, iy);
// Aq[2*in]= ((Double4D) cpxPsf).get(0, ix, iy, iz1 )*qin;
// Aq[2*in+1]= ((Double4D) cpxPsf).get(1, ix, iy, iz1 )*qin;
Aq[2*in]= Az[2*in]*qz[in];
Aq[2*in+1]= Az[2*in+1]*qz[in];
}
}
/* Fourier transform of the pupil function A(z) */
FFT2D.complexForward(Aq);
for (int j = 0; j < Ny; j++)
{
for (int i = 0; i < Nx; i++)
{
int in = i + j*Nx;
if(maskPupil[in] )
{
double idef= 1./psi[in];
double ph = phi[in] + defoc_scale*psi[in];
double tmpvar = -DEUXPI*rho[in]*( Aq[2*in]*Math.sin(ph) + Aq[2*in + 1]*Math.cos(ph) )*PSFNorm;
{
/* dout.d1 -= tmpvar*( rx[i]*(defoc*idef ));
dout.d2 -= tmpvar*( ry[j]*(defoc*idef) );
dout.d0 += tmpvar*( idef*lambda_ni*defoc );*/
d1 -= tmpvar*( rx[i]*(defoc*idef ));
d2 -= tmpvar*( ry[j]*(defoc*idef) );
d0 += tmpvar*( idef*lambda_ni*defoc );
}
}
}
}
}
}
}
switch(defocusSpace.getNumber())
{
case 3:
grd[2] = d2;
grd[1] = d1;
// fall through: the first coefficient is always n_i/lambda
case 1:
grd[0] = d0;
break;
case 2:
// only two coefficients: {delta_x, delta_y}
grd[0] = d1;
grd[1] = d2;
break;
}
return defocusSpace.create(Double1D.wrap(grd, defocusSpace.getShape()));
}
/** Determine the map where the pupil is non null. Sets maskPupil, mapPupil and the pupil
* area pupil_area (stored as the square root of the number of pixels inside the pupil).
*/
private void computeMaskPupil()
{
maskPupil = new boolean[Nx*Ny];
mapPupil = new boolean[Nx*Ny];
double scale_y = Math.pow(1/dxy/Ny, 2);
double scale_x = Math.pow(1/dxy/Nx, 2);
double rx, ry, ix, iy;
double radius2 = radius*radius;
pupil_area =0.;
for(int ny = 0; ny < Ny; ny++)
{
iy = Math.min(ny, Ny - ny);
ry = iy*iy*scale_y;
for(int nx = 0; nx < Nx; nx++)
{
ix = Math.min(nx, Nx - nx);
rx = ix*ix*scale_x;
if( (rx + ry) < radius2 )
{
maskPupil[nx + ny*Nx] = true;
mapPupil[nx + ny*Nx] = true;
pupil_area += 1;
}else{
maskPupil[nx + ny*Nx] = false;
mapPupil[nx + ny*Nx] = false;
}
}
}
pupil_area = Math.sqrt(pupil_area);
freePSF();
}
private class ApplyJDefOut {
double d0;
double d1;
double d2;
public ApplyJDefOut(double d0_,double d1_, double d2_){
d0 = d0_;
d1 = d1_;
d2 = d2_;
}
}
private class ApplyJPhaOut {
double[] grd;
public ApplyJPhaOut( int nMode){
grd = new double[nMode];
Arrays.fill(grd, 0.);
}
}
private class GetPsfParaOut{
Object outA;
Object outPsf;
int idxz;
public GetPsfParaOut(int nPix, int iz, boolean single){
idxz = iz;
if(single){
outA = new float[2*nPix];
outPsf = new float[2*nPix];
}else{
outA = new double[2*nPix];
outPsf = new double[2*nPix];
}
}
}
public void computeDefocus()
{
double lambda_ni2 = lambda_ni*lambda_ni;
double scale_x = 1/(Nx*dxy);
double scale_y = 1/(Ny*dxy);
double q, rx, ry;
for (int ny = 0; ny < Ny; ny++)
{
if(ny > Ny/2)
{
ry = Math.pow(scale_y*(ny - Ny) - deltaY, 2);
}
else
{
ry = Math.pow(scale_y*ny - deltaY, 2);
}
for (int nx = 0; nx < Nx; nx++)
{
int nxy = nx + ny*Nx;
if (mapPupil[nxy] )
{
if(nx > Nx/2)
{
rx = Math.pow(scale_x*(nx - Nx) - deltaX, 2);
}
else
{
rx = Math.pow(scale_x*nx - deltaX, 2);
}
q = lambda_ni2 - rx - ry;
if (q < 0.0)
{
psi[nxy] = 0;
maskPupil[nxy] = false;
}
else
{
psi[nxy] = Math.sqrt(q);
maskPupil[nxy] = true;
}
}
}
}
// freePSF();
}
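/*
 * For reference, computeDefocus() fills the defocus function psi with
 *   psi(kx, ky) = sqrt( (n_i/lambda)^2 - (kx - deltaX)^2 - (ky - deltaY)^2 )
 * where kx = nx/(Nx*dxy) and ky = ny/(Ny*dxy) are the FFT-ordered spatial frequencies
 * (indices above N/2 map to negative frequencies, hence the (n - N) branches above).
 * Pixels where the argument of the square root is negative lie outside the numerical
 * aperture and are removed from maskPupil.
 */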
/**
* Update the defocus and the depth functions according to the given parameters.
* Depending on the number of elements of defoc:
* 3 : defoc = {n_i / \lambda, \delta_x, \delta_y}
* 2 : defoc = {\delta_x, \delta_y}
* 1 : defoc = {n_i / \lambda}
* @param defoc the defocus coefficients (must belong to defocusSpace)
*/
@Override
public void setDefocus(DoubleShapedVector defoc) {
if(defoc.belongsTo(defocusSpace)){
defocus_coefs = defoc;
}else{
throw new IllegalArgumentException("defocus does not belong to the defocusSpace");
}
switch (defoc.getNumber())
{
case 3:
deltaX = defoc.get(1);
deltaY = defoc.get(2);
case 1:
lambda_ni = defoc.get(0);
break;
case 2:
// only two coefficients: {delta_x, delta_y}
deltaX = defoc.get(0);
deltaY = defoc.get(1);
break;
default:
throw new IllegalArgumentException("bad defocus parameters");
}
computeDefocus();
freePSF();
}
public void setDefocus(double[] defoc) {
if (defocusSpace==null){
defocusSpace = new DoubleShapedVectorSpace(3);
}
defocus_coefs = defocusSpace.wrap(defoc);
setDefocus(defocus_coefs);
}
public void setDefocus() {
setDefocus(new double[] {ni/lambda, deltaX, deltaY});
}
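/*
 * Usage sketch (model is an instance of this class; values are placeholders): the
 * no-argument variant above seeds the three coefficients {n_i/lambda, delta_x, delta_y};
 * the same layout can be passed explicitly:
 *
 *   model.setDefocus(new double[] {ni/lambda, 0., 0.});   // centered pupil, nominal index
 *
 * The DoubleShapedVector variant additionally accepts 1- or 2-element vectors (see the
 * Javadoc of setDefocus(DoubleShapedVector)), provided the vector belongs to defocusSpace.
 */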
@Override
public void setModulus(DoubleShapedVector beta) {
if(beta.belongsTo(modulusSpace)){
modulus_coefs = beta;
}else{
throw new IllegalArgumentException("DoubleShapedVector beta does not belong to the modulus space");
}
int Npix = Nx*Ny;
rho = new double[Npix];
double betaNorm = 1./( beta.norm2());
for(int in = 0; in < Npix; in++)
{
if (maskPupil[in] )
{
for (int n = 0; n < beta.getNumber(); n++)
{
rho[in] += Z[in + n*Npix]*beta.get(n)*betaNorm;
}
}
}
freePSF();
}
public void setModulus(double[] beta) {
setNModulus(beta.length);
modulus_coefs = modulusSpace.wrap(beta);
setModulus(modulus_coefs);
}
@Override
public void setPhase(DoubleShapedVector phase) {
if(phase.belongsTo(phaseSpace)){
phase_coefs = phase;
}else{
throw new IllegalArgumentException("phase parameter does not belong to the right space ");
}
int Npix = Nx*Ny;
phi = new double[Npix];
for(int in = 0; in < Npix; in++)
{
if (maskPupil[in] )
{
for (int n = 0; n < phase.getNumber(); ++n)
{
if(radial){
phi[in] += Z[in + (n + 1)*Npix]*phase.get(n);
}else{
phi[in] += Z[in + (n + 3)*Npix]*phase.get(n);
}
}
}
}
freePSF();
}
public void setPhase(double[] alpha) {
if((alpha==null)||(alpha.length==0)){
nPhase=0;
}
else{
setNPhase(alpha.length);
phase_coefs = phaseSpace.wrap(alpha);
setPhase(phase_coefs) ;
}
}
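/*
 * Usage sketch (model is an instance of this class; coefficient values are placeholders):
 * the pupil modulus rho and phase phi are expanded on the Zernike basis Z, and changing
 * either set of coefficients invalidates the cached PSF (freePSF), so it is recomputed on
 * the next access.
 *
 *   model.setModulus(new double[] {1.});          // flat modulus (first Zernike mode only)
 *   model.setPhase(new double[] {0., 0., 0.1});   // small aberration on the first phase modes
 *   Array3D psf = model.getPSF();                 // recomputed lazily because PState was reset
 */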
/**
* @return the modulus of the pupil
*/
public double[] getRho() {
if (PState<1){
computePSF();
}
return rho;
}
/**
* @return the wavelength used in the computation
*/
public double getLambda() {
return lambda;
}
/**
* @return the refractive index of the immersion medium used in the computation
*/
public double getNi() {
return ni;
}
/**
* @return the phase of the pupil
*/
public double[] getPhi(){
if (PState<1){
computePSF();
}
return phi;
}
/**
* @return the defocus function
*/
public double[] getPsi() {
if (PState<1){
computePSF();
}
return psi;
}
/**
* @return modulus coefficients
*/
public DoubleShapedVector getBeta() {
return modulus_coefs;
}
/**
* @return phase coefficients
*/
public DoubleShapedVector getAlpha() {
return phase_coefs;
}
/**
* @return the defocus coefficients multiplied by the wavelength, i.e. {n_i, \delta_x \lambda, \delta_y \lambda}
*/
public double[] getDefocusMultiplyByLambda() {
if (PState<1){
computePSF();
}
double[] defocus = {lambda_ni*lambda, deltaX*lambda, deltaY*lambda};
return defocus;
}
/**
* @return defocus coefficients
*/
public double[] getDefocus() {
if (PState<1){
computePSF();
}
double[] defocus = {lambda_ni, deltaX, deltaY};
return defocus;
}
/**
* @return the pupil mask
*/
public boolean[] getMaskPupil() {
if (PState<1){
computePSF();
}
return maskPupil;
}
/**
* @return the PSF
*/
@Override
public Array3D getPSF() {
if (PState<1){
computePSF();
}
return psf;
}
/**
* @return the Zernike basis
*/
public double[] getZernike() {
return Z;
}
/**
* @return the number of Zernike polynomials used in the Zernike basis
*/
public int getNZern() {
return Nzern;
}
/**
* @param k the index of the requested polynomial
* @return the k-th Zernike polynomial of the basis
*/
public double[] getZernike(int k) {
return MathUtils.getArray(Z, Nx, Ny, k);
}
/**
* @return the complex PSF
*/
public Array4D get_cpxPsf() {
if (PState<1){
computePSF();
}
return cpxPsf;
}
/**
* Print some statistics about the WideFieldModel object for debugging purposes
*/
public void getInfo()
{
System.out.println("PSF");
MathUtils.stat(psf.toDouble().getData());
System.out.println();
System.out.println("PHI");
MathUtils.stat(phi);
System.out.println();
System.out.println("RHO");
MathUtils.stat(rho);
System.out.println();
System.out.println("PSI");
MathUtils.stat(psi);
System.out.println();
System.out.println("cpxPSF");
MathUtils.statC(cpxPsf.toDouble().getData());
System.out.println();
System.out.println("ZERNIKE");
MathUtils.stat(Z);
}
public void setNPhase() {
if(nPhase>0){
phaseSpace = new DoubleShapedVectorSpace(nPhase);
if(radial){
Nzern = Math.max(nPhase+1, modulusSpace.getNumber());
}else{
Nzern = Math.max(nPhase+3, modulusSpace.getNumber());
}
computeZernike();
phase_coefs = phaseSpace.create(0.);
setPhase(phase_coefs);
}else{
phaseSpace = null;
}
}
public void setNPhase(int nPh ) {
nPhase = nPh;
if(nPhase>0){
phaseSpace = new DoubleShapedVectorSpace(nPhase);
if(radial){
Nzern = Math.max(nPhase+1, modulusSpace.getNumber());
}else{
Nzern = Math.max(nPhase+3, modulusSpace.getNumber());
}
computeZernike();
phase_coefs = phaseSpace.create(0.);
setPhase(phase_coefs);
}else{
phaseSpace = null;
}
}
public void setNModulus(int nMod) {
nModulus = nMod ;
if(nModulus<1){
nModulus = 1;
}
modulusSpace = new DoubleShapedVectorSpace(nModulus);
if (phaseSpace==null){
Nzern = nModulus;
}else{
if(radial){
Nzern = Math.max(phaseSpace.getNumber()+1, nModulus);
}else{
Nzern = Math.max(phaseSpace.getNumber()+3, nModulus);
}
}
computeZernike();
modulus_coefs = modulusSpace.create(0.);
modulus_coefs.set(0, 1.);
setModulus(modulus_coefs);
}
public void setNModulus() {
if(nModulus<1){
nModulus = 1;
}
modulusSpace = new DoubleShapedVectorSpace(nModulus);
if (phaseSpace==null){
Nzern = nModulus;
}else{
if(radial){
Nzern = Math.max(phaseSpace.getNumber()+1, nModulus);
}else{
Nzern = Math.max(phaseSpace.getNumber()+3, nModulus);
}
}
computeZernike();
modulus_coefs = modulusSpace.create(0.);
modulus_coefs.set(0, 1.);
setModulus(modulus_coefs);
}
/**
* Reset the PSF and its complex amplitude (cpxPsf) to free some memory.
* Sets the flag PState to 0.
*/
@Override
public void freePSF() {
PState =0;
cpxPsf = null;
psf = null;
}
public int getNModulus() {
return modulus_coefs.getNumber();
}
public int getNPhase() {
return phase_coefs.getNumber();
}
}
|
package org.xbill.DNS;
import java.io.*;
import java.util.*;
import org.xbill.DNS.utils.*;
/**
* A representation of a domain name.
*
* @author Brian Wellington
*/
public class Name {
private static final int LABEL_NORMAL = 0;
private static final int LABEL_COMPRESSION = 0xC0;
private static final int LABEL_EXTENDED = 0x40;
private static final int LABEL_MASK = 0xC0;
private static final int EXT_LABEL_COMPRESSION = 0;
private static final int EXT_LABEL_BITSTRING = 1;
private Object [] name;
private byte labels;
private boolean qualified;
/** The root name */
public static Name root = new Name(".");
/** The maximum number of labels in a Name */
static final int MAXLABELS = 256;
private
Name() {
}
/**
* Create a new name from a string and an origin
* @param s The string to be converted
* @param origin If the name is unqualified, the origin to be appended
*/
public
Name(String s, Name origin) {
labels = 0;
name = new Object[MAXLABELS];
if (s.equals("@") && origin != null) {
append(origin);
qualified = true;
return;
}
try {
MyStringTokenizer st = new MyStringTokenizer(s, ".");
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (token.charAt(0) == '[')
name[labels++] = new BitString(token);
else
name[labels++] = token.getBytes();
}
if (st.hasMoreDelimiters())
qualified = true;
else {
if (origin != null) {
append(origin);
qualified = true;
}
else {
/* This isn't exactly right, but it's close.
* Partially qualified names are evil.
*/
if (Options.check("pqdn"))
qualified = false;
else
qualified = (labels > 1);
}
}
}
catch (Exception e) {
StringBuffer sb = new StringBuffer();
sb.append(s);
if (origin != null) {
sb.append(".");
sb.append(origin);
}
if (e instanceof ArrayIndexOutOfBoundsException)
sb.append(" has too many labels");
else if (e instanceof IOException)
sb.append(" contains an invalid binary label");
else
sb.append(" is invalid");
System.err.println(sb.toString());
name = null;
labels = 0;
}
}
/**
* Create a new name from a string
* @param s The string to be converted
*/
public
Name(String s) {
this (s, null);
}
/**
* Create a new name from DNS wire format
* @param in A stream containing the input data
* @param c The compression context. This should be null unless a full
* message is being parsed.
*/
public
Name(DataByteInputStream in, Compression c) throws IOException {
int len, start, pos, count = 0;
Name name2;
labels = 0;
name = new Object[MAXLABELS];
start = in.getPos();
loop:
while ((len = in.readUnsignedByte()) != 0) {
switch(len & LABEL_MASK) {
case LABEL_NORMAL:
byte [] b = new byte[len];
in.read(b);
name[labels++] = b;
count++;
break;
case LABEL_COMPRESSION:
pos = in.readUnsignedByte();
pos += ((len & ~LABEL_MASK) << 8);
name2 = (c == null) ? null : c.get(pos);
if (Options.check("verbosecompression"))
System.err.println("Looking at " + pos +
", found " + name2);
if (name2 == null)
throw new WireParseException("bad compression");
else {
System.arraycopy(name2.name, 0, name, labels,
name2.labels);
labels += name2.labels;
}
break loop;
case LABEL_EXTENDED:
int type = len & ~LABEL_MASK;
switch (type) {
case EXT_LABEL_COMPRESSION:
pos = in.readUnsignedShort();
name2 = (c == null) ? null : c.get(pos);
if (Options.check("verbosecompression"))
System.err.println("Looking at " +
pos + ", found " +
name2);
if (name2 == null)
throw new WireParseException(
"bad compression");
else {
System.arraycopy(name2.name, 0, name,
labels, name2.labels);
labels += name2.labels;
}
break loop;
case EXT_LABEL_BITSTRING:
int bits = in.readUnsignedByte();
if (bits == 0)
bits = 256;
int bytes = (bits + 7) / 8;
byte [] data = new byte[bytes];
in.read(data);
name[labels++] = new BitString(bits, data);
count++;
break;
default:
throw new WireParseException(
"Unknown name format");
} /* switch */
break;
} /* switch */
}
if (c != null) {
pos = start;
if (Options.check("verbosecompression"))
System.out.println("name = " + this +
", count = " + count);
for (int i = 0; i < count; i++) {
Name tname = new Name(this, i);
c.add(pos, tname);
if (Options.check("verbosecompression"))
System.err.println("Adding " + tname +
" at " + pos);
if (name[i] instanceof BitString)
pos += (((BitString)name[i]).bytes() + 2);
else
pos += (((byte [])name[i]).length + 1);
}
}
qualified = true;
}
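/*
 * Compression pointer arithmetic, worked example: a compression pointer is two bytes
 * whose top two bits are set. For the common wire bytes 0xC0 0x0C:
 *   len = 0xC0 -> (len & LABEL_MASK) == LABEL_COMPRESSION
 *   pos = 0x0C + ((0xC0 & ~LABEL_MASK) << 8) = 12
 * so the remainder of the name is read from offset 12 of the message, which is where the
 * question name of a standard query starts (the header is 12 bytes long).
 */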
/**
* Create a new name by removing labels from the beginning of an existing Name
* @param d An existing Name
* @param n The number of labels to remove from the beginning in the copy
*/
/* Skips n labels and creates a new name */
public
Name(Name d, int n) {
name = new Object[MAXLABELS];
labels = (byte) (d.labels - n);
System.arraycopy(d.name, n, name, 0, labels);
qualified = d.qualified;
}
/**
* Generates a new Name with the first n labels replaced by a wildcard
* @return The wildcard name
*/
public Name
wild(int n) {
Name wild = new Name(this, n - 1);
wild.name[0] = new byte[] {(byte)'*'};
return wild;
}
/**
* Generates a new Name to be used when following a DNAME.
* @return The new name, or null if the DNAME is invalid.
*/
public Name
fromDNAME(DNAMERecord dname) {
Name dnameowner = dname.getName();
Name dnametarget = dname.getTarget();
int nlabels;
int saved;
if (!subdomain(dnameowner))
return null;
saved = labels - dnameowner.labels;
nlabels = saved + dnametarget.labels;
if (nlabels > MAXLABELS)
return null;
Name newname = new Name();
newname.labels = (byte)nlabels;
newname.name = new Object[nlabels];
System.arraycopy(this.name, 0, newname.name, 0, saved);
System.arraycopy(dnametarget.name, 0, newname.name, saved,
dnametarget.labels);
newname.qualified = true;
return newname;
}
/**
* Is this name a wildcard?
*/
public boolean
isWild() {
if (labels == 0 || (name[0] instanceof BitString))
return false;
byte [] b = (byte []) name[0];
return (b.length == 1 && b[0] == '*');
}
/**
* Is this name fully qualified?
*/
public boolean
isQualified() {
return qualified;
}
/**
* Appends the specified name to the end of the current Name
*/
public void
append(Name d) {
System.arraycopy(d.name, 0, name, labels, d.labels);
labels += d.labels;
}
/**
* The length
*/
public short
length() {
short total = 0;
for (int i = 0; i < labels; i++) {
if (name[i] instanceof BitString)
total += (((BitString)name[i]).bytes() + 2);
else
total += (((byte [])name[i]).length + 1);
}
return ++total;
}
/**
* The number of labels
*/
public byte
labels() {
return labels;
}
/**
* Is the current Name a subdomain of the specified name?
*/
public boolean
subdomain(Name domain) {
if (domain == null || domain.labels > labels)
return false;
Name tname = new Name(this, labels - domain.labels);
return (tname.equals(domain));
}
/**
* Convert Name to a String
*/
public String
toString() {
StringBuffer sb = new StringBuffer();
if (labels == 0)
sb.append(".");
for (int i = 0; i < labels; i++) {
if (name[i] instanceof BitString)
sb.append(name[i]);
else
sb.append(new String((byte []) name[i]));
if (qualified || i < labels - 1)
sb.append(".");
}
return sb.toString();
}
/**
* Convert Name to DNS wire format
*/
public void
toWire(DataByteOutputStream out, Compression c) throws IOException {
for (int i = 0; i < labels; i++) {
Name tname;
if (i == 0)
tname = this;
else
tname = new Name(this, i);
int pos = -1;
if (c != null) {
pos = c.get(tname);
if (Options.check("verbosecompression"))
System.err.println("Looking for " + tname +
", found " + pos);
}
if (pos >= 0) {
pos |= (LABEL_MASK << 8);
out.writeShort(pos);
return;
}
else {
if (c != null) {
c.add(out.getPos(), tname);
if (Options.check("verbosecompression"))
System.err.println("Adding " + tname +
" at " +
out.getPos());
}
if (name[i] instanceof BitString) {
out.writeByte(LABEL_EXTENDED |
EXT_LABEL_BITSTRING);
out.writeByte(((BitString)name[i]).wireBits());
out.write(((BitString)name[i]).data);
}
else
out.writeString((byte []) name[i]);
}
}
out.writeByte(0);
}
/**
* Convert Name to canonical DNS wire format (all lowercase)
*/
public void
toWireCanonical(DataByteOutputStream out) throws IOException {
for (int i = 0; i < labels; i++) {
if (name[i] instanceof BitString) {
out.writeByte(LABEL_EXTENDED | EXT_LABEL_BITSTRING);
out.writeByte(((BitString)name[i]).wireBits());
out.write(((BitString)name[i]).data);
}
else
out.writeStringCanonical(new String((byte []) name[i]));
}
out.writeByte(0);
}
private static final byte
toLower(byte b) {
if (b < 'A' || b > 'Z')
return b;
else
return (byte)(b - 'A' + 'a');
}
/**
* Are these two Names equivalent?
*/
public boolean
equals(Object arg) {
if (arg == null || !(arg instanceof Name))
return false;
if (arg == this)
return true;
Name d = (Name) arg;
if (d.labels != labels)
return false;
for (int i = 0; i < labels; i++) {
if (name[i].getClass() != d.name[i].getClass())
return false;
if (name[i] instanceof BitString) {
if (!name[i].equals(d.name[i]))
return false;
}
else {
byte [] b1 = (byte []) name[i];
byte [] b2 = (byte []) d.name[i];
if (b1.length != b2.length)
return false;
for (int j = 0; j < b1.length; j++) {
if (toLower(b1[j]) != toLower(b2[j]))
return false;
}
}
}
return true;
}
/**
* Computes a hashcode based on the value
*/
public int
hashCode() {
int code = labels;
for (int i = 0; i < labels; i++) {
if (name[i] instanceof BitString) {
BitString b = (BitString) name[i];
for (int j = 0; j < b.bytes(); j++)
code += ((code << 3) + b.data[j]);
}
else {
byte [] b = (byte []) name[i];
for (int j = 0; j < b.length; j++)
code += ((code << 3) + toLower(b[j]));
}
}
return code;
}
}
|
package org.xbill.DNS;
import java.io.*;
import java.text.*;
import java.util.*;
import org.xbill.DNS.utils.*;
/**
* A representation of a domain name.
*
* @author Brian Wellington
*/
public class Name implements Comparable {
private static final int LABEL_NORMAL = 0;
private static final int LABEL_COMPRESSION = 0xC0;
private static final int LABEL_MASK = 0xC0;
private Object [] name;
private byte offset;
private byte labels;
private int hashcode;
private static final byte [] emptyLabel = new byte[0];
private static final byte [] wildLabel = new byte[] {(byte)'*'};
/** The root name */
public static final Name root;
/** The maximum number of labels in a Name */
static final int MAXLABELS = 128;
/* The number of labels initially allocated. */
private static final int STARTLABELS = 4;
/* Used for printing non-printable characters */
private static final DecimalFormat byteFormat = new DecimalFormat();
/* Used to efficiently convert bytes to lowercase */
private static final byte lowercase[] = new byte[256];
/* Used in wildcard names. */
private static final Name wildName;
static {
byteFormat.setMinimumIntegerDigits(3);
for (int i = 0; i < lowercase.length; i++) {
if (i < 'A' || i > 'Z')
lowercase[i] = (byte)i;
else
lowercase[i] = (byte)(i - 'A' + 'a');
}
root = new Name();
root.append(emptyLabel);
wildName = new Name();
wildName.append(wildLabel);
}
private
Name() {
}
private final void
grow(int n) {
if (n > MAXLABELS)
throw new ArrayIndexOutOfBoundsException("name too long");
Object [] newarray = new Object[n];
if (labels > 0)
System.arraycopy(name, 0, newarray, 0, labels);
name = newarray;
}
private final void
append(Object label) {
if (name == null || labels == name.length)
grow(labels + 1);
name[labels++] = label;
}
/**
* Create a new name from a string and an origin
* @param s The string to be converted
* @param origin If the name is not absolute, the origin to be appended
* @deprecated As of dnsjava 1.3.0, replaced by <code>Name.fromString</code>.
*/
public
Name(String s, Name origin) {
Name n;
try {
n = Name.fromString(s, origin);
}
catch (TextParseException e) {
StringBuffer sb = new StringBuffer(s);
if (origin != null)
sb.append("." + origin);
sb.append(": "+ e.getMessage());
System.err.println(sb.toString());
return;
}
labels = n.labels;
name = n.name;
if (!isAbsolute()) {
/*
* This isn't exactly right, but it's close.
* Partially qualified names are evil.
*/
if (!Options.check("pqdn") && labels > 1)
append(emptyLabel);
}
}
/**
* Create a new name from a string
* @param s The string to be converted
* @deprecated as of dnsjava 1.3.0, replaced by <code>Name.fromString</code>.
*/
public
Name(String s) {
this (s, null);
}
/**
* Create a new name from a string and an origin. This does not automatically
* make the name absolute; it will be absolute if it has a trailing dot or an
* absolute origin is appended.
* @param s The string to be converted
* @param origin If the name is not absolute, the origin to be appended.
* @throws TextParseException The name is invalid.
*/
public static Name
fromString(String s, Name origin) throws TextParseException {
Name name = new Name();
name.labels = 0;
name.name = null;
if (s.equals("@")) {
if (origin == null)
return name;
return origin;
} else if (s.equals("."))
return (root);
int labelstart = -1;
int pos = 0;
byte [] label = new byte[64];
boolean escaped = false;
int digits = 0;
int intval = 0;
boolean absolute = false;
for (int i = 0; i < s.length(); i++) {
byte b = (byte) s.charAt(i);
if (escaped) {
if (b >= '0' && b <= '9' && digits < 3) {
digits++;
intval *= 10;
intval += (b - '0');
if (digits < 3)
continue;
b = (byte) intval;
}
else if (digits > 0 && digits < 3)
throw new TextParseException("bad escape");
if (pos >= label.length)
throw new TextParseException("label too long");
labelstart = pos;
label[pos++] = b;
escaped = false;
} else if (b == '\\') {
escaped = true;
digits = 0;
intval = 0;
} else if (b == '.') {
if (labelstart == -1)
throw new TextParseException("invalid label");
byte [] newlabel = new byte[pos];
System.arraycopy(label, 0, newlabel, 0, pos);
if (name.labels == MAXLABELS)
throw new TextParseException("too many labels");
name.append(newlabel);
labelstart = -1;
pos = 0;
} else {
if (labelstart == -1)
labelstart = i;
if (pos >= label.length)
throw new TextParseException("label too long");
label[pos++] = b;
}
}
if (labelstart == -1) {
name.append(emptyLabel);
absolute = true;
} else {
byte [] newlabel = new byte[pos];
System.arraycopy(label, 0, newlabel, 0, pos);
if (name.labels == MAXLABELS)
throw new TextParseException("too many labels");
name.append(newlabel);
}
if (origin != null && !absolute) {
for (int i = 0; i < origin.labels; i++)
name.append(origin.name[origin.offset + i]);
}
return (name);
}
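/*
 * Usage sketch (names are illustrative); fromString throws TextParseException on bad input:
 *
 *   Name abs = Name.fromString("www.example.com.");                      // absolute (trailing dot)
 *   Name rel = Name.fromString("www", Name.fromString("example.com.")); // origin appended
 *   Name esc = Name.fromString("host\\.with\\.dots.example.com.");       // escaped dots stay in one label
 *   // decimal escapes are also accepted: "\\065" is parsed into the single byte 'A'
 */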
/**
* Create a new name from a string. This does not automatically make the name
* absolute; it will be absolute if it has a trailing dot.
* @param s The string to be converted
* @throws TextParseException The name is invalid.
*/
public static Name
fromString(String s) throws TextParseException {
return fromString(s, null);
}
public static Name
fromConstantString(String s) {
try {
return fromString(s, null);
}
catch (TextParseException e) {
throw new IllegalArgumentException("Invalid name '" + s + "'");
}
}
/**
* Create a new name from DNS wire format
* @param in A stream containing the input data
*/
public
Name(DataByteInputStream in) throws IOException {
int len, pos, savedpos;
Name name2;
boolean done = false;
labels = 0;
name = new Object[STARTLABELS];
while (!done) {
len = in.readUnsignedByte();
switch (len & LABEL_MASK) {
case LABEL_NORMAL:
if (labels >= MAXLABELS)
throw new WireParseException("too many labels");
if (len == 0) {
append(emptyLabel);
done = true;
} else {
byte [] b = new byte[len];
in.read(b);
append(b);
}
break;
case LABEL_COMPRESSION:
pos = in.readUnsignedByte();
pos += ((len & ~LABEL_MASK) << 8);
if (Options.check("verbosecompression"))
System.err.println("currently " + in.getPos() +
", pointer to " + pos);
savedpos = in.getPos();
if (pos >= savedpos)
throw new WireParseException("bad compression");
in.setPos(pos);
if (Options.check("verbosecompression"))
System.err.println("current name '" + this +
"', seeking to " + pos);
try {
name2 = new Name(in);
}
finally {
in.setPos(savedpos);
}
if (labels + name2.labels >= MAXLABELS)
throw new WireParseException("too many labels");
if (labels + name2.labels > name.length)
grow(labels + name2.labels);
System.arraycopy(name2.name, 0, name, labels,
name2.labels);
labels += name2.labels;
done = true;
break;
}
}
}
/**
* Create a new name by removing labels from the beginning of an existing Name
* @param src An existing Name
* @param n The number of labels to remove from the beginning in the copy
*/
public
Name(Name src, int n) {
name = src.name;
offset = (byte)(src.offset + n);
labels = (byte)(src.labels - n);
}
/**
* Creates a new name by concatenating two existing names.
* @param prefix The prefix name.
* @param suffix The suffix name.
* @return The concatenated name.
*/
public static Name
concatenate(Name prefix, Name suffix) {
if (prefix.isAbsolute())
return (prefix);
int nlabels = prefix.labels + suffix.labels;
if (nlabels > MAXLABELS)
return null;
Name newname = new Name();
newname.labels = (byte)nlabels;
newname.name = new Object[nlabels];
System.arraycopy(prefix.name, prefix.offset, newname.name,
0, prefix.labels);
System.arraycopy(suffix.name, suffix.offset, newname.name,
prefix.labels, suffix.labels);
return newname;
}
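/*
 * Example (illustrative): concatenating a relative prefix with an absolute suffix yields
 * the absolute name; an already-absolute prefix is returned unchanged.
 *
 *   Name www = Name.fromString("www");
 *   Name origin = Name.fromString("example.com.");
 *   Name full = Name.concatenate(www, origin);   // "www.example.com."
 */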
/**
* Generates a new Name with the first n labels replaced by a wildcard
* @return The wildcard name
*/
public Name
wild(int n) {
return concatenate(wildName, new Name(this, n));
}
/**
* Generates a new Name to be used when following a DNAME.
* @return The new name, or null if the DNAME is invalid.
*/
public Name
fromDNAME(DNAMERecord dname) {
Name dnameowner = dname.getName();
Name dnametarget = dname.getTarget();
int nlabels;
int saved;
if (!subdomain(dnameowner))
return null;
saved = labels - dnameowner.labels;
nlabels = saved + dnametarget.labels;
if (nlabels > MAXLABELS)
return null;
Name newname = new Name();
newname.labels = (byte)nlabels;
newname.name = new Object[nlabels];
System.arraycopy(this.name, 0, newname.name, 0, saved);
System.arraycopy(dnametarget.name, 0, newname.name, saved,
dnametarget.labels);
return newname;
}
/**
* Is this name a wildcard?
*/
public boolean
isWild() {
if (labels == 0)
return false;
return (name[offset] == wildLabel ||
Arrays.equals((byte[])name[offset], wildLabel)); // use the label offset so sliced names are handled
}
/**
* Is this name fully qualified (that is, absolute)?
* @deprecated As of dnsjava 1.3.0, replaced by <code>isAbsolute</code>.
*/
public boolean
isQualified() {
if (labels == 0)
return false;
return (name[offset + labels - 1] == emptyLabel);
}
/**
* Is this name absolute?
*/
public boolean
isAbsolute() {
if (labels == 0)
return false;
return (name[offset + labels - 1] == emptyLabel);
}
/**
* The length of the name.
*/
public short
length() {
short total = 0;
for (int i = offset; i < labels + offset; i++) {
total += (((byte [])name[i]).length + 1);
}
return total;
}
/**
* The number of labels in the name.
*/
public byte
labels() {
return labels;
}
/**
* Is the current Name a subdomain of the specified name?
*/
public boolean
subdomain(Name domain) {
if (domain == null || domain.labels > labels)
return false;
Name tname = new Name(this, labels - domain.labels);
return (tname.equals(domain));
}
private String
byteString(byte [] array) {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < array.length; i++) {
/* Ick. */
short b = (short)(array[i] & 0xFF);
if (b <= 0x20 || b >= 0x7f) {
sb.append('\\');
sb.append(byteFormat.format(b));
}
else if (b == '"' || b == '(' || b == ')' || b == '.' ||
b == ';' || b == '\\' || b == '@' || b == '$')
{
sb.append('\\');
sb.append((char)b);
}
else
sb.append((char)b);
}
return sb.toString();
}
/**
* Convert Name to a String
*/
public String
toString() {
if (labels == 0)
return "@";
StringBuffer sb = new StringBuffer();
for (int i = offset; i < labels + offset; i++) {
sb.append(byteString((byte [])name[i]));
if (i < labels + offset - 1)
sb.append(".");
}
return sb.toString();
}
/**
* Convert the nth label in a Name to a String
* @param n The label to be converted to a String
*/
public String
getLabelString(int n) {
n += offset;
return byteString((byte [])name[n]);
}
public void
toWire(DataByteOutputStream out, Compression c) throws IOException {
if (!isAbsolute())
throw new IllegalArgumentException("toWire() called on " +
"non-absolute name");
for (int i = offset; i < labels + offset; i++) {
Name tname;
if (i == offset)
tname = this;
else
tname = new Name(this, i);
int pos = -1;
if (c != null)
pos = c.get(tname);
if (pos >= 0) {
pos |= (LABEL_MASK << 8);
out.writeShort(pos);
return;
} else {
if (c != null)
c.add(out.getPos(), tname);
out.writeString((byte []) name[i]);
}
}
}
/**
* Convert Name to canonical DNS wire format (all lowercase)
* @param out The output stream to which the message is written.
* @throws IOException An error occurred writing the name.
*/
public void
toWireCanonical(DataByteOutputStream out) throws IOException {
for (int i = offset; i < labels + offset; i++) {
byte [] b = (byte []) name[i];
byte [] bc = new byte[b.length];
for (int j = 0; j < b.length; j++)
bc[j] = lowercase[b[j]];
out.writeString(bc);
}
}
/**
* Convert Name to canonical DNS wire format (all lowercase)
* @throws IOException An error occurred writing the name.
*/
public byte []
toWireCanonical() throws IOException {
DataByteOutputStream out = new DataByteOutputStream();
toWireCanonical(out);
return out.toByteArray();
}
/**
* Are these two Names equivalent?
*/
public boolean
equals(Object arg) {
if (arg == this)
return true;
if (arg == null || !(arg instanceof Name))
return false;
Name d = (Name) arg;
if (d.labels != labels)
return false;
for (int i = 0; i < labels; i++) {
Object nobj = name[offset + i];
Object dnobj = d.name[d.offset + i];
byte [] b1 = (byte []) nobj;
byte [] b2 = (byte []) dnobj;
if (b1.length != b2.length)
return false;
for (int j = 0; j < b1.length; j++) {
if (lowercase[b1[j]] != lowercase[b2[j]])
return false;
}
}
return true;
}
/**
* Computes a hashcode based on the value
*/
public int
hashCode() {
if (hashcode != 0)
return (hashcode);
int code = labels;
for (int i = offset; i < labels + offset; i++) {
byte [] b = (byte []) name[i];
for (int j = 0; j < b.length; j++)
code += ((code << 3) + lowercase[b[j]]);
}
hashcode = code;
return hashcode;
}
/**
* Compares this Name to another Object.
* @param o The Object to be compared.
* @return The value 0 if the argument is a name equivalent to this name;
* a value less than 0 if the argument is less than this name in the canonical
* ordering, and a value greater than 0 if the argument is greater than this
* name in the canonical ordering.
* @throws ClassCastException if the argument is not a Name.
*/
public int
compareTo(Object o) {
Name arg = (Name) o;
if (this == arg)
return (0);
int compares = labels > arg.labels ? arg.labels : labels;
for (int i = 1; i <= compares; i++) {
Object label = name[labels - i + offset];
Object alabel = arg.name[arg.labels - i + arg.offset];
byte [] b = (byte []) label;
byte [] ab = (byte []) alabel;
for (int j = 0; j < b.length && j < ab.length; j++) {
int n = lowercase[b[j]] - lowercase[ab[j]];
if (n != 0)
return (n);
}
if (b.length != ab.length)
return (b.length - ab.length);
}
return (labels - arg.labels);
}
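/*
 * Note on the ordering (example is illustrative): labels are compared starting from the
 * rightmost one, so the parent domain dominates:
 *
 *   Name.fromString("a.example.com.").compareTo(Name.fromString("z.aaa.com."))
 *
 * is positive, because "example" sorts after "aaa" even though 'a' < 'z' in the leftmost
 * label; a name always sorts after any of its proper ancestors (e.g. "www.example.com."
 * compares greater than "example.com.").
 */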
}
|
package org.xbill.DNS;
import java.io.*;
import java.net.*;
import java.util.*;
import org.xbill.DNS.utils.*;
/**
* Transaction signature handling. This class generates and verifies
* TSIG records on messages, which provide transaction security.
* @see TSIGRecord
*
* @author Brian Wellington
*/
public class TSIG {
/**
* The domain name representing the HMAC-MD5 algorithm (the only supported
* algorithm)
*/
public static final String HMAC = "HMAC-MD5.SIG-ALG.REG.INT";
/** The default fudge value for outgoing packets. Can be overridden by the
* tsigfudge option.
*/
public static final short FUDGE = 300;
private Name name, alg;
private byte [] key;
private hmacSigner axfrSigner = null;
static {
if (Options.check("verbosehmac"))
hmacSigner.verbose = true;
}
/**
* Creates a new TSIG object, which can be used to sign or verify a message.
* @param name The name of the shared key
* @param key The shared key's data
*/
public
TSIG(String name, byte [] key) {
this.name = new Name(name);
this.alg = new Name(HMAC);
this.key = key;
}
/**
* Generates a TSIG record for a message and adds it to the message
* @param m The message
* @param old If this message is a response, the TSIG from the request
*/
public void
apply(Message m, TSIGRecord old) throws IOException {
Date timeSigned = new Date();
short fudge;
hmacSigner h = new hmacSigner(key);
if (Options.check("tsigfudge")) {
String s = Options.value("tsigfudge");
try {
fudge = Short.parseShort(s);
}
catch (NumberFormatException e) {
fudge = FUDGE;
}
}
else
fudge = FUDGE;
try {
if (old != null) {
DataByteOutputStream dbs = new DataByteOutputStream();
dbs.writeShort((short)old.getSignature().length);
h.addData(dbs.toByteArray());
h.addData(old.getSignature());
}
/* Digest the message */
h.addData(m.toWire());
DataByteOutputStream out = new DataByteOutputStream();
name.toWireCanonical(out);
out.writeShort(DClass.ANY); /* class */
out.writeInt(0); /* ttl */
alg.toWireCanonical(out);
long time = timeSigned.getTime() / 1000;
short timeHigh = (short) (time >> 32);
int timeLow = (int) (time);
out.writeShort(timeHigh);
out.writeInt(timeLow);
out.writeShort(fudge);
out.writeShort(0); /* No error */
out.writeShort(0); /* No other data */
h.addData(out.toByteArray());
}
catch (IOException e) {
return;
}
Record r = new TSIGRecord(name, DClass.ANY, 0, alg, timeSigned, fudge,
h.sign(), m.getHeader().getID(),
Rcode.NOERROR, null);
m.addRecord(r, Section.ADDITIONAL);
}
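/*
 * Usage sketch (key name and secret are placeholders; query/response are Message objects):
 *
 *   byte [] secret = ...;                        // shared HMAC-MD5 key material
 *   TSIG tsig = new TSIG("tsig-key.example.", secret);
 *   tsig.apply(query, null);                     // sign an outgoing request
 *   ...
 *   boolean ok = tsig.verify(response, responseBytes, query.getTSIG());
 *
 * For a response, the TSIG record of the request is passed as "old" so that its signature
 * is included in the digest, as done at the top of apply() and verify().
 */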
/**
* Verifies a TSIG record on an incoming message. Since this is only called
* in the context where a TSIG is expected to be present, it is an error
* if one is not present.
* @param m The message
* @param b The message in unparsed form. This is necessary since TSIG
* signs the message in wire format, and we can't recreate the exact wire
* format (with the same name compression).
* @param old If this message is a response, the TSIG from the request
*/
public boolean
verify(Message m, byte [] b, TSIGRecord old) {
TSIGRecord tsig = m.getTSIG();
hmacSigner h = new hmacSigner(key);
if (tsig == null)
return false;
if (!tsig.getName().equals(name) || !tsig.getAlgorithm().equals(alg)) {
if (Options.check("verbose"))
System.err.println("BADKEY failure");
return false;
}
try {
if (old != null && tsig.getError() != Rcode.BADKEY &&
tsig.getError() != Rcode.BADSIG)
{
DataByteOutputStream dbs = new DataByteOutputStream();
dbs.writeShort((short)old.getSignature().length);
h.addData(dbs.toByteArray());
h.addData(old.getSignature());
}
m.getHeader().decCount(Section.ADDITIONAL);
byte [] header = m.getHeader().toWire();
m.getHeader().incCount(Section.ADDITIONAL);
h.addData(header);
int len = b.length - header.length;
len -= tsig.wireLength;
h.addData(b, header.length, len);
DataByteOutputStream out = new DataByteOutputStream();
tsig.getName().toWireCanonical(out);
out.writeShort(tsig.dclass);
out.writeInt(tsig.ttl);
tsig.getAlgorithm().toWireCanonical(out);
long time = tsig.getTimeSigned().getTime() / 1000;
short timeHigh = (short) (time >> 32);
int timeLow = (int) (time);
out.writeShort(timeHigh);
out.writeInt(timeLow);
out.writeShort(tsig.getFudge());
out.writeShort(tsig.getError());
if (tsig.getOther() != null) {
out.writeShort(tsig.getOther().length);
out.write(tsig.getOther());
}
else
out.writeShort(0);
h.addData(out.toByteArray());
}
catch (IOException e) {
return false;
}
if (axfrSigner != null) {
DataByteOutputStream dbs = new DataByteOutputStream();
dbs.writeShort((short)tsig.getSignature().length);
axfrSigner.addData(dbs.toByteArray());
axfrSigner.addData(tsig.getSignature());
}
if (h.verify(tsig.getSignature()))
return true;
else {
if (Options.check("verbose"))
System.err.println("BADSIG failure");
return false;
}
}
/** Prepares the TSIG object to verify an AXFR */
public void
verifyAXFRStart() {
axfrSigner = new hmacSigner(key);
}
/**
* Verifies a TSIG record on an incoming message that is part of an AXFR.
* TSIG records must be present on the first and last messages, and
* at least every 100 records in between (the last rule is not enforced).
* @param m The message
* @param b The message in unparsed form
* @param old The TSIG from the AXFR request
* @param required True if this message is required to include a TSIG.
* @param first True if this message is the first message of the AXFR
*/
public boolean
verifyAXFR(Message m, byte [] b, TSIGRecord old,
boolean required, boolean first)
{
TSIGRecord tsig = m.getTSIG();
hmacSigner h = axfrSigner;
if (first)
return verify(m, b, old);
try {
if (tsig != null)
m.getHeader().decCount(Section.ADDITIONAL);
byte [] header = m.getHeader().toWire();
if (tsig != null)
m.getHeader().incCount(Section.ADDITIONAL);
h.addData(header);
int len = b.length - header.length;
if (tsig != null)
len -= tsig.wireLength;
h.addData(b, header.length, len);
if (tsig == null) {
if (required)
return false;
else
return true;
}
if (!tsig.getName().equals(name) ||
!tsig.getAlgorithm().equals(alg))
{
if (Options.check("verbose"))
System.err.println("BADKEY failure");
return false;
}
DataByteOutputStream out = new DataByteOutputStream();
long time = tsig.getTimeSigned().getTime() / 1000;
short timeHigh = (short) (time >> 32);
int timeLow = (int) (time);
out.writeShort(timeHigh);
out.writeInt(timeLow);
out.writeShort(tsig.getFudge());
h.addData(out.toByteArray());
}
catch (IOException e) {
return false;
}
if (h.verify(tsig.getSignature()) == false) {
if (Options.check("verbose"))
System.err.println("BADSIG failure");
return false;
}
h.clear();
DataByteOutputStream dbs = new DataByteOutputStream();
dbs.writeShort((short)old.getSignature().length);
h.addData(dbs.toByteArray());
h.addData(tsig.getSignature());
return true;
}
}
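/*
 * Minimal usage sketch (not part of dnsjava): signing an outgoing query and verifying the
 * response with the apply()/verify() methods defined above. The TSIG key and the Message
 * objects are assumed to be built elsewhere; the response must also be supplied in wire
 * form because verification needs the exact bytes that were signed.
 */
class TSIGRoundTripSketch {
static boolean signAndVerify(TSIG key, Message query, Message response, byte [] responseWire)
throws IOException
{
key.apply(query, null); /* appends a TSIG record to the ADDITIONAL section */
TSIGRecord requestTSIG = query.getTSIG(); /* the record that was just added */
return key.verify(response, responseWire, requestTSIG);
}
}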
|
package com.intellij.util.xml.ui;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.PsiClass;
import com.intellij.util.xml.DomElement;
import com.intellij.util.xml.DomUtil;
import com.intellij.util.xml.GenericValue;
import com.intellij.util.xml.impl.ui.*;
import com.intellij.util.xml.reflect.DomCollectionChildDescription;
import com.intellij.util.ui.ColumnInfo;
import javax.swing.table.TableCellEditor;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
/**
* @author peter
*/
public class DomUIFactory {
private static final Logger LOG;
private static Method GET_VALUE_METHOD = null;
private static Method SET_VALUE_METHOD = null;
private static Method GET_STRING_METHOD = null;
private static Method SET_STRING_METHOD = null;
static {
LOG = Logger.getInstance("#com.intellij.util.xml.ui.DomUIFactory");
try {
GET_VALUE_METHOD = GenericValue.class.getMethod("getValue");
GET_STRING_METHOD = GenericValue.class.getMethod("getStringValue");
SET_VALUE_METHOD = findMethod(GenericValue.class, "setValue");
SET_STRING_METHOD = findMethod(GenericValue.class, "setStringValue");
}
catch (NoSuchMethodException e) {
LOG.error(e);
}
}
public static DomUIControl createControl(GenericValue element) {
return createGenericValueControl(DomUtil.extractParameterClassFromGenericType(element.getDomElementType()), element);
}
private static BaseControl createGenericValueControl(final Type type, final GenericValue element) {
if (type.equals(boolean.class) || type.equals(Boolean.class)) {
return new BooleanControl(element, GET_VALUE_METHOD, SET_VALUE_METHOD);
}
else if (type.equals(String.class)) {
return new StringControl(element, GET_VALUE_METHOD, SET_VALUE_METHOD);
}
else if (type.equals(PsiClass.class)) {
return new PsiClassControl(element, GET_STRING_METHOD, SET_STRING_METHOD);
}
else if (type instanceof Class && Enum.class.isAssignableFrom((Class)type)) {
return new EnumControl(element, (Class)type, GET_STRING_METHOD, SET_STRING_METHOD);
}
throw new IllegalArgumentException("Not supported: " + type);
}
private static Method findMethod(Class clazz, String methodName) {
final Method[] methods = clazz.getMethods();
for (Method method : methods) {
if (methodName.equals(method.getName())) {
return method;
}
}
return null;
}
private static TableCellEditor createCellEditor(DomElement element, Class type) {
if (String.class.equals(type)) {
return new DefaultCellEditor(removeBorder(new JTextField()));
}
if (PsiClass.class.equals(type)) {
return new PsiClassTableCellEditor(element);
}
if (Enum.class.equals(type)) {
return new DefaultCellEditor(removeBorder(EnumControl.createEnumComboBox(type)));
}
assert false : "Type not supported: " + type;
return null;
}
private static <T extends JComponent> T removeBorder(final T component) {
component.setBorder(new EmptyBorder(0, 0, 0, 0));
return component;
}
public static DomUIControl createCollectionControl(DomElement element, DomCollectionChildDescription description) {
final ColumnInfo columnInfo = createColumnInfo(description, element);
final Class aClass = DomUtil.extractParameterClassFromGenericType(description.getType());
return new DomCollectionControl<GenericValue<?>>(element, description, aClass == null, columnInfo);
}
private static ColumnInfo createColumnInfo(final DomCollectionChildDescription description,
final DomElement element) {
final String presentableName = description.getCommonPresentableName(element);
final Class aClass = DomUtil.extractParameterClassFromGenericType(description.getType());
if (aClass != null) {
if (Boolean.class.equals(aClass) || boolean.class.equals(aClass)) {
return new BooleanColumnInfo(presentableName);
}
return new GenericValueColumnInfo(presentableName, aClass, createCellEditor(element, aClass));
}
return new StringColumnInfo(presentableName);
}
}
|
package io.warp10;
public class Revision {
public static final String REVISION = "";
}
|
package liquibase.lock;
import liquibase.DatabaseChangeLogLock;
import liquibase.util.NetUtil;
import liquibase.database.Database;
import liquibase.database.sql.RawSqlStatement;
import liquibase.database.sql.UpdateStatement;
import liquibase.exception.JDBCException;
import liquibase.exception.LockException;
import liquibase.log.LogFactory;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.util.*;
public class LockHandler {
private Database database;
private boolean hasChangeLogLock = false;
private long changeLogLockWaitTime = 1000 * 60 * 5; //default to 5 mins
private static Map<Database, LockHandler> instances = new HashMap<Database, LockHandler>();
private LockHandler(Database database) {
this.database = database;
}
public static LockHandler getInstance(Database database) {
if (!instances.containsKey(database)) {
instances.put(database, new LockHandler(database));
}
return instances.get(database);
}
public boolean acquireLock() throws LockException {
if (!database.doesChangeLogLockTableExist()) {
throw new LockException("Could not acquire lock, table does not exist");
}
try {
Boolean locked;
try {
locked = (Boolean) database.getJdbcTemplate().queryForObject(database.getSelectChangeLogLockSQL(), Boolean.class);
} catch (JDBCException e) {
if (!database.getJdbcTemplate().executesStatements()) {
//expected
locked = false;
} else {
throw new LockException("Error checking database lock status", e);
}
}
if (Boolean.TRUE.equals(locked)) {
return false;
} else {
UpdateStatement updateStatement = new UpdateStatement(database.getDefaultSchemaName(), database.getDatabaseChangeLogLockTableName());
updateStatement.addNewColumnValue("LOCKED", true);
updateStatement.addNewColumnValue("LOCKGRANTED", new Timestamp(new java.util.Date().getTime()));
InetAddress localHost = NetUtil.getLocalHost();
updateStatement.addNewColumnValue("LOCKEDBY", localHost.getHostName() + " (" + localHost.getHostAddress() + ")");
updateStatement.setWhereClause("ID = 1");
database.getJdbcTemplate().comment("Lock Database");
int rowsUpdated = database.getJdbcTemplate().update(updateStatement);
if (rowsUpdated != 1) {
if (!database.getJdbcTemplate().executesStatements()) {
//expected
} else {
throw new LockException("Did not update change log lock correctly");
}
}
database.commit();
LogFactory.getLogger().info("Successfully acquired change log lock");
hasChangeLogLock = true;
return true;
}
} catch (Exception e) {
throw new LockException(e);
}
}
public void releaseLock() throws LockException {
if (database.doesChangeLogLockTableExist()) {
try {
UpdateStatement releaseStatement = new UpdateStatement(database.getDefaultSchemaName(), database.getDatabaseChangeLogLockTableName());
releaseStatement.addNewColumnValue("LOCKED", false);
releaseStatement.addNewColumnValue("LOCKGRANTED", null);
releaseStatement.addNewColumnValue("LOCKEDBY", null);
releaseStatement.setWhereClause(" ID = 1");
database.getJdbcTemplate().comment("Release Database Lock");
int updatedRows = database.getJdbcTemplate().update(releaseStatement);
if (updatedRows != 1) {
if (database.getJdbcTemplate().executesStatements()) {
throw new LockException("Did not update change log lock correctly.\n\n" + releaseStatement + " updated " + updatedRows + " instead of the expected 1 row.");
}
}
database.commit();
hasChangeLogLock = false;
instances.remove(this.database);
LogFactory.getLogger().info("Successfully released change log lock");
} catch (Exception e) {
throw new LockException(e);
}
}
}
public DatabaseChangeLogLock[] listLocks() throws LockException {
if (!database.doesChangeLogLockTableExist()) {
return new DatabaseChangeLogLock[0];
}
try {
List<DatabaseChangeLogLock> allLocks = new ArrayList<DatabaseChangeLogLock>();
RawSqlStatement sqlStatement = new RawSqlStatement((("SELECT ID, LOCKED, LOCKGRANTED, LOCKEDBY FROM " + database.escapeTableName(database.getDefaultSchemaName(), database.getDatabaseChangeLogLockTableName()))));
List<Map> rows = database.getJdbcTemplate().queryForList(sqlStatement);
for (Map columnMap : rows) {
Object lockedValue = columnMap.get("LOCKED");
Boolean locked;
if (lockedValue instanceof Number) {
locked = ((Number) lockedValue).intValue() == 1;
} else {
locked = (Boolean) lockedValue;
}
if (locked != null && locked) {
allLocks.add(new DatabaseChangeLogLock((Integer) columnMap.get("ID"), (Date) columnMap.get("LOCKGRANTED"), (String) columnMap.get("LOCKEDBY")));
}
}
return allLocks.toArray(new DatabaseChangeLogLock[allLocks.size()]);
} catch (Exception e) {
throw new LockException(e);
}
}
public void waitForLock() throws LockException {
if (hasChangeLogLock) {
return;
}
try {
database.checkDatabaseChangeLogLockTable();
boolean locked = false;
long timeToGiveUp = new Date().getTime() + changeLogLockWaitTime;
while (!locked && new Date().getTime() < timeToGiveUp) {
locked = acquireLock();
if (!locked) {
System.out.println("Waiting for changelog lock....");
try {
Thread.sleep(1000 * 10);
} catch (InterruptedException e) {
;
}
}
}
if (!locked) {
DatabaseChangeLogLock[] locks = listLocks();
String lockedBy;
if (locks.length > 0) {
DatabaseChangeLogLock lock = locks[0];
lockedBy = lock.getLockedBy() + " since " + DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT).format(lock.getLockGranted());
} else {
lockedBy = "UNKNOWN";
}
throw new LockException("Could not acquire change log lock. Currently locked by " + lockedBy);
}
} catch (JDBCException e) {
if (!database.getJdbcTemplate().executesStatements()) {
; //nothing to do
} else {
throw new LockException(e);
}
}
}
/**
* Releases whatever locks are on the database change log table
*/
public void forceReleaseLock() throws LockException, JDBCException {
database.checkDatabaseChangeLogLockTable();
releaseLock();
}
/**
* Clears information the lock handler knows about the tables. Should only be called by LiquiBase internal calls
*/
public void reset() {
hasChangeLogLock = false;
}
}
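/*
 * Minimal usage sketch (not part of LiquiBase): the intended acquire/release cycle using
 * only the public methods of the LockHandler above. The Database instance and the unit of
 * work are assumed to be supplied by the caller.
 */
class LockHandlerUsageSketch {
static void runExclusively(Database database, Runnable work) throws LockException {
LockHandler lockHandler = LockHandler.getInstance(database);
lockHandler.waitForLock(); // blocks (up to 5 minutes by default) until the lock is acquired
try {
work.run(); // changelog work happens while the lock is held
} finally {
lockHandler.releaseLock(); // always release, even if the work fails
}
}
}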
|
package hudson.model;
import hudson.Util;
import hudson.security.ACL;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.acegisecurity.Authentication;
import org.acegisecurity.context.SecurityContextHolder;
import javax.servlet.ServletException;
import java.io.IOException;
/**
* Thread that executes builds.
*
* @author Kohsuke Kawaguchi
*/
public class Executor extends Thread implements ModelObject {
private final Computer owner;
private final Queue queue;
private long startTime;
/**
* Executor number that identifies it among other executors for the same {@link Computer}.
*/
private int number;
/**
* {@link Queue.Executable} being executed right now, or null if the executor is idle.
*/
private volatile Queue.Executable executable;
private Throwable causeOfDeath;
public Executor(Computer owner) {
super("Executor #"+owner.getExecutors().size()+" for "+owner.getDisplayName());
this.owner = owner;
this.queue = Hudson.getInstance().getQueue();
this.number = owner.getExecutors().size();
start();
}
public void run() {
// run as the system user. see ACL.SYSTEM for more discussion about why this is somewhat broken
SecurityContextHolder.getContext().setAuthentication(ACL.SYSTEM);
try {
while(true) {
if(Hudson.getInstance().isTerminating())
return;
synchronized(owner) {
if(owner.getNumExecutors()<owner.getExecutors().size()) {
// we've got too many executors.
owner.removeExecutor(this);
return;
}
}
Queue.Task task;
try {
task = queue.pop();
} catch (InterruptedException e) {
continue;
}
try {
startTime = System.currentTimeMillis();
executable = task.createExecutable();
queue.execute(executable,task);
} catch (Throwable e) {
// for some reason the executor died. this is really
// a bug in the code, but we don't want the executor to die,
// so just leave some info and go on to build other things
e.printStackTrace();
}
executable = null;
}
} catch(RuntimeException e) {
causeOfDeath = e;
throw e;
} catch (Error e) {
causeOfDeath = e;
throw e;
}
}
/**
* Returns the current {@link Queue.Executable} this executor is running.
*
* @return
* null if the executor is idle.
*/
public Queue.Executable getCurrentExecutable() {
return executable;
}
/**
* Same as {@link #getName()}.
*/
public String getDisplayName() {
return "Executor #"+getNumber();
}
/**
* Gets the executor number that uniquely identifies it among
* other {@link Executor}s for the same computer.
*
* @return
* a sequential number starting from 0.
*/
public int getNumber() {
return number;
}
/**
* Returns true if this {@link Executor} is ready for action.
*/
public boolean isIdle() {
return executable==null;
}
/**
* If this thread dies unexpectedly, obtain the cause of the failure.
*
* @return null if the death was expected or the thread is {@link #isAlive() still alive}.
* @since 1.142
*/
public Throwable getCauseOfDeath() {
return causeOfDeath;
}
/**
* Returns the progress of the current build as a number between 0 and 100.
*
* @return -1
* if it's impossible to estimate the progress.
*/
public int getProgress() {
Queue.Executable e = executable;
if(e==null) return -1;
long d = e.getParent().getEstimatedDuration();
if(d<0) return -1;
int num = (int)((System.currentTimeMillis()-startTime)*100/d);
if(num>=100) num=99;
return num;
}
/**
* Computes a human-readable text that shows the expected remaining time
* until the build completes.
*/
public String getEstimatedRemainingTime() {
Queue.Executable e = executable;
if(e==null) return Messages.Executor_NotAvailable();
long d = e.getParent().getEstimatedDuration();
if(d<0) return Messages.Executor_NotAvailable();
long eta = d-(System.currentTimeMillis()-startTime);
if(eta<=0) return Messages.Executor_NotAvailable();
return Util.getTimeSpanString(eta);
}
/**
* Stops the current build.
*/
public void doStop( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
Queue.Executable e = executable;
if(e!=null) {
e.getParent().checkAbortPermission();
interrupt();
}
rsp.forwardToPreviousPage(req);
}
public boolean hasStopPermission() {
Queue.Executable e = executable;
return e!=null && e.getParent().hasAbortPermission();
}
public Computer getOwner() {
return owner;
}
/**
* Returns the executor of the current thread.
*/
public static Executor currentExecutor() {
return (Executor)Thread.currentThread();
}
}
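/*
 * Minimal sketch (not Hudson code): the progress arithmetic used by Executor.getProgress()
 * above, shown with hypothetical numbers. 150s elapsed out of an estimated 200s yields 75;
 * once the estimate is exceeded the value is clamped to 99 so the UI never reports
 * completion before the build actually finishes.
 */
class ExecutorProgressSketch {
static int progress(long elapsedMillis, long estimatedMillis) {
if(estimatedMillis<0) return -1; // no estimate available
int num = (int)(elapsedMillis*100/estimatedMillis);
return num>=100 ? 99 : num;
}
public static void main(String[] args) {
System.out.println(progress(150000L, 200000L)); // 75
System.out.println(progress(250000L, 200000L)); // 99 (clamped)
}
}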
|
package hudson.model;
import hudson.Extension;
import hudson.Util;
import hudson.model.Descriptor.FormException;
import hudson.util.CaseInsensitiveComparator;
import hudson.util.DescribableList;
import hudson.util.FormValidation;
import hudson.views.LastDurationColumn;
import hudson.views.ListViewColumn;
import hudson.views.StatusColumn;
import hudson.views.WeatherColumn;
import hudson.views.ViewJobFilter;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
/**
* Displays {@link Job}s in a flat list view.
*
* @author Kohsuke Kawaguchi
*/
public class ListView extends View implements Saveable {
/**
* List of job names. This is what gets serialized.
*/
/*package*/ final SortedSet<String> jobNames = new TreeSet<String>(CaseInsensitiveComparator.INSTANCE);
private DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> jobFilters;
private DescribableList<ListViewColumn, Descriptor<ListViewColumn>> columns;
/**
* Include regex string.
*/
private String includeRegex;
/**
* Compiled include pattern from the includeRegex string.
*/
private transient Pattern includePattern;
/**
* Filter by enabled/disabled status of jobs.
* Null for no filter, true for enabled-only, false for disabled-only.
*/
private Boolean statusFilter;
@DataBoundConstructor
public ListView(String name) {
super(name);
initColumns();
initJobFilters();
}
public ListView(String name, ViewGroup owner) {
this(name);
this.owner = owner;
}
public void save() throws IOException {
// persistence is a part of the owner.
// due to the initialization timing issue, it can be null when this method is called.
if (owner!=null)
owner.save();
}
private Object readResolve() {
if(includeRegex!=null)
includePattern = Pattern.compile(includeRegex);
initColumns();
initJobFilters();
return this;
}
protected void initColumns() {
if (columns == null)
columns = new DescribableList<ListViewColumn, Descriptor<ListViewColumn>>(this,ListViewColumn.createDefaultInitialColumnList());
}
protected void initJobFilters() {
if (jobFilters == null)
jobFilters = new DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>>(this);
}
/**
* Used to determine if we want to display the Add button.
*/
public boolean hasJobFilterExtensions() {
return !ViewJobFilter.all().isEmpty();
}
public DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> getJobFilters() {
return jobFilters;
}
public Iterable<ListViewColumn> getColumns() {
return columns;
}
/**
* Returns a read-only view of all {@link Job}s in this view.
*
* <p>
* This method returns a separate copy each time to avoid
* concurrent modification issues.
*/
public synchronized List<TopLevelItem> getItems() {
SortedSet<String> names = new TreeSet<String>(jobNames);
if (includePattern != null) {
for (TopLevelItem item : Hudson.getInstance().getItems()) {
String itemName = item.getName();
if (includePattern.matcher(itemName).matches()) {
names.add(itemName);
}
}
}
List<TopLevelItem> items = new ArrayList<TopLevelItem>(names.size());
for (String n : names) {
TopLevelItem item = Hudson.getInstance().getItem(n);
// Add if no status filter or filter matches enabled/disabled status:
if(item!=null && (statusFilter == null || !(item instanceof AbstractProject)
|| ((AbstractProject)item).isDisabled() ^ statusFilter))
items.add(item);
}
// check the filters
Iterable<ViewJobFilter> jobFilters = getJobFilters();
List<TopLevelItem> allItems = Hudson.getInstance().getItems();
for (ViewJobFilter jobFilter: jobFilters) {
items = jobFilter.filter(items, allItems, this);
}
// for sanity, trim off duplicates
items = new ArrayList<TopLevelItem>(new LinkedHashSet<TopLevelItem>(items));
return items;
}
public boolean contains(TopLevelItem item) {
return jobNames.contains(item.getName());
}
/**
* Adds the given item to this view.
*
* @since 1.389
*/
public void add(TopLevelItem item) throws IOException {
jobNames.add(item.getName());
save();
}
public String getIncludeRegex() {
return includeRegex;
}
/**
* Filter by enabled/disabled status of jobs.
* Null for no filter, true for enabled-only, false for disabled-only.
*/
public Boolean getStatusFilter() {
return statusFilter;
}
public synchronized Item doCreateItem(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
Item item = Hudson.getInstance().doCreateItem(req, rsp);
if(item!=null) {
jobNames.add(item.getName());
owner.save();
}
return item;
}
@Override
public synchronized void onJobRenamed(Item item, String oldName, String newName) {
if(jobNames.remove(oldName) && newName!=null)
jobNames.add(newName);
}
/**
* Handles the configuration submission.
*
* Load view-specific properties here.
*/
@Override
protected void submit(StaplerRequest req) throws ServletException, FormException, IOException {
jobNames.clear();
for (TopLevelItem item : Hudson.getInstance().getItems()) {
if(req.getParameter(item.getName())!=null)
jobNames.add(item.getName());
}
if (req.getParameter("useincluderegex") != null) {
includeRegex = Util.nullify(req.getParameter("includeRegex"));
if (includeRegex == null)
includePattern = null;
else
includePattern = Pattern.compile(includeRegex);
} else {
includeRegex = null;
includePattern = null;
}
if (columns == null) {
columns = new DescribableList<ListViewColumn,Descriptor<ListViewColumn>>(this);
}
columns.rebuildHetero(req, req.getSubmittedForm(), ListViewColumn.all(), "columns");
if (jobFilters == null) {
jobFilters = new DescribableList<ViewJobFilter,Descriptor<ViewJobFilter>>(this);
}
jobFilters.rebuildHetero(req, req.getSubmittedForm(), ViewJobFilter.all(), "jobFilters");
String filter = Util.fixEmpty(req.getParameter("statusFilter"));
statusFilter = filter != null ? "1".equals(filter) : null;
}
@Extension
public static final class DescriptorImpl extends ViewDescriptor {
public String getDisplayName() {
return Messages.ListView_DisplayName();
}
/**
* Checks if the include regular expression is valid.
*/
public FormValidation doCheckIncludeRegex( @QueryParameter String value ) throws IOException, ServletException, InterruptedException {
String v = Util.fixEmpty(value);
if (v != null) {
try {
Pattern.compile(v);
} catch (PatternSyntaxException pse) {
return FormValidation.error(pse.getMessage());
}
}
return FormValidation.ok();
}
}
/**
* @deprecated as of 1.391
* Use {@link ListViewColumn#createDefaultInitialColumnList()}
*/
public static List<ListViewColumn> getDefaultColumns() {
return ListViewColumn.createDefaultInitialColumnList();
}
}
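/*
 * Minimal sketch (not Hudson code): the status-filter test applied in ListView.getItems()
 * above. A project passes when its disabled flag XOR-ed with statusFilter is true, so
 * statusFilter == Boolean.TRUE keeps only enabled jobs, Boolean.FALSE keeps only disabled
 * ones, and null keeps everything.
 */
class StatusFilterSketch {
static boolean passes(boolean disabled, Boolean statusFilter) {
return statusFilter == null || disabled ^ statusFilter;
}
public static void main(String[] args) {
System.out.println(passes(false, Boolean.TRUE)); // true: enabled job kept by the "enabled only" filter
System.out.println(passes(true, Boolean.TRUE)); // false: disabled job filtered out
System.out.println(passes(true, null)); // true: no filter configured
}
}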
|
package jlibs.core.lang;
import jlibs.core.io.IOUtil;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.util.Iterator;
import java.util.LinkedList;
/**
* @author Santhosh Kumar T
*/
public class Bytes implements Iterable<ByteSequence>{
private LinkedList<ByteSequence> list = new LinkedList<ByteSequence>();
public int size(){
int size = 0;
for(ByteSequence seq: list)
size += seq.length();
return size;
}
public boolean isEmpty(){
if(list.size()==0)
return true;
for(ByteSequence seq: list){
if(seq.length()>0)
return false;
}
return true;
}
public void clear(){
list.clear();
}
public void prepend(ByteSequence seq){
list.add(0, seq);
}
public void append(ByteSequence seq){
list.add(seq);
}
@Override
public Iterator<ByteSequence> iterator(){
return list.iterator();
}
public void remove(int count){
Iterator<ByteSequence> iter = iterator();
while(iter.hasNext()){
ByteSequence seq = iter.next();
count -= seq.length();
iter.remove();
if(count==0)
return;
else if(count<0){
list.add(0, seq.slice(seq.length()+count));
return;
}
}
}
private ByteBuffer buff;
public int readFrom(ReadableByteChannel channel) throws IOException{
int total = 0;
while(true){
if(buff==null)
buff = ByteBuffer.allocate(1024);
int read = channel.read(buff);
if(read<=0){
if(read<0 && buff.position()==0) // garbage buff
buff = null;
break;
}
total += read;
append(new ByteSequence(buff.array(), buff.position() - read, read));
if(buff.hasRemaining())
break;
else
buff = null;
}
return total;
}
public int readFully(InputStream in) throws IOException{
int total = 0;
while(true){
if(buff==null)
buff = ByteBuffer.allocate(1024);
int read = IOUtil.readFully(in, buff.array(), buff.position(), buff.limit());
if(read==0){
if(buff.position()==0) // garbage buff
buff = null;
break;
}
buff.position(buff.position()+read);
total += read;
append(new ByteSequence(buff.array(), buff.position() - read, read));
if(buff.hasRemaining()) // eof reached
break;
else
buff = null;
}
return total;
}
public int writeTo(WritableByteChannel channel) throws IOException{
int total = 0;
Iterator<ByteSequence> iter = iterator();
while(iter.hasNext()){
ByteSequence seq = iter.next();
int wrote = channel.write(seq.toByteBuffer());
if(wrote==0)
break;
total += wrote;
iter.remove();
if(wrote<seq.length()){
list.add(0, seq.slice(wrote));
break;
}
}
return total;
}
}
|
package jlibs.core.lang;
import jlibs.core.io.IOUtil;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.util.Iterator;
import java.util.LinkedList;
/**
* @author Santhosh Kumar T
*/
public class Bytes implements Iterable<ByteSequence>{
public static final int CHUNK_SIZE = 1024*4;
public Bytes(int chunkSize){
this.chunkSize = chunkSize;
}
public Bytes(){
this(CHUNK_SIZE);
}
private LinkedList<ByteSequence> list = new LinkedList<ByteSequence>();
public int size(){
int size = 0;
for(ByteSequence seq: list)
size += seq.length();
return size;
}
public boolean isEmpty(){
if(list.size()==0)
return true;
for(ByteSequence seq: list){
if(seq.length()>0)
return false;
}
return true;
}
public void clear(){
list.clear();
}
public void prepend(ByteSequence seq){
list.add(0, seq);
}
public void append(ByteSequence seq){
list.add(seq);
}
@Override
public Iterator<ByteSequence> iterator(){
return list.iterator();
}
public void remove(int count){
Iterator<ByteSequence> iter = iterator();
while(iter.hasNext()){
ByteSequence seq = iter.next();
count -= seq.length();
iter.remove();
if(count==0)
return;
else if(count<0){
list.add(0, seq.slice(seq.length()+count));
return;
}
}
}
private ByteBuffer buff;
private int chunkSize;
public int readFrom(ReadableByteChannel channel) throws IOException{
int total = 0;
while(true){
if(buff==null)
buff = ByteBuffer.allocate(chunkSize);
int read = channel.read(buff);
if(read<=0){
if(read<0 && buff.position()==0) // garbage buff
buff = null;
break;
}
total += read;
append(new ByteSequence(buff.array(), buff.position() - read, read));
if(buff.hasRemaining())
break;
else
buff = null;
}
return total;
}
public int readFully(InputStream in) throws IOException{
int total = 0;
while(true){
if(buff==null)
buff = ByteBuffer.allocate(chunkSize);
int read = IOUtil.readFully(in, buff.array(), buff.position(), buff.limit());
if(read==0){
if(buff.position()==0) // garbage buff
buff = null;
break;
}
buff.position(buff.position()+read);
total += read;
append(new ByteSequence(buff.array(), buff.position() - read, read));
if(buff.hasRemaining()) // eof reached
break;
else
buff = null;
}
return total;
}
public int writeTo(WritableByteChannel channel) throws IOException{
int total = 0;
Iterator<ByteSequence> iter = iterator();
while(iter.hasNext()){
ByteSequence seq = iter.next();
int wrote = channel.write(seq.toByteBuffer());
if(wrote==0)
break;
total += wrote;
iter.remove();
if(wrote<seq.length()){
list.add(0, seq.slice(wrote));
break;
}
}
return total;
}
}
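/*
 * Minimal usage sketch (not part of jlibs), assuming blocking channels: buffer everything a
 * readable channel produces with the Bytes class above, then replay it into a writable
 * channel. writeTo() is called in a loop because it returns as soon as the underlying
 * channel accepts only a partial write.
 */
class BytesCopySketch {
static void copy(ReadableByteChannel in, WritableByteChannel out) throws IOException{
Bytes bytes = new Bytes(); // default 4 KiB chunks
while(bytes.readFrom(in)>0)
; // keep reading until the channel reports end-of-stream
while(!bytes.isEmpty())
bytes.writeTo(out); // drains the buffered sequences, head first
}
}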
|
package org.judal.storage;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import static java.nio.file.StandardOpenOption.READ;
import java.util.Hashtable;
import java.util.Map;
import java.util.Properties;
import javax.servlet.ServletConfig;
import com.knowgate.stringutils.Str;
import com.knowgate.debug.DebugFile;
import static org.judal.storage.DataSource.PropertyNames;
import static org.judal.storage.DataSource.DefaultValues;
/**
* <p>Read properties from environment to create data sources and other objects.</p>
* @author Sergio Montoro Ten
* @version 1.0
*/
public class Env {
/**
* <p>Get default value for a property (if present).</p>
* @param propertyName String
* @return String Default property value or <b>null</b> if property has no default value.
*/
public static String getDataSourceDefault(String propertyName) {
for (String[] nameValue : DefaultValues)
if (nameValue[0].equalsIgnoreCase(propertyName))
return nameValue[1];
return null;
}
/**
* <p>Read DataSource properties from ServletConfig parameters into a Map
* DataSource property names are listed at DataSource.PropertyNames.</p>
* If namespace is not empty then parameter names will be read as namespace.parameterName
* @param obj ServletConfig
* @param namespace String
* @return Map containing the property name as key and its value at ServletConfig or a default value taken from DataSource interface static variables
* @throws ClassCastException If obj is not an instance of class javax.servlet.ServletConfig
*/
public static Map<String,String> getDataSourcePropertiesFromServletConfig(Object obj, String namespace) throws ClassCastException {
ServletConfig cfg = (ServletConfig) obj;
Hashtable<String,String> props = new Hashtable<String,String>();
if (namespace==null) namespace = "";
String prefix = namespace.length()==0 ? "" : namespace + ".";
for (String propName : PropertyNames) {
String prop = cfg.getInitParameter(prefix + propName);
if (DebugFile.trace)
DebugFile.writeln(prop==null ? "init parameter "+propName+" not found" : "read init parameter "+propName+"="+prop);
setProperty(props, propName, prop);
}
return props;
}
/**
* <p>Read DataSource properties from a properties input stream into a Map
* If namespace is not empty then property names will be read as namespace.parameterName</p>
* @param inStrm InputStream
* @param namespace String
* @return Map containing the property name as key and its value read from the stream, or a default value taken from DataSource
* @throws IOException
*/
public static Map<String,String> getDataSourceProperties(InputStream inStrm, String namespace) throws IOException {
Hashtable<String,String> props = new Hashtable<String,String>();
if (DebugFile.trace) {
DebugFile.writeln("Begin Env.getDataSourceProperties(InputStream, namespace=\""+namespace+"\")");
DebugFile.incIdent();
}
if (namespace==null) namespace = "";
String prefix = namespace.length()==0 ? "" : namespace + ".";
Properties reader = new Properties();
reader.load(inStrm);
for (String propName : PropertyNames) {
String prop = reader.getProperty(prefix + propName);
if (DebugFile.trace)
DebugFile.writeln(prop==null ? "property "+propName+" not found" : "read "+propName+"="+prop);
if (prop!=null) setProperty(props, propName, prop);
}
if (DebugFile.trace) {
DebugFile.decIdent();
DebugFile.writeln("End Env.getDataSourceProperties()");
}
return props;
}
/**
* <p>Read DataSource properties from a properties input stream into a Map
* If namespace is not empty then property names will be read as namespace.parameterName</p>
* @param inPath Path
* @param namespace String
* @return Map containing the property name as key and its value read from the file at inPath, or a default value taken from DataSource
* @throws IOException
*/
public static Map<String,String> getDataSourceProperties(Path inPath, String namespace) throws IOException {
InputStream inStrm = Files.newInputStream(inPath, READ);
Map<String,String> retval = getDataSourceProperties(inStrm, namespace);
inStrm.close();
return retval;
}
/**
* <p>Get the value of a property that represents a boolean type.</p>
* @param oProperties Map<String,String>
* @param sVarName Property Name
* @param bDefault Default Value
* @return If no property named sVarName is found at sProfile then bDefault value is returned.
* If sVarName is one of {true , yes, on, 1} then return value is <b>true</b>.
* If sVarName is one of {false, no, off, 0} then return value is <b>false</b>.
* If sVarName is any other value then then return value is bDefault
*/
public static boolean getBoolean(Map<String,String> oProperties, String sVarName, boolean bDefault) {
boolean bRetVal = bDefault;
String sBool = oProperties.get(sVarName);
if (null==sBool)
sBool = bDefault ? "true" : "false";
if (null!=sBool) {
sBool = sBool.trim();
if (sBool.equalsIgnoreCase("true") || sBool.equalsIgnoreCase("yes") || sBool.equalsIgnoreCase("on") || sBool.equals("1"))
bRetVal = true;
else if (sBool.equalsIgnoreCase("false") || sBool.equalsIgnoreCase("no") || sBool.equalsIgnoreCase("off") || sBool.equals("0"))
bRetVal = false;
else
bRetVal = bDefault;
}
return bRetVal;
} // getBoolean
/**
* <p>Get a property representing a file path.</p>
* <p>This method ensures that a file separator is always appended to the end of the read value.</p>
* @param oProperties Map<String,String>
* @param sVarName Property Name
* @return Value terminated with a file separator or <b>null</b> if no property with such name was found.
*/
public static String getPath(Map<String,String> oProperties, String sVarName) {
String sPath = oProperties.get(sVarName);
return Str.chomp(sPath, System.getProperty("file.separator"));
}
public static String getString(Map<String,String> oProperties, String sVarName, String sDefault) {
String sValue = oProperties.get(sVarName);
return null==sValue ? sDefault : sValue;
}
/**
* <p>Get a property representing a positive integer value.</p>
* @param oProperties Map<String,String>
* @param sVarName Property Name
* @param iDefault Default Value
* @return The property value parsed as an integer, or iDefault if no property with such name was found.
* @throws NumberFormatException If the property value is not a positive integer
*/
public static int getPositiveInteger(Map<String,String> oProperties, String sVarName, int iDefault) {
int iRetVal;
String sValue = oProperties.get(sVarName);
if (null==sValue) {
iRetVal = iDefault;
} else {
try {
iRetVal = Integer.parseInt(sValue);
if (iRetVal<0) throw new NumberFormatException();
}
catch (NumberFormatException nfe) {
if (DebugFile.trace) {
DebugFile.writeln(sVarName + " property must be a positive integer value");
DebugFile.decIdent();
}
throw new NumberFormatException(sVarName + " property must be a positive integer value");
}
}
return iRetVal;
}
private static void setProperty(Hashtable<String,String> props, String propName, String propValue) {
if (null==propValue) {
if (getDataSourceDefault(propName)!=null)
props.put(propName, getDataSourceDefault(propName));
} else if (propValue.length()==0) {
if (getDataSourceDefault(propName)!=null)
props.put(propName, getDataSourceDefault(propName));
} else {
props.put(propName, propValue);
}
}
}
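/*
 * Hypothetical usage sketch (not part of the library): exercising the helper methods of Env
 * above on a plain map. The property names "autocommit", "schema" and "poolsize" are made up
 * for illustration only.
 */
class EnvUsageSketch {
public static void main(String[] args) {
Map<String,String> props = new Hashtable<String,String>();
props.put("autocommit", "on");
boolean autoCommit = Env.getBoolean(props, "autocommit", false); // true: "on" is accepted
String schema = Env.getString(props, "schema", "public"); // "public": default is returned
int poolSize = Env.getPositiveInteger(props, "poolsize", 10); // 10: default is returned
System.out.println(autoCommit + " " + schema + " " + poolSize);
}
}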
|
package com.plexobject.service;
import java.lang.management.ManagementFactory;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.servlet.ServletContext;
import org.apache.log4j.Logger;
import com.plexobject.bridge.web.WebToJmsBridge;
import com.plexobject.bridge.web.WebToJmsEntry;
import com.plexobject.bus.EventBus;
import com.plexobject.bus.impl.EventBusImpl;
import com.plexobject.domain.Configuration;
import com.plexobject.domain.Preconditions;
import com.plexobject.encode.CodecType;
import com.plexobject.handler.BasePayload;
import com.plexobject.handler.Request;
import com.plexobject.handler.RequestHandler;
import com.plexobject.handler.Response;
import com.plexobject.http.WebContainerProvider;
import com.plexobject.http.netty.NettyWebContainerProvider;
import com.plexobject.metrics.ServiceMetrics;
import com.plexobject.metrics.ServiceMetricsRegistry;
import com.plexobject.metrics.StatsCollector;
import com.plexobject.security.SecurityAuthorizer;
import com.plexobject.service.impl.InterceptorLifecycleImpl;
import com.plexobject.service.impl.ServiceInvocationHelper;
import com.plexobject.service.impl.ServiceRegistryContainers;
import com.plexobject.service.impl.ServiceRegistryHandlers;
/**
* This class defines the registry for service handlers
*
* @author shahzad bhatti
*
*/
public class ServiceRegistry implements ServiceContainer,
InterceptorsLifecycle, ServiceRegistryMBean {
private static final Logger logger = Logger
.getLogger(ServiceRegistry.class);
private final Configuration config;
private WebToJmsBridge webToJmsBridge;
private boolean running;
private StatsCollector statsd;
private ServiceMetricsRegistry serviceMetricsRegistry;
private MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
private Collection<ServiceRegistryLifecycleAware> serviceRegistryLifecycleAwares = new HashSet<>();
private final ServiceInvocationHelper serviceInvocationHelper;
private final ServiceRegistryHandlers serviceRegistryHandlers;
private final ServiceRegistryContainers serviceRegistryContainers;
private final Map<String, RequestHandler> pingHandlers = new ConcurrentHashMap<>();
private final InterceptorsLifecycle interceptorLifecycle = new InterceptorLifecycleImpl();
private final boolean enablePingHandlers;
private ServletContext servletContext;
private SecurityAuthorizer securityAuthorizer;
private EventBus eventBus = new EventBusImpl();
// pass this as VM param -Dfile.encoding=UTF-8
static {
System.setProperty("file.encoding", "UTF-8");
}
public ServiceRegistry(Configuration config) {
this(config, new NettyWebContainerProvider());
}
public ServiceRegistry(Configuration config,
WebContainerProvider webContainerProvider) {
Preconditions.requireNotNull(config,
"config is required");
this.config = config;
this.serviceInvocationHelper = new ServiceInvocationHelper(this);
this.serviceRegistryHandlers = new ServiceRegistryHandlers();
this.serviceRegistryContainers = new ServiceRegistryContainers(config,
webContainerProvider, this);
this.enablePingHandlers = config.getBoolean("enablePingHandlers");
String statsCollectorClassName = config
.getProperty("statsCollectorClassName");
if (statsCollectorClassName != null) {
try {
this.statsd = (StatsCollector) Class.forName(
statsCollectorClassName).newInstance();
} catch (Exception e) {
logger.error("PLEXSVC Could not create stats collector", e);
}
// String servicePrefix = config.getProperty("serviceConfigs", "");
// this.statsd = new NonBlockingStatsDClient(config.getProperty(
// "statsd.prefix", servicePrefix), statsdHost,
// config.getInteger("statsd.port", 8125));
} else {
this.statsd = null;
}
serviceMetricsRegistry = new ServiceMetricsRegistry(this, statsd);
try {
mbs.registerMBean(this, new ObjectName(
"PlexServices:name=ServiceRegistry"));
} catch (InstanceAlreadyExistsException e) {
} catch (Exception e) {
logger.error(
"PLEXSVC Could not register mbean for service-registry", e);
}
}
@Override
public synchronized boolean isRunning() {
return running;
}
public ServiceConfigDesc getServiceConfig(RequestHandler h) {
return serviceRegistryHandlers.getServiceConfig(h);
}
private void setServiceConfig(RequestHandler h, ServiceConfigDesc config) {
serviceRegistryHandlers.setServiceConfig(h, config);
}
private void removeServiceConfig(RequestHandler h) {
serviceRegistryHandlers.removeServiceConfig(h);
}
public void setRequestHandlers(Collection<RequestHandler> handlers) {
for (RequestHandler h : handlers) {
addRequestHandler(h);
}
}
public Configuration getConfiguration() {
return config;
}
@Override
public void addRequestHandler(RequestHandler h) {
addRequestHandler(new ServiceConfigDesc(h), h);
}
public synchronized void addRequestHandler(ServiceConfigDesc config,
RequestHandler h) {
Preconditions.requireNotNull(config, "service handler " + h
+ " doesn't define ServiceConfig annotation");
ServiceContainer container = serviceRegistryContainers
.getOrAddServiceContainer(config.protocol());
Preconditions.requireNotNull(container,
"Unsupported container for service handler " + h);
serviceRegistryHandlers.add(h, config);
if (!container.existsRequestHandler(h)) {
registerMetricsJMX(h);
registerServiceHandlerLifecycle(h);
container.addRequestHandler(h);
if (enablePingHandlers) {
addPingHandler(h, config, container);
}
}
}
public ServiceMetricsRegistry getServiceMetricsRegistry() {
return serviceMetricsRegistry;
}
@Override
public synchronized boolean removeRequestHandler(RequestHandler h) {
ServiceConfigDesc config = getServiceConfig(h);
Preconditions.requireNotNull(config, "config " + h
+ " doesn't define ServiceConfig annotation");
ServiceContainer container = serviceRegistryContainers
.getOrAddServiceContainer(config.protocol());
if (container == null) {
return false;
}
if (container.removeRequestHandler(h)) {
if (enablePingHandlers) {
removePingHandler(h, config, container);
}
return true;
}
return false;
}
@Override
public boolean existsRequestHandler(RequestHandler h) {
ServiceConfigDesc config = getServiceConfig(h);
Preconditions.requireNotNull(config, "config " + h
+ " doesn't define ServiceConfig annotation");
ServiceContainer container = serviceRegistryContainers
.getOrAddServiceContainer(config.protocol());
if (container == null) {
return false;
}
return container.existsRequestHandler(h);
}
@Override
public Collection<ServiceConfigDesc> getServiceConfigurations() {
Collection<ServiceConfigDesc> configs = new HashSet<>();
for (RequestHandler h : getHandlers()) {
configs.add(getServiceConfig(h));
}
return configs;
}
public String dumpServiceConfigurations() {
StringBuilder sb = new StringBuilder();
for (ServiceConfigDesc c : getServiceConfigurations()) {
sb.append(c.protocol() + ":" + c.method() + "->" + c.endpoint()
+ " " + c.codec() + "\n");
}
return sb.toString();
}
@Override
public synchronized Collection<RequestHandler> getHandlers() {
return serviceRegistryContainers.getHandlers();
}
@Override
public synchronized void start() {
for (ServiceRegistryLifecycleAware srl : serviceRegistryLifecycleAwares) {
logger.info("PLEXSVC invoking onStarted for " + srl + " ...");
srl.onStarted(this);
}
serviceRegistryContainers.start();
running = true;
}
@Override
public synchronized void stop() {
serviceRegistryContainers.stop();
running = false;
for (ServiceRegistryLifecycleAware srl : serviceRegistryLifecycleAwares) {
srl.onStopped(this);
}
}
public void setWebToJmsEntries(Collection<WebToJmsEntry> entries) {
for (WebToJmsEntry e : entries) {
add(e);
}
}
/**
* This method adds a bridge between HTTP/Websocket and JMS
*
* @param e
*/
public synchronized void add(WebToJmsEntry e) {
if (webToJmsBridge == null) {
webToJmsBridge = new WebToJmsBridge(this,
serviceRegistryContainers.getJmsBridgeContainer());
}
webToJmsBridge.add(e);
}
public synchronized void addServiceRegistryLifecycleAware(
ServiceRegistryLifecycleAware serviceRegistryLifecycleAware) {
this.serviceRegistryLifecycleAwares.add(serviceRegistryLifecycleAware);
}
public synchronized void removeServiceRegistryLifecycleAware(
ServiceRegistryLifecycleAware serviceRegistryLifecycleAware) {
this.serviceRegistryLifecycleAwares
.remove(serviceRegistryLifecycleAware);
}
/**
* This method executes the handler, converting the payload to the proper Java class
* and enforcing the security constraints set by the underlying application.
*
* @param request
* @param handler
*/
public void invoke(Request request, RequestHandler handler) {
serviceInvocationHelper.invoke(request, handler, this);
}
public ServletContext getServletContext() {
return servletContext;
}
public void setServletContext(ServletContext servletContext) {
this.servletContext = servletContext;
}
@Override
public void addRequestInterceptor(Interceptor<Request> interceptor) {
interceptorLifecycle.addRequestInterceptor(interceptor);
}
@Override
public boolean removeRequestInterceptor(Interceptor<Request> interceptor) {
return interceptorLifecycle.removeRequestInterceptor(interceptor);
}
@Override
public Collection<Interceptor<Request>> getRequestInterceptors() {
return interceptorLifecycle.getRequestInterceptors();
}
@Override
public void addResponseInterceptor(Interceptor<Response> interceptor) {
interceptorLifecycle.addResponseInterceptor(interceptor);
}
@Override
public boolean removeResponseInterceptor(Interceptor<Response> interceptor) {
return interceptorLifecycle.removeResponseInterceptor(interceptor);
}
@Override
public Collection<Interceptor<Response>> getResponseInterceptors() {
return interceptorLifecycle.getResponseInterceptors();
}
@Override
public void addInputInterceptor(Interceptor<BasePayload<Object>> interceptor) {
interceptorLifecycle.addInputInterceptor(interceptor);
}
@Override
public boolean removeInputInterceptor(
Interceptor<BasePayload<Object>> interceptor) {
return interceptorLifecycle.removeInputInterceptor(interceptor);
}
@Override
public Collection<Interceptor<BasePayload<Object>>> getInputInterceptors() {
return interceptorLifecycle.getInputInterceptors();
}
@Override
public void addOutputInterceptor(
Interceptor<BasePayload<Object>> interceptor) {
interceptorLifecycle.addOutputInterceptor(interceptor);
}
@Override
public boolean removeOutputInterceptor(
Interceptor<BasePayload<Object>> interceptor) {
return interceptorLifecycle.removeOutputInterceptor(interceptor);
}
@Override
public Collection<Interceptor<BasePayload<Object>>> getOutputInterceptors() {
return interceptorLifecycle.getOutputInterceptors();
}
@Override
public boolean hasInputInterceptors() {
return interceptorLifecycle.hasInputInterceptors();
}
@Override
public boolean hasRequestInterceptors() {
return interceptorLifecycle.hasRequestInterceptors();
}
@Override
public boolean hasOutputInterceptors() {
return interceptorLifecycle.hasOutputInterceptors();
}
@Override
public boolean hasResponseInterceptors() {
return interceptorLifecycle.hasResponseInterceptors();
}
@Override
public AroundInterceptor getAroundInterceptor() {
return interceptorLifecycle.getAroundInterceptor();
}
@Override
public void setAroundInterceptor(AroundInterceptor interceptor) {
interceptorLifecycle.setAroundInterceptor(interceptor);
}
public SecurityAuthorizer getSecurityAuthorizer() {
return securityAuthorizer;
}
public void setSecurityAuthorizer(SecurityAuthorizer securityAuthorizer) {
this.securityAuthorizer = securityAuthorizer;
}
public EventBus getEventBus() {
return eventBus;
}
public void setEventBus(EventBus eventBus) {
this.eventBus = eventBus;
}
private void addPingHandler(final RequestHandler h,
final ServiceConfigDesc config, final ServiceContainer container) {
String pingEndpoint = config.endpoint() + ".ping";
ServiceConfigDesc pingConfig = ServiceConfigDesc
.builder(config)
.setCodecType(CodecType.TEXT)
.setMethod(
config.protocol() == Protocol.HTTP ? RequestMethod.GET
: config.method()).setEndpoint(pingEndpoint)
.setContentsClass(Void.class).setRecordStatsdMetrics(false)
.setRolesAllowed(new String[0]).build();
final RequestHandler pingHandler = new RequestHandler() {
@Override
public void handle(Request request) {
request.getResponse().setContents(
getServiceMetricsRegistry().getServiceMetrics(h)
.getSummary());
}
};
pingHandlers.put(pingEndpoint, pingHandler);
setServiceConfig(pingHandler, pingConfig);
container.addRequestHandler(pingHandler);
}
private void removePingHandler(final RequestHandler handler,
final ServiceConfigDesc config, final ServiceContainer container) {
String pingEndpoint = config.endpoint() + ".ping";
final RequestHandler pingHandler = pingHandlers.get(pingEndpoint);
if (pingHandler != null) {
container.removeRequestHandler(pingHandler);
removeServiceConfig(pingHandler);
}
}
private void registerServiceHandlerLifecycle(RequestHandler h) {
String objName = getPackageName(h) + h.getClass().getSimpleName()
+ ":type=Lifecycle";
try {
mbs.registerMBean(new ServiceHandlerLifecycle(this, h),
new ObjectName(objName));
} catch (InstanceAlreadyExistsException e) {
} catch (Exception e) {
logger.error("PLEXSVC Could not register mbean " + objName, e);
}
}
private static String getPackageName(RequestHandler h) {
return h.getClass().getPackage().getName().replaceAll(".*\\.", "")
+ ".";
}
private void registerMetricsJMX(RequestHandler h) {
String objName = getPackageName(h) + h.getClass().getSimpleName()
+ ":type=Metrics";
ServiceMetrics metrics = serviceMetricsRegistry.getServiceMetrics(h);
try {
mbs.registerMBean(metrics, new ObjectName(objName));
} catch (InstanceAlreadyExistsException e) {
} catch (Exception e) {
logger.error("PLEXSVC Could not register mbean " + objName, e);
}
}
}
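/*
 * Minimal handler sketch (not part of plexsvc), mirroring the anonymous ping handler used
 * above: the RequestHandler contract that ServiceRegistry.invoke() dispatches to. How the
 * registry is configured and how this handler is annotated and registered is assumed to
 * happen elsewhere.
 */
class PingHandlerSketch {
static final RequestHandler PONG_HANDLER = new RequestHandler() {
@Override
public void handle(Request request) {
request.getResponse().setContents("pong"); // reply with a fixed payload
}
};
}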
|
package imagej.plugins.commands.debug;
import imagej.command.Command;
import imagej.menu.MenuConstants;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Map;
import org.scijava.ItemIO;
import org.scijava.plugin.Menu;
import org.scijava.plugin.Parameter;
import org.scijava.plugin.Plugin;
/**
* Provides a complete stack dump of all threads.
* <p>
* The output is similar to a subset of that given when Ctrl+\ (or Ctrl+Pause on
* Windows) is pressed from the console.
* </p>
*
* @author Curtis Rueden
*/
@Plugin(type = Command.class, menu = {
@Menu(label = MenuConstants.PLUGINS_LABEL,
weight = MenuConstants.PLUGINS_WEIGHT,
mnemonic = MenuConstants.PLUGINS_MNEMONIC), @Menu(label = "Debug"),
@Menu(label = "Dump Stack", accelerator = "ctrl back_slash") },
headless = true)
public class DumpStack implements Command {
// -- Constants --
private static final String NL = System.getProperty("line.separator");
// -- Parameters --
@Parameter(label = "Stack Dump", type = ItemIO.OUTPUT)
private String stackDump;
// -- Runnable methods --
@Override
public void run() {
final StringBuilder sb = new StringBuilder();
final Map<Thread, StackTraceElement[]> stackTraces =
Thread.getAllStackTraces();
// sort list of threads by name
final ArrayList<Thread> threads =
new ArrayList<Thread>(stackTraces.keySet());
Collections.sort(threads, new Comparator<Thread>() {
@Override
public int compare(final Thread t1, final Thread t2) {
return t1.getName().compareTo(t2.getName());
}
});
for (final Thread t : threads) {
dumpThread(t, stackTraces.get(t), sb);
}
stackDump = sb.toString();
}
// -- Helper methods --
private void dumpThread(final Thread t, final StackTraceElement[] trace,
final StringBuilder sb)
{
threadInfo(t, sb);
for (final StackTraceElement element : trace) {
sb.append("\tat ");
sb.append(element);
sb.append(NL);
}
sb.append(NL);
}
private void threadInfo(final Thread t, final StringBuilder sb) {
sb.append("\"");
sb.append(t.getName());
sb.append("\"");
if (!t.isAlive()) sb.append(" DEAD");
if (t.isInterrupted()) sb.append(" INTERRUPTED");
if (t.isDaemon()) sb.append(" daemon");
sb.append(" prio=");
sb.append(t.getPriority());
sb.append(" id=");
sb.append(t.getId());
sb.append(" group=");
sb.append(t.getThreadGroup().getName());
sb.append(NL);
sb.append(" java.lang.Thread.State: ");
sb.append(t.getState());
sb.append(NL);
}
}
|
package org.helioviewer.jhv.plugins.swek.view;
import java.awt.BorderLayout;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import javax.swing.JPanel;
import javax.swing.JTree;
import org.helioviewer.jhv.plugins.swek.config.SWEKEventType;
import org.helioviewer.jhv.plugins.swek.download.SWEKDownloadManager;
import org.helioviewer.jhv.plugins.swek.model.EventTypePanelModel;
import org.helioviewer.jhv.plugins.swek.model.SWEKTreeModelEventType;
/**
* Panel displaying one event type
*
* @author Bram Bourgoignie (Bram.Bourgoignie@oma.be)
*
*/
public class EventPanel extends JPanel implements MouseListener {
/** serialVersionUID */
private static final long serialVersionUID = 1057300852220893978L;
/** The event type for which the event panel is created */
private final SWEKEventType eventType;
/** Tree containing the event type and its sources. */
private JTree eventTypeTree;
/** The model for this panel */
private final EventTypePanelModel eventPanelModel;
/** Instance of the download manager */
private final SWEKDownloadManager downloadManager;
/**
* Creates an event panel for the given event type.
*/
public EventPanel(SWEKEventType eventType) {
this.downloadManager = SWEKDownloadManager.getSingletonInstance();
this.eventType = eventType;
this.eventPanelModel = new EventTypePanelModel(new SWEKTreeModelEventType(this.eventType));
this.eventPanelModel.addEventPanelModelListener(this.downloadManager);
initVisualComponents();
}
/**
* Initializes the visual components
*/
private void initVisualComponents() {
setLayout(new BorderLayout());
this.eventTypeTree = new JTree(this.eventPanelModel);
this.eventTypeTree.setShowsRootHandles(true);
this.eventTypeTree.setSelectionModel(null);
this.eventTypeTree.addMouseListener(this);
this.eventTypeTree.addTreeExpansionListener(this.eventPanelModel);
this.eventTypeTree.setCellRenderer(new SWEKEventTreeRenderer());
add(this.eventTypeTree, BorderLayout.CENTER);
}
@Override
public void mouseClicked(MouseEvent e) {
int clickedOnRow = this.eventTypeTree.getRowForLocation(e.getX(), e.getY());
this.eventPanelModel.rowClicked(clickedOnRow);
this.eventTypeTree.revalidate();
this.eventTypeTree.repaint();
}
@Override
public void mouseEntered(MouseEvent e) {
}
@Override
public void mouseExited(MouseEvent e) {
}
@Override
public void mousePressed(MouseEvent e) {
}
@Override
public void mouseReleased(MouseEvent e) {
}
}
|
package tlc2.tool.fp;
import java.io.EOFException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.reflect.Field;
import java.nio.LongBuffer;
import java.rmi.RemoteException;
import java.util.Arrays;
import java.util.NoSuchElementException;
import java.util.TreeSet;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import sun.misc.Unsafe;
import tlc2.output.EC;
import tlc2.output.MP;
import util.Assert;
@SuppressWarnings({ "serial", "restriction" })
public class OffHeapDiskFPSet extends DiskFPSet implements FPSetStatistic {
protected static final double COLLISION_BUCKET_RATIO = .025d;
private static sun.misc.Unsafe getUnsafe() {
try {
final Field f = sun.misc.Unsafe.class.getDeclaredField("theUnsafe");
f.setAccessible(true);
return (sun.misc.Unsafe) f.get(null);
} catch (Exception e) {
throw new RuntimeException(
"Trying to use Sun VM specific sun.misc.Unsafe implementation but no Sun based VM detected.",
e);
}
}
protected final int bucketCapacity;
/**
* This implementation uses sun.misc.Unsafe instead of a wrapping
* java.nio.ByteBuffer due to the fact that the former's allocateMemory
* takes a long argument, while the latter is restricted to
* Integer.MAX_VALUE as its capacity.<br>
* In 2012 this poses too hard a limit on the usable memory, hence we trade
* generality for performance.
*/
private final Unsafe u;
/**
* The base address allocated for fingerprints
*/
private final long baseAddress;
/**
* Address size (either 4 or 8 bytes) depending on current architecture
*/
private final int logAddressSize;
/**
* A bucket containing collision elements which is used as a fall-back if a
* bucket is fully used up. Buckets cannot grow as the whole in-memory
* data-structure is static and not designed to be resized.
*
* <p>
* Open addressing - contrary to separate chaining - is not an option for an
* {@link OffHeapDiskFPSet}, because it does not support the invariant of
* monotonic increasing buckets required by the {@link Indexer}. Adhering to
* this invariant has the benefit, that only the elements in a bucket have
* to be sorted, but they don't change buckets during sort. Thus, a
* temporary sort array as in {@link LSBDiskFPSet.LSBFlusher#prepareTable()} is
* obsolete, halving the memory footprint.
* </p>
*/
protected CollisionBucket collisionBucket;
/**
* The indexer maps a fingerprint to an in-memory bucket and the associated lock
*/
private final Indexer indexer;
private final ReadWriteLock csRWLock = new ReentrantReadWriteLock();
protected OffHeapDiskFPSet(long maxInMemoryCapacity) throws RemoteException {
this(maxInMemoryCapacity, 0);
}
protected OffHeapDiskFPSet(final long maxInMemoryCapacity, final int prefixBits) throws RemoteException {
super(maxInMemoryCapacity);
// Determine base address which varies depending on machine architecture.
u = getUnsafe();
int addressSize = u.addressSize();
int cnt = -1;
while (addressSize > 0) {
cnt++;
addressSize = addressSize >>> 1;
}
logAddressSize = cnt;
// Allocate non-heap memory for maxInMemoryCapacity fingerprints
long bytes = maxInMemoryCapacity << logAddressSize;
baseAddress = u.allocateMemory(bytes);
// Zero the memory (could be parallelized over segments if this becomes a bottleneck).
// This is essential because allocateMemory returns uninitialized memory and
// memInsert/memLookup use 0L as the marker for an unused fingerprint slot.
// Otherwise memory garbage would be indistinguishable from a true fp.
for (long i = 0; i < maxInMemoryCapacity; i++) {
u.putAddress(log2phy(i), 0L);
}
final int csCapacity = (int) (maxTblCnt * COLLISION_BUCKET_RATIO);
this.collisionBucket = new TreeSetCollisionBucket(csCapacity);
this.flusher = new OffHeapMSBFlusher();
// Right-shift n as many times as needed to calculate moveBy. moveBy is
// the number of bits (fp & mask) has to be right-shifted to obtain the
// logical bucket index.
long n = (Long.MAX_VALUE >>> prefixBits) - (maxInMemoryCapacity - 1);
int moveBy = 0;
while (n >= maxInMemoryCapacity) {
moveBy++;
n = n >>> 1;
}
// Calculate Hamming weight of maxTblCnt
final int bitCount = Long.bitCount(maxInMemoryCapacity);
// If Hamming weight is 1, the logical index address can be calculated
// significantly faster by bit-shifting. However, with large memory
// sizes, only supporting increments of 2^n sizes would waste memory
// (e.g. either 32GiB or 64GiB). Hence, we check if the bitCount allows
// us to use bit-shifting. If not, we fall back to less efficient
// calculations. Additionally, we increase the bucket capacity to make
// use of the extra memory. The downside is that larger buckets mean a
// longer linear search. But a linear search over at most 31 elements
// still outperforms disk I/O.
if (bitCount == 1) {
bucketCapacity = InitialBucketCapacity;
this.indexer = new BitshiftingIndexer(moveBy, prefixBits);
} else {
// Round maxInMemoryCapacity to next lower 2^n power
cnt = -1;
while (bytes > 0) {
cnt++;
bytes = bytes >>> 1;
}
// Extra memory that cannot be addressed by BitshiftingIndexer
final long extraMem = (maxInMemoryCapacity * LongSize) - (long) Math.pow(2, cnt);
// Divide extra memory across addressable buckets
int x = (int) (extraMem / ((n + 1) / InitialBucketCapacity));
bucketCapacity = InitialBucketCapacity + (x / LongSize) ;
// Twice InitialBucketCapacity would mean we could have used one
// more bit for addressing.
Assert.check(bucketCapacity < (2 * InitialBucketCapacity), EC.GENERAL);
// non 2^n buckets cannot use a bit shifting indexer
this.indexer = new Indexer(moveBy, prefixBits);
}
}
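// Worked illustration (sketch only, never invoked) of the moveBy computation in the
// constructor above: for a hypothetical table of 2^20 slots and no prefix bits, the loop
// shifts (Long.MAX_VALUE >>> 0) - (2^20 - 1) to the right until the value drops below
// 2^20, ending with moveBy == 63 - 20 == 43, i.e. the 20 most significant payload bits
// of a fingerprint select the logical slot.
@SuppressWarnings("unused")
private static int moveBySketch(final long capacity, final int prefixBits) {
long n = (Long.MAX_VALUE >>> prefixBits) - (capacity - 1);
int moveBy = 0;
while (n >= capacity) {
moveBy++;
n = n >>> 1;
}
return moveBy; // e.g. moveBySketch(1L << 20, 0) == 43
}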
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#sizeof()
*/
public long sizeof() {
long size = 44; // approx size of this DiskFPSet object
size += maxTblCnt * (long) LongSize;
size += getIndexCapacity() * 4;
size += getCollisionBucketCnt() * (long) LongSize; // ignoring the internal TreeSet overhead here
return size;
}
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#needsDiskFlush()
*/
protected boolean needsDiskFlush() {
// Only flush due to collision ratio when primary hash table is at least
// 25% full. Otherwise a second flush potentially immediately follows a
// first one, when both values for tblCnt and collision size can be small.
return (collisionRatioExceeds(COLLISION_BUCKET_RATIO) && loadFactorExceeds(.25d))
|| loadFactorExceeds(1d) || forceFlush;
}
/**
* Checks whether the load of the (primary) in-memory hash table exceeds
* the given limit.
*
* @param limit
* A limit in the domain [0, 1] which restricts the hash table
* from growing past it.
* @return true iff the current hash table load exceeds the given limit
*/
private boolean loadFactorExceeds(final double limit) {
// Base this on the primary hash table only and exclude the
// collision bucket
final double d = (this.tblCnt.doubleValue() - collisionBucket.size()) / (double) this.maxTblCnt;
return d >= limit;
}
/**
* @param limit A limit the collisionBucket is not allowed to exceed
* @return true iff the size of the collision bucket relative to the size
* of the set exceeds the given limit
*/
private boolean collisionRatioExceeds(final double limit) {
// Do not use the thread safe getCollisionRatio here to avoid
// unnecessary locking. put() calls us while holding a memory write lock,
// which also blocks writers to collisionBucket.
final long size = collisionBucket.size();
// Subtract size from overall tblCnt as it includes the cs size
// @see put(long)
final double d = (double) size / (tblCnt.doubleValue() - size);
return d >= limit;
}
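// Numeric illustration (sketch, not called anywhere) of the flush heuristic implemented
// by needsDiskFlush/loadFactorExceeds/collisionRatioExceeds above. The counts are
// hypothetical: with 1,000,000 slots, 300,000 entries in total and 10,000 of them in the
// collision bucket, the load factor is (300000 - 10000) / 1000000 = 0.29 >= 0.25 and the
// collision ratio is 10000 / (300000 - 10000) ~= 0.034 >= 0.025, so a flush would be due.
@SuppressWarnings("unused")
private static boolean flushHeuristicSketch() {
final double maxTbl = 1000000d; // hypothetical primary table capacity
final double tbl = 300000d; // hypothetical total entry count (incl. collision bucket)
final double cs = 10000d; // hypothetical collision bucket size
final boolean loadExceeded = ((tbl - cs) / maxTbl) >= .25d;
final boolean ratioExceeded = (cs / (tbl - cs)) >= COLLISION_BUCKET_RATIO;
return loadExceeded && ratioExceeded; // true for these illustrative numbers
}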
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#getLockIndex(long)
*/
@Override
protected int getLockIndex(long fp) {
return this.indexer.getLockIndex(fp);
}
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#memLookup(long)
*/
boolean memLookup(long fp) {
final long position = indexer.getLogicalPosition(fp);
// Linearly search the logical bucket; 0L is an invalid fp and marks the
// end of the allocated bucket
long l = -1L;
for (int i = 0; i < bucketCapacity && l != 0L; i++) {
l = u.getAddress(log2phy(position, i));
// zero the long msb (which is 1 if fp has been flushed to disk)
if (fp == (l & 0x7FFFFFFFFFFFFFFFL)) {
return true;
}
}
return csLookup(fp);
}
/**
* Probes {@link OffHeapDiskFPSet#collisionBucket} for the given fingerprint.
* @param fp
* @return true iff fp is in the collision bucket
*/
protected boolean csLookup(long fp) {
try {
csRWLock.readLock().lock();
return collisionBucket.contains(fp);
} finally {
csRWLock.readLock().unlock();
}
}
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#memInsert(long)
*/
boolean memInsert(long fp) {
final long position = indexer.getLogicalPosition(fp);
long l = -1;
long freePosition = -1L;
for (int i = 0; i < bucketCapacity && l != 0L; i++) {
l = u.getAddress(log2phy(position, i));
// zero the long msb (which is 1 if fp has been flushed to disk)
if (fp == (l & 0x7FFFFFFFFFFFFFFFL)) {
return true;
} else if (l == 0L && freePosition == -1) {
if (i == 0) {
tblLoad++;
}
// empty slot found, simply insert at the _current_ position
u.putAddress(log2phy(position, i), fp);
this.tblCnt.getAndIncrement();
return false;
} else if (l < 0L && freePosition == -1) {
// record free (disk written fp) slot
freePosition = log2phy(position, i);
}
}
// index slot overflow, thus add to collisionBucket or write to free
// position.
if (freePosition > -1 && !csLookup(fp)) {
u.putAddress(freePosition, fp);
this.tblCnt.getAndIncrement();
return false;
} else {
boolean success = csInsert(fp);
if (success) {
this.tblCnt.getAndIncrement();
}
return !success;
}
}
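// Sketch (not used by the class) of the sign-bit convention relied on by memLookup and
// memInsert above: a slot whose most significant bit is set holds a fingerprint that has
// already been flushed to disk, and masking with 0x7FFFFFFFFFFFFFFFL recovers the value.
@SuppressWarnings("unused")
private static boolean msbMarkSketch() {
final long fp = 0x0123456789ABCDEFL; // hypothetical fingerprint (msb clear)
final long flushed = fp | 0x8000000000000000L; // mark as written to disk
final boolean readsAsNegative = flushed < 0L; // marked slots show up as negative longs
final boolean recoverable = fp == (flushed & 0x7FFFFFFFFFFFFFFFL);
return readsAsNegative && recoverable; // both hold
}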
/**
* Inserts the given fingerprint into the {@link OffHeapDiskFPSet#collisionBucket}.
* @param fp
* @return true iff fp has been added to the collision bucket
*/
protected boolean csInsert(long fp) {
try {
csRWLock.writeLock().lock();
return collisionBucket.add(fp);
} finally {
csRWLock.writeLock().unlock();
}
}
/**
* Converts from logical bucket index numbers and in-bucket position to a
* physical memory address.
*
* @param bucketNumber
* @param inBucketPosition
* @return The physical address of the fp slot
*/
private long log2phy(long bucketNumber, long inBucketPosition) {
return log2phy(bucketNumber + inBucketPosition);
}
/**
* Converts from logical addresses to
* physical memory addresses.
*
* @param logicalAddress
* @return The physical address of the fp slot
*/
private long log2phy(long logicalAddress) {
return baseAddress + (logicalAddress << logAddressSize);
}
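// Sketch (illustrative only) of the logical-to-physical mapping implemented by log2phy
// above: on a 64-bit VM addressSize() is 8, so logAddressSize is 3 and slot i lives at
// baseAddress + (i << 3), i.e. consecutive fingerprints are 8 bytes apart. The base and
// slot arguments are hypothetical.
@SuppressWarnings("unused")
private static long log2phySketch(final long hypotheticalBase, final long slot) {
final int hypotheticalLogAddressSize = 3; // 2^3 == 8 bytes per long
return hypotheticalBase + (slot << hypotheticalLogAddressSize);
}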
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#getTblCapacity()
*/
public long getTblCapacity() {
return maxTblCnt;
}
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#getCollisionBucketCnt()
*/
public long getCollisionBucketCnt() {
try {
this.csRWLock.readLock().lock();
return collisionBucket.size();
} finally {
this.csRWLock.readLock().unlock();
}
}
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#getCollisionRatio()
*/
public double getCollisionRatio() {
return (double) getCollisionBucketCnt() / tblCnt.doubleValue();
}
public class Indexer {
protected final long prefixMask;
/**
* Number of bits to right-shift a fingerprint by during index calculation
* @see MSBDiskFPSet#moveBy
*/
protected final int moveBy;
/**
* Number of bits to right-shift a fingerprint by when calculating the
* index of its striped lock.
*/
protected final int lockMoveBy;
public Indexer(final int moveBy, int prefixBits) {
// same for lockCnt
this.prefixMask = 0x7FFFFFFFFFFFFFFFL >>> prefixBits;
this.moveBy = moveBy;
this.lockMoveBy = 63 - prefixBits - LogLockCnt;
}
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet#getLockIndex(long)
*/
protected int getLockIndex(long fp) {
// calculate hash value (just n most significant bits of fp) which is
// used as an index address
final long idx = (fp & prefixMask) >> lockMoveBy;
Assert.check(0 <= idx && idx < lockCnt, EC.GENERAL);
return (int) idx;
}
/**
* @param fp
* @return The logical bucket position in the table for the given fingerprint.
*/
protected long getLogicalPosition(final long fp) {
// push MSBs for moveBy positions to the right and align with a bucket address
long position = (fp & prefixMask) >> moveBy;
position = floorToBucket(position);
Assert.check(0 <= position && position < maxTblCnt, EC.GENERAL);
return position;
}
public long getNextBucketBasePosition(long logicalPosition) {
return floorToBucket(logicalPosition + bucketCapacity);
}
/**
* Returns the largest position that is less than or equal to the
* argument and is a bucket base address.
*
* @param logicalPosition
* @return The largest bucket base position less than or equal to logicalPosition
*/
private long floorToBucket(long logicalPosition) {
long d = (long) Math.floor(logicalPosition / bucketCapacity);
return bucketCapacity * d;
}
/**
* @param logicalPosition
* @return true iff logicalPosition is a multiple of bucketCapacity
*/
public boolean isBucketBasePosition(long logicalPosition) {
return logicalPosition % bucketCapacity == 0;
}
}
/**
* A {@link BitshiftingIndexer} uses the more efficient AND operation
* compared to MODULO and DIV used by {@link Indexer}. Since indexing is
* executed on every {@link FPSet#put(long)} or {@link FPSet#contains(long)},
* it is worthwhile to minimize its execution overhead.
*/
public class BitshiftingIndexer extends Indexer {
/**
* Mask used to round down to a bucket base address, which is a power of 2.
*/
protected final long bucketBaseIdx;
public BitshiftingIndexer(final int moveBy, final int prefixBits) throws RemoteException {
super(moveBy, prefixBits);
this.bucketBaseIdx = 0x7FFFFFFFFFFFFFFFL - (bucketCapacity - 1);
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.Indexer#getLogicalPosition(long)
*/
@Override
protected long getLogicalPosition(final long fp) {
// push MSBs for moveBy positions to the right and align with a bucket address
long position = ((fp & prefixMask) >> moveBy) & bucketBaseIdx;
//Assert.check(0 <= position && position < maxTblCnt, EC.GENERAL);
return position;
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.Indexer#getNextBucketPosition(long)
*/
@Override
public long getNextBucketBasePosition(long logicalPosition) {
return (logicalPosition + bucketCapacity) & bucketBaseIdx;
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.Indexer#isBucketBase(long)
*/
@Override
public boolean isBucketBasePosition(long logicalPosition) {
return (logicalPosition & (InitialBucketCapacity - 1)) == 0;
}
}
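// Sketch (not used at runtime) contrasting the two floor-to-bucket strategies: for a
// power-of-two bucket capacity the AND used by BitshiftingIndexer and the divide/multiply
// used by Indexer.floorToBucket yield the same bucket base position. The capacity value
// is an arbitrary illustration.
@SuppressWarnings("unused")
private static boolean floorToBucketSketch(final long logicalPosition) {
final long capacity = 32L; // hypothetical power-of-two bucket capacity
final long viaDivide = (logicalPosition / capacity) * capacity; // Indexer style
final long viaMask = logicalPosition & ~(capacity - 1); // BitshiftingIndexer style
return viaDivide == viaMask; // holds for any non-negative logicalPosition
}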
public class OffHeapMSBFlusher extends Flusher {
/* (non-Javadoc)
* @see tlc2.tool.fp.DiskFPSet.Flusher#flushTable()
*/
@Override
void flushTable() throws IOException {
super.flushTable();
// discard old values in the collision bucket
collisionBucket.clear();
}
/* (non-Javadoc)
* @see tlc2.tool.fp.MSBDiskFPSet#mergeNewEntries(java.io.RandomAccessFile, java.io.RandomAccessFile)
*/
@Override
protected void mergeNewEntries(RandomAccessFile inRAF, RandomAccessFile outRAF) throws IOException {
final long buffLen = tblCnt.get();
ByteBufferIterator itr = new ByteBufferIterator(u, baseAddress, collisionBucket, buffLen);
// Precompute the maximum value of the new file
long maxVal = itr.getLast();
if (index != null) {
maxVal = Math.max(maxVal, index[index.length - 1]);
}
int indexLen = calculateIndexLen(buffLen);
index = new long[indexLen];
index[indexLen - 1] = maxVal;
currIndex = 0;
counter = 0;
// initialize positions in "buff" and "inRAF"
long value = 0L; // initialize only to make compiler happy
boolean eof = false;
if (fileCnt > 0) {
try {
value = inRAF.readLong();
} catch (EOFException e) {
eof = true;
}
} else {
eof = true;
}
// merge while both lists still have elements remaining
boolean eol = false;
long fp = itr.next();
while (!eof || !eol) {
if ((value < fp || eol) && !eof) {
writeFP(outRAF, value);
try {
value = inRAF.readLong();
} catch (EOFException e) {
eof = true;
}
} else {
// prevent converting every long to String when assertion holds (this is expensive)
if (value == fp) {
//MAK: Commented cause a duplicate does not pose a risk for correctness.
// It merely indicates a bug somewhere.
//Assert.check(false, EC.TLC_FP_VALUE_ALREADY_ON_DISK,
// String.valueOf(value));
MP.printWarning(EC.TLC_FP_VALUE_ALREADY_ON_DISK, String.valueOf(value));
}
writeFP(outRAF, fp);
// we used one fp up, thus move to next one
try {
fp = itr.next();
} catch (NoSuchElementException e) {
// has read all elements?
Assert.check(!itr.hasNext(), EC.GENERAL);
eol = true;
}
}
}
// both sets used up completely
Assert.check(eof && eol, EC.GENERAL);
// currIndex is the number of disk writes
Assert.check(currIndex == indexLen - 1, EC.SYSTEM_INDEX_ERROR);
// maintain object invariants
fileCnt += buffLen;
}
}
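// Sketch (illustration only) of the two-way merge performed by mergeNewEntries above:
// repeatedly emit the smaller head of two sorted sources until both are exhausted. The
// arrays stand in for the sorted on-disk file and the sorted in-memory iterator.
@SuppressWarnings("unused")
private static long[] mergeSketch(final long[] disk, final long[] memory) {
final long[] out = new long[disk.length + memory.length];
int d = 0, m = 0, o = 0;
while (d < disk.length || m < memory.length) {
if (m >= memory.length || (d < disk.length && disk[d] < memory[m])) {
out[o++] = disk[d++]; // disk value is smaller (or memory is exhausted)
} else {
out[o++] = memory[m++]; // memory value is smaller or equal (or disk is exhausted)
}
}
return out;
}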
/**
* A non-thread-safe iterator
*/
public class ByteBufferIterator {
private final CollisionBucket cs;
/**
* Number of elements in the buffer
*/
private long bufferElements;
/**
* Total number of elements in the buffer and the collisionBucket combined.
*/
private final long totalElements;
/**
* The logical position is the position inside the {@link LongBuffer} and
* thus reflects a fingerprint slot.
*/
private long logicalPosition = 0;
/**
* Used to verify that the elements we hand out are strictly monotonically
* increasing.
*/
private long previous = -1L;
/**
* Number of elements read with next()
*/
private long readElements = 0L;
private long cache = -1L;
private final Unsafe unsafe;
public ByteBufferIterator(Unsafe u, long baseAddress, CollisionBucket collisionBucket, long expectedElements) {
this.unsafe = u;
this.logicalPosition = 0L;
this.totalElements = expectedElements;
// Do calculation before prepareForFlush() potentially empties the cs causing size() to return 0
this.bufferElements = expectedElements - collisionBucket.size();
this.cs = collisionBucket;
this.cs.prepareForFlush();
}
/**
* Returns the next element in the iteration.
*
* @return the next element in the iteration.
* @exception NoSuchElementException iteration has no more elements.
*/
public long next() {
long result = -1L;
if (cache < 0L && bufferElements > 0) {
result = getNextFromBuffer();
bufferElements--; // one element consumed from the off-heap buffer
} else {
result = cache;
cache = -1L;
}
if (!cs.isEmpty()) {
long first = cs.first();
if (result > first || result == -1L) {
cs.remove(first);
cache = result;
result = first;
}
}
// adhere to the general Iterator contract to fail fast and not hand out
// meaningless values
if (result == -1L) {
throw new NoSuchElementException();
}
// hand out strictly monotonic increasing elements
Assert.check(previous < result, EC.GENERAL);
previous = result;
// maintain read statistics
readElements++;
return result;
}
private long getNextFromBuffer() {
sortNextBucket();
long l = unsafe.getAddress(log2phy(logicalPosition));
if (l > 0L) {
unsafe.putAddress(log2phy(logicalPosition++), l | 0x8000000000000000L);
return l;
}
while ((l = unsafe.getAddress(log2phy(logicalPosition))) <= 0L && logicalPosition < maxTblCnt) {
// increment position to next bucket
logicalPosition = indexer.getNextBucketBasePosition(logicalPosition);
sortNextBucket();
}
if (l > 0L) {
unsafe.putAddress(log2phy(logicalPosition++), l | 0x8000000000000000L);
return l;
}
throw new NoSuchElementException();
}
// sort the current logical bucket if we reach the first slot of the
// bucket
private void sortNextBucket() {
if (indexer.isBucketBasePosition(logicalPosition)) {
long[] longBuffer = new long[bucketCapacity];
int i = 0;
for (; i < bucketCapacity; i++) {
long l = unsafe.getAddress(log2phy(logicalPosition + i));
if (l <= 0L) {
break;
} else {
longBuffer[i] = l;
}
}
if (i > 0) {
Arrays.sort(longBuffer, 0, i);
for (int j = 0; j < i; j++) {
unsafe.putAddress(log2phy(logicalPosition, j),
longBuffer[j]);
}
}
}
}
/**
* Returns <tt>true</tt> if the iteration has more elements. (In other
* words, returns <tt>true</tt> if <tt>next</tt> would return an element
* rather than throwing an exception.)
*
* @return <tt>true</tt> if the iterator has more elements.
*/
public boolean hasNext() {
// hasNext does not move the indices at all!
return readElements < totalElements;
}
/**
* @return The last element in the iteration.
* @exception NoSuchElementException if iteration is empty.
*/
public long getLast() {
// Remember current position
final long tmpLogicalPosition = logicalPosition;
// Calculate last bucket position and have it sorted
logicalPosition = maxTblCnt - bucketCapacity;
sortNextBucket();
// Reverse the current bucket to obtain last element (More elegantly
// this could be achieved recursively, but this can cause a
// stack overflow).
long l = 1L;
while ((l = unsafe.getAddress(log2phy(logicalPosition-- + bucketCapacity - 1))) <= 0L) {
sortNextBucket();
}
// Done searching in-memory storage backwards, reset position to
// original value.
logicalPosition = tmpLogicalPosition;
// Compare max element found in main in-memory buffer to max
// element in collisionBucket. Return max of the two.
if (!cs.isEmpty()) {
l = Math.max(cs.last(), l);
}
// Either return the maximum element or fail fast.
if (l > 0L) {
return l;
}
throw new NoSuchElementException();
}
// prevent synthetic methods
private long log2phy(long logicalAddress) {
return OffHeapDiskFPSet.this.log2phy(logicalAddress);
}
private long log2phy(long bucketAddress, long inBucketAddress) {
return OffHeapDiskFPSet.this.log2phy(bucketAddress, inBucketAddress);
}
}
public interface CollisionBucket {
void clear();
void prepareForFlush();
void remove(long first);
long first();
long last();
boolean isEmpty();
/**
* @param fp
* @return {@code true} if this set did not already contain the specified
* fingerprint
*/
boolean add(long fp);
boolean contains(long fp);
long size();
}
public class TreeSetCollisionBucket implements CollisionBucket {
private final TreeSet<Long> set;
public TreeSetCollisionBucket(int initialCapacity) {
this.set = new TreeSet<Long>();
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#clear()
*/
public void clear() {
set.clear();
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#prepareForFlush()
*/
public void prepareForFlush() {
// no-op
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#remove(long)
*/
public void remove(long first) {
set.remove(first);
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#first()
*/
public long first() {
return set.first();
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#last()
*/
public long last() {
return set.last();
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#isEmpty()
*/
public boolean isEmpty() {
return set.isEmpty();
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#add(long)
*
* If this set already contains the element, the call leaves the set
* unchanged and returns false.
*/
public boolean add(long fp) {
return set.add(fp);
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#contains(long)
*/
public boolean contains(long fp) {
return set.contains(fp);
}
/* (non-Javadoc)
* @see tlc2.tool.fp.OffHeapDiskFPSet.CollisionBucket#size()
*/
public long size() {
return set.size();
}
}
public class PrettyPrinter {
/**
* Prints the occupancy of the current in-memory hash table to System.out in chunks of the given increment size
*/
public void printDistribution(final int increments) {
final int mask = increments - 1;
int cnt = 0;
int min = Integer.MAX_VALUE;
int max = 0;
for (long i = maxTblCnt - 1; i >= 0; i--) {
if ((i & mask) == 0) {
if (cnt > max) {
max = cnt;
}
if (cnt < min) {
min = cnt;
}
System.out.println(i + " " + cnt);
cnt = 0;
}
if (u.getAddress(log2phy(i)) > 0L) {
cnt++;
}
}
System.out.println("max: " + max + " min: " + min + " avg:" + (tblLoad / tblCnt.doubleValue()));
}
public void printBuckets() {
printBuckets(0, maxTblCnt);
}
/**
* @param from inclusive lower bound
* @param to exclusive upper bound
*/
public void printBuckets(int from, long to) {
for (long i = from; i < maxTblCnt && i < to; i++) {
if (i % bucketCapacity == 0) {
System.out.println("Bucket idx: " + i);
}
System.out.println(u.getAddress(log2phy(i)));
}
}
}
}
|