package org.jboss.virtual.plugins.context.zip;
import java.io.File;
import org.jboss.virtual.plugins.context.temp.BasicTempInfo;
import org.jboss.virtual.spi.VirtualFileHandler;
/**
* ZipEntry temp info.
* It resets the init status of the owning zip context.
*
* @author <a href="ales.justin@jboss.org">Ales Justin</a>
*/
public class ZipEntryTempInfo extends BasicTempInfo
{
private ZipEntryContext context;
public ZipEntryTempInfo(String path, File file, VirtualFileHandler handler, ZipEntryContext context)
{
super(path, file, handler);
this.context = context;
}
@Override
public void cleanup()
{
context.resetInitStatus();
context = null; // release
super.cleanup();
}
@Override
public boolean isValid()
{
return context != null && super.isValid();
}
}
|
package com.jmex.swt.lwjgl;
import java.util.HashMap;
import org.eclipse.swt.SWT;
import org.eclipse.swt.opengl.GLData;
import org.eclipse.swt.widgets.Composite;
import org.lwjgl.LWJGLException;
import com.jme.input.KeyInput;
import com.jme.input.MouseInput;
import com.jme.system.JmeException;
import com.jme.system.canvas.CanvasConstructor;
import com.jme.system.canvas.JMECanvas;
import com.jmex.swt.input.SWTKeyInput;
import com.jmex.swt.input.SWTMouseInput;
public class LWJGLSWTCanvasConstructor implements CanvasConstructor {
public JMECanvas makeCanvas(HashMap<String, Object> props) {
try {
Composite parent = (Composite) props.get(LWJGLSWTConstants.PARENT);
Integer style = (Integer)props.get(LWJGLSWTConstants.STYLE);
if (style == null) {
style = SWT.NONE;
}
GLData data = new GLData();
data.doubleBuffer = true;
LWJGLSWTCanvas canvas = new LWJGLSWTCanvas(parent, style, data);
return canvas;
} catch (LWJGLException e) {
e.printStackTrace();
throw new JmeException("Unable to create lwjgl-swt canvas: "+e.getLocalizedMessage());
}
}
}
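// --- Usage sketch (not part of the original file) ---
// A minimal illustration of how makeCanvas above might be driven. The props map keys
// LWJGLSWTConstants.PARENT and LWJGLSWTConstants.STYLE are the ones read in makeCanvas;
// the SWT Shell setup is standard SWT and is assumed, not taken from this file.
class LWJGLSWTCanvasConstructorUsageSketch {
    public static JMECanvas createCanvas(org.eclipse.swt.widgets.Shell shell) {
        HashMap<String, Object> props = new HashMap<String, Object>();
        props.put(LWJGLSWTConstants.PARENT, shell);                    // parent Composite (a Shell is a Composite)
        props.put(LWJGLSWTConstants.STYLE, Integer.valueOf(SWT.NONE)); // optional; makeCanvas defaults to SWT.NONE
        return new LWJGLSWTCanvasConstructor().makeCanvas(props);
    }
}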
|
package com.jwetherell.algorithms.sorts;
public class CountingSort {
private CountingSort() { }
public static Integer[] sort(Integer[] unsorted) {
int maxValue = findMax(unsorted);
int[] counts = new int[maxValue + 1]; // counts[v] = number of occurrences of value v
updateCounts(unsorted, counts);
populateCounts(unsorted, counts);
return unsorted;
}
// find the maximum value in the unsorted array
private static int findMax(Integer[] unsorted) {
int max = Integer.MIN_VALUE; // start below any possible element so the first comparison always updates max
for (int i : unsorted) {
if (i > max)
max = i;
}
return max;
}
// count the occurrences of each value in the unsorted array
private static void updateCounts(Integer[] unsorted, int[] counts) {
for (int e : unsorted)
counts[e]++;
}
// write the values back into the array in sorted order, using the counts
private static void populateCounts(Integer[] unsorted, int[] counts) {
int index = 0;
for (int i = 0; i < counts.length; i++) {
int e = counts[i];
while (e > 0) {
unsorted[index++] = i;
e--;
}
}
}
}
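// --- Usage sketch (not part of the original file) ---
// Demonstrates calling CountingSort.sort on a small array. Note that the algorithm above
// only handles non-negative integers (counts[e]++ would fail for negative values); the
// input array is sorted in place and also returned.
class CountingSortUsageSketch {
    public static void main(String[] args) {
        Integer[] data = { 4, 1, 3, 1, 0, 2 };
        Integer[] sorted = CountingSort.sort(data);
        // prints [0, 1, 1, 2, 3, 4]
        System.out.println(java.util.Arrays.toString(sorted));
    }
}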
|
package org.owasp.esapi.reference;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.owasp.esapi.ESAPI;
import org.owasp.esapi.Logger;
import org.owasp.esapi.SecurityConfiguration;
import org.owasp.esapi.errors.ConfigurationException;
// DISCUSS: Is there a good way for us to determine if they have changed the master key and salt, and if not,
// at _least_ log a warning? Need to distinguish the "as-shipped" versions from the current versions.
// Proposed solution: We will leave these 2 properties empty in the ESAPI.properties file and the
// installation instructions will show how to set them.
public class DefaultSecurityConfiguration implements SecurityConfiguration {
private Properties properties = null;
private String cipherXformFromESAPIProp = null; // New in ESAPI 2.0
private String cipherXformCurrent = null; // New in ESAPI 2.0
/** The name of the ESAPI property file */
public static final String RESOURCE_FILE = "ESAPI.properties";
private static final String REMEMBER_TOKEN_DURATION = "Authenticator.RememberTokenDuration";
private static final String IDLE_TIMEOUT_DURATION = "Authenticator.IdleTimeoutDuration";
private static final String ABSOLUTE_TIMEOUT_DURATION = "Authenticator.AbsoluteTimeoutDuration";
private static final String ALLOWED_LOGIN_ATTEMPTS = "Authenticator.AllowedLoginAttempts";
private static final String USERNAME_PARAMETER_NAME = "Authenticator.UsernameParameterName";
private static final String PASSWORD_PARAMETER_NAME = "Authenticator.PasswordParameterName";
private static final String MAX_OLD_PASSWORD_HASHES = "Authenticator.MaxOldPasswordHashes";
private static final String ALLOW_MULTIPLE_ENCODING = "Encoder.AllowMultipleEncoding";
private static final String CANONICALIZATION_CODECS = "Encoder.DefaultCodecList";
private static final String DISABLE_INTRUSION_DETECTION = "IntrusionDetector.Disable";
private static final String MASTER_KEY = "Encryptor.MasterKey";
private static final String MASTER_SALT = "Encryptor.MasterSalt";
private static final String KEY_LENGTH = "Encryptor.EncryptionKeyLength";
private static final String ENCRYPTION_ALGORITHM = "Encryptor.EncryptionAlgorithm";
private static final String HASH_ALGORITHM = "Encryptor.HashAlgorithm";
private static final String HASH_ITERATIONS = "Encryptor.HashIterations";
private static final String CHARACTER_ENCODING = "Encryptor.CharacterEncoding";
private static final String RANDOM_ALGORITHM = "Encryptor.RandomAlgorithm";
private static final String DIGITAL_SIGNATURE_ALGORITHM = "Encryptor.DigitalSignatureAlgorithm";
private static final String DIGITAL_SIGNATURE_KEY_LENGTH = "Encryptor.DigitalSignatureKeyLength";
// New in ESAPI Java 2.0 //
private static final String CIPHERTEXT_USE_MAC = "Encryptor.CipherText.useMAC";
private static final String PLAINTEXT_OVERWRITE = "Encryptor.PlainText.overwrite";
private static final String IV_TYPE = "Encryptor.ChooseIVMethod";
private static final String FIXED_IV = "Encryptor.fixedIV";
private static final String WORKING_DIRECTORY = "Executor.WorkingDirectory";
private static final String APPROVED_EXECUTABLES = "Executor.ApprovedExecutables";
private static final String FORCE_HTTPONLYSESSION = "HttpUtilities.ForceHttpOnlySession";
private static final String FORCE_SECURESESSION = "HttpUtilities.SecureSession";
private static final String FORCE_HTTPONLYCOOKIES = "HttpUtilities.ForceHttpOnlyCookies";
private static final String FORCE_SECURECOOKIES = "HttpUtilities.ForceSecureCookies";
private static final String UPLOAD_DIRECTORY = "HttpUtilities.UploadDir";
private static final String UPLOAD_TEMP_DIRECTORY = "HttpUtilities.UploadTempDir";
private static final String APPROVED_UPLOAD_EXTENSIONS = "HttpUtilities.ApprovedUploadExtensions";
private static final String MAX_UPLOAD_FILE_BYTES = "HttpUtilities.MaxUploadFileBytes";
private static final String RESPONSE_CONTENT_TYPE = "HttpUtilities.ResponseContentType";
private static final String APPLICATION_NAME = "Logger.ApplicationName";
private static final String LOG_LEVEL = "Logger.LogLevel";
private static final String LOG_FILE_NAME = "Logger.LogFileName";
private static final String MAX_LOG_FILE_SIZE = "Logger.MaxLogFileSize";
private static final String LOG_ENCODING_REQUIRED = "Logger.LogEncodingRequired";
private static final String LOG_APPLICATION_NAME = "Logger.LogApplicationName";
private static final String LOG_SERVER_IP = "Logger.LogServerIP";
private static final String VALIDATION_PROPERTIES = "Validator.ConfigurationFile";
/**
* The default max log file size is set to 10,000,000 bytes (10 Meg). If the current log file exceeds the current
* max log file size, the logger will move the old log data into another log file. There currently is a max of
* 1000 log files of the same name. If that is exceeded it will presumably start discarding the oldest logs.
*/
public static final int DEFAULT_MAX_LOG_FILE_SIZE = 10000000;
protected final int MAX_REDIRECT_LOCATION = 1000;
protected final int MAX_FILE_NAME_LENGTH = 1000; // DISCUSS: Is this for a given directory, or does it refer to the canonicalized full path name?
// Too long if the former! (Usually 255 is the limit there.) Hard to tell, since it is not used
// here in this class and it's protected, so its intent is unclear.
/*
* Implementation Keys
*/
private static final String LOG_IMPLEMENTATION = "ESAPI.Logger";
private static final String AUTHENTICATION_IMPLEMENTATION = "ESAPI.Authenticator";
private static final String ENCODER_IMPLEMENTATION = "ESAPI.Encoder";
private static final String ACCESS_CONTROL_IMPLEMENTATION = "ESAPI.AccessControl";
private static final String ENCRYPTION_IMPLEMENTATION = "ESAPI.Encryptor";
private static final String INTRUSION_DETECTION_IMPLEMENTATION = "ESAPI.IntrusionDetector";
private static final String RANDOMIZER_IMPLEMENTATION = "ESAPI.Randomizer";
private static final String EXECUTOR_IMPLEMENTATION = "ESAPI.Executor";
private static final String VALIDATOR_IMPLEMENTATION = "ESAPI.Validator";
private static final String HTTP_UTILITIES_IMPLEMENTATION = "ESAPI.HTTPUtilities";
// New in ESAPI Java 2.0 //
// Not implementation classes!!! //
private static final String PRINT_PROPERTIES_WHEN_LOADED = "ESAPI.printProperties";
private static final String CIPHER_TRANSFORMATION_IMPLEMENTATION = "Encryptor.CipherTransformation";
/*
* Default Implementations
*/
public static final String DEFAULT_LOG_IMPLEMENTATION = "org.owasp.esapi.reference.JavaLogFactory";
public static final String DEFAULT_AUTHENTICATION_IMPLEMENTATION = "org.owasp.esapi.reference.FileBasedAuthenticator";
public static final String DEFAULT_ENCODER_IMPLEMENTATION = "org.owasp.esapi.reference.DefaultEncoder";
public static final String DEFAULT_ACCESS_CONTROL_IMPLEMENTATION = "org.owasp.esapi.reference.accesscontrol.DefaultAccessController";
public static final String DEFAULT_ENCRYPTION_IMPLEMENTATION = "org.owasp.esapi.reference.JavaEncryptor";
public static final String DEFAULT_INTRUSION_DETECTION_IMPLEMENTATION = "org.owasp.esapi.reference.DefaultIntrusionDetector";
public static final String DEFAULT_RANDOMIZER_IMPLEMENTATION = "org.owasp.esapi.reference.DefaultRandomizer";
public static final String DEFAULT_EXECUTOR_IMPLEMENTATION = "org.owasp.esapi.reference.DefaultExecutor";
public static final String DEFAULT_HTTP_UTILITIES_IMPLEMENTATION = "org.owasp.esapi.reference.DefaultHTTPUtilities";
public static final String DEFAULT_VALIDATOR_IMPLEMENTATION = "org.owasp.esapi.reference.DefaultValidator";
private static final Map<String, Pattern> patternCache = new HashMap<String, Pattern>();
/*
* Absolute path to the userDirectory
*/
private static String userDirectory = System.getProperty("user.home" ) + "/.esapi";
/*
* Absolute path to the customDirectory
*/
private static String customDirectory = System.getProperty("org.owasp.esapi.resources");
/*
* Relative path to the resourceDirectory. Relative to the classpath.
* Specifically, ClassLoader.getResource(resourceDirectory + filename) will
* be used to load the file.
*/
private String resourceDirectory = ".esapi";
// private static long lastModified = -1;
/**
* Instantiates a new configuration.
*/
public DefaultSecurityConfiguration() {
// load security configuration
try {
loadConfiguration();
this.setCipherXProperties();
} catch( IOException e ) {
logSpecial("Failed to load security configuration", e );
}
}
/**
* Instantiates a new configuration with the supplied properties.
*
* Warning - if the setResourceDirectory() method is invoked the properties will
* be re-loaded, replacing the supplied properties.
*
* @param properties
*/
public DefaultSecurityConfiguration(Properties properties) {
super();
this.properties = properties;
this.setCipherXProperties();
}
private void setCipherXProperties() {
// TODO: FUTURE: Replace by CryptoControls ???
// See SecurityConfiguration.setCipherTransformation() for
// explanation of this.
cipherXformFromESAPIProp =
getESAPIProperty(CIPHER_TRANSFORMATION_IMPLEMENTATION,
"AES/CBC/PKCS5Padding");
cipherXformCurrent = cipherXformFromESAPIProp;
}
/**
* {@inheritDoc}
*/
public String getApplicationName() {
return getESAPIProperty(APPLICATION_NAME, "DefaultName");
}
/**
* {@inheritDoc}
*/
public String getLogImplementation() {
return getESAPIProperty(LOG_IMPLEMENTATION, DEFAULT_LOG_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getAuthenticationImplementation() {
return getESAPIProperty(AUTHENTICATION_IMPLEMENTATION, DEFAULT_AUTHENTICATION_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getEncoderImplementation() {
return getESAPIProperty(ENCODER_IMPLEMENTATION, DEFAULT_ENCODER_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getAccessControlImplementation() {
return getESAPIProperty(ACCESS_CONTROL_IMPLEMENTATION, DEFAULT_ACCESS_CONTROL_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getEncryptionImplementation() {
return getESAPIProperty(ENCRYPTION_IMPLEMENTATION, DEFAULT_ENCRYPTION_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getIntrusionDetectionImplementation() {
return getESAPIProperty(INTRUSION_DETECTION_IMPLEMENTATION, DEFAULT_INTRUSION_DETECTION_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getRandomizerImplementation() {
return getESAPIProperty(RANDOMIZER_IMPLEMENTATION, DEFAULT_RANDOMIZER_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getExecutorImplementation() {
return getESAPIProperty(EXECUTOR_IMPLEMENTATION, DEFAULT_EXECUTOR_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getHTTPUtilitiesImplementation() {
return getESAPIProperty(HTTP_UTILITIES_IMPLEMENTATION, DEFAULT_HTTP_UTILITIES_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public String getValidationImplementation() {
return getESAPIProperty(VALIDATOR_IMPLEMENTATION, DEFAULT_VALIDATOR_IMPLEMENTATION);
}
/**
* {@inheritDoc}
*/
public byte[] getMasterKey() {
byte[] key = getESAPIPropertyEncoded( MASTER_KEY, null );
if ( key == null || key.length == 0 ) {
throw new ConfigurationException("Property '" + MASTER_KEY +
"' missing or empty in ESAPI.properties file.");
}
return key;
}
/**
* {@inheritDoc}
*/
public void setResourceDirectory( String dir ) {
resourceDirectory = dir;
logSpecial( "Reset resource directory to: " + dir, null );
// reload configuration if necessary
try {
this.loadConfiguration();
} catch( IOException e ) {
logSpecial("Failed to load security configuration from " + dir, e);
}
}
public int getEncryptionKeyLength() {
return getESAPIProperty(KEY_LENGTH, 128 );
}
/**
* {@inheritDoc}
*/
public byte[] getMasterSalt() {
byte[] salt = getESAPIPropertyEncoded( MASTER_SALT, null );
if ( salt == null || salt.length == 0 ) {
throw new ConfigurationException("Property '" + MASTER_SALT +
"' missing or empty in ESAPI.properties file.");
}
return salt;
}
/**
* {@inheritDoc}
*/
public List<String> getAllowedExecutables() {
String def = "";
String[] exList = getESAPIProperty(APPROVED_EXECUTABLES,def).split(",");
return Arrays.asList(exList);
}
/**
* {@inheritDoc}
*/
public List<String> getAllowedFileExtensions() {
String def = ".zip,.pdf,.tar,.gz,.xls,.properties,.txt,.xml";
String[] extList = getESAPIProperty(APPROVED_UPLOAD_EXTENSIONS,def).split(",");
return Arrays.asList(extList);
}
/**
* {@inheritDoc}
*/
public int getAllowedFileUploadSize() {
return getESAPIProperty(MAX_UPLOAD_FILE_BYTES, 5000000);
}
private Properties loadPropertiesFromStream( InputStream is, String name ) throws IOException {
Properties config = new Properties();
try {
config.load(is);
logSpecial("Loaded '" + name + "' properties file", null);
} finally {
if ( is != null ) try { is.close(); } catch( Exception e ) {}
}
return config;
}
/**
* Load configuration. Never prints properties.
*
* @throws java.io.IOException
* if the file is inaccessible
*/
private void loadConfiguration() throws IOException {
try {
//first attempt file IO loading of properties
logSpecial("Attempting to load " + RESOURCE_FILE + " via file io.");
properties = loadPropertiesFromStream(getResourceStream(RESOURCE_FILE), RESOURCE_FILE);
} catch (Exception iae) {
//if file io loading fails, attempt classpath based loading next
logSpecial("Loading " + RESOURCE_FILE + " via file io failed.");
logSpecial("Attempting to load " + RESOURCE_FILE + " via the classpath.");
try {
properties = loadConfigurationFromClasspath(RESOURCE_FILE);
} catch (Exception e) {
logSpecial(RESOURCE_FILE + " could not be loaded by any means. fail.", e);
}
}
// if properties loaded properly above, get validation properties and merge them into the main properties
if (properties != null) {
String validationPropFileName = getESAPIProperty(VALIDATION_PROPERTIES, "validation.properties");
Properties validationProperties = null;
try {
//first attempt file IO loading of properties
logSpecial("Attempting to load " + validationPropFileName + " via file io.");
validationProperties = loadPropertiesFromStream(getResourceStream(validationPropFileName), validationPropFileName);
} catch (Exception iae) {
//if file io loading fails, attempt classpath based loading next
logSpecial("Loading " + validationPropFileName + " via file io failed.");
logSpecial("Attempting to load " + validationPropFileName + " via the classpath.");
try {
validationProperties = loadConfigurationFromClasspath(validationPropFileName);
} catch (Exception e) {
logSpecial(validationPropFileName + " could not be loaded by any means. fail.", e);
}
}
if (validationProperties != null) {
Iterator<?> i = validationProperties.keySet().iterator();
while( i.hasNext() ) {
String key = (String)i.next();
String value = validationProperties.getProperty(key);
properties.put( key, value);
}
}
}
if ( shouldPrintProperties() ) {
//FIXME - make this chunk configurable
}
}
/**
* @param filename
* @return An {@code InputStream} associated with the specified file name as
* a resource stream.
* @throws IOException
* If the file cannot be found or opened for reading.
*/
public InputStream getResourceStream(String filename) throws IOException {
if (filename == null) {
return null;
}
try {
File f = getResourceFile(filename);
if (f != null && f.exists()) {
return new FileInputStream(f);
}
} catch (Exception e) {
}
throw new FileNotFoundException();
}
/**
* {@inheritDoc}
*/
public File getResourceFile(String filename) {
logSpecial("Attempting to load " + filename + " via file io.");
if (filename == null) {
logSpecial("Failed to load properties via FileIO. Filename is null.");
return null; // not found.
}
File f = null;
// first, allow command line overrides. -Dorg.owasp.esapi.resources
// directory
f = new File(customDirectory, filename);
if (customDirectory != null && f.canRead()) {
logSpecial("Found in 'org.owasp.esapi.resources' directory: " + f.getAbsolutePath());
return f;
} else {
logSpecial("Not found in 'org.owasp.esapi.resources' directory or file not readable: " + f.getAbsolutePath());
}
// if not found, then try the programmatically set resource directory
// (this defaults to the SystemResource directory/RESOURCE_FILE)
URL fileUrl = ClassLoader.getSystemResource(resourceDirectory + File.separator + filename);
if (fileUrl != null) {
String fileLocation = fileUrl.getFile();
f = new File(fileLocation);
if (f.exists()) {
logSpecial("Found in SystemResource Directory/resourceDirectory: " + f.getAbsolutePath());
return f;
} else {
logSpecial("Not found in SystemResource Directory/resourceDirectory (this should never happen): " + f.getAbsolutePath());
}
} else {
logSpecial("Not found in SystemResource Directory/resourceDirectory: " + resourceDirectory + File.separator + filename);
}
// if not found, then try the user's home directory
f = new File(userDirectory, filename);
if (userDirectory != null && f.exists()) {
logSpecial("Found in 'user.home' directory: " + f.getAbsolutePath());
return f;
} else {
logSpecial("Not found in 'user.home' directory: " + f.getAbsolutePath());
}
// return null if not found
return null;
}
/**
* Used to load ESAPI.properties from a variety of different classpath locations.
*
* @param fileName The properties file filename.
*/
private Properties loadConfigurationFromClasspath(String fileName) throws IllegalArgumentException {
Properties result = null;
InputStream in = null;
ClassLoader[] loaders = new ClassLoader[] {
Thread.currentThread().getContextClassLoader(),
ClassLoader.getSystemClassLoader(),
getClass().getClassLoader()
};
ClassLoader currentLoader = null;
for (int i = 0; i < loaders.length; i++) {
if (loaders[i] != null) {
currentLoader = loaders[i];
try {
// try root
in = loaders[i].getResourceAsStream(fileName);
// try .esapi folder
if (in == null) {
in = currentLoader.getResourceAsStream(".esapi/" + fileName);
}
// try resources folder
if (in == null) {
in = currentLoader.getResourceAsStream("resources/" + fileName);
}
// now load the properties
if (in != null) {
result = new Properties();
result.load(in); // Can throw IOException
logSpecial("Successfully loaded " + fileName + " via the classpath! BOO-YA!");
}
} catch (Exception e) {
result = null;
} finally {
try {
if (in != null) in.close();
} catch (Exception e) {
}
}
}
}
if (result == null) {
throw new IllegalArgumentException("Failed to load " + RESOURCE_FILE + " as a classloader resource.");
}
return result;
}
/**
* Used to log errors to the console during the loading of the properties file itself. Can't use
* standard logging in this case, since the Logger is not initialized yet. Output is sent to
* {@code PrintStream} {@code System.out}.
*
* @param message The message to send to the console.
* @param e The error that occurred (this value is currently ignored).
*/
private void logSpecial(String message, Throwable e) {
System.out.println(message);
//TODO - e.printStackTrace() ?
}
private void logSpecial(String message) {
System.out.println(message);
}
/**
* {@inheritDoc}
*/
public String getPasswordParameterName() {
return getESAPIProperty(PASSWORD_PARAMETER_NAME, "password");
}
/**
* {@inheritDoc}
*/
public String getUsernameParameterName() {
return getESAPIProperty(USERNAME_PARAMETER_NAME, "username");
}
/**
* {@inheritDoc}
*/
public String getEncryptionAlgorithm() {
return getESAPIProperty(ENCRYPTION_ALGORITHM, "AES");
}
/**
* {@inheritDoc}
*/
public String getCipherTransformation() {
assert cipherXformCurrent != null : "Current cipher transformation is null";
return cipherXformCurrent;
}
/**
* {@inheritDoc}
*/
public String setCipherTransformation(String cipherXform) {
String previous = getCipherTransformation();
if ( cipherXform == null ) {
// Special case... means set it to original value from ESAPI.properties
cipherXformCurrent = cipherXformFromESAPIProp;
} else {
assert ! cipherXform.trim().equals("") :
"Cipher transformation cannot be just white space or empty string";
cipherXformCurrent = cipherXform; // Note: No other sanity checks!!!
}
return previous;
}
/**
* {@inheritDoc}
*/
public boolean useMACforCipherText() {
return getESAPIProperty(CIPHERTEXT_USE_MAC, true);
}
/**
* {@inheritDoc}
*/
public boolean overwritePlainText() {
return getESAPIProperty(PLAINTEXT_OVERWRITE, true);
}
/**
* {@inheritDoc}
*/
public String getIVType() {
String value = getESAPIProperty(IV_TYPE, "random");
if ( value.equalsIgnoreCase("fixed") || value.equalsIgnoreCase("random") ) {
return value;
} else if ( value.equalsIgnoreCase("specified") ) {
// This is planned for future implementation where setting
// Encryptor.ChooseIVMethod=specified will require setting some
// other TBD property that will specify an implementation class that
// will generate appropriate IVs. The intent of this would be to use
// such a class with various feedback modes where it is imperative
// that for a given key, any particular IV is *NEVER* reused. For
// now, we will assume that generating a random IV is usually going
// to be sufficient to prevent this.
throw new ConfigurationException("'" + IV_TYPE + "=specified' is not yet implemented. Use 'fixed' or 'random'");
} else {
// DISCUSS: Could just log this and then silently return "random" instead.
throw new ConfigurationException(value + " is illegal value for " + IV_TYPE +
". Use 'random' (preferred) or 'fixed'.");
}
}
/**
* {@inheritDoc}
*/
public String getFixedIV() {
if ( getIVType().equalsIgnoreCase("fixed") ) {
String ivAsHex = getESAPIProperty(FIXED_IV, ""); // No default
if ( ivAsHex == null || ivAsHex.trim().equals("") ) {
throw new ConfigurationException("Fixed IV requires property " +
FIXED_IV + " to be set, but it is not.");
}
return ivAsHex; // We do no further checks here as we have no context.
} else {
// DISCUSS: Should we just log a warning here and return null instead?
// If so, may cause NullPointException somewhere later.
throw new ConfigurationException("IV type not 'fixed' (set to '" +
getIVType() + "'), so no fixed IV applicable.");
}
}
/**
* {@inheritDoc}
*/
public String getHashAlgorithm() {
return getESAPIProperty(HASH_ALGORITHM, "SHA-512");
}
/**
* {@inheritDoc}
*/
public int getHashIterations() {
return getESAPIProperty(HASH_ITERATIONS, 1024);
}
/**
* {@inheritDoc}
*/
public String getCharacterEncoding() {
return getESAPIProperty(CHARACTER_ENCODING, "UTF-8");
}
/**
* {@inheritDoc}
*/
public boolean getAllowMultipleEncoding() {
return getESAPIProperty( ALLOW_MULTIPLE_ENCODING, false );
}
/**
* {@inheritDoc}
*/
public List<String> getDefaultCanonicalizationCodecs() {
List<String> def = new ArrayList<String>();
def.add( "org.owasp.esapi.codecs.HTMLEntityCodec" );
def.add( "org.owasp.esapi.codecs.PercentCodec" );
def.add( "org.owasp.esapi.codecs.JavaScriptCodec" );
return getESAPIProperty( CANONICALIZATION_CODECS, def );
}
/**
* {@inheritDoc}
*/
public String getDigitalSignatureAlgorithm() {
return getESAPIProperty(DIGITAL_SIGNATURE_ALGORITHM, "SHAwithDSA");
}
/**
* {@inheritDoc}
*/
public int getDigitalSignatureKeyLength() {
return getESAPIProperty(DIGITAL_SIGNATURE_KEY_LENGTH, 1024);
}
/**
* {@inheritDoc}
*/
public String getRandomAlgorithm() {
return getESAPIProperty(RANDOM_ALGORITHM, "SHA1PRNG");
}
/**
* {@inheritDoc}
*/
public int getAllowedLoginAttempts() {
return getESAPIProperty(ALLOWED_LOGIN_ATTEMPTS, 5);
}
/**
* {@inheritDoc}
*/
public int getMaxOldPasswordHashes() {
return getESAPIProperty(MAX_OLD_PASSWORD_HASHES, 12);
}
/**
* {@inheritDoc}
*/
public File getUploadDirectory() {
String dir = getESAPIProperty( UPLOAD_DIRECTORY, "UploadDir");
return new File( dir );
}
/**
* {@inheritDoc}
*/
public File getUploadTempDirectory() {
String dir = getESAPIProperty(UPLOAD_TEMP_DIRECTORY,
System.getProperty("java.io.tmpdir","UploadTempDir"));
return new File( dir );
}
/**
* {@inheritDoc}
*/
public boolean getDisableIntrusionDetection() {
String value = properties.getProperty( DISABLE_INTRUSION_DETECTION );
if ("true".equalsIgnoreCase(value)) return true;
return false; // Default result
}
/**
* {@inheritDoc}
*/
public Threshold getQuota(String eventName) {
int count = getESAPIProperty("IntrusionDetector." + eventName + ".count", 0);
int interval = getESAPIProperty("IntrusionDetector." + eventName + ".interval", 0);
List<String> actions = new ArrayList<String>();
String actionString = getESAPIProperty("IntrusionDetector." + eventName + ".actions", "");
if (actionString != null) {
String[] actionList = actionString.split(",");
actions = Arrays.asList(actionList);
}
if ( count > 0 && interval > 0 && actions.size() > 0 ) {
return new Threshold(eventName, count, interval, actions);
}
return null;
}
/**
* {@inheritDoc}
*/
public int getLogLevel() {
String level = getESAPIProperty(LOG_LEVEL, "WARNING" );
if (level.equalsIgnoreCase("OFF"))
return Logger.OFF;
if (level.equalsIgnoreCase("FATAL"))
return Logger.FATAL;
if (level.equalsIgnoreCase("ERROR"))
return Logger.ERROR ;
if (level.equalsIgnoreCase("WARNING"))
return Logger.WARNING;
if (level.equalsIgnoreCase("INFO"))
return Logger.INFO;
if (level.equalsIgnoreCase("DEBUG"))
return Logger.DEBUG;
if (level.equalsIgnoreCase("TRACE"))
return Logger.TRACE;
if (level.equalsIgnoreCase("ALL"))
return Logger.ALL;
// This error is NOT logged the normal way because the logger constructor calls getLogLevel() and if this error occurred it would cause
// an infinite loop.
logSpecial("The LOG-LEVEL property in the ESAPI properties file has the unrecognized value: " + level + ". Using default: WARNING", null);
return Logger.WARNING; // Note: The default logging level is WARNING.
}
/**
* {@inheritDoc}
*/
public String getLogFileName() {
return getESAPIProperty( LOG_FILE_NAME, "ESAPI_logging_file" );
}
/**
* {@inheritDoc}
*/
public int getMaxLogFileSize() {
return getESAPIProperty( MAX_LOG_FILE_SIZE, DEFAULT_MAX_LOG_FILE_SIZE );
}
/**
* {@inheritDoc}
*/
public boolean getLogEncodingRequired() {
return getESAPIProperty( LOG_ENCODING_REQUIRED, false );
}
/**
* {@inheritDoc}
*/
public boolean getLogApplicationName() {
return getESAPIProperty( LOG_APPLICATION_NAME, true );
}
/**
* {@inheritDoc}
*/
public boolean getLogServerIP() {
return getESAPIProperty( LOG_SERVER_IP, true );
}
/**
* {@inheritDoc}
*/
public boolean getForceHttpOnlySession() {
return getESAPIProperty( FORCE_HTTPONLYSESSION, true );
}
/**
* {@inheritDoc}
*/
public boolean getForceSecureSession() {
return getESAPIProperty( FORCE_SECURESESSION, true );
}
/**
* {@inheritDoc}
*/
public boolean getForceHttpOnlyCookies() {
return getESAPIProperty( FORCE_HTTPONLYCOOKIES, true );
}
/**
* {@inheritDoc}
*/
public boolean getForceSecureCookies() {
return getESAPIProperty( FORCE_SECURECOOKIES, true );
}
/**
* {@inheritDoc}
*/
public String getResponseContentType() {
return getESAPIProperty( RESPONSE_CONTENT_TYPE, "text/html; charset=UTF-8" );
}
/**
* {@inheritDoc}
*/
public long getRememberTokenDuration() {
int days = getESAPIProperty( REMEMBER_TOKEN_DURATION, 14 );
return 1000L * 60 * 60 * 24 * days; // multiply as long to avoid int overflow for longer durations
}
/**
* {@inheritDoc}
*/
public int getSessionIdleTimeoutLength() {
int minutes = getESAPIProperty( IDLE_TIMEOUT_DURATION, 20 );
return 1000 * 60 * minutes;
}
/**
* {@inheritDoc}
*/
public int getSessionAbsoluteTimeoutLength() {
int minutes = getESAPIProperty(ABSOLUTE_TIMEOUT_DURATION, 20 );
return 1000 * 60 * minutes;
}
/**
* getValidationPattern returns a single pattern based upon key
*
* @param key
* validation pattern name you'd like
* @return
* if key exists, the associated validation pattern, null otherwise
*/
public Pattern getValidationPattern( String key ) {
String value = getESAPIProperty( "Validator." + key, "" );
// check cache
Pattern p = patternCache.get( value );
if ( p != null ) return p;
// compile a new pattern
if ( value == null || value.equals( "" ) ) return null;
try {
Pattern q = Pattern.compile(value);
patternCache.put( value, q );
return q;
} catch ( PatternSyntaxException e ) {
logSpecial( "SecurityConfiguration for " + key + " not a valid regex in ESAPI.properties. Returning null", null );
return null;
}
}
/**
* getWorkingDirectory returns the default directory where processes will be executed
* by the Executor.
*/
public File getWorkingDirectory() {
String dir = getESAPIProperty( WORKING_DIRECTORY, System.getProperty( "user.dir") );
if ( dir != null ) {
return new File( dir );
}
return null;
}
private String getESAPIProperty( String key, String def ) {
String value = properties.getProperty(key);
if ( value == null ) {
logSpecial( "SecurityConfiguration for " + key + " not found in ESAPI.properties. Using default: " + def, null );
return def;
}
return value;
}
private boolean getESAPIProperty( String key, boolean def ) {
String property = properties.getProperty(key);
if ( property == null ) {
logSpecial( "SecurityConfiguration for " + key + " not found in ESAPI.properties. Using default: " + def, null );
return def;
}
if ( property.equalsIgnoreCase("true") || property.equalsIgnoreCase("yes" ) ) {
return true;
}
if ( property.equalsIgnoreCase("false") || property.equalsIgnoreCase( "no" ) ) {
return false;
}
logSpecial( "SecurityConfiguration for " + key + " not either \"true\" or \"false\" in ESAPI.properties. Using default: " + def, null );
return def;
}
private byte[] getESAPIPropertyEncoded( String key, byte[] def ) {
String property = properties.getProperty(key);
if ( property == null ) {
logSpecial( "SecurityConfiguration for " + key + " not found in ESAPI.properties. Using default: " + def, null );
return def;
}
try {
return ESAPI.encoder().decodeFromBase64(property);
} catch( IOException e ) {
logSpecial( "SecurityConfiguration for " + key + " not properly Base64 encoded in ESAPI.properties. Using default: " + def, null );
return null;
}
}
private int getESAPIProperty( String key, int def ) {
String property = properties.getProperty(key);
if ( property == null ) {
logSpecial( "SecurityConfiguration for " + key + " not found in ESAPI.properties. Using default: " + def, null );
return def;
}
try {
return Integer.parseInt( property );
} catch( NumberFormatException e ) {
logSpecial( "SecurityConfiguration for " + key + " not an integer in ESAPI.properties. Using default: " + def, null );
return def;
}
}
/**
* Returns a List representing the parsed, comma-separated property
* @param key
* @param def
* @return
*/
private List<String> getESAPIProperty( String key, List<String> def ) {
String property = properties.getProperty( key );
if ( property == null ) {
logSpecial( "SecurityConfiguration for " + key + " not found in ESAPI.properties. Using default: " + def, null );
return def;
}
String[] parts = property.split(",");
return Arrays.asList( parts );
}
private boolean shouldPrintProperties() {
return getESAPIProperty(PRINT_PROPERTIES_WHEN_LOADED, false);
}
}
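// --- Usage sketch (not part of the original file) ---
// Shows the Properties-based constructor above being used with an in-memory configuration,
// and a few of the typed getters falling back to their documented defaults when a key is
// absent. The property names are the constants defined at the top of the class.
class DefaultSecurityConfigurationUsageSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("Logger.ApplicationName", "ExampleApp");
        props.setProperty("Encryptor.EncryptionKeyLength", "256");
        DefaultSecurityConfiguration config = new DefaultSecurityConfiguration(props);
        System.out.println(config.getApplicationName());      // "ExampleApp" (from the supplied properties)
        System.out.println(config.getEncryptionKeyLength());  // 256 (parsed from the supplied properties)
        System.out.println(config.getHashAlgorithm());        // "SHA-512" (key missing, default used)
    }
}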
|
package com.mmtechco.surface.monitor;
import java.util.Timer;
import java.util.TimerTask;
import java.util.Vector;
import javax.microedition.location.Criteria;
import javax.microedition.location.Location;
import javax.microedition.location.LocationException;
import javax.microedition.location.LocationListener;
import javax.microedition.location.LocationProvider;
import net.rim.device.api.gps.BlackBerryCriteria;
import net.rim.device.api.gps.BlackBerryLocationProvider;
import net.rim.device.api.gps.GPSInfo;
import net.rim.device.api.gps.LocationInfo;
import com.mmtechco.surface.Registration;
import com.mmtechco.surface.data.ActivityLog;
import com.mmtechco.surface.net.Reply;
import com.mmtechco.surface.net.Server;
import com.mmtechco.surface.prototypes.Message;
import com.mmtechco.surface.prototypes.ObserverScreen;
import com.mmtechco.surface.ui.AlertScreen;
import com.mmtechco.surface.util.ErrorMessage;
import com.mmtechco.surface.util.Logger;
import com.mmtechco.surface.util.Tools;
import com.mmtechco.surface.util.ToolsBB;
/**
* Monitors and registers location based events.
*/
public class LocationMonitor implements LocationListener {
private static final String TAG = ToolsBB
.getSimpleClassName(LocationMonitor.class);
private Logger logger = Logger.getInstance();
// Represents the period of the position query, in seconds
private static int interval = 5;
// Upload interval (in milliseconds)
private static int uploadInterval = 30 * 1000;
private BlackBerryLocationProvider locationProvider;
public static double latitude;
public static double longitude;
private Message locMsg;
private Server server;
private static Vector observers = new Vector();
public LocationMonitor() {
// Enable location services
if (LocationInfo.getAvailableLocationSources() != 0) {
LocationInfo.setLocationOn();
// Attempt to start the listening thread
if (startLocationUpdate()) {
logger.log(TAG,
"Location status: " + locationProvider.getState());
}
} else {
logger.log(TAG, "Could not start location services");
return;
}
server = new Server();
// Initialize lat/long
latitude = 0;
longitude = 0;
// Upload location periodically
new Timer().scheduleAtFixedRate(new UploadTask(), 0, uploadInterval);
}
public boolean startLocationUpdate() {
boolean started = false;
try {
BlackBerryCriteria criteria = new BlackBerryCriteria(
GPSInfo.GPS_MODE_ASSIST);
criteria.enableGeolocationWithGPS();
criteria.setFailoverMode(GPSInfo.GPS_MODE_AUTONOMOUS, 3, 100);
// criteria.setSubsequentMode(GPSInfo.GPS_MODE_CELLSITE);
criteria.setHorizontalAccuracy(5);
criteria.setVerticalAccuracy(5);
criteria.setPreferredPowerConsumption(Criteria.POWER_USAGE_MEDIUM);
criteria.setPreferredResponseTime(uploadInterval - 1000);
locationProvider = (BlackBerryLocationProvider) LocationProvider
.getInstance(criteria);
if (locationProvider != null) {
/*
* Only a single listener can be associated with a provider, and
* unsetting it involves the same call but with null. Therefore,
* there is no need to cache the listener instance; updates are
* requested every 'interval' seconds.
*/
locationProvider.setLocationListener(this, interval, -1, -1);
started = true;
} else {
logger.log(TAG, "Failed to obtain a location provider.");
}
} catch (final LocationException le) {
logger.log(TAG, "Failed to instantiate LocationProvider object:"
+ le.toString());
}
return started;
}
public void locationUpdated(LocationProvider provider, Location location) {
// Called by the provider at the interval specified in the constructor;
// the latest valid fix is cached here and uploaded to the server by UploadTask.
if (location.isValid()) {
float speed = location.getSpeed();
longitude = location.getQualifiedCoordinates().getLongitude();
latitude = location.getQualifiedCoordinates().getLatitude();
locMsg = new LocationMessage(latitude, longitude, speed);
}
}
public void providerStateChanged(LocationProvider provider, int newState) {
logger.log(TAG, "GPS Provider changed");
if (newState == LocationProvider.TEMPORARILY_UNAVAILABLE) {
provider.reset();
}
}
public static void addObserver(ObserverScreen screen) {
observers.addElement(screen);
}
public static void removeObserver(ObserverScreen screen) {
observers.removeElement(screen);
}
private void notifyObservers() {
for (int i = 0; i < observers.size(); i++) {
((ObserverScreen) observers.elementAt(i)).surface();
}
}
private class UploadTask extends TimerTask {
public void run() {
// Check there are valid values
if (longitude != 0 && latitude != 0) {
logger.log(TAG, "Sending location to server");
Reply reply = server.contactServer(locMsg.getREST());
if (reply.getCallingCode().equals(AlertScreen.type_surface)) {
logger.log(TAG, "Server has requested surface");
notifyObservers();
}
}
}
}
}
/**
* Holds GPS messages
*/
class LocationMessage implements Message {
private final int type = 6;
private double latitude, longitude;
private String deviceTime;
private float speed;
public LocationMessage(double lat, double lon, float speed) {
latitude = lat;
longitude = lon;
this.speed = speed;
deviceTime = ToolsBB.getInstance().getDate();
}
/**
* Retrieves the message formatted into a single string value. The location
* message consists of:
* <ul>
* <li>Registration Serial number.
* <li>Location message type, which is '06' (a two-digit number).
* <li>Device time.
* <li>Latitude.
* <li>Longitude.
* <li>Speed. <i>Warning:</i> not implemented.
* </ul>
*
* @return a single string containing the entire message.
*/
public String getREST() {
return Registration.getRegID() + Tools.ServerQueryStringSeparator + '0'
+ type + Tools.ServerQueryStringSeparator + deviceTime
+ Tools.ServerQueryStringSeparator + latitude
+ Tools.ServerQueryStringSeparator + longitude;
// + Tools.ServerQueryStringSeparator + speed;
}
public String getTime() {
return deviceTime;
}
public int getType() {
return type;
}
}
|
package org.spongepowered.asm.mixin.struct;
import java.util.Locale;
import javax.tools.Diagnostic.Kind;
import org.objectweb.asm.tree.AnnotationNode;
import org.objectweb.asm.tree.MethodNode;
import org.spongepowered.asm.mixin.MixinEnvironment.Option;
import org.spongepowered.asm.mixin.injection.IInjectionPointContext;
import org.spongepowered.asm.mixin.injection.selectors.ISelectorContext;
import org.spongepowered.asm.mixin.refmap.IMixinContext;
import org.spongepowered.asm.mixin.refmap.IReferenceMapper;
import org.spongepowered.asm.util.Annotations;
import org.spongepowered.asm.util.asm.IAnnotationHandle;
import org.spongepowered.asm.util.logging.MessageRouter;
/**
* Data bundle for an annotated method in a mixin
*/
public class AnnotatedMethodInfo implements IInjectionPointContext {
/**
* Mixin context
*/
private final IMixinContext context;
/**
* Annotated method
*/
protected final MethodNode method;
/**
* Annotation on the method
*/
protected final AnnotationNode annotation;
public AnnotatedMethodInfo(IMixinContext mixin, MethodNode method, AnnotationNode annotation) {
this.context = mixin;
this.method = method;
this.annotation = annotation;
}
@Override
public String remap(String reference) {
if (this.context != null) {
IReferenceMapper referenceMapper = this.context.getReferenceMapper();
return referenceMapper != null ? referenceMapper.remap(this.context.getClassRef(), reference) : reference;
}
return reference;
}
/* (non-Javadoc)
* @see org.spongepowered.asm.mixin.injection.selectors.ISelectorContext
* #getParent()
*/
@Override
public ISelectorContext getParent() {
return null;
}
/**
* Get the mixin target context for this annotated method
*
* @return the target context
*/
@Override
public final IMixinContext getMixin() {
return this.context;
}
/**
* Get method being called
*
* @return injector method
*/
@Override
public final MethodNode getMethod() {
return this.method;
}
/**
* Get the original name of the method, if available
*/
public String getMethodName() {
return this.method.name;
}
/**
* Get the primary annotation which makes this method special
*/
@Override
public AnnotationNode getAnnotationNode() {
return this.annotation;
}
/**
* Get the primary annotation which makes this method special
*
* @return The primary method annotation
*/
@Override
public final IAnnotationHandle getAnnotation() {
return Annotations.handleOf(this.annotation);
}
/**
* Get the annotation context for selectors operating in the context of this
* method.
*
* @return The selector context annotation
*/
@Override
public IAnnotationHandle getSelectorAnnotation() {
return Annotations.handleOf(this.annotation);
}
/**
* Get the selector coordinate for this method
*
* @return The selector context annotation
*/
@Override
public String getSelectorCoordinate(boolean leaf) {
return leaf ? "method" : this.getMethodName().toLowerCase(Locale.ROOT);
}
/* (non-Javadoc)
* @see org.spongepowered.asm.mixin.injection.IInjectionPointContext
* #addMessage(java.lang.String, java.lang.Object[])
*/
@Override
public void addMessage(String format, Object... args) {
if (this.context.getOption(Option.DEBUG_VERBOSE)) {
MessageRouter.getMessager().printMessage(Kind.WARNING, String.format(format, args));
}
}
}
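// --- Usage sketch (not part of the original file) ---
// Illustrates constructing the data bundle around a hand-built ASM MethodNode and
// AnnotationNode. The "onTick" name and the @Inject descriptor are made-up illustrative
// values; with a null mixin context, remap() is a pass-through, as coded above.
class AnnotatedMethodInfoUsageSketch {
    public static void main(String[] args) {
        MethodNode method = new MethodNode(
                org.objectweb.asm.Opcodes.ACC_PUBLIC, "onTick", "()V", null, null);
        AnnotationNode annotation = new AnnotationNode("Lorg/spongepowered/asm/mixin/injection/Inject;");
        AnnotatedMethodInfo info = new AnnotatedMethodInfo(null, method, annotation);
        System.out.println(info.getMethodName());             // onTick
        System.out.println(info.getSelectorCoordinate(true));  // method
        System.out.println(info.remap("some.Reference"));      // unchanged without a context
    }
}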
|
package com.rapid.server;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import javax.xml.xpath.XPathExpressionException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.web.Log4jServletContextListener;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.xml.sax.SAXException;
import com.rapid.core.Action;
import com.rapid.core.Application;
import com.rapid.core.Application.RapidLoadingException;
import com.rapid.core.Applications;
import com.rapid.core.Applications.Versions;
import com.rapid.core.Device.Devices;
import com.rapid.core.Email;
import com.rapid.core.Process;
import com.rapid.core.Theme;
import com.rapid.utils.Classes;
import com.rapid.utils.Comparators;
import com.rapid.utils.Encryption.EncryptionProvider;
import com.rapid.utils.Files;
import com.rapid.utils.Https;
import com.rapid.utils.JAXB.EncryptedXmlAdapter;
import com.rapid.utils.Strings;
public class RapidServletContextListener extends Log4jServletContextListener implements ServletContextListener {
// the logger which we will initialise
private static Logger _logger;
// the schema factory that we will load the actions and controls schemas into
private static SchemaFactory _schemaFactory;
// all of the classes we are going to put into our jaxb context
private static ArrayList<Class> _jaxbClasses;
public static void logFileNames(File dir, String rootPath) {
for (File file : dir.listFiles()) {
if (file.isDirectory()) {
logFileNames(file, rootPath);
} else {
String fileName = file.toString();
_logger.info(fileName.substring(rootPath.length()));
}
}
}
public static int loadLogins(ServletContext servletContext) throws Exception {
int loginCount = 0;
// get the directory in which the login xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/logins/"));
// if the directory exists
if (dir.exists()) {
// create an array list of json objects to hold the logins
ArrayList<JSONObject> logins = new ArrayList<JSONObject>();
// create a filter for finding .login.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".login.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/login.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// read the xml into a string
String xml = Strings.getString(xmlFile);
// validate the login xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the string into JSON
JSONObject jsonLogin = org.json.XML.toJSONObject(xml).getJSONObject("login");
// add to array list
logins.add(jsonLogin);
// increment the count
loginCount++;
}
// put the logins in a context attribute (this is available to the security adapters on initialisation)
servletContext.setAttribute("jsonLogins", logins);
}
_logger.info(loginCount + " logins loaded from .login.xml files");
return loginCount;
}
public static int loadDatabaseDrivers(ServletContext servletContext) throws Exception {
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/databaseDrivers.xsd"));
// create a validator
Validator validator = schema.newValidator();
// read the xml into a string
String xml = Strings.getString(new File(servletContext.getRealPath("/WEB-INF/database/") + "/databaseDrivers.xml"));
// validate the database drivers xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the xml string into JSON
JSONObject jsonDatabaseDriverCollection = org.json.XML.toJSONObject(xml).getJSONObject("databaseDrivers");
// prepare the array we are going to populate
JSONArray jsonDatabaseDrivers = new JSONArray();
JSONObject jsonDatabaseDriver;
int index = 0;
int count = 0;
if (jsonDatabaseDriverCollection.optJSONArray("databaseDriver") == null) {
jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONObject("databaseDriver");
} else {
jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").getJSONObject(index);
count = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").length();
}
do {
_logger.info("Registering database driver " + jsonDatabaseDriver.getString("name") + " using " + jsonDatabaseDriver.getString("class"));
try {
// check this type does not already exist
for (int i = 0; i < jsonDatabaseDrivers.length(); i++) {
if (jsonDatabaseDriver.getString("name").equals(jsonDatabaseDrivers.getJSONObject(i).getString("name"))) throw new Exception(" database driver type is loaded already. Type names must be unique");
}
// get the class name
String className = jsonDatabaseDriver.getString("class");
// get the current thread class loader (this should log better if there are any issues)
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
// check we got a class loader
if (classLoader == null) {
// register the class the old fashioned way so the DriverManager can find it
Class.forName(className);
} else {
// register the class on this thread so we can catch any errors
Class.forName(className, true, classLoader);
}
// add the jsonControl to our array
jsonDatabaseDrivers.put(jsonDatabaseDriver);
} catch (Exception ex) {
_logger.error("Error registering database driver : " + ex.getMessage(), ex);
}
// move to the next driver entry
index++;
// get the next one
if (index < count) jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").getJSONObject(index);
} while (index < count);
// put the jsonDatabaseDrivers in a context attribute (this is available to RapidHttpServlet)
servletContext.setAttribute("jsonDatabaseDrivers", jsonDatabaseDrivers);
_logger.info(index + " database drivers loaded from databaseDrivers.xml file");
return index;
}
// loop all of the .connectionAdapter.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation
public static int loadConnectionAdapters(ServletContext servletContext) throws Exception {
int adapterCount = 0;
// retain our class constructors in a hashtable - this speeds up initialisation
HashMap<String,Constructor> connectionConstructors = new HashMap<String,Constructor>();
// create an array list of json objects which we will sort later according to the order
ArrayList<JSONObject> connectionAdapters = new ArrayList<JSONObject>();
// get the directory in which the connection adapter xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/database/"));
// create a filter for finding .connectionAdapter.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".connectionadapter.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/connectionAdapter.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// read the xml into a string
String xml = Strings.getString(xmlFile);
// validate the connection adapter xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the string into JSON
JSONObject jsonConnectionAdapter = org.json.XML.toJSONObject(xml).getJSONObject("connectionAdapter");
// get the class name from the json
String className = jsonConnectionAdapter.getString("class");
// get the class
Class classClass = Class.forName(className);
// check the class extends com.rapid.data.ConnectionAdapter
if (!Classes.extendsClass(classClass, com.rapid.data.ConnectionAdapter.class)) throw new Exception(classClass.getCanonicalName() + " must extend com.rapid.data.ConnectionAdapter");
// check this class is unique
if (connectionConstructors.get(className) != null) throw new Exception(className + " connection adapter already loaded.");
// add to constructors hashmap referenced by type
connectionConstructors.put(className, classClass.getConstructor(ServletContext.class, String.class, String.class, String.class, String.class));
// add to to our array list
connectionAdapters.add(jsonConnectionAdapter);
// increment the count
adapterCount++;
}
// sort the connection adapters according to their order property
Collections.sort(connectionAdapters, new Comparator<JSONObject>() {
@Override
public int compare(JSONObject o1, JSONObject o2) {
try {
return o1.getInt("order") - o2.getInt("order");
} catch (JSONException e) {
return 999;
}
}
});
// create a JSON Array object which will hold json for all of the available security adapters
JSONArray jsonConnectionAdapters = new JSONArray();
// loop the sorted connection adapters and add to the json array
for (JSONObject jsonConnectionAdapter : connectionAdapters) jsonConnectionAdapters.put(jsonConnectionAdapter);
// put the jsonConnectionAdapters in a context attribute (this is available to RapidHttpServlet)
servletContext.setAttribute("jsonConnectionAdapters", jsonConnectionAdapters);
// put the constructors hashmap in a context attribute (this is available via the getConstructor method in RapidHttpServlet)
servletContext.setAttribute("connectionConstructors", connectionConstructors);
_logger.info(adapterCount + " connection adapters loaded in .connectionAdapter.xml files");
return adapterCount;
}
// loop all of the .securityAdapter.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation
public static int loadSecurityAdapters(ServletContext servletContext) throws Exception {
int adapterCount = 0;
// retain our class constructors in a hashtable - this speeds up initialisation
HashMap<String,Constructor> securityConstructors = new HashMap<String,Constructor>();
// create a JSON Array object which will hold json for all of the available security adapters
JSONArray jsonSecurityAdapters = new JSONArray();
// get the directory in which the security adapter xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/security/"));
// create a filter for finding .securityadapter.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".securityadapter.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/securityAdapter.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// read the xml into a string
String xml = Strings.getString(xmlFile);
// validate the security adapter xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the string into JSON
JSONObject jsonSecurityAdapter = org.json.XML.toJSONObject(xml).getJSONObject("securityAdapter");
// get the type from the json
String type = jsonSecurityAdapter.getString("type");
// get the class name from the json
String className = jsonSecurityAdapter.getString("class");
// get the class
Class classClass = Class.forName(className);
// check the class extends com.rapid.security.SecurityAdapter
if (!Classes.extendsClass(classClass, com.rapid.security.SecurityAdapter.class)) throw new Exception(type + " security adapter class " + classClass.getCanonicalName() + " must extend com.rapid.security.SecurityAdapter");
// check this type is unique
if (securityConstructors.get(type) != null) throw new Exception(type + " security adapter already loaded. Type names must be unique.");
// add to constructors hashmap referenced by type
securityConstructors.put(type, classClass.getConstructor(ServletContext.class, Application.class));
// add to our collection
jsonSecurityAdapters.put(jsonSecurityAdapter);
// increment the count
adapterCount++;
}
// put the jsonSecurityAdapters in a context attribute (this is available to RapidHttpServlet)
servletContext.setAttribute("jsonSecurityAdapters", jsonSecurityAdapters);
// put the constructors hashmap in a context attribute (this is available via the getConstructor method in RapidHttpServlet)
servletContext.setAttribute("securityConstructors", securityConstructors);
_logger.info(adapterCount + " security adapters loaded in .securityAdapter.xml files");
return adapterCount;
}
// loop all of the .formAdapter.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation
public static int loadFormAdapters(ServletContext servletContext) throws Exception {
int adapterCount = 0;
// retain our class constructors in a hashtable - this speeds up initialisation
HashMap<String,Constructor> formConstructors = new HashMap<String,Constructor>();
// create a JSON Array object which will hold json for all of the available form adapters
JSONArray jsonAdapters = new JSONArray();
// get the directory in which the form adapter xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/forms/"));
// create a filter for finding .formadapter.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".formadapter.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/formAdapter.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// read the xml into a string
String xml = Strings.getString(xmlFile);
// validate the form adapter xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the string into JSON
JSONObject jsonFormAdapter = org.json.XML.toJSONObject(xml).getJSONObject("formAdapter");
// get the type from the json
String type = jsonFormAdapter.getString("type");
// get the class name from the json
String className = jsonFormAdapter.getString("class");
// get the class
Class classClass = Class.forName(className);
// check the class extends com.rapid.forms.FormAdapter
if (!Classes.extendsClass(classClass, com.rapid.forms.FormAdapter.class)) throw new Exception(type + " form adapter class " + classClass.getCanonicalName() + " must extend com.rapid.forms.FormAdapter");
// check this type is unique
if (formConstructors.get(type) != null) throw new Exception(type + " form adapter already loaded. Type names must be unique.");
// add to constructors hashmap referenced by type
formConstructors.put(type, classClass.getConstructor(ServletContext.class, Application.class));
// add to our collection
jsonAdapters.put(jsonFormAdapter);
// increment the count
adapterCount++;
}
// put the jsonFormAdapters in a context attribute (this is available via RapidHttpServlet)
servletContext.setAttribute("jsonFormAdapters", jsonAdapters);
// put the constructors hashmap in a context attribute (this is available via the getConstructor method in RapidHttpServlet)
servletContext.setAttribute("formConstructors", formConstructors);
_logger.info(adapterCount + " form adapters loaded in .formAdapter.xml files");
return adapterCount;
}
// loop all of the .action.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation
public static int loadActions(ServletContext servletContext) throws Exception {
// assume no actions
int actionCount = 0;
// create a list of json actions which we will sort later
List<JSONObject> jsonActions = new ArrayList<JSONObject>();
// retain our class constructors in a hashtable - this speeds up initialisation
HashMap<String,Constructor> actionConstructors = new HashMap<String,Constructor>();
// build a collection of classes so we can re-initilise the JAXB context to recognise our injectable classes
ArrayList<Action> actions = new ArrayList<Action>();
// get the directory in which the action xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/actions/"));
// create a filter for finding .action.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".action.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/action.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// get a scanner to read the file
Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A");
// read the xml into a string
String xml = fileScanner.next();
// close the scanner (and file)
fileScanner.close();
// validate the action xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the string into JSON
JSONObject jsonActionCollection = org.json.XML.toJSONObject(xml).getJSONObject("actions");
JSONObject jsonAction;
int index = 0;
int count = 0;
// the JSON library will produce a single object if there is a single action, otherwise an array (see the sketch below)
if (jsonActionCollection.optJSONArray("action") == null) {
jsonAction = jsonActionCollection.getJSONObject("action");
} else {
jsonAction = jsonActionCollection.getJSONArray("action").getJSONObject(index);
count = jsonActionCollection.getJSONArray("action").length();
}
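// A minimal sketch of the two shapes org.json.XML.toJSONObject produces for the check above
// (element names follow the .action.xml convention used by this loader; values are illustrative):
//
//   one <action> element   -> {"actions":{"action":{...}}}           optJSONArray("action") returns null
//   many <action> elements -> {"actions":{"action":[{...},{...}]}}   optJSONArray("action") returns the array
//
//   JSONObject collection = org.json.XML.toJSONObject(xml).getJSONObject("actions");
//   JSONArray actionArray = collection.optJSONArray("action");
//   if (actionArray == null) { /* single object via getJSONObject("action") */ } else { /* loop the array */ }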
do {
// check this type does not already exist
for (int i = 0; i < jsonActions.size(); i++) {
if (jsonAction.getString("type").equals(jsonActions.get(i).getString("type"))) throw new Exception(" action type is loaded already. Type names must be unique");
}
// add the jsonControl to our array
jsonActions.add(jsonAction);
// get the named type from the json
String type = jsonAction.getString("type");
// get the class name from the json
String className = jsonAction.getString("class");
// get the class
Class classClass = Class.forName(className);
// check the class extends com.rapid.Action
if (!Classes.extendsClass(classClass, com.rapid.core.Action.class)) throw new Exception(type + " action class " + classClass.getCanonicalName() + " must extend com.rapid.core.Action.");
// check this type is unique
if (actionConstructors.get(type) != null) throw new Exception(type + " action already loaded. Type names must be unique.");
// add to constructors hashmap referenced by type
actionConstructors.put(type, classClass.getConstructor(RapidHttpServlet.class, JSONObject.class));
// add to our jaxb classes collection
_jaxbClasses.add(classClass);
// inc the action count
actionCount ++;
// inc the count of actions in this file
index++;
// get the next one
if (index < count) jsonAction = jsonActionCollection.getJSONArray("action").getJSONObject(index);
} while (index < count);
}
// sort the list of actions by name
Collections.sort(jsonActions, new Comparator<JSONObject>() {
@Override
public int compare(JSONObject c1, JSONObject c2) {
try {
return Comparators.AsciiCompare(c1.getString("name"), c2.getString("name"), false);
} catch (JSONException e) {
return 0;
}
}
});
// create a JSON Array object which will hold json for all of the available actions
JSONArray jsonArrayActions = new JSONArray(jsonActions);
// put the jsonActions in a context attribute (this is available via the getJsonActions method in RapidHttpServlet)
servletContext.setAttribute("jsonActions", jsonArrayActions);
// put the constructors hashmap in a context attribute (this is available via the getConstructor method in RapidHttpServlet)
servletContext.setAttribute("actionConstructors", actionConstructors);
_logger.info(actionCount + " actions loaded in .action.xml files");
return actionCount;
}
// here we loop all of the .control.xml files, convert each one to JSON, and cache the result in the servletContext
public static int loadControls(ServletContext servletContext) throws Exception {
// assume no controls
int controlCount = 0;
// create a list for our controls
List<JSONObject> jsonControls = new ArrayList<JSONObject>();
// get the directory in which the control xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/controls/"));
// create a filter for finding .control.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".control.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/control.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// get a scanner to read the file
Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A");
// read the xml into a string
String xml = fileScanner.next();
// close the scanner (and file)
fileScanner.close();
// validate the control xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the string into JSON
JSONObject jsonControlCollection = org.json.XML.toJSONObject(xml).getJSONObject("controls");
JSONObject jsonControl;
int index = 0;
int count = 0;
if (jsonControlCollection.optJSONArray("control") == null) {
jsonControl = jsonControlCollection.getJSONObject("control");
} else {
jsonControl = jsonControlCollection.getJSONArray("control").getJSONObject(index);
count = jsonControlCollection.getJSONArray("control").length();
}
do {
// check this type does not already exist
for (int i = 0; i < jsonControls.size(); i++) {
if (jsonControl.getString("type").equals(jsonControls.get(i).getString("type"))) throw new Exception(" control type is loaded already. Type names must be unique");
}
// add the jsonControl to our array
jsonControls.add(jsonControl);
// inc the control count
controlCount ++;
// inc the count of controls in this file
index++;
// get the next one
if (index < count) jsonControl = jsonControlCollection.getJSONArray("control").getJSONObject(index);
} while (index < count);
}
// sort the list of controls by name
Collections.sort(jsonControls, new Comparator<JSONObject>() {
@Override
public int compare(JSONObject c1, JSONObject c2) {
try {
return Comparators.AsciiCompare(c1.getString("name"), c2.getString("name"), false);
} catch (JSONException e) {
return 0;
}
}
});
// create a JSON Array object which will hold json for all of the available controls
JSONArray jsonArrayControls = new JSONArray(jsonControls);
// put the jsonControls in a context attribute (this is available via the getJsonControls method in RapidHttpServlet)
servletContext.setAttribute("jsonControls", jsonArrayControls);
_logger.info(controlCount + " controls loaded in .control.xml files");
return controlCount;
}
// here we loop all of the .theme.xml files, create Theme objects from them, and cache them in the servletContext
public static int loadThemes(ServletContext servletContext) throws Exception {
// assume no themes
int themeCount = 0;
// create a list for our themes
List<Theme> themes = new ArrayList<Theme>();
// get the directory in which the theme xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/themes/"));
// create a filter for finding .theme.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".theme.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/theme.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// get a scanner to read the file
Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A");
// read the xml into a string
String xml = fileScanner.next();
// close the scanner (and file)
fileScanner.close();
// validate the theme xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// create a theme object from the xml
Theme theme = new Theme(xml);
// add it to our collection
themes.add(theme);
// inc the template count
themeCount ++;
}
// sort the list of themes by name
Collections.sort(themes, new Comparator<Theme>() {
@Override
public int compare(Theme t1, Theme t2) {
return Comparators.AsciiCompare(t1.getName(), t2.getName(), false);
}
});
// put the themes in a context attribute (this is available via RapidHttpServlet)
servletContext.setAttribute("themes", themes);
_logger.info(themeCount + " themes loaded in .theme.xml files");
return themeCount;
}
// Here we loop all of the folders under "applications" looking for an application.xml file, copying it to a new version folder if found, before loading the versions
public static int loadApplications(ServletContext servletContext) throws JAXBException, JSONException, InstantiationException, IllegalAccessException, ClassNotFoundException, IllegalArgumentException, SecurityException, InvocationTargetException, NoSuchMethodException, IOException, ParserConfigurationException, SAXException, TransformerFactoryConfigurationError, TransformerException, RapidLoadingException, XPathExpressionException {
// get any existing applications
Applications applications = (Applications) servletContext.getAttribute("applications");
// check we got some
if (applications != null) {
// log
_logger.info("Closing applications");
// loop the application ids
for (String appId : applications.getIds()) {
// loop the versions
for (String version : applications.getVersions(appId).keySet()) {
// get the version
Application application = applications.get(appId, version);
// close it
application.close(servletContext);
}
}
}
_logger.info("Loading applications");
// make a new set of applications
applications = new Applications();
File applicationFolderRoot = new File(servletContext.getRealPath("/WEB-INF/applications/"));
for (File applicationFolder : applicationFolderRoot.listFiles()) {
if (applicationFolder.isDirectory()) {
// get the list of files in this folder - should be all version folders
File[] applicationFolders = applicationFolder.listFiles();
// assume we didn't need to version
boolean versionCreated = false;
// if we got some
if (applicationFolders != null) {
try {
// look for an application file in the root of the application folder
File applicationFile = new File(applicationFolder.getAbsoluteFile() + "/application.xml");
// set a version for this app (just in case it doesn't have one)
String version = "1";
// if it exists here, it's in the wrong (non-versioned) place!
if (applicationFile.exists()) {
// create a file for the new version folder
File versionFolder = new File(applicationFolder + "/" + version);
// keep appending the version if the folder already exists
while (versionFolder.exists()) {
// append .1 to the version 1, 1.1, 1.1.1, etc
version += ".1";
versionFolder = new File(applicationFolder + "/" + version);
}
// make the dir
versionFolder.mkdir();
_logger.info(versionFolder + " created");
// copy in all files and pages folder
for (File file : applicationFolders) {
// copy all files and the pages folder
if (!file.isDirectory() || (file.isDirectory() && "pages".equals(file.getName()))) {
// make a destination file
File destFile = new File(versionFolder + "/" + file.getName());
// this is not a version folder itself, copy it to the new version folder
Files.copyFolder(file, destFile);
// delete the file or folder
Files.deleteRecurring(file);
// log
_logger.info(file + " moved to " + destFile);
}
}
// record that we created a version
versionCreated = true;
} // application.xml non-versioned check
try {
// get the version folders
File[] versionFolders = applicationFolder.listFiles();
// get a marshaller
Marshaller marshaller = RapidHttpServlet.getMarshaller();
// loop them
for (File versionFolder : versionFolders) {
// check is folder
if (versionFolder.isDirectory()) {
// look for an application file in the version folder
applicationFile = new File(versionFolder + "/application.xml");
// if it exists
if (applicationFile.exists()) {
// placeholder for the application we're going to version up or just load
Application application = null;
// if we had to create a version for it
if (versionCreated) {
// load without resources
application = Application.load(servletContext, applicationFile, false);
// set the new version
application.setVersion(version);
// re-initialise it without resources (for the security adapter)
application.initialise(servletContext, false);
// marshal the updated application object to it's file
FileOutputStream fos = new FileOutputStream(applicationFile);
marshaller.marshal(application, fos);
fos.close();
// get a dir for the pages
File pageDir = new File(versionFolder + "/pages");
// check it exists
if (pageDir.exists()) {
// loop the pages files
for (File pageFile : pageDir.listFiles()) {
// read the contents of the file
String pageContent = Strings.getString(pageFile);
// replace all old file references
pageContent = pageContent
.replace("/" + application.getId() + "/", "/" + application.getId() + "/" + application.getVersion() + "/")
.replace("~?a=" + application.getId() + "&", "~?a=" + application.getId() + "&" + application.getVersion() + "&");
// create a file writer
FileWriter fs = new FileWriter(pageFile);
// save the changes
fs.write(pageContent);
// close the writer
fs.close();
_logger.info(pageFile + " updated with new references");
}
}
// make a dir for it's web resources
File webDir = new File(application.getWebFolder(servletContext));
webDir.mkdir();
_logger.info(webDir + " created");
// loop all the files in the parent
for (File file : webDir.getParentFile().listFiles()) {
// check not dir
if (!file.isDirectory()) {
// create a destination file for the new location
File destFile = new File(webDir + "/" + file.getName());
// copy it to the new destination
Files.copyFile(file, destFile);
// delete the file or folder
file.delete();
_logger.info(file + " moved to " + destFile);
}
}
}
// (re)load the application
application = Application.load(servletContext, applicationFile);
// put it in our collection
applications.put(application);
}
} // folder check
} // version folder loop
} catch (Exception ex) {
// log the exception
_logger.error("Error loading app " + applicationFile, ex);
} // version load catch
} catch (Exception ex) {
// log it
_logger.error("Error creating version folder for app " + applicationFolder, ex);
} // version folder creation catch
} // application folders check
} // application folder check
} // application folder loop
// store them in the context
servletContext.setAttribute("applications", applications);
_logger.info(applications.size() + " applications loaded");
return applications.size();
}
public static int loadProcesses(ServletContext servletContext) throws Exception {
// get any existing processes
List<Process> processes = (List<Process>) servletContext.getAttribute("processes");
// check we got some
if (processes != null) {
// log
_logger.info("Stopping processes");
// loop the processes
for (Process process : processes) {
// interrupt the process
process.interrupt();
}
}
_logger.info("Loading processes");
// make a new list of processes
processes = new ArrayList<Process>();
// get the directory in which the process xml files are stored
File dir = new File(servletContext.getRealPath("/WEB-INF/processes/"));
// create a filter for finding .process.xml files
FilenameFilter xmlFilenameFilter = new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".process.xml");
}
};
// create a schema object for the xsd
Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/WEB-INF/schemas/") + "/process.xsd"));
// create a validator
Validator validator = schema.newValidator();
// loop the xml files in the folder
for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {
// get a scanner to read the file
Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A");
// read the xml into a string
String xml = fileScanner.next();
// close the scanner (and file)
fileScanner.close();
// validate the process xml file against the schema
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));
// convert the xml into JSON
JSONObject jsonProcess = org.json.XML.toJSONObject(xml).getJSONObject("process");
// get the name from the json
String name = jsonProcess.getString("name");
// get the class name from the json
String className = jsonProcess.getString("class");
// get the class
Class classClass = Class.forName(className);
// check the class extends com.rapid.core.Process
if (!Classes.extendsClass(classClass, com.rapid.core.Process.class)) throw new Exception(name + " process class " + classClass.getCanonicalName() + " must extend com.rapid.core.Process");
// get a constructor
Constructor constructor = classClass.getConstructor(ServletContext.class, String.class, Integer.TYPE);
// create a process object from the xml
Process process = (Process) constructor.newInstance(servletContext, name, jsonProcess.getInt("interval"));
// start it
process.start();
// add it to our collection
processes.add(process);
}
// store them in the context
servletContext.setAttribute("processes", processes);
// log that we've loaded them
_logger.info(processes.size() + " processes loaded");
// return the size
return processes.size();
}
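// A minimal sketch of a process class this loader could instantiate - the class name is hypothetical,
// and it assumes com.rapid.core.Process is a Thread-like class (start() and interrupt() are called on
// it above) offering a matching (ServletContext, String, int) constructor for the subclass to call:
//
//   public class CleanUpProcess extends com.rapid.core.Process {
//       public CleanUpProcess(ServletContext servletContext, String name, int interval) {
//           super(servletContext, name, interval);
//       }
//       @Override
//       public void run() {
//           // periodic work goes here
//       }
//   }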
@Override
public void contextInitialized(ServletContextEvent event) {
// set up logging
super.contextInitialized(event);
// request windows line breaks to make the files easier to edit (in particular the marshalled .xml files)
System.setProperty("line.separator", "\r\n");
// get a reference to the servlet context
ServletContext servletContext = event.getServletContext();
// set up logging
try {
// get a logger
_logger = LogManager.getLogger(RapidHttpServlet.class);
// set the logger and store in servletConext
servletContext.setAttribute("logger", _logger);
// log!
_logger.info("Logger created");
} catch (Exception e) {
System.err.println("Error initilising logging : " + e.getMessage());
e.printStackTrace();
}
try {
// we're looking for a password and salt for the encryption
char[] password = null;
byte[] salt = null;
// look for the rapid.txt file with the saved password and salt
File secretsFile = new File(servletContext.getRealPath("/") + "/WEB-INF/security/encryption.txt");
// if it exists
if (secretsFile.exists()) {
// get a file reader
BufferedReader br = new BufferedReader(new FileReader(secretsFile));
// read the first line
String className = br.readLine().trim();
// close the reader
br.close();
// if the class name does not start with a comment marker (assumed here to be "#")
if (!className.startsWith("#")) {
try {
// get the class
Class classClass = Class.forName(className);
// get the interfaces
Class[] classInterfaces = classClass.getInterfaces();
// assume it doesn't have the interface we want
boolean gotInterface = false;
// check we got some
if (classInterfaces != null) {
for (Class classInterface : classInterfaces) {
if (com.rapid.utils.Encryption.EncryptionProvider.class.equals(classInterface)) {
gotInterface = true;
break;
}
}
}
// check the class extends com.rapid.Action
if (gotInterface) {
// get the constructors
Constructor[] classConstructors = classClass.getDeclaredConstructors();
// check we got some
if (classConstructors != null) {
// assume we don't get the parameterless one we need
Constructor constructor = null;
// loop them
for (Constructor classConstructor : classConstructors) {
// check parameters
if (classConstructor.getParameterTypes().length == 0) {
constructor = classConstructor;
break;
}
}
// check we got what we want
if (constructor == null) {
_logger.error("Encryption not initialised : Class in security.txt class must have a parameterless constructor");
} else {
// construct the class
EncryptionProvider encryptionProvider = (EncryptionProvider) constructor.newInstance();
// get the password
password = encryptionProvider.getPassword();
// get the salt
salt = encryptionProvider.getSalt();
// log
_logger.info("Encryption initialised");
}
}
} else {
_logger.error("Encryption not initialised : Class in security.txt class must extend com.rapid.utils.Encryption.EncryptionProvider");
}
} catch (Exception ex) {
_logger.error("Encyption not initialised : " + ex.getMessage(), ex);
}
}
} else {
_logger.info("Encyption not initialised");
}
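// A minimal sketch of an EncryptionProvider the reflection above could construct, assuming only what
// this code requires: the com.rapid.utils.Encryption.EncryptionProvider interface with getPassword()
// and getSalt(), plus a parameterless constructor. The class name and values are placeholders:
//
//   public class MyEncryptionProvider implements com.rapid.utils.Encryption.EncryptionProvider {
//       public MyEncryptionProvider() {}
//       public char[] getPassword() { return "changeMe".toCharArray(); }
//       public byte[] getSalt() { return new byte[] {1, 2, 3, 4, 5, 6, 7, 8}; }
//   }
//
// Its fully-qualified class name would then go on the first line of WEB-INF/security/encryption.txt.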
// create the encrypted xml adapter (if the file above is not found, no encryption will occur)
RapidHttpServlet.setEncryptedXmlAdapter(new EncryptedXmlAdapter(password, salt));
// initialise the schema factory (we'll reuse it in the various loaders)
_schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
// initialise the list of classes we're going to want in the JAXB context (the loaders will start adding to it)
_jaxbClasses = new ArrayList<Class>();
// load the logins first
_logger.info("Loading logins");
// load the logins
loadLogins(servletContext);
_logger.info("Loading database drivers");
// load the database drivers
loadDatabaseDrivers(servletContext);
_logger.info("Loading connection adapters");
// load the connection adapters
loadConnectionAdapters(servletContext);
_logger.info("Loading security adapters");
// load the security adapters
loadSecurityAdapters(servletContext);
_logger.info("Loading form adapters");
// load the form adapters
loadFormAdapters(servletContext);
_logger.info("Loading actions");
// load the actions
loadActions(servletContext);
_logger.info("Loading templates");
// load templates
loadThemes(servletContext);
_logger.info("Loading controls");
// load the controls
loadControls(servletContext);
// add some classes manually
_jaxbClasses.add(com.rapid.soa.SOAElementRestriction.class);
_jaxbClasses.add(com.rapid.soa.SOAElementRestriction.NameRestriction.class);
_jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MinOccursRestriction.class);
_jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MaxOccursRestriction.class);
_jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MaxLengthRestriction.class);
_jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MinLengthRestriction.class);
_jaxbClasses.add(com.rapid.soa.SOAElementRestriction.EnumerationRestriction.class);
_jaxbClasses.add(com.rapid.soa.Webservice.class);
_jaxbClasses.add(com.rapid.soa.SQLWebservice.class);
_jaxbClasses.add(com.rapid.soa.JavaWebservice.class);
_jaxbClasses.add(com.rapid.core.Validation.class);
_jaxbClasses.add(com.rapid.core.Action.class);
_jaxbClasses.add(com.rapid.core.Event.class);
_jaxbClasses.add(com.rapid.core.Style.class);
_jaxbClasses.add(com.rapid.core.Control.class);
_jaxbClasses.add(com.rapid.core.Page.class);
_jaxbClasses.add(com.rapid.core.Application.class);
_jaxbClasses.add(com.rapid.core.Device.class);
_jaxbClasses.add(com.rapid.core.Device.Devices.class);
_jaxbClasses.add(com.rapid.core.Email.class);
// convert arraylist to array
Class[] classes = _jaxbClasses.toArray(new Class[_jaxbClasses.size()]);
// re-init the JAXB context to include our injectable classes
JAXBContext jaxbContext = JAXBContext.newInstance(classes);
// this logs the JAXB classes
_logger.trace("JAXB content : " + jaxbContext.toString());
// store the jaxb context in RapidHttpServlet
RapidHttpServlet.setJAXBContext(jaxbContext);
// load the devices
Devices.load(servletContext);
// load the email settings
Email.load(servletContext);
// load the applications!
loadApplications(servletContext);
// load the processes
loadProcesses(servletContext);
// add some useful global objects
servletContext.setAttribute("xmlDateFormatter", new SimpleDateFormat("yyyy-MM-dd"));
servletContext.setAttribute("xmlDateTimeFormatter", new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"));
String localDateFormat = servletContext.getInitParameter("localDateFormat");
if (localDateFormat == null) localDateFormat = "dd/MM/yyyy";
servletContext.setAttribute("localDateFormatter", new SimpleDateFormat(localDateFormat));
String localDateTimeFormat = servletContext.getInitParameter("localDateTimeFormat");
if (localDateTimeFormat == null) localDateTimeFormat = "dd/MM/yyyy HH:mm a";
servletContext.setAttribute("localDateTimeFormatter", new SimpleDateFormat(localDateTimeFormat));
boolean actionCache = Boolean.parseBoolean(servletContext.getInitParameter("actionCache"));
if (actionCache) servletContext.setAttribute("actionCache", new ActionCache(servletContext));
// allow calling to https without checking certs (for now)
SSLContext sc = SSLContext.getInstance("SSL");
TrustManager[] trustAllCerts = new TrustManager[]{ new Https.TrustAllCerts() };
sc.init(null, trustAllCerts, new java.security.SecureRandom());
HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
} catch (Exception ex) {
_logger.error("Error initialising Rapid : " + ex.getMessage());
ex.printStackTrace();
}
}
@Override
public void contextDestroyed(ServletContextEvent event){
_logger.info("Shutting down...");
// interrupt the page monitor if we have one
//if (_monitor != null) _monitor.interrupt();
// get the servletContext
ServletContext servletContext = event.getServletContext();
// get all of the applications
Applications applications = (Applications) servletContext.getAttribute("applications");
// if we got some
if (applications != null) {
// loop the application ids
for (String id : applications.getIds()) {
// get the application
Versions versions = applications.getVersions(id);
// loop the versions of each app
for (String version : versions.keySet()) {
// get the application
Application application = applications.get(id, version);
// have it close any sensitive resources
application.close(servletContext);
}
}
}
// sleep for 2 seconds to allow any database connection cleanup to complete
try { Thread.sleep(2000); } catch (Exception ex) {}
// This manually deregisters JDBC drivers, which prevents Tomcat from complaining about memory leaks from this class
Enumeration<Driver> drivers = DriverManager.getDrivers();
while (drivers.hasMoreElements()) {
Driver driver = drivers.nextElement();
try {
DriverManager.deregisterDriver(driver);
_logger.info(String.format("Deregistering jdbc driver: %s", driver));
} catch (SQLException e) {
_logger.error(String.format("Error deregistering driver %s", driver), e);
}
}
Set<Thread> threadSet = Thread.getAllStackTraces().keySet();
Thread[] threadArray = threadSet.toArray(new Thread[threadSet.size()]);
for (Thread t:threadArray) {
if (t.getName().contains("Abandoned connection cleanup thread")) {
synchronized (t) {
try {
_logger.info("Forcing stop of Abandoned connection cleanup thread");
t.stop(); //don't complain, it works
} catch (Exception ex) {
_logger.info("Error forcing stop of Abandoned connection cleanup thread",ex);
}
}
}
}
// sleep for 1 second to allow any database connection cleanup to complete
try { Thread.sleep(1000); } catch (Exception ex) {}
// last log
_logger.info("Logger shutdown");
// shutdown logger
if (_logger != null) LogManager.shutdown();
// shut down logging
super.contextDestroyed(event);
}
}
|
package de.duenndns.ssl;
import android.app.Activity;
import android.app.Application;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.Service;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.util.SparseArray;
import android.os.Build;
import android.os.Handler;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.security.cert.*;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
/**
 * An X509 trust manager implementation which asks the user about invalid
* certificates and memorizes their decision.
* <p>
* The certificate validity is checked using the system default X509
* TrustManager, creating a query Dialog if the check fails.
* <p>
* <b>WARNING:</b> This only works if a dedicated thread is used for
* opening sockets!
*/
public class MemorizingTrustManager implements X509TrustManager {
final static String DECISION_INTENT = "de.duenndns.ssl.DECISION";
final static String DECISION_INTENT_ID = DECISION_INTENT + ".decisionId";
final static String DECISION_INTENT_CERT = DECISION_INTENT + ".cert";
final static String DECISION_INTENT_CHOICE = DECISION_INTENT + ".decisionChoice";
private final static Logger LOGGER = Logger.getLogger(MemorizingTrustManager.class.getName());
final static String DECISION_TITLE_ID = DECISION_INTENT + ".titleId";
private final static int NOTIFICATION_ID = 100509;
static String KEYSTORE_DIR = "KeyStore";
static String KEYSTORE_FILE = "KeyStore.bks";
Context master;
Activity foregroundAct;
NotificationManager notificationManager;
private static int decisionId = 0;
private static SparseArray<MTMDecision> openDecisions = new SparseArray<MTMDecision>();
Handler masterHandler;
private File keyStoreFile;
private KeyStore appKeyStore;
private X509TrustManager defaultTrustManager;
private X509TrustManager appTrustManager;
/** Creates an instance of the MemorizingTrustManager class that falls back to a custom TrustManager.
*
* You need to supply the application context. This has to be one of:
* - Application
* - Activity
* - Service
*
* The context is used for file management, to display the dialog /
* notification and for obtaining translated strings.
*
* @param m Context for the application.
* @param defaultTrustManager Delegate trust management to this TM. If null, the user must accept every certificate.
*/
public MemorizingTrustManager(Context m, X509TrustManager defaultTrustManager) {
init(m);
this.appTrustManager = getTrustManager(appKeyStore);
this.defaultTrustManager = defaultTrustManager;
}
/** Creates an instance of the MemorizingTrustManager class using the system X509TrustManager.
*
* You need to supply the application context. This has to be one of:
* - Application
* - Activity
* - Service
*
* The context is used for file management, to display the dialog /
* notification and for obtaining translated strings.
*
* @param m Context for the application.
*/
public MemorizingTrustManager(Context m) {
init(m);
this.appTrustManager = getTrustManager(appKeyStore);
this.defaultTrustManager = getTrustManager(null);
}
void init(Context m) {
master = m;
masterHandler = new Handler(m.getMainLooper());
notificationManager = (NotificationManager)master.getSystemService(Context.NOTIFICATION_SERVICE);
Application app;
if (m instanceof Application) {
app = (Application)m;
} else if (m instanceof Service) {
app = ((Service)m).getApplication();
} else if (m instanceof Activity) {
app = ((Activity)m).getApplication();
} else throw new ClassCastException("MemorizingTrustManager context must be an Application, Activity or Service!");
File dir = app.getDir(KEYSTORE_DIR, Context.MODE_PRIVATE);
keyStoreFile = new File(dir + File.separator + KEYSTORE_FILE);
appKeyStore = loadAppKeyStore();
}
/**
 * Returns an X509TrustManager array containing a new instance of
 * MemorizingTrustManager.
 *
 * This function is meant for convenience only. You can use it
 * as follows to integrate MemorizingTrustManager for HTTPS sockets:
*
* <pre>
* SSLContext sc = SSLContext.getInstance("TLS");
* sc.init(null, MemorizingTrustManager.getInstanceList(this),
* new java.security.SecureRandom());
* HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
* </pre>
* @param c Activity or Service to show the Dialog / Notification
*/
public static X509TrustManager[] getInstanceList(Context c) {
return new X509TrustManager[] { new MemorizingTrustManager(c) };
}
/**
* Binds an Activity to the MTM for displaying the query dialog.
*
* This is useful if your connection is run from a service that is
* triggered by user interaction -- in such cases the activity is
* visible and the user tends to ignore the service notification.
*
* You should never have a hidden activity bound to MTM! Use this
* function in onResume() and @see unbindDisplayActivity in onPause().
*
* @param act Activity to be bound
*/
public void bindDisplayActivity(Activity act) {
foregroundAct = act;
}
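// A minimal usage sketch of the bind/unbind pair described above (field and method bodies are
// illustrative; mtm is assumed to be the MemorizingTrustManager instance in use):
//
//   protected void onResume() {
//       super.onResume();
//       mtm.bindDisplayActivity(this);
//   }
//
//   protected void onPause() {
//       mtm.unbindDisplayActivity(this);
//       super.onPause();
//   }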
/**
* Removes an Activity from the MTM display stack.
*
* Always call this function when the Activity added with
* {@link #bindDisplayActivity(Activity)} is hidden.
*
* @param act Activity to be unbound
*/
public void unbindDisplayActivity(Activity act) {
// do not remove if it was overridden by a different activity
if (foregroundAct == act)
foregroundAct = null;
}
/**
* Changes the path for the KeyStore file.
*
* The actual filename relative to the app's directory will be
* <code>app_<i>dirname</i>/<i>filename</i></code>.
*
* @param dirname directory to store the KeyStore.
* @param filename file name for the KeyStore.
*/
public static void setKeyStoreFile(String dirname, String filename) {
KEYSTORE_DIR = dirname;
KEYSTORE_FILE = filename;
}
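// Example with illustrative names: after the call below, the key store would be created at
// app_TrustStore/mystore.bks inside the application's private data directory:
//
//   MemorizingTrustManager.setKeyStoreFile("TrustStore", "mystore.bks");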
/**
* Get a list of all certificate aliases stored in MTM.
*
* @return an {@link Enumeration} of all certificates
*/
public Enumeration<String> getCertificates() {
try {
return appKeyStore.aliases();
} catch (KeyStoreException e) {
// this should never happen, however...
throw new RuntimeException(e);
}
}
/**
* Get a certificate for a given alias.
*
* @param alias the certificate's alias as returned by {@link #getCertificates()}.
*
* @return the certificate associated with the alias or <tt>null</tt> if none found.
*/
public Certificate getCertificate(String alias) {
try {
return appKeyStore.getCertificate(alias);
} catch (KeyStoreException e) {
// this should never happen, however...
throw new RuntimeException(e);
}
}
/**
* Removes the given certificate from MTMs key store.
*
* <p>
* <b>WARNING</b>: this does not immediately invalidate the certificate. It is
* well possible that (a) data is transmitted over still existing connections or
* (b) new connections are created using TLS renegotiation, without a new cert
* check.
* </p>
* @param alias the certificate's alias as returned by {@link #getCertificates()}.
*
* @throws KeyStoreException if the certificate could not be deleted.
*/
public void deleteCertificate(String alias) throws KeyStoreException {
appKeyStore.deleteEntry(alias);
keyStoreUpdated();
}
public HostnameVerifier wrapHostnameVerifier(final HostnameVerifier defaultVerifier) {
if (defaultVerifier == null)
throw new IllegalArgumentException("The default verifier may not be null");
return new MemorizingHostnameVerifier(defaultVerifier);
}
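// A minimal usage sketch, assuming the standard javax.net.ssl.HttpsURLConnection API for reading and
// replacing the default hostname verifier (mtm is the MemorizingTrustManager instance):
//
//   HostnameVerifier defaultVerifier = HttpsURLConnection.getDefaultHostnameVerifier();
//   HttpsURLConnection.setDefaultHostnameVerifier(mtm.wrapHostnameVerifier(defaultVerifier));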
X509TrustManager getTrustManager(KeyStore ks) {
try {
TrustManagerFactory tmf = TrustManagerFactory.getInstance("X509");
tmf.init(ks);
for (TrustManager t : tmf.getTrustManagers()) {
if (t instanceof X509TrustManager) {
return (X509TrustManager)t;
}
}
} catch (Exception e) {
// Here, we are covering up errors. It might be more useful
// however to throw them out of the constructor so the
// embedding app knows something went wrong.
LOGGER.log(Level.SEVERE, "getTrustManager(" + ks + ")", e);
}
return null;
}
KeyStore loadAppKeyStore() {
KeyStore ks;
try {
ks = KeyStore.getInstance(KeyStore.getDefaultType());
} catch (KeyStoreException e) {
LOGGER.log(Level.SEVERE, "getAppKeyStore()", e);
return null;
}
try {
ks.load(null, null);
} catch (NoSuchAlgorithmException | CertificateException | IOException e) {
LOGGER.log(Level.SEVERE, "getAppKeyStore(" + keyStoreFile + ")", e);
}
InputStream is = null;
try {
is = new java.io.FileInputStream(keyStoreFile);
ks.load(is, "MTM".toCharArray());
} catch (NoSuchAlgorithmException | CertificateException | IOException e) {
LOGGER.log(Level.INFO, "getAppKeyStore(" + keyStoreFile + ") - exception loading file key store");
} finally {
if (is != null) {
try {
is.close();
} catch (IOException e) {
LOGGER.log(Level.FINE, "getAppKeyStore(" + keyStoreFile + ") - exception closing file key store input stream");
}
}
}
return ks;
}
void storeCert(String alias, Certificate cert) {
try {
appKeyStore.setCertificateEntry(alias, cert);
} catch (KeyStoreException e) {
LOGGER.log(Level.SEVERE, "storeCert(" + cert + ")", e);
return;
}
keyStoreUpdated();
}
void storeCert(X509Certificate cert) {
storeCert(cert.getSubjectDN().toString(), cert);
}
void keyStoreUpdated() {
// reload appTrustManager
appTrustManager = getTrustManager(appKeyStore);
// store KeyStore to file
java.io.FileOutputStream fos = null;
try {
fos = new java.io.FileOutputStream(keyStoreFile);
appKeyStore.store(fos, "MTM".toCharArray());
} catch (Exception e) {
LOGGER.log(Level.SEVERE, "storeCert(" + keyStoreFile + ")", e);
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException e) {
LOGGER.log(Level.SEVERE, "storeCert(" + keyStoreFile + ")", e);
}
}
}
}
// if the certificate is stored in the app key store, it is considered "known"
private boolean isCertKnown(X509Certificate cert) {
try {
return appKeyStore.getCertificateAlias(cert) != null;
} catch (KeyStoreException e) {
return false;
}
}
private static boolean isExpiredException(Throwable e) {
do {
if (e instanceof CertificateExpiredException)
return true;
e = e.getCause();
} while (e != null);
return false;
}
private static boolean isPathException(Throwable e) {
do {
if (e instanceof CertPathValidatorException)
return true;
e = e.getCause();
} while (e != null);
return false;
}
public void checkCertTrusted(X509Certificate[] chain, String authType, boolean isServer)
throws CertificateException
{
LOGGER.log(Level.FINE, "checkCertTrusted(" + chain + ", " + authType + ", " + isServer + ")");
try {
LOGGER.log(Level.FINE, "checkCertTrusted: trying appTrustManager");
if (isServer)
appTrustManager.checkServerTrusted(chain, authType);
else
appTrustManager.checkClientTrusted(chain, authType);
} catch (CertificateException ae) {
LOGGER.log(Level.FINER, "checkCertTrusted: appTrustManager did not verify certificate. Will fall back to secondary verification mechanisms (if any).", ae);
// if the cert is stored in our appTrustManager, we ignore expiredness
if (isExpiredException(ae)) {
LOGGER.log(Level.INFO, "checkCertTrusted: accepting expired certificate from keystore");
return;
}
if (isCertKnown(chain[0])) {
LOGGER.log(Level.INFO, "checkCertTrusted: accepting cert already stored in keystore");
return;
}
try {
if (defaultTrustManager == null) {
LOGGER.fine("No defaultTrustManager set. Verification failed, throwing " + ae);
throw ae;
}
LOGGER.log(Level.FINE, "checkCertTrusted: trying defaultTrustManager");
if (isServer)
defaultTrustManager.checkServerTrusted(chain, authType);
else
defaultTrustManager.checkClientTrusted(chain, authType);
} catch (CertificateException e) {
LOGGER.log(Level.FINER, "checkCertTrusted: defaultTrustManager failed", e);
interactCert(chain, authType, e);
}
}
}
public void checkClientTrusted(X509Certificate[] chain, String authType)
throws CertificateException
{
checkCertTrusted(chain, authType, false);
}
public void checkServerTrusted(X509Certificate[] chain, String authType)
throws CertificateException
{
checkCertTrusted(chain, authType, true);
}
public X509Certificate[] getAcceptedIssuers()
{
LOGGER.log(Level.FINE, "getAcceptedIssuers()");
return defaultTrustManager.getAcceptedIssuers();
}
private static int createDecisionId(MTMDecision d) {
int myId;
synchronized(openDecisions) {
myId = decisionId;
openDecisions.put(myId, d);
decisionId += 1;
}
return myId;
}
private static String hexString(byte[] data) {
StringBuilder si = new StringBuilder();
for (int i = 0; i < data.length; i++) {
si.append(String.format("%02x", data[i]));
if (i < data.length - 1)
si.append(":");
}
return si.toString();
}
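// Worked example (illustrative input): hexString(new byte[] {(byte) 0x0a, (byte) 0x1b, (byte) 0xcd})
// returns "0a:1b:cd" - two lowercase hex digits per byte, colon-separated, with no trailing colon.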
private static String certHash(final X509Certificate cert, String digest) {
try {
MessageDigest md = MessageDigest.getInstance(digest);
md.update(cert.getEncoded());
return hexString(md.digest());
} catch (java.security.cert.CertificateEncodingException e) {
return e.getMessage();
} catch (java.security.NoSuchAlgorithmException e) {
return e.getMessage();
}
}
private static void certDetails(StringBuilder si, X509Certificate c) {
SimpleDateFormat validityDateFormater = new SimpleDateFormat("yyyy-MM-dd");
si.append("\n");
si.append(c.getSubjectDN().toString());
si.append("\n");
si.append(validityDateFormater.format(c.getNotBefore()));
si.append(" - ");
si.append(validityDateFormater.format(c.getNotAfter()));
si.append("\nSHA-256: ");
si.append(certHash(c, "SHA-256"));
si.append("\nSHA-1: ");
si.append(certHash(c, "SHA-1"));
si.append("\nSigned by: ");
si.append(c.getIssuerDN().toString());
si.append("\n");
}
private String certChainMessage(final X509Certificate[] chain, CertificateException cause) {
Throwable e = cause;
LOGGER.log(Level.FINE, "certChainMessage for " + e);
StringBuilder si = new StringBuilder();
if (isPathException(e))
si.append(master.getString(R.string.mtm_trust_anchor));
else if (isExpiredException(e))
si.append(master.getString(R.string.mtm_cert_expired));
else {
// get to the cause
while (e.getCause() != null)
e = e.getCause();
si.append(e.getLocalizedMessage());
}
si.append("\n\n");
si.append(master.getString(R.string.mtm_connect_anyway));
si.append("\n\n");
si.append(master.getString(R.string.mtm_cert_details));
for (X509Certificate c : chain) {
certDetails(si, c);
}
return si.toString();
}
private String hostNameMessage(X509Certificate cert, String hostname) {
StringBuilder si = new StringBuilder();
si.append(master.getString(R.string.mtm_hostname_mismatch, hostname));
si.append("\n\n");
try {
Collection<List<?>> sans = cert.getSubjectAlternativeNames();
if (sans == null) {
si.append(cert.getSubjectDN());
si.append("\n");
} else for (List<?> altName : sans) {
Object name = altName.get(1);
if (name instanceof String) {
si.append("[");
si.append(altName.get(0));
si.append("] ");
si.append(name);
si.append("\n");
}
}
} catch (CertificateParsingException e) {
e.printStackTrace();
si.append("<Parsing error: ");
si.append(e.getLocalizedMessage());
si.append(">\n");
}
si.append("\n");
si.append(master.getString(R.string.mtm_connect_anyway));
si.append("\n\n");
si.append(master.getString(R.string.mtm_cert_details));
certDetails(si, cert);
return si.toString();
}
/**
* Reflectively call
* <code>Notification.setLatestEventInfo(Context, CharSequence, CharSequence, PendingIntent)</code>
 * since it was removed in Android API level 23.
*
* @param notification
* @param context
* @param mtmNotification
* @param certName
* @param call
*/
private static void setLatestEventInfoReflective(Notification notification,
Context context, CharSequence mtmNotification,
CharSequence certName, PendingIntent call) {
Method setLatestEventInfo;
try {
setLatestEventInfo = notification.getClass().getMethod(
"setLatestEventInfo", Context.class, CharSequence.class,
CharSequence.class, PendingIntent.class);
} catch (NoSuchMethodException e) {
throw new IllegalStateException(e);
}
try {
setLatestEventInfo.invoke(notification, context, mtmNotification,
certName, call);
} catch (IllegalAccessException | IllegalArgumentException
| InvocationTargetException e) {
throw new IllegalStateException(e);
}
}
void startActivityNotification(Intent intent, int decisionId, String certName) {
Notification notification;
final PendingIntent call = PendingIntent.getActivity(master, 0, intent,
0);
final String mtmNotification = master.getString(R.string.mtm_notification);
final long currentMillis = System.currentTimeMillis();
final Context context = master.getApplicationContext();
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
@SuppressWarnings("deprecation")
// Use an extra identifier for the legacy build notification, so
// that we suppress the deprecation warning. We will later assign
// this to the correct identifier.
Notification n = new Notification(android.R.drawable.ic_lock_lock,
mtmNotification,
currentMillis);
setLatestEventInfoReflective(n, context, mtmNotification, certName, call);
n.flags |= Notification.FLAG_AUTO_CANCEL;
notification = n;
} else {
notification = new Notification.Builder(master)
.setContentTitle(mtmNotification)
.setContentText(certName)
.setTicker(certName)
.setSmallIcon(android.R.drawable.ic_lock_lock)
.setWhen(currentMillis)
.setContentIntent(call)
.setAutoCancel(true)
.build();
}
notificationManager.notify(NOTIFICATION_ID + decisionId, notification);
}
/**
* Returns the top-most entry of the activity stack.
*
* @return the Context of the currently bound UI or the master context if none is bound
*/
Context getUI() {
return (foregroundAct != null) ? foregroundAct : master;
}
int interact(final String message, final int titleId) {
/* prepare the MTMDecision blocker object */
MTMDecision choice = new MTMDecision();
final int myId = createDecisionId(choice);
masterHandler.post(new Runnable() {
public void run() {
Intent ni = new Intent(master, MemorizingActivity.class);
ni.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
ni.setData(Uri.parse(MemorizingTrustManager.class.getName() + "/" + myId));
ni.putExtra(DECISION_INTENT_ID, myId);
ni.putExtra(DECISION_INTENT_CERT, message);
ni.putExtra(DECISION_TITLE_ID, titleId);
// we try to directly start the activity and fall back to
// making a notification
try {
getUI().startActivity(ni);
} catch (Exception e) {
LOGGER.log(Level.FINE, "startActivity(MemorizingActivity)", e);
startActivityNotification(ni, myId, message);
}
}
});
LOGGER.log(Level.FINE, "openDecisions: " + openDecisions + ", waiting on " + myId);
try {
synchronized(choice) { choice.wait(); }
} catch (InterruptedException e) {
LOGGER.log(Level.FINER, "InterruptedException", e);
}
LOGGER.log(Level.FINE, "finished wait on " + myId + ": " + choice.state);
return choice.state;
}
void interactCert(final X509Certificate[] chain, String authType, CertificateException cause)
throws CertificateException
{
switch (interact(certChainMessage(chain, cause), R.string.mtm_accept_cert)) {
case MTMDecision.DECISION_ALWAYS:
storeCert(chain[0]); // only store the server cert, not the whole chain
case MTMDecision.DECISION_ONCE:
break;
default:
throw (cause);
}
}
boolean interactHostname(X509Certificate cert, String hostname)
{
switch (interact(hostNameMessage(cert, hostname), R.string.mtm_accept_servername)) {
case MTMDecision.DECISION_ALWAYS:
storeCert(hostname, cert);
case MTMDecision.DECISION_ONCE:
return true;
default:
return false;
}
}
protected static void interactResult(int decisionId, int choice) {
MTMDecision d;
synchronized(openDecisions) {
d = openDecisions.get(decisionId);
openDecisions.remove(decisionId);
}
if (d == null) {
LOGGER.log(Level.SEVERE, "interactResult: aborting due to stale decision reference!");
return;
}
synchronized(d) {
d.state = choice;
d.notify();
}
}
class MemorizingHostnameVerifier implements HostnameVerifier {
private HostnameVerifier defaultVerifier;
public MemorizingHostnameVerifier(HostnameVerifier wrapped) {
defaultVerifier = wrapped;
}
@Override
public boolean verify(String hostname, SSLSession session) {
LOGGER.log(Level.FINE, "hostname verifier for " + hostname + ", trying default verifier first");
// if the default verifier accepts the hostname, we are done
if (defaultVerifier.verify(hostname, session)) {
LOGGER.log(Level.FINE, "default verifier accepted " + hostname);
return true;
}
// otherwise, we check if the hostname is an alias for this cert in our keystore
try {
X509Certificate cert = (X509Certificate)session.getPeerCertificates()[0];
//Log.d(TAG, "cert: " + cert);
if (cert.equals(appKeyStore.getCertificate(hostname.toLowerCase(Locale.US)))) {
LOGGER.log(Level.FINE, "certificate for " + hostname + " is in our keystore. accepting.");
return true;
} else {
LOGGER.log(Level.FINE, "server " + hostname + " provided wrong certificate, asking user.");
return interactHostname(cert, hostname);
}
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
}
}
|
package net.littlebigisland.droidibus.ibus.systems;
import java.util.ArrayList;
import net.littlebigisland.droidibus.ibus.IBusSystem;
public class GlobalBroadcastSystem extends IBusSystem{
private static final byte IKE_SYSTEM = Devices.InstrumentClusterElectronics.toByte();
private static final byte GLOBAL_BROADCAST = Devices.GlobalBroadcast.toByte();
/**
* Messages from the IKE to the GlobalBroadcast
*/
class IKESystem extends IBusSystem{
private static final byte IGN_STATE = 0x11;
private static final byte OBC_UNITSET = 0x15;
private static final byte SPEED_RPM = 0x18;
private static final byte MILEAGE = 0x17;
private static final byte COOLANT_TEMP = 0x19;
public void mapReceived(ArrayList<Byte> msg){
currentMessage = msg;
switch(msg.get(3)){
case IGN_STATE:
int state = (msg.get(4) < 2) ? msg.get(4) : (0x02 & msg.get(4));
triggerCallback("onUpdateIgnitionSate", state);
break;
case OBC_UNITSET:
triggerCallback(
"onUpdateUnits",
String.format(
"%8s;%8s",
Integer.toBinaryString(msg.get(5) & 0xFF),
Integer.toBinaryString(msg.get(6) & 0xFF)
).replace(' ', '0')
);
break;
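// Worked example of the unit formatting above (illustrative byte values): for bytes 0x05 and 0xC1,
// Integer.toBinaryString(0x05 & 0xFF) is "101" and Integer.toBinaryString(0xC1 & 0xFF) is "11000001",
// so the callback receives "00000101;11000001" - each byte zero-padded to eight binary digits.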
case SPEED_RPM:
triggerCallback("onUpdateSpeed", (int) msg.get(4));
triggerCallback("onUpdateRPM", (int) msg.get(5) * 100);
break;
case MILEAGE:
// Bytes 5-7 contain the Mileage
// Bytes 8 and 9 hold the inspection interval
// Byte 10 is the SIA Type (0x40 == Inspection)
// Byte 11 is the days to inspection.
// combine the three mileage bytes (least significant first), masking because Java bytes are signed
int mileage = (
    ((msg.get(7) & 0xFF) * 65536) + ((msg.get(6) & 0xFF) * 256) + (msg.get(5) & 0xFF)
);
int serviceInterval = (msg.get(8) + msg.get(9)) * 50;
int serviceIntervalType = msg.get(10);
int daysToInspection = msg.get(11);
triggerCallback("onUpdateMileage", mileage);
triggerCallback("onUpdateServiceInterval", serviceInterval);
triggerCallback(
"onUpdateServiceIntervalType", serviceIntervalType
);
triggerCallback("onUpdateDaysToInspection", daysToInspection);
break;
case COOLANT_TEMP:
triggerCallback("onUpdateCoolantTemp", (int)msg.get(5));
break;
}
}
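// Worked example of the mileage arithmetic above, with illustrative byte values: for bytes
// 0x12, 0x34 and 0x01 in positions 5, 6 and 7, the mileage is
// (0x01 * 65536) + (0x34 * 256) + 0x12 = 65536 + 13312 + 18 = 78866.
// The & 0xFF masks matter because Java bytes are signed; a raw byte of 0x80 or above would
// otherwise be read as a negative value.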
}
/**
* Messages from the LCM to the GlobalBroadcast
*/
class LightControlModuleSystem extends IBusSystem{
public void mapReceived(ArrayList<Byte> msg) {
currentMessage = msg;
// 0x5C is the light dimmer status. It appears FF = lights off and FE = lights on
if(currentMessage.get(3) == 0x5C){
int lightStatus = (currentMessage.get(4) == (byte) 0xFF) ? 0 : 1;
triggerCallback("onLightStatus", lightStatus);
}
}
}
/**
* Request mileage from the IKE
* IBUS Message: BF 03 80 16 2A
* @return byte[] Message for the IBus
*/
public byte[] getMileage(){
return new byte[]{
GLOBAL_BROADCAST, 0x03, IKE_SYSTEM, 0x16, 0x2A
};
}
public GlobalBroadcastSystem(){
IBusDestinationSystems.put(
Devices.InstrumentClusterElectronics.toByte(), new IKESystem()
);
IBusDestinationSystems.put(
Devices.LightControlModule.toByte(), new LightControlModuleSystem()
);
}
}
|
package com.relteq.sirius.db.importer;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Time;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.torque.TorqueException;
import org.apache.torque.util.Criteria;
import org.apache.torque.util.Transaction;
import com.relteq.sirius.jaxb.Point;
import com.relteq.sirius.jaxb.Position;
import com.relteq.sirius.om.FundamentalDiagramProfileSets;
import com.relteq.sirius.om.FundamentalDiagramProfiles;
import com.relteq.sirius.om.FundamentalDiagrams;
import com.relteq.sirius.om.InitialDensities;
import com.relteq.sirius.om.InitialDensitySets;
import com.relteq.sirius.om.LinkFamilies;
import com.relteq.sirius.om.Links;
import com.relteq.sirius.om.NetworkLists;
import com.relteq.sirius.om.Networks;
import com.relteq.sirius.om.NodeFamilies;
import com.relteq.sirius.om.Nodes;
import com.relteq.sirius.om.PhaseLinks;
import com.relteq.sirius.om.Phases;
import com.relteq.sirius.om.Scenarios;
import com.relteq.sirius.om.SensorLists;
import com.relteq.sirius.om.SignalLists;
import com.relteq.sirius.om.Signals;
import com.relteq.sirius.om.SplitRatioProfileSets;
import com.relteq.sirius.om.SplitRatioProfiles;
import com.relteq.sirius.om.SplitRatios;
import com.relteq.sirius.om.VehicleTypeFamilies;
import com.relteq.sirius.om.VehicleTypeLists;
import com.relteq.sirius.om.VehicleTypes;
import com.relteq.sirius.om.VehicleTypesInLists;
import com.relteq.sirius.om.VehicleTypesPeer;
import com.relteq.sirius.om.WeavingFactorSets;
import com.relteq.sirius.om.WeavingFactors;
import com.relteq.sirius.simulator.Double2DMatrix;
import com.relteq.sirius.simulator.ObjectFactory;
import com.relteq.sirius.simulator.Scenario;
import com.relteq.sirius.simulator.SiriusException;
/**
* Imports a scenario
*/
public class ScenarioLoader {
Connection conn = null;
/**
* @return a universally unique random string identifier
*/
protected String uuid() {
return com.relteq.sirius.db.util.UUID.generate();
}
private String project_id;
private String scenario_id = null;
/**
* @return the project id
*/
private String getProjectId() {
return project_id;
}
/**
* @return the generated scenario id
*/
private String getScenarioId() {
return scenario_id;
}
private String [] vehicle_type_id = null;
private Map<String, String> network_id = null;
private Map<String, String> link_family_id = null;
private Map<String, String> node_family_id = null;
private ScenarioLoader() {
project_id = "default";
}
/**
* Loads a scenario from a file
* @param filename the configuration (scenario) file name
* @throws SiriusException
*/
public static void load(String filename) throws SiriusException {
ScenarioLoader sl = new ScenarioLoader();
try {
com.relteq.sirius.db.Service.init();
sl.load(ObjectFactory.createAndLoadScenario(filename));
com.relteq.sirius.db.Service.shutdown();
} catch (TorqueException exc) {
throw new SiriusException(exc.getMessage(), exc);
}
}
public Scenarios load(Scenario scenario) throws TorqueException {
try {
conn = Transaction.begin();
Scenarios db_scenario = save(scenario);
Transaction.commit(conn);
conn = null;
return db_scenario;
} finally {
if (null != conn) {
Transaction.safeRollback(conn);
conn = null;
}
}
}
/**
* Imports a scenario
* @param scenario the scenario to be imported
* @return the imported scenario row
* @throws TorqueException
*/
protected Scenarios save(Scenario scenario) throws TorqueException {
Scenarios db_scenario = new Scenarios();
db_scenario.setId(scenario_id = uuid());
db_scenario.setProjectId(getProjectId());
db_scenario.setName(scenario.getName());
db_scenario.setDescription(scenario.getDescription());
db_scenario.save(conn);
db_scenario.setVehicleTypeLists(save(scenario.getSettings().getVehicleTypes()));
save(scenario.getNetworkList());
db_scenario.setSignalLists(save(scenario.getSignalList()));
db_scenario.setSensorLists(save(scenario.getSensorList()));
db_scenario.setSplitRatioProfileSets(save(scenario.getSplitRatioProfileSet()));
db_scenario.setWeavingFactorSets(save(scenario.getWeavingFactorSet()));
db_scenario.setInitialDensitySets(save(scenario.getInitialDensitySet()));
db_scenario.setFundamentalDiagramProfileSets(save(scenario.getFundamentalDiagramProfileSet()));
db_scenario.save(conn);
return db_scenario;
}
/**
* Imports vehicle types
* @param vtypes
* @return the imported vehicle type list
* @throws TorqueException
*/
protected VehicleTypeLists save(com.relteq.sirius.jaxb.VehicleTypes vtypes) throws TorqueException {
VehicleTypeLists db_vtl = new VehicleTypeLists();
db_vtl.setId(uuid());
db_vtl.setProjectId(getProjectId());
db_vtl.save(conn);
if (null == vtypes) {
vtypes = new com.relteq.sirius.jaxb.VehicleTypes();
com.relteq.sirius.jaxb.VehicleType vt = new com.relteq.sirius.jaxb.VehicleType();
vt.setName("SOV");
vt.setWeight(new BigDecimal(1));
vtypes.getVehicleType().add(vt);
}
List<com.relteq.sirius.jaxb.VehicleType> vtlist = vtypes.getVehicleType();
vehicle_type_id = new String[vtlist.size()];
int ind = 0;
for (com.relteq.sirius.jaxb.VehicleType vt : vtlist)
vehicle_type_id[ind++] = save(vt, db_vtl).getId();
return db_vtl;
}
/**
* Imports a vehicle type
* @param vt the vehicle type to be imported
* @param db_vtl an imported vehicle type list
* @return the imported (or already existing) vehicle type
* @throws TorqueException
*/
private VehicleTypes save(com.relteq.sirius.jaxb.VehicleType vt, VehicleTypeLists db_vtl) throws TorqueException {
Criteria crit = new Criteria();
crit.add(VehicleTypesPeer.PROJECT_ID, getProjectId());
crit.add(VehicleTypesPeer.NAME, vt.getName());
crit.add(VehicleTypesPeer.WEIGHT, vt.getWeight());
@SuppressWarnings("unchecked")
List<VehicleTypes> db_vt_l = VehicleTypesPeer.doSelect(crit);
VehicleTypes db_vtype = null;
if (db_vt_l.isEmpty()) {
VehicleTypeFamilies db_vtf = new VehicleTypeFamilies();
db_vtf.setId(uuid());
db_vtf.save(conn);
db_vtype = new VehicleTypes();
db_vtype.setVehicleTypeFamilies(db_vtf);
db_vtype.setProjectId(getProjectId());
db_vtype.setName(vt.getName());
db_vtype.setWeight(vt.getWeight());
db_vtype.save(conn);
} else {
// TODO what if db_vt_l.size() > 1
db_vtype = db_vt_l.get(0);
}
VehicleTypesInLists db_vtinl = new VehicleTypesInLists();
db_vtinl.setVehicleTypeLists(db_vtl);
db_vtinl.setVehicleTypeId(db_vtype.getId());
db_vtinl.save(conn);
return db_vtype;
}
/**
* Imports a network list
* @param nl
* @throws TorqueException
*/
protected void save(com.relteq.sirius.jaxb.NetworkList nl) throws TorqueException {
network_id = new HashMap<String, String>(nl.getNetwork().size());
link_family_id = new HashMap<String, String>();
node_family_id = new HashMap<String, String>();
for (com.relteq.sirius.jaxb.Network network : nl.getNetwork()) {
NetworkLists db_nl = new NetworkLists();
db_nl.setScenarioId(getScenarioId());
db_nl.setNetworks(save(network));
db_nl.save(conn);
}
}
private String getLinkFamily(String id) throws TorqueException {
if (!link_family_id.containsKey(id)) {
LinkFamilies db_lf = new LinkFamilies();
String lfid = uuid();
db_lf.setId(lfid);
db_lf.save(conn);
link_family_id.put(id, lfid);
}
return link_family_id.get(id);
}
private String getNodeFamily(String id) throws TorqueException {
if (!node_family_id.containsKey(id)) {
NodeFamilies db_nf = new NodeFamilies();
String nfid = uuid();
db_nf.setId(nfid);
db_nf.save(conn);
node_family_id.put(id, nfid);
}
return node_family_id.get(id);
}
/**
* Imports a network
* @param network
* @return the imported network
* @throws TorqueException
*/
protected Networks save(com.relteq.sirius.jaxb.Network network) throws TorqueException {
Networks db_network = new Networks();
String id = uuid();
network_id.put(network.getId(), id);
db_network.setId(id);
db_network.setProjectId(getProjectId());
db_network.setName(network.getName());
db_network.setDescription(network.getDescription());
db_network.save(conn);
for (com.relteq.sirius.jaxb.Node node : network.getNodeList().getNode()) {
save(node, db_network);
}
for (com.relteq.sirius.jaxb.Link link : network.getLinkList().getLink()) {
save(link, db_network);
}
return db_network;
}
/**
* Imports a node
* @param node
* @param db_network
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.Node node, Networks db_network) throws TorqueException {
Nodes db_node = new Nodes();
db_node.setId(getNodeFamily(node.getId()));
db_node.setNetworks(db_network);
db_node.setName(node.getName());
db_node.setDescription(node.getDescription());
db_node.setType(node.getType());
Position pos = node.getPosition();
if (null != pos && 1 == pos.getPoint().size()) {
Point point = pos.getPoint().get(0);
db_node.setLatitude(point.getLat());
db_node.setLongitude(point.getLng());
db_node.setElevation(point.getElevation());
}
db_node.setPostmile(node.getPostmile());
db_node.setModel("STANDARD");
db_node.save(conn);
}
/**
* Imports a link
* @param link
* @param db_network
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.Link link, Networks db_network) throws TorqueException {
Links db_link = new Links();
db_link.setId(getLinkFamily(link.getId()));
db_link.setNetworks(db_network);
db_link.setBeginNodeId(node_family_id.get(link.getBegin().getNodeId()));
db_link.setEndNodeId(node_family_id.get(link.getEnd().getNodeId()));
db_link.setName(link.getName());
db_link.setRoadName(link.getRoadName());
db_link.setDescription(link.getDescription());
db_link.setType(link.getType());
if (null != link.getLinkGeometry()) db_link.setShape(link.getLinkGeometry().toString());
db_link.setLanes(link.getLanes());
db_link.setLength(link.getLength());
db_link.setModel(link.getDynamics().getType());
db_link.setDisplayLaneOffset(link.getLaneOffset());
db_link.save(conn);
}
/**
* Imports a signal list
* @param sl
* @return the imported signal list
* @throws TorqueException
*/
private SignalLists save(com.relteq.sirius.jaxb.SignalList sl) throws TorqueException {
if (null == sl) return null;
SignalLists db_sl = new SignalLists();
db_sl.setId(uuid());
db_sl.setProjectId(getProjectId());
// TODO db_sl.setName();
// TODO db_sl.setDescription();
db_sl.save(conn);
for (com.relteq.sirius.jaxb.Signal signal : sl.getSignal())
save(signal, db_sl);
return db_sl;
}
/**
* Imports a signal
* @param signal
* @param db_sl an imported signal list
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.Signal signal, SignalLists db_sl) throws TorqueException {
Signals db_signal = new Signals();
db_signal.setId(uuid());
db_signal.setNodeId(node_family_id.get(signal.getNodeId()));
db_signal.setSignalLists(db_sl);
db_signal.save(conn);
for (com.relteq.sirius.jaxb.Phase phase : signal.getPhase()) {
save(phase, db_signal);
}
}
/**
* Imports a signal phase
* @param phase
* @param db_signal
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.Phase phase, Signals db_signal) throws TorqueException {
Phases db_phase = new Phases();
db_phase.setSignals(db_signal);
db_phase.setPhase(phase.getNema().intValue());
db_phase.setIs_protected(phase.isProtected());
db_phase.setPermissive(phase.isPermissive());
db_phase.setLag(phase.isLag());
db_phase.setRecall(phase.isRecall());
db_phase.setMinGreenTime(phase.getMinGreenTime());
db_phase.setYellowTime(phase.getYellowTime());
db_phase.setRedClearTime(phase.getRedClearTime());
db_phase.save(conn);
for (com.relteq.sirius.jaxb.LinkReference lr : phase.getLinks().getLinkReference())
save(lr, db_phase);
}
/**
* Imports a link reference (for a signal phase)
* @param lr the link reference
* @param db_phase the imported phase
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.LinkReference lr, Phases db_phase) throws TorqueException {
PhaseLinks db_lr = new PhaseLinks();
db_lr.setSignalId(db_phase.getSignalId());
db_lr.setPhase(db_phase.getPhase());
db_lr.setLinkId(link_family_id.get(lr.getId()));
db_lr.save(conn);
}
/**
* Imports a sensor list
* @param sl
* @return the imported sensor list
* @throws TorqueException
*/
private SensorLists save(com.relteq.sirius.jaxb.SensorList sl) throws TorqueException {
if (null == sl) return null;
SensorLists db_sl = new SensorLists();
db_sl.setId(uuid());
db_sl.save(conn);
for (com.relteq.sirius.jaxb.Sensor sensor : sl.getSensor()) {
save(sensor, db_sl);
}
return db_sl;
}
/**
* Imports a sensor
* @param sensor
* @param db_sl
*/
private void save(com.relteq.sirius.jaxb.Sensor sensor, SensorLists db_sl) {
// TODO Auto-generated method stub
}
/**
* Imports initial densities
* @param idset
* @return the imported initial density set
* @throws TorqueException
*/
protected InitialDensitySets save(com.relteq.sirius.jaxb.InitialDensitySet idset) throws TorqueException {
if (null == idset) return null;
InitialDensitySets db_idsets = new InitialDensitySets();
db_idsets.setId(uuid());
db_idsets.setProjectId(getProjectId());
db_idsets.setName(idset.getName());
db_idsets.setDescription(idset.getDescription());
for (com.relteq.sirius.simulator.InitialDensitySet.Tuple tuple :
((com.relteq.sirius.simulator.InitialDensitySet) idset).getData()) {
InitialDensities db_density = new InitialDensities();
db_density.setInitialDensitySets(db_idsets);
db_density.setLinkId(link_family_id.get(tuple.getLinkId()));
db_density.setVehicleTypeId(vehicle_type_id[tuple.getVehicleTypeIndex()]);
db_density.setDensity(new BigDecimal(tuple.getDensity()));
}
db_idsets.save(conn);
return db_idsets;
}
/**
* Imports weaving factors
* @param wfset
* @return the imported weaving factor set
* @throws TorqueException
*/
protected WeavingFactorSets save(com.relteq.sirius.jaxb.WeavingFactorSet wfset) throws TorqueException {
if (null == wfset) return null;
WeavingFactorSets db_wfset = new WeavingFactorSets();
db_wfset.setId(uuid());
db_wfset.setName(wfset.getName());
db_wfset.setDescription(wfset.getDescription());
db_wfset.save(conn);
for (com.relteq.sirius.jaxb.Weavingfactors wf : wfset.getWeavingfactors()) {
save(wf, db_wfset);
}
return db_wfset;
}
/**
* Imports weaving factors
* @param wf weaving factors to be imported
* @param db_wfset an already imported weaving factor set
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.Weavingfactors wf, WeavingFactorSets db_wfset) throws TorqueException {
com.relteq.sirius.simulator.Double1DVector factor_vector = new com.relteq.sirius.simulator.Double1DVector(wf.getContent(), ":");
if (factor_vector.isEmpty()) return;
for (Double factor : factor_vector.getData()) {
WeavingFactors db_wf = new WeavingFactors();
db_wf.setWeavingFactorSets(db_wfset);
// TODO db_wf.setInLinkId();
// TODO db_wf.setOutLinkId();
db_wf.setFactor(new BigDecimal(factor));
// TODO db_wf.save(conn);
}
}
/**
* Imports split ratio profiles
* @param srps
* @return the imported split ratio profile set
* @throws TorqueException
*/
private SplitRatioProfileSets save(com.relteq.sirius.jaxb.SplitRatioProfileSet srps) throws TorqueException {
if (null == srps) return null;
SplitRatioProfileSets db_srps = new SplitRatioProfileSets();
db_srps.setId(uuid());
db_srps.setProjectId(getProjectId());
db_srps.setName(srps.getName());
db_srps.setDescription(srps.getDescription());
db_srps.save(conn);
for (com.relteq.sirius.jaxb.SplitratioProfile srp : srps.getSplitratioProfile()) {
SplitRatioProfiles db_srp = new SplitRatioProfiles();
db_srp.setId(uuid());
db_srp.setSplitRatioProfileSets(db_srps);
db_srp.setNodeId(node_family_id.get(srp.getNodeId()));
db_srp.setDt(srp.getDt());
db_srp.setStartTime(srp.getStartTime());
db_srp.save(conn);
for (com.relteq.sirius.jaxb.Splitratio sr : srp.getSplitratio()) {
Double2DMatrix data = new Double2DMatrix(sr.getContent());
if (!data.isEmpty()) {
for (int t = 0; t < data.getnTime(); ++t) {
Time ts = new Time(t * 1000);
for (int vtn = 0; vtn < data.getnVTypes(); ++vtn) {
SplitRatios db_sr = new SplitRatios();
db_sr.setSplitRatioProfiles(db_srp);
db_sr.setInLinkId(link_family_id.get(sr.getLinkIn()));
db_sr.setOutLinkId(link_family_id.get(sr.getLinkOut()));
db_sr.setVehicleTypeId(vehicle_type_id[vtn]);
db_sr.setTs(ts);
db_sr.setSplitRatio(new BigDecimal(data.get(t, vtn)));
db_sr.save(conn);
}
}
}
}
}
return db_srps;
}
/**
* Imports a fundamental diagram profile set
* @param fdps
* @return the imported FD profile set
* @throws TorqueException
*/
private FundamentalDiagramProfileSets save(com.relteq.sirius.jaxb.FundamentalDiagramProfileSet fdps) throws TorqueException {
FundamentalDiagramProfileSets db_fdps = new FundamentalDiagramProfileSets();
db_fdps.setId(uuid());
db_fdps.setProjectId(getProjectId());
db_fdps.setName(fdps.getName());
db_fdps.setDescription(fdps.getDescription());
db_fdps.save(conn);
for (com.relteq.sirius.jaxb.FundamentalDiagramProfile fdprofile : fdps.getFundamentalDiagramProfile())
save(fdprofile, db_fdps);
return db_fdps;
}
/**
* Imports a fundamental diagram profile
* @param fdprofile
* @param db_fdps an already imported FD profile set
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.FundamentalDiagramProfile fdprofile, FundamentalDiagramProfileSets db_fdps) throws TorqueException {
FundamentalDiagramProfiles db_fdprofile = new FundamentalDiagramProfiles();
db_fdprofile.setId(uuid());
db_fdprofile.setFundamentalDiagramProfileSets(db_fdps);
db_fdprofile.setLinkId(link_family_id.get(fdprofile.getLinkId()));
db_fdprofile.setDt(fdprofile.getDt());
db_fdprofile.setStartTime(fdprofile.getStartTime());
db_fdprofile.save(conn);
int num = 0;
for (com.relteq.sirius.jaxb.FundamentalDiagram fd : fdprofile.getFundamentalDiagram())
save(fd, db_fdprofile, new Time(1000 * num++));
}
/**
* Imports a fundamental diagram
* @param fd
* @param db_fdprofile an already imported FD profile
* @param ts timestamp for sorting
* @throws TorqueException
*/
private void save(com.relteq.sirius.jaxb.FundamentalDiagram fd, FundamentalDiagramProfiles db_fdprofile, java.util.Date ts) throws TorqueException {
FundamentalDiagrams db_fd = new FundamentalDiagrams();
db_fd.setFundamentalDiagramProfiles(db_fdprofile);
db_fd.setTs(ts);
db_fd.setFreeFlowSpeed(fd.getFreeFlowSpeed());
db_fd.setCongestionWaveSpeed(fd.getCongestionSpeed());
db_fd.setCapacity(fd.getCapacity());
db_fd.setJamDensity(fd.getJamDensity());
db_fd.setCapacityDrop(fd.getCapacityDrop());
db_fd.setStdDeviationCapacity(fd.getStdDevCapacity());
db_fd.save(conn);
}
}
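// Hedged usage sketch (hypothetical caller, not part of the original source):
// the static ScenarioLoader.load(String) above is the public entry point -- it
// boots the Torque service, runs the whole import in a single transaction and
// shuts the service down again. The file name below is a placeholder.
class ScenarioLoaderUsageSketch {
    public static void main(String[] args) throws SiriusException {
        ScenarioLoader.load("scenario.xml");
    }
}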
|
// University of Southampton IT Innovation Centre, 2011
// University Road, Highfield, Southampton, UK, SO17 1BJ
//
// This software may not be used, sold, licensed, transferred, copied
// or reproduced in whole or in part in any manner or form or in or
// on any media by any person other than in accordance with the terms
// of the Licence Agreement supplied with the software, or otherwise
// without the prior written consent of the copyright owners.
//
// This software is distributed WITHOUT ANY WARRANTY, without even the
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE, except where stated in the Licence Agreement supplied with
// the software.
//
// Created Date : 2011-09-29
// Created for Project : WeGov
package eu.wegov.tools.searchandanalysis;
import eu.wegov.coordinator.dao.data.ExperimediaPostsCounter;
import eu.wegov.coordinator.dao.data.ExperimediaTopicOpinion;
import java.util.ArrayList;
import eu.wegov.tools.WegovTool;
import java.sql.Timestamp;
import java.util.Date;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import west.importer.WegovImporter;
import west.wegovdemo.SampleInput;
import west.wegovdemo.TopicOpinionAnalysis;
import west.wegovdemo.TopicOpinionOutput;
public class WegovSearchTool extends WegovTool {
private ArrayList<SingleSiteSearch> searches;
public WegovSearchTool(String[] args, String myRunID, String myConfigurationID, String configPath) throws Exception {
super(args, myRunID, myConfigurationID, configPath);
}
@Override
protected void configure() throws Exception {
setupSearch();
}
protected String[] getMustHaveProperties() {
return new String[]{"oauthConsumerKey", "oauthConsumerSecret", "oauthConsumerAccessToken", "oauthConsumerAccessTokenSecret"};
}
private void setupSearch() throws Exception {
searches = new ArrayList<SingleSiteSearch>();
String sitesStr = getValueOfParameter("sites");
String resultsType = getValueOfParameter("results.type", "static");
System.out.println("sites: " + sitesStr);
if ((sitesStr == null) || (sitesStr.equals(""))) {
throw new Exception("No sites defined");
}
String[] sites = sitesStr.split(",");
for (String site : sites) {
if (site.equals("twitter")) {
if (resultsType.equals("dynamic")) {
searches.add(new TwitterStreamSearch(this));
} else {
searches.add(new TwitterSearch(this));
}
} else if (site.equals("socialmention")) {
searches.add(new SocialMentionSearch(this));
} else if (site.equals("poblish")) {
searches.add(new PoblishSearch(this));
} else if (site.equals("trendsmap")) {
searches.add(new TrendsMapSearch(this));
} else if (site.equals("facebook")) {
searches.add(new FacebookGroupPostsSearch(this));
} else {
System.out.println("WARNING: site is not supported: " + site);
}
// Limit to a single site search for now
// TODO: remove this once multi-site searches are supported by UI
if (searches.size() > 0) {
break;
}
}
}
/*
* private boolean siteSelected(String site) { boolean siteSelected = false;
* try { String siteValue = configuration.getValueOfParameter(site); if
* (siteValue != null) siteSelected = siteValue.equals("true"); } catch
* (Exception e) { e.printStackTrace(); }
*
* return siteSelected; }
*/
@Override
public int execute() {
int exitCode = 0;
for (SingleSiteSearch search : searches) {
try {
System.out.println("Executing search " + search.getSearchName());
search.execute();
System.out.println("Storing results for search " + search.getSearchName());
search.storeResults();
System.out.println("Results are: ");
System.out.println(search.getResultsDataAsJson());
// note: this re-parses the JSON string produced just above; keeping the parsed objects would avoid the round trip
JSONObject results = JSONObject.fromObject(search.getResultsDataAsJson());
JSONObject postData = results.getJSONObject("postData");
JSONArray data = postData.getJSONArray("data");
int numMessages = data.size();
JSONObject post, from;
String postContents, userId, postContentsWithoutHttp, cleanWord;
SampleInput input = new SampleInput();
// TopicOpinionAnalysisResult result = new TopicOpinionAnalysisResult();
for (int i = 0; i < numMessages; i++) {
post = JSONObject.fromObject(data.get(i));
from = post.getJSONObject("from");
postContents = post.getString("message");
userId = from.getString("id");
System.out.println("\"" + postContents + "\" by " + userId);
postContentsWithoutHttp = "";
for (String word : postContents.split(" ")) {
cleanWord = word.toLowerCase().trim();
if (cleanWord.startsWith("http://")) {
// skip links so they do not end up in the topic analysis
} else {
postContentsWithoutHttp = postContentsWithoutHttp + cleanWord + " ";
}
}
if (postContentsWithoutHttp.trim().length() > 3)
input.add(postContentsWithoutHttp.trim(), userId);
}
TopicOpinionAnalysis analysis = new WegovImporter();
TopicOpinionOutput output = analysis.analyzeTopicsOpinions(input);
int numTopics = output.getNumTopics();
System.out.println("Found " + numTopics + " topics");
StringBuilder sb;
String[] topicTerms;
String keyTerm, topicAsKeywords;
ArrayList<String> topicsAsKeywords = new ArrayList<String>();
for (int topicID = 0; topicID < numTopics; topicID++) {
topicTerms = output.getTopicTerms(topicID);
sb = new StringBuilder();
for (int i = 0; i < topicTerms.length; i++) {
keyTerm = topicTerms[i];
sb.append(keyTerm);
if (i < topicTerms.length - 1)
sb.append(", ");
}
// System.out.println("Topic #" + topicID);
// for (String topic : output.getTopicTerms(topicID)) {
// System.out.println(topic + ", ");
topicAsKeywords = sb.toString();
topicsAsKeywords.add(topicAsKeywords);
System.out.println("Topic #" + topicID + ": " + topicAsKeywords);
}
int topicId = 0;
Date now = new Date();
Timestamp timeCollected = new Timestamp(now.getTime());
for (String tempKeyTerm : topicsAsKeywords.toArray(new String[0])) {
getCoordinator().getDataSchema().insertObject(
new ExperimediaTopicOpinion(
Integer.toString(topicId), tempKeyTerm, timeCollected, Integer.parseInt(getMyRunId())));
topicId++;
}
getCoordinator().getDataSchema().insertObject(
new ExperimediaPostsCounter(numMessages, timeCollected, Integer.parseInt(getMyRunId())));
} catch (Exception e) {
reportError(e);
exitCode = -1;
}
}
return exitCode;
}
public static void main(String[] args) throws Exception {
//Redirect stderr to stdout
System.setErr(System.out);
System.out.println("WeGov Search Tool v2.0");
String configPath = "C:/Users/kem/Projects/WeGov/workspace/wegov-parent/wegov-dashboard/coordinator.properties";
String myRunID = ""; // request to create a new Run
String myConfigurationID = "";
WegovSearchTool wegovSearch = null;
int exitCode = 0;
try {
wegovSearch = new WegovSearchTool(args, myRunID, myConfigurationID, configPath);
if (wegovSearch.error) {
exitCode = -1;
} else {
exitCode = wegovSearch.execute();
}
} catch (Exception e) {
e.printStackTrace(System.out);
System.out.println();
exitCode = -1;
}
System.out.println("\nSearch Tool exit code: " + exitCode);
//System.err.flush();
System.out.flush();
//System.err.close();
System.out.close();
if (args.length == 0) {
if (wegovSearch != null) {
wegovSearch.finalizeManualRun(exitCode);
}
}
System.exit(exitCode);
}
}
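// Hedged sketch (hypothetical helper, not part of the original source): the
// pre-processing loop in execute() lower-cases each word and drops links before
// handing the text to the topic/opinion analysis. Extracted on its own:
class PostCleanerSketch {
    /** Lower-cases each word and drops http links, mirroring the loop in execute(). */
    static String stripLinks(String postContents) {
        StringBuilder sb = new StringBuilder();
        for (String word : postContents.split(" ")) {
            String cleanWord = word.toLowerCase().trim();
            if (!cleanWord.startsWith("http://")) {
                sb.append(cleanWord).append(' ');
            }
        }
        return sb.toString().trim();
    }
}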
|
package de.lmu.ifi.dbs.distance;
/**
* Provides a Distance for a double-valued distance.
*
* @author Elke Achtert (<a
* href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
*/
@SuppressWarnings("serial")
class DoubleDistance extends AbstractDistance
{
/**
* The double value of this distance.
*/
private double value;
/**
* Constructs a new DoubleDistance object that represents the double
* argument.
*
* @param value
* the value to be represented by the DoubleDistance.
*/
public DoubleDistance(double value)
{
this.value = value;
}
/**
* @see java.lang.Object#hashCode()
*/
public int hashCode()
{
long bits = Double.doubleToLongBits(value);
return (int) (bits ^ (bits >>> 32));
}
/**
* @see de.lmu.ifi.dbs.distance.Distance
*/
public Distance plus(Distance distance)
{
DoubleDistance other = (DoubleDistance) distance;
return new DoubleDistance(this.value + other.value);
}
/**
* @see de.lmu.ifi.dbs.distance.Distance
*/
public Distance minus(Distance distance)
{
DoubleDistance other = (DoubleDistance) distance;
return new DoubleDistance(this.value - other.value);
}
/**
* @see de.lmu.ifi.dbs.distance.Distance
*/
public String description()
{
return "distance";
}
/**
*
* @see java.lang.Comparable#compareTo(Object)
*/
public int compareTo(Distance o)
{
DoubleDistance other = (DoubleDistance) o;
return Double.compare(this.value, other.value);
}
/**
* Returns a string representation of this distance.
*
* @return a string representation of this distance.
*/
public String toString()
{
// return Util.format(value, 6);
return Double.toString(value);
}
}
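// Hedged usage sketch (not part of the original source): DoubleDistance is
// package-private, so code like this would have to live in the same
// de.lmu.ifi.dbs.distance package.
class DoubleDistanceUsageSketch {
    static void demo() {
        DoubleDistance a = new DoubleDistance(1.5);
        DoubleDistance b = new DoubleDistance(2.0);
        Distance sum = a.plus(b);   // represents 3.5
        Distance diff = b.minus(a); // represents 0.5
        int cmp = a.compareTo(b);   // negative, since 1.5 < 2.0
        System.out.println(sum + " " + diff + " " + cmp);
    }
}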
|
package org.apache.jmeter.protocol.http.proxy;
import java.io.Serializable;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import junit.framework.TestCase;
import org.apache.jmeter.config.ConfigElement;
import org.apache.jmeter.config.ConfigTestElement;
import org.apache.jmeter.exceptions.IllegalUserActionException;
import org.apache.jmeter.functions.ValueReplacer;
import org.apache.jmeter.gui.GuiPackage;
import org.apache.jmeter.gui.tree.JMeterTreeModel;
import org.apache.jmeter.gui.tree.JMeterTreeNode;
import org.apache.jmeter.protocol.http.config.gui.HttpDefaultsGui;
import org.apache.jmeter.protocol.http.config.gui.UrlConfigGui;
import org.apache.jmeter.protocol.http.control.HeaderManager;
import org.apache.jmeter.protocol.http.control.RecordingController;
import org.apache.jmeter.protocol.http.sampler.HTTPSampler;
import org.apache.jmeter.threads.ThreadGroup;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.log.Hierarchy;
import org.apache.log.Logger;
import org.apache.oro.text.PatternCacheLRU;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
public class ProxyControl extends ConfigTestElement implements Serializable
{
transient private static Logger log = Hierarchy.getDefaultHierarchy().getLoggerFor("jmeter.protocol.http");
Daemon server;
private final int DEFAULT_PORT = 8080;
private static PatternCacheLRU patternCache = new PatternCacheLRU(1000, new Perl5Compiler());
transient Perl5Matcher matcher;
public final static String PORT = "ProxyControlGui.port";
public final static String EXCLUDE_LIST = "ProxyControlGui.exclude_list";
public final static String INCLUDE_LIST = "ProxyControlGui.include_list";
public ProxyControl()
{
matcher = new Perl5Matcher();
setPort(DEFAULT_PORT);
setExcludeList(new HashSet());
setIncludeList(new HashSet());
}
public void setPort(int port)
{
this.setProperty(PORT, new Integer(port));
}
public void setIncludeList(Collection list)
{
setProperty(INCLUDE_LIST, new HashSet(list));
}
public void setExcludeList(Collection list)
{
setProperty(EXCLUDE_LIST, new HashSet(list));
}
public String getClassLabel()
{
return JMeterUtils.getResString("proxy_title");
}
public int getPort()
{
if (this.getProperty(PORT) instanceof String)
{
setPort(Integer.parseInt((String) getProperty(PORT)));
return ((Integer) this.getProperty(PORT)).intValue();
}
else
{
return ((Integer) this.getProperty(PORT)).intValue();
}
}
public int getDefaultPort()
{
return DEFAULT_PORT;
}
public Class getGuiClass()
{
return org.apache.jmeter.protocol.http.proxy.gui.ProxyControlGui.class;
}
public void addConfigElement(ConfigElement config)
{}
public void startProxy()
{
try
{
server = new Daemon(getPort(), this);
server.start();
}
catch (UnknownHostException e)
{
log.error("", e);
}
}
public void addExcludedPattern(String pattern)
{
getExcludePatterns().add(pattern);
}
public Collection getExcludePatterns()
{
return (Collection) getProperty(EXCLUDE_LIST);
}
public void addIncludedPattern(String pattern)
{
getIncludePatterns().add(pattern);
}
public Collection getIncludePatterns()
{
return (Collection) getProperty(INCLUDE_LIST);
}
public void clearExcludedPatterns()
{
getExcludePatterns().clear();
}
public void clearIncludedPatterns()
{
getIncludePatterns().clear();
}
/**
* Receives the recorded sampler from the proxy server for placing in the
* test tree
* @param sampler
* @param subConfigs
* @param serverResponse Added to allow saving of the server's response while
* recording. A future consideration.
*/
public void deliverSampler(HTTPSampler sampler, TestElement[] subConfigs, byte[] serverResponse)
{
if (filterUrl(sampler))
{
placeConfigElement(sampler, subConfigs);
}
}
public void stopProxy()
{
if (server != null)
{
server.stopServer();
}
}
protected boolean filterUrl(HTTPSampler sampler)
{
boolean ok = false;
if (sampler.getDomain() == null || sampler.getDomain().equals(""))
{
return false;
}
if (getIncludePatterns().size() == 0)
{
ok = true;
}
else
{
ok = checkIncludes(sampler);
}
if (!ok)
{
return ok;
}
else
{
if (getExcludePatterns().size() == 0)
{
return ok;
}
else
{
ok = checkExcludes(sampler);
}
}
return ok;
}
private void placeConfigElement(HTTPSampler sampler, TestElement[] subConfigs)
{
ValueReplacer replacer = GuiPackage.getInstance().getReplacer();
TestElement urlConfig = null;
JMeterTreeModel treeModel = GuiPackage.getInstance().getTreeModel();
List nodes = treeModel.getNodesOfType(RecordingController.class);
if (nodes.size() == 0)
{
nodes = treeModel.getNodesOfType(ThreadGroup.class);
}
Iterator iter = nodes.iterator();
while (iter.hasNext())
{
JMeterTreeNode node = (JMeterTreeNode) iter.next();
if (!node.isEnabled())
{
continue;
}
else
{
// renamed from "enum", which is a reserved word in Java 5 and later
Enumeration children = node.children();
String guiClassName = null;
while (children.hasMoreElements())
{
JMeterTreeNode subNode = (JMeterTreeNode) children.nextElement();
TestElement sample = (TestElement) subNode.createTestElement();
guiClassName = sample.getPropertyAsString(TestElement.GUI_CLASS);
if (guiClassName.equals(UrlConfigGui.class.getName())
|| guiClassName.equals(HttpDefaultsGui.class.getName()))
{
urlConfig = sample;
break;
}
}
if (areMatched(sampler, urlConfig))
{
removeValuesFromSampler(sampler, urlConfig);
replaceValues(sampler,subConfigs);
sampler.setProperty(TestElement.GUI_CLASS,"org.apache.jmeter.protocol.http.control.gui.HttpTestSampleGui");
try
{
JMeterTreeNode newNode = treeModel.addComponent(sampler, node);
for (int i = 0; subConfigs != null && i < subConfigs.length; i++)
{
if (subConfigs[i] instanceof HeaderManager)
{
subConfigs[i].setProperty(TestElement.GUI_CLASS,"org.apache.jmeter.protocol.http.gui.HeaderPanel");
treeModel.addComponent(subConfigs[i], newNode);
}
}
}
catch (IllegalUserActionException e)
{
JMeterUtils.reportErrorToUser(e.getMessage());
}
}
return;
}
}
}
private void removeValuesFromSampler(HTTPSampler sampler, TestElement urlConfig)
{
if (urlConfig != null)
{
if (sampler.getDomain().equals(urlConfig.getProperty(HTTPSampler.DOMAIN)))
{
sampler.setDomain("");
}
/* Need to add some kind of "ignore-me" value
if (("" + sampler.getPort()).equals(urlConfig.getProperty(HTTPSampler.PORT)))
{
sampler.setPort(0);
}
*/
if (sampler.getPath().equals(urlConfig.getProperty(HTTPSampler.PATH)))
{
sampler.setPath("");
}
}
}
private boolean areMatched(HTTPSampler sampler, TestElement urlConfig)
{
return urlConfig == null
|| (urlConfig.getProperty(HTTPSampler.DOMAIN) == null
|| urlConfig.getProperty(HTTPSampler.DOMAIN).equals("")
|| urlConfig.getProperty(HTTPSampler.DOMAIN).equals(sampler.getDomain()))
&& (urlConfig.getProperty(HTTPSampler.PATH) == null
|| urlConfig.getProperty(HTTPSampler.PATH).equals("")
|| urlConfig.getProperty(HTTPSampler.PATH).equals(sampler.getPath()));
}
private boolean checkIncludes(HTTPSampler sampler)
{
boolean ok = false;
Iterator iter = getIncludePatterns().iterator();
while (iter.hasNext())
{
String item = (String) iter.next();
// option masks must be combined with OR; AND-ing them would clear both flags
Pattern pattern = patternCache.getPattern(item, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.SINGLELINE_MASK);
StringBuffer url = new StringBuffer(sampler.getDomain());
url.append(":");
url.append(sampler.getPort());
url.append(sampler.getPath());
if (sampler.getQueryString().length() > 0)
{
url.append("?");
url.append(sampler.getQueryString());
}
ok = matcher.matches(url.toString(), pattern);
if (ok)
{
break;
}
}
return ok;
}
private boolean checkExcludes(HTTPSampler sampler)
{
boolean ok = true;
Iterator iter = getExcludePatterns().iterator();
while (iter.hasNext())
{
String item = (String) iter.next();
// option masks must be combined with OR; AND-ing them would clear both flags
Pattern pattern = patternCache.getPattern(item, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.SINGLELINE_MASK);
StringBuffer url = new StringBuffer(sampler.getDomain());
url.append(":");
url.append(sampler.getPort());
url.append(sampler.getPath());
if (sampler.getQueryString().length() > 0)
{
url.append("?");
url.append(sampler.getQueryString());
}
ok = ok && !matcher.matches(url.toString(), pattern);
if (!ok)
{
return ok;
}
}
return ok;
}
protected void replaceValues(TestElement sampler, TestElement[] configs)
{
GuiPackage.getInstance().getReplacer().reverseReplace(sampler);
for (int i = 0; i < configs.length; i++)
{
GuiPackage.getInstance().getReplacer().reverseReplace(configs[i]);
}
}
public static class Test extends TestCase
{
public Test(String name)
{
super(name);
}
public void testFiltering() throws Exception
{
ProxyControl control = new ProxyControl();
control.addIncludedPattern(".*\\.jsp");
control.addExcludedPattern(".*apache.org.*");
HTTPSampler sampler = new HTTPSampler();
sampler.setDomain("jakarta.org");
sampler.setPath("index.jsp");
assertTrue(control.filterUrl(sampler));
sampler.setDomain("www.apache.org");
assertTrue(!control.filterUrl(sampler));
sampler.setPath("header.gif");
sampler.setDomain("jakarta.org");
assertTrue(!control.filterUrl(sampler));
}
}
}
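// Hedged sketch (hypothetical helper, not part of the original source): both
// checkIncludes() and checkExcludes() above match the configured Perl5 patterns
// against a string of the form "<domain>:<port><path>[?<query>]", so patterns
// need to allow for the port between domain and path. Built on its own:
class ProxyMatchStringSketch {
    static String matchString(String domain, int port, String path, String query) {
        StringBuffer url = new StringBuffer(domain);
        url.append(":").append(port).append(path);
        if (query != null && query.length() > 0) {
            url.append("?").append(query);
        }
        return url.toString();
    }
}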
|
package com.xamoom.android.xamoom_pingeborg_android;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.location.Location;
import android.os.Bundle;
import android.support.design.widget.TabLayout;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.Base64;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.caverock.androidsvg.SVG;
import com.caverock.androidsvg.SVGParseException;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.xamoom.android.APICallback;
import com.xamoom.android.XamoomEndUserApi;
import com.xamoom.android.mapping.ContentByLocation;
import com.xamoom.android.mapping.ContentByLocationItem;
import com.xamoom.android.mapping.Spot;
import com.xamoom.android.mapping.SpotMap;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
/**
* A placeholder fragment containing a simple view.
*/
public class MapActivityFragment extends Fragment implements OnMapReadyCallback {
private final HashMap<Marker, Spot> markerMap = new HashMap<Marker, Spot>();
private SupportMapFragment mSupportMapFragment;
private GoogleMap mGoogleMap;
private ViewPager mViewPager;
private MapAdditionFragment mMapAdditionFragment;
private GeofenceFragment mGeofenceFragment;
private Marker mActiveMarker;
private BestLocationProvider mBestLocationProvider;
private BestLocationListener mBestLocationListener;
/**
* TODO
*/
public static MapActivityFragment newInstance() {
MapActivityFragment mapActivityFragment = new MapActivityFragment();
return mapActivityFragment;
}
public MapActivityFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_map, container, false);
mViewPager = (ViewPager) view.findViewById(R.id.viewpager);
if (mViewPager != null) {
setupViewPager(mViewPager);
TabLayout tabLayout = (TabLayout) view.findViewById(R.id.tabs);
tabLayout.setupWithViewPager(mViewPager);
}
setupLocation();
return view;
}
private void setupLocation() {
mBestLocationProvider = new BestLocationProvider(getActivity(), true, true, 1000, 1000, 5, 10);
mBestLocationListener = new BestLocationListener() {
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
}
@Override
public void onProviderEnabled(String provider) {
}
@Override
public void onProviderDisabled(String provider) {
}
@Override
public void onLocationUpdateTimeoutExceeded(BestLocationProvider.LocationType type) {
}
@Override
public void onLocationUpdate(Location location, BestLocationProvider.LocationType type, boolean isFresh) {
if(isFresh) {
Log.i("pingeborg", "onLocationUpdate TYPE:" + type + " Location:" + mBestLocationProvider.locationToString(location));
setupGeofencing(location);
}
}
};
//start Location Updates
startLocationUpdating();
}
public void onDestroyView() {
mBestLocationProvider.stopLocationUpdates();
super.onDestroyView();
}
private void setupViewPager(ViewPager viewPager) {
mSupportMapFragment = SupportMapFragment.newInstance();
try {
//add fragments to viewPager
FragmentManager fragmentManager = getChildFragmentManager();
Adapter adapter = new Adapter(fragmentManager);
adapter.addFragment(mSupportMapFragment, "Map");
adapter.addFragment(SpotListFragment.newInstance(), "List");
viewPager.setAdapter(adapter);
} catch (Exception e) {
e.printStackTrace();
}
//hide mapAddition when changing tabs
viewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
}
@Override
public void onPageSelected(int position) {
if(position != 0) {
closeMapAdditionFragment();
}
}
@Override
public void onPageScrollStateChanged(int state) {
}
});
mSupportMapFragment.getMapAsync(this);
}
public void startLocationUpdating() {
mBestLocationProvider.startLocationUpdatesWithListener(mBestLocationListener);
}
public void setupGeofencing(Location location) {
XamoomEndUserApi.getInstance().getContentByLocation(location.getLatitude(), location.getLongitude(), null, new APICallback<ContentByLocation>() {
@Override
public void finished(ContentByLocation result) {
//open geofence when there is at least one item (you can only get one geofence at a time - the nearest)
if (result.getItems().size() > 0) {
mBestLocationProvider.stopLocationUpdates();
openGeofenceFragment(result.getItems().get(0));
}
}
});
}
public void openGeofenceFragment(ContentByLocationItem content) {
try {
FragmentTransaction fragmentTransaction = getActivity().getSupportFragmentManager().beginTransaction();
if(mGeofenceFragment == null)
fragmentTransaction.setCustomAnimations(R.anim.slide_bottom_top, R.anim.slide_top_bottom);
mGeofenceFragment = GeofenceFragment.newInstance(content.getContentName(), content.getImagePublicUrl(), content.getContentId());
mGeofenceFragment.setSavedGeofence(content);
fragmentTransaction.replace(R.id.geofenceFrameLayout, mGeofenceFragment).commit();
} catch (NullPointerException e) {
Log.v(Global.DEBUG_TAG, "Exception: Geofencefragment is null.");
}
}
public void closeGeofenceFragment() {
try {
getActivity().getSupportFragmentManager().beginTransaction().remove(mGeofenceFragment).commit();
} catch (Exception e) {
e.printStackTrace();
}
}
public void onMapReady(GoogleMap googleMap) {
mGoogleMap = googleMap;
googleMap.setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(Marker marker) {
if(marker == mActiveMarker)
return true;
try {
mActiveMarker = marker;
openMapAdditionFragment(markerMap.get(marker));
} catch (Exception e) {
Log.e(Global.DEBUG_TAG,"Pressing on many Spot-Markers at the same time. (Stacked Spots in one Place)");
}
return false;
}
});
googleMap.setOnMapClickListener(new GoogleMap.OnMapClickListener() {
@Override
public void onMapClick(LatLng latLng) {
closeMapAdditionFragment();
}
});
addMarkersToMap(googleMap, markerMap);
googleMap.setMyLocationEnabled(true);
}
private void openMapAdditionFragment(Spot spot) {
FragmentTransaction fragmentTransaction = getActivity().getSupportFragmentManager().beginTransaction();
if(mMapAdditionFragment == null)
fragmentTransaction.setCustomAnimations(R.anim.slide_bottom_top, R.anim.slide_top_bottom);
mMapAdditionFragment = MapAdditionFragment.newInstance(spot.getDisplayName(), spot.getDescription(), spot.getImage(), spot.getLocation());
fragmentTransaction.replace(R.id.mapAdditionFrameLayout, mMapAdditionFragment).commit();
}
private void closeMapAdditionFragment() {
if(mMapAdditionFragment != null) {
getActivity().getSupportFragmentManager()
.beginTransaction()
.setCustomAnimations(R.anim.slide_bottom_top, R.anim.slide_top_bottom)
.remove(mMapAdditionFragment)
.commit();
mMapAdditionFragment = null;
}
}
private void addMarkersToMap(final GoogleMap googleMap, final HashMap<Marker, Spot> markerMap) {
XamoomEndUserApi.getInstance().getSpotMap(null, new String[]{"showAllTheSpots"}, null, new APICallback<SpotMap>() {
@Override
public void finished(SpotMap result) {
Bitmap icon;
//get the icon for the mapMarker (drawable image, e.g. png, or SVG)
if (result.getStyle().getCustomMarker() != null) {
String iconString = result.getStyle().getCustomMarker();
icon = getIconFromBase64(iconString);
} else {
icon = BitmapFactory.decodeResource(getActivity().getResources(), com.xamoom.android.xamoomcontentblocks.R.drawable.ic_default_map_marker);
float imageRatio = (float) icon.getWidth() / (float) icon.getHeight();
icon = Bitmap.createScaledBitmap(icon, 70, (int) (70 / imageRatio), false);
}
//show all markers
for (Spot s : result.getItems()) {
final Marker marker = googleMap.addMarker(new MarkerOptions()
.icon(BitmapDescriptorFactory.fromBitmap(icon))
.anchor(0.0f, 1.0f) // Anchors the marker on the bottom left
.title(s.getDisplayName())
.position(new LatLng(s.getLocation().getLat(), s.getLocation().getLon())));
markerMap.put(marker, s);
}
//zoom to display all markers
LatLngBounds.Builder builder = new LatLngBounds.Builder();
for (Marker marker : markerMap.keySet()) {
builder.include(marker.getPosition());
}
LatLngBounds bounds = builder.build();
//move camera to calculated point
CameraUpdate cu = CameraUpdateFactory.newLatLngBounds(bounds, 70);
googleMap.moveCamera(cu);
}
});
}
/**
* Decodes a base64 string to an icon for mapMarkers.
* Can handle normal image formats and also svgs.
* The icon will be resized to width: 70, height will be resized to maintain imageRatio.
*
* @param base64String Base64 string that will be resized. Must start with "data:image/"
* @return icon as BitMap, or null if there was a problem
*/
public Bitmap getIconFromBase64(String base64String) {
Bitmap icon = null;
byte[] data1;
byte[] data2 = "".getBytes();
String decodedString1 = "";
String decodedString2 = "";
if (base64String == null)
return null;
try {
//decode twice: the outer base64 layer wraps a data URI whose payload is base64 again
data1 = Base64.decode(base64String, Base64.DEFAULT);
decodedString1 = new String(data1, "UTF-8");
//get rid of image/xxxx base64,
int index = decodedString1.indexOf("base64,");
String decodedString1WithoutPrefix = decodedString1.substring(index + 7);
data2 = Base64.decode(decodedString1WithoutPrefix, Base64.DEFAULT);
decodedString2 = new String(data2, "UTF-8");
if (decodedString1.contains("data:image/svg+xml")) {
//svg stuff
SVG svg = null;
svg = SVG.getFromString(decodedString2);
if (svg != null) {
Log.v("pingeborg", "HELLYEAH SVG: " + svg);
//resize svg
float imageRatio = svg.getDocumentWidth() / svg.getDocumentHeight();
svg.setDocumentWidth(70.0f);
svg.setDocumentHeight(70 / imageRatio);
icon = Bitmap.createBitmap((int) svg.getDocumentWidth(), (int) svg.getDocumentHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas1 = new Canvas(icon);
svg.renderToCanvas(canvas1);
}
} else if (decodedString1.contains("data:image/")) {
//normal image stuff
icon = BitmapFactory.decodeByteArray(data2, 0, data2.length);
//resize the icon
double imageRatio = (double) icon.getWidth() / (double) icon.getHeight();
double newHeight = 70.0 / imageRatio;
icon = Bitmap.createScaledBitmap(icon, 70, (int) newHeight, false);
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
return BitmapFactory.decodeResource(getActivity().getResources(), com.xamoom.android.xamoomcontentblocks.R.drawable.ic_default_map_marker);
} catch (SVGParseException e) {
e.printStackTrace();
// fall back to the default marker (previously the decoded bitmap was discarded)
icon = BitmapFactory.decodeResource(getActivity().getResources(), com.xamoom.android.xamoomcontentblocks.R.drawable.ic_default_map_marker);
}
return icon;
}
static class Adapter extends FragmentPagerAdapter {
private final List<Fragment> mFragments = new ArrayList<>();
private final List<String> mFragmentTitles = new ArrayList<>();
public Adapter(FragmentManager fm) {
super(fm);
}
public void addFragment(Fragment fragment, String title) {
mFragments.add(fragment);
mFragmentTitles.add(title);
}
@Override
public Fragment getItem(int position) {
return mFragments.get(position);
}
@Override
public int getCount() {
return mFragments.size();
}
@Override
public CharSequence getPageTitle(int position) {
return mFragmentTitles.get(position);
}
}
}
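// Hedged sketch (hypothetical helper, not part of the original source):
// getIconFromBase64() above expects a payload that was base64-encoded twice --
// the inner layer is a "data:image/...;base64,..." data URI, the outer layer
// wraps that whole URI once more. Producing such a payload with java.util.Base64
// (android.util.Base64.encodeToString(bytes, Base64.DEFAULT) is the on-device
// equivalent) would look roughly like this:
class DoubleBase64Sketch {
    static String encodeTwice(byte[] rawImageBytes, String mimeType) {
        String inner = "data:" + mimeType + ";base64,"
                + java.util.Base64.getEncoder().encodeToString(rawImageBytes);
        return java.util.Base64.getEncoder()
                .encodeToString(inner.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
}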
|
package de.mycrobase.ssim.ed.mesh;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Random;
import ssim.sim.SimConst;
import ssim.util.FFT;
import ssim.util.MathExt;
import com.jme3.bounding.BoundingBox;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer;
import com.jme3.scene.VertexBuffer.Format;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.scene.VertexBuffer.Usage;
import com.jme3.util.BufferUtils;
import de.mycrobase.ssim.ed.ocean.WaveSpectrum;
public class OceanSurface extends Mesh {
// TODO: convert into single parameter or function to allow variations
private static final float Depth = 4000; // in m
// used to determine pessimistic bounding box
private static final float AssumedMaxWaveHeight = 50; // in m
private int numX;
private int numY;
private int numVertexX;
private int numVertexY;
private float scaleX;
private float scaleZ;
private WaveSpectrum waveSpectrum;
private FFT fft;
private FloatBuffer positionBuffer;
private FloatBuffer normalBuffer;
// sim data
private float accTime;
// fHold saves fixed calculations for later use
private Vector3f[][] fHold;
private Vector2f[][] mH0;
// temporary data, discarded between frames
private Vector3f[][] vPositions;
private Vector3f[][] vNormals;
private Vector3f[][] fNormals;
private Vector2f[][] c;
private Vector2f[][] mDeltaX;
private Vector2f[][] mDeltaY;
private VertexBuffer positionVBO;
private VertexBuffer normalVBO;
private float waveHeightScale;
private float lambda;
public OceanSurface(int numX, int numY, float scaleX, float scaleZ, WaveSpectrum waveSpectrum) {
this.numX = numX;
this.numY = numY;
this.numVertexX = numX+1;
this.numVertexY = numY+1;
this.scaleX = scaleX;
this.scaleZ = scaleZ;
this.waveSpectrum = waveSpectrum;
fft = new FFT();
initGeometry();
}
public float getWaveHeightScale() {
return waveHeightScale;
}
public void setWaveHeightScale(float waveHeightScale) {
this.waveHeightScale = waveHeightScale;
}
public float getLambda() {
return lambda;
}
public void setLambda(float lambda) {
this.lambda = lambda;
}
public void initSim() {
accTime = 0f;
for(int ix = 0; ix < numVertexX; ix++) {
for(int iy = 0; iy < numVertexY; iy++) {
vPositions[ix][iy] = new Vector3f();
vNormals[ix][iy] = new Vector3f();
}
}
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
fHold[ix][iy] = new Vector3f();
mH0[ix][iy] = new Vector2f();
fNormals[ix][iy] = new Vector3f();
c[ix][iy] = new Vector2f();
mDeltaX[ix][iy] = new Vector2f();
mDeltaY[ix][iy] = new Vector2f();
}
}
Random r = new Random();
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
// horizontal components of K, the movement direction
fHold[ix][iy].x = 2f * MathExt.PI * ((float) ix - numX/2) / scaleX;
fHold[ix][iy].y = 2f * MathExt.PI * ((float) iy - numY/2) / scaleZ;
// length named k of movement vector K
fHold[ix][iy].z = (float)
Math.sqrt(fHold[ix][iy].x*fHold[ix][iy].x + fHold[ix][iy].y*fHold[ix][iy].y);
float phillipsRoot =
(float) Math.sqrt(waveSpectrum.getWaveCoefficient(fHold[ix][iy])) * MathExt.INV_SQRT_TWO;
mH0[ix][iy].set(
(float) (r.nextGaussian() * phillipsRoot),
(float) (r.nextGaussian() * phillipsRoot));
}
}
}
public void update(float dt) {
accTime += dt;
updateWaveCoefficients();
updateFaceNormals();
updateVertexPositions();
updateVertexNormals();
// final step: bring data model into vertex buffers
updateGridDataVBOs();
}
// helper
private void initGeometry() {
// vertex data
positionBuffer = BufferUtils.createFloatBuffer((numVertexX * numVertexY + 4) * 3);
normalBuffer = BufferUtils.createFloatBuffer((numVertexX * numVertexY + 4) * 3);
// CPU only data
vPositions = new Vector3f[numVertexX][numVertexY];
vNormals = new Vector3f[numVertexX][numVertexY];
fHold = new Vector3f[numX][numY];
mH0 = new Vector2f[numX][numY];
fNormals = new Vector3f[numX][numY];
c = new Vector2f[numX][numY];
mDeltaX = new Vector2f[numX][numY];
mDeltaY = new Vector2f[numX][numY];
positionVBO = new VertexBuffer(Type.Position);
positionVBO.setupData(Usage.Stream, 3, Format.Float, positionBuffer);
setBuffer(positionVBO);
normalVBO = new VertexBuffer(Type.Normal);
normalVBO.setupData(Usage.Stream, 3, Format.Float, normalBuffer);
setBuffer(normalVBO);
// TODO: TriangleStrip
setMode(Mode.Triangles);
setBound(new BoundingBox(
new Vector3f(0, -AssumedMaxWaveHeight, 0),
new Vector3f(scaleX, +AssumedMaxWaveHeight, scaleZ)
));
VertexBuffer[] indexVBOs = initTriangleIndexVBOs();
setBuffer(indexVBOs[0]);
setLodLevels(indexVBOs);
}
private VertexBuffer[] initTriangleIndexVBOs() {
IntBuffer indexBuffer = BufferUtils.createIntBuffer(numX * numY * 2 * 3 * 1);
IntBuffer index2Buffer = BufferUtils.createIntBuffer(1 * 2 * 3);
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
// first triangle
indexBuffer.put(getIndexFor(ix, iy));
indexBuffer.put(getIndexFor(ix, iy+1));
indexBuffer.put(getIndexFor(ix+1, iy+1));
// second triangle
indexBuffer.put(getIndexFor(ix, iy));
indexBuffer.put(getIndexFor(ix+1, iy+1));
indexBuffer.put(getIndexFor(ix+1, iy));
}
}
indexBuffer.rewind();
int offset = numVertexX * numVertexY;
index2Buffer.put(offset+0).put(offset+1).put(offset+3);
index2Buffer.put(offset+0).put(offset+3).put(offset+2);
index2Buffer.rewind();
VertexBuffer indexVBO = new VertexBuffer(Type.Index);
indexVBO.setupData(Usage.Static, 1, Format.UnsignedInt, indexBuffer);
indexVBO.updateData(indexBuffer);
VertexBuffer index2VBO = new VertexBuffer(Type.Index);
index2VBO.setupData(Usage.Static, 1, Format.UnsignedInt, index2Buffer);
index2VBO.updateData(index2Buffer);
return new VertexBuffer[] { indexVBO, index2VBO };
}
private void updateWaveCoefficients() {
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
double wkt = Math.sqrt(fHold[ix][iy].z * SimConst.g * Math.tanh(fHold[ix][iy].z * Depth)) * accTime;
double sinwkt = Math.sin(wkt);
double coswkt = Math.cos(wkt);
// calculate h~(K, t) from the Tessendorf paper
c[ix][iy].set(
(float) (mH0[ix][iy].x*coswkt + mH0[ix][iy].y*sinwkt + mH0[numX-1-ix][numY-1-iy].x*coswkt - mH0[numX-1-ix][numY-1-iy].y*sinwkt),
(float) (mH0[ix][iy].y*coswkt + mH0[ix][iy].x*sinwkt - mH0[numX-1-ix][numY-1-iy].y*coswkt - mH0[numX-1-ix][numY-1-iy].x*sinwkt)
);
}
}
// set up the DX-DY-choppiness, needs all c values in position *before*
// inverse FFT on c
updateChoppinessDelta();
// do the inverse FFT to get the surface
fft.iFFT2D(c);
// create negative power term
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
//if((ix+iy) % 2 != 0) c[ix][iy].x *= -1;
if(((ix+iy) & 0x01) != 0) c[ix][iy].x = -c[ix][iy].x;
}
}
}
private void updateChoppinessDelta() {
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
float k = fHold[ix][iy].z;
if(k == 0) {
mDeltaX[ix][iy].set(0, 0);
mDeltaY[ix][iy].set(0, 0);
} else {
mDeltaX[ix][iy].set(0, c[ix][iy].y * -fHold[ix][iy].x/k);
mDeltaY[ix][iy].set(0, c[ix][iy].y * -fHold[ix][iy].y/k);
}
}
}
// TODO: maybe parallelize
fft.iFFT2D(mDeltaX);
fft.iFFT2D(mDeltaY);
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
float s = lambda;
if((ix+iy) % 2 != 0) {
s *= -1;
}
mDeltaX[ix][iy].multLocal(s);
mDeltaY[ix][iy].multLocal(s);
}
}
}
private void updateFaceNormals() {
float xStep = scaleX/numX;
float yStep = scaleZ/numY;
for(int ix = 0; ix < numX; ix++) {
int ixRight = MathExt.wrapByMax(ix+1, numX);
for(int iy = 0; iy < numY; iy++) {
int iyRight = MathExt.wrapByMax(iy+1, numY);
// TODO: does not take mDelta into account
float tax = 0;
float tay = (c[ix][iyRight].x-c[ix][iy].x) * waveHeightScale;
float taz = yStep;
float tbx = xStep;
float tby = (c[ixRight][iy].x-c[ix][iy].x) * waveHeightScale;
float tbz = 0;
// cross product
float tcx = tay*tbz - taz*tby;
float tcy = taz*tbx - tax*tbz;
float tcz = tax*tby - tay*tbx;
// set and normalize
fNormals[ix][iy].set(tcx, tcy, tcz);
fNormals[ix][iy].normalizeLocal();
}
}
}
private void updateVertexPositions() {
for(int ix = 0; ix < numX; ix++) {
for(int iy = 0; iy < numY; iy++) {
vPositions[ix][iy].x = (float) ix/numX * scaleX + mDeltaX[ix][iy].y;
vPositions[ix][iy].y = c[ix][iy].x * waveHeightScale;
vPositions[ix][iy].z = (float) iy/numY * scaleZ + mDeltaY[ix][iy].y;
}
}
for(int iy = 0; iy < numVertexY-1; iy++) {
vPositions[numVertexX-1][iy].set(vPositions[0][iy].x+scaleX, vPositions[0][iy].y, vPositions[0][iy].z);
}
for(int ix = 0; ix < numVertexX-1; ix++) {
vPositions[ix][numVertexY-1].set(vPositions[ix][0].x, vPositions[ix][0].y, vPositions[ix][0].z+scaleZ);
}
vPositions[numVertexX-1][numVertexY-1].set(vPositions[0][0].x+scaleX, vPositions[0][0].y, vPositions[0][0].z+scaleZ);
}
private void updateVertexNormals() {
for(int ix = 0; ix < numX; ix++) {
int ixLeft = MathExt.wrapByMax(ix-1, numX);
for(int iy = 0; iy < numY; iy++) {
int iyLeft = MathExt.wrapByMax(iy-1, numY);
float xsum = fNormals[ix][iy].x + fNormals[ixLeft][iy].x + fNormals[ix][iyLeft].x + fNormals[ixLeft][iyLeft].x;
float ysum = fNormals[ix][iy].y + fNormals[ixLeft][iy].y + fNormals[ix][iyLeft].y + fNormals[ixLeft][iyLeft].y;
float zsum = fNormals[ix][iy].z + fNormals[ixLeft][iy].z + fNormals[ix][iyLeft].z + fNormals[ixLeft][iyLeft].z;
vNormals[ix][iy].set(xsum/4, ysum/4, zsum/4);
}
}
for(int iy = 0; iy < numVertexY-1; iy++) {
vNormals[numVertexX-1][iy].set(vNormals[0][iy]);
}
for(int ix = 0; ix < numVertexX-1; ix++) {
vNormals[ix][numVertexY-1].set(vNormals[ix][0]);
}
vNormals[numVertexX-1][numVertexY-1].set(vNormals[0][0]);
}
private void updateGridDataVBOs() {
// apply the indexing scheme given by #getIndexFor(int ix, int iy)
for(int ix = 0; ix < numVertexX; ix++) {
for(int iy = 0; iy < numVertexY; iy++) {
put(positionBuffer, vPositions[ix][iy]);
put(normalBuffer, vNormals[ix][iy]);
}
}
// add vertices for index2Buffer
positionBuffer.put(0).put(0).put(0);
positionBuffer.put(0).put(0).put(scaleZ);
positionBuffer.put(scaleX).put(0).put(0);
positionBuffer.put(scaleX).put(0).put(scaleZ);
put(normalBuffer, Vector3f.UNIT_Y);
put(normalBuffer, Vector3f.UNIT_Y);
put(normalBuffer, Vector3f.UNIT_Y);
put(normalBuffer, Vector3f.UNIT_Y);
positionBuffer.rewind();
normalBuffer.rewind();
positionVBO.updateData(positionBuffer);
normalVBO.updateData(normalBuffer);
}
private int getIndexFor(int ix, int iy) {
return ix * numVertexY + iy;
}
private static void put(FloatBuffer buffer, Vector3f v) {
buffer.put(v.x);
buffer.put(v.y);
buffer.put(v.z);
}
}
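// Hedged sketch (not part of the original source): the phase term computed in
// updateWaveCoefficients() is the finite-depth dispersion relation used in the
// Tessendorf ocean papers, omega(k) = sqrt(g * k * tanh(k * depth)), multiplied
// by the accumulated time. Isolated as its own helper:
class DispersionSketch {
    /** Angular frequency of a wave with wavenumber k in water of depth d. */
    static double omega(double k, double g, double d) {
        return Math.sqrt(k * g * Math.tanh(k * d));
    }
}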
|
package com.strategy.havannah.logic;
import java.io.PrintStream;
import net.sf.javabdd.BDD;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.strategy.api.logic.BddCache;
import com.strategy.api.logic.Position;
import com.strategy.util.Preferences;
import com.strategy.util.StoneColor;
public class BddCacheHavannah implements BddCache {
// private Map<BddCacheIndex, BDD> cache;
private Cache<BddCacheIndex, BDD> cache;
// private BDDCacheStatus stats;
public BddCacheHavannah() {
// cache = Maps.newHashMap();
// stats = new BDDCacheStatus();
RemovalListener<BddCacheIndex, BDD> listener = new RemovalListener<BddCache.BddCacheIndex, BDD>() {
@Override
public void onRemoval(
RemovalNotification<BddCacheIndex, BDD> notification) {
notification.getValue().free();
}
};
// cache = CacheBuilder.newBuilder().recordStats().maximumSize(1)
// .removalListener(listener).build();
cache = CacheBuilder.newBuilder().recordStats()
.removalListener(listener).build();
}
@Override
public BDD restore(StoneColor color, Position p, Position q, int i) {
// stats.incrementRestores();
// if (cache.containsKey(BddCacheIndex.getIndex(color, p, q, i))) {
// return cache.get(BddCacheIndex.getIndex(color, p, q, i)).id();
// } else if (cache.containsKey(BddCacheIndex.getIndex(color, q, p, i)))
// return cache.get(BddCacheIndex.getIndex(color, q, p, i)).id();
// } else {
// return null;
if (cache.asMap().containsKey(BddCacheIndex.getIndex(color, p, q, i))) {
return cache.getIfPresent(BddCacheIndex.getIndex(color, p, q, i))
.id();
} else if (cache.asMap().containsKey(
BddCacheIndex.getIndex(color, q, p, i))) {
return cache.getIfPresent(BddCacheIndex.getIndex(color, q, p, i))
.id();
} else {
return null;
}
}
@Override
public BDD store(StoneColor color, Position p, Position q, int i, BDD bdd) {
// stats.incrementStores();
if (null == bdd) {
return null;
}
cache.put(BddCacheIndex.getIndex(color, p, q, i), bdd.id());
return bdd.id();
}
@Override
public boolean isCached(StoneColor color, Position p, Position q, int i) {
return cache.asMap()
.containsKey(BddCacheIndex.getIndex(color, p, q, i))
|| cache.asMap().containsKey(
BddCacheIndex.getIndex(color, q, p, i));
// return false;
}
@Override
public void free() {
// Output.print(stats.toString(), BddCacheHavannah.class);
// for (BDD bdd : cache.values()) {
// bdd.free();
// cache.clear();
PrintStream out = Preferences.getInstance().getOut();
if (null != out) {
out.println(cache.stats());
}
cache.invalidateAll();
}
}
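// Illustrative usage sketch (hypothetical names, not taken from the original callers):
// restore() and isCached() try both the (p, q) and (q, p) orderings of the index, so a
// BDD stored for one ordering is also found for the reverse. store() caches an id() copy
// and hands back another copy, and the removal listener frees the cached copy when the
// entry is evicted or when free() invalidates the whole cache.
//
//   BddCache cache = new BddCacheHavannah();
//   if (!cache.isCached(StoneColor.WHITE, p, q, depth)) {
//       cache.store(StoneColor.WHITE, p, q, depth, computedBdd);
//   }
//   BDD hit = cache.restore(StoneColor.WHITE, q, p, depth); // caller owns and frees this copy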
|
package org.apache.jmeter.protocol.http.sampler;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URLConnection;
import org.apache.jmeter.config.Argument;
import org.apache.jmeter.testelement.property.PropertyIterator;
/**
* @author Michael Stover
* @version $Revision$
*/
public class PostWriter
{
protected final static String BOUNDARY =
"---------------------------7d159c1302d0y0"; // arbitrary multipart boundary marker
protected final static byte[] CRLF = { 0x0d, 0x0A };//TODO: make private?
//protected static int fudge = -20;
protected static final String encoding = "iso-8859-1";
/**
* Send POST data from Entry to the open connection.
*/
public void sendPostData(URLConnection connection, HTTPSampler sampler)
throws IOException
{
// If filename was specified then send the post using multipart syntax
String filename = sampler.getFilename();
if ((filename != null) && (filename.trim().length() > 0))
{
OutputStream out = connection.getOutputStream();
//new FileOutputStream("c:\\data\\experiment.txt");
//new ByteArrayOutputStream();
writeln(out, "--" + BOUNDARY);
PropertyIterator args = sampler.getArguments().iterator();
while (args.hasNext())
{
Argument arg = (Argument) args.next().getObjectValue();
writeFormMultipartStyle(
out,
arg.getName(),
(String) arg.getValue());
writeln(out, "--" + BOUNDARY);
}
writeFileToURL(
out,
filename,
sampler.getFileField(),
getFileStream(filename),
sampler.getMimetype());
writeln(out, "--" + BOUNDARY + "--");
out.flush();
out.close();
}
// No filename specified, so send the post using normal syntax
else
{
String postData = sampler.getQueryString();
PrintWriter out = new PrintWriter(connection.getOutputStream());
out.print(postData);
out.flush();
}
}
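// For orientation (illustrative only; field names and file details are made up): with a
// file configured, sendPostData() above emits a body of roughly this shape, each part
// delimited by "--" + BOUNDARY and the whole body closed by "--" + BOUNDARY + "--":
//
//   --<BOUNDARY>
//   Content-Disposition: form-data; name="param1"
//
//   value1
//   --<BOUNDARY>
//   Content-Disposition: form-data; name="fileField"; filename="report.txt"
//   Content-Type: text/plain
//
//   <raw file bytes>
//   --<BOUNDARY>--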
public void setHeaders(URLConnection connection, HTTPSampler sampler)
throws IOException
{
((HttpURLConnection) connection).setRequestMethod("POST");
// If filename was specified then send the post using multipart syntax
String filename = sampler.getFileField();
if ((filename != null) && (filename.trim().length() > 0))
{
connection.setRequestProperty(
"Content-type",
"multipart/form-data; boundary=" + BOUNDARY);
connection.setDoOutput(true);
connection.setDoInput(true);
}
// No filename specified, so send the post using normal syntax
else
{
String postData = sampler.getQueryString();
connection.setRequestProperty(
"Content-length",
"" + postData.length());
connection.setRequestProperty(
"Content-type",
"application/x-www-form-urlencoded");
connection.setDoOutput(true);
}
}
private InputStream getFileStream(String filename) throws IOException
{
return new BufferedInputStream(new FileInputStream(filename));
}
/* NOTUSED
private String getContentLength(MultipartUrlConfig config)
{
long size = 0;
size += BOUNDARY.length() + 2;
PropertyIterator iter = config.getArguments().iterator();
while (iter.hasNext())
{
Argument item = (Argument) iter.next().getObjectValue();
size += item.getName().length()
+ item.getValue().toString().length();
size += CRLF.length * 4;
size += BOUNDARY.length() + 2;
size += 39;
}
size += new File(config.getFilename()).length();
size += CRLF.length * 5;
size += BOUNDARY.length() + 2;
size += encode(config.getFileFieldName()).length();
size += encode(config.getFilename()).length();
size += config.getMimeType().length();
size += 66;
size += 2 + (CRLF.length * 1);
return Long.toString(size);
}
*/
/**
* Writes out the contents of a file in correct multipart format.
*/
private void writeFileToURL(
OutputStream out,
String filename,
String fieldname,
InputStream in,
String mimetype)
throws IOException
{
writeln(
out,
"Content-Disposition: form-data; name=\""
+ encode(fieldname)
+ "\"; filename=\""
+ encode(filename)
+ "\"");
writeln(out, "Content-Type: " + mimetype);
out.write(CRLF);
byte[] buf = new byte[1024];
//1k - the previous 100k made no sense (there's tons of buffers
// elsewhere in the chain) and it caused OOM when many concurrent
// uploads were being done. Could be fixed by increasing the evacuation
// ratio in bin/jmeter[.bat], but this is better.
int read;
while ((read = in.read(buf)) > 0)
{
out.write(buf, 0, read);
}
out.write(CRLF);
in.close();
}
/**
* Writes form data in multipart format.
*/
private void writeFormMultipartStyle(
OutputStream out,
String name,
String value)
throws IOException
{
writeln(out, "Content-Disposition: form-data; name=\"" + name + "\"");
out.write(CRLF);
writeln(out, value);
}
private String encode(String value)
{
StringBuffer newValue = new StringBuffer();
char[] chars = value.toCharArray();
for (int i = 0; i < chars.length; i++)
{
if (chars[i] == '\\')
{
newValue.append("\\\\");
}
else
{
newValue.append(chars[i]);
}
}
return newValue.toString();
}
/* NOTUSED
private void write(OutputStream out, String value)
throws UnsupportedEncodingException, IOException
{
out.write(value.getBytes(encoding));
}
*/
private void writeln(OutputStream out, String value)
throws UnsupportedEncodingException, IOException
{
out.write(value.getBytes(encoding));
out.write(CRLF);
}
}
|
package org.xwiki.test.ui.po;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.LocaleUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.FindBys;
import org.xwiki.test.ui.po.editor.ClassEditPage;
import org.xwiki.test.ui.po.editor.ObjectEditPage;
import org.xwiki.test.ui.po.editor.RightsEditPage;
import org.xwiki.test.ui.po.editor.WYSIWYGEditPage;
import org.xwiki.test.ui.po.editor.WikiEditPage;
/**
* Represents the common actions possible on all Pages.
*
* @version $Id$
* @since 3.2M3
*/
public class BasePage extends BaseElement
{
private static final By DRAWER_MATCHER = By.id("tmDrawer");
/**
* Used for sending keyboard shortcuts to.
*/
@FindBy(id = "xwikimaincontainer")
private WebElement mainContainerDiv;
/**
* The top floating content menu bar.
*/
@FindBy(id = "contentmenu")
private WebElement contentMenuBar;
@FindBy(xpath = "//div[@id='tmCreate']/a[contains(@role, 'button')]")
private WebElement tmCreate;
@FindBy(xpath = "//div[@id='tmMoreActions']/a[contains(@role, 'button')]")
private WebElement moreActionsMenu;
@FindBy(id = "tmDrawerActivator")
private WebElement drawerActivator;
@FindBy(xpath = "//input[@id='tmWatchDocument']/../span[contains(@class, 'bootstrap-switch-label')]")
private WebElement watchDocumentLink;
@FindBy(id = "tmPage")
private WebElement pageMenu;
@FindBys({ @FindBy(id = "tmRegister"), @FindBy(tagName = "a") })
private WebElement registerLink;
@FindBy(xpath = "//a[@id='tmLogin']")
private WebElement loginLink;
@FindBy(xpath = "//a[@id='tmUser']")
private WebElement userLink;
@FindBy(xpath = "//li[contains(@class, 'navbar-avatar')]//img[contains(@class, 'avatar')]")
private WebElement userAvatarImage;
@FindBy(id = "document-title")
private WebElement documentTitle;
@FindBy(xpath = "//input[@id='tmWatchSpace']/../span[contains(@class, 'bootstrap-switch-label')]")
private WebElement watchSpaceLink;
@FindBy(xpath = "//input[@id='tmWatchWiki']/../span[contains(@class, 'bootstrap-switch-label')]")
private WebElement watchWikiLink;
@FindBy(css = "#tmMoreActions a[title='Children']")
private WebElement childrenLink;
@FindBy(id = "tmNotifications")
private WebElement notificationsMenu;
/**
* Used to scroll the page to the top before accessing the floating menu.
*/
@FindBy(id = "companylogo")
protected WebElement logo;
/**
* Note: when reusing instances of BasePage, the constructor no longer does this work and the
* waitUntilPageJSIsLoaded() method needs to be called manually, when needed.
* <p>
* Note2: Never call the constructor before navigating to the page you need to test first.
*/
public BasePage()
{
super();
waitUntilPageJSIsLoaded();
}
public String getPageTitle()
{
return getDriver().getTitle();
}
// TODO I think this should be in the AbstractTest instead -cjdelisle
public String getPageURL()
{
return getDriver().getCurrentUrl();
}
/**
* @param metaName the name of the XWiki document metadata
* @return the value of the specified XWiki document metadata for the current XWiki document
* @see #getHTMLMetaDataValue(String)
*/
public String getMetaDataValue(String metaName)
{
return getDriver().findElement(By.xpath("/html")).getAttribute("data-xwiki-" + metaName);
}
/**
* @param metaName the name of the HTML meta field
* @return the value of the requested HTML meta field with from the current page
* @since 7.2RC1
*/
public String getHTMLMetaDataValue(String metaName)
{
return getDriver().findElement(By.xpath("//meta[@name='" + metaName + "']")).getAttribute("content");
}
/**
* @return true if we are currently logged in, false otherwise
*/
public boolean isAuthenticated()
{
return getDriver().hasElementWithoutWaiting(By.id("tmUser"));
}
/**
* Determine if the current page is a new document.
*
* @return true if the document is new, false otherwise
*/
public boolean isNewDocument()
{
return (Boolean) ((JavascriptExecutor) getDriver()).executeScript("return XWiki.docisnew");
}
/**
* Perform a click on a "edit menu" sub-menu entry.
*
* @param id The id of the entry to follow
*/
protected void clickEditSubMenuEntry(String id)
{
clickSubMenuEntryFromMenu(By.xpath("//div[@id='tmEdit']/a[contains(@role, 'button')]"), id);
}
/**
* Performs a click on the "edit" button.
*/
public void edit()
{
WebElement editMenuButton =
getDriver().findElement(By.xpath("//div[@id='tmEdit']/a[contains(@role, 'button')]"));
editMenuButton.click();
}
/**
* Gets a string representation of the URL for editing the page.
*/
public String getEditURL()
{
return getDriver().findElement(By.xpath("//div[@id='tmEdit']//a")).getAttribute("href");
}
/**
* Performs a click on the "edit wiki" entry of the content menu.
*/
public WikiEditPage editWiki()
{
clickEditSubMenuEntry("tmEditWiki");
return new WikiEditPage();
}
/**
* Performs a click on the "edit wysiwyg" entry of the content menu.
*/
public WYSIWYGEditPage editWYSIWYG()
{
clickEditSubMenuEntry("tmEditWysiwyg");
return new WYSIWYGEditPage();
}
/**
* Performs a click on the "edit inline" entry of the content menu.
*/
public <T extends InlinePage> T editInline()
{
clickEditSubMenuEntry("tmEditInline");
return createInlinePage();
}
/**
* Can be overridden to return extended {@link InlinePage}.
*/
@SuppressWarnings("unchecked")
protected <T extends InlinePage> T createInlinePage()
{
return (T) new InlinePage();
}
/**
* Performs a click on the "edit acces rights" entry of the content menu.
*/
public RightsEditPage editRights()
{
clickEditSubMenuEntry("tmEditRights");
return new RightsEditPage();
}
/**
* Performs a click on the "edit objects" entry of the content menu.
*/
public ObjectEditPage editObjects()
{
clickEditSubMenuEntry("tmEditObject");
return new ObjectEditPage();
}
/**
* Performs a click on the "edit class" entry of the content menu.
*/
public ClassEditPage editClass()
{
clickEditSubMenuEntry("tmEditClass");
return new ClassEditPage();
}
/**
* @since 3.2M3
*/
public void sendKeys(CharSequence... keys)
{
this.mainContainerDiv.sendKeys(keys);
}
/**
* Waits until the page has loaded. Normally we don't need to call this method since a click in Selenium2 is a
* blocking call. However there are cases (such as when using a shortcut) when we asynchronously load a page.
*
* @return this page
* @since 3.2M3
*/
public BasePage waitUntilPageIsLoaded()
{
getDriver().waitUntilElementIsVisible(By.id("footerglobal"));
return this;
}
/**
* @since 7.2M3
*/
public void toggleDrawer()
{
if (isElementVisible(DRAWER_MATCHER)) {
// The drawer is visible, so we close it by clicking outside the drawer
this.mainContainerDiv.click();
getDriver().waitUntilElementDisappears(DRAWER_MATCHER);
} else {
// The drawer is not visible, so we open it
this.drawerActivator.click();
getDriver().waitUntilElementIsVisible(DRAWER_MATCHER);
}
}
/**
* @return true if the drawer used to be hidden
* @since 8.4.5
* @since 9.0RC1
*/
public boolean showDrawer()
{
if (!isElementVisible(DRAWER_MATCHER)) {
// The drawer is not visible, so we open it
this.drawerActivator.click();
getDriver().waitUntilElementIsVisible(DRAWER_MATCHER);
return true;
}
return false;
}
/**
* @return true if the drawer used to be displayed
* @since 8.4.5
* @since 9.0RC1
*/
public boolean hideDrawer()
{
if (isElementVisible(DRAWER_MATCHER)) {
// The drawer is visible, so we close it by clicking outside the drawer
this.mainContainerDiv.click();
getDriver().waitUntilElementDisappears(DRAWER_MATCHER);
return true;
}
return false;
}
/**
* @since 8.4.5
* @since 9.0RC1
*/
public boolean isDrawerVisible()
{
return isElementVisible(DRAWER_MATCHER);
}
/**
* @since 7.2M3
*/
public void toggleActionMenu()
{
this.moreActionsMenu.click();
}
/**
* @since 7.0RC1
*/
public void clickMoreActionsSubMenuEntry(String id)
{
clickSubMenuEntryFromMenu(By.xpath("//div[@id='tmMoreActions']/a[contains(@role, 'button')]"), id);
}
/**
* @since 7.3M2
*/
public void clickAdminActionsSubMenuEntry(String id)
{
clickSubMenuEntryFromMenu(By.xpath("//div[@id='tmMoreActions']/a[contains(@role, 'button')]"), id);
}
/**
* @since 7.0RC1
*/
private void clickSubMenuEntryFromMenu(By menuBy, String id)
{
// Open the parent Menu
getDriver().findElement(menuBy).click();
// Wait for the submenu entry to be visible
getDriver().waitUntilElementIsVisible(By.id(id));
// Click on the specified entry
getDriver().findElement(By.id(id)).click();
}
/**
* @return {@code true} if the screen is extra small (as defined by Bootstrap), {@code false} otherwise
*/
private boolean isExtraSmallScreen()
{
return getDriver().manage().window().getSize().getWidth() < 768;
}
private By getTopMenuToggleSelector(String menuId)
{
String side = isExtraSmallScreen() ? "left" : "right";
return By.xpath("//li[@id='" + menuId + "']//a[contains(@class, 'dropdown-split-" + side + "')]");
}
/**
* @since 4.5M1
*/
public CreatePagePage createPage()
{
this.tmCreate.click();
return new CreatePagePage();
}
/**
* @since 4.5M1
*/
public CopyPage copy()
{
clickAdminActionsSubMenuEntry("tmActionCopy");
return new CopyPage();
}
public RenamePage rename()
{
clickAdminActionsSubMenuEntry("tmActionRename");
return new RenamePage();
}
/**
* @since 4.5M1
*/
public ConfirmationPage delete()
{
clickAdminActionsSubMenuEntry("tmActionDelete");
return new ConfirmationPage();
}
/**
* @since 4.5M1
*/
public boolean canDelete()
{
toggleActionMenu();
// Don't wait here since tests can use this method to verify that there's no Delete right on the current page
// and calling hasElement() would incur the wait timeout.
boolean canDelete = getDriver().hasElementWithoutWaiting(By.id("tmActionDelete"));
toggleActionMenu();
return canDelete;
}
/**
* @since 4.5M1
*/
public void watchDocument()
{
toggleNotificationsMenu();
this.watchDocumentLink.click();
toggleActionMenu();
}
/**
* @since 4.5M1
*/
public boolean hasLoginLink()
{
// Note that we cannot test if the loginLink field is accessible since we're using an AjaxElementLocatorFactory
// and thus it would wait 15 seconds before considering it's not accessible.
return !getDriver().findElementsWithoutWaiting(By.id("tmLogin")).isEmpty();
}
/**
* @since 4.5M1
*/
public LoginPage login()
{
toggleDrawer();
this.loginLink.click();
return new LoginPage();
}
/**
* @since 4.5M1
*/
public String getCurrentUser()
{
// We need to show the drawer because #getText() does not allow getting hidden text (but allows finding the
// element and its attributes...)
boolean hide = showDrawer();
String user = this.userLink.getText();
if (hide) {
hideDrawer();
}
return user;
}
/**
* @since 9.0RC1
*/
public List<Locale> getLocales()
{
List<WebElement> elements =
getDriver().findElementsWithoutWaiting(By.xpath("//ul[@id='tmLanguages_menu']/li/a"));
List<Locale> locales = new ArrayList<>(elements.size());
for (WebElement element : elements) {
String href = element.getAttribute("href");
Matcher matcher = Pattern.compile(".*\\?.*language=([^=&]*)").matcher(href);
if (matcher.matches()) {
String locale = matcher.group(1);
locales.add(LocaleUtils.toLocale(locale));
}
}
return locales;
}
/**
* @since 9.0RC1
*/
public ViewPage clickLocale(Locale locale)
{
// Open drawer
toggleDrawer();
// Open Languages
WebElement languagesElement = getDriver().findElementWithoutWaiting(By.xpath("//a[@id='tmLanguages']"));
languagesElement.click();
// Wait for the languages submenu to be open
getDriver().waitUntilCondition(webDriver ->
getDriver().findElementWithoutWaiting(By.id("tmLanguages_menu")).getAttribute("class")
.contains("collapse in")
);
// Click passed locale
WebElement localeElement = getDriver().findElementWithoutWaiting(
By.xpath("//ul[@id='tmLanguages_menu']/li/a[contains(@href,'language=" + locale + "')]"));
localeElement.click();
return new ViewPage();
}
/**
* @since 4.5M1
*/
public void logout()
{
toggleDrawer();
getDriver().findElement(By.id("tmLogout")).click();
// Update the CSRF token because the context user has changed (it's guest user now). Otherwise, APIs like
// TestUtils#createUser*(), which expect the currently cached token to be valid, will fail because they would be
// using the token of the previously logged in user.
getUtil().recacheSecretToken();
}
/**
* @since 4.5M1
*/
public RegistrationPage register()
{
toggleDrawer();
this.registerLink.click();
return new RegistrationPage();
}
/**
* @since 4.5M1
*/
public String getDocumentTitle()
{
return this.documentTitle.getText();
}
/**
* @since 4.5M1
*/
public void watchSpace()
{
toggleNotificationsMenu();
this.watchSpaceLink.click();
toggleNotificationsMenu();
}
/**
* @since 6.0M1
*/
public void watchWiki()
{
toggleNotificationsMenu();
this.watchWikiLink.click();
toggleNotificationsMenu();
}
/**
* Waits for the javascript libraries and their plugins that need to load before the UI's elements can be used
* safely.
* <p>
* Subclasses should override this method and add additional checks needed by their logic.
*
* @since 6.2
*/
public void waitUntilPageJSIsLoaded()
{
// Prototype
getDriver().waitUntilJavascriptCondition("return window.Prototype != null && window.Prototype.Version != null");
// JQuery and dependencies
// JQuery dropdown plugin needed for the edit button's dropdown menu.
getDriver().waitUntilJavascriptCondition("return window.jQuery != null && window.jQuery().dropdown != null");
}
/**
* Opens the viewer that lists the children of the current page.
*
* @return the viewer that lists the child pages
* @since 7.3RC1
*/
public ChildrenViewer viewChildren()
{
toggleActionMenu();
this.childrenLink.click();
return new ChildrenViewer();
}
/**
* Says if the notifications menu is present (it is displayed only if it has some content).
*
* @return whether or not the notifications menu is present
* @since 7.4M1
*/
public boolean hasNotificationsMenu()
{
return getDriver().hasElementWithoutWaiting(By.id("tmNotifications"));
}
/**
* Open/Close the notifications menu.
*
* @since 7.4M1
*/
public void toggleNotificationsMenu()
{
boolean hasMenu = isNotificationsMenuOpen();
this.notificationsMenu.click();
if (hasMenu) {
getDriver().waitUntilElementDisappears(this.notificationsMenu, By.className("dropdown-menu"));
} else {
getDriver().waitUntilElementIsVisible(this.notificationsMenu, By.className("dropdown-menu"));
}
}
/**
* @return true if the notifications menu is open
* @since 7.4M1
*/
public boolean isNotificationsMenuOpen()
{
return this.notificationsMenu.findElement(By.className("dropdown-menu")).isDisplayed();
}
/**
* @return the text of uncaught errors
* @since 8.0M1
*/
public String getErrorContent()
{
return getDriver()
.findElementWithoutWaiting(By.xpath("//div[@id = 'mainContentArea']/pre[contains(@class, 'xwikierror')]"))
.getText();
}
/**
* @param panelTitle the panel displayed title
* @return true if the panel is visible in the left panels or false otherwise
* @since 10.6RC1
*/
public boolean hasLeftPanel(String panelTitle)
{
return getDriver().hasElementWithoutWaiting(By.xpath(
"//div[@id = 'leftPanels']/div/h1[@class = 'xwikipaneltitle' and text() = '" + panelTitle +"']"));
}
}
|
package de.wolfi.minopoly.commands;
import java.util.ArrayList;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.Sound;
import org.bukkit.event.EventHandler;
import org.bukkit.event.block.Action;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.scheduler.BukkitTask;
import de.wolfi.minopoly.Main;
import de.wolfi.minopoly.components.Minopoly;
import de.wolfi.minopoly.components.Player;
import de.wolfi.minopoly.events.DiceEvent;
import de.wolfi.minopoly.utils.Messages;
import de.wolfi.utils.ItemBuilder;
import de.wolfi.utils.TitlesAPI;
import io.netty.util.internal.ThreadLocalRandom;
public class DiceCommand extends CommandInterface {
public DiceCommand(Main plugin) {
super(plugin, 0, true);
}
private static final Main MAIN = Main.getMain();
private static final ArrayList<DiceRunnable> scheds = new ArrayList<>();
private static class DiceRunnable implements Runnable {
private BukkitTask task;
private Player player;
private short selected_slot = 0;
private short first = 0;
private DiceRunnable() {
}
@Override
public void run() {
player.getHook().playSound(player.getHook().getLocation(), Sound.CLICK, 1f, 1f);
short dur = (short) (ThreadLocalRandom.current().nextInt(6));
this.selected_slot = dur;
TitlesAPI.sendFullTitle(this.player.getHook(), 0, 10, 0, "§" + String.valueOf(11 % dur) + "Würfel:",
"§" + String.valueOf(dur % 10) + (dur + 1));
this.player.getHook().getInventory().setItem(this.player.getHook().getInventory().getHeldItemSlot(),
new ItemBuilder(Material.INK_SACK).setName("§6Zahl: §a" + (dur + 1)).setMeta((short) dur).build());
try {
Thread.sleep(120);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/*@Override
public void run() {
player.getHook().getInventory().setHeldItemSlot(++selected_slot+DiceCommand.SLOT_OFFSET);
if(selected_slot >= 6) selected_slot = 0;
}*/
public void remove() {
task.cancel();
DiceCommand.scheds.remove(this);
}
public int getFirst() {
return first;
}
public short getValue() {
return selected_slot;
}
}
@EventHandler
public void onInteract(PlayerInteractEvent e) {
DiceRunnable dice = this.getSched(e.getPlayer());
if (e.getAction() != Action.PHYSICAL && dice != null) {
if (dice.getFirst() == 0) {
dice.first = dice.getValue();
} else {
dice.remove();
int first = dice.getFirst();
int second = dice.getValue();
DiceEvent event = new DiceEvent(dice.player, first, second);
Bukkit.getPluginManager().callEvent(event);
Messages.PLAYER_ROLLED_THE_DICE.broadcast(dice.player.getName(), String.valueOf(event.getOne()),
String.valueOf(event.getTwo()));
}
}
}
private DiceRunnable getSched(org.bukkit.entity.Player player) {
DiceRunnable dicing = null;
for (DiceRunnable r : DiceCommand.scheds)
if (r.player.getHook().getUniqueId().equals(player.getUniqueId()))
dicing = r;
return dicing;
}
@Override
protected void executeCommand(Minopoly board, Player player, String[] args) {
DiceRunnable dice = new DiceRunnable();
dice.player = player;
dice.task = Bukkit.getScheduler().runTaskTimer(DiceCommand.MAIN, dice, 3, 1);
scheds.add(dice);
}
}
|
package org.xwiki.rendering.internal.parser.reference;
import java.util.Arrays;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.rendering.listener.reference.DocumentResourceReference;
import org.xwiki.rendering.listener.reference.InterWikiResourceReference;
import org.xwiki.rendering.listener.reference.ResourceReference;
import org.xwiki.rendering.listener.reference.ResourceType;
import org.xwiki.rendering.parser.ResourceReferenceParser;
import org.xwiki.rendering.parser.ResourceReferenceTypeParser;
/**
* Each syntax should have its own resource reference parser. However while we wait for syntax specific parser to be
* implemented this generic parser should provide a good approximation.
*
* @version $Id$
* @since 2.5RC1
*/
@Component
@Named("default/link")
@Singleton
public class GenericLinkReferenceParser extends AbstractResourceReferenceParser
{
/**
* Interwiki separator.
*/
public static final String SEPARATOR_INTERWIKI = "@";
/**
* Query String separator.
*/
public static final String SEPARATOR_QUERYSTRING = "?";
/**
* Anchor separator.
*/
public static final String SEPARATOR_ANCHOR = "#";
/**
* Escape character to allow "#", "@" and "?" characters in a reference's name.
*/
public static final char ESCAPE_CHAR = '\\';
/**
* Escapes to remove from the document reference part when parsing the raw reference (i.e. excluding query string,
* anchor and interwiki parts). Note that we don't remove the escaped escape char since this is how an escape char
* is represented in an Entity Reference.
*/
private static final String[] ESCAPES_REFERENCE = new String[] { ESCAPE_CHAR + SEPARATOR_QUERYSTRING,
ESCAPE_CHAR + SEPARATOR_INTERWIKI, ESCAPE_CHAR + SEPARATOR_ANCHOR };
/**
* Escapes to remove from the query string, anchor and interwiki parts when parsing the raw reference.
*/
private static final String[] ESCAPES_EXTRA = new String[] { ESCAPE_CHAR + SEPARATOR_QUERYSTRING,
ESCAPE_CHAR + SEPARATOR_INTERWIKI, ESCAPE_CHAR + SEPARATOR_ANCHOR, String.valueOf(ESCAPE_CHAR) + ESCAPE_CHAR };
/**
* Escapes to remove from the interwiki content.
*/
private static final String[] ESCAPE_INTERWIKI = new String[] { String.valueOf(ESCAPE_CHAR) + ESCAPE_CHAR, String.valueOf(ESCAPE_CHAR) };
/**
* Replacement chars for the escapes to be removed from the reference part.
*/
private static final String[] ESCAPE_REPLACEMENTS_REFERENCE = new String[] { SEPARATOR_QUERYSTRING,
SEPARATOR_INTERWIKI, SEPARATOR_ANCHOR };
/**
* Replacement chars for the escapes to be removed from the query string, anchor and interwiki parts.
*/
private static final String[] ESCAPE_REPLACEMENTS_EXTRA = new String[] { SEPARATOR_QUERYSTRING,
SEPARATOR_INTERWIKI, SEPARATOR_ANCHOR, String.valueOf(ESCAPE_CHAR) };
/**
* Replacement chars for the escapes to be removed from the interwiki content.
*/
private static final String[] ESCAPE_REPLACEMENTS_INTERWIKI = new String[] { String.valueOf(ESCAPE_CHAR), "" };
/**
* The list of recognized URL prefixes.
*/
private static final List<String> URI_PREFIXES = Arrays.asList("mailto");
/**
* Parser to parse link references pointing to URLs.
*/
@Inject
@Named("url")
private ResourceReferenceTypeParser urlResourceReferenceTypeParser;
@Inject
@Named("link/untyped")
private ResourceReferenceParser untypedLinkReferenceParser;
/**
* @return the list of URI prefixes the link parser recognizes
*/
protected List<String> getAllowedURIPrefixes()
{
return URI_PREFIXES;
}
@Override
public ResourceReference parse(String rawReference)
{
// Step 1: Check if it's a known URI by looking for one of the known URI schemes. If not, check if it's a URL.
ResourceReference resourceReference = parseURILinks(rawReference);
if (resourceReference != null) {
return resourceReference;
}
// Step 2: Look for an InterWiki link
StringBuffer content = new StringBuffer(rawReference);
resourceReference = parseInterWikiLinks(content);
if (resourceReference != null) {
return resourceReference;
}
// Step 3: If we're in non wiki mode, we consider the reference to be a URL.
if (!isInWikiMode()) {
resourceReference = new ResourceReference(rawReference, ResourceType.URL);
resourceReference.setTyped(false);
return resourceReference;
}
// Step 4: Consider that we have a reference to a document
return parseDocumentLink(content);
}
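// Illustrative dispatch examples (assuming the referenced type parsers are registered and
// wiki mode is on; the references themselves are made up):
//
//   "mailto:john@doe.com"  -> step 1, handled by the "mailto" ResourceReferenceTypeParser
//   "http://xwiki.org"     -> step 1, falls through to the "url" parser as an untyped URL
//   "Main Page@wikipedia"  -> step 2, InterWikiResourceReference with alias "wikipedia"
//   "Space.Page?x=1"       -> step 4, document reference with query string "x=1"
//   "Space.Page#section"   -> step 4, document reference with anchor "section"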
/**
* Construct a Document Link reference out of the passed content.
*
* @param content the string containing the Document link reference
* @return the parsed Link Object corresponding to the Document link reference
*/
private ResourceReference parseDocumentLink(StringBuffer content)
{
// Extract any query string.
String queryString = null;
String text = parseElementAfterString(content, SEPARATOR_QUERYSTRING);
if (text != null) {
queryString = removeEscapesFromExtraParts(text);
}
// Extract any anchor.
String anchor = null;
text = parseElementAfterString(content, SEPARATOR_ANCHOR);
if (text != null) {
anchor = removeEscapesFromExtraParts(text);
}
// Make sure to unescape the remaining reference string.
String unescapedReferenceString = removeEscapesFromReferencePart(content.toString());
// Parse the string as an untyped link reference.
ResourceReference reference = untypedLinkReferenceParser.parse(unescapedReferenceString);
reference.setTyped(false);
// Set any previously extracted parameters.
if (StringUtils.isNotBlank(queryString)) {
reference.setParameter(DocumentResourceReference.QUERY_STRING, queryString);
}
if (StringUtils.isNotBlank(anchor)) {
reference.setParameter(DocumentResourceReference.ANCHOR, anchor);
}
return reference;
}
/**
* Check if the passed link references is an URI link reference.
*
* @param rawLink the original reference to parse
* @return the parsed Link object or null if the passed reference is not an URI link reference or if no URI type
* parser was found for the passed URI scheme
*/
private ResourceReference parseURILinks(String rawLink)
{
ResourceReference result = null;
int uriSchemeDelimiterPos = rawLink.indexOf(':');
if (uriSchemeDelimiterPos > -1) {
String scheme = rawLink.substring(0, uriSchemeDelimiterPos);
String reference = rawLink.substring(uriSchemeDelimiterPos + 1);
if (getAllowedURIPrefixes().contains(scheme)) {
try {
ResourceReferenceTypeParser parser =
this.componentManagerProvider.get().getInstance(ResourceReferenceTypeParser.class, scheme);
ResourceReference resourceReference = parser.parse(reference);
if (resourceReference != null) {
result = resourceReference;
}
} catch (ComponentLookupException e) {
// Failed to lookup component, this shouldn't happen but ignore it.
}
} else {
// Check if it's a URL
ResourceReference resourceReference = this.urlResourceReferenceTypeParser.parse(rawLink);
if (resourceReference != null) {
resourceReference.setTyped(false);
result = resourceReference;
}
}
}
return result;
}
/**
* Check if the passed link references is an interwiki link reference.
*
* @param content the original content to parse
* @return the parsed Link object or null if the passed reference is not an interwiki link reference
*/
private ResourceReference parseInterWikiLinks(StringBuffer content)
{
ResourceReference result = null;
String interWikiAlias = parseElementAfterString(content, SEPARATOR_INTERWIKI);
if (interWikiAlias != null) {
InterWikiResourceReference link = new InterWikiResourceReference(removeEscapes(content.toString()));
link.setInterWikiAlias(removeEscapes(interWikiAlias));
result = link;
}
return result;
}
protected String parseElementAfterString(StringBuffer content, String separator)
{
String element = null;
// Find the first non escaped separator (starting from the end of the content buffer).
int index = content.lastIndexOf(separator);
while (index != -1) {
// Check if the element is found and it's not escaped.
if (!shouldEscape(content, index)) {
element = content.substring(index + separator.length()).trim();
content.delete(index, content.length());
break;
}
if (index > 0) {
index = content.lastIndexOf(separator, index - 1);
} else {
break;
}
}
return element;
}
/**
* Count the number of escape chars before a given character; if that number is odd then that character is
* considered escaped.
*
* @param content the content in which to check for escapes
* @param charPosition the position of the char for which to decide if it should be escaped or not
* @return true if the character should be escaped
*/
private boolean shouldEscape(StringBuffer content, int charPosition)
{
int counter = 0;
int pos = charPosition - 1;
while (pos > -1 && content.charAt(pos) == ESCAPE_CHAR) {
counter++;
pos--;
}
return counter % 2 != 0;
}
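// Worked example (illustrative): in the buffer a\\#b the '#' is preceded by two escape
// chars, an even count, so shouldEscape() returns false and parseElementAfterString()
// treats it as a separator; in a\#b it is preceded by a single escape char, an odd count,
// so the '#' counts as escaped and the search continues to the left.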
/**
* @param text the reference from which to remove unneeded escapes
* @return the cleaned text
*/
private String removeEscapesFromReferencePart(String text)
{
return StringUtils.replaceEach(text, ESCAPES_REFERENCE, ESCAPE_REPLACEMENTS_REFERENCE);
}
/**
* @param text the reference from which to remove unneeded escapes
* @return the cleaned text
*/
private String removeEscapesFromExtraParts(String text)
{
return StringUtils.replaceEach(text, ESCAPES_EXTRA, ESCAPE_REPLACEMENTS_EXTRA);
}
/**
* @param text the reference from which to remove unneeded escapes
* @return the cleaned text
*/
private String removeEscapes(String text)
{
return StringUtils.replaceEach(text, ESCAPE_INTERWIKI, ESCAPE_REPLACEMENTS_INTERWIKI);
}
}
|
package me.nallar.patched;
import me.nallar.tickthreading.patcher.Declare;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.network.packet.Packet51MapChunk;
import net.minecraft.server.management.PlayerInstance;
import net.minecraft.server.management.PlayerManager;
import net.minecraft.world.ChunkCoordIntPair;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.world.ChunkWatchEvent;
public abstract class PatchPlayerInstance extends PlayerInstance {
private static byte[] unloadSequence;
public PatchPlayerInstance(PlayerManager par1PlayerManager, int par2, int par3) {
super(par1PlayerManager, par2, par3);
}
@Override
@Declare
public ChunkCoordIntPair getLocation() {
return chunkLocation;
}
public static void staticConstruct() {
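// This byte array appears to be a pre-deflated "empty chunk" payload (0x78 0x9C is the
// standard zlib stream header); sendThisChunkToPlayer() below sends it with an empty
// section bitmask so the client can drop the chunk data when the player stops watching it.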
unloadSequence = new byte[]{0x78, (byte) 0x9C, 0x63, 0x64, 0x1C, (byte) 0xD9, 0x00, 0x00, (byte) 0x81, (byte) 0x80, 0x01, 0x01};
}
@Override
public void sendThisChunkToPlayer(EntityPlayerMP par1EntityPlayerMP) {
if (this.playersInChunk.remove(par1EntityPlayerMP)) {
Packet51MapChunk packet51MapChunk = new Packet51MapChunk();
packet51MapChunk.includeInitialize = true;
packet51MapChunk.xCh = chunkLocation.chunkXPos;
packet51MapChunk.zCh = chunkLocation.chunkZPos;
packet51MapChunk.yChMax = 0;
packet51MapChunk.yChMin = 0;
packet51MapChunk.setData(unloadSequence);
par1EntityPlayerMP.playerNetServerHandler.sendPacketToPlayer(packet51MapChunk);
par1EntityPlayerMP.loadedChunks.remove(this.chunkLocation);
MinecraftForge.EVENT_BUS.post(new ChunkWatchEvent.UnWatch(chunkLocation, par1EntityPlayerMP));
if (this.playersInChunk.isEmpty()) {
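// Pack the chunk coordinates into the manager's long map key: both coordinates are
// offset by Integer.MAX_VALUE and the Z coordinate occupies the high 32 bits.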
long var2 = (long) this.chunkLocation.chunkXPos + 2147483647L | (long) this.chunkLocation.chunkZPos + 2147483647L << 32;
this.myManager.getChunkWatchers().remove(var2);
if (numberOfTilesToUpdate > 0) {
this.myManager.playerUpdateLock.lock();
try {
this.myManager.getChunkWatcherWithPlayers().remove(this);
} finally {
this.myManager.playerUpdateLock.unlock();
}
}
this.myManager.getWorldServer().theChunkProviderServer.unloadChunksIfNotNearSpawn(this.chunkLocation.chunkXPos, this.chunkLocation.chunkZPos);
}
}
}
}
|
package com.czequered.promocodes.controller;
import com.czequered.promocodes.model.Game;
import com.czequered.promocodes.service.ClockService;
import com.czequered.promocodes.service.GameService;
import com.czequered.promocodes.service.TokenService;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.web.context.WebApplicationContext;
import java.io.IOException;
import java.time.Clock;
import java.util.Collections;
import static com.czequered.promocodes.config.Constants.TOKEN_HEADER;
import static org.assertj.core.api.Java6Assertions.assertThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.setup.MockMvcBuilders.webAppContextSetup;
/**
* @author Martin Varga
*/
@ActiveProfiles("resttest")
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
public class GameControllerTest {
@Autowired
private WebApplicationContext webApplicationContext;
@Autowired
private GameService gameService;
@Autowired
private TokenService tokenService;
@Autowired
private ClockService clockService;
@Value("${jepice.jwt.expiry}")
private long expiry;
private MockMvc mockMvc;
private Clock clock;
@Before
public void before() {
mockMvc = webAppContextSetup(webApplicationContext).build();
clock = mock(Clock.class);
System.out.println("clockService.getClass() = " + clockService.getClass());
when(clockService.getClock()).thenReturn(clock);
}
@Test
public void listTest() throws Exception {
Game game = new Game("krtek", "auticko");
when(gameService.getGames(eq("krtek"))).thenReturn(Collections.singletonList(game));
String token = tokenService.generateToken("krtek");
MvcResult result = mockMvc.perform(get("/api/v1/games/list").header(TOKEN_HEADER, token))
.andExpect(status().isOk())
.andReturn();
Game[] games = extractGames(result);
assertThat(games).containsExactly(game);
}
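// The "invalid token" tests below drive JWT expiry through the mocked Clock: the token is
// issued while clock.millis() returns 10000000L, then the clock is advanced past the
// configured jepice.jwt.expiry so the same token is rejected with 403 Forbidden.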
@Test
public void listInvalidTokenTest() throws Exception {
Game game = new Game("krtek", "auticko");
when(clock.millis()).thenReturn(10000000L);
when(gameService.getGames(eq("krtek"))).thenReturn(Collections.singletonList(game));
String token = tokenService.generateToken("krtek");
when(clock.millis()).thenReturn(10000000L + expiry + 1);
mockMvc.perform(get("/api/v1/games/list").header(TOKEN_HEADER, token))
.andExpect(status().isForbidden())
.andReturn();
}
@Test
public void getGameNotFoundTest() throws Exception {
when(gameService.getGame(eq("krtek"), eq("auticko"))).thenReturn(null);
String token = tokenService.generateToken("krtek");
mockMvc.perform(get("/api/v1/games/auticko").header(TOKEN_HEADER, token))
.andExpect(status().isNotFound())
.andReturn();
}
@Test
public void getGameInvalidTokenTest() throws Exception {
Game game = new Game("krtek", "auticko");
when(clock.millis()).thenReturn(10000000L);
when(gameService.getGame(eq("krtek"), eq("auticko"))).thenReturn(game);
String token = tokenService.generateToken("krtek");
when(clock.millis()).thenReturn(10000000L + expiry + 1);
mockMvc.perform(get("/api/v1/games/auticko").header(TOKEN_HEADER, token))
.andExpect(status().isForbidden())
.andReturn();
}
@Test
public void getGameTest() throws Exception {
Game game = new Game("krtek", "auticko");
game.setDetails("Ahoj");
when(gameService.getGame(eq("krtek"), eq("auticko"))).thenReturn(game);
String token = tokenService.generateToken("krtek");
mockMvc.perform(get("/api/v1/games/auticko").header(TOKEN_HEADER, token))
.andExpect(status().isOk())
.andExpect(jsonPath("$.details").value("Ahoj"))
.andReturn();
}
private Game[] extractGames(MvcResult result) throws IOException {
String contentAsString = result.getResponse().getContentAsString();
ObjectMapper mapper = new ObjectMapper();
JsonNode root = mapper.readTree(contentAsString);
return mapper.treeToValue(root, Game[].class);
}
}
|
package hudson.remoting;
import hudson.remoting.Channel.Mode;
import org.apache.commons.io.output.NullOutputStream;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StreamCorruptedException;
import java.io.StringWriter;
import static org.junit.Assert.*;
/**
* @author Kohsuke Kawaguchi
*/
public class DiagnosedStreamCorruptionExceptionTest {
byte[] payload = {
0,0,0,0, /* binary stream preamble*/
(byte)0xAC, (byte)0xED, 0x00, 0x05, /* object input stream header */
1, 2, 3, 4, 5 /* bogus data */
};
@Test
public void exercise() throws Exception {
ClassicCommandTransport ct = (ClassicCommandTransport)
new ChannelBuilder("dummy",null)
.withMode(Mode.BINARY)
.withBaseLoader(getClass().getClassLoader())
.negotiate(new ByteArrayInputStream(payload), new NullOutputStream());
verify(ct);
}
private void verify(ClassicCommandTransport ct) throws IOException, ClassNotFoundException {
try {
ct.read();
fail();
} catch (DiagnosedStreamCorruptionException e) {
StringWriter s = new StringWriter();
PrintWriter w = new PrintWriter(s);
e.printStackTrace(w);
w.close();
String msg = s.toString();
assertTrue(msg,msg.contains("Read ahead: 0x02 0x03 0x04 0x05"));
assertTrue(msg,msg.contains("invalid type code: 01"));
assertSame(StreamCorruptedException.class, e.getCause().getClass());
}
}
/**
* This tests that the diagnosis does not block on a non-completed stream, as the writer end is kept open.
*/
@Test(timeout=3000)
public void blockingStreamShouldNotPreventDiagnosis() throws Exception {
try (FastPipedInputStream in = new FastPipedInputStream();
FastPipedOutputStream out = new FastPipedOutputStream(in)) {
out.write(payload);
ClassicCommandTransport ct = (ClassicCommandTransport)
new ChannelBuilder("dummy",null)
.withMode(Mode.BINARY)
.withBaseLoader(getClass().getClassLoader())
.negotiate(in, new NullOutputStream());
verify(ct);
}
}
}
|
package org.csstudio.diag.pvmanager.probe;
import static org.csstudio.utility.pvmanager.ui.SWTUtil.onSWTThread;
import static org.epics.pvmanager.ExpressionLanguage.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.csstudio.csdata.ProcessVariable;
import org.csstudio.ui.util.helpers.ComboHistoryHelper;
import org.csstudio.ui.util.widgets.MeterWidget;
import org.eclipse.jface.viewers.ComboViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IMemento;
import org.eclipse.ui.IViewSite;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.ViewPart;
import org.epics.pvmanager.PV;
import org.epics.pvmanager.PVManager;
import org.epics.pvmanager.PVValueChangeListener;
import org.epics.pvmanager.PVWriter;
import org.epics.pvmanager.data.Alarm;
import org.epics.pvmanager.data.AlarmSeverity;
import org.epics.pvmanager.data.Display;
import org.epics.pvmanager.data.Enum;
import org.epics.pvmanager.data.SimpleValueFormat;
import org.epics.pvmanager.data.Time;
import org.epics.pvmanager.data.Util;
import org.epics.pvmanager.data.ValueFormat;
import org.epics.pvmanager.util.TimeStampFormat;
/**
* Probe view.
*/
public class PVManagerProbe extends ViewPart {
public PVManagerProbe() {
}
private static final Logger log = Logger.getLogger(PVManagerProbe.class.getName());
/**
* The ID of the view as specified by the extension.
*/
public static final String SINGLE_VIEW_ID = "org.csstudio.diag.pvmanager.probe.SingleView"; //$NON-NLS-1$
public static final String MULTIPLE_VIEW_ID = "org.csstudio.diag.pvmanager.probe.MultipleView"; //$NON-NLS-1$
private static int instance = 0;
// GUI
private Label alarmLabel;
private Label valueLabel;
private Label timestampLabel;
private Label statusLabel;
private Label newValueLabel;
private Label pvNameLabel;
private Label timestampField;
private Label alarmField;
private Label valueField;
private Label statusField;
private ComboViewer pvNameField;
private ComboHistoryHelper pvNameHelper;
private MeterWidget meter;
private Composite topBox;
private Composite bottomBox;
private Button showMeterButton;
private Button saveToIocButton;
private Button infoButton;
private GridData gd_valueField;
private GridData gd_timestampField;
private GridData gd_statusField;
private GridLayout gl_topBox;
private FormData fd_topBox;
private FormData fd_bottomBox;
/** Currently displayed pv */
private ProcessVariable PVName;
/** Currently connected pv */
private PV<?> pv;
/** Current pv write */
private PVWriter<Object> pvWriter;
/** Formatting used for the value text field */
private ValueFormat valueFormat = new SimpleValueFormat(3);
/** Formatting used for the time text field */
private TimeStampFormat timeFormat = new TimeStampFormat("yyyy/MM/dd HH:mm:ss.N Z"); //$NON-NLS-1$
// No writing to ioc option.
// private ICommandListener saveToIocCmdListener;
private Text newValueField;
private static final String SECURITY_ID = "operating"; //$NON-NLS-1$
/** Memento used to preserve the PV name. */
private IMemento memento = null;
/** Memento tag */
private static final String PV_LIST_TAG = "pv_list"; //$NON-NLS-1$
/** Memento tag */
private static final String PV_TAG = "PVName"; //$NON-NLS-1$
/** Memento tag */
private static final String METER_TAG = "meter"; //$NON-NLS-1$
/**
* Id of the save value command.
*/
private static final String SAVE_VALUE_COMMAND_ID = "org.csstudio.platform.ui.commands.saveValue"; //$NON-NLS-1$
@Override
public void init(final IViewSite site, final IMemento memento)
throws PartInitException {
super.init(site, memento);
// Save the memento
this.memento = memento;
}
@Override
public void saveState(final IMemento memento) {
super.saveState(memento);
// Save the currently selected variable
if (PVName != null) {
memento.putString(PV_TAG, PVName.getName());
}
}
public void createPartControl(Composite parent) {
// Create the view
final boolean canExecute = true;
// final boolean canExecute = SecurityFacade.getInstance().canExecute(SECURITY_ID, true);
final FormLayout layout = new FormLayout();
parent.setLayout(layout);
// 3 Boxes, connected via form layout: Top, meter, bottom
// | Meter |
// [x] Adjust
// Status: ...
// Inside top & bottom, it's a grid layout
topBox = new Composite(parent, 0);
GridLayout gl_bottomBox;
gl_topBox = new GridLayout();
gl_topBox.numColumns = 3;
topBox.setLayout(gl_topBox);
Label label;
pvNameLabel = new Label(topBox, SWT.READ_ONLY);
pvNameLabel.setText(Messages.Probe_pvNameLabelText);
pvNameField = new ComboViewer(topBox, SWT.SINGLE | SWT.BORDER);
pvNameField.getCombo().setToolTipText(Messages.Probe_pvNameFieldToolTipText);
GridData gd = new GridData();
gd.grabExcessHorizontalSpace = true;
gd.horizontalAlignment = SWT.FILL;
pvNameField.getCombo().setLayoutData(gd);
infoButton = new Button(topBox, SWT.PUSH);
infoButton.setText(Messages.Probe_infoTitle);
infoButton.setToolTipText(Messages.Probe_infoButtonToolTipText);
// New Box with only the meter
meter = new MeterWidget(parent, 0);
meter.setEnabled(false);
// Button Box
bottomBox = new Composite(parent, 0);
gl_bottomBox = new GridLayout();
gl_bottomBox.numColumns = 3;
bottomBox.setLayout(gl_bottomBox);
valueLabel = new Label(bottomBox, 0);
valueLabel.setText(Messages.Probe_valueLabelText);
valueField = new Label(bottomBox, SWT.BORDER);
gd_valueField = new GridData();
gd_valueField.grabExcessHorizontalSpace = true;
gd_valueField.horizontalAlignment = SWT.FILL;
valueField.setLayoutData(gd_valueField);
showMeterButton = new Button(bottomBox, SWT.CHECK);
showMeterButton.setText(Messages.Probe_showMeterButtonText);
showMeterButton.setToolTipText(Messages.Probe_showMeterButtonToolTipText);
showMeterButton.setSelection(true);
// New Row
timestampLabel = new Label(bottomBox, 0);
timestampLabel.setText(Messages.Probe_timestampLabelText);
timestampField = new Label(bottomBox, SWT.BORDER);
gd_timestampField = new GridData();
gd_timestampField.grabExcessHorizontalSpace = true;
gd_timestampField.horizontalAlignment = SWT.FILL;
timestampField.setLayoutData(gd_timestampField);
saveToIocButton = new Button(bottomBox, SWT.PUSH);
saveToIocButton.setText(Messages.Probe_saveToIocButtonText);
saveToIocButton.setToolTipText(Messages.Probe_saveToIocButtonToolTipText);
gd = new GridData();
gd.horizontalAlignment = SWT.FILL;
saveToIocButton.setLayoutData(gd);
saveToIocButton.setEnabled(canExecute);
alarmLabel = new Label(bottomBox, SWT.NONE);
alarmLabel.setText(Messages.Probe_alarmLabelText);
alarmField = new Label(bottomBox, SWT.BORDER);
alarmField.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false,
false, 1, 1));
alarmField.setText(""); //$NON-NLS-1$
new Label(bottomBox, SWT.NONE);
// New Row
newValueLabel = new Label(bottomBox, 0);
newValueLabel.setText(Messages.Probe_newValueLabelText);
newValueLabel.setVisible(false);
newValueField = new Text(bottomBox, SWT.BORDER);
newValueField.setToolTipText(Messages.Probe_newValueFieldToolTipText);
newValueField.setLayoutData(new GridData(SWT.FILL, 0, true, false));
newValueField.setVisible(false);
newValueField.setText(""); //$NON-NLS-1$
final Button btn_adjust = new Button(bottomBox, SWT.CHECK);
btn_adjust.setText(Messages.S_Adjust);
btn_adjust.setToolTipText(Messages.S_ModValue);
btn_adjust.setEnabled(canExecute);
// Status bar
label = new Label(bottomBox, SWT.SEPARATOR | SWT.HORIZONTAL);
gd = new GridData();
gd.grabExcessHorizontalSpace = true;
gd.horizontalAlignment = SWT.FILL;
gd.horizontalSpan = gl_bottomBox.numColumns;
label.setLayoutData(gd);
statusLabel = new Label(bottomBox, 0);
statusLabel.setText(Messages.Probe_statusLabelText);
statusField = new Label(bottomBox, SWT.BORDER);
statusField.setText(Messages.Probe_statusWaitingForPV);
gd_statusField = new GridData();
gd_statusField.grabExcessHorizontalSpace = true;
gd_statusField.horizontalAlignment = SWT.FILL;
gd_statusField.horizontalSpan = gl_bottomBox.numColumns - 1;
statusField.setLayoutData(gd_statusField);
// Connect the 3 boxes in form layout
FormData fd;
fd_topBox = new FormData();
fd_topBox.left = new FormAttachment(0, 0);
fd_topBox.top = new FormAttachment(0, 0);
fd_topBox.right = new FormAttachment(100, 0);
topBox.setLayoutData(fd_topBox);
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(topBox);
fd.right = new FormAttachment(100, 0);
fd.bottom = new FormAttachment(bottomBox);
meter.setLayoutData(fd);
fd_bottomBox = new FormData();
fd_bottomBox.left = new FormAttachment(0, 0);
fd_bottomBox.right = new FormAttachment(100, 0);
fd_bottomBox.bottom = new FormAttachment(100, 0);
bottomBox.setLayoutData(fd_bottomBox);
// Connect actions
pvNameHelper = new ComboHistoryHelper(Activator.getDefault()
.getDialogSettings(), PV_LIST_TAG, pvNameField.getCombo()) {
@Override
public void newSelection(final String pvName) {
setPVName(new ProcessVariable(pvName));
}
};
pvNameField.getCombo().addDisposeListener(new DisposeListener() {
public void widgetDisposed(final DisposeEvent e) {
if (pv != null)
pv.close();
pvNameHelper.saveSettings();
}
});
infoButton.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(final SelectionEvent ev) {
showInfo();
}
});
btn_adjust.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(final SelectionEvent ev) {
final boolean enable = btn_adjust.getSelection();
newValueLabel.setVisible(enable);
newValueField.setVisible(enable);
newValueField.setText(valueField.getText());
}
});
newValueField.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetDefaultSelected(final SelectionEvent e) {
pvWriter.write(newValueField.getText());
}
});
saveToIocButton.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(final SelectionEvent e) {
// saveToIoc();
}
});
// // Create a listener to enable/disable the Save to IOC button based
// // the availability of a command handler.
// saveToIocCmdListener = new ICommandListener() {
// public void commandChanged(final CommandEvent commandEvent) {
// if (commandEvent.isEnabledChanged()) {
// btn_save_to_ioc.setVisible(commandEvent.getCommand()
// .isEnabled());
// // Set the initial visibility of the button
// updateSaveToIocButtonVisibility();
showMeterButton.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(final SelectionEvent ev) {
showMeter(showMeterButton.getSelection());
}
});
pvNameHelper.loadSettings();
if (memento != null && memento.getString(PV_TAG) != null) {
setPVName(new ProcessVariable(memento.getString(PV_TAG)));
// By default, the meter is shown.
// Hide according to memento.
final String show = memento.getString(METER_TAG);
if ((show != null) && show.equals("false")) //$NON-NLS-1$
{
showMeterButton.setSelection(false);
showMeter(false);
}
}
}
protected void showMeter(final boolean show) {
if (show) { // Meter about to become visible
// Attach bottom box to bottom of screen,
// and meter stretches between top and bottom box.
final FormData fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.right = new FormAttachment(100, 0);
fd.bottom = new FormAttachment(100, 0);
bottomBox.setLayoutData(fd);
} else { // Meter about to be hidden.
// Attach bottom box to top box.
final FormData fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(topBox);
fd.right = new FormAttachment(100, 0);
bottomBox.setLayoutData(fd);
}
meter.setVisible(show);
meter.getShell().layout(true, true);
}
protected void showInfo() {
final String nl = "\n"; //$NON-NLS-1$
final String space = " "; //$NON-NLS-1$
final String indent = " "; //$NON-NLS-1$
final StringBuilder info = new StringBuilder();
if (pv == null) {
info.append(Messages.Probe_infoStateNotConnected).append(nl);
} else {
Object value = pv.getValue();
Alarm alarm = Util.alarmOf(value);
Display display = Util.displayOf(value);
Class<?> type = Util.typeOf(value);
//info.append(Messages.S_ChannelInfo).append(" ").append(pv.getName()).append(nl); //$NON-NLS-1$
if (pv.getValue() == null) {
info.append(Messages.Probe_infoStateDisconnected).append(nl);
} else {
if (alarm != null
&& AlarmSeverity.UNDEFINED.equals(alarm
.getAlarmSeverity())) {
info.append(Messages.Probe_infoStateDisconnected).append(nl);
} else {
info.append(Messages.Probe_infoStateConnected).append(nl);
}
}
if (type != null) {
info.append(Messages.Probe_infoDataType).append(space).append(type.getSimpleName())
.append(nl);
}
if (display != null) {
info.append(Messages.Probe_infoNumericDisplay).append(nl)
.append(indent).append(Messages.Probe_infoLowDisplayLimit).append(space)
.append(display.getLowerDisplayLimit()).append(nl)
.append(indent).append(Messages.Probe_infoLowAlarmLimit).append(space)
.append(display.getLowerAlarmLimit()).append(nl)
.append(indent).append(Messages.Probe_infoLowWarnLimit).append(space)
.append(display.getLowerWarningLimit()).append(nl)
.append(indent).append(Messages.Probe_infoHighWarnLimit).append(space)
.append(display.getUpperWarningLimit()).append(nl)
.append(indent).append(Messages.Probe_infoHighAlarmLimit).append(space)
.append(display.getUpperAlarmLimit()).append(nl)
.append(indent).append(Messages.Probe_infoHighDisplayLimit).append(space)
.append(display.getUpperDisplayLimit()).append(nl);
}
if (value instanceof org.epics.pvmanager.data.Enum) {
Enum enumValue = (Enum) value;
info.append(Messages.Probe_infoEnumMetadata).append(space)
.append(enumValue.getLabels().size()).append(space).append(Messages.Probe_infoLabels)
.append(nl);
for (String label : enumValue.getLabels()) {
info.append(indent).append(label).append(nl);
}
}
}
if (info.length() == 0) {
info.append(Messages.Probe_infoNoInfoAvailable);
}
final MessageBox box = new MessageBox(valueField.getShell(),
SWT.ICON_INFORMATION);
if (pv == null) {
box.setText(Messages.Probe_infoTitle);
} else {
box.setText(Messages.Probe_infoChannelInformationFor + pv.getName());
}
box.setMessage(info.toString());
box.open();
}
/**
* Changes the PV currently displayed by probe.
*
* @param pvName
* the new pv name or null
*/
public void setPVName(ProcessVariable pvName) {
log.log(Level.FINE, "setPVName ({0})", pvName); //$NON-NLS-1$
        // If we are already scanning that pv, do nothing
        if (this.PVName != null && this.PVName.equals(pvName)) {
            // XXX Seems like something is clearing the combo-box,
            // reset to the actual pv...
            pvNameField.getCombo().setText(pvName.getName());
            return;
        }
// The PV is different, so disconnect and reset the visuals
if (pv != null) {
pv.close();
pv = null;
}
if (pvWriter != null) {
pvWriter.close();
pvWriter = null;
}
setValue(null);
setAlarm(null);
setTime(null);
setMeter(null, null);
        // If name is blank, update status to waiting and quit
        if ((pvName == null) || pvName.getName().equals("")) { //$NON-NLS-1$
            pvNameField.getCombo().setText(""); //$NON-NLS-1$
            setStatus(Messages.Probe_statusWaitingForPV);
            return;
        }
// If new name, add to history and connect
pvNameHelper.addEntry(pvName.getName());
// Update displayed name, unless it's already current
if (!(pvNameField.getCombo().getText().equals(pvName
.getName()))) {
pvNameField.getCombo().setText(pvName.getName());
}
setStatus(Messages.Probe_statusSearching);
pv = PVManager.read(channel(pvName.getName()))
.andNotify(onSWTThread()).atHz(25);
pv.addPVValueChangeListener(new PVValueChangeListener() {
@Override
public void pvValueChanged() {
Object obj = pv.getValue();
setLastError(pv.lastException());
setValue(valueFormat.format(obj));
setAlarm(Util.alarmOf(obj));
setTime(Util.timeOf(obj));
setMeter(Util.numericValueOf(obj), Util.displayOf(obj));
}
});
try {
pvWriter = PVManager.write(toChannel(pvName.getName())).async();
newValueField.setEditable(true);
} catch (Exception e) {
newValueField.setEditable(false);
}
this.PVName = pvName;
// If this is an instance of the multiple view, show the PV name
// as the title
if (MULTIPLE_VIEW_ID.equals(getSite().getId())) {
setPartName(pvName.getName());
}
}
/**
* Returns the currently displayed PV.
*
* @return pv name or null
*/
public ProcessVariable getPVName() {
return this.PVName;
}
/**
* Passing the focus request to the viewer's control.
*/
public void setFocus() {
}
public static String createNewInstance() {
++instance;
return Integer.toString(instance);
}
/**
     * Modifies the probe status.
*
* @param status new status to be displayed
*/
private void setStatus(String status) {
if (status == null) {
statusField.setText(Messages.Probe_statusWaitingForPV);
} else {
statusField.setText(status);
}
}
/**
* Displays the last error in the status.
*
* @param ex an exception
*/
private void setLastError(Exception ex) {
if (ex == null) {
statusField.setText(Messages.Probe_statusConnected);
} else {
statusField.setText(ex.getMessage());
}
}
/**
* Displays the new value.
*
* @param value a new value
*/
private void setValue(String value) {
if (value == null) {
valueField.setText(""); //$NON-NLS-1$
if (newValueField.isVisible() && !newValueField.isFocusControl()) {
newValueField.setText("");
}
} else {
valueField.setText(value);
if (newValueField.isVisible() && !newValueField.isFocusControl()) {
newValueField.setText(value);
}
}
}
/**
* Displays the new alarm.
*
* @param alarm a new alarm
*/
private void setAlarm(Alarm alarm) {
if (alarm == null) {
alarmField.setText(""); //$NON-NLS-1$
} else {
alarmField.setText(alarm.getAlarmSeverity() + " - " //$NON-NLS-1$
+ alarm.getAlarmStatus());
}
}
/**
* Displays the new time.
*
* @param time a new time
*/
private void setTime(Time time) {
if (time == null) {
timestampField.setText(""); //$NON-NLS-1$
} else {
timestampField.setText(timeFormat.format(time.getTimeStamp()));
}
}
/**
* Displays a new value in the meter.
*
* @param value the new value
* @param display the display information
*/
private void setMeter(Double value, Display display) {
if (value == null || display == null) {
meter.setEnabled(false);
// meter.setValue(0.0);
} else if (display.getUpperDisplayLimit() <= display
.getLowerDisplayLimit()) {
meter.setEnabled(false);
// meter.setValue(0.0);
} else {
meter.setEnabled(true);
meter.setLimits(display.getLowerDisplayLimit(),
display.getLowerAlarmLimit(),
display.getLowerWarningLimit(),
display.getUpperWarningLimit(),
display.getUpperAlarmLimit(),
display.getUpperDisplayLimit(), 1);
meter.setValue(value);
}
}
/**
* {@inheritDoc}
*/
@Override
public void dispose() {
if (pv != null)
pv.close();
super.dispose();
}
/**
* Open PVManagerProbe initialized to the given PV
*
* @param pvName the pv
* @return true if successful
*/
public static boolean activateWithPV(ProcessVariable pvName) {
try {
final IWorkbench workbench = PlatformUI.getWorkbench();
final IWorkbenchWindow window = workbench
.getActiveWorkbenchWindow();
final IWorkbenchPage page = window.getActivePage();
final PVManagerProbe probe = (PVManagerProbe) page.showView(
SINGLE_VIEW_ID, createNewInstance(),
IWorkbenchPage.VIEW_ACTIVATE);
probe.setPVName(pvName);
return true;
} catch (final Exception e) {
log.log(Level.WARNING, "Failed while opening probe", e);
}
return false;
}
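    // Hedged usage sketch (not part of the original class): a menu command or handler could open a
    // probe on a chosen channel like this; the PV name below is only an illustrative placeholder.
    //
    //   if (!PVManagerProbe.activateWithPV(new ProcessVariable("sim://ramp"))) {
    //       // the view could not be opened; the failure has already been logged above
    //   }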
}
|
package org.ovirt.engine.core.bll.storage.disk;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.ovirt.engine.core.bll.AbstractUserQueryTest;
import org.ovirt.engine.core.common.businessentities.VmDevice;
import org.ovirt.engine.core.common.businessentities.VmDeviceGeneralType;
import org.ovirt.engine.core.common.businessentities.VmDeviceId;
import org.ovirt.engine.core.common.businessentities.storage.Disk;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.core.common.businessentities.storage.ImageStatus;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.common.utils.VmDeviceType;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dao.DiskDao;
import org.ovirt.engine.core.dao.VmDeviceDao;
/**
* A test case for {@link GetAllDisksByVmIdQuery}.
* This test mocks away all the Daos, and just tests the flow of the query itself.
*/
public class GetAllDisksByVmIdQueryTest extends AbstractUserQueryTest<IdQueryParameters, GetAllDisksByVmIdQuery<IdQueryParameters>> {
private static final int NUM_DISKS_OF_EACH_KIND = 3;
/** The ID of the VM the disks belong to */
private Guid vmID;
/** A plugged disk for the test */
private DiskImage pluggedDisk;
/** An unplugged disk for the test */
private DiskImage unpluggedDisk;
/** A plugged disk snapshot for the test */
private DiskImage pluggedDiskSnapshot;
/** An unplugged disk snapshot for the test */
private DiskImage unpluggedDiskSnapshot;
@Before
@Override
public void setUp() throws Exception {
super.setUp();
vmID = Guid.newGuid();
Guid snapshotId = Guid.newGuid();
pluggedDisk = createDiskImage(true);
unpluggedDisk = createDiskImage(true);
pluggedDiskSnapshot = createDiskImage(false);
pluggedDiskSnapshot.setVmSnapshotId(snapshotId);
unpluggedDiskSnapshot = createDiskImage(false);
unpluggedDiskSnapshot.setVmSnapshotId(snapshotId);
setUpDaoMocks();
}
private void setUpDaoMocks() {
// Mock some devices
VmDevice pluggedDevice = createVMDevice(vmID, pluggedDisk);
VmDevice unpluggedDevice = createVMDevice(vmID, unpluggedDisk);
VmDevice pluggedSnapshotDevice = createVMDevice(vmID, pluggedDiskSnapshot);
VmDevice unpluggedSnapshotDevice = createVMDevice(vmID, unpluggedDiskSnapshot);
// Mock the Daos
DbFacade dbFacadeMock = getDbFacadeMockInstance();
// Disk Image Dao
List<Disk> returnArray = new ArrayList<>();
returnArray.add(pluggedDisk);
returnArray.add(unpluggedDisk);
returnArray.add(pluggedDiskSnapshot);
returnArray.add(unpluggedDiskSnapshot);
DiskDao diskDaoMock = mock(DiskDao.class);
when(dbFacadeMock.getDiskDao()).thenReturn(diskDaoMock);
when(diskDaoMock.getAllForVm(vmID, getUser().getId(), getQueryParameters().isFiltered())).thenReturn(returnArray);
// VM Device Dao
VmDeviceDao vmDeviceDaoMock = mock(VmDeviceDao.class);
when(dbFacadeMock.getVmDeviceDao()).thenReturn(vmDeviceDaoMock);
when(vmDeviceDaoMock.getVmDeviceByVmIdTypeAndDevice(vmID,
VmDeviceGeneralType.DISK,
VmDeviceType.DISK.getName(),
getUser().getId(),
getQueryParameters().isFiltered())).
thenReturn(Arrays.asList(pluggedDevice, unpluggedDevice, pluggedSnapshotDevice, unpluggedSnapshotDevice));
// Snapshots
doReturn(new ArrayList<>(Collections.nCopies(NUM_DISKS_OF_EACH_KIND,
createDiskSnapshot(pluggedDisk.getId())))).when(getQuery()).getAllImageSnapshots(pluggedDisk);
doReturn(Collections.nCopies(NUM_DISKS_OF_EACH_KIND, createDiskSnapshot(unpluggedDisk.getId()))).when(getQuery())
.getAllImageSnapshots(unpluggedDisk);
doReturn(new ArrayList<>(Collections.nCopies(NUM_DISKS_OF_EACH_KIND,
createDiskSnapshot(pluggedDiskSnapshot.getId())))).when(getQuery()).getAllImageSnapshots(pluggedDiskSnapshot);
doReturn(Collections.nCopies(NUM_DISKS_OF_EACH_KIND, createDiskSnapshot(unpluggedDiskSnapshot.getId()))).when(getQuery())
.getAllImageSnapshots(unpluggedDiskSnapshot);
}
private VmDevice createVMDevice(Guid vmID, DiskImage disk) {
return new VmDevice(new VmDeviceId(disk.getId(), vmID),
VmDeviceGeneralType.DISK,
VmDeviceType.DISK.getName(),
"",
1,
null,
true,
true,
true,
"",
null,
disk.getVmSnapshotId(),
null);
}
private DiskImage createDiskImage(boolean active) {
DiskImage di = new DiskImage();
di.setId(Guid.newGuid());
di.setImageId(Guid.newGuid());
di.setParentId(Guid.newGuid());
di.setImageStatus(ImageStatus.OK);
di.setActive(active);
return di;
}
private DiskImage createDiskSnapshot(Guid diskId) {
DiskImage di = new DiskImage();
di.setActive(false);
di.setId(diskId);
di.setImageId(Guid.newGuid());
di.setParentId(Guid.newGuid());
di.setImageStatus(ImageStatus.OK);
return di;
}
@Test
public void testExecuteQueryCommand() {
        IdQueryParameters params = getQueryParameters();
        when(params.getId()).thenReturn(vmID);
getQuery().executeQueryCommand();
@SuppressWarnings("unchecked")
List<DiskImage> disks = (List<DiskImage>) getQuery().getQueryReturnValue().getReturnValue();
// Assert the correct disks are returned
assertTrue("plugged disk should be in the return value", disks.contains(pluggedDisk));
assertTrue("unplugged disk should be in the return value", disks.contains(unpluggedDisk));
assertTrue("plugged disk snapshots should be in the return value", disks.contains(pluggedDiskSnapshot));
assertTrue("unplugged disk snapshots should be in the return value", disks.contains(unpluggedDiskSnapshot));
// Assert the disks have the correct snapshots
assertCorrectSnapshots(pluggedDisk);
assertCorrectSnapshots(unpluggedDisk);
}
/**
* Assert the given disk contains {@link #NUM_DISKS_OF_EACH_KIND} copies of itself as snapshot (as should have been returned by the Dao)
* @param disk The disk to check
*/
private static void assertCorrectSnapshots(DiskImage disk) {
for (int i = 0; i < NUM_DISKS_OF_EACH_KIND; ++i) {
assertEquals("Wrong snapshot " + i + " for disk ", disk.getId(), disk.getSnapshots().get(i).getId());
}
}
}
|
package com.mesosphere.dcos.cassandra.scheduler.offer;
import com.mesosphere.dcos.cassandra.scheduler.config.CuratorFrameworkConfig;
import com.mesosphere.dcos.cassandra.scheduler.config.IdentityManager;
import com.mesosphere.dcos.cassandra.scheduler.config.MesosConfig;
import com.mesosphere.dcos.cassandra.scheduler.tasks.CassandraTasks;
import org.apache.curator.RetryPolicy;
import org.apache.curator.retry.BoundedExponentialBackoffRetry;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.curator.retry.RetryForever;
import org.apache.curator.retry.RetryUntilElapsed;
import org.apache.mesos.Protos;
import org.apache.mesos.Protos.TaskInfo;
import org.apache.mesos.offer.OperationRecorder;
import org.apache.mesos.state.CuratorStateStore;
import org.apache.mesos.state.StateStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
public class PersistentOperationRecorder implements OperationRecorder {
private final static Logger LOGGER = LoggerFactory.getLogger(
PersistentOperationRecorder.class);
private IdentityManager identityManager;
private CassandraTasks cassandraTasks;
private StateStore stateStore;
public PersistentOperationRecorder(
IdentityManager identityManager,
CuratorFrameworkConfig curatorConfig,
CassandraTasks cassandraTasks) {
RetryPolicy retryPolicy =
(curatorConfig.getOperationTimeout().isPresent()) ?
new RetryUntilElapsed(
curatorConfig.getOperationTimeoutMs()
.get()
.intValue()
, (int) curatorConfig.getBackoffMs()) :
new RetryForever((int) curatorConfig.getBackoffMs());
this.identityManager = identityManager;
this.cassandraTasks = cassandraTasks;
this.stateStore = new CuratorStateStore(
"/cassandra/" + identityManager.get().getName(),
curatorConfig.getServers(),
retryPolicy);
}
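    // Note on the retry policy chosen above (illustrative values, not taken from any real configuration):
    //   new RetryUntilElapsed(10_000, 1_000)  -> retry roughly every second, give up after ~10 s
    //   new RetryForever(1_000)               -> retry roughly every second, indefinitely
    // i.e. a configured operation timeout bounds how long Curator keeps retrying; without one it
    // retries forever with the configured backoff.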
public void record(
Protos.Offer.Operation operation,
Protos.Offer offer) throws Exception {
if (operation.getType() == Protos.Offer.Operation.Type.LAUNCH) {
LOGGER.info("Persisting Launch Operation: " + operation);
for (TaskInfo taskInfo : operation.getLaunch().getTaskInfosList()) {
LOGGER.debug("Recording operation: {} for task: {}", operation, taskInfo);
try {
stateStore.storeTasks(Arrays.asList(taskInfo), taskInfo.getExecutor().getExecutorId());
cassandraTasks.update(taskInfo, offer);
} catch (Exception e) {
LOGGER.error("Error updating task in recorder with exception: ", e);
throw e;
}
}
}
}
}
|
package org.phenotips.data.internal.controller;
import org.phenotips.data.DictionaryPatientData;
import org.phenotips.data.Patient;
import org.phenotips.data.PatientData;
import org.phenotips.data.PatientDataController;
import org.phenotips.data.VocabularyProperty;
import org.phenotips.data.internal.AbstractPhenoTipsVocabularyProperty;
import org.xwiki.bridge.DocumentAccessBridge;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.ObjectPropertyReference;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.objects.BaseProperty;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
/**
 * Base class for handling data in different types of objects (String, List, etc.) while preserving the object type.
 * Has custom functions for converting values to booleans and vocabulary codes to human readable labels.
*
* @param <T> the type of data being managed by this component, usually {@code String}, but other types are possible,
* even more complex types
* @version $Id$
* @since 1.0RC1
*/
public abstract class AbstractComplexController<T> implements PatientDataController<T>
{
/** Provides access to the underlying data storage. */
@Inject
protected DocumentAccessBridge documentAccessBridge;
/** Logging helper object. */
@Inject
private Logger logger;
@Override
@SuppressWarnings("unchecked")
public PatientData<T> load(Patient patient)
{
try {
XWikiDocument doc = (XWikiDocument) this.documentAccessBridge.getDocument(patient.getDocument());
BaseObject data = doc.getXObject(getXClassReference());
if (data == null) {
throw new NullPointerException(ERROR_MESSAGE_NO_PATIENT_CLASS);
}
Map<String, T> result = new LinkedHashMap<String, T>();
for (String propertyName : getProperties()) {
BaseProperty<ObjectPropertyReference> field =
(BaseProperty<ObjectPropertyReference>) data.getField(propertyName);
if (field != null) {
Object propertyValue = field.getValue();
/* If the controller only works with codes, store the Vocabulary Instances rather than Strings */
if (getCodeFields().contains(propertyName) && isCodeFieldsOnly()) {
List<VocabularyProperty> propertyValuesList = new LinkedList<>();
List<String> terms = (List<String>) propertyValue;
for (String termId : terms) {
propertyValuesList.add(new QuickVocabularyProperty(termId));
}
propertyValue = propertyValuesList;
}
result.put(propertyName, (T) propertyValue);
}
}
return new DictionaryPatientData<>(getName(), result);
} catch (Exception e) {
this.logger.error("Could not find requested document or some unforeseen"
+ " error has occurred during controller loading ", e.getMessage());
}
return null;
}
@Override
public void writeJSON(Patient patient, JSONObject json, Collection<String> selectedFieldNames)
{
PatientData<T> data = patient.getData(getName());
if (data == null) {
return;
}
Iterator<Map.Entry<String, T>> iterator = data.dictionaryIterator();
if (iterator == null || !iterator.hasNext()) {
return;
}
JSONObject container = json.getJSONObject(getJsonPropertyName());
while (iterator.hasNext()) {
Map.Entry<String, T> item = iterator.next();
if (selectedFieldNames == null || selectedFieldNames.contains(item.getKey())) {
if (container == null || container.isNullObject()) {
// put() is placed here because we want to create the property iff at least one field is set/enabled
json.put(getJsonPropertyName(), new JSONObject());
container = json.getJSONObject(getJsonPropertyName());
}
String itemKey = item.getKey();
container.put(itemKey, format(itemKey, item.getValue()));
}
}
}
/** @return list of fields which should be resolved to booleans */
protected abstract List<String> getBooleanFields();
/**
* @return list of fields which contain HPO codes, and therefore additional data can be obtained, such as human
* readable name
*/
protected abstract List<String> getCodeFields();
/**
* In case all fields are code fields, then the controller can store data in memory as vocabulary objects rather
* than strings.
*
* @return true if all fields contain HPO codes
*/
protected boolean isCodeFieldsOnly()
{
return false;
}
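    /*
     * Illustrative subclass sketch (not part of this class): a controller exposing one boolean
     * field and one vocabulary-code field would typically implement the abstract getters described
     * above roughly as follows. The controller name and property names are hypothetical.
     *
     *   public class ExampleController extends AbstractComplexController<String>
     *   {
     *       @Override
     *       public String getName() { return "example"; }
     *
     *       @Override
     *       protected String getJsonPropertyName() { return "example"; }
     *
     *       @Override
     *       protected List<String> getProperties() { return Arrays.asList("affected", "phenotype_code"); }
     *
     *       @Override
     *       protected List<String> getBooleanFields() { return Arrays.asList("affected"); }
     *
     *       @Override
     *       protected List<String> getCodeFields() { return Arrays.asList("phenotype_code"); }
     *   }
     */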
/**
     * Checks if the value needs to be formatted and then calls the appropriate function.
*
* @param key the key under which the value will be stored in JSON
* @param value the value which possibly needs to be formatted
* @return the formatted object or the original value
*/
@SuppressWarnings("unchecked")
private Object format(String key, Object value)
{
if (value == null) {
return null;
}
if (getBooleanFields().contains(key)) {
return booleanConvert(value.toString());
} else if (getCodeFields().contains(key)) {
return codeToHumanReadable((List<T>) value);
} else {
return value;
}
}
private Boolean booleanConvert(String integerValue)
{
if (StringUtils.equals("0", integerValue)) {
return false;
} else if (StringUtils.equals("1", integerValue)) {
return true;
} else {
return null;
}
}
private JSONArray codeToHumanReadable(List<T> codes)
{
JSONArray labeledList = new JSONArray();
for (T code : codes) {
QuickVocabularyProperty term;
if (code instanceof QuickVocabularyProperty) {
term = (QuickVocabularyProperty) code;
} else {
term = new QuickVocabularyProperty(code.toString());
}
labeledList.add(term.toJSON());
}
return labeledList;
}
@Override
public void writeJSON(Patient patient, JSONObject json)
{
writeJSON(patient, json, null);
}
@Override
public void save(Patient patient)
{
throw new UnsupportedOperationException();
}
@Override
public PatientData<T> readJSON(JSONObject json)
{
throw new UnsupportedOperationException();
}
protected abstract List<String> getProperties();
protected abstract String getJsonPropertyName();
/**
* The XClass used for storing data managed by this controller. By default, data is stored in the main
* {@code PhenoTips.PatientClass} object that defines the patient record. Override this method if a different type
* of XObject is used.
*
* @return a local reference (without the wiki reference) pointing to the XDocument containing the target XClass
* @since 1.2RC1
*/
protected EntityReference getXClassReference()
{
return Patient.CLASS_REFERENCE;
}
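    // Hedged override sketch: a controller whose data lives in a different XClass (here a purely
    // hypothetical "PhenoTips.ExampleClass") would override the method roughly like this, using
    // org.xwiki.model.EntityType for the reference types:
    //
    //   @Override
    //   protected EntityReference getXClassReference()
    //   {
    //       return new EntityReference("ExampleClass", EntityType.DOCUMENT,
    //           new EntityReference("PhenoTips", EntityType.SPACE));
    //   }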
/**
     * There exists no class currently that would be able to convert a vocabulary code into a human readable format given
* only a code string. Considering that there is a need for such functionality, there are 3 options: copy the code
* that performs the function needed into the controller, create a class extending
* {@link org.phenotips.data.internal.AbstractPhenoTipsVocabularyProperty} in a separate file, or create such class
     * here. Given the fact that the {@link org.phenotips.data.internal.AbstractPhenoTipsVocabularyProperty} is abstract
* only by having a protected constructor, which fully satisfies the needed functionality, it makes the most sense
* to put {@link QuickVocabularyProperty} here.
*/
protected static final class QuickVocabularyProperty extends AbstractPhenoTipsVocabularyProperty
{
public QuickVocabularyProperty(String id)
{
super(id);
}
}
}
|
package org.scijava.ui.swing.console;
import java.util.concurrent.Future;
import org.scijava.Context;
import org.scijava.thread.ThreadService;
import org.scijava.ui.UIService;
/**
* A behavioral test and benchmark of {@link SwingConsolePane}.
*
* @author Curtis Rueden
*/
public class SwingConsolePaneBenchmark {
// -- Main method --
/** A manual test drive of the Swing UI's console pane. */
public static void main(final String[] args) throws Exception {
final Context context = new Context();
context.service(UIService.class).showUI();
System.out.print("Hello ");
System.err.println("world!");
final int numThreads = 50;
final int numOperations = 20;
final String[] streamLabels =
{ ": {ERR} iteration #", ": {OUT} iteration #" };
final String outLabel = streamLabels[1];
final String errLabel = streamLabels[0];
final int initialDelay = 500;
Thread.sleep(initialDelay);
final long start = System.currentTimeMillis();
// emit a bunch of output on multiple threads concurrently
final ThreadService threadService = context.service(ThreadService.class);
final Future<?>[] f = new Future<?>[numThreads];
for (int t = 0; t < numThreads; t++) {
final int tNo = t;
f[t] = threadService.run(new Runnable() {
@Override
public void run() {
for (int i = 0; i < numOperations; i++) {
System.out.print(str(tNo, outLabel, i) + "\n");
System.err.print(str(tNo, errLabel, i) + "\n");
}
}
});
}
// wait for all output threads to finish
for (int t = 0; t < numThreads; t++) {
f[t].get();
}
System.err.print("Goodbye ");
System.out.println("cruel world!");
final long end = System.currentTimeMillis();
System.out.println();
System.out.println("Benchmark took " + (end - start) + " ms");
}
    // -- Helper methods --
private static String str(final int t, final String separator, final int i) {
return pad(t) + separator + pad(i);
}
private static String pad(final int n) {
return n < 10 ? "0" + n : "" + n;
}
}
|
package org.phenotips.data.rest.internal;
import static org.mockito.Mockito.*;
import com.xpn.xwiki.XWikiContext;
import net.sf.json.JSONObject;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.phenotips.data.PatientRepository;
import org.phenotips.data.rest.DomainObjectFactory;
import org.phenotips.data.rest.PatientsResource;
import org.phenotips.data.Patient;
import org.phenotips.data.rest.model.PatientSummary;
import org.phenotips.data.rest.model.Patients;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.component.util.ReflectionUtils;
import org.xwiki.context.Execution;
import org.xwiki.context.ExecutionContext;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.EntityReferenceResolver;
import org.xwiki.query.Query;
import org.xwiki.query.QueryException;
import org.xwiki.query.QueryManager;
import org.xwiki.query.internal.DefaultQuery;
import org.xwiki.security.authorization.AuthorizationManager;
import org.xwiki.security.authorization.Right;
import org.xwiki.test.mockito.MockitoComponentMockingRule;
import org.xwiki.users.User;
import org.xwiki.users.UserManager;
import javax.inject.Provider;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
public class DefaultPatientsResourceImplTest {
@Rule
public MockitoComponentMockingRule<PatientsResource> mocker =
new MockitoComponentMockingRule<PatientsResource>(DefaultPatientsResourceImpl.class);
@Mock
private User currentUser;
@Mock
private Patient patient;
@Mock
private Logger logger;
@Mock
private UriInfo uriInfo;
private DomainObjectFactory factory;
private PatientRepository repository;
private QueryManager queries;
private AuthorizationManager access;
private UserManager users;
private DocumentReference userProfileDocument;
private URI uri;
private DefaultPatientsResourceImpl patientsResource;
private XWikiContext context;
@Before
public void setUp() throws ComponentLookupException, URISyntaxException {
MockitoAnnotations.initMocks(this);
Execution execution = mock(Execution.class);
ExecutionContext executionContext = mock(ExecutionContext.class);
ComponentManager compManager = this.mocker.getInstance(ComponentManager.class, "context");
Provider<XWikiContext> provider = this.mocker.getInstance(XWikiContext.TYPE_PROVIDER);
this.context = provider.get();
when(compManager.getInstance(Execution.class)).thenReturn(execution);
doReturn(executionContext).when(execution).getContext();
doReturn(this.context).when(executionContext).getProperty("xwikicontext");
this.repository = this.mocker.getInstance(PatientRepository.class);
this.users = this.mocker.getInstance(UserManager.class);
this.access = this.mocker.getInstance(AuthorizationManager.class);
this.patientsResource = (DefaultPatientsResourceImpl)this.mocker.getComponentUnderTest();
this.logger = this.mocker.getMockedLogger();
this.queries = this.mocker.getInstance(QueryManager.class);
this.uri = new URI("http://uri");
this.userProfileDocument = new DocumentReference("wiki", "user", "00000001");
this.factory = this.mocker.getInstance(DomainObjectFactory.class);
doReturn(this.uri).when(this.uriInfo).getBaseUri();
doReturn(this.uri).when(this.uriInfo).getRequestUri();
ReflectionUtils.setFieldValue(this.patientsResource, "uriInfo", this.uriInfo);
doReturn("P00000001").when(this.patient).getId();
doReturn(this.currentUser).when(this.users).getCurrentUser();
doReturn(this.userProfileDocument).when(this.currentUser).getProfileDocument();
}
@Test
public void addPatientUserDoesNotHaveAccess() {
WebApplicationException exception = null;
doReturn(false).when(this.access).hasAccess(eq(Right.EDIT), any(DocumentReference.class), any(EntityReference.class));
try {
Response response = this.patientsResource.addPatient("");
}
catch (WebApplicationException ex){
exception = ex;
}
Assert.assertNotNull(exception);
Assert.assertEquals(Response.Status.UNAUTHORIZED.getStatusCode(), exception.getResponse().getStatus());
}
@Test
public void addEmptyPatient() {
doReturn(true).when(this.access).hasAccess(eq(Right.EDIT), any(DocumentReference.class), any(EntityReference.class));
doReturn(this.patient).when(this.repository).createNewPatient();
Response response = this.patientsResource.addPatient(null);
Assert.assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus());
verify(this.logger).debug("Importing new patient from JSON via REST: {}", (String) null);
}
@Test
public void creatingPatientFails() {
JSONObject json = new JSONObject();
Exception exception = new NullPointerException();
doReturn(true).when(this.access).hasAccess(eq(Right.EDIT), any(DocumentReference.class), any(EntityReference.class));
doThrow(exception).when(this.repository).createNewPatient();
Response response = this.patientsResource.addPatient(json.toString());
Assert.assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), response.getStatus());
verify(this.logger).error("Could not process remote matching request: {}", exception.getMessage(), exception);
}
@Test
public void addPatientAsJSON() {
doReturn(true).when(this.access).hasAccess(eq(Right.EDIT), any(DocumentReference.class), any(EntityReference.class));
doReturn(this.patient).when(this.repository).createNewPatient();
JSONObject jsonPatient = new JSONObject();
Response response = this.patientsResource.addPatient(jsonPatient.toString());
Assert.assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus());
verify(this.logger).debug("Importing new patient from JSON via REST: {}", jsonPatient.toString());
}
@Test
public void listPatientsNullOrderField() throws QueryException {
Query query = mock(DefaultQuery.class);
doReturn(query).when(this.queries).createQuery(anyString(), anyString());
doReturn(query).when(query).bindValue(anyString(), anyString());
doReturn(new ArrayList<Object[]>()).when(query).execute();
Patients result = this.patientsResource.listPatients(0, 30, null, "asc");
verify(this.queries).createQuery("select doc.fullName, p.external_id, doc.creator, doc.creationDate, doc.version, doc.author, doc.date"
+ " from Document doc, doc.object(PhenoTips.PatientClass) p where doc.name <> :t order by "
+ "doc.name" + " asc", "xwql");
}
@Test
public void listPatientsNullOrder() throws QueryException {
Query query = mock(DefaultQuery.class);
doReturn(query).when(this.queries).createQuery(anyString(), anyString());
doReturn(query).when(query).bindValue(anyString(), anyString());
doReturn(new ArrayList<Object[]>()).when(query).execute();
Patients result = this.patientsResource.listPatients(0, 30, "id", null);
verify(this.queries).createQuery("select doc.fullName, p.external_id, doc.creator, doc.creationDate, doc.version, doc.author, doc.date"
+ " from Document doc, doc.object(PhenoTips.PatientClass) p where doc.name <> :t order by "
+ "doc.name" + " asc", "xwql");
}
@Test
public void listPatientsNonDefaultBehaviour() throws QueryException {
Query query = mock(DefaultQuery.class);
doReturn(query).when(this.queries).createQuery(anyString(), anyString());
doReturn(query).when(query).bindValue(anyString(), anyString());
doReturn(new ArrayList<Object[]>()).when(query).execute();
Patients result = this.patientsResource.listPatients(0, 30, "eid", "desc");
verify(this.queries).createQuery("select doc.fullName, p.external_id, doc.creator, doc.creationDate, doc.version, doc.author, doc.date"
+ " from Document doc, doc.object(PhenoTips.PatientClass) p where doc.name <> :t order by "
+ "p.external_id" + " desc", "xwql");
}
@Test
public void listPatientsNoUserAccess() throws QueryException {
Object[] patientSummaryData = new Object[0];
List<Object[]> patientList = new ArrayList<Object[]>();
patientList.add(patientSummaryData);
Query query = mock(DefaultQuery.class);
doReturn(query).when(this.queries).createQuery(anyString(), anyString());
doReturn(query).when(query).bindValue(anyString(), anyString());
doReturn(patientList).when(query).execute();
doReturn(false).when(this.access).hasAccess(eq(Right.VIEW), any(DocumentReference.class), any(EntityReference.class));
Patients result = this.patientsResource.listPatients(0, 30, "id", "asc");
verify(this.queries).createQuery("select doc.fullName, p.external_id, doc.creator, doc.creationDate, doc.version, doc.author, doc.date"
+ " from Document doc, doc.object(PhenoTips.PatientClass) p where doc.name <> :t order by "
+ "doc.name" + " asc", "xwql");
Assert.assertTrue(result.getPatientSummaries().isEmpty());
}
@Test
public void listPatientsUserHasAccess() throws QueryException {
Object[] patientSummaryData = new Object[0];
List<Object[]> patientList = new ArrayList<Object[]>();
patientList.add(patientSummaryData);
Query query = mock(DefaultQuery.class);
doReturn(query).when(this.queries).createQuery(anyString(), anyString());
doReturn(query).when(query).bindValue(anyString(), anyString());
doReturn(patientList).when(query).execute();
doReturn(true).when(this.access).hasAccess(eq(Right.VIEW), any(DocumentReference.class), any(EntityReference.class));
doReturn(new PatientSummary()).when(this.factory).createPatientSummary(any(Object[].class), eq(this.uriInfo));
Patients result = this.patientsResource.listPatients(0, 30, "id", "asc");
verify(this.queries).createQuery("select doc.fullName, p.external_id, doc.creator, doc.creationDate, doc.version, doc.author, doc.date"
+ " from Document doc, doc.object(PhenoTips.PatientClass) p where doc.name <> :t order by "
+ "doc.name" + " asc", "xwql");
Assert.assertFalse(result.getPatientSummaries().isEmpty());
}
@Test
public void listPatientFailureHandling() throws QueryException {
Query query = mock(DefaultQuery.class);
WebApplicationException exception = null;
QueryException queryException= new QueryException("query.execute() failed", query, new Exception());
doReturn(query).when(this.queries).createQuery(anyString(), anyString());
doReturn(query).when(query).bindValue(anyString(), anyString());
doThrow(queryException).when(query).execute();
try {
Patients result = this.patientsResource.listPatients(0, 30, "id", "asc");
}
catch (WebApplicationException ex){
exception = ex;
}
Assert.assertNotNull(exception);
Assert.assertEquals(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), exception.getResponse().getStatus());
verify(this.logger).error("Failed to list patients: {}", queryException.getMessage(), queryException);
}
}
|
package uk.ac.kent.dover.fastGraph.graphSimilarity;
import java.util.*;
import edu.isi.karma.modeling.research.graphmatching.algorithms.VJAccess;
import uk.ac.kent.dover.fastGraph.*;
/**
* A similarity method for graphs, using random trails. To achieve symmetry use the same object
* in both g1 g2 and g2 g1 directions, as the same random seed will be applied.
*
* @author Peter Rodgers
*
*/
public class RandomTrailSimilarity extends GraphSimilarity {
private boolean nodeLabels;
private boolean directed;
private long randomSeed;
private int trailLength = 5; // maximum length for each trail
private int trailsPerNode = 10; // number of random trails found for each node
private double[][] costMatrix;
private int[] mapping;
public static void main(String [] args) {
Debugger.enabled = false;
// randomIncreasing();
randomIsomorphismComparison();
try {
int i = 3855;
double similarity1,similarity2;
LinkedList<NodeStructure> addNodes;
LinkedList<EdgeStructure> addEdges;
FastGraph g1,g2;
RandomTrailSimilarity rts;
addNodes = new LinkedList<NodeStructure>();
addNodes.add(new NodeStructure(0,"A", 1, (byte)1, (byte)0));
addNodes.add(new NodeStructure(1,"B", 1, (byte)1, (byte)0));
addEdges = new LinkedList<EdgeStructure>();
addEdges.add(new EdgeStructure(0,"es0", 1, (byte)0, (byte)0, 1, 1));
addEdges.add(new EdgeStructure(1,"es1", 1, (byte)0, (byte)0, 0, 0));
addEdges.add(new EdgeStructure(2,"es2", 1, (byte)0, (byte)0, 0, 1));
g1 = FastGraph.structureFactory("g2",(byte)0,addNodes,addEdges,false);
addNodes = new LinkedList<NodeStructure>();
addNodes.add(new NodeStructure(0,"A", 1, (byte)1, (byte)0));
addNodes.add(new NodeStructure(1,"B", 1, (byte)1, (byte)0));
addEdges = new LinkedList<EdgeStructure>();
addEdges.add(new EdgeStructure(0,"es0", 1, (byte)0, (byte)0, 1, 0));
addEdges.add(new EdgeStructure(1,"es1", 1, (byte)0, (byte)0, 0, 1));
addEdges.add(new EdgeStructure(2,"es2", 1, (byte)0, (byte)0, 0, 1));
g2 = FastGraph.structureFactory("g2",(byte)0,addNodes,addEdges,false);
rts = new RandomTrailSimilarity(false, false, 999);
rts.setTrailLength(4);
rts.setTrailsPerNode(10);
similarity1 = rts.similarity(g1, g2);
similarity2 = rts.similarity(g2, g1);
System.out.println(g1);
System.out.println(g2);
System.out.println("similarity1 "+similarity1+" similarity2 "+similarity2);
} catch (Exception e) {
e.printStackTrace();
}
}
private static void randomIsomorphismComparison() {
try {
RandomTrailSimilarity rts;
double similarity1,similarity2;
boolean isomorphic,directed,labels;
FastGraph g1,g2;
long g1Seed,g2Seed,rtsSeed;
int trailLength = 5;
int trailsPerNode = 10;
int nodes = 10;
int edges = 20;
int i = 0;
while(true) {
boolean fail = false;
i++;
if(i%1000 == 0) {
System.out.println("iteration: "+i);
}
g1Seed = i*111;
g2Seed = i*333;
rtsSeed = i*555;
directed = true;
labels = true;
g1 = FastGraph.randomGraphFactory(nodes, edges, g1Seed, false);
g2 = FastGraph.randomGraphFactory(nodes, edges, g2Seed, false);
// g2 = ExactIsomorphism.generateRandomIsomorphicGraph(g1, g2Seed, false);
isomorphic = ExactIsomorphism.isomorphic(g1, g2, directed,labels);
rts = new RandomTrailSimilarity(directed,labels,rtsSeed);
rts.setTrailLength(trailLength);
rts.setTrailsPerNode(trailsPerNode);
similarity1 = rts.similarity(g1, g2);
similarity2 = rts.similarity(g2, g1);
if(Math.abs(similarity1-similarity2) > 0.001) {
System.out.println("similarity1 "+similarity1+" similarity2 "+similarity2+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 < 0.001 && !isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 > 0.001 && isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
directed = false;
labels = true;
g1 = FastGraph.randomGraphFactory(nodes, edges, g1Seed, false);
g2 = FastGraph.randomGraphFactory(nodes, edges, g2Seed, false);
isomorphic = ExactIsomorphism.isomorphic(g1, g2, directed,labels);
rts = new RandomTrailSimilarity(directed,labels,rtsSeed);
rts.setTrailLength(trailLength);
rts.setTrailsPerNode(trailsPerNode);
similarity1 = rts.similarity(g1, g2);
similarity2 = rts.similarity(g2, g1);
if(Math.abs(similarity1-similarity2) > 0.001) {
System.out.println("similarity1 "+similarity1+" similarity2 "+similarity2+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 < 0.001 && !isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 > 0.001 && isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
directed = true;
labels = false;
g1 = FastGraph.randomGraphFactory(nodes, edges, g1Seed, false);
g2 = FastGraph.randomGraphFactory(nodes, edges, g2Seed, false);
isomorphic = ExactIsomorphism.isomorphic(g1, g2, directed,labels);
rts = new RandomTrailSimilarity(directed,labels,rtsSeed);
rts.setTrailLength(trailLength);
rts.setTrailsPerNode(trailsPerNode);
similarity1 = rts.similarity(g1, g2);
similarity2 = rts.similarity(g2, g1);
if(Math.abs(similarity1-similarity2) > 0.001) {
System.out.println("similarity1 "+similarity1+" similarity2 "+similarity2+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 < 0.001 && !isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 > 0.001 && isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
directed = false;
labels = false;
g1 = FastGraph.randomGraphFactory(nodes, edges, g1Seed, false);
g2 = FastGraph.randomGraphFactory(nodes, edges, g2Seed, false);
isomorphic = ExactIsomorphism.isomorphic(g1, g2, directed,labels);
rts = new RandomTrailSimilarity(directed,labels,rtsSeed);
rts.setTrailLength(trailLength);
rts.setTrailsPerNode(trailsPerNode);
similarity1 = rts.similarity(g1, g2);
similarity2 = rts.similarity(g2, g1);
if(Math.abs(similarity1-similarity2) > 0.001) {
System.out.println("similarity1 "+similarity1+" similarity2 "+similarity2+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 < 0.001 && !isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
if(similarity1 > 0.001 && isomorphic) {
System.out.println("similarity1 "+similarity1+" isomorphic "+isomorphic+" "+ directed+" "+labels);
fail = true;
}
if(fail) {
System.out.println("Failed on i "+i);
// System.out.println(g1);
// System.out.println(g2);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
private static void randomIncreasing() {
try {
RandomTrailSimilarity rts;
double similarity;
FastGraph g1,g2;
int nodes = 50;
int edges = 500;
long time;
while(true) {
System.out.println("NODES "+nodes+" EDGES "+edges);
g1 = FastGraph.randomGraphFactory(nodes, edges, false);
g2 = ExactIsomorphism.generateRandomIsomorphicGraph(g1, System.currentTimeMillis(), false);
time = System.currentTimeMillis();
if(ExactIsomorphism.isomorphic(g1, g2, true)) {
System.out.println("Ids changed directed isomorphic nodes "+nodes+" edges "+edges+" time "+(System.currentTimeMillis()-time)/1000.0);
} else {
System.out.println("NOT ISOMORPHIC Ids changed directed isomorphic nodes "+nodes+" edges "+edges+" time "+(System.currentTimeMillis()-time)/1000.0+" saving");
g1.saveBuffers(".", System.currentTimeMillis()+"Q");
g2.saveBuffers(".", System.currentTimeMillis()+"R");
}
g2 = FastGraph.randomGraphFactory(nodes, edges, false);
time = System.currentTimeMillis();
if(ExactIsomorphism.isomorphic(g1, g2, false)) {
System.out.println("Random undirected isomorphic nodes "+nodes+" edges "+edges+" time "+(System.currentTimeMillis()-time)/1000.0);
} else {
System.out.println("Random undirected not isomorphic nodes "+nodes+" edges "+edges+" time "+(System.currentTimeMillis()-time)/1000.0);
}
time = System.currentTimeMillis();
if(ExactIsomorphism.isomorphic(g1, g2, true)) {
System.out.println("Random directed isomorphic nodes "+nodes+" edges "+edges+" time "+(System.currentTimeMillis()-time)/1000.0);
} else {
System.out.println("Random directed not isomorphic nodes "+nodes+" edges "+edges+" time "+(System.currentTimeMillis()-time)/1000.0);
}
time = System.currentTimeMillis();
rts = new RandomTrailSimilarity(true,true,System.currentTimeMillis());
rts.setTrailLength(4);
rts.setTrailsPerNode(10);
similarity = rts.similarity(g1, g2);
System.out.println("similarity directed labels "+similarity+" time "+((System.currentTimeMillis()-time)/1000.0)+" seconds");
time = System.currentTimeMillis();
rts = new RandomTrailSimilarity(true,false,System.currentTimeMillis());
rts.setTrailLength(4);
rts.setTrailsPerNode(10);
similarity = rts.similarity(g1, g2);
System.out.println("similarity directed no labels "+similarity+" time "+((System.currentTimeMillis()-time)/1000.0)+" seconds");
time = System.currentTimeMillis();
rts = new RandomTrailSimilarity(false,false,System.currentTimeMillis());
rts.setTrailLength(4);
rts.setTrailsPerNode(10);
similarity = rts.similarity(g1, g2);
System.out.println("similarity undirected no labels "+similarity+" time "+((System.currentTimeMillis()-time)/1000.0)+" seconds");
nodes *= 1.1;
edges *= 1.1;
}
} catch (Exception e) {
e.printStackTrace();
}
}
/**
     * Defaults to treating the graph as undirected, with no node label comparison and a random
     * seed taken from the current time.
     *
     * @throws FastGraphException should not be thrown by this constructor
*/
public RandomTrailSimilarity() throws FastGraphException {
this.directed = false;
this.nodeLabels = false;
this.randomSeed = System.currentTimeMillis();
}
/**
* Constructor for specifying directed and use of node labels.
*
* @param directed true if the graph is treated as directed, false if undirected
* @param nodeLabels true if node label operations should be considered, false if they are ignored
* @param randomSeed set to System.currentTimeMillis() for true random
*/
public RandomTrailSimilarity(boolean directed, boolean nodeLabels, long randomSeed) {
this.directed = directed;
this.nodeLabels = nodeLabels;
this.randomSeed = randomSeed;
}
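    // Hedged usage sketch: to obtain a symmetric score, reuse a single instance (and therefore a
    // single seed) in both directions, as the class comment recommends. Graphs and parameter
    // values here are placeholders only.
    //
    //   RandomTrailSimilarity rts = new RandomTrailSimilarity(true, true, 12345L);
    //   rts.setTrailLength(5);
    //   rts.setTrailsPerNode(10);
    //   double s12 = rts.similarity(g1, g2);
    //   double s21 = rts.similarity(g2, g1); // expected to match s12 up to rounding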
/**
* gets the maximum trail length for each random trail
* @return the maximum trail length
*/
public int getTrailLength() {return trailLength;}
/**
* gets the numbers of trails found for each node
* @return the number of trails
*/
public int getTrailsPerNode() {return trailsPerNode;}
/**
*
* @return the costMatrix, set after similarity is called.
*/
public double[][] getCostMatrix() {return costMatrix;}
/**
*
* @return the g1 nodes to g2 nodes mapping, set after similarity is called.
*/
public int[] getMapping() {return mapping;}
/**
* alters the maximum trail length for each random trail.
* @param trailLength the length of trails
*/
public void setTrailLength(int trailLength) {this.trailLength = trailLength;}
/**
* alters the numbers of trails found for each node.
* @param trailsPerNode the number of trails
*/
public void setTrailsPerNode(int trailsPerNode) {this.trailsPerNode = trailsPerNode;}
/**
     * This returns a similarity measure between the two graphs.
* Zero means the graphs are isomorphic, greater values mean more dissimilarity.
* The method works by finding random trails from each node in g1, and seeing how
* much of each trail can be found in g2.
*
* @param g1 the first graph to be compared.
* @param g2 the second graph to be compared.
* @return the similarity between two graphs.
*/
@Override
public double similarity(FastGraph g1, FastGraph g2) {
int nodes1 = g1.getNumberOfNodes();
int nodes2 = g2.getNumberOfNodes();
int size = nodes1+nodes2;
int maxNodes = nodes1;
if(nodes2 > maxNodes) {
maxNodes = nodes2;
}
if(maxNodes == 0) {
return 0.0;
}
costMatrix = new double[size][size];
double maxCost = 0;
for(int y = 0; y < size; y++) { // g1 nodes
for(int x = 0; x < size; x++) { // g2 nodes
if(x < nodes2 && y < nodes1) { // top left of the matrix
double trailCost = randomTrailCost(y,x,g1,g2);
costMatrix[y][x] = trailCost;
if(trailCost > maxCost) {
maxCost = trailCost;
}
} else if(x >= nodes2 && y < nodes1) { // top right of the matrix
if(x-nodes2 == y) { // cost of a node deletion appears on diagonal
costMatrix[y][x] = -1; // this will be set to the largest value, 1 after normalization
} else {
costMatrix[y][x] = -2; // this will be set to MAX_VALUE
}
} else if(x < nodes2 && y >= nodes1) { // bottom left of the matrix
if(y-nodes1 == x) { // cost of a node addition appears on diagonal
costMatrix[y][x] = -1; // this will be set to the largest mapping value, 1 after normalization
} else {
costMatrix[y][x] = -2; // this will be set to MAX_VALUE
}
} else { // bottom right
costMatrix[y][x] = 0.0;
}
}
}
// normalize costs to maxCost
// set all -1s (add and delete nodes) to 1
// set all -2s (unused cells) to MAX_VALUE
for(int y = 0; y < size; y++) { // g1 nodes
for(int x = 0; x < size; x++) { // g2 nodes
double cost = costMatrix[y][x];
if(cost > 0) {
costMatrix[y][x] = cost/maxCost; // should end up between 0 and 1
}
if(cost == -1) {
costMatrix[y][x] = 1;
}
if(cost == -2) {
costMatrix[y][x] = Double.MAX_VALUE;
}
}
}
mapping = null;
VJAccess vja = new VJAccess();
vja.computeAssignment(costMatrix);
mapping = vja.getAssignment();
// get the similarity from the mapping
double cost = 0.0;
for(int i = 0; i < maxNodes; i++) {
cost += costMatrix[i][mapping[i]];
}
double ret = cost/maxNodes; // keep it between 0 and 1
return ret;
}
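    /*
     * Shape of the cost matrix assembled above, for reference (rows: g1 nodes followed by
     * "addition" rows; columns: g2 nodes followed by "deletion" columns):
     *
     *   top-left:     normalised random-trail substitution costs (0..1)
     *   top-right:    node deletions, 1 on the diagonal, MAX_VALUE elsewhere
     *   bottom-left:  node additions, 1 on the diagonal, MAX_VALUE elsewhere
     *   bottom-right: zeros
     *
     * The assignment solver (VJAccess, presumably a Jonker-Volgenant style implementation) picks
     * one entry per row and column minimising the total cost; the sum over the first maxNodes
     * rows, divided by maxNodes, is returned as the similarity.
     */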
/**
     * Find a number of trails from n1 in g1 and see how many and how far they can be repeated from n2 in g2.
*
* @param n1 node in g1
* @param n2 node in g2
* @param g1 the first graph
* @param g2 the second graph
     * @return the accumulated cost over all trails, summed for both the g1-to-g2 and g2-to-g1 directions
*/
private Double randomTrailCost(int n1, int n2, FastGraph g1, FastGraph g2) {
double ret = 0.0;
// first g1 to g2
for(int i = 0; i < trailsPerNode; i++) {
// commented out because different random seeds g1 to g2 and g2 to g1 mean no symmetry
// randomSeed = randomSeed*11;
double singleCost = randomSingleTrail(n1, n2, g1, g2);
ret += singleCost;
}
// then g2 to g1
for(int i = 0; i < trailsPerNode; i++) {
// commented out because different random seeds g1 to g2 and g2 to g1 mean no symmetry
// randomSeed = randomSeed*555;
double singleCost = randomSingleTrail(n2, n1, g2, g1);
ret += singleCost;
}
return ret;
}
/**
     * Find a trail from n1 in g1 and see how far it can be repeated from n2 in g2.
*
* @param n1 node in g1
* @param n2 node in g2
* @param g1 the first graph
* @param g2 the second graph
* @return the length of trail1 minus the longest trail in trail2
*/
private Double randomSingleTrail(int n1, int n2, FastGraph g1, FastGraph g2) {
ArrayList<TrailNode> trail1 = null;
try {
RandomTrail rt = new RandomTrail(directed,randomSeed);
trail1 = rt.findTrail(g1, n1, trailLength);
} catch (FastGraphException e) {
e.printStackTrace();
System.err.println("Fail in randomSingleTrail n1 "+n1+" not found in g1");
return null;
}
// exhaustive search for trail1 in g2
HashMap<TrailNode,ArrayList<Integer>> untestedNeighbours2 = new HashMap<>(trailLength*3);
ArrayList<Integer> trail2Edges = new ArrayList<>(trailLength+1);
ArrayList<TrailNode> trail2 = new ArrayList<>(trailLength+1);
TrailNode tn2 = new TrailNode(0,n2,-1);
trail2.add(tn2);
ArrayList<Integer> neighbourEdgesList = getNeighbourEdgesList(tn2,g2,trail2Edges);
untestedNeighbours2.put(tn2, neighbourEdgesList);
int longestTrail2 = 1;
while(trail2.size() != 0 && trail1.size() != trail2.size()) {
ArrayList<Integer> currentNeighbours2 = untestedNeighbours2.get(tn2);
TrailNode nextTN2 = null;
int nextEdge = -1;
while(currentNeighbours2.size() != 0) {
nextEdge = currentNeighbours2.remove(0);
nextTN2 = checkNodeMatch(tn2,nextEdge,g1,g2,trail1,trail2);
if(nextTN2 != null) { // success
break;
}
}
if(nextTN2 == null) { // finished with the search of this node, no luck
if(trail2.size() == 1) {
// removing start node, so end
break;
}
untestedNeighbours2.remove(tn2);
trail2Edges.remove(trail2Edges.size()-1);
trail2.remove(trail2.size()-1); // the same as removing tn2
tn2 = trail2.get(trail2.size()-1);
} else { // found a match
tn2 = nextTN2;
trail2.add(tn2);
trail2Edges.add(nextEdge);
if(trail2.size() > longestTrail2) {
longestTrail2 = trail2.size();
}
if(trail2.size() == trail1.size()) { // success in finding a trail
break;
}
neighbourEdgesList = getNeighbourEdgesList(tn2,g2,trail2Edges);
untestedNeighbours2.put(tn2, neighbourEdgesList);
}
}
double ret = trail1.size()-longestTrail2;
return ret;
}
/**
*
* @param tn2 the current node
* @param g2 the graph for the current node
* @param trail2Edges the current visited edges
* @return the list of unvisited neighbour edges of tn2
*/
private ArrayList<Integer> getNeighbourEdgesList(TrailNode tn2, FastGraph g2, ArrayList<Integer> trail2Edges) {
// add the neighbour edges for testing
int[] neighbourEdgesArray;
if(directed) {
neighbourEdgesArray = g2.getNodeConnectingOutEdges(tn2.getNode());
} else {
neighbourEdgesArray = g2.getNodeConnectingEdges(tn2.getNode());
}
ArrayList<Integer> neighbourEdgesList = new ArrayList<Integer>(neighbourEdgesArray.length+1);
for(int i = 0; i < neighbourEdgesArray.length;i++) {
int e = neighbourEdgesArray[i];
if(!trail2Edges.contains(e)) {
neighbourEdgesList.add(e);
}
}
return neighbourEdgesList;
}
/**
*
* Checks the match of trail2 node tn2 with the corresponding trail1 node tn1 at the position we are going to insert the new
     * node into trail2. Checks equal degree, checks node label if {@link #nodeLabels} is true.
*
* @param tn2 the g2 current node in a TrailNode
* @param nextEdge the edge that connects from tn2 to the next node to test
* @param g1 the first graph, with fixed trail
* @param g2 the second graph, we are building a trail in this graph
* @param trail1 the first trail
* @param trail2 the current state of the second trail
* @return the other end of nextEdge if it is a match, null otherwise
*/
private TrailNode checkNodeMatch(TrailNode tn2, int nextEdge, FastGraph g1, FastGraph g2, ArrayList<TrailNode> trail1, ArrayList<TrailNode> trail2) {
int nextNode2 = g2.oppositeEnd(nextEdge, tn2.getNode());
TrailNode nextTN2 = new TrailNode(trail2.size(), nextNode2, -1);
        TrailNode tn1 = trail1.get(trail2.size()-1); // the trail1 node at the position corresponding to tn2
// if a repeated node, must be repeated in same point in trail
// plus cannot have one repeated and not the other
if(tn1.getDuplicatePosition() != tn2.getDuplicatePosition()) {
return null;
}
if(!directed) {
int n1Degree = g1.getNodeDegree(tn1.getNode());
int n2Degree = g2.getNodeDegree(tn2.getNode());
if(n1Degree != n2Degree) {
return null;
}
} else {
int n1OutDegree = g1.getNodeOutDegree(tn1.getNode());
int n2OutDegree = g2.getNodeOutDegree(tn2.getNode());
if(n1OutDegree != n2OutDegree) {
return null;
}
int n1InDegree = g1.getNodeInDegree(tn1.getNode());
int n2InDegree = g2.getNodeInDegree(tn2.getNode());
if(n1InDegree != n2InDegree) {
return null;
}
}
// set the duplicate position for the next node in trail2
for(TrailNode tn : trail2) {
if(tn.getNode() == nextNode2) { // the trail2 node is going through a previously used node
nextTN2.setDuplicatePositon(tn.getPosition());
break;
}
}
if(!nodeLabels) {
return nextTN2;
}
String label1 = g1.getNodeLabel(tn1.getNode());
String label2 = g2.getNodeLabel(tn2.getNode());
if(label1.equals(label2)) {
return nextTN2;
}
return null;
}
}
|
package it.unibz.inf.ontop.model.term.functionsymbol.db.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableTable;
import com.google.inject.Inject;
import it.unibz.inf.ontop.model.term.ImmutableTerm;
import it.unibz.inf.ontop.model.term.TermFactory;
import it.unibz.inf.ontop.model.term.functionsymbol.InequalityLabel;
import it.unibz.inf.ontop.model.term.functionsymbol.db.*;
import it.unibz.inf.ontop.model.type.*;
import java.util.UUID;
import java.util.function.Function;
/**
* Mockup: for DB-independent tests only
*/
public class MockupDBFunctionSymbolFactory extends AbstractDBFunctionSymbolFactory {
private static final String CONCAT_STR = "CONCAT";
private static final String AND_STR = "AND";
private static final String OR_STR = "OR";
private static final String CHAR_LENGTH_STR = "CHARLENGTH";
private static final String NOT_STR = "NOT";
private static final String MULTIPLY_STR = "*";
protected static final String DIVIDE_STR = "/";
protected static final String ADD_STR = "+";
protected static final String SUBSTRACT_STR = "-";
private final TermType abstractRootType;
private final DBTermType dbBooleanType;
private final DBTermType abstractRootDBType;
private final DBTermType dbStringType;
private final DBTypeFactory dbTypeFactory;
@Inject
private MockupDBFunctionSymbolFactory(TypeFactory typeFactory) {
super(createDefaultRegularFunctionTable(typeFactory), typeFactory);
abstractRootType = typeFactory.getAbstractAtomicTermType();
dbTypeFactory = typeFactory.getDBTypeFactory();
dbBooleanType = dbTypeFactory.getDBBooleanType();
abstractRootDBType = dbTypeFactory.getAbstractRootDBType();
dbStringType = dbTypeFactory.getDBStringType();
}
protected static ImmutableTable<String, Integer, DBFunctionSymbol> createDefaultRegularFunctionTable(TypeFactory typeFactory) {
DBTypeFactory dbTypeFactory = typeFactory.getDBTypeFactory();
DBTermType dbStringType = dbTypeFactory.getDBStringType();
DBTermType abstractRootDBType = dbTypeFactory.getAbstractRootDBType();
ImmutableTable.Builder<String, Integer, DBFunctionSymbol> builder = ImmutableTable.builder();
return builder.build();
}
/**
* This mockup does not provide any denormalization function symbol
*/
@Override
protected ImmutableTable<DBTermType, RDFDatatype, DBTypeConversionFunctionSymbol> createDenormalizationTable() {
return ImmutableTable.of();
}
@Override
protected DBFunctionSymbol createDBCount(boolean isUnary, boolean isDistinct) {
DBTermType integerType = dbTypeFactory.getDBLargeIntegerType();
return isUnary
? new DBCountFunctionSymbolImpl(abstractRootDBType, integerType, isDistinct)
: new DBCountFunctionSymbolImpl(integerType, isDistinct);
}
@Override
protected DBFunctionSymbol createRegularUntypedFunctionSymbol(String nameInDialect, int arity) {
switch (nameInDialect) {
case AND_STR:
return createDBAnd(arity);
case OR_STR:
return createDBOr(arity);
case CONCAT_STR:
return createDBConcat(arity);
default:
return new DefaultUntypedDBFunctionSymbol(nameInDialect, arity, abstractRootDBType);
}
}
@Override
protected DBBooleanFunctionSymbol createRegularBooleanFunctionSymbol(String nameInDialect, int arity) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
private DBFunctionSymbol createDBAnd(int arity) {
return new DefaultDBAndFunctionSymbol(AND_STR, arity, dbBooleanType);
}
private DBFunctionSymbol createDBOr(int arity) {
return new DefaultDBOrFunctionSymbol(OR_STR, arity, dbBooleanType);
}
@Override
protected DBNotFunctionSymbol createDBNotFunctionSymbol(DBTermType dbBooleanType) {
return new DefaultDBNotFunctionSymbol(NOT_STR, dbBooleanType);
}
private DBFunctionSymbol createDBConcat(int arity) {
return new NullRejectingDBConcatFunctionSymbol(CONCAT_STR, arity, dbStringType, abstractRootDBType, false);
}
@Override
protected DBTypeConversionFunctionSymbol createSimpleCastFunctionSymbol(DBTermType targetType) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBTypeConversionFunctionSymbol createSimpleCastFunctionSymbol(DBTermType inputType, DBTermType targetType) {
return targetType.equals(dbBooleanType)
? new MockupSimpleDBBooleanCastFunctionSymbol(inputType, targetType)
: new MockupSimpleDBCastFunctionSymbol(inputType, targetType);
}
@Override
protected DBFunctionSymbol createDBCase(int arity) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBFunctionSymbol createCoalesceFunctionSymbol(int arity) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBStrictEqFunctionSymbol createDBStrictEquality(int arity) {
return new DefaultDBStrictEqFunctionSymbol(arity, abstractRootType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createDBStrictNEquality(int arity) {
return new DefaultDBStrictNEqFunctionSymbol(arity, abstractRootType, dbBooleanType);
}
@Override
protected DBFunctionSymbol createR2RMLIRISafeEncode() {
return new MockupR2RMLSafeIRIEncodeFunctionSymbol(dbStringType);
}
/**
* Too simplistic!
*/
@Override
protected DBTypeConversionFunctionSymbol createDateTimeNormFunctionSymbol(DBTermType dbDateTimestampType) {
return createSimpleCastFunctionSymbol(dbTypeFactory.getDBDateTimestampType(), dbStringType);
}
/**
* Too simplistic!
* @param booleanType
*/
@Override
protected DBTypeConversionFunctionSymbol createBooleanNormFunctionSymbol(DBTermType booleanType) {
return createSimpleCastFunctionSymbol(dbTypeFactory.getDBBooleanType(), dbStringType);
}
@Override
protected DBTypeConversionFunctionSymbol createDateTimeDenormFunctionSymbol(DBTermType timestampType) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBTypeConversionFunctionSymbol createBooleanDenormFunctionSymbol() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBMathBinaryOperator createMultiplyOperator(DBTermType dbNumericType) {
return new DefaultTypedDBMathBinaryOperator(MULTIPLY_STR, dbNumericType);
}
@Override
protected DBMathBinaryOperator createDivideOperator(DBTermType dbNumericType) {
return new DefaultTypedDBMathBinaryOperator(DIVIDE_STR, dbNumericType);
}
@Override
protected DBMathBinaryOperator createAddOperator(DBTermType dbNumericType) {
return new DefaultTypedDBMathBinaryOperator(ADD_STR, dbNumericType);
}
@Override
protected DBMathBinaryOperator createSubstractOperator(DBTermType dbNumericType) {
return new DefaultTypedDBMathBinaryOperator(SUBSTRACT_STR, dbNumericType);
}
@Override
protected DBMathBinaryOperator createUntypedMultiplyOperator() {
return new DefaultUntypedDBMathBinaryOperator(MULTIPLY_STR, abstractRootDBType);
}
@Override
protected DBMathBinaryOperator createUntypedDivideOperator() {
return new DefaultUntypedDBMathBinaryOperator(DIVIDE_STR, abstractRootDBType);
}
@Override
protected DBMathBinaryOperator createUntypedAddOperator() {
return new DefaultUntypedDBMathBinaryOperator(ADD_STR, abstractRootDBType);
}
@Override
protected DBMathBinaryOperator createUntypedSubstractOperator() {
return new DefaultUntypedDBMathBinaryOperator(SUBSTRACT_STR, abstractRootDBType);
}
@Override
protected DBFunctionSymbol createAbsFunctionSymbol(DBTermType dbTermType) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBFunctionSymbol createCeilFunctionSymbol(DBTermType dbTermType) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBFunctionSymbol createFloorFunctionSymbol(DBTermType dbTermType) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected DBFunctionSymbol createRoundFunctionSymbol(DBTermType dbTermType) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeContains(ImmutableList<? extends ImmutableTerm> immutableTerms,
Function<ImmutableTerm, String> immutableTermStringFunction,
TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeStrBefore(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeStrAfter(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeMD5(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeSHA1(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeSHA256(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeSHA512(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeYear(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeMonth(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeDay(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeHours(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeMinutes(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeSeconds(ImmutableList<? extends ImmutableTerm> terms,
Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
protected String serializeTz(ImmutableList<? extends ImmutableTerm> terms, Function<ImmutableTerm, String> termConverter, TermFactory termFactory) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBFunctionSymbol getDBIfThenElse() {
return new MockupDBIfElseNullFunctionSymbol(dbBooleanType, abstractRootDBType);
}
@Override
public DBFunctionSymbol getDBUpper() {
return getRegularDBFunctionSymbol("UPPER", 1);
}
@Override
public DBFunctionSymbol getDBLower() {
return getRegularDBFunctionSymbol("LOWER", 1);
}
@Override
public DBFunctionSymbol getDBReplace() {
return getRegularDBFunctionSymbol("REPLACE", 3);
}
@Override
public DBFunctionSymbol getDBRegexpReplace3() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBFunctionSymbol getDBRegexpReplace4() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBFunctionSymbol getDBSubString2() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBFunctionSymbol getDBSubString3() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBFunctionSymbol getDBRight() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBFunctionSymbol getDBCharLength() {
return getRegularDBFunctionSymbol(CHAR_LENGTH_STR, 1);
}
@Override
public DBConcatFunctionSymbol getNullRejectingDBConcat(int arity) {
if (arity < 2)
throw new IllegalArgumentException("Arity of CONCAT must be >= 2");
return (DBConcatFunctionSymbol) getRegularDBFunctionSymbol(CONCAT_STR, arity);
}
@Override
public DBConcatFunctionSymbol getDBConcatOperator(int arity) {
return getNullRejectingDBConcat(arity);
}
@Override
public DBAndFunctionSymbol getDBAnd(int arity) {
if (arity < 2)
throw new IllegalArgumentException("Arity of AND must be >= 2");
return (DBAndFunctionSymbol) getRegularDBFunctionSymbol(AND_STR, arity);
}
@Override
public DBOrFunctionSymbol getDBOr(int arity) {
if (arity < 2)
throw new IllegalArgumentException("Arity of OR must be >= 2");
return (DBOrFunctionSymbol) getRegularDBFunctionSymbol(OR_STR, arity);
}
@Override
public DBIsNullOrNotFunctionSymbol getDBIsNull() {
return new MockupDBIsNullOrNotFunctionSymbolImpl(true, dbBooleanType, abstractRootDBType);
}
@Override
public DBIsNullOrNotFunctionSymbol getDBIsNotNull() {
return new MockupDBIsNullOrNotFunctionSymbolImpl(false, dbBooleanType, abstractRootDBType);
}
@Override
protected DBBooleanFunctionSymbol createNonStrictNumericEquality() {
return new DefaultDBNonStrictNumericEqOperator(abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createNonStrictStringEquality() {
return new DefaultDBNonStrictStringEqOperator(abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createNonStrictDatetimeEquality() {
return new DefaultDBNonStrictDatetimeEqOperator(abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createNonStrictDateEquality() {
return new DefaultDBNonStrictDateEqOperator(abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createNonStrictDefaultEquality() {
return new DefaultDBNonStrictDefaultEqOperator(abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createNumericInequality(InequalityLabel inequalityLabel) {
return new DefaultDBNumericInequalityOperator(inequalityLabel, abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createBooleanInequality(InequalityLabel inequalityLabel) {
return new DefaultDBBooleanInequalityOperator(inequalityLabel, abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createStringInequality(InequalityLabel inequalityLabel) {
return new DefaultDBStringInequalityOperator(inequalityLabel, abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createDatetimeInequality(InequalityLabel inequalityLabel) {
return new DefaultDBDatetimeInequalityOperator(inequalityLabel, abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createDateInequality(InequalityLabel inequalityLabel) {
return new DefaultDBDateInequalityOperator(inequalityLabel, abstractRootDBType, dbBooleanType);
}
@Override
protected DBBooleanFunctionSymbol createDefaultInequality(InequalityLabel inequalityLabel) {
return new DefaultDBDefaultInequalityOperator(inequalityLabel, abstractRootDBType, dbBooleanType);
}
@Override
public DBBooleanFunctionSymbol getDBIsStringEmpty() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBIsTrueFunctionSymbol getIsTrue() {
return new DefaultDBIsTrueFunctionSymbol(dbBooleanType);
}
@Override
public NonDeterministicDBFunctionSymbol getDBUUID(UUID uuid) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBBooleanFunctionSymbol getDBRegexpMatches2() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBBooleanFunctionSymbol getDBRegexpMatches3() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public DBFunctionSymbol getDBNow() {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
@Override
public NonDeterministicDBFunctionSymbol getDBRand(UUID uuid) {
throw new UnsupportedOperationException("Operation not supported by the MockupDBFunctionSymbolFactory");
}
}
|
package com.bbn.kbp.events2014;
import com.bbn.bue.common.Inspector;
import com.bbn.bue.common.StringUtils;
import com.bbn.bue.common.TextGroupPackageImmutable;
import com.bbn.bue.common.TextGroupPublicImmutable;
import com.bbn.bue.common.evaluation.AggregateBinaryFScoresInspector;
import com.bbn.bue.common.evaluation.Alignment;
import com.bbn.bue.common.evaluation.BinaryFScoreBootstrapStrategy;
import com.bbn.bue.common.evaluation.BootstrapInspector;
import com.bbn.bue.common.evaluation.BootstrapWriter;
import com.bbn.bue.common.evaluation.BrokenDownPRFAggregator;
import com.bbn.bue.common.evaluation.EquivalenceBasedProvenancedAligner;
import com.bbn.bue.common.evaluation.EvalPair;
import com.bbn.bue.common.evaluation.EvaluationConstants;
import com.bbn.bue.common.evaluation.InspectionNode;
import com.bbn.bue.common.evaluation.InspectorTreeDSL;
import com.bbn.bue.common.evaluation.InspectorTreeNode;
import com.bbn.bue.common.evaluation.ProvenancedAlignment;
import com.bbn.bue.common.evaluation.SummaryConfusionMatrices;
import com.bbn.bue.common.evaluation.SummaryConfusionMatrix;
import com.bbn.bue.common.math.PercentileComputer;
import com.bbn.bue.common.parameters.Parameters;
import com.bbn.bue.common.strings.offsets.CharOffset;
import com.bbn.bue.common.symbols.Symbol;
import com.bbn.kbp.events2014.io.DefaultCorpusQueryLoader;
import com.bbn.kbp.events2014.io.SingleFileQueryAssessmentsLoader;
import com.bbn.kbp.events2014.io.SystemOutputStore2016;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import com.google.common.io.CharSink;
import com.google.common.io.Files;
import com.google.common.math.DoubleMath;
import com.google.common.reflect.TypeToken;
import org.immutables.func.Functional;
import org.immutables.value.Value;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.inspect;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformRight;
import static com.bbn.kbp.events2014.QueryDocMatchFunctions.queryID;
import static com.bbn.kbp.events2014.ResponseFunctions.predicateJustifications;
import static com.google.common.base.Functions.compose;
import static com.google.common.base.Functions.toStringFunction;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.getFirst;
public final class CorpusScorer {
private static final Logger log = LoggerFactory.getLogger(CorpusScorer.class);
private CorpusScorer() {
throw new UnsupportedOperationException();
}
public static void main(String[] argv) {
// we wrap the main method in this way to
// ensure a non-zero return value on failure
try {
final Parameters params = Parameters.loadSerifStyle(new File(argv[0]));
trueMain(params);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
static void trueMain(Parameters params) throws IOException {
log.info(params.dump());
final File outputDir = params.getCreatableDirectory("com.bbn.tac.eal.outputDir");
final File queryFile = params.getExistingFile("com.bbn.tac.eal.queryFile");
final File queryResponseAssessmentsFile =
params.getExistingFile("com.bbn.tac.eal.queryAssessmentsFile");
final CorpusQueryAssessments queryAssessments =
SingleFileQueryAssessmentsLoader.create().loadFrom(
Files.asCharSource(queryResponseAssessmentsFile, Charsets.UTF_8));
final CorpusQuerySet2016 queries = DefaultCorpusQueryLoader.create().loadQueries(
Files.asCharSource(queryFile, Charsets.UTF_8));
final ImmutableMap<String, SystemOutputStore2016> systemOutputsByName =
loadSystemOutputs(params);
log.info("Scoring output will be written to {}", outputDir);
for (final SystemOutputStore2016 systemOutputStore : systemOutputsByName.values()) {
score(queries, queryAssessments, systemOutputStore,
QueryResponseFromERE.queryExecutorFromParamsFor2016(params),
params.getOptionalBoolean("com.bbn.tac.eal.allowUnassessed").or(false),
params.getOptionalBoolean("com.bbn.tac.eal.ignoreJustifications").or(false),
new File(outputDir, systemOutputStore.systemID().asString()));
systemOutputStore.close();
}
}
private static void score(final CorpusQuerySet2016 queries,
final CorpusQueryAssessments queryAssessments,
final SystemOutputStore2016 systemOutputStore,
final CorpusQueryExecutor2016 queryExecutor,
boolean allowUnassessed,
boolean ignoreJustifications,
final File outputDir) throws IOException {
final TypeToken<Set<QueryDocMatch>> setOfQueryMatches = new TypeToken<Set<QueryDocMatch>>() {
};
final InspectionNode<EvalPair<Set<QueryDocMatch>, SystemOutputMatches>> input =
InspectorTreeDSL.pairedInput(setOfQueryMatches, new TypeToken<SystemOutputMatches>() { });
setUpScoring(input, allowUnassessed, outputDir);
final CorrectMatchesFromAssessmentsExtractor matchesFromAssessmentsExtractor =
new CorrectMatchesFromAssessmentsExtractor();
final QueryResponsesFromSystemOutputExtractor matchesFromSystemOutputExtractor =
QueryResponsesFromSystemOutputExtractor.builder()
.corpusQueryAssessments(queryAssessments)
.queryExecutor(queryExecutor)
.ignoreJustifications(ignoreJustifications)
.build();
for (final CorpusQuery2016 query : queries) {
final Set<QueryDocMatch> correctMatches = matchesFromAssessmentsExtractor
.extractCorrectMatches(query, queryAssessments);
final SystemOutputMatches systemMatches =
matchesFromSystemOutputExtractor.extractMatches(query, systemOutputStore);
log.info("For query {}, {} key matches, {} assessed and {} unassessed system matches", query.id(),
correctMatches.size(), systemMatches.assessedMatches().size(),
systemMatches.unassessedMatches().size());
input.inspect(EvalPair.of(correctMatches, systemMatches));
}
// trigger scoring network to do final aggregated output
input.finish();
}
private static Function<EvalPair<? extends Iterable<? extends QueryDocMatch>, ? extends Iterable<? extends QueryDocMatch>>, ProvenancedAlignment<QueryDocMatch, QueryDocMatch, QueryDocMatch, QueryDocMatch>>
EXACT_MATCH_ALIGNER = EquivalenceBasedProvenancedAligner
.forEquivalenceFunction(Functions.<QueryDocMatch>identity())
.asFunction();
private static void setUpScoring(
final InspectionNode<EvalPair<Set<QueryDocMatch>, SystemOutputMatches>> rawInput,
boolean allowUnassessed,
final File outputDir) {
// pull out unassessed matches for special handling
final Inspector<EvalPair<Set<QueryDocMatch>, SystemOutputMatches>> unassessedInspector;
if (allowUnassessed) {
unassessedInspector = new LogUnassessed(Files.asCharSink(new File(outputDir, "unassessed.txt"),
Charsets.UTF_8));
} else {
unassessedInspector = new ErrorOnUnassessed();
}
inspect(rawInput).with(unassessedInspector);
// strip off unassessed matches for other scoring
final InspectorTreeNode<EvalPair<Set<QueryDocMatch>, ImmutableSet<QueryDocMatch>>>
input = transformRight(rawInput, SystemOutputMatchesFunctions.assessedMatches());
final InspectorTreeNode<ProvenancedAlignment<QueryDocMatch, QueryDocMatch, QueryDocMatch, QueryDocMatch>>
alignment = InspectorTreeDSL.transformed(input, EXACT_MATCH_ALIGNER);
inspect(alignment)
.with(AggregateBinaryFScoresInspector.createOutputtingTo("aggregate", outputDir));
inspect(alignment)
.with(BootstrapInspector.forStrategy(
BinaryFScoreBootstrapStrategy.create("Aggregate", outputDir),
1000, new Random(0)));
// bootstrapped scores per-query
inspect(alignment)
.with(BootstrapInspector.forStrategy(
BinaryFScoreBootstrapStrategy.createBrokenDownBy("ByQuery",
compose(toStringFunction(), queryID()), outputDir),
1000, new Random(0)));
// linear score (non-bootstrapped)
final File linearScoreDir = new File(outputDir, "linearScore");
inspect(alignment)
.with(LinearScoringInspector.createOutputtingTo(linearScoreDir));
// official score (bootstrapped linear score)
inspect(alignment)
.with(BootstrapInspector.forStrategy(
LinearScoreBootstrapStrategy.create("OfficialScore", outputDir),
1000, new Random(0)));
}
private static final String MULTIPLE_SYSTEMS_PARAM = "com.bbn.tac.eal.systemOutputsDir";
private static final String SINGLE_SYSTEMS_PARAM = "com.bbn.tac.eal.systemOutputDir";
/**
* We can score one or many systems at a time, depending on the parameters
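* Exactly one of the parameters com.bbn.tac.eal.systemOutputDir (a single system's output
* directory) and com.bbn.tac.eal.systemOutputsDir (a directory whose sub-directories each
* contain one system's output) must be defined.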
*/
private static ImmutableMap<String, SystemOutputStore2016> loadSystemOutputs(
final Parameters params) throws IOException {
params.assertExactlyOneDefined(SINGLE_SYSTEMS_PARAM, MULTIPLE_SYSTEMS_PARAM);
final ImmutableMap.Builder<String, SystemOutputStore2016> ret = ImmutableMap.builder();
if (params.isPresent(MULTIPLE_SYSTEMS_PARAM)) {
final File systemOutputsDir = params.getExistingDirectory(MULTIPLE_SYSTEMS_PARAM);
for (final File f : systemOutputsDir.listFiles()) {
if (f.isDirectory()) {
ret.put(f.getName(), KBPEA2016OutputLayout.get().open(f));
}
}
} else if (params.isPresent(SINGLE_SYSTEMS_PARAM)) {
final File singleSystemOutputDir = params.getExistingDirectory(SINGLE_SYSTEMS_PARAM);
ret.put(singleSystemOutputDir.getName(),
KBPEA2016OutputLayout.get().open(singleSystemOutputDir));
} else {
throw new RuntimeException("Can't happen");
}
return ret.build();
}
}
/**
* The match of a query against a document.
*/
@Value.Immutable
@Functional
@TextGroupPublicImmutable
abstract class _QueryDocMatch {
@Value.Parameter
public abstract Symbol queryID();
@Value.Parameter
public abstract Symbol docID();
@Value.Parameter
public abstract QueryAssessment2016 assessment();
}
/**
* Gets all matches of queries against documents by any system where some system's match was
* assessed as correct.
*/
final class CorrectMatchesFromAssessmentsExtractor {
public final Set<QueryDocMatch> extractCorrectMatches(final CorpusQuery2016 query,
final CorpusQueryAssessments input) {
checkNotNull(input);
final ImmutableSet.Builder<QueryDocMatch> ret = ImmutableSet.builder();
for (final Map.Entry<QueryResponse2016, QueryAssessment2016> e
: input.assessments().entrySet()) {
final QueryResponse2016 queryResponse = e.getKey();
final QueryAssessment2016 assessment = e.getValue();
if (query.id().equalTo(queryResponse.queryID())) {
checkArgument(!assessment.equals(QueryAssessment2016.UNASSESSED),
"Response %s for query ID {} is not assessed", queryResponse,
queryResponse.queryID());
if (assessment.equals(QueryAssessment2016.CORRECT)) {
ret.add(QueryDocMatch.of(queryResponse.queryID(), queryResponse.docID(), assessment));
}
}
}
return ret.build();
}
}
@TextGroupPublicImmutable
@Value.Immutable
abstract class _QueryResponsesFromSystemOutputExtractor {
@Value.Parameter
public abstract CorpusQueryAssessments corpusQueryAssessments();
@Value.Parameter
public abstract CorpusQueryExecutor2016 queryExecutor();
@Value.Default
public boolean ignoreJustifications() {
return false;
}
public final SystemOutputMatches extractMatches(final CorpusQuery2016 query,
final SystemOutputStore2016 input) {
checkNotNull(input);
final ImmutableSet.Builder<QueryDocMatch> assessedMatches = ImmutableSet.builder();
final ImmutableSet.Builder<UnassessedMatch> unassessedMatches = ImmutableSet.builder();
// if we have been requested to ignore justifications, do so for the key
final CorpusQueryAssessments assessmentsToUse;
if (ignoreJustifications()) {
assessmentsToUse = corpusQueryAssessments().withNeutralizedJustifications();
} else {
assessmentsToUse = corpusQueryAssessments();
}
try {
final Iterable<DocEventFrameReference> matches = queryExecutor().queryEventFrames(input, query);
final Iterable<Map.Entry<Symbol, Collection<DocEventFrameReference>>> framesByDocId =
FluentIterable.from(matches).index(DocEventFrameReferenceFunctions.docID()).asMap()
.entrySet();
final ImmutableSetMultimap<Symbol, QueryResponse2016> responsesByDocId =
QueryResponseFromERE.response2016CollapsedJustifications(framesByDocId, input, query);
for(final Map.Entry<Symbol, QueryResponse2016> e : responsesByDocId.entries()) {
final Symbol docid = e.getKey();
final QueryResponse2016 queryResponse = e.getValue();
// if we have been requested to ignore justifications, strip the justification from
// our query response
final QueryResponse2016 queryResponseToScore;
if (ignoreJustifications()) {
queryResponseToScore = queryResponse.withNeutralizedJustification();
} else {
queryResponseToScore = queryResponse;
}
final QueryAssessment2016 assessment =
assessmentsToUse.assessments().get(queryResponseToScore);
if (assessment != null) {
final QueryDocMatch docMatch = QueryDocMatch.of(query.id(), docid, assessment);
assessedMatches.add(docMatch);
} else {
unassessedMatches.add(UnassessedMatch.of(query.id(), docid));
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return SystemOutputMatches.of(assessedMatches.build(), unassessedMatches.build());
}
/**
* Coalesce the predicate justifications of all responses, combining overlapping spans.
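* Illustrative example (hypothetical offsets): spans covering characters [0, 10] and [5, 20]
* are merged into a single span [0, 20], while a disjoint span [30, 40] stays separate.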
*/
private Set<CharOffsetSpan> mergePJs(final ResponseSet responses) {
final RangeSet<CharOffset> pjRanges = TreeRangeSet.create();
final FluentIterable<CharOffsetSpan> pjsOfAnyLinkedResponses = FluentIterable.from(responses)
.transformAndConcat(predicateJustifications());
for (final CharOffsetSpan pjSpan : pjsOfAnyLinkedResponses) {
pjRanges.add(pjSpan.asCharOffsetRange().asRange());
}
final ImmutableSet.Builder<CharOffsetSpan> ret = ImmutableSet.builder();
for (final Range<CharOffset> mergedPJ : pjRanges.asRanges()) {
ret.add(CharOffsetSpan.fromOffsetsOnly(mergedPJ.lowerEndpoint().asInt(),
mergedPJ.upperEndpoint().asInt()));
}
return ret.build();
}
}
@TextGroupPackageImmutable
@Value.Immutable
@Functional
abstract class _SystemOutputMatches {
@Value.Parameter
public abstract ImmutableSet<QueryDocMatch> assessedMatches();
@Value.Parameter
public abstract ImmutableSet<UnassessedMatch> unassessedMatches();
}
@TextGroupPackageImmutable
@Value.Immutable
@Functional
abstract class _UnassessedMatch {
@Value.Parameter
public abstract Symbol queryID();
@Value.Parameter
public abstract Symbol docID();
}
final class ErrorOnUnassessed implements Inspector<EvalPair<Set<QueryDocMatch>, SystemOutputMatches>> {
@Override
public void inspect(
final EvalPair<Set<QueryDocMatch>, SystemOutputMatches> input) {
if (!input.test().unassessedMatches().isEmpty()) {
throw new TACKBPEALException("The following document matches are unassessed: "
+ input.test().unassessedMatches());
}
}
@Override
public void finish() throws IOException {
}
}
final class LogUnassessed implements Inspector<EvalPair<Set<QueryDocMatch>, SystemOutputMatches>> {
private final CharSink output;
private ImmutableSet.Builder<UnassessedMatch> unassessed = ImmutableSet.builder();
public LogUnassessed(CharSink output) {
this.output = checkNotNull(output);
}
@Override
public void inspect(
final EvalPair<Set<QueryDocMatch>, SystemOutputMatches> input) {
unassessed.addAll(input.test().unassessedMatches());
}
@Override
public void finish() throws IOException {
output.write(StringUtils.unixNewlineJoiner().join(unassessed.build()));
}
}
// This and the very similar ArgumentScoringInspector should get refactored together someday
final class LinearScoringInspector implements
Inspector<ProvenancedAlignment<QueryDocMatch, QueryDocMatch, QueryDocMatch, QueryDocMatch>> {
private static final Logger log = LoggerFactory.getLogger(LinearScoringInspector.class);
// gamma as defined by the 2016 task guidelines.
private static final double gamma = 0.25;
private final File outputDir;
final ImmutableMap.Builder<Symbol, Integer> truePositives = ImmutableMap.builder();
final ImmutableMap.Builder<Symbol, Integer> falsePositives = ImmutableMap.builder();
final ImmutableMap.Builder<Symbol, Integer> falseNegatives = ImmutableMap.builder();
final ImmutableMap.Builder<Symbol, Double> scores = ImmutableMap.builder();
private LinearScoringInspector(final File outputDir) {
this.outputDir = outputDir;
}
public static LinearScoringInspector createOutputtingTo(final File outputDir) {
return new LinearScoringInspector(outputDir);
}
@Override
public void inspect(
final ProvenancedAlignment<QueryDocMatch, QueryDocMatch, QueryDocMatch, QueryDocMatch> evalPair) {
// left is ERE, right is system output.
final Iterable<QueryDocMatch> args =
concat(evalPair.allLeftItems(), evalPair.allRightItems());
if (Iterables.isEmpty(args)) {
log.warn("Got a query with no matches in key");
return;
}
final ImmutableSet<Symbol> queryIds = FluentIterable.from(args).transform(queryID()).toSet();
if (queryIds.size() > 1) {
throw new TACKBPEALException("All query matches being compared must be from the same query "
+ "but got " + queryID());
}
int queryTPs = evalPair.leftAligned().size();
int queryFPs = evalPair.rightUnaligned().size();
int queryFNs = evalPair.leftUnaligned().size();
final Symbol queryID = checkNotNull(getFirst(args, null)).queryID();
log.info("Gathering scores for {}", queryID);
checkArgument(evalPair.leftAligned().equals(evalPair.rightAligned()));
double score = computeLinearScore(queryTPs, queryFPs, queryFNs);
truePositives.put(queryID, queryTPs);
falsePositives.put(queryID, queryFPs);
falseNegatives.put(queryID, queryFNs);
scores.put(queryID, score);
}
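/**
* Computes the linear query score max((TP - gamma * FP) / max(TP + FN, 1), 0).
* Worked example (illustrative numbers): with TP = 3, FP = 2, FN = 1 and gamma = 0.25,
* the score is max((3 - 0.25 * 2) / 4, 0) = 0.625.
*/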
static double computeLinearScore(final double queryTPs, final double queryFPs, final double queryFNs) {
double scoreDenom = Math.max(queryTPs + queryFNs, 1);
// scores are clipped at 0.
return Math.max((queryTPs - gamma * queryFPs)/scoreDenom, 0);
}
private static final String SCORE_PATTERN = "TP: %d, FP: %d, FN: %d, Score: %f\n";
@Override
public void finish() throws IOException {
final ImmutableMap<Symbol, Double> scores = this.scores.build();
final ImmutableMap<Symbol, Integer> falsePositives = this.falsePositives.build();
final ImmutableMap<Symbol, Integer> truePositives = this.truePositives.build();
final ImmutableMap<Symbol, Integer> falseNegatives = this.falseNegatives.build();
// see guidelines section 7.3.1.1.3 for aggregating rules:
outputDir.mkdirs();
final double meanScore = scores.isEmpty()?Double.NaN:DoubleMath.mean(scores.values());
Files.asCharSink(new File(outputDir, "linearScore.txt"), Charsets.UTF_8)
.write(Double.toString(meanScore));
for (final Symbol queryId : scores.keySet()) {
final File queryDir = new File(outputDir, queryId.asString());
queryDir.mkdirs();
final File queryScoreFile = new File(queryDir, "score.txt");
// avoid dividing by zero
final double normalizer = Math.max(truePositives.get(queryId) + falseNegatives.get(queryId), 1);
// see guidelines referenced above
// pretends that the corpus is a single document
Files.asCharSink(queryScoreFile, Charsets.UTF_8).write(String
.format(SCORE_PATTERN, truePositives.get(queryId), falsePositives.get(queryId),
falseNegatives.get(queryId), 100 * scores.get(queryId) / normalizer));
}
}
}
final class LinearScoreBootstrapStrategy<T> implements
BootstrapInspector.BootstrapStrategy<Alignment<? extends T, ? extends T>, SummaryConfusionMatrix> {
private final File outputDir;
private final String name;
private LinearScoreBootstrapStrategy(String name, File outputDir) {
this.name = Preconditions.checkNotNull(name);
this.outputDir = Preconditions.checkNotNull(outputDir);
}
public static <T> LinearScoreBootstrapStrategy<T> create(String name, File outputDir) {
return new LinearScoreBootstrapStrategy<>(name, outputDir);
}
public BootstrapInspector.ObservationSummarizer<Alignment<? extends T, ? extends T>, SummaryConfusionMatrix> createObservationSummarizer() {
return new BootstrapInspector.ObservationSummarizer<Alignment<? extends T, ? extends T>, SummaryConfusionMatrix>() {
public SummaryConfusionMatrix summarizeObservation(Alignment<? extends T, ? extends T> alignment) {
return LinearScoreBootstrapStrategy.this.confusionMatrixForAlignment(alignment);
}
};
}
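/**
* Encodes an alignment as a summary confusion matrix: aligned system items are counted as
* true positives (PRESENT/PRESENT), unaligned key items as false negatives (ABSENT/PRESENT)
* and unaligned system items as false positives (PRESENT/ABSENT).
*/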
private SummaryConfusionMatrix confusionMatrixForAlignment(Alignment<? extends T, ? extends T> alignment) {
SummaryConfusionMatrices.Builder summaryConfusionMatrixB = SummaryConfusionMatrices.builder();
summaryConfusionMatrixB.accumulatePredictedGold(EvaluationConstants.PRESENT, EvaluationConstants.PRESENT, (double)alignment.rightAligned().size());
summaryConfusionMatrixB.accumulatePredictedGold(EvaluationConstants.ABSENT, EvaluationConstants.PRESENT, (double)alignment.leftUnaligned().size());
summaryConfusionMatrixB.accumulatePredictedGold(EvaluationConstants.PRESENT, EvaluationConstants.ABSENT, (double)alignment.rightUnaligned().size());
return summaryConfusionMatrixB.build();
}
public Collection<BootstrapInspector.SummaryAggregator<SummaryConfusionMatrix>> createSummaryAggregators() {
return ImmutableList.<BootstrapInspector.SummaryAggregator<SummaryConfusionMatrix>>of(new Aggregator());
}
public BootstrapInspector.SummaryAggregator<Map<String, SummaryConfusionMatrix>> prfAggregator() {
return BrokenDownPRFAggregator.create(this.name, this.outputDir);
}
private final class Aggregator implements BootstrapInspector.SummaryAggregator<SummaryConfusionMatrix> {
private final ImmutableList.Builder<Double> linearScores = ImmutableList.builder();
private static final String AGGREGATE = "Aggregate";
private static final String OFFICIAL_SCORE = "OfficialScore";
private final BootstrapWriter writer = new BootstrapWriter.Builder()
.measures(ImmutableList.of(OFFICIAL_SCORE))
.percentilesToPrint(ImmutableList.of(0.025, 0.05, 0.25, 0.5, 0.75, 0.95, 0.975))
.percentileComputer(PercentileComputer.nistPercentileComputer())
.build();
@Override
public void observeSample(final Collection<SummaryConfusionMatrix> collection) {
final List<Double> perQueryScores = new ArrayList<>();
for (final SummaryConfusionMatrix summaryConfusionMatrix : collection) {
final double queryTPs = summaryConfusionMatrix.cell(EvaluationConstants.PRESENT, EvaluationConstants.PRESENT);
final double queryFPs = summaryConfusionMatrix.cell(EvaluationConstants.PRESENT, EvaluationConstants.ABSENT);
final double queryFNs = summaryConfusionMatrix.cell(EvaluationConstants.ABSENT, EvaluationConstants.PRESENT);
perQueryScores.add(LinearScoringInspector.computeLinearScore(queryTPs, queryFPs, queryFNs));
}
linearScores.add(perQueryScores.isEmpty()?0.0:DoubleMath.mean(perQueryScores));
}
@Override
public void finish() throws IOException {
final ImmutableListMultimap<String, Double> data =
ImmutableListMultimap.<String, Double>builder()
.putAll(AGGREGATE, linearScores.build()).build();
writer.writeBootstrapData(name,
ImmutableMap.of(OFFICIAL_SCORE, data),
outputDir);
}
}
}
|
package org.eclipse.birt.data.engine.olap.data.impl.aggregation;
import java.io.IOException;
import org.eclipse.birt.data.engine.olap.data.api.IAggregationResultSet;
import org.eclipse.birt.data.engine.olap.data.impl.AggregationDefinition;
import org.eclipse.birt.data.engine.olap.data.util.DataType;
import org.eclipse.birt.data.engine.olap.data.util.IDiskArray;
/**
* Default implementation of the IAggregationResultSet interface.
*/
public class AggregationResultSet implements IAggregationResultSet
{
private AggregationDefinition aggregation;
private IDiskArray aggregationResultRow;
private int currentPosition;
private String[][] keyNames;
private String[][] attributeNames;
private int[][] keyDataTypes;
private int[][] attributeDataTypes;
private int[] aggregationDataType;
private AggregationResultRow resultObject;
/**
* Creates a result set over the given aggregation result rows.
*
* @param aggregation the aggregation definition this result set belongs to
* @param aggregationResultRow the disk array holding the aggregation result rows
* @param keyNames the level key names, indexed by level
* @param attributeNames the level attribute names, indexed by level
* @throws IOException if the first result row cannot be read
*/
AggregationResultSet( AggregationDefinition aggregation,
IDiskArray aggregationResultRow, String[][] keyNames,
String[][] attributeNames ) throws IOException
{
this.aggregation = aggregation;
this.aggregationResultRow = aggregationResultRow;
this.keyNames = keyNames;
this.attributeNames = attributeNames;
this.resultObject = (AggregationResultRow) aggregationResultRow.get( 0 );
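// key, attribute and aggregation data types are derived from the first result row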
if ( resultObject.levelMembers != null )
{
keyDataTypes = new int[resultObject.levelMembers.length][];
attributeDataTypes = new int[resultObject.levelMembers.length][];
for ( int i = 0; i < resultObject.levelMembers.length; i++ )
{
keyDataTypes[i] = new int[resultObject.levelMembers[i].keyValues.length];
for ( int j = 0; j < resultObject.levelMembers[i].keyValues.length; j++ )
{
keyDataTypes[i][j] = DataType.getDataType(
resultObject.levelMembers[i].keyValues[j].getClass( ) );
}
if ( resultObject.levelMembers[i].attributes != null )
{
attributeDataTypes[i] = new int[resultObject.levelMembers[i].attributes.length];
for ( int j = 0; j < attributeDataTypes[i].length; j++ )
{
attributeDataTypes[i][j] = DataType.getDataType(
resultObject.levelMembers[i].attributes[j].getClass( ) );
}
}
}
}
if ( resultObject.aggregationValues != null )
{
aggregationDataType = new int[resultObject.aggregationValues.length];
for ( int i = 0; i < resultObject.aggregationValues.length; i++ )
{
aggregationDataType[i] = DataType.getDataType(
resultObject.aggregationValues[i].getClass( ) );
}
}
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getAggregationDataType(int)
*/
public int getAggregationDataType( int aggregationIndex )
throws IOException
{
if ( aggregationDataType == null || aggregationIndex < 0 )
return DataType.UNKNOWN_TYPE;
return aggregationDataType[aggregationIndex];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getAggregationValue(int)
*/
public Object getAggregationValue( int aggregationIndex )
throws IOException
{
if ( resultObject.aggregationValues == null || aggregationIndex < 0 )
return null;
return resultObject.aggregationValues[aggregationIndex];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelAttribute(int, int)
*/
public Object getLevelAttribute( int levelIndex, int attributeIndex )
{
if ( resultObject.levelMembers == null || levelIndex < 0
|| resultObject.levelMembers[levelIndex].attributes == null )
{
return null;
}
return resultObject.levelMembers[levelIndex].attributes[attributeIndex];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelAttributeDataType(java.lang.String, java.lang.String)
*/
public int getLevelAttributeDataType( String levelName, String attributeName )
{
int levelIndex = getLevelIndex( levelName );
if ( attributeDataTypes == null || attributeDataTypes[levelIndex] == null )
{
return DataType.UNKNOWN_TYPE;
}
return this.attributeDataTypes[levelIndex][getLevelAttributeIndex( levelName,
attributeName )];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelAttributeIndex(java.lang.String, java.lang.String)
*/
public int getLevelAttributeIndex( String levelName, String attributeName )
{
int levelIndex = getLevelIndex( levelName );
if ( attributeNames == null || attributeNames[levelIndex] == null )
{
return -1;
}
for ( int i = 0; i < attributeNames[levelIndex].length; i++ )
{
if ( attributeNames[levelIndex][i].equals( attributeName ) )
{
return i;
}
}
return -1;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelAttributeIndex(int, java.lang.String)
*/
public int getLevelAttributeIndex( int levelIndex, String attributeName )
{
if ( attributeNames == null || levelIndex < 0 || attributeNames[levelIndex] == null )
{
return -1;
}
for ( int i = 0; i < attributeNames[levelIndex].length; i++ )
{
if ( attributeNames[levelIndex][i].equals( attributeName ) )
{
return i;
}
}
return -1;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelIndex(java.lang.String)
*/
public int getLevelIndex( String levelName )
{
if ( aggregation.getLevelNames( ) == null )
{
return -1;
}
for ( int i = 0; i < aggregation.getLevelNames( ).length; i++ )
{
if ( aggregation.getLevelNames( )[i].equals( levelName ) )
{
return i;
}
}
return -1;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelKeyDataType(java.lang.String, java.lang.String)
*/
public int getLevelKeyDataType( String levelName, String keyName )
{
if ( keyDataTypes == null )
{
return DataType.UNKNOWN_TYPE;
}
return getLevelKeyDataType( getLevelIndex( levelName ), keyName );
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelKeyValue(int)
*/
public Object[] getLevelKeyValue( int levelIndex )
{
if ( resultObject.levelMembers == null || levelIndex < 0 )
{
return null;
}
return resultObject.levelMembers[levelIndex].keyValues;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#length()
*/
public int length( )
{
return aggregationResultRow.size( );
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#seek(int)
*/
public void seek( int index ) throws IOException
{
currentPosition = index;
resultObject = (AggregationResultRow) aggregationResultRow.get( index );
}
public int getPosition( )
{
return currentPosition;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getSortType(int)
*/
public int getSortType( int levelIndex )
{
if ( aggregation.getSortTypes( ) == null )
{
return -100;
}
return aggregation.getSortTypes( )[levelIndex];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelAttributeDataType(int, java.lang.String)
*/
public int getLevelAttributeDataType( int levelIndex, String attributeName )
{
if ( attributeDataTypes == null
|| levelIndex < 0 || attributeDataTypes[levelIndex] == null )
{
return DataType.UNKNOWN_TYPE;
}
return attributeDataTypes[levelIndex][getLevelAttributeIndex( levelIndex,
attributeName )];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getAllAttributes(int)
*/
public String[] getAllAttributes( int levelIndex )
{
if ( attributeNames == null )
{
return null;
}
return attributeNames[levelIndex];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelKeyDataType(int, java.lang.String)
*/
public int getLevelKeyDataType( int levelIndex, String keyName )
{
if ( keyDataTypes == null
|| levelIndex < 0 || keyDataTypes[levelIndex] == null )
{
return DataType.UNKNOWN_TYPE;
}
return keyDataTypes[levelIndex][getLevelKeyIndex( levelIndex, keyName )];
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelKeyIndex(int, java.lang.String)
*/
public int getLevelKeyIndex( int levelIndex, String keyName )
{
if ( keyNames == null || levelIndex < 0 || keyNames[levelIndex] == null )
{
return DataType.UNKNOWN_TYPE;
}
for ( int i = 0; i < keyNames[levelIndex].length; i++ )
{
if ( keyNames[levelIndex][i].equals( keyName ) )
{
return i;
}
}
return DataType.UNKNOWN_TYPE;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelKeyIndex(java.lang.String, java.lang.String)
*/
public int getLevelKeyIndex( String levelName, String keyName )
{
if ( keyNames == null )
{
return -1;
}
return getLevelKeyIndex( getLevelIndex( levelName ), keyName );
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelAttributeColCount(int)
*/
public int getLevelAttributeColCount( int levelIndex )
{
if ( attributeNames == null || attributeNames[levelIndex] == null )
return 0;
return attributeNames[levelIndex].length;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelKeyColCount(int)
*/
public int getLevelKeyColCount( int levelIndex )
{
if ( keyNames == null || keyNames[levelIndex] == null )
return 0;
return keyNames[levelIndex].length;
}
/*
* (non-Javadoc)
* @see org.eclipse.birt.data.olap.data.api.IAggregationResultSet#getLevelCount()
*/
public int getLevelCount( )
{
if ( keyNames == null )
return 0;
return keyNames.length;
}
}
|
package de.setsoftware.reviewtool.changesources.svn;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Pattern;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.IJobFunction;
import org.eclipse.core.runtime.jobs.Job;
import org.osgi.framework.Bundle;
import org.osgi.framework.FrameworkUtil;
import org.tmatesoft.svn.core.SVNDepth;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.internal.wc.DefaultSVNAuthenticationManager;
import org.tmatesoft.svn.core.wc.ISVNStatusHandler;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNStatus;
import de.setsoftware.reviewtool.base.Pair;
import de.setsoftware.reviewtool.base.ReviewtoolException;
import de.setsoftware.reviewtool.model.api.IBinaryChange;
import de.setsoftware.reviewtool.model.api.IChange;
import de.setsoftware.reviewtool.model.api.IChangeData;
import de.setsoftware.reviewtool.model.api.IChangeSource;
import de.setsoftware.reviewtool.model.api.IChangeSourceUi;
import de.setsoftware.reviewtool.model.api.ICommit;
import de.setsoftware.reviewtool.model.api.IFileHistoryEdge;
import de.setsoftware.reviewtool.model.api.IFileHistoryNode;
import de.setsoftware.reviewtool.model.api.IHunk;
import de.setsoftware.reviewtool.model.api.IRevisionedFile;
import de.setsoftware.reviewtool.model.changestructure.ChangestructureFactory;
/**
* A simple change source that loads the changes from subversion.
*/
final class SvnChangeSource implements IChangeSource {
private static final String KEY_PLACEHOLDER = "${key}";
private final Map<File, Set<File>> projectsPerWcMap;
private final String logMessagePattern;
private final SVNClientManager mgr = SVNClientManager.newInstance();
private final long maxTextDiffThreshold;
SvnChangeSource(
final String logMessagePattern,
final String user,
final String pwd,
final long maxTextDiffThreshold,
final int logCacheMinSize) {
this.mgr.setAuthenticationManager(new DefaultSVNAuthenticationManager(
null, false, user, pwd.toCharArray(), null, null));
this.projectsPerWcMap = new LinkedHashMap<>();
this.logMessagePattern = logMessagePattern;
//check that the pattern can be parsed
this.createPatternForKey("TEST-123");
this.maxTextDiffThreshold = maxTextDiffThreshold;
SvnRepositoryManager.getInstance().init(this.mgr, logCacheMinSize);
SvnWorkingCopyManager.getInstance().init(this.mgr);
}
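/**
* Determines the root directory of the working copy that contains the given project.
* Walks upwards until a directory containing ".svn" is found, then keeps climbing while the
* parent directory also contains ".svn", so that the topmost working copy directory is
* returned (or null if the project is not inside a working copy at all).
*/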
private File determineWorkingCopyRoot(final File projectRoot) {
File curPotentialRoot = projectRoot;
while (!this.isPotentialRoot(curPotentialRoot)) {
curPotentialRoot = curPotentialRoot.getParentFile();
if (curPotentialRoot == null) {
return null;
}
}
while (true) {
final File next = curPotentialRoot.getParentFile();
if (next == null || !this.isPotentialRoot(next)) {
return curPotentialRoot;
}
curPotentialRoot = next;
}
}
private boolean isPotentialRoot(final File next) {
final File dotsvn = new File(next, ".svn");
return dotsvn.isDirectory();
}
private Pattern createPatternForKey(final String key) {
return Pattern.compile(
this.logMessagePattern.replace(KEY_PLACEHOLDER, Pattern.quote(key)),
Pattern.DOTALL);
}
@Override
public Collection<SvnRepo> getRepositories() {
return Collections.unmodifiableCollection(SvnRepositoryManager.getInstance().getRepositories());
}
@Override
public SvnRepo getRepositoryById(final String id) {
for (final SvnRepo repo : SvnRepositoryManager.getInstance().getRepositories()) {
if (repo.getId().equals(id)) {
return repo;
}
}
return null;
}
@Override
public IChangeData getRepositoryChanges(final String key, final IChangeSourceUi ui) {
try {
ui.subTask("Determining relevant commits...");
final List<Pair<SvnWorkingCopy, SvnRepoRevision>> revisions = this.determineRelevantRevisions(key, ui);
final Map<ISvnRepo, Long> neededRevisionPerRepo = this.determineMaxRevisionPerRepo(revisions);
ui.subTask("Checking state of working copy...");
this.checkWorkingCopiesUpToDate(neededRevisionPerRepo, ui);
ui.subTask("Analyzing commits...");
final List<ICommit> commits = this.convertRepoRevisionsToChanges(revisions, ui);
return ChangestructureFactory.createChangeData(this, commits);
} catch (final SVNException e) {
throw new ReviewtoolException(e);
}
}
@Override
public IChangeData getLocalChanges(
final IChangeData remoteChanges,
final List<File> relevantPaths,
final IProgressMonitor ui) {
try {
ui.subTask("Collecting local changes...");
final List<SvnWorkingCopyRevision> revisions = this.collectWorkingCopyChanges(relevantPaths, ui);
ui.subTask("Analyzing local changes...");
final List<ICommit> commits = this.convertLocalRevisionsToChanges(revisions, ui);
final Map<File, IRevisionedFile> localPathMap = this.extractLocalPaths(revisions);
return ChangestructureFactory.createChangeData(this, commits, localPathMap);
} catch (final SVNException e) {
throw new ReviewtoolException(e);
}
}
@Override
public void addProject(final File projectRoot) {
final File wcRoot = this.determineWorkingCopyRoot(projectRoot);
if (wcRoot != null) {
boolean wcCreated = false;
synchronized (this.projectsPerWcMap) {
Set<File> projects = this.projectsPerWcMap.get(wcRoot);
if (projects == null) {
projects = new LinkedHashSet<>();
this.projectsPerWcMap.put(wcRoot, projects);
wcCreated = true;
}
projects.add(projectRoot);
}
if (wcCreated) {
final Job job = Job.create("Analyzing SVN working copy at " + wcRoot,
new IJobFunction() {
@Override
public IStatus run(final IProgressMonitor monitor) {
SvnWorkingCopyManager.getInstance().getWorkingCopy(wcRoot);
return Status.OK_STATUS;
}
});
job.schedule();
}
}
}
@Override
public void removeProject(final File projectRoot) {
final File wcRoot = this.determineWorkingCopyRoot(projectRoot);
if (wcRoot != null) {
boolean wcHasProjects = true;
synchronized (this.projectsPerWcMap) {
final Set<File> projects = this.projectsPerWcMap.get(wcRoot);
if (projects != null) {
projects.remove(projectRoot);
if (projects.isEmpty()) {
this.projectsPerWcMap.remove(wcRoot);
wcHasProjects = false;
}
}
}
if (!wcHasProjects) {
SvnWorkingCopyManager.getInstance().removeWorkingCopy(wcRoot);
}
}
}
/**
* Checks whether the working copy should be updated in order to incorporate remote changes.
* @param neededRevisionPerRepo A map storing the last known revisions for each repository.
*/
private void checkWorkingCopiesUpToDate(
final Map<ISvnRepo, Long> neededRevisionPerRepo,
final IChangeSourceUi ui) throws SVNException {
for (final SvnWorkingCopy wc : SvnWorkingCopyManager.getInstance().getWorkingCopies()) {
if (ui.isCanceled()) {
throw new OperationCanceledException();
}
final ISvnRepo repo = wc.getRepository();
if (neededRevisionPerRepo.containsKey(repo)) {
final long remoteRev = neededRevisionPerRepo.get(repo);
final File wcRoot = wc.getLocalRoot();
final long wcRev = this.mgr.getStatusClient().doStatus(wcRoot, false).getRevision().getNumber();
if (wcRev < remoteRev) {
final Boolean doUpdate = ui.handleLocalWorkingCopyOutOfDate(wc.toString());
if (doUpdate == null) {
throw new OperationCanceledException();
}
if (doUpdate) {
this.mgr.getUpdateClient().doUpdate(wcRoot, SVNRevision.HEAD, SVNDepth.INFINITY, true, false);
}
}
}
}
}
/**
* Collects all local changes and integrates them into the {@link SvnFileHistoryGraph}.
* @param relevantPaths The list of paths to check. If {@code null}, the whole working copy is analyzed.
* @return A list of {@link SvnWorkingCopyRevision}s. May be empty if no relevant local changes have been found.
*/
private List<SvnWorkingCopyRevision> collectWorkingCopyChanges(
final List<File> relevantPaths,
final IProgressMonitor ui) throws SVNException {
final List<SvnWorkingCopyRevision> revisions = new ArrayList<>();
for (final SvnWorkingCopy wc : SvnWorkingCopyManager.getInstance().getWorkingCopies()) {
if (ui.isCanceled()) {
throw new OperationCanceledException();
}
final SortedMap<String, CachedLogEntryPath> changeMap = new TreeMap<>();
final ISVNStatusHandler handler = new ISVNStatusHandler() {
@Override
public void handleStatus(final SVNStatus status) throws SVNException {
if (status.isVersioned()) {
final CachedLogEntryPath entry = new CachedLogEntryPath(wc.getRepository(), status);
changeMap.put(entry.getPath(), entry);
}
}
};
if (relevantPaths != null) {
final Set<File> filteredPaths = this.filterPaths(relevantPaths, wc);
this.collectWorkingCopyChanges(filteredPaths, handler);
} else {
this.collectWorkingCopyChanges(wc, handler);
}
final SvnWorkingCopyRevision wcRevision = new SvnWorkingCopyRevision(wc, changeMap);
final SvnFileHistoryGraph localFileHistoryGraph = new SvnFileHistoryGraph();
localFileHistoryGraph.processRevision(wcRevision);
wc.setLocalFileHistoryGraph(localFileHistoryGraph);
revisions.add(wcRevision);
}
return revisions;
}
/**
* Collects local changes given a set of paths.
* @param paths The paths to consider.
* @param handler Receives information about changed files.
*/
private void collectWorkingCopyChanges(final Set<File> paths, final ISVNStatusHandler handler)
throws SVNException {
for (final File path : paths) {
this.mgr.getStatusClient().doStatus(
path,
SVNRevision.WORKING,
SVNDepth.EMPTY,
false, // no remote
false, // report only modified files
false, // don't include ignored files
false, // ignored
handler,
null); // no change lists
}
}
/**
* Collects local changes within a whole working copy.
* @param wc The working copy to consider.
* @param handler Receives information about changed files.
*/
private void collectWorkingCopyChanges(final SvnWorkingCopy wc, final ISVNStatusHandler handler)
throws SVNException {
this.mgr.getStatusClient().doStatus(
wc.getLocalRoot(), // analyse whole working copy
SVNRevision.WORKING,
SVNDepth.INFINITY,
false, // no remote
false, // report only modified files
false, // don't include ignored files
false, // ignored
handler,
null); // no change lists
}
/**
* Filters out paths that do not belong to the passed working copy.
* @param relevantPaths The paths to filter.
* @param wc The relevant working copy.
* @return A set of filtered paths.
*/
private Set<File> filterPaths(final List<File> relevantPaths, final SvnWorkingCopy wc) {
final Set<File> paths = new LinkedHashSet<>();
for (final File path : relevantPaths) {
final String repoPath = wc.toAbsolutePathInRepo(path);
if (repoPath != null) {
paths.add(path);
}
}
return paths;
}
private Map<File, IRevisionedFile> extractLocalPaths(final Collection<SvnWorkingCopyRevision> revisions) {
final Map<File, IRevisionedFile> result = new LinkedHashMap<>();
for (final SvnWorkingCopyRevision revision : revisions) {
for (final CachedLogEntryPath path : revision.getChangedPaths().values()) {
final File localPath = path.getLocalPath();
if (localPath != null) {
result.put(
localPath,
ChangestructureFactory.createFileInRevision(path.getPath(), revision.toRevision()));
}
}
}
return result;
}
private Map<ISvnRepo, Long> determineMaxRevisionPerRepo(
final List<Pair<SvnWorkingCopy, SvnRepoRevision>> revisions) {
final Map<ISvnRepo, Long> ret = new LinkedHashMap<>();
for (final Pair<SvnWorkingCopy, SvnRepoRevision> p : revisions) {
final SvnRepoRevision revision = p.getSecond();
final ISvnRepo repo = revision.getRepository();
final long curRev = revision.getRevisionNumber();
if (ret.containsKey(repo)) {
if (curRev > ret.get(repo)) {
ret.put(repo, curRev);
}
} else {
ret.put(repo, curRev);
}
}
return ret;
}
private List<Pair<SvnWorkingCopy, SvnRepoRevision>> determineRelevantRevisions(
final String key,
final IChangeSourceUi ui) throws SVNException {
final Pattern pattern = this.createPatternForKey(key);
final CachedLogLookupHandler handler = new CachedLogLookupHandler() {
@Override
public boolean handleLogEntry(final CachedLogEntry logEntry) throws SVNException {
final String message = logEntry.getMessage();
return message != null && pattern.matcher(message).matches();
}
};
return SvnWorkingCopyManager.getInstance().traverseRecentEntries(handler, ui);
}
private List<ICommit> convertRepoRevisionsToChanges(
final List<Pair<SvnWorkingCopy, SvnRepoRevision>> revisions,
final IProgressMonitor ui) {
final List<ICommit> ret = new ArrayList<>();
for (final Pair<SvnWorkingCopy, SvnRepoRevision> e : revisions) {
if (ui.isCanceled()) {
throw new OperationCanceledException();
}
this.convertToCommitIfPossible(e.getFirst(), e.getSecond(), ret, ui);
}
return ret;
}
private List<ICommit> convertLocalRevisionsToChanges(
final List<SvnWorkingCopyRevision> revisions,
final IProgressMonitor ui) {
final List<ICommit> ret = new ArrayList<>();
for (final SvnWorkingCopyRevision revision : revisions) {
if (ui.isCanceled()) {
throw new OperationCanceledException();
}
this.convertToCommitIfPossible(revision.getWorkingCopy(), revision, ret, ui);
}
return ret;
}
private void convertToCommitIfPossible(
final SvnWorkingCopy wc,
final SvnRevision e,
final Collection<? super ICommit> result,
final IProgressMonitor ui) {
final List<? extends IChange> changes = this.determineChangesInCommit(wc, e, ui);
if (!changes.isEmpty()) {
result.add(ChangestructureFactory.createCommit(
wc,
e.toPrettyString(),
changes,
e.toRevision(),
e.getDate()));
}
}
/**
* Helper class to account for the fact that SVN does not fill the copy path
* for single files when the whole containing directory has been copied.
*/
private static final class DirectoryCopyInfo {
private final List<Pair<String, String>> directoryCopies = new ArrayList<>();
public DirectoryCopyInfo(final Collection<CachedLogEntryPath> values) {
for (final CachedLogEntryPath p : values) {
if (p.isDir() && p.getCopyPath() != null) {
this.directoryCopies.add(Pair.create(p.getCopyPath(), p.getPath()));
}
}
}
private String determineOldPath(final CachedLogEntryPath entryInfo) {
if (entryInfo.getCopyPath() != null) {
return entryInfo.getCopyPath();
}
final String path = entryInfo.getPath();
for (final Pair<String, String> dirCopy : this.directoryCopies) {
if (path.startsWith(dirCopy.getSecond())) {
return dirCopy.getFirst() + path.substring(dirCopy.getSecond().length());
}
}
return path;
}
}
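    // Illustration (not part of the original source; names are made up): the path-mapping rule
    // implemented by DirectoryCopyInfo.determineOldPath above, shown with plain strings. If the
    // directory "/branches/b" was recorded as copied from "/trunk", then a file below it whose
    // own copy path SVN left empty is mapped back onto the copy source:
    //
    //     String copySource = "/trunk";              // dirCopy.getFirst()
    //     String copyTarget = "/branches/b";         // dirCopy.getSecond()
    //     String path       = "/branches/b/Foo.java";
    //     String oldPath    = path.startsWith(copyTarget)
    //             ? copySource + path.substring(copyTarget.length())
    //             : path;                            // -> "/trunk/Foo.java"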
private List<? extends IChange> determineChangesInCommit(
final SvnWorkingCopy wc,
final SvnRevision e,
final IProgressMonitor ui) {
final List<IChange> ret = new ArrayList<>();
final Map<String, CachedLogEntryPath> changedPaths = e.getChangedPaths();
final DirectoryCopyInfo dirCopies = new DirectoryCopyInfo(changedPaths.values());
final Set<String> copySources = this.determineCopySources(changedPaths.values(), dirCopies);
final List<String> sortedPaths = new ArrayList<>(changedPaths.keySet());
Collections.sort(sortedPaths);
for (final String path : sortedPaths) {
if (ui.isCanceled()) {
throw new OperationCanceledException();
}
final CachedLogEntryPath value = changedPaths.get(path);
if (!value.isFile()) {
continue;
}
if (value.isDeleted() && copySources.contains(value.getPath())) {
//Moves are contained twice, as a copy and a deletion. The deletion shall not result in a fragment.
continue;
}
final IRevisionedFile fileInfo = ChangestructureFactory.createFileInRevision(path, e.toRevision());
final IFileHistoryNode node = wc.getFileHistoryGraph().getNodeFor(fileInfo);
if (node != null) {
try {
ret.addAll(this.determineChangesInFile(wc, node));
} catch (final Exception ex) {
final IStatus status = new Status(
IStatus.ERROR,
"CoRT",
"An error occurred while computing changes for " + fileInfo.toString(),
ex);
final Bundle bundle = FrameworkUtil.getBundle(this.getClass());
Platform.getLog(bundle).log(status);
}
}
}
return ret;
}
private IBinaryChange createBinaryChange(
final SvnWorkingCopy wc,
final IFileHistoryNode node,
final IFileHistoryNode ancestor) {
final IRevisionedFile oldFileInfo = ChangestructureFactory.createFileInRevision(
ancestor.getFile().getPath(),
ancestor.getFile().getRevision());
return ChangestructureFactory.createBinaryChange(
wc,
oldFileInfo,
node.getFile(),
false);
}
private List<? extends IChange> determineChangesInFile(
final SvnWorkingCopy wc,
final IFileHistoryNode node) throws Exception {
final byte[] newFileContents = node.getFile().getContents();
final boolean newFileContentsUseTextualDiff = this.isUseTextualDiff(newFileContents);
final List<IChange> changes = new ArrayList<>();
for (final IFileHistoryEdge ancestorEdge : node.getAncestors()) {
final IFileHistoryNode ancestor = ancestorEdge.getAncestor();
final byte[] oldFileContents = ancestor.getFile().getContents();
final boolean oldFileContentsUseTextualDiff = this.isUseTextualDiff(oldFileContents);
if (oldFileContentsUseTextualDiff && newFileContentsUseTextualDiff) {
final List<? extends IHunk> hunks = ancestorEdge.getDiff().getHunks();
for (final IHunk hunk : hunks) {
changes.add(ChangestructureFactory.createTextualChangeHunk(
wc,
hunk.getSource(),
hunk.getTarget(),
false));
}
} else {
changes.add(this.createBinaryChange(wc, node, ancestor));
}
}
return changes;
}
private boolean isUseTextualDiff(final byte[] newFileContent) {
return !contentLooksBinary(newFileContent) && newFileContent.length <= this.maxTextDiffThreshold;
}
private static boolean contentLooksBinary(final byte[] fileContent) {
if (fileContent.length == 0) {
return false;
}
final int max = Math.min(128, fileContent.length);
for (int i = 0; i < max; i++) {
if (isStrangeChar(fileContent[i])) {
//we only count ASCII control chars as "strange" (to be UTF-8 agnostic), so
// a single strange char should suffice to declare a file non-text
return true;
}
}
return false;
}
private static boolean isStrangeChar(final byte b) {
return b != '\n' && b != '\r' && b != '\t' && b < 0x20 && b >= 0;
}
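    // Illustration (not part of the original source): contentLooksBinary only treats ASCII control
    // characters other than '\n', '\r' and '\t' within the first 128 bytes as "strange", so UTF-8
    // multi-byte sequences (negative byte values) never trigger binary detection:
    //
    //     contentLooksBinary("Grüße\n".getBytes(StandardCharsets.UTF_8));  // false: plain text
    //     contentLooksBinary(new byte[] {'P', 'K', 3, 4});                 // true: control bytes 3 and 4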
private Set<String> determineCopySources(
final Collection<CachedLogEntryPath> entries,
final DirectoryCopyInfo dirMoves) {
final Set<String> ret = new LinkedHashSet<>();
for (final CachedLogEntryPath p : entries) {
final String copyPath = dirMoves.determineOldPath(p);
if (!copyPath.equals(p.getPath())) {
ret.add(copyPath);
}
}
return ret;
}
}
|
package ai.vespa.hosted.auth;
import com.yahoo.config.provision.SystemName;
import com.yahoo.security.KeyUtils;
import com.yahoo.security.SslContextBuilder;
import com.yahoo.security.X509CertificateUtils;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;
import java.time.Instant;
import java.util.Optional;
import java.util.logging.Logger;
import static ai.vespa.hosted.api.Properties.getNonBlankProperty;
/**
* Authenticates against the hosted Vespa API using private key signatures, and against Vespa applications using mutual TLS.
*
* @author jonmv
*/
public class EndpointAuthenticator implements ai.vespa.hosted.api.EndpointAuthenticator {
private static final Logger logger = Logger.getLogger(EndpointAuthenticator.class.getName());
/** Don't touch. */
public EndpointAuthenticator(@SuppressWarnings("unused") SystemName __) { }
/**
* If {@code System.getProperty("vespa.test.credentials.root")} is set, key and certificate files
* "key" and "cert" in that directory are used; otherwise, the system default SSLContext is returned.
*/
@Override
public SSLContext sslContext() {
try {
Path certificateFile = null;
Path privateKeyFile = null;
Optional<String> credentialsRootProperty = getNonBlankProperty("vespa.test.credentials.root");
if (credentialsRootProperty.isPresent()) {
Path credentialsRoot = Path.of(credentialsRootProperty.get());
certificateFile = credentialsRoot.resolve("cert");
privateKeyFile = credentialsRoot.resolve("key");
}
else {
Optional<String> certificateFileProperty = getNonBlankProperty("dataPlaneCertificateFile");
if (certificateFileProperty.isPresent())
certificateFile = Path.of(certificateFileProperty.get());
Optional<String> privateKeyFileProperty = getNonBlankProperty("dataPlaneKeyFile");
if (privateKeyFileProperty.isPresent())
privateKeyFile = Path.of(privateKeyFileProperty.get());
}
if (certificateFile != null && privateKeyFile != null) {
X509Certificate certificate = X509CertificateUtils.fromPem(new String(Files.readAllBytes(certificateFile)));
if ( Instant.now().isBefore(certificate.getNotBefore().toInstant())
|| Instant.now().isAfter(certificate.getNotAfter().toInstant()))
throw new IllegalStateException("Certificate at '" + certificateFile + "' is valid between " +
certificate.getNotBefore() + " and " + certificate.getNotAfter() + " — not now.");
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(new String(Files.readAllBytes(privateKeyFile)));
return new SslContextBuilder().withKeyStore(privateKey, certificate).build();
}
        logger.warning( "##################################################################################\n"
                      + "# Data plane key and/or certificate missing; please specify                      #\n"
                      + "# '-DdataPlaneCertificateFile=/path/to/certificate' and                          #\n"
                      + "# '-DdataPlaneKeyFile=/path/to/private_key'.                                     #\n"
                      + "# Trying the default SSLContext, but this will most likely cause HTTP error 401. #\n"
                      + "##################################################################################");
return SSLContext.getDefault();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
catch (NoSuchAlgorithmException e) {
throw new IllegalStateException(e);
}
}
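    // Illustration (not part of the original source; the path and the 'system' variable are
    // placeholders): pointing the authenticator at test credentials via the system property
    // documented on sslContext(). The directory is expected to contain the files "cert" and "key".
    //
    //     System.setProperty("vespa.test.credentials.root", "/tmp/test-credentials");
    //     SSLContext context = new EndpointAuthenticator(system).sslContext();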
}
|
package org.elasticsearch.watcher.support.xcontent;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.watcher.support.clock.Clock;
import org.elasticsearch.watcher.support.clock.SystemClock;
import org.elasticsearch.watcher.support.secret.Secret;
import org.elasticsearch.watcher.support.secret.SecretService;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* An xcontent parser that is used by watcher. This is a special parser that is
* aware of watcher services. In particular, it's aware of the used {@link Clock}
* and the {@link SecretService}. The former (clock) may be used when the current time
* is required while parsing a construct. The latter (secret service) is used
* to convert secret values (e.g. passwords, security tokens, etc.) to {@link Secret}s.
* {@link Secret}s are encrypted values that are stored in memory and are decrypted
* on demand when needed.
*/
public class WatcherXContentParser implements XContentParser {
public static Secret secret(XContentParser parser) throws IOException {
char[] chars = parser.text().toCharArray();
if (parser instanceof WatcherXContentParser) {
WatcherXContentParser watcherParser = (WatcherXContentParser) parser;
if (watcherParser.secretService != null) {
chars = watcherParser.secretService.encrypt(chars);
}
}
return new Secret(chars);
}
    public static Secret secretOrNull(XContentParser parser) throws IOException {
        String text = parser.textOrNull();
        if (text == null) {
            return null;
        }
        char[] chars = text.toCharArray();
        if (parser instanceof WatcherXContentParser) {
            WatcherXContentParser watcherParser = (WatcherXContentParser) parser;
            if (watcherParser.secretService != null) {
                chars = watcherParser.secretService.encrypt(chars);
            }
        }
        return new Secret(chars);
    }
public static Clock clock(XContentParser parser) {
if (parser instanceof WatcherXContentParser) {
return ((WatcherXContentParser) parser).clock;
}
return SystemClock.INSTANCE;
}
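    // Illustration (not part of the original source; field and token names are made up): a watch
    // definition parser would typically call the static helpers above when it reaches a sensitive
    // value, so the raw characters are encrypted iff a SecretService is attached to the parser:
    //
    //     if ("password".equals(currentFieldName) && token == XContentParser.Token.VALUE_STRING) {
    //         Secret password = WatcherXContentParser.secret(parser);
    //     }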
private final Clock clock;
private final XContentParser parser;
private final @Nullable SecretService secretService;
private ParseFieldMatcher parseFieldMatcher = ParseFieldMatcher.EMPTY;
public WatcherXContentParser(XContentParser parser, Clock clock, @Nullable SecretService secretService) {
this.clock = clock;
this.parser = parser;
this.secretService = secretService;
}
@Override
public XContentType contentType() {
return parser.contentType();
}
@Override
public Token nextToken() throws IOException {
return parser.nextToken();
}
@Override
public void skipChildren() throws IOException {
parser.skipChildren();
}
@Override
public Token currentToken() {
return parser.currentToken();
}
@Override
public String currentName() throws IOException {
return parser.currentName();
}
@Override
public Map<String, Object> map() throws IOException {
return parser.map();
}
@Override
public Map<String, Object> mapOrdered() throws IOException {
return parser.mapOrdered();
}
@Override
public List<Object> list() throws IOException {
return parser.list();
}
@Override
public List<Object> listOrderedMap() throws IOException {
return parser.listOrderedMap();
}
@Override
public String text() throws IOException {
return parser.text();
}
@Override
public String textOrNull() throws IOException {
return parser.textOrNull();
}
@Override
public BytesRef utf8BytesOrNull() throws IOException {
return parser.utf8BytesOrNull();
}
@Override
public BytesRef utf8Bytes() throws IOException {
return parser.utf8Bytes();
}
@Override
public Object objectText() throws IOException {
return parser.objectText();
}
@Override
public Object objectBytes() throws IOException {
return parser.objectBytes();
}
@Override
public boolean hasTextCharacters() {
return parser.hasTextCharacters();
}
@Override
public char[] textCharacters() throws IOException {
return parser.textCharacters();
}
@Override
public int textLength() throws IOException {
return parser.textLength();
}
@Override
public int textOffset() throws IOException {
return parser.textOffset();
}
@Override
public Number numberValue() throws IOException {
return parser.numberValue();
}
@Override
public NumberType numberType() throws IOException {
return parser.numberType();
}
@Override
public short shortValue(boolean coerce) throws IOException {
return parser.shortValue(coerce);
}
@Override
public int intValue(boolean coerce) throws IOException {
return parser.intValue(coerce);
}
@Override
public long longValue(boolean coerce) throws IOException {
return parser.longValue(coerce);
}
@Override
public float floatValue(boolean coerce) throws IOException {
return parser.floatValue(coerce);
}
@Override
public double doubleValue(boolean coerce) throws IOException {
return parser.doubleValue(coerce);
}
@Override
public short shortValue() throws IOException {
return parser.shortValue();
}
@Override
public int intValue() throws IOException {
return parser.intValue();
}
@Override
public long longValue() throws IOException {
return parser.longValue();
}
@Override
public float floatValue() throws IOException {
return parser.floatValue();
}
@Override
public double doubleValue() throws IOException {
return parser.doubleValue();
}
@Override
public boolean isBooleanValue() throws IOException {
return parser.isBooleanValue();
}
@Override
public boolean booleanValue() throws IOException {
return parser.booleanValue();
}
@Override
public byte[] binaryValue() throws IOException {
return parser.binaryValue();
}
@Override
public XContentLocation getTokenLocation() {
return parser.getTokenLocation();
}
@Override
public boolean isClosed() {
return parser.isClosed();
}
@Override
public ParseFieldMatcher getParseFieldMatcher() {
return parseFieldMatcher;
}
@Override
public void setParseFieldMatcher(ParseFieldMatcher matcher) {
this.parseFieldMatcher = matcher;
}
@Override
public void close() throws ElasticsearchException {
parser.close();
}
}
|
package org.fusesource.fabric.itests.smoke;
import org.apache.curator.framework.CuratorFramework;
import org.fusesource.fabric.api.Container;
import org.fusesource.fabric.itests.paxexam.support.ContainerBuilder;
import org.fusesource.fabric.itests.paxexam.support.FabricTestSupport;
import org.fusesource.fabric.itests.paxexam.support.Provision;
import org.fusesource.fabric.itests.paxexam.support.WaitForConfigurationChange;
import org.fusesource.tooling.testing.pax.exam.karaf.ServiceLocator;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.Configuration;
import org.ops4j.pax.exam.junit.ExamReactorStrategy;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.ops4j.pax.exam.options.DefaultCompositeOption;
import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.exists;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@RunWith(JUnit4TestRunner.class)
@ExamReactorStrategy(AllConfinedStagedReactorFactory.class)
public class ContainerUpgradeAndRollbackTest extends FabricTestSupport {
@After
public void tearDown() throws InterruptedException {
ContainerBuilder.destroy();
}
/**
* This tests the simple scenario of
* 1. create a child container
* 2. create a new version
* 3. modify the profile of the new version
* 4. upgrade all containers
* 5. verify that child is provisioned according to the new version
* 6. rollback containers.
* 7. verify that the child is provisioned according to the old version.
*/
@Test
public void testContainerUpgradeAndRollback() throws Exception {
System.out.println(executeCommand("fabric:create -n"));
Set<Container> containers = ContainerBuilder.create().withName("camel").withProfiles("feature-camel").assertProvisioningResult().build();
System.out.println(executeCommand("fabric:version-create --parent 1.0 1.1"));
//Make sure that the profile change has been applied before changing the version
CountDownLatch latch = WaitForConfigurationChange.on(getFabricService());
System.out.println(executeCommand("fabric:profile-edit --features camel-hazelcast feature-camel 1.1"));
latch.await(5, TimeUnit.SECONDS);
System.out.println(executeCommand("fabric:profile-display --version 1.1 feature-camel"));
System.out.println(executeCommand("fabric:container-upgrade --all 1.1"));
Provision.provisioningSuccess(containers, PROVISION_TIMEOUT);
System.out.println(executeCommand("fabric:container-list"));
for (Container container : containers) {
assertEquals("Container should have version 1.1", "1.1", container.getVersion().getId());
String bundles = executeCommand("container-connect -u admin -p admin " + container.getId() + " osgi:list -s | grep camel-hazelcast");
System.out.println(executeCommand("fabric:container-list"));
assertNotNull(bundles);
System.out.println(bundles);
assertFalse("Expected camel-hazelcast installed.", bundles.isEmpty());
}
System.out.println(executeCommand("fabric:container-rollback --all 1.0"));
Provision.provisioningSuccess(containers, PROVISION_TIMEOUT);
System.out.println(executeCommand("fabric:container-list"));
for (Container container : containers) {
assertEquals("Container should have version 1.0", "1.0", container.getVersion().getId());
String bundles = executeCommand("container-connect -u admin -p admin " + container.getId() + " osgi:list -s | grep camel-hazelcast");
assertNotNull(bundles);
System.out.println(bundles);
assertTrue("Expected no camel-hazelcast installed.", bundles.isEmpty());
}
}
@Test
public void testContainerAfterVersionUpgradeAndDowngrade() throws Exception {
System.out.println(executeCommand("fabric:create -n"));
waitForFabricCommands();
System.out.println(executeCommand("fabric:version-create --parent 1.0 1.1"));
Set<Container> containers = ContainerBuilder.create().withName("camel").withProfiles("feature-camel").assertProvisioningResult().build();
//Make sure that the profile change has been applied before changing the version
CountDownLatch latch = WaitForConfigurationChange.on(getFabricService());
System.out.println(executeCommand("fabric:profile-edit --features camel-hazelcast feature-camel 1.1"));
latch.await(5, TimeUnit.SECONDS);
System.out.println(executeCommand("fabric:container-upgrade --all 1.1"));
Provision.provisioningSuccess(containers, PROVISION_TIMEOUT);
System.out.println(executeCommand("fabric:container-list"));
for (Container container : containers) {
assertEquals("Container should have version 1.1", "1.1", container.getVersion().getId());
String bundles = executeCommand("container-connect -u admin -p admin " + container.getId() + " osgi:list -s | grep camel-hazelcast");
System.out.println(executeCommand("fabric:container-list"));
assertNotNull(bundles);
System.out.println(bundles);
assertFalse("Expected camel-hazelcast installed.", bundles.isEmpty());
}
System.out.println(executeCommand("fabric:container-rollback --all 1.0"));
Provision.provisioningSuccess(containers, PROVISION_TIMEOUT);
System.out.println(executeCommand("fabric:container-list"));
for (Container container : containers) {
assertEquals("Container should have version 1.0", "1.0", container.getVersion().getId());
String bundles = executeCommand("container-connect -u admin -p admin " + container.getId() + " osgi:list -s | grep camel-hazelcast");
assertNotNull(bundles);
System.out.println(bundles);
assertTrue("Expected no camel-hazelcast installed.", bundles.isEmpty());
}
}
@Test
@Ignore("[FABRIC-640] Fix fabric smoke ContainerUpgradeAndRollbackTest")
public void testContainerAfterVersionDowngrade() throws Exception {
System.out.println(executeCommand("fabric:create -n"));
waitForFabricCommands();
System.out.println(executeCommand("fabric:version-create --parent 1.0 1.1"));
System.out.println(executeCommand("fabric:container-upgrade --all 1.1"));
Set<Container> containers = ContainerBuilder.create().withName("camel").withProfiles("feature-camel").assertProvisioningResult().build();
System.out.println(executeCommand("fabric:container-rollback --all 1.0"));
Provision.provisioningSuccess(containers, PROVISION_TIMEOUT);
for (Container container : containers) {
assertEquals("Container should have version 1.0", "1.0", container.getVersion().getId());
assertNotNull(exists(ServiceLocator.getOsgiService(CuratorFramework.class), "/fabric/configs/versions/1.0/containers/" + container.getId()));
}
}
@Configuration
public Option[] config() {
return new Option[]{
new DefaultCompositeOption(fabricDistributionConfiguration())
};
}
}
|
package edu.ch.unifr.diuf.testing_tool;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class TestParams
{
public int numClients;
public String testId;
private String testServerSourceGraphName;
private String testServerDestGraphName;
private int testServerGraphReset;
private int testServerGraphSnaphshot;
private String testServerReadCons;
private String testServerWriteCons;
private String testServerTransLockingGran;
private int testServerReplicationFactor;
private int testNum;
    private List<Integer> testThreadNumber;
private int testWarmupPer;
private int testRunningPer;
private int testOperationType;
private int testOperationNum;
private int testTransRetrials;
private String testConflictsFlag;
// number of different entities, properties per entity and values per property
private int testDiffEnt;
private int testDiffPropPerEnt;
private int testDiffValuesPerProf;
public TestParams() {
this.testThreadNumber = new ArrayList<>();
}
public void setNumClients(int numClients) {
this.numClients = numClients;
}
public int getNumClients() {
return this.numClients;
}
public String getTestServerSourceGraphName() {
return this.testServerSourceGraphName;
}
public void setTestServerSourceGraphName(String graphName) {
this.testServerSourceGraphName = graphName;
}
public String getTestServerDestGraphName() {
return this.testServerDestGraphName;
}
public void setTestServerDestGraphName(String graphName) {
this.testServerDestGraphName = graphName;
}
public int getTestServerGraphReset() {
return this.testServerGraphReset;
}
public void setTestServerGraphReset(int reset) {
this.testServerGraphReset = reset;
}
public int getGraphSnapshot() {
return this.testServerGraphSnaphshot;
}
public void setGraphSnapshot(int snapshot) {
this.testServerGraphSnaphshot = snapshot;
}
public String getTestReadCons() {
return this.testServerReadCons;
}
public void setTestReadCons(String read_cons) {
this.testServerReadCons = read_cons;
}
public String getTestWriteCons() {
return this.testServerWriteCons;
}
public void setTestWriteCons(String write_cons) {
this.testServerWriteCons = write_cons;
}
public String getTransLockGran() {
return this.testServerTransLockingGran;
}
public void setTransLockGran(String lock_gran) {
this.testServerTransLockingGran = lock_gran;
}
public int getReplicationFactor() {
return this.testServerReplicationFactor;
}
public void setReplicationFactor(int factor) {
this.testServerReplicationFactor = factor;
}
public int getTransRetrials() {
return this.testTransRetrials;
}
public void setTransRetrials(int retrials) {
this.testTransRetrials = retrials;
}
public int getTestNum() {
return this.testNum;
}
public void setTestNum(int num) {
this.testNum = num;
}
    public List<Integer> getTestThreadNum() {
return this.testThreadNumber;
}
public void addTestThreadNum(int thread_num) {
this.testThreadNumber.add(thread_num);
}
public int getTestWarmupPer() {
return this.testWarmupPer;
}
public void setTestWarmupPer(int warmup_per) {
this.testWarmupPer = warmup_per;
}
public int getTestRunningPer() {
return this.testRunningPer;
}
public void setTestRunningPer(int running_per) {
this.testRunningPer = running_per;
}
public int getTestOperationType() {
return this.testOperationType;
}
public void setTestOperationType(int oper_type) {
this.testOperationType = oper_type;
}
public int getTestOperationNum() {
return this.testOperationNum;
}
public void setTestOperationNum(int oper_num) {
this.testOperationNum = oper_num;
}
public int getTestTransRetrials() {
return this.testTransRetrials;
}
public void setTestTransRetrials(int retrials) {
this.testTransRetrials = retrials;
}
public int getDiffEnt() {
return this.testDiffEnt;
}
public void setDiffEnt(int diffEnt) {
this.testDiffEnt = diffEnt;
}
public int getDiffPropPerEnt() {
return this.testDiffPropPerEnt;
}
public void setDiffPropPerEnt(int propPerEnt) {
this.testDiffPropPerEnt = propPerEnt;
}
public int getDiffValuesPerProp() {
return this.testDiffValuesPerProf;
}
public void setDiffValuesPerProp(int valuesPerProp) {
this.testDiffValuesPerProf = valuesPerProp;
}
private double getProbabilityOfConflicts(int no_threads) {
return ((no_threads+0.0)*numClients)/
(testDiffEnt*testDiffPropPerEnt*testDiffValuesPerProf)*100;
}
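    // Worked example (not part of the original source): with 4 clients running 8 threads each
    // against 100 entities, 10 properties per entity and 4 values per property, the estimate is
    // (8 * 4) / (100 * 10 * 4) * 100 = 0.8 (%).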
public String getConflictsParameter() {
return this.testConflictsFlag;
}
public void setConflictsParameter(String param) {
this.testConflictsFlag = param;
}
    // full test name, including the directory prefix if any
public String getFullTestName() {
return this.testId;
}
public String getTestName() {
if( this.testId.contains("/") )
return testId.substring(testId.indexOf("/")+1);
return testId;
}
public void setTestName(String name) {
this.testId = name;
}
public String getFinalRestultFilename() {
return "final_result-"+this.getTestName()+".data";
}
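    // Illustration (not part of the original source): for testId "suite1/readHeavy",
    // getFullTestName() returns "suite1/readHeavy", getTestName() returns "readHeavy" and
    // getFinalRestultFilename() returns "final_result-readHeavy.data".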
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("\tSERVER PARAMS: ").append(testServerSourceGraphName)
.append("/").append(testServerDestGraphName).append(" ");
sb.append(testServerGraphReset).append(" ");
sb.append(testServerGraphSnaphshot).append(" ").append(testServerReadCons);
sb.append(" ").append(testServerWriteCons).append(" ").append(testServerTransLockingGran);
sb.append(" ").append(testServerReplicationFactor).append("\n");
sb.append("\tTEST PARAMS: \n");
//sb.append("\t\tinput filename: ").append(testInputFilename).append("\n");
sb.append("\t\trun steps: ").append(testNum).append("\n");
sb.append("\t\tthread num per client: ");
for(Iterator it=testThreadNumber.iterator(); it.hasNext(); ) {
sb.append((it.next())).append(" ");
}
sb.append("\n");
sb.append("\t\twarmup period sec: ").append(testWarmupPer).append("\n");
sb.append("\t\trunning period sec: ").append(testRunningPer).append("\n");
sb.append("\t\toperation type: ").append(testOperationType).append("\n");
sb.append("\t\tnum oper per trans: ").append(testOperationNum).append("\n");
sb.append("\t\ttrans num of retrials: ").append(testRunningPer).append("\n");
sb.append("\t\tconflicts flag: ").append(testConflictsFlag).append("\n");
if( testConflictsFlag.equals("yes") ) {
sb.append("\t\tnum of different entities: ").append(testDiffEnt).append("\n");
sb.append("\t\tnum of different prop per ent: ").append(testDiffPropPerEnt).append("\n");
sb.append("\t\tnum of different values per prop: ").append(testDiffValuesPerProf).append("\n");
sb.append("\t\tPROBABILITY of conflicts (%): ");
for(Iterator it=testThreadNumber.iterator(); it.hasNext(); ) {
int no_threads = (int)(it.next());
sb.append(no_threads).append("th->").append(getProbabilityOfConflicts(no_threads))
.append(" ");
}
sb.append("\n");
}
return sb.toString();
}
}
|
package org.spongepowered.mod.test;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.passive.EntityChicken;
import net.minecraft.item.Item;
import net.minecraft.item.ItemEgg;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.World;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.RegistryEvent;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.entity.EntityTypes;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.filter.cause.Root;
import org.spongepowered.api.event.filter.type.Exclude;
import org.spongepowered.api.event.item.inventory.DropItemEvent;
import org.spongepowered.api.item.ItemType;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
@Mod(modid = CustomItemDropTest.MOD_ID, name = "Custom Item Drop Test")
public class CustomItemDropTest {
public static final String MOD_ID = "customitemdroptest";
public static final String ITEM_ID = "dropitem";
private ItemType EGG_TOSS;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent event) {
MinecraftForge.EVENT_BUS.register(this);
Sponge.getEventManager().registerListeners(this, this);
}
@SubscribeEvent
public void registerItems(RegistryEvent.Register<Item> event) {
EggItem item = new EggItem();
item.setCreativeTab(CreativeTabs.FOOD);
item.setRegistryName(new ResourceLocation(MOD_ID, ITEM_ID));
event.getRegistry().register(item);
final Optional<ItemType> type = Sponge.getRegistry().getType(ItemType.class, MOD_ID + ":" + ITEM_ID);
type.ifPresent(itemType -> this.EGG_TOSS = itemType);
}
@Listener(beforeModifications = false)
@Exclude(DropItemEvent.Pre.class)
public void onDropItem(DropItemEvent event) {
event.setCancelled(true);
}
@Listener(beforeModifications = true)
public void onDropItem(DropItemEvent.Dispense event, @Root Player player) {
final List<org.spongepowered.api.entity.Item> collections = event.getEntities()
.stream()
.filter(entity -> entity instanceof org.spongepowered.api.entity.Item)
.map(entity -> (org.spongepowered.api.entity.Item) entity)
.filter(itemEntity -> itemEntity.getItemType().equals(this.EGG_TOSS))
.collect(Collectors.toList());
if (!collections.isEmpty()) {
final org.spongepowered.api.entity.Entity entity = player.getWorld().createEntity(EntityTypes.CREEPER, player.getPosition());
event.getEntities().add(entity);
}
}
public static class EggItem extends ItemEgg {
@Override
@Nullable
public Entity createEntity(World world, Entity location, ItemStack itemstack) {
EntityChicken entity = new EntityChicken(world);
entity.copyLocationAndAnglesFrom(location);
return entity;
}
@Override
public boolean hasCustomEntity(ItemStack stack) {
return true;
}
}
}
|
package org.innovateuk.ifs.invite.transactional;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.innovateuk.ifs.commons.error.CommonFailureKeys;
import org.innovateuk.ifs.commons.error.Error;
import org.innovateuk.ifs.commons.service.ServiceFailure;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.invite.constant.InviteStatus;
import org.innovateuk.ifs.invite.domain.ApplicationInvite;
import org.innovateuk.ifs.invite.domain.ProjectInvite;
import org.innovateuk.ifs.invite.domain.RoleInvite;
import org.innovateuk.ifs.invite.mapper.RoleInviteMapper;
import org.innovateuk.ifs.invite.repository.ApplicationInviteRepository;
import org.innovateuk.ifs.invite.repository.InviteProjectRepository;
import org.innovateuk.ifs.invite.repository.InviteRoleRepository;
import org.innovateuk.ifs.invite.resource.ExternalInviteResource;
import org.innovateuk.ifs.invite.resource.RoleInvitePageResource;
import org.innovateuk.ifs.invite.resource.RoleInviteResource;
import org.innovateuk.ifs.notifications.resource.ExternalUserNotificationTarget;
import org.innovateuk.ifs.notifications.resource.NotificationTarget;
import org.innovateuk.ifs.project.transactional.EmailService;
import org.innovateuk.ifs.security.LoggedInUserSupplier;
import org.innovateuk.ifs.transactional.BaseTransactionalService;
import org.innovateuk.ifs.user.domain.Role;
import org.innovateuk.ifs.user.mapper.RoleMapper;
import org.innovateuk.ifs.user.resource.RoleResource;
import org.innovateuk.ifs.user.resource.SearchCategory;
import org.innovateuk.ifs.user.resource.UserResource;
import org.innovateuk.ifs.user.resource.UserRoleType;
import org.innovateuk.ifs.userorganisation.domain.UserOrganisation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.innovateuk.ifs.commons.error.CommonErrors.notFoundError;
import static org.innovateuk.ifs.commons.error.CommonFailureKeys.*;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceFailure;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.invite.constant.InviteStatus.CREATED;
import static org.innovateuk.ifs.invite.constant.InviteStatus.SENT;
import static org.innovateuk.ifs.invite.domain.Invite.generateInviteHash;
import static org.innovateuk.ifs.util.CollectionFunctions.simpleMap;
import static org.innovateuk.ifs.util.CollectionFunctions.simpleMapSet;
import static org.innovateuk.ifs.util.EntityLookupCallbacks.find;
/**
* Transactional and secured service implementation providing operations around invites for users.
*/
@Service
public class InviteUserServiceImpl extends BaseTransactionalService implements InviteUserService {
@Autowired
private InviteRoleRepository inviteRoleRepository;
@Autowired
private InviteProjectRepository inviteProjectRepository;
@Autowired
private ApplicationInviteRepository applicationInviteRepository;
@Autowired
private RoleInviteMapper roleInviteMapper;
@Autowired
private EmailService emailService;
@Autowired
private LoggedInUserSupplier loggedInUserSupplier;
@Autowired
private RoleMapper roleMapper;
private static final Log LOG = LogFactory.getLog(InviteUserServiceImpl.class);
@Value("${ifs.web.baseURL}")
private String webBaseUrl;
public static final String WEB_CONTEXT = "/management/registration";
enum Notifications {
INVITE_INTERNAL_USER
}
private static final String DEFAULT_INTERNAL_USER_EMAIL_DOMAIN = "innovateuk.gov.uk";
@Value("${ifs.system.internal.user.email.domain}")
private String internalUserEmailDomain;
@Override
@Transactional
public ServiceResult<Void> saveUserInvite(UserResource invitedUser, UserRoleType adminRoleType) {
return validateInvite(invitedUser, adminRoleType)
.andOnSuccess(() -> validateInternalUserRole(adminRoleType))
.andOnSuccess(() -> validateEmail(invitedUser.getEmail()))
.andOnSuccess(() -> validateUserEmailAvailable(invitedUser))
.andOnSuccess(() -> validateUserNotAlreadyInvited(invitedUser))
.andOnSuccess(() -> getRole(adminRoleType))
.andOnSuccess(role -> saveInvite(invitedUser, role))
.andOnSuccess(this::inviteInternalUser);
}
private ServiceResult<Void> validateInvite(UserResource invitedUser, UserRoleType adminRoleType) {
if (StringUtils.isEmpty(invitedUser.getEmail()) || StringUtils.isEmpty(invitedUser.getFirstName())
|| StringUtils.isEmpty(invitedUser.getLastName()) || adminRoleType == null){
return serviceFailure(USER_ROLE_INVITE_INVALID);
}
return serviceSuccess();
}
private ServiceResult<Void> validateInternalUserRole(UserRoleType userRoleType) {
return UserRoleType.internalRoles().stream().anyMatch(internalRole -> internalRole.equals(userRoleType))?
serviceSuccess() : serviceFailure(NOT_AN_INTERNAL_USER_ROLE);
}
private ServiceResult<Void> validateEmail(String email) {
internalUserEmailDomain = StringUtils.defaultIfBlank(internalUserEmailDomain, DEFAULT_INTERNAL_USER_EMAIL_DOMAIN);
String domain = StringUtils.substringAfter(email, "@");
if (!internalUserEmailDomain.equalsIgnoreCase(domain)) {
return serviceFailure(USER_ROLE_INVITE_INVALID_EMAIL);
}
return serviceSuccess();
}
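    // Illustration (not part of the original source): with the default domain "innovateuk.gov.uk",
    // "jane.doe@innovateuk.gov.uk" passes this check, while "jane.doe@example.com" fails with
    // USER_ROLE_INVITE_INVALID_EMAIL because only the part after '@' is compared, case-insensitively.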
private ServiceResult<Void> validateUserEmailAvailable(UserResource invitedUser) {
return userRepository.findByEmail(invitedUser.getEmail()).isPresent() ? serviceFailure(USER_ROLE_INVITE_EMAIL_TAKEN) : serviceSuccess() ;
}
private ServiceResult<Void> validateUserNotAlreadyInvited(UserResource invitedUser) {
List<RoleInvite> existingInvites = inviteRoleRepository.findByEmail(invitedUser.getEmail());
return existingInvites.isEmpty() ? serviceSuccess() : serviceFailure(USER_ROLE_INVITE_TARGET_USER_ALREADY_INVITED);
}
private ServiceResult<RoleInvite> saveInvite(UserResource invitedUser, Role role) {
RoleInvite roleInvite = new RoleInvite(invitedUser.getFirstName() + " " + invitedUser.getLastName(),
invitedUser.getEmail(),
generateInviteHash(),
role,
CREATED);
RoleInvite invite = inviteRoleRepository.save(roleInvite);
return serviceSuccess(invite);
}
private ServiceResult<Void> inviteInternalUser(RoleInvite roleInvite) {
try {
Map<String, Object> globalArgs = createGlobalArgsForInternalUserInvite(roleInvite);
ServiceResult<Void> inviteContactEmailSendResult = emailService.sendEmail(
Collections.singletonList(createInviteInternalUserNotificationTarget(roleInvite)),
globalArgs,
Notifications.INVITE_INTERNAL_USER);
inviteContactEmailSendResult.handleSuccessOrFailure(
failure -> handleInviteError(roleInvite, failure),
success -> handleInviteSuccess(roleInvite)
);
return inviteContactEmailSendResult;
} catch (IllegalArgumentException e) {
LOG.error(String.format("Role %s lookup failed for user %s", roleInvite.getEmail(), roleInvite.getTarget().getName()));
return ServiceResult.serviceFailure(new Error(CommonFailureKeys.ADMIN_INVALID_USER_ROLE));
}
}
private NotificationTarget createInviteInternalUserNotificationTarget(RoleInvite roleInvite) {
return new ExternalUserNotificationTarget(roleInvite.getName(), roleInvite.getEmail());
}
private Map<String, Object> createGlobalArgsForInternalUserInvite(RoleInvite roleInvite) {
Map<String, Object> globalArguments = new HashMap<>();
RoleResource roleResource = roleMapper.mapIdToResource(roleInvite.getTarget().getId());
globalArguments.put("role", roleResource.getDisplayName());
globalArguments.put("inviteUrl", getInviteUrl(webBaseUrl + WEB_CONTEXT, roleInvite));
return globalArguments;
}
private String getInviteUrl(String baseUrl, RoleInvite inviteResource) {
return String.format("%s/%s/%s", baseUrl, inviteResource.getHash(), "register");
}
@Override
public ServiceResult<RoleInviteResource> getInvite(String inviteHash) {
RoleInvite roleInvite = inviteRoleRepository.getByHash(inviteHash);
return serviceSuccess(roleInviteMapper.mapToResource(roleInvite));
}
@Override
public ServiceResult<Boolean> checkExistingUser(String inviteHash) {
return getByHash(inviteHash)
.andOnSuccessReturn(i -> userRepository.findByEmail(i.getEmail()))
.andOnSuccess(u -> serviceSuccess(u.isPresent()));
}
private ServiceResult<RoleInvite> getByHash(String hash) {
return find(inviteRoleRepository.getByHash(hash), notFoundError(RoleInvite.class, hash));
}
private ServiceResult<Boolean> handleInviteError(RoleInvite i, ServiceFailure failure) {
LOG.error(String.format("Invite failed %s, %s, %s (error count: %s)", i.getId(), i.getEmail(), i.getTarget().getName(), failure.getErrors().size()));
List<Error> errors = failure.getErrors();
return serviceFailure(errors);
}
private boolean handleInviteSuccess(RoleInvite roleInvite) {
inviteRoleRepository.save(roleInvite.send(loggedInUserSupplier.get(), ZonedDateTime.now()));
return true;
}
@Override
public ServiceResult<RoleInvitePageResource> findPendingInternalUserInvites(Pageable pageable) {
Page<RoleInvite> pagedResult = inviteRoleRepository.findByStatus(InviteStatus.SENT, pageable);
List<RoleInviteResource> roleInviteResources = simpleMap(pagedResult.getContent(), roleInvite -> roleInviteMapper.mapToResource(roleInvite));
return serviceSuccess(new RoleInvitePageResource(pagedResult.getTotalElements(), pagedResult.getTotalPages(), sortByName(roleInviteResources), pagedResult.getNumber(), pagedResult.getSize()));
}
private List<RoleInviteResource> sortByName(List<RoleInviteResource> roleInviteResources) {
return roleInviteResources.stream().sorted(Comparator.comparing(roleInviteResource -> roleInviteResource.getName().toUpperCase())).collect(Collectors.toList());
}
@Override
public ServiceResult<List<ExternalInviteResource>> findExternalInvites(String searchString, SearchCategory searchCategory) {
return validateSearchString(searchString)
.andOnSuccess(() ->
find(() -> findApplicationInvitesBySearchCriteria(searchString, searchCategory), () -> findProjectInvitesBySearchCriteria(searchString, searchCategory))
.andOnSuccess((appInvites, prjInvites) ->
serviceSuccess(sortByEmail(Stream.concat(
getApplicationInvitesAsExternalInviteResource(appInvites).stream(),
getProjectInvitesAsExternalInviteResource(prjInvites).stream()).collect(Collectors.toList())))
));
}
private ServiceResult<Void> validateSearchString(String searchString) {
searchString = StringUtils.trim(searchString);
if (StringUtils.isEmpty(searchString) || StringUtils.length(searchString) < 5) {
return serviceFailure(CommonFailureKeys.GENERAL_INVALID_ARGUMENT);
} else {
return serviceSuccess();
}
}
private ServiceResult<List<ApplicationInvite>> findApplicationInvitesBySearchCriteria(String searchString, SearchCategory searchCategory ) {
List<ApplicationInvite> applicationInvites;
switch (searchCategory) {
case NAME:
applicationInvites = applicationInviteRepository.findByNameLikeAndStatusIn(searchString, EnumSet.of(CREATED, SENT));
break;
case ORGANISATION_NAME:
applicationInvites = applicationInviteRepository.findByInviteOrganisationOrganisationNameLikeAndStatusIn(searchString, EnumSet.of(CREATED, SENT));
break;
case EMAIL:
default:
applicationInvites = applicationInviteRepository.findByEmailLikeAndStatusIn(searchString, EnumSet.of(CREATED, SENT));
break;
}
return serviceSuccess(applicationInvites);
}
private ServiceResult<List<ProjectInvite>> findProjectInvitesBySearchCriteria(String searchString, SearchCategory searchCategory ) {
List<ProjectInvite> projectInvites;
switch (searchCategory) {
case NAME:
projectInvites = inviteProjectRepository.findByNameLikeAndStatusIn(searchString, EnumSet.of(CREATED, SENT));
break;
case ORGANISATION_NAME:
projectInvites = inviteProjectRepository.findByOrganisationNameLikeAndStatusIn(searchString, EnumSet.of(CREATED, SENT));
break;
case EMAIL:
default:
projectInvites = inviteProjectRepository.findByEmailLikeAndStatusIn(searchString, EnumSet.of(CREATED, SENT));
break;
}
return serviceSuccess(projectInvites);
}
private List<ExternalInviteResource> getApplicationInvitesAsExternalInviteResource(List<ApplicationInvite> appInvites){
return appInvites.stream().map(appInvite -> new ExternalInviteResource(
appInvite.getName(),
appInvite.getInviteOrganisation().getOrganisation() != null ? appInvite.getInviteOrganisation().getOrganisation().getName() : appInvite.getInviteOrganisation().getOrganisationName(), // organisation may not exist yet (new collaborator)
appInvite.getInviteOrganisation().getOrganisation() != null ? appInvite.getInviteOrganisation().getOrganisation().getId().toString() : "new",
appInvite.getEmail(),
appInvite.getTarget().getId(),
appInvite.getStatus())).collect(Collectors.toList());
}
private List<ExternalInviteResource> getProjectInvitesAsExternalInviteResource(List<ProjectInvite> prjInvites){
return prjInvites.stream().map(projectInvite ->
new ExternalInviteResource(
projectInvite.getName(),
projectInvite.getOrganisation().getName(),
projectInvite.getOrganisation().getId().toString(),
projectInvite.getEmail(),
projectInvite.getTarget().getApplication().getId(),
projectInvite.getStatus())).collect(Collectors.toList());
}
private List<ExternalInviteResource> sortByEmail(List<ExternalInviteResource> extInviteResources) {
return extInviteResources.stream().sorted(Comparator.comparing(extInviteResource -> extInviteResource.getEmail().toUpperCase())).collect(Collectors.toList());
}
}
|
package org.innovateuk.ifs.project.status.transactional;
import org.innovateuk.ifs.BaseServiceUnitTest;
import org.innovateuk.ifs.application.domain.Application;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.competition.domain.Competition;
import org.innovateuk.ifs.finance.domain.ApplicationFinance;
import org.innovateuk.ifs.finance.resource.ApplicationFinanceResource;
import org.innovateuk.ifs.project.bankdetails.domain.BankDetails;
import org.innovateuk.ifs.project.builder.PartnerOrganisationBuilder;
import org.innovateuk.ifs.project.constant.ProjectActivityStates;
import org.innovateuk.ifs.project.domain.MonitoringOfficer;
import org.innovateuk.ifs.project.domain.PartnerOrganisation;
import org.innovateuk.ifs.project.domain.Project;
import org.innovateuk.ifs.project.domain.ProjectUser;
import org.innovateuk.ifs.project.finance.domain.SpendProfile;
import org.innovateuk.ifs.project.resource.ApprovalType;
import org.innovateuk.ifs.project.resource.ProjectUserResource;
import org.innovateuk.ifs.project.status.resource.CompetitionProjectsStatusResource;
import org.innovateuk.ifs.project.status.resource.ProjectStatusResource;
import org.innovateuk.ifs.project.transactional.ProjectStatusService;
import org.innovateuk.ifs.project.transactional.ProjectStatusServiceImpl;
import org.innovateuk.ifs.user.domain.Organisation;
import org.innovateuk.ifs.user.domain.OrganisationType;
import org.innovateuk.ifs.user.domain.ProcessRole;
import org.innovateuk.ifs.user.domain.Role;
import org.innovateuk.ifs.user.domain.User;
import org.innovateuk.ifs.user.resource.OrganisationTypeEnum;
import org.innovateuk.ifs.user.resource.UserRoleType;
import org.junit.Test;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static java.util.Arrays.asList;
import static org.innovateuk.ifs.application.builder.ApplicationBuilder.newApplication;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.competition.builder.CompetitionBuilder.newCompetition;
import static org.innovateuk.ifs.finance.builder.ApplicationFinanceBuilder.newApplicationFinance;
import static org.innovateuk.ifs.finance.builder.ApplicationFinanceResourceBuilder.newApplicationFinanceResource;
import static org.innovateuk.ifs.invite.domain.ProjectParticipantRole.PROJECT_FINANCE_CONTACT;
import static org.innovateuk.ifs.invite.domain.ProjectParticipantRole.PROJECT_PARTNER;
import static org.innovateuk.ifs.project.bankdetails.builder.BankDetailsBuilder.newBankDetails;
import static org.innovateuk.ifs.project.builder.MonitoringOfficerBuilder.newMonitoringOfficer;
import static org.innovateuk.ifs.project.builder.PartnerOrganisationBuilder.newPartnerOrganisation;
import static org.innovateuk.ifs.project.builder.ProjectBuilder.newProject;
import static org.innovateuk.ifs.project.builder.ProjectUserBuilder.newProjectUser;
import static org.innovateuk.ifs.project.builder.ProjectUserResourceBuilder.newProjectUserResource;
import static org.innovateuk.ifs.project.builder.SpendProfileBuilder.newSpendProfile;
import static org.innovateuk.ifs.project.constant.ProjectActivityStates.ACTION_REQUIRED;
import static org.innovateuk.ifs.project.constant.ProjectActivityStates.COMPLETE;
import static org.innovateuk.ifs.project.constant.ProjectActivityStates.NOT_STARTED;
import static org.innovateuk.ifs.project.constant.ProjectActivityStates.PENDING;
import static org.innovateuk.ifs.project.constant.ProjectActivityStates.REJECTED;
import static org.innovateuk.ifs.project.resource.ApprovalType.APPROVED;
import static org.innovateuk.ifs.user.builder.OrganisationBuilder.newOrganisation;
import static org.innovateuk.ifs.user.builder.OrganisationTypeBuilder.newOrganisationType;
import static org.innovateuk.ifs.user.builder.ProcessRoleBuilder.newProcessRole;
import static org.innovateuk.ifs.user.builder.RoleBuilder.newRole;
import static org.innovateuk.ifs.user.builder.UserBuilder.newUser;
import static org.innovateuk.ifs.user.resource.UserRoleType.APPLICANT;
import static org.innovateuk.ifs.user.resource.UserRoleType.COMP_ADMIN;
import static org.innovateuk.ifs.user.resource.UserRoleType.LEADAPPLICANT;
import static org.innovateuk.ifs.user.resource.UserRoleType.PARTNER;
import static org.innovateuk.ifs.util.MapFunctions.asMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.powermock.api.mockito.PowerMockito.when;
public class ProjectStatusServiceImplTest extends BaseServiceUnitTest<ProjectStatusService> {
@Override
protected ProjectStatusService supplyServiceUnderTest() {
return new ProjectStatusServiceImpl();
}
@Test
public void testGetCompetitionStatus(){
Long competitionId = 123L;
Competition competition = newCompetition().withId(competitionId).build();
/**
* Create partner and lead applicant role
*/
Role leadApplicantRole = newRole().withType(LEADAPPLICANT).build();
Role applicantRole = newRole().withType(APPLICANT).build();
Role partnerRole = newRole().withType(PARTNER).build();
OrganisationType businessOrganisationType = newOrganisationType().withOrganisationType(OrganisationTypeEnum.BUSINESS).build();
OrganisationType academicOrganisationType = newOrganisationType().withOrganisationType(OrganisationTypeEnum.ACADEMIC).build();
List<Organisation> organisations = newOrganisation().withOrganisationType(businessOrganisationType).build(2);
organisations.add(newOrganisation().withOrganisationType(academicOrganisationType).build());
/**
* Create 3 users, one for each organisation
*/
List<User> users = newUser().build(3);
/**
* Create 3 applications, one for each org, with process roles
*/
List<ProcessRole> applicantProcessRoles = newProcessRole().withUser(users.get(0), users.get(1), users.get(2)).withRole(leadApplicantRole, applicantRole, applicantRole).withOrganisation(organisations.get(0), organisations.get(1), organisations.get(2)).build(3);
List<Application> applications = newApplication().withCompetition(competition).withProcessRoles(applicantProcessRoles.get(0), applicantProcessRoles.get(1), applicantProcessRoles.get(2)).build(3);
/**
* Create 3 project with 3 Project Users from 3 different organisations with associated applications
*/
List<ProjectUser> projectUsers = newProjectUser().withRole(PROJECT_PARTNER).withUser(users.get(0), users.get(1), users.get(2)).withOrganisation(organisations.get(0), organisations.get(1), organisations.get(2)).build(3);
List<Project> projects = newProject().withApplication(applications.get(0), applications.get(1), applications.get(2)).withProjectUsers(projectUsers).build(3);
/**
* Create 3 bank detail records, one for each organisation
*/
List<BankDetails> bankDetails = newBankDetails().withOrganisation(organisations.get(0), organisations.get(1), organisations.get(2)).build(3);
/**
* Build spend profile object for use with one of the partners
*/
SpendProfile spendProfile = newSpendProfile().build();
when(competitionRepositoryMock.findOne(competitionId)).thenReturn(competition);
when(projectRepositoryMock.findOne(projects.get(0).getId())).thenReturn(projects.get(0));
when(projectRepositoryMock.findOne(projects.get(1).getId())).thenReturn(projects.get(1));
when(projectRepositoryMock.findOne(projects.get(2).getId())).thenReturn(projects.get(2));
when(projectRepositoryMock.findByApplicationCompetitionId(competitionId)).thenReturn(projects);
when(projectUserRepositoryMock.findByProjectId(projects.get(0).getId())).thenReturn(projectUsers);
when(projectUserRepositoryMock.findByProjectId(projects.get(1).getId())).thenReturn(projectUsers);
when(projectUserRepositoryMock.findByProjectId(projects.get(2).getId())).thenReturn(projectUsers);
when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(projects.get(0).getId(), organisations.get(0).getId())).thenReturn(bankDetails.get(0));
when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(projects.get(1).getId(), organisations.get(1).getId())).thenReturn(bankDetails.get(1));
when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(projects.get(2).getId(), organisations.get(2).getId())).thenReturn(bankDetails.get(2));
when(spendProfileRepositoryMock.findOneByProjectIdAndOrganisationId(projects.get(0).getId(), organisations.get(0).getId())).thenReturn(Optional.of(spendProfile));
when(spendProfileRepositoryMock.findOneByProjectIdAndOrganisationId(projects.get(1).getId(), organisations.get(1).getId())).thenReturn(Optional.of(spendProfile));
when(spendProfileRepositoryMock.findOneByProjectIdAndOrganisationId(projects.get(2).getId(), organisations.get(2).getId())).thenReturn(Optional.of(spendProfile));
MonitoringOfficer monitoringOfficerInDB = newMonitoringOfficer().build();
when(monitoringOfficerRepositoryMock.findOneByProjectId(projects.get(0).getId())).thenReturn(monitoringOfficerInDB);
when(monitoringOfficerRepositoryMock.findOneByProjectId(projects.get(1).getId())).thenReturn(monitoringOfficerInDB);
when(monitoringOfficerRepositoryMock.findOneByProjectId(projects.get(2).getId())).thenReturn(monitoringOfficerInDB);
when(organisationRepositoryMock.findOne(organisations.get(0).getId())).thenReturn(organisations.get(0));
when(organisationRepositoryMock.findOne(organisations.get(1).getId())).thenReturn(organisations.get(1));
when(organisationRepositoryMock.findOne(organisations.get(2).getId())).thenReturn(organisations.get(2));
List<ApplicationFinance> applicationFinances = newApplicationFinance().build(3);
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(0).getApplication().getId(), organisations.get(0).getId())).thenReturn(applicationFinances.get(0));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(0).getApplication().getId(), organisations.get(1).getId())).thenReturn(applicationFinances.get(1));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(0).getApplication().getId(), organisations.get(2).getId())).thenReturn(applicationFinances.get(2));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(1).getApplication().getId(), organisations.get(0).getId())).thenReturn(applicationFinances.get(0));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(1).getApplication().getId(), organisations.get(1).getId())).thenReturn(applicationFinances.get(1));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(1).getApplication().getId(), organisations.get(2).getId())).thenReturn(applicationFinances.get(2));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(2).getApplication().getId(), organisations.get(0).getId())).thenReturn(applicationFinances.get(0));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(2).getApplication().getId(), organisations.get(1).getId())).thenReturn(applicationFinances.get(1));
when(applicationFinanceRepositoryMock.findByApplicationIdAndOrganisationId(projects.get(2).getApplication().getId(), organisations.get(2).getId())).thenReturn(applicationFinances.get(2));
ApplicationFinanceResource applicationFinanceResource0 = newApplicationFinanceResource().withGrantClaimPercentage(20).withOrganisation(organisations.get(0).getId()).build();
when(applicationFinanceMapperMock.mapToResource(applicationFinances.get(0))).thenReturn(applicationFinanceResource0);
ApplicationFinanceResource applicationFinanceResource1 = newApplicationFinanceResource().withGrantClaimPercentage(20).withOrganisation(organisations.get(1).getId()).build();
when(applicationFinanceMapperMock.mapToResource(applicationFinances.get(1))).thenReturn(applicationFinanceResource1);
ApplicationFinanceResource applicationFinanceResource2 = newApplicationFinanceResource().withGrantClaimPercentage(20).withOrganisation(organisations.get(2).getId()).build();
when(applicationFinanceMapperMock.mapToResource(applicationFinances.get(2))).thenReturn(applicationFinanceResource2);
List<ProjectUserResource> puResource = newProjectUserResource().withProject(projects.get(0).getId()).withOrganisation(organisations.get(0).getId(), organisations.get(1).getId(), organisations.get(2).getId()).withRole(partnerRole.getId()).withRoleName(PROJECT_PARTNER.getName()).build(3);
when(projectUserMapperMock.mapToResource(projectUsers.get(0))).thenReturn(puResource.get(0));
when(projectUserMapperMock.mapToResource(projectUsers.get(1))).thenReturn(puResource.get(1));
when(projectUserMapperMock.mapToResource(projectUsers.get(2))).thenReturn(puResource.get(2));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(0).getId(), projects.get(0).getApplication().getId(), organisations.get(0).getId())).thenReturn(serviceSuccess(true));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(0).getId(), projects.get(0).getApplication().getId(), organisations.get(1).getId())).thenReturn(serviceSuccess(false));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(0).getId(), projects.get(0).getApplication().getId(), organisations.get(2).getId())).thenReturn(serviceSuccess(false));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(1).getId(), projects.get(1).getApplication().getId(), organisations.get(0).getId())).thenReturn(serviceSuccess(true));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(1).getId(), projects.get(1).getApplication().getId(), organisations.get(1).getId())).thenReturn(serviceSuccess(false));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(1).getId(), projects.get(1).getApplication().getId(), organisations.get(2).getId())).thenReturn(serviceSuccess(false));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(2).getId(), projects.get(2).getApplication().getId(), organisations.get(0).getId())).thenReturn(serviceSuccess(true));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(2).getId(), projects.get(2).getApplication().getId(), organisations.get(1).getId())).thenReturn(serviceSuccess(false));
when(financeRowServiceMock.organisationSeeksFunding(projects.get(2).getId(), projects.get(2).getApplication().getId(), organisations.get(2).getId())).thenReturn(serviceSuccess(false));
when(projectUsersHelperMock.getPartnerOrganisations(projects.get(0).getId())).thenReturn(organisations);
when(projectUsersHelperMock.getPartnerOrganisations(projects.get(1).getId())).thenReturn(organisations);
when(projectUsersHelperMock.getPartnerOrganisations(projects.get(2).getId())).thenReturn(organisations);
when(projectFinanceServiceMock.getSpendProfileStatusByProjectId(projects.get(0).getId())).thenReturn(serviceSuccess(ApprovalType.EMPTY));
when(projectFinanceServiceMock.getSpendProfileStatusByProjectId(projects.get(1).getId())).thenReturn(serviceSuccess(ApprovalType.EMPTY));
when(projectFinanceServiceMock.getSpendProfileStatusByProjectId(projects.get(2).getId())).thenReturn(serviceSuccess(ApprovalType.EMPTY));
ServiceResult<CompetitionProjectsStatusResource> result = service.getCompetitionStatus(competitionId);
assertTrue(result.isSuccess());
CompetitionProjectsStatusResource competitionProjectsStatusResource = result.getSuccessObject();
assertEquals(3, competitionProjectsStatusResource.getProjectStatusResources().size());
assertEquals(new Integer(3), competitionProjectsStatusResource.getProjectStatusResources().get(0).getNumberOfPartners());
assertEquals(new Integer(3), competitionProjectsStatusResource.getProjectStatusResources().get(1).getNumberOfPartners());
assertEquals(new Integer(3), competitionProjectsStatusResource.getProjectStatusResources().get(2).getNumberOfPartners());
}
@Test
public void getProjectStatusResourceByProject() {
Long projectId = 2345L;
Project project = createProjectStatusResource(projectId, ApprovalType.EMPTY, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null);
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(PENDING, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, NOT_STARTED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusFinanceContactComplete() {
Long projectId = 2345L;
Long organisationId = 123L;
Project project = createProjectStatusResource(projectId, ApprovalType.EMPTY, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null);
Organisation o = newOrganisation().withId(organisationId).build();
List<PartnerOrganisation> po = asList(newPartnerOrganisation().withOrganisation(o).build());
project.setPartnerOrganisations(po);
Optional<ProjectUser> pu = Optional.of(newProjectUser().withRole(PROJECT_FINANCE_CONTACT).build());
when(projectUsersHelperMock.getFinanceContact(projectId, organisationId)).thenReturn(pu);
when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true);
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(COMPLETE, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(PENDING, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(PENDING, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, NOT_STARTED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusFinanceContactIncomplete() {
Long projectId = 2345L;
Long organisationId = 123L;
Project project = createProjectStatusResource(projectId, ApprovalType.EMPTY, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null);
Organisation o = newOrganisation().withId(organisationId).build();
List<PartnerOrganisation> po = asList(newPartnerOrganisation().withOrganisation(o).build());
project.setPartnerOrganisations(po);
when(projectUsersHelperMock.getFinanceContact(projectId, organisationId)).thenReturn(Optional.empty());
when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true);
when(monitoringOfficerRepositoryMock.findOneByProjectId(projectId)).thenReturn(null);
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(PENDING, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(PENDING, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, NOT_STARTED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusResourceByProjectSpendProfileRejected() {
Long projectId = 2345L;
Project project = createProjectStatusResource(projectId, ApprovalType.REJECTED, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null);
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(REJECTED, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(PENDING, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, NOT_STARTED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusResourceByProjectGolPrecursorsCompleteAndGolApproved() {
Long projectId = 2345L;
Project project = createProjectStatusResource(projectId, APPROVED, Boolean.TRUE, Boolean.FALSE, Boolean.FALSE, Boolean.TRUE, LocalDateTime.now());
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(PENDING, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, COMPLETE);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusResourceByProjectGolPrecursorsCompleteAndGolSent() {
Long projectId = 2345L;
Project project = createProjectStatusResource(projectId, APPROVED, Boolean.TRUE, Boolean.FALSE, Boolean.TRUE, Boolean.FALSE, LocalDateTime.now());
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(PENDING, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, PENDING);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusResourceByProjectGolPrecursorsCompleteAndSignedGolSubmitted() {
Long projectId = 2345L;
Project project = createProjectStatusResource(projectId, APPROVED, Boolean.TRUE, Boolean.TRUE, Boolean.FALSE, Boolean.FALSE, LocalDateTime.now());
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(PENDING, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, ACTION_REQUIRED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusResourceByProjectGolPrecursorsCompleteAndGolReadyToSend() {
Long projectId = 2345L;
Project project = createProjectStatusResource(projectId, APPROVED, Boolean.TRUE, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, LocalDateTime.now());
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(PENDING, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, ACTION_REQUIRED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusResourceByProjectOtherDocumentsRejected() {
Long projectId = 2345L;
Project project = createProjectStatusResource(projectId, APPROVED, Boolean.FALSE, Boolean.FALSE, Boolean.TRUE, Boolean.FALSE, LocalDateTime.now());
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(PENDING, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(REJECTED, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(PENDING, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, NOT_STARTED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
when(projectRepositoryMock.findOne(projectId)).thenReturn(null);
ServiceResult<ProjectStatusResource> resultFailure = service.getProjectStatusByProjectId(projectId);
assertTrue(resultFailure.isFailure());
}
@Test
public void getProjectStatusBankDetailsCompleteNotApproved() {
Long projectId = 2345L;
Long organisationId = 123L;
Project project = createProjectStatusResource(projectId, ApprovalType.EMPTY, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null);
Organisation o = newOrganisation().withId(organisationId).build();
List<PartnerOrganisation> po = asList(newPartnerOrganisation().withOrganisation(o).build());
project.setPartnerOrganisations(po);
Optional<ProjectUser> pu = Optional.of(newProjectUser().withRole(PROJECT_FINANCE_CONTACT).build());
when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(projectId, organisationId)).thenReturn(new BankDetails());
when(projectUsersHelperMock.getFinanceContact(projectId, organisationId)).thenReturn(pu);
when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true);
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(COMPLETE, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(PENDING, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, NOT_STARTED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
}
@Test
public void getProjectStatusBankDetailsApproved() {
Long projectId = 2345L;
Long organisationId = 123L;
Project project = createProjectStatusResource(projectId, ApprovalType.EMPTY, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null);
Organisation o = newOrganisation().withId(organisationId).build();
List<PartnerOrganisation> po = asList(newPartnerOrganisation().withOrganisation(o).build());
project.setPartnerOrganisations(po);
Optional<ProjectUser> pu = Optional.of(newProjectUser().withRole(PROJECT_FINANCE_CONTACT).build());
when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(projectId, organisationId)).thenReturn(newBankDetails().withApproval(true).build());
when(projectUsersHelperMock.getFinanceContact(projectId, organisationId)).thenReturn(pu);
when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true);
when(financeRowServiceMock.organisationSeeksFunding(any(Long.class), any(Long.class), any(Long.class))).thenReturn(serviceSuccess(Boolean.TRUE));
ServiceResult<ProjectStatusResource> result = service.getProjectStatusByProjectId(projectId);
ProjectStatusResource returnedProjectStatusResource = result.getSuccessObject();
assertTrue(result.isSuccess());
assertEquals(project.getName(), returnedProjectStatusResource.getProjectTitle());
assertEquals(project.getId(), returnedProjectStatusResource.getProjectNumber());
assertEquals(Integer.valueOf(1), returnedProjectStatusResource.getNumberOfPartners());
assertEquals(COMPLETE, returnedProjectStatusResource.getProjectDetailsStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getBankDetailsStatus());
assertEquals(ACTION_REQUIRED, returnedProjectStatusResource.getFinanceChecksStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getSpendProfileStatus());
assertEquals(COMPLETE, returnedProjectStatusResource.getMonitoringOfficerStatus());
assertEquals(PENDING, returnedProjectStatusResource.getOtherDocumentsStatus());
assertEquals(NOT_STARTED, returnedProjectStatusResource.getGrantOfferLetterStatus());
Map<UserRoleType, ProjectActivityStates> roles = asMap(COMP_ADMIN, NOT_STARTED);
assertTrue(roles.equals(returnedProjectStatusResource.getRoleSpecificGrantOfferLetterState()));
}
private Project createProjectStatusResource(Long projectId, ApprovalType spendProfileStatus, Boolean otherDocsApproved, Boolean golReadyToApprove, Boolean golIsSent, Boolean golIsApproved, LocalDateTime otherDocsSubmittedDate) {
Role role = newRole().build();
ProcessRole processRole = newProcessRole().withRole(role).build();
Application application = newApplication().withProcessRoles(processRole).build();
Organisation organisation = newOrganisation().build();
PartnerOrganisation partnerOrganisation = PartnerOrganisationBuilder.newPartnerOrganisation().withOrganisation(organisation).build();
Project project;
if(otherDocsSubmittedDate != null)
project = newProject().withId(projectId).withApplication(application).withPartnerOrganisations(asList(partnerOrganisation)).withOtherDocumentsApproved(otherDocsApproved).withOtherDocumentsSubmittedDate(otherDocsSubmittedDate).build();
else
project = newProject().withId(projectId).withApplication(application).withPartnerOrganisations(asList(partnerOrganisation)).withOtherDocumentsApproved(otherDocsApproved).build();
BankDetails bankDetail = newBankDetails().withProject(project).build();
SpendProfile spendprofile = newSpendProfile().withOrganisation(organisation).build();
MonitoringOfficer monitoringOfficer = newMonitoringOfficer().build();
when(projectRepositoryMock.findOne(projectId)).thenReturn(project);
when(projectFinanceServiceMock.getSpendProfileStatusByProjectId(projectId)).thenReturn(serviceSuccess(ApprovalType.EMPTY));
when(projectUsersHelperMock.getPartnerOrganisations(project.getId())).thenReturn(asList(organisation));
when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(project.getId(), organisation.getId())).thenReturn(bankDetail);
when(spendProfileRepositoryMock.findOneByProjectIdAndOrganisationId(project.getId(), organisation.getId())).thenReturn(Optional.of(spendprofile));
when(monitoringOfficerRepositoryMock.findOneByProjectId(project.getId())).thenReturn(monitoringOfficer);
when(projectFinanceServiceMock.getSpendProfileStatusByProjectId(projectId)).thenReturn(serviceSuccess(spendProfileStatus));
when(golWorkflowHandlerMock.isApproved(project)).thenReturn(golIsApproved);
if(!golIsApproved) {
when(golWorkflowHandlerMock.isReadyToApprove(project)).thenReturn(golReadyToApprove);
if (!golReadyToApprove)
when(golWorkflowHandlerMock.isSent(project)).thenReturn(golIsSent);
}
return project;
}
}
|
package com.jenjinstudios.world.server.message;
import com.jenjinstudios.core.io.Message;
import com.jenjinstudios.core.io.MessageRegistry;
import com.jenjinstudios.world.server.WorldClientHandler;
import com.jenjinstudios.world.server.WorldServer;
import org.testng.annotations.Test;
import static org.mockito.Mockito.*;
/**
* @author Caleb Brinkman
*/
public class ExecutableWorldChecksumRequestTest
{
@SuppressWarnings("unchecked")
@Test
public void testExecuteMessage() {
Message message = mock(Message.class);
Message response = MessageRegistry.getInstance().createMessage("WorldChecksumResponse");
byte[] checksum = {1, 2, 3, 4, 5};
response.setArgument("checksum", checksum);
WorldClientHandler clientHandler = mock(WorldClientHandler.class);
WorldServer server = mock(WorldServer.class);
WorldServerMessageFactory messageFactory = mock(WorldServerMessageFactory.class);
when(messageFactory.generateWorldChecksumResponse(any())).thenReturn(response);
when(server.getWorldFileChecksum()).thenReturn(checksum);
when(clientHandler.getMessageFactory()).thenReturn(messageFactory);
when(clientHandler.getServer()).thenReturn(server);
ExecutableWorldChecksumRequest exec = new ExecutableWorldChecksumRequest(clientHandler, message);
exec.runImmediate();
exec.runDelayed();
verify(clientHandler).queueOutgoingMessage(response);
}
}
|
package de.gurkenlabs.litiengine.graphics;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Point;
import java.awt.Shape;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import de.gurkenlabs.litiengine.Game;
import de.gurkenlabs.litiengine.environment.Environment;
import de.gurkenlabs.litiengine.environment.tilemap.IMap;
import de.gurkenlabs.litiengine.environment.tilemap.ITile;
import de.gurkenlabs.litiengine.environment.tilemap.ITileOffset;
import de.gurkenlabs.litiengine.environment.tilemap.ITileset;
import de.gurkenlabs.litiengine.environment.tilemap.MapUtilities;
import de.gurkenlabs.litiengine.util.Imaging;
import de.gurkenlabs.litiengine.util.MathUtilities;
import de.gurkenlabs.litiengine.util.geom.GeometricUtilities;
public abstract class ColorLayer implements IRenderable {
private final Environment environment;
private final Image[][] tiles;
private Color color;
protected ColorLayer(Environment env, final Color color) {
this.environment = env;
this.color = color;
this.tiles = new Image[env.getMap().getWidth()][env.getMap().getHeight()];
this.updateSection(this.environment.getMap().getBounds());
}
@Override
public void render(Graphics2D g) {
final Rectangle2D viewport = Game.world().camera().getViewport();
final IMap map = this.getEnvironment().getMap();
// draw the tile on the layer image
for (int x = 0; x < map.getWidth(); x++) {
for (int y = 0; y < map.getHeight(); y++) {
Rectangle2D tileBounds = map.getOrientation().getShape(x, y, map).getBounds2D();
ITile tile = map.getTileLayers().get(0).getTile(x, y);
if (!viewport.intersects(tileBounds)) {
continue;
}
final double offsetX = -(viewport.getX());
final double offsetY = -(viewport.getY());
int tileOffsetX = 0;
int tileOffsetY = 0;
ITileset tileset = MapUtilities.findTileSet(map, tile);
if (tileset != null) {
final ITileOffset tileOffset = tileset.getTileOffset();
if (tileOffset != null) {
tileOffsetX = tileOffset.getX();
tileOffsetY = tileOffset.getY();
}
}
ImageRenderer.render(g, tiles[x][y], offsetX + tileBounds.getX() + tileOffsetX, offsetY + tileBounds.getY() + tileOffsetY);
}
}
}
public Color getColor() {
return this.color;
}
public void setAlpha(int ambientAlpha) {
this.setColor(new Color(this.getColor().getRed(), this.getColor().getGreen(), this.getColor().getBlue(), MathUtilities.clamp(ambientAlpha, 0, 255)));
this.updateSection(this.environment.getMap().getBounds());
}
public void setColor(final Color color) {
this.color = color;
this.updateSection(this.environment.getMap().getBounds());
}
public void updateSection(Rectangle2D section) {
if (this.getColor() == null) {
return;
}
final IMap map = this.getEnvironment().getMap();
final Rectangle2D tileSection = MapUtilities.getTileBoundingBox(map, section);
if (tileSection == null || (tileSection.getWidth() == 0 && tileSection.getHeight() == 0)) {
return;
}
final BufferedImage img = Imaging.getCompatibleImage((int) tileSection.getWidth(), (int) tileSection.getHeight());
final Graphics2D g = img.createGraphics();
this.renderSection(g, tileSection);
g.dispose();
this.setTiles(img, tileSection);
}
private void setTiles(BufferedImage img, Rectangle2D section) {
if (img == null) {
return;
}
final IMap map = this.getEnvironment().getMap();
final Point startTile = map.getOrientation().getTile(section.getX(), section.getY(), map);
final Point endTile = map.getOrientation().getTile(section.getMaxX(), section.getMaxY(), map);
final int startX = MathUtilities.clamp(startTile.x, 0, Math.min(endTile.x, tiles.length) - 1);
final int startY = MathUtilities.clamp(startTile.y, 0, Math.min(endTile.y, tiles[0].length) - 1);
final int endX = MathUtilities.clamp(endTile.x - 1, 0, tiles.length - 1);
final int endY = MathUtilities.clamp(endTile.y - 1, 0, tiles[0].length - 1);
final Shape startTileShape = map.getOrientation().getShape(startX, startY, map);
for (int x = startX; x <= endX; x++) {
for (int y = startY; y <= endY; y++) {
Shape tile = map.getOrientation().getShape(x, y, map);
Shape translatedTile = GeometricUtilities.translateShape(tile, new Point2D.Double(0, 0));
int subX = MathUtilities.clamp((int) (tile.getBounds().getX() - startTileShape.getBounds().getX()), 0, img.getWidth() - map.getTileWidth());
int subY = MathUtilities.clamp((int) (tile.getBounds().getY() - startTileShape.getBounds().getY()), 0, img.getHeight() - map.getTileHeight());
final BufferedImage smallImage = img.getSubimage(subX, subY, map.getTileWidth(), map.getTileHeight());
final BufferedImage clippedImage = Imaging.getCompatibleImage(smallImage.getWidth(), smallImage.getHeight());
Graphics2D g = clippedImage.createGraphics();
g.clip(translatedTile);
g.drawImage(smallImage, 0, 0, null);
g.dispose();
this.tiles[x][y] = clippedImage;
}
}
}
protected abstract void renderSection(Graphics2D g, Rectangle2D section);
protected Environment getEnvironment() {
return this.environment;
}
}
|
package org.junit.jupiter.migrationsupport.rules;
import static org.junit.platform.commons.meta.API.Usage.Experimental;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.TestExtensionContext;
import org.junit.jupiter.migrationsupport.rules.adapter.AbstractTestRuleAdapter;
import org.junit.jupiter.migrationsupport.rules.member.RuleAnnotatedField;
import org.junit.jupiter.migrationsupport.rules.member.RuleAnnotatedMember;
import org.junit.platform.commons.meta.API;
import org.junit.rules.TestRule;
@API(Experimental)
class TestRuleFieldSupport extends AbstractTestRuleSupport {
TestRuleFieldSupport(Function<RuleAnnotatedMember, AbstractTestRuleAdapter> adapterGenerator,
Class<? extends TestRule> ruleType) {
super(adapterGenerator, ruleType);
}
@Override
protected RuleAnnotatedMember createRuleAnnotatedMember(TestExtensionContext context, Member member) {
return new RuleAnnotatedField(context.getTestInstance(), (Field) member);
}
@Override
protected List<Member> findRuleAnnotatedMembers(Object testInstance) {
return findAnnotatedFields(testInstance, super.getRuleType(), super.getAnnotationType()).collect(
Collectors.toList());
}
private static Stream<Field> findAnnotatedFields(Object instance, Class<?> fieldType,
Class<? extends Annotation> annotationType) {
Field[] fields = instance.getClass().getFields();
// @formatter:off
return Arrays.stream(fields)
.filter(field -> fieldType.isAssignableFrom(field.getType()))
.filter(field -> field.isAnnotationPresent(annotationType));
// @formatter:on
}
}
|
package de.mrapp.android.adapter.view;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
/**
 * An inflater, which allows views that are referenced by a resource id to be
 * inflated.
*
* @author Michael Rapp
*
* @since 1.0.0
*/
public class ViewIdInflater implements ViewInflater {
/**
* The resource id of the view, which should be inflated.
*/
private final int viewId;
/**
	 * Creates a new inflater, which allows views that are referenced by a
	 * resource id to be inflated.
*
* @param viewId
* The resource id of the view, which should be inflated, as an
* {@link Integer} value. The id must correspond to a valid view
* resource
*/
public ViewIdInflater(final int viewId) {
this.viewId = viewId;
}
@Override
public final View inflateView(final Context context, final ViewGroup parent) {
LayoutInflater inflater = (LayoutInflater) context
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
		// Inflate without attaching to the parent, so that the inflated view itself is
		// returned rather than the parent it would otherwise be attached to.
		return inflater.inflate(viewId, parent, false);
}
}
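// A minimal usage sketch (assumption, not part of the original file): an adapter's
// getView() implementation might use the inflater like this, where R.layout.list_item
// is a hypothetical layout resource id.
//
//     ViewInflater inflater = new ViewIdInflater(R.layout.list_item);
//     View itemView = inflater.inflateView(context, parent);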
|
package org.eclipse.kura.core.keystore.util;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.KeyStore;
import java.security.KeyStore.Entry;
import java.security.KeyStore.PrivateKeyEntry;
import java.security.KeyStore.TrustedCertificateEntry;
import java.security.KeyStoreException;
import java.security.PrivateKey;
import java.security.Security;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.security.interfaces.DSAPublicKey;
import java.security.interfaces.ECPublicKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.ECParameterSpec;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.security.auth.x500.X500Principal;
import javax.ws.rs.WebApplicationException;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.openssl.PEMParser;
import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter;
import org.eclipse.kura.KuraException;
import org.eclipse.kura.core.keystore.rest.provider.CsrReadRequest;
import org.eclipse.kura.security.keystore.KeystoreInfo;
import org.eclipse.kura.security.keystore.KeystoreService;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.Filter;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.service.component.ComponentContext;
import org.osgi.util.tracker.ServiceTracker;
import org.osgi.util.tracker.ServiceTrackerCustomizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class KeystoreRemoteService {
private static final Logger logger = LoggerFactory.getLogger(KeystoreRemoteService.class);
    public static final String BEGIN_CERT = "-----BEGIN CERTIFICATE-----";
    public static final String END_CERT = "-----END CERTIFICATE-----";
public static final String LINE_SEPARATOR = System.getProperty("line.separator");
protected Map<String, KeystoreService> keystoreServices = new HashMap<>();
protected BundleContext bundleContext;
private ServiceTrackerCustomizer<KeystoreService, KeystoreService> keystoreServiceTrackerCustomizer;
private ServiceTracker<KeystoreService, KeystoreService> keystoreServiceTracker;
public void activate(ComponentContext componentContext) {
this.bundleContext = componentContext.getBundleContext();
this.keystoreServiceTrackerCustomizer = new KeystoreServiceTrackerCustomizer();
initKeystoreServiceTracking();
}
public void deactivate(ComponentContext componentContext) {
if (this.keystoreServiceTracker != null) {
this.keystoreServiceTracker.close();
}
}
public static TrustedCertificateEntry createCertificateEntry(String certificate) throws CertificateException {
CertificateFactory certFactory = CertificateFactory.getInstance("X.509");
ByteArrayInputStream is = new ByteArrayInputStream(certificate.getBytes());
X509Certificate cert = (X509Certificate) certFactory.generateCertificate(is);
return new TrustedCertificateEntry(cert);
}
public static PrivateKeyEntry createPrivateKey(String privateKey, String publicKey)
throws IOException, GeneralSecurityException {
        // Works with RSA and DSA. EC is not supported, since the certificate is encoded
        // with ECDSA while the corresponding private key is encoded with EC.
        // This causes an error when the PrivateKeyEntry is generated.
Certificate[] certs = parsePublicCertificates(publicKey);
Security.addProvider(new BouncyCastleProvider());
PEMParser pemParser = new PEMParser(new StringReader(privateKey));
Object object = pemParser.readObject();
pemParser.close();
JcaPEMKeyConverter converter = new JcaPEMKeyConverter().setProvider("BC");
PrivateKey privkey = null;
if (object instanceof org.bouncycastle.asn1.pkcs.PrivateKeyInfo) {
privkey = converter.getPrivateKey((org.bouncycastle.asn1.pkcs.PrivateKeyInfo) object);
} else if (object instanceof org.bouncycastle.openssl.PEMKeyPair) {
privkey = converter.getKeyPair((org.bouncycastle.openssl.PEMKeyPair) object).getPrivate();
} else {
throw new IOException("PrivateKey not recognized.");
}
return new PrivateKeyEntry(privkey, certs);
}
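    // A minimal usage sketch (assumption, not part of the original class): turning PEM text
    // into keystore entries with the helpers above. The pem strings are placeholders for real
    // PEM-encoded input read from a request or a file.
    //
    //     TrustedCertificateEntry trusted = createCertificateEntry(pemCertificate);
    //     PrivateKeyEntry keyEntry = createPrivateKey(pemPrivateKey, pemCertificate);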
public static X509Certificate[] parsePublicCertificates(String publicKey) throws CertificateException {
List<X509Certificate> certificateChain = new ArrayList<>();
CertificateFactory certFactory = CertificateFactory.getInstance("X.509");
ByteArrayInputStream is = new ByteArrayInputStream(publicKey.getBytes());
certificateChain.add((X509Certificate) certFactory.generateCertificate(is));
return certificateChain.toArray(new X509Certificate[0]);
}
protected List<KeystoreInfo> listKeystoresInternal() {
List<KeystoreInfo> keystores = new ArrayList<>();
this.keystoreServices.entrySet().stream().forEach(entry -> {
try {
if (entry.getValue().getKeyStore() != null) {
keystores.add(buildKeystoreInfo(entry.getKey(), entry.getValue().getKeyStore()));
}
} catch (KuraException | KeyStoreException e) {
throw new WebApplicationException(e);
}
});
return keystores;
}
protected List<EntryInfo> getKeysInternal() {
List<EntryInfo> keys = new ArrayList<>();
this.keystoreServices.entrySet().stream().forEach(keystoreService -> {
if (keystoreService != null) {
try {
keystoreService.getValue().getEntries().entrySet().stream().forEach(entry -> {
if (entry.getValue() instanceof PrivateKeyEntry) {
keys.add(buildPrivateKeyInfo(keystoreService.getKey(), entry.getKey(),
(PrivateKeyEntry) entry.getValue(), false));
} else if (entry.getValue() instanceof TrustedCertificateEntry) {
keys.add(buildCertificateInfo(keystoreService.getKey(), entry.getKey(),
(TrustedCertificateEntry) entry.getValue(), false));
}
});
} catch (KuraException e) {
throw new WebApplicationException(e);
}
}
});
return keys;
}
protected List<EntryInfo> getKeysByPidInternal(final String keystoreServicePid) {
List<EntryInfo> keys = new ArrayList<>();
KeystoreService keystoreService = this.keystoreServices.get(keystoreServicePid);
if (keystoreService != null) {
try {
keystoreService.getEntries().entrySet().stream().forEach(entry -> {
if (entry.getValue() instanceof PrivateKeyEntry) {
keys.add(buildPrivateKeyInfo(keystoreServicePid, entry.getKey(),
(PrivateKeyEntry) entry.getValue(), true));
} else if (entry.getValue() instanceof TrustedCertificateEntry) {
keys.add(buildCertificateInfo(keystoreServicePid, entry.getKey(),
(TrustedCertificateEntry) entry.getValue(), true));
}
});
} catch (KuraException e) {
throw new WebApplicationException(e);
}
} else {
throw new WebApplicationException(404);
}
return keys;
}
protected List<EntryInfo> getKeysByAliasInternal(final String alias) {
List<EntryInfo> keys = new ArrayList<>();
this.keystoreServices.entrySet().stream().filter(entry -> {
try {
return entry.getValue().getAliases().contains(alias);
} catch (KuraException e) {
throw new WebApplicationException(e);
}
}).forEach(entry -> {
try {
Entry keystoreEntry = entry.getValue().getEntry(alias);
if (keystoreEntry instanceof PrivateKeyEntry) {
keys.add(buildPrivateKeyInfo(entry.getKey(), alias, (PrivateKeyEntry) keystoreEntry, true));
} else if (keystoreEntry instanceof TrustedCertificateEntry) {
keys.add(
buildCertificateInfo(entry.getKey(), alias, (TrustedCertificateEntry) keystoreEntry, true));
} else {
throw new WebApplicationException(404);
}
} catch (KuraException e) {
throw new WebApplicationException(e);
}
});
return keys;
}
protected EntryInfo getKeyInternal(final String keystoreServicePid, final String alias) {
Entry entry;
KeystoreService keystoreService = this.keystoreServices.get(keystoreServicePid);
if (keystoreService != null) {
try {
entry = keystoreService.getEntry(alias);
if (entry instanceof PrivateKeyEntry) {
return buildPrivateKeyInfo(keystoreServicePid, alias, (PrivateKeyEntry) entry, true);
} else if (entry instanceof TrustedCertificateEntry) {
return buildCertificateInfo(keystoreServicePid, alias, (TrustedCertificateEntry) entry, true);
} else {
throw new WebApplicationException(404);
}
} catch (KuraException e) {
throw new WebApplicationException(e);
}
} else {
throw new WebApplicationException(404);
}
}
protected String getCSRInternal(final CsrInfo info) {
try {
X500Principal principal = new X500Principal(info.getAttributes());
return this.keystoreServices.get(info.getKeystoreServicePid()).getCSR(info.getAlias(), principal,
info.getSignatureAlgorithm());
} catch (KuraException e) {
throw new WebApplicationException(e);
}
}
protected String getCSRInternal(final CsrReadRequest request) {
try {
X500Principal principal = new X500Principal(request.getAttributes());
return this.keystoreServices.get(request.getKeystoreServicePid()).getCSR(request.getAlias(), principal,
request.getSignatureAlgorithm());
} catch (KuraException e) {
throw new WebApplicationException(e);
}
}
protected void storeTrustedCertificateEntryInternal(final CertificateInfo writeRequest) {
try {
this.keystoreServices.get(writeRequest.getKeystoreServicePid()).setEntry(writeRequest.getAlias(),
createCertificateEntry(writeRequest.getCertificate()));
} catch (GeneralSecurityException | KuraException e) {
throw new WebApplicationException(e);
}
}
protected void storeKeyPairEntryInternal(final KeyPairInfo writeRequest) {
try {
this.keystoreServices.get(writeRequest.getKeystoreServicePid()).createKeyPair(writeRequest.getAlias(),
writeRequest.getAlgorithm(), writeRequest.getSize(), writeRequest.getSignatureAlgorithm(),
writeRequest.getAttributes());
} catch (KuraException e) {
throw new WebApplicationException(e);
}
}
protected void deleteKeyEntryInternal(String keystoreServicePid, String alias) {
try {
this.keystoreServices.get(keystoreServicePid).deleteEntry(alias);
} catch (KuraException e) {
throw new WebApplicationException(e);
}
}
private KeystoreInfo buildKeystoreInfo(String keystoreServicePid, KeyStore keystore) throws KeyStoreException {
KeystoreInfo keystoreInfo = new KeystoreInfo(keystoreServicePid);
keystoreInfo.setType(keystore.getType());
keystoreInfo.setSize(keystore.size());
return keystoreInfo;
}
private CertificateInfo buildCertificateInfo(String keystoreServicePid, String alias,
TrustedCertificateEntry certificate, boolean withCertificate) {
CertificateInfo certificateInfo = new CertificateInfo(keystoreServicePid, alias);
if (certificate != null && certificate.getTrustedCertificate() instanceof X509Certificate) {
X509Certificate x509Certificate = (X509Certificate) certificate.getTrustedCertificate();
certificateInfo.setSubjectDN(x509Certificate.getSubjectDN().getName());
certificateInfo.setIssuer(x509Certificate.getIssuerX500Principal().getName());
certificateInfo.setStartDate(x509Certificate.getNotBefore().getTime());
certificateInfo.setExpirationDate(x509Certificate.getNotAfter().getTime());
certificateInfo.setAlgorithm(x509Certificate.getSigAlgName());
certificateInfo.setSize(getSize(x509Certificate.getPublicKey()));
try {
certificateInfo.setSubjectAN(x509Certificate.getSubjectAlternativeNames());
} catch (CertificateParsingException e) {
logger.error("Cannot parse certificate subject alternative names", e);
}
if (withCertificate) {
final Base64.Encoder encoder = Base64.getMimeEncoder(64, LINE_SEPARATOR.getBytes());
StringBuilder pemCertificate = new StringBuilder();
pemCertificate.append(BEGIN_CERT);
pemCertificate.append(LINE_SEPARATOR);
try {
pemCertificate.append(encoder.encodeToString(x509Certificate.getEncoded()));
} catch (CertificateEncodingException e) {
logger.error("Cannot encode certificate", e);
}
pemCertificate.append(LINE_SEPARATOR);
pemCertificate.append(END_CERT);
certificateInfo.setCertificate(pemCertificate.toString());
}
}
return certificateInfo;
}
private PrivateKeyInfo buildPrivateKeyInfo(String keystoreServicePid, String alias, PrivateKeyEntry privateKey,
boolean withCertificate) {
PrivateKeyInfo privateKeyInfo = new PrivateKeyInfo(keystoreServicePid, alias);
if (privateKey != null) {
privateKeyInfo.setAlgorithm(privateKey.getPrivateKey().getAlgorithm());
privateKeyInfo.setSize(getSize(privateKey.getCertificate().getPublicKey()));
if (withCertificate) {
final Base64.Encoder encoder = Base64.getMimeEncoder(64, LINE_SEPARATOR.getBytes());
String[] certificateChain = new String[privateKey.getCertificateChain().length];
for (int i = 0; i < certificateChain.length; i++) {
StringBuilder pemCertificate = new StringBuilder();
pemCertificate.append(BEGIN_CERT);
pemCertificate.append(LINE_SEPARATOR);
try {
pemCertificate.append(encoder.encodeToString(privateKey.getCertificateChain()[i].getEncoded()));
} catch (CertificateEncodingException e) {
logger.error("Cannot encode certificate", e);
}
pemCertificate.append(LINE_SEPARATOR);
pemCertificate.append(END_CERT);
certificateChain[i] = pemCertificate.toString();
}
privateKeyInfo.setCertificateChain(certificateChain);
}
}
return privateKeyInfo;
}
private int getSize(Key key) {
int size = 0;
if (key instanceof RSAPublicKey) {
size = ((RSAPublicKey) key).getModulus().bitLength();
} else if (key instanceof ECPublicKey) {
ECParameterSpec spec = ((ECPublicKey) key).getParams();
if (spec != null) {
size = spec.getOrder().bitLength();
}
} else if (key instanceof DSAPublicKey) {
DSAPublicKey dsaCertificate = (DSAPublicKey) key;
if (dsaCertificate.getParams() != null) {
size = dsaCertificate.getParams().getP().bitLength();
} else {
size = dsaCertificate.getY().bitLength();
}
}
return size;
}
private void initKeystoreServiceTracking() {
String filterString = String.format("(&(%s=%s))", Constants.OBJECTCLASS, KeystoreService.class.getName());
Filter filter = null;
try {
filter = this.bundleContext.createFilter(filterString);
} catch (InvalidSyntaxException e) {
logger.error("Filter setup exception ", e);
}
this.keystoreServiceTracker = new ServiceTracker<>(this.bundleContext, filter,
this.keystoreServiceTrackerCustomizer);
this.keystoreServiceTracker.open();
}
private final class KeystoreServiceTrackerCustomizer
implements ServiceTrackerCustomizer<KeystoreService, KeystoreService> {
private static final String KURA_SERVICE_PID = "kura.service.pid";
@Override
public KeystoreService addingService(final ServiceReference<KeystoreService> reference) {
String kuraServicePid = (String) reference.getProperty(KURA_SERVICE_PID);
KeystoreRemoteService.this.keystoreServices.put(kuraServicePid,
KeystoreRemoteService.this.bundleContext.getService(reference));
return KeystoreRemoteService.this.keystoreServices.get(kuraServicePid);
}
@Override
public void modifiedService(final ServiceReference<KeystoreService> reference, final KeystoreService service) {
String kuraServicePid = (String) reference.getProperty(KURA_SERVICE_PID);
KeystoreRemoteService.this.keystoreServices.put(kuraServicePid,
KeystoreRemoteService.this.bundleContext.getService(reference));
}
@Override
public void removedService(final ServiceReference<KeystoreService> reference, final KeystoreService service) {
String kuraServicePid = (String) reference.getProperty(KURA_SERVICE_PID);
KeystoreRemoteService.this.keystoreServices.remove(kuraServicePid);
}
}
}
|
package org.geotools.data.shapefile.ng;
import java.io.IOException;
import java.net.URL;
import org.geotools.TestData;
import org.geotools.util.KVP;
import org.junit.Test;
import static org.geotools.data.shapefile.ng.ShapefileDataStoreFactory.*;
/**
* Test the functionality of ShapefileDataStoreFactory; specifically the handling of
* connection parameters.
*
* @author Jody Garnett
*/
public class ShapefileDataStoreFactoryTest extends TestCaseSupport {
private ShapefileDataStore store = null;
private ShapefileDataStoreFactory factory = new ShapefileDataStoreFactory();
public ShapefileDataStoreFactoryTest(String testName) throws IOException {
super(testName);
}
@Override
protected void tearDown() throws Exception {
if(store != null) {
store.dispose();
}
super.tearDown();
}
@Test
public void testFSTypeParameter() throws Exception {
URL url = TestData.url(STATE_POP);
KVP params = new KVP( URLP.key,url );
assertTrue( "Sorting is optional", factory.canProcess(params) );
params.put( FSTYPE.key, "shape-ng" );
assertTrue( "Shape NG supported", factory.canProcess(params) );
params.put(FSTYPE.key, "shape" );
assertFalse( "Plain shape not supported", factory.canProcess(params) );
params.put(FSTYPE.key, "index" );
assertFalse( "Plain index not supported", factory.canProcess(params) );
params.put( FSTYPE.key, "smurf" );
assertFalse( "Feeling blue; don't try a smruf", factory.canProcess(params) );
}
}
|
package com.noinnion.android.newsplus.extension.newsblurplus;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Context;
import android.os.RemoteException;
import android.text.TextUtils;
import com.androidquery.AQuery;
import com.androidquery.callback.AjaxCallback;
import com.androidquery.callback.AjaxStatus;
import com.androidquery.util.AQUtility;
import com.noinnion.android.reader.api.ReaderException;
import com.noinnion.android.reader.api.ReaderExtension;
import com.noinnion.android.reader.api.internal.IItemIdListHandler;
import com.noinnion.android.reader.api.internal.IItemListHandler;
import com.noinnion.android.reader.api.internal.ISubscriptionListHandler;
import com.noinnion.android.reader.api.internal.ITagListHandler;
import com.noinnion.android.reader.api.provider.IItem;
import com.noinnion.android.reader.api.provider.ISubscription;
import com.noinnion.android.reader.api.provider.ITag;
public class NewsBlurPlus extends ReaderExtension {
private List<ITag> tags;
private List<ISubscription> feeds;
private ITag starredTag;
/*
* Main sync function to get folders, feeds, and counts.
* 1. Get the folders (tags) and their feeds.
	 * 2. Ask NewsBlur to refresh the feed counts and save them to the feeds.
	 * 3. Send the tags and feeds to the handler.
*/
@Override
public void handleReaderList(ITagListHandler tagHandler, ISubscriptionListHandler subHandler, long syncTime) throws IOException, ReaderException {
AjaxCallback<JSONObject> cb = new AjaxCallback<JSONObject>();
AQuery aq = new AQuery(this);
Context c = getApplicationContext();
APIHelper.wrapCallback(c, cb);
cb.url(APIHelper.API_URL_FOLDERS_AND_FEEDS).type(JSONObject.class);
aq.sync(cb);
JSONObject json = cb.getResult();
AjaxStatus status = cb.getStatus();
if (APIHelper.isJSONResponseValid(json, status)) {
try {
JSONObject json_feeds = json.getJSONObject("feeds");
JSONObject json_folders = json.getJSONObject("flat_folders");
Iterator<?> keys = json_folders.keys();
if (keys.hasNext()) {
tags = new ArrayList<ITag>();
feeds = new ArrayList<ISubscription>();
if (starredTag == null) {
starredTag = APIHelper.createTag("Starred items", true);
tags.add(starredTag);
}
}
while (keys.hasNext()) {
String catName = ((String)keys.next());
JSONArray feedsPerFolder = json_folders.getJSONArray(catName);
catName = catName.trim();
ITag cat = APIHelper.createTag(catName, false);
if (!TextUtils.isEmpty(catName))
tags.add(cat);
// Add all feeds in this category
for (int i=0; i<feedsPerFolder.length(); i++) {
ISubscription sub = new ISubscription();
String feedID = feedsPerFolder.getString(i);
JSONObject f = json_feeds.getJSONObject(feedID);
Calendar updateTime = Calendar.getInstance();
updateTime.add(Calendar.SECOND, (-1) * f.getInt("updated_seconds_ago"));
sub.newestItemTime = updateTime.getTimeInMillis() / 1000;
sub.uid = "FEED:" + APIHelper.getFeedUrlFromFeedId(feedID);
sub.title = f.getString("feed_title");
sub.htmlUrl = f.getString("feed_link");
sub.unreadCount = f.getInt("nt") + f.getInt("ps");
if (!TextUtils.isEmpty(catName))
sub.addCategory(cat.uid);
feeds.add(sub);
}
}
if (feeds.size() == 0)
throw new ReaderException("Network error");
else {
updateFeedCounts();
tagHandler.tags(tags);
subHandler.subscriptions(feeds);
}
}
catch (JSONException e) {
AQUtility.report(e);
}
catch (RemoteException e) {
throw new ReaderException(e);
}
}
}
/*
* Get a list of unread story IDs (URLs); the UI will mark all others as read.
* This really speeds up the sync process.
*/
@Override
public void handleItemIdList(IItemIdListHandler handler, long syncTime) throws IOException, ReaderException {
try {
AQuery aq = new AQuery(this);
Context c = getApplicationContext();
AjaxCallback<JSONObject> cb = new AjaxCallback<JSONObject>();
APIHelper.wrapCallback(c, cb);
if (handler.stream().startsWith(ReaderExtension.STATE_STARRED)) {
cb.url(APIHelper.API_URL_STARRED_ITEMS).type(JSONObject.class);
}
else {
List<String> unread_hashes = APIHelper.getUnreadHashes(aq, c);
String url = APIHelper.API_URL_RIVER;
for (String h : unread_hashes)
url += "h=" + h + "&";
cb.url(url + "read_filter=unread").type(JSONObject.class);
}
aq.sync(cb);
JSONObject json = cb.getResult();
AjaxStatus status = cb.getStatus();
if (APIHelper.isJSONResponseValid(json, status)) {
List<String> unread = APIHelper.getStoryIDs(json);
handler.items(unread);
}
}
catch (JSONException e) {
throw new ReaderException(e);
}
catch (RemoteException e) {
throw new ReaderException(e);
}
}
/*
* Call for an update on all feeds' unread counters, and store the result
*/
private void updateFeedCounts() {
AjaxCallback<JSONObject> cb = new AjaxCallback<JSONObject>();
AQuery aq = new AQuery(this);
Context c = getApplicationContext();
APIHelper.wrapCallback(c, cb);
cb.url(APIHelper.API_URL_REFRESH_FEEDS).type(JSONObject.class);
aq.sync(cb);
JSONObject json = cb.getResult();
AjaxStatus status = cb.getStatus();
if (APIHelper.isJSONResponseValid(json, status)) {
try {
JSONObject json_feeds = json.getJSONObject("feeds");
for (ISubscription sub : feeds) {
JSONObject f = json_feeds.getJSONObject(APIHelper.getFeedIdFromFeedUrl(sub.uid));
sub.unreadCount = f.getInt("ps") + f.getInt("nt");
}
}
catch (Exception e) {
AQUtility.report(e);
}
}
}
/*
* Handle a single item list (a feed or a folder).
* This functions calls the parseItemList function.
*/
@Override
public void handleItemList(IItemListHandler handler, long syncTime) throws IOException, ReaderException {
try {
if ((tags != null) && (feeds != null)) {
String uid = handler.stream();
if (uid.equals(ReaderExtension.STATE_READING_LIST)) {
for (ISubscription sub : feeds)
if (sub.unreadCount > 0 && !handler.excludedStreams().contains(sub.uid))
parseItemList(sub.uid.replace("FEED:", ""), handler, sub.getCategories());
}
else if (uid.startsWith("FOL:")) {
for (ISubscription sub : feeds)
if (sub.unreadCount > 0 && sub.getCategories().contains(uid) && !handler.excludedStreams().contains(sub.uid))
parseItemList(sub.uid.replace("FEED:", ""), handler, sub.getCategories());
}
else if (uid.startsWith("FEED:")) {
if (!handler.excludedStreams().contains(uid))
parseItemList(handler.stream().replace("FEED:", ""), handler, null);
}
else if (uid.startsWith(ReaderExtension.STATE_STARRED)) {
parseItemList(APIHelper.API_URL_STARRED_ITEMS, handler, null);
}
}
}
catch (RemoteException e) {
throw new ReaderException(e);
}
}
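/*
* Fetch the stories of a single stream (a feed URL or the starred-items endpoint) and
* forward them to the handler in chunks, to avoid oversized IPC transactions.
*/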
public void parseItemList(String url, IItemListHandler handler, List<String> categories) throws IOException, ReaderException {
AjaxCallback<JSONObject> cb = new AjaxCallback<JSONObject>();
AQuery aq = new AQuery(this);
Context c = getApplicationContext();
APIHelper.wrapCallback(c, cb);
cb.url(url).type(JSONObject.class);
aq.sync(cb);
JSONObject json = cb.getResult();
AjaxStatus status = cb.getStatus();
if (APIHelper.isJSONResponseValid(json, status)) {
try {
List<IItem> items = new ArrayList<IItem>();
JSONArray arr = json.getJSONArray("stories");
int length = 0;
for (int i=0; i<arr.length(); i++) {
JSONObject story = arr.getJSONObject(i);
IItem item = new IItem();
item.subUid = "FEED:" + url;
item.title = story.getString("story_title");
item.link = story.getString("story_permalink");
item.uid = story.getString("id");
item.author = story.getString("story_authors");
item.updatedTime = story.getLong("story_timestamp");
item.publishedTime = story.getLong("story_timestamp");
item.read = (story.getInt("read_status") == 1);
item.content = story.getString("story_content");
if (story.has("starred") && "true".equals(story.getString("starred"))) {
item.starred = true;
item.addCategory(starredTag.uid);
}
if (categories != null)
for (String cat : categories)
item.addCategory(cat);
items.add(item);
// Handle TransactionTooLargeException, based on Noin's recommendation
length += item.getLength();
if (items.size() % 200 == 0 || length > 300000) {
handler.items(items);
items.clear();
length = 0;
}
}
handler.items(items);
}
catch (Exception e) {
AQUtility.report(e);
}
}
}
/*
* Main function for marking stories (and their feeds) as read/unread.
*/
private boolean markAs(boolean read, String[] itemUids, String[] subUIds) throws IOException, ReaderException {
AjaxCallback<JSONObject> cb = new AjaxCallback<JSONObject>();
AQuery aq = new AQuery(this);
Context c = getApplicationContext();
APIHelper.wrapCallback(c, cb);
if (itemUids == null && subUIds == null) {
cb.url(APIHelper.API_URL_MARK_ALL_AS_READ).type(JSONObject.class);
}
else {
if (itemUids == null) {
Map<String, Object> params = new HashMap<String, Object>();
for (String sub : subUIds)
params.put("feed_id", APIHelper.getFeedIdFromFeedUrl(sub));
cb.url(APIHelper.API_URL_MARK_FEED_AS_READ).params(params).type(JSONObject.class);
}
else {
String url = read ? APIHelper.API_URL_MARK_STORY_AS_READ : APIHelper.API_URL_MARK_STORY_AS_UNREAD;
Map<String, Object> params = new HashMap<String, Object>();
for (int i=0; i<itemUids.length; i++) {
params.put("story_id", itemUids[i]);
params.put("feed_id", APIHelper.getFeedIdFromFeedUrl(subUIds[i]));
}
cb.url(url).params(params).type(JSONObject.class);
}
}
aq.sync(cb);
JSONObject json = cb.getResult();
AjaxStatus status = cb.getStatus();
try {
return (APIHelper.isJSONResponseValid(json, status) && json.getString("result").startsWith("ok"));
}
catch (JSONException e) {
return false;
}
}
/*
* Mark a list of stories (and their feeds) as read
*/
@Override
public boolean markAsRead(String[] itemUids, String[] subUIds) throws IOException, ReaderException {
return this.markAs(true, itemUids, subUIds);
}
/*
* Mark a list of stories (and their feeds) as unread
*/
@Override
public boolean markAsUnread(String[] itemUids, String[] subUids, boolean keepUnread) throws IOException, ReaderException {
return this.markAs(false, itemUids, subUids);
}
/*
* Mark all stories as read in the given stream: everything, a single feed, or a folder.
* Note: s = subscription (feed) or folder uid, t = tag
*/
@Override
public boolean markAllAsRead(String s, String t, long syncTime) throws IOException, ReaderException {
boolean result = true;
if (s == null && t == null)
return this.markAs(true, null, null);
else if (s.startsWith("FEED:")) {
String[] feed = { APIHelper.getFeedIdFromFeedUrl(s) };
result = this.markAs(true, null, feed);
}
else if (s.startsWith("FOL:")) {
List<String> subUIDs = new ArrayList<String>();
for (ISubscription sub : feeds)
if (sub.getCategories().contains(s))
subUIDs.add(sub.uid);
if (subUIDs.size() > 0)
result = this.markAs(true, null, subUIDs.toArray(new String[subUIDs.size()]));
}
else
result = false; // Can't mark a tag as read
return result;
}
//TODO: Tag/Folder handling
@Override
public boolean editItemTag(String[] itemUids, String[] subUids, String[] addTags, String[] removeTags) throws IOException, ReaderException {
return false;
}
@Override
public boolean editSubscription(String uid, String title, String url, String[] tag, int action, long syncTime) throws IOException, ReaderException {
return false;
}
@Override
public boolean renameTag(String tagUid, String oldLabel, String newLabel) throws IOException, ReaderException {
return false;
}
@Override
public boolean disableTag(String tagUid, String label) throws IOException, ReaderException {
return false;
}
}
|
package org.opennms.netmgt.provision.service.tasks;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
/*
* ContainerTask
* @author brozow
*
* TODO derive directly from Task
*/
public class ContainerTask extends Task {
/**
* TaskTrigger
*
* @author brozow
*/
private final class TaskTrigger extends Task {
public TaskTrigger(DefaultTaskCoordinator coordinator, ContainerTask parent) {
super(coordinator, parent);
}
@Override
protected void completeSubmit() {
getCoordinator().markTaskAsCompleted(TaskTrigger.this);
}
public String toString() { return "Trigger For "+ContainerTask.this; }
}
protected final Task m_triggerTask;
private final List<Task> m_children = Collections.synchronizedList(new ArrayList<Task>());
public ContainerTask(DefaultTaskCoordinator coordinator, ContainerTask parent) {
super(coordinator, parent);
m_triggerTask = new TaskTrigger(coordinator, this);
}
@Override
public void addPrerequisite(Task task) {
super.addPrerequisite(task);
m_triggerTask.addPrerequisite(task);
}
AtomicInteger m_child = new AtomicInteger(0);
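// Schedules the trigger task and any child tasks that were queued before this container was scheduled.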
@Override
public void preSchedule() {
m_triggerTask.schedule();
List<Task> children;
synchronized(m_children) {
children = new ArrayList<Task>(m_children);
m_children.clear();
}
for(Task task : children) {
task.schedule();
}
}
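// Adds a child task as a prerequisite of this container; if the container is already scheduled the child is scheduled immediately, otherwise it is queued until preSchedule() runs.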
public void add(Task task) {
super.addPrerequisite(task);
addChildDependencies(task);
boolean scheduleChild;
synchronized(m_children) {
scheduleChild = isScheduled();
if (!scheduleChild) {
m_children.add(task);
}
}
if (scheduleChild) {
task.schedule();
}
}
protected Task getTriggerTask() {
return m_triggerTask;
}
// private void setPreferredExecutorOfChild(Task task) {
// if (task instanceof ContainerTask) {
// ContainerTask container = (ContainerTask)task;
// if (container.getChildPreferredExecutor().equals(DEFAULT_EXECUTOR)) {
// container.setPreferredExecutor(getChildPreferredExecutor());
// } else if (task instanceof SyncTask){
// SyncTask syncTask = (SyncTask)task;
// if (syncTask.getPreferredExecutor().equals(DEFAULT_EXECUTOR)) {
// syncTask.setPreferredExecutor(getChildPreferredExecutor());
@Override
protected void completeSubmit() {
getCoordinator().markTaskAsCompleted(this);
}
public SyncTask add(Runnable runnable) {
SyncTask task = createTask(runnable);
add(task);
return task;
}
public SyncTask add(Runnable runnable, String schedulingHint) {
SyncTask task = createTask(runnable, schedulingHint);
add(task);
return task;
}
public <T> AsyncTask<T> add(Async<T> async, Callback<T> cb) {
AsyncTask<T> task = createTask(async, cb);
add(task);
return task;
}
private SyncTask createTask(Runnable runnable) {
return getCoordinator().createTask(this, runnable);
}
private SyncTask createTask(Runnable runnable, String schedulingHint) {
return getCoordinator().createTask(this, runnable, schedulingHint);
}
private <T> AsyncTask<T> createTask(Async<T> async, Callback<T> cb) {
return getCoordinator().createTask(this, async, cb);
}
protected void addChildDependencies(Task child) {
child.addPrerequisite(m_triggerTask);
}
}
|
package org.eclipse.titan.designer.AST.brokenpartsanalyzers;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.swt.widgets.Display;
import org.eclipse.titan.designer.AST.Assignment;
import org.eclipse.titan.designer.AST.Assignments;
import org.eclipse.titan.designer.AST.Module;
import org.eclipse.titan.designer.AST.TTCN3.definitions.TTCN3Module;
import org.eclipse.titan.designer.consoles.TITANDebugConsole;
import org.eclipse.titan.designer.parsers.CompilationTimeStamp;
import org.eclipse.ui.console.MessageConsoleStream;
/**
* @author Peter Olah
*/
//FIXME clean up selection methods if this way of working is tested to be OK
public final class BrokenPartsViaReferences extends SelectionMethodBase implements IBaseAnalyzer {
// When the definition-based search for parts to be analyzed exceeds this limit, we switch back to the import-based method.
// 10 seconds in nanoseconds
private final static long TIMELIMIT = 10 * (long)1e+9;
/**
* When the percentage of broken modules is bigger than this limit
* the module level selection shall be used.
*
* Otherwise the assignment level detection would take too long.
*/
private final static float BROKEN_MODULE_LIMIT = 10;
private final CompilationTimeStamp timestamp;
private final Map<Module, List<Assignment>> moduleAndBrokenAssignments;
private boolean analyzeOnlyAssignments;
public BrokenPartsViaReferences(final SelectionAlgorithm selectionAlgorithm, final CompilationTimeStamp timestamp) {
super(selectionAlgorithm);
moduleAndBrokenAssignments = new HashMap<Module, List<Assignment>>();
analyzeOnlyAssignments = false;
this.timestamp = timestamp;
header = "\n**Selection with Broken parts via references is started at:";
footer = "**Selection with Broken parts via references is ended at: ";
}
public Map<Module, List<Assignment>> getModuleAndBrokenDefs() {
return moduleAndBrokenAssignments;
}
public boolean getAnalyzeOnlyDefinitions() {
return analyzeOnlyAssignments;
}
@Override
public void execute() {
if (writeDebugInfo) {
TITANDebugConsole.println(String.format(format, header, simpleDateFormat.format(new Date())));
}
start = System.nanoTime();
final List<Module> startModules = new ArrayList<Module>();
final Map<Module, List<Module>> invertedImports = buildInvertedImportStructure(allModules, startModules);
computeAnalyzeOnlyDefinitionsFlag(allModules, startModules);
if (analyzeOnlyAssignments) {
final Map<Module, List<AssignmentHandler>> result = collectBrokenParts(startModules, invertedImports);
if (writeDebugInfo && (System.nanoTime()-start < TIMELIMIT)) {
writeDebugInfo(result);
}
collectRealBrokenParts(result);
}
if(writeDebugInfo && System.nanoTime() - start > TIMELIMIT) {
TITANDebugConsole.println(" Switching back to old selection format");
}
// if we need to use the old selection or the new selection method took too long
if(!analyzeOnlyAssignments || (System.nanoTime()-start > TIMELIMIT)) {
analyzeOnlyAssignments = false;
modulesToCheck.clear();
moduleAndBrokenAssignments.clear();
final List<Module> modules = collectBrokenModulesViaInvertedImports(startModules, invertedImports);
modulesToCheck.addAll(modules);
}
afterExecute();
end = System.nanoTime() - start;
if (writeDebugInfo) {
TITANDebugConsole.println(String.format(format, footer, simpleDateFormat.format(new Date())));
infoAfterExecute();
}
}
public void computeAnalyzeOnlyDefinitionsFlag(final List<Module> allModules, final List<Module> startModules) {
float brokenModulesRatio = (float) ((startModules.size() * 100.0) / allModules.size());
if (Float.compare(brokenModulesRatio, (float) BROKEN_MODULE_LIMIT) < 0) {
analyzeOnlyAssignments = true;
}
}
/**
* Builds an inverted import structure and identifies the start modules, i.e. those whose CompilationTimeStamp is null.
*
* @param allModules
*            the list of modules to be checked. Initially all modules.
* @param startModules
*            the list collecting the modules the checking has to start from; the function adds every module that cannot be skipped.
*
* @return invertedImports, which contains the following:<br>
* - key: a module.<br>
* - values: the modules in which the key module is used, i.e. every value imports the key module (an inverted "imported" connection).<br>
* If module A imports B, C, D and module B imports D, E then the following structure will be built:<br>
* A = [],<br>
* B = [A],<br>
* C = [A],<br>
* D = [A, B],<br>
* E = [B]<br>
*/
protected Map<Module, List<Module>> buildInvertedImportStructure(final List<Module> allModules, final List<Module> startModules) {
final Map<Module, List<Module>> invertedImports = new HashMap<Module, List<Module>>();
for (Module actualModule : allModules) {
// Collect broken modules directly into startModules; we will start the checking from these modules.
// Collect modules which have not been checked semantically.
if ((actualModule.getLastCompilationTimeStamp() == null || actualModule.isCheckRoot() || !semanticallyChecked.contains(actualModule.getName())) && !startModules.contains(actualModule) ) {
startModules.add(actualModule);
}
// We have to add all modules to get a correct inverted import structure.
// This covers the case when a module is a top-level module, so it has no imports.
if (!invertedImports.containsKey(actualModule)) {
invertedImports.put(actualModule, new ArrayList<Module>());
}
for (Module actualImportedModule : actualModule.getImportedModules()) {
if (invertedImports.containsKey(actualImportedModule)) {
final List<Module> dependentModules = invertedImports.get(actualImportedModule);
if (!dependentModules.contains(actualModule)) {
dependentModules.add(actualModule);
}
} else {
final List<Module> temp = new ArrayList<Module>();
temp.add(actualModule);
invertedImports.put(actualImportedModule, temp);
}
}
}
return invertedImports;
}
protected List<Module> collectBrokenModulesViaInvertedImports(final List<Module> startModules, final Map<Module, List<Module>> invertedImports) {
final List<Module> startModulesCopy = new ArrayList<Module>(startModules);
final List<Module> result = new ArrayList<Module>();
final MessageConsoleStream stream = TITANDebugConsole.getConsole().newMessageStream();
if (writeDebugInfo) {
for (Module startModule: startModules) {
TITANDebugConsole.println(" ** Module " + startModule.getName() + " can not be skipped as it was not yet analyzed.", stream);
}
}
for (int s = 0; s < startModulesCopy.size(); ++s) {
final Module startModule = startModulesCopy.get(s);
if (!result.contains(startModule)) {
result.add(startModule);
}
final List<Module> whereStartModuleUsed = invertedImports.get(startModule);
for (int d = 0; d < whereStartModuleUsed.size(); ++d) {
final Module dependentModule = whereStartModuleUsed.get(d);
if (!startModulesCopy.contains(dependentModule)) {
startModulesCopy.add(dependentModule);
if (writeDebugInfo) {
TITANDebugConsole.println(" ** Module " + dependentModule.getName() + " can not be skipped as it depends on " + startModule.getName() + " which needs to be checked.", stream);
}
}
}
startModule.notCheckRoot();
final Assignments assignments = startModule.getAssignments();
for (int d = 0; d < assignments.getNofAssignments(); ++d) {
final Assignment assignment = assignments.getAssignmentByIndex(d);
assignment.notCheckRoot();
}
}
return result;
}
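/**
* Walks the inverted import structure starting from the broken modules and collects, per module,
* the assignments infected by a contagious broken assignment. Processing stops early when the time limit is exceeded.
*/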
protected Map<Module, List<AssignmentHandler>> collectBrokenParts(final List<Module> startModules, final Map<Module, List<Module>> invertedImports) {
final List<Module> startModulesCopy = new ArrayList<Module>(startModules);
final Map<Module, List<AssignmentHandler>> moduleAndBrokenAssignments = new HashMap<Module, List<AssignmentHandler>>();
processStartModules(startModulesCopy, moduleAndBrokenAssignments);
for (int i = 0; i < startModulesCopy.size() && (System.nanoTime()-start < TIMELIMIT); ++i) {
final Module startModule = startModulesCopy.get(i);
List<AssignmentHandler> startAssignments;
if (moduleAndBrokenAssignments.containsKey(startModule)) {
startAssignments = moduleAndBrokenAssignments.get(startModule);
} else {
startAssignments = getAssignmentsFrom(startModule);
moduleAndBrokenAssignments.put(startModule, startAssignments);
}
if (!startAssignments.isEmpty()) {
final List<Module> whereStartModuleUsed = invertedImports.get(startModule);
for (int j = 0; j < whereStartModuleUsed.size(); ++j) {
final Module dependentModule = whereStartModuleUsed.get(j);
List<AssignmentHandler> dependentAssignments;
if (moduleAndBrokenAssignments.containsKey(dependentModule)) {
dependentAssignments = moduleAndBrokenAssignments.get(dependentModule);
} else {
dependentAssignments = getAssignmentsFrom(dependentModule);
moduleAndBrokenAssignments.put(dependentModule, dependentAssignments);
}
// We have to separate broken and non-broken definitions because of the post-check.
final List<AssignmentHandler> brokens = new ArrayList<AssignmentHandler>();
final List<AssignmentHandler> notBrokens = new ArrayList<AssignmentHandler>();
for (int s = 0; s < startAssignments.size(); ++s) {
final AssignmentHandler startAssignment = startAssignments.get(s);
if (startAssignment.getIsContagious()) {
for (int d = 0; d < dependentAssignments.size(); ++d) {
final AssignmentHandler dependentAssignment = dependentAssignments.get(d);
dependentAssignment.check(startAssignment);
if (dependentAssignment.getIsInfected()) {
if (!startModulesCopy.contains(dependentModule)) {
startModulesCopy.add(dependentModule);
}
brokens.add(dependentAssignment);
}
}
}
}
for (int d = 0; d < dependentAssignments.size(); ++d) {
final AssignmentHandler dependentAssignment = dependentAssignments.get(d);
if (!dependentAssignment.getIsInfected()) {
notBrokens.add(dependentAssignment);
}
}
// We have to post-check the local definitions of the modules.
// A definition can reference another definition too.
checkLocalAssignments(brokens, notBrokens);
// If the dependent module was not added to startModules,
// it means it has no broken definitions,
// so we have to remove it from moduleAndBrokenAssignments.
if (!startModulesCopy.contains(dependentModule)) {
moduleAndBrokenAssignments.remove(dependentModule);
}
}
}
}
return moduleAndBrokenAssignments;
}
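/**
* Copies the infected assignments into moduleAndBrokenAssignments and registers their modules in modulesToCheck,
* clearing the check-root flag on every processed assignment.
*/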
protected void collectRealBrokenParts(final Map<Module, List<AssignmentHandler>> moduleAndAssignments ) {
for (Map.Entry<Module, List<AssignmentHandler>> entry : moduleAndAssignments.entrySet()) {
List<Assignment> assignments = new ArrayList<Assignment>();
for (AssignmentHandler assignmentHandler : entry.getValue()) {
if (assignmentHandler.getIsInfected()) {
assignments.add(assignmentHandler.getAssignment());
}
assignmentHandler.assignment.notCheckRoot();
}
if (!assignments.isEmpty()) {
final Module module = entry.getKey();
moduleAndBrokenAssignments.put(module, assignments);
modulesToCheck.add(module);
}
}
}
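/**
* Initializes the assignment handlers of the start modules: for already analyzed TTCN-3 modules only the
* broken assignments are collected, otherwise every assignment of the module is treated as broken.
*/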
public void processStartModules(final List<Module> startModules, final Map<Module, List<AssignmentHandler>> moduleAndBrokenAssignments) {
for (Module startModule : startModules) {
if(System.nanoTime()-start > TIMELIMIT) {
return;
}
if (startModule instanceof TTCN3Module && startModule.getLastCompilationTimeStamp() != null && !startModule.isCheckRoot()) {
final Assignments startAssignments = startModule.getAssignments();
final List<AssignmentHandler> brokens = new ArrayList<AssignmentHandler>();
final List<AssignmentHandler> notBrokens = new ArrayList<AssignmentHandler>();
for (int d = 0; d < startAssignments.getNofAssignments(); ++d) {
final Assignment startAssignment = startAssignments.getAssignmentByIndex(d);
final AssignmentHandler assignmentHandler = AssignmentHandlerFactory.getDefinitionHandler(startAssignment);
if (startAssignment.getLastTimeChecked() == null) {
startAssignment.check(timestamp);
}
startAssignment.accept(assignmentHandler);
if (startAssignment.isCheckRoot()) {
assignmentHandler.initStartParts();
startAssignment.notCheckRoot();
assignmentHandler.addReason("Definition's infected, because of incremental parsing.");
brokens.add(assignmentHandler);
} else if (assignmentHandler.getIsInfected()) {
assignmentHandler.addReason("Definition contains an infected reference.");
brokens.add(assignmentHandler);
} else {
notBrokens.add(assignmentHandler);
}
}
if (!brokens.isEmpty()) {
checkLocalAssignments(brokens, notBrokens);
if (moduleAndBrokenAssignments.containsKey(startModule)) {
moduleAndBrokenAssignments.get(startModule).addAll(brokens);
} else {
moduleAndBrokenAssignments.put(startModule, brokens);
}
}
} else {
if (startModule.getLastCompilationTimeStamp() == null) {
startModule.check(timestamp);
}
final List<AssignmentHandler> startAssignments = getAssignmentsFrom(startModule);
for (AssignmentHandler assignmentHandler : startAssignments) {
assignmentHandler.initStartParts();
assignmentHandler.assignment.notCheckRoot();
assignmentHandler.addReason("Parent module's CompilationTimeStamp is null.");
}
if (moduleAndBrokenAssignments.containsKey(startModule)) {
moduleAndBrokenAssignments.get(startModule).addAll(startAssignments);
} else {
moduleAndBrokenAssignments.put(startModule, startAssignments);
}
}
startModule.notCheckRoot();
}
}
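/**
* Creates an assignment handler for every assignment of the given module.
*/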
public List<AssignmentHandler> getAssignmentsFrom(final Module module) {
final List<AssignmentHandler> assignmentHandlers = new ArrayList<AssignmentHandler>();
final Assignments assignments = module.getAssignments();
for (int d = 0; d < assignments.getNofAssignments(); ++d) {
final Assignment assignment = assignments.getAssignmentByIndex(d);
final AssignmentHandler assignmentHandler = AssignmentHandlerFactory.getDefinitionHandler(assignment);
assignment.accept(assignmentHandler);
assignmentHandlers.add(assignmentHandler);
}
return assignmentHandlers;
}
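/**
* Propagates the infection inside a module: any not-yet-broken assignment that references a broken one
* is moved to the broken list, repeating until a fixed point is reached.
*/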
protected void checkLocalAssignments(final List<AssignmentHandler> brokens, final List<AssignmentHandler> notBrokens) {
if (brokens.isEmpty() || notBrokens.isEmpty()) {
return;
}
final HashMap<String, AssignmentHandler> brokenMap = new HashMap<String, AssignmentHandler>(brokens.size() + notBrokens.size());
for(AssignmentHandler handler: brokens) {
brokenMap.put(handler.getAssignment().getIdentifier().getDisplayName(), handler);
}
boolean proceed = true;
while (proceed) {
proceed = false;
for (int i = notBrokens.size() -1; i >=0; --i) {
final AssignmentHandler notBroken = notBrokens.get(i);
boolean found = false;
for (String name : notBroken.getContagiousReferences()) {
if(brokenMap.containsKey(name)) {
notBroken.check(brokenMap.get(name));
found = true;
break;
}
}
if(!found) {
for (String name : notBroken.getNonContagiousReferences()) {
if(brokenMap.containsKey(name)) {
notBroken.check(brokenMap.get(name));
found = true;
break;
}
}
}
if(found) {
proceed = true;
notBrokens.remove(i);
brokens.add(notBroken);
brokenMap.put(notBroken.getAssignment().getIdentifier().getDisplayName(), notBroken);
}
}
}
}
protected void writeDebugInfo(final Map<Module, List<AssignmentHandler>> moduleAndAssignments) {
Display.getDefault().syncExec(new Runnable() {
@Override
public void run() {
TITANDebugConsole.println(" Detailed info:");
for (Map.Entry<Module, List<AssignmentHandler>> entry : moduleAndAssignments.entrySet()) {
final List<AssignmentHandler> values = entry.getValue();
TITANDebugConsole.println(" module: " + entry.getKey().getIdentifier().getDisplayName());
for (AssignmentHandler assignmentHandler : values) {
TITANDebugConsole.println(" " + assignmentHandler + " | " + assignmentHandler.getReasons());
if(assignmentHandler.getIsInfected() || assignmentHandler.getIsContagious()) {
TITANDebugConsole.println(" " + (assignmentHandler.getIsInfected() ? "[+]" : "[-]") + " : infected");
TITANDebugConsole.println(" " + (assignmentHandler.getIsContagious() ? "[+]" : "[-]") + " : contagious");
TITANDebugConsole.println(" nonContagious references: " + assignmentHandler.getNonContagiousReferences());
TITANDebugConsole.println(" contagious references: " + assignmentHandler.getContagiousReferences());
TITANDebugConsole.println(" infected references: " + assignmentHandler.getInfectedReferences());
}
}
}
TITANDebugConsole.println(" in dot format:");
TITANDebugConsole.println(" digraph {");
TITANDebugConsole.println(" rankdir=LR;");
final ArrayList<Module> modules = new ArrayList<Module>(moduleAndAssignments.keySet());
Collections.sort(modules, new Comparator<Module>() {
@Override
public int compare(final Module o1, final Module o2) {
return o1.getName().compareTo(o2.getName());
}
});
for (Module module : modules) {
final String moduleName = module.getName();
TITANDebugConsole.println(" subgraph cluster_" + moduleName + " {");
TITANDebugConsole.println(" label=\" " + module.getIdentifier().getDisplayName() + "\";");
final List<AssignmentHandler> values = moduleAndAssignments.get(module);
for (AssignmentHandler assignmentHandler : values) {
for(String reference: assignmentHandler.getInfectedReferences()) {
TITANDebugConsole.println(" " + assignmentHandler.getAssignment().getIdentifier().getDisplayName() + "->" + reference + ";");
}
}
TITANDebugConsole.println(" }");
}
TITANDebugConsole.println(" }");
}
});
}
}
|
package jetbrains.buildServer.notification.tray.web;
import com.google.gson.Gson;
import com.intellij.openapi.diagnostic.Logger;
import jetbrains.buildServer.notification.tray.model.Notification;
import jetbrains.buildServer.users.SUser;
import jetbrains.buildServer.web.util.SessionUser;
import jetbrains.buildServer.web.util.WebUtil;
import org.atmosphere.cpr.*;
import org.atmosphere.handler.AbstractReflectorAtmosphereHandler;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* Notification request handler.
*/
public class NotificationHandler extends AbstractReflectorAtmosphereHandler {
private static final Logger LOG = Logger.getInstance(NotificationHandler.class.getName());
private static final String USER_ID = "USER_ID";
private static final Map<Long, Queue<AtmosphereResource>> myResources = new ConcurrentHashMap<Long, Queue<AtmosphereResource>>();
private final Gson myGson = new Gson();
@Override
public final void onRequest(AtmosphereResource resource) throws IOException {
if (resource.getRequest().getMethod().equalsIgnoreCase("GET")) {
onOpen(resource);
}
}
@Override
public final void onStateChange(AtmosphereResourceEvent event) throws IOException {
final AtmosphereResource resource = event.getResource();
LOG.debug(String.format("%s with event %s", resource.uuid(), event));
if (event.isCancelled() || event.isClosedByApplication() || event.isClosedByClient()) {
onDisconnect(resource);
} else {
final Object message = event.getMessage();
final AtmosphereResponse response = resource.getResponse();
if (message != null && List.class.isAssignableFrom(message.getClass())) {
List<String> messages = List.class.cast(message);
for (String t : messages) {
onMessage(response, t);
}
} else if (event.isResuming()) {
onResume(resource);
} else if (event.isResumedOnTimeout()) {
onTimeout(resource);
} else if (event.isSuspended()) {
onMessage(response, (String) message);
}
}
postStateChange(event);
}
private void onMessage(AtmosphereResponse response, String message) throws IOException {
response.getWriter().write(message);
}
@Override
public final void destroy() {
myResources.clear();
}
/**
* This method will be invoked when a connection has been received but has not yet been suspended. Note that
* the connection will be suspended AFTER this method has been invoked when used with {@link org.atmosphere.interceptor.AtmosphereResourceLifecycleInterceptor}.
*
* @param resource an {@link AtmosphereResource}
* @throws IOException
*/
public void onOpen(AtmosphereResource resource) throws IOException {
final SUser currentUser = SessionUser.getUser(resource.getRequest());
if (currentUser == null) {
LOG.error("Websocket Open request with unknown user. Request: " + WebUtil.getRequestDump(resource.getRequest()));
return;
}
LOG.debug("WebSocket connection is opened by " + currentUser.getUsername() + ". Connection UUID: " + resource.uuid());
// Store connection
AtmosphereResourceSessionFactory.getDefault().getSession(resource).setAttribute(USER_ID, currentUser.getId());
Queue<AtmosphereResource> resources = myResources.get(currentUser.getId());
if (resources == null){
resources = new ConcurrentLinkedQueue<AtmosphereResource>();
myResources.put(currentUser.getId(), resources);
}
resources.add(resource);
resource.suspend();
}
/**
* This method will be invoked during the process of resuming a connection. Here the previously suspended resource is removed and the new one is registered via onOpen.
*
* @param resource an {@link AtmosphereResource}.
* @throws IOException
*/
public void onResume(AtmosphereResource resource) throws IOException {
restoreResource(resource);
}
/**
* This method will be invoked when a suspended connection times out, i.e. no activity has occurred for the
* time specified when suspending. Here the previously suspended resource is removed and the new one is registered via onOpen.
*
* @param resource an {@link AtmosphereResource}.
* @throws IOException
*/
public void onTimeout(AtmosphereResource resource) throws IOException {
restoreResource(resource);
}
/**
* This method will be invoked when the underlying WebServer detects that a connection has been closed. Please
* note that not all WebServers support this feature (see Atmosphere's WIKI for help). Here the resource is removed from the per-user registry.
*
* @param resource an {@link AtmosphereResource}.
* @throws IOException
*/
public void onDisconnect(AtmosphereResource resource) throws IOException {
removeResource(resource);
}
private void removeResource(AtmosphereResource resource) {
final Long userId = AtmosphereResourceSessionFactory.getDefault().getSession(resource).getAttribute(USER_ID, Long.class);
if (userId == null) return; // the resource never completed onOpen, so there is nothing to remove
final Queue<AtmosphereResource> resources = myResources.get(userId);
if (resources == null) return;
resources.remove(resource);
}
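// Looks up the previously suspended resource by its UUID; if the client is unknown, falls back to onRequest() and returns null.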
private AtmosphereResource getOriginalResource(AtmosphereResource resource) throws IOException {
final String originalUUID = (String) resource.getRequest().getAttribute(ApplicationConfig.SUSPENDED_ATMOSPHERE_RESOURCE_UUID);
final AtmosphereResource originalResource = AtmosphereResourceFactory.getDefault().find(originalUUID);
if (originalResource == null) {
LOG.warn(String.format("Connection received from the unknown client. Current request uuid: %s, original request uuid: %s.", resource.uuid(), originalUUID));
onRequest(resource);
return null;
}
return originalResource;
}
private void restoreResource(AtmosphereResource resource) throws IOException {
final AtmosphereResource originalResource = getOriginalResource(resource);
if (originalResource != null) {
removeResource(originalResource);
}
onOpen(resource);
}
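/**
* Serializes the notification to JSON and broadcasts it to every open connection of the given users.
*/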
public void broadcast(Notification notification, Set<SUser> users) {
final String message = myGson.toJson(notification);
for (SUser user : users) {
final Queue<AtmosphereResource> resources = myResources.get(user.getId());
if (resources == null) continue;
for (AtmosphereResource resource : resources) {
resource.getBroadcaster().broadcast(message);
}
}
}
}
|
package hu.bme.mit.massif.simulink.api.adapter.block;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import hu.bme.mit.massif.communication.command.MatlabCommand;
import hu.bme.mit.massif.communication.command.MatlabCommandFactory;
import hu.bme.mit.massif.communication.datatype.CellMatlabData;
import hu.bme.mit.massif.communication.datatype.Handle;
import hu.bme.mit.massif.communication.datatype.IVisitableMatlabData;
import hu.bme.mit.massif.communication.datatype.MatlabString;
import hu.bme.mit.massif.communication.datatype.StructMatlabData;
import hu.bme.mit.massif.simulink.Block;
import hu.bme.mit.massif.simulink.Property;
import hu.bme.mit.massif.simulink.PropertyType;
import hu.bme.mit.massif.simulink.SimulinkFactory;
import hu.bme.mit.massif.simulink.SimulinkReference;
import hu.bme.mit.massif.simulink.api.Importer;
import hu.bme.mit.massif.simulink.api.extension.IParameterImportFilter;
/**
* Generic adapter for non-specific blocks. This adapter is used when no adapter is registered for a block type. The
* process method should be called from every class subclassing it.
*/
public class DefaultBlockAdapter implements IBlockAdapter {
@Override
public Block getBlock(Importer traverser) {
return SimulinkFactory.eINSTANCE.createBlock();
}
@Override
public void process(Importer traverser, SimulinkReference parentSimRef, Block blockToProcess) {
// The default block processing implementation goes here
MatlabCommandFactory commandFactory = traverser.getCommandFactory();
String blockFQN = blockToProcess.getSimulinkRef().getFQN();
List<Property> blockProperties = new LinkedList<Property>();
MatlabCommand getAllBlockParameters = commandFactory.customCommand("get_all_block_parameters", 1).addParam(blockFQN);
Map<String, IVisitableMatlabData> blockPropsMap = StructMatlabData.getStructMatlabDataData(getAllBlockParameters.execute());
Set<IParameterImportFilter> parameterFilters = traverser.getParameterFilters();
Set<Entry<String, IVisitableMatlabData>> entries = blockPropsMap.entrySet();
for (Entry<String, IVisitableMatlabData> entry : entries) {
String propertyName = entry.getKey();
boolean isFiltered = false;
for (IParameterImportFilter paramFilter : parameterFilters) {
isFiltered |= paramFilter.filter(commandFactory, propertyName);
}
if(isFiltered) {
continue;
}
IVisitableMatlabData value = entry.getValue();
Property prop = SimulinkFactory.eINSTANCE.createProperty();
prop.setName(propertyName);
if (value == null) {
// Default: empty string
prop.setType(PropertyType.STRING_PROPERTY);
prop.setValue("");
blockProperties.add(prop);
} else {
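// Map the MATLAB value type to a Simulink property type: handles become doubles, everything else is stored as a string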
if(value instanceof MatlabString) {
prop.setType(PropertyType.STRING_PROPERTY);
} else if(value instanceof Handle) {
prop.setType(PropertyType.DOUBLE_PROPERTY);
} else if (value instanceof CellMatlabData) {
prop.setType(PropertyType.STRING_PROPERTY);
} else if(value instanceof StructMatlabData) {
prop.setType(PropertyType.STRING_PROPERTY);
}
prop.setValue(value.toString());
blockProperties.add(prop);
}
}
blockToProcess.getProperties().addAll(blockProperties);
}
}
|
package com.matthewtamlin.spyglass.processors.annotation_utils.annotation_mirror_util;
import com.matthewtamlin.java_compiler_utilities.element_supplier.ElementId;
public class Data {
public static final String SPECIFIED_VALUE = "specified value";
@ElementId("get annotation mirror: without annotation")
public String field1;
@ElementId("get annotation mirror: with annotation")
@AnnotationWithValues
public String field2;
@ElementId("get annotation value ignoring defaults: no value")
@AnnotationWithValues()
public Object field3;
@ElementId("get annotation value ignoring defaults: with value")
@AnnotationWithValues(value = SPECIFIED_VALUE)
public Object field4;
@ElementId("get annotation value with defaults: no value")
@AnnotationWithValues()
public Object field5;
@ElementId("get annotation value with defaults: with value")
@AnnotationWithValues(value = SPECIFIED_VALUE)
public Object field6;
}
|
package com.opengamma.financial.analytics.model.credit;
import static com.opengamma.engine.value.ValuePropertyNames.CALCULATION_METHOD;
import static com.opengamma.engine.value.ValuePropertyNames.CURRENCY;
import static com.opengamma.engine.value.ValuePropertyNames.CURVE;
import static com.opengamma.engine.value.ValuePropertyNames.CURVE_CALCULATION_METHOD;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_HAZARD_RATE_CURVE;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_HAZARD_RATE_CURVE_CALCULATION_METHOD;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_HAZARD_RATE_CURVE_N_ITERATIONS;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_HAZARD_RATE_CURVE_RANGE_MULTIPLIER;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_HAZARD_RATE_CURVE_TOLERANCE;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_N_INTEGRATION_POINTS;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_YIELD_CURVE;
import static com.opengamma.financial.analytics.model.credit.CreditInstrumentPropertyNamesAndValues.PROPERTY_YIELD_CURVE_CALCULATION_METHOD;
import java.util.Collections;
import java.util.Set;
import javax.time.calendar.ZonedDateTime;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.credit.PriceType;
import com.opengamma.analytics.financial.credit.calibratehazardratecurve.HazardRateCurve;
import com.opengamma.analytics.financial.credit.cds.ISDACurve;
import com.opengamma.analytics.financial.credit.creditdefaultswap.definition.legacy.LegacyVanillaCreditDefaultSwapDefinition;
import com.opengamma.analytics.financial.credit.creditdefaultswap.pricing.legacy.PresentValueLegacyCreditDefaultSwap;
import com.opengamma.core.holiday.HolidaySource;
import com.opengamma.core.region.RegionSource;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetType;
import com.opengamma.engine.function.AbstractFunction;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.OpenGammaCompilationContext;
import com.opengamma.financial.analytics.conversion.CreditDefaultSwapSecurityConverter;
import com.opengamma.financial.analytics.model.cds.ISDAFunctionConstants;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.financial.security.cds.LegacyVanillaCDSSecurity;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.async.AsynchronousExecution;
import com.opengamma.util.money.Currency;
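/**
* Base class for functions that price a legacy vanilla CDS with the ISDA method, returning either the
* clean or the dirty price depending on the requested value requirement.
*/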
public abstract class LegacyVanillaCDSFunction extends AbstractFunction.NonCompiledInvoker {
private CreditDefaultSwapSecurityConverter _converter;
private final String _valueRequirement;
private final PriceType _priceType;
public LegacyVanillaCDSFunction(final String valueRequirement) {
ArgumentChecker.notNull(valueRequirement, "value requirement");
if (valueRequirement.equals(ValueRequirementNames.DIRTY_PRICE)) {
_priceType = PriceType.DIRTY;
} else if (valueRequirement.equals(ValueRequirementNames.CLEAN_PRICE)) {
_priceType = PriceType.CLEAN;
} else {
throw new IllegalArgumentException("Can only calculate clean and dirty prices");
}
_valueRequirement = valueRequirement;
}
@Override
public void init(final FunctionCompilationContext context) {
final HolidaySource holidaySource = OpenGammaCompilationContext.getHolidaySource(context);
final RegionSource regionSource = OpenGammaCompilationContext.getRegionSource(context);
_converter = new CreditDefaultSwapSecurityConverter(holidaySource, regionSource);
}
@Override
public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target,
final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
final ZonedDateTime now = executionContext.getValuationClock().zonedDateTime();
final Object yieldCurveObject = inputs.getValue(ValueRequirementNames.YIELD_CURVE);
if (yieldCurveObject == null) {
throw new OpenGammaRuntimeException("Could not get yield curve");
}
final Object hazardRateCurveObject = inputs.getValue(ValueRequirementNames.HAZARD_RATE_CURVE);
if (hazardRateCurveObject == null) {
throw new OpenGammaRuntimeException("Could not get hazard rate curve");
}
final ISDACurve yieldCurve = (ISDACurve) yieldCurveObject;
final HazardRateCurve hazardRateCurve = (HazardRateCurve) hazardRateCurveObject;
final LegacyVanillaCDSSecurity security = (LegacyVanillaCDSSecurity) target.getSecurity();
final ValueRequirement desiredValue = Iterables.getOnlyElement(desiredValues);
final String nPointsProperty = desiredValue.getConstraint(PROPERTY_N_INTEGRATION_POINTS);
final int nIntegrationPoints = Integer.parseInt(nPointsProperty);
final PresentValueLegacyCreditDefaultSwap calculator = new PresentValueLegacyCreditDefaultSwap();
final LegacyVanillaCreditDefaultSwapDefinition cds = _converter.visitLegacyVanillaCDSSecurity(security);
final double price = calculator.getPresentValueLegacyCreditDefaultSwap(now, cds, yieldCurve, hazardRateCurve, _priceType);
final ValueProperties properties = getProperties(desiredValue);
final ValueSpecification spec = new ValueSpecification(_valueRequirement, target.toSpecification(), properties);
return Collections.singleton(new ComputedValue(spec, price));
}
@Override
public ComputationTargetType getTargetType() {
return ComputationTargetType.SECURITY;
}
@Override
public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
if (target.getType() != ComputationTargetType.SECURITY) {
return false;
}
return target.getSecurity() instanceof LegacyVanillaCDSSecurity;
}
@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) {
final ValueProperties properties = getProperties(target);
return Collections.singleton(new ValueSpecification(_valueRequirement, target.toSpecification(), properties));
}
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) {
final ValueProperties constraints = desiredValue.getConstraints();
final Set<String> yieldCurveNames = constraints.getValues(PROPERTY_YIELD_CURVE);
if (yieldCurveNames == null || yieldCurveNames.size() != 1) {
return null;
}
final Set<String> yieldCurveCalculationMethods = constraints.getValues(PROPERTY_YIELD_CURVE_CALCULATION_METHOD);
if (yieldCurveCalculationMethods == null || yieldCurveCalculationMethods.size() != 1) {
return null;
}
final Set<String> hazardRateCurveNames = constraints.getValues(PROPERTY_HAZARD_RATE_CURVE);
if (hazardRateCurveNames == null || hazardRateCurveNames.size() != 1) {
return null;
}
final Set<String> hazardRateCurveCalculationMethods = constraints.getValues(PROPERTY_HAZARD_RATE_CURVE_CALCULATION_METHOD);
if (hazardRateCurveCalculationMethods == null || hazardRateCurveCalculationMethods.size() != 1) {
return null;
}
final Set<String> nCurveIterationsName = constraints.getValues(PROPERTY_HAZARD_RATE_CURVE_N_ITERATIONS);
if (nCurveIterationsName == null || nCurveIterationsName.size() != 1) {
return null;
}
final Set<String> tolerances = constraints.getValues(PROPERTY_HAZARD_RATE_CURVE_TOLERANCE);
if (tolerances == null || tolerances.size() != 1) {
return null;
}
final Set<String> rangeMultipliers = constraints.getValues(PROPERTY_HAZARD_RATE_CURVE_RANGE_MULTIPLIER);
if (rangeMultipliers == null || rangeMultipliers.size() != 1) {
return null;
}
final Set<String> nIntegrationPoints = constraints.getValues(PROPERTY_N_INTEGRATION_POINTS);
if (nIntegrationPoints == null || nIntegrationPoints.size() != 1) {
return null;
}
final String yieldCurveName = Iterables.getOnlyElement(yieldCurveNames);
final String yieldCurveCalculationMethod = Iterables.getOnlyElement(yieldCurveCalculationMethods);
final String hazardRateCurveName = Iterables.getOnlyElement(hazardRateCurveNames);
final String hazardRateCurveCalculationMethod = Iterables.getOnlyElement(hazardRateCurveCalculationMethods);
final String nCurveIterations = Iterables.getOnlyElement(nCurveIterationsName);
final String tolerance = Iterables.getOnlyElement(tolerances);
final String rangeMultiplier = Iterables.getOnlyElement(rangeMultipliers);
final LegacyVanillaCDSSecurity security = (LegacyVanillaCDSSecurity) target.getSecurity();
final Currency currency = FinancialSecurityUtils.getCurrency(security);
final ValueProperties ycProperties = ValueProperties.builder()
.with(CURVE, yieldCurveName)
.with(CURVE_CALCULATION_METHOD, yieldCurveCalculationMethod).get();
final ValueProperties hazardRateCurveProperties = ValueProperties.builder()
.with(CURVE, hazardRateCurveName)
.with(PROPERTY_HAZARD_RATE_CURVE_CALCULATION_METHOD, hazardRateCurveCalculationMethod)
.with(PROPERTY_HAZARD_RATE_CURVE_N_ITERATIONS, nCurveIterations)
.with(PROPERTY_HAZARD_RATE_CURVE_TOLERANCE, tolerance)
.with(PROPERTY_HAZARD_RATE_CURVE_RANGE_MULTIPLIER, rangeMultiplier).get();
final ValueRequirement yieldCurveRequirement = new ValueRequirement(ValueRequirementNames.YIELD_CURVE, ComputationTargetType.PRIMITIVE, currency.getUniqueId(), ycProperties);
final ValueRequirement hazardCurveRequirement = new ValueRequirement(ValueRequirementNames.HAZARD_RATE_CURVE, target.toSpecification(), hazardRateCurveProperties);
return Sets.newHashSet(yieldCurveRequirement, hazardCurveRequirement);
}
private ValueProperties getProperties(final ComputationTarget target) {
final String currency = FinancialSecurityUtils.getCurrency(target.getSecurity()).getCode();
return createValueProperties()
.withAny(PROPERTY_YIELD_CURVE)
.withAny(PROPERTY_YIELD_CURVE_CALCULATION_METHOD)
.withAny(PROPERTY_HAZARD_RATE_CURVE)
.withAny(PROPERTY_HAZARD_RATE_CURVE_CALCULATION_METHOD)
.withAny(PROPERTY_HAZARD_RATE_CURVE_N_ITERATIONS)
.withAny(PROPERTY_HAZARD_RATE_CURVE_TOLERANCE)
.withAny(PROPERTY_HAZARD_RATE_CURVE_RANGE_MULTIPLIER)
.with(CURRENCY, currency)
.with(CALCULATION_METHOD, ISDAFunctionConstants.ISDA_METHOD_NAME)
.withAny(PROPERTY_N_INTEGRATION_POINTS)
.get();
}
private ValueProperties getProperties(final ValueRequirement desiredValue) {
return desiredValue.getConstraints().copy().get();
}
}
|
package com.opengamma.master.historicaltimeseries.impl;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolutionResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.ehcache.EHCacheUtils;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import javax.time.calendar.LocalDate;
import java.text.MessageFormat;
/**
* A <code>HistoricalTimeSeriesResolver</code> that tries to find
* the resolution result in a cache. If it doesn't find it, it will
* delegate to an underlying <code>HistoricalTimeSeriesResolver</code>.
*
*/
public class EHCachingHistoricalTimeSeriesResolver implements HistoricalTimeSeriesResolver {
private static final String SEPARATOR = "~";
/**
* Cache key format for hts resolution.
*/
private static final String HISTORICAL_TIME_SERIES_RESOLUTION_CACHE_FORMAT = "htsResolution.{0}";
/**
* Default cache key format arg.
*/
private static final String HISTORICAL_TIME_SERIES_RESOLUTION_CACHE_DEFAULT_ARG = "DEFAULT";
private final HistoricalTimeSeriesResolver _underlying;
/**
* The cache manager.
*/
private final CacheManager _cacheManager;
/**
* The reference data cache.
*/
private final Cache _cache;
public EHCachingHistoricalTimeSeriesResolver(final HistoricalTimeSeriesResolver underlying, final CacheManager cacheManager) {
this(underlying, cacheManager, HISTORICAL_TIME_SERIES_RESOLUTION_CACHE_DEFAULT_ARG);
}
public EHCachingHistoricalTimeSeriesResolver(final HistoricalTimeSeriesResolver underlying, final CacheManager cacheManager, String cacheName) {
ArgumentChecker.notNull(underlying, "Underlying HistoricalTimeSeriesResolver");
ArgumentChecker.notNull(cacheManager, "cacheManager");
ArgumentChecker.notNull(cacheName, "cacheName");
_underlying = underlying;
_cacheManager = cacheManager;
String combinedCacheName = MessageFormat.format(HISTORICAL_TIME_SERIES_RESOLUTION_CACHE_FORMAT, cacheName);
EHCacheUtils.addCache(cacheManager, combinedCacheName);
_cache = EHCacheUtils.getCacheFromManager(cacheManager, combinedCacheName);
}
public CacheManager getCacheManager() {
return _cacheManager;
}
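/**
* Looks for a cached result under any identifier of the bundle; on a miss the underlying resolver is
* queried and the outcome (including misses) is cached under all data source / data provider key variants.
*/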
@Override
public HistoricalTimeSeriesResolutionResult resolve(ExternalIdBundle identifierBundle, LocalDate identifierValidityDate, String dataSource, String dataProvider, String dataField, String resolutionKey) {
for (ExternalId id : identifierBundle) {
String key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
(dataSource != null ? dataSource : "") + SEPARATOR +
(dataProvider != null ? dataProvider : "") + SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
Element cachedHtsInfo = _cache.get(key);
if (cachedHtsInfo != null) {
return (HistoricalTimeSeriesResolutionResult) cachedHtsInfo.getObjectValue();
}
}
HistoricalTimeSeriesResolutionResult returnValue =
_underlying.resolve(identifierBundle, identifierValidityDate, dataSource, dataProvider, dataField, resolutionKey);
if (returnValue != null) {
ManageableHistoricalTimeSeriesInfo info = returnValue.getHistoricalTimeSeriesInfo();
for (ExternalId id : info.getExternalIdBundle().toBundle()) {
String key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
info.getDataSource() + SEPARATOR +
info.getDataProvider() + SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, returnValue));
key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
SEPARATOR +
info.getDataProvider() + SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, returnValue));
key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
info.getDataSource() + SEPARATOR +
SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, returnValue));
key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
SEPARATOR +
SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, returnValue));
}
} else {
// PLAT-2633: Record resolution failures (misses) in the cache as well
for (ExternalId id : identifierBundle) {
String key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
dataSource + SEPARATOR +
dataProvider + SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, null));
key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
SEPARATOR +
dataProvider + SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, null));
key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
dataSource + SEPARATOR +
SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, null));
key =
id.toString() + SEPARATOR +
dataField + SEPARATOR +
SEPARATOR +
SEPARATOR +
resolutionKey + SEPARATOR +
(identifierValidityDate != null ? identifierValidityDate.toString() : "");
_cache.put(new Element(key, null));
}
}
return returnValue;
}
}
|
package org.sagebionetworks.repo.web.service.metadata;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.DockerNodeDao;
import org.sagebionetworks.repo.model.EntityHeader;
import org.sagebionetworks.repo.model.EntityType;
import org.sagebionetworks.repo.model.EntityTypeUtils;
import org.sagebionetworks.repo.model.InvalidModelException;
import org.sagebionetworks.repo.model.NodeDAO;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.docker.DockerRepository;
import org.sagebionetworks.repo.model.jdo.KeyFactory;
import org.sagebionetworks.repo.model.util.DockerNameUtil;
import org.sagebionetworks.repo.web.NotFoundException;
import org.springframework.beans.factory.annotation.Autowired;
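/**
 * Validator for external (unmanaged) Docker repository entities. Checks that the repository
 * name is well formed, that it does not point at a managed or reserved registry host, and that
 * the parent entity is an existing project, then flags the repository as unmanaged.
 */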
public class ExternalDockerRepoValidator implements EntityValidator<DockerRepository> {
@Autowired
private NodeDAO nodeDAO;
@Autowired
private DockerNodeDao dockerNodeDao;
public static boolean isReserved(String registryHost) {
if (registryHost==null) return false; // it's an implicit reference to DockerHub
String hostSansPort = DockerNameUtil.getRegistryHostSansPort(registryHost);
List<String> reservedHostRegexps = StackConfiguration.getDockerReservedRegistryHosts();
for (String reservedHostRegexp : reservedHostRegexps) {
if (Pattern.compile(reservedHostRegexp).matcher(hostSansPort).find())
return true;
}
return false;
}
	/**
	 * Validates the creation or update of an external Docker repository. Managed repositories
	 * and repositories on reserved registry hosts are rejected.
	 * @see org.sagebionetworks.repo.web.service.metadata.EntityValidator#validateEntity(org.sagebionetworks.repo.model.Entity, org.sagebionetworks.repo.web.service.metadata.EntityEvent)
	 */
@Override
public void validateEntity(DockerRepository dockerRepository, EntityEvent event)
throws InvalidModelException, NotFoundException,
DatastoreException, UnauthorizedException {
if (event.getType()!=EventType.CREATE && event.getType()!=EventType.UPDATE) {
throw new IllegalArgumentException("Unexpected event type "+event.getType());
}
String repositoryName = dockerRepository.getRepositoryName();
DockerNameUtil.validateName(repositoryName);
String registryHost = DockerNameUtil.getRegistryHost(repositoryName);
if (registryHost!=null) {
if (StackConfiguration.getDockerRegistryHosts().contains(registryHost)) {
throw new InvalidModelException("Cannot create or update a managed Docker repository.");
} else if (isReserved(registryHost)) {
throw new InvalidModelException("Cannot create or update a Docker repository having a reserved registry host.");
}
}
dockerRepository.setIsManaged(false);
String parentId = dockerRepository.getParentId();
if (parentId==null) throw new IllegalArgumentException("parentId is required.");
List<EntityHeader> headers = nodeDAO.getEntityHeader(Collections.singleton(KeyFactory.stringToKey(parentId)));
if (headers.size()==0) throw new NotFoundException("parentId "+parentId+" does not exist.");
if (headers.size()>1) throw new IllegalStateException("Expected 0-1 result for "+parentId+" but found "+headers.size());
if (EntityTypeUtils.getEntityTypeForClassName(headers.get(0).getType())!=EntityType.project) {
throw new InvalidModelException("Parent must be a project.");
}
if (event.getType()==EventType.UPDATE) {
if (dockerRepository.getId()==null) throw new InvalidModelException("Entity ID is required for update.");
// Check whether entity ID of updated Docker Repository is already used for a managed repository.
// If so, reject the update.
String managedRepositoryName = dockerNodeDao.getRepositoryNameForEntityId(dockerRepository.getId());
if (managedRepositoryName!=null) {
throw new InvalidModelException("Cannot convert a managed Docker repository into an unmanaged one.");
}
}
}
}
|
package com.splicemachine.derby.impl.sql.execute.operations;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.List;
import com.splicemachine.db.iapi.error.StandardException;
import com.splicemachine.db.iapi.reference.SQLState;
import com.splicemachine.db.iapi.services.sanity.SanityManager;
import com.splicemachine.db.iapi.sql.Activation;
import com.splicemachine.db.iapi.sql.conn.StatementContext;
import com.splicemachine.db.iapi.sql.execute.CursorResultSet;
import com.splicemachine.db.iapi.sql.execute.ExecRow;
import com.splicemachine.db.iapi.sql.execute.NoPutResultSet;
import com.splicemachine.db.iapi.sql.execute.RowChanger;
import com.splicemachine.db.iapi.types.RowLocation;
import com.splicemachine.db.impl.sql.execute.CursorActivation;
import com.splicemachine.derby.stream.function.ScrollInsensitiveFunction;
import com.splicemachine.derby.stream.iapi.DataSet;
import com.splicemachine.derby.stream.iapi.DataSetProcessor;
import com.splicemachine.derby.stream.iapi.OperationContext;
import com.splicemachine.utils.SpliceLogUtils;
import org.apache.log4j.Logger;
import com.splicemachine.derby.iapi.sql.execute.SpliceOperation;
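/**
 * Splice Machine counterpart of Derby's scroll-insensitive result set. It wraps a source
 * operation, keeps cursor-position bookkeeping fields for scrollable cursors, serializes its
 * scroll settings, and exposes the rows as a DataSet by mapping the source through a
 * ScrollInsensitiveFunction.
 */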
public class ScrollInsensitiveOperation extends SpliceBaseOperation {
private static Logger LOG = Logger.getLogger(ScrollInsensitiveOperation.class);
protected int sourceRowWidth;
protected SpliceOperation source;
protected boolean scrollable;
protected boolean keepAfterCommit;
private int maxRows;
protected StatementContext statementContext;
private int positionInSource;
private int currentPosition;
private int lastPosition;
private boolean seenLast;
private boolean beforeFirst = true;
private boolean afterLast;
/* Reference to the target result set. Target is used for updatable result
* sets in order to keep the target result set on the same row as the
* ScrollInsensitiveResultSet.
*/
private CursorResultSet target;
protected static final String NAME = ScrollInsensitiveOperation.class.getSimpleName().replaceAll("Operation","");
@Override
public String getName() {
return NAME;
}
public ScrollInsensitiveOperation () {
super();
}
public ScrollInsensitiveOperation(SpliceOperation source,
Activation activation, int resultSetNumber,
int sourceRowWidth,
boolean scrollable,
double optimizerEstimatedRowCount,
double optimizerEstimatedCost) throws StandardException {
super(activation, resultSetNumber, optimizerEstimatedRowCount, optimizerEstimatedCost);
this.keepAfterCommit = activation.getResultSetHoldability();
this.maxRows = activation.getMaxRows();
this.sourceRowWidth = sourceRowWidth;
this.source = source;
this.scrollable = scrollable;
recordConstructorTime();
if (isForUpdate()) {
target = ((CursorActivation)activation).getTargetResultSet();
} else {
target = null;
}
}
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
if (LOG.isTraceEnabled())
LOG.trace("readExternal");
super.readExternal(in);
sourceRowWidth = in.readInt();
scrollable = in.readBoolean();
keepAfterCommit = in.readBoolean();
maxRows = in.readInt();
}
@Override
public void writeExternal(ObjectOutput out) throws IOException {
if (LOG.isTraceEnabled())
LOG.trace("writeExternal");
super.writeExternal(out);
out.writeInt(sourceRowWidth);
out.writeBoolean(scrollable);
out.writeBoolean(keepAfterCommit);
out.writeInt(maxRows);
}
@Override
public List<SpliceOperation> getSubOperations() {
if (LOG.isTraceEnabled())
LOG.trace("getSubOperations");
List<SpliceOperation> operations = new ArrayList<SpliceOperation>();
operations.add((SpliceOperation) source);
return operations;
}
@Override
public SpliceOperation getLeftOperation() {
if (LOG.isTraceEnabled())
LOG.trace("getLeftOperation");
return (SpliceOperation) source;
}
@Override
public int[] getRootAccessedCols(long tableNumber) throws StandardException {
return ((SpliceOperation)source).getRootAccessedCols(tableNumber);
}
@Override
public boolean isReferencingTable(long tableNumber) {
return ((SpliceOperation)source).isReferencingTable(tableNumber);
}
@Override
public String prettyPrint(int indentLevel) {
return "ScrollInsensitive"; //this class is never used
}
public NoPutResultSet getSource() {
return this.source;
}
public boolean isForUpdate() {
return source.isForUpdate();
}
public void reopenCore() throws StandardException {
if (LOG.isTraceEnabled())
SpliceLogUtils.trace(LOG,"reopenCore");
openCore();
}
public ExecRow getAbsoluteRow(int row) throws StandardException {
checkIsOpen("absolute");
attachStatementContext();
return null;
}
public ExecRow getRelativeRow(int row) throws StandardException {
checkIsOpen("relative");
attachStatementContext();
return null;
}
public ExecRow setBeforeFirstRow() {
currentPosition = 0;
beforeFirst = true;
afterLast = false;
currentRow = null;
return null;
}
public ExecRow getFirstRow() throws StandardException {
checkIsOpen("first");
attachStatementContext();
return null;
}
public ExecRow getNextRowCore() throws StandardException {
checkIsOpen("next");
return super.getNextRowCore();
}
public ExecRow getPreviousRow() throws StandardException {
checkIsOpen("previous");
return null;
}
public ExecRow getLastRow() throws StandardException {
checkIsOpen("last");
return null;
}
public ExecRow setAfterLastRow() throws StandardException {
return null;
}
public int getRowNumber() {
return currentRow == null ? 0 : currentPosition;
}
protected void checkIsOpen(String name) throws StandardException {
if (!isOpen)
throw StandardException.newException(SQLState.LANG_RESULT_SET_NOT_OPEN, name);
}
public RowLocation getRowLocation() throws StandardException
{
if (SanityManager.DEBUG)
SanityManager.ASSERT(source instanceof CursorResultSet, "source not CursorResultSet");
return ( (CursorResultSet)source ).getRowLocation();
}
public void updateRow(ExecRow row, RowChanger rowChanger)
throws StandardException {
}
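    /*
     * Builds this operation's DataSet by mapping the source's DataSet through a
     * ScrollInsensitiveFunction inside a named scope on the operation context.
     */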
@Override
public <Op extends SpliceOperation> DataSet<LocatedRow> getDataSet(DataSetProcessor dsp) throws StandardException {
OperationContext operationContext = dsp.createOperationContext(this);
try {
operationContext.pushScope("Scroll Insensitive");
return source.getDataSet(dsp).map(new ScrollInsensitiveFunction(operationContext));
}
finally {
operationContext.popScope();
}
}
}
|
package edu.washington.escience.myriad.parallel;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.NoSuchElementException;
import java.util.Random;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.almworks.sqlite4java.SQLiteConnection;
import com.almworks.sqlite4java.SQLiteException;
import com.almworks.sqlite4java.SQLiteStatement;
import edu.washington.escience.myriad.DbException;
import edu.washington.escience.myriad.Predicate;
import edu.washington.escience.myriad.Schema;
import edu.washington.escience.myriad.TupleBatch;
import edu.washington.escience.myriad.Type;
import edu.washington.escience.myriad.accessmethod.JdbcAccessMethod;
import edu.washington.escience.myriad.operator.BlockingDataReceiver;
import edu.washington.escience.myriad.operator.DupElim;
import edu.washington.escience.myriad.operator.Filter;
import edu.washington.escience.myriad.operator.JdbcQueryScan;
import edu.washington.escience.myriad.operator.JdbcSQLProcessor;
import edu.washington.escience.myriad.operator.LocalJoin;
import edu.washington.escience.myriad.operator.Operator;
import edu.washington.escience.myriad.operator.Project;
import edu.washington.escience.myriad.operator.SQLiteQueryScan;
import edu.washington.escience.myriad.operator.SQLiteSQLProcessor;
import edu.washington.escience.myriad.parallel.Exchange.ExchangePairID;
import edu.washington.escience.myriad.table._TupleBatch;
/**
* Runs some simple tests.
*
* @author dhalperi, slxu
*
*/
public final class Main {
public static final int MASTER_ID = 0;
public static final int WORKER_1_ID = 1;
  public static final int WORKER_2_ID = 2;
public static void localJoinTestSQLite(final String[] args) throws DbException, IOException {
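    // Plan: a worker scans testtable1 and testtable2 from SQLite, joins them locally on the id
    // column, and streams the joined rows to the master through a CollectProducer; the master
    // collects them with a CollectConsumer on serverReceiveID.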
final ExchangePairID serverReceiveID = ExchangePairID.newID();
final ExchangePairID collectID = ExchangePairID.newID();
final Type[] table1Types = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] table1ColumnNames = new String[] { "id", "name" };
final Type[] table2Types = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] table2ColumnNames = new String[] { "id", "name" };
final Type[] outputTypes = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE, Type.LONG_TYPE, Type.STRING_TYPE };
final String[] outputColumnNames = new String[] { "id", "name", "id", "name" };
final Schema tableSchema1 = new Schema(table1Types, table1ColumnNames);
final Schema tableSchema2 = new Schema(table2Types, table2ColumnNames);
final Schema outputSchema = new Schema(outputTypes, outputColumnNames);
final int numPartition = 2;
// final PartitionFunction<String, Integer> pf = new SingleFieldHashPartitionFunction(numPartition);
// pf.setAttribute(SingleFieldHashPartitionFunction.FIELD_INDEX, 1); // partition by name
final SQLiteQueryScan scan1 = new SQLiteQueryScan("testtable1.db", "select * from testtable1", tableSchema1);
final SQLiteQueryScan scan2 = new SQLiteQueryScan("testtable2.db", "select * from testtable2", tableSchema2);
final LocalJoin localjoin = new LocalJoin(outputSchema, scan1, scan2, new int[] { 0 }, new int[] { 0 });
// final LocalJoin localjoin2 = new LocalJoin(outputSchema, scan1, scan2, new int[] { 0 }, new int[] { 0 });
final CollectProducer cp1 = new CollectProducer(localjoin, serverReceiveID, MASTER_ID);
final CollectProducer cp2 = new CollectProducer(localjoin, serverReceiveID, MASTER_ID);
// final CollectConsumer cc1 = new CollectConsumer(cp1, collectID, new int[] { WORKER_1_ID, WORKER_2_ID });
// final DupElim dumElim3 = new DupElim(tableSchema1, cc1);
final HashMap<Integer, Operator> workerPlans = new HashMap<Integer, Operator>();
workerPlans.put(WORKER_1_ID, cp1);// new CollectProducer(dumElim3, serverReceiveID, MASTER_ID));
// workerPlans.put(WORKER_2_ID, cp2);// new CollectProducer(dupElim2, collectID, WORKER_1_ID));
// OutputStreamSinkTupleBatch serverBuffer = new OutputStreamSinkTupleBatch(outputSchema, System.out);
new Thread() {
@Override
public void run() {
try {
Server.main(args);
} catch (final Exception e) {
e.printStackTrace();
}
}
}.start();
while (Server.runningInstance == null) {
try {
Thread.sleep(10);
} catch (final InterruptedException e) {
}
}
Server.runningInstance.exchangeSchema.put(serverReceiveID, outputSchema);
final LinkedBlockingQueue<ExchangeTupleBatch> buffer = new LinkedBlockingQueue<ExchangeTupleBatch>();
final CollectConsumer serverPlan = new CollectConsumer(outputSchema, serverReceiveID, new int[] { WORKER_1_ID });
// WORKER_2_ID
serverPlan.setInputBuffer(buffer);
Server.runningInstance.dataBuffer.put(serverPlan.getOperatorID(), buffer);
Server.runningInstance.dispatchWorkerQueryPlans(workerPlans);
System.out.println("Query dispatched to the workers");
Server.runningInstance.startServerQuery(serverPlan);
// Server.runningInstance.dispatchWorkerQueryPlans(workerPlans);
// System.out.println("Query dispatched to the workers");
// Server.runningInstance.startServerQuery(new CollectConsumer(outputSchema, serverReceiveID, new int[] { 1, 2 }));
}
public static void dupElimTestSQLite(final String[] args) throws DbException, IOException {
final ExchangePairID serverReceiveID = ExchangePairID.newID();
final ExchangePairID collectID = ExchangePairID.newID();
final Type[] table1Types = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] table1ColumnNames = new String[] { "id", "name" };
final Type[] table2Types = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] table2ColumnNames = new String[] { "id", "name" };
final Type[] outputTypes = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] outputColumnNames = new String[] { "id", "name" };
final Schema tableSchema1 = new Schema(table1Types, table1ColumnNames);
final Schema tableSchema2 = new Schema(table2Types, table2ColumnNames);
final Schema outputSchema = new Schema(outputTypes, outputColumnNames);
final int numPartition = 2;
final PartitionFunction<String, Integer> pf = new SingleFieldHashPartitionFunction(numPartition);
pf.setAttribute(SingleFieldHashPartitionFunction.FIELD_INDEX, 1); // partition by name
final SQLiteQueryScan scan1 = new SQLiteQueryScan("testtable.db", "select * from testtable1", tableSchema1);
// ShuffleProducer sp1 = new ShuffleProducer(scan1, shuffle1ID, workers, pf);
final SQLiteQueryScan scan2 = new SQLiteQueryScan("testtable.db", "select * from testtable2", tableSchema2);
// ShuffleProducer sp2 = new ShuffleProducer(scan2, shuffle2ID, workers, pf);
// SQLiteTupleBatch bufferWorker1 = new SQLiteTupleBatch(tableSchema1, "temptable.db", "temptable1");
// ShuffleConsumer sc1 = new ShuffleConsumer(sp1, shuffle1ID, workers, bufferWorker1);
// SQLiteTupleBatch bufferWorker2 = new SQLiteTupleBatch(tableSchema2, "temptable.db", "temptable2");
// ShuffleConsumer sc2 = new ShuffleConsumer(sp2, shuffle2ID, workers, bufferWorker2);
// SQLiteSQLProcessor ssp =
// new SQLiteSQLProcessor("testtable.db",
// "select * from testtable1 union select * from testtable2", outputSchema,
// new Operator[] { scan1, scan2 });
// DoNothingOperator dno = new DoNothingOperator(outputSchema, new Operator[] { sc1, sc2 });
// CollectProducer cp = new CollectProducer(ssp, serverReceiveID, server.getAddress());
final DupElim dupElim1 = new DupElim(tableSchema1, scan1);
final DupElim dupElim2 = new DupElim(tableSchema2, scan2);
final HashMap<Integer, Operator> workerPlans = new HashMap<Integer, Operator>();
final CollectProducer cp1 = new CollectProducer(dupElim1, collectID, WORKER_1_ID);
final CollectConsumer cc1 = new CollectConsumer(cp1, collectID, new int[] { WORKER_1_ID, WORKER_2_ID });
final DupElim dumElim3 = new DupElim(tableSchema1, cc1);
workerPlans.put(WORKER_1_ID, new CollectProducer(dumElim3, serverReceiveID, MASTER_ID));
workerPlans.put(WORKER_2_ID, new CollectProducer(dupElim2, collectID, WORKER_1_ID));
// OutputStreamSinkTupleBatch serverBuffer = new OutputStreamSinkTupleBatch(outputSchema, System.out);
new Thread() {
@Override
public void run() {
try {
Server.main(args);
} catch (final Exception e) {
e.printStackTrace();
}
}
}.start();
while (Server.runningInstance == null) {
try {
Thread.sleep(10);
} catch (final InterruptedException e) {
}
}
Server.runningInstance.exchangeSchema.put(serverReceiveID, outputSchema);
final LinkedBlockingQueue<ExchangeTupleBatch> buffer = new LinkedBlockingQueue<ExchangeTupleBatch>();
final CollectConsumer serverPlan = new CollectConsumer(outputSchema, serverReceiveID, new int[] { WORKER_1_ID });
serverPlan.setInputBuffer(buffer);
Server.runningInstance.dataBuffer.put(serverPlan.getOperatorID(), buffer);
Server.runningInstance.dispatchWorkerQueryPlans(workerPlans);
System.out.println("Query dispatched to the workers");
Server.runningInstance.startServerQuery(serverPlan);
// Server.runningInstance.dispatchWorkerQueryPlans(workerPlans);
// System.out.println("Query dispatched to the workers");
// Server.runningInstance.startServerQuery(new CollectConsumer(outputSchema, serverReceiveID, new int[] { 1, 2 }));
}
public static void filesystemWriteTest() throws Exception {
final Date now = new Date();
final Date begin = now;
final Random r = new Random();
final File f = new File("/tmp/tmpfile");
final FileOutputStream fos = new FileOutputStream(f);
for (int i = 0; i < 1000000; i++) {
fos.write((i + "|" + i + "th " + r.nextInt()).getBytes());
}
fos.close();
System.out.println((new Date().getTime() - begin.getTime()) * 1.0 / 1000 + " seconds in total");
// 2.371 seconds
}
public static void JdbcTest() throws DbException {
final String host = "54.245.108.198";
final int port = 3306;
final String user = "myriad";
final String password = "nays26[shark";
final String dbms = "mysql";
final String databaseName = "myriad_test";
final String jdbcDriverName = "com.mysql.jdbc.Driver";
final String query = "select * from testtable";
final String insert = "INSERT INTO testtable2 VALUES(?)";
final Schema schema = new Schema(new Type[] { Type.INT_TYPE, Type.STRING_TYPE }, new String[] { "id", "name" });
final String connectionString =
"jdbc:" + dbms + "://" + host + ":" + port + "/" + databaseName + "?user=" + user + "&password=" + password;
final JdbcQueryScan scan = new JdbcQueryScan(jdbcDriverName, connectionString, query, schema, "", "");
final Filter filter1 = new Filter(Predicate.Op.GREATER_THAN_OR_EQ, 0, new Integer(50), scan);
final Filter filter2 = new Filter(Predicate.Op.LESS_THAN_OR_EQ, 0, new Integer(60), filter1);
final ArrayList<Integer> fieldIdx = new ArrayList<Integer>();
fieldIdx.add(1);
final ArrayList<Type> fieldType = new ArrayList<Type>();
fieldType.add(Type.STRING_TYPE);
final Project project = new Project(fieldIdx, fieldType, filter2);
final Operator root = project;
root.open();
// Schema schema = root.getSchema();
// if (schema != null)
// System.out.println("Schema of result is: " + schema);
// } else {
// System.err.println("Result has no Schema, exiting");
// root.close();
// return;
_TupleBatch tb = null;
while ((tb = root.next()) != null) {
System.out.println(tb);
JdbcAccessMethod.tupleBatchInsert(jdbcDriverName, connectionString, insert, (TupleBatch) tb, "", "");
}
root.close();
}
public static void jdbcTest_slxu(final String[] args) throws NoSuchElementException, DbException {
final Schema outputSchema =
new Schema(new Type[] { Type.INT_TYPE, Type.STRING_TYPE }, new String[] { "id", "name" });
final JdbcQueryScan scan =
new JdbcQueryScan("com.mysql.jdbc.Driver", "jdbc:mysql://localhost:3306/test", "select * from testtable1",
outputSchema, "", "");
// Select filter1 = new Select(Predicate.Op.GREATER_THAN_OR_EQ, 0, new Integer(50), scan);
// Select filter2 = new Select(Predicate.Op.LESS_THAN_OR_EQ, 0, new Integer(60), filter1);
// ArrayList<Integer> fieldIdx = new ArrayList<Integer>();
// fieldIdx.add(1);
// ArrayList<Type> fieldType = new ArrayList<Type>();
// fieldType.add(Type.STRING_TYPE);
// Project project = new Project(fieldIdx, fieldType, filter2);
final Operator root = scan;
root.open();
// scan.open();
// Schema schema = root.getSchema();
// if (schema != null) {
// System.out.println(schema);
// } else
// return;
_TupleBatch tb = null;
while ((tb = root.next()) != null) {
System.out.println(tb.outputRawData());
}
}
public static void main(final String[] args) throws Exception {
// JdbcTest();
// SQLiteTest();
// sqliteEmptyTest();
// parallelTestJDBC(args);
// parallelTestSQLite(args);
// jdbcTest_slxu(args);
// shuffleTestSQLite(args);
// sqliteInsertSpeedTest();
// filesystemWriteTest();
// shuffleTestSQLite(args);
// dupElimTestSQLite(args);
localJoinTestSQLite(args);
// shuffleTestSQLite(args);
// sqliteInsertSpeedTest();
// filesystemWriteTest();
}
public static void parallelTestJDBC(final String[] args) throws DbException, IOException {
// create table testtable1 (id int, name varchar(20));
// insert into testtable1 (id,name) values (1,'name1'), (2, 'name2');
// create table testtable2 (id int, name varchar(20));
// insert into testtable2 (id,name) values (1,'name1'), (2, 'name2');
// create table temptable1 (id int, name varchar(20));
// Process worker1P = new
// ProcessBuilder("/usr/bin/java","-Dfile.encoding=UTF-8 -classpath /home/slxu/workspace/JdbcAccessMethod/bin:/home/slxu/workspace/JdbcAccessMethod/lib/mysql-connector-java-5.1.21-bin.jar:/home/slxu/workspace/JdbcAccessMethod/lib/sqlite4java-282/sqlite4java.jar:/home/slxu/workspace/JdbcAccessMethod/lib/guava-12.0.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-core-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-filter-compression-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-api-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-log4j12-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/log4j-1.2.17.jar:/home/slxu/workspace/JdbcAccessMethod/lib/jline-0.9.94.jar:/home/slxu/workspace/JdbcAccessMethod/lib/commons-lang3-3.1.jar edu.washington.escience.parallel.Worker localhost:9001 localhost:8001").start();
// Process worker2P = new ProcessBuilder("java",
// "-Dfile.encoding=UTF-8 -classpath /home/slxu/workspace/JdbcAccessMethod/bin:/home/slxu/workspace/JdbcAccessMethod/lib/mysql-connector-java-5.1.21-bin.jar:/home/slxu/workspace/JdbcAccessMethod/lib/sqlite4java-282/sqlite4java.jar:/home/slxu/workspace/JdbcAccessMethod/lib/guava-12.0.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-core-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-filter-compression-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-api-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-log4j12-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/log4j-1.2.17.jar:/home/slxu/workspace/JdbcAccessMethod/lib/jline-0.9.94.jar:/home/slxu/workspace/JdbcAccessMethod/lib/commons-lang3-3.1.jar edu.washington.escience.parallel.Worker localhost:9002 localhost:8001").start();
final String username = "root";
final String password = "1234";
final ExchangePairID serverReceiveID = ExchangePairID.newID();
final ExchangePairID worker2ReceiveID = ExchangePairID.newID();
final Type[] types = new Type[] { Type.INT_TYPE, Type.STRING_TYPE };
final String[] columnNames = new String[] { "id", "name" };
final Schema outputSchema = new Schema(types, columnNames);
final JdbcQueryScan scan1 =
new JdbcQueryScan("com.mysql.jdbc.Driver", "jdbc:mysql://localhost:3306/test",
"select distinct * from testtable1", outputSchema, username, password);
final CollectProducer cp1 = new CollectProducer(scan1, worker2ReceiveID, WORKER_2_ID);
final JdbcQueryScan scan2 =
new JdbcQueryScan("com.mysql.jdbc.Driver", "jdbc:mysql://localhost:3306/test",
"select distinct * from testtable2", outputSchema, username, password);
final CollectProducer cp2 = new CollectProducer(scan2, worker2ReceiveID, WORKER_2_ID);
// CollectProducer child, ParallelOperatorID operatorID, SocketInfo[] workers
final JdbcTupleBatch bufferWorker2 =
new JdbcTupleBatch(outputSchema, "temptable1", "jdbc:mysql://localhost:3306/test", "com.mysql.jdbc.Driver",
username, password);
final CollectConsumer cc2 = new CollectConsumer(cp2, worker2ReceiveID, new int[] { 1, 2 });
final BlockingDataReceiver block2 = new BlockingDataReceiver(bufferWorker2, cc2);
final JdbcSQLProcessor scan22 =
new JdbcSQLProcessor("com.mysql.jdbc.Driver", "jdbc:mysql://localhost:3306/test",
"select distinct * from temptable1", outputSchema, block2, username, password);
final CollectProducer cp22 = new CollectProducer(scan22, serverReceiveID, MASTER_ID);
final HashMap<Integer, Operator> workerPlans = new HashMap<Integer, Operator>();
workerPlans.put(WORKER_1_ID, cp1);
workerPlans.put(WORKER_2_ID, cp22);
new Thread() {
@Override
public void run() {
try {
Server.main(args);
} catch (final Exception e) {
e.printStackTrace();
}
}
}.start();
while (Server.runningInstance == null) {
try {
Thread.sleep(10);
} catch (final InterruptedException e) {
}
}
Server.runningInstance.dispatchWorkerQueryPlans(workerPlans);
System.out.println("Query dispatched to the workers");
Server.runningInstance.startServerQuery(new CollectConsumer(outputSchema, serverReceiveID,
new int[] { WORKER_2_ID }));
}
public static void parallelTestSQLite(final String[] args) throws DbException, IOException {
// create table testtable1 (id int, name varchar(20));
// insert into testtable1 (id,name) values (1,'name1'), (2, 'name2');
// create table testtable2 (id int, name varchar(20));
// insert into testtable2 (id,name) values (1,'name1'), (2, 'name2');
// create table temptable1 (id int, name varchar(20));
// Process worker1P = new
// ProcessBuilder("/usr/bin/java","-Dfile.encoding=UTF-8 -classpath /home/slxu/workspace/JdbcAccessMethod/bin:/home/slxu/workspace/JdbcAccessMethod/lib/mysql-connector-java-5.1.21-bin.jar:/home/slxu/workspace/JdbcAccessMethod/lib/sqlite4java-282/sqlite4java.jar:/home/slxu/workspace/JdbcAccessMethod/lib/guava-12.0.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-core-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-filter-compression-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-api-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-log4j12-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/log4j-1.2.17.jar:/home/slxu/workspace/JdbcAccessMethod/lib/jline-0.9.94.jar:/home/slxu/workspace/JdbcAccessMethod/lib/commons-lang3-3.1.jar edu.washington.escience.parallel.Worker localhost:9001 localhost:8001").start();
// Process worker2P = new ProcessBuilder("java",
// "-Dfile.encoding=UTF-8 -classpath /home/slxu/workspace/JdbcAccessMethod/bin:/home/slxu/workspace/JdbcAccessMethod/lib/mysql-connector-java-5.1.21-bin.jar:/home/slxu/workspace/JdbcAccessMethod/lib/sqlite4java-282/sqlite4java.jar:/home/slxu/workspace/JdbcAccessMethod/lib/guava-12.0.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-core-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/mina-filter-compression-2.0.4.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-api-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/slf4j-log4j12-1.6.1.jar:/home/slxu/workspace/JdbcAccessMethod/lib/log4j-1.2.17.jar:/home/slxu/workspace/JdbcAccessMethod/lib/jline-0.9.94.jar:/home/slxu/workspace/JdbcAccessMethod/lib/commons-lang3-3.1.jar edu.washington.escience.parallel.Worker localhost:9002 localhost:8001").start();
// String username = "root";
// String password = "1234";
final ExchangePairID serverReceiveID = ExchangePairID.newID();
final ExchangePairID worker2ReceiveID = ExchangePairID.newID();
final Type[] types = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] columnNames = new String[] { "id", "name" };
final Schema outputSchema = new Schema(types, columnNames);
final SQLiteQueryScan scan1 =
new SQLiteQueryScan("testtable1.db", "select distinct * from testtable1", outputSchema);
final CollectProducer cp1 = new CollectProducer(scan1, worker2ReceiveID, WORKER_2_ID);
final SQLiteQueryScan scan2 =
new SQLiteQueryScan("testtable2.db", "select distinct * from testtable2", outputSchema);
final CollectProducer cp2 = new CollectProducer(scan2, worker2ReceiveID, WORKER_2_ID);
// CollectProducer child, ParallelOperatorID operatorID, SocketInfo[] workers
final SQLiteTupleBatch bufferWorker2 = new SQLiteTupleBatch(outputSchema, "/tmp/temptable1.db", "temptable1");
final CollectConsumer cc2 = new CollectConsumer(cp2, worker2ReceiveID, new int[] { WORKER_1_ID, WORKER_2_ID });
final BlockingDataReceiver block2 = new BlockingDataReceiver(bufferWorker2, cc2);
final SQLiteSQLProcessor scan22 =
new SQLiteSQLProcessor("temptable1.db", "select distinct * from temptable1", outputSchema,
new Operator[] { block2 });
final CollectProducer cp22 = new CollectProducer(scan22, serverReceiveID, MASTER_ID);
final HashMap<Integer, Operator> workerPlans = new HashMap<Integer, Operator>();
workerPlans.put(WORKER_1_ID, cp1);
workerPlans.put(WORKER_2_ID, cp22);
new Thread() {
@Override
public void run() {
try {
Server.main(args);
} catch (final Exception e) {
e.printStackTrace();
}
}
}.start();
while (Server.runningInstance == null) {
try {
Thread.sleep(10);
} catch (final InterruptedException e) {
}
}
Server.runningInstance.dispatchWorkerQueryPlans(workerPlans);
System.out.println("Query dispatched to the workers");
Server.runningInstance.startServerQuery(new CollectConsumer(outputSchema, serverReceiveID, new int[] { 2 }));
}
public static void shuffleTestSQLite(final String[] args) throws DbException, IOException {
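    // Plan: both workers scan the two tables, hash-partition the rows by name through
    // ShuffleProducer/ShuffleConsumer pairs, buffer the shuffled rows into temporary SQLite
    // tables, join them there with a SQLiteSQLProcessor, and send the result to the master.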
final ExchangePairID serverReceiveID = ExchangePairID.newID();
final ExchangePairID shuffle1ID = ExchangePairID.newID();
final ExchangePairID shuffle2ID = ExchangePairID.newID();
final Type[] table1Types = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] table1ColumnNames = new String[] { "id", "name" };
final Type[] table2Types = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE };
final String[] table2ColumnNames = new String[] { "id", "name" };
final Type[] outputTypes = new Type[] { Type.LONG_TYPE, Type.STRING_TYPE, Type.LONG_TYPE, Type.STRING_TYPE };
final String[] outputColumnNames = new String[] { "id", "name", "id", "name" };
final Schema tableSchema1 = new Schema(table1Types, table1ColumnNames);
final Schema tableSchema2 = new Schema(table2Types, table2ColumnNames);
final Schema outputSchema = new Schema(outputTypes, outputColumnNames);
final int numPartition = 2;
final PartitionFunction<String, Integer> pf = new SingleFieldHashPartitionFunction(numPartition);
pf.setAttribute(SingleFieldHashPartitionFunction.FIELD_INDEX, 1); // partition by name
final SQLiteQueryScan scan1 = new SQLiteQueryScan("testtable.db", "select * from testtable1", tableSchema1);
final ShuffleProducer sp1 = new ShuffleProducer(scan1, shuffle1ID, new int[] { WORKER_1_ID, WORKER_2_ID }, pf);
final SQLiteQueryScan scan2 = new SQLiteQueryScan("testtable.db", "select * from testtable2", tableSchema2);
final ShuffleProducer sp2 = new ShuffleProducer(scan2, shuffle2ID, new int[] { WORKER_1_ID, WORKER_2_ID }, pf);
final SQLiteTupleBatch bufferWorker1 = new SQLiteTupleBatch(tableSchema1, "temptable.db", "temptable1");
final ShuffleConsumer sc1 = new ShuffleConsumer(sp1, shuffle1ID, new int[] { WORKER_1_ID, WORKER_2_ID });
final BlockingDataReceiver buffer1 = new BlockingDataReceiver(bufferWorker1, sc1);
final SQLiteTupleBatch bufferWorker2 = new SQLiteTupleBatch(tableSchema2, "temptable.db", "temptable2");
final ShuffleConsumer sc2 = new ShuffleConsumer(sp2, shuffle2ID, new int[] { WORKER_1_ID, WORKER_2_ID });
final BlockingDataReceiver buffer2 = new BlockingDataReceiver(bufferWorker2, sc2);
final SQLiteSQLProcessor ssp =
new SQLiteSQLProcessor("temptable.db",
"select * from temptable1 inner join temptable2 on temptable1.name=temptable2.name", outputSchema,
new Operator[] { buffer1, buffer2 });
// DoNothingOperator dno = new DoNothingOperator(outputSchema, new Operator[] { buffer1, buffer2 });
final CollectProducer cp = new CollectProducer(ssp, serverReceiveID, MASTER_ID);
final HashMap<Integer, Operator> workerPlans = new HashMap<Integer, Operator>();
workerPlans.put(WORKER_1_ID, cp);
workerPlans.put(WORKER_2_ID, cp);
new Thread() {
@Override
public void run() {
try {
Server.main(args);
} catch (final Exception e) {
e.printStackTrace();
}
}
}.start();
while (Server.runningInstance == null) {
try {
Thread.sleep(10);
} catch (final InterruptedException e) {
}
}
Server.runningInstance.exchangeSchema.put(serverReceiveID, outputSchema);
final LinkedBlockingQueue<ExchangeTupleBatch> buffer = new LinkedBlockingQueue<ExchangeTupleBatch>();
final CollectConsumer serverPlan =
new CollectConsumer(outputSchema, serverReceiveID, new int[] { WORKER_1_ID, WORKER_2_ID });
serverPlan.setInputBuffer(buffer);
Server.runningInstance.dataBuffer.put(serverPlan.getOperatorID(), buffer);
Server.runningInstance.dispatchWorkerQueryPlans(workerPlans);
System.out.println("Query dispatched to the workers");
Server.runningInstance.startServerQuery(serverPlan);
}
public static void sqliteEmptyTest() throws SQLiteException {
final SQLiteConnection sqliteConnection = new SQLiteConnection(new File("/tmp/test/emptytable.db"));
sqliteConnection.open(false);
/* Set up and execute the query */
final SQLiteStatement statement = sqliteConnection.prepare("select * from empty");
/* Step the statement once so we can figure out the Schema */
statement.step();
try {
if (!statement.hasStepped()) {
statement.step();
}
System.out.println(Schema.fromSQLiteStatement(statement));
} catch (final SQLiteException e) {
throw new RuntimeException(e.getMessage());
}
}
public static void sqliteInsertSpeedTest() throws SQLiteException {
final SQLiteConnection sqliteConnection = new SQLiteConnection(new File("/tmp/test/test.db"));
sqliteConnection.open(false);
/* Set up and execute the query */
final SQLiteStatement statement = sqliteConnection.prepare("insert into test (id,name) values (?,?)");
Date now = new Date();
final Date begin = now;
final Random r = new Random();
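    // Batch the inserts: open an explicit transaction every 100 rows and commit it after the
    // 100th row of each batch, instead of committing once per row.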
for (int i = 0; i < 1000000; i++) {
if (i % 100 == 0) {
sqliteConnection.exec("begin transaction");
}
statement.bind(1, i);
statement.bind(2, i + "th " + r.nextInt());
statement.step();
statement.reset();
if (i % 1000 == 0) {
final Date tmp = new Date();
System.out.println((tmp.getTime() - now.getTime()) * 1.0 / 1000 + " seconds per 1000");
now = tmp;
}
if (i % 100 == 99) {
sqliteConnection.exec("commit transaction");
}
}
System.out.println((new Date().getTime() - begin.getTime()) * 1.0 / 1000 + " seconds in total");
// 4 seconds for 1000000 tuples insert in one transaction
// 93.884 seconds for 1000000 tuples insert in 1000-size tuplebatches.
}
public static void SQLiteTest() throws DbException {
final String filename = "sql/sqlite.myriad_test/myriad_sqlite_test.db";
final String query = "SELECT * FROM testtable";
Logger.getLogger("com.almworks.sqlite4java").setLevel(Level.OFF);
final Schema outputSchema =
new Schema(new Type[] { Type.LONG_TYPE, Type.STRING_TYPE }, new String[] { "id", "name" });
/* Scan the testtable in database */
final SQLiteQueryScan scan = new SQLiteQueryScan(filename, query, outputSchema);
/* Filter on first column INTEGER >= 50 */
// Filter filter1 = new Filter(Predicate.Op.GREATER_THAN_OR_EQ, 0, new Long(50), scan);
/* Filter on first column INTEGER <= 60 */
// Filter filter2 = new Filter(Predicate.Op.LESS_THAN_OR_EQ, 0, new Long(60), filter1);
/* Project onto second column STRING */
final ArrayList<Integer> fieldIdx = new ArrayList<Integer>();
fieldIdx.add(1);
final ArrayList<Type> fieldType = new ArrayList<Type>();
fieldType.add(Type.STRING_TYPE);
// Project project = new Project(fieldIdx, fieldType, filter2);
/* Project is the output operator */
final Operator root = scan;
root.open();
/* For debugging purposes, print Schema */
final Schema schema = root.getSchema();
if (schema != null) {
System.out.println("Schema of result is: " + schema);
} else {
System.err.println("Result has no Schema, exiting");
root.close();
return;
}
/* Print all the results */
_TupleBatch tb = null;
while ((tb = root.next()) != null) {
System.out.println(tb);
// SQLiteAccessMethod.tupleBatchInsert(filename, insert, (TupleBatch) tb);
}
/* Cleanup */
root.close();
}
/** Inaccessible. */
private Main() {
}
}
|
package ca.corefacility.bioinformatics.irida.processing.concatenate.impl;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import ca.corefacility.bioinformatics.irida.exceptions.ConcatenateException;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject;
import ca.corefacility.bioinformatics.irida.processing.concatenate.SequencingObjectConcatenator;
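/**
 * {@link SequencingObjectConcatenator} implementation for {@link SequenceFilePair}s. Appends
 * the forward files of all given pairs into one temporary forward file and the reverse files
 * into one temporary reverse file, then returns them as a new {@link SequenceFilePair}.
 */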
public class SequenceFilePairConcatenator implements SequencingObjectConcatenator<SequenceFilePair> {
public SequenceFilePairConcatenator(){
}
@Override
public SequenceFilePair concatenateFiles(Set<? extends SequencingObject> toConcatenate)
throws ConcatenateException {
SequenceFilePair firstFile = (SequenceFilePair) toConcatenate.iterator().next();
SequenceFile originalForward = firstFile.getForwardSequenceFile();
SequenceFile originalReverse = firstFile.getReverseSequenceFile();
String forwardName = originalForward.getFileName();
String reverseName = originalReverse.getFileName();
Path forwardFile;
Path reverseFile;
try {
Path tempDirectory = Files.createTempDirectory(null);
forwardFile = tempDirectory.resolve(forwardName);
reverseFile = tempDirectory.resolve(reverseName);
forwardFile = Files.createFile(forwardFile);
reverseFile = Files.createFile(reverseFile);
} catch (IOException e) {
throw new ConcatenateException("Could not create temporary files", e);
}
for (SequencingObject f : toConcatenate) {
SequenceFilePair pair = (SequenceFilePair) f;
SequenceFile forwardSequenceFile = pair.getForwardSequenceFile();
SequenceFile reverseSequenceFile = pair.getReverseSequenceFile();
appendToFile(forwardFile, forwardSequenceFile);
appendToFile(reverseFile, reverseSequenceFile);
}
SequenceFile forward = new SequenceFile(forwardFile);
SequenceFile reverse = new SequenceFile(reverseFile);
SequenceFilePair sequenceFilePair = new SequenceFilePair(forward, reverse);
return sequenceFilePair;
}
private void appendToFile(Path target, SequenceFile file) throws ConcatenateException {
try (FileWriter fw = new FileWriter(target.toFile(), true);
BufferedWriter writer = new BufferedWriter(fw);
FileReader reader = new FileReader(file.getFile().toFile())) {
IOUtils.copy(reader, writer);
} catch (IOException e) {
throw new ConcatenateException("Could not open target file for writing", e);
}
}
}
|
package edu.wheaton.simulator.gui.screen;
import java.awt.Component;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.ScrollPaneConstants;
import com.google.common.collect.ImmutableMap;
import edu.wheaton.simulator.gui.BoxLayoutAxis;
import edu.wheaton.simulator.gui.Gui;
import edu.wheaton.simulator.gui.MaxSize;
import edu.wheaton.simulator.gui.MinSize;
import edu.wheaton.simulator.gui.PrefSize;
import edu.wheaton.simulator.gui.SimulatorFacade;
//TODO add elements for step delay
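/**
 * Setup screen for configuring a simulation: name, grid dimensions, step delay, update type,
 * maximum step count, and per-agent-type population limits.
 */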
public class SetupScreen extends Screen {
private JTextField nameField;
private JTextField timeField;
private JTextField widthField;
private JTextField heightField;
private JTextField delayField;
private String[] agentNames;
private JComboBox updateBox;
private ArrayList<JComboBox> agentTypes;
private ArrayList<JTextField> values;
private ArrayList<JButton> deleteButtons;
private ArrayList<JPanel> subPanels;
private JScrollPane scrollPane;
private JPanel conListPanel;
private JButton addConditionButton;
private static final long serialVersionUID = -8347080877399964861L;
public SetupScreen(final SimulatorFacade gm) {
super(gm);
this.setLayout(new GridBagLayout());
this.setMinimumSize(new MinSize(410,400));
this.setPreferredSize(new PrefSize(410,400));
agentNames = new String[0];
agentTypes = new ArrayList<JComboBox>();
deleteButtons = new ArrayList<JButton>();
subPanels = new ArrayList<JPanel>();
agentTypes = new ArrayList<JComboBox>();
values = new ArrayList<JTextField>();
JLabel nameLabel = Gui.makeLabel("Name:",
new MinSize(50,30));
nameField = Gui.makeTextField(gm.getSimName(), 20,
null, new MinSize(75,30));
JLabel widthLabel = Gui.makeLabel("Width:",
new MinSize(50,30));
widthField = Gui.makeTextField("10", 5, MaxSize.NULL,
new MinSize(50,30));
JLabel yLabel = Gui.makeLabel("Height:",
new MinSize(60,30));
heightField = Gui.makeTextField("10", 5, MaxSize.NULL,
new MinSize(50,30));
JLabel updateLabel = Gui.makeLabel("Update type:", MaxSize.NULL, null);
updateBox = Gui.makeComboBox(new String[] { "Linear", "Atomic",
"Priority" }, MaxSize.NULL);
updateBox.setMinimumSize(new MinSize(250,30));
updateBox.setPreferredSize(new PrefSize(250,30));
//TODO working on adding step delay components
JLabel delayLabel = Gui.makeLabel("Step delay (ms):", MaxSize.NULL, null);
delayField = Gui.makeTextField("1.0", 20, MaxSize.NULL,
new MinSize(100,30));
JLabel timeLabel = Gui.makeLabel("Max steps:", MaxSize.NULL,
null);
timeField = Gui.makeTextField(null, 20, MaxSize.NULL,
new MinSize(60,30));
JLabel agentTypeLabel = Gui.makeLabel("Agent Type", MaxSize.NULL, null);
JLabel valueLabel = Gui.makeLabel("Population Limit", MaxSize.NULL, null);
conListPanel = makeConditionListPanel();
conListPanel.setMinimumSize(new MinSize(250,300));
conListPanel.setPreferredSize(new PrefSize(250,300));
scrollPane = new JScrollPane(conListPanel,
ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS,ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
scrollPane.setMinimumSize(new MinSize(400,300));
scrollPane.setPreferredSize(new PrefSize(400,300));
JPanel scrollPaneWrapper = Gui.makePanel((BoxLayoutAxis)null,MaxSize.NULL,null,(Component[])null);
scrollPaneWrapper.add(scrollPane);
addConditionButton = Gui.makeButton("Add Field", null,
new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
addCondition();
}
});
JPanel bottomButtons = Gui.makePanel(
Gui.makeButton("Revert", null, new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
load();
}
}), makeConfirmButton(),addConditionButton);
GridBagConstraints c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 0;
c.insets = new Insets(0, 0, 0, 3);
this.add(nameLabel, c);
c = new GridBagConstraints();
c.gridx = 1;
c.gridy = 0;
c.gridwidth = 3;
c.insets = new Insets(0, 0, 0, 3);
this.add(nameField, c);
c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 1;
c.insets = new Insets(0, 0, 0, 3);
this.add(widthLabel, c);
c = new GridBagConstraints();
c.gridx = 1;
c.gridy = 1;
c.insets = new Insets(0, -53, 0, 3);
this.add(widthField, c);
c = new GridBagConstraints();
c.gridx = 2;
c.gridy = 1;
c.insets = new Insets(0, -45, 0, 3);
this.add(yLabel, c);
c = new GridBagConstraints();
c.gridx = 3;
c.gridy = 1;
c.insets = new Insets(0, 0, 0, 3);
this.add(heightField, c);
c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 2;
c.insets = new Insets(0, 0, 0, 3);
this.add(delayLabel, c);
c = new GridBagConstraints();
c.gridx = 1;
c.gridy = 2;
c.insets = new Insets(0, 0, 0, 3);
c.gridwidth = 3;
this.add(delayField, c);
c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 3;
c.insets = new Insets(0, 0, 0, 3);
this.add(updateLabel, c);
c = new GridBagConstraints();
c.gridx = 1;
c.gridy = 3;
c.insets = new Insets(0, 0, 0, 3);
c.gridwidth = 3;
this.add(updateBox, c);
c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 4;
c.insets = new Insets(0,0, 0, 3);
this.add(timeLabel, c);
c = new GridBagConstraints();
c.gridx = 1;
c.gridy = 4;
c.insets = new Insets(0, 0, 0, 3);
c.gridwidth = 3;
this.add(timeField, c);
c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 5;
c.insets = new Insets(10, 30, 0, 0);
this.add(agentTypeLabel, c);
c = new GridBagConstraints();
c.gridx = 1;
c.gridy = 5;
c.insets = new Insets(10, 50, 0, 0);
this.add(valueLabel, c);
c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 6;
c.gridwidth = 4;
c.insets = new Insets(2,0, 0, 0);
		c.fill = GridBagConstraints.HORIZONTAL;
this.add(scrollPaneWrapper,c);
c = new GridBagConstraints();
c.gridy = 8;
c.insets = new Insets(0,0,0,0);
c.gridwidth = 4;
		c.fill = GridBagConstraints.HORIZONTAL;
this.add(Gui.makePanel(bottomButtons),c);
validate();
}
private JButton makeConfirmButton() {
return Gui.makeButton("Confirm", null, new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
try {
SimulatorFacade gm = getGuiManager();
int newWidth = Integer.parseInt(widthField.getText());
					int newHeight = Integer.parseInt(heightField.getText());
int newTime = Integer.parseInt(timeField.getText());
int newDelay = Integer.parseInt(delayField.getText());
if (newWidth <= 0 || newHeight <= 0 || newTime <= 0 || newDelay < 0)
throw new NumberFormatException();
if (newWidth < gm.getGridWidth()
|| newHeight < gm.getGridHeight()) {
int selection = JOptionPane
.showConfirmDialog(
null,
"The new grid size you provided"
+ "\nis smaller than its current value."
+ "\nThis may result in the deletion of objects placed in the grid that"
+ "\ncannot fit within these new dimensions."
+ "\nFurthermore, agent data that depended on specific coordinates may"
+ "\nneed to be checked for bugs after resizing."
+ "\n\nIf you are sure you want to apply these changes, click 'Ok', otherwise click 'No' or 'Cancel'");
if (selection == JOptionPane.YES_OPTION)
gm.resizeGrid(newWidth, newHeight);
else
return;
}
if (nameField.getText().equals(""))
throw new Exception("All fields must have input");
gm.setName(nameField.getText());
for (int i = 0; i < values.size(); i++)
if (values.get(i).getText().equals(""))
throw new Exception("All fields must have input.");
gm.resizeGrid(newWidth, newHeight);
gm.setStepLimit(newTime);
gm.setSleepPeriod(newDelay);
String str = (String) updateBox.getSelectedItem();
if (str.equals("Linear"))
gm.setLinearUpdate();
else if (str.equals("Atomic"))
gm.setAtomicUpdate();
else
gm.setPriorityUpdate(0, 50);
for (int i = 0; i < values.size(); i++) {
gm.setPopLimit(
(String) (agentTypes.get(i).getSelectedItem()),
Integer.parseInt(values.get(i).getText()));
}
load();
} catch (NumberFormatException excep) {
					JOptionPane
							.showMessageDialog(null,
									"Width, Height, and Max steps must be integers greater than 0. Step delay must be an integer no less than 0.");
} catch (Exception excep) {
JOptionPane.showMessageDialog(null, excep.getMessage());
}
}
});
}
@Override
public void load() {
reset();
nameField.setText(getGuiManager().getSimName());
updateBox.setSelectedItem(getGuiManager().getCurrentUpdater());
		SimulatorFacade gm = getGuiManager();
		widthField.setText(gm.getGridWidth().toString());
		heightField.setText(gm.getGridHeight().toString());
		delayField.setText(gm.getSleepPeriod().toString());
int stepLimit = gm.getStepLimit();
agentNames = gm.getPrototypeNames().toArray(agentNames);
timeField.setText(stepLimit + "");
// to prevent accidental starting simulation with time limit of 0
if (stepLimit <= 0)
timeField.setText(10 + "");
ImmutableMap<String, Integer> popLimits = gm.getPopLimits();
if (popLimits.size() == 0) {
addConditionButton.setEnabled(true);
} else {
int i = 0;
for (String p : popLimits.keySet()) {
addCondition();
agentTypes.get(i).setSelectedItem(p);
values.get(i).setText(popLimits.get(p) + "");
i++;
}
}
validate();
}
private static JPanel makeConditionListPanel() {
JPanel conListPanel = Gui.makePanel(BoxLayoutAxis.PAGE_AXIS, null, null);
return conListPanel;
}
private void addCondition() {
JComboBox newBox = Gui.makeComboBox(agentNames, MaxSize.NULL);
newBox.setPreferredSize(new PrefSize(120,30));
agentTypes.add(newBox);
JTextField newValue = Gui.makeTextField(null, 10,
MaxSize.NULL, MinSize.NULL);
values.add(newValue);
JButton newButton = Gui.makeButton("Delete", null,
new DeleteListener());
newButton.setMinimumSize(new MinSize(80,30));
newButton.setPreferredSize(new PrefSize(80,30));
deleteButtons.add(newButton);
newButton.setActionCommand(deleteButtons.indexOf(newButton) + "");
JPanel newPanel = new JPanel();
newPanel.setPreferredSize(new PrefSize(250,30));
newPanel.add(newBox);
newPanel.add(newValue);
newPanel.add(newButton);
subPanels.add(newPanel);
conListPanel.add(newPanel);
validate();
}
private void reset() {
nameField.setText("");
conListPanel.removeAll();
agentTypes.clear();
values.clear();
deleteButtons.clear();
subPanels.clear();
}
private class DeleteListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
int n = Integer.parseInt(e.getActionCommand());
String str = (String) agentTypes.get(n).getSelectedItem();
if (str != null)
getGuiManager().removePopLimit(str);
conListPanel.remove(subPanels.get(n));
agentTypes.remove(n);
values.remove(n);
deleteButtons.remove(n);
for (int i = n; i < deleteButtons.size(); i++)
deleteButtons.get(i).setActionCommand(
(Integer.parseInt(deleteButtons.get(i)
.getActionCommand()) - 1) + "");
subPanels.remove(n);
validate();
repaint();
}
}
}
|
package edu.wheaton.simulator.gui.screen;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import com.google.common.collect.ImmutableMap;
import edu.wheaton.simulator.gui.BoxLayoutAxis;
import edu.wheaton.simulator.gui.Gui;
import edu.wheaton.simulator.gui.HorizontalAlignment;
import edu.wheaton.simulator.gui.MaxSize;
import edu.wheaton.simulator.gui.MinSize;
import edu.wheaton.simulator.gui.PrefSize;
import edu.wheaton.simulator.gui.SimulatorGuiManager;
import edu.wheaton.simulator.simulation.Simulator;
//TODO commented code for adding operators to ending conditions :
// see if it should stay for future use or just be deleted
//TODO commented out code for changing width and height of grid :
// causing too many problems and not providing any value atm.
public class SetupScreen extends Screen {
private JTextField nameField;
private JTextField timeField;
private String[] agentNames;
private JComboBox updateBox;
private ArrayList<JComboBox> agentTypes;
private ArrayList<JTextField> values;
private ArrayList<JButton> deleteButtons;
private ArrayList<JPanel> subPanels;
private JPanel conListPanel;
private JButton addConditionButton;
private static final long serialVersionUID = -8347080877399964861L;
public SetupScreen(final SimulatorGuiManager gm) {
super(gm);
this.setLayout(new GridBagLayout());
GridBagConstraints c = new GridBagConstraints();
c.gridwidth = 3;
c.gridheight = 1;
c.gridx = 0;
c.gridy = 1;
this.add(new JLabel("Name: "), c);
c.gridx = 1;
c.gridy = 1;
nameField = Gui.makeTextField(gm.getSimName(), 25,new MaxSize(400,30),new MinSize(250,30));
this.add(nameField, c);
c.gridx = 0;
c.gridy = 2;
JLabel updateLabel = Gui.makeLabel("Update type: ",new MaxSize(100,40),null);
this.add(updateLabel,c);
c.gridx = 1;
c.gridy = 2;
String[] updateTypes = {"Linear", "Atomic", "Priority"};
updateBox = Gui.makeComboBox(updateTypes, new MaxSize(200,40));
this.add(updateBox,c);
c.gridx = 1;
c.gridy = 3;
JLabel conHeader = Gui.makeLabel("Ending Conditions",new PrefSize(300,100),HorizontalAlignment.CENTER );
this.add(conHeader,c);
c.gridx = 0;
c.gridy = 4;
JLabel timeLabel = new JLabel("Time limit: ");
this.add(timeLabel,c);
c.gridx = 1;
c.gridy = 4;
timeField = Gui.makeTextField(null,15,new MaxSize(200,40),new MinSize(100,30));
this.add(timeField,c);
c.gridx = 0;
c.gridy = 6;
conListPanel = Gui.makePanel(BoxLayoutAxis.LINE_AXIS,null,null);
this.add(conListPanel,c);
c.gridx = 0;
c.gridy = 6;
JLabel agentTypeLabel = Gui.makeLabel("Agent Type",new PrefSize(200,30),HorizontalAlignment.LEFT);
conListPanel.add(agentTypeLabel,c);
c.gridx = 1;
c.gridy = 6;
JLabel valueLabel = Gui.makeLabel("Population Limit",new PrefSize(400,30),HorizontalAlignment.CENTER);
conListPanel.add(valueLabel,c);
c.gridx = 2;
c.gridy = 6;
addConditionButton = Gui.makeButton("Add Field",null,
new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
addCondition();
}
});
conListPanel.add(Gui.makePanel(addConditionButton),c);
c.gridx = 2;
c.gridy = 7;
this.add(
Gui.makePanel(
Gui.makeButton("Revert",null,new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
load();
}}),
makeConfirmButton()
), c);
agentNames = new String[0];
agentTypes = new ArrayList<JComboBox>();
deleteButtons = new ArrayList<JButton>();
subPanels = new ArrayList<JPanel>();
agentTypes = new ArrayList<JComboBox>();
values = new ArrayList<JTextField>();
}
private JButton makeConfirmButton(){
return Gui.makeButton("Confirm",null,new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
try {
SimulatorGuiManager gm = getGuiManager();
if (nameField.getText().equals(""))
throw new Exception("All fields must have input");
gm.setSimName(nameField.getText());
for (int i = 0; i < values.size(); i++)
if (values.get(i).getText().equals(""))
throw new Exception("All fields must have input.");
gm.setSimStepLimit(Integer.parseInt(timeField.getText()));
String str = (String)updateBox.getSelectedItem();
if (str.equals("Linear"))
gm.setSimLinearUpdate();
else if (str.equals("Atomic"))
gm.setSimAtomicUpdate();
else
gm.setSimPriorityUpdate(0, 50);
for (int i = 0; i < values.size(); i++) {
gm.setSimPopLimit(
(String)(agentTypes.get(i).getSelectedItem()),
Integer.parseInt(values.get(i).getText())
);
}
load();
}
catch (NumberFormatException excep) {
JOptionPane.showMessageDialog(null,
"Width and Height fields must be integers greater than 0");
}
catch (Exception excep) {
JOptionPane.showMessageDialog(null, excep.getMessage());
}
}
});
}
@Override
public void load() {
reset();
nameField.setText(getGuiManager().getSimName());
updateBox.setSelectedItem(getGuiManager().getCurrentSimUpdater());
SimulatorGuiManager gm = getGuiManager();
int stepLimit = gm.getSimStepLimit();
agentNames = Simulator.prototypeNames().toArray(agentNames);
timeField.setText(stepLimit + "");
//to prevent accidental starting simulation with time limit of 0
if (stepLimit <= 0)
timeField.setText(10 + "");
ImmutableMap<String, Integer> popLimits = gm.getSimPopLimits();
if (popLimits.size() == 0) {
conListPanel.add(addConditionButton);
}
else {
int i = 0;
for (String p : popLimits.keySet()) {
addCondition();
agentTypes.get(i).setSelectedItem(p);
values.get(i).setText(popLimits.get(p) + "");
i++;
}
}
validate();
}
private void addCondition() {
JComboBox newBox = Gui.makeComboBox(agentNames,new MaxSize(300,40));
agentTypes.add(newBox);
JTextField newValue = Gui.makeTextField(null,25,new MaxSize(300,40),MinSize.NULL);
values.add(newValue);
JButton newButton = Gui.makeButton("Delete",null,new DeleteListener());
newButton.setActionCommand(deleteButtons.indexOf(newButton) + "");
deleteButtons.add(newButton);
JPanel newPanel = Gui.makePanel( BoxLayoutAxis.X_AXIS,null,null);
newPanel.add(newBox);
newPanel.add(newValue);
newPanel.add(newButton);
subPanels.add(newPanel);
conListPanel.add(newPanel);
conListPanel.add(addConditionButton);
conListPanel.validate();
validate();
}
private void reset() {
nameField.setText("");
conListPanel.removeAll();
agentTypes.clear();
values.clear();
deleteButtons.clear();
subPanels.clear();
}
private class DeleteListener implements ActionListener {
@Override
public void actionPerformed(ActionEvent e){
int n = Integer.parseInt(e.getActionCommand());
String str = (String) agentTypes.get(n).getSelectedItem();
if (str != null)
getGuiManager().removeSimPopLimit(str);
conListPanel.remove(subPanels.get(n));
agentTypes.remove(n);
values.remove(n);
deleteButtons.remove(n);
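            // Re-index the remaining delete buttons so each action command still
            // matches its button's new row position after the removal.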
for (int i = n; i < deleteButtons.size(); i++)
deleteButtons.get(i).setActionCommand(
(Integer.parseInt(deleteButtons.get(i).getActionCommand()) - 1) + "");
subPanels.remove(n);
validate();
repaint();
}
}
}
|
package es.ucm.fdi.tp.control.multiplayer;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import com.sun.istack.internal.NotNull;
import es.ucm.fdi.tp.basecode.bgame.control.Controller;
import es.ucm.fdi.tp.basecode.bgame.control.GameFactory;
import es.ucm.fdi.tp.basecode.bgame.control.Player;
import es.ucm.fdi.tp.basecode.bgame.control.commands.Command;
import es.ucm.fdi.tp.basecode.bgame.model.Board;
import es.ucm.fdi.tp.basecode.bgame.model.Game;
import es.ucm.fdi.tp.basecode.bgame.model.Game.State;
import es.ucm.fdi.tp.basecode.bgame.model.GameError;
import es.ucm.fdi.tp.basecode.bgame.model.GameObserver;
import es.ucm.fdi.tp.basecode.bgame.model.Piece;
import es.ucm.fdi.tp.control.multiplayer.Responses.*;
public class GameServer extends Controller implements GameObserver {
private List<Connection> clients;
volatile private ServerSocket server;
private int port;
private NetObserver view;
volatile private boolean stopped;
volatile private boolean gameOver;
public static int REQUIRED_PLAYERS = 4;
private int nPlayers = 0;
    private GameFactory gameFactory;
private List<Piece> pieces;
public GameServer(GameFactory factory, List<Piece> pieces, int port) {
super(new Game(factory.gameRules()), pieces);
this.clients = new ArrayList<Connection>();
        this.gameFactory = factory;
this.pieces = pieces;
this.port = port;
game.addObserver(this);
}
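    /**
     * Sends the game factory and the piece list to the newly connected client
     * and starts a background thread that reads {@link Command} objects from
     * the connection and executes them against this controller until the game
     * is over or the server is stopped.
     */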
private void startClientListener(Connection c) throws IOException {
this.gameOver = false;
        c.sendObject(gameFactory);
c.sendObject(pieces);
Thread t = new Thread() {
@Override
public void run() {
while (!stopped && !gameOver) {
try {
Command cmd = (Command) c.getObject();
cmd.execute(GameServer.this);
} catch (ClassNotFoundException | ClassCastException | IOException e) {
if (!stopped && !gameOver) {
GameServer.this.stop();
}
}
}
}
};
t.start();
view.onPlayerConnected(pieces.get(nPlayers-1));
}
public int playersConnected() {
return nPlayers;
}
private void startServer() {
Socket soc;
try {
this.server = new ServerSocket(port);
view.onServerOpened(game.gameDesc());
this.stopped = false;
} catch (IOException e) {
view.log("Error opening the server");
System.err.println("Error opening the server: " + e.getMessage());
view.onServerClosed();
            this.stopped = true;
            return; // no server socket was created, so there is nothing to accept on
        }
while(!this.stopped) {
try {
soc = server.accept();
handleRequest(soc);
} catch (IOException e) {
if(!this.stopped){
view.log("Error while waiting for a connection");
System.err.println("Error while waiting for a connection: " + e.getMessage());
}
}
}
try {
server.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private void handleRequest(Socket s) {
try {
Connection c = new Connection(s);
Object clientRequest = c.getObject();
            if (!(clientRequest instanceof String)
                    || !((String) clientRequest).equalsIgnoreCase("Connect")) {
c.sendObject(new GameError("Invalid Request"));
c.stop();
return;
}
if(nPlayers >= REQUIRED_PLAYERS) {
c.sendObject(new GameError("Room is already full"));
c.stop();
view.log("A client connection was refused: Maximum players connections reached ");
return;
}
nPlayers++;
clients.add(c);
view.log("new player into to room");
c.sendObject("OK");
            c.sendObject(this.gameFactory);
c.sendObject(this.pieces.get(nPlayers-1));
if(nPlayers >= REQUIRED_PLAYERS)
startGame();
startClientListener(c);
} catch (IOException | ClassNotFoundException e) {
e.printStackTrace();
}
}
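    // A minimal sketch of the client side of the handshake implemented by
    // handleRequest() above. It is illustrative only and assumes a client-side
    // Connection wrapper with the same sendObject()/getObject() API; host and
    // port are placeholders:
    //
    //   Connection c = new Connection(new Socket(host, port));
    //   c.sendObject("Connect");                              // join request
    //   Object reply = c.getObject();                         // "OK" or a GameError
    //   if ("OK".equals(reply)) {
    //       GameFactory factory = (GameFactory) c.getObject(); // game setup
    //       Piece assigned = (Piece) c.getObject();            // this client's piece
    //   }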
private void startGame() {
if(game.getState() == State.Starting)
game.start(pieces);
else
game.restart();
}
public void setView(@NotNull NetObserver o) {
this.view = o;
}
@Override
public synchronized void makeMove(Player p) {
try {
super.makeMove(p);
} catch (GameError e) {
view.log(e.getMessage());
}
}
@Override
public synchronized void restart() {
try {
super.restart();
} catch (GameError e) {
view.log(e.getMessage());
}
}
@Override
public synchronized void stop() {
try {
this.gameOver = true;
super.stop();
} catch (GameError e) {
view.log(e.getMessage());
} catch(Exception e) {
e.printStackTrace();
} finally {
for(Connection c : clients) {
try {
c.stop();
} catch(IOException e) { e.printStackTrace(); }
}
            // Stop the accept loop and release the server socket; calling stop()
            // again here would recurse forever.
            this.stopped = true;
            try {
                if (server != null)
                    server.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
}
view.onServerClosed();
}
@Override
public void start() {
if(view != null)
startServer();
else
throw new NullPointerException(
"Uninitialized view. You must call setView() first.");
}
    /**
     * Forwards a notification to every connected client; connections that fail
     * are dropped from the client list.
     */
    private void forwardNotification(Response r) {
        // Iterate over a copy so that broken connections can be removed from
        // the live list without a ConcurrentModificationException.
        for (Connection c : new ArrayList<Connection>(clients)) {
            try {
                c.sendObject(r);
            } catch (IOException e) {
                clients.remove(c);
            }
        }
    }
@Override
public void onGameStart(Board board, String gameDesc, List<Piece> pieces,
Piece turn) {
forwardNotification(new GameStartResponse(board, gameDesc, pieces, turn));
}
@Override
public void onGameOver(Board board, State state, Piece winner) {
forwardNotification(new GameOverResponse(board, state, winner));
}
@Override
public void onMoveStart(Board board, Piece turn) {
forwardNotification(new MoveStartResponse(board, turn));
}
@Override
public void onMoveEnd(Board board, Piece turn, boolean success) {
forwardNotification(new MoveEndResponse(board, turn, success));
}
@Override
public void onChangeTurn(Board board, Piece turn) {
forwardNotification(new ChangeTurnResponse(board, turn));
}
@Override
public void onError(String msg) {
forwardNotification(new ErrorResponse(msg));
}
}
|
package org.apereo.cas.consent;
import org.apereo.cas.authentication.principal.Principal;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.configuration.model.support.consent.DynamoDbConsentProperties;
import org.apereo.cas.dynamodb.DynamoDbQueryBuilder;
import org.apereo.cas.dynamodb.DynamoDbTableUtils;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.DateTimeUtils;
import org.apereo.cas.util.serialization.JacksonObjectMapperFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.ComparisonOperator;
import software.amazon.awssdk.services.dynamodb.model.DeleteItemRequest;
import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement;
import software.amazon.awssdk.services.dynamodb.model.KeyType;
import software.amazon.awssdk.services.dynamodb.model.PutItemRequest;
import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* This is {@link DynamoDbConsentFacilitator}.
*
* @author Misagh Moayyed
* @since 6.5.0
*/
@Slf4j
@Getter
@RequiredArgsConstructor
@SuppressWarnings("JavaUtilDate")
public class DynamoDbConsentFacilitator {
private static final ObjectMapper MAPPER = JacksonObjectMapperFactory.builder()
.defaultTypingEnabled(false).build().toObjectMapper();
private final DynamoDbConsentProperties dynamoDbProperties;
private final DynamoDbClient amazonDynamoDBClient;
@SneakyThrows
private static Map<String, AttributeValue> buildTableAttributeValuesMap(final ConsentDecision record) {
val values = new HashMap<String, AttributeValue>();
values.put(ColumnNames.PRINCIPAL.getColumnName(), AttributeValue.builder().s(record.getPrincipal()).build());
values.put(ColumnNames.SERVICE.getColumnName(), AttributeValue.builder().s(record.getService()).build());
values.put(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getId())).build());
val body = MAPPER.writeValueAsString(record);
values.put(ColumnNames.BODY.getColumnName(), AttributeValue.builder().s(body).build());
val time = DateTimeUtils.dateOf(record.getCreatedDate());
values.put(ColumnNames.CREATED_DATE.getColumnName(), AttributeValue.builder().s(String.valueOf(time)).build());
LOGGER.debug("Created attribute values [{}] based on [{}]", values, record);
return values;
}
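    // Illustrative only: for a decision with id 1000 by principal "casuser" for
    // service "https://app.example.org", the map built above corresponds roughly
    // to the DynamoDB item
    //   { "principal": S("casuser"), "service": S("https://app.example.org"),
    //     "id": N("1000"), "createdDate": S("<created date>"),
    //     "body": S("<JSON-serialized ConsentDecision>") }
    // (values are assumptions, not taken from a real deployment).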
@SneakyThrows
private static ConsentDecision extractAttributeValuesFrom(final Map<String, AttributeValue> item) {
val principal = item.get(ColumnNames.PRINCIPAL.getColumnName()).s();
val id = Long.valueOf(item.get(ColumnNames.ID.getColumnName()).n());
val body = item.get(ColumnNames.BODY.getColumnName()).s();
LOGGER.debug("Extracting consent decision id [{}] for [{}]", id, principal);
return MAPPER.readValue(body, ConsentDecision.class);
}
/**
* Create tables.
*
* @param deleteTables the delete tables
*/
@SneakyThrows
public void createTable(final boolean deleteTables) {
val attributes = List.of(AttributeDefinition.builder()
.attributeName(ColumnNames.ID.getColumnName())
.attributeType(ScalarAttributeType.N)
.build());
val schema = List.of(KeySchemaElement.builder()
.attributeName(ColumnNames.ID.getColumnName())
.keyType(KeyType.HASH)
.build());
DynamoDbTableUtils.createTable(amazonDynamoDBClient, dynamoDbProperties,
dynamoDbProperties.getTableName(), deleteTables, attributes, schema);
}
/**
* Save.
*
* @param record the record
*/
public void save(final ConsentDecision record) {
val values = buildTableAttributeValuesMap(record);
val putItemRequest = PutItemRequest.builder().tableName(dynamoDbProperties.getTableName()).item(values).build();
LOGGER.debug("Submitting put request [{}] for record [{}]", putItemRequest, record);
val putItemResult = amazonDynamoDBClient.putItem(putItemRequest);
LOGGER.debug("Record added with result [{}]", putItemResult);
}
/**
* Remove all.
*/
public void removeAll() {
createTable(true);
}
/**
* Load collection.
*
* @return the collection
*/
public Collection<? extends ConsentDecision> load() {
return getRecordsByKeys(List.of()).collect(Collectors.toList());
}
/**
* Find collection.
*
* @param principal the principal
* @return the collection
*/
public Collection<? extends ConsentDecision> find(final String principal) {
val query = DynamoDbQueryBuilder.builder()
.key(ColumnNames.PRINCIPAL.getColumnName())
.attributeValue(List.of(AttributeValue.builder().s(principal).build()))
.operator(ComparisonOperator.EQ)
.build();
return getRecordsByKeys(List.of(query)).collect(Collectors.toList());
}
/**
* Find consent decision.
*
* @param service the service
* @param principal the principal
* @return the consent decision
*/
public ConsentDecision find(final Service service, final Principal principal) {
val query = List.of(
DynamoDbQueryBuilder.builder()
.key(ColumnNames.PRINCIPAL.getColumnName())
.attributeValue(List.of(AttributeValue.builder().s(principal.getId()).build()))
.operator(ComparisonOperator.EQ)
.build(),
DynamoDbQueryBuilder.builder()
.key(ColumnNames.SERVICE.getColumnName())
.attributeValue(List.of(AttributeValue.builder().s(service.getId()).build()))
                .operator(ComparisonOperator.EQ)
.build());
return getRecordsByKeys(query).findFirst().orElse(null);
}
/**
* Delete.
*
* @param id the id
* @param principal the principal
* @return the boolean
*/
public boolean delete(final long id, final String principal) {
val keys = List.of(
DynamoDbQueryBuilder.builder()
.key(ColumnNames.PRINCIPAL.getColumnName())
.attributeValue(List.of(AttributeValue.builder().s(principal).build()))
.operator(ComparisonOperator.EQ)
.build(),
DynamoDbQueryBuilder.builder()
.key(ColumnNames.ID.getColumnName())
.attributeValue(List.of(AttributeValue.builder().n(String.valueOf(id)).build()))
                .operator(ComparisonOperator.EQ)
.build());
val results = getRecordsByKeys(keys);
val deleteCount = results
.map(decision -> {
val del = DeleteItemRequest.builder()
.tableName(dynamoDbProperties.getTableName())
.key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(),
AttributeValue.builder().n(String.valueOf(decision.getId())).build()))
.build();
LOGGER.debug("Submitting delete request [{}] for decision id [{}] and principal [{}]", del, id, principal);
val res = amazonDynamoDBClient.deleteItem(del);
LOGGER.debug("Delete request came back with result [{}]", res);
return res;
})
.filter(Objects::nonNull)
.count();
return deleteCount > 0;
}
/**
* Delete.
*
* @param principal the principal
* @return the boolean
*/
public boolean delete(final String principal) {
val keys = List.of(
DynamoDbQueryBuilder.builder()
.key(ColumnNames.PRINCIPAL.getColumnName())
.attributeValue(List.of(AttributeValue.builder().s(principal).build()))
.operator(ComparisonOperator.EQ)
.build());
val results = getRecordsByKeys(keys);
results.forEach(decision -> {
val del = DeleteItemRequest.builder()
.tableName(dynamoDbProperties.getTableName())
.key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(),
AttributeValue.builder().n(String.valueOf(decision.getId())).build()))
.build();
LOGGER.debug("Submitting delete request [{}] for decision id [{}] and principal [{}]",
del, decision.getId(), principal);
val res = amazonDynamoDBClient.deleteItem(del);
LOGGER.debug("Delete request came back with result [{}]", res);
});
return true;
}
/**
* Column names for tables holding records.
*/
@Getter
@RequiredArgsConstructor
public enum ColumnNames {
/**
* Principal column.
*/
PRINCIPAL("principal"),
/**
* ID column.
*/
ID("id"),
/**
* Service column.
*/
SERVICE("service"),
/**
* Created-Date column.
*/
CREATED_DATE("createdDate"),
/**
* Body column.
*/
BODY("body");
private final String columnName;
}
@SneakyThrows
private Stream<ConsentDecision> getRecordsByKeys(final List<DynamoDbQueryBuilder> queries) {
return DynamoDbTableUtils.getRecordsByKeys(amazonDynamoDBClient,
dynamoDbProperties.getTableName(),
queries,
DynamoDbConsentFacilitator::extractAttributeValuesFrom);
}
}
|
package org.psem2m.isolates.services.remote.beans;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.osgi.framework.Constants;
import org.psem2m.isolates.constants.IPlatformProperties;
/**
* Represents a remote service
*
* @author Thomas Calmant
*/
public class RemoteServiceRegistration implements Serializable {
/** Serial version UID */
private static final long serialVersionUID = 1L;
/** Unknown isolate ID constant */
public static final String UNKNOWN_ISOLATE_ID = "unknown";
/** Remote service end points */
private final Set<EndpointDescription> pEndpoints = new HashSet<EndpointDescription>();
/** Exported interfaces */
private final String[] pExportedInterfaces;
/** An ID representing the service */
private final String pServiceId;
/** Service properties copy */
private final Map<String, Object> pServiceProperties = new HashMap<String, Object>();
/**
* Stores a remote service registration description.
*
* @param aExportedInterface
* Interface exported
* @param aServiceProperties
* Service properties (will be copied)
* @param aEndpoints
* End points to access to the service
*/
public RemoteServiceRegistration(final String aExportedInterface,
final Map<String, Object> aServiceProperties,
            final EndpointDescription[] aEndpoints) {
        this(new String[] { aExportedInterface }, aServiceProperties, aEndpoints);
}
/**
* Stores a remote service registration description.
*
* @param aExportedInterfaces
* Interfaces exported
* @param aServiceProperties
* Service properties (will be copied)
* @param aEndpoints
* End points to access to the service
*/
public RemoteServiceRegistration(final String[] aExportedInterfaces,
final Map<String, Object> aServiceProperties,
final Collection<EndpointDescription> aEndpoints) {
pExportedInterfaces = aExportedInterfaces;
pServiceProperties.putAll(aServiceProperties);
pEndpoints.addAll(aEndpoints);
// Get the current isolate ID
final String sourceIsolate = System.getProperty(
IPlatformProperties.PROP_PLATFORM_ISOLATE_ID,
UNKNOWN_ISOLATE_ID);
// Generate a service ID
final StringBuilder builder = new StringBuilder(sourceIsolate);
builder.append(".");
builder.append(pServiceProperties.get(Constants.SERVICE_ID));
pServiceId = builder.toString();
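        // For example, an isolate "isolate.web" exporting a service whose OSGi
        // service.id property is 42 yields the id "isolate.web.42"
        // (illustrative values only).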
}
/**
* Stores a remote service registration description.
*
* @param aExportedInterfaces
* Interfaces exported
* @param aServiceProperties
* Service properties (will be copied)
* @param aEndpoints
* End points to access to the service
*/
public RemoteServiceRegistration(final String[] aExportedInterfaces,
final Map<String, Object> aServiceProperties,
final EndpointDescription[] aEndpoints) {
this(aExportedInterfaces, aServiceProperties, Arrays.asList(aEndpoints));
}
/**
* Retrieves the end points to access to the service
*
* @return the end points
*/
public EndpointDescription[] getEndpoints() {
synchronized (pEndpoints) {
return pEndpoints
.toArray(new EndpointDescription[pEndpoints.size()]);
}
}
/**
* Retrieves the interfaces exported by the end points
*
* @return the exported interfaces
*/
public String[] getExportedInterfaces() {
return pExportedInterfaces;
}
/**
* Retrieves the service ID, based on the source isolate
*
* @return the service id
*/
public String getServiceId() {
return pServiceId;
}
/**
* Retrieves a copy of the service properties at the time of the end points
* creation.
*
* @return the service properties
*/
public Map<String, Object> getServiceProperties() {
return pServiceProperties;
}
/**
* Removes the given end points from the registration.
*
* <b>This method should only be called by a Remote Service Repository</b>.
*
* @param aEndpoints
* End points to be removed
*/
public void removeEndpoints(final EndpointDescription... aEndpoints) {
if (aEndpoints != null) {
synchronized (pEndpoints) {
pEndpoints.removeAll(Arrays.asList(aEndpoints));
}
}
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
final StringBuilder builder = new StringBuilder(
"RemoteServiceRegistration(");
builder.append("serviceId = ").append(pServiceId);
builder.append(", Endpoints : [");
        String separator = "";
        for (final EndpointDescription endpoint : pEndpoints) {
            builder.append(separator).append(endpoint);
            separator = ", ";
        }
builder.append("])");
return builder.toString();
}
}
|
package net.ssehub.easy.reasoning.core.reasoner;
import net.ssehub.easy.reasoning.core.frontend.ReasonerFrontend;
/**
* Describes the configuration to be passed via the {@link ReasonerFrontend} to the individual
* reasoners. Data in this class may be defined e.g. via the UI or from a configuration file.
*
* @author Holger Eichelberger
*/
public class ReasonerConfiguration {
public static final IAdditionalInformationLogger ADDITIONAL_INFO_LOG_SYSOUT = new IAdditionalInformationLogger() {
@Override
public void info(String text) {
System.out.println(text);
}
};
public static final IAdditionalInformationLogger ADDITIONAL_INFO_LOG_NONE = new IAdditionalInformationLogger() {
@Override
public void info(String text) {
}
};
private AttributeValues attributeValues;
private int timeout;
private ReasonerDescriptor defaultReasoner;
private boolean customMessages;
private boolean runtime;
private boolean freshConfig;
private IAdditionalInformationLogger logger = ADDITIONAL_INFO_LOG_SYSOUT; // the legacy behavior
/**
* Describes an information logger.
*
* @author Holger Eichelberger
*/
public interface IAdditionalInformationLogger {
/**
* Emits some information.
*
* @param text the text to be printed
*/
public void info(String text);
}
/**
* Creates a new instance and initializes it with default values, i.e.
* the global timeout from {@link ReasonerFrontend#getTimeout()}, an
* unspecified attributes values set (<b>null</b>) and an unspecified
* default reasoner (<b>null</b>).
*/
public ReasonerConfiguration() {
this(false);
this.freshConfig = true;
}
/**
* Creates a new instance and initializes it with default values, i.e.
* the global timeout from {@link ReasonerFrontend#getTimeout()}, an
* unspecified attributes values set (<b>null</b>), custom messaging and an unspecified
* default reasoner (<b>null</b>).
* @param defParamValue Enables custom messages (comments instead of constraints).
*/
public ReasonerConfiguration(boolean defParamValue) {
this.timeout = ReasonerFrontend.getInstance().getTimeout();
this.customMessages = defParamValue;
}
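    // A minimal usage sketch (values are illustrative, not defaults):
    //   ReasonerConfiguration cfg = new ReasonerConfiguration();
    //   cfg.setTimeout(30000);                    // override the frontend timeout
    //   cfg.setRuntimeMode(true);                 // reason over a runtime configuration
    //   cfg.setAdditionalInformationLogger(ReasonerConfiguration.ADDITIONAL_INFO_LOG_NONE);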
/**
* Changes the timeout for reasoning.
*
     * @param timeout the timeout, shall be ignored if zero or negative
*/
public void setTimeout(int timeout) {
this.timeout = timeout;
}
/**
* Returns the timeout for reasoning.
*
     * @return the timeout, shall be ignored if zero or negative
*/
public int getTimeout() {
return timeout;
}
/**
* Returns the specific timeout for reasoning. This method uses
* {@link ReasonerFrontend#getTimeout()} as fallback. Reasoners shall
* call this method rather than {@link #getTimeout()}.
*
* @param configuration the configuration the timeout shall be returned for (may be <b>null</b>)
     * @return the timeout, shall be ignored if zero or negative
*/
public int getTimeout(ReasonerConfiguration configuration) {
int result = 0;
if (null != configuration) {
            result = configuration.getTimeout();
}
if (result <= 0) {
result = ReasonerFrontend.getInstance().getTimeout();
}
return result;
}
/**
* Changes attribute values for reasoning.
*
     * @param attributeValues the values instance (may be <b>null</b>)
*/
public void setAttributeValues(AttributeValues attributeValues) {
this.attributeValues = attributeValues;
}
/**
* Returns the attribute values for reasoning. Reasoners shall call
* {@link #getAttributeValues(ReasonerConfiguration)} instead.
*
* @return the attribute values for reasoning (may be <b>null</b> if none is provided)
*/
public AttributeValues getAttributeValues() {
return attributeValues;
}
/**
* Null-safe access to the attribute values.
*
* @param config the configuration instance holding the attribute values.
* @return the values or <b>null</b>
*/
public static AttributeValues getAttributeValues(ReasonerConfiguration config) {
AttributeValues result = null;
if (null != config) {
result = config.getAttributeValues();
}
return result;
}
/**
* Changes the default reasoner.
*
* @param defaultReasoner the default reasoner or <b>null</b> if there is none configured
*/
public void setDefaultReasoner(ReasonerDescriptor defaultReasoner) {
this.defaultReasoner = defaultReasoner;
}
/**
* Returns the default reasoner.
*
* @return the default reasoner or <b>null</b> if there is none configured
*/
public ReasonerDescriptor getDefaultResoner() {
return defaultReasoner;
}
/**
* Enables custom messages.
*/
public void enableCustomMessages() {
this.customMessages = true;
}
/**
* Disable custom messages.
*/
public void disableCustomMessages() {
this.customMessages = false;
}
/**
* Returns if custom messages are enabled.
* @return Returns true if custom messages are enabled.
*/
public boolean getCustomMessages() {
return customMessages;
}
/**
* Returns the additional information logger.
*
* @return the information logger
*/
public IAdditionalInformationLogger getLogger() {
return logger;
}
/**
* Defines the additional information logger.
*
* @param logger the logger
*/
public void setAdditionalInformationLogger(IAdditionalInformationLogger logger) {
if (null != logger) {
this.logger = logger;
}
}
/**
     * Defines whether runtime reasoning shall be activated. Currently, runtime reasoning assumes
* that all required defaults and assignments have already been processed. In particular, the caller
* is responsible for passing a "clean" configuration to the reasoner that leads to the expected
* results.
*
* @param runtime if reasoning shall happen for runtime
*/
public void setRuntimeMode(boolean runtime) {
this.runtime = runtime;
}
/**
* Returns whether runtime reasoning shall be done (see {@link #setRuntimeMode(boolean)}.
*
* @return <code>true</code> if reasoning shall happen for runtime, <code>false</code> else
*/
public boolean isRuntimeMode() {
return runtime;
}
/**
* Defines whether the reasoner should create a fresh {@link Configuration}.
*/
public void createFreshConfiguration() {
freshConfig = true;
}
/**
* Returns whether a fresh {@link Configuration} should be created.
* @return <code>true</code> if Fresh {@link Configuration} is created.
*/
public boolean isFreshConfiguration() {
return freshConfig;
}
/**
* Specification whether a fresh configuration should be created for reasoning.
     * @param freshConfig <tt>true</tt> if a fresh configuration shall be created, <tt>false</tt> otherwise.
*/
public void setFreshConfiguration(boolean freshConfig) {
this.freshConfig = freshConfig;
}
}
|
package org.csstudio.trends.databrowser2.ui;
import java.io.File;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import org.csstudio.csdata.ProcessVariable;
import org.csstudio.swt.rtplot.Annotation;
import org.csstudio.swt.rtplot.Axis;
import org.csstudio.swt.rtplot.AxisRange;
import org.csstudio.swt.rtplot.PlotListenerAdapter;
import org.csstudio.swt.rtplot.RTTimePlot;
import org.csstudio.swt.rtplot.Trace;
import org.csstudio.swt.rtplot.YAxis;
import org.csstudio.trends.databrowser2.Activator;
import org.csstudio.trends.databrowser2.Messages;
import org.csstudio.trends.databrowser2.model.AnnotationInfo;
import org.csstudio.trends.databrowser2.model.ArchiveDataSource;
import org.csstudio.trends.databrowser2.model.AxisConfig;
import org.csstudio.trends.databrowser2.model.ChannelInfo;
import org.csstudio.trends.databrowser2.model.Model;
import org.csstudio.trends.databrowser2.model.ModelItem;
import org.csstudio.ui.util.dialogs.ExceptionDetailsErrorDialog;
import org.csstudio.ui.util.dnd.ControlSystemDropTarget;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.ToolItem;
/** Data Browser 'Plot' that displays the samples in a {@link Model}.
* <p>
* Links the underlying {@link RTTimePlot} to the {@link Model}.
*
* @author Kay Kasemir
* @author Laurent PHILIPPE Modify addListener method to add property changed event capability
*/
@SuppressWarnings("nls")
public class ModelBasedPlot
{
/** Plot Listener */
private Optional<PlotListener> listener = Optional.empty();
/** {@link Display} used by this plot */
final private Display display;
/** Plot widget/figure */
final private RTTimePlot plot;
final private Map<Trace<Instant>, ModelItem> items_by_trace = new ConcurrentHashMap<>();
/** Initialize plot
* @param parent Parent widget
*/
public ModelBasedPlot(final Composite parent)
{
this.display = parent.getDisplay();
plot = new RTTimePlot(parent);
final ToolItem time_config_button =
plot.addToolItem(SWT.PUSH, Activator.getDefault().getImage("icons/time_range.png"), Messages.StartEndDialogTT);
time_config_button.addSelectionListener(new SelectionAdapter()
{
@Override
public void widgetSelected(SelectionEvent e)
{
listener.ifPresent((l) -> l.timeConfigRequested());
}
});
// Configure axes
final Axis<Instant> time_axis = plot.getXAxis();
time_axis.setName(Messages.Plot_TimeAxisName);
final YAxis<Instant> value_axis = plot.getYAxes().get(0);
value_axis.setName(Messages.Plot_ValueAxisName);
// Forward user changes to plot to model
plot.addListener(new PlotListenerAdapter<Instant>()
{
@Override
public void changedXAxis(final Axis<Instant> x_axis)
{
final AxisRange<Instant> range = x_axis.getValueRange();
listener.ifPresent((l) -> l.timeAxisChanged(plot.isScrolling(), range.getLow(), range.getHigh()));
}
@Override
public void changedYAxis(final YAxis<Instant> y_axis)
{
final int index = plot.getYAxes().indexOf(y_axis);
final AxisRange<Double> range = y_axis.getValueRange();
listener.ifPresent((l) -> l.valueAxisChanged(index, range.getLow(), range.getHigh()));
}
@Override
public void changedAnnotations()
{
final List<AnnotationInfo> annotations = new ArrayList<>();
final List<Trace<Instant>> traces = new ArrayList<>();
for (Trace<Instant> trace : plot.getTraces())
traces.add(trace);
for (Annotation<Instant> annotation : plot.getAnnotations())
{
final int item_index = traces.indexOf(annotation.getTrace());
annotations.add(new AnnotationInfo(item_index,
annotation.getPosition(), annotation.getValue(),
annotation.getOffset(), annotation.getText()));
}
listener.ifPresent((l) -> l.changedAnnotations(annotations));
}
@Override
public void changedCursors()
{
for (Trace<Instant> trace : plot.getTraces())
findModelItem(trace).setSelectedSample(trace.getSelectedSample());
listener.ifPresent((l) -> l.selectedSamplesChanged());
}
});
hookDragAndDrop(plot);
}
/** @return RTTimePlot */
public RTTimePlot getPlot()
{
return plot;
}
/**
* Attach to drag-and-drop, notifying the plot listener
*
     * @param parent Parent composite onto which to hook drag-and-drop
*/
private void hookDragAndDrop(final Composite parent)
{
// Allow dropped arrays
new ControlSystemDropTarget(parent, ChannelInfo[].class,
ProcessVariable[].class, ArchiveDataSource[].class,
File.class,
String.class)
{
@Override
public void handleDrop(final Object item)
{
final PlotListener lst = listener.orElse(null);
if (lst == null)
return;
if (item instanceof ChannelInfo[])
{
final ChannelInfo[] channels = (ChannelInfo[]) item;
for (ChannelInfo channel : channels)
lst.droppedPVName(channel.getProcessVariable(),
channel.getArchiveDataSource());
}
else if (item instanceof ProcessVariable[])
{
final ProcessVariable[] pvs = (ProcessVariable[]) item;
for (ProcessVariable pv : pvs)
lst.droppedPVName(pv, null);
}
else if (item instanceof ArchiveDataSource[])
{
final ArchiveDataSource[] archives = (ArchiveDataSource[]) item;
for (ArchiveDataSource archive : archives)
lst.droppedPVName(null, archive);
}
else if (item instanceof String)
lst.droppedName(item.toString());
else if (item instanceof String[])
{ // File names arrive as String[]...
final String[] files = (String[])item;
try
{
for (String filename : files)
lst.droppedFilename(filename);
}
catch (Exception ex)
{
ExceptionDetailsErrorDialog.openError(parent.getShell(), Messages.Error, ex);
}
}
}
};
}
/** Add a listener (currently only one supported) */
public void addListener(final PlotListener listener)
{
if (this.listener.isPresent())
throw new IllegalStateException("Only one listener supported");
this.listener = Optional.of(listener);
}
/** Remove all axes and traces */
public void removeAll()
{
items_by_trace.clear();
// Remove all traces
for (Trace<Instant> trace : plot.getTraces())
plot.removeTrace(trace);
// Now that Y axes are unused, remove all except for primary
int N = plot.getYAxes().size();
while (N > 1)
plot.removeYAxis(--N);
}
/** @param index
* Index of Y axis. If it doesn't exist, it will be created.
* @return Y Axis
*/
private YAxis<Instant> getYAxis(final int index)
{
// Get Y Axis, creating new ones if needed
int N = plot.getYAxes().size();
while (N <= index)
{
plot.addYAxis(NLS.bind(Messages.Plot_ValueAxisNameFMT, N));
N = plot.getYAxes().size();
}
return plot.getYAxes().get(index);
}
/** Update value axis from model
* @param index Axis index. Y axes will be created as needed.
* @param config Desired axis configuration
*/
public void updateAxis(final int index, final AxisConfig config)
{
final YAxis<Instant> axis = getYAxis(index);
axis.setName(config.getResolvedName());
axis.useAxisName(config.isUsingAxisName());
axis.useTraceNames(config.isUsingTraceNames());
axis.setColor(config.getColor());
axis.setLogarithmic(config.isLogScale());
axis.setGridVisible(config.isGridVisible());
axis.setAutoscale(config.isAutoScale());
axis.setValueRange(config.getMin(), config.getMax());
axis.setVisible(config.isVisible());
axis.setOnRight(config.isOnRight());
}
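    // Sketch of how a controller might push a model's configuration into this plot.
    // The Model accessors used below (getAxisCount, getAxis, getItems) are assumptions
    // about the surrounding Data Browser code, not part of this class:
    //
    //   for (int i = 0; i < model.getAxisCount(); ++i)
    //       modelBasedPlot.updateAxis(i, model.getAxis(i));
    //   for (ModelItem item : model.getItems())
    //       modelBasedPlot.addTrace(item);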
/** Add a trace to the plot
* @param item ModelItem for which to add a trace
* @author Laurent PHILIPPE
*/
public void addTrace(final ModelItem item)
{
final Trace<Instant> trace = plot.addTrace(item.getResolvedDisplayName(), item.getSamples(),
item.getColor(),
item.getTraceType(), item.getLineWidth(),
item.getPointType(), item.getPointSize(),
item.getAxisIndex());
items_by_trace.put(trace, item);
}
/** @param item ModelItem to remove from plot */
public void removeTrace(final ModelItem item)
{
final Trace<Instant> trace = findTrace(item);
plot.removeTrace(trace);
items_by_trace.remove(trace);
}
/** Update the configuration of a trace from Model Item
* @param item Item that was previously added to the Plot
*/
public void updateTrace(final ModelItem item)
{
final Trace<Instant> trace = findTrace(item);
// Update Trace with item's configuration
if (!trace.getName().equals(item.getDisplayName()))
trace.setName(item.getDisplayName());
// These happen to not cause an immediate redraw, so
// set even if no change
trace.setColor(item.getColor());
trace.setType(item.getTraceType());
trace.setWidth(item.getLineWidth());
trace.setPointType(item.getPointType());
trace.setPointSize(item.getPointSize());
// Locate index of current Y Axis
if (trace.getYAxis() != item.getAxisIndex())
plot.moveTrace(trace, item.getAxisIndex());
}
/** @param item {@link ModelItem} for which to locate the {@link Trace}
* @return Trace
* @throws RuntimeException When trace not found
*/
private Trace<Instant> findTrace(final ModelItem item)
{
for (Trace<Instant> trace : plot.getTraces())
if (trace.getData() == item.getSamples())
return trace;
throw new RuntimeException("Cannot locate trace for " + item);
}
/** @param trace {@link Trace} for which to locate the {@link ModelItem}
* @return ModelItem
* @throws RuntimeException When not found
*/
    private ModelItem findModelItem(final Trace<Instant> trace)
    {
        final ModelItem item = items_by_trace.get(trace);
        if (item == null)
            throw new RuntimeException("Cannot locate item for " + trace);
        return item;
    }
/** Update plot to given time range.
* Can be called from any thread.
* @param start
* @param end
*/
public void setTimeRange(final Instant start, final Instant end)
{
display.asyncExec(() -> plot.getXAxis().setValueRange(start, end));
}
/** Refresh the plot because the data has changed */
public void redrawTraces()
{
plot.requestUpdate();
}
}
|
package org.voltdb.rejoin;
import org.apache.zookeeper_voltpatches.KeeperException;
import org.json_voltpatches.JSONException;
import org.json_voltpatches.JSONStringer;
import org.voltcore.messaging.HostMessenger;
import org.voltdb.ClientInterface;
import org.voltdb.ClientResponseImpl;
import org.voltdb.VoltDB;
import org.voltdb.VoltTable;
import org.voltdb.catalog.Database;
import org.voltdb.client.ClientResponse;
import org.voltdb.iv2.Cartographer;
import org.voltdb.messaging.LocalMailbox;
import org.voltdb.sysprocs.saverestore.SnapshotUtil;
import java.util.Collection;
import java.util.Map;
/**
* Coordinates the sites to perform rejoin
*/
public abstract class RejoinCoordinator extends LocalMailbox {
protected final HostMessenger m_messenger;
/*
* m_handler is called when a SnapshotUtil.requestSnapshot response occurs.
* This callback runs on the snapshot daemon thread.
*/
protected SnapshotUtil.SnapshotResponseHandler m_handler =
new SnapshotUtil.SnapshotResponseHandler() {
@Override
public void handleResponse(ClientResponse resp)
{
if (resp == null) {
VoltDB.crashLocalVoltDB("Failed to initiate rejoin snapshot",
false, null);
} else if (resp.getStatus() != ClientResponseImpl.SUCCESS) {
VoltDB.crashLocalVoltDB("Failed to initiate rejoin snapshot: "
+ resp.getStatusString(), true, resp.getException());
}
VoltTable[] results = resp.getResults();
if (SnapshotUtil.didSnapshotRequestSucceed(results)) {
String appStatus = resp.getAppStatusString();
if (appStatus == null) {
VoltDB.crashLocalVoltDB("Rejoin snapshot request failed: "
+ resp.getStatusString(), false, null);
}
else {
// success is buried down here...
return;
}
} else {
VoltDB.crashLocalVoltDB("Snapshot request for rejoin failed",
false, null);
}
}
};
public RejoinCoordinator(HostMessenger hostMessenger) {
super(hostMessenger, hostMessenger.generateMailboxId(null));
m_messenger = hostMessenger;
}
public void setClientInterface(ClientInterface ci) {}
/**
* Starts the rejoin process.
*/
public abstract boolean startJoin(Database catalog, Cartographer cartographer)
throws KeeperException, InterruptedException, JSONException;
/**
* Discard the mailbox.
*/
public void close() {
m_messenger.removeMailbox(getHSId());
}
protected String makeSnapshotNonce(String type, long HSId)
{
return type + "_" + HSId + "_" + System.currentTimeMillis();
}
protected String makeSnapshotRequest(Map<Long, Long> sourceToDests,
Collection<Integer> tableIds)
{
try {
JSONStringer jsStringer = new JSONStringer();
jsStringer.object();
jsStringer.key("streamPairs");
jsStringer.object();
for (Map.Entry<Long, Long> entry : sourceToDests.entrySet()) {
jsStringer.key(Long.toString(entry.getKey())).value(Long.toString(entry.getValue()));
}
jsStringer.endObject();
jsStringer.key("tableIds");
jsStringer.array();
if (tableIds != null) {
for (int id : tableIds) {
jsStringer.value(id);
}
}
jsStringer.endArray();
jsStringer.endObject();
return jsStringer.toString();
} catch (Exception e) {
VoltDB.crashLocalVoltDB("Failed to serialize to JSON", true, e);
}
// unreachable;
return null;
}
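    // For reference, a request built from the stream pair {12345 -> 67890} and
    // table ids [5, 7] serializes to JSON of the form (ids are illustrative):
    //   {"streamPairs":{"12345":"67890"},"tableIds":[5,7]}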
}
|
package org.ovirt.engine.core.bll.storage.connection;
import java.lang.reflect.Constructor;
import java.util.EnumMap;
import java.util.Map;
import org.ovirt.engine.core.common.businessentities.storage.StorageType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class StorageHelperDirector {
private static final String ACTION_TYPE_PACKAGE = "org.ovirt.engine.core.bll.storage.connection";
private static final String ACTION_TYPE_CLASS = "StorageHelper";
private static final Logger log = LoggerFactory.getLogger(StorageHelperDirector.class);
private static StorageHelperDirector _instance = new StorageHelperDirector();
private Map<StorageType, IStorageHelper> _helpers = new EnumMap<>(StorageType.class);
public static StorageHelperDirector getInstance() {
return _instance;
}
private StorageHelperDirector() {
initializeHelpers();
}
private void initializeHelpers() {
try {
for (StorageType storageType : StorageType.values()) {
if (storageType.isConcreteStorageType()) {
Class<?> actionType = null;
String formattedClassName = String.format("%1$s.%2$s%3$s",
ACTION_TYPE_PACKAGE,
storageType.name(),
ACTION_TYPE_CLASS);
try {
actionType = Class.forName(formattedClassName);
} catch (ClassNotFoundException cnfe) {
log.debug("StorageHelperDirector Error:: the lookup for following class has failed: {}",
formattedClassName);
}
// if action type not exist - operation invalid
if (actionType != null) {
Constructor<?> info = actionType.getConstructors()[0];
                        IStorageHelper currentHelper = (IStorageHelper) info.newInstance();
_helpers.put(storageType, currentHelper);
}
}
}
} catch (Exception ex) {
log.error("StorageHelperDirector Error:: exception was encountered during initializeHelpers() execution: {}",
ex.getMessage());
log.debug("Exception");
throw new RuntimeException(ex);
}
}
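    // The reflective lookup above expects one helper class per concrete storage type,
    // located in this package and named "<StorageType>StorageHelper"; for example,
    // StorageType.NFS would resolve to
    // org.ovirt.engine.core.bll.storage.connection.NFSStorageHelper (illustrative).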
public IStorageHelper getItem(StorageType index) {
return _helpers.get(index);
}
}
|
package org.voltdb.sysprocs;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import java.io.*;
import org.voltdb.BackendTarget;
import org.voltdb.DependencyPair;
import org.voltdb.dtxn.MailboxTracker;
import org.voltdb.ExecutionSite.SystemProcedureExecutionContext;
import org.voltdb.HsqlBackend;
import org.voltdb.ParameterSet;
import org.voltdb.PrivateVoltTableFactory;
import org.voltdb.ProcInfo;
import org.voltdb.SiteProcedureConnection;
import org.voltdb.TheHashinator;
import org.voltdb.VoltDB;
import org.voltdb.VoltSystemProcedure;
import org.voltdb.VoltTable;
import org.voltdb.VoltTable.ColumnInfo;
import org.voltdb.VoltType;
import org.voltdb.VoltTypeException;
import org.voltdb.catalog.Cluster;
import org.voltdb.catalog.Database;
import org.voltdb.catalog.Procedure;
import org.voltdb.catalog.Table;
import org.voltdb.client.ConnectionUtil;
import org.voltdb.dtxn.DtxnConstants;
import org.voltdb.export.ExportManager;
import org.voltcore.logging.VoltLogger;
import org.voltdb.sysprocs.saverestore.ClusterSaveFileState;
import org.voltdb.sysprocs.saverestore.SavedTableConverter;
import org.voltdb.sysprocs.saverestore.SnapshotUtil;
import org.voltdb.sysprocs.saverestore.TableSaveFile;
import org.voltdb.sysprocs.saverestore.TableSaveFileState;
import org.voltcore.utils.DBBPool.BBContainer;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.CompressionService;
import org.voltdb.utils.VoltFile;
import org.json_voltpatches.*;
import org.voltcore.utils.Pair;
@ProcInfo (
singlePartition = false
)
public class SnapshotRestore extends VoltSystemProcedure
{
private static final VoltLogger TRACE_LOG = new VoltLogger(SnapshotRestore.class.getName());
private static final VoltLogger HOST_LOG = new VoltLogger("HOST");
private static final int DEP_restoreScan = (int)
SysProcFragmentId.PF_restoreScan | DtxnConstants.MULTIPARTITION_DEPENDENCY;
private static final int DEP_restoreScanResults = (int)
SysProcFragmentId.PF_restoreScanResults;
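    /*
     * Dependency ids are derived from the plan fragment ids; fragments that run
     * on every site have DtxnConstants.MULTIPARTITION_DEPENDENCY OR'ed in, while
     * the corresponding aggregation ("...Results") fragments do not.
     */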
/*
* Plan fragments for retrieving the digests
* for the snapshot visible at every node. Can't be combined
* with the other scan because only one result table can be returned
* by a plan fragment.
*/
private static final int DEP_restoreDigestScan = (int)
SysProcFragmentId.PF_restoreDigestScan | DtxnConstants.MULTIPARTITION_DEPENDENCY;
private static final int DEP_restoreDigestScanResults = (int)
SysProcFragmentId.PF_restoreDigestScanResults;
/*
* Plan fragments for distributing the full set of export sequence numbers
* to every partition where the relevant ones can be selected
* and forwarded to the EE. Also distributes the txnId of the snapshot
* which is used to truncate export data on disk from after the snapshot
*/
private static final int DEP_restoreDistributeExportSequenceNumbers = (int)
SysProcFragmentId.PF_restoreDistributeExportSequenceNumbers | DtxnConstants.MULTIPARTITION_DEPENDENCY;
private static final int DEP_restoreDistributeExportSequenceNumbersResults = (int)
SysProcFragmentId.PF_restoreDistributeExportSequenceNumbersResults;
private static HashSet<String> m_initializedTableSaveFileNames = new HashSet<String>();
private static ArrayDeque<TableSaveFile> m_saveFiles = new ArrayDeque<TableSaveFile>();
public static volatile boolean m_haveDoneRestore = false;
private static synchronized void initializeTableSaveFiles(
String filePath,
String fileNonce,
String tableName,
int originalHostIds[],
int relevantPartitionIds[]) throws IOException {
// This check ensures that only one site per host attempts to
// distribute this table. @SnapshotRestore sends plan fragments
// to every site on this host with the tables and partition ID that
// this host is going to distribute to the cluster. The first
// execution site to get into this synchronized method is going to
// 'win', add the table it's doing to this set, and then do the rest
// of the work. Subsequent sites will just return here.
if (!m_initializedTableSaveFileNames.add(tableName)) {
return;
}
// To avoid pulling duplicate rows when we have multiple files
// that contain the data for a partition, we're going to assign
// all of the partition IDs that were passed in to one and only one
// TableSaveFile. We'll pull them out of this set as we find
// files for them, and then once the set is empty we can bail out of
// this loop. The restore planner called in @SnapshotRestore should
// ensure that we can, in fact, find files for all these partitions.
HashSet<Integer> relevantPartitionSet =
new HashSet<Integer>();
for (int part_id : relevantPartitionIds)
{
relevantPartitionSet.add(part_id);
}
for (int originalHostId : originalHostIds) {
final File f = getSaveFileForPartitionedTable(filePath, fileNonce,
tableName,
originalHostId);
TableSaveFile savefile = getTableSaveFile(
f,
org.voltdb.VoltDB.instance().getLocalSites().size() * 4,
relevantPartitionSet.toArray(new Integer[relevantPartitionSet.size()]));
m_saveFiles.offer(savefile);
for (int part_id : savefile.getPartitionIds())
{
relevantPartitionSet.remove(part_id);
}
if (relevantPartitionSet.isEmpty())
{
break;
}
assert(m_saveFiles.peekLast().getCompleted());
}
}
private static synchronized boolean hasMoreChunks() throws IOException {
boolean hasMoreChunks = false;
while (!hasMoreChunks && m_saveFiles.peek() != null) {
TableSaveFile f = m_saveFiles.peek();
hasMoreChunks = f.hasMoreChunks();
if (!hasMoreChunks) {
try {
f.close();
} catch (IOException e) {
}
m_saveFiles.poll();
}
}
return hasMoreChunks;
}
private static synchronized BBContainer getNextChunk() throws IOException {
BBContainer c = null;
while (c == null && m_saveFiles.peek() != null) {
TableSaveFile f = m_saveFiles.peek();
c = f.getNextChunk();
if (c == null) {
f.close();
m_saveFiles.poll();
}
}
return c;
}
@Override
public void init(int numberOfPartitions, SiteProcedureConnection site,
Procedure catProc, BackendTarget eeType, HsqlBackend hsql, Cluster cluster)
{
super.init(numberOfPartitions, site, catProc, eeType, hsql, cluster);
site.registerPlanFragment(SysProcFragmentId.PF_restoreScan, this);
site.registerPlanFragment(SysProcFragmentId.PF_restoreScanResults,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreLoadReplicatedTable,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreLoadReplicatedTableResults,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreDistributeReplicatedTable,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreDistributePartitionedTable,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreDistributePartitionedTableResults,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreSendReplicatedTable,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreSendReplicatedTableResults,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreSendPartitionedTable,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreSendPartitionedTableResults,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreDigestScan,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreDigestScanResults,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreDistributeExportSequenceNumbers,
this);
site.registerPlanFragment(SysProcFragmentId.
PF_restoreDistributeExportSequenceNumbersResults,
this);
m_cluster = cluster;
m_siteId = site.getCorrespondingSiteId();
m_hostId = MailboxTracker.getHostForHSId(m_siteId);
// XXX HACK GIANT HACK given the current assumption that there is
// only one database per cluster, I'm asserting this and then
// skirting around the need to have the database name in order to get
// to the set of tables. --izzy
assert(m_cluster.getDatabases().size() == 1);
m_database = m_cluster.getDatabases().get("database");
}
@Override
public DependencyPair
executePlanFragment(HashMap<Integer, List<VoltTable>> dependencies, long fragmentId, ParameterSet params,
SystemProcedureExecutionContext context)
{
if (fragmentId == SysProcFragmentId.PF_restoreDistributeExportSequenceNumbers)
{
assert(params.toArray()[0] != null);
assert(params.toArray().length == 2);
assert(params.toArray()[0] instanceof byte[]);
VoltTable result = new VoltTable(new VoltTable.ColumnInfo("RESULT", VoltType.STRING));
long snapshotTxnId = ((Long)params.toArray()[1]).longValue();
// Choose the lowest site ID on this host to truncate export data
int host_id = context.getExecutionSite().getCorrespondingHostId();
Long lowest_hs_id =
VoltDB.instance().getCatalogContext().siteTracker.
getLowestLiveExecSiteIdForHost(host_id);
if (context.getExecutionSite().getSiteId() == lowest_hs_id)
{
ExportManager.instance().
truncateExportToTxnId(snapshotTxnId);
}
try {
ByteArrayInputStream bais = new ByteArrayInputStream((byte[])params.toArray()[0]);
ObjectInputStream ois = new ObjectInputStream(bais);
//Sequence numbers for every table and partition
Map<String, Map<Integer, Long>> exportSequenceNumbers =
(Map<String, Map<Integer, Long>>)ois.readObject();
Database db = context.getDatabase();
Integer myPartitionId = context.getExecutionSite().getCorrespondingPartitionId();
//Iterate the export tables
for (Table t : db.getTables()) {
if (!CatalogUtil.isTableExportOnly( db, t))
continue;
String signature = t.getSignature();
String name = t.getTypeName();
//Sequence numbers for this table for every partition
Map<Integer, Long> sequenceNumberPerPartition = exportSequenceNumbers.get(name);
if (sequenceNumberPerPartition == null) {
HOST_LOG.warn("Could not find export sequence number for table " + name +
". This warning is safe to ignore if you are loading a pre 1.3 snapshot" +
" which would not contain these sequence numbers (added in 1.3)." +
" If this is a post 1.3 snapshot then the restore has failed and export sequence " +
" are reset to 0");
continue;
}
Long sequenceNumber =
sequenceNumberPerPartition.get(myPartitionId);
if (sequenceNumber == null) {
HOST_LOG.warn("Could not find an export sequence number for table " + name +
" partition " + myPartitionId +
". This warning is safe to ignore if you are loading a pre 1.3 snapshot " +
" which would not contain these sequence numbers (added in 1.3)." +
" If this is a post 1.3 snapshot then the restore has failed and export sequence " +
" are reset to 0");
continue;
}
//Forward the sequence number to the EE
context.getExecutionEngine().exportAction(
false,
0,
sequenceNumber,
myPartitionId,
signature);
}
} catch (Exception e) {
e.printStackTrace();//l4j doesn't print the stack trace
HOST_LOG.error(e);
result.addRow("FAILURE");
}
return new DependencyPair(DEP_restoreDistributeExportSequenceNumbers, result);
}
else if (fragmentId == SysProcFragmentId.PF_restoreDistributeExportSequenceNumbersResults)
{
TRACE_LOG.trace("Aggregating digest scan state");
assert(dependencies.size() > 0);
List<VoltTable> dep = dependencies.get(DEP_restoreDistributeExportSequenceNumbers);
VoltTable result = new VoltTable(new VoltTable.ColumnInfo("RESULT", VoltType.STRING));
for (VoltTable table : dep)
{
while (table.advanceRow())
{
                // this actually adds the active row... weird...
result.add(table);
}
}
return new DependencyPair(DEP_restoreDistributeExportSequenceNumbersResults, result);
} else if (fragmentId == SysProcFragmentId.PF_restoreDigestScan)
{
VoltTable result = new VoltTable(
new VoltTable.ColumnInfo("DIGEST", VoltType.STRING),
new VoltTable.ColumnInfo("RESULT", VoltType.STRING),
new VoltTable.ColumnInfo("ERR_MSG", VoltType.STRING));
// Choose the lowest site ID on this host to do the file scan
// All other sites should just return empty results tables.
int host_id = context.getExecutionSite().getCorrespondingHostId();
Long lowest_hs_id =
VoltDB.instance().getCatalogContext().siteTracker.
getLowestLiveExecSiteIdForHost(host_id);
if (context.getExecutionSite().getSiteId() == lowest_hs_id)
{
try {
// implicitly synchronized by the way restore operates.
// this scan must complete on every site and return results
// to the coordinator for aggregation before it will send out
// distribution fragments, so two sites on the same node
// can't be attempting to set and clear this HashSet simultaneously
TRACE_LOG.trace("Checking saved table digest state for restore of: "
+ m_filePath + ", " + m_fileNonce);
List<JSONObject> digests =
SnapshotUtil.retrieveDigests(m_filePath, m_fileNonce);
for (JSONObject obj : digests) {
result.addRow(obj.toString(), "SUCCESS", null);
}
} catch (Exception e) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
pw.flush();
e.printStackTrace();//l4j doesn't print stack traces
HOST_LOG.error(e);
result.addRow(null, "FAILURE", sw.toString());
return new DependencyPair(DEP_restoreDigestScan, result);
}
}
return new DependencyPair(DEP_restoreDigestScan, result);
}
else if (fragmentId == SysProcFragmentId.PF_restoreDigestScanResults)
{
TRACE_LOG.trace("Aggregating digest scan state");
assert(dependencies.size() > 0);
List<VoltTable> dep = dependencies.get(DEP_restoreDigestScan);
VoltTable result = new VoltTable(
new VoltTable.ColumnInfo("DIGEST", VoltType.STRING),
new VoltTable.ColumnInfo("RESULT", VoltType.STRING),
new VoltTable.ColumnInfo("ERR_MSG", VoltType.STRING));
for (VoltTable table : dep)
{
while (table.advanceRow())
{
                // this actually adds the active row... weird...
result.add(table);
}
}
return new DependencyPair(DEP_restoreDigestScanResults, result);
} else if (fragmentId == SysProcFragmentId.PF_restoreScan)
{
assert(params.toArray()[0] != null);
assert(params.toArray()[1] != null);
String hostname = ConnectionUtil.getHostnameOrAddress();
VoltTable result = ClusterSaveFileState.constructEmptySaveFileStateVoltTable();
// Choose the lowest site ID on this host to do the file scan
// All other sites should just return empty results tables.
int host_id = context.getExecutionSite().getCorrespondingHostId();
Long lowest_hs_id =
VoltDB.instance().getCatalogContext().siteTracker.
getLowestLiveExecSiteIdForHost(host_id);
if (context.getExecutionSite().getSiteId() == lowest_hs_id)
{
// implicitly synchronized by the way restore operates.
// this scan must complete on every site and return results
// to the coordinator for aggregation before it will send out
// distribution fragments, so two sites on the same node
// can't be attempting to set and clear this HashSet simultaneously
m_initializedTableSaveFileNames.clear();
m_saveFiles.clear();//Tests will reused a VoltDB process that fails a restore
m_filePath = (String) params.toArray()[0];
m_fileNonce = (String) params.toArray()[1];
TRACE_LOG.trace("Checking saved table state for restore of: "
+ m_filePath + ", " + m_fileNonce);
File[] savefiles = SnapshotUtil.retrieveRelevantFiles(m_filePath, m_fileNonce);
if (savefiles == null) {
return new DependencyPair(DEP_restoreScan, result);
}
for (File file : savefiles)
{
TableSaveFile savefile = null;
try
{
savefile = getTableSaveFile(file, 1, null);
try {
if (!savefile.getCompleted()) {
continue;
}
String is_replicated = "FALSE";
if (savefile.isReplicated())
{
is_replicated = "TRUE";
}
int partitionIds[] = savefile.getPartitionIds();
for (int pid : partitionIds) {
result.addRow(m_hostId,
hostname,
savefile.getHostId(),
savefile.getHostname(),
savefile.getClusterName(),
savefile.getDatabaseName(),
savefile.getTableName(),
savefile.getTxnId(),
is_replicated,
pid,
savefile.getTotalPartitions());
}
} finally {
savefile.close();
}
}
catch (FileNotFoundException e)
{
// retrieveRelevantFiles should always generate a list
// of valid present files in m_filePath, so if we end up
// getting here, something has gone very weird.
e.printStackTrace();
}
catch (IOException e)
{
// For the time being I'm content to treat this as a
// missing file and let the coordinator complain if
// it discovers that it can't build a consistent
// database out of the files it sees available.
// Maybe just a log message? Later.
e.printStackTrace();
}
}
}
return new DependencyPair(DEP_restoreScan, result);
}
else if (fragmentId == SysProcFragmentId.PF_restoreScanResults)
{
TRACE_LOG.trace("Aggregating saved table state");
assert(dependencies.size() > 0);
List<VoltTable> dep = dependencies.get(DEP_restoreScan);
VoltTable result = ClusterSaveFileState.constructEmptySaveFileStateVoltTable();
for (VoltTable table : dep)
{
while (table.advanceRow())
{
                // this actually adds the active row... weird...
result.add(table);
}
}
return new DependencyPair(DEP_restoreScanResults, result);
}
else if (fragmentId == SysProcFragmentId.PF_restoreLoadReplicatedTable)
{
m_haveDoneRestore = true;
assert(params.toArray()[0] != null);
assert(params.toArray()[1] != null);
String table_name = (String) params.toArray()[0];
int dependency_id = (Integer) params.toArray()[1];
TRACE_LOG.trace("Loading replicated table: " + table_name);
String result_str = "SUCCESS";
String error_msg = "";
TableSaveFile savefile = null;
/**
* For replicated tables this will do the slow thing and read the file
* once for each ExecutionSite. This could use optimization like
* is done with the partitioned tables.
*/
try
{
savefile =
getTableSaveFile(getSaveFileForReplicatedTable(table_name), 3, null);
assert(savefile.getCompleted());
}
catch (IOException e)
{
String hostname = ConnectionUtil.getHostnameOrAddress();
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, table_name, -1, "FAILURE",
"Unable to load table: " + table_name +
" error: " + e.getMessage());
return new DependencyPair(dependency_id, result);
}
try {
final Table new_catalog_table = getCatalogTable(table_name);
Boolean needsConversion = null;
while (savefile.hasMoreChunks())
{
VoltTable table = null;
final org.voltcore.utils.DBBPool.BBContainer c = savefile.getNextChunk();
if (c == null) {
continue;//Should be equivalent to break
}
if (needsConversion == null) {
VoltTable old_table =
PrivateVoltTableFactory.createVoltTableFromBuffer(c.b.duplicate(), true);
needsConversion = SavedTableConverter.needsConversion(old_table, new_catalog_table);
}
if (needsConversion.booleanValue()) {
VoltTable old_table =
PrivateVoltTableFactory.createVoltTableFromBuffer(c.b , true);
table = SavedTableConverter.convertTable(old_table,
new_catalog_table);
} else {
ByteBuffer copy = ByteBuffer.allocate(c.b.remaining());
copy.put(c.b);
copy.flip();
table = PrivateVoltTableFactory.createVoltTableFromBuffer(copy, true);
}
c.discard();
try
{
super.voltLoadTable(context.getCluster().getTypeName(),
context.getDatabase().getTypeName(),
table_name, table);
}
catch (VoltAbortException e)
{
result_str = "FAILURE";
error_msg = e.getMessage();
break;
}
}
} catch (IOException e) {
String hostname = ConnectionUtil.getHostnameOrAddress();
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, table_name, -1, "FAILURE",
"Unable to load table: " + table_name +
" error: " + e.getMessage());
return new DependencyPair(dependency_id, result);
} catch (VoltTypeException e) {
String hostname = ConnectionUtil.getHostnameOrAddress();
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, table_name, -1, "FAILURE",
"Unable to load table: " + table_name +
" error: " + e.getMessage());
return new DependencyPair(dependency_id, result);
}
String hostname = ConnectionUtil.getHostnameOrAddress();
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, table_name, -1, result_str,
error_msg);
try {
savefile.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return new DependencyPair( dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreDistributeReplicatedTable)
{
m_haveDoneRestore = true;
// XXX I tested this with a hack that cannot be replicated
// in a unit test since it requires hacks to this sysproc that
// effectively break it
assert(params.toArray()[0] != null);
assert(params.toArray()[1] != null);
assert(params.toArray()[2] != null);
assert(params.toArray()[3] != null);
String table_name = (String) params.toArray()[0];
int site_id = (Integer) params.toArray()[1];
int dependency_id = (Integer) params.toArray()[2];
TRACE_LOG.trace("Distributing replicated table: " + table_name +
" to: " + site_id);
VoltTable result = performDistributeReplicatedTable(table_name, site_id);
return new DependencyPair(dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreSendReplicatedTable)
{
m_haveDoneRestore = true;
assert(params.toArray()[0] != null);
assert(params.toArray()[1] != null);
assert(params.toArray()[2] != null);
assert(params.toArray()[3] != null);
String table_name = (String) params.toArray()[0];
int dependency_id = (Integer) params.toArray()[1];
byte compressedTable[] = (byte[]) params.toArray()[2];
TRACE_LOG.trace("Received replicated table: " + table_name);
String result_str = "SUCCESS";
String error_msg = "";
try
{
VoltTable table =
PrivateVoltTableFactory.createVoltTableFromBuffer(
ByteBuffer.wrap(
CompressionService.decompressBytes(compressedTable)),
true);
super.voltLoadTable(context.getCluster().getTypeName(),
context.getDatabase().getTypeName(),
table_name, table);
}
catch (Exception e)
{
result_str = "FAILURE";
error_msg = e.getMessage();
}
VoltTable result = constructResultsTable();
//Use null hostname to avoid DNS lookup that might be slow
//if DNS caching is not enabled
result.addRow(m_hostId, null, m_siteId, table_name, -1,
result_str, error_msg);
return new DependencyPair(dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreSendReplicatedTableResults)
{
m_haveDoneRestore = true;
assert(params.toArray()[0] != null);
int dependency_id = (Integer) params.toArray()[0];
TRACE_LOG.trace("Received confirmation of successful replicated table load");
VoltTable result = constructResultsTable();
for (int dep_id : dependencies.keySet())
{
List<VoltTable> table_list = dependencies.get(dep_id);
assert(table_list.size() == 1);
VoltTable t = table_list.get(0);
while (t.advanceRow())
{
// this will actually add the active row of t
result.add(t);
}
}
return new DependencyPair(dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreLoadReplicatedTableResults)
{
m_haveDoneRestore = true;
TRACE_LOG.trace("Aggregating replicated table restore results");
assert(params.toArray()[0] != null);
int dependency_id = (Integer) params.toArray()[0];
assert(dependencies.size() > 0);
VoltTable result = constructResultsTable();
for (int dep_id : dependencies.keySet())
{
List<VoltTable> table_list = dependencies.get(dep_id);
assert(table_list.size() == 1);
VoltTable t = table_list.get(0);
while (t.advanceRow())
{
// this will actually add the active row of t
result.add(t);
}
}
return new DependencyPair(dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreDistributePartitionedTable)
{
m_haveDoneRestore = true;
Object paramsA[] = params.toArray();
assert(paramsA[0] != null);
assert(paramsA[1] != null);
assert(paramsA[2] != null);
assert(paramsA[3] != null);
String table_name = (String) paramsA[0];
int originalHosts[] = (int[]) paramsA[1];
int relevantPartitions[] = (int[]) paramsA[2];
int dependency_id = (Integer) paramsA[3];
for (int partition_id : relevantPartitions) {
TRACE_LOG.trace("Distributing partitioned table: " + table_name +
" partition id: " + partition_id);
}
VoltTable result =
performDistributePartitionedTable(table_name, originalHosts,
relevantPartitions);
return new DependencyPair( dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreDistributePartitionedTableResults)
{
m_haveDoneRestore = true;
TRACE_LOG.trace("Aggregating partitioned table restore results");
assert(params.toArray()[0] != null);
int dependency_id = (Integer) params.toArray()[0];
VoltTable result = constructResultsTable();
for (int dep_id : dependencies.keySet())
{
List<VoltTable> table_list = dependencies.get(dep_id);
assert(table_list.size() == 1);
VoltTable t = table_list.get(0);
while (t.advanceRow())
{
// this will actually add the active row of t
result.add(t);
}
}
return new DependencyPair(dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreSendPartitionedTable)
{
m_haveDoneRestore = true;
assert(params.toArray()[0] != null);
assert(params.toArray()[1] != null);
assert(params.toArray()[2] != null);
assert(params.toArray()[3] != null);
String table_name = (String) params.toArray()[0];
int partition_id = (Integer) params.toArray()[1];
int dependency_id = (Integer) params.toArray()[2];
byte compressedTable[] = (byte[]) params.toArray()[3];
TRACE_LOG.trace("Received partitioned table: " + table_name);
String result_str = "SUCCESS";
String error_msg = "";
try
{
VoltTable table =
PrivateVoltTableFactory.createVoltTableFromBuffer(
ByteBuffer.wrap(
CompressionService.decompressBytes(compressedTable)),
true);
super.voltLoadTable(context.getCluster().getTypeName(),
context.getDatabase().getTypeName(),
table_name, table);
}
catch (Exception e)
{
result_str = "FAILURE";
error_msg = e.getMessage();
}
VoltTable result = constructResultsTable();
//Use null hostname to avoid DNS lookup that might be slow
//if DNS caching is not enabled
result.addRow(m_hostId, null, m_siteId, table_name, partition_id,
result_str, error_msg);
return new DependencyPair(dependency_id, result);
}
else if (fragmentId ==
SysProcFragmentId.PF_restoreSendPartitionedTableResults)
{
m_haveDoneRestore = true;
assert(params.toArray()[0] != null);
int dependency_id = (Integer) params.toArray()[0];
TRACE_LOG.trace("Received confirmation of successful partitioned table load");
VoltTable result = constructResultsTable();
for (int dep_id : dependencies.keySet())
{
List<VoltTable> table_list = dependencies.get(dep_id);
assert(table_list.size() == 1);
VoltTable t = table_list.get(0);
while (t.advanceRow())
{
// this will actually add the active row of t
result.add(t);
}
}
return new DependencyPair(dependency_id, result);
}
assert (false);
return null;
}
// private final VoltSampler m_sampler = new VoltSampler(10, "sample" + String.valueOf(new Random().nextInt() % 10000) + ".txt");
public VoltTable[] run(SystemProcedureExecutionContext ctx,
String path, String nonce) throws VoltAbortException
{
if (m_haveDoneRestore) {
throw new VoltAbortException("Cluster has already been restored or has failed a restore." +
" Restart the cluster before doing another restore.");
}
final long startTime = System.currentTimeMillis();
HOST_LOG.info("Restoring from path: " + path + " with nonce: " + nonce);
// Fetch all the savefile metadata from the cluster
VoltTable[] savefile_data;
savefile_data = performRestoreScanWork(path, nonce);
List<JSONObject> digests;
Map<String, Map<Integer, Long>> exportSequenceNumbers;
try {
Pair<List<JSONObject>, Map<String, Map<Integer, Long>>> digestScanResult =
performRestoreDigestScanWork();
digests = digestScanResult.getFirst();
exportSequenceNumbers = digestScanResult.getSecond();
} catch (VoltAbortException e) {
ColumnInfo[] result_columns = new ColumnInfo[2];
int ii = 0;
result_columns[ii++] = new ColumnInfo("RESULT", VoltType.STRING);
result_columns[ii++] = new ColumnInfo("ERR_MSG", VoltType.STRING);
VoltTable results[] = new VoltTable[] { new VoltTable(result_columns) };
results[0].addRow("FAILURE", e.toString());
return results;
}
ClusterSaveFileState savefile_state = null;
try
{
savefile_state = new ClusterSaveFileState(savefile_data[0]);
}
catch (IOException e)
{
throw new VoltAbortException(e.getMessage());
}
HashSet<String> relevantTableNames = new HashSet<String>();
try {
if (digests.isEmpty()) {
throw new Exception("No digests found");
}
for (JSONObject obj : digests) {
JSONArray tables = obj.getJSONArray("tables");
for (int ii = 0; ii < tables.length(); ii++) {
relevantTableNames.add(tables.getString(ii));
}
}
} catch (Exception e) {
ColumnInfo[] result_columns = new ColumnInfo[2];
int ii = 0;
result_columns[ii++] = new ColumnInfo("RESULT", VoltType.STRING);
result_columns[ii++] = new ColumnInfo("ERR_MSG", VoltType.STRING);
VoltTable results[] = new VoltTable[] { new VoltTable(result_columns) };
results[0].addRow("FAILURE", e.toString());
return results;
}
assert(relevantTableNames != null);
assert(relevantTableNames.size() > 0);
// ENG-1078: I think this giant for/if block is only good for
// checking if there are no files for a table listed in the digest.
// There appear to be redundant checks for that, and then the per-table
// consistency check is preempted by the ClusterSaveFileState constructor
// called above.
VoltTable[] results = null;
for (String tableName : relevantTableNames) {
if (!savefile_state.getSavedTableNames().contains(tableName)) {
if (results == null) {
ColumnInfo[] result_columns = new ColumnInfo[2];
int ii = 0;
result_columns[ii++] = new ColumnInfo("RESULT", VoltType.STRING);
result_columns[ii++] = new ColumnInfo("ERR_MSG", VoltType.STRING);
results = new VoltTable[] { new VoltTable(result_columns) };
}
results[0].addRow("FAILURE", "Save data contains no information for table " + tableName);
break;
}
final TableSaveFileState saveFileState = savefile_state.getTableState(tableName);
if (saveFileState == null)
{
// Pretty sure this is unreachable
// See ENG-1078
if (results == null) {
ColumnInfo[] result_columns = new ColumnInfo[2];
int ii = 0;
result_columns[ii++] = new ColumnInfo("RESULT", VoltType.STRING);
result_columns[ii++] = new ColumnInfo("ERR_MSG", VoltType.STRING);
results = new VoltTable[] { new VoltTable(result_columns) };
}
results[0].addRow( "FAILURE", "Save data contains no information for table " + tableName);
}
else if (!saveFileState.isConsistent())
{
// Also pretty sure this is unreachable
// See ENG-1078
if (results == null) {
ColumnInfo[] result_columns = new ColumnInfo[2];
int ii = 0;
result_columns[ii++] = new ColumnInfo("RESULT", VoltType.STRING);
result_columns[ii++] = new ColumnInfo("ERR_MSG", VoltType.STRING);
results = new VoltTable[] { new VoltTable(result_columns) };
}
results[0].addRow( "FAILURE", saveFileState.getConsistencyResult());
}
}
if (results != null) {
return results;
}
/*
* Serialize all the export sequence numbers and then distribute them in a
* plan fragment and each receiver will pull the relevant information for
* itself
*/
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(exportSequenceNumbers);
oos.flush();
byte exportSequenceNumberBytes[] = baos.toByteArray();
oos.close();
results =
performDistributeExportSequenceNumbers(
exportSequenceNumberBytes,
digests.get(0).getLong("txnId"));
} catch (IOException e) {
throw new VoltAbortException(e);
} catch (JSONException e) {
throw new VoltAbortException(e);
}
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
throw new VoltAbortException("Error distributing export sequence numbers");
}
}
results = performTableRestoreWork(savefile_state);
final long endTime = System.currentTimeMillis();
final double duration = (endTime - startTime) / 1000.0;
final StringWriter sw = new StringWriter();
final PrintWriter pw = new PrintWriter(sw);
pw.printf("%.2f", duration);
HOST_LOG.info("Finished restore of " + path + " with nonce: "
+ nonce + " in " + sw.toString() + " seconds");
// m_sampler.setShouldStop();
// try {
// m_sampler.join();
// } catch (InterruptedException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
return results;
}
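// Distribute the serialized export sequence numbers to every execution site via a
// multipartition plan fragment and aggregate the per-site acknowledgements.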
private VoltTable[] performDistributeExportSequenceNumbers(
byte[] exportSequenceNumberBytes,
long txnId) {
SynthesizedPlanFragment[] pfs = new SynthesizedPlanFragment[2];
// This fragment causes each execution site to confirm the likely
// success of writing tables to disk
pfs[0] = new SynthesizedPlanFragment();
pfs[0].fragmentId = SysProcFragmentId.PF_restoreDistributeExportSequenceNumbers;
pfs[0].outputDepId = DEP_restoreDistributeExportSequenceNumbers;
pfs[0].inputDepIds = new int[] {};
pfs[0].multipartition = true;
ParameterSet params = new ParameterSet();
params.setParameters(exportSequenceNumberBytes, txnId);
pfs[0].parameters = params;
// This fragment aggregates the save-to-disk sanity check results
pfs[1] = new SynthesizedPlanFragment();
pfs[1].fragmentId = SysProcFragmentId.PF_restoreDistributeExportSequenceNumbersResults;
pfs[1].outputDepId = DEP_restoreDistributeExportSequenceNumbersResults;
pfs[1].inputDepIds = new int[] { DEP_restoreDistributeExportSequenceNumbers };
pfs[1].multipartition = false;
pfs[1].parameters = new ParameterSet();
VoltTable[] results;
results = executeSysProcPlanFragments(pfs, DEP_restoreDistributeExportSequenceNumbersResults);
return results;
}
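// Build the standard per-site results table: host id, hostname, site id, table name,
// partition id, result string and error message.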
private VoltTable constructResultsTable()
{
ColumnInfo[] result_columns = new ColumnInfo[7];
int ii = 0;
result_columns[ii++] = new ColumnInfo(CNAME_HOST_ID, CTYPE_ID);
result_columns[ii++] = new ColumnInfo("HOSTNAME", VoltType.STRING);
result_columns[ii++] = new ColumnInfo(CNAME_SITE_ID, CTYPE_ID);
result_columns[ii++] = new ColumnInfo("TABLE", VoltType.STRING);
result_columns[ii++] = new ColumnInfo(CNAME_PARTITION_ID, CTYPE_ID);
result_columns[ii++] = new ColumnInfo("RESULT", VoltType.STRING);
result_columns[ii++] = new ColumnInfo("ERR_MSG", VoltType.STRING);
return new VoltTable(result_columns);
}
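// Resolve the save file for a replicated table: <nonce>-<tableName>.vpt under the restore path.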
private File getSaveFileForReplicatedTable(String tableName)
{
StringBuilder filename_builder = new StringBuilder(m_fileNonce);
filename_builder.append("-");
filename_builder.append(tableName);
filename_builder.append(".vpt");
return new VoltFile(m_filePath, new String(filename_builder));
}
private static File getSaveFileForPartitionedTable(
String filePath,
String fileNonce,
String tableName,
int originalHostId)
{
StringBuilder filename_builder = new StringBuilder(fileNonce);
filename_builder.append("-");
filename_builder.append(tableName);
filename_builder.append("-host_");
filename_builder.append(originalHostId);
filename_builder.append(".vpt");
return new VoltFile(filePath, new String(filename_builder));
}
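// Open a TableSaveFile over the given snapshot file with the requested read-ahead
// chunk count and an optional filter of relevant partition ids.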
private static TableSaveFile getTableSaveFile(
File saveFile,
int readAheadChunks,
Integer relevantPartitionIds[]) throws IOException
{
FileInputStream savefile_input = new FileInputStream(saveFile);
TableSaveFile savefile =
new TableSaveFile(
savefile_input.getChannel(),
readAheadChunks,
relevantPartitionIds);
return savefile;
}
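// Fan out PF_restoreScan to every site to enumerate matching save files on each host,
// then aggregate the results at the coordinator via PF_restoreScanResults.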
private final VoltTable[] performRestoreScanWork(String filePath,
String fileNonce)
{
SynthesizedPlanFragment[] pfs = new SynthesizedPlanFragment[2];
// This fragment causes each execution site to confirm the likely
// success of writing tables to disk
pfs[0] = new SynthesizedPlanFragment();
pfs[0].fragmentId = SysProcFragmentId.PF_restoreScan;
pfs[0].outputDepId = DEP_restoreScan;
pfs[0].inputDepIds = new int[] {};
pfs[0].multipartition = true;
ParameterSet params = new ParameterSet();
params.setParameters(filePath, fileNonce);
pfs[0].parameters = params;
// This fragment aggregates the save-to-disk sanity check results
pfs[1] = new SynthesizedPlanFragment();
pfs[1].fragmentId = SysProcFragmentId.PF_restoreScanResults;
pfs[1].outputDepId = DEP_restoreScanResults;
pfs[1].inputDepIds = new int[] { DEP_restoreScan };
pfs[1].multipartition = false;
pfs[1].parameters = new ParameterSet();
VoltTable[] results;
results = executeSysProcPlanFragments(pfs, DEP_restoreScanResults);
return results;
}
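// Collect the snapshot digests from all hosts, verify that they all come from the same
// snapshot (same txnId) and aggregate export sequence numbers per table and partition.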
private final Pair<List<JSONObject>, Map<String, Map<Integer, Long>>> performRestoreDigestScanWork()
{
SynthesizedPlanFragment[] pfs = new SynthesizedPlanFragment[2];
// This fragment causes each execution site to confirm the likely
// success of writing tables to disk
pfs[0] = new SynthesizedPlanFragment();
pfs[0].fragmentId = SysProcFragmentId.PF_restoreDigestScan;
pfs[0].outputDepId = DEP_restoreDigestScan;
pfs[0].inputDepIds = new int[] {};
pfs[0].multipartition = true;
pfs[0].parameters = new ParameterSet();
// This fragment aggregates the save-to-disk sanity check results
pfs[1] = new SynthesizedPlanFragment();
pfs[1].fragmentId = SysProcFragmentId.PF_restoreDigestScanResults;
pfs[1].outputDepId = DEP_restoreDigestScanResults;
pfs[1].inputDepIds = new int[] { DEP_restoreDigestScan };
pfs[1].multipartition = false;
pfs[1].parameters = new ParameterSet();
VoltTable[] results;
results = executeSysProcPlanFragments(pfs, DEP_restoreDigestScanResults);
HashMap<String, Map<Integer, Long>> exportSequenceNumbers =
new HashMap<String, Map<Integer, Long>>();
Long digestTxnId = null;
ArrayList<JSONObject> digests = new ArrayList<JSONObject>();
/*
* Retrieve and aggregate the per table per partition sequence numbers from
* all the digest files retrieved across the cluster
*/
try {
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
throw new VoltAbortException(results[0].getString("ERR_MSG"));
}
JSONObject digest = new JSONObject(results[0].getString(0));
digests.add(digest);
/*
* Validate that the digests are all from the same snapshot
*/
if (digestTxnId == null) {
digestTxnId = digest.getLong("txnId");
} else {
if (digest.getLong("txnId") != digestTxnId) {
throw new VoltAbortException("Retrieved a digest with txnId " + digest.getLong("txnId") +
" that doesn't match the txnId seen previously " + digestTxnId + ". Inspect the digests" +
" with the provided nonce and ensure that they are all really from the same snapshot");
}
}
/*
* Snapshots from pre 1.3 VoltDB won't have sequence numbers
* Doing nothing will default it to zero.
*/
if (digest.has("exportSequenceNumbers")) {
/*
* An array of entries for each table
*/
JSONArray sequenceNumbers = digest.getJSONArray("exportSequenceNumbers");
for (int ii = 0; ii < sequenceNumbers.length(); ii++) {
/*
* An object containing all the sequence numbers for its partitions
* in this table. This will be a subset since it is from a single digest
*/
JSONObject tableSequenceNumbers = sequenceNumbers.getJSONObject(ii);
String tableName = tableSequenceNumbers.getString("exportTableName");
Map<Integer,Long> partitionSequenceNumbers =
exportSequenceNumbers.get(tableName);
if (partitionSequenceNumbers == null) {
partitionSequenceNumbers = new HashMap<Integer,Long>();
exportSequenceNumbers.put(tableName, partitionSequenceNumbers);
}
/*
* Array of objects containing partition and sequence number pairs
*/
JSONArray sourcePartitionSequenceNumbers =
tableSequenceNumbers.getJSONArray("sequenceNumberPerPartition");
for (int zz = 0; zz < sourcePartitionSequenceNumbers.length(); zz++) {
int partition = sourcePartitionSequenceNumbers.getJSONObject(zz).getInt("partition");
long sequenceNumber =
sourcePartitionSequenceNumbers.getJSONObject(zz).getInt("exportSequenceNumber");
partitionSequenceNumbers.put(partition, sequenceNumber);
}
}
}
}
} catch (JSONException e) {
throw new VoltAbortException(e);
}
return Pair.of((List<JSONObject>)digests, (Map<String, Map<Integer, Long>>)exportSequenceNumbers);
}
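// Select the catalog tables that have saved data and are not materialized views;
// saved tables that are now materialized views are skipped with a log message.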
private Set<Table> getTablesToRestore(Set<String> savedTableNames)
{
Set<Table> tables_to_restore = new HashSet<Table>();
for (Table table : m_database.getTables())
{
if (savedTableNames.contains(table.getTypeName()))
{
if (table.getMaterializer() == null)
{
tables_to_restore.add(table);
}
else
{
// LOG_TRIAGE reconsider info level here?
HOST_LOG.info("Table: " + table.getTypeName() + " was saved " +
"but is now a materialized table and will " +
"not be loaded from disk");
}
}
else
{
if (table.getMaterializer() == null && !CatalogUtil.isTableExportOnly(m_database, table))
{
HOST_LOG.info("Table: " + table.getTypeName() + " does not have " +
"any savefile data and so will not be loaded " +
"from disk");
}
}
}
// XXX consider logging the list of tables that were saved but not
// in the current catalog
return tables_to_restore;
}
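// Generate a restore plan for every table to be restored, execute each plan and
// collect the per-fragment results into a single results table.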
private VoltTable[]
performTableRestoreWork(ClusterSaveFileState savefileState) throws VoltAbortException
{
Set<Table> tables_to_restore =
getTablesToRestore(savefileState.getSavedTableNames());
VoltTable[] restore_results = new VoltTable[1];
restore_results[0] = constructResultsTable();
ArrayList<SynthesizedPlanFragment[]> restorePlans =
new ArrayList<SynthesizedPlanFragment[]>();
for (Table t : tables_to_restore) {
TableSaveFileState table_state =
savefileState.getTableState(t.getTypeName());
SynthesizedPlanFragment[] restore_plan =
table_state.generateRestorePlan(t);
if (restore_plan == null) {
HOST_LOG.error(
"Unable to generate restore plan for " + t.getTypeName() + "; table not restored");
throw new VoltAbortException(
"Unable to generate restore plan for " + t.getTypeName() + "; table not restored");
}
restorePlans.add(restore_plan);
}
Iterator<Table> tableIterator = tables_to_restore.iterator();
for (SynthesizedPlanFragment[] restore_plan : restorePlans)
{
Table table = tableIterator.next();
TableSaveFileState table_state =
savefileState.getTableState(table.getTypeName());
TRACE_LOG.trace("Performing restore for table: " + table.getTypeName());
TRACE_LOG.trace("Plan has fragments: " + restore_plan.length);
VoltTable[] results =
executeSysProcPlanFragments(restore_plan,
table_state.getRootDependencyId());
while (results[0].advanceRow())
{
// this will actually add the active row of results[0]
restore_results[0].add(results[0]);
}
}
return restore_results;
}
// XXX I hacked up a horrible one-off in my world to test this code.
// I believe that it will work for at least one new node, but
// there's not a good way to add a unit test for this at the moment,
// so the emma coverage is weak.
private VoltTable performDistributeReplicatedTable(String tableName,
int siteId)
{
String hostname = ConnectionUtil.getHostnameOrAddress();
TableSaveFile savefile = null;
try
{
savefile =
getTableSaveFile(getSaveFileForReplicatedTable(tableName), 3, null);
assert(savefile.getCompleted());
}
catch (IOException e)
{
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, tableName, -1, "FAILURE",
"Unable to load table: " + tableName +
" error: " + e.getMessage());
return result;
}
VoltTable[] results = new VoltTable[] { constructResultsTable() };
results[0].addRow(m_hostId, hostname, m_siteId, tableName, -1,
"SUCCESS", "NO DATA TO DISTRIBUTE");
final Table new_catalog_table = getCatalogTable(tableName);
Boolean needsConversion = null;
try {
while (savefile.hasMoreChunks())
{
final org.voltcore.utils.DBBPool.BBContainer c = savefile.getNextChunk();
if (c == null) {
continue;//Should be equivalent to break
}
if (needsConversion == null) {
VoltTable old_table =
PrivateVoltTableFactory.createVoltTableFromBuffer(c.b.duplicate(), true);
needsConversion = SavedTableConverter.needsConversion(old_table, new_catalog_table);
}
byte compressedTable[];
if (needsConversion.booleanValue()) {
VoltTable old_table =
PrivateVoltTableFactory.createVoltTableFromBuffer(c.b , true);
VoltTable new_table = SavedTableConverter.convertTable(old_table,
new_catalog_table);
compressedTable = new_table.getCompressedBytes();
} else {
compressedTable = CompressionService.compressBuffer(c.b);
}
c.discard();
SynthesizedPlanFragment[] pfs = new SynthesizedPlanFragment[2];
int result_dependency_id = TableSaveFileState.getNextDependencyId();
pfs[0] = new SynthesizedPlanFragment();
pfs[0].fragmentId = SysProcFragmentId.PF_restoreSendReplicatedTable;
pfs[0].siteId = siteId;
pfs[0].outputDepId = result_dependency_id;
pfs[0].inputDepIds = new int[] {};
pfs[0].multipartition = false;
ParameterSet params = new ParameterSet();
params.setParameters(tableName, result_dependency_id, compressedTable);
pfs[0].parameters = params;
int final_dependency_id = TableSaveFileState.getNextDependencyId();
pfs[1] = new SynthesizedPlanFragment();
pfs[1].fragmentId =
SysProcFragmentId.PF_restoreSendReplicatedTableResults;
pfs[1].outputDepId = final_dependency_id;
pfs[1].inputDepIds = new int[] { result_dependency_id };
pfs[1].multipartition = false;
ParameterSet result_params = new ParameterSet();
result_params.setParameters(final_dependency_id);
pfs[1].parameters = result_params;
TRACE_LOG.trace("Sending replicated table: " + tableName + " to site id:" +
siteId);
results =
executeSysProcPlanFragments(pfs, final_dependency_id);
}
} catch (IOException e) {
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, tableName, -1, "FAILURE",
"Unable to load table: " + tableName +
" error: " + e.getMessage());
return result;
} catch (VoltTypeException e) {
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, tableName, -1, "FAILURE",
"Unable to load table: " + tableName +
" error: " + e.getMessage());
return result;
}
return results[0];
}
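// Read chunks of a partitioned table from the local save files, re-partition the rows
// with the current hashinator and send each partition's data to the sites that own it.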
private VoltTable performDistributePartitionedTable(String tableName,
int originalHostIds[],
int relevantPartitionIds[])
{
String hostname = ConnectionUtil.getHostnameOrAddress();
// XXX This is all very similar to the splitting code in
// LoadMultipartitionTable. Consider ways to consolidate later
Map<Long, Integer> sites_to_partitions =
new HashMap<Long, Integer>();
for (long site : VoltDB.instance().getCatalogContext().siteTracker.getAllLiveSites())
{
Integer partitionId = VoltDB.instance().getCatalogContext().siteTracker.getMailboxTracker().getPartitionForSite(site);
sites_to_partitions.put(site, partitionId);
}
try
{
initializeTableSaveFiles(
m_filePath,
m_fileNonce,
tableName,
originalHostIds,
relevantPartitionIds);
}
catch (IOException e)
{
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, tableName, relevantPartitionIds[0], "FAILURE",
"Unable to load table: " + tableName +
" error: " + e.getMessage());
return result;
}
VoltTable[] results = new VoltTable[] { constructResultsTable() };
results[0].addRow(m_hostId, hostname, m_siteId, tableName, 0,
"SUCCESS", "NO DATA TO DISTRIBUTE");
final Table new_catalog_table = getCatalogTable(tableName);
Boolean needsConversion = null;
org.voltcore.utils.DBBPool.BBContainer c = null;
try {
while (hasMoreChunks())
{
VoltTable table = null;
c = null;
c = getNextChunk();
if (c == null) {
continue;//Should be equivalent to break
}
if (needsConversion == null) {
VoltTable old_table = PrivateVoltTableFactory.createVoltTableFromBuffer(c.b.duplicate(), true);
needsConversion = SavedTableConverter.needsConversion(old_table, new_catalog_table);
}
final VoltTable old_table = PrivateVoltTableFactory.createVoltTableFromBuffer(c.b, true);
if (needsConversion) {
table = SavedTableConverter.convertTable(old_table,
new_catalog_table);
} else {
table = old_table;
}
byte[][] partitioned_tables =
createPartitionedTables(tableName, table);
if (c != null) {
c.discard();
}
int[] dependencyIds = new int[sites_to_partitions.size()];
SynthesizedPlanFragment[] pfs =
new SynthesizedPlanFragment[sites_to_partitions.size() + 1];
int pfs_index = 0;
for (long site_id : sites_to_partitions.keySet())
{
int partition_id = sites_to_partitions.get(site_id);
dependencyIds[pfs_index] =
TableSaveFileState.getNextDependencyId();
pfs[pfs_index] = new SynthesizedPlanFragment();
pfs[pfs_index].fragmentId =
SysProcFragmentId.PF_restoreSendPartitionedTable;
pfs[pfs_index].siteId = site_id;
pfs[pfs_index].multipartition = false;
pfs[pfs_index].outputDepId = dependencyIds[pfs_index];
pfs[pfs_index].inputDepIds = new int [] {};
ParameterSet params = new ParameterSet();
params.setParameters(tableName, partition_id,
dependencyIds[pfs_index],
partitioned_tables[partition_id]);
pfs[pfs_index].parameters = params;
++pfs_index;
}
int result_dependency_id = TableSaveFileState.getNextDependencyId();
pfs[sites_to_partitions.size()] = new SynthesizedPlanFragment();
pfs[sites_to_partitions.size()].fragmentId =
SysProcFragmentId.PF_restoreSendPartitionedTableResults;
pfs[sites_to_partitions.size()].multipartition = false;
pfs[sites_to_partitions.size()].outputDepId = result_dependency_id;
pfs[sites_to_partitions.size()].inputDepIds = dependencyIds;
ParameterSet params = new ParameterSet();
params.setParameters(result_dependency_id);
pfs[sites_to_partitions.size()].parameters = params;
results =
executeSysProcPlanFragments(pfs, result_dependency_id);
}
} catch (Exception e) {
VoltTable result = constructResultsTable();
result.addRow(m_hostId, hostname, m_siteId, tableName, relevantPartitionIds[0],
"FAILURE", "Unable to load table: " + tableName +
" error: " + e.getMessage());
return result;
}
return results[0];
}
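// Split the loaded table into one table per partition by hashing the partition column,
// then compress each per-partition table asynchronously.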
private byte[][] createPartitionedTables(String tableName,
VoltTable loadedTable) throws Exception
{
int number_of_partitions = m_cluster.getPartitions().size();
Table catalog_table = m_database.getTables().getIgnoreCase(tableName);
assert(!catalog_table.getIsreplicated());
// XXX blatantly stolen from LoadMultipartitionTable
// find the index and type of the partitioning attribute
int partition_col = catalog_table.getPartitioncolumn().getIndex();
VoltType partition_type =
VoltType.get((byte) catalog_table.getPartitioncolumn().getType());
// create a table for each partition
VoltTable[] partitioned_tables = new VoltTable[number_of_partitions];
for (int i = 0; i < partitioned_tables.length; i++) {
partitioned_tables[i] =
loadedTable.clone(loadedTable.getUnderlyingBufferSize() /
number_of_partitions);
}
// split the input table into per-partition units
while (loadedTable.advanceRow())
{
int partition = 0;
try
{
partition =
TheHashinator.hashToPartition(loadedTable.get(partition_col,
partition_type));
}
catch (Exception e)
{
e.printStackTrace();
throw new RuntimeException(e.getMessage());
}
// this adds the active row of loadedTable
partitioned_tables[partition].add(loadedTable);
}
/*
* Get all hands on deck for compression, do it async to minimize latency
*/
ArrayList<Future<byte[]>> compressTableTasks = new ArrayList<Future<byte[]>>();
for (int ii = 0; ii < number_of_partitions; ii++) {
compressTableTasks.add(partitioned_tables[ii].getCompressedBytesAsync());
}
byte compressedTables[][] = new byte[number_of_partitions][];
for (int ii = 0; ii < compressedTables.length; ii++) {
compressedTables[ii] = compressTableTasks.get(ii).get();
}
return compressedTables;
}
private Table getCatalogTable(String tableName)
{
return m_database.getTables().get(tableName);
}
private Cluster m_cluster;
private Database m_database;
private long m_siteId;
private int m_hostId;
private static volatile String m_filePath;
private static volatile String m_fileNonce;
}
|
package org.ovirt.engine.core.engineencryptutils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.Key;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.cert.Certificate;
import java.util.HashMap;
import java.util.Vector;
public class StoreUtils {
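// Minimal command-line parser: "-key=value" arguments go into a map keyed by the
// lower-cased name, everything else is kept as a positional parameter.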
private static class CLIParse {
private Vector<String> argv = new Vector<String>();
private HashMap<String, String> argsMap = new HashMap<String, String>();
private int argvIndex = 0;
public CLIParse(String[] args) {
for (int i = 0; i < args.length; i++) {
if (args[i].startsWith("-")) {
int ix = args[i].indexOf("=");
String key = (ix > 0) ? args[i].substring(1, ix) : args[i].substring(1);
String value = (ix > 0) ? args[i].substring(ix + 1) : "";
argsMap.put(key.toLowerCase(), value);
} else {
argv.addElement(args[i]);
}
}
}
public boolean hasArg(String arg) {
return argsMap.containsKey(arg.toLowerCase());
}
public String getArg(String arg) {
return (String) argsMap.get(arg.toLowerCase());
}
public String nextParam() {
String strReturn = null;
if (argvIndex < argv.size()) {
strReturn = (String) argv.elementAt(argvIndex++);
}
return strReturn;
}
}
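// Load the keystore and return the encoded bytes of the key stored under the given alias.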
private static byte[] pvk(String keystore, String password, String alias) {
byte[] bReturn = null;
FileInputStream input = null;
try {
KeyStore ks = KeyStore.getInstance("jks");
input = new FileInputStream(keystore);
ks.load(input, password.toCharArray());
Key key = ks.getKey(alias, password.toCharArray());
bReturn = key.getEncoded();
} catch (Exception e) {
e.printStackTrace();
} finally {
if (input != null) {
try {
input.close();
} catch (IOException e) {
//ignore
}
}
}
return bReturn;
}
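// Load the keystore and return the public key of the given alias formatted as an OpenSSH key string.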
private static String pubkey2ssh(String keystore, String password, String alias) {
String bReturn = null;
FileInputStream input = null;
try {
// Load the key store:
KeyStore ks = KeyStore.getInstance("jks");
input = new FileInputStream(keystore);
ks.load(input, password.toCharArray());
// Find the public key:
Key key = ks.getKey(alias, password.toCharArray());
if (key instanceof PrivateKey) {
Certificate cert = ks.getCertificate(alias);
key = cert.getPublicKey();
}
// Generate and return the SSH key string:
return OpenSSHUtils.getKeyString((PublicKey) key, alias);
} catch (Exception e) {
e.printStackTrace();
} finally {
if (input != null) {
try {
input.close();
} catch (IOException e) {
//ignore
}
}
}
return bReturn;
}
private static void printUsage() {
System.out.println("Usage:");
System.out.println("StoreUtils -[enc|dec] -store=path/to/keystore-file -pass=keystore-pass -string='string to be enc/decrypted' [-alias='cert alias']");
System.out.println("StoreUtils -pubkey2ssh -store=path/to/keystore-file -pass=keystore-pass -alias='cert alias'");
}
private static boolean validate(CLIParse parser) {
boolean fOK = true;
if ((!parser.hasArg("enc")) && (!parser.hasArg("dec")) && (!parser.hasArg("pvk")) && (!parser.hasArg("pubkey2ssh"))) {
System.out.println("What do you wish me to do? -please specify -enc, -dec or -pubkey2ssh");
fOK = false;
}
if ((parser.hasArg("enc") || parser.hasArg("dec")) &&
(!parser.hasArg("string"))) {
System.out.println("Can't find a string to work with :( -please specify -string='something'.");
fOK = false;
}
if ((!parser.hasArg("store")) || (!new File(parser.getArg("store")).exists())) {
System.out.println("Can't find a keystore to work with :( -please specify -store with the correct keystore path.");
fOK = false;
}
if (!parser.hasArg("pass")) {
System.out.println("Can't find a keystore pass :( -please specify -pass with the correct keystore password.");
fOK = false;
}
if (!fOK) {
printUsage();
}
return fOK;
}
public static void main(String[] args) {
int exitCode = 0;
try {
CLIParse parser = new CLIParse(args);
if (parser.hasArg("?") || parser.hasArg("help") || args.length == 0) {
printUsage();
return;
}
if (!validate(parser)) {
exitCode = 1;
return;
}
String alias = "engine";
if (parser.hasArg("alias")) {
alias = parser.getArg("alias");
}
if (parser.hasArg("enc")) {
System.out.println(
EncryptionUtils.encrypt(
parser.getArg("string"),
parser.getArg("store"),
parser.getArg("pass"),
alias
).trim().replace("\r\n", "")
);
} else if (parser.hasArg("dec")) {
System.out.println(
EncryptionUtils.decrypt(
parser.getArg("string"),
parser.getArg("store"),
parser.getArg("pass"),
alias
)
);
} else if (parser.hasArg("pvk")) {
System.out.write(
pvk(
parser.getArg("store"),
parser.getArg("pass"),
alias
)
);
} else if (parser.hasArg("pubkey2ssh")) {
System.out.println(
pubkey2ssh(
parser.getArg("store"),
parser.getArg("pass"),
alias
)
);
}
} catch (Exception e) {
System.out.println("Operation failed!");
exitCode = 1;
}
finally {
System.exit(exitCode);
}
}
}
|
package avis;
import avis.models.*;
import exception.*;
import java.util.HashMap;
import java.util.LinkedList;
public class SocialNetwork {
private HashMap<String, Item> items;
private HashMap<String, Member> members;
/**
* Initializes a <i>SocialNetwork</i>.
*/
public SocialNetwork() {
this.items = new HashMap<>();
this.members = new HashMap<>();
}
public static String getMapKeyForClass(Class<?> klass, String string) {
return klass.getName() + string.trim().toLowerCase();
}
/**
* Get the number of members of the <i>SocialNetwork</i>.
*
* @return the number of members.
*/
public int nbMembers() {
return members.size();
}
/**
* Get the number of films in the <i>SocialNetwork</i>.
*
* @return the number of films.
*/
public int nbFilms() {
return countItems(Film.class);
}
/**
* Get the number of books in the <i>SocialNetwork</i>.
*
* @return the number of books.
*/
public int nbBooks() {
return countItems(Book.class);
}
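/**
* Add a member to the <i>SocialNetwork</i>.
*
* @throws BadEntry if the pseudo, password or profile is invalid.
* @throws MemberAlreadyExists if a member with the same pseudo already exists.
*/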
public void addMember(String pseudo, String password, String profil) throws BadEntry, MemberAlreadyExists {
Member m = new Member(pseudo, password, profil);
String hashKey = getMapKeyForClass(Member.class, pseudo);
if (members.containsKey(hashKey)) {
throw new MemberAlreadyExists();
}
this.members.put(hashKey, m);
}
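/**
* Add a film to the <i>SocialNetwork</i> on behalf of an authenticated member.
*
* @throws BadEntry if one of the parameters is invalid.
* @throws NotMember if the pseudo/password pair does not match an existing member.
* @throws ItemFilmAlreadyExists if a film with the same title already exists.
*/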
public void addItemFilm(String pseudo, String password, String titre, String genre, String realisateur, String scenariste, int duree) throws BadEntry, NotMember, ItemFilmAlreadyExists {
findMatchingMember(pseudo, password);
Film film = new Film(titre, genre, realisateur, scenariste, duree);
String hashKey = getMapKeyForClass(Film.class, titre);
if (items.containsKey(hashKey)) {
throw new ItemFilmAlreadyExists();
}
this.items.put(hashKey, film);
}
public void addItemBook(String pseudo, String password, String titre, String genre, String auteur, int nbPages) throws BadEntry, NotMember, ItemBookAlreadyExists {
findMatchingMember(pseudo, password);
Book book = new Book(titre, genre, auteur, nbPages);
String hashKey = getMapKeyForClass(Book.class, titre);
if (items.containsKey(hashKey)) {
throw new ItemBookAlreadyExists();
}
this.items.put(hashKey, book);
}
public LinkedList<String> consultItems(String nom) throws BadEntry {
if (!Item.titleIsValid(nom)) {
throw new BadEntry("Title does not meet the requirements.");
}
LinkedList<String> itemsStrings = new LinkedList<>();
for (Class klass : new Class[]{Book.class, Film.class}) {
String hashKey = getMapKeyForClass(klass, nom);
if (items.containsKey(hashKey)) {
itemsStrings.add(items.get(hashKey).toString());
}
}
return itemsStrings;
}
public float reviewItemFilm(String pseudo, String password, String titre, float note, String commentaire) throws BadEntry, NotMember, NotItem {
return reviewItem(Film.class, pseudo, password, titre, note, commentaire);
}
public float reviewItemBook(String pseudo, String password, String titre, float note, String commentaire) throws BadEntry, NotMember, NotItem {
return reviewItem(Book.class, pseudo, password, titre, note, commentaire);
}
private float reviewItem(Class<?> klass, String pseudo, String password, String titre, float note, String commentaire) throws BadEntry, NotMember, NotItem {
Item item = findMatchingItem(klass, titre);
Member member = findMatchingMember(pseudo, password);
Review review = member.findReview(klass, titre);
if (review == null) {
review = new Review(item, member, commentaire, note);
member.addReview(review);
item.addReview(review);
} else {
review.update(commentaire, note);
}
return item.getRating();
}
// TODO: Javadoc
public float gradeReviewItemBook(String pseudo, String password, String reviewPseudo, String reviewTitle, float grade) throws NotReview, NotMember, BadEntry {
return gradeReview(pseudo, password, Book.class, reviewPseudo, reviewTitle, grade);
}
// TODO: Javadoc
public float gradeReviewItemFilm(String pseudo, String password, String reviewPseudo, String reviewTitle, float grade) throws NotReview, NotMember, BadEntry {
return gradeReview(pseudo, password, Film.class, reviewPseudo, reviewTitle, grade);
}
// TODO: Javadoc
private float gradeReview(String pseudo, String password, Class<?> reviewKlass, String reviewPseudo, String reviewTitle, float grade) throws BadEntry, NotReview, NotMember {
Review review = findMatchingReview(reviewKlass, reviewPseudo, reviewTitle);
Member member = findMatchingMember(pseudo, password);
ReviewGrade reviewGrade = member.findReviewGrade(review);
if (reviewGrade == null) {
reviewGrade = new ReviewGrade(review, member, grade);
member.addReviewGrade(reviewGrade);
review.addReviewGrade(reviewGrade);
} else {
reviewGrade.update(grade);
}
return review.getGrade();
}
private Item findMatchingItem(Class<?> klass, String title) throws NotItem, BadEntry {
if (!Item.titleIsValid(title)) {
throw new BadEntry("Item title does not meet the requirements.");
}
Item item = items.get(getMapKeyForClass(klass, title));
if (item == null) {
throw new NotItem("Item not found.");
}
return item;
}
private Review findMatchingReview(Class<?> klass, String pseudo, String title) throws NotReview, NotMember, BadEntry {
if (!(Member.pseudoIsValid(pseudo) && Item.titleIsValid(title))) {
throw new BadEntry("Pseudo and/or title does not meet the requirements.");
}
Member member = members.get(getMapKeyForClass(Member.class, pseudo));
if (member == null) {
throw new NotMember("Pseudo not found.");
}
Review review = member.findReview(klass, title);
if (review == null) {
throw new NotReview("Review not found.");
}
return review;
}
private Member findMatchingMember(String pseudo, String password) throws NotMember, BadEntry {
if (!(Member.pseudoIsValid(pseudo) && Member.passwordIsValid(password))) {
throw new BadEntry("Pseudo and/or password does not meet the requirements.");
}
Member member = members.get(getMapKeyForClass(Member.class, pseudo));
if (member == null) {
throw new NotMember("User does not exist.");
}
if (!member.checkCredentials(pseudo, password)) {
throw new NotMember("Invalid credentials.");
}
return member;
}
private int countItems(Class<?> klass) {
int count = 0;
for (String key : items.keySet()) {
if (key.startsWith(klass.getName())) {
count++;
}
}
return count;
}
public String toString() {
String output = "";
output += "SocialNetwork" + "\n";
output += nbMembers() + " members" + "\n";
output += nbBooks() + " books" + "\n";
output += nbFilms() + " films" + "\n";
return output;
}
}
|
package com.beautifulbeanbuilder.generators.usecase.generator;
import com.beautifulbeanbuilder.BBBJson;
import com.beautifulbeanbuilder.generators.usecase.UsecaseInfo;
import com.beautifulbeanbuilder.processor.AbstractGenerator;
import com.beautifulbeanbuilder.processor.AbstractJavaGenerator;
import com.central1.leanannotations.LeanEntryPoint;
import com.central1.leanannotations.LeanUsecase;
import com.google.common.collect.Lists;
import com.squareup.javapoet.*;
import io.reactivex.Observable;
import org.apache.commons.lang3.StringUtils;
import org.springframework.messaging.handler.annotation.DestinationVariable;
import org.springframework.messaging.simp.annotation.SubscribeMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.apache.commons.lang3.StringUtils.removeEnd;
public class UsecaseControllerGenerator extends AbstractJavaGenerator<LeanUsecase, UsecaseInfo>
{
@Override
public void write( UsecaseInfo info, TypeSpec.Builder objectToWrite, ProcessingEnvironment processingEnv ) throws IOException
{
if ( objectToWrite != null )
{
JavaFile javaFile = JavaFile.builder( getControllerPackage( info ), objectToWrite.build() ).build();
System.out.println( "Writing out object " + javaFile.packageName + "." + javaFile.typeSpec.name );
javaFile.writeTo( processingEnv.getFiler() );
}
}
private String getControllerName( UsecaseInfo info )
{
return info.typeElement.getSimpleName().toString().replace( "Usecase", "" ) + "Controller";
}
private String getEntitiesPackage( UsecaseInfo info )
{
return info.typePackage.replace( "usecases", "entities" );
}
private String getControllerPackage( UsecaseInfo info )
{
return info.typePackage.replace( "usecases", "controllers" );
}
@Override
public TypeSpec.Builder build( UsecaseInfo ic, Map<AbstractGenerator, Object> generatorBuilderMap, ProcessingEnvironment processingEnv )
throws IOException
{
final ClassName controller = ClassName.get( getControllerPackage( ic ), getControllerName( ic ) );
final TypeSpec.Builder classBuilder = buildClass( controller );
classBuilder.addAnnotation( RestController.class );
classBuilder.addAnnotation( LeanEntryPoint.class );
final FieldSpec usecaseField = FieldSpec.builder( TypeName.get( ic.typeElement.asType() ), "usecase", Modifier.FINAL, Modifier.PRIVATE ).build();
classBuilder.addField( usecaseField );
process( ic, classBuilder, processingEnv );
MethodSpec.Builder constructorBuilder = MethodSpec.constructorBuilder().addModifiers( Modifier.PUBLIC );
classBuilder.build().fieldSpecs.forEach(
f ->
{
constructorBuilder.addParameter( ParameterSpec.builder( f.type, f.name ).build() );
constructorBuilder.addStatement( "this." + f.name + "= " + f.name );
}
);
classBuilder.addMethod( constructorBuilder.addModifiers( Modifier.PUBLIC ).build() );
addEntityMapperMethod( classBuilder );
return classBuilder;
}
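// Generate the private getListObservable helper that turns an Observable of EntityRef lists
// into an Observable of entity lists by resolving each ref through the supplied Mapper.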
private void addEntityMapperMethod( TypeSpec.Builder classBuilder )
{
TypeVariableName t = TypeVariableName.get( "T" );
TypeVariableName r = TypeVariableName.get( "R", ClassName.get( "com.central1.lean.entities", "EntityRef" ) );
ClassName obClassName = ClassName.bestGuess( Observable.class.getName() );
ClassName listClassName = ClassName.bestGuess( List.class.getName() );
ParameterizedTypeName returnType = ParameterizedTypeName.get( obClassName, ParameterizedTypeName.get( listClassName, t ) );
ParameterizedTypeName param1Type = ParameterizedTypeName.get( obClassName, ParameterizedTypeName.get( listClassName, r ) );
ParameterizedTypeName param2Type = ParameterizedTypeName.get( ClassName.get( "com.central1.lean.mapping", "Mapper" ), r, t );
MethodSpec spec = MethodSpec.methodBuilder("getListObservable")
.addModifiers( Modifier.PRIVATE )
.addTypeVariable( t )
.addTypeVariable( r )
.addParameter( param1Type, "refList" )
.addParameter( param2Type,"mapper" )
.returns( returnType )
.addStatement( "return refList.switchMap( refs -> {\n"
+ "\t\t\tfinal $T<Observable<T>> iterable = refs.stream().map( mapper::getEntity )::iterator;\n"
+ "\t\t\treturn Observable.combineLatest( iterable, arr -> $T.asList( (T[]) arr ) );\n"
+ "\t\t} )", Iterable.class, Arrays.class )
.build();
classBuilder.addMethod( spec );
}
private TypeMirror getTypeMirror( String className, ProcessingEnvironment processingEnv )
{
return processingEnv.getTypeUtils().erasure(processingEnv.getElementUtils().getTypeElement( className ).asType());
}
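// For every exposed usecase method: Observable-returning methods become STOMP subscription
// endpoints, all other methods become HTTP POST endpoints.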
private void process( UsecaseInfo ic, TypeSpec.Builder classBuilder, ProcessingEnvironment processingEnv )
{
String stompReadPrefix = "/queue/" + ic.typeElement.getQualifiedName().toString() + "/";
String stompPocPrefix = "/stomp-poc/" + ic.typeElement.getQualifiedName().toString() + "/";
for ( ExecutableElement e : ic.getAllMethodsExposed() )
{
TypeMirror returnType = e.getReturnType();
// Check whether returnType is Observable.
TypeMirror obType = getTypeMirror( Observable.class.getName(), processingEnv );
if ( processingEnv.getTypeUtils().isAssignable( returnType, obType ) && TypeKind.DECLARED.equals( returnType.getKind() ) )
{
TypeMirror obParamType = ( (DeclaredType) returnType ).getTypeArguments().get( 0 );
TypeMirror listType = getTypeMirror( List.class.getName(), processingEnv );
if( processingEnv.getTypeUtils().isAssignable( obParamType, listType ) && TypeKind.DECLARED.equals( obParamType.getKind() ))
{
//It is Observable of list. Need to get the list's entity type.
TypeMirror entityRefType = ( (DeclaredType) obParamType ).getTypeArguments().get( 0 );
processStompMethod( ic, e, entityRefType, stompReadPrefix, true, classBuilder );
}
else
{
processStompMethod( ic, e, obParamType, stompReadPrefix, false, classBuilder );
}
}
else
{
processPostMethods( ic, e, stompPocPrefix, classBuilder );
}
}
}
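// Generate a @SubscribeMapping method that resolves the returned EntityRef(s) to entities
// through a per-ref-type Mapper field; for list results an extra per-entity lookup
// endpoint is generated as well.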
private void processStompMethod( UsecaseInfo info, ExecutableElement e, TypeMirror entityRefTm,
String stompPrefix, boolean isList, TypeSpec.Builder classBuilder )
{
//It is Observable of list. Need to get the list's entity type.
//TypeMirror entityRefType = ( (DeclaredType) obParamType ).getTypeArguments().get( 0 );
if ( StringUtils.endsWith( entityRefTm.toString(), "Ref" ) )
{
final ClassName entityType = ClassName.bestGuess( getFQEntityName( info, removeEnd( entityRefTm.toString(), "Ref") ) );
final ClassName entityRefType = ClassName.bestGuess( getFQEntityName( info, entityRefTm.toString() ) );
final ClassName obClassName = ClassName.bestGuess( Observable.class.getName() );
// Create a corresponding mapper class field
final ParameterizedTypeName parameterizedTypeName = ParameterizedTypeName.get(
ClassName.get( "com.central1.lean.mapping", "Mapper" ),
entityRefType, entityType
);
final String eRefName = entityRefTm.toString().substring( entityRefTm.toString().lastIndexOf( '.' ) + 1 );
final String mapperName = eRefName.substring( 0, 1 ).toLowerCase() + eRefName.substring( 1 ) + "Mapper";
final FieldSpec.Builder fieldBuilder = FieldSpec.builder( parameterizedTypeName, mapperName, Modifier.PRIVATE, Modifier.FINAL );
if( classBuilder.build().fieldSpecs.stream().noneMatch( f -> f.type.equals( fieldBuilder.build().type ) ) )
{
classBuilder.addField( fieldBuilder.build() );
// Automatically generate a mapper method for the entity
if( isList )
{
ParameterizedTypeName stompEntityReturnType = ParameterizedTypeName.get( obClassName, entityType );
String methodName = "get" + entityType.simpleName();
MethodSpec.Builder stompEntityMethod = MethodSpec.methodBuilder( methodName )
.addModifiers( Modifier.PUBLIC )
.returns( stompEntityReturnType )
.addAnnotation( AnnotationSpec.builder( SubscribeMapping.class )
.addMember( "value", "\"$L/{$L}\"", stompPrefix + methodName, "ref" )
.build() )
.addParameter( entityRefType, "ref" )
.addStatement( "return " + mapperName + ".getEntity( ref )" );
classBuilder.addMethod( stompEntityMethod.build() );
}
}
// Create stomp method
ParameterizedTypeName stompReturnType;
if ( isList )
{
stompReturnType = ParameterizedTypeName.get( obClassName, ParameterizedTypeName.get( ClassName.bestGuess( List.class.getName() ), entityType ) );
}
else
{
stompReturnType = ParameterizedTypeName.get( obClassName, entityType );
}
MethodSpec.Builder stompMethod = MethodSpec.methodBuilder( e.getSimpleName().toString() )
.addModifiers( Modifier.PUBLIC )
.returns( stompReturnType );
// This only handles read method with at most one parameter
if ( e.getParameters().size() == 1 )
{
VariableElement p = e.getParameters().get( 0 );
String pName = p.getSimpleName().toString();
stompMethod
.addAnnotation( AnnotationSpec.builder( SubscribeMapping.class )
.addMember( "value", "\"$L/{$L}\"", stompPrefix + e.getSimpleName(), pName )
.build() )
.addParameter( ParameterSpec.builder( getFQParameterClassName( info, p ), pName )
.addAnnotation( AnnotationSpec.builder( DestinationVariable.class ).addMember( "value", "$S", pName )
.build() )
.build() );
if( isList )
{
stompMethod.addStatement( "return getListObservable( this.usecase." + e.getSimpleName() + "(" + pName + "), " + mapperName + ")" );
}
else
{
stompMethod.addStatement( "return this.usecase." + e.getSimpleName() + "(" + pName + ").flatMap( " + mapperName + "::getEntity )" );
}
}
else
{
stompMethod
.addAnnotation( AnnotationSpec.builder( SubscribeMapping.class )
.addMember( "value", "$S", stompPrefix + e.getSimpleName() )
.build() );
if ( isList )
{
stompMethod.addStatement( "return getListObservable( this.usecase." + e.getSimpleName() + "(), " + mapperName +" )" );
}
else
{
stompMethod.addStatement( "return this.usecase." + e.getSimpleName() + "().flatMap( " + mapperName + "::getEntity )" );
}
}
classBuilder.addMethod( stompMethod.build() );
}
else
{
throw new IllegalArgumentException( "Usecase should only return Observable of EntityRef" );
}
}
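// Generate a @RequestMapping(POST) endpoint plus an inner @BBBJson request-bean interface
// whose getters carry the usecase method's parameters.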
private void processPostMethods( UsecaseInfo info, ExecutableElement e, String stompPrefix, TypeSpec.Builder classBuilder )
{
// Add inner class for request body bean def
String methodName = e.getSimpleName().toString();
String requestBodyName = methodName + "Request";
String requestBeanName = requestBodyName.substring( 0,1 ).toUpperCase() + requestBodyName.substring( 1 );
String requestVar = "requestBean";
TypeSpec.Builder rbeanBuilder = TypeSpec.interfaceBuilder( requestBeanName + "Def" ).addAnnotation( BBBJson.class );
List<String> paramCalls = Lists.newArrayList();
e.getParameters().forEach( p ->
{
String getterName = "get" + p.getSimpleName().toString().substring( 0, 1 ).toUpperCase() + p.getSimpleName().toString().substring( 1 );
MethodSpec.Builder getter = MethodSpec.methodBuilder( getterName )
.addModifiers( Modifier.PUBLIC, Modifier.ABSTRACT )
.returns( getFQParameterClassName( info, p ) );
rbeanBuilder.addMethod( getter.build() );
paramCalls.add( requestVar + "." + getterName + "()" );
});
classBuilder.addType( rbeanBuilder.build() );
MethodSpec.Builder postMethod = MethodSpec.methodBuilder( methodName )
.addAnnotation( AnnotationSpec.builder(RequestMapping.class )
.addMember( "value", "$S", stompPrefix + e.getSimpleName() )
.addMember( "method", "$T.POST", RequestMethod.class )
.build() )
.returns( TypeName.get( e.getReturnType() ) )
.addParameter( ParameterSpec.builder( ClassName.bestGuess( requestBeanName ), requestVar ).addAnnotation( RequestBody.class ).build() )
.addStatement( "return this.usecase." + methodName + "(" + StringUtils.join( paramCalls, ", " ) + ")" );
classBuilder.addMethod( postMethod.build() );
}
private ClassName getFQParameterClassName( UsecaseInfo info, VariableElement p )
{
return ClassName.bestGuess( getFQEntityName( info, p.asType().toString() ) );
}
private String getFQEntityName( UsecaseInfo info, String bestGuessClassName ) {
return ( bestGuessClassName.indexOf( '.' ) == -1 ) ? getEntitiesPackage( info ) + "." + bestGuessClassName : bestGuessClassName;
}
}
|
package gov.nih.nci.calab.service.common;
import gov.nih.nci.calab.db.HibernateDataAccess;
import gov.nih.nci.calab.domain.LabFile;
import gov.nih.nci.calab.service.util.CaNanoLabConstants;
import gov.nih.nci.calab.service.util.PropertyReader;
import gov.nih.nci.calab.service.util.StringUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.apache.struts.upload.FormFile;
/**
* Utility service for file retrieving and writing.
*
* @author pansu
*
*/
public class FileService {
Logger logger = Logger.getLogger(FileService.class);
/**
* Write content of the file to the given output stream
*
* @param fileId
* @param out
* @throws Exception
*/
public void writeFileContent(Long fileId, OutputStream out)
throws Exception {
HibernateDataAccess hda = HibernateDataAccess.getInstance();
try {
hda.open();
LabFile labFile = (LabFile) hda.load(LabFile.class, fileId);
String fileRoot = PropertyReader
.getProperty(CaNanoLabConstants.FILEUPLOAD_PROPERTY,
"fileRepositoryDir");
File fileObj = new File(fileRoot + File.separator
+ labFile.getPath());
InputStream in = new FileInputStream(fileObj);
byte[] bytes = new byte[32768];
int numRead = 0;
while ((numRead = in.read(bytes)) > 0) {
out.write(bytes, 0, numRead);
}
out.close();
} catch (SQLException e) {
throw new Exception(
"error getting file meta data from the database:" + e);
} catch (IOException e) {
throw new Exception(
"error getting file content from the file system and writing to the output stream:"
+ e);
} finally {
hda.close();
}
}
/**
* Get the content of the file into a byte array.
*
* @param fileId
	 * @return the file content as a byte array
* @throws Exception
*/
public byte[] getFileContent(Long fileId) throws Exception {
HibernateDataAccess hda = HibernateDataAccess.getInstance();
try {
hda.open();
LabFile labFile = (LabFile) hda.load(LabFile.class, fileId);
String fileRoot = PropertyReader
.getProperty(CaNanoLabConstants.FILEUPLOAD_PROPERTY,
"fileRepositoryDir");
File fileObj = new File(fileRoot + File.separator
+ labFile.getPath());
long fileLength = fileObj.length();
// You cannot create an array using a long type.
// It needs to be an int type.
// Before converting to an int type, check
// to ensure that file is not larger than Integer.MAX_VALUE.
if (fileLength > Integer.MAX_VALUE) {
throw new Exception(
"The file is too big. Byte array can't be longer than Java Integer MAX_VALUE");
}
// Create the byte array to hold the data
byte[] fileData = new byte[(int) fileLength];
// Read in the bytes
InputStream is = new FileInputStream(fileObj);
int offset = 0;
int numRead = 0;
while (offset < fileData.length
&& (numRead = is.read(fileData, offset, fileData.length
- offset)) >= 0) {
offset += numRead;
}
// Ensure all the bytes have been read in
if (offset < fileData.length) {
throw new IOException("Could not completely read file "
+ fileObj.getName());
}
// Close the input stream and return bytes
is.close();
return fileData;
} catch (SQLException e) {
throw new Exception(
"error getting file meta data from the database:" + e);
} catch (IOException e) {
throw new Exception(
"error getting file content from the file system and writing to the output stream:"
+ e);
} finally {
hda.close();
}
}
public void writeUploadedFile(FormFile uploadedFile, String filePath,
boolean addTimeStampPrefix) throws IOException {
File pathDir = new File(filePath);
if (!pathDir.exists())
pathDir.mkdirs();
		String fileName = uploadedFile.getFileName();
if (addTimeStampPrefix) {
fileName = prefixFileNameWithTimeStamp(fileName);
}
String fullFileName = filePath + File.separator + fileName;
FileOutputStream oStream = new FileOutputStream(new File(fullFileName));
writeFile(uploadedFile.getInputStream(), oStream);
}
public void writeFile(InputStream is, FileOutputStream os)
throws IOException {
byte[] bytes = new byte[32768];
int numRead = 0;
while ((numRead = is.read(bytes)) > 0) {
os.write(bytes, 0, numRead);
}
os.close();
}
public String prefixFileNameWithTimeStamp(String fileName) {
String newFileName = StringUtils.getTimeAsString() + "_" + fileName;
return newFileName;
}
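	// Usage sketch (assumed caller code, not part of this service): stream a
	// stored file back to a servlet response and save an uploaded Struts
	// FormFile. The repository path below is a placeholder.
	//
	//   FileService fileService = new FileService();
	//   response.setContentType("application/octet-stream");
	//   fileService.writeFileContent(fileId, response.getOutputStream());
	//
	//   fileService.writeUploadedFile(formFile, "/usr/local/caNanoLab/uploads", true);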
}
|
/**
*
* $Id: LogoutAction.java,v 1.6 2008-06-11 17:44:57 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.5 2006/05/10 14:15:39 schroedn
* New Features - Changes from code review
*
* Revision 1.4 2006/04/17 19:09:40 pandyas
* caMod 2.1 OM changes
*
*
*/
package gov.nih.nci.camod.webapp.action;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
public class LogoutAction extends BaseAction {
public ActionForward execute(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws IOException, ServletException
{
System.out.println( "<LogoutAction execute> Logging Off" );
request.getSession().setAttribute( "camod.loggedon.username", null );
request.getSession().invalidate();
// explicitly remove the JSESSIONID to prevent security issue
        Cookie[] cookieArray = request.getCookies();
        if (cookieArray != null) {
            for (int i = 0; i < cookieArray.length; i++) {
                log.info("Cookie name: " + cookieArray[i].getName());
                log.info("Cookie value: " + cookieArray[i].getValue());
                if (cookieArray[i].getName().equals("JSESSIONID")
                        || cookieArray[i].getValue().equals("JSESSIONID")) {
                    cookieArray[i].setValue(null);
                    log.info("removed value for JSESSIONID");
                }
            }
        }
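        // Sketch of the conventional way to actually drop the cookie on the
        // client (assumed intent, not part of the original action): expire it
        // and add it back to the response.
        //
        //   Cookie expired = new Cookie("JSESSIONID", "");
        //   expired.setMaxAge(0);
        //   expired.setPath(request.getContextPath());
        //   response.addCookie(expired);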
return mapping.findForward( "loggedOut" );
}
}
|
package edu.uml.cs.isense.api;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.ConnectException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Iterator;
import java.util.Locale;
import java.util.Random;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;
import org.json.JSONArray;
import org.json.JSONObject;
// imports for the uploadMedia hotfix
// import org.apache.http.entity.mime.MultipartEntityBuilder;
// import org.apache.http.HttpEntity;
// import org.apache.http.entity.ContentType;
import edu.uml.cs.isense.objects.RDataSet;
import edu.uml.cs.isense.objects.RPerson;
import edu.uml.cs.isense.objects.RProject;
import edu.uml.cs.isense.objects.RProjectField;
/**
* A class which allows Android applications to interface with the iSENSE
* website. Given a singleton instance of this class, functions can be called
* through an AsyncTask.
*
* @author Nick Ver Voort, Jeremy Poulin, and Mike Stowell of the iSENSE
* Android-Development Team
*
*/
public class API {
private final String version_major = "4";
private final String version_minor = "2";
private String version;
private static API instance = null;
private String baseURL = "";
private final String publicURL = "https://isenseproject.org/api/v1";
private final String devURL = "http://dev.isenseproject.org/api/v1";
String authToken = "";
RPerson currentUser;
private boolean usingLive = true;
public static final int CREATED_AT = 0;
public static final int UPDATED_AT = 1;
private String email, password;
public enum TargetType {
PROJECT, DATA_SET
};
/**
	 * Constructor not to be called by a user of the API. Users should call
	 * getInstance instead, which will call this constructor if necessary.
*/
private API() {
baseURL = publicURL;
}
/**
* Gets the one instance of the API class (instead of recreating a new one
* every time). Functions as a constructor if the current instance is null.
*
* @return current or new API
*/
public static API getInstance() {
if (instance == null) {
instance = new API();
}
return instance;
}
/**
* Log in to iSENSE. Stores email and password variables so authenticated
* functions later will work
*
* @param p_email
* The email address of the user to log in as
* @param p_password
* The password of the user to log in as
	 * @return An RPerson for the logged-in user on success; on failure, an
	 *         RPerson whose serverErrorMessage describes the error
	 */
public RPerson createSession(String p_email, String p_password) {
String reqResult = "";
try {
reqResult = makeRequest(baseURL, "users/myInfo", "email="
+ URLEncoder.encode(p_email, "UTF-8") + "&password="
+ URLEncoder.encode(p_password, "UTF-8"), "GET", null);
JSONObject j = new JSONObject(reqResult);
if (j.getString("name") != null) {
email = p_email;
password = p_password;
RPerson you = new RPerson();
you.name = j.getString("name");
you.gravatar = j.getString("gravatar");
currentUser = you;
you.successfulLogin = true;
return you;
} else {
// not a valid person so get error message
RPerson you = new RPerson();
JSONObject jobj = new JSONObject(reqResult);
you.serverErrorMessage = jobj.getString("msg");
return you;
}
} catch (Exception e) {
// Something went wrong so create a new RPerson object with
// default values and get the error message from the server
try {
RPerson you = new RPerson();
JSONObject jobj = new JSONObject(reqResult);
you.serverErrorMessage = jobj.getString("msg");
return you;
} catch (Exception e2) {
try {
RPerson you = new RPerson();
JSONObject jobj = new JSONObject(reqResult);
you.serverErrorMessage = jobj.getString("error");
return you;
} catch (Exception e3) {
RPerson you = new RPerson();
return you;
}
}
}
}
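	// Usage sketch (assumed caller code): these calls block, so on Android
	// they belong in an AsyncTask or other background thread. The credentials
	// below are placeholders.
	//
	//   API api = API.getInstance();
	//   RPerson user = api.createSession("someone@example.com", "secret");
	//   if (user.successfulLogin) {
	//       // authenticated calls such as uploadDataSet(...) may now be made
	//   } else {
	//       // user.serverErrorMessage holds the reason for the failure
	//   }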
/**
* Log out of iSENSE
*/
public void deleteSession() {
email = "";
password = "";
currentUser = null;
}
public RPerson getCurrentUser() {
return currentUser;
}
/**
* Verifies whether a given contributor key will work for a project
*
* @param projectId
* @param conKey
	 * @return True if the key is valid for that project, false if it is not
*/
public boolean validateKey(int projectId, String conKey) {
// FIX this function will never be implemented, remove it and rework
// apps to not need it
return true;
}
/**
* Retrieves multiple projects off of iSENSE.
*
* @param page
* Which page of results to start from. 1-indexed
* @param perPage
* How many results to display per page
* @param descending
* Whether to display the results in descending order (true) or
* ascending order (false)
	 * @param sortOn
	 *            Whether to sort results by creation time (CREATED_AT) or by
	 *            last update time (UPDATED_AT)
	 * @param search
	 *            A string to search all projects for
* @return An ArrayList of Project objects
*/
public ArrayList<RProject> getProjects(int page, int perPage,
boolean descending, int sortOn, String search) {
ArrayList<RProject> result = new ArrayList<RProject>();
try {
String order = descending ? "DESC" : "ASC";
String sortMode = "";
if (sortOn == CREATED_AT) {
sortMode = "created_at";
} else {
sortMode = "updated_at";
}
String reqResult = makeRequest(baseURL, "projects", "page=" + page
+ "&per_page=" + perPage + "&sort=" + sortMode + "&order="
+ order + "&search=" + URLEncoder.encode(search, "UTF-8"),
"GET", null);
JSONArray j = new JSONArray(reqResult);
for (int i = 0; i < j.length(); i++) {
JSONObject inner = j.getJSONObject(i);
RProject proj = new RProject();
proj.project_id = inner.getInt("id");
proj.name = inner.getString("name");
proj.url = inner.getString("url");
proj.featured = inner.getBoolean("featured");
proj.like_count = inner.getInt("likeCount");
proj.timecreated = inner.getString("createdAt");
proj.owner_name = inner.getString("ownerName");
proj.owner_url = inner.getString("ownerUrl");
result.add(proj);
}
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
/**
* Retrieves information about a single project on iSENSE
*
* @param projectId
* The ID of the project to retrieve
* @return A Project object or null if none is found
*/
public RProject getProject(int projectId) {
RProject proj = new RProject();
try {
String reqResult = makeRequest(baseURL, "projects/" + projectId,
"", "GET", null);
JSONObject j = new JSONObject(reqResult);
proj.project_id = j.getInt("id");
proj.name = j.getString("name");
proj.url = j.getString("url");
proj.featured = j.getBoolean("featured");
proj.like_count = j.getInt("likeCount");
proj.timecreated = j.getString("createdAt");
proj.owner_name = j.getString("ownerName");
proj.owner_url = j.getString("ownerUrl");
} catch (Exception e) {
e.printStackTrace();
return null;
}
return proj;
}
/**
* Creates a new project on iSENSE. The Field objects in the second
	 * parameter must have at least a type and a name, and can optionally have a unit.
* This is an authenticated function.
*
* @param projectName
* The name of the new project to be created
* @param fields
* An ArrayList of field objects that will become the fields on
* iSENSE.
* @return The ID of the created project
*/
public UploadInfo createProject(String projectName,
ArrayList<RProjectField> fields) {
UploadInfo info = new UploadInfo();
String projResult = "";
String fieldResult = "";
try {
JSONObject postData = new JSONObject();
postData.put("email", email);
postData.put("password", password);
postData.put("project_name", projectName);
projResult = makeRequest(baseURL, "projects", "", "POST", postData);
JSONObject jobj = new JSONObject(projResult);
info.projectId = jobj.getInt("id");
// Add Fields to Project
for (RProjectField rpf : fields) {
JSONObject mField = new JSONObject();
mField.put("project_id", info.projectId);
mField.put("field_type", rpf.type);
mField.put("name", rpf.name);
mField.put("unit", rpf.unit);
JSONObject postData2 = new JSONObject();
postData2.put("email", email);
postData2.put("password", password);
postData2.put("field", mField);
fieldResult = makeRequest(baseURL, "fields", "", "POST",
postData2);
JSONObject fieldObj = new JSONObject(fieldResult);
// Failed to add field to project, return failure and error
// message
if (fieldObj.getInt("id") == -1) {
try {
info.errorMessage = fieldObj.getString("msg");
info.success = false;
return info;
} catch (Exception e2) {
try {
info.errorMessage = fieldObj.getString("error");
info.success = false;
return info;
} catch (Exception e3) {
info.errorMessage = projResult;
}
}
}
}
info.success = true;
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(projResult);
info.errorMessage = jobj.getString("msg");
} catch (Exception e2) {
try {
JSONObject jobj = new JSONObject(projResult);
info.errorMessage = jobj.getString("error");
} catch (Exception e3) {
info.errorMessage = projResult;
}
}
}
info.projectId = -1;
info.success = false;
return info;
}
public boolean deleteProject(int projectId) {
try {
makeRequest(baseURL, "projects/" + projectId, "authenticity_token="
+ URLEncoder.encode(authToken, "UTF-8"), "DELETE", null);
return true;
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
/**
* Gets all of the fields associated with a project.
*
* @param projectId
* The unique ID of the project whose fields you want to see
* @return An ArrayList of ProjectField objects
*/
public ArrayList<RProjectField> getProjectFields(int projectId) {
ArrayList<RProjectField> rpfs = new ArrayList<RProjectField>();
try {
String reqResult = makeRequest(baseURL, "projects/" + projectId,
"?recur=true", "GET", null);
JSONObject j = new JSONObject(reqResult);
JSONArray j2 = j.getJSONArray("fields");
for (int i = 0; i < j2.length(); i++) {
JSONObject inner = j2.getJSONObject(i);
RProjectField rpf = new RProjectField();
rpf.field_id = inner.getInt("id");
rpf.name = inner.getString("name");
rpf.type = inner.getInt("type");
rpf.unit = inner.getString("unit");
rpf.restrictions = new ArrayList<String>();
System.out.println("Restrictions " + inner.get("restrictions"));
rpfs.add(rpf);
}
} catch (Exception e) {
e.printStackTrace();
}
return rpfs;
}
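	// Sketch (assumed caller code): uploadDataSet expects column-major data
	// keyed by field ID, so a common pattern is to look the IDs up here first.
	// The field name "Temperature" is only an example.
	//
	//   String temperatureFieldId = null;
	//   for (RProjectField f : api.getProjectFields(projectId)) {
	//       if (f.name.equals("Temperature")) {
	//           temperatureFieldId = String.valueOf(f.field_id);
	//       }
	//   }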
/**
	 * Retrieve a data set from iSENSE, with its data field filled in. The
	 * internal data set will be converted to column-major format, to make it
	 * compatible with the uploadDataSet function.
*
* @param dataSetId
* The unique ID of the data set to retrieve from iSENSE
* @return A DataSet object
*/
public RDataSet getDataSet(int dataSetId) {
RDataSet result = new RDataSet();
try {
String reqResult = makeRequest(baseURL, "data_sets/" + dataSetId,
"recur=true", "GET", null);
JSONObject j = new JSONObject(reqResult);
result.ds_id = j.getInt("id");
result.name = j.getString("name");
result.url = j.getString("url");
result.timecreated = j.getString("createdAt");
result.fieldCount = j.getInt("fieldCount");
result.datapointCount = j.getInt("datapointCount");
result.data = rowsToCols(new JSONObject().put("data",
j.getJSONArray("data")));
System.out.println(result.data);
result.project_id = j.getJSONObject("project").getInt("id");
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
/**
	 * Gets all the data sets associated with a project. The data sets returned
	 * by this function do not have their data field filled.
*
* @param projectId
* The project ID whose data sets you want
* @return An ArrayList of Data Set objects, with their data fields left
* null
*/
public ArrayList<RDataSet> getDataSets(int projectId) {
ArrayList<RDataSet> result = new ArrayList<RDataSet>();
try {
String reqResult = makeRequest(baseURL, "projects/" + projectId,
"recur=true", "GET", null);
JSONObject j = new JSONObject(reqResult);
JSONArray dataSets = j.getJSONArray("dataSets");
for (int i = 0; i < dataSets.length(); i++) {
RDataSet rds = new RDataSet();
JSONObject inner = dataSets.getJSONObject(i);
rds.ds_id = inner.getInt("id");
rds.name = inner.getString("name");
rds.url = inner.getString("url");
rds.timecreated = inner.getString("createdAt");
rds.fieldCount = inner.getInt("fieldCount");
rds.datapointCount = inner.getInt("datapointCount");
result.add(rds);
}
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
/**
* Gets all the data set ids associated with a project
*
* @param projectId
* The project ID whose data sets you want
* @return An ArrayList of Data Set IDs
*/
public ArrayList<Integer> getDataSetIDs(int projectId) {
ArrayList<Integer> result = new ArrayList<Integer>();
try {
String reqResult = makeRequest(baseURL, "projects/" + projectId,
"recur=true", "GET", null);
JSONObject j = new JSONObject(reqResult);
JSONArray dataSets = j.getJSONArray("dataSets");
for (int i = 0; i < dataSets.length(); i++) {
JSONObject inner = dataSets.getJSONObject(i);
result.add(inner.getInt("id"));
}
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
/**
* Gets all the data associated with a certain data set by field.
*
	 * @param ProjectID The project we care about
	 * @param field The field we care about
	 * @param dataSetId The data set we care about
*
* @return An ArrayList of strings containing the appropriate data
*/
public ArrayList<String> getFieldFromDataSet (int ProjectID, String field,
int dataSetId) {
String FieldID = null;
ArrayList<RDataSet> rdata = getFilledDataSets(ProjectID);
ArrayList<RProjectField> projectFields = getProjectFields(ProjectID);
ArrayList<String> fdata = new ArrayList<String>();
for (RProjectField f : projectFields) {
if (f.name.equals(field)) {
FieldID = f.field_id + "";
break;
}
}
for (RDataSet r : rdata) {
try {
if (r.ds_id == dataSetId) {
System.out.println("iSENSE: fdata:" + r.data.getString(FieldID));
JSONArray jadata = new JSONArray();
jadata = r.data.getJSONArray(FieldID);
for (int i = 0; i < jadata.length(); i++) {
fdata.add(jadata.getString(i));
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
return fdata;
}
/**
	 * Gets all the data sets associated with a project. The data sets returned
	 * by this function DO have their data field filled.
*
* @param projectId
* The project ID whose data sets you want
* @return An ArrayList of Data Set objects
*/
public ArrayList<RDataSet> getFilledDataSets(int projectId) {
ArrayList<RDataSet> result = new ArrayList<RDataSet>();
try {
String reqResult = makeRequest(baseURL, "projects/" + projectId,
"recur=true", "GET", null);
JSONObject j = new JSONObject(reqResult);
JSONArray dataSets = j.getJSONArray("dataSets");
for (int i = 0; i < dataSets.length(); i++) {
RDataSet rds = new RDataSet();
JSONObject inner = dataSets.getJSONObject(i);
rds.ds_id = inner.getInt("id");
rds.name = inner.getString("name");
rds.url = inner.getString("url");
rds.timecreated = inner.getString("createdAt");
rds.fieldCount = inner.getInt("fieldCount");
rds.datapointCount = inner.getInt("datapointCount");
JSONObject jo = new JSONObject();
jo.put("data", inner.getJSONArray("data"));
rds.data = rowsToCols(jo);
rds.project_id = projectId;
result.add(rds);
}
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
/**
*
* Gets all datasets associated with a project by field.
	 * @param ProjectID The project ID whose datasets you're looking for
* @param field The field we care about
*
* @return An ArrayList of Strings containing the appropriate data
*/
public ArrayList<String> getDataSetsByField(int ProjectID, String field) {
String FieldID = null;
ArrayList<RDataSet> rdata = getFilledDataSets(ProjectID);
ArrayList<RProjectField> projectFields = getProjectFields(ProjectID);
ArrayList<String> fdata = new ArrayList<String>();
for (RProjectField f : projectFields) {
if (f.name.equals(field)) {
FieldID = f.field_id + "";
break;
}
}
for (RDataSet r : rdata) {
try {
System.out.println("iSENSE: fdata:" + r.data.getString(FieldID));
JSONArray jadata = new JSONArray();
jadata = r.data.getJSONArray(FieldID);
for (int i = 0; i < jadata.length(); i++) {
fdata.add(jadata.getString(i));
}
} catch (Exception e) {
e.printStackTrace();
}
}
return fdata;
}
/**
* Upload a dataset to iSENSE while logged in
*
* @param projectId
* The ID of the project to upload data to
* @param data
* The data to be uploaded. Must be in column-major format to
* upload correctly
* @param datasetName
* The name of the dataset
* @return The integer ID of the newly uploaded dataset, or -1 if upload
* fails
*/
public UploadInfo uploadDataSet(int projectId, JSONObject data,
String datasetName) {
UploadInfo info = new UploadInfo();
datasetName += appendedTimeStamp();
String reqResult = "";
JSONObject requestData = new JSONObject();
try {
requestData.put("email", email);
requestData.put("password", password);
requestData.put("title", datasetName);
requestData.put("data", data);
reqResult = makeRequest(baseURL, "projects/" + projectId
+ "/jsonDataUpload", "", "POST", requestData);
JSONObject jobj = new JSONObject(reqResult);
info.dataSetId = jobj.getInt("id");
if (jobj.getInt("id") != -1) {
info.success = true;
}
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("msg");
} catch (Exception e2) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("error");
} catch (Exception e3) {
info.errorMessage = reqResult;
}
}
}
info.success = false;
info.dataSetId = -1;
return info;
}
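	// Usage sketch (assumed caller code): "data" must be column-major, i.e. a
	// JSONObject mapping field IDs to JSONArrays of values. The field IDs
	// "101" and "102" are placeholders.
	//
	//   JSONObject data = new JSONObject();
	//   data.put("101", new JSONArray().put("20.5").put("21.0"));
	//   data.put("102", new JSONArray().put("10:00").put("10:05"));
	//   UploadInfo result = api.uploadDataSet(projectId, data, "My Data Set");
	//   if (!result.success) {
	//       // result.errorMessage holds the server's explanation
	//   }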
/**
* Upload a dataset to iSENSE with a contributor key
*
* @param projectId
* The ID of the project to upload data to
* @param data
* The data to be uploaded. Must be in column-major format to
* upload correctly
* @param dataName
* The Dataset name
* @param conKey
* The Contributor Key
* @param conName
* The Contributor name
* @return The integer ID of the newly uploaded dataset, or -1 if upload
* fails
*/
public UploadInfo uploadDataSet(int projectId, JSONObject data,
String dataName, String conKey, String conName) {
UploadInfo info = new UploadInfo();
JSONObject requestData = new JSONObject();
String reqResult = "";
try {
requestData.put("contribution_key", conKey);
requestData.put("contributor_name", conName);
requestData.put("data", data);
requestData.put("title", dataName + appendedTimeStamp());
reqResult = makeRequest(baseURL, "projects/" + projectId
+ "/jsonDataUpload", "", "POST", requestData);
JSONObject jobj = new JSONObject(reqResult);
info.dataSetId = jobj.getInt("id");
if (jobj.getInt("id") != -1) {
info.success = true;
}
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("msg");
} catch (Exception e2) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("error");
} catch (Exception e3) {
info.errorMessage = reqResult;
}
}
}
info.success = false;
info.dataSetId = -1;
return info;
}
/**
* Append new rows of data to the end of an existing data set
* while logged in
*
* @param dataSetId
* The ID of the data set to append to
* @param newData
* The new data to append
*
* @return success or failure
*/
public UploadInfo appendDataSetData(int dataSetId, JSONObject newData) {
UploadInfo info = new UploadInfo();
String reqResult = "";
JSONObject requestData = new JSONObject();
try {
requestData.put("email", email);
requestData.put("password", password);
requestData.put("id", dataSetId);
requestData.put("data", newData);
reqResult = makeRequest(baseURL, "data_sets/"
+ "/append", "", "POST", requestData);
JSONObject jobj = new JSONObject(reqResult);
info.dataSetId = jobj.getInt("id");
if (jobj.getInt("id") != -1) {
info.success = true;
}
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("msg");
} catch (Exception e2) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("error");
} catch (Exception e3) {
info.errorMessage = reqResult;
}
}
}
info.success = false;
info.dataSetId = -1;
return info;
}
/**
* Append new rows of data to the end of an existing data set
* with a contributor key
*
* @param dataSetId
* The ID of the data set to append to
* @param newData
* The new data to append
* @param conKey
* The contributor key
*
* @return success or failure
*/
public UploadInfo appendDataSetData(int dataSetId, JSONObject newData, String conKey) {
UploadInfo info = new UploadInfo();
String reqResult = "";
JSONObject requestData = new JSONObject();
try {
requestData.put("contribution_key", conKey);
requestData.put("id", dataSetId);
requestData.put("data", newData);
reqResult = makeRequest(baseURL, "data_sets/"
+ "/append", "", "POST", requestData);
JSONObject jobj = new JSONObject(reqResult);
info.dataSetId = jobj.getInt("id");
if (jobj.getInt("id") != -1) {
info.success = true;
}
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("msg");
} catch (Exception e2) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("error");
} catch (Exception e3) {
info.errorMessage = reqResult;
}
}
}
info.success = false;
info.dataSetId = -1;
return info;
}
public UploadInfo createKey(String project, String keyname, String key) {
UploadInfo info = new UploadInfo();
String reqResult = "";
JSONObject requestData = new JSONObject();
JSONObject contrib_key = new JSONObject();
try {
contrib_key.put("project_id", project);
contrib_key.put("name", keyname);
contrib_key.put("key", key);
requestData.put("email", email);
requestData.put("password", password);
requestData.put("contrib_key", contrib_key);
reqResult = makeRequest(baseURL, "projects/" + project
+ "/add_key", "", "POST", requestData);
JSONObject jobj = new JSONObject(reqResult);
System.out.println(reqResult.toString());
if (jobj.getString("msg").equals("Success")) {
info.success = true;
}
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("msg");
} catch (Exception e2) {
try {
JSONObject jobj = new JSONObject(reqResult);
info.errorMessage = jobj.getString("error");
} catch (Exception e3) {
info.errorMessage = reqResult;
}
}
}
info.success = false;
info.dataSetId = -1;
return info;
}
/**
* Uploads a file to the media section of a project while logged in
*
* @param dataId
* The ID of the thing you're uploading to
* @param mediaToUpload
* The file to upload
* @param ttype
* The type of the target (project or dataset)
* @return The media object ID for the media uploaded or -1 if upload fails
*/
public UploadInfo uploadMedia(int dataId, File mediaToUpload,
TargetType ttype) {
UploadInfo info = new UploadInfo();
String output = "";
try {
URL url = new URL(baseURL + "/media_objects/");
HttpURLConnection connection = (HttpURLConnection) url
.openConnection();
connection.setDoOutput(true);
connection.setRequestMethod("POST");
MultipartEntity entity = new MultipartEntity();
entity.addPart(
"upload",
new FileBody(mediaToUpload, URLConnection
.guessContentTypeFromName(mediaToUpload.getName())));
entity.addPart("email", new StringBody(email));
entity.addPart("password", new StringBody(password));
entity.addPart("type", new StringBody(
(ttype == TargetType.PROJECT) ? "project" : "data_set"));
entity.addPart("id", new StringBody("" + dataId));
connection.setRequestProperty("Content-Type", entity
.getContentType().getValue());
connection.setRequestProperty("Accept", "application/json");
OutputStream out = connection.getOutputStream();
try {
entity.writeTo(out);
} finally {
out.close();
}
InputStream in = null;
try {
int response = connection.getResponseCode();
if (response >= 200 && response < 300) {
in = new BufferedInputStream(connection.getInputStream());
} else {
in = new BufferedInputStream(connection.getErrorStream());
}
} catch (FileNotFoundException e) {
e.printStackTrace();
info.mediaId = -1;
info.success = false;
info.errorMessage = "No Connection";
return info;
}
try {
ByteArrayOutputStream bo = new ByteArrayOutputStream();
int i = in.read();
while (i != -1) {
bo.write(i);
i = in.read();
}
output = bo.toString();
System.out.println("Returning from uploadProjectMedia: "
+ output);
JSONObject jobj = new JSONObject(output);
info.mediaId = jobj.getInt("id");
if (jobj.getInt("id") != -1) {
info.success = true;
}
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(output);
info.errorMessage = jobj.getString("msg");
info.mediaId = -1;
info.success = false;
return info;
} catch (Exception e2) {
JSONObject jobj = new JSONObject(output);
info.errorMessage = jobj.getString("error");
info.mediaId = -1;
info.success = false;
return info;
}
} finally {
in.close();
}
} catch (Exception e) {
e.printStackTrace();
}
info.mediaId = -1;
info.success = false;
return info;
}
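	// Usage sketch (assumed caller code): attach a photo to an existing data
	// set while logged in. The file path is a placeholder.
	//
	//   File photo = new File("/sdcard/DCIM/sample.jpg");
	//   UploadInfo media = api.uploadMedia(dataSetId, photo, TargetType.DATA_SET);
	//   if (media.mediaId == -1) {
	//       // upload failed; see media.errorMessage
	//   }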
/**
* Uploads a file to the media section of a project with a contributor key
*
* Note: this differs from the iSenseDev repo, as it has been hotfixed to
* work with uploadDataSetWithPhoto in the iSENSEPublisher component.
*
* @param projectId
* The ID of the thing you're uploading to
* @param mediaToUpload
* The file to upload
* @param ttype
* The type of the target (project or dataset)
* @param conKey
* The contributor key
* @param conName
* The contributor name
* @return The media object ID for the media uploaded or -1 if upload fails
*/
public UploadInfo uploadMedia(int projectId, File mediaToUpload,
TargetType ttype, String conKey, String conName) {
UploadInfo info = new UploadInfo();
String output = "";
try {
URL url = new URL(baseURL + "/media_objects/");
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setDoOutput(true);
connection.setRequestMethod("POST");
MultipartEntity entity = new MultipartEntity();
entity.addPart(
"upload",
new FileBody(mediaToUpload, URLConnection
.guessContentTypeFromName(mediaToUpload.getName())));
entity.addPart("contribution_key", new StringBody(conKey));
entity.addPart("contributor_name", new StringBody(conName));
entity.addPart("type", new StringBody(
(ttype == TargetType.PROJECT) ? "project" : "data_set"));
entity.addPart("id", new StringBody("" + projectId));
connection.setRequestProperty("Content-Type", entity.getContentType().getValue());
connection.setRequestProperty("Accept", "application/json");
OutputStream out = connection.getOutputStream();
try {
entity.writeTo(out);
} finally {
out.close();
}
InputStream in = null;
try {
int response = connection.getResponseCode();
if (response >= 200 && response < 300) {
in = new BufferedInputStream(connection.getInputStream());
} else {
in = new BufferedInputStream(connection.getErrorStream());
}
} catch (FileNotFoundException e) {
e.printStackTrace();
info.errorMessage = "No Connection";
info.mediaId = -1;
return info;
}
try {
ByteArrayOutputStream bo = new ByteArrayOutputStream();
int i = in.read();
while (i != -1) {
bo.write(i);
i = in.read();
}
output = bo.toString();
System.out.println("Returning from uploadProjectMedia: "
+ output);
JSONObject jobj = new JSONObject(output);
info.mediaId = jobj.getInt("id");
if (jobj.getInt("id") != -1) {
info.success = true;
}
return info;
} catch (Exception e) {
try {
JSONObject jobj = new JSONObject(output);
info.errorMessage = jobj.getString("msg");
info.mediaId = -1;
return info;
} catch (Exception e2) {
JSONObject jobj = new JSONObject(output);
info.errorMessage = jobj.getString("error");
info.mediaId = -1;
return info;
}
} finally {
in.close();
}
} catch (Exception e) {
e.printStackTrace();
}
info.mediaId = -1;
info.success = false;
return info;
}
/**
	 * Makes an HTTP request for JSON-formatted data. This call is blocking,
* and so functions that call this function must not be run on the UI
* thread.
*
* @param baseURL
* The base of the URL to which the request will be made
* @param path
* The path to append to the request URL
* @param parameters
* Parameters separated by ampersands (&)
* @param reqType
* The request type as a string (i.e. GET or POST)
* @return A String dump of a JSONObject representing the requested data
*/
private String makeRequest(String baseURL, String path, String parameters,
String reqType, JSONObject postData) {
byte[] mPostData = null;
int mstat = 0;
try {
URL url = new URL(baseURL + "/" + path + "?" + parameters);
System.out.println("Connect to: " + url);
HttpURLConnection urlConnection = (HttpURLConnection) url
.openConnection();
if (!reqType.equals("GET")) {
urlConnection.setDoOutput(true);
}
urlConnection.setRequestMethod(reqType);
urlConnection.setRequestProperty("Accept", "application/json");
//urlConnection.setDoOutput(true);
if (postData != null) {
System.out.println("Post data: " + postData);
mPostData = postData.toString().getBytes();
urlConnection.setRequestProperty("Content-Length",
Integer.toString(mPostData.length));
urlConnection.setRequestProperty("Content-Type",
"application/json");
OutputStream out = urlConnection.getOutputStream();
out.write(mPostData);
out.close();
}
mstat = urlConnection.getResponseCode();
InputStream in;
System.out.println("Status: " + mstat);
if (mstat >= 200 && mstat < 300) {
in = new BufferedInputStream(urlConnection.getInputStream());
} else {
in = new BufferedInputStream(urlConnection.getErrorStream());
}
try {
ByteArrayOutputStream bo = new ByteArrayOutputStream();
int i = in.read();
while (i != -1) {
bo.write(i);
i = in.read();
}
return bo.toString();
} catch (IOException e) {
return "";
} finally {
in.close();
}
} catch (ConnectException ce) {
System.err
.println("Connection failed: ENETUNREACH (network not reachable)");
ce.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
return "Error: status " + mstat;
}
/**
	 * Switches the API instance between using the public iSENSE and the
* developer iSENSE
*
* @param use
* Whether or not to use the developer iSENSE
*/
public void useDev(boolean use) {
baseURL = use ? devURL : publicURL;
usingLive = !use;
}
/**
	 * Returns whether or not the API is pointed at the production (live)
	 * iSENSE website.
	 *
	 * @return True if the API is using the live website, false if it is using
	 *         the development website or a custom base URL.
*/
public boolean isLive() {
return usingLive;
}
/**
* Directly set the base URL, rather than using the dev or production URLs
*
* @param newUrl
* The URL to use as a base
*/
public void setBaseUrl(String newUrl) {
baseURL = newUrl + "/api/v1";
usingLive = false;
}
/**
* Reformats a row-major JSONObject into a column-major one
*
* @param original
* The row-major formatted JSONObject
* @return A column-major reformatted version of the original JSONObject
*/
public JSONObject rowsToCols(JSONObject original) {
JSONObject reformatted = new JSONObject();
try {
JSONArray inner = original.getJSONArray("data");
for (int i = 0; i < inner.length(); i++) {
JSONObject innermost = (JSONObject) inner.get(i);
Iterator<?> keys = innermost.keys();
while (keys.hasNext()) {
String currKey = (String) keys.next();
JSONArray currArray = new JSONArray();
if (reformatted.has(currKey)) {
currArray = reformatted.getJSONArray(currKey);
}
currArray.put(innermost.get(currKey));
// currArray.put(innermost.getInt(currKey));
reformatted.put(currKey, currArray);
}
}
} catch (Exception e) {
e.printStackTrace();
}
return reformatted;
}
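	// Worked example of the conversion (values are illustrative only):
	//
	//   input:  {"data": [{"101": 20.5, "102": "10:00"},
	//                     {"101": 21.0, "102": "10:05"}]}
	//   output: {"101": [20.5, 21.0], "102": ["10:00", "10:05"]}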
/**
* Creates a unique date and timestamp used to append to data sets uploaded
* to the iSENSE website to ensure every data set has a unique identifier.
*
* @return A pretty formatted date and timestamp
*/
private String appendedTimeStamp() {
SimpleDateFormat dateFormat = new SimpleDateFormat(
"MM/dd/yy, HH:mm:ss.SSS", Locale.US);
Calendar cal = Calendar.getInstance();
Random r = new Random();
int rMicroseconds = r.nextInt(1000);
String microString = "";
if (rMicroseconds < 10)
microString = "00" + rMicroseconds;
else if (rMicroseconds < 100)
microString = "0" + rMicroseconds;
else
microString = "" + rMicroseconds;
return " - " + dateFormat.format(cal.getTime()) + microString;
}
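	// Example of the appended suffix (date and random digits are illustrative):
	//   " - 06/17/15, 14:03:27.482519"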
/**
* Gets the current API version
*
* @return API version in MAJOR.MINOR format
*/
public String getVersion() {
version = version_major + "." + version_minor;
return version;
}
}
|
package info.meoblast001.thugaim.npc;
import android.graphics.Point;
import info.meoblast001.thugaim.Station;
import info.meoblast001.thugaim.StationGraph;
import info.meoblast001.thugaim.engine.Actor;
import info.meoblast001.thugaim.engine.Engine;
import info.meoblast001.thugaim.engine.World;
import info.meoblast001.thugaim.R;
/**
Simple NPC which travels randomly through the StationGraph unless near the
player, in which case it follows the player, or at an adjacent station to the
player's station, in which case it moves to that station.
*/
public class HydrogenFighter extends NPCVehicle
{
private static final int MAX_HEALTH = 5;
private static final float FREE_SURROUNDING_SPACE_AT_INIT = 20.0f;
private StationGraph station_graph = null;
private Station target_station = null;
public HydrogenFighter(Engine engine, float x, float y, float rotation,
StationGraph station_graph)
{
super(engine, R.drawable.hydrogen, x, y, rotation, MAX_HEALTH,
station_graph);
this.station_graph = station_graph;
setSpeed(0.8f);
}
/**
    Generates all of the Hydrogen fighters in a level at random positions.
@param engine The game engine.
@param world The current world.
@param play_size Size of play area.
@param station_graph The current station graph.
    @param num_fighters The number of fighters to create.
*/
public static void generateAll(Engine engine, World world, int play_size,
StationGraph station_graph, int num_fighters)
{
for (int i = 0; i < num_fighters; ++i)
{
inner_loop: while (true)
{
HydrogenFighter fighter = new HydrogenFighter(engine,
(float) Math.random() * play_size - (play_size / 2),
(float) Math.random() * play_size - (play_size / 2),
(float) (Math.random() * Math.PI / 180.0), station_graph);
Point fighter_size = fighter.getSize();
float avg_fighter_size = (fighter_size.x + fighter_size.y) / 2.0f;
if (!world.hasActorAt(fighter.getPosition(), avg_fighter_size / 2.0f +
FREE_SURROUNDING_SPACE_AT_INIT))
{
world.insertActor(fighter);
break inner_loop;
}
else; //Continue trying.
}
}
}
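  // Usage sketch (assumed caller code from level setup; play size and fighter
  // count are placeholders):
  //
  //   HydrogenFighter.generateAll(engine, world, 1000, stationGraph, 12);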
@Override
public void update(long millisecond_delta, float rotation, boolean tapped)
{
if (getWorld() == null || getClosestStation() == null)
return;
Actor player = getWorld().getActor("player");
if (player == null)
return;
//Set initial target station.
if (target_station == null)
target_station = getClosestStation();
//If close to target station, change target to a random adjacent station.
if (distance(target_station) < 50.0f)
{
Station[] adjacent_stations = station_graph.getAdjacentStations(
target_station);
if (adjacent_stations != null && adjacent_stations.length > 0)
target_station = adjacent_stations[(int) Math.floor(Math.random() *
adjacent_stations.length)];
else
//Target station removed from graph.
target_station = station_graph.getClosestStation(this);
}
boolean will_fire = false;
//The amount the fighter would need to rotate to face the player.
float rotation_to_player = crossProduct(getRotationUnitVector(),
getUnitVectorToTarget(getPosition(), player.getPosition()));
final float FIRE_ANGLE_RADIANS = (float) (30.0f * Math.PI / 180.0f);
//If target is within firing angle and is near, fire.
if (rotation_to_player > -FIRE_ANGLE_RADIANS &&
rotation_to_player < FIRE_ANGLE_RADIANS &&
distance(player) < 200.0f)
will_fire = true;
//If another NPC is within firing angle and is nearer than the player,
//cancel fire.
//TODO: Make this more efficient. Only iterate over a subset of nearby
// actors.
if (will_fire)
{
for (Actor npc : getWorld().getActors())
{
if (!(npc instanceof NPCVehicle) || npc == this)
continue;
float rotation_to_npc = crossProduct(getRotationUnitVector(),
getUnitVectorToTarget(getPosition(), npc.getPosition()));
if (rotation_to_npc > -FIRE_ANGLE_RADIANS &&
rotation_to_npc < FIRE_ANGLE_RADIANS &&
distance(npc) < distance(player))
will_fire = false;
}
}
if (will_fire)
fire();
if (distance(player) < 225.0f)
pursue(player.getPosition(), player.getRotation(), millisecond_delta);
else
seek(target_station.getPosition(), millisecond_delta);
super.update(millisecond_delta, rotation, tapped);
}
}
|
// $Id: Getdown.java,v 1.27 2004/07/30 18:14:21 mdb Exp $
package com.threerings.getdown.launcher;
import java.awt.BorderLayout;
import java.awt.EventQueue;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.image.BufferedImage;
import javax.imageio.ImageIO;
import javax.swing.JFrame;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.URL;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
// import org.apache.commons.io.TeeOutputStream;
import com.samskivert.swing.util.SwingUtil;
import com.samskivert.text.MessageUtil;
import com.samskivert.util.StringUtil;
import com.threerings.getdown.Log;
import com.threerings.getdown.data.Application;
import com.threerings.getdown.data.Resource;
import com.threerings.getdown.tools.Patcher;
import com.threerings.getdown.util.ProgressObserver;
import com.threerings.getdown.util.ProxyUtil;
/**
* Manages the main control for the Getdown application updater and
* deployment system.
*/
public class Getdown extends Thread
implements Application.StatusDisplay
{
public Getdown (File appDir)
{
super("Getdown");
try {
_msgs = ResourceBundle.getBundle("com.threerings.getdown.messages");
} catch (Exception e) {
// welcome to hell, where java can't cope with a classpath
// that contains jars that live in a directory that contains a
// !, at least the same bug happens on all platforms
String dir = appDir.toString();
if (dir.equals(".")) {
dir = System.getProperty("user.dir");
}
String errmsg = "The directory in which this application is " +
"installed:\n" + dir + "\nis invalid. The directory " +
"must not contain the '!' character. Please reinstall.";
updateStatus(errmsg);
_dead = true;
}
_app = new Application(appDir);
_startup = System.currentTimeMillis();
}
public void run ()
{
// if we have no messages, just bail because we're hosed; the
// error message will be displayed to the user already
if (_msgs == null) {
return;
}
try {
            // first parse our application deployment file
try {
_ifc = _app.init(true);
} catch (IOException ioe) {
Log.warning("Failed to parse 'getdown.txt': " + ioe);
_app.attemptRecovery(this);
                // and re-initialize
_ifc = _app.init(true);
// now force our UI to be recreated with the updated info
createInterface(true);
}
for (int ii = 0; ii < MAX_LOOPS; ii++) {
// make sure we have the desired version and that the
// metadata files are valid...
setStatus("m.validating", -1, -1L, false);
if (_app.verifyMetadata(this)) {
Log.info("Application requires update.");
update();
// loop back again and reverify the metadata
continue;
}
// now verify our resources...
setStatus("m.validating", -1, -1L, false);
List failures = _app.verifyResources(_progobs);
if (failures == null) {
Log.info("Resources verified.");
launch();
return;
}
// redownload any that are corrupt or invalid...
Log.info(failures.size() + " rsrcs require update.");
download(failures);
// now we'll loop back and try it all again
}
Log.warning("Pants! We couldn't get the job done.");
throw new IOException("m.unable_to_repair");
} catch (Exception e) {
Log.logStackTrace(e);
String msg = e.getMessage();
if (msg == null) {
msg = "m.unknown_error";
} else if (!msg.startsWith("m.")) {
// try to do something sensible based on the type of error
if (e instanceof FileNotFoundException) {
msg = MessageUtil.tcompose("m.missing_resource", msg);
} else {
msg = MessageUtil.tcompose("m.init_error", msg);
}
}
updateStatus(msg);
_dead = true;
}
}
// documentation inherited from interface
public void updateStatus (String message)
{
setStatus(message, -1, -1L, true);
}
/**
* Called if the application is determined to be of an old version.
*/
protected void update ()
throws IOException
{
// first clear all validation markers
_app.clearValidationMarkers();
// attempt to download the patch file
final Resource patch = _app.getPatchResource();
if (patch != null) {
// download the patch file...
ArrayList list = new ArrayList();
list.add(patch);
download(list);
// and apply it...
updateStatus("m.patching");
try {
Patcher patcher = new Patcher();
patcher.patch(patch.getLocal().getParentFile(),
patch.getLocal(), _progobs);
} catch (Exception e) {
Log.warning("Failed to apply patch.");
Log.logStackTrace(e);
}
// lastly clean up the patch file
if (!patch.getLocal().delete()) {
Log.warning("Failed to delete '" + patch + "'.");
patch.getLocal().deleteOnExit();
}
}
// if the patch resource is null, that means something was booched
// in the application, so we skip the patching process but update
// the metadata which will result in a "brute force" upgrade
// finally update our metadata files...
_app.updateMetadata();
// ...and reinitialize the application
_ifc = _app.init(true);
}
/**
* Called if the application is determined to require resource
* downloads.
*/
protected void download (List resources)
{
final Object lock = new Object();
// create our user interface
createInterface(false);
// create a downloader to download our resources
Downloader.Observer obs = new Downloader.Observer() {
public void resolvingDownloads () {
updateStatus("m.resolving");
}
public void downloadProgress (int percent, long remaining) {
setStatus("m.downloading", percent, remaining, true);
if (percent == 100) {
synchronized (lock) {
lock.notify();
}
}
}
public void downloadFailed (Resource rsrc, Exception e) {
updateStatus(
MessageUtil.tcompose("m.failure", e.getMessage()));
Log.warning("Download failed [rsrc=" + rsrc + "].");
Log.logStackTrace(e);
synchronized (lock) {
lock.notify();
}
}
};
Downloader dl = new Downloader(resources, obs);
dl.start();
// now wait for it to complete
synchronized (lock) {
try {
lock.wait();
} catch (InterruptedException ie) {
Log.warning("Waitus interruptus " + ie + ".");
}
}
}
/**
* Called to launch the application if everything is determined to be
* ready to go.
*/
protected void launch ()
{
setStatus("m.launching", 100, -1L, false);
try {
Process proc = _app.createProcess();
// on Windows 98 we need to stick around and read the output
// of stdout lest the process fills its output buffer and
// chokes, yay!
if (System.getProperty("os.name").indexOf("Windows 98") != -1) {
Log.info("Sticking around to read stderr on Win98...");
InputStream stderr = proc.getErrorStream();
BufferedReader reader = new BufferedReader(
new InputStreamReader(stderr));
String line = null;
while ((line = reader.readLine()) != null) {
// nothing doing!
}
Log.info("Process exited: " + proc.waitFor());
}
// if we have a UI open and we haven't been around for at
// least 5 seconds, don't stick a fork in ourselves straight
// away but give our lovely user a chance to see what we're
// doing
long uptime = System.currentTimeMillis() - _startup;
if (_frame != null && uptime < MIN_EXIST_TIME) {
try {
Thread.sleep(MIN_EXIST_TIME - uptime);
} catch (Exception e) {
}
}
System.exit(0);
} catch (Exception e) {
Log.logStackTrace(e);
}
}
/**
* Creates our user interface, which we avoid doing unless we actually
* have to update something.
*/
protected void createInterface (boolean force)
{
if (_frame != null && !force) {
return;
}
// if we have a background image, load it up
BufferedImage bgimg = null;
if (!StringUtil.blank(_ifc.background)) {
File bgpath = _app.getLocalPath(_ifc.background);
try {
bgimg = ImageIO.read(bgpath);
} catch (IOException ioe) {
Log.warning("Failed to read UI background [path=" + bgpath +
", error=" + ioe + "].");
}
}
// create our user interface, and display it
String title = StringUtil.blank(_ifc.name) ? "" : _ifc.name;
if (_frame == null) {
_frame = new JFrame(title);
_frame.addWindowListener(new WindowAdapter() {
public void windowClosing (WindowEvent evt) {
if (_dead || _frame.getState() == JFrame.ICONIFIED) {
System.exit(0);
} else {
_frame.setState(JFrame.ICONIFIED);
}
}
});
} else {
_frame.setTitle(title);
_frame.getContentPane().removeAll();
}
_frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
_status = new StatusPanel(_msgs, _ifc, bgimg);
_frame.getContentPane().add(_status, BorderLayout.CENTER);
_frame.pack();
SwingUtil.centerWindow(_frame);
_frame.show();
}
protected void setStatus (final String message, final int percent,
final long remaining, boolean createUI)
{
if (_status == null && createUI) {
createInterface(false);
}
if (_status != null) {
EventQueue.invokeLater(new Runnable() {
public void run () {
if (message != null) {
_status.setStatus(message);
}
if (percent >= 0) {
_status.setProgress(percent, remaining);
}
}
});
}
}
public static void main (String[] args)
{
// maybe they specified the appdir in a system property
String adarg = System.getProperty("appdir");
// if not, check for a command line argument
if (StringUtil.blank(adarg)) {
if (args.length != 1) {
System.err.println("Usage: java -jar getdown.jar app_dir");
System.exit(-1);
}
adarg = args[0];
}
// ensure a valid directory was supplied
File appDir = new File(adarg);
if (!appDir.exists() || !appDir.isDirectory()) {
Log.warning("Invalid app_dir '" + adarg + "'.");
System.exit(-1);
}
// pipe our output into a file in the application directory
File log = new File(appDir, "launcher.log");
try {
FileOutputStream fout = new FileOutputStream(log);
System.setOut(new PrintStream(fout));
System.setErr(new PrintStream(fout));
} catch (IOException ioe) {
Log.warning("Unable to redirect output to '" + log + "': " + ioe);
}
// attempt to obtain our proxy information; it may be specified
// via system properties or we can autodetect it
if (System.getProperty("http.proxyHost") == null) {
try {
URL sample = new URL("http:
if (ProxyUtil.detectProxy(sample)) {
String host = ProxyUtil.getProxyIP();
String port = ProxyUtil.getProxyPort();
ProxyUtil.configureProxy(host, port);
}
} catch (Exception e) {
Log.warning("Failed to detect proxy: " + e);
}
}
// record a few things for posterity
Log.info("
Log.info("-- OS Name: " + System.getProperty("os.name"));
Log.info("-- OS Arch: " + System.getProperty("os.arch"));
Log.info("-- OS Vers: " + System.getProperty("os.version"));
Log.info("-- Java Vers: " + System.getProperty("java.version"));
Log.info("-- Java Home: " + System.getProperty("java.home"));
Log.info("-- User Name: " + System.getProperty("user.name"));
Log.info("-- User Home: " + System.getProperty("user.home"));
Log.info("-- Cur dir: " + System.getProperty("user.dir"));
Log.info("-- Proxy Host: " + System.getProperty("http.proxyHost"));
Log.info("-- Proxy Port: " + System.getProperty("http.proxyPort"));
Log.info("
try {
Getdown app = new Getdown(appDir);
app.start();
} catch (Exception e) {
Log.logStackTrace(e);
}
}
/** Used to pass progress on to our user interface. */
protected ProgressObserver _progobs = new ProgressObserver() {
public void progress (final int percent) {
setStatus(null, percent, -1L, false);
}
};
protected Application _app;
protected Application.UpdateInterface _ifc =
new Application.UpdateInterface();
protected ResourceBundle _msgs;
protected JFrame _frame;
protected StatusPanel _status;
protected boolean _dead;
protected long _startup;
protected static final int MAX_LOOPS = 5;
protected static final long MIN_EXIST_TIME = 5000L;
}
|
// $Id: ImageSprite.java,v 1.7 2002/06/05 23:38:18 ray Exp $
package com.threerings.media.sprite;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Iterator;
import com.threerings.media.Log;
/**
* Extends the sprite class to support rendering the sprite with one or
* more frames of image animation. Overrides various methods to provide
* correspondingly desirable functionality, e.g., {@link #hitTest} only
* reports a hit if the specified point is within a non-transparent pixel
* for the sprite's current image frame.
*/
public class ImageSprite extends Sprite
{
/** Default frame rate. */
public static final int DEFAULT_FRAME_RATE = 15;
/** Animation mode indicating no animation. */
public static final int NO_ANIMATION = 0;
/** Animation mode indicating movement cued animation. */
public static final int MOVEMENT_CUED = 1;
/** Animation mode indicating time based animation. */
public static final int TIME_BASED = 2;
/**
* Constructs an image sprite without any associated frames and with a
* default initial location of <code>(0, 0)</code>. The sprite should
* be populated with a set of frames used to display it via a
* subsequent call to {@link #setFrames}, and its location updated
* with {@link #setLocation}.
*/
public ImageSprite ()
{
this(0, 0, null);
}
/**
* Constructs an image sprite without any associated frames. The
* sprite should be populated with a set of frames used to display it
* via a subsequent call to {@link #setFrames}.
*
* @param x the sprite x-position in pixels.
* @param y the sprite y-position in pixels.
*/
public ImageSprite (int x, int y)
{
this(x, y, null);
}
/**
* Constructs an image sprite.
*
* @param x the sprite x-position in pixels.
* @param y the sprite y-position in pixels.
* @param frames the multi-frame image used to display the sprite.
*/
public ImageSprite (int x, int y, MultiFrameImage frames)
{
super(x, y);
// initialize frame animation member data
_frames = frames;
_frameIdx = 0;
_animMode = NO_ANIMATION;
_frameDelay = 1000L/DEFAULT_FRAME_RATE;
}
// documentation inherited
protected void init (SpriteManager spritemgr)
{
super.init(spritemgr);
// now that we have our spritemanager, we can initialize our frames
initFrames();
}
/**
* Returns true if the sprite's bounds contain the specified point,
* and if there is a non-transparent pixel in the sprite's image at
* the specified point, false if not.
*/
public boolean hitTest (int x, int y)
{
// first check to see that we're in the sprite's bounds and that
// we've got a frame image (if we've got no image, there's nothing
// to be hit)
if (!super.hitTest(x, y) || _frames == null) {
return false;
}
return _frames.hitTest(_frameIdx, x - _bounds.x, y - _bounds.y);
}
/**
* Sets the animation mode for this sprite. The available modes are:
*
* <ul>
* <li><code>TIME_BASED</code>: cues the animation based on a target
* frame rate (specified via {@link #setFrameRate}).
* <li><code>MOVEMENT_CUED</code>: ticks the animation to the next
* frame every time the sprite is moved along its path.
* <li><code>NO_ANIMATION</code>: disables animation.
* </ul>
*
* @param mode the desired animation mode.
*/
public void setAnimationMode (int mode)
{
_animMode = mode;
}
/**
* Sets the number of frames per second desired for the sprite
* animation. This is only used when the animation mode is
* <code>TIME_BASED</code>.
*
* @param fps the desired frames per second.
*/
public void setFrameRate (float fps)
{
_frameDelay = (long)(1000/fps);
}
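    // Usage sketch (assumed caller code): a sprite that cycles its frames on a
    // timer at 10 frames per second.
    //
    //   ImageSprite sprite = new ImageSprite(50, 100, frames);
    //   sprite.setAnimationMode(ImageSprite.TIME_BASED);
    //   sprite.setFrameRate(10f);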
/**
* Set the image array used to render the sprite.
*
* @param frames the sprite images.
*/
public void setFrames (MultiFrameImage frames)
{
if (frames == null) {
// Log.warning("Someone set up us the null frames! " +
// "[sprite=" + this + "].");
return;
}
// if these are the same frames we already had, no need to do a
// bunch of pointless business
if (frames == _frames) {
return;
}
// set and init our frames
_frames = frames;
initFrames();
}
/**
* Initialize our frames.
*/
protected void initFrames ()
{
// start with our old bounds
Rectangle dirty = new Rectangle(_bounds);
_frameIdx %= _frames.getFrameCount();
// determine our drawing offsets and rendered rectangle size
accomodateFrame(_frames.getWidth(_frameIdx),
_frames.getHeight(_frameIdx));
// add our new bounds
dirty.add(_bounds);
updateRenderOffset();
updateRenderOrigin();
// give the dirty rectangle to the region manager
if (_spritemgr != null) {
_spritemgr.getRegionManager().addDirtyRegion(dirty);
}
}
/**
* Must adjust the bounds to accommodate the new image. Called when a
* new image has been set for this image sprite. If a derived class is
* going to expand the bounds beyond the bounds of the image frame, it
* will need to override this method and adjust bounds accordingly for
* the new frame (which can be null).
*/
protected void accomodateFrame (int width, int height)
{
_bounds.width = width;
_bounds.height = height;
}
// documentation inherited
public void paint (Graphics2D gfx)
{
if (_frames != null) {
_frames.paintFrame(gfx, _frameIdx, _bounds.x, _bounds.y);
} else {
super.paint(gfx);
}
}
// documentation inherited
public void tick (long timestamp)
{
// if we have no frames, we're hosulated (to use a Greenwell term)
if (_frames == null) {
return;
}
int fcount = _frames.getFrameCount();
boolean moved = false;
// move the sprite along toward its destination, if any
if (_path != null) {
moved = _path.tick(this, timestamp);
}
// increment the display image if performing image animation
int nfidx = _frameIdx;
switch (_animMode) {
case NO_ANIMATION:
// nothing doing
break;
case TIME_BASED:
nfidx = (int)((timestamp/_frameDelay) % fcount);
break;
case MOVEMENT_CUED:
// update the frame if the sprite moved
if (moved) {
nfidx = (_frameIdx + 1) % fcount;
}
break;
}
// only update the sprite if our frame index changed
if (nfidx != _frameIdx) {
_frameIdx = nfidx;
// dirty our rectangle since we've altered our display image
invalidate();
}
}
// documentation inherited
protected void toString (StringBuffer buf)
{
super.toString(buf);
buf.append(", fidx=").append(_frameIdx);
}
/** The images used to render the sprite. */
protected MultiFrameImage _frames;
/** The current frame index to render. */
protected int _frameIdx;
/** What type of animation is desired for this sprite. */
protected int _animMode;
/** For how many milliseconds to display an animation frame. */
protected long _frameDelay;
}
|
// $Id: ImageSprite.java,v 1.13 2002/09/17 19:14:59 mdb Exp $
package com.threerings.media.sprite;
import java.awt.AlphaComposite;
import java.awt.Color;
import java.awt.Composite;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Iterator;
import com.threerings.media.Log;
import com.threerings.media.util.MultiFrameImage;
import com.threerings.media.util.SingleFrameImageImpl;
/**
* Extends the sprite class to support rendering the sprite with one or
* more frames of image animation. Overrides various methods to provide
* correspondingly desirable functionality, e.g., {@link #hitTest} only
* reports a hit if the specified point is within a non-transparent pixel
* for the sprite's current image frame.
*/
public class ImageSprite extends Sprite
{
/** Default frame rate. */
public static final int DEFAULT_FRAME_RATE = 15;
/** Animation mode indicating no animation. */
public static final int NO_ANIMATION = 0;
/** Animation mode indicating movement cued animation. */
public static final int MOVEMENT_CUED = 1;
/** Animation mode indicating time based animation. */
public static final int TIME_BASED = 2;
/** Animation mode indicating sequential progressive animation.
* Frame 0 is guaranteed to be shown first for the full duration, and
* so on. */
public static final int TIME_SEQUENTIAL = 3;
/**
* Constructs an image sprite without any associated frames and with
* an invalid default initial location. The sprite should be populated
* with a set of frames used to display it via a subsequent call to
* {@link #setFrames}, and its location updated with {@link
* #setLocation}.
*/
public ImageSprite ()
{
this((MultiFrameImage)null);
}
/**
* Constructs an image sprite.
*
* @param frames the multi-frame image used to display the sprite.
*/
public ImageSprite (MultiFrameImage frames)
{
// initialize frame animation member data
_frames = frames;
_frameIdx = 0;
_animMode = NO_ANIMATION;
_frameDelay = 1000L/DEFAULT_FRAME_RATE;
}
/**
* Constructs an image sprite that will display the supplied single
* image when rendering itself.
*/
public ImageSprite (Image image)
{
this(new SingleFrameImageImpl(image));
}
// documentation inherited
protected void init (SpriteManager spritemgr)
{
super.init(spritemgr);
// now that we have our spritemanager, we can initialize our frames
setFrameIndex(0, true);
}
/**
* Returns true if the sprite's bounds contain the specified point,
* and if there is a non-transparent pixel in the sprite's image at
* the specified point, false if not.
*/
public boolean hitTest (int x, int y)
{
// first check to see that we're in the sprite's bounds and that
// we've got a frame image (if we've got no image, there's nothing
// to be hit)
if (!super.hitTest(x, y) || _frames == null) {
return false;
}
return _frames.hitTest(_frameIdx, x - _bounds.x, y - _bounds.y);
}
/**
* Sets the animation mode for this sprite. The available modes are:
*
* <ul>
* <li><code>TIME_BASED</code>: cues the animation based on a target
* frame rate (specified via {@link #setFrameRate}).
* <li><code>MOVEMENT_CUED</code>: ticks the animation to the next
* frame every time the sprite is moved along its path.
* <li><code>NO_ANIMATION</code>: disables animation.
* </ul>
*
* @param mode the desired animation mode.
*/
public void setAnimationMode (int mode)
{
_animMode = mode;
}
/**
* Sets the number of frames per second desired for the sprite
* animation. This is only used when the animation mode is
* <code>TIME_BASED</code>.
*
* @param fps the desired frames per second.
*/
public void setFrameRate (float fps)
{
_frameDelay = (long)(1000/fps);
}
/**
* Set the image array used to render the sprite.
*
* @param frames the sprite images.
*/
public void setFrames (MultiFrameImage frames)
{
if (frames == null) {
// Log.warning("Someone set up us the null frames! " +
// "[sprite=" + this + "].");
return;
}
// if these are the same frames we already had, no need to do a
// bunch of pointless business
if (frames == _frames) {
return;
}
// set and init our frames
_frames = frames;
setFrameIndex(0, true);
}
/**
* Instructs the sprite to display the specified frame index.
*/
protected void setFrameIndex (int frameIdx, boolean forceUpdate)
{
// make sure we're displaying a valid frame
frameIdx = (frameIdx % _frames.getFrameCount());
// if this is the same frame we're already displaying and we're
// not being forced to update, we can stop now
if (frameIdx == _frameIdx && !forceUpdate) {
return;
} else {
_frameIdx = frameIdx;
}
// start with our old bounds
Rectangle dirty = new Rectangle(_bounds);
// determine our drawing offsets and rendered rectangle size
accomodateFrame(_frameIdx, _frames.getWidth(_frameIdx),
_frames.getHeight(_frameIdx));
// add our new bounds
dirty.add(_bounds);
// give the dirty rectangle to the region manager
if (_spritemgr != null) {
_spritemgr.getRegionManager().addDirtyRegion(dirty);
}
}
/**
* Must adjust the bounds to accommodate our new frame. This
* includes changing the width and height to reflect the size of the
* new frame and also updating the render origin (if necessary) and
* calling {@link #updateRenderOrigin} to reflect those changes in the
* sprite's bounds.
*
* @param frameIdx the index of our new frame.
* @param width the width of the new frame.
* @param height the height of the new frame.
*/
protected void accomodateFrame (int frameIdx, int width, int height)
{
_bounds.width = width;
_bounds.height = height;
}
// documentation inherited
public void paint (Graphics2D gfx)
{
if (_frames != null) {
// // DEBUG: fill our background with an alpha'd rectangle
// Composite ocomp = gfx.getComposite();
// gfx.setComposite(ALPHA_BOUNDS);
// gfx.setColor(Color.blue);
// gfx.fill(_bounds);
// gfx.setComposite(ocomp);
// render our frame
_frames.paintFrame(gfx, _frameIdx, _bounds.x, _bounds.y);
} else {
super.paint(gfx);
}
}
// documentation inherited
public void tick (long timestamp)
{
// if we have no frames, we're hosulated (to use a Greenwell term)
if (_frames == null) {
return;
}
int fcount = _frames.getFrameCount();
boolean moved = false;
// move the sprite along toward its destination, if any
if (_path != null) {
moved = _path.tick(this, timestamp);
}
// increment the display image if performing image animation
int nfidx = _frameIdx;
switch (_animMode) {
case NO_ANIMATION:
// nothing doing
break;
case TIME_BASED:
nfidx = (int)((timestamp/_frameDelay) % fcount);
break;
case TIME_SEQUENTIAL:
if (_firstStamp == 0L) {
_firstStamp = timestamp;
}
nfidx = (int) (((timestamp - _firstStamp) / _frameDelay) % fcount);
break;
case MOVEMENT_CUED:
// update the frame if the sprite moved
if (moved) {
nfidx = (_frameIdx + 1) % fcount;
}
break;
}
// update our frame (which will do nothing if this is the same as
// our existing frame index)
setFrameIndex(nfidx, false);
}
// documentation inherited
protected void toString (StringBuffer buf)
{
super.toString(buf);
buf.append(", fidx=").append(_frameIdx);
}
/** The images used to render the sprite. */
protected MultiFrameImage _frames;
/** The current frame index to render. */
protected int _frameIdx;
/** What type of animation is desired for this sprite. */
protected int _animMode;
/** For how many milliseconds to display an animation frame. */
protected long _frameDelay;
/** The first timestamp seen (in TIME_SEQUENTIAL mode). */
protected long _firstStamp = 0L;
// /** DEBUG: The alpha level used when rendering our bounds. */
// protected static final Composite ALPHA_BOUNDS =
// AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.2f);
}
|
// $Id: IsoSceneView.java,v 1.68 2001/10/24 01:33:47 shaper Exp $
package com.threerings.miso.scene;
import java.awt.*;
import java.awt.geom.*;
import java.awt.image.*;
import java.util.List;
import java.util.*;
import com.samskivert.util.HashIntMap;
import com.threerings.media.sprite.*;
import com.threerings.media.tile.Tile;
import com.threerings.media.tile.ObjectTile;
import com.threerings.miso.Log;
import com.threerings.miso.scene.DirtyItemList.DirtyItem;
import com.threerings.miso.scene.util.*;
/**
* The iso scene view provides an isometric view of a particular
* scene.
*/
public class IsoSceneView implements SceneView
{
/**
* Constructs an iso scene view.
*
* @param spritemgr the sprite manager.
* @param model the data model.
*/
public IsoSceneView (SpriteManager spritemgr, IsoSceneViewModel model)
{
_spritemgr = spritemgr;
_model = model;
_model.precalculate();
// create our polygon arrays and create polygons for each of the
// tiles. we use these repeatedly, so we go ahead and make 'em all
// up front
_polys = new Polygon[model.scenewid][model.scenehei];
for (int xx = 0; xx < model.scenewid; xx++) {
for (int yy = 0; yy < model.scenehei; yy++) {
_polys[xx][yy] = IsoUtil.getTilePolygon(_model, xx, yy);
}
}
// create the array used to mark dirty tiles
_dirty = new boolean[model.scenewid][model.scenehei];
}
// documentation inherited
public void setScene (MisoScene scene)
{
_scene = scene;
// clear all dirty lists and tile array
clearDirtyRegions();
// generate all object shadow tiles and polygons
initAllObjectBounds();
// invalidate the entire screen as there's a new scene in town
invalidate();
}
// documentation inherited
public void paint (Graphics g)
{
if (_scene == null) {
Log.warning("Scene view painted with null scene.");
return;
}
Graphics2D gfx = (Graphics2D)g;
// clip everything to the overall scene view bounds
Shape oldclip = gfx.getClip();
gfx.setClip(_model.bounds);
if (_numDirty == 0) {
// invalidate the entire screen
invalidate();
}
// render the scene to the graphics context
renderScene(gfx);
// draw frames of dirty tiles and rectangles
// drawDirtyRegions(gfx);
// draw tile coordinates
if (_model.showCoords) {
paintCoordinates(gfx);
}
// clear out the dirty tiles and rectangles
clearDirtyRegions();
// draw sprite paths
if (_model.showPaths) {
_spritemgr.renderSpritePaths(gfx);
}
// draw marks at each location
if (_model.showLocs) {
paintLocations(gfx);
}
// paint any extra goodies
paintExtras(gfx);
// restore the original clipping region
gfx.setClip(oldclip);
}
/**
* A function where derived classes can paint extra stuff while we've
* got the clipping region set up.
*/
protected void paintExtras (Graphics2D gfx)
{
// nothing for now
}
/**
* Invalidate the entire visible scene view.
*/
protected void invalidate ()
{
DirtyRectList rects = new DirtyRectList();
rects.add(_model.bounds);
invalidateRects(rects);
}
/**
* Clears the dirty rectangles and items lists, and the array of
* dirty tiles.
*/
protected void clearDirtyRegions ()
{
_dirtyRects.clear();
_dirtyItems.clear();
_numDirty = 0;
for (int xx = 0; xx < _model.scenewid; xx++) {
for (int yy = 0; yy < _model.scenehei; yy++) {
_dirty[xx][yy] = false;
}
}
}
/**
* Draws highlights around the dirty tiles and rectangles.
*/
protected void drawDirtyRegions (Graphics2D gfx)
{
// draw the dirty tiles
gfx.setColor(Color.cyan);
for (int xx = 0; xx < _model.scenewid; xx++) {
for (int yy = 0; yy < _model.scenehei; yy++) {
if (_dirty[xx][yy]) {
gfx.draw(_polys[xx][yy]);
}
}
}
// draw the dirty rectangles
Stroke ostroke = gfx.getStroke();
gfx.setStroke(DIRTY_RECT_STROKE);
gfx.setColor(Color.red);
int size = _dirtyRects.size();
for (int ii = 0; ii < size; ii++) {
Rectangle rect = (Rectangle)_dirtyRects.get(ii);
gfx.draw(rect);
}
gfx.setStroke(ostroke);
// draw the dirty item rectangles
gfx.setColor(Color.yellow);
size = _dirtyItems.size();
for (int ii = 0; ii < size; ii++) {
Rectangle rect = ((DirtyItem)_dirtyItems.get(ii)).dirtyRect;
gfx.draw(rect);
}
}
/**
* Renders the scene to the given graphics context.
*
* @param gfx the graphics context.
*/
protected void renderScene (Graphics2D gfx)
{
renderTiles(gfx);
renderDirtyItems(gfx);
}
/**
* Renders the base and fringe layer tiles to the given graphics
* context.
*/
protected void renderTiles (Graphics2D gfx)
{
Tile[][][] tiles = _scene.getTiles();
// render the base and fringe layers
for (int yy = 0; yy < _model.scenehei; yy++) {
for (int xx = 0; xx < _model.scenewid; xx++) {
if (_dirty[xx][yy]) {
// draw both layers at this tile position
for (int kk = MisoScene.LAYER_BASE;
kk < MisoScene.LAYER_FRINGE; kk++) {
// get the tile at these coordinates and layer
Tile tile = tiles[kk][xx][yy];
if (tile != null) {
// draw the tile image
tile.paint(gfx, _polys[xx][yy]);
}
}
}
}
}
}
/**
* Renders the dirty sprites and objects in the scene to the given
* graphics context.
*/
protected void renderDirtyItems (Graphics2D gfx)
{
// Log.info("renderDirtyItems");
// sort the dirty sprites and objects visually back-to-front
DirtyItem items[] = _dirtyItems.sort();
// render each item clipping to its dirty rectangle
for (int ii = 0; ii < items.length; ii++) {
items[ii].paint(gfx, items[ii].dirtyRect);
}
}
/**
* Generates and stores bounding polygons for all object tiles in
* the scene for later use while rendering.
*/
protected void initAllObjectBounds ()
{
// clear out any previously existing object polygons
_objpolys.clear();
// generate bounding polygons for all objects
ObjectTile[][] tiles = _scene.getObjectLayer();
for (int xx = 0; xx < _model.scenewid; xx++) {
for (int yy = 0; yy < _model.scenehei; yy++) {
ObjectTile tile = tiles[xx][yy];
if (tile != null) {
generateObjectBounds(tile, xx, yy);
}
}
}
}
/**
* Generates and stores the bounding polygon for the object which
* is used when invalidating dirty rectangles or tiles, and when
* rendering the object to a graphics context. This method should
* be called when an object tile is added to a scene.
*/
protected void generateObjectBounds (ObjectTile tile, int x, int y)
{
// create the bounding polygon for this object
int key = getCoordinateKey(x, y);
// save it off in the object bounds hashtable
_objpolys.put(key, newObjectBounds(tile, x, y));
}
/**
* Creates and returns a new polygon bounding the given object
* tile positioned at the given scene coordinates.
*/
protected Polygon newObjectBounds (ObjectTile tile, int x, int y)
{
return IsoUtil.getObjectBounds(_model, _polys[x][y], tile);
}
/**
* Returns a unique integer key corresponding to the given
* coordinates, suitable for storing and retrieving objects from a
* hashtable.
*/
protected int getCoordinateKey (int x, int y)
{
return (x << 16 | y);
}
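    /**
     * Illustrative sketch, not part of the original class: recovers the
     * coordinates packed by {@link #getCoordinateKey}, mirroring the
     * decoding performed in {@link #invalidateItems}.
     */
    protected static Point decodeCoordinateKey (int key)
    {
        return new Point(key >> 16, key & 0x0000FFFF);
    }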
/**
* Paints tile coordinate numbers on all dirty tiles.
*
* @param gfx the graphics context.
*/
protected void paintCoordinates (Graphics2D gfx)
{
FontMetrics fm = gfx.getFontMetrics(_font);
gfx.setFont(_font);
gfx.setColor(Color.white);
int cx = _model.tilehwid, cy = _model.tilehhei;
int fhei = fm.getAscent();
for (int yy = 0; yy < _model.scenehei; yy++) {
for (int xx = 0; xx < _model.scenewid; xx++) {
// get the top-left screen coordinates of the tile
Rectangle bounds = _polys[xx][yy].getBounds();
// only draw coordinates if the tile is on-screen
if (bounds.intersects(_model.bounds)) {
int sx = bounds.x, sy = bounds.y;
// draw x-coordinate
String str = "" + xx;
int xpos = sx + cx - (fm.stringWidth(str) / 2);
gfx.drawString(str, xpos, sy + cy);
// draw y-coordinate
str = "" + yy;
xpos = sx + cx - (fm.stringWidth(str) / 2);
gfx.drawString(str, xpos, sy + cy + fhei);
}
}
}
}
/**
* Paint demarcations at all locations in the scene, with each
* location's cluster index, if any, along the right side of its
* rectangle.
*
* @param gfx the graphics context.
*/
protected void paintLocations (Graphics2D gfx)
{
List locations = _scene.getLocations();
int size = locations.size();
// create the location triangle
Polygon tri = new Polygon();
tri.addPoint(-3, -3);
tri.addPoint(3, -3);
tri.addPoint(0, 3);
for (int ii = 0; ii < size; ii++) {
// retrieve the location
Location loc = (Location)locations.get(ii);
// get the cluster index this location is in, if any
int clusteridx = MisoSceneUtil.getClusterIndex(_scene, loc);
Point spos = new Point();
IsoUtil.fullToScreen(_model, loc.x, loc.y, spos);
int cx = spos.x, cy = spos.y;
// translate the origin to center on the location
gfx.translate(cx, cy);
// rotate to reflect the location orientation
double rot = (Math.PI / 4.0f) * loc.orient;
gfx.rotate(rot);
// draw the triangle
Color fcol = (loc instanceof Portal) ? Color.green : Color.yellow;
gfx.setColor(fcol);
gfx.fill(tri);
// outline the triangle in black
gfx.setColor(Color.black);
gfx.draw(tri);
// draw the rectangle
gfx.setColor(Color.red);
gfx.fillRect(-1, 2, 3, 3);
// restore the original transform
gfx.rotate(-rot);
gfx.translate(-cx, -cy);
if (clusteridx != -1) {
// draw the cluster index number on the right side
gfx.setFont(_font);
gfx.setColor(Color.white);
gfx.drawString("" + clusteridx, cx + 5, cy + 3);
}
}
}
// documentation inherited
public void invalidateRects (DirtyRectList rects)
{
int size = rects.size();
for (int ii = 0; ii < size; ii++) {
Rectangle r = (Rectangle)rects.get(ii);
// dirty the tiles impacted by this rectangle
Rectangle tileBounds = invalidateScreenRect(r);
// dirty any sprites or objects impacted by this rectangle
invalidateItems(tileBounds);
// save the rectangle for potential display later
_dirtyRects.add(r);
}
}
/**
* Invalidates the given rectangle in screen pixel coordinates in
* the view. Returns a rectangle that bounds all tiles that were
* dirtied.
*
* @param rect the dirty rectangle.
*/
public Rectangle invalidateScreenRect (Rectangle r)
{
// initialize the rectangle bounding all tiles dirtied by the
// invalidated rectangle
Rectangle tileBounds = new Rectangle(-1, -1, 0, 0);
// note that corner tiles may be included unnecessarily, but
// checking to determine whether they're actually needed
// complicates the code with likely-insufficient benefit
// determine the top-left tile impacted by this rect
Point tpos = new Point();
IsoUtil.screenToTile(_model, r.x, r.y, tpos);
// determine screen coordinates for top-left tile
Point topleft = new Point();
IsoUtil.tileToScreen(_model, tpos.x, tpos.y, topleft);
// determine number of horizontal and vertical tiles for rect
int numh = (int)Math.ceil((float)r.width / (float)_model.tilewid);
int numv = (int)Math.ceil((float)r.height / (float)_model.tilehhei);
// set up iterating variables
int tx = tpos.x, ty = tpos.y, mx = tpos.x, my = tpos.y;
// set the starting screen y-position
int screenY = topleft.y;
// add top row if rect may overlap
if (r.y < (screenY + _model.tilehhei)) {
ty--;
for (int ii = 0; ii < numh; ii++) {
addDirtyTile(tileBounds, tx++, ty--);
}
}
// add rows to the bottom if rect may overlap
int ypos = screenY + (numv * _model.tilehhei);
if ((r.y + r.height) > ypos) {
numv += ((r.y + r.height) > (ypos + _model.tilehhei)) ? 2 : 1;
}
// add dirty tiles from each affected row
boolean isodd = false;
for (int ii = 0; ii < numv; ii++) {
// set up iterating variables for this row
tx = mx;
ty = my;
int length = numh;
// set the starting screen x-position
int screenX = topleft.x;
if (isodd) {
screenX -= _model.tilehwid;
}
// skip leftmost tile if rect doesn't overlap
if (r.x > screenX + _model.tilewid) {
tx++;
ty--;
screenX += _model.tilewid;
}
// add to the right edge if rect may overlap
if (r.x + r.width > (screenX + (length * _model.tilewid))) {
length++;
}
// add all tiles in the row to the dirty set
for (int jj = 0; jj < length; jj++) {
addDirtyTile(tileBounds, tx++, ty--);
}
// step along the x- or y-axis appropriately
if (isodd) {
mx++;
} else {
my++;
}
// increment the screen y-position
screenY += _model.tilehhei;
// toggle whether we're drawing an odd-numbered row
isodd = !isodd;
}
return tileBounds;
}
/**
* Marks the tile at the given coordinates dirty and expands the
* tile bounds rectangle to include the rectangle for the dirtied
* tile.
*/
protected void addDirtyTile (Rectangle tileBounds, int x, int y)
{
if (!_model.isCoordinateValid(x, y)) {
return;
}
// expand the tile bounds rectangle to include this tile
Rectangle bounds = _polys[x][y].getBounds();
if (tileBounds.x == -1) {
tileBounds.setBounds(bounds);
} else {
tileBounds.add(bounds);
}
// do nothing if the tile's already dirty
if (_dirty[x][y]) {
return;
}
// mark the tile dirty
_numDirty++;
_dirty[x][y] = true;
}
/**
* Adds any sprites or objects in the scene whose bounds overlap
* with the given dirty rectangle to the dirty item list for later
* re-rendering.
*/
protected void invalidateItems (Rectangle r)
{
// add any sprites impacted by the dirty rectangle
_dirtySprites.clear();
_spritemgr.getIntersectingSprites(_dirtySprites, r);
int size = _dirtySprites.size();
for (int ii = 0; ii < size; ii++) {
AmbulatorySprite sprite = (AmbulatorySprite)_dirtySprites.get(ii);
// get the dirty portion of the sprite
Rectangle drect = sprite.getBounds().intersection(r);
_dirtyItems.appendDirtySprite(
sprite, sprite.getTileX(), sprite.getTileY(), drect);
// Log.info("Dirtied item: " + sprite);
}
// add any objects impacted by the dirty rectangle
ObjectTile tiles[][] = _scene.getObjectLayer();
Iterator iter = _objpolys.keys();
while (iter.hasNext()) {
// get the object's coordinates and bounding polygon
int coord = ((Integer)iter.next()).intValue();
Polygon poly = (Polygon)_objpolys.get(coord);
if (poly.intersects(r)) {
// get the dirty portion of the object
Rectangle drect = poly.getBounds().intersection(r);
int tx = coord >> 16, ty = coord & 0x0000FFFF;
_dirtyItems.appendDirtyObject(
tiles[tx][ty], poly, tx, ty, drect);
// Log.info("Dirtied item: Object(" + tx + ", " + ty + ").");
}
}
}
// documentation inherited
public Path getPath (AmbulatorySprite sprite, int x, int y)
{
// make sure the destination point is within our bounds
if (!_model.bounds.contains(x, y)) {
return null;
}
// get the destination tile coordinates
Point dest = new Point();
IsoUtil.screenToTile(_model, x, y, dest);
// get a reasonable tile path through the scene
List points = AStarPathUtil.getPath(
_scene.getBaseLayer(), _model.scenewid, _model.scenehei,
sprite, sprite.getTileX(), sprite.getTileY(), dest.x, dest.y);
// construct a path object to guide the sprite on its merry way
return (points == null) ? null :
new TilePath(_model, sprite, points, x, y);
}
/** The stroke used to draw dirty rectangles. */
protected static final Stroke DIRTY_RECT_STROKE = new BasicStroke(2);
/** The font to draw tile coordinates. */
protected Font _font = new Font("Arial", Font.PLAIN, 7);
/** Polygon instances for all of our tiles. */
protected Polygon _polys[][];
/** Bounding polygons for all of the object tiles. */
protected HashIntMap _objpolys = new HashIntMap();
/** The dirty tiles that need to be re-painted. */
protected boolean _dirty[][];
/** The number of dirty tiles. */
protected int _numDirty;
/** The dirty rectangles that need to be re-painted. */
protected ArrayList _dirtyRects = new ArrayList();
/** The dirty sprites and objects that need to be re-painted. */
protected DirtyItemList _dirtyItems = new DirtyItemList();
/** The working sprites list used when calculating dirty regions. */
protected ArrayList _dirtySprites = new ArrayList();
/** The scene view model data. */
protected IsoSceneViewModel _model;
/** The scene object to be displayed. */
protected MisoScene _scene;
/** The sprite manager. */
protected SpriteManager _spritemgr;
}
|
// $Id: IsoSceneView.java,v 1.25 2001/08/04 00:22:19 shaper Exp $
package com.threerings.miso.scene;
import com.threerings.miso.Log;
import com.threerings.miso.sprite.*;
import com.threerings.miso.tile.Tile;
import com.threerings.miso.tile.TileManager;
import java.awt.*;
import java.awt.image.*;
import java.util.ArrayList;
/**
* The <code>IsoSceneView</code> provides an isometric view of a
* particular scene.
*/
public class IsoSceneView implements EditableSceneView
{
/**
* Construct an <code>IsoSceneView</code> object.
*
* @param tilemgr the tile manager.
* @param spritemgr the sprite manager.
* @param model the data model.
*/
public IsoSceneView (TileManager tilemgr, SpriteManager spritemgr,
IsoSceneModel model)
{
_tilemgr = tilemgr;
_spritemgr = spritemgr;
setModel(model);
// initialize the highlighted tile
_htile = new Point(-1, -1);
// get the font used to render tile coordinates
_font = new Font("Arial", Font.PLAIN, 7);
// create the list of dirty rectangles
_dirty = new ArrayList();
}
/**
* Paint the scene view and any highlighted tiles to the given
* graphics context.
*
* @param g the graphics context.
*/
public void paint (Graphics g)
{
Graphics2D gfx = (Graphics2D)g;
// clip the drawing region to our desired bounds since we
// currently draw tiles willy-nilly in undesirable areas.
Shape oldclip = gfx.getClip();
gfx.setClip(0, 0, _model.bounds.width, _model.bounds.height);
// draw the full scene into the offscreen image buffer
//renderSceneInvalid(gfx);
renderScene(gfx);
// draw an outline around the highlighted tile
paintHighlightedTile(gfx, _htile.x, _htile.y);
// draw lines illustrating tracking of the mouse position
//paintMouseLines(gfx);
// restore the original clipping region
gfx.setClip(oldclip);
}
/**
* Render the scene to the given graphics context.
*
* @param gfx the graphics context.
*/
protected void renderSceneInvalid (Graphics2D gfx)
{
Log.info("renderSceneInvalid.");
int size = _dirty.size();
for (int ii = 0; ii < size; ii++) {
// retrieve the next dirty tile coordinates
int[] dinfo = (int[])_dirty.remove(0);
int tx = dinfo[0], ty = dinfo[1];
// get the tile's screen position
Polygon poly = getTilePolygon(tx, ty);
// draw all layers at this tile position
for (int kk = 0; kk < Scene.NUM_LAYERS; kk++) {
// get the tile at these coordinates and layer
Tile tile = _scene.tiles[tx][ty][kk];
if (tile == null) continue;
// offset the image y-position by the tile-specific height
int ypos = poly.ypoints[0] - _model.tilehhei -
(tile.height - _model.tilehei);
// draw the tile image
gfx.drawImage(tile.img, poly.xpoints[0], ypos, null);
}
// draw all sprites residing in the current tile
_spritemgr.renderSprites(gfx, poly);
}
}
/**
* Return a polygon framing the specified tile.
*
* @param x the tile x-position coordinate.
* @param y the tile y-position coordinate.
*/
protected Polygon getTilePolygon (int x, int y)
{
// get the top-left screen coordinate for the tile
Point spos = new Point();
IsoUtil.tileToScreen(_model, x, y, spos);
// create a polygon framing the tile
Polygon poly = new Polygon();
poly.addPoint(spos.x, spos.y + _model.tilehhei);
poly.addPoint(spos.x + _model.tilehwid, spos.y);
poly.addPoint(spos.x + _model.tilewid, spos.y + _model.tilehhei);
poly.addPoint(spos.x + _model.tilehwid, spos.y + _model.tilehei);
poly.addPoint(spos.x, spos.y + _model.tilehhei);
return poly;
}
/**
* Render the scene to the given graphics context.
*
* @param gfx the graphics context.
*/
protected void renderScene (Graphics2D gfx)
{
int mx = 1;
int my = 0;
int screenY = _model.origin.y;
for (int ii = 0; ii < _model.tilerows; ii++) {
// determine starting tile coordinates
int tx = (ii < Scene.TILE_HEIGHT) ? 0 : mx++;
int ty = my;
// determine number of tiles in this row
int length = (ty - tx) + 1;
// determine starting screen x-position
int screenX = _model.origin.x - ((length) * _model.tilehwid);
for (int jj = 0; jj < length; jj++) {
for (int kk = 0; kk < Scene.NUM_LAYERS; kk++) {
// grab the tile we're rendering
Tile tile = _scene.tiles[tx][ty][kk];
if (tile == null) continue;
// determine screen y-position, accounting for
// tile image height
int ypos = screenY - (tile.height - _model.tilehei);
// draw the tile image at the appropriate screen position
gfx.drawImage(tile.img, screenX, ypos, null);
// draw all sprites residing in the current tile
// TODO: simplify other tile positioning here to use poly
_spritemgr.renderSprites(gfx, getTilePolygon(tx, ty));
}
// draw tile coordinates in each tile
if (_model.showCoords) {
paintCoords(gfx, tx, ty, screenX, screenY);
}
// each tile is one tile-width to the right of the previous
screenX += _model.tilewid;
// advance tile x and decrement tile y as we move to
// the right drawing the row
tx++;
ty--;
}
// each row is a half-tile-height away from the previous row
screenY += _model.tilehhei;
// advance starting y-axis coordinate unless we've hit bottom
if ((++my) > Scene.TILE_HEIGHT - 1) my = Scene.TILE_HEIGHT - 1;
}
}
/**
* Paint lines showing the most recently calculated x- and y-axis
* mouse position tracking lines, and the mouse position itself.
*
* @param gfx the graphics context.
*/
protected void paintMouseLines (Graphics2D gfx)
{
Point[] lx = _model.lineX, ly = _model.lineY;
// draw the baseline x-axis line
gfx.setColor(Color.red);
gfx.drawLine(lx[0].x, lx[0].y, lx[1].x, lx[1].y);
// draw line from last mouse pos to baseline
gfx.setColor(Color.yellow);
gfx.drawLine(ly[0].x, ly[0].y, ly[1].x, ly[1].y);
// draw the most recent mouse cursor position
gfx.setColor(Color.green);
gfx.fillRect(ly[0].x, ly[0].y, 2, 2);
gfx.setColor(Color.red);
gfx.drawRect(ly[0].x - 1, ly[0].y - 1, 3, 3);
}
/**
* Paint the tile coordinate numbers in tile (x, y) whose top-left
* corner is at screen pixel coordinates (sx, sy).
*
* @param gfx the graphics context.
* @param x the tile x-position coordinate.
* @param y the tile y-position coordinate.
* @param sx the screen x-position pixel coordinate.
* @param sy the screen y-position pixel coordinate.
*/
protected void paintCoords (Graphics2D gfx, int x, int y, int sx, int sy)
{
FontMetrics fm = gfx.getFontMetrics(_font);
gfx.setFont(_font);
gfx.setColor(Color.white);
int cx = _model.tilehwid, cy = _model.tilehhei;
int fhei = fm.getAscent();
// draw x-coordinate
String str = "" + x;
gfx.drawString(str, sx + cx - fm.stringWidth(str), sy + cy);
// draw y-coordinate
str = "" + y;
gfx.drawString(str, sx + cx - fm.stringWidth(str), sy + cy + fhei);
}
/**
* Paint a highlight around the specified tile.
*
* @param gfx the graphics context.
* @param x the tile x-position coordinate.
* @param y the tile y-position coordinate.
*/
protected void paintHighlightedTile (Graphics2D gfx, int x, int y)
{
// set the desired stroke and color
Stroke ostroke = gfx.getStroke();
gfx.setStroke(HLT_STROKE);
gfx.setColor(HLT_COLOR);
// draw the tile outline
gfx.draw(getTilePolygon(x, y));
// restore the original stroke
gfx.setStroke(ostroke);
}
public void setHighlightedTile (int sx, int sy)
{
IsoUtil.screenToTile(_model, sx, sy, _htile);
}
/**
* Invalidate a list of rectangles in the view for later repainting.
*
* @param rects the list of Rectangle objects.
*/
public void invalidateRects (ArrayList rects)
{
int size = rects.size();
for (int ii = 0; ii < size; ii++) {
Rectangle r = (Rectangle)rects.get(ii);
invalidateScreenRect(r.x, r.y, r.width, r.height);
}
}
/**
* Invalidate the specified rectangle in screen pixel coordinates
* in the view.
*
* @param x the rectangle x-position.
* @param y the rectangle y-position.
* @param width the rectangle width.
* @param height the rectangle height.
*/
public void invalidateScreenRect (int x, int y, int width, int height)
{
Point tpos = new Point();
IsoUtil.screenToTile(_model, x, y, tpos);
// Log.info("invalidateScreenRect: mapped rect to tile " +
// "[tx=" + tpos.x + ", ty=" + tpos.y +
// ", x=" + x + ", y=" + y + ", width=" + width +
// ", height=" + height + "].");
_dirty.add(new int[] { tpos.x, tpos.y });
}
public void setScene (Scene scene)
{
_scene = scene;
}
public void setShowCoordinates (boolean show)
{
_model.showCoords = show;
}
public void setTile (int x, int y, int lnum, Tile tile)
{
Point tpos = new Point();
IsoUtil.screenToTile(_model, x, y, tpos);
_scene.tiles[tpos.x][tpos.y][lnum] = tile;
}
public void setModel (IsoSceneModel model)
{
_model = model;
_model.calculateXAxis();
}
public Path getPath (Sprite sprite, int x, int y)
{
// make sure the destination point is within our bounds
if (x < 0 || x >= _model.bounds.width ||
y < 0 || y >= _model.bounds.height) {
return null;
}
// create path from current loc to destination
Path path = new Path(sprite.x, sprite.y);
int dir = IsoUtil.getDirection(_model, sprite.x, sprite.y, x, y);
path.addNode(x, y, dir);
return path;
}
/** The color to draw the highlighted tile. */
protected static final Color HLT_COLOR = Color.green;
/** The stroke object used to draw the highlighted tile. */
protected static final Stroke HLT_STROKE = new BasicStroke(3);
/** The currently highlighted tile. */
protected Point _htile;
/** The font to draw tile coordinates. */
protected Font _font;
/** The dirty tile row segments that need to be re-painted. */
protected ArrayList _dirty;
/** The scene model data. */
protected IsoSceneModel _model;
/** The scene object to be displayed. */
protected Scene _scene;
/** The sprite manager. */
protected SpriteManager _spritemgr;
/** The tile manager. */
protected TileManager _tilemgr;
}
|
package org.jaxen.saxpath.base;
import java.util.LinkedList;
import org.jaxen.saxpath.Axis;
import org.jaxen.saxpath.Operator;
import org.jaxen.saxpath.XPathHandler;
import org.jaxen.saxpath.XPathSyntaxException;
/** Implementation of SAXPath's <code>XPathReader</code> which
* generates callbacks to an <code>XPathHandler</code>.
*
* @author bob mcwhirter (bob@werken.com)
*/
public class XPathReader extends TokenTypes implements org.jaxen.saxpath.XPathReader
{
private LinkedList tokens;
private XPathLexer lexer;
private XPathHandler handler;
public XPathReader()
{
setXPathHandler( DefaultXPathHandler.getInstance() );
}
public void setXPathHandler(XPathHandler handler)
{
this.handler = handler;
}
public XPathHandler getXPathHandler()
{
return this.handler;
}
public void parse(String xpath) throws org.jaxen.saxpath.SAXPathException
{
setUpParse( xpath );
getXPathHandler().startXPath();
expr();
getXPathHandler().endXPath();
if ( LA(1) != EOF )
{
throwUnexpected();
}
lexer = null;
tokens = null;
}
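    /**
     * Illustrative usage sketch, not part of the original class: wires a
     * caller supplied handler into a fresh reader and parses an
     * expression. The expression shown in the comment is only an example.
     */
    static void parseExample(String xpath, XPathHandler handler)
        throws org.jaxen.saxpath.SAXPathException
    {
        XPathReader reader = new XPathReader();
        reader.setXPathHandler( handler );
        reader.parse( xpath ); // e.g. "/foo/bar[@id = '42']"
    }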
void setUpParse(String xpath)
{
this.tokens = new LinkedList();
this.lexer = new XPathLexer( xpath );
}
void pathExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startPathExpr();
switch ( LA(1) )
{
case INTEGER:
case DOUBLE:
case LITERAL:
case LEFT_PAREN:
case DOLLAR:
{
filterExpr();
if ( LA(1) == SLASH || LA(1) == DOUBLE_SLASH )
{
XPathSyntaxException ex = this.createSyntaxException("Node-set expected");
throw ex;
}
break;
}
case IDENTIFIER:
{
if ( ( LA(2) == LEFT_PAREN
&&
! isNodeTypeName( LT(1) ) )
||
( LA(2) == COLON
&&
LA(4) == LEFT_PAREN) )
{
filterExpr();
if ( LA(1) == SLASH || LA(1) == DOUBLE_SLASH)
{
locationPath( false );
}
break;
}
else
{
locationPath( false );
break;
}
}
case DOT:
case DOT_DOT:
case STAR:
case AT:
{
locationPath( false );
break;
}
case SLASH:
case DOUBLE_SLASH:
{
locationPath( true );
break;
}
default:
{
throwUnexpected();
}
}
getXPathHandler().endPathExpr();
}
void numberDouble() throws org.jaxen.saxpath.SAXPathException
{
Token token = match( DOUBLE );
getXPathHandler().number( Double.parseDouble( token.getTokenText() ) );
}
void numberInteger() throws org.jaxen.saxpath.SAXPathException
{
Token token = match( INTEGER );
String text = token.getTokenText();
try {
getXPathHandler().number( Integer.parseInt( text ) );
}
catch (NumberFormatException ex) {
getXPathHandler().number( Double.parseDouble( text ) );
}
}
void literal() throws org.jaxen.saxpath.SAXPathException
{
Token token = match( LITERAL );
getXPathHandler().literal( token.getTokenText() );
}
void functionCall() throws org.jaxen.saxpath.SAXPathException
{
String prefix = null;
String functionName = null;
if ( LA(2) == COLON )
{
prefix = match( IDENTIFIER ).getTokenText();
match( COLON );
}
else
{
prefix = "";
}
functionName = match( IDENTIFIER ).getTokenText();
getXPathHandler().startFunction( prefix,
functionName );
match ( LEFT_PAREN );
arguments();
match ( RIGHT_PAREN );
getXPathHandler().endFunction();
}
void arguments() throws org.jaxen.saxpath.SAXPathException
{
while ( LA(1) != RIGHT_PAREN )
{
expr();
if ( LA(1) == COMMA )
{
match( COMMA );
}
else
{
break;
}
}
}
void filterExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startFilterExpr();
switch ( LA(1) )
{
case INTEGER:
{
numberInteger();
break;
}
case DOUBLE:
{
numberDouble();
break;
}
case LITERAL:
{
literal();
break;
}
case LEFT_PAREN:
{
match( LEFT_PAREN );
expr();
match( RIGHT_PAREN );
break;
}
case IDENTIFIER:
{
functionCall();
break;
}
case DOLLAR:
{
variableReference();
break;
}
}
predicates();
getXPathHandler().endFilterExpr();
}
void variableReference() throws org.jaxen.saxpath.SAXPathException
{
match( DOLLAR );
String prefix = null;
String variableName = null;
if ( LA(2) == COLON )
{
prefix = match( IDENTIFIER ).getTokenText();
match( COLON );
}
else
{
prefix = "";
}
variableName = match( IDENTIFIER ).getTokenText();
getXPathHandler().variableReference( prefix,
variableName );
}
void locationPath(boolean isAbsolute) throws org.jaxen.saxpath.SAXPathException
{
switch ( LA(1) )
{
case SLASH:
case DOUBLE_SLASH:
{
if ( isAbsolute )
{
absoluteLocationPath();
}
else
{
relativeLocationPath();
}
break;
}
case AT:
case IDENTIFIER:
case DOT:
case DOT_DOT:
case STAR:
{
relativeLocationPath();
break;
}
default:
{
throwUnexpected();
break;
}
}
}
void absoluteLocationPath() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startAbsoluteLocationPath();
switch ( LA(1) )
{
case SLASH:
{
match( SLASH );
switch ( LA(1) )
{
case DOT:
case DOT_DOT:
case AT:
case IDENTIFIER:
case STAR:
{
steps();
break;
}
}
break;
}
case DOUBLE_SLASH:
{
getXPathHandler().startAllNodeStep( Axis.DESCENDANT_OR_SELF );
getXPathHandler().endAllNodeStep();
match( DOUBLE_SLASH );
switch ( LA(1) )
{
case DOT:
case DOT_DOT:
case AT:
case IDENTIFIER:
case STAR:
{
steps();
break;
}
default:
XPathSyntaxException ex = this.createSyntaxException("Location path cannot end with //");
throw ex;
}
break;
}
}
getXPathHandler().endAbsoluteLocationPath();
}
void relativeLocationPath() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startRelativeLocationPath();
switch ( LA(1) )
{
case SLASH:
{
match( SLASH );
break;
}
case DOUBLE_SLASH:
{
getXPathHandler().startAllNodeStep( Axis.DESCENDANT_OR_SELF );
getXPathHandler().endAllNodeStep();
match( DOUBLE_SLASH );
break;
}
}
steps();
getXPathHandler().endRelativeLocationPath();
}
void steps() throws org.jaxen.saxpath.SAXPathException
{
switch ( LA(1) )
{
case DOT:
case DOT_DOT:
case AT:
case IDENTIFIER:
case STAR:
{
step();
break;
}
case EOF:
{
return;
}
default:
{
throw createSyntaxException( "Expected one of '.', '..', '@', '*', <QName>" );
}
}
do
{
if ( ( LA(1) == SLASH)
||
( LA(1) == DOUBLE_SLASH ) )
{
switch ( LA(1) )
{
case SLASH:
{
match( SLASH );
break;
}
case DOUBLE_SLASH:
{
getXPathHandler().startAllNodeStep( Axis.DESCENDANT_OR_SELF );
getXPathHandler().endAllNodeStep();
match( DOUBLE_SLASH );
break;
}
}
}
else
{
return;
}
switch ( LA(1) )
{
case DOT:
case DOT_DOT:
case AT:
case IDENTIFIER:
case STAR:
{
step();
break;
}
default:
{
throw createSyntaxException( "Expected one of '.', '..', '@', '*', <QName>" );
}
}
} while ( true );
}
void step() throws org.jaxen.saxpath.SAXPathException
{
int axis = 0;
switch ( LA(1) )
{
case DOT:
case DOT_DOT:
{
abbrStep();
return;
}
case AT:
{
axis = axisSpecifier();
break;
}
case IDENTIFIER:
{
if ( LA(2) == DOUBLE_COLON )
{
axis = axisSpecifier();
}
else
{
axis = Axis.CHILD;
}
break;
}
case STAR:
{
axis = Axis.CHILD;
break;
}
}
nodeTest( axis );
}
int axisSpecifier() throws org.jaxen.saxpath.SAXPathException
{
int axis = 0;
switch ( LA(1) )
{
case AT:
{
match( AT );
axis = Axis.ATTRIBUTE;
break;
}
case IDENTIFIER:
{
Token token = LT( 1 );
axis = Axis.lookup( token.getTokenText() );
if ( axis == Axis.INVALID_AXIS )
{
throwInvalidAxis( token.getTokenText() );
}
match( IDENTIFIER );
match( DOUBLE_COLON );
break;
}
}
return axis;
}
void nodeTest(int axis) throws org.jaxen.saxpath.SAXPathException
{
switch ( LA(1) )
{
case IDENTIFIER:
{
switch ( LA(2) )
{
case LEFT_PAREN:
{
nodeTypeTest( axis );
break;
}
default:
{
nameTest( axis );
break;
}
}
break;
}
case STAR:
{
nameTest( axis );
break;
}
default:
throw createSyntaxException("Expected <QName> or *");
}
}
void nodeTypeTest(int axis) throws org.jaxen.saxpath.SAXPathException
{
Token nodeTypeToken = match( IDENTIFIER );
String nodeType = nodeTypeToken.getTokenText();
match( LEFT_PAREN );
if ( "processing-instruction".equals( nodeType ) )
{
String piName = "";
if ( LA(1) == LITERAL )
{
piName = match( LITERAL ).getTokenText();
}
match( RIGHT_PAREN );
getXPathHandler().startProcessingInstructionNodeStep( axis,
piName );
predicates();
getXPathHandler().endProcessingInstructionNodeStep();
}
else if ( "node".equals( nodeType ) )
{
match( RIGHT_PAREN );
getXPathHandler().startAllNodeStep( axis );
predicates();
getXPathHandler().endAllNodeStep();
}
else if ( "text".equals( nodeType ) )
{
match( RIGHT_PAREN );
getXPathHandler().startTextNodeStep( axis );
predicates();
getXPathHandler().endTextNodeStep();
}
else if ( "comment".equals( nodeType ) )
{
match( RIGHT_PAREN );
getXPathHandler().startCommentNodeStep( axis );
predicates();
getXPathHandler().endCommentNodeStep();
}
else
{
throw createSyntaxException( "Expected node-type" );
}
}
void nameTest(int axis) throws org.jaxen.saxpath.SAXPathException
{
String prefix = null;
String localName = null;
switch ( LA(2) )
{
case COLON:
{
switch ( LA(1) )
{
case IDENTIFIER:
{
prefix = match( IDENTIFIER ).getTokenText();
match( COLON );
break;
}
}
break;
}
}
switch ( LA(1) )
{
case IDENTIFIER:
{
localName = match( IDENTIFIER ).getTokenText();
break;
}
case STAR:
{
match( STAR );
localName = "*";
break;
}
}
if ( prefix == null )
{
prefix = "";
}
getXPathHandler().startNameStep( axis,
prefix,
localName );
predicates();
getXPathHandler().endNameStep();
}
void abbrStep() throws org.jaxen.saxpath.SAXPathException
{
switch ( LA(1) )
{
case DOT:
{
match( DOT );
getXPathHandler().startAllNodeStep( Axis.SELF );
predicates();
getXPathHandler().endAllNodeStep();
break;
}
case DOT_DOT:
{
match( DOT_DOT );
getXPathHandler().startAllNodeStep( Axis.PARENT );
predicates();
getXPathHandler().endAllNodeStep();
break;
}
}
}
void predicates() throws org.jaxen.saxpath.SAXPathException
{
while (true )
{
if ( LA(1) == LEFT_BRACKET )
{
predicate();
}
else
{
break;
}
}
}
void predicate() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startPredicate();
match( LEFT_BRACKET );
predicateExpr();
match( RIGHT_BRACKET );
getXPathHandler().endPredicate();
}
void predicateExpr() throws org.jaxen.saxpath.SAXPathException
{
expr();
}
void expr() throws org.jaxen.saxpath.SAXPathException
{
orExpr();
}
void orExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startOrExpr();
andExpr();
boolean create = false;
switch ( LA(1) )
{
case OR:
{
create = true;
match( OR );
orExpr();
break;
}
}
getXPathHandler().endOrExpr( create );
}
void andExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startAndExpr();
equalityExpr();
boolean create = false;
switch ( LA(1) )
{
case AND:
{
create = true;
match( AND );
andExpr();
break;
}
}
getXPathHandler().endAndExpr( create );
}
void equalityExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startEqualityExpr();
// XXX why call this twice?
getXPathHandler().startEqualityExpr();
relationalExpr();
int operator = Operator.NO_OP;
switch ( LA(1) )
{
case EQUALS:
{
match( EQUALS );
relationalExpr();
operator = Operator.EQUALS;
break;
}
case NOT_EQUALS:
{
match( NOT_EQUALS );
relationalExpr();
operator = Operator.NOT_EQUALS;
break;
}
}
getXPathHandler().endEqualityExpr( operator );
operator = Operator.NO_OP;
switch ( LA(1) )
{
case EQUALS:
{
match( EQUALS );
equalityExpr();
operator = Operator.EQUALS;
break;
}
case NOT_EQUALS:
{
match( NOT_EQUALS );
equalityExpr();
operator = Operator.NOT_EQUALS;
break;
}
}
getXPathHandler().endEqualityExpr( operator );
}
void relationalExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startRelationalExpr();
getXPathHandler().startRelationalExpr();
additiveExpr();
int operator = Operator.NO_OP;
switch ( LA(1) )
{
case LESS_THAN:
{
match( LESS_THAN );
additiveExpr();
operator = Operator.LESS_THAN;
break;
}
case GREATER_THAN:
{
match( GREATER_THAN );
additiveExpr();
operator = Operator.GREATER_THAN;
break;
}
case LESS_THAN_EQUALS:
{
match( LESS_THAN_EQUALS );
additiveExpr();
operator = Operator.LESS_THAN_EQUALS;
break;
}
case GREATER_THAN_EQUALS:
{
match( GREATER_THAN_EQUALS );
additiveExpr();
operator = Operator.GREATER_THAN_EQUALS;
break;
}
}
getXPathHandler().endRelationalExpr( operator );
operator = Operator.NO_OP;
switch ( LA(1) )
{
case LESS_THAN:
{
match( LESS_THAN );
relationalExpr();
operator = Operator.LESS_THAN;
break;
}
case GREATER_THAN:
{
match( GREATER_THAN );
relationalExpr();
operator = Operator.GREATER_THAN;
break;
}
case LESS_THAN_EQUALS:
{
match( LESS_THAN_EQUALS );
relationalExpr();
operator = Operator.LESS_THAN_EQUALS;
break;
}
case GREATER_THAN_EQUALS:
{
match( GREATER_THAN_EQUALS );
relationalExpr();
operator = Operator.GREATER_THAN_EQUALS;
break;
}
}
getXPathHandler().endRelationalExpr( operator );
}
void additiveExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startAdditiveExpr();
getXPathHandler().startAdditiveExpr();
multiplicativeExpr();
int operator = Operator.NO_OP;
switch ( LA(1) )
{
case PLUS:
{
match( PLUS );
operator = Operator.ADD;
multiplicativeExpr();
break;
}
case MINUS:
{
match( MINUS );
operator = Operator.SUBTRACT;
multiplicativeExpr();
break;
}
}
getXPathHandler().endAdditiveExpr( operator );
operator = Operator.NO_OP;
switch ( LA(1) )
{
case PLUS:
{
match( PLUS );
operator = Operator.ADD;
additiveExpr();
break;
}
case MINUS:
{
match( MINUS );
operator = Operator.SUBTRACT;
additiveExpr();
break;
}
default:
{
operator = Operator.NO_OP;
break;
}
}
getXPathHandler().endAdditiveExpr( operator );
}
void multiplicativeExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startMultiplicativeExpr();
getXPathHandler().startMultiplicativeExpr();
unaryExpr();
int operator = Operator.NO_OP;
switch ( LA(1) )
{
case STAR:
{
match( STAR );
unaryExpr();
operator = Operator.MULTIPLY;
break;
}
case DIV:
{
match( DIV );
unaryExpr();
operator = Operator.DIV;
break;
}
case MOD:
{
match( MOD );
unaryExpr();
operator = Operator.MOD;
break;
}
}
getXPathHandler().endMultiplicativeExpr( operator );
operator = Operator.NO_OP;
switch ( LA(1) )
{
case STAR:
{
match( STAR );
multiplicativeExpr();
operator = Operator.MULTIPLY;
break;
}
case DIV:
{
match( DIV );
multiplicativeExpr();
operator = Operator.DIV;
break;
}
case MOD:
{
match( MOD );
multiplicativeExpr();
operator = Operator.MOD;
break;
}
}
getXPathHandler().endMultiplicativeExpr( operator );
}
void unaryExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startUnaryExpr();
int operator = Operator.NO_OP;
switch ( LA(1) )
{
case MINUS:
{
match( MINUS );
operator = Operator.NEGATIVE;
unaryExpr();
break;
}
default:
{
unionExpr();
break;
}
}
getXPathHandler().endUnaryExpr( operator );
}
void unionExpr() throws org.jaxen.saxpath.SAXPathException
{
getXPathHandler().startUnionExpr();
pathExpr();
boolean create = false;
switch ( LA(1) )
{
case PIPE:
{
match( PIPE );
create = true;
expr();
break;
}
}
getXPathHandler().endUnionExpr( create );
}
Token match(int tokenType) throws XPathSyntaxException
{
LT(1);
Token token = (Token) tokens.get( 0 );
if ( token.getTokenType() == tokenType )
{
tokens.removeFirst();
return token;
}
throw createSyntaxException( "Expected: " + getTokenText( tokenType ) );
}
int LA(int position)
{
return LT(position).getTokenType();
}
Token LT(int position)
{
if ( tokens.size() <= ( position - 1 ) )
{
for ( int i = 0 ; i < position ; ++i )
{
tokens.add( lexer.nextToken() );
}
}
return (Token) tokens.get( position - 1 );
}
boolean isNodeTypeName(Token name)
{
String text = name.getTokenText();
if ( "node".equals( text )
||
"comment".equals( text )
||
"text".equals( text )
||
"processing-instruction".equals( text ) )
{
return true;
}
return false;
}
XPathSyntaxException createSyntaxException(String message)
{
String xpath = this.lexer.getXPath();
int position = LT(1).getTokenBegin();
return new XPathSyntaxException( xpath,
position,
message );
}
void throwInvalidAxis(String invalidAxis) throws org.jaxen.saxpath.SAXPathException
{
String xpath = this.lexer.getXPath();
int position = LT(1).getTokenBegin();
String message = "Expected valid axis name instead of [" + invalidAxis + "]";
throw new XPathSyntaxException( xpath,
position,
message );
}
void throwUnexpected() throws org.jaxen.saxpath.SAXPathException
{
throw createSyntaxException( "Unexpected '" + LT(1).getTokenText() + "'" );
}
}
|
package org.apache.xmlrpc.applet;
import java.applet.*;
import java.util.*;
import java.net.MalformedURLException;
public class JSXmlRpcApplet extends XmlRpcApplet {
public Object loaded = null;
private String errorMessage;
private Vector arguments;
public void init () {
initClient ();
arguments = new Vector ();
loaded = Boolean.TRUE;
System.out.println ("JSXmlRpcApplet initialized");
}
// add ints (primitive != object) to structs, vectors
public void addIntArg (int value) { arguments.addElement (new Integer (value)); }
public void addIntArgToStruct (Hashtable struct, String key, int value) { struct.put (key, new Integer (value)); }
public void addIntArgToArray (Vector ary, int value) { ary.addElement (new Integer (value)); }
// add floats/doubles to structs, vectors
public void addDoubleArg (float value) { arguments.addElement (new Double (value)); }
public void addDoubleArgToStruct (Hashtable struct, String key, float value) { struct.put (key, new Double (value)); }
public void addDoubleArgToArray (Vector ary, float value) { ary.addElement (new Double (value)); }
public void addDoubleArg (double value) { arguments.addElement (new Double (value)); }
public void addDoubleArgToStruct (Hashtable struct, String key, double value) { struct.put (key, new Double (value)); }
public void addDoubleArgToArray (Vector ary, double value) { ary.addElement (new Double (value)); }
// add bools to structs, vectors
public void addBooleanArg (boolean value) { arguments.addElement (new Boolean (value)); }
public void addBooleanArgToStruct (Hashtable struct, String key, boolean value) { struct.put (key, new Boolean(value)); }
public void addBooleanArgToArray (Vector ary, boolean value) { ary.addElement (new Boolean(value)); }
// add Dates to structs, vectors; the Date argument is milliseconds as from System.currentTimeMillis() (seems to be the way)
public void addDateArg (long dateNo) { arguments.addElement (new Date(dateNo)); }
public void addDateArgToStruct (Hashtable struct, String key, long dateNo) { struct.put (key, new Date (dateNo)); }
public void addDateArgToArray (Vector ary, long dateNo) { ary.addElement (new Date (dateNo)); }
// add String arguments
public void addStringArg (String str) { arguments.addElement (str); }
public void addStringArgToStruct (Hashtable struct, String key, String str) { struct.put (key, str); }
public void addStringArgToArray (Vector ary, String str) { ary.addElement (str); }
// add Array arguments
public Vector addArrayArg () {
Vector v = new Vector ();
arguments.addElement (v);
return v;
}
public Vector addArrayArgToStruct (Hashtable struct, String key) {
Vector v = new Vector ();
struct.put (key, v);
return v;
}
public Vector addArrayArgToArray (Vector ary) {
Vector v = new Vector ();
ary.addElement (v);
return v;
}
// add Struct arguments
public Hashtable addStructArg () {
Hashtable ht = new Hashtable ();
arguments.addElement (ht);
return ht;
}
public Hashtable addStructArgToStruct (Hashtable struct, String key) {
Hashtable ht = new Hashtable ();
struct.put (key, ht);
return ht;
}
public Hashtable addStructArgToArray (Vector ary) {
Hashtable ht = new Hashtable ();
ary.addElement (ht);
return ht;
}
// get the errorMessage, null if none
public String getErrorMessage() { return errorMessage; }
public void reset () {
arguments = new Vector ();
}
public Object execute (String methodName) {
// XmlRpcSupport.setDebug (true);
errorMessage = null;
showStatus ("Connecting to Server...");
Object returnValue = null;
try {
returnValue = execute (methodName, arguments);
} catch (Exception e) {
errorMessage = e.getMessage ();
if (errorMessage == null || errorMessage.length () == 0)
errorMessage = e.toString ();
}
// reset argument array for reuse
arguments = new Vector ();
showStatus ("");
return returnValue;
}
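    // Illustrative sketch (not from the original source) of how page-side
    // script or test code might drive this applet; the server method name
    // "examples.echo" is an assumption:
    //
    //   applet.reset ();
    //   applet.addStringArg ("hello");
    //   applet.addIntArg (42);
    //   Object result = applet.execute ("examples.echo");
    //   if (applet.getErrorMessage () != null) {
    //       // the call failed; report applet.getErrorMessage () to the user
    //   }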
}
|
package org.concord.sensor.nativelib;
import org.concord.framework.data.stream.DataListener;
import org.concord.framework.data.stream.DataStreamEvent;
import org.concord.framework.text.UserMessageHandler;
import org.concord.sensor.DeviceConfig;
import org.concord.sensor.ExperimentConfig;
import org.concord.sensor.SensorConfig;
import org.concord.sensor.SensorDataManager;
import org.concord.sensor.SensorDataProducer;
import org.concord.sensor.SensorRequest;
import org.concord.sensor.device.SensorDevice;
import org.concord.sensor.device.impl.DeviceConfigImpl;
import org.concord.sensor.device.impl.InterfaceManager;
import org.concord.sensor.device.impl.JavaDeviceFactory;
import org.concord.sensor.impl.ExperimentRequestImpl;
import org.concord.sensor.impl.SensorRequestImpl;
import org.concord.sensor.impl.SensorUtilJava;
import org.concord.sensor.state.PrintUserMessageHandler;
/**
* @author Information Services
*
* TODO To change the template for this generated type comment go to
* Window - Preferences - Java - Code Style - Code Templates
*/
public class TestNative
{
public static void main(String[] args)
{
UserMessageHandler messenger = new PrintUserMessageHandler();
SensorDataManager sdManager = new InterfaceManager(messenger);
// This should be loaded from the OTrunk. Each computer
// might have a different set of devices configured.
DeviceConfig [] dConfigs = new DeviceConfig[1];
dConfigs[0] = new DeviceConfigImpl(JavaDeviceFactory.VERNIER_GO_LINK, null);
((InterfaceManager)sdManager).setDeviceConfigs(dConfigs);
// Check what is attached, this isn't necessary if you know what you want
// to be attached. But sometimes you want the user to see what is attached
SensorDevice sensorDevice = sdManager.getSensorDevice();
ExperimentConfig currentConfig = sensorDevice.getCurrentConfig();
SensorUtilJava.printExperimentConfig(currentConfig);
ExperimentRequestImpl request = new ExperimentRequestImpl();
request.setPeriod(0.1f);
request.setNumberOfSamples(-1);
SensorRequestImpl sensor = new SensorRequestImpl();
sensor.setDisplayPrecision(-2);
sensor.setRequiredMax(Float.NaN);
sensor.setRequiredMin(Float.NaN);
sensor.setPort(0);
sensor.setStepSize(0.1f);
sensor.setType(SensorConfig.QUANTITY_TEMPERATURE);
request.setSensorRequests(new SensorRequest [] {sensor});
SensorDataProducer sDataProducer =
sdManager.createDataProducer();
sDataProducer.configure(request);
sDataProducer.addDataListener(new DataListener(){
public void dataReceived(DataStreamEvent dataEvent)
{
int numSamples = dataEvent.getNumSamples();
float [] data = dataEvent.getData();
if(numSamples > 0) {
System.out.println("" + numSamples + " " +
data[0]);
System.out.flush();
}
else {
System.out.println("" + numSamples);
}
}
public void dataStreamEvent(DataStreamEvent dataEvent)
{
String eventString;
int eventType = dataEvent.getType();
if(eventType == 1001) return;
switch(eventType) {
case DataStreamEvent.DATA_STARTED:
eventString = "Ready to start";
break;
case DataStreamEvent.DATA_STOPPED:
eventString = "Stopped";
break;
case DataStreamEvent.DATA_DESC_CHANGED:
eventString = "Description changed";
break;
default:
eventString = "Unknown event type";
}
System.out.println("Data Event: " + eventString);
}
});
sDataProducer.start();
System.out.println("started device");
try {
Thread.sleep(1000);
} catch (Exception e) {
e.printStackTrace();
}
sDataProducer.stop();
sDataProducer.close();
System.exit(0);
}
}
|
package org.jcoderz.phoenix.report;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jcoderz.commons.util.Assert;
import org.jcoderz.commons.util.HashCode;
import org.jcoderz.commons.util.IoUtil;
import org.jcoderz.commons.util.ObjectUtil;
/**
* This class holds resource information about a Java class.
*
* @author Michael Griffel
*/
public final class ResourceInfo
{
/** holds a map from resource name to ResourceInfo */
private static final Map<String, ResourceInfo> RESOURCES
= Collections.synchronizedMap(new HashMap<String, ResourceInfo>());
/** holds a map from package / classname to ResourceInfo */
private static final Map<String, ResourceInfo> RESOURCES_BY_CLASS
= Collections.synchronizedMap(new HashMap<String, ResourceInfo>());
private static final String CLASSNAME = ResourceInfo.class.getName();
private static final Logger logger = Logger.getLogger(CLASSNAME);
private final String mResourceName;
private final String mPackage;
private final String mSourcDir;
private final String mClassname;
/** Lazy initialized number of source lines value. */
private int mLinesOfCode = -1;
/** Lazy initialized hash code value. */
private int mHashCode = -1;
private ResourceInfo (String name, String pkg, String sourceDir)
{
if (logger.isLoggable(Level.FINER))
{
logger.entering(CLASSNAME, "<init>",
new Object[]{name, pkg, sourceDir});
}
Assert.notNull(name, "name");
Assert.notNull(sourceDir, "sourceDir");
mResourceName = checkName(name).intern();
mPackage = ObjectUtil.toStringOrEmpty(pkg);
mSourcDir = checkName(sourceDir).intern();
mClassname = determineClassName(name).intern();
if (logger.isLoggable(Level.FINER))
{
logger.exiting(CLASSNAME, "<init>", this);
}
}
/**
 * Registers a new resource with the given parameters.
* @param name the name of the resource.
* @param pkg the Java package of the resource.
* @param sourceDir the source directory of the resource.
* @return the registered resource info.
*/
public static ResourceInfo register (String name, String pkg,
String sourceDir)
{
final String resourceName = checkName(name);
final ResourceInfo result;
if (!RESOURCES.containsKey(resourceName))
{
result = new ResourceInfo(resourceName, pkg, sourceDir);
add(resourceName, result);
}
else
{
result = RESOURCES.get(resourceName);
final ResourceInfo newInfo
= new ResourceInfo(resourceName, pkg, sourceDir);
// sanity check
Assert.assertEquals("Ups, the ResourceInfo w/ the name "
+ resourceName
+ " is already registered with different parameters!",
result, newInfo);
}
return result;
}
/**
* Locates the resource with the given name.
*
* @param name resource name.
* @return the resource for the given name or <tt>null</tt> if not found.
*/
public static ResourceInfo lookup (String name)
{
String lookupName = name;
ResourceInfo result = RESOURCES.get(lookupName);
if (result == null)
{
lookupName = checkName(name);
result = RESOURCES.get(lookupName);
}
if (result == null)
{
logger.finer("### ResourceInfo not found for '"
+ lookupName + "'");
}
return result;
}
/**
* Searches the resource with the given class name and package.
*
* @param packageName resource package name.
* @param className resource class name.
* @return the resource for the given name or <tt>null</tt> if not found.
*/
public static ResourceInfo lookup (String packageName, String className)
{
final String key = combineName(packageName, className);
final ResourceInfo result = RESOURCES_BY_CLASS.get(key);
if (result == null)
{
logger.finer("### ResourceInfo not found for '"
+ key + "'");
}
return result;
}
static String dump ()
{
return RESOURCES.toString();
}
/**
* Returns the number of lines for the given file <tt>filename</tt>.
* @param fileName the name of the file.
* @return the number of lines.
* @throws IOException in case of an I/O problem.
* @throws FileNotFoundException in case the named file does
 *       not exist or is a directory.
*/
public static int countLinesOfCode (String fileName)
throws IOException, FileNotFoundException
{
int counter = 0;
final BufferedReader reader
= new BufferedReader(new FileReader(fileName));
try
{
while (reader.readLine() != null)
{
++counter;
}
}
finally
{
IoUtil.close(reader);
}
return counter;
}
/** {@inheritDoc} */
public boolean equals (Object obj)
{
boolean result = false;
if (this == obj)
{
result = true;
}
else if (obj instanceof ResourceInfo)
{
final ResourceInfo o = (ResourceInfo) obj;
result = ObjectUtil.equals(mResourceName, o.getResourceName())
&& ObjectUtil.equals(mPackage, o.getPackage())
&& ObjectUtil.equals(mSourcDir, o.getSourcDir());
}
else
{
result = false;
}
return result;
}
/** {@inheritDoc} */
public int hashCode ()
{
if (mHashCode == -1)
{
final HashCode hashCode = new HashCode();
hashCode.hash(mResourceName);
hashCode.hash(mPackage);
hashCode.hash(mSourcDir);
mHashCode = hashCode.hashCode();
}
return mHashCode;
}
/**
* Returns the linesOfCode.
*
* @return the linesOfCode.
*/
public int getLinesOfCode ()
{
if (mLinesOfCode == -1)
{
try
{
mLinesOfCode = countLinesOfCode(mResourceName);
}
catch (IOException e)
{
mLinesOfCode = 0;
logger.log(Level.FINER,
"Cannot read the resource with the name "
+ mResourceName, e);
}
}
return mLinesOfCode;
}
/**
* Returns the package.
*
* @return the package.
*/
public String getPackage ()
{
return mPackage;
}
/**
* Returns the resourceName.
*
* @return the resourceName.
*/
public String getResourceName ()
{
return mResourceName;
}
/**
* Returns the sourcDir.
*
* @return the sourcDir.
*/
public String getSourcDir ()
{
return mSourcDir;
}
/** {@inheritDoc} */
public String toString ()
{
return "[ResourceInfo: name=" + mResourceName + ", pkg=" + mPackage
+ ", sourceDir=" + mSourcDir + ", mClassname=" + mClassname
+ "]";
}
/**
* Returns the class name.
* @return the class name.
*/
public String getClassname ()
{
return mClassname;
}
private String determineClassName (String name)
{
String result = "";
final String magic = ".java";
if (name.endsWith(magic))
{
final int lastSlashPos = name.lastIndexOf(File.separator);
if (lastSlashPos != -1)
{
result = name.substring(lastSlashPos + File.separator.length());
result = result.substring(0, result.indexOf(magic));
}
}
return result;
}
private static void add (String name, ResourceInfo info)
{
synchronized (RESOURCES)
{
RESOURCES.put(name, info);
RESOURCES_BY_CLASS.put(
combineName(info.getPackage(), info.getClassname()), info);
}
}
private static String combineName (String packageName, String className)
{
      // NOTE: the separator literal below was truncated in the source; "." is a reconstruction.
      return ObjectUtil.toStringOrEmpty(packageName) + "."
            + ObjectUtil.toStringOrEmpty(className);
}
private static String checkName (String lookupName)
{
String name = ObjectUtil.toStringOrEmpty(lookupName);
if (!RESOURCES.containsKey(name))
{
try
{
name = new File(name).getCanonicalPath();
}
catch (IOException ex)
{
throw new RuntimeException(
"Uuppss, this was not expected in 'getCanonicalPath' "
+ " for '" + name + "'.",
ex);
}
}
return name;
}
}
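// --- Usage sketch (not part of the original source) ---------------------------
// Illustrates how a report tool might register a source file once and then
// resolve it either by its (canonicalized) resource name or by package/class
// name. The path below is a placeholder and need not exist; in that case
// getLinesOfCode() simply falls back to 0, as implemented above.
class ResourceInfoUsageSketch
{
   public static void main (String[] args)
   {
      final ResourceInfo info = ResourceInfo.register(
            "src/java/org/jcoderz/phoenix/report/Sample.java",
            "org.jcoderz.phoenix.report",
            "src/java");

      // lookup by resource name (the name is canonicalized internally)
      System.out.println(ResourceInfo.lookup(info.getResourceName()));

      // lookup by package and class name (the class name is derived from the file name)
      System.out.println(
            ResourceInfo.lookup("org.jcoderz.phoenix.report", "Sample"));

      // lines of code are counted lazily and cached per resource
      System.out.println("LOC: " + info.getLinesOfCode());
   }
}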
|
package org.jdesktop.swingx.painter;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.geom.RoundRectangle2D;
import org.jdesktop.swingx.color.ColorUtil;
/**
* A specific painter that paints an "infinite progress" like animation.
*
*/
public class BusyPainter<T> extends AbstractPainter<T> {
private int frame = -1;
private boolean skewed = false;
private int points = 8;
private float barWidth = 4;
private float barLength = 8;
private float centerDistance = 5;
private Color baseColor = new Color(200,200,200);
private Color highlightColor = Color.BLACK;
private int trailLength = 4;
    /** {@inheritDoc} */
@Override
protected void doPaint(Graphics2D g, T t, int width, int height) {
RoundRectangle2D rect = new RoundRectangle2D.Float(getCenterDistance(), -getBarWidth()/2,
getBarLength(), getBarWidth(),
getBarWidth(), getBarWidth());
if(skewed) {
rect = new RoundRectangle2D.Float(5,getBarWidth()/2,8, getBarWidth(),
getBarWidth(), getBarWidth());
}
g.setColor(Color.GRAY);
g.translate(width/2,height/2);
for(int i=0; i<getPoints(); i++) {
g.setColor(calcFrameColor(i));
g.fill(rect);
g.rotate(Math.PI*2.0/(double)getPoints());
}
}
private Color calcFrameColor(final int i) {
if(frame == -1) {
return getBaseColor();
}
for(int t=0; t<getTrailLength(); t++) {
if(i == (frame-t+getPoints())%getPoints()) {
float terp = 1-((float)(getTrailLength()-t))/(float)getTrailLength();
return ColorUtil.interpolate(
getBaseColor(),
getHighlightColor(), terp);
}
}
return getBaseColor();
}
public int getFrame() {
return frame;
}
public void setFrame(int frame) {
this.frame = frame;
}
public Color getBaseColor() {
return baseColor;
}
public void setBaseColor(Color baseColor) {
Color old = getBaseColor();
this.baseColor = baseColor;
firePropertyChange("baseColor", old, getBaseColor());
}
public Color getHighlightColor() {
return highlightColor;
}
public void setHighlightColor(Color highlightColor) {
Color old = getHighlightColor();
this.highlightColor = highlightColor;
firePropertyChange("highlightColor", old, getHighlightColor());
}
public float getBarWidth() {
return barWidth;
}
public void setBarWidth(float barWidth) {
float old = getBarWidth();
this.barWidth = barWidth;
firePropertyChange("barWidth", old, getBarWidth());
}
public float getBarLength() {
return barLength;
}
public void setBarLength(float barLength) {
float old = getBarLength();
this.barLength = barLength;
firePropertyChange("barLength", old, getBarLength());
}
public float getCenterDistance() {
return centerDistance;
}
public void setCenterDistance(float centerDistance) {
float old = getCenterDistance();
this.centerDistance = centerDistance;
firePropertyChange("centerDistance", old, getCenterDistance());
}
public int getPoints() {
return points;
}
public void setPoints(int points) {
int old = getPoints();
this.points = points;
firePropertyChange("points", old, getPoints());
}
public int getTrailLength() {
return trailLength;
}
public void setTrailLength(int trailLength) {
int old = getTrailLength();
this.trailLength = trailLength;
firePropertyChange("trailLength", old, getTrailLength());
}
}
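// --- Usage sketch (not part of the original source) ---------------------------
// Shows one way to animate the painter: advance the frame index on a Swing timer
// and repaint a host component that delegates its painting to the BusyPainter.
// The component wiring is illustrative only and assumes AbstractPainter exposes
// the standard Painter.paint(Graphics2D, T, int, int) entry point.
class BusyPainterUsageSketch extends javax.swing.JComponent {
    private final BusyPainter<Object> busyPainter = new BusyPainter<Object>();

    BusyPainterUsageSketch() {
        // step through the points roughly ten times per second; wrapping the
        // frame index keeps the highlighted "trail" cycling around the circle
        new javax.swing.Timer(100, e -> {
            busyPainter.setFrame((busyPainter.getFrame() + 1) % busyPainter.getPoints());
            repaint();
        }).start();
    }

    @Override
    protected void paintComponent(java.awt.Graphics g) {
        java.awt.Graphics2D g2 = (java.awt.Graphics2D) g.create();
        try {
            // the painter translates to the component center itself
            busyPainter.paint(g2, null, getWidth(), getHeight());
        } finally {
            g2.dispose();
        }
    }
}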
|
package test.org.relique.jdbc.csv;
import java.io.*;
import java.sql.*;
import java.util.Properties;
import junit.framework.*;
/**This class is used to test the CsvJdbc driver.
*
* @author Jonathan Ackerman
* @author JD Evora
* @version $Id: TestCsvDriver.java,v 1.7 2004/08/09 21:37:29 jackerm Exp $
*/
public class TestCsvDriver extends TestCase
{
public static final String SAMPLE_FILES_LOCATION_PROPERTY="sample.files.location";
private String filePath;
public TestCsvDriver(String name)
{
super(name);
}
protected void setUp()
{
filePath=System.getProperty(SAMPLE_FILES_LOCATION_PROPERTY);
if (filePath == null)
filePath=RunTests.DEFAULT_FILEPATH;
assertNotNull("Sample files location property not set !", filePath);
// load CSV driver
try
{
Class.forName("org.relique.jdbc.csv.CsvDriver");
}
catch (ClassNotFoundException e)
{
fail("Driver is not in the CLASSPATH -> " + e);
}
}
public void testWithDefaultValues()
{
try
{
Connection conn = DriverManager.getConnection("jdbc:relique:csv:" + filePath );
Statement stmt = conn.createStatement();
ResultSet results = stmt.executeQuery("SELECT NAME,ID,EXTRA_FIELD FROM sample");
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("Q123"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("\"S,\""));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("F"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("A123"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Jonathan Ackerman"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("A"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("B234"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Grady O'Neil"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("B"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("C456"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Susan, Peter and Dave"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("C"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("D789"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Amelia \"meals\" Maurice"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("E"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("X234"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Peter \"peg leg\", Jimmy & Samantha \"Sam\""));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("G"));
results.close();
stmt.close();
conn.close();
}
catch(Exception e)
{
fail("Unexpected Exception: " + e);
}
}
public void testWithProperties()
{
try
{
Properties props = new Properties();
props.put("fileExtension",".txt");
props.put("separator",";");
Connection conn = DriverManager.getConnection("jdbc:relique:csv:" + filePath,props);
Statement stmt = conn.createStatement();
ResultSet results = stmt.executeQuery("SELECT NAME,ID,EXTRA_FIELD FROM sample");
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("Q123"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("\"S;\""));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("F"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("A123"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Jonathan Ackerman"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("A"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("B234"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Grady O'Neil"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("B"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("C456"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Susan; Peter and Dave"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("C"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("D789"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Amelia \"meals\" Maurice"));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("E"));
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("X234"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("Peter \"peg leg\"; Jimmy & Samantha \"Sam\""));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("G"));
results.close();
stmt.close();
conn.close();
}
catch(Exception e)
{
fail("Unexpected Exception: " + e);
}
}
public void testMetadata()
{
try
{
Connection conn = DriverManager.getConnection("jdbc:relique:csv:" + filePath);
Statement stmt = conn.createStatement();
ResultSet results = stmt.executeQuery("SELECT * FROM sample3");
ResultSetMetaData metadata = results.getMetaData();
assertTrue("Incorrect Table Name",metadata.getTableName(0).equals("sample3"));
assertTrue("Incorrect Column Name 1",metadata.getColumnName(1).equals("column 1"));
assertTrue("Incorrect Column Name 2",metadata.getColumnName(2).equals("column \"2\" two"));
assertTrue("Incorrect Column Name 3",metadata.getColumnName(3).equals("Column 3"));
assertTrue("Incorrect Column Name 4",metadata.getColumnName(4).equals("CoLuMn4"));
assertTrue("Incorrect Column Name 5",metadata.getColumnName(5).equals("COLumn5 "));
results.close();
stmt.close();
conn.close();
}
catch(Exception e)
{
fail("Unexpected Exception: " + e);
}
}
public void testMetadataWithSupressedHeaders()
{
try
{
Properties props = new Properties();
props.put("suppressHeaders","true");
Connection conn = DriverManager.getConnection("jdbc:relique:csv:" + filePath,props);
Statement stmt = conn.createStatement();
ResultSet results = stmt.executeQuery("SELECT * FROM sample");
ResultSetMetaData metadata = results.getMetaData();
assertTrue("Incorrect Table Name",metadata.getTableName(0).equals("sample"));
assertTrue("Incorrect Column Name 1",metadata.getColumnName(1).equals("COLUMN1"));
assertTrue("Incorrect Column Name 2",metadata.getColumnName(2).equals("COLUMN2"));
assertTrue("Incorrect Column Name 3",metadata.getColumnName(3).equals("COLUMN3"));
results.close();
stmt.close();
conn.close();
}
catch(Exception e)
{
fail("Unexpected Exception: " + e);
}
}
public void testWithSuppressedHeaders()
{
try
{
Properties props = new Properties();
props.put("suppressHeaders","true");
Connection conn = DriverManager.getConnection("jdbc:relique:csv:" + filePath,props );
Statement stmt = conn.createStatement();
ResultSet results = stmt.executeQuery("SELECT * FROM sample");
// header is now treated as normal data line
results.next();
assertTrue("Incorrect COLUMN1 Value",results.getString("COLUMN1").equals("ID"));
assertTrue("Incorrect COLUMN2 Value",results.getString("COLUMN2").equals("NAME"));
assertTrue("Incorrect COLUMN3 Value",results.getString("COLUMN3").equals("EXTRA_FIELD"));
results.next();
assertTrue("Incorrect COLUMN1 Value",results.getString("COLUMN1").equals("Q123"));
assertTrue("Incorrect COLUMN2 Value",results.getString("COLUMN2").equals("\"S,\""));
assertTrue("Incorrect COLUMN3 Value",results.getString("COLUMN3").equals("F"));
results.next();
assertTrue("Incorrect COLUMN1 Value",results.getString("COLUMN1").equals("A123"));
assertTrue("Incorrect COLUMN2 Value",results.getString("COLUMN2").equals("Jonathan Ackerman"));
assertTrue("Incorrect COLUMN3 Value",results.getString("COLUMN3").equals("A"));
results.next();
assertTrue("Incorrect COLUMN1 Value",results.getString("COLUMN1").equals("B234"));
assertTrue("Incorrect COLUMN2 Value",results.getString("COLUMN2").equals("Grady O'Neil"));
assertTrue("Incorrect COLUMN3 Value",results.getString("COLUMN3").equals("B"));
results.next();
assertTrue("Incorrect COLUMN1 Value",results.getString("COLUMN1").equals("C456"));
assertTrue("Incorrect COLUMN2 Value",results.getString("COLUMN2").equals("Susan, Peter and Dave"));
assertTrue("Incorrect COLUMN3 Value",results.getString("COLUMN3").equals("C"));
results.next();
assertTrue("Incorrect COLUMN1 Value",results.getString("COLUMN1").equals("D789"));
assertTrue("Incorrect COLUMN2 Value",results.getString("COLUMN2").equals("Amelia \"meals\" Maurice"));
assertTrue("Incorrect COLUMN3 Value",results.getString("COLUMN3").equals("E"));
results.next();
assertTrue("Incorrect COLUMN1 Value",results.getString("COLUMN1").equals("X234"));
assertTrue("Incorrect COLUMN2 Value",results.getString("COLUMN2").equals("Peter \"peg leg\", Jimmy & Samantha \"Sam\""));
assertTrue("Incorrect COLUMN3 Value",results.getString("COLUMN3").equals("G"));
results.close();
stmt.close();
conn.close();
}
catch(Exception e)
{
fail("Unexpected Exception: " + e);
}
}
public void testRelativePath()
{
try
{
// break up file path to test relative paths
String parentPath = new File(filePath).getParent();
String subPath = new File(filePath).getName();
Connection conn = DriverManager.getConnection("jdbc:relique:csv:" + parentPath );
Statement stmt = conn.createStatement();
ResultSet results = stmt.executeQuery("SELECT NAME,ID,EXTRA_FIELD FROM ." +
File.separator + subPath +
File.separator + "sample");
results.next();
assertTrue("Incorrect ID Value",results.getString("ID").equals("Q123"));
assertTrue("Incorrect NAME Value",results.getString("NAME").equals("\"S,\""));
assertTrue("Incorrect EXTRA_FIELD Value",results.getString("EXTRA_FIELD").equals("F"));
results.close();
stmt.close();
conn.close();
}
catch(Exception e)
{
fail("Unexpected Exception: " + e);
}
}
}
|
package archimulator.sim.common;
import archimulator.model.*;
import archimulator.service.ServiceManager;
import archimulator.sim.common.report.ReportNode;
import archimulator.sim.common.report.Reportable;
import archimulator.sim.core.BasicProcessor;
import archimulator.sim.core.Core;
import archimulator.sim.core.Processor;
import archimulator.sim.core.Thread;
import archimulator.sim.core.speculativePrecomputation.DynamicSpeculativePrecomputationHelper;
import archimulator.sim.isa.Memory;
import archimulator.sim.os.Context;
import archimulator.sim.os.Kernel;
import archimulator.sim.uncore.BasicMemoryHierarchy;
import archimulator.sim.uncore.MemoryHierarchy;
import archimulator.sim.uncore.cache.Interval.IntervalHelper;
import archimulator.sim.uncore.cache.interference.CacheInteractionHelper;
import archimulator.sim.uncore.cache.replacement.reuseDistancePrediction.ReuseDistancePredictionHelper;
import archimulator.sim.uncore.cache.stackDistanceProfile.StackDistanceProfilingHelper;
import archimulator.sim.uncore.coherence.msi.controller.GeneralCacheController;
import archimulator.sim.uncore.coherence.msi.flow.CacheCoherenceFlow;
import archimulator.sim.uncore.delinquentLoad.DelinquentLoadIdentificationHelper;
import archimulator.sim.uncore.helperThread.FeedbackDirectedHelperThreadingHelper;
import archimulator.sim.uncore.helperThread.HelperThreadL2CacheRequestProfilingHelper;
import archimulator.sim.uncore.helperThread.hotspot.HotspotProfilingHelper;
import archimulator.sim.uncore.mlp.BLPProfilingHelper;
import archimulator.sim.uncore.mlp.MLPProfilingHelper;
import archimulator.sim.uncore.tlb.TranslationLookasideBuffer;
import archimulator.util.RuntimeHelper;
import net.pickapack.collection.tree.NodeHelper;
import net.pickapack.dateTime.DateHelper;
import net.pickapack.event.BlockingEventDispatcher;
import net.pickapack.event.CycleAccurateEventQueue;
import net.pickapack.io.file.FileHelper;
import net.pickapack.util.Reference;
import org.apache.commons.lang.time.DurationFormatUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* Simulation.
*
* @author Min Cai
*/
public abstract class Simulation implements SimulationObject, Reportable {
protected Reference<Kernel> kernelRef;
private String title;
private long beginTime;
private long endTime;
private boolean running;
private boolean stopForced;
private SimulationType type;
private Processor processor;
private Experiment experiment;
private BlockingEventDispatcher<SimulationEvent> blockingEventDispatcher;
private CycleAccurateEventQueue cycleAccurateEventQueue;
    // TODO: the following helper fields should be refactored out of this class.
private LatencyTrackingHelper latencyTrackingHelper;
private StackDistanceProfilingHelper stackDistanceProfilingHelper;
private ReuseDistancePredictionHelper reuseDistancePredictionHelper;
private HotspotProfilingHelper hotspotProfilingHelper;
private HelperThreadL2CacheRequestProfilingHelper helperThreadL2CacheRequestProfilingHelper;
private CacheInteractionHelper cacheInteractionHelper;
private FeedbackDirectedHelperThreadingHelper feedbackDirectedHelperThreadingHelper;
private DelinquentLoadIdentificationHelper delinquentLoadIdentificationHelper;
private DynamicSpeculativePrecomputationHelper dynamicSpeculativePrecomputationHelper;
private MLPProfilingHelper mlpProfilingHelper;
private BLPProfilingHelper blpProfilingHelper;
private IntervalHelper intervalHelper;
private RuntimeHelper runtimeHelper;
/**
* Current (max) dynamic instruction ID.
*/
public long currentDynamicInstructionId;
/**
* Current (max) reorder buffer entry ID.
*/
public long currentReorderBufferEntryId;
/**
* Current (max) decode buffer entry ID.
*/
public long currentDecodeBufferEntryId;
/**
* Current (max) memory hierarchy access ID.
*/
public long currentMemoryHierarchyAccessId;
/**
* Current (max) net message ID.
*/
public long currentNetMessageId;
/**
* Current (max) cache coherence flow ID.
*/
public long currentCacheCoherenceFlowId;
/**
* Pending cache coherence flows.
*/
public List<CacheCoherenceFlow> pendingFlows = new ArrayList<>();
/**
* Create a simulation.
*
* @param type the simulation type
* @param experiment the experiment object
* @param blockingEventDispatcher the blocking event dispatcher
* @param cycleAccurateEventQueue the cycle accurate event queue
* @param kernelRef the kernel reference
*/
public Simulation(SimulationType type, Experiment experiment, BlockingEventDispatcher<SimulationEvent> blockingEventDispatcher, CycleAccurateEventQueue cycleAccurateEventQueue, Reference<Kernel> kernelRef) {
this.experiment = experiment;
this.blockingEventDispatcher = blockingEventDispatcher;
this.cycleAccurateEventQueue = cycleAccurateEventQueue;
this.title = experiment.getId() + "/" + getPrefix();
this.type = type;
File cwdFile = new File(this.getWorkingDirectory());
if (cwdFile.exists() && !FileHelper.deleteDir(cwdFile) || !cwdFile.mkdirs()) {
throw new RuntimeException();
}
this.kernelRef = kernelRef;
Kernel kernel = this.prepareKernel();
if (!this.blockingEventDispatcher.isEmpty()) {
throw new IllegalArgumentException();
}
this.processor = new BasicProcessor(this.experiment, this, this.blockingEventDispatcher, this.cycleAccurateEventQueue, kernel, this.prepareMemoryHierarchy());
this.latencyTrackingHelper = new LatencyTrackingHelper(this);
this.stackDistanceProfilingHelper = new StackDistanceProfilingHelper(this);
this.reuseDistancePredictionHelper = new ReuseDistancePredictionHelper(this);
this.hotspotProfilingHelper = new HotspotProfilingHelper(this);
this.helperThreadL2CacheRequestProfilingHelper = new HelperThreadL2CacheRequestProfilingHelper(this);
this.cacheInteractionHelper = new CacheInteractionHelper(this);
this.feedbackDirectedHelperThreadingHelper = new FeedbackDirectedHelperThreadingHelper(this);
this.delinquentLoadIdentificationHelper = new DelinquentLoadIdentificationHelper(this);
if (getExperiment().getArchitecture().getDynamicSpeculativePrecomputationEnabled()) {
this.dynamicSpeculativePrecomputationHelper = new DynamicSpeculativePrecomputationHelper(this);
}
this.mlpProfilingHelper = new MLPProfilingHelper(this);
this.blpProfilingHelper = new BLPProfilingHelper(this);
this.intervalHelper = new IntervalHelper(this);
ServiceManager.getBlockingEventDispatcher().addListener(ExperimentStateChangedEvent.class, event -> {
if (running
&& event.getSender().getId() == experiment.getId()
&& ServiceManager.getExperimentService().getExperimentById(this.experiment.getId()).getState() == ExperimentState.PENDING) {
stopForced = true;
}
});
}
/**
* Perform the simulation.
*/
public void simulate() {
this.running = true;
Logger.infof(Logger.SIMULATOR, "begin simulation: %s", this.cycleAccurateEventQueue.getCurrentCycle(), this.getTitle());
Logger.info(Logger.SIMULATOR, "", this.cycleAccurateEventQueue.getCurrentCycle());
Logger.infof(Logger.SIMULATION, " architecture: %s", this.cycleAccurateEventQueue.getCurrentCycle(), getExperiment().getArchitecture());
Logger.info(Logger.SIMULATOR, "", this.cycleAccurateEventQueue.getCurrentCycle());
this.beginTime = DateHelper.toTick(new Date());
this.beginSimulation();
try {
if (this.getType() == SimulationType.FAST_FORWARD) {
this.doFastForward();
} else if (this.getType() == SimulationType.CACHE_WARMUP) {
this.doCacheWarmup();
} else if (this.getType() == SimulationType.MEASUREMENT) {
this.doMeasurement();
}
} catch (Exception e) {
e.printStackTrace();
this.endTime = DateHelper.toTick(new Date());
this.collectStats();
this.endSimulation();
this.running = false;
throw new RuntimeException(e);
}
this.endTime = DateHelper.toTick(new Date());
this.collectStats();
this.endSimulation();
this.running = false;
}
/**
* Collect the statistics.
*/
private void collectStats() {
final List<ExperimentStat> stats = new ArrayList<>();
ReportNode rootReportNode = new ReportNode(null, "");
this.getRuntimeHelper().dumpStats(rootReportNode);
this.dumpStats(rootReportNode);
this.processor.dumpStats(rootReportNode);
for (Memory memory : this.getProcessor().getKernel().getMemories()) {
memory.dumpStats(rootReportNode);
}
for (Core core : this.getProcessor().getCores()) {
core.dumpStats(rootReportNode);
}
for (Thread thread : this.getProcessor().getThreads()) {
thread.dumpStats(rootReportNode);
}
for (TranslationLookasideBuffer tlb : this.getProcessor().getMemoryHierarchy().getTlbs()) {
tlb.dumpStats(rootReportNode);
}
for (GeneralCacheController cacheController : this.getProcessor().getMemoryHierarchy().getCacheControllers()) {
cacheController.dumpStats(rootReportNode);
}
this.getProcessor().getMemoryHierarchy().getMemoryController().dumpStats(rootReportNode);
this.getLatencyTrackingHelper().dumpStats(rootReportNode);
this.getStackDistanceProfilingHelper().dumpStats(rootReportNode);
this.getReuseDistancePredictionHelper().dumpStats(rootReportNode);
this.getHotspotProfilingHelper().dumpStats(rootReportNode);
this.getHelperThreadL2CacheRequestProfilingHelper().dumpStats(rootReportNode);
this.getCacheInteractionHelper().dumpStats(rootReportNode);
this.getFeedbackDirectedHelperThreadingHelper().dumpStats(rootReportNode);
this.getDelinquentLoadIdentificationHelper().dumpStats(rootReportNode);
this.getMlpProfilingHelper().dumpStats(rootReportNode);
this.getIntervalHelper().dumpStats(rootReportNode);
this.getProcessor().getMemoryHierarchy().getL2CacheController().getCache().getReplacementPolicy().dumpStats(rootReportNode);
rootReportNode.traverse(node -> {
stats.add(new ExperimentStat(experiment.getId(), getPrefix(), node.getPath(), node.getValue()));
});
if (this.getType() == SimulationType.MEASUREMENT || this.getType() == SimulationType.CACHE_WARMUP) {
getProcessor().getMemoryHierarchy().dumpCacheControllerFsmStats(stats);
}
ServiceManager.getExperimentStatService().addStatsByParent(this.getExperiment(), stats);
}
/**
* Get a value indicating whether it can do fast forwarding one cycle or not.
*
     * @return a value indicating whether it can do fast forwarding one cycle or not
*/
protected abstract boolean canDoFastForwardOneCycle();
/**
* Get a value indicating whether it can do cache warmup one cycle or not.
*
* @return a value indicating whether it can do cache warmup one cycle or not
*/
protected abstract boolean canDoCacheWarmupOneCycle();
/**
* Get a value indicating whether it can do measurement one cycle or not.
*
* @return a value indicating whether it can do measurement one cycle or not
*/
protected abstract boolean canDoMeasurementOneCycle();
/**
* Begin the simulation.
*/
protected abstract void beginSimulation();
/**
* End the simulation.
*/
protected abstract void endSimulation();
/**
* Do fast forwarding.
*/
public void doFastForward() {
Logger.info(Logger.SIMULATION, "Switched to fast forward mode.", this.getCycleAccurateEventQueue().getCurrentCycle());
while (!this.getProcessor().getKernel().getContexts().isEmpty() && this.canDoFastForwardOneCycle()) {
this.getProcessor().getCores().forEach(Core::doFastForwardOneCycle);
this.advanceOneCycle();
}
}
/**
* Do cache warmup.
*/
public void doCacheWarmup() {
Logger.info(Logger.SIMULATION, "Switched to cache warmup mode.", this.getCycleAccurateEventQueue().getCurrentCycle());
while (!this.getProcessor().getKernel().getContexts().isEmpty() && this.canDoCacheWarmupOneCycle()) {
this.getProcessor().getCores().forEach(Core::doCacheWarmupOneCycle);
this.advanceOneCycle();
}
}
/**
* Do measurement.
*/
public void doMeasurement() {
Logger.info(Logger.SIMULATION, "Switched to measurement mode.", this.getCycleAccurateEventQueue().getCurrentCycle());
while (!this.getProcessor().getKernel().getContexts().isEmpty() && this.canDoMeasurementOneCycle()) {
this.getProcessor().getCores().forEach(Core::doMeasurementOneCycle);
this.advanceOneCycle();
}
}
/**
* Advance one cycle.
*/
private void advanceOneCycle() {
this.doHouseKeeping();
this.getCycleAccurateEventQueue().advanceOneCycle();
if (this.stopForced) {
throw new RuntimeException("Aborted by user.");
}
if (this.getCycleAccurateEventQueue().getCurrentCycle() % (this.type == SimulationType.FAST_FORWARD ? 100000000 : 10000000) == 0) {
ServiceManager.getExperimentService().updateExperiment(this.experiment);
}
}
/**
* Do housekeeping work.
*/
public void doHouseKeeping() {
this.getProcessor().getKernel().advanceOneCycle();
this.getProcessor().updateContextToThreadAssignments();
}
/**
* Prepare the kernel.
*
* @return the kernel that is prepared
*/
public Kernel prepareKernel() {
Kernel kernel = new Kernel(this);
for (final ContextMapping contextMapping : this.getExperiment().getContextMappings()) {
final Context context = Context.load(kernel, this.getWorkingDirectory(), contextMapping);
if (!kernel.map(context, candidateThreadId -> candidateThreadId == contextMapping.getThreadId())) {
throw new RuntimeException();
}
kernel.getContexts().add(context);
}
return kernel;
}
/**
* Prepare the memory hierarchy.
*
* @return the memory hierarchy that is prepared
*/
public MemoryHierarchy prepareMemoryHierarchy() {
return new BasicMemoryHierarchy(this.getExperiment(), this, this.getBlockingEventDispatcher(), this.getCycleAccurateEventQueue());
}
/**
* Dump the tree of the pending cache coherence flows.
*/
public void dumpPendingFlowTree() {
for (CacheCoherenceFlow pendingFlow : this.pendingFlows) {
NodeHelper.print(pendingFlow);
System.out.println();
}
System.out.println();
}
@Override
public void dumpStats(ReportNode reportNode) {
reportNode.getChildren().add(new ReportNode(reportNode, "simulation") {{
getChildren().add(new ReportNode(this, "beginTimeAsString", getBeginTimeAsString()));
getChildren().add(new ReportNode(this, "endTimeAsString", getEndTimeAsString()));
getChildren().add(new ReportNode(this, "duration", getDuration()));
getChildren().add(new ReportNode(this, "durationInSeconds", getDurationInSeconds() + ""));
getChildren().add(new ReportNode(this, "cycleAccurateEventQueue/currentCycle", getCycleAccurateEventQueue().getCurrentCycle() + ""));
}});
}
/**
* Get the simulation type.
*
* @return the simulation type
*/
public SimulationType getType() {
return type;
}
/**
* Get the time in ticks when the simulation begins.
*
* @return the time in ticks when the simulation begins
*/
public long getBeginTime() {
return beginTime;
}
/**
* Get the time in ticks when the simulation ends.
*
* @return the time in ticks when the simulation ends
*/
public long getEndTime() {
return endTime;
}
/**
* Get the string representation of the time when the simulation begins.
*
* @return the string representation of the time when the simulation begins
*/
public String getBeginTimeAsString() {
return DateHelper.toString(beginTime);
}
/**
* Get the string representation of the time when the simulation ends.
*
* @return the string representation of the time when the simulation ends
*/
public String getEndTimeAsString() {
return DateHelper.toString(endTime);
}
/**
* Get the duration in seconds that the simulation lasts.
*
* @return the duration in seconds that the simulation lasts
*/
public long getDurationInSeconds() {
return (this.getEndTime() - this.getBeginTime()) / 1000;
}
/**
* Get the string representation of the duration that the simulation lasts.
*
* @return the string representation of the duration that the simulation lasts
*/
public String getDuration() {
return DurationFormatUtils.formatDurationHMS(this.getEndTime() - this.getBeginTime());
}
/**
* Get the simulation title.
*
* @return the simulation title
*/
public String getTitle() {
return title;
}
/**
* Get the simulation's working directory.
*
* @return the simulation's working directory
*/
public String getWorkingDirectory() {
return "experiments" + File.separator + this.title;
}
/**
* Get the processor object.
*
* @return the processor object
*/
public Processor getProcessor() {
return this.processor;
}
/**
* Get the experiment object.
*
* @return the experiment object
*/
public Experiment getExperiment() {
return experiment;
}
@Override
public Simulation getSimulation() {
return this;
}
/**
* Get the cycle accurate event queue.
*
* @return the cycle accurate event queue
*/
public CycleAccurateEventQueue getCycleAccurateEventQueue() {
return this.cycleAccurateEventQueue;
}
/**
* Get the blocking event dispatcher.
*
* @return the blocking event dispatcher
*/
public BlockingEventDispatcher<SimulationEvent> getBlockingEventDispatcher() {
return this.blockingEventDispatcher;
}
/**
* Get the latency tracking helper.
*
* @return the latency tracking helper
*/
public LatencyTrackingHelper getLatencyTrackingHelper() {
return latencyTrackingHelper;
}
/**
* Get the stack distance profiling helper.
*
* @return the stack distance profiling helper
*/
public StackDistanceProfilingHelper getStackDistanceProfilingHelper() {
return stackDistanceProfilingHelper;
}
/**
* Get the reuse distance prediction helper.
*
* @return the reuse distance prediction helper
*/
public ReuseDistancePredictionHelper getReuseDistancePredictionHelper() {
return reuseDistancePredictionHelper;
}
/**
* Get the hotspot profiling helper.
*
* @return the hotspot profiling helper
*/
public HotspotProfilingHelper getHotspotProfilingHelper() {
return hotspotProfilingHelper;
}
/**
* Get the helper thread L2 cache request profiling helper.
*
* @return the helper thread L2 cache request profiling helper
*/
public HelperThreadL2CacheRequestProfilingHelper getHelperThreadL2CacheRequestProfilingHelper() {
return helperThreadL2CacheRequestProfilingHelper;
}
/**
* Get the cache interaction helper.
*
* @return the cache interaction helper
*/
public CacheInteractionHelper getCacheInteractionHelper() {
return cacheInteractionHelper;
}
/**
* Get the feedback directed helper threading helper.
*
* @return the feedback directed helper threading helper
*/
public FeedbackDirectedHelperThreadingHelper getFeedbackDirectedHelperThreadingHelper() {
return feedbackDirectedHelperThreadingHelper;
}
/**
* Get the delinquent load identification helper.
*
* @return the delinquent load identification helper
*/
public DelinquentLoadIdentificationHelper getDelinquentLoadIdentificationHelper() {
return delinquentLoadIdentificationHelper;
}
/**
* Get the dynamic speculative precomputation helper.
*
* @return the dynamic speculative precomputation helper
*/
public DynamicSpeculativePrecomputationHelper getDynamicSpeculativePrecomputationHelper() {
return dynamicSpeculativePrecomputationHelper;
}
/**
* Get the MLP profiling helper.
*
* @return the MLP profiling helper
*/
public MLPProfilingHelper getMlpProfilingHelper() {
return mlpProfilingHelper;
}
/**
* Get the BLP profiling helper.
*
* @return the BLP profiling helper
*/
public BLPProfilingHelper getBlpProfilingHelper() {
return blpProfilingHelper;
}
/**
* Get the interval helper.
*
* @return the interval helper
*/
public IntervalHelper getIntervalHelper() {
return intervalHelper;
}
/**
* Get the runtime helper.
*
* @return the runtime helper
*/
public RuntimeHelper getRuntimeHelper() {
if (runtimeHelper == null) {
runtimeHelper = new RuntimeHelper();
}
return runtimeHelper;
}
@Override
public String getName() {
return "simulation";
}
/**
* Get the simulation title prefix.
*
* @return the simulation title prefix
*/
public abstract String getPrefix();
}
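// --- Subclassing sketch (not part of the original source) ---------------------
// A minimal concrete Simulation that runs in measurement mode for a fixed number
// of cycles and performs no extra work at the simulation boundaries. It assumes
// no further abstract members exist beyond those declared above; the constructor
// arguments are expected to be supplied by the surrounding framework, and the
// class name and prefix are hypothetical.
class FixedCycleMeasurementSimulationSketch extends Simulation {
    private final long maxCycles;

    public FixedCycleMeasurementSimulationSketch(
            Experiment experiment,
            BlockingEventDispatcher<SimulationEvent> blockingEventDispatcher,
            CycleAccurateEventQueue cycleAccurateEventQueue,
            Reference<Kernel> kernelRef,
            long maxCycles) {
        super(SimulationType.MEASUREMENT, experiment, blockingEventDispatcher, cycleAccurateEventQueue, kernelRef);
        this.maxCycles = maxCycles;
    }

    @Override
    protected boolean canDoFastForwardOneCycle() {
        return false; // this sketch never fast forwards
    }

    @Override
    protected boolean canDoCacheWarmupOneCycle() {
        return false; // this sketch never warms up caches
    }

    @Override
    protected boolean canDoMeasurementOneCycle() {
        // measure until the requested cycle budget is exhausted
        return getCycleAccurateEventQueue().getCurrentCycle() < maxCycles;
    }

    @Override
    protected void beginSimulation() {
        // nothing to prepare beyond what the base class already does
    }

    @Override
    protected void endSimulation() {
        // nothing to tear down
    }

    @Override
    public String getPrefix() {
        return "fixedCycleMeasurement";
    }
}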
|
package bj.pranie.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import javax.sql.DataSource;
@Configuration
@EnableWebSecurity
public class WebSecurityConfig extends WebSecurityConfigurerAdapter {
@Autowired
CustomAuthenticationProvider customAuthenticationProvider;
@Override
protected void configure(HttpSecurity http) throws Exception {
http.authorizeRequests()
.antMatchers("/",
"/logout",
"/wm*/*",
|
package cn.momia.mapi.api.index;
import cn.momia.api.course.CourseServiceApi;
import cn.momia.api.course.SubjectServiceApi;
import cn.momia.api.course.dto.course.Course;
import cn.momia.api.course.dto.subject.Subject;
import cn.momia.api.discuss.DiscussServiceApi;
import cn.momia.api.discuss.dto.DiscussTopic;
import cn.momia.api.user.UserServiceApi;
import cn.momia.api.user.dto.User;
import cn.momia.common.core.dto.PagedList;
import cn.momia.common.core.http.MomiaHttpResponse;
import cn.momia.common.webapp.config.Configuration;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
@RestController
@RequestMapping(value = "/v3/index")
public class IndexV3Api extends AbstractIndexApi {
private static final Logger LOGGER = LoggerFactory.getLogger(IndexV3Api.class);
private static final int HOT_COURSE = 1;
private static final int NEW_COURSE = 2;
private static Date lastChangeTime = new Date();
private static int subjectCourseType = Math.random() > 0.5 ? HOT_COURSE : NEW_COURSE;
@Autowired private CourseServiceApi courseServiceApi;
@Autowired private SubjectServiceApi subjectServiceApi;
@Autowired private DiscussServiceApi discussServiceApi;
@Autowired private UserServiceApi userServiceApi;
@RequestMapping(method = RequestMethod.GET)
public MomiaHttpResponse index(HttpServletRequest request,
@RequestParam(required = false, defaultValue = "") String utoken,
@RequestParam(value = "city") int cityId,
@RequestParam int start) {
if (cityId < 0) return MomiaHttpResponse.FAILED("CityID");
if (start < 0) return MomiaHttpResponse.FAILED("start");
JSONObject indexJson = new JSONObject();
if (start == 0) {
int platform = getPlatform(request);
String version = getVersion(request);
indexJson.put("banners", getBanners(cityId, platform, version));
indexJson.put("newUser", false);
if (!StringUtils.isBlank(utoken)) {
User user = userServiceApi.get(utoken);
if (!user.isPayed()) {
indexJson.put("newUser", true);
indexJson.put("eventsTitle", "");
indexJson.put("events", getEvents(cityId, platform, version, 2));
} else {
indexJson.put("eventsTitle", "");
indexJson.put("events", getEvents(cityId, platform, version, 1));
}
} else {
indexJson.put("newUser", true);
indexJson.put("eventsTitle", "");
indexJson.put("events", getEvents(cityId, platform, version, 2));
}
List<Subject> subjects = getSubjects(cityId);
Date now = new Date();
            if (now.getTime() - lastChangeTime.getTime() >= 3 * 24 * 60 * 60 * 1000) {
                if (subjectCourseType == HOT_COURSE) subjectCourseType = NEW_COURSE;
                else subjectCourseType = HOT_COURSE;
                lastChangeTime = now; // restart the 3-day window so the type only flips once per period
            }
int currentSubjectCourseType = subjectCourseType;
if (currentSubjectCourseType == HOT_COURSE) sortCoursesByJoined(subjects);
else sortCoursesByAddTime(subjects);
JSONArray subjectsJson = (JSONArray) JSON.toJSON(subjects);
for (int i = 0; i < subjectsJson.size(); i++) {
JSONObject subjectJson = subjectsJson.getJSONObject(i);
if (currentSubjectCourseType == HOT_COURSE) subjectJson.put("coursesTitle", "");
else subjectJson.put("coursesTitle", "");
}
indexJson.put("subjects", subjectsJson);
indexJson.put("subjectCourseType", currentSubjectCourseType);
List<DiscussTopic> topics = discussServiceApi.listTopics(cityId, 0, 3).getList();
if (!topics.isEmpty()) topics = topics.subList(0, 1);
for (DiscussTopic topic : topics) {
topic.setCover(completeLargeImg(topic.getCover()));
}
indexJson.put("topics", topics);
}
indexJson.put("courses", getRecommendCourses(cityId, start));
return MomiaHttpResponse.SUCCESS(indexJson);
}
private List<Subject> getSubjects(int cityId) {
List<Subject> subjects = subjectServiceApi.list(cityId);
for (Subject subject : subjects) {
completeLargeImg(subject);
}
return subjects;
}
private void sortCoursesByJoined(List<Subject> subjects) {
for (Subject subject : subjects) {
List<Course> courses = subject.getCourses();
if (!courses.isEmpty()) {
Collections.sort(courses, new Comparator<Course>() {
@Override
public int compare(Course c1, Course c2) {
return c2.getJoined() - c1.getJoined();
}
});
subject.setCourses(courses.subList(0, Math.min(courses.size(), 3)));
}
}
}
private void sortCoursesByAddTime(List<Subject> subjects) {
for (Subject subject : subjects) {
List<Course> courses = subject.getCourses();
if (!courses.isEmpty()) {
Collections.sort(courses, new Comparator<Course>() {
@Override
public int compare(Course c1, Course c2) {
return (int) (c2.getAddTime().getTime() - c1.getAddTime().getTime());
}
});
subject.setCourses(courses.subList(0, Math.min(courses.size(), 3)));
}
}
}
private PagedList<Course> getRecommendCourses(int cityId, int start) {
try {
PagedList<Course> courses = courseServiceApi.listRecommend(cityId, start, Configuration.getInt("PageSize.Course"));
for (Course course : courses.getList()) {
completeLargeImg(course);
}
return courses;
} catch (Exception e) {
LOGGER.error("fail to list recommend courses", e);
return PagedList.EMPTY;
}
}
}
|
package co.com.codesoftware.beans;
import java.io.Serializable;
import java.util.ArrayList;
import javax.annotation.PostConstruct;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.faces.context.FacesContext;
import org.primefaces.model.menu.DefaultMenuItem;
import org.primefaces.model.menu.DefaultMenuModel;
import org.primefaces.model.menu.DefaultSubMenu;
import org.primefaces.model.menu.MenuModel;
import co.com.codesoftware.entity.PuntoMenuEntity;
import co.com.codesoftware.servicio.usuario.UsuarioEntity;
@ManagedBean
@ViewScoped
public class MenuBean implements Serializable {
private static final long serialVersionUID = 1L;
private String listaPermisos = ".InPr13.";
private ArrayList<PuntoMenuEntity> menu;
private MenuModel menuDinamico;
private UsuarioEntity objetoSesion;
@PostConstruct
public void init() {
this.objetoSesion = (UsuarioEntity) FacesContext.getCurrentInstance().getExternalContext().getSessionMap()
.get("dataSession");
this.listaPermisos = this.objetoSesion.getPerfil().getPermisos();
this.creaMenu();
}
/**
	 * Builds the application's main menu.
*/
public void creaMenu() {
if (menuDinamico == null) {
this.menuDinamico = new DefaultMenuModel();
}
DefaultMenuItem tercerPunto = new DefaultMenuItem("Cerrar Sesion");
tercerPunto.setIcon("fa fa-close");
tercerPunto.setCommand("#{loginBean.cerrarSesion}");
		// Fourth menu item
// DefaultSubMenu tercerPunto = new DefaultSubMenu();
DefaultMenuItem cuartoPunto = new DefaultMenuItem("SIGEMCO");
cuartoPunto.setCommand("#{loginBean.cambioSigemco}");
this.menuDinamico.addElement(this.generaMenuAdmon());
this.menuDinamico.addElement(this.generaMenuProd());
this.menuDinamico.addElement(this.generaMenuParametros());
this.menuDinamico.addElement(this.generaMenuReportes());
this.menuDinamico.addElement(this.generaMenuFacturacion());
this.menuDinamico.addElement(this.generaMenuImportaciones());
this.menuDinamico.addElement(this.generaMenuContabilidad());
this.menuDinamico.addElement(tercerPunto);
this.menuDinamico.addElement(cuartoPunto);
}
public DefaultSubMenu generaMenuParametros() {
DefaultSubMenu segundoPunto = new DefaultSubMenu();
try {
			// PARAMETERS
if (this.listaPermisos.contains(".Per1.") || this.listaPermisos.contains(".Per2.")) {
segundoPunto.setLabel("PARAMETROS");
segundoPunto.setIcon("fa fa-archive");
if (this.listaPermisos.contains(".Per3.")) {
DefaultSubMenu segPunNivUno = new DefaultSubMenu();
segPunNivUno.setLabel("Categoria");
DefaultMenuItem segPunNvUnoAsoc = new DefaultMenuItem("Asociar Cat y Sub Cat");
segPunNvUnoAsoc.setCommand("/ACTION/SUBCATEGORIA/asocCatSubCat.jsf");
segundoPunto.addElement(segPunNivUno);
segPunNivUno.addElement(segPunNvUnoAsoc);
}
if (this.listaPermisos.contains(".Per4.")) {
DefaultSubMenu segPunNivDos = new DefaultSubMenu();
segPunNivDos.setLabel("Precio Masivo");
DefaultMenuItem segPunNvDosPrecMasiv = new DefaultMenuItem("Consulta Porcentajes");
segPunNvDosPrecMasiv.setCommand("/ACTION/PRECIOS/ConsPorcPrecioMasivo.jsf");
DefaultMenuItem segPunNvDosPrecMasivIns = new DefaultMenuItem("Insercion Parametros");
segPunNvDosPrecMasivIns.setCommand("/ACTION/PRECIOS/InsercionParametrosPrecio.jsf");
DefaultMenuItem segPunNvDosPrecMasivEje = new DefaultMenuItem("Ejecucion");
segPunNvDosPrecMasivEje.setCommand("/ACTION/PRECIOS/EjecuionParaPrecioMasivo.jsf");
segundoPunto.addElement(segPunNivDos);
segPunNivDos.addElement(segPunNvDosPrecMasiv);
segPunNivDos.addElement(segPunNvDosPrecMasivIns);
segPunNivDos.addElement(segPunNvDosPrecMasivEje);
}
}
} catch (Exception e) {
e.printStackTrace();
}
return segundoPunto;
}
/**
	 * Builds the reports menu.
*
* @return
*/
public DefaultSubMenu generaMenuReportes() {
DefaultSubMenu menuPrincipal = new DefaultSubMenu();
try {
			// REPORTS
if (this.listaPermisos.contains(".Per3.") || this.listaPermisos.contains(".Per4.")) {
menuPrincipal.setLabel("REPORTES");
menuPrincipal.setIcon("fa fa-file-excel-o");
if (this.listaPermisos.contains(".Per3.")) {
DefaultMenuItem basico = new DefaultMenuItem("Basico");
basico.setCommand("/ACTION/REPORTES/reportes.jsf");
menuPrincipal.addElement(basico);
}
if (this.listaPermisos.contains(".Per4.")) {
DefaultMenuItem comVentas = new DefaultMenuItem("Compras y Ventas");
comVentas.setCommand("/ACTION/REPORTES/comprasVentas.xhtml");
menuPrincipal.addElement(comVentas);
}
}
} catch (Exception e) {
e.printStackTrace();
}
return menuPrincipal;
}
public DefaultSubMenu generaMenuProd() {
DefaultSubMenu menuPrincipal = new DefaultSubMenu();
try {
if (this.listaPermisos.contains(".Inv1.") || this.listaPermisos.contains(".Inv2.")
|| this.listaPermisos.contains(".Inv3.") || this.listaPermisos.contains(".Inv5.")
|| this.listaPermisos.contains(".Inv5.") || this.listaPermisos.contains(".Inv6.")
|| this.listaPermisos.contains(".Inv7.") || this.listaPermisos.contains(".Inv8.")
|| this.listaPermisos.contains(".Inv9.")) {
menuPrincipal.setLabel("INVENTARIOS");
menuPrincipal.setIcon("fa fa-bank");
if (this.listaPermisos.contains(".Inv1.") || this.listaPermisos.contains(".Inv2.")
|| this.listaPermisos.contains(".Inv3.") || this.listaPermisos.contains(".Inv5.")
|| this.listaPermisos.contains(".Adm5.") || this.listaPermisos.contains(".Inv6.")
|| this.listaPermisos.contains(".Inv7.")) {
					// Second level
DefaultSubMenu productos = new DefaultSubMenu();
productos.setLabel("Productos ");
menuPrincipal.addElement(productos);
if (this.listaPermisos.contains(".Inv1.")) {
DefaultMenuItem tercerNivel = new DefaultMenuItem("Factura de Compra");
tercerNivel.setCommand("/ACTION/FACTURACION/facturaCompraTmp.jsf?faces-redirect=false");
productos.addElement(tercerNivel);
}
if (this.listaPermisos.contains(".Inv2.")) {
DefaultMenuItem tercerNivelFacturaCompra = new DefaultMenuItem("Consulta Fac Compra");
tercerNivelFacturaCompra
.setCommand("/ACTION/PRODUCTOS/consultaFacturaCompras.jsf?faces-redirect=false");
productos.addElement(tercerNivelFacturaCompra);
}
if (this.listaPermisos.contains(".Inv3.")) {
DefaultMenuItem tercerNivelFacturaCompraTmp = new DefaultMenuItem("Consulta Fac Compra tmp");
tercerNivelFacturaCompraTmp
.setCommand("/ACTION/FACTURACION/consultaFacCompraTmp.jsf?faces-redirect=false");
productos.addElement(tercerNivelFacturaCompraTmp);
}
if (this.listaPermisos.contains(".Inv4.")) {
DefaultMenuItem tercerNivelPrueba = new DefaultMenuItem("Insertar Productos");
tercerNivelPrueba.setCommand("/ACTION/PRODUCTOS/insertaProductos.jsf?faces-redirect=false");
productos.addElement(tercerNivelPrueba);
}
if (this.listaPermisos.contains(".Inv5.")) {
DefaultMenuItem tercerNivelPrecio = new DefaultMenuItem("Parametrizacion de precio");
tercerNivelPrecio.setCommand("/ACTION/PRODUCTOS/precioProductos.jsf?faces-redirect=false");
productos.addElement(tercerNivelPrecio);
}
if (this.listaPermisos.contains(".Inv6.")) {
DefaultMenuItem tercerNivelConsGeneral = new DefaultMenuItem("Consulta General");
tercerNivelConsGeneral
.setCommand("/ACTION/PRODUCTOS/consGeneralProductos.jsf?faces-redirect=false");
productos.addElement(tercerNivelConsGeneral);
}
if (this.listaPermisos.contains(".Inv7.")) {
DefaultMenuItem tercerNivelSolicitudes = new DefaultMenuItem("Solicitudes");
tercerNivelSolicitudes
.setCommand("/ACTION/SOLICITUDES/consultaSolicitudes.jsf?faces-redirect=false");
productos.addElement(tercerNivelSolicitudes);
}
}
if (this.listaPermisos.contains(".Inv8.") || this.listaPermisos.contains(".Inv9.")) {
DefaultSubMenu cargues = new DefaultSubMenu();
cargues.setLabel("Cargues Masivos ");
menuPrincipal.addElement(cargues);
if (this.listaPermisos.contains(".Inv8.")) {
DefaultMenuItem cargueProductos = new DefaultMenuItem("Cargue Productos");
cargueProductos.setCommand("/ACTION/PRODUCTOS/cargueProductos.jsf?faces-redirect=false");
cargues.addElement(cargueProductos);
}
if (this.listaPermisos.contains(".Inv9.")) {
DefaultMenuItem cargueProd = new DefaultMenuItem("Solo Prod");
cargueProd.setCommand("/ACTION/PRODUCTOS/cargueSoloProducto.jsf?faces-redirect=false");
cargues.addElement(cargueProd);
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
return menuPrincipal;
}
/**
	 * Builds the administration menu, with all of its children.
*
* @return
*/
public DefaultSubMenu generaMenuAdmon() {
DefaultSubMenu menuPrincipal = new DefaultSubMenu();
try {
if (this.listaPermisos.contains(".Adm1.") || this.listaPermisos.contains(".Adm2.")
|| this.listaPermisos.contains(".Adm3.") || this.listaPermisos.contains(".Adm4.")
|| this.listaPermisos.contains(".Adm5.") || this.listaPermisos.contains(".Adm6.")
|| this.listaPermisos.contains(".Adm7.") || this.listaPermisos.contains(".Adm8.")) {
menuPrincipal.setLabel("ADMINISTRACION");
menuPrincipal.setIcon("fa fa-users");
				// Build the second-level submenus
				// Generate the second level of the menu
if (this.listaPermisos.contains(".Adm1.")) {
DefaultMenuItem segundoNivel = new DefaultMenuItem("Resolución Fact.");
segundoNivel.setCommand("/ACTION/ADMIN/resolucionFacturacion.jsf");
menuPrincipal.addElement(segundoNivel);
}
if (this.listaPermisos.contains(".Adm2.")) {
					// Branches (sedes) item
DefaultMenuItem segundoNivelUno = new DefaultMenuItem("Sedes.");
segundoNivelUno.setCommand("/ACTION/ADMIN/sedes.jsf");
menuPrincipal.addElement(segundoNivelUno);
}
if (this.listaPermisos.contains(".Adm3.")) {
					// Counts (conteos) item
DefaultMenuItem segundoNivelDos = new DefaultMenuItem("Conteo.");
segundoNivelDos.setCommand("/ACTION/ADMIN/conteos.jsf");
menuPrincipal.addElement(segundoNivelDos);
}
if (this.listaPermisos.contains(".Adm4.")) {
					// Menu item for parameters
DefaultMenuItem segundoNivelTres = new DefaultMenuItem("Parametros.");
segundoNivelTres.setCommand("/ACTION/ADMIN/parametrosGenerales.jsf");
menuPrincipal.addElement(segundoNivelTres);
}
if (this.listaPermisos.contains(".Adm5.")) {
					// Menu item for users
DefaultMenuItem segundoNivelCuatro = new DefaultMenuItem("Usuarios.");
segundoNivelCuatro.setCommand("/ACTION/ADMIN/usuarios.jsf");
menuPrincipal.addElement(segundoNivelCuatro);
}
if (this.listaPermisos.contains(".Adm6.")) {
DefaultMenuItem segundoNivelCinco = new DefaultMenuItem("Proveedores.");
segundoNivelCinco.setCommand("/ACTION/ADMIN/proveedores.jsf");
menuPrincipal.addElement(segundoNivelCinco);
}
if (this.listaPermisos.contains(".Adm7.")) {
DefaultMenuItem segundoNivelSeis = new DefaultMenuItem("Clientes.");
segundoNivelSeis.setCommand("/ACTION/ADMIN/clientes.jsf");
menuPrincipal.addElement(segundoNivelSeis);
}
if (this.listaPermisos.contains(".Adm8.")) {
DefaultMenuItem segundoNivelSiete = new DefaultMenuItem("Perfiles.");
segundoNivelSiete.setCommand("/ACTION/ADMIN/perfiles.jsf");
menuPrincipal.addElement(segundoNivelSiete);
}
}
} catch (Exception e) {
e.printStackTrace();
}
return menuPrincipal;
}
/**
* Builds the billing menu with all of its children.
*
* @return the billing submenu
*/
public DefaultSubMenu generaMenuFacturacion() {
DefaultSubMenu menuPrincipal = new DefaultSubMenu();
try {
menuPrincipal.setLabel("FACTURACION");
menuPrincipal.setIcon("fa fa-files-o");
// Build the second-level menu entries
DefaultMenuItem segundoNivel = new DefaultMenuItem("Consulta Facturas");
segundoNivel.setCommand("/ACTION/FACTURACION/consultaFacturas.jsf");
DefaultSubMenu segundoNivelDos = new DefaultSubMenu("Remisiones");
DefaultMenuItem consultaRemi = new DefaultMenuItem("Consulta Remisiones");
consultaRemi.setCommand("/ACTION/FACTURACION/remisionFacturacion.jsf");
DefaultMenuItem consultaPagos = new DefaultMenuItem("Registro Pagos");
consultaPagos.setCommand("/ACTION/FACTURACION/pagosRemision.jsf");
segundoNivelDos.addElement(consultaRemi);
segundoNivelDos.addElement(consultaPagos);
// Add to the main menu entry
menuPrincipal.addElement(segundoNivel);
menuPrincipal.addElement(segundoNivelDos);
} catch (Exception e) {
e.printStackTrace();
}
return menuPrincipal;
}
/**
* Builds the imports menu with its children.
*
* @return the imports submenu
*/
public DefaultSubMenu generaMenuImportaciones() {
DefaultSubMenu menuPrincipal = new DefaultSubMenu();
try {
menuPrincipal.setLabel("IMPORTACIONES");
menuPrincipal.setIcon("fa fa-briefcase");
// Build the second-level menu entries
DefaultMenuItem segundoNivel = new DefaultMenuItem("Info Principal");
DefaultMenuItem segundoNivelConsulta = new DefaultMenuItem("Consulta Gral");
segundoNivel.setCommand("/ACTION/IMPORTACION/adminImportacion.jsf");
segundoNivelConsulta.setCommand("/ACTION/IMPORTACION/consultaImportacion.jsf");
// Add to the main menu entry
menuPrincipal.addElement(segundoNivel);
menuPrincipal.addElement(segundoNivelConsulta);
} catch (Exception e) {
e.printStackTrace();
}
return menuPrincipal;
}
/**
* Builds the accounting menu with its children.
*
* @return the accounting submenu
*/
public DefaultSubMenu generaMenuContabilidad() {
DefaultSubMenu menuPrincipal = new DefaultSubMenu();
try {
menuPrincipal.setLabel("CONTABILIDAD");
menuPrincipal.setIcon("fa fa-calculator");
// Build the second-level menu entries
DefaultSubMenu segundoNivel = new DefaultSubMenu("PUC");
DefaultMenuItem tercerNivelConsulta = new DefaultMenuItem("Consulta Gral");
// tercerNivelConsulta.setCommand("/ACTION/IMPORTACION/adminImportacion.jsf");
tercerNivelConsulta.setCommand("/ACTION/CONTABILIDAD/CONSULTAS/consultaClase.jsf");
DefaultSubMenu segundoNivelDos = new DefaultSubMenu("CONSULTA MV.");
DefaultMenuItem tercerNivelConsultaDos = new DefaultMenuItem("POR TIPO DOCUMENTO");
tercerNivelConsultaDos.setCommand("/ACTION/CONTABILIDAD/CONSULTAS/consultaMoviContable.jsf");
// Add to the main menu entry
menuPrincipal.addElement(segundoNivel);
segundoNivel.addElement(tercerNivelConsulta);
segundoNivelDos.addElement(tercerNivelConsultaDos);
menuPrincipal.addElement(segundoNivelDos);
} catch (Exception e) {
e.printStackTrace();
}
return menuPrincipal;
}
public String getListaPermisos() {
return listaPermisos;
}
public void setListaPermisos(String listaPermisos) {
this.listaPermisos = listaPermisos;
}
public ArrayList<PuntoMenuEntity> getMenu() {
return menu;
}
public void setMenu(ArrayList<PuntoMenuEntity> menu) {
this.menu = menu;
}
public MenuModel getMenuDinamico() {
return menuDinamico;
}
public void setMenuDinamico(MenuModel menuDinamico) {
this.menuDinamico = menuDinamico;
}
public UsuarioEntity getObjetoSesion() {
return objetoSesion;
}
public void setObjetoSesion(UsuarioEntity objetoSesion) {
this.objetoSesion = objetoSesion;
}
}
|
package codemining.math.random;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.lang.math.RandomUtils;
import codemining.util.StatsUtil;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multiset;
/**
* A utility class for sampling from sets and multisets.
*
* @author Miltos Allamanis <m.allamanis@ed.ac.uk>
*
*/
public class SampleUtils {
/**
* Get a uniformly random element from a Multiset.
*
* @param set
* @return
*/
public static <T> T getRandomElement(final Multiset<T> set) {
final int randPos = RandomUtils.nextInt(checkNotNull(set).size());
T selected = null;
int i = 0;
for (final Multiset.Entry<T> entry : set.entrySet()) {
i += entry.getCount();
if (i > randPos) {
selected = entry.getElement();
break;
}
}
return selected;
}
/**
* Get a random index when selecting from the given unnormalized
* log2-probabilities.
*
* @param log2ProbWeights
* @return
*/
public static int getRandomIndex(final double[] log2ProbWeights) {
double max = Double.NEGATIVE_INFINITY;
for (final double weight : log2ProbWeights) {
if (max < weight) {
max = weight;
}
}
final double[] weights = new double[log2ProbWeights.length];
double sum = 0;
for (int i = 0; i < log2ProbWeights.length; i++) {
final double prob = Math.pow(2, log2ProbWeights[i] - max);
sum += prob;
weights[i] = prob;
}
final double randomPoint = RandomUtils.nextDouble() * sum;
double partialSum = 0;
for (int i = 0; i < log2ProbWeights.length; i++) {
partialSum += weights[i];
if (partialSum >= randomPoint) {
return i;
}
}
throw new IllegalStateException("Should not have reached here.");
}
/**
* Return a random T where each T is associated with a double log2
* probability.
*
* @param log2ProbWeights
* @return
*/
public static <T> T getRandomKey(final Map<T, Double> log2ProbWeights) {
final double max = StatsUtil.max(log2ProbWeights.values());
final Map<T, Double> weights = Maps.newHashMap();
double sum = 0;
for (final Entry<T, Double> entry : log2ProbWeights.entrySet()) {
final double prob = Math.pow(2, entry.getValue() - max);
weights.put(entry.getKey(), prob);
sum += prob;
}
final double randomPoint = RandomUtils.nextDouble() * sum;
double partialSum = 0;
for (final Entry<T, Double> entry : weights.entrySet()) {
partialSum += entry.getValue();
if (partialSum >= randomPoint) {
return entry.getKey();
}
}
throw new IllegalStateException("Should not have reached here.");
}
/**
* Partition a set of elements into different weight bins (defined by
* partitionWeights), where each element has a weight defined by
* elementWeights. The order with which the partition is made is given by
* orderedElements.
*
* This method sequentially adds elements into bins until the bin weight has
* been fully filled. No guarantees are given about how accurate the binning
* will be, since this is not an optimization algorithm.
*
* @param elementWeights
* @param partitionWeights
* @param orderedElements
* @return
*/
public static <K, T> Multimap<K, T> partitionGivenOrder(
final Map<T, Double> elementWeights,
final Map<K, Double> partitionWeights,
final List<Entry<T, Double>> orderedElements) {
final Multimap<K, T> partitions = ArrayListMultimap.create();
final double elementWeightSum = StatsUtil.sum(elementWeights.values());
final double partitionWeightSum = StatsUtil.sum(partitionWeights
.values());
final List<Entry<K, Double>> partitionList = Lists
.newArrayList(partitionWeights.entrySet());
int currentPartitionIdx = 0;
double currentElementSum = 0;
double currentPartitionSum = 0;
for (int currentElementIdx = 0; currentElementIdx < orderedElements
.size(); currentElementIdx++) {
double partitionPoint = (currentPartitionSum + partitionList.get(
currentPartitionIdx).getValue())
/ partitionWeightSum;
final double elementWeightPoint = currentElementSum
/ elementWeightSum;
currentElementSum += orderedElements.get(currentElementIdx)
.getValue();
while (partitionPoint <= elementWeightPoint) {
currentPartitionSum += partitionList.get(currentPartitionIdx)
.getValue();
currentPartitionIdx++;
partitionPoint = (currentPartitionSum + partitionList.get(
currentPartitionIdx).getValue())
/ partitionWeightSum;
}
partitions.put(partitionList.get(currentPartitionIdx).getKey(),
orderedElements.get(currentElementIdx).getKey());
}
return partitions;
}
/**
* Partition the elements T in partition, whose relative size is given
* approximately by partitionWeights. Here we do a best effort to match the
* weights.
*
* @param elementWeights
* @param partitionWeights
* @return
*/
public static <K, T> Multimap<K, T> randomPartition(
final Map<T, Double> elementWeights,
final Map<K, Double> partitionWeights) {
final List<Entry<T, Double>> elements = Lists
.newArrayList(elementWeights.entrySet());
Collections.shuffle(elements);
return partitionGivenOrder(elementWeights, partitionWeights, elements);
}
private SampleUtils() {
}
}
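// Minimal usage sketch (an addition, not part of the original API): draws many samples with
// getRandomIndex using unnormalized log2-probabilities. Weights {-1, -2, -2} correspond to
// relative probabilities {0.5, 0.25, 0.25} after the max-shift and exponentiation performed
// above, so the counts should split roughly 2:1:1.
class SampleUtilsDemo {
public static void main(String[] args) {
final double[] log2Weights = { -1, -2, -2 };
final int[] counts = new int[log2Weights.length];
for (int i = 0; i < 10000; i++) {
counts[SampleUtils.getRandomIndex(log2Weights)]++;
}
// Expect roughly [5000, 2500, 2500]
System.out.println(java.util.Arrays.toString(counts));
}
}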
|
package com.aol.spring.demo;
import com.aol.spring.demo.models.Chat;
import com.aol.spring.demo.models.ChatsDao;
import com.aol.spring.demo.models.User;
import com.aol.spring.demo.models.UserDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Date;
import java.util.List;
@Controller
public class MainController {
// Private fields
@Autowired
private UserDao _userDao;
@Autowired
private ChatsDao _chatDao;
private static final Logger logger = LoggerFactory
.getLogger(MainController.class);
@RequestMapping("/")
public ModelAndView index(@CookieValue(value = "name", defaultValue = "") String name,
@CookieValue(value = "email", defaultValue = "") String email) {
if ("".equals(name) || "".equals(email)) {
return loginPage();
} else {
return chatsPage();
}
}
@RequestMapping(value = "/login", method = RequestMethod.GET)
public ModelAndView loginPage() {
return new ModelAndView("login");
}
@RequestMapping(value = "/logout", method = RequestMethod.POST)
public ModelAndView logout(HttpServletRequest request,
HttpServletResponse response) {
for (Cookie cookie : request.getCookies()) {
if ("name".equals(cookie.getName()) || "email".equals(cookie.getName())) {
// clear cookie
cookie.setMaxAge(0);
response.addCookie(cookie);
}
}
return new ModelAndView("redirect:/");
}
@RequestMapping(value = "/create-user", method = RequestMethod.POST)
public ModelAndView createUser(HttpServletRequest request,
HttpServletResponse response,
@RequestParam String name,
@RequestParam String email) {
try {
// create new user object
User user = new User();
user.setName(name);
user.setEmail(email);
user.setTimestamp(new Date().getTime());
// save user in db (if new)
if (_userDao.getByEmail(email) == null) {
_userDao.save(user);
}
// save in cookie
Cookie cookie = new Cookie("name", name);
Cookie cookie1 = new Cookie("email", email);
response.addCookie(cookie);
response.addCookie(cookie1);
} catch (Exception e) {
logger.error("Exception in creating user: ", e.getStackTrace());
}
return new ModelAndView("redirect:/");
}
@RequestMapping(value = "/chats", method = RequestMethod.GET)
public ModelAndView chatsPage() {
return new ModelAndView("chats");
}
@ResponseBody
@RequestMapping(value = "/get-all-chats", method = RequestMethod.GET)
public List<Chat> getAllChats() {
try {
return _chatDao.getAll();
} catch (Exception e) {
logger.error("Exception in fetching chats: ", e.getStackTrace());
}
return null;
}
@ResponseBody
@RequestMapping(value = "/get-all-users", method = RequestMethod.GET)
public List<User> getAllUsers() {
try {
return _userDao.getAll();
} catch (Exception e) {
logger.error("Exception in fetching users: ", e.getStackTrace());
}
return null;
}
@ResponseBody
@RequestMapping(value = "/post-chat", method = RequestMethod.POST)
public ModelAndView postChat(HttpServletRequest request,
HttpServletResponse response,
@RequestParam String message,
@CookieValue(required = true) String name,
@CookieValue(required = true) String email) {
try {
// fetch user info
User user = _userDao.getByEmail(email);
if (user == null) {
return logout(request, response);
}
// create new chat object
Chat chat = new Chat();
chat.setMessage(message);
chat.setUser(user);
chat.setTimestamp(new Date().getTime());
// save chat in db
_chatDao.save(chat);
} catch (Exception e) {
logger.error("Exception in saving chat: ", e.getStackTrace());
}
return null;
}
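// Example requests against the endpoints above (host, port and values are hypothetical):
//   curl -c cookies.txt -X POST -d "name=Ann&email=ann@example.com" http://localhost:8080/create-user
//   curl -b cookies.txt -X POST -d "message=hi" http://localhost:8080/post-chat
//   curl http://localhost:8080/get-all-chats
// The name/email cookies set by /create-user identify the author of subsequent chats.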
}
|
package com.blackberry.log4j;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.util.Properties;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.apache.log4j.spi.LoggingEvent;
import com.blackberry.kafka.lowoverhead.MetricRegistrySingleton;
import com.blackberry.kafka.lowoverhead.producer.LowOverheadProducer;
import com.blackberry.kafka.lowoverhead.producer.ProducerConfiguration;
import com.codahale.metrics.MetricRegistry;
public class KafkaAppender extends AppenderSkeleton {
private MetricRegistry metrics;
private static final Charset UTF8 = Charset.forName("UTF8");
private Logger logger;
private Properties props = new Properties();
private String clientId = null;
private String topic;
private String key = null;
private LowOverheadProducer producer;
@Override
public void activateOptions() {
PropertyConfigurator.configure(this.getClass().getClassLoader()
.getResource("kafka.appender.log4j.properties"));
logger = Logger.getLogger(this.getClass());
// clientid and key default to the hostname
if (clientId == null) {
try {
clientId = InetAddress.getLocalHost().getHostName();
props.setProperty("client.id", clientId);
} catch (UnknownHostException e) {
logger.error("Error getting hostname for default clientId while configuring "
+ this.getClass());
}
}
if (key == null) {
try {
key = InetAddress.getLocalHost().getHostName();
props.setProperty("key", key);
} catch (UnknownHostException e) {
logger.error("Error getting hostname for default key while configuring "
+ this.getClass());
}
}
metrics = MetricRegistrySingleton.getInstance().getMetricsRegistry();
MetricRegistrySingleton.getInstance().enableJmx();
ProducerConfiguration conf = null;
try {
conf = new ProducerConfiguration(props);
} catch (Exception e) {
logger.error("Error creating " + LowOverheadProducer.class
+ ". Cannot log to Kafka.", e);
}
try {
producer = new LowOverheadProducer(conf, clientId, topic, key,
metrics);
} catch (Exception e) {
logger.error("Error creating " + LowOverheadProducer.class
+ ". Cannot log to Kafka.", e);
}
}
@Override
public void close() {
producer.close();
}
@Override
public boolean requiresLayout() {
return true;
}
private byte[] message;
@Override
protected void append(LoggingEvent e) {
message = getLayout().format(e).getBytes(UTF8);
try {
producer.send(message, 0, message.length);
} catch (Throwable t) {
logger.error("Error sending log to Kafka.", t);
}
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public void setTopic(String topic) {
this.topic = topic;
}
public void setKey(String key) {
this.key = key;
}
public void setMetadataBrokerList(String metadataBrokerList) {
props.setProperty("metadata.broker.list", metadataBrokerList);
}
public void setQueueBufferingMaxMs(String queueBufferingMaxMs) {
props.setProperty("queue.buffering.max.ms", queueBufferingMaxMs);
}
public void setRequrestRequiredAcks(String requestRequiredAcks) {
props.setProperty("request.required.acks", requestRequiredAcks);
}
public void setRequestTimeoutMs(String requestTimeoutMs) {
props.setProperty("request.timeout.ms", requestTimeoutMs);
}
public void setMessageSendMaxRetries(String messageSendMaxRetries) {
props.setProperty("message.send.max.retries", messageSendMaxRetries);
}
public void setRetryBackoffMs(String retryBackoffMs) {
props.setProperty("retry.backoff.ms", retryBackoffMs);
}
public void setMessageBufferSize(String messageBufferSize) {
props.setProperty("message.buffer.size", messageBufferSize);
}
public void setSendBufferSize(String sendBufferSize) {
props.setProperty("send.buffer.size", sendBufferSize);
}
public void setResponseBufferSize(String responseBufferSize) {
props.setProperty("response.buffer.size", responseBufferSize);
}
public void setCompressionCodec(String compressionCodec) {
props.setProperty("compression.codec", compressionCodec);
}
public void setCompressionLevel(String compressionLevel) {
props.setProperty("compression.level", compressionLevel);
}
public void setTopicMetadataRefreshIntervalMs(
String topicMetadataRefreshIntervalMs) {
props.setProperty("topic.metadata.refresh.interval.ms",
topicMetadataRefreshIntervalMs);
}
public void setQueueEnqueueTimeoutMs(String queueEnqueueTimeoutMs) {
props.setProperty("queue.enqueue.timeout.ms", queueEnqueueTimeoutMs);
}
public void setSendBufferBytes(String sendBufferBytes) {
props.setProperty("send.buffer.bytes", sendBufferBytes);
}
}
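// Hypothetical log4j.properties wiring for the appender above (broker addresses, topic and
// layout pattern are assumptions, not taken from the source):
//   log4j.rootLogger=INFO, KAFKA
//   log4j.appender.KAFKA=com.blackberry.log4j.KafkaAppender
//   log4j.appender.KAFKA.Topic=application-logs
//   log4j.appender.KAFKA.MetadataBrokerList=broker1:9092,broker2:9092
//   log4j.appender.KAFKA.layout=org.apache.log4j.PatternLayout
//   log4j.appender.KAFKA.layout.ConversionPattern=%d %p %c - %m%n
// Each property maps onto the corresponding setter (setTopic, setMetadataBrokerList, ...)
// when log4j configures the appender; requiresLayout() is true, so a layout is mandatory.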
|
package com.davidsoergel.dsutils;
/**
* @author lorax
* @version 1.0
*/
public class MathUtils
{
private static final int FACTORIAL_LIMIT = 100;
private static double[] factorials = new double[FACTORIAL_LIMIT + 1];
// log(x+y) = log(x) + log [1 + exp[log(y) - log(x)]]
// for x >= y
/* double logsum(double x, double y)
{
double largest = Math.max(x, y);
double smallest = Math.min(x, y);
return largest + Math.log(1.0 + Math.exp(smallest - largest));
}
*/
// Still stuck on how to implement this:
// need to know which is bigger, exp(x) or exp(y)+exp(z)
static double MAX_EXPONENT = Math.log(Double.MAX_VALUE);
public static double minmax(double min, double b, double max)
{
return Math.max(Math.min(b, max), min);
}
public static long choose(int n, int m)
{
if (m == 0)
{
return 1;
}
double result;
result = factorial(n) / (factorial(m) * factorial(n - m));
return (long) result;
}
public static double factorial(int n) throws ArithmeticException
{
if (n > FACTORIAL_LIMIT)
{
return StirlingFactorial(n);
}
if (factorials[n] == 0)
{
factorials[n] = n * factorial(n - 1);
}
return factorials[n];
}
public static double StirlingFactorial(int n)
{
double result = Math.sqrt(2.0 * Math.PI * n) * Math.pow(n, n) * Math.pow(Math.E, -n);
return result;
}
static
{
factorials[0] = 1;
factorials[1] = 1;
}
/**
* log(sum(exp(args)))
*
* @param x
* @param y
*/
public static double logsum(double x, double y)
{
// scale all the log probabilities up to avoid underflow.
double B = MAX_EXPONENT - Math.log(3) - Math.max(x, y);
double result = Math.log(Math.exp(x + B) + Math.exp(y + B)) - B;
// logger.debug("Log sum: " + x + " + " + y + " = " + result + " (Scale factor: " + B + ")");
return result;
}
/**
* log(sum(exp(args)))
*
* @param x
* @param y
* @param z
*/
public static double logsum(double x, double y, double z)
{
// scale all the log probabilities up to avoid underflow.
double B = MAX_EXPONENT - Math.log(3) - Math.max(x, Math.max(y, z));
double result = Math.log(Math.exp(x + B) + Math.exp(y + B) + Math.exp(z + B)) - B;
if (Double.isNaN(result))
{
result = Double.NEGATIVE_INFINITY;
//xklogger.info("Log sum produced NaN: " + x + " + " + y + " + " + z + " = " + result + " (Scale factor: " + B + ")", new Exception());
//logger.debug("Log sum produced NaN!");
// try
// throw new Exception("bogus");
// catch(Exception e) { e.printStackTrace(); }
}
// logger.debug("Log sum: " + x + " + " + y + " + " + z + " = " + result + " (Scale factor: " + B + ")");
// if (result > 0)
// throw new Error("Positive log probability not allowed!");
return result;
}
private static double[] logTableBelowOne;
private static double[] logTableAboveOne;
public static int logbins;
public static double logResolution;
public static double maxLogArg;
public static void initApproximateLog(int bins, double max)
{
maxLogArg = max;
logbins = bins;
logResolution = (double) 1 / logbins;
logTableBelowOne = new double[logbins];
for (int i = 0; i < logbins; i++)
{
logTableBelowOne[i] = Math.log((double) i / (double) logbins);
}
logTableAboveOne = new double[logbins];
for (int i = 0; i < logbins; i++)
{
logTableAboveOne[i] = Math.log((double) (i * maxLogArg) / (double) logbins);
}
}
public static double approximateLog(double x)
{
if ((x < 0) || (x >= maxLogArg))
{
return Math.log(x);
//throw new MathUtilsException("approximateLog accepts only 0 < x < " + maxLogArg +"; maybe init with different max");
}
if (x < .00001 || (x > .9999 && x < 1))
{
return Math.log(x);
}
if (x < 1)
{
return logTableBelowOne[(int) (x * logbins)];
}
return logTableAboveOne[(int) ((x / maxLogArg) * logbins)];
}
public static boolean equalWithinFPError(double a, double b)
{
double nearlyZero = a - b;
// these errors are generally in the vicinity of 1e-15
// let's be extra permissive, 1e-10 is good enough anyway
return -1e-10 < nearlyZero && nearlyZero < 1e-10;
}
}
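// Quick sanity sketch (an addition, not part of the original class): the scaled logsum should
// agree with log(exp(x) + exp(y)) for moderate arguments while staying finite for very negative
// log-probabilities, where the direct form underflows to -Infinity.
class LogSumDemo
{
public static void main(String[] args)
{
double direct = Math.log(Math.exp(-3.0) + Math.exp(-4.0));
double scaled = MathUtils.logsum(-3.0, -4.0);
System.out.println(direct + " vs " + scaled); // both about -2.6867
System.out.println(MathUtils.logsum(-1000.0, -1001.0)); // about -999.69, still finite
}
}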
|
package com.ds.listing.rest;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
@ApplicationPath("/rest")
public class JaxRsActivator extends Application {
}
|
package com.github.ansell.csvmap;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.script.ScriptException;
import com.fasterxml.jackson.databind.SequenceWriter;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import com.github.ansell.csvutil.CSVUtil;
import joptsimple.OptionException;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
/**
* Maps from one CSV file to another based on the supplied mapping definitions.
*
* @author Peter Ansell p_ansell@yahoo.com
*/
public final class CSVMapper {
/**
* Private constructor for static only class
*/
private CSVMapper() {
}
public static void main(String... args) throws Exception {
final OptionParser parser = new OptionParser();
final OptionSpec<Void> help = parser.accepts("help").forHelp();
final OptionSpec<File> input = parser.accepts("input").withRequiredArg().ofType(File.class).required()
.describedAs("The input CSV file to be mapped.");
final OptionSpec<File> mapping = parser.accepts("mapping").withRequiredArg().ofType(File.class).required()
.describedAs("The mapping file.");
final OptionSpec<File> output = parser.accepts("output").withRequiredArg().ofType(File.class)
.describedAs("The mapped CSV file, or the console if not specified.");
OptionSet options = null;
try {
options = parser.parse(args);
} catch (final OptionException e) {
System.out.println(e.getMessage());
parser.printHelpOn(System.out);
throw e;
}
if (options.has(help)) {
parser.printHelpOn(System.out);
return;
}
final Path inputPath = input.value(options).toPath();
if (!Files.exists(inputPath)) {
throw new FileNotFoundException("Could not find input CSV file: " + inputPath.toString());
}
final Path mappingPath = mapping.value(options).toPath();
if (!Files.exists(mappingPath)) {
throw new FileNotFoundException("Could not find mappng CSV file: " + mappingPath.toString());
}
final Writer writer;
if (options.has(output)) {
writer = Files.newBufferedWriter(output.value(options).toPath());
} else {
writer = new BufferedWriter(new OutputStreamWriter(System.out));
}
try (final BufferedReader readerMapping = Files.newBufferedReader(mappingPath);
final BufferedReader readerInput = Files.newBufferedReader(inputPath);) {
List<CSVMapping> map = extractMappings(readerMapping);
runMapper(readerInput, map, writer);
} finally {
writer.close();
}
}
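// Example invocation (file names are hypothetical):
//   java com.github.ansell.csvmap.CSVMapper --input in.csv --mapping map.csv --output out.csv
// When --output is omitted, the mapped CSV is written to the console instead.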
private static void runMapper(Reader input, List<CSVMapping> map, Writer output)
throws ScriptException, IOException {
Function<CSVMapping, String> outputFields = e -> e.getOutputField();
List<String> outputHeaders = map.stream().map(outputFields).collect(Collectors.toList());
final CsvSchema schema = CSVUtil.buildSchema(outputHeaders);
try (final SequenceWriter csvWriter = CSVUtil.newCSVWriter(output, schema);) {
List<String> inputHeaders = new ArrayList<>();
CSVUtil.streamCSV(input, h -> inputHeaders.addAll(h), (h, l) -> {
return CSVMapping.mapLine(inputHeaders, outputHeaders, l, map);
} , l -> {
// Write out all of the mapped lines for this original line in
// the original CSV file
try {
csvWriter.write(l);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
}
private static List<CSVMapping> extractMappings(Reader input) throws IOException {
List<CSVMapping> result = new ArrayList<>();
List<String> headers = new ArrayList<>();
CSVUtil.streamCSV(input, h -> headers.addAll(h), (h, l) -> {
return CSVMapping.newMapping(l.get(h.indexOf(CSVMapping.LANGUAGE)), l.get(h.indexOf(CSVMapping.OLD_FIELD)),
l.get(h.indexOf(CSVMapping.NEW_FIELD)), l.get(h.indexOf(CSVMapping.MAPPING)));
} , l -> {
result.add(l);
});
return result;
}
}
|
package com.github.davidmoten.security;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Optional;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.CipherOutputStream;
import javax.crypto.KeyGenerator;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import com.google.common.io.ByteStreams;
public final class PPK {
/*
* We load the public cipher and private cipher and we generate an AES
* cipher. The AES cipher is more efficient for encryption and decryption of
* data when the data can be longer than the RSA cipher key size. We use the
* public key to encrypt the AES cipher and prepend the AES encrypted bytes
* with the rsa encrypted AES secret key. Thus the consumer has to read the
* first N bytes and decrypt the AES secret key using the rsa private key
* and then can decrypt the remaining bytes in the message using the AES
* secret key.
*/
private static final String RSA = "RSA";
private static final String AES = "AES";
private static final int AES_KEY_BITS = 128;// multiple of 8
private static final int AES_KEY_BYTES = AES_KEY_BITS / 8;
private final Optional<Cipher> publicCipher;
private final Optional<Cipher> privateCipher;
private final SecretKeySpec aesSecretKeySpec;
private final Cipher aesCipher;
private final byte[] aesEncodedSecretKey;
private Optional<byte[]> rsaEncryptedAesSecretKeyBytes;
private PPK(Optional<Cipher> publicCipher, Optional<Cipher> privateCipher) {
this.publicCipher = publicCipher;
this.privateCipher = privateCipher;
try {
KeyGenerator kgen = KeyGenerator.getInstance(AES);
kgen.init(AES_KEY_BITS);
SecretKey key = kgen.generateKey();
aesEncodedSecretKey = key.getEncoded();
aesSecretKeySpec = new SecretKeySpec(aesEncodedSecretKey, AES);
aesCipher = Cipher.getInstance(AES);
if (publicCipher.isPresent())
rsaEncryptedAesSecretKeyBytes = Optional
.of(applyCipher(publicCipher.get(), aesEncodedSecretKey));
else
rsaEncryptedAesSecretKeyBytes = Optional.empty();
} catch (NoSuchAlgorithmException | NoSuchPaddingException e) {
throw new RuntimeException(e);
}
}
public static final Builder privateKey(Class<?> cls, String resource) {
return new Builder().privateKey(cls, resource);
}
public static final Builder privateKey(String resource) {
return new Builder().privateKey(resource);
}
public static final Builder privateKey(InputStream is) {
return new Builder().privateKey(is);
}
public static final Builder privateKey(File file) {
return new Builder().privateKey(file);
}
public static final Builder publicKey(Class<?> cls, String resource) {
return new Builder().publicKey(cls, resource);
}
public static final Builder publicKey(String resource) {
return new Builder().publicKey(resource);
}
public static final Builder publicKey(File file) {
return new Builder().publicKey(file);
}
public static final Builder publicKey(InputStream is) {
return new Builder().publicKey(is);
}
public static final class Builder {
private Optional<Cipher> publicCipher = Optional.empty();
private Optional<Cipher> privateCipher = Optional.empty();
private Builder() {
// prevent instantiation
}
public Builder publicKey(InputStream is) {
try {
return publicKey(ByteStreams.toByteArray(is));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public Builder privateKey(InputStream is) {
try {
return privateKey(ByteStreams.toByteArray(is));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public Builder publicKey(byte[] bytes) {
publicCipher = Optional.of(readPublicCipher(bytes));
return this;
}
public Builder publicKey(String resource) {
return publicKey(Classpath.bytesFrom(PPK.class, resource));
}
public Builder publicKey(Class<?> cls, String resource) {
return publicKey(Classpath.bytesFrom(cls, resource));
}
public Builder publicKey(File file) {
try {
return publicKey(Files.readAllBytes(file.toPath()));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public Builder privateKey(byte[] bytes) {
privateCipher = Optional.of(readPrivateCipher(bytes));
return this;
}
public Builder privateKey(String resource) {
return privateKey(Classpath.bytesFrom(PPK.class, resource));
}
public Builder privateKey(Class<?> cls, String resource) {
return privateKey(Classpath.bytesFrom(cls, resource));
}
public Builder privateKey(File file) {
try {
return privateKey(Files.readAllBytes(file.toPath()));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public byte[] encrypt(byte[] bytes) {
return build().encrypt(bytes);
}
public byte[] encrypt(InputStream is) {
return build().encrypt(is);
}
public byte[] decrypt(byte[] bytes) {
return build().decrypt(bytes);
}
public byte[] encrypt(String string, Charset charset) {
return build().encrypt(string, charset);
}
public String decrypt(byte[] bytes, Charset charset) {
return build().decrypt(bytes, charset);
}
public void encrypt(InputStream is, OutputStream os) {
build().encrypt(is, os);
}
public void decrypt(InputStream is, OutputStream os) {
build().decrypt(is, os);
}
public PPK build() {
return new PPK(publicCipher, privateCipher);
}
}
public void encrypt(InputStream is, OutputStream os) {
if (publicCipher.isPresent()) {
try {
if (rsaEncryptedAesSecretKeyBytes.get().length > 256)
throw new RuntimeException(
"unexpected length=" + rsaEncryptedAesSecretKeyBytes.get().length);
os.write(rsaEncryptedAesSecretKeyBytes.get().length - 1);
os.write(rsaEncryptedAesSecretKeyBytes.get());
encryptWithAes(is, os);
} catch (IOException e) {
throw new RuntimeException(e);
}
} else
throw new PublicKeyNotSetException();
}
public byte[] encrypt(InputStream is) {
try {
return encrypt(ByteStreams.toByteArray(is));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public byte[] encrypt(byte[] bytes) {
try (ByteArrayInputStream is = new ByteArrayInputStream(bytes);
ByteArrayOutputStream os = new ByteArrayOutputStream()) {
encrypt(is, os);
return os.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private void encryptWithAes(InputStream is, OutputStream os) {
try {
aesCipher.init(Cipher.ENCRYPT_MODE, aesSecretKeySpec);
applyCipher(aesCipher, is, os);
} catch (InvalidKeyException e) {
throw new RuntimeException(e);
}
}
public void decrypt(InputStream is, OutputStream os) {
if (privateCipher.isPresent()) {
int rsaEncryptedAesSecretKeyLength;
byte[] raw;
try {
rsaEncryptedAesSecretKeyLength = is.read() + 1;
raw = new byte[rsaEncryptedAesSecretKeyLength];
is.read(raw);
} catch (IOException e1) {
throw new RuntimeException(e1);
}
ByteArrayInputStream rsaEncryptedAesSecretKeyInputStream = new ByteArrayInputStream(
raw);
byte[] aesKey = new byte[AES_KEY_BYTES];
try (CipherInputStream cis = new CipherInputStream(rsaEncryptedAesSecretKeyInputStream,
privateCipher.get())) {
cis.read(aesKey, 0, rsaEncryptedAesSecretKeyLength);
} catch (IOException e) {
throw new RuntimeException(e);
}
SecretKeySpec aesKeySpec = new SecretKeySpec(aesKey, AES);
try {
aesCipher.init(Cipher.DECRYPT_MODE, aesKeySpec);
applyCipher(aesCipher, is, os);
} catch (InvalidKeyException e) {
throw new RuntimeException(e);
}
} else
throw new PrivateKeyNotSetException();
}
public byte[] decrypt(byte[] bytes) {
try (ByteArrayInputStream is = new ByteArrayInputStream(bytes);
ByteArrayOutputStream os = new ByteArrayOutputStream()) {
decrypt(is, os);
return os.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public byte[] encrypt(String string, Charset charset) {
return encrypt(string.getBytes(charset));
}
public String decrypt(byte[] bytes, Charset charset) {
return new String(decrypt(bytes), charset);
}
private static Cipher readPublicCipher(byte[] bytes) {
try {
X509EncodedKeySpec publicSpec = new X509EncodedKeySpec(bytes);
KeyFactory keyFactory = KeyFactory.getInstance(RSA);
PublicKey key = keyFactory.generatePublic(publicSpec);
Cipher cipher = Cipher.getInstance("RSA/ECB/OAEPWithSHA1AndMGF1Padding");
cipher.init(Cipher.ENCRYPT_MODE, key);
return cipher;
} catch (InvalidKeySpecException | NoSuchAlgorithmException | NoSuchPaddingException
| InvalidKeyException e) {
throw new RuntimeException(e);
}
}
private static Cipher readPrivateCipher(byte[] bytes) {
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
try {
KeyFactory keyFactory = KeyFactory.getInstance(RSA);
PrivateKey key = keyFactory.generatePrivate(keySpec);
Cipher cipher = Cipher.getInstance("RSA/ECB/OAEPWithSHA1AndMGF1Padding");
cipher.init(Cipher.DECRYPT_MODE, key);
return cipher;
} catch (InvalidKeySpecException | NoSuchAlgorithmException | NoSuchPaddingException
| InvalidKeyException e) {
throw new RuntimeException(e);
}
}
private static void applyCipher(Cipher cipher, InputStream is, OutputStream os) {
try (CipherOutputStream cos = new CipherOutputStream(os, cipher)) {
copy(is, cos);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static byte[] applyCipher(Cipher cipher, byte[] bytes) {
ByteArrayInputStream input = new ByteArrayInputStream(bytes);
ByteArrayOutputStream output = new ByteArrayOutputStream();
applyCipher(cipher, input, output);
return output.toByteArray();
}
private static void copy(InputStream is, OutputStream os) throws IOException {
int i;
byte[] b = new byte[1024];
while ((i = is.read(b)) != -1) {
os.write(b, 0, i);
}
}
}
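// Usage sketch (an assumption for illustration: "/public.der" and "/private.der" are DER-encoded
// X.509 public / PKCS#8 private RSA keys available on the classpath; the names are hypothetical).
// It exercises the hybrid scheme described above: the AES key is RSA-encrypted and prepended to
// the AES-encrypted payload, so decryption only needs the private key.
class PPKUsageSketch {
public static void main(String[] args) {
byte[] encrypted = PPK.publicKey("/public.der")
.encrypt("hello world", java.nio.charset.StandardCharsets.UTF_8);
String decrypted = PPK.privateKey("/private.der")
.decrypt(encrypted, java.nio.charset.StandardCharsets.UTF_8);
System.out.println(decrypted); // prints "hello world"
}
}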
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.ht.scada.oildata.dr;
import com.google.common.base.Joiner;
import com.ht.scada.common.tag.util.VarSubTypeEnum;
import com.ht.scada.data.Config;
import com.ht.scada.data.service.RealtimeDataService;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import javax.inject.Named;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.sql2o.Connection;
import org.sql2o.Query;
import org.sql2o.Sql2o;
/**
* Periodically routes realtime tag values into database tables, driven by the D_TASK,
* D_TASK_FIELD and D_TASK_RECORD definitions.
*
* @author 2015-1-21 22:52:25
*/
@Component
public class DataRouter {
private static final Logger log = LoggerFactory.getLogger(DataRouter.class);
@Inject
@Named("sql2o2")
protected Sql2o sql2o2;
@Inject
@Named("sql2o")
protected Sql2o sql2o;
private Connection con;
private Connection con2;
@Autowired
private RealtimeDataService realtimeDataService;
private ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
private void dataRouter() {
List<Map<String, Object>> taskList = null;
try {
con = sql2o.open();
con2 = sql2o2.open();
taskList = con.createQuery("select * from D_TASK")
.executeAndFetchTable().asList();
} catch (Exception e) {
log.error("Failed to load D_TASK definitions", e);
}
if (taskList != null) {
for (Map<String, Object> map : taskList) {
try {
String sid = (String) map.get("sid");
if (!sid.equals(Config.INSTANCE.getConfig().getString("dr.sid", "1"))) {
continue;
}
final String taskName = (String) map.get("name");
Integer interval = ((BigDecimal) map.get("interval")).intValue();
String timeUnit = ((String) map.get("timeunit")).toLowerCase();
Integer delay = ((BigDecimal) map.get("delay")).intValue();
final String tableName = (String) map.get("tablename");
final Integer isUpdate = ((BigDecimal) map.get("isupdate")).intValue();
final Integer isCreateTalbe = ((BigDecimal) map.get("iscreate")).intValue();
final String updateKey = ((String) map.get("updatekey")) == null ? null : ((String) map.get("updatekey")).toLowerCase();
TimeUnit tu;
switch (timeUnit) {
case "second":
tu = TimeUnit.SECONDS;
break;
case "minute":
tu = TimeUnit.MINUTES;
break;
default:
tu = TimeUnit.SECONDS;
break;
}
final List<Map<String, Object>> fieldList = con.createQuery("select * from D_TASK_FIELD where RWMC=:RWMC")
.addParameter("RWMC", taskName)
.executeAndFetchTable().asList();
final List<Map<String, Object>> recordList = con.createQuery("select * from D_TASK_RECORD where RWMC=:RWMC")
.addParameter("RWMC", taskName)
.executeAndFetchTable().asList();
if ("".equals(taskName.trim())) {
YouJingSbdazc yjsbzc = new YouJingSbdazc(con2, recordList, realtimeDataService);
executorService.scheduleAtFixedRate(yjsbzc, delay, interval, tu);
continue;
}
final List<String> fields = new ArrayList<>();
if (fieldList != null) {
for (Map<String, Object> fieldMap : fieldList) {
String zdmc = (String) fieldMap.get("zdmc");
fields.add(zdmc);
}
try {
if (isCreateTalbe > 0) {
createTable(tableName, fieldList);
}
} catch (Exception e) {
e.printStackTrace();
}
if (recordList != null) {
final String insertSql = generateInsertSql(tableName, fields);
final String updateSql = generateUpdateSql(tableName, fields, updateKey);
executorService.scheduleAtFixedRate(new Runnable() {
int hasUpdate = isUpdate;
@Override
public void run() {
log.info("Running scheduled task: {}", taskName);
Date date = new Date();
try {
if (hasUpdate > 0) {
con2.createQuery("delete from " + tableName).executeUpdate();
insertData(insertSql, fieldList, recordList, date);
hasUpdate = -1;
System.out.println("update");
} else {
if (isUpdate > 0) {
if (updateKey != null && !"".equals(updateKey)) {
updateData(updateSql, fieldList, recordList, date, updateKey);
System.out.println("update");
} else {
log.error("update");
}
} else {
insertData(insertSql, fieldList, recordList, date);
System.out.println("insert");
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}, delay, interval, tu);
}
}
} catch (Exception e) {
e.printStackTrace();
log.error(e.getMessage());
continue;
}
}
}
}
/**
* Builds the named-parameter insert SQL for the given table and fields.
*/
private String generateInsertSql(String tableName, List<String> fields) {
StringBuilder sqlBuilder = new StringBuilder();
sqlBuilder.append("insert into " + tableName + " (");
Joiner.on(", ").appendTo(sqlBuilder, fields);
sqlBuilder.append(") values \n (:");
Joiner.on(", :").appendTo(sqlBuilder, fields);
sqlBuilder.append(")");
String sql = sqlBuilder.toString();
log.info(sql);
return sql;
}
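// Example (table and field names are hypothetical): generateInsertSql("T_DEMO", Arrays.asList("jh", "cjsj"))
// yields "insert into T_DEMO (jh, cjsj) values \n (:jh, :cjsj)" (with a literal line break before
// the parameter list), ready for sql2o named-parameter binding.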
private void insertData(String sql, List<Map<String, Object>> fieldList, List<Map<String, Object>> recordList, Date date) {
con2 = sql2o2.beginTransaction();
Query query = con2.createQuery(sql);
int i = 0;
for (Map<String, Object> map : recordList) {
String jh = (String) map.get("jlmc");
String code = (String) map.get("ysjlmc");
String tsjlmc = (String) map.get("tsjlmc"); //WD|GD1-Q1|hgwd,YL|GD1-G2|hgyl2
for (Map<String, Object> fieldMap : fieldList) {
String zdmc = (String) fieldMap.get("zdmc");
String tscl = (String) fieldMap.get("tscl");
String gjz = (String) fieldMap.get("gjz");
if (tsjlmc != null && !"".equals(tsjlmc)) {
String mcs[] = tsjlmc.split(",");
for (String s : mcs) {
String gjzs[] = s.split("\\|");
if (zdmc.equals(gjzs[0])) {
code = gjzs[1];
gjz = gjzs[2];
break;
}
}
}
String myTscl = tscl == null ? "" : tscl.toLowerCase();
switch (myTscl) {
case "":
String value = null;
if (gjz != null && !"".equals(gjz.trim())) {
value = realtimeDataService.getEndTagVarInfo(code, gjz);
}
query.addParameter(zdmc, value);
break;
case "cjsj":
query.addParameter(zdmc, date);
break;
case "jh":
query.addParameter(zdmc, jh);
break;
case "yjyxzt":
String zt = null;
String s1 = realtimeDataService.getEndTagVarInfo(code, VarSubTypeEnum.RTU_RJ45_STATUS.toString().toLowerCase());
if ("true".equals(s1)) {
String s2 = realtimeDataService.getEndTagVarInfo(code, VarSubTypeEnum.YOU_JING_YUN_XING.toString().toLowerCase());
if ("true".equals(s2)) {
zt = "1";
} else {
zt = "0";
}
}
query.addParameter(zdmc, zt);
break;
case "null":
query.addParameter(zdmc, (String) null);
break;
case "yx":
String yx = null;
if (gjz != null && !"".equals(gjz.trim())) {
String v = realtimeDataService.getEndTagVarInfo(code, gjz);
if ("true".equals(v)) {
yx = "1";
} else if ("false".equals(v)) {
yx = "0";
}
}
query.addParameter(zdmc, yx);
break;
case "snzt_g":
query.addParameter(zdmc, getSnztG(code));
break;
case "snzt_d":
query.addParameter(zdmc, getSnztD(code));
break;
case "zxzt_g":
query.addParameter(zdmc, getZxztG(code));
break;
case "zxzt_d":
query.addParameter(zdmc, getZxztD(code));
break;
default:
query.addParameter(zdmc, myTscl);
break;
}
}
query.addToBatch();
i++;
if (i >= 2) {
query.executeBatch();
i = 0;
}
}
if (i > 0) {
query.executeBatch();
}
con2.commit();
}
/**
* Builds the named-parameter update SQL for the given table, fields and update key.
*/
private String generateUpdateSql(String tableName, List<String> fields, String key) {
StringBuilder sqlBuilder = new StringBuilder();
sqlBuilder.append("update " + tableName + " set ");
for (String f : fields) {
sqlBuilder.append(f + "=:" + f + ",");
}
sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
if (key != null && !"".equals(key)) {
String s1 = key.split(",")[0];
String s2 = key.split(",")[1];
sqlBuilder.append(" where " + s1 + " = :" + s2);
}
String sql = sqlBuilder.toString();
log.info(sql);
return sql;
}
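// Example (names are hypothetical): generateUpdateSql("T_DEMO", Arrays.asList("jh", "cjsj"), "JH,jh")
// yields "update T_DEMO set jh=:jh,cjsj=:cjsj where JH = :jh"; the key is a comma-separated pair of
// the database column name and the named parameter it is matched against.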
private void updateData(String sql, List<Map<String, Object>> fieldList, List<Map<String, Object>> recordList, Date date, String key) {
String s2 = key.split(",")[1];
con2 = sql2o2.beginTransaction();
Query query = con2.createQuery(sql);
int i = 0;
for (Map<String, Object> map : recordList) {
String jh = (String) map.get("jlmc");
String code = (String) map.get("ysjlmc");
for (Map<String, Object> fieldMap : fieldList) {
String zdmc = (String) fieldMap.get("zdmc");
String tscl = (String) fieldMap.get("tscl");
String myTscl = tscl == null ? "" : tscl.toLowerCase();
switch (myTscl) {
case "":
query.addParameter(zdmc, "1");
break;
case "cjsj":
query.addParameter(zdmc, date);
break;
case "jh":
query.addParameter(zdmc, jh);
break;
default:
query.addParameter(zdmc, "1");
break;
}
}
query.addParameter(s2, (String) map.get(s2.toLowerCase()));
query.addToBatch();
i++;
if (i >= 2) {
query.executeBatch();
i = 0;
}
}
if (i > 0) {
query.executeBatch();
}
con2.commit();
}
private void createTable(String tableName, List<Map<String, Object>> fieldList) {
String isExitSql = "select count(*) from user_tables where TABLE_NAME='" + tableName + "'";
int tableNum = con2.createQuery(isExitSql).executeScalar(Integer.class);
if (tableNum == 0) {
StringBuilder createTableBuilder = new StringBuilder();
createTableBuilder.append("create table " + tableName + "(");
for (Map<String, Object> map : fieldList) {
String zdmc = (String) map.get("zdmc");
String zdlx = ((String) map.get("zdlx")).toLowerCase();
createTableBuilder.append(zdmc + " " + zdlx + ",");
}
createTableBuilder.deleteCharAt(createTableBuilder.length() - 1);
createTableBuilder.append(")");
String createTableSql = createTableBuilder.toString();
log.info(createTableSql);
try {
con2.createQuery(createTableSql).executeUpdate();
} catch (Exception e) {
e.printStackTrace();
}
}
}
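// Example (hypothetical metadata): two field rows {zdmc=JH, zdlx=varchar2(32)} and
// {zdmc=CJSJ, zdlx=date} produce the DDL "create table T_DEMO(JH varchar2(32),CJSJ date)",
// which is only executed when the table is absent from user_tables.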
private String getSnztG(String code) {
String s16 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "wyyth_shi_neng_cgq16")) ? "1" : "0";
String s15 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "zndb_shi_neng_cgq15")) ? "1" : "0";
String s14 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "bpq_shi_neng_cgq14")) ? "1" : "0";
String s13 = "0";
String s12 = "0";
String s11 = "0";
String s10 = "0";
String s9 = "0";
return s16 + s15 + s14 + s13 + s12 + s11 + s10 + s9;
}
private String getSnztD(String code) {
String s8 = "0";
String s7 = "0";
String s6 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "ty_shi_neng_cgq6")) ? "1" : "0";
String s5 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "yw_shi_neng_cgq5")) ? "1" : "0";
String s4 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "yy_shi_neng_cgq4")) ? "1" : "0";
String s3 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "wy_shi_neng_cgq3")) ? "1" : "0";
String s2 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "zh_shi_neng_cgq2")) ? "1" : "0";
String s1 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "yth_shi_neng_cgq1")) ? "1" : "0";
return s8 + s7 + s6 + s5 + s4 + s3 + s2 + s1;
}
private String getZxztG(String code) {
String s16 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "wyyth_zai_xian_cgq16")) ? "1" : "0";
String s15 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "zndb_zai_xian_cgq15")) ? "1" : "0";
String s14 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "bpq_zai_xian_cgq14")) ? "1" : "0";
String s13 = "0";
String s12 = "0";
String s11 = "0";
String s10 = "0";
String s9 = "0";
return s16 + s15 + s14 + s13 + s12 + s11 + s10 + s9;
}
private String getZxztD(String code) {
String s8 = "0";
String s7 = "0";
String s6 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "ty_zai_xian_cgq6")) ? "1" : "0";
String s5 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "yw_zai_xian_cgq5")) ? "1" : "0";
String s4 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "yy_zai_xian_cgq4")) ? "1" : "0";
String s3 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "wy_zai_xian_cgq3")) ? "1" : "0";
String s2 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "zh_zai_xian_cgq2")) ? "1" : "0";
String s1 = "true".equals(realtimeDataService.getEndTagVarInfo(code, "yth_zai_xian_cgq1")) ? "1" : "0";
return s8 + s7 + s6 + s5 + s4 + s3 + s2 + s1;
}
}
|
package com.kodcu.service.ui;
import com.kodcu.controller.ApplicationController;
import com.kodcu.other.Current;
import de.jensd.fx.fontawesome.AwesomeDude;
import de.jensd.fx.fontawesome.AwesomeIcon;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.ScrollPane;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.scene.web.WebView;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class EditorService {
@Autowired
private Current current;
@Autowired
private ApplicationController controller;
public Node createEditorVBox(WebView webView) {
MenuBar menuBar = new MenuBar();
menuBar.getStyleClass().add("editorToolsBar");
String iconSize = "14.0";
Label saveLabel = AwesomeDude.createIconLabel(AwesomeIcon.SAVE, iconSize);
Label newLabel = AwesomeDude.createIconLabel(AwesomeIcon.FILE_TEXT_ALT, iconSize);
Label openLabel = AwesomeDude.createIconLabel(AwesomeIcon.FOLDER_OPEN_ALT, iconSize);
Label boldLabel = AwesomeDude.createIconLabel(AwesomeIcon.BOLD, iconSize);
Label italicLabel = AwesomeDude.createIconLabel(AwesomeIcon.ITALIC, iconSize);
Label headerLabel = AwesomeDude.createIconLabel(AwesomeIcon.HEADER, iconSize);
Label codeLabel = AwesomeDude.createIconLabel(AwesomeIcon.CODE, iconSize);
Label ulListLabel = AwesomeDude.createIconLabel(AwesomeIcon.LIST_UL, iconSize);
Label olListLabel = AwesomeDude.createIconLabel(AwesomeIcon.LIST_OL, iconSize);
Label tableLabel = AwesomeDude.createIconLabel(AwesomeIcon.TABLE, iconSize);
Label imageLabel = AwesomeDude.createIconLabel(AwesomeIcon.IMAGE, iconSize);
Label subscriptLabel = AwesomeDude.createIconLabel(AwesomeIcon.SUBSCRIPT, iconSize);
Label superScriptLabel = AwesomeDude.createIconLabel(AwesomeIcon.SUPERSCRIPT, iconSize);
Label underlineLabel = AwesomeDude.createIconLabel(AwesomeIcon.UNDERLINE, iconSize);
Label hyperlinkLabel = AwesomeDude.createIconLabel(AwesomeIcon.LINK, iconSize);
Label strikethroughLabel = AwesomeDude.createIconLabel(AwesomeIcon.STRIKETHROUGH, iconSize);
Label highlightLabel = new Label(" A ");
// Events
newLabel.setOnMouseClicked(controller::newDoc);
openLabel.setOnMouseClicked(controller::openDoc);
saveLabel.setOnMouseClicked(controller::saveDoc);
boldLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("boldText()");
});
italicLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("italicizeText()");
});
codeLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("addSourceCode()");
});
tableLabel.setOnMouseClicked(controller::createTable);
subscriptLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("subScript()");
});
superScriptLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("superScript()");
});
imageLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("addImageSection()");
});
headerLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("addHeading()");
});
ulListLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("addUlList()");
});
olListLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("addOlList()");
});
underlineLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("underlinedText()");
});
hyperlinkLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("addHyperLink()");
});
strikethroughLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("addStrikeThroughText()");
});
highlightLabel.setStyle("-fx-background-color: rgba(255, 255, 0, 0.4)");
highlightLabel.setOnMouseClicked(event -> {
current.currentEngine().executeScript("highlightedText()");
});
menuBar.getMenus().addAll(
new Menu("", newLabel),
new Menu("", openLabel),
new Menu("", saveLabel),
new Menu("", boldLabel),
new Menu("", italicLabel),
new Menu("", underlineLabel),
new Menu("", strikethroughLabel),
new Menu("", headerLabel),
new Menu("", hyperlinkLabel),
new Menu("", codeLabel),
new Menu("", ulListLabel),
new Menu("", olListLabel),
new Menu("", tableLabel),
new Menu("", imageLabel),
new Menu("", subscriptLabel),
new Menu("", superScriptLabel),
new Menu("", highlightLabel)
);
ScrollPane scrollPane = new ScrollPane();
scrollPane.setContent(webView);
scrollPane.setFitToHeight(true);
scrollPane.setFitToWidth(true);
VBox.setVgrow(scrollPane, Priority.ALWAYS);
return new VBox(menuBar, scrollPane);
}
}
|