package com.jfinal.rest;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
public class ClassScanner {
private ClassScanner() {
}
private static ClassLoader classLoader = null;
private static String classPath = null;
static {
classLoader = ClassScanner.class.getClassLoader();
classPath = classLoader.getResource("").getFile();
classPath = new File(classPath).getAbsolutePath();
}
/**
* Scans the given package (and its sub-packages) on the classpath and loads
* every class found.
*
* @param pack package name, e.g. "com.jfinal.rest"
* @return the classes found, or null if the package directory does not exist
*/
public static List<Class<?>> scan(String pack) {
String path = classLoader.getResource("").getFile() + pack.replace(".", "/");
File dir = new File(path);
if (!dir.isDirectory()) {
return null;
}
try {
return scan(dir);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
private static List<Class<?>> scan(File dir) throws ClassNotFoundException {
List<Class<?>> list = new ArrayList<Class<?>>();
File[] files = dir.listFiles();
if (files == null) {
return list;
}
for (File file : files) {
if (file.isFile()) {
String filePath = file.getAbsolutePath();
if (!filePath.endsWith(".class")) {
continue;
}
// strip the class path prefix from the absolute file name
String className = filePath.substring(classPath.length());
if (className.startsWith(File.separator)) {
className = className.substring(1);
}
// drop the ".class" suffix (6 characters)
className = className.substring(0, className.length() - 6);
className = className.replace(File.separator, ".");
list.add(classLoader.loadClass(className));
continue;
}
if (file.isDirectory()) {
list.addAll(scan(file));
}
}
return list;
}
}
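/*
 * Minimal usage sketch (the package name below is illustrative; any package
 * available on the classpath works):
 *
 *   List<Class<?>> classes = ClassScanner.scan("com.jfinal.rest");
 *   if (classes != null) {
 *       for (Class<?> c : classes) {
 *           System.out.println(c.getName());
 *       }
 *   }
 */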
|
package com.joelhockey.jacknji11;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.joelhockey.codec.Buf;
import com.joelhockey.codec.Hex;
import com.sun.jna.Memory;
import com.sun.jna.NativeLong;
import com.sun.jna.Pointer;
import com.sun.jna.ptr.ByteByReference;
import com.sun.jna.ptr.NativeLongByReference;
/**
* CKA_? constants and wrapper for CK_ATTRIBUTE struct.
* @author Joel Hockey (joel.hockey@gmail.com)
*/
public class CKA {
private static final Log log = LogFactory.getLog(CKA.class);
public static final int CKF_ARRAY_ATTRIBUTE = 0x40000000;
public static final int CLASS = 0x00000000;
public static final int TOKEN = 0x00000001;
public static final int PRIVATE = 0x00000002;
public static final int LABEL = 0x00000003;
public static final int APPLICATION = 0x00000010;
public static final int VALUE = 0x00000011;
public static final int OBJECT_ID = 0x00000012;
public static final int CERTIFICATE_TYPE = 0x00000080;
public static final int ISSUER = 0x00000081;
public static final int SERIAL_NUMBER = 0x00000082;
public static final int AC_ISSUER = 0x00000083;
public static final int OWNER = 0x00000084;
public static final int ATTR_TYPES = 0x00000085;
public static final int TRUSTED = 0x00000086;
public static final int CERTIFICATE_CATEGORY = 0x00000087;
public static final int JAVA_MIDP_SECURITY_DOMAIN = 0x00000088;
public static final int URL = 0x00000089;
public static final int HASH_OF_SUBJECT_PUBLIC_KEY = 0x0000008a;
public static final int HASH_OF_ISSUER_PUBLIC_KEY = 0x0000008b;
public static final int CHECK_VALUE = 0x00000090;
public static final int KEY_TYPE = 0x00000100;
public static final int SUBJECT = 0x00000101;
public static final int ID = 0x00000102;
public static final int SENSITIVE = 0x00000103;
public static final int ENCRYPT = 0x00000104;
public static final int DECRYPT = 0x00000105;
public static final int WRAP = 0x00000106;
public static final int UNWRAP = 0x00000107;
public static final int SIGN = 0x00000108;
public static final int SIGN_RECOVER = 0x00000109;
public static final int VERIFY = 0x0000010a;
public static final int VERIFY_RECOVER = 0x0000010b;
public static final int DERIVE = 0x0000010c;
public static final int START_DATE = 0x00000110;
public static final int END_DATE = 0x00000111;
public static final int MODULUS = 0x00000120;
public static final int MODULUS_BITS = 0x00000121;
public static final int PUBLIC_EXPONENT = 0x00000122;
public static final int PRIVATE_EXPONENT = 0x00000123;
public static final int PRIME_1 = 0x00000124;
public static final int PRIME_2 = 0x00000125;
public static final int EXPONENT_1 = 0x00000126;
public static final int EXPONENT_2 = 0x00000127;
public static final int COEFFICIENT = 0x00000128;
public static final int PRIME = 0x00000130;
public static final int SUBPRIME = 0x00000131;
public static final int BASE = 0x00000132;
public static final int PRIME_BITS = 0x00000133;
public static final int SUBPRIME_BITS = 0x00000134;
public static final int VALUE_BITS = 0x00000160;
public static final int VALUE_LEN = 0x00000161;
public static final int EXTRACTABLE = 0x00000162;
public static final int LOCAL = 0x00000163;
public static final int NEVER_EXTRACTABLE = 0x00000164;
public static final int ALWAYS_SENSITIVE = 0x00000165;
public static final int MODIFIABLE = 0x00000170;
public static final int EC_PARAMS = 0x00000180;
public static final int EC_POINT = 0x00000181;
public static final int SECONDARY_AUTH = 0x00000200;
public static final int AUTH_PIN_FLAGS = 0x00000201;
public static final int ALWAYS_AUTHENTICATE = 0x00000202;
public static final int WRAP_WITH_TRUSTED = 0x00000210;
public static final int WRAP_TEMPLATE = (CKF_ARRAY_ATTRIBUTE|0x00000211);
public static final int UNWRAP_TEMPLATE = (CKF_ARRAY_ATTRIBUTE|0x00000212);
public static final int OTP_FORMAT = 0x00000220;
public static final int OTP_LENGTH = 0x00000221;
public static final int OTP_TIME_INTERVAL = 0x00000222;
public static final int OTP_USER_FRIENDLY_MODE = 0x00000223;
public static final int OTP_CHALLENGE_REQUIREMENT = 0x00000224;
public static final int OTP_TIME_REQUIREMENT = 0x00000225;
public static final int OTP_COUNTER_REQUIREMENT = 0x00000226;
public static final int OTP_PIN_REQUIREMENT = 0x00000227;
public static final int OTP_COUNTER = 0x0000022e;
public static final int OTP_TIME = 0x0000022f;
public static final int OTP_USER_IDENTIFIER = 0x0000022a;
public static final int OTP_SERVICE_IDENTIFIER = 0x0000022b;
public static final int OTP_SERVICE_LOGO = 0x0000022c;
public static final int OTP_SERVICE_LOGO_TYPE = 0x0000022d;
public static final int HW_FEATURE_TYPE = 0x00000300;
public static final int RESET_ON_INIT = 0x00000301;
public static final int HAS_RESET = 0x00000302;
public static final int PIXEL_X = 0x00000400;
public static final int PIXEL_Y = 0x00000401;
public static final int RESOLUTION = 0x00000402;
public static final int CHAR_ROWS = 0x00000403;
public static final int CHAR_COLUMNS = 0x00000404;
public static final int COLOR = 0x00000405;
public static final int BITS_PER_PIXEL = 0x00000406;
public static final int CHAR_SETS = 0x00000480;
public static final int ENCODING_METHODS = 0x00000481;
public static final int MIME_TYPES = 0x00000482;
public static final int MECHANISM_TYPE = 0x00000500;
public static final int REQUIRED_CMS_ATTRIBUTES = 0x00000501;
public static final int DEFAULT_CMS_ATTRIBUTES = 0x00000502;
public static final int SUPPORTED_CMS_ATTRIBUTES = 0x00000503;
public static final int ALLOWED_MECHANISMS = (CKF_ARRAY_ATTRIBUTE|0x00000600);
// Vendor defined values
// Eracom PTK
public static final int VENDOR_PTK_USAGE_COUNT = 0x80000101;
public static final int VENDOR_PTK_TIME_STAMP = 0x80000102;
public static final int VENDOR_PTK_CHECK_VALUE = 0x80000103;
public static final int VENDOR_PTK_MECHANISM_LIST = 0x80000104;
public static final int VENDOR_PTK_SIGN_LOCAL_CERT = 0x80000127;
public static final int VENDOR_PTK_EXPORT = 0x80000128;
public static final int VENDOR_PTK_EXPORTABLE = 0x80000129;
public static final int VENDOR_PTK_DELETABLE = 0x8000012a;
public static final int VENDOR_PTK_IMPORT = 0x8000012b;
public static final int VENDOR_PTK_KEY_SIZE = 0x8000012c;
public static final int VENDOR_PTK_ISSUER_STR = 0x80000130;
public static final int VENDOR_PTK_SUBJECT_STR = 0x80000131;
public static final int VENDOR_PTK_SERIAL_NUMBER_INT = 0x80000132;
public static final int VENDOR_PTK_RECORD_COUNT = 0x80000136;
public static final int VENDOR_PTK_RECORD_NUMBER = 0x80000137;
public static final int VENDOR_PTK_PURGE = 0x80000139;
public static final int VENDOR_PTK_EVENT_LOG_FULL = 0x8000013a;
public static final int VENDOR_PTK_SECURITY_MODE = 0x80000140;
public static final int VENDOR_PTK_TRANSPORT_MODE = 0x80000141;
public static final int VENDOR_PTK_BATCH = 0x80000142;
public static final int VENDOR_PTK_HW_STATUS = 0x80000143;
public static final int VENDOR_PTK_FREE_MEM = 0x80000144;
public static final int VENDOR_PTK_TAMPER_CMD = 0x80000145;
public static final int VENDOR_PTK_DATE_OF_MANUFACTURE = 0x80000146;
public static final int VENDOR_PTK_HALT_CMD = 0x80000147;
public static final int VENDOR_PTK_APPLICATION_COUNT = 0x80000148;
public static final int VENDOR_PTK_FW_VERSION = 0x80000149;
public static final int VENDOR_PTK_RESCAN_PERIPHERALS_CMD = 0x8000014a;
public static final int VENDOR_PTK_RTC_AAC_ENABLED = 0x8000014b;
public static final int VENDOR_PTK_RTC_AAC_GUARD_SECONDS = 0x8000014c;
public static final int VENDOR_PTK_RTC_AAC_GUARD_COUNT = 0x8000014d;
public static final int VENDOR_PTK_RTC_AAC_GUARD_DURATION = 0x8000014e;
public static final int VENDOR_PTK_HW_EXT_INFO_STR = 0x8000014f;
public static final int VENDOR_PTK_SLOT_ID = 0x80000151;
public static final int VENDOR_PTK_MAX_SESSIONS = 0x80000155;
public static final int VENDOR_PTK_MIN_PIN_LEN = 0x80000156;
public static final int VENDOR_PTK_MAX_PIN_FAIL = 0x80000158;
public static final int VENDOR_PTK_FLAGS = 0x80000159;
public static final int VENDOR_PTK_VERIFY_OS = 0x80000170;
public static final int VENDOR_PTK_VERSION = 0x80000181;
public static final int VENDOR_PTK_MANUFACTURER = 0x80000182;
public static final int VENDOR_PTK_BUILD_DATE = 0x80000183;
public static final int VENDOR_PTK_FINGERPRINT = 0x80000184;
public static final int VENDOR_PTK_ROM_SPACE = 0x80000185;
public static final int VENDOR_PTK_RAM_SPACE = 0x80000186;
public static final int VENDOR_PTK_FM_STATUS = 0x80000187;
public static final int VENDOR_PTK_DELETE_FM = 0x80000188;
public static final int VENDOR_PTK_FM_STARTUP_STATUS = 0x80000189;
public static final int VENDOR_PTK_CERTIFICATE_START_TIME = 0x80000190;
public static final int VENDOR_PTK_CERTIFICATE_END_TIME = 0x80000191;
public static final int VENDOR_PTK_PKI_ATTRIBUTE_BER_ENCODED = 0x80000230;
public static final int VENDOR_PTK_HIFACE_MASTER = 0x80000250;
public static final int VENDOR_PTK_CKA_SEED = 0x80000260;
public static final int VENDOR_PTK_CKA_COUNTER = 0x80000261;
public static final int VENDOR_PTK_CKA_H_VALUE = 0x80000262;
public static final int VENDOR_PTK_ENUM_ATTRIBUTE = 0x0000ffff;
/** Maps from int value to String description (variable name). */
private static final Map<Integer, String> I2S = C.createI2SMap(CKA.class);
/**
* Convert int constant value to name.
* @param cka value
* @return name
*/
public static final String I2S(int cka) { return C.i2s(I2S, CKA.class.getSimpleName(), cka); }
public int type;
public Pointer pValue;
public int ulValueLen;
// disallow zero-arg constructor
private CKA() {
}
/**
* PKCS#11 CK_ATTRIBUTE struct constructor.
* @param type CKA_? type. Use one of the public static final int fields in this class.
* @param value supports java types Boolean, byte[], Number (int, long), String
*/
public CKA(int type, Object value) {
this.type = type;
if (value == null) {
pValue = null;
ulValueLen = 0;
} else if (value instanceof Boolean) {
pValue = new ByteByReference((Boolean) value ? (byte) 1 : (byte) 0).getPointer();
ulValueLen = 1;
} else if (value instanceof byte[]) {
byte[] v = (byte[]) value;
pValue = new Memory(v.length);
pValue.write(0, v, 0, v.length);
ulValueLen = v.length;
} else if (value instanceof Number) {
pValue = new NativeLongByReference(new NativeLong(((Number) value).longValue())).getPointer();
ulValueLen = NativeLong.SIZE;
} else if (value instanceof String) {
byte[] v = ((String) value).getBytes();
pValue = new Memory(v.length);
pValue.write(0, v, 0, v.length);
ulValueLen = v.length;
} else {
throw new RuntimeException("Unknown att type: " + value.getClass());
}
}
/**
* PKCS#11 CK_ATTRIBUTE struct constructor with null value.
* @param type CKA_? type. Use one of the public static final int fields in this class.
*/
public CKA(int type) {
this(type, null);
}
/** @return value as byte[] */
public byte[] getValue() { return pValue == null ? null : pValue.getByteArray(0, ulValueLen); }
/** @return value as String */
public String getValueStr() { return pValue == null ? null : new String(pValue.getByteArray(0, ulValueLen)); }
/** @return value as int */
public Integer getValueInt() {
if (ulValueLen == 0 || pValue == null) {
return null;
}
if (ulValueLen != NativeLong.SIZE) {
throw new IllegalStateException(String.format(
"Method getValueInt called when value is not int type of length %d. Got length: %d, CKA type: 0x%08x(%s), value: %s",
NativeLong.SIZE, ulValueLen, type, CKA.I2S.get(type), Hex.b2s(getValue())));
}
return NativeLong.SIZE == 4 ? pValue.getInt(0) : (int) pValue.getLong(0);
}
/** @return value as boolean */
public Boolean getValueBool() {
if (ulValueLen == 0 || pValue == null) {
return null;
}
if (ulValueLen != 1) {
throw new IllegalStateException(String.format(
"Method getValueBool called when value is not boolean type of length 1. Got length: %d, CKA type: 0x%08x(%s), value: %s",
ulValueLen, type, CKA.I2S.get(type), Hex.b2s(getValue())));
}
return pValue.getByte(0) != 0;
}
/**
* Dump for debug.
* @param sb write to
*/
public void dump(StringBuilder sb) {
sb.append(String.format("type=0x%08x{%s} valueLen=%d", type, I2S(type), ulValueLen));
try {
switch (type) {
case CLASS: // lookup CKO
Integer cko = getValueInt();
sb.append(cko != null ? String.format(" value=0x%08x{%s}", cko, CKO.I2S(cko)) : " value=null");
return;
case TOKEN: // boolean
case PRIVATE:
case TRUSTED:
case SENSITIVE:
case ENCRYPT:
case DECRYPT:
case WRAP:
case UNWRAP:
case SIGN:
case SIGN_RECOVER:
case VERIFY:
case VERIFY_RECOVER:
case DERIVE:
case EXTRACTABLE:
case LOCAL:
case NEVER_EXTRACTABLE:
case ALWAYS_SENSITIVE:
case MODIFIABLE:
case ALWAYS_AUTHENTICATE:
case WRAP_WITH_TRUSTED:
case RESET_ON_INIT:
case HAS_RESET:
case VENDOR_PTK_SIGN_LOCAL_CERT:
case VENDOR_PTK_EXPORT:
case VENDOR_PTK_EXPORTABLE:
case VENDOR_PTK_DELETABLE:
case VENDOR_PTK_IMPORT:
case VENDOR_PTK_EVENT_LOG_FULL:
case VENDOR_PTK_VERIFY_OS:
Boolean b = getValueBool();
sb.append(" value=").append(b != null ? b ? "TRUE" : "FALSE" : "null");
return;
case LABEL: // escaped printable string
case APPLICATION:
case URL:
case START_DATE:
case END_DATE:
case VENDOR_PTK_TIME_STAMP:
case VENDOR_PTK_ISSUER_STR:
case VENDOR_PTK_SUBJECT_STR:
case VENDOR_PTK_DATE_OF_MANUFACTURE:
case VENDOR_PTK_RTC_AAC_ENABLED:
case VENDOR_PTK_HW_EXT_INFO_STR:
case VENDOR_PTK_MANUFACTURER:
case VENDOR_PTK_BUILD_DATE:
case VENDOR_PTK_CERTIFICATE_START_TIME:
case VENDOR_PTK_CERTIFICATE_END_TIME:
sb.append(" value=").append(Buf.escstr(getValue()));
return;
case CERTIFICATE_TYPE: // lookup CKC
Integer ckc = getValueInt();
sb.append(ckc != null ? String.format(" value=0x%08x{%s}", ckc, CKC.I2S(ckc)) : " value=null");
return;
case KEY_TYPE: // lookup CKK
Integer ckk = getValueInt();
sb.append(ckk != null ? String.format(" value=0x%08x{%s}", ckk, CKK.I2S(ckk)) : " value=null");
return;
case MODULUS_BITS: // int
case PRIME_BITS:
case SUBPRIME_BITS:
case VALUE_BITS:
case VALUE_LEN:
case OTP_LENGTH:
case OTP_TIME_INTERVAL:
case PIXEL_X:
case PIXEL_Y:
case RESOLUTION:
case CHAR_ROWS:
case CHAR_COLUMNS:
case BITS_PER_PIXEL:
case VENDOR_PTK_USAGE_COUNT:
case VENDOR_PTK_KEY_SIZE:
case VENDOR_PTK_RECORD_COUNT:
case VENDOR_PTK_RECORD_NUMBER:
case VENDOR_PTK_FREE_MEM:
case VENDOR_PTK_APPLICATION_COUNT:
case VENDOR_PTK_RTC_AAC_GUARD_SECONDS:
case VENDOR_PTK_RTC_AAC_GUARD_COUNT:
case VENDOR_PTK_RTC_AAC_GUARD_DURATION:
case VENDOR_PTK_SLOT_ID:
case VENDOR_PTK_MAX_SESSIONS:
case VENDOR_PTK_MIN_PIN_LEN:
case VENDOR_PTK_MAX_PIN_FAIL:
case VENDOR_PTK_ROM_SPACE:
case VENDOR_PTK_RAM_SPACE:
case VENDOR_PTK_CKA_COUNTER:
sb.append(" value=").append(getValueInt());
return;
default: // no default, fall through to hex dump below
}
} catch (Exception e) { // unexpected CKA values
// log warning
log.warn("Unexpected CKA values", e);
// hex dump below
}
// hex dump by default or if error parsing other data type
byte[] value = getValue();
Hex.dump(sb, value, 0, ulValueLen, " ", 32, false);
}
public String toString() {
StringBuilder sb = new StringBuilder();
dump(sb);
return sb.toString();
}
}
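/*
 * Minimal usage sketch (hypothetical values, not taken from the original source):
 * builds a few CK_ATTRIBUTE wrappers and reads the values back through the
 * typed getters defined above.
 *
 *   CKA label = new CKA(CKA.LABEL, "test-key");    // String stored as bytes
 *   CKA token = new CKA(CKA.TOKEN, true);          // Boolean stored as a single byte
 *   CKA bits  = new CKA(CKA.MODULUS_BITS, 2048);   // Number stored as CK_ULONG
 *   System.out.println(label.getValueStr());       // "test-key"
 *   System.out.println(token.getValueBool());      // true
 *   System.out.println(bits.getValueInt());        // 2048
 *   System.out.println(CKA.I2S(CKA.MODULUS_BITS)); // constant name lookup
 */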
|
package com.justinsb.etcd;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import com.google.common.base.Charsets;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;
public class EtcdClient {
static CloseableHttpAsyncClient defaultHttpClient;
static final Gson gson = new GsonBuilder().create();
static synchronized CloseableHttpAsyncClient buildDefaultHttpClient() {
if (defaultHttpClient == null) {
// TODO: Increase timeout??
RequestConfig requestConfig = RequestConfig.custom().build();
CloseableHttpAsyncClient httpClient = HttpAsyncClients.custom().setDefaultRequestConfig(requestConfig).build();
httpClient.start();
EtcdClient.defaultHttpClient = httpClient;
}
return defaultHttpClient;
}
final URI baseUri;
private final CloseableHttpAsyncClient httpClient;
public EtcdClient(URI baseUri) {
this(baseUri, buildDefaultHttpClient());
}
public EtcdClient(URI baseUri, CloseableHttpAsyncClient client) {
this.httpClient = client;
String uri = baseUri.toString();
if (!uri.endsWith("/")) {
uri += "/";
baseUri = URI.create(uri);
}
this.baseUri = baseUri;
}
/**
* Retrieves a key. Returns null if not found.
*/
public EtcdResult get(String key) throws EtcdClientException {
URI uri = buildKeyUri("v2/keys", key, "");
HttpGet request = new HttpGet(uri);
EtcdResult result = syncExecute(request, new int[] { 200, 404 }, 100);
if (result.isError() && result.errorCode == 100) {
// etcd error code 100: key not found
return null;
}
return result;
}
/**
* Deletes the given key
*/
public EtcdResult delete(String key) throws EtcdClientException {
URI uri = buildKeyUri("v2/keys", key, "");
HttpDelete request = new HttpDelete(uri);
return syncExecute(request, new int[] { 200, 404 });
}
/**
* Sets a key to a new value
*/
public EtcdResult set(String key, String value) throws EtcdClientException {
return set(key, value, null);
}
/**
* Sets a key to a new value with an (optional) ttl
*/
public EtcdResult set(String key, String value, Integer ttl) throws EtcdClientException {
List<BasicNameValuePair> data = Lists.newArrayList();
data.add(new BasicNameValuePair("value", value));
if (ttl != null) {
data.add(new BasicNameValuePair("ttl", Integer.toString(ttl)));
}
return set0(key, data, new int[] { 200, 201 });
}
/**
* Creates a directory
*/
public EtcdResult createDirectory(String key) throws EtcdClientException {
List<BasicNameValuePair> data = Lists.newArrayList();
data.add(new BasicNameValuePair("dir", "true"));
return set0(key, data, new int[] { 200, 201 });
}
/**
* Lists a directory
*/
public List<EtcdNode> listDirectory(String key) throws EtcdClientException {
EtcdResult result = get(key + "/");
if (result == null || result.node == null) {
return null;
}
return result.node.nodes;
}
/**
* Delete a directory
*/
public EtcdResult deleteDirectory(String key) throws EtcdClientException {
URI uri = buildKeyUri("v2/keys", key, "?dir=true");
HttpDelete request = new HttpDelete(uri);
return syncExecute(request, new int[] { 202 });
}
/**
* Sets a key to a new value, if the value is a specified value
*/
public EtcdResult cas(String key, String prevValue, String value) throws EtcdClientException {
List<BasicNameValuePair> data = Lists.newArrayList();
data.add(new BasicNameValuePair("value", value));
data.add(new BasicNameValuePair("prevValue", prevValue));
return set0(key, data, new int[] { 200, 412 }, 101);
}
/**
* Watches the given subtree
*/
public ListenableFuture<EtcdResult> watch(String key) throws EtcdClientException {
return watch(key, null, false);
}
/**
* Watches the given subtree
*/
public ListenableFuture<EtcdResult> watch(String key, Long index, boolean recursive) throws EtcdClientException {
String suffix = "?wait=true";
if (index != null) {
suffix += "&waitIndex=" + index;
}
if (recursive) {
suffix += "&recursive=true";
}
URI uri = buildKeyUri("v2/keys", key, suffix);
HttpGet request = new HttpGet(uri);
return asyncExecute(request, new int[] { 200 });
}
/**
* Gets the etcd version
*/
public String getVersion() throws EtcdClientException {
URI uri = baseUri.resolve("version");
HttpGet request = new HttpGet(uri);
// Technically not JSON, but it'll work
// This call is the odd one out
JsonResponse s = syncExecuteJson(request, 200);
if (s.httpStatusCode != 200) {
throw new EtcdClientException("Error while fetching versions", s.httpStatusCode);
}
return s.json;
}
private EtcdResult set0(String key, List<BasicNameValuePair> data, int[] httpErrorCodes, int... expectedErrorCodes)
throws EtcdClientException {
URI uri = buildKeyUri("v2/keys", key, "");
HttpPut request = new HttpPut(uri);
UrlEncodedFormEntity entity = new UrlEncodedFormEntity(data, Charsets.UTF_8);
request.setEntity(entity);
return syncExecute(request, httpErrorCodes, expectedErrorCodes);
}
public EtcdResult listChildren(String key) throws EtcdClientException {
URI uri = buildKeyUri("v2/keys", key, "/");
HttpGet request = new HttpGet(uri);
EtcdResult result = syncExecute(request, new int[] { 200 });
return result;
}
protected ListenableFuture<EtcdResult> asyncExecute(HttpUriRequest request, int[] expectedHttpStatusCodes, final int... expectedErrorCodes)
throws EtcdClientException {
ListenableFuture<JsonResponse> json = asyncExecuteJson(request, expectedHttpStatusCodes);
return Futures.transform(json, new AsyncFunction<JsonResponse, EtcdResult>() {
public ListenableFuture<EtcdResult> apply(JsonResponse json) throws Exception {
EtcdResult result = jsonToEtcdResult(json, expectedErrorCodes);
return Futures.immediateFuture(result);
}
});
}
protected EtcdResult syncExecute(HttpUriRequest request, int[] expectedHttpStatusCodes, int... expectedErrorCodes) throws EtcdClientException {
try {
return asyncExecute(request, expectedHttpStatusCodes, expectedErrorCodes).get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new EtcdClientException("Interrupted during request", e);
} catch (ExecutionException e) {
throw unwrap(e);
}
// String json = syncExecuteJson(request);
// return jsonToEtcdResult(json, expectedErrorCodes);
}
private EtcdClientException unwrap(ExecutionException e) {
Throwable cause = e.getCause();
if (cause instanceof EtcdClientException) {
return (EtcdClientException) cause;
}
return new EtcdClientException("Error executing request", e);
}
private EtcdResult jsonToEtcdResult(JsonResponse response, int... expectedErrorCodes) throws EtcdClientException {
if (response == null || response.json == null) {
return null;
}
EtcdResult result = parseEtcdResult(response.json);
if (result.isError()) {
if (!contains(expectedErrorCodes, result.errorCode)) {
throw new EtcdClientException(result.message, result);
}
}
return result;
}
private EtcdResult parseEtcdResult(String json) throws EtcdClientException {
EtcdResult result;
try {
result = gson.fromJson(json, EtcdResult.class);
} catch (JsonParseException e) {
throw new EtcdClientException("Error parsing response from etcd", e);
}
return result;
}
private static boolean contains(int[] list, int find) {
for (int i = 0; i < list.length; i++) {
if (list[i] == find) {
return true;
}
}
return false;
}
protected List<EtcdResult> syncExecuteList(HttpUriRequest request) throws EtcdClientException {
JsonResponse response = syncExecuteJson(request, 200);
if (response.json == null) {
return null;
}
if (response.httpStatusCode != 200) {
EtcdResult etcdResult = parseEtcdResult(response.json);
throw new EtcdClientException("Error listing keys", etcdResult);
}
try {
List<EtcdResult> ret = new ArrayList<EtcdResult>();
JsonParser parser = new JsonParser();
JsonArray array = parser.parse(response.json).getAsJsonArray();
for (int i = 0; i < array.size(); i++) {
EtcdResult next = gson.fromJson(array.get(i), EtcdResult.class);
ret.add(next);
}
return ret;
} catch (JsonParseException e) {
throw new EtcdClientException("Error parsing response from etcd", e);
}
}
protected JsonResponse syncExecuteJson(HttpUriRequest request, int... expectedHttpStatusCodes) throws EtcdClientException {
try {
return asyncExecuteJson(request, expectedHttpStatusCodes).get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new EtcdClientException("Interrupted during request processing", e);
} catch (ExecutionException e) {
throw unwrap(e);
}
// ListenableFuture<HttpResponse> response = asyncExecuteHttp(request);
// HttpResponse httpResponse;
// try {
// httpResponse = response.get();
// } catch (InterruptedException e) {
// Thread.currentThread().interrupt();
// throw new
// EtcdClientException("Interrupted during request processing", e);
// } catch (ExecutionException e) {
// // TODO: Unwrap?
// throw new EtcdClientException("Error executing request", e);
// String json = parseJsonResponse(httpResponse);
// return json;
}
protected ListenableFuture<JsonResponse> asyncExecuteJson(HttpUriRequest request, final int[] expectedHttpStatusCodes) throws EtcdClientException {
ListenableFuture<HttpResponse> response = asyncExecuteHttp(request);
return Futures.transform(response, new AsyncFunction<HttpResponse, JsonResponse>() {
public ListenableFuture<JsonResponse> apply(HttpResponse httpResponse) throws Exception {
JsonResponse json = extractJsonResponse(httpResponse, expectedHttpStatusCodes);
return Futures.immediateFuture(json);
}
});
}
/**
* We need the status code & the response to parse an error response.
*/
static class JsonResponse {
final String json;
final int httpStatusCode;
public JsonResponse(String json, int statusCode) {
this.json = json;
this.httpStatusCode = statusCode;
}
}
protected JsonResponse extractJsonResponse(HttpResponse httpResponse, int[] expectedHttpStatusCodes) throws EtcdClientException {
try {
StatusLine statusLine = httpResponse.getStatusLine();
int statusCode = statusLine.getStatusCode();
String json = null;
if (httpResponse.getEntity() != null) {
try {
json = EntityUtils.toString(httpResponse.getEntity());
} catch (IOException e) {
throw new EtcdClientException("Error reading response", e);
}
}
if (!contains(expectedHttpStatusCodes, statusCode)) {
if (statusCode == 400 && json != null) {
// More information in JSON
} else {
throw new EtcdClientException("Error response from etcd: " + statusLine.getReasonPhrase(),
statusCode);
}
}
return new JsonResponse(json, statusCode);
} finally {
close(httpResponse);
}
}
private URI buildKeyUri(String prefix, String key, String suffix) {
StringBuilder sb = new StringBuilder();
sb.append(prefix);
if (key.startsWith("/")) {
key = key.substring(1);
}
for (String token : Splitter.on('/').split(key)) {
sb.append("/");
sb.append(urlEscape(token));
}
sb.append(suffix);
URI uri = baseUri.resolve(sb.toString());
return uri;
}
protected ListenableFuture<HttpResponse> asyncExecuteHttp(HttpUriRequest request) {
final SettableFuture<HttpResponse> future = SettableFuture.create();
httpClient.execute(request, new FutureCallback<HttpResponse>() {
public void completed(HttpResponse result) {
future.set(result);
}
public void failed(Exception ex) {
future.setException(ex);
}
public void cancelled() {
future.setException(new InterruptedException());
}
});
return future;
}
public static void close(HttpResponse response) {
if (response == null) {
return;
}
HttpEntity entity = response.getEntity();
if (entity != null) {
EntityUtils.consumeQuietly(entity);
}
}
protected static String urlEscape(String s) {
try {
return URLEncoder.encode(s, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException(e);
}
}
public static String format(Object o) {
try {
return gson.toJson(o);
} catch (Exception e) {
return "Error formatting: " + e.getMessage();
}
}
}
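/*
 * Minimal usage sketch (assumes an etcd v2 server at the hypothetical address
 * http://127.0.0.1:4001/; exception handling omitted):
 *
 *   EtcdClient client = new EtcdClient(URI.create("http://127.0.0.1:4001/"));
 *   client.set("/sample/key", "hello", 60);                      // set with a 60s ttl
 *   EtcdResult current = client.get("/sample/key");              // null if the key is absent
 *   ListenableFuture<EtcdResult> change = client.watch("/sample/key");
 *   client.delete("/sample/key");
 */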
|
package com.lb_stuff.kataparty;
import com.lb_stuff.kataparty.api.IPartySet;
import com.lb_stuff.kataparty.api.KataPartyService;
import com.lb_stuff.kataparty.api.IMessenger;
import com.lb_stuff.kataparty.api.IParty;
import static com.lb_stuff.kataparty.api.IParty.IMember;
import com.lb_stuff.kataparty.api.PartyRank;
import com.lb_stuff.kataparty.api.IPartySettings;
import static com.lb_stuff.kataparty.api.IPartySettings.IMemberSettings;
import com.lb_stuff.kataparty.api.event.PartyDisbandEvent;
import com.lb_stuff.kataparty.api.event.PartyMemberJoinEvent;
import com.lb_stuff.kataparty.api.event.PartyMemberLeaveEvent;
import com.lb_stuff.kataparty.api.event.PartySettingsChangeEvent;
import org.bukkit.Bukkit;
import org.bukkit.OfflinePlayer;
import org.bukkit.entity.Player;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.Location;
import org.bukkit.World;
import java.util.*;
public final class Party extends PartySettings implements IParty
{
private final IPartySet parties;
private final IMessenger messenger;
private final Set<IMember> members = new HashSet<>();
private Inventory inv = null;
@Override
public Map<String, Object> serialize()
{
Map<String, Object> data = super.serialize();
if(inv != null)
{
data.put("inv", inv.getContents());
}
data.put("members", members.toArray(new IMember[0]));
return data;
}
public Party(Map<String, Object> data)
{
KataPartyPlugin plugin = (KataPartyPlugin)Bukkit.getServicesManager().getRegistration(KataPartyService.class).getPlugin();
parties = plugin.getPartySet();
messenger = parties.getMessenger();
Object inventory = data.get("inv");
if(inventory instanceof List)
{
enableInventory();
inv.setContents(((List<ItemStack>)inventory).toArray(new ItemStack[0]));
data.put("inv", true);
}
super.apply(PartySettings.deserialize(data));
List<IMember> mems = (List<IMember>)data.get("members");
for(IMember m : mems)
{
m.setParty(this);
members.add(m);
}
setAll(Metadatable.deserialize(data));
}
public Party(IPartySet ps, IPartySettings settings)
{
super(settings);
parties = ps;
messenger = ps.getMessenger();
cloneAll(settings);
if(super.hasInventory())
{
enableInventory();
}
}
@Override @Deprecated
public void informMembers(String message)
{
for(IMember m : members)
{
m.inform(message);
}
}
@Override
public void informMembersMessage(String name, Object... parameters)
{
for(IMember m : members)
{
m.informMessage(name, parameters);
}
}
@Override
public IPartySet getPartySet()
{
return parties;
}
private boolean canChangeSettings(IPartySettings s)
{
PartySettingsChangeEvent psce = new PartySettingsChangeEvent(this, s);
Bukkit.getPluginManager().callEvent(psce);
return !psce.isCancelled();
}
@Override
public void setName(String n)
{
PartySettings changes = new PartySettings(this);
changes.setName(n);
if(!canChangeSettings(changes))
{
return;
}
informMembersMessage("party-rename-inform", getName(), n);
super.setName(n);
}
@Override
public int hashCode()
{
return getName().toLowerCase().hashCode();
}
@Override
public boolean equals(Object obj)
{
if(obj == null)
{
return false;
}
if(obj instanceof IPartySettings)
{
return getName().equalsIgnoreCase(((IPartySettings)obj).getName());
}
else if(obj instanceof String)
{
return getName().equalsIgnoreCase((String)obj);
}
return false;
}
@Override
public IMember newMember(IMemberSettings settings, PartyMemberJoinEvent.Reason r)
{
if(disbanded)
{
return null;
}
{
IMember m;
while((m = getPartySet().findMember(settings.getUuid())) != null)
{
if(!m.getParty().removeMember(settings.getUuid(), PartyMemberLeaveEvent.Reason.SWITCH_PARTIES))
{
return null;
}
}
}
if(r != null)
{
PartyMemberJoinEvent pmje = new PartyMemberJoinEvent(this, settings, r);
Bukkit.getServer().getPluginManager().callEvent(pmje);
if(pmje.isCancelled())
{
return null;
}
}
IMember m = getPartySet().getMemberFactory(settings.getClass()).create(this, settings);
if(m != null)
{
members.add(m);
OfflinePlayer offp = Bukkit.getOfflinePlayer(settings.getUuid());
if(offp.isOnline())
{
informMembersMessage("party-join-inform", offp.getPlayer().getDisplayName());
m.informMessage("manage-hint");
}
else
{
informMembersMessage("party-join-inform", offp.getName());
}
m.setTp(m.canTp()); //shows message
}
return m;
}
@Override
public boolean removeMember(UUID uuid, PartyMemberLeaveEvent.Reason r)
{
final boolean hadmembers = (numMembers() > 0);
IMember m = null;
for(Iterator<IMember> it = members.iterator(); it.hasNext();)
{
m = it.next();
if(m.getUuid().equals(uuid))
{
PartyMemberLeaveEvent pmle = new PartyMemberLeaveEvent(m, r);
Bukkit.getServer().getPluginManager().callEvent(pmle);
if(pmle.isCancelled())
{
return false;
}
if(!disbanded)
{
m.informMessage("party-left-inform");
}
it.remove();
break;
}
m = null;
}
if(!disbanded)
{
informMembersMessage("party-leave-inform", Bukkit.getOfflinePlayer(uuid).getName());
if(hadmembers && numMembers() == 0 && !parties.keepEmptyParties() && !isSticky())
{
parties.remove(this, PartyDisbandEvent.Reason.AUTOMATIC_CLOSE, Bukkit.getPlayer(uuid));
if(m != null)
{
m.informMessage("party-closed-on-leave-inform", getName());
}
}
}
if(m != null)
{
m.setParty(null);
}
return true;
}
@Override
public IMember findMember(UUID uuid)
{
for(IMember m : members)
{
if(m.getUuid().equals(uuid))
{
return m;
}
}
return null;
}
@Override
public IMember findMember(String name)
{
for(IMember m : members)
{
OfflinePlayer offp = Bukkit.getOfflinePlayer(m.getUuid());
if(offp != null && offp.getName() != null && offp.getName().equalsIgnoreCase(name))
{
return m;
}
}
return null;
}
@Override
public Iterator<IMember> iterator()
{
Set<IMember> mems = new HashSet<IMember>();
mems.addAll(members);
return mems.iterator();
}
@Override
public int numMembers()
{
return members.size();
}
@Override
public Set<IMember> getMembersOnline()
{
Set<IMember> mems = new HashSet<>();
for(IMember m : this)
{
if(Bukkit.getOfflinePlayer(m.getUuid()).isOnline())
{
mems.add(m);
}
}
return mems;
}
@Override
public Set<IMember> getMembersAlive()
{
Set<IMember> mems = getMembersOnline();
for(Iterator<IMember> it = mems.iterator(); it.hasNext(); )
{
IMember m = it.next();
Player p = Bukkit.getPlayer(m.getUuid());
if(p == null || p.isDead())
{
it.remove();
}
}
return mems;
}
@Override
public Set<IMember> getMembersRanked(PartyRank r)
{
Set<IMember> mems = new HashSet<>();
for(IMember m : this)
{
if(m.getRank().equals(r))
{
mems.add(m);
}
}
return mems;
}
private boolean disbanded = false;
@Override
public boolean disband(PartyDisbandEvent.Reason r, Player p)
{
PartyDisbandEvent pde = new PartyDisbandEvent(this, r, p);
Bukkit.getServer().getPluginManager().callEvent(pde);
if(!pde.isCancelled())
{
disbanded = true;
for(IMember m : this.members.toArray(new IMember[0]))
{
m.informMessage("party-disband-inform");
removeMember(m.getUuid(), PartyMemberLeaveEvent.Reason.DISBAND);
}
return true;
}
return false;
}
@Override
public boolean isDisbanded()
{
return disbanded;
}
@Override
public void setTp(boolean enabled)
{
PartySettings changes = new PartySettings(this);
changes.setTp(enabled);
if(!canChangeSettings(changes))
{
return;
}
if(enabled)
{
informMembersMessage("party-teleports-enabled-inform");
}
else
{
informMembersMessage("party-teleports-disabled-inform");
}
super.setTp(enabled);
}
@Override
public void setPvp(boolean enabled)
{
PartySettings changes = new PartySettings(this);
changes.setPvp(enabled);
if(!canChangeSettings(changes))
{
return;
}
if(enabled)
{
informMembersMessage("party-pvp-enabled-inform");
}
else
{
informMembersMessage("party-pvp-disabled-inform");
}
super.setPvp(enabled);
}
@Override
public void setVisible(boolean enabled)
{
PartySettings changes = new PartySettings(this);
changes.setVisible(enabled);
if(!canChangeSettings(changes))
{
return;
}
if(enabled)
{
informMembersMessage("party-visibility-enabled-inform");
}
else
{
informMembersMessage("party-visibility-disabled-inform");
}
super.setVisible(enabled);
}
@Override
public boolean hasInventory()
{
return inv != null;
}
@Override
public void enableInventory()
{
PartySettings changes = new PartySettings(this);
changes.setInventory(true);
if(!canChangeSettings(changes))
{
return;
}
if(inv == null)
{
inv = Bukkit.createInventory(null, 4 * 9, messenger.getMessage("party-inventory-gui-title", getName()));
informMembersMessage("party-inventory-enable-inform");
}
super.setInventory(true);
}
@Override
public Inventory getInventory()
{
return inv;
}
@Override
public void disableInventory(Location droploc)
{
PartySettings changes = new PartySettings(this);
changes.setInventory(false);
if(!canChangeSettings(changes))
{
return;
}
if(inv != null)
{
for(ItemStack i : inv.getContents())
{
if(i != null)
{
if(droploc != null)
{
droploc.getWorld().dropItem(droploc, i).setPickupDelay(0);
}
else
{
World w = Bukkit.getServer().getWorlds().get(0);
w.dropItemNaturally(w.getSpawnLocation(), i);
}
}
}
inv = null;
informMembersMessage("party-inventory-disable-inform");
}
super.setInventory(false);
}
@Override @Deprecated
public void setInventory(boolean enabled)
{
if(enabled)
{
enableInventory();
}
else
{
disableInventory(null);
}
}
@Override
public void setInviteOnly(boolean enabled)
{
PartySettings changes = new PartySettings(this);
changes.setInviteOnly(enabled);
if(!canChangeSettings(changes))
{
return;
}
if(enabled)
{
informMembersMessage("party-invite-only-inform");
}
else
{
informMembersMessage("party-public-inform");
}
super.setInviteOnly(enabled);
}
@Override
public void setHealthShared(boolean enabled)
{
PartySettings changes = new PartySettings(this);
changes.setHealthShared(enabled);
if(!canChangeSettings(changes))
{
return;
}
if(enabled)
{
informMembersMessage("party-shared-health-enable-inform");
}
else
{
informMembersMessage("party-shared-health-disable-inform");
}
super.setHealthShared(enabled);
}
@Override
public String rankName(PartyRank r)
{
switch(r)
{
case ADMIN: return messenger.getMessage("party-rank-admin");
case MODERATOR: return messenger.getMessage("party-rank-moderator");
case MEMBER: return messenger.getMessage("party-rank-member");
default: throw new IllegalStateException();
}
}
public static class Member extends MemberSettings implements IMember
{
private IParty p;
@Override
public Map<String, Object> serialize()
{
Map<String, Object> data = super.serialize();
return data;
}
public Member(Map<String, Object> data)
{
super(MemberSettings.deserialize(data));
setAll(Metadatable.deserialize(data));
}
@Override @Deprecated
public void setParty(IParty party)
{
p = party;
}
public Member(IParty party, IMemberSettings settings)
{
super(settings);
p = party;
cloneAll(settings);
}
@Override @Deprecated
public void inform(String message)
{
OfflinePlayer offp = Bukkit.getOfflinePlayer(getUuid());
if(offp.isOnline())
{
offp.getPlayer().sendMessage("[KataParty] "+message);
}
}
@Override
public void informMessage(String name, Object... parameters)
{
OfflinePlayer offp = Bukkit.getOfflinePlayer(getUuid());
if(offp.isOnline())
{
p.getPartySet().getMessenger().tellMessage(offp.getPlayer(), name, parameters);
}
}
@Override
public IParty getParty()
{
return p;
}
@Override
public String getRankName()
{
return p.rankName(getRank());
}
@Override
public void setRank(PartyRank r)
{
super.setRank(r);
informMessage("party-rank-inform", p.rankName(r));
}
@Override
public void setTp(boolean v)
{
if(v)
{
informMessage("party-self-teleports-enable-inform");
}
else
{
informMessage("party-self-teleports-disable-inform");
}
super.setTp(v);
}
}
}
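/*
 * Minimal usage sketch (assumes an existing IParty instance named "party",
 * typically obtained from the plugin's IPartySet; the player name is illustrative):
 *
 *   IMember member = party.findMember("SomePlayer");
 *   if (member != null) {
 *       member.informMessage("manage-hint");
 *   }
 *   for (IMember online : party.getMembersOnline()) {
 *       // online members only
 *   }
 */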
|
package com.stratio.specs;
import com.auth0.jwt.JWTSigner;
import com.ning.http.client.cookie.Cookie;
import com.stratio.exceptions.DBException;
import com.stratio.tests.utils.RemoteSSHConnection;
import com.stratio.tests.utils.ThreadProperty;
import cucumber.api.DataTable;
import cucumber.api.java.en.Given;
import org.assertj.core.api.Assertions;
import org.openqa.selenium.WebElement;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.stratio.assertions.Assertions.assertThat;
/**
* Generic Given Specs.
*/
public class GivenGSpec extends BaseGSpec {
public static final int PAGE_LOAD_TIMEOUT = 120;
public static final int IMPLICITLY_WAIT = 10;
public static final int SCRIPT_TIMEOUT = 30;
/**
* Generic constructor.
*
* @param spec
*/
public GivenGSpec(CommonG spec) {
this.commonspec = spec;
}
/**
* Create a basic Index.
*
* @param index_name index name
* @param table the table where index will be created.
* @param column the column where index will be saved
* @param keyspace keyspace used
* @throws Exception
*/
@Given("^I create a Cassandra index named '(.+?)' in table '(.+?)' using magic_column '(.+?)' using keyspace '(.+?)'$")
public void createBasicMapping(String index_name, String table, String column, String keyspace) throws Exception {
String query = "CREATE INDEX " + index_name + " ON " + table + " (" + column + ");";
commonspec.getCassandraClient().executeQuery(query);
}
/**
* Create a Cassandra Keyspace.
*
* @param keyspace
*/
@Given("^I create a Cassandra keyspace named '(.+)'$")
public void createCassandraKeyspace(String keyspace) {
commonspec.getCassandraClient().createKeyspace(keyspace);
}
/**
* Connect to cluster.
*
* @param clusterType DB type (Cassandra|Mongo|Elasticsearch)
* @param url url where the cluster is running
*/
@Given("^I connect to '(Cassandra|Mongo|Elasticsearch)' cluster at '(.+)'$")
public void connect(String clusterType, String url) throws DBException, UnknownHostException {
switch (clusterType) {
case "Cassandra":
commonspec.getCassandraClient().buildCluster();
commonspec.getCassandraClient().connect();
break;
case "Mongo":
commonspec.getMongoDBClient().connect();
break;
case "Elasticsearch":
LinkedHashMap<String, Object> settings_map = new LinkedHashMap<String, Object>();
settings_map.put("cluster.name", System.getProperty("ES_CLUSTER", "elasticsearch"));
commonspec.getElasticSearchClient().setSettings(settings_map);
commonspec.getElasticSearchClient().connect();
break;
default:
throw new DBException("Unknown cluster type");
}
}
/**
* Create table
*
* @param table
* @param datatable
* @param keyspace
* @throws Exception
*/
@Given("^I create a Cassandra table named '(.+?)' using keyspace '(.+?)' with:$")
public void createTableWithData(String table, String keyspace, DataTable datatable) {
try {
commonspec.getCassandraClient().useKeyspace(keyspace);
int attrLength = datatable.getGherkinRows().get(0).getCells().size();
Map<String, String> columns = new HashMap<String, String>();
ArrayList<String> pk = new ArrayList<String>();
for (int i = 0; i < attrLength; i++) {
columns.put(datatable.getGherkinRows().get(0).getCells().get(i),
datatable.getGherkinRows().get(1).getCells().get(i));
if ((datatable.getGherkinRows().size() == 3) && datatable.getGherkinRows().get(2).getCells().get(i).equalsIgnoreCase("PK")) {
pk.add(datatable.getGherkinRows().get(0).getCells().get(i));
}
}
if (pk.isEmpty()) {
throw new Exception("A PK is needed");
}
commonspec.getCassandraClient().createTableWithData(table, columns, pk);
} catch (Exception e) {
// TODO Auto-generated catch block
commonspec.getLogger().debug("Exception captured");
commonspec.getLogger().debug(e.toString());
commonspec.getExceptions().add(e);
}
}
/**
* Insert Data
*
* @param table
* @param datatable
* @param keyspace
* @throws Exception
*/
@Given("^I insert in keyspace '(.+?)' and table '(.+?)' with:$")
public void insertData(String keyspace, String table, DataTable datatable) {
try {
commonspec.getCassandraClient().useKeyspace(keyspace);
int attrLength = datatable.getGherkinRows().get(0).getCells().size();
Map<String, Object> fields = new HashMap<String, Object>();
for (int e = 1; e < datatable.getGherkinRows().size(); e++) {
for (int i = 0; i < attrLength; i++) {
fields.put(datatable.getGherkinRows().get(0).getCells().get(i), datatable.getGherkinRows().get(e).getCells().get(i));
}
commonspec.getCassandraClient().insertData(keyspace + "." + table, fields);
}
} catch (Exception e) {
// TODO Auto-generated catch block
commonspec.getLogger().debug("Exception captured");
commonspec.getLogger().debug(e.toString());
commonspec.getExceptions().add(e);
}
}
@Given("^I save element (in position \'(.+?)\' in )?\'(.+?)\' in environment variable \'(.+?)\'$")
public void saveElementEnvironment(String foo, String position, String element, String envVar) throws Exception {
Pattern pattern = Pattern.compile("^((.*)(\\.)+)(\\$.*)$");
Matcher matcher = pattern.matcher(element);
String json;
String parsedElement;
if (matcher.find()) {
json = matcher.group(2);
parsedElement = matcher.group(4);
} else {
json = commonspec.getResponse().getResponse();
parsedElement = element;
}
String value = commonspec.getJSONPathString(json, parsedElement, position);
if (value == null) {
throw new Exception("Element to be saved: " + element + " is null");
} else {
ThreadProperty.set(envVar, value);
}
}
/**
* Drop all the ElasticSearch indexes.
*/
@Given("^I drop every existing elasticsearch index$")
public void dropElasticsearchIndexes() {
commonspec.getElasticSearchClient().dropAllIndexes();
}
/**
* Drop a specific ElasticSearch index.
*
* @param index
*/
@Given("^I drop an elasticsearch index named '(.+?)'$")
public void dropElasticsearchIndex(String index) {
commonspec.getElasticSearchClient().dropSingleIndex(index);
}
/**
* Execute a cql file over a Cassandra keyspace.
*
* @param filename
* @param keyspace
*/
@Given("a Cassandra script with name '(.+?)' and default keyspace '(.+?)'$")
public void insertDataOnCassandraFromFile(String filename, String keyspace) {
commonspec.getCassandraClient().loadTestData(keyspace, "/scripts/" + filename);
}
/**
* Drop a Cassandra Keyspace.
*
* @param keyspace
*/
@Given("^I drop a Cassandra keyspace '(.+)'$")
public void dropCassandraKeyspace(String keyspace) {
commonspec.getCassandraClient().dropKeyspace(keyspace);
}
/**
* Create a MongoDB dataBase.
*
* @param databaseName
*/
@Given("^I create a MongoDB dataBase '(.+?)'$")
public void createMongoDBDataBase(String databaseName) {
commonspec.getMongoDBClient().connectToMongoDBDataBase(databaseName);
}
/**
* Drop MongoDB Database.
*
* @param databaseName
*/
@Given("^I drop a MongoDB database '(.+?)'$")
public void dropMongoDBDataBase(String databaseName) {
commonspec.getMongoDBClient().dropMongoDBDataBase(databaseName);
}
/**
* Insert data in a MongoDB table.
*
* @param dataBase
* @param tabName
* @param table
*/
@Given("^I insert into a MongoDB database '(.+?)' and table '(.+?)' this values:$")
public void insertOnMongoTable(String dataBase, String tabName, DataTable table) {
commonspec.getMongoDBClient().connectToMongoDBDataBase(dataBase);
commonspec.getMongoDBClient().insertIntoMongoDBCollection(tabName, table);
}
/**
* Truncate table in MongoDB.
*
* @param database
* @param table
*/
@Given("^I drop every document at a MongoDB database '(.+?)' and table '(.+?)'")
public void truncateTableInMongo(String database, String table) {
commonspec.getMongoDBClient().connectToMongoDBDataBase(database);
commonspec.getMongoDBClient().dropAllDataMongoDBCollection(table);
}
/**
* Browse to {@code url} using the current browser.
*
* @param path
* @throws Exception
*/
@Given("^I browse to '(.+?)'$")
public void seleniumBrowse(String path) throws Exception {
assertThat(path).isNotEmpty();
if (commonspec.getWebHost() == null) {
throw new Exception("Web host has not been set");
}
if (commonspec.getWebPort() == null) {
throw new Exception("Web port has not been set");
}
String webURL = "http://" + commonspec.getWebHost() + commonspec.getWebPort();
commonspec.getDriver().get(webURL + path);
commonspec.setParentWindow(commonspec.getDriver().getWindowHandle());
}
/**
* Set app host and port {@code host, @code port}
*
* @param host
* @param port
*/
@Given("^My app is running in '([^:]+?)(:.+?)?'$")
public void setupApp(String host, String port) {
assertThat(host).isNotEmpty();
if (port == null) {
port = ":80";
}
commonspec.setWebHost(host);
commonspec.setWebPort(port);
commonspec.setRestHost(host);
commonspec.setRestPort(port);
}
/**
* Browse to {@code webHost, @code webPort} using the current browser.
*
* @param webHost
* @param webPort
* @throws MalformedURLException
*/
@Given("^I set web base url to '([^:]+?)(:.+?)?'$")
public void setupWeb(String webHost, String webPort) throws MalformedURLException {
assertThat(webHost).isNotEmpty();
if (webPort == null) {
webPort = ":80";
}
commonspec.setWebHost(webHost);
commonspec.setWebPort(webPort);
}
/**
* Send requests to {@code restHost @code restPort}.
*
* @param restHost
* @param restPort
*/
@Given("^I( securely)? send requests to '([^:]+?)(:.+?)?'$")
public void setupRestClient(String isSecured, String restHost, String restPort) {
assertThat(restHost).isNotEmpty();
String restProtocol = "http://";
if (isSecured != null) {
restProtocol = "https://";
}
if (restHost == null) {
restHost = "localhost";
}
if (restPort == null) {
restPort = ":80";
}
commonspec.setRestProtocol(restProtocol);
commonspec.setRestHost(restHost);
commonspec.setRestPort(restPort);
}
/**
* Maximizes current browser window. Mind the current resolution could break a test.
*/
@Given("^I maximize the browser$")
public void seleniumMaximize() {
commonspec.getDriver().manage().window().maximize();
}
/**
* Switches to a frame/ iframe.
*/
@Given("^I switch to the iframe on index '(\\d+?)'$")
public void seleniumSwitchFrame(Integer index) {
assertThat(commonspec.getPreviousWebElements()).as("There are less found elements than required")
.hasAtLeast(index);
WebElement elem = commonspec.getPreviousWebElements().getPreviousWebElements().get(index);
commonspec.getDriver().switchTo().frame(elem);
}
/**
* Switches to a parent frame/ iframe.
*/
@Given("^I switch to a parent frame$")
public void seleniumSwitchAParentFrame() {
commonspec.getDriver().switchTo().parentFrame();
}
/**
* Switches to the frames main container.
*/
@Given("^I switch to the main frame container$")
public void seleniumSwitchParentFrame() {
commonspec.getDriver().switchTo().frame(commonspec.getParentWindow());
}
/*
* Opens a ssh connection to remote host
*
* @param remoteHost
* @param user
* @param password (required if pemFile null)
* @param pemFile (required if password null)
*
*/
@Given("^I open remote ssh connection to host '(.+?)' with user '(.+?)'( and password '(.+?)')?( using pem file '(.+?)')?$")
public void openSSHConnection(String remoteHost, String user, String foo, String password, String bar, String pemFile) throws Exception {
if ((pemFile == null) || (pemFile.equals(""))) {
if (password == null) {
throw new Exception("You have to provide a password or a pem file to be used for connection");
}
commonspec.setRemoteSSHConnection(new RemoteSSHConnection(user, password, remoteHost, null));
} else {
File pem = new File(pemFile);
if (!pem.exists()) {
throw new Exception("Pem file: " + pemFile + " does not exist");
}
commonspec.setRemoteSSHConnection(new RemoteSSHConnection(user, null, remoteHost, pemFile));
}
}
/*
* Authenticate in a DCOS cluster
*
* @param remoteHost
* @param email
* @param user
* @param password (required if pemFile null)
* @param pemFile (required if password null)
*
*/
@Given("^I want to authenticate in DCOS cluster '(.+?)' with email '(.+?)' with user '(.+?)'( and password '(.+?)')?( using pem file '(.+?)')$")
public void authenticateDCOSpem(String remoteHost,String email, String user, String foo, String password, String bar, String pemFile) throws Exception {
String DCOSsecret = null;
boolean hasPassword = (password != null) && !password.equals("");
boolean hasPemFile = (pemFile != null) && !pemFile.equals("");
if (!hasPemFile && hasPassword) {
commonspec.setRemoteSSHConnection(new RemoteSSHConnection(user, password, remoteHost, null));
commonspec.getRemoteSSHConnection().runCommand("sudo cat /var/lib/dcos/dcos-oauth/auth-token-secret");
DCOSsecret = commonspec.getRemoteSSHConnection().getResult().trim();
} else if (!hasPassword && hasPemFile) {
File pem = new File(pemFile);
if (!pem.exists()) {
throw new Exception("Pem file: " + pemFile + " does not exist");
}
commonspec.setRemoteSSHConnection(new RemoteSSHConnection(user, null, remoteHost, pemFile));
commonspec.getRemoteSSHConnection().runCommand("sudo cat /var/lib/dcos/dcos-oauth/auth-token-secret");
DCOSsecret = commonspec.getRemoteSSHConnection().getResult().trim();
} else if (!hasPassword && !hasPemFile) {
throw new Exception("Either password or Pem file must be provided");
}
if (DCOSsecret == null){
throw new Exception("There was an error trying to obtain DCOS secret.");
}
final JWTSigner signer = new JWTSigner(DCOSsecret);
final HashMap<String, Object> claims = new HashMap<>();
claims.put("uid", email);
final String jwt = signer.sign(claims);
Cookie cookie = new Cookie("dcos-acs-auth-cookie", jwt, false, "", "", 99999, false, false);
List<Cookie> cookieList = new ArrayList<Cookie>();
cookieList.add(cookie);
commonspec.setCookies(cookieList);
}
/*
* Authenticate in a DCOS cluster
*
* @param dcosHost
* @param user
*
*/
@Given("^I authenticate in DCOS cluster '(.+?)' with email '(.+?)'$")
public void authenticateDCOS(String dcosCluster, String user) throws Exception {
commonspec.setRemoteSSHConnection(new RemoteSSHConnection("root", "stratio", dcosCluster, null));
commonspec.getRemoteSSHConnection().runCommand("cat /var/lib/dcos/dcos-oauth/auth-token-secret");
String DCOSsecret = commonspec.getRemoteSSHConnection().getResult().trim();
final JWTSigner signer = new JWTSigner(DCOSsecret);
final HashMap<String, Object> claims = new HashMap<>();
claims.put("uid", user);
final String jwt = signer.sign(claims);
Cookie cookie = new Cookie("dcos-acs-auth-cookie", jwt, false, "", "", 99999, false, false);
List<Cookie> cookieList = new ArrayList<Cookie>();
cookieList.add(cookie);
commonspec.setCookies(cookieList);
}
/*
* Copies file/s from remote system into local system
*
* @param remotePath
* @param localPath
*
*/
@Given("^I copy '(.+?)' from remote ssh connection and store it in '(.+?)'$")
public void copyFromRemoteFile(String remotePath, String localPath) throws Exception {
commonspec.getRemoteSSHConnection().copyFrom(remotePath, localPath);
}
/*
* Copies file/s from local system to remote system
*
* @param localPath
* @param remotePath
*
*/
@Given("^I copy '(.+?)' to remote ssh connection in '(.+?)'$")
public void copyToRemoteFile(String localPath, String remotePath) throws Exception {
commonspec.getRemoteSSHConnection().copyTo(localPath, remotePath);
}
/**
* Executes the command specified in local system
*
* @param command
**/
@Given("^I execute command '(.+?)' locally$")
public void executeLocalCommand(String command) throws Exception {
commonspec.runLocalCommand(command);
}
/**
* Executes the command specified in remote system
*
* @param command
**/
@Given("^I execute command '(.+?)' in remote ssh connection( with exit status '(.+?)')?( and save the value in environment variable '(.+?)')?$")
public void executeCommand(String command, String foo, Integer exitStatus, String var, String envVar) throws Exception {
if (exitStatus == null) {
exitStatus = 0;
}
commonspec.getRemoteSSHConnection().runCommand(command);
commonspec.setCommandResult(commonspec.getRemoteSSHConnection().getResult());
commonspec.setCommandExitStatus(commonspec.getRemoteSSHConnection().getExitStatus());
List<String> logOutput = Arrays.asList(commonspec.getCommandResult().split("\n"));
StringBuffer log = new StringBuffer();
int logLastLines = Math.min(25, logOutput.size());
for (String s : logOutput.subList(logOutput.size() - logLastLines, logOutput.size())) {
log.append(s).append("\n");
}
if (envVar != null){
ThreadProperty.set(envVar, commonspec.getRemoteSSHConnection().getResult());
}
if (commonspec.getRemoteSSHConnection().getExitStatus() != 0) {
if (ThreadProperty.get("logLevel").equalsIgnoreCase("debug")) {
commonspec.getLogger().debug("Command complete stdout:\n{}", commonspec.getCommandResult());
} else {
commonspec.getLogger().error("Command last {} lines stdout:", logLastLines);
commonspec.getLogger().error("{}", log);
}
} else {
commonspec.getLogger().debug("Command complete stdout:\n{}", commonspec.getCommandResult());
}
Assertions.assertThat(commonspec.getRemoteSSHConnection().getExitStatus()).isEqualTo(exitStatus);
}
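/*
* Hedged usage sketch: illustrative feature-file steps matching the step definition
* above; the command, exit status and variable name are placeholders.
*
*   Given I execute command 'ls -la /tmp' in remote ssh connection
*   Given I execute command 'cat /etc/hostname' in remote ssh connection with exit status '0' and save the value in environment variable 'hostName'
*/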
/**
* Insert a document into a MongoDB collection.
*
* @param dataBase
* @param collection
* @param document
*/
@Given("^I insert into MongoDB database '(.+?)' and collection '(.+?)' the document from schema '(.+?)'$")
public void insertOnMongoTable(String dataBase, String collection, String document) throws Exception {
String retrievedDoc = commonspec.retrieveData(document, "json");
commonspec.getMongoDBClient().connectToMongoDBDataBase(dataBase);
commonspec.getMongoDBClient().insertDocIntoMongoDBCollection(collection, retrievedDoc);
}
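/*
* Hedged usage sketch: an illustrative feature-file step matching the step definition
* above; the database, collection and schema file are placeholders.
*
*   Given I insert into MongoDB database 'testDB' and collection 'users' the document from schema 'schemas/user.json'
*/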
/**
* Check that a new browser window has been opened (i.e. exactly two window handles exist).
*/
@Given("^new window is opened$")
public void seleniumGetwindows() {
Set<String> wel = commonspec.getDriver().getWindowHandles();
Assertions.assertThat(wel).as("Element count doesn't match").hasSize(2);
}
/**
* Connect to zookeeper.
*
* @param zookeeperHosts as host:port (comma separated)
*/
@Given("^I connect to zk cluster at '(.+)'$")
public void connectToZk(String zookeeperHosts) {
commonspec.getZookeeperClient().setZookeeperConnection(zookeeperHosts, 3000);
commonspec.getZookeeperClient().connectZk();
}
/**
* Connect to Kafka.
* @param zkHost
* @param zkPort
* @param zkPath
*/
@Given("^I connect to kafka cluster at '(.+)':'(.+)' using path '(.+)'$")
public void connectKafka(String zkHost, String zkPort, String zkPath) throws UnknownHostException {
if (System.getenv("DCOS_CLUSTER") != null) {
commonspec.getKafkaUtils().setZkHost(zkHost, zkPort, zkPath);
} else {
commonspec.getKafkaUtils().setZkHost(zkHost, zkPort, "dcos-service-" + zkPath);
}
commonspec.getKafkaUtils().connect();
}
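/*
* Hedged usage sketch: illustrative feature-file steps matching the zookeeper and kafka
* step definitions above; hosts, ports and the path are placeholders.
*
*   Given I connect to zk cluster at 'zk-1.example.com:2181,zk-2.example.com:2181'
*   Given I connect to kafka cluster at 'zk-1.example.com':'2181' using path 'kafka'
*/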
}
|
package com.uchicom.smtp;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.net.InetAddress;
import java.net.Socket;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.InitialDirContext;
import com.uchicom.server.Parameter;
import com.uchicom.server.ServerProcess;
public class SmtpProcess implements ServerProcess {
private SimpleDateFormat format = new SimpleDateFormat(
"yyyyMMdd_HHmmss.SSS");
private Parameter parameter;
private Socket socket;
private boolean bHelo;
private boolean bMailFrom;
private boolean bRcptTo;
private boolean bData;
private boolean bAuth;
private boolean bTransfer;
private int authStatus;
private String senderAddress;
private String helo;
private String mailFrom;
private Mail mail;
private String authName;
private List<MailBox> boxList = new ArrayList<>();
private List<MailBox> rcptList = new ArrayList<>();
private List<String> transferList = new ArrayList<>();
private long startTime = System.currentTimeMillis();
/**
* Creates an SMTP process for a single client connection.
*
* @param parameter server configuration
* @param socket client socket
*/
public SmtpProcess(Parameter parameter, Socket socket) {
this.parameter = parameter;
this.socket = socket;
}
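/*
* Hedged usage sketch: how a caller might drive this process. It assumes a Parameter
* instance configured elsewhere with at least the "host" and "dir" settings read below,
* and a Socket accepted by the listening server; the variable names are placeholders.
*
*   SmtpProcess process = new SmtpProcess(parameter, clientSocket);
*   process.execute(System.out); // runs the SMTP dialogue and closes the socket
*/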
public void execute() {
execute(System.out);
}
/**
* Runs the SMTP command loop for this connection, logging to the given stream.
*
* @param logStream stream that receives the session log
*/
public void execute(PrintStream logStream) {
this.senderAddress = socket.getInetAddress().getHostAddress();
logStream.println(System.currentTimeMillis() + ":"
+ String.valueOf(senderAddress));
BufferedReader br = null;
PrintStream ps = null;
Writer writer = null;
try {
br = new BufferedReader(new InputStreamReader(
socket.getInputStream()));
ps = new PrintStream(socket.getOutputStream());
SmtpUtil.recieveLine(ps, "220 ", parameter.get("host"), " SMTP");
String line = br.readLine();
// Expected command sequence:
//   HELO/EHLO
//   MAIL FROM: -> RCPT TO: -> DATA  (repeated for each message)
while (line != null) {
logStream.println("[" + line + "]");
logStream.println("status:" + authStatus);
if (authStatus > 0 && authStatus < 3) {
switch (authStatus) {
case 1:
String name = new String(Base64.getDecoder().decode(line));
File dir = new File(parameter.getFile("dir"), name);
if (dir.exists() && dir.isDirectory()) {
authStatus = 2;
authName = name;
SmtpUtil.recieveLine(ps,
Constants.RECV_334,
" UGFzc3dvcmQ6");
}
break;
case 2:
String pass = new String(Base64.getDecoder().decode(line));
File passwordFile = new File(new File(parameter.getFile("dir"), authName), "pass.txt");
if (passwordFile.exists() && passwordFile.isFile()) {
try (BufferedReader passReader = new BufferedReader(
new InputStreamReader(
new FileInputStream(
passwordFile)));) {
String password = passReader.readLine();
while ("".equals(password)) {
password = passReader.readLine();
}
if (pass.equals(password)) {
SmtpUtil.recieveLine(ps,
Constants.RECV_235);
bAuth = true;
authStatus = 3;
} else {
SmtpUtil.recieveLine(ps,
Constants.RECV_535);
authStatus = 0;
}
}
}
break;
default:
}
} else if (bData) {
if (".".equals(line)) {
writer.close();
if (bTransfer) {
logStream.println("transferList:" + transferList);
for (String address : transferList) {
logStream.println("transfer:" + address);
String[] addresses = address.split("@");
String[] hosts = lookupMailHosts(addresses[1]);
for (String hostss : hosts) {
try (Socket transferSocket = new Socket(hostss, 25);
BufferedReader reader = new BufferedReader(new InputStreamReader(transferSocket.getInputStream()));
BufferedWriter writer2 = new BufferedWriter(new OutputStreamWriter(transferSocket.getOutputStream()));) {
logStream.println("t[" + reader.readLine() + "]");
startTime = System.currentTimeMillis();
writer2.write("EHLO uchicom.com\r\n");//EHLO
writer2.flush();
startTime = System.currentTimeMillis();
String rec = null;
// boolean starttls = false;
do {
rec = reader.readLine();
logStream.println("t[" + rec + "]");
if (rec.contains("STARTTLS")) {
// starttls = true;
}
}while (rec != null && rec.startsWith("250-"));
startTime = System.currentTimeMillis();
// if (starttls) {
// writer2.write("STARTTLS\r\n");// STARTTLS
// writer2.flush();
// startTime = System.currentTimeMillis();
// logStream.println("t[" + reader.readLine() + "]");
// writer2.close();
// reader.close();
// //SSL
// SSLSocket sslSocket = (SSLSocket) ((SSLSocketFactory) SSLSocketFactory.getDefault()).createSocket(
// socket,
// socket.getInetAddress().getHostAddress(),
// socket.getPort(),
// true);
//// sslSocket.setEnabledProtocols(sslSocket.getSupportedProtocols());
//// sslSocket.setEnabledCipherSuites(sslSocket.getSupportedCipherSuites());
// sslSocket.setEnableSessionCreation(true);
// sslSocket.setUseClientMode(true);
// logStream.println("startHandshake");
// startTime = System.currentTimeMillis();
// sslSocket.startHandshake();
// startTime = System.currentTimeMillis();
// logStream.println("reader2");
// BufferedReader reader2 = new BufferedReader(new InputStreamReader(sslSocket.getInputStream()));
// logStream.println("writer3");
// BufferedWriter writer3 = new BufferedWriter(new OutputStreamWriter(sslSocket.getOutputStream()));
// startTime = System.currentTimeMillis();
// logStream.println("ehlo");
// writer3.write("EHLO uchicom.com\r\n");//EHLO
// logStream.println("flush");
// writer3.flush();
// startTime = System.currentTimeMillis();
// rec = null;
// logStream.println("readline");
// rec = reader2.readLine();
// logStream.println("t[" + rec + "]");
// }while (rec != null && rec.startsWith("250-"));
// logStream.println("mailFrom3:" + mailFrom);
// writer3.write("MAIL FROM: <" + mailFrom + ">\r\n");// MAIL FROM:
// writer3.flush();
// startTime = System.currentTimeMillis();
// logStream.println("t[" + reader2.readLine() + "]");
// startTime = System.currentTimeMillis();
// writer3.write("RCPT TO: <" + address + ">\r\n");// RCPT TO:
// writer3.flush();
// startTime = System.currentTimeMillis();
// logStream.println("t[" + reader2.readLine() + "]");
// startTime = System.currentTimeMillis();
// writer3.write("DATA\r\n");// DATA
// writer3.flush();
// startTime = System.currentTimeMillis();
// logStream.println("t[" + reader2.readLine() + "]");
// startTime = System.currentTimeMillis();
// writer3.write(mail.getTitle());
// writer3.flush();
// startTime = System.currentTimeMillis();
// writer3.write(".\r\n");
// writer3.flush();
// logStream.println("t[" + reader2.readLine() + "]");
// startTime = System.currentTimeMillis();
// writer3.write("QUIT\r\n");
// writer3.flush();
// logStream.println("t[" + reader2.readLine() + "]");
// startTime = System.currentTimeMillis();
// reader2.close();
// writer3.close();
// } else {
logStream.println("mailFrom:" + mailFrom);
writer2.write("MAIL FROM: <" + mailFrom + ">\r\n");// MAIL FROM:
writer2.flush();
startTime = System.currentTimeMillis();
logStream.println("t[" + reader.readLine() + "]");
startTime = System.currentTimeMillis();
writer2.write("RCPT TO: <" + address + ">\r\n");// RCPT TO:
writer2.flush();
startTime = System.currentTimeMillis();
logStream.println("t[" + reader.readLine() + "]");
startTime = System.currentTimeMillis();
writer2.write("DATA\r\n");// DATA
writer2.flush();
startTime = System.currentTimeMillis();
logStream.println("t[" + reader.readLine() + "]");
startTime = System.currentTimeMillis();
writer2.write(mail.getTitle());
writer2.flush();
startTime = System.currentTimeMillis();
writer2.write(".\r\n");
writer2.flush();
logStream.println("t[" + reader.readLine() + "]");
startTime = System.currentTimeMillis();
writer2.write("QUIT\r\n");
writer2.flush();
logStream.println("t[" + reader.readLine() + "]");
startTime = System.currentTimeMillis();
}
logStream.println("mx!:" + hostss);
break;
}
}
mail.delete();
} else {
try {
mail.copy(boxList,
socket.getLocalAddress().getHostName(),
socket.getInetAddress().getHostName());
mail.delete();
} catch (Exception e) {
e.printStackTrace();
}
}
mail = null;
transferList.clear();
rcptList.clear();
SmtpUtil.recieveLine(ps, Constants.RECV_250_OK);
init();
} else if (line.startsWith(".")) {
// Undo SMTP dot-stuffing: remove the extra leading dot added by the client.
writer.write(line.substring(1));
writer.write("\r\n");
} else {
writer.write(line);
writer.write("\r\n");
}
} else if (!bHelo
&& (SmtpUtil.isEhelo(line) || SmtpUtil.isHelo(line))) {
bHelo = true;
String[] lines = line.split(" +");
helo = lines[1];
SmtpUtil.recieveLine(ps,
Constants.RECV_250,
"-",
parameter.get("host"),
" Hello ",
senderAddress);
SmtpUtil.recieveLine(ps, Constants.RECV_250, " AUTH LOGIN");
init();
} else if (SmtpUtil.isAuthLogin(line)) {
authStatus = 1;
SmtpUtil.recieveLine(ps, Constants.RECV_334, " VXNlcm5hbWU6");
} else if (SmtpUtil.isRset(line)) {
SmtpUtil.recieveLine(ps, Constants.RECV_250_OK);
init();
} else if (SmtpUtil.isMailFrom(line)) {
if (bHelo) {
mailFrom = line.substring(10)
.trim()
.replaceAll("[<>]", "");
logStream.println(mailFrom);
SmtpUtil.recieveLine(ps, Constants.RECV_250_OK);
bMailFrom = true;
} else {
// HELO/EHLO has not been received yet
SmtpUtil.recieveLine(ps, "451 ERROR");
}
} else if (SmtpUtil.isRcptTo(line)) {
if (bMailFrom) {
String[] heads = line.split(":");
String address = heads[1].trim().replaceAll("[<>]", "");
String[] addresses = address.split("@");
logStream.println(addresses[0]);
logStream.println(addresses[1]);
if (addresses[1].equals(parameter.get("host"))) {
if (parameter.is("memory")) {
for (String user : Context.singleton().getUsers()) {
if (addresses[0].equals(user)) {
boxList.add(new MailBox(address, Context.singleton().getMailList(user)));
break;
}
}
} else {
for (File box : parameter.getFile("dir").listFiles()) {
if (box.isDirectory()) {
if (addresses[0].equals(box.getName())) {
if (mailFromCheck(box, logStream)) {
boxList.add(new MailBox(address, box));
}
break;
}
}
}
}
if (boxList.size() > 0) {
SmtpUtil.recieveLine(ps, Constants.RECV_250_OK);
bRcptTo = true;
} else {
SmtpUtil.recieveLine(ps, "550 Failure reply");
}
} else if (bAuth) {
SmtpUtil.recieveLine(ps, Constants.RECV_250_OK);
bTransfer = true;
transferList.add(address);
bRcptTo = true;
} else {
SmtpUtil.recieveLine(ps, "500");
}
} else {
// 500
SmtpUtil.recieveLine(ps, "500");
}
} else if (SmtpUtil.isData(line)) {
if (bRcptTo) {
if (bTransfer) {
mail = new MemoryMail();
} else if (parameter.is("memory")) {
mail = new MemoryMail();
} else {
mail = new FileMail(
new File(new File(parameter.getFile("dir"), "@rcpt"), helo.replaceAll(":", "_")
+ "_"
+ mailFrom
+ "~"
+ senderAddress.replaceAll(":", "_")
+ "_"
+ format.format(new Date())
+ "_"
+ Thread.currentThread().getId()
+ ".eml"));
}
writer = mail.getWriter();
SmtpUtil.recieveLine(ps, Constants.RECV_354);
bData = true;
} else {
SmtpUtil.recieveLine(ps, "500");
}
} else if (SmtpUtil.isHelp(line)) {
SmtpUtil.recieveLine(ps, "250");
} else if (SmtpUtil.isQuit(line)) {
SmtpUtil.recieveLine(ps, "221 ", parameter.get("host"));
break;
} else if (SmtpUtil.isNoop(line)) {
SmtpUtil.recieveLine(ps, "250");
} else if (line.length() == 0) {
} else {
SmtpUtil.recieveLine(ps,
"500 Syntax error, command unrecognized");
}
startTime = System.currentTimeMillis();
line = br.readLine();
}
br.close();
ps.close();
} catch (IOException e) {
e.printStackTrace();
} catch (Throwable e) {
e.printStackTrace();
} finally {
if (mail != null) {
try {
mail.copy(boxList, socket.getInetAddress().getHostName(), InetAddress.getLocalHost().getHostName());
mail.delete();
} catch (Exception e) {
e.printStackTrace();
}
mail = null;
rcptList.clear();
}
if (writer != null) {
try {
writer.close();
} catch (IOException e) {
// TODO catch
e.printStackTrace();
}
}
if (socket != null) {
try {
socket.close();
} catch (Exception e) {
e.printStackTrace();
} finally {
socket = null;
}
}
}
}
private void init() {
// Reset per-message state so recipients from a previous transaction are not reused.
mail = null;
boxList.clear();
bMailFrom = false;
bRcptTo = false;
bData = false;
}
public long getStartTime() {
return startTime;
}
public void forceClose() {
System.out.println("forceClose!");
if (socket != null && socket.isConnected()) {
try {
socket.close();
} catch (IOException e) {
e.printStackTrace();
}
socket = null;
}
}
public List<Mail> getMailList(String user) {
if (parameter.is("memory")) {
return Context.singleton().getMailList(user);
} else {
List<Mail> mailList = new ArrayList<>();
File box = new File(parameter.getFile("dir"), user);
if (box.exists()) {
for (File file : box.listFiles()) {
try {
mailList.add(new FileMail(file));
} catch (IOException e) {
e.printStackTrace();
}
}
}
return mailList;
}
}
@Override
public long getLastTime() {
return System.currentTimeMillis();
}
static String[] lookupMailHosts(String domainName) throws NamingException {
InitialDirContext idc = new InitialDirContext();
Attributes attributes = idc.getAttributes("dns:/" + domainName, new String[] { "MX" });
Attribute attributeMX = attributes.get("MX");
if (attributeMX == null) {
return (new String[] { domainName });
}
String[][] pvhn = new String[attributeMX.size()][2];
for (int i = 0; i < attributeMX.size(); i++) {
pvhn[i] = ("" + attributeMX.get(i)).split("\\s+");
}
Arrays.sort(pvhn, new Comparator<String[]>() {
public int compare(String[] o1, String[] o2) {
return (Integer.parseInt(o1[0]) - Integer.parseInt(o2[0]));
}
});
String[] sortedHostNames = new String[pvhn.length];
for (int i = 0; i < pvhn.length; i++) {
if (pvhn[i][1].endsWith(".")) {
sortedHostNames[i] = pvhn[i][1].substring(0, pvhn[i][1].length() - 1);
} else {
sortedHostNames[i] = pvhn[i][1];
}
}
return sortedHostNames;
}
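/*
* Hedged usage sketch for the MX lookup above; the domain is a placeholder. The JNDI
* "dns:" lookup returns MX hosts sorted by preference, or the domain itself when no
* MX record exists.
*
*   String[] mxHosts = lookupMailHosts("example.com");
*   // try mxHosts[0] first, then fall back to the remaining entries
*/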
/**
* Checks the mailbox's ".ignore" settings to decide whether mail from the current
* MAIL FROM address should be delivered to the given box.
* @param box mailbox directory
* @param logStream log output
* @return true if the mail should be delivered
*/
private boolean mailFromCheck(File box, PrintStream logStream) {
boolean add = true;
File mailFromFile = new File(box, ".ignore");
if (mailFromFile.exists() && mailFromFile.isFile()) {
Properties prop = new Properties();
// Load the per-mailbox sender filter from the ".ignore" file.
try (FileInputStream fis = new FileInputStream(mailFromFile)) {
prop.load(fis);
String all = prop.getProperty("*");
if (all != null) {
add = Boolean.parseBoolean(all);
}
String ignore = prop.getProperty(mailFrom);
if (ignore != null) {
add = Boolean.parseBoolean(ignore);
}
} catch (Exception e) {
e.printStackTrace(logStream);
}
if (!add) {
// Record how many times mail from this rejected sender has been seen, using a
// separate Properties instance so the ".ignore" entries are not mixed in.
File mailFromResultFile = new File(box, ".ignore_result");
Properties resultProp = new Properties();
if (mailFromResultFile.exists()) {
try (FileInputStream fis = new FileInputStream(mailFromResultFile)) {
resultProp.load(fis);
} catch (Exception e) {
e.printStackTrace(logStream);
}
}
int cnt = 1;
String ignore = resultProp.getProperty(mailFrom);
if (ignore != null) {
cnt = Integer.parseInt(ignore) + 1;
}
resultProp.setProperty(mailFrom, String.valueOf(cnt));
try (FileOutputStream fos = new FileOutputStream(mailFromResultFile)) {
resultProp.store(fos, "");
} catch (Exception e) {
e.printStackTrace(logStream);
}
}
}
return add;
}
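/*
* Hedged example of a mailbox ".ignore" file as parsed above; keys and values are
* placeholders. "*" sets the default and per-sender entries override it, where
* "true" means deliver and "false" means reject.
*
*   *=true
*   spammer@example.com=false
*/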
}
|
package com.vaadan.playground;
import com.vaadin.data.Property;
import com.vaadin.data.util.BeanItemContainer;
import com.vaadin.data.util.ObjectProperty;
import com.vaadin.event.LayoutEvents;
import com.vaadin.ui.*;
import java.util.List;
public class Header extends HorizontalLayout {
final private ObjectProperty<Example> currentExample;
final private Button viewSourceBtn;
final private Button viewInfoBtn;
public Header(ObjectProperty<Example> currentExample){
this.currentExample = currentExample;
setWidth("100%");
setHeight("40px");
setStyleName("header");
setSpacing(true);
CssLayout logo = new CssLayout();
logo.setStyleName("logo");
addComponent(logo);
setComponentAlignment(logo, Alignment.MIDDLE_LEFT);
logo.addLayoutClickListener(new LayoutEvents.LayoutClickListener() {
@Override
public void layoutClick(LayoutEvents.LayoutClickEvent layoutClickEvent) {
UI.getCurrent().getPage().setUriFragment("home");
}
});
BeanItemContainer<Example> examples = new BeanItemContainer<Example>(Example.class);
List<Example> theExamples = ExampleSet.EXAMPLES.getExamples();
examples.addAll(theExamples);
final ComboBox examplesCB = new ComboBox();
examplesCB.setContainerDataSource(examples);
examplesCB.setItemCaptionMode(AbstractSelect.ItemCaptionMode.PROPERTY);
examplesCB.setItemCaptionPropertyId("name");
examplesCB.setNullSelectionAllowed(false);
examplesCB.setInputPrompt("Choose an example");
examplesCB.setImmediate(true);
examplesCB.addValueChangeListener(new Property.ValueChangeListener() {
@Override
public void valueChange(Property.ValueChangeEvent valueChangeEvent) {
Example selectedValue = (Example)valueChangeEvent.getProperty().getValue();
if(selectedValue != null){ //it will be null if the user goes back to the home view
//set the Uri fragment to selected example
UI.getCurrent().getPage().setUriFragment(selectedValue.getFragmentName());
}
}
});
addComponent(examplesCB);
setComponentAlignment(examplesCB, Alignment.MIDDLE_LEFT);
viewSourceBtn = new Button("Source");
addComponent(viewSourceBtn);
viewSourceBtn.setVisible(false);
setComponentAlignment(viewSourceBtn, Alignment.MIDDLE_LEFT);
viewSourceBtn.addClickListener(new Button.ClickListener() {
@Override
public void buttonClick(Button.ClickEvent clickEvent) {
displaySource();
}
});
viewInfoBtn = new Button("Info");
addComponent(viewInfoBtn);
viewInfoBtn.setVisible(false);
setComponentAlignment(viewInfoBtn, Alignment.MIDDLE_LEFT);
viewInfoBtn.addClickListener(new Button.ClickListener() {
@Override
public void buttonClick(Button.ClickEvent clickEvent) {
displayInfo();
}
});
//this will push the logo, combobox, source and info buttons to the left and everything else to the right
Label expandingGap = new Label();
expandingGap.setWidth("100%");
addComponent(expandingGap);
setExpandRatio(expandingGap, 1.0f);
currentExample.addValueChangeListener(new Property.ValueChangeListener() {
@Override
public void valueChange(Property.ValueChangeEvent valueChangeEvent) {
Example example = (Example)valueChangeEvent.getProperty().getValue();
if(example != null){
examplesCB.setValue(example);
viewSourceBtn.setVisible(true);
viewInfoBtn.setVisible(true);
}
else {
//home is showing, so hide all the buttons and reset the combobox
viewSourceBtn.setVisible(false);
viewInfoBtn.setVisible(false);
examplesCB.setValue(null);
}
}
});
}
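/*
* Hedged usage sketch (Vaadin 7 assumed): how a parent layout might create this header.
* The Example type comes from this project; the property construction and the layout
* variable are placeholders.
*
*   ObjectProperty<Example> current = new ObjectProperty<Example>(null, Example.class);
*   Header header = new Header(current);
*   mainLayout.addComponent(header);
*/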
private void displaySource(){
Example example = (Example)currentExample.getValue();
//List<String> sources = example.getSources();
//Notification.show("Show source for:"+example.getName());
SourceView sourceView = new SourceView(example);
UI.getCurrent().addWindow(sourceView);
}
private void displayInfo(){
Example example = (Example)currentExample.getValue();
Notification.show("Show info for:"+example.getName());
}
}
|
/* *\
** SICU Stress Measurement System **
** Project P04 | C380 Team A **
** EBME 380: Biomedical Engineering Design Experience **
** Case Western Reserve University **
** 2016 Fall Semester **
\* */
package edu.cwru.sicu_sms;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.concurrent.Task;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.fxml.FXML;
import javafx.scene.chart.CategoryAxis;
import javafx.scene.chart.LineChart;
import javafx.scene.chart.NumberAxis;
import javafx.scene.control.*;
import java.io.FileWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Executors;
import jssc.SerialPort;
import jssc.SerialPortException;
import jssc.SerialPortList;
/**
* The controller for the front-end program.
*
* @since October 13, 2016
* @author Ted Frohlich <ttf10@case.edu>
* @author Abby Walker <amw138@case.edu>
*/
public class Controller {
@FXML private Menu connectMenu;
@FXML private ToggleGroup connectGroup;
@FXML private LineChart eegChart;
@FXML private LineChart ekgChart;
@FXML private ToggleButton recordButton;
private ObservableList<String> serialPortList;
private SerialPort eegPort, ekgPort;
private enum PortType {EEG_PORT, EKG_PORT}
private List<LineChart.Series> eegChannels;
private LineChart.Series ekgChannel;
private FileWriter fileWriter;
/**
* Construct a controller for the front-end program by performing the setup routine:
* <ol>
* <li>Detect serial ports.</li>
* <li>Connect to the detected port by default, if it is the only one available.</li>
* </ol>
*/
public Controller() {
if (detectSerialPorts() && serialPortList.size() == 1) {
connect(serialPortList.get(0));
}
}
@FXML
private void confirmExit() throws Exception {
Alert alert = new Alert(Alert.AlertType.CONFIRMATION);
alert.setTitle("Confirm Exit");
alert.setHeaderText("Are you sure you want to exit?");
ButtonType result = alert.showAndWait().orElse(ButtonType.CANCEL);
if (result == ButtonType.OK) {
disconnect();
Platform.exit();
}
}
@FXML
public void connect(ActionEvent actionEvent) {
connect("COM5"); // TODO: Figure out how to get item text from action event.
}
@FXML
private void onMouseEnteredRecordButton() {
recordButton.setText((isRecording() ? "Stop" : "Start") + " Recording");
}
@FXML
private void onMouseExitedRecordButton() {
recordButton.setText("Record" + (isRecording() ? "ing..." : ""));
}
@FXML
private void onMousePressedRecordButton() {
recordButton.setStyle("-fx-background-color: darkred");
}
@FXML
private void onMouseReleasedRecordButton() {
recordButton.setStyle("-fx-background-color: red");
}
@FXML
public void onConnectMenuValidation(Event event) {
connectMenu.getItems().clear();
String[] portNames = SerialPortList.getPortNames();
if (portNames.length == 0) {
MenuItem dummy = new MenuItem("<no ports available>");
dummy.setDisable(true);
connectMenu.getItems().add(dummy);
return;
}
for (String portName : portNames) {
connectMenu.getItems().add(new RadioMenuItem(portName));
}
}
@FXML
private void record() {
if (isRecording()) { // start recording...
//TODO: Run thread for saving data to file.
}
else { // stop recording...
//TODO: End thread for saving data to file.
}
onMouseEnteredRecordButton(); // indicate what next click would do
}
/**
* Connect to the specified serial port.
*
* @param portName the name of the serial port
* @return <code>true</code> if the serial port was successfully connected; <code>false</code> if there is already another port currently open, or just if something went wrong connecting this one
*/
private boolean connect(String portName) {
boolean success = false;
try {
System.out.print("Connecting to serial port " + portName + "...");
if (eegPort != null && eegPort.isOpened()) {
System.out.println("\t->\tAlready connected!");
} else {
eegPort = new SerialPort(portName);
success = eegPort.openPort();
System.out.println("\t->\tSuccessfully connected!");
}
} catch (SerialPortException e) {
System.out.println("\t->\tCouldn't connect!");
}
return success;
}
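/*
* Hedged follow-up sketch, not part of the current flow: once openPort() succeeds, a
* jssc port is normally configured before reading. The parameter values and byte count
* below are assumptions, not taken from this project.
*
*   eegPort.setParams(SerialPort.BAUDRATE_9600, SerialPort.DATABITS_8,
*           SerialPort.STOPBITS_1, SerialPort.PARITY_NONE);
*   byte[] frame = eegPort.readBytes(16); // blocks until 16 bytes arrive
*/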
/**
* Initialize the list of detected ports.
*
* @return <code>true</code> if at least one serial port was detected; <code>false</code> otherwise
*/
private boolean detectSerialPorts() {
serialPortList = FXCollections.observableArrayList();
serialPortList.addAll(SerialPortList.getPortNames());
return !serialPortList.isEmpty();
}
/**
* Disconnect from the serial port.
*
* @return <code>true</code> if the serial port was successfully disconnected; <code>false</code> if none of the ports were connected to begin with, or just if something went wrong disconnecting this one
*/
private boolean disconnect() {
boolean success = false;
try {
System.out.print("Disconnecting from serial port " + eegPort.getPortName() + "...");
success = eegPort.closePort();
eegPort = null;
if (success) System.out.println("\t->\tSuccessfully disconnected!");
} catch (Exception e) {
System.out.println("\t->\tAlready disconnected!");
}
return success;
}
/**
* Get whether data recording is currently toggled 'on' in the front-end.
*
* @return <code>true</code> if the 'record' toggle button has been pushed; <code>false</code> if no data recording is currently happening
*/
private boolean isRecording() {
return recordButton.isSelected();
}
/**
* A controller for the EEG tab.
*/
private class EEGController {
@FXML private LineChart<String, Number>
leftRostralChart, rightRostralChart,
leftCaudalChart, rightCaudalChart;
private CategoryAxis xAxis;
private NumberAxis yAxis;
private ObservableList<String> xAxisCategories;
private LineChart.Series<String, Number>[] electrodes;
private ObservableList<LineChart.Data<String, Number>>
leftRostralList, rightRostralList,
leftCaudalList, rightCaudalList;
private int lastObservedChangelistSize, changesBeforeUpdate = 10;
private Task<Date> chartUpdateTask;
private EEGController() {
initObservableLists();
getObservableLists().forEach(list ->
list.addListener(dataListChangeListener()));
initAxes();
xAxis.setCategories(xAxisCategories);
// xAxis.setAutoRanging(false);
//TODO: instantiate and add data series
initChartUpdateTask();
Executors.newSingleThreadExecutor().submit(chartUpdateTask);
}
private void initAxes() {
xAxis = new CategoryAxis(); yAxis = new NumberAxis();
xAxis.setLabel("Time (sec)"); yAxis.setLabel("Relative Amplitude");
}
private void initChartUpdateTask() {
chartUpdateTask = new Task<Date>() {
@Override
protected Date call() throws Exception {
while (true) {
try {
Thread.sleep(1000);
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
}
if (isCancelled()) break;
updateValue(new Date());
}
return new Date();
}
};
chartUpdateTask.valueProperty().addListener(new ChangeListener<Date>() {
SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss"); //TODO: eventually just want seconds
Random random = new Random();
@Override
public void changed(ObservableValue<? extends Date> observableDate, Date oldDate, Date newDate) {
String strDate = dateFormat.format(newDate);
xAxisCategories.add(strDate);
getObservableLists().forEach(list ->
list.add(new LineChart.Data(strDate, newDate.getMinutes() + random.nextInt(100500))));
}
});
}
private void initObservableLists() {
// Each chart needs its own backing list; a chained assignment would alias them all.
leftRostralList = FXCollections.observableArrayList();
rightRostralList = FXCollections.observableArrayList();
leftCaudalList = FXCollections.observableArrayList();
rightCaudalList = FXCollections.observableArrayList();
xAxisCategories = FXCollections.observableArrayList();
}
private List<LineChart<String, Number>> getCharts() {
// Build a mutable list of the four EEG charts (Collections.emptyList() cannot be added to).
List<LineChart<String, Number>> charts = new ArrayList<>();
charts.add(leftRostralChart); charts.add(rightRostralChart);
charts.add(leftCaudalChart); charts.add(rightCaudalChart);
return charts;
}
private List<ObservableList<LineChart.Data<String, Number>>> getObservableLists() {
// Build a mutable list of the four backing data lists.
List<ObservableList<LineChart.Data<String, Number>>> lists = new ArrayList<>();
lists.add(leftRostralList); lists.add(rightRostralList);
lists.add(leftCaudalList); lists.add(rightCaudalList);
return lists;
}
private ListChangeListener<LineChart.Data<String, Number>> dataListChangeListener() {
return change -> {
if (change.getList().size() - lastObservedChangelistSize > changesBeforeUpdate) {
lastObservedChangelistSize += changesBeforeUpdate;
xAxis.getCategories().remove(0, changesBeforeUpdate);
}
};
}
}
/**
* A controller for the EKG tab.
*/
private class EKGController {
//TODO:
}
}
|
package es.uniovi.DBUpdate.util;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
public class Jdbc {
private static String DRIVER = "org.sqlite.JDBC";
private static String URL= "jdbc:sqlite:/sqlite/asw.db";
static {
try {
Class.forName( DRIVER );
} catch (ClassNotFoundException e) {
throw new RuntimeException("Driver not found in classpath", e);
}
}
public static Connection getConnection() throws SQLException {
return DriverManager.getConnection(URL);
}
public static void close(ResultSet rs, Statement st, Connection c) {
close(rs);
close(st);
close(c);
}
public static void close(ResultSet rs, Statement st) {
close(rs);
close(st);
}
protected static void close(ResultSet rs) {
if (rs != null) try { rs.close(); } catch(SQLException e) {e.printStackTrace();}
}
public static void close(Statement st) {
if (st != null ) try { st.close(); } catch(SQLException e) {e.printStackTrace();}
}
public static void close(Connection c) {
if (c != null) try { c.close(); } catch(SQLException e) {e.printStackTrace();}
}
public static Connection createThreadConnection() throws SQLException {
Connection con = getConnection();
con.setAutoCommit( false );
threadConnection.set(con);
return con;
}
private static ThreadLocal<Connection> threadConnection = new ThreadLocal<Connection>();
public static Connection getCurrentConnection() {
return threadConnection.get();
}
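/*
* Hedged usage sketch, assuming the enclosing method declares "throws SQLException";
* the table and column names are placeholders.
*
*   Connection c = null;
*   Statement st = null;
*   ResultSet rs = null;
*   try {
*       c = Jdbc.getConnection();
*       st = c.createStatement();
*       rs = st.executeQuery("select id from sample_table");
*       while (rs.next()) {
*           System.out.println(rs.getLong("id"));
*       }
*   } finally {
*       Jdbc.close(rs, st, c);
*   }
*/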
}
|
package eu.amidst.scai2015;
import eu.amidst.core.datastream.*;
import eu.amidst.core.distribution.Multinomial;
import eu.amidst.core.inference.InferenceAlgorithmForBN;
import eu.amidst.core.inference.messagepassing.VMP;
import eu.amidst.core.io.DataStreamLoader;
import eu.amidst.core.learning.StreamingVariationalBayesVMP;
import eu.amidst.core.models.BayesianNetwork;
import eu.amidst.core.models.DAG;
import eu.amidst.core.utils.Utils;
import eu.amidst.core.variables.StaticVariables;
import eu.amidst.core.variables.Variable;
import weka.classifiers.evaluation.NominalPrediction;
import weka.classifiers.evaluation.Prediction;
import weka.classifiers.evaluation.ThresholdCurve;
import weka.core.Instances;
import java.io.IOException;
import java.util.*;
public class wrapperBN {
int seed = 0;
Variable classVariable;
Variable classVariable_PM;
Attribute SEQUENCE_ID;
Attribute TIME_ID;
static int DEFAULTER_VALUE_INDEX = 1;
static int NON_DEFAULTER_VALUE_INDEX = 0;
int NbrClients= 50000;
HashMap<Integer, Multinomial> posteriorsGlobal = new HashMap<>();
static boolean usePRCArea = false; //By default ROCArea is used
static boolean dynamicNB = false;
static boolean onlyPrediction = false;
public static boolean isDynamicNB() {
return dynamicNB;
}
public static void setDynamicNB(boolean dynamicNB) {
wrapperBN.dynamicNB = dynamicNB;
}
public static boolean isOnlyPrediction() {
return onlyPrediction;
}
public static void setOnlyPrediction(boolean onlyPrediction) {
wrapperBN.onlyPrediction = onlyPrediction;
}
HashMap<Integer, Integer> defaultingClients = new HashMap<>();
public static boolean isUsePRCArea() {
return usePRCArea;
}
public static void setUsePRCArea(boolean usePRCArea) {
wrapperBN.usePRCArea = usePRCArea;
}
public Attribute getSEQUENCE_ID() {
return SEQUENCE_ID;
}
public void setSEQUENCE_ID(Attribute SEQUENCE_ID) {
this.SEQUENCE_ID = SEQUENCE_ID;
}
public Attribute getTIME_ID() {
return TIME_ID;
}
public void setTIME_ID(Attribute TIME_ID) {
this.TIME_ID = TIME_ID;
}
public Variable getClassVariable_PM() {
return classVariable_PM;
}
public void setClassVariable_PM(Variable classVariable_PM) {
this.classVariable_PM = classVariable_PM;
}
public int getSeed() {
return seed;
}
public void setSeed(int seed) {
this.seed = seed;
}
public Variable getClassVariable() {
return classVariable;
}
public void setClassVariable(Variable classVariable) {
this.classVariable = classVariable;
}
public BayesianNetwork wrapperBNOneMonthNB(DataOnMemory<DataInstance> data){
StaticVariables Vars = new StaticVariables(data.getAttributes());
//Split the whole data into training and testing
List<DataOnMemory<DataInstance>> splitData = this.splitTrainAndTest(data,66.0);
DataOnMemory<DataInstance> trainingData = splitData.get(0);
DataOnMemory<DataInstance> testData = splitData.get(1);
List<Variable> NSF = new ArrayList<>(Vars.getListOfVariables()); // NSF: non selected features
NSF.remove(classVariable); //remove C
NSF.remove(classVariable_PM); // remove C'
int nbrNSF = NSF.size();
List<Variable> SF = new ArrayList<>(); // SF: selected features
boolean stop = false;
//Learn the initial BN with training data including only the class variable
BayesianNetwork bNet = train(trainingData, Vars, SF,false);
System.out.println(bNet.toString());
//Evaluate the initial BN with testing data including only the class variable, i.e., initial score or initial auc
double score = testFS(testData, bNet);
int cont=0;
//iterate until there is no improvement in score
while (nbrNSF > 0 && !stop) {
System.out.print("Iteration: " + cont + ", Score: "+score +", Number of selected variables: "+ SF.size() + ", ");
SF.stream().forEach(v -> System.out.print(v.getName() + ", "));
System.out.println();
Map<Variable, Double> scores = new HashMap<>(); //scores for each considered feature
for(Variable V:NSF) {
//if (V.getVarID()>5)
// break;
System.out.println("Testing "+V.getName());
SF.add(V);
//train
bNet = train(trainingData, Vars, SF, false);
//evaluate
scores.put(V, testFS(testData, bNet));
SF.remove(V);
}
//determine the Variable V with max score
double maxScore = (Collections.max(scores.values())); //returns max value in the Hashmap
if (maxScore - score > 0.001){
score = maxScore;
//Variable with best score
for (Map.Entry<Variable, Double> entry : scores.entrySet()) {
if (entry.getValue()== maxScore){
Variable SelectedV = entry.getKey();
SF.add(SelectedV);
NSF.remove(SelectedV);
break;
}
}
nbrNSF = nbrNSF - 1;
}
else{
stop = true;
}
cont++;
}
//Final training with the winning SF and the full initial data
bNet = train(data, Vars, SF, true);
System.out.println(bNet.getDAG().toString());
return bNet;
}
List<DataOnMemory<DataInstance>> splitTrainAndTest(DataOnMemory<DataInstance> data, double trainPercentage) {
Random random = new Random(this.seed);
DataOnMemoryListContainer<DataInstance> train = new DataOnMemoryListContainer(data.getAttributes());
DataOnMemoryListContainer<DataInstance> test = new DataOnMemoryListContainer(data.getAttributes());
for (DataInstance dataInstance : data) {
if (dataInstance.getValue(classVariable) == DEFAULTER_VALUE_INDEX)
continue;
if (random.nextDouble()<trainPercentage/100.0)
train.add(dataInstance);
else
test.add(dataInstance);
}
for (DataInstance dataInstance : data) {
if (dataInstance.getValue(classVariable) != DEFAULTER_VALUE_INDEX)
continue;
if (random.nextDouble()<trainPercentage/100.0)
train.add(dataInstance);
else
test.add(dataInstance);
}
Collections.shuffle(train.getList(), random);
Collections.shuffle(test.getList(), random);
return Arrays.asList(train, test);
}
public BayesianNetwork train(DataOnMemory<DataInstance> data, StaticVariables allVars, List<Variable> SF, boolean includeClassVariablePM){
DAG dag = new DAG(allVars);
if(includeClassVariablePM)
dag.getParentSet(classVariable).addParent(classVariable_PM);
/* Add classVariable to all SF*/
dag.getParentSets().stream()
.filter(parent -> SF.contains(parent.getMainVar()))
.filter(w -> w.getMainVar().getVarID() != classVariable.getVarID())
.forEach(w -> w.addParent(classVariable));
StreamingVariationalBayesVMP vmp = new StreamingVariationalBayesVMP();
vmp.setDAG(dag);
vmp.setDataStream(data);
vmp.setWindowsSize(100);
vmp.runLearning();
return vmp.getLearntBayesianNetwork();
}
public BayesianNetwork train(DataOnMemory<DataInstance> data, StaticVariables allVars, List<Variable> SF){
DAG dag = new DAG(allVars);
if(data.getDataInstance(0).getValue(TIME_ID)!=0)
dag.getParentSet(classVariable).addParent(classVariable_PM);
/* Add classVariable to all SF*/
dag.getParentSets().stream()
.filter(parent -> SF.contains(parent.getMainVar()))
.filter(w -> w.getMainVar().getVarID() != classVariable.getVarID())
.forEach(w -> w.addParent(classVariable));
StreamingVariationalBayesVMP vmp = new StreamingVariationalBayesVMP();
vmp.setDAG(dag);
vmp.setDataStream(data);
vmp.setWindowsSize(100);
vmp.runLearning();
return vmp.getLearntBayesianNetwork();
}
public double testFS(DataOnMemory<DataInstance> data, BayesianNetwork bn){
InferenceAlgorithmForBN vmp = new VMP();
ArrayList<Prediction> predictions = new ArrayList<>();
int currentMonthIndex = (int)data.getDataInstance(0).getValue(TIME_ID);
for (DataInstance instance : data) {
int clientID = (int) instance.getValue(SEQUENCE_ID);
double classValue = instance.getValue(classVariable);
Prediction prediction;
Multinomial posterior;
vmp.setModel(bn);
instance.setValue(classVariable, Utils.missingValue());
vmp.setEvidence(instance);
vmp.runInference();
posterior = vmp.getPosterior(classVariable);
instance.setValue(classVariable, classValue);
prediction = new NominalPrediction(classValue, posterior.getProbabilities());
predictions.add(prediction);
}
ThresholdCurve thresholdCurve = new ThresholdCurve();
Instances tcurve = thresholdCurve.getCurve(predictions);
if(usePRCArea)
return ThresholdCurve.getPRCArea(tcurve);
else
return ThresholdCurve.getROCArea(tcurve);
}
public double test(DataOnMemory<DataInstance> data, BayesianNetwork bn, HashMap<Integer, Multinomial> posteriors, boolean updatePosteriors){
InferenceAlgorithmForBN vmp = new VMP();
ArrayList<Prediction> predictions = new ArrayList<>();
int currentMonthIndex = (int)data.getDataInstance(0).getValue(TIME_ID);
for (DataInstance instance : data) {
int clientID = (int) instance.getValue(SEQUENCE_ID);
double classValue = instance.getValue(classVariable);
Prediction prediction;
Multinomial posterior;
/*Propagates*/
bn.setConditionalDistribution(classVariable_PM, posteriors.get(clientID));
/*
Multinomial_MultinomialParents distClass = bn.getConditionalDistribution(classVariable);
Multinomial deterministic = new Multinomial(classVariable);
deterministic.setProbabilityOfState(DEFAULTER_VALUE_INDEX, 1.0);
deterministic.setProbabilityOfState(NON_DEFAULTER_VALUE_INDEX, 0.0);
distClass.setMultinomial(DEFAULTER_VALUE_INDEX, deterministic);
*/
vmp.setModel(bn);
double classValue_PM = instance.getValue(classVariable_PM);
instance.setValue(classVariable, Utils.missingValue());
instance.setValue(classVariable_PM, Utils.missingValue());
vmp.setEvidence(instance);
vmp.runInference();
posterior = vmp.getPosterior(classVariable);
instance.setValue(classVariable, classValue);
instance.setValue(classVariable_PM, classValue_PM);
prediction = new NominalPrediction(classValue, posterior.getProbabilities());
predictions.add(prediction);
if (classValue == DEFAULTER_VALUE_INDEX) {
defaultingClients.putIfAbsent(clientID, currentMonthIndex);
}
if(updatePosteriors) {
Multinomial multi_PM = posterior.toEFUnivariateDistribution().deepCopy(classVariable_PM).toUnivariateDistribution();
if (classValue == DEFAULTER_VALUE_INDEX) {
multi_PM.setProbabilityOfState(DEFAULTER_VALUE_INDEX, 1.0);
multi_PM.setProbabilityOfState(NON_DEFAULTER_VALUE_INDEX, 0);
}
posteriors.put(clientID, multi_PM);
}
}
ThresholdCurve thresholdCurve = new ThresholdCurve();
Instances tcurve = thresholdCurve.getCurve(predictions);
if(usePRCArea)
return ThresholdCurve.getPRCArea(tcurve);
else
return ThresholdCurve.getROCArea(tcurve);
}
public double propagateAndTest(Queue<DataOnMemory<DataInstance>> data, BayesianNetwork bn){
HashMap<Integer, Multinomial> posteriors = new HashMap<>();
InferenceAlgorithmForBN vmp = new VMP();
ArrayList<Prediction> predictions = new ArrayList<>();
/*
for (int i = 0; i < NbrClients ; i++){
Multinomial uniform = new Multinomial(classVariable_PM);
uniform.setProbabilityOfState(DEFAULTER_VALUE_INDEX, 0.5);
uniform.setProbabilityOfState(NON_DEFAULTER_VALUE_INDEX, 0.5);
posteriors.put(i, uniform);
}
*/
boolean firstMonth = true;
Iterator<DataOnMemory<DataInstance>> iterator = data.iterator();
while(iterator.hasNext()){
Prediction prediction = null;
Multinomial posterior = null;
DataOnMemory<DataInstance> batch = iterator.next();
int currentMonthIndex = (int)batch.getDataInstance(0).getValue(TIME_ID);
for (DataInstance instance : batch) {
int clientID = (int) instance.getValue(SEQUENCE_ID);
double classValue = instance.getValue(classVariable);
/*Propagates*/
double classValue_PM = -1;
if(!firstMonth){
bn.setConditionalDistribution(classVariable_PM, posteriors.get(clientID));
classValue_PM = instance.getValue(classVariable_PM);
instance.setValue(classVariable_PM, Utils.missingValue());
}
vmp.setModel(bn);
instance.setValue(classVariable, Utils.missingValue());
vmp.setEvidence(instance);
vmp.runInference();
posterior = vmp.getPosterior(classVariable);
instance.setValue(classVariable, classValue);
if(!firstMonth) {
instance.setValue(classVariable_PM, classValue_PM);
}
if(!iterator.hasNext()) { //Last month or present
prediction = new NominalPrediction(classValue, posterior.getProbabilities());
predictions.add(prediction);
}
Multinomial multi_PM = posterior.toEFUnivariateDistribution().deepCopy(classVariable_PM).toUnivariateDistribution();
posteriors.put(clientID, multi_PM);
}
firstMonth = false;
if(!iterator.hasNext()) {//Last month or present time
ThresholdCurve thresholdCurve = new ThresholdCurve();
Instances tcurve = thresholdCurve.getCurve(predictions);
if(usePRCArea)
return ThresholdCurve.getPRCArea(tcurve);
else
return ThresholdCurve.getROCArea(tcurve);
}
}
throw new UnsupportedOperationException("Something went wrong: The method should have stopped at some point in the loop.");
}
void learnCajamarModel(DataStream<DataInstance> data) {
StaticVariables Vars = new StaticVariables(data.getAttributes());
classVariable = Vars.getVariableById(Vars.getNumberOfVars()-1);
classVariable_PM = Vars.getVariableById(Vars.getNumberOfVars()-2);
TIME_ID = data.getAttributes().getAttributeByName("TIME_ID");
SEQUENCE_ID = data.getAttributes().getAttributeByName("SEQUENCE_ID");
int count = 0;
double averageAUC = 0;
/*
for (int i = 0; i < NbrClients ; i++){
Multinomial uniform = new Multinomial(classVariable_PM);
uniform.setProbabilityOfState(DEFAULTER_VALUE_INDEX, 0.5);
uniform.setProbabilityOfState(NON_DEFAULTER_VALUE_INDEX, 0.5);
posteriorsGlobal.put(i, uniform);
}
*/
Iterable<DataOnMemory<DataInstance>> iteratable = data.iterableOverBatches(NbrClients);
Iterator<DataOnMemory<DataInstance>> iterator = iteratable.iterator();
Queue<DataOnMemory<DataInstance>> monthsMinus12to0 = new LinkedList<>();
iterator.next(); //First month is discarded
//Take 13 batches at a time - 1 for training and 12 for testing
//for (int i = 0; i < 12; i++) {
for (int i = 0; i < 2; i++) {
monthsMinus12to0.add(iterator.next());
}
while(iterator.hasNext()){
DataOnMemory<DataInstance> currentMonth = iterator.next();
monthsMinus12to0.add(currentMonth);
int idMonthMinus12 = (int)monthsMinus12to0.peek().getDataInstance(0).getValue(TIME_ID);
BayesianNetwork bn = null;
if(isOnlyPrediction()){
DataOnMemory<DataInstance> batch = monthsMinus12to0.poll();
StaticVariables vars = new StaticVariables(batch.getAttributes());
bn = train(batch, vars, vars.getListOfVariables(),this.isDynamicNB());
}
else
bn = wrapperBNOneMonthNB(monthsMinus12to0.poll());
double auc = propagateAndTest(monthsMinus12to0, bn);
System.out.println( idMonthMinus12 + "\t" + auc);
averageAUC += auc;
count += NbrClients;
}
System.out.println("Average AUC: " + averageAUC / (count / NbrClients));
}
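/*
* Hedged command-line sketch: the classpath and dataset path are placeholders; the
* optional flags are the ones parsed in main() below.
*
*   java -cp <project-classpath> eu.amidst.scai2015.wrapperBN data/Bank.arff PRCArea dynamic
*   java -cp <project-classpath> eu.amidst.scai2015.wrapperBN data/Bank.arff onlyPrediction
*/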
public static void main(String[] args) throws IOException {
//DataStream<DataInstance> data = DataStreamLoader.loadFromFile("datasets/BankArtificialDataSCAI2015_DEFAULTING_PM.arff");
DataStream<DataInstance> data = DataStreamLoader.loadFromFile(args[0]);
for (int i = 1; i < args.length ; i++) {
if(args[i].equalsIgnoreCase("PRCArea"))
setUsePRCArea(true);
if(args[i].equalsIgnoreCase("onlyPrediction"))
setOnlyPrediction(true);
if(args[i].equalsIgnoreCase("dynamic"))
setDynamicNB(true);
}
wrapperBN wbnet = new wrapperBN();
wbnet.learnCajamarModel(data);
}
}
|
package io.github.classgraph;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.FileSystemNotFoundException;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import io.github.classgraph.ClassGraph.FailureHandler;
import io.github.classgraph.ClassGraph.ScanResultProcessor;
import io.github.classgraph.Classfile.ClassfileFormatException;
import io.github.classgraph.Classfile.SkipClassException;
import nonapi.io.github.classgraph.classpath.ClasspathFinder;
import nonapi.io.github.classgraph.classpath.ClasspathOrder.ClasspathEntry;
import nonapi.io.github.classgraph.classpath.ModuleFinder;
import nonapi.io.github.classgraph.concurrency.AutoCloseableExecutorService;
import nonapi.io.github.classgraph.concurrency.InterruptionChecker;
import nonapi.io.github.classgraph.concurrency.SingletonMap;
import nonapi.io.github.classgraph.concurrency.SingletonMap.NewInstanceFactory;
import nonapi.io.github.classgraph.concurrency.WorkQueue;
import nonapi.io.github.classgraph.concurrency.WorkQueue.WorkUnitProcessor;
import nonapi.io.github.classgraph.fastzipfilereader.NestedJarHandler;
import nonapi.io.github.classgraph.scanspec.ScanSpec;
import nonapi.io.github.classgraph.utils.CollectionUtils;
import nonapi.io.github.classgraph.utils.FastPathResolver;
import nonapi.io.github.classgraph.utils.FileUtils;
import nonapi.io.github.classgraph.utils.JarUtils;
import nonapi.io.github.classgraph.utils.LogNode;
/** The classpath scanner. */
class Scanner implements Callable<ScanResult> {
/** The scan spec. */
private final ScanSpec scanSpec;
/** If true, performing a scan. If false, only fetching the classpath. */
public boolean performScan;
/** The nested jar handler. */
private final NestedJarHandler nestedJarHandler;
/** The executor service. */
private final ExecutorService executorService;
/** The interruption checker. */
private final InterruptionChecker interruptionChecker;
/** The number of parallel tasks. */
private final int numParallelTasks;
/** The scan result processor. */
private final ScanResultProcessor scanResultProcessor;
/** The failure handler. */
private final FailureHandler failureHandler;
/** The toplevel log. */
private final LogNode topLevelLog;
/** The classpath finder. */
private final ClasspathFinder classpathFinder;
/** The module order. */
private final List<ClasspathElementModule> moduleOrder;
/**
* The classpath scanner. Scanning is started by calling {@link #call()} on this object.
*
* @param performScan
* If true, performing a scan. If false, only fetching the classpath.
* @param scanSpec
* the scan spec
* @param executorService
* the executor service
* @param numParallelTasks
* the num parallel tasks
* @param scanResultProcessor
* the scan result processor
* @param failureHandler
* the failure handler
* @param topLevelLog
* the log
*
* @throws InterruptedException
* if interrupted
*/
Scanner(final boolean performScan, final ScanSpec scanSpec, final ExecutorService executorService,
final int numParallelTasks, final ScanResultProcessor scanResultProcessor,
final FailureHandler failureHandler, final LogNode topLevelLog) throws InterruptedException {
this.scanSpec = scanSpec;
this.performScan = performScan;
scanSpec.sortPrefixes();
scanSpec.log(topLevelLog);
if (topLevelLog != null) {
if (scanSpec.pathAcceptReject != null
&& scanSpec.packagePrefixAcceptReject.isSpecificallyAccepted("")) {
topLevelLog.log("Note: There is no need to accept the root package (\"\") -- not accepting "
+ "anything will have the same effect of causing all packages to be scanned");
}
topLevelLog.log("Number of worker threads: " + numParallelTasks);
}
this.executorService = executorService;
this.interruptionChecker = executorService instanceof AutoCloseableExecutorService
? ((AutoCloseableExecutorService) executorService).interruptionChecker
: new InterruptionChecker();
this.nestedJarHandler = new NestedJarHandler(scanSpec, interruptionChecker);
this.numParallelTasks = numParallelTasks;
this.scanResultProcessor = scanResultProcessor;
this.failureHandler = failureHandler;
this.topLevelLog = topLevelLog;
final LogNode classpathFinderLog = topLevelLog == null ? null : topLevelLog.log("Finding classpath");
this.classpathFinder = new ClasspathFinder(scanSpec, classpathFinderLog);
try {
this.moduleOrder = new ArrayList<>();
// Check if modules should be scanned
final ModuleFinder moduleFinder = classpathFinder.getModuleFinder();
if (moduleFinder != null) {
// Add modules to start of classpath order, before traditional classpath
final List<ModuleRef> systemModuleRefs = moduleFinder.getSystemModuleRefs();
final ClassLoader[] classLoaderOrderRespectingParentDelegation = classpathFinder
.getClassLoaderOrderRespectingParentDelegation();
final ClassLoader defaultClassLoader = classLoaderOrderRespectingParentDelegation != null
&& classLoaderOrderRespectingParentDelegation.length != 0
? classLoaderOrderRespectingParentDelegation[0]
: null;
if (systemModuleRefs != null) {
for (final ModuleRef systemModuleRef : systemModuleRefs) {
final String moduleName = systemModuleRef.getName();
if (
// If scanning system packages and modules is enabled and accept/reject criteria are empty,
// then scan all system modules
(scanSpec.enableSystemJarsAndModules
&& scanSpec.moduleAcceptReject.acceptAndRejectAreEmpty())
// Otherwise only scan specifically accepted system modules
|| scanSpec.moduleAcceptReject.isSpecificallyAcceptedAndNotRejected(moduleName)) {
// Create a new ClasspathElementModule
final ClasspathElementModule classpathElementModule = new ClasspathElementModule(
systemModuleRef, nestedJarHandler.moduleRefToModuleReaderProxyRecyclerMap,
new ClasspathEntryWorkUnit(null, defaultClassLoader, null, moduleOrder.size(),
""),
scanSpec);
moduleOrder.add(classpathElementModule);
// Open the ClasspathElementModule
classpathElementModule.open(/* ignored */ null, classpathFinderLog);
} else {
if (classpathFinderLog != null) {
classpathFinderLog
.log("Skipping non-accepted or rejected system module: " + moduleName);
}
}
}
}
final List<ModuleRef> nonSystemModuleRefs = moduleFinder.getNonSystemModuleRefs();
if (nonSystemModuleRefs != null) {
for (final ModuleRef nonSystemModuleRef : nonSystemModuleRefs) {
String moduleName = nonSystemModuleRef.getName();
if (moduleName == null) {
moduleName = "";
}
if (scanSpec.moduleAcceptReject.isAcceptedAndNotRejected(moduleName)) {
// Create a new ClasspathElementModule
final ClasspathElementModule classpathElementModule = new ClasspathElementModule(
nonSystemModuleRef, nestedJarHandler.moduleRefToModuleReaderProxyRecyclerMap,
new ClasspathEntryWorkUnit(null, defaultClassLoader, null, moduleOrder.size(),
""),
scanSpec);
moduleOrder.add(classpathElementModule);
// Open the ClasspathElementModule
classpathElementModule.open(/* ignored */ null, classpathFinderLog);
} else {
if (classpathFinderLog != null) {
classpathFinderLog.log("Skipping non-accepted or rejected module: " + moduleName);
}
}
}
}
}
} catch (final InterruptedException e) {
nestedJarHandler.close(/* log = */ null);
throw e;
}
}
/**
* Recursively perform a depth-first search of jar interdependencies, breaking cycles if necessary, to determine
* the final classpath element order.
*
* @param currClasspathElement
* the current classpath element
* @param visitedClasspathElts
* visited classpath elts
* @param order
* the classpath element order
*/
private static void findClasspathOrderRec(final ClasspathElement currClasspathElement,
final Set<ClasspathElement> visitedClasspathElts, final List<ClasspathElement> order) {
if (visitedClasspathElts.add(currClasspathElement)) {
// The classpath order requires a preorder traversal of the DAG of classpath dependencies
if (!currClasspathElement.skipClasspathElement) {
// Don't add a classpath element if it is marked to be skipped.
order.add(currClasspathElement);
// Whether or not a classpath element should be skipped, add any child classpath elements that are
// not marked to be skipped (i.e. keep recursing below)
}
// Sort child elements into correct order, then traverse to them in order
final List<ClasspathElement> childClasspathElementsSorted = CollectionUtils
.sortCopy(currClasspathElement.childClasspathElements);
for (final ClasspathElement childClasspathElt : childClasspathElementsSorted) {
findClasspathOrderRec(childClasspathElt, visitedClasspathElts, order);
}
}
}
/**
* Recursively perform a depth-first traversal of child classpath elements, breaking cycles if necessary, to
* determine the final classpath element order. This causes child classpath elements to be inserted in-place in
* the classpath order, after the parent classpath element that contained them.
*
* @param toplevelClasspathElts
* the toplevel classpath elts, indexed by order within the toplevel classpath
* @return the final classpath order, after depth-first traversal of child classpath elements
*/
private List<ClasspathElement> findClasspathOrder(final Set<ClasspathElement> toplevelClasspathElts) {
// Sort toplevel classpath elements into their correct order
final List<ClasspathElement> toplevelClasspathEltsSorted = CollectionUtils.sortCopy(toplevelClasspathElts);
// Perform a depth-first preorder traversal of the DAG of classpath elements
final Set<ClasspathElement> visitedClasspathElts = new HashSet<>();
final List<ClasspathElement> order = new ArrayList<>();
for (final ClasspathElement elt : toplevelClasspathEltsSorted) {
findClasspathOrderRec(elt, visitedClasspathElts, order);
}
return order;
}
/**
* Process work units.
*
* @param <W>
* the work unit type
* @param workUnits
* the work units
* @param log
* the log entry text to group work units under
* @param workUnitProcessor
* the work unit processor
* @throws InterruptedException
* if a worker was interrupted.
* @throws ExecutionException
* If a worker threw an uncaught exception.
*/
private <W> void processWorkUnits(final Collection<W> workUnits, final LogNode log,
final WorkUnitProcessor<W> workUnitProcessor) throws InterruptedException, ExecutionException {
WorkQueue.runWorkQueue(workUnits, executorService, interruptionChecker, numParallelTasks, log,
workUnitProcessor);
if (log != null) {
log.addElapsedTime();
}
// Throw InterruptedException if any of the workers failed
interruptionChecker.check();
}
/** Used to enqueue classpath elements for opening. */
static class ClasspathEntryWorkUnit {
/** The classpath entry object (a {@link String} path, {@link Path}, {@link URL} or {@link URI}). */
Object classpathEntryObj;
/** The classloader the classpath entry object was obtained from. */
final ClassLoader classLoader;
/** The parent classpath element. */
final ClasspathElement parentClasspathElement;
/** The order within the parent classpath element. */
final int classpathElementIdxWithinParent;
/** The package root prefix (e.g. "BOOT-INF/classes/"). */
final String packageRootPrefix;
/**
* Constructor.
*
* @param classpathEntryObj
* the raw classpath entry object
* @param classLoader
* the classloader the classpath entry object was obtained from
* @param parentClasspathElement
* the parent classpath element
* @param classpathElementIdxWithinParent
* the order within parent classpath element
* @param packageRootPrefix
* the package root prefix
*/
public ClasspathEntryWorkUnit(final Object classpathEntryObj, final ClassLoader classLoader,
final ClasspathElement parentClasspathElement, final int classpathElementIdxWithinParent,
final String packageRootPrefix) {
this.classpathEntryObj = classpathEntryObj;
this.classLoader = classLoader;
this.parentClasspathElement = parentClasspathElement;
this.classpathElementIdxWithinParent = classpathElementIdxWithinParent;
this.packageRootPrefix = packageRootPrefix;
}
}
/**
* Normalize a classpath entry object so that it is mapped to a canonical {@link Path} object if possible,
* falling back to a {@link URL} or {@link URI} if not possible. This is needed to avoid treating
* "file:///path/to/x.jar" and "/path/to/x.jar" as different classpath elements. Maps URL("jar:file:x.jar!/") to
* Path("x.jar"), etc.
*
* @param classpathEntryObj
* The classpath entry object.
* @return The normalized classpath entry object.
     * @throws IOException if the classpath entry is null, malformed, or cannot be resolved
*/
private static Object normalizeClasspathEntry(final Object classpathEntryObj) throws IOException {
if (classpathEntryObj == null) {
// Should not happen
throw new IOException("Got null classpath entry object");
}
Object classpathEntryObjNormalized = classpathEntryObj;
        // Convert anything other than a Path (e.g. a URL or URI) into a String. This is needed
        // for paths like "jar:file:myjar.jar!/" (#625) -- need to strip the "!/" off the end.
// Also strip any "jar:file:" or "file:" off the beginning.
// This normalizes "file:x.jar" and "x.jar" to the same string, for example.
if (!(classpathEntryObjNormalized instanceof Path)) {
classpathEntryObjNormalized = FastPathResolver.resolve(FileUtils.currDirPath(),
classpathEntryObjNormalized.toString());
}
// If classpath entry object is a URL-formatted string, convert to (or back to) a URL instance.
if (classpathEntryObjNormalized instanceof String) {
String classpathEntStr = (String) classpathEntryObjNormalized;
final boolean isURL = JarUtils.URL_SCHEME_PATTERN.matcher(classpathEntStr).matches();
final boolean isMultiSection = classpathEntStr.contains("!");
if (isURL || isMultiSection) {
// Convert back to URL (or URI) if this has a URL scheme or if this is a multi-section
// path (which needs the "jar:file:" scheme)
if (!isURL) {
// Add "file:" scheme if there is no scheme
classpathEntStr = "file:" + classpathEntStr;
}
if (isMultiSection) {
// Multi-section URL strings that do not already have a URL scheme need to
// have the "jar:file:" scheme
classpathEntStr = "jar:" + classpathEntStr;
// Also "jar:" URLs need at least one instance of "!/" -- if only "!" is used
// without a subsequent "/", replace it
classpathEntStr = classpathEntStr.replaceAll("!([^/])", "!/$1");
}
try {
// Convert classpath entry to (or back to) a URL.
final URL classpathEntryURL = new URL(classpathEntStr);
classpathEntryObjNormalized = classpathEntryURL;
// If this is not a multi-section URL, try converting URL to a Path
if (!isMultiSection) {
try {
final String scheme = classpathEntryURL.getProtocol();
if (!"http".equals(scheme) && !"https".equals(scheme)) {
final URI classpathEntryURI = classpathEntryURL.toURI();
// See if the URL resolves to a file or directory via the Path API
classpathEntryObjNormalized = Paths.get(classpathEntryURI);
}
} catch (final URISyntaxException | IllegalArgumentException | SecurityException e1) {
// URL cannot be represented as a URI or as a Path
} catch (final FileSystemNotFoundException e) {
// This is a custom URL scheme without a backing FileSystem
}
} // else this is a remote jar URL
} catch (final MalformedURLException e) {
// Try creating URI if URL creation fails, in case there is a URI-only scheme
try {
final URI classpathEntryURI = new URI(classpathEntStr);
classpathEntryObjNormalized = classpathEntryURI;
final String scheme = classpathEntryURI.getScheme();
if (!"http".equals(scheme) && !"https".equals(scheme)) {
// See if the URI resolves to a file or directory via the Path API
classpathEntryObjNormalized = Paths.get(classpathEntryURI);
} // else this is a remote jar URI
} catch (final URISyntaxException e1) {
throw new IOException("Malformed URI: " + classpathEntryObjNormalized + " : " + e1);
} catch (final IllegalArgumentException | SecurityException e1) {
// URI cannot be represented as a Path
} catch (final FileSystemNotFoundException e1) {
// This is a custom URI scheme without a backing FileSystem
}
}
}
// Last-ditch effort -- try to convert String to Path
if (classpathEntryObjNormalized instanceof String) {
try {
classpathEntryObjNormalized = Paths.get((String) classpathEntryObjNormalized);
} catch (final InvalidPathException e) {
throw new IOException("Malformed path: " + classpathEntryObj + " : " + e);
}
}
}
// At this point, classpathEntryObjNormalized is either a Path wherever possible (where the
// classpath entry pointed to a jarfile or directory) or a URL/URI (for multi-section "jar:"
// URLs with "!" separators, custom URL schemes without backing filesystems, or URLs that
// can't be turned into a Path for any other reason).
// Canonicalize Path objects so the same file is opened only once
if (classpathEntryObjNormalized instanceof Path) {
try {
// Canonicalize path, to avoid duplication
// Throws IOException if the file does not exist or an I/O error occurs
classpathEntryObjNormalized = ((Path) classpathEntryObjNormalized).toRealPath();
} catch (final SecurityException e) {
// Ignore
}
}
return classpathEntryObjNormalized;
}
/**
* A singleton map used to eliminate creation of duplicate {@link ClasspathElement} objects, to reduce the
* chance that resources are scanned twice, by mapping canonicalized Path objects, URLs, etc. to
* ClasspathElements.
*/
private final SingletonMap<Object, ClasspathElement, IOException>
classpathEntryObjToClasspathEntrySingletonMap =
new SingletonMap<Object, ClasspathElement, IOException>() {
@Override
public ClasspathElement newInstance(final Object classpathEntryObj, final LogNode log)
throws IOException, InterruptedException {
// Overridden by a NewInstanceFactory
throw new IOException("Should not reach here");
}
};
/**
* Create a WorkUnitProcessor for opening traditional classpath entries (which are mapped to
     * {@link ClasspathElementDir} or {@link ClasspathElementZip} -- {@link ClasspathElementModule} is handled
     * separately).
*
* @param allClasspathEltsOut
* on exit, the set of all classpath elements
* @param toplevelClasspathEltsOut
* on exit, the toplevel classpath elements
* @return the work unit processor
*/
private WorkUnitProcessor<ClasspathEntryWorkUnit> newClasspathEntryWorkUnitProcessor(
final Set<ClasspathElement> allClasspathEltsOut, final Set<ClasspathElement> toplevelClasspathEltsOut) {
return new WorkUnitProcessor<ClasspathEntryWorkUnit>() {
@Override
public void processWorkUnit(final ClasspathEntryWorkUnit workUnit,
final WorkQueue<ClasspathEntryWorkUnit> workQueue, final LogNode log)
throws InterruptedException {
try {
// Normalize the classpath entry object
workUnit.classpathEntryObj = normalizeClasspathEntry(workUnit.classpathEntryObj);
// Determine if classpath entry is a jar or dir
boolean isJar = false;
if (workUnit.classpathEntryObj instanceof URL || workUnit.classpathEntryObj instanceof URI) {
// URLs and URIs always point to jars
isJar = true;
} else if (workUnit.classpathEntryObj instanceof Path) {
final Path path = (Path) workUnit.classpathEntryObj;
if (FileUtils.canReadAndIsFile(path)) {
// classpathEntObj is a Path which points to a file, so it must be a jar
isJar = true;
} else if (FileUtils.canReadAndIsDir(path)) {
if ("JrtFileSystem".equals(path.getFileSystem().getClass().getSimpleName())) {
// Ignore JrtFileSystem (#553) -- paths are of form:
// /modules/java.base/module-info.class
throw new IOException("Ignoring JrtFS filesystem path " + workUnit.classpathEntryObj
+ " (modules are scanned using the JPMS API)");
}
// classpathEntObj is a Path which points to a dir
} else if (!FileUtils.canRead(path)) {
throw new IOException("Cannot read path: " + path);
}
} else {
// Should not happen
throw new IOException("Got unexpected classpath entry object type "
+ workUnit.classpathEntryObj.getClass().getName() + " : "
+ workUnit.classpathEntryObj);
}
// Create a ClasspathElementZip or ClasspathElementDir from the classpath entry
// Use a singleton map to ensure that classpath elements are only opened once
// per unique Path, URL, or URI
final boolean isJarFinal = isJar;
classpathEntryObjToClasspathEntrySingletonMap.get(workUnit.classpathEntryObj, log,
// A NewInstanceFactory is used here because workUnit has to be passed in,
// and the standard newInstance API doesn't support an extra parameter like this
new NewInstanceFactory<ClasspathElement, IOException>() {
@Override
public ClasspathElement newInstance() throws IOException, InterruptedException {
final ClasspathElement cpElt = isJarFinal
? new ClasspathElementZip(workUnit.classpathEntryObj, workUnit,
nestedJarHandler, scanSpec)
: new ClasspathElementDir((Path) workUnit.classpathEntryObj, workUnit,
nestedJarHandler, scanSpec);
allClasspathEltsOut.add(cpElt);
// Run open() on the ClasspathElement
final LogNode subLog = log == null ? null
: log.log("Opening classpath element " + cpElt);
// Check if the classpath element is valid (classpathElt.skipClasspathElement
// will be set if not). In case of ClasspathElementZip, open or extract nested
// jars as LogicalZipFile instances. Read manifest files for jarfiles to look
// for Class-Path manifest entries. Adds extra classpath elements to the work
// queue if they are found.
cpElt.open(workQueue, subLog);
if (workUnit.parentClasspathElement != null) {
// Link classpath element to its parent, if it is not a toplevel element
workUnit.parentClasspathElement.childClasspathElements.add(cpElt);
} else {
toplevelClasspathEltsOut.add(cpElt);
}
return cpElt;
}
});
} catch (final Exception e) {
if (log != null) {
log.log("Skipping invalid classpath entry " + workUnit.classpathEntryObj + " : "
+ (e.getCause() == null ? e : e.getCause()));
}
}
}
};
}
/** Used to enqueue classfiles for scanning. */
static class ClassfileScanWorkUnit {
/** The classpath element. */
private final ClasspathElement classpathElement;
/** The classfile resource. */
private final Resource classfileResource;
/** True if this is an external class. */
private final boolean isExternalClass;
/**
* Constructor.
*
* @param classpathElement
* the classpath element
* @param classfileResource
* the classfile resource
* @param isExternalClass
* the is external class
*/
ClassfileScanWorkUnit(final ClasspathElement classpathElement, final Resource classfileResource,
final boolean isExternalClass) {
this.classpathElement = classpathElement;
this.classfileResource = classfileResource;
this.isExternalClass = isExternalClass;
}
}
/** WorkUnitProcessor for scanning classfiles. */
private static class ClassfileScannerWorkUnitProcessor implements WorkUnitProcessor<ClassfileScanWorkUnit> {
/** The scan spec. */
private final ScanSpec scanSpec;
/** The classpath order. */
private final List<ClasspathElement> classpathOrder;
/**
* The names of accepted classes found in the classpath while scanning paths within classpath elements.
*/
private final Set<String> acceptedClassNamesFound;
/**
* The names of external (non-accepted) classes scheduled for extended scanning (where scanning is extended
* upwards to superclasses, interfaces and annotations).
*/
private final Set<String> classNamesScheduledForExtendedScanning = Collections
.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
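        /** The queue of parsed {@link Classfile} objects, which are linked into ClassInfo objects after scanning. */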
private final Queue<Classfile> scannedClassfiles;
/** The string intern map. */
private final ConcurrentHashMap<String, String> stringInternMap = new ConcurrentHashMap<>();
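        /**
         * Constructor.
         *
         * @param scanSpec
         *            the scan spec
         * @param classpathOrder
         *            the classpath order
         * @param acceptedClassNamesFound
         *            the names of accepted classes found in the classpath
         * @param scannedClassfiles
         *            the queue to add parsed {@link Classfile} objects to
         */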
public ClassfileScannerWorkUnitProcessor(final ScanSpec scanSpec,
final List<ClasspathElement> classpathOrder, final Set<String> acceptedClassNamesFound,
final Queue<Classfile> scannedClassfiles) {
this.scanSpec = scanSpec;
this.classpathOrder = classpathOrder;
this.acceptedClassNamesFound = acceptedClassNamesFound;
this.scannedClassfiles = scannedClassfiles;
}
/**
* Process work unit.
*
* @param workUnit
* the work unit
* @param workQueue
* the work queue
* @param log
* the log
* @throws InterruptedException
* the interrupted exception
*/
/* (non-Javadoc)
* @see nonapi.io.github.classgraph.concurrency.WorkQueue.WorkUnitProcessor#processWorkUnit(
* java.lang.Object, nonapi.io.github.classgraph.concurrency.WorkQueue)
*/
@Override
public void processWorkUnit(final ClassfileScanWorkUnit workUnit,
final WorkQueue<ClassfileScanWorkUnit> workQueue, final LogNode log) throws InterruptedException {
// Classfile scan log entries are listed inline below the entry that was added to the log
// when the path of the corresponding resource was found, by using the LogNode stored in
// Resource#scanLog. This allows the path scanning and classfile scanning logs to be
// merged into a single tree, rather than having them appear as two separate trees.
final LogNode subLog = workUnit.classfileResource.scanLog == null ? null
: workUnit.classfileResource.scanLog.log(workUnit.classfileResource.getPath(),
"Parsing classfile");
try {
// Parse classfile binary format, creating a Classfile object
final Classfile classfile = new Classfile(workUnit.classpathElement, classpathOrder,
acceptedClassNamesFound, classNamesScheduledForExtendedScanning,
workUnit.classfileResource.getPath(), workUnit.classfileResource, workUnit.isExternalClass,
stringInternMap, workQueue, scanSpec, subLog);
// Enqueue the classfile for linking
scannedClassfiles.add(classfile);
if (subLog != null) {
subLog.addElapsedTime();
}
} catch (final SkipClassException e) {
if (subLog != null) {
subLog.log(workUnit.classfileResource.getPath(), "Skipping classfile: " + e.getMessage());
subLog.addElapsedTime();
}
} catch (final ClassfileFormatException e) {
if (subLog != null) {
subLog.log(workUnit.classfileResource.getPath(), "Invalid classfile: " + e.getMessage());
subLog.addElapsedTime();
}
} catch (final IOException e) {
if (subLog != null) {
subLog.log(workUnit.classfileResource.getPath(), "Could not read classfile: " + e);
subLog.addElapsedTime();
}
} catch (final Exception e) {
if (subLog != null) {
subLog.log(workUnit.classfileResource.getPath(), "Could not read classfile", e);
subLog.addElapsedTime();
}
}
}
}
/**
* Find classpath elements whose path is a prefix of another classpath element, and record the nesting.
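     * For example (paths are illustrative), if the classpath contains both {@code /path/to/app.jar} and
     * {@code /path/to/app.jar!/BOOT-INF/classes}, then {@code BOOT-INF/classes/} is recorded as a nested
     * classpath root prefix of the first element.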
*
* @param classpathElts
* the classpath elements
* @param log
* the log
*/
private void findNestedClasspathElements(final List<SimpleEntry<String, ClasspathElement>> classpathElts,
final LogNode log) {
// Sort classpath elements into lexicographic order
CollectionUtils.sortIfNotEmpty(classpathElts, new Comparator<SimpleEntry<String, ClasspathElement>>() {
@Override
public int compare(final SimpleEntry<String, ClasspathElement> o1,
final SimpleEntry<String, ClasspathElement> o2) {
return o1.getKey().compareTo(o2.getKey());
}
});
// Find any nesting of elements within other elements
for (int i = 0; i < classpathElts.size(); i++) {
// See if each classpath element is a prefix of any others (if so, they will immediately follow
// in lexicographic order)
final SimpleEntry<String, ClasspathElement> ei = classpathElts.get(i);
final String basePath = ei.getKey();
final int basePathLen = basePath.length();
for (int j = i + 1; j < classpathElts.size(); j++) {
final SimpleEntry<String, ClasspathElement> ej = classpathElts.get(j);
final String comparePath = ej.getKey();
final int comparePathLen = comparePath.length();
boolean foundNestedClasspathRoot = false;
if (comparePath.startsWith(basePath) && comparePathLen > basePathLen) {
// Require a separator after the prefix
final char nextChar = comparePath.charAt(basePathLen);
if (nextChar == '/' || nextChar == '!') {
// basePath is a path prefix of comparePath. Ensure that the nested classpath does
// not contain another '!' zip-separator (since classpath scanning does not recurse
// to jars-within-jars unless they are explicitly listed on the classpath)
final String nestedClasspathRelativePath = comparePath.substring(basePathLen + 1);
if (nestedClasspathRelativePath.indexOf('!') < 0) {
// Found a nested classpath root
foundNestedClasspathRoot = true;
// Store link from prefix element to nested elements
final ClasspathElement baseElement = ei.getValue();
if (baseElement.nestedClasspathRootPrefixes == null) {
baseElement.nestedClasspathRootPrefixes = new ArrayList<>();
}
baseElement.nestedClasspathRootPrefixes.add(nestedClasspathRelativePath + "/");
if (log != null) {
log.log(basePath + " is a prefix of the nested element " + comparePath);
}
}
}
}
if (!foundNestedClasspathRoot) {
// After the first non-match, there can be no more prefix matches in the sorted order
break;
}
}
}
}
/**
     * Separate out directory and zipfile classpath elements, read module-related manifest entries (Add-Exports,
     * Add-Opens, Automatic-Module-Name) for zipfile elements, then find and record nested classpath elements.
*
* @param finalTraditionalClasspathEltOrder
* the final traditional classpath elt order
* @param classpathFinderLog
* the classpath finder log
*/
private void preprocessClasspathElementsByType(final List<ClasspathElement> finalTraditionalClasspathEltOrder,
final LogNode classpathFinderLog) {
final List<SimpleEntry<String, ClasspathElement>> classpathEltDirs = new ArrayList<>();
final List<SimpleEntry<String, ClasspathElement>> classpathEltZips = new ArrayList<>();
for (final ClasspathElement classpathElt : finalTraditionalClasspathEltOrder) {
if (classpathElt instanceof ClasspathElementDir) {
// Separate out ClasspathElementFileDir and ClasspathElementPathDir elements from other types
final File file = classpathElt.getFile();
final String path = file == null ? classpathElt.toString() : file.getPath();
classpathEltDirs.add(new SimpleEntry<>(path, classpathElt));
} else if (classpathElt instanceof ClasspathElementZip) {
// Separate out ClasspathElementZip elements from other types
final ClasspathElementZip classpathEltZip = (ClasspathElementZip) classpathElt;
classpathEltZips.add(new SimpleEntry<>(classpathEltZip.getZipFilePath(), classpathElt));
// Handle module-related manifest entries
if (classpathEltZip.logicalZipFile != null) {
// From JEP 261:
// "A <module>/<package> pair in the value of an Add-Exports attribute has the same
// meaning as the command-line option --add-exports <module>/<package>=ALL-UNNAMED.
// A <module>/<package> pair in the value of an Add-Opens attribute has the same
// meaning as the command-line option --add-opens <module>/<package>=ALL-UNNAMED."
if (classpathEltZip.logicalZipFile.addExportsManifestEntryValue != null) {
for (final String addExports : JarUtils.smartPathSplit(
classpathEltZip.logicalZipFile.addExportsManifestEntryValue, ' ', scanSpec)) {
scanSpec.modulePathInfo.addExports.add(addExports + "=ALL-UNNAMED");
}
}
if (classpathEltZip.logicalZipFile.addOpensManifestEntryValue != null) {
for (final String addOpens : JarUtils.smartPathSplit(
classpathEltZip.logicalZipFile.addOpensManifestEntryValue, ' ', scanSpec)) {
scanSpec.modulePathInfo.addOpens.add(addOpens + "=ALL-UNNAMED");
}
}
// Retrieve Automatic-Module-Name manifest entry, if present
if (classpathEltZip.logicalZipFile.automaticModuleNameManifestEntryValue != null) {
classpathEltZip.moduleNameFromManifestFile =
classpathEltZip.logicalZipFile.automaticModuleNameManifestEntryValue;
}
}
}
// (Ignore ClasspathElementModule, no preprocessing to perform)
}
// Find nested classpath elements (writes to ClasspathElement#nestedClasspathRootPrefixes)
findNestedClasspathElements(classpathEltDirs, classpathFinderLog);
findNestedClasspathElements(classpathEltZips, classpathFinderLog);
}
/**
     * Perform classpath masking of classfiles. If the same relative classfile path occurs multiple times in the
     * classpath, the second and subsequent occurrences are ignored (removed).
*
* @param classpathElementOrder
* the classpath element order
* @param maskLog
* the mask log
*/
private void maskClassfiles(final List<ClasspathElement> classpathElementOrder, final LogNode maskLog) {
final Set<String> acceptedClasspathRelativePathsFound = new HashSet<>();
for (int classpathIdx = 0; classpathIdx < classpathElementOrder.size(); classpathIdx++) {
final ClasspathElement classpathElement = classpathElementOrder.get(classpathIdx);
classpathElement.maskClassfiles(classpathIdx, acceptedClasspathRelativePathsFound, maskLog);
}
if (maskLog != null) {
maskLog.addElapsedTime();
}
}
/**
* Scan the classpath and/or visible modules.
*
* @param finalClasspathEltOrder
* the final classpath elt order
* @param finalClasspathEltOrderStrs
* the final classpath elt order strs
* @param classpathFinder
* the {@link ClasspathFinder}
* @return the scan result
* @throws InterruptedException
* if the scan was interrupted
* @throws ExecutionException
* if the scan threw an uncaught exception
*/
private ScanResult performScan(final List<ClasspathElement> finalClasspathEltOrder,
final List<String> finalClasspathEltOrderStrs, final ClasspathFinder classpathFinder)
throws InterruptedException, ExecutionException {
// Mask classfiles (remove any classfile resources that are shadowed by an earlier definition
// of the same class)
if (scanSpec.enableClassInfo) {
maskClassfiles(finalClasspathEltOrder,
topLevelLog == null ? null : topLevelLog.log("Masking classfiles"));
}
// Merge the file-to-timestamp maps across all classpath elements
final Map<File, Long> fileToLastModified = new HashMap<>();
for (final ClasspathElement classpathElement : finalClasspathEltOrder) {
fileToLastModified.putAll(classpathElement.fileToLastModified);
}
        // Scan classfiles, if scanSpec.enableClassInfo is true.
        // (classNameToClassInfo is a ConcurrentHashMap because it may still be modified by
        // ArrayTypeSignature.getArrayClassInfo() after scanning is complete)
final Map<String, ClassInfo> classNameToClassInfo = new ConcurrentHashMap<>();
final Map<String, PackageInfo> packageNameToPackageInfo = new HashMap<>();
final Map<String, ModuleInfo> moduleNameToModuleInfo = new HashMap<>();
if (scanSpec.enableClassInfo) {
// Get accepted classfile order
final List<ClassfileScanWorkUnit> classfileScanWorkItems = new ArrayList<>();
final Set<String> acceptedClassNamesFound = new HashSet<>();
for (final ClasspathElement classpathElement : finalClasspathEltOrder) {
// Get classfile scan order across all classpath elements
for (final Resource resource : classpathElement.acceptedClassfileResources) {
// Create a set of names of all accepted classes found in classpath element paths,
// and double-check that a class is not going to be scanned twice
final String className = JarUtils.classfilePathToClassName(resource.getPath());
if (!acceptedClassNamesFound.add(className) && !className.equals("module-info")
&& !className.equals("package-info") && !className.endsWith(".package-info")) {
// The class should not be scheduled more than once for scanning, since classpath
// masking was already applied
throw new IllegalArgumentException("Class " + className
+ " should not have been scheduled more than once for scanning due to classpath"
+ " masking -- please report this bug at:"
+ " https://github.com/classgraph/classgraph/issues");
}
// Schedule class for scanning
classfileScanWorkItems
.add(new ClassfileScanWorkUnit(classpathElement, resource, /* isExternal = */ false));
}
}
// Scan classfiles in parallel
final Queue<Classfile> scannedClassfiles = new ConcurrentLinkedQueue<>();
final ClassfileScannerWorkUnitProcessor classfileWorkUnitProcessor =
new ClassfileScannerWorkUnitProcessor(scanSpec, finalClasspathEltOrder,
Collections.unmodifiableSet(acceptedClassNamesFound), scannedClassfiles);
processWorkUnits(classfileScanWorkItems,
topLevelLog == null ? null : topLevelLog.log("Scanning classfiles"),
classfileWorkUnitProcessor);
// Link the Classfile objects to produce ClassInfo objects. This needs to be done from a single thread.
final LogNode linkLog = topLevelLog == null ? null : topLevelLog.log("Linking related classfiles");
while (!scannedClassfiles.isEmpty()) {
final Classfile c = scannedClassfiles.remove();
c.link(classNameToClassInfo, packageNameToPackageInfo, moduleNameToModuleInfo);
}
// Uncomment the following code to create placeholder external classes for any classes
// referenced in type descriptors or type signatures, so that a ClassInfo object can be
// obtained for those class references. This will cause all type descriptors and type
// signatures to be parsed, and class names extracted from them. This will add some
// overhead to the scanning time, and the only benefit is that
// ClassRefTypeSignature.getClassInfo() and AnnotationClassRef.getClassInfo() will never
// return null, since all external classes found in annotation class refs will have a
// placeholder ClassInfo object created for them. This is obscure enough that it is
// probably not worth slowing down scanning for all other usecases, by forcibly parsing
// all type descriptors and type signatures before returning the ScanResult.
// With this code commented out, type signatures and type descriptors are only parsed
// lazily, on demand.
// final Set<String> referencedClassNames = new HashSet<>();
// for (final ClassInfo classInfo : classNameToClassInfo.values()) {
// classInfo.findReferencedClassNames(referencedClassNames);
            //     for (final String referencedClass : referencedClassNames) {
            //         ClassInfo.getOrCreateClassInfo(referencedClass, /* modifiers = */ 0, scanSpec,
            //                 classNameToClassInfo);
            //     }
            // }
if (linkLog != null) {
linkLog.addElapsedTime();
}
} else {
if (topLevelLog != null) {
topLevelLog.log("Classfile scanning is disabled");
}
}
// Return a new ScanResult
return new ScanResult(scanSpec, finalClasspathEltOrder, finalClasspathEltOrderStrs, classpathFinder,
classNameToClassInfo, packageNameToPackageInfo, moduleNameToModuleInfo, fileToLastModified,
nestedJarHandler, topLevelLog);
}
/**
* Open each of the classpath elements, looking for additional child classpath elements that need scanning (e.g.
* {@code Class-Path} entries in jar manifest files), then perform the scan if {@link ScanSpec#performScan} is
* true, or just get the classpath if {@link ScanSpec#performScan} is false.
*
* @return the scan result
* @throws InterruptedException
* if the scan was interrupted
* @throws ExecutionException
* if a worker threw an uncaught exception
*/
private ScanResult openClasspathElementsThenScan() throws InterruptedException, ExecutionException {
// Get order of elements in traditional classpath
final List<ClasspathEntryWorkUnit> rawClasspathEntryWorkUnits = new ArrayList<>();
final List<ClasspathEntry> rawClasspathOrder = classpathFinder.getClasspathOrder().getOrder();
for (final ClasspathEntry rawClasspathEntry : rawClasspathOrder) {
rawClasspathEntryWorkUnits.add(new ClasspathEntryWorkUnit(rawClasspathEntry.classpathEntryObj,
rawClasspathEntry.classLoader, /* parentClasspathElement = */ null,
// classpathElementIdxWithinParent is the original classpath index,
// for toplevel classpath elements
/* classpathElementIdxWithinParent = */ rawClasspathEntryWorkUnits.size(),
/* packageRootPrefix = */ ""));
}
// In parallel, create a ClasspathElement singleton for each classpath element, then call open()
// on each ClasspathElement object, which in the case of jarfiles will cause LogicalZipFile instances
// to be created for each (possibly nested) jarfile, then will read the manifest file and zip entries.
final Set<ClasspathElement> allClasspathElts = Collections
.newSetFromMap(new ConcurrentHashMap<ClasspathElement, Boolean>());
final Set<ClasspathElement> toplevelClasspathElts = Collections
.newSetFromMap(new ConcurrentHashMap<ClasspathElement, Boolean>());
processWorkUnits(rawClasspathEntryWorkUnits,
topLevelLog == null ? null : topLevelLog.log("Opening classpath elements"),
newClasspathEntryWorkUnitProcessor(allClasspathElts, toplevelClasspathElts));
// Determine total ordering of classpath elements, inserting jars referenced in manifest Class-Path
// entries in-place into the ordering, if they haven't been listed earlier in the classpath already.
final List<ClasspathElement> classpathEltOrder = findClasspathOrder(toplevelClasspathElts);
// Find classpath elements that are path prefixes of other classpath elements, and for
// ClasspathElementZip, get module-related manifest entry values
preprocessClasspathElementsByType(classpathEltOrder,
topLevelLog == null ? null : topLevelLog.log("Finding nested classpath elements"));
// Order modules before classpath elements from traditional classpath
final LogNode classpathOrderLog = topLevelLog == null ? null
: topLevelLog.log("Final classpath element order:");
final int numElts = moduleOrder.size() + classpathEltOrder.size();
final List<ClasspathElement> finalClasspathEltOrder = new ArrayList<>(numElts);
final List<String> finalClasspathEltOrderStrs = new ArrayList<>(numElts);
int classpathOrderIdx = 0;
for (final ClasspathElementModule classpathElt : moduleOrder) {
classpathElt.classpathElementIdx = classpathOrderIdx++;
finalClasspathEltOrder.add(classpathElt);
finalClasspathEltOrderStrs.add(classpathElt.toString());
if (classpathOrderLog != null) {
final ModuleRef moduleRef = classpathElt.getModuleRef();
classpathOrderLog.log(moduleRef.toString());
}
}
for (final ClasspathElement classpathElt : classpathEltOrder) {
classpathElt.classpathElementIdx = classpathOrderIdx++;
finalClasspathEltOrder.add(classpathElt);
finalClasspathEltOrderStrs.add(classpathElt.toString());
if (classpathOrderLog != null) {
classpathOrderLog.log(classpathElt.toString());
}
}
// In parallel, scan paths within each classpath element, comparing them against accept/reject
processWorkUnits(finalClasspathEltOrder,
topLevelLog == null ? null : topLevelLog.log("Scanning classpath elements"),
new WorkUnitProcessor<ClasspathElement>() {
@Override
public void processWorkUnit(final ClasspathElement classpathElement,
final WorkQueue<ClasspathElement> workQueueIgnored, final LogNode pathScanLog)
throws InterruptedException {
// Scan the paths within the classpath element
classpathElement.scanPaths(pathScanLog);
}
});
// Filter out classpath elements that do not contain required accepted paths.
List<ClasspathElement> finalClasspathEltOrderFiltered = finalClasspathEltOrder;
if (!scanSpec.classpathElementResourcePathAcceptReject.acceptIsEmpty()) {
finalClasspathEltOrderFiltered = new ArrayList<>(finalClasspathEltOrder.size());
for (final ClasspathElement classpathElement : finalClasspathEltOrder) {
if (classpathElement.containsSpecificallyAcceptedClasspathElementResourcePath) {
finalClasspathEltOrderFiltered.add(classpathElement);
}
}
}
if (performScan) {
// Scan classpath / modules, producing a ScanResult.
return performScan(finalClasspathEltOrderFiltered, finalClasspathEltOrderStrs, classpathFinder);
} else {
// Only getting classpath -- return a placeholder ScanResult to hold classpath elements
if (topLevelLog != null) {
topLevelLog.log("Only returning classpath elements (not performing a scan)");
}
return new ScanResult(scanSpec, finalClasspathEltOrderFiltered, finalClasspathEltOrderStrs,
classpathFinder, /* classNameToClassInfo = */ null, /* packageNameToPackageInfo = */ null,
/* moduleNameToModuleInfo = */ null, /* fileToLastModified = */ null, nestedJarHandler,
topLevelLog);
}
}
/**
* Determine the unique ordered classpath elements, and run a scan looking for file or classfile matches if
* necessary.
*
* @return the scan result
* @throws InterruptedException
* if scanning was interrupted
* @throws CancellationException
* if scanning was cancelled
* @throws ExecutionException
* if a worker threw an uncaught exception
*/
@Override
public ScanResult call() throws InterruptedException, CancellationException, ExecutionException {
ScanResult scanResult = null;
final long scanStart = System.currentTimeMillis();
boolean removeTemporaryFilesAfterScan = scanSpec.removeTemporaryFilesAfterScan;
try {
// Perform the scan
scanResult = openClasspathElementsThenScan();
// Log total time after scan completes, and flush log
if (topLevelLog != null) {
topLevelLog.log("~",
String.format("Total time: %.3f sec", (System.currentTimeMillis() - scanStart) * .001));
topLevelLog.flush();
}
// Call the ScanResultProcessor, if one was provided
if (scanResultProcessor != null) {
try {
scanResultProcessor.processScanResult(scanResult);
} catch (final Exception e) {
scanResult.close();
throw new ExecutionException(e);
}
scanResult.close();
}
} catch (final Throwable e) {
if (topLevelLog != null) {
topLevelLog.log("~",
e instanceof InterruptedException || e instanceof CancellationException
? "Scan interrupted or canceled"
: e instanceof ExecutionException || e instanceof RuntimeException
? "Uncaught exception during scan"
: e.getMessage(),
InterruptionChecker.getCause(e));
// Flush the log
topLevelLog.flush();
}
// Since an exception was thrown, remove temporary files
removeTemporaryFilesAfterScan = true;
// Stop any running threads (should not be needed, threads should already be quiescent)
interruptionChecker.interrupt();
if (failureHandler == null) {
if (removeTemporaryFilesAfterScan) {
// If removeTemporaryFilesAfterScan was set, remove temp files and close resources,
// zipfiles and modules
nestedJarHandler.close(topLevelLog);
}
// If there is no failure handler set, re-throw the exception
throw e;
} else {
// Otherwise, call the failure handler
try {
failureHandler.onFailure(e);
} catch (final Exception f) {
// The failure handler failed
if (topLevelLog != null) {
topLevelLog.log("~", "The failure handler threw an exception:", f);
topLevelLog.flush();
}
// Group the two exceptions into one, using the suppressed exception mechanism
// to show the scan exception below the failure handler exception
final ExecutionException failureHandlerException = new ExecutionException(
"Exception while calling failure handler", f);
failureHandlerException.addSuppressed(e);
if (removeTemporaryFilesAfterScan) {
// If removeTemporaryFilesAfterScan was set, remove temp files and close resources,
// zipfiles and modules
nestedJarHandler.close(topLevelLog);
}
// Throw a new ExecutionException (although this will probably be ignored,
// since any job with a FailureHandler was started with ExecutorService::execute
// rather than ExecutorService::submit)
throw failureHandlerException;
}
}
}
if (removeTemporaryFilesAfterScan) {
// If removeTemporaryFilesAfterScan was set, remove temp files and close resources,
// zipfiles and modules
nestedJarHandler.close(topLevelLog);
}
return scanResult;
}
}
|
package io.pivio.ganges.maven;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.ocpsoft.prettytime.PrettyTime;
import java.util.Date;
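/**
 * Result of a Maven artifact version lookup, serialized to JSON.
 * <p>
 * A rough sketch of the serialized shape (abridged; values are illustrative, and the exact date
 * formatting depends on the configured Jackson {@code ObjectMapper}):
 * <pre>
 * {
 *   "repository_type": "maven",
 *   "group_id": "org.example",
 *   "name": "example-artifact",
 *   "version": "1.0.0",
 *   "latest_version": "1.2.0",
 *   "versions_to_latest_version": 3
 * }
 * </pre>
 */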
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonPropertyOrder({
"repository_type",
"group_id",
"name",
"version",
"release_date_timestamp",
"release_date",
"release_date_pretty",
"latest_version",
"latest_version_release_date_timestamp",
"latest_version_release_date",
"latest_version_release_date_pretty",
"versions_to_latest_version"
})
public class Result {
    static final String TYPE_MAVEN = "maven";
@JsonProperty("latest_version")
String latestVersion;
@JsonProperty("latest_version_release_date_timestamp")
private Date latestVersionReleaseDate;
@JsonProperty("repository_type")
private String repositoryType;
@JsonProperty("release_date_timestamp")
private Date releaseDate;
@JsonProperty("group_id")
private String groupId;
@JsonProperty("versions_to_latest_version")
int versionsToLatestVersion;
@JsonProperty("name")
private String name;
@JsonProperty("version")
private String version;
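    /** No-argument constructor, as typically required by Jackson for deserialization. */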
public Result() {
}
Result(String groupId, String name, String version, String repositoryType, Date releaseDate, String latestVersion, Date latestVersionReleaseDate, int versionsToLatestVersion) {
this.groupId = groupId;
this.name = name;
this.repositoryType = repositoryType;
this.releaseDate = releaseDate;
this.version = version;
this.latestVersion = latestVersion;
this.latestVersionReleaseDate = latestVersionReleaseDate;
this.versionsToLatestVersion = versionsToLatestVersion;
}
@JsonProperty("release_date")
@JsonIgnoreProperties
public String getReleaseDateAsString() {
        return releaseDate == null ? null : releaseDate.toString();
}
@JsonProperty("release_date_pretty")
public String getReleaseDatePretty() {
        return releaseDate == null ? null : new PrettyTime().format(releaseDate);
}
@JsonProperty("latest_version_release_date")
public String getLatestVersionReleaseDateAsString() {
        return latestVersionReleaseDate == null ? null : latestVersionReleaseDate.toString();
}
@JsonProperty("latest_version_release_date_pretty")
public String getLatestVersionReleaseDatePretty() {
        return latestVersionReleaseDate == null ? null : new PrettyTime().format(latestVersionReleaseDate);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Result result = (Result) o;
return new EqualsBuilder()
.append(versionsToLatestVersion, result.versionsToLatestVersion)
.append(latestVersion, result.latestVersion)
.append(latestVersionReleaseDate, result.latestVersionReleaseDate)
.append(repositoryType, result.repositoryType)
.append(releaseDate, result.releaseDate)
.append(groupId, result.groupId)
.append(name, result.name)
.append(version, result.version)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(latestVersion)
.append(latestVersionReleaseDate)
.append(repositoryType)
.append(releaseDate)
.append(groupId)
.append(versionsToLatestVersion)
.append(name)
.append(version)
.toHashCode();
}
}
|
package jabsc.classgen;
import bnfc.abs.AbstractVisitor;
import bnfc.abs.Absyn.ClassBody;
import bnfc.abs.Absyn.ClassDecl;
import bnfc.abs.Absyn.ClassImplements;
import bnfc.abs.Absyn.ClassParamDecl;
import bnfc.abs.Absyn.ClassParamImplements;
import bnfc.abs.Absyn.ExtendsDecl;
import bnfc.abs.Absyn.FieldAssignClassBody;
import bnfc.abs.Absyn.FieldClassBody;
import bnfc.abs.Absyn.InterfDecl;
import bnfc.abs.Absyn.JustBlock;
import bnfc.abs.Absyn.MaybeBlock;
import bnfc.abs.Absyn.MethClassBody;
import bnfc.abs.Absyn.MethSig;
import bnfc.abs.Absyn.MethSignat;
import bnfc.abs.Absyn.Modul;
import bnfc.abs.Absyn.NoBlock;
import bnfc.abs.Absyn.Param;
import bnfc.abs.Absyn.QType;
import bnfc.abs.Absyn.Stm;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Stream;
import javax.lang.model.element.ElementKind;
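/**
 * Visits a parsed ABS module ({@link Modul}) and emits the corresponding types via {@link ClassWriter}:
 * interfaces for interface declarations, classes for the various class declaration forms, and a main
 * class for the optional module-level block.
 */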
final class ModuleVisitor extends AbstractVisitor<Void, ClassWriter> {
private final VisitorState state;
ModuleVisitor(VisitorState state) {
this.state = state;
}
@Override
public Void visit(Modul m, ClassWriter writer) {
state.setCurrentModule(m);
m.listdecl_.stream().forEach(decl -> decl.accept(this, writer));
m.maybeblock_.accept(new MaybeBlock.Visitor<Void, ClassWriter>() {
@Override
public Void visit(JustBlock p, ClassWriter arg) {
p.block_.accept((b, v) -> createMain(b.liststm_), null);
return null;
}
@Override
public Void visit(NoBlock p, ClassWriter arg) {
return null;
}
}, writer);
return null;
}
@Override
public Void visit(InterfDecl inf, ClassWriter writer) {
createInterface(inf.uident_, inf.listmethsignat_, Collections.emptyList());
return null;
}
@Override
public Void visit(ExtendsDecl inf, ClassWriter writer) {
createInterface(inf.uident_, inf.listmethsignat_, inf.listqtype_);
return null;
}
private void createInterface(String name, List<MethSignat> methods, List<QType> supertypes) {
try (ClassWriter declWriter = state.getFileWriter(name, ElementKind.INTERFACE)) {
declWriter.setInterfaces(supertypes, state);
methods.forEach(method -> method.accept(this, declWriter));
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
private Void createMain(List<Stm> liststm) {
try (ClassWriter declWriter =
state.getFileWriter(StateUtil.MAIN_CLASS_NAME, ElementKind.CLASS)) {
declWriter.addMainMethod(liststm, state);
} catch (IOException e) {
throw new IllegalStateException(e);
}
return null;
}
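  /**
   * Generates a class for an ABS class declaration.
   *
   * @param name the declared class name
   * @param params the class parameters
   * @param body1 class body items declared before the init block
   * @param block the optional init block
   * @param body2 class body items declared after the init block
   * @param interfaces the interfaces implemented by the class
   */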
private void createClass(String name, List<Param> params, List<ClassBody> body1,
MaybeBlock block, List<ClassBody> body2, List<QType> interfaces) {
String refinedClassName = state.getRefinedClassName(name);
try (ClassWriter declWriter = state.getFileWriter(refinedClassName, ElementKind.CLASS)) {
declWriter.setInterfaces(interfaces, state);
List<FieldAssignClassBody> fieldAssigns = new ArrayList<>();
Stream.concat(body1.stream(), body2.stream()).forEachOrdered(c -> c.accept(
new ClassBody.Visitor<Void, Void>() {
@Override
public Void visit(FieldClassBody p, Void arg) {
return null;
}
@Override
public Void visit(FieldAssignClassBody p, Void arg) {
fieldAssigns.add(p);
return null;
}
@Override
public Void visit(MethClassBody p, Void arg) {
return null;
}
}, null));
body1.forEach(cb -> cb.accept(this, declWriter));
block.accept(new MaybeBlock.Visitor<Void, ClassWriter>() {
@Override
public Void visit(JustBlock p, ClassWriter arg) {
p.block_.accept((b, w) -> {
w.init(params, b.liststm_, fieldAssigns, state);
return null;
}, arg);
return null;
}
@Override
public Void visit(NoBlock p, ClassWriter arg) {
declWriter.init(params, Collections.emptyList(), fieldAssigns, state);
return null;
}
}, declWriter);
body2.forEach(cb -> cb.accept(this, declWriter));
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
@Override
public Void visit(ClassDecl klass, ClassWriter writer) {
createClass(klass.uident_, Collections.emptyList(), klass.listclassbody_1,
klass.maybeblock_, klass.listclassbody_2, Collections.emptyList());
return null;
}
  @Override
  public Void visit(MethSig sig, ClassWriter writer) {
writer.addMethod(sig, state);
return null;
}
@Override
public Void visit(MethClassBody body, ClassWriter writer) {
writer.addMethod(body, state);
return null;
}
@Override
public Void visit(ClassParamDecl klass, ClassWriter writer) {
createClass(klass.uident_, klass.listparam_, klass.listclassbody_1, klass.maybeblock_,
klass.listclassbody_2, Collections.emptyList());
return null;
}
@Override
public Void visit(ClassImplements klass, ClassWriter writer) {
createClass(klass.uident_, Collections.emptyList(), klass.listclassbody_1,
klass.maybeblock_, klass.listclassbody_2, klass.listqtype_);
return null;
}
@Override
public Void visit(ClassParamImplements klass, ClassWriter writer) {
createClass(klass.uident_, klass.listparam_, klass.listclassbody_1, klass.maybeblock_,
klass.listclassbody_2, klass.listqtype_);
return null;
}
@Override
public Void visit(FieldAssignClassBody body, ClassWriter writer) {
return null;
}
@Override
public Void visit(FieldClassBody body, ClassWriter writer) {
writer.addField(body, state);
return null;
}
}
|
package javax.time.calendar;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.io.StreamCorruptedException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.time.CalendricalException;
import javax.time.Instant;
import javax.time.calendar.zone.ZoneRules;
import javax.time.calendar.zone.ZoneRulesGroup;
/**
* A time-zone representing the set of rules by which the zone offset
* varies through the year and historically.
* <p>
* Time zones are geographical regions where the same rules for time apply.
* The rules are defined by governments and change frequently.
* <p>
* There are a number of sources of time-zone information available,
* each represented by an instance of {@link ZoneRulesGroup}.
* One group is provided as standard - 'TZDB' - and more can be added.
* <p>
* Each group defines a naming scheme for the regions of the time-zone.
* The format of the region is specific to the group.
 * For example, the 'TZDB' group typically uses the format {area}/{city},
* such as 'Europe/London'.
* <p>
* Each group typically produces multiple versions of their data.
* The format of the version is specific to the group.
 * For example, the 'TZDB' group uses the format {year}{letter}, such as '2009b'.
* <p>
* In combination, a unique ID is created expressing the time-zone, formed from
* {groupID}:{regionID}#{versionID}.
* <p>
* The version can be set to an empty string. This represents the "floating version".
* The floating version will always choose the latest applicable set of rules.
* Applications will probably choose to use the floating version, as it guarantees
* usage of the latest rules.
* <p>
* In addition to the group/region/version combinations, {@code TimeZone}
* can represent a fixed offset. This has an empty group and version ID.
* It is not possible to have an invalid instance of a fixed time-zone.
* <p>
* The purpose of capturing all this information is to handle issues when
* manipulating and persisting time-zones. For example, consider what happens if the
* government of a country changed the start or end of daylight savings time.
 * If you created and stored a date using one version of the rules, and then loaded it
 * up when a new version of the rules is in force, what should happen?
* The date might now be invalid, for example due to a gap in the local time-line.
* By storing the version of the time-zone rules data together with the date, it is
* possible to tell that the rules have changed and to process accordingly.
* <p>
* {@code TimeZone} merely represents the identifier of the zone.
* The actual rules are provided by {@link ZoneRules}.
* One difference is that serializing this class only stores the reference to the zone,
* whereas serializing {@code ZoneRules} stores the entire set of rules.
* <p>
* After deserialization, or by using the special constructor, it is possible for the
* time-zone to represent a group/region/version combination that is unavailable.
* Since this class can still be loaded even when the rules cannot, the application can
* continue. For example, a {@link ZonedDateTime} instance could still be queried.
* The application might also take appropriate corrective action.
* For example, an application might choose to download missing rules from a central server.
* <p>
* TimeZone is immutable and thread-safe.
*
* @author Stephen Colebourne
*/
public abstract class TimeZone implements Calendrical, Serializable {
/**
* A serialization identifier for this class.
*/
private static final long serialVersionUID = 1L;
/**
* The time-zone offset for UTC, with an id of 'UTC'.
*/
public static final TimeZone UTC = new Fixed(ZoneOffset.UTC);
/**
* A map of zone overrides to enable the older US time-zone names to be used.
* <p>
* This maps as follows:
* <ul>
* <li>EST - America/Indianapolis</li>
* <li>MST - America/Phoenix</li>
* <li>HST - Pacific/Honolulu</li>
* <li>ACT - Australia/Darwin</li>
* <li>AET - Australia/Sydney</li>
* <li>AGT - America/Argentina/Buenos_Aires</li>
* <li>ART - Africa/Cairo</li>
* <li>AST - America/Anchorage</li>
* <li>BET - America/Sao_Paulo</li>
* <li>BST - Asia/Dhaka</li>
* <li>CAT - Africa/Harare</li>
* <li>CNT - America/St_Johns</li>
* <li>CST - America/Chicago</li>
* <li>CTT - Asia/Shanghai</li>
* <li>EAT - Africa/Addis_Ababa</li>
* <li>ECT - Europe/Paris</li>
* <li>IET - America/Indiana/Indianapolis</li>
* <li>IST - Asia/Kolkata</li>
* <li>JST - Asia/Tokyo</li>
* <li>MIT - Pacific/Apia</li>
* <li>NET - Asia/Yerevan</li>
* <li>NST - Pacific/Auckland</li>
* <li>PLT - Asia/Karachi</li>
* <li>PNT - America/Phoenix</li>
* <li>PRT - America/Puerto_Rico</li>
* <li>PST - America/Los_Angeles</li>
* <li>SST - Pacific/Guadalcanal</li>
* <li>VST - Asia/Ho_Chi_Minh</li>
* </ul>
* The map is unmodifiable.
*/
public static final Map<String, String> OLD_IDS_PRE_2005;
/**
* A map of zone overrides to enable the older US time-zone names to be used.
* <p>
* This maps as follows:
* <ul>
* <li>EST - UTC-05:00</li>
* <li>HST - UTC-10:00</li>
* <li>MST - UTC-07:00</li>
* <li>ACT - Australia/Darwin</li>
* <li>AET - Australia/Sydney</li>
* <li>AGT - America/Argentina/Buenos_Aires</li>
* <li>ART - Africa/Cairo</li>
* <li>AST - America/Anchorage</li>
* <li>BET - America/Sao_Paulo</li>
* <li>BST - Asia/Dhaka</li>
* <li>CAT - Africa/Harare</li>
* <li>CNT - America/St_Johns</li>
* <li>CST - America/Chicago</li>
* <li>CTT - Asia/Shanghai</li>
* <li>EAT - Africa/Addis_Ababa</li>
* <li>ECT - Europe/Paris</li>
* <li>IET - America/Indiana/Indianapolis</li>
* <li>IST - Asia/Kolkata</li>
* <li>JST - Asia/Tokyo</li>
* <li>MIT - Pacific/Apia</li>
* <li>NET - Asia/Yerevan</li>
* <li>NST - Pacific/Auckland</li>
* <li>PLT - Asia/Karachi</li>
* <li>PNT - America/Phoenix</li>
* <li>PRT - America/Puerto_Rico</li>
* <li>PST - America/Los_Angeles</li>
* <li>SST - Pacific/Guadalcanal</li>
* <li>VST - Asia/Ho_Chi_Minh</li>
* </ul>
* The map is unmodifiable.
*/
public static final Map<String, String> OLD_IDS_POST_2005;
static {
Map<String, String> base = new HashMap<String, String>();
base.put("ACT", "Australia/Darwin");
base.put("AET", "Australia/Sydney");
base.put("AGT", "America/Argentina/Buenos_Aires");
base.put("ART", "Africa/Cairo");
base.put("AST", "America/Anchorage");
base.put("BET", "America/Sao_Paulo");
base.put("BST", "Asia/Dhaka");
base.put("CAT", "Africa/Harare");
base.put("CNT", "America/St_Johns");
base.put("CST", "America/Chicago");
base.put("CTT", "Asia/Shanghai");
base.put("EAT", "Africa/Addis_Ababa");
base.put("ECT", "Europe/Paris");
base.put("IET", "America/Indiana/Indianapolis");
base.put("IST", "Asia/Kolkata");
base.put("JST", "Asia/Tokyo");
base.put("MIT", "Pacific/Apia");
base.put("NET", "Asia/Yerevan");
base.put("NST", "Pacific/Auckland");
base.put("PLT", "Asia/Karachi");
base.put("PNT", "America/Phoenix");
base.put("PRT", "America/Puerto_Rico");
base.put("PST", "America/Los_Angeles");
base.put("SST", "Pacific/Guadalcanal");
base.put("VST", "Asia/Ho_Chi_Minh");
Map<String, String> pre = new HashMap<String, String>(base);
pre.put("EST", "America/Indianapolis");
pre.put("MST", "America/Phoenix");
pre.put("HST", "Pacific/Honolulu");
OLD_IDS_PRE_2005 = Collections.unmodifiableMap(pre);
Map<String, String> post = new HashMap<String, String>(base);
post.put("EST", "UTC-05:00");
post.put("MST", "UTC-07:00");
post.put("HST", "UTC-10:00");
OLD_IDS_POST_2005 = Collections.unmodifiableMap(post);
}
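    /**
     * Obtains an instance of {@code TimeZone} from an identifier, using a map of aliases to
     * supplement the standard zone IDs.
     * <p>
     * If the identifier is found in the alias map then the mapped zone ID is used instead,
     * otherwise the identifier is passed unchanged to {@link #of(String)}.
     * For example, {@code TimeZone.of("PST", TimeZone.OLD_IDS_POST_2005)} looks up the zone
     * with region ID 'America/Los_Angeles'.
     *
     * @param timeZoneIdentifier  the time-zone identifier, not null
     * @param aliasMap  a map of alias zone IDs to real zone IDs, not null
     * @return the TimeZone, never null
     * @throws CalendricalException if the time-zone cannot be found
     */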
public static TimeZone of(String timeZoneIdentifier, Map<String, String> aliasMap) {
ISOChronology.checkNotNull(timeZoneIdentifier, "Time Zone ID must not be null");
ISOChronology.checkNotNull(aliasMap, "Alias map must not be null");
String zoneId = aliasMap.get(timeZoneIdentifier);
zoneId = (zoneId != null ? zoneId : timeZoneIdentifier);
return of(zoneId);
}
/**
* Obtains an instance of {@code TimeZone} from an identifier.
* <p>
* Six forms of identifier are recognized:
* <ul>
* <li>{@code {groupID}:{regionID}#{versionID}} - full
* <li>{@code {groupID}:{regionID}} - implies the floating version
* <li>{@code {regionID}#{versionID}} - implies 'TZDB' group and specific version
* <li>{@code {regionID}} - implies 'TZDB' group and the floating version
* <li>{@code UTC{offset}} - fixed time-zone
* <li>{@code GMT{offset}} - fixed time-zone
* </ul>
* <p>
* Most of the formats are based around the group, version and region IDs.
* The version and region ID formats are specific to the group.
* If a group does not support versioning, then the version must be an empty string.
* <p>
* The default group is 'TZDB' which has versions of the form {year}{letter}, such as '2009b'.
* The region ID for the 'TZDB' group is generally of the form '{area}/{city}', such as 'Europe/Paris'.
* This is compatible with most IDs from {@link java.util.TimeZone}.
* <p>
* For example, if a provider is loaded with the ID 'MyProvider' containing a zone ID of
* 'France', then the unique key for version 2.1 would be 'MyProvider:France#2.1'.
* A specific version of the TZDB provider can be specified using this format,
* for example 'TZDB:Asia/Tokyo#2008g'.
* <p>
* The alternate format is for fixed time-zones, where the offset never changes over time.
* A fixed time-zone is returned if the first three characters are 'UTC' or 'GMT' and
* the remainder of the ID is a valid format for {@link ZoneOffset#of(String)}.
* The result will have a normalized time-zone ID of 'UTC{offset}', or just 'UTC' if the offset is zero.
* <p>
     * Note that it is intended that fixed offset time-zones are rarely used. Applications should use
     * {@link ZoneOffset} and {@link OffsetDateTime} in preference.
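     * <p>
     * Some illustrative usages (whether a particular group or version is available depends on the
     * zone rules data that has been loaded):
     * <pre>
     *   TimeZone london = TimeZone.of("Europe/London");          // 'TZDB' group, floating version
     *   TimeZone tokyo  = TimeZone.of("TZDB:Asia/Tokyo#2008g");  // specific group and version
     *   TimeZone fixed  = TimeZone.of("UTC+02:00");              // fixed offset time-zone
     * </pre>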
*
* @param zoneID the time-zone identifier, not null
* @return the TimeZone, never null
* @throws CalendricalException if the time-zone cannot be found
*/
public static TimeZone of(String zoneID) {
ISOChronology.checkNotNull(zoneID, "Time zone ID must not be null");
// special fixed cases
if (zoneID.equals("UTC") || zoneID.equals("GMT")) {
return UTC;
}
        if ((zoneID.startsWith("UTC") || zoneID.startsWith("GMT")) && zoneID.indexOf('#') < 0) {
try {
return of(ZoneOffset.of(zoneID.substring(3)));
} catch (IllegalArgumentException ex) {
// continue, in case it is something like GMT0, GMT+0, GMT-0
}
}
// main parsing
ZoneRulesGroup group;
int colonPos = zoneID.indexOf(':');
if (colonPos >= 0) {
group = ZoneRulesGroup.getGroup(zoneID.substring(0, colonPos)); // validates group available
zoneID = zoneID.substring(colonPos + 1);
} else {
group = ZoneRulesGroup.getGroup("TZDB"); // validates group available
}
String versionID = "";
        int hashPos = zoneID.indexOf('#');
if (hashPos >= 0) {
versionID = zoneID.substring(hashPos + 1);
zoneID = zoneID.substring(0, hashPos);
if (group.isValidRules(zoneID, versionID) == false) {
throw new CalendricalException("Unknown time-zone region or version: " + group.getID() + ":" + zoneID + '#' + versionID);
}
} else {
if (group.isValidRegionID(zoneID) == false) {
throw new CalendricalException("Unknown time-zone region: " + group.getID() + ":" + zoneID);
}
}
return new ID(group.getID(), zoneID, versionID);
}
/**
* Obtains an instance of {@code TimeZone} representing a fixed time-zone.
* <p>
* The time-zone returned from this factory has a fixed offset for all time.
* The region ID will return an identifier formed from 'UTC' and the offset.
* The group and version IDs will both return an empty string.
* <p>
* Fixed time-zones are {@link #isValid() always valid}.
*
* @param offset the zone offset to create a fixed zone for, not null
* @return the TimeZone for the offset, never null
*/
public static TimeZone of(ZoneOffset offset) {
ISOChronology.checkNotNull(offset, "ZoneOffset must not be null");
if (offset == ZoneOffset.UTC) {
return UTC;
}
return new Fixed(offset);
}
/**
* Constructor only accessible within the package.
*/
TimeZone() {
}
/**
* Gets the unique time-zone ID.
* <p>
* The unique key is created from the group ID, version ID and region ID.
* The format is {groupID}:{regionID}#{versionID}.
* If the group is 'TZDB' then the {groupID}: is omitted.
* If the version is floating, then the #{versionID} is omitted.
* Fixed time-zones will only output the region ID.
*
* @return the time-zone unique ID, never null
*/
public abstract String getID();
/**
* Gets the time-zone rules group ID, such as 'TZDB'.
* <p>
* Time zone rules are provided by groups referenced by an ID.
* <p>
* For fixed time-zones, the group ID will be an empty string.
*
* @return the time-zone rules group ID, never null
*/
public abstract String getGroupID();
/**
* Gets the time-zone region identifier, such as 'Europe/London'.
* <p>
* The time-zone region identifier is of a format specific to the group.
* The default 'TZDB' group generally uses the format {area}/{city}, such as 'Europe/Paris'.
*
* @return the time-zone rules region ID, never null
*/
public abstract String getRegionID();
/**
* Gets the time-zone rules group version, such as '2009b'.
* <p>
* Time zone rules change over time as governments change the associated laws.
* The time-zone groups capture these changes by issuing multiple versions
* of the data. An application can reference the exact set of rules used
* by using the group ID and version. Once loaded, there is no way to unload
* a version of the rules, however new versions may be added.
* <p>
* The version can be an empty string which represents the floating version.
* This always uses the latest version of the rules available.
* <p>
* For fixed time-zones, the version ID will be an empty string.
*
* @return the time-zone rules version ID, empty if the version is floating, never null
*/
public abstract String getVersionID();
/**
     * Checks if the time-zone is fixed, such that the offset never varies.
* <p>
* It is intended that {@link OffsetDateTime}, {@link OffsetDate} and
* {@link OffsetTime} are used in preference to fixed offset time-zones
* in {@link ZonedDateTime}.
*
* @return true if the time-zone is fixed and the offset never changes
*/
public abstract boolean isFixed();
/**
* Checks if the version is floating.
* <p>
* A floating version will track the latest available version of the rules.
* <p>
* For group based time-zones, this returns true if the version ID is empty,
* which is the definition of a floating zone.
* <p>
* For fixed time-zones, true is returned.
*
* @return true if the version is floating
*/
public abstract boolean isFloatingVersion();
/**
* Returns a copy of this time-zone with a floating version.
* <p>
* For group based time-zones, this returns a {@code TimeZone} with the
* same group and region, but a floating version.
* The group and region IDs are not validated.
* <p>
* For fixed time-zones, {@code this} is returned.
*
* @return the new updated time-zone, never null
*/
public abstract TimeZone withFloatingVersion();
/**
* Checks if the version is the latest version.
* <p>
* For floating group based time-zones, true is returned.
* <p>
* For non-floating group based time-zones, this returns true if the version
* stored is the same as the latest version available for the group and region.
* The group and region IDs are validated in order to calculate the latest version.
* <p>
* For fixed time-zones, true is returned.
*
* @return true if the version is the latest available
* @throws CalendricalException if the version is non-floating and the group or region ID is not found
*/
public abstract boolean isLatestVersion();
/**
* Returns a copy of this time-zone with the latest available version ID.
* <p>
* For floating and non-floating group based time-zones, this returns a zone with the same
* group and region, but the latest version that has been registered.
* The group and region IDs are validated in order to calculate the latest version.
* <p>
* For fixed time-zones, {@code this} is returned.
*
* @return the new updated time-zone, never null
* @throws CalendricalException if the version is non-floating and the group or region ID is not found
*/
public abstract TimeZone withLatestVersion();
/**
* Returns a copy of this time-zone with the specified version ID.
* <p>
* For group based time-zones, this returns a {@code TimeZone}
* with the same group and region, but the specified version.
* The group and region IDs are validated to ensure that the version is valid.
* <p>
* For fixed time-zones, the version must be an empty string, otherwise an
* exception is thrown.
*
* @param versionID the version ID to use, empty means floating version, not null
* @return the new updated time-zone, never null
* @throws CalendricalException if the time-zone is fixed and the version is not empty
* @throws CalendricalException if the group, region or version ID is not found
*/
public abstract TimeZone withVersion(String versionID);
/**
* Returns a copy of this time-zone with the latest version that is valid
* for the specified date-time and offset.
* <p>
* This method validates the group and region IDs.
*
* @param dateTime the date-time to get the latest version for
* @return the new updated time-zone, never null
* @throws CalendricalException if the group or region ID is not found
* @throws CalendricalException if there are no valid rules for the date-time
*/
public abstract TimeZone withLatestVersionValidFor(OffsetDateTime dateTime);
/**
* Finds the zone rules group for the stored group ID, such as 'TZDB'.
* <p>
* Time zone rules are provided by groups referenced by an ID.
* <p>
* Fixed time-zones are not provided by a group, thus this method throws
* an exception if the time-zone is fixed.
* <p>
* Callers of this method need to be aware of an unusual scenario.
* It is possible to obtain a {@code TimeZone} instance even when the
* rules are not available. This typically occurs when a {@code TimeZone}
* is loaded from a previously stored version but the rules are not available.
* In this case, the {@code TimeZone} instance is still valid, as is
* any associated object, such as {@link ZonedDateTime}. It is impossible to
* perform any calculations that require the rules however, and this method
* will throw an exception.
*
* @return the time-zone rules group ID, never null
* @throws CalendricalException if the time-zone is fixed
* @throws CalendricalException if the group ID cannot be found
*/
public abstract ZoneRulesGroup getGroup();
/**
* Checks if this time-zone is valid such that rules can be obtained for it.
* <p>
* This will return true if the rules are available for the group, region
* and version ID combination. If this method returns true, then
* {@link #getRules()} will return a valid rules instance.
* <p>
* A time-zone can be invalid if it is deserialized in a JVM which does not
* have the same rules loaded as the JVM that stored it.
* <p>
* If this object declares a floating version of the rules and a background
* thread is used to update the available rules, then the result of calling
* this method may vary over time.
* Each individual call will remain thread-safe.
* <p>
* If this is a fixed time-zone, then it is always valid.
*
* @return true if this time-zone is valid and rules are available
*/
public abstract boolean isValid();
/**
* Gets the time-zone rules allowing calculations to be performed.
* <p>
* The rules provide the functionality associated with a time-zone,
* such as finding the offset for a given instant or local date-time.
* Different rules may be returned depending on the group, version and zone.
* <p>
* If this object declares a specific version of the rules, then the result will
* be of that version. If this object declares a floating version of the rules,
* then the latest version available will be returned.
* <p>
* A time-zone can be invalid if it is deserialized in a JVM which does not
* have the same rules loaded as the JVM that stored it. In this case, calling
* this method will throw an exception.
* <p>
* If this object declares a floating version of the rules and a background
* thread is used to update the available rules, then the result of calling
* this method may vary over time.
* Each individual call will remain thread-safe.
*
* @return the rules, never null
* @throws CalendricalException if the group, region or version ID cannot be found
*/
public abstract ZoneRules getRules();
/**
* Checks if this time-zone is valid such that rules can be obtained for it
* which are valid for the specified date-time and offset.
* <p>
* This will return true if the rules are available for the group, region
* and version ID combination that are valid for the specified date-time.
* If this method returns true, then {@link #getRulesValidFor(OffsetDateTime)}
* will return a valid rules instance.
* <p>
* A time-zone can be invalid if it is deserialized in a JVM which does not
* have the same rules loaded as the JVM that stored it.
* <p>
* If this object declares a floating version of the rules and a background
* thread is used to update the available rules, then the result of calling
* this method may vary over time.
* Each individual call will remain thread-safe.
* <p>
* If this is a fixed time-zone, then it is valid if the offset matches the date-time.
*
* @param dateTime a date-time for which the rules must be valid, null returns false
* @return true if this time-zone is valid and rules are available
*/
public abstract boolean isValidFor(OffsetDateTime dateTime);
/**
* Gets the time-zone rules allowing calculations to be performed, ensuring that
* the date-time and offset specified is valid for the returned rules.
* <p>
* The rules provide the functionality associated with a time-zone,
* such as finding the offset for a given instant or local date-time.
* Different rules may be returned depending on the group, version and zone.
* <p>
* If this object declares a specific version of the rules, then the result will
* be of that version providing that the specified date-time is valid for those rules.
* If this object declares a floating version of the rules, then the latest
* version of the rules where the date-time is valid will be returned.
* <p>
* A time-zone can be invalid if it is deserialized in a JVM which does not
* have the same rules loaded as the JVM that stored it. In this case, calling
* this method will throw an exception.
* <p>
* If this object declares a floating version of the rules and a background
* thread is used to update the available rules, then the result of calling
* this method may vary over time.
* Each individual call will remain thread-safe.
*
* @param dateTime a date-time for which the rules must be valid, not null
* @return the latest rules for this zone where the date-time is valid, never null
* @throws CalendricalException if the zone ID cannot be found
* @throws CalendricalException if no rules match the zone ID and date-time
*/
public abstract ZoneRules getRulesValidFor(OffsetDateTime dateTime);
/**
* Gets the textual name of this zone.
*
* @return the time-zone name, never null
*/
public String getName() {
return getRegionID(); // TODO
}
/**
* Gets the short textual name of this zone.
*
* @return the time-zone short name, never null
*/
public String getShortName() {
return getRegionID(); // TODO
}
/**
* Is this instance equal to that specified by comparing the ID.
*
* @param otherZone the other zone, null returns false
* @return true if this zone is the same as that specified
*/
@Override
public boolean equals(Object otherZone) {
if (this == otherZone) {
return true;
}
if (otherZone instanceof TimeZone) {
TimeZone zone = (TimeZone) otherZone;
return getRegionID().equals(zone.getRegionID()) &&
getVersionID().equals(zone.getVersionID()) &&
getGroupID().equals(zone.getGroupID());
}
return false;
}
/**
* A hash code for this time-zone ID.
*
* @return a suitable hash code
*/
@Override
public int hashCode() {
return getGroupID().hashCode() ^ getRegionID().hashCode() ^ getVersionID().hashCode();
}
/**
* Returns a string representation of the time-zone.
* <p>
* This returns {@link #getID()}.
*
* @return the time-zone ID, never null
*/
@Override
public String toString() {
return getID();
}
/**
* Gets the value of the specified calendrical rule.
* <p>
* This method queries the value of the specified calendrical rule.
* If the value cannot be returned for the rule from this time-zone then
* {@code null} will be returned.
*
* @param rule the rule to use, not null
* @return the value for the rule, null if the value cannot be returned
*/
public <T> T get(CalendricalRule<T> rule) {
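// a fixed zone can answer the ZoneOffset rule directly from its constant rules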
if (rule.equals(ZoneOffset.rule()) && isFixed()) {
return rule.reify(getRules().getOffset(Instant.EPOCH));
}
return rule().deriveValueFor(rule, this, this);
}
/**
* Gets the field rule for {@code TimeZone}.
*
* @return the field rule for the time-zone, never null
*/
public static CalendricalRule<TimeZone> rule() {
return Rule.INSTANCE;
}
/**
* Rule implementation.
*/
static final class Rule extends CalendricalRule<TimeZone> implements Serializable {
private static final CalendricalRule<TimeZone> INSTANCE = new Rule();
private static final long serialVersionUID = 1L;
private Rule() {
super(TimeZone.class, ISOChronology.INSTANCE, "TimeZone", null, null);
}
private Object readResolve() {
return INSTANCE;
}
@Override
protected TimeZone derive(Calendrical calendrical) {
ZonedDateTime zdt = calendrical.get(ZonedDateTime.rule());
return zdt != null ? zdt.getZone() : null;
}
}
/**
* ID based time-zone.
* This can refer to an id that does not have available rules.
*/
static final class ID extends TimeZone {
/** A serialization identifier for this class. */
private static final long serialVersionUID = 1L;
/** The time-zone group ID, not null. */
private final String groupID;
/** The time-zone region ID, not null. */
private final String regionID;
/** The time-zone version ID, not null. */
private final String versionID;
/**
* Constructor.
*
* @param groupID the time-zone rules group ID, not null
* @param regionID the time-zone region ID, not null
* @param versionID the time-zone rules version ID, not null
*/
ID(String groupID, String regionID, String versionID) {
this.groupID = groupID;
this.regionID = regionID;
this.versionID = versionID;
}
/**
* Validate deserialization.
*
* @param in the input stream
*/
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
if (groupID == null || groupID.length() == 0 || regionID == null || versionID == null) {
throw new StreamCorruptedException();
}
}
@Override
public String getID() {
if (groupID.equals("TZDB")) {
return regionID + (versionID.length() == 0 ? "" : '#' + versionID);
}
return groupID + ':' + regionID + (versionID.length() == 0 ? "" : '#' + versionID);
}
@Override
public String getGroupID() {
return groupID;
}
@Override
public String getRegionID() {
return regionID;
}
@Override
public String getVersionID() {
return versionID;
}
@Override
public boolean isFixed() {
return false;
}
@Override
public boolean isFloatingVersion() {
return versionID.length() == 0;
}
@Override
public TimeZone withFloatingVersion() {
if (isFloatingVersion()) {
return this;
}
return new ID(groupID, regionID, "");
}
@Override
public boolean isLatestVersion() {
return isFloatingVersion() ||
versionID.equals(getGroup().getLatestVersionID(regionID)); // validates IDs
}
@Override
public TimeZone withLatestVersion() {
String versionID = getGroup().getLatestVersionID(regionID); // validates IDs
if (versionID.equals(this.versionID)) {
return this;
}
return new ID(groupID, regionID, versionID);
}
@Override
public TimeZone withVersion(String versionID) {
ISOChronology.checkNotNull(versionID, "Version ID must not be null");
if (versionID.length() == 0) {
return withFloatingVersion();
}
if (getGroup().isValidRules(regionID, versionID) == false) {
throw new CalendricalException("Unknown version: " + groupID + ":" + regionID + '#' + versionID);
}
if (versionID.equals(this.versionID)) {
return this;
}
return new ID(groupID, regionID, versionID);
}
@Override
public TimeZone withLatestVersionValidFor(OffsetDateTime dateTime) {
ISOChronology.checkNotNull(dateTime, "OffsetDateTime must not be null");
return withVersion(getGroup().getLatestVersionIDValidFor(regionID, dateTime));
}
@Override
public ZoneRulesGroup getGroup() {
return ZoneRulesGroup.getGroup(groupID);
}
@Override
public boolean isValid() {
if (isFloatingVersion()) {
return ZoneRulesGroup.isValidGroupID(groupID) && getGroup().isValidRegionID(regionID);
}
return ZoneRulesGroup.isValidGroupID(groupID) && getGroup().isValidRules(regionID, versionID);
}
@Override
public ZoneRules getRules() {
ZoneRulesGroup group = getGroup();
if (isFloatingVersion()) {
return group.getRules(regionID, group.getLatestVersionID(regionID));
}
return group.getRules(regionID, versionID);
}
@Override
public boolean isValidFor(OffsetDateTime dateTime) {
if (dateTime == null) {
return false;
}
try {
getRulesValidFor(dateTime);
return true;
} catch (CalendricalException ex) {
return false;
}
}
@Override
public ZoneRules getRulesValidFor(OffsetDateTime dateTime) {
ISOChronology.checkNotNull(dateTime, "OffsetDateTime must not be null");
ZoneRulesGroup group = getGroup();
if (isFloatingVersion()) {
return group.getRules(regionID, group.getLatestVersionIDValidFor(regionID, dateTime));
}
return group.getRulesValidFor(regionID, versionID, dateTime);
}
}
/**
* Fixed time-zone.
*/
static final class Fixed extends TimeZone {
/** A serialization identifier for this class. */
private static final long serialVersionUID = 1L;
/** The zone id. */
private final String id;
/** The zone rules. */
private final transient ZoneRules rules;
/**
* Constructor.
*
* @param offset the offset, not null
*/
Fixed(ZoneOffset offset) {
this.rules = ZoneRules.fixed(offset);
this.id = rules.toString();
}
/**
* Handle deserialization.
*
* @return the resolved instance, never null
*/
private Object readResolve() throws ObjectStreamException {
if (id == null || id.startsWith("UTC") == false) {
throw new StreamCorruptedException();
}
// fixed time-zone must always be valid
return TimeZone.of(id);
}
@Override
public String getID() {
return id;
}
@Override
public String getGroupID() {
return "";
}
@Override
public String getRegionID() {
return id;
}
@Override
public String getVersionID() {
return "";
}
@Override
public boolean isFixed() {
return true;
}
@Override
public boolean isFloatingVersion() {
return true;
}
@Override
public TimeZone withFloatingVersion() {
return this;
}
@Override
public boolean isLatestVersion() {
return true;
}
@Override
public TimeZone withLatestVersion() {
return this;
}
@Override
public TimeZone withVersion(String versionID) {
ISOChronology.checkNotNull(versionID, "Version ID must not be null");
if (versionID.length() > 0) {
throw new CalendricalException("Fixed time-zone does not provide versions");
}
return this;
}
@Override
public TimeZone withLatestVersionValidFor(OffsetDateTime dateTime) {
ISOChronology.checkNotNull(dateTime, "OffsetDateTime must not be null");
if (getRules().getOffset(dateTime).equals(dateTime.getOffset()) == false) {
throw new CalendricalException("Fixed time-zone " + getID() + " is invalid for date-time " + dateTime);
}
return this;
}
@Override
public ZoneRulesGroup getGroup() {
throw new CalendricalException("Fixed time-zone is not provided by a group");
}
@Override
public boolean isValid() {
return true;
}
@Override
public ZoneRules getRules() {
return rules;
}
@Override
public boolean isValidFor(OffsetDateTime dateTime) {
if (dateTime == null) {
return false;
}
return rules.getOffset(dateTime).equals(dateTime.getOffset());
}
@Override
public ZoneRules getRulesValidFor(OffsetDateTime dateTime) {
ISOChronology.checkNotNull(dateTime, "OffsetDateTime must not be null");
// fixed rules always in transient field
if (rules.getOffset(dateTime).equals(dateTime.getOffset()) == false) {
throw new CalendricalException("Fixed time-zone " + getID() + " is invalid for date-time " + dateTime);
}
return rules;
}
}
}
|
package kodkod.engine;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Iterator;
import kodkod.ast.Formula;
import kodkod.ast.Relation;
import kodkod.engine.config.ExtendedOptions;
import kodkod.engine.config.Options;
import kodkod.engine.config.Reporter;
import kodkod.engine.unbounded.ElectrodPrinter;
import kodkod.engine.unbounded.ElectrodReader;
import kodkod.engine.unbounded.InvalidUnboundedProblem;
import kodkod.engine.unbounded.InvalidUnboundedSolution;
import kodkod.instance.PardinusBounds;
import kodkod.instance.TemporalInstance;
/**
* A computational engine for solving unbounded temporal relational
* satisfiability problems. Such a problem is described by a
* {@link kodkod.ast.Formula formula} in first order temporal relational logic;
* finite unbounded temporal {@link PardinusBounds bounds} on the value of each
* {@link Relation relation} constrained by the respective formula; and a set of
* {@link ExtendedOptions options}, although there are currently no particular
* options for unbounded temporal solving.
*
* <p>
* An {@link ElectrodSolver} takes as input a relational problem and produces a
* satisfying model or a {@link TemporalInstance temporal instance} of it, if
* one exists.
* </p>
*
* <p>
* Although Electrod does not support solution iteration, it is implemented as
* an {@link IterableSolver} so that it can be used by the Alloy Analyzer. The
* resulting iterator contains at most one satisfiable solution.
* </p>
*
* @author Nuno Macedo // [HASLab] unbounded temporal model finding
*/
public class ElectrodSolver implements UnboundedSolver<ExtendedOptions>,
TemporalSolver<ExtendedOptions>,
IterableSolver<PardinusBounds, ExtendedOptions> {
private final ExtendedOptions options;
private String file;
/**
* Constructs a new Electrod solver with the given options.
*
* @param options the solver options.
* @throws NullPointerException
* options = null
*/
public ElectrodSolver(ExtendedOptions options) {
if (options == null)
throw new NullPointerException();
this.options = options;
}
/**
* {@inheritDoc}
*/
public ExtendedOptions options() {
return options;
}
/**
* {@inheritDoc}
*/
public Solution solve(Formula formula, PardinusBounds bounds)
throws InvalidUnboundedProblem, InvalidUnboundedSolution {
Reporter rep = options.reporter();
// create a directory with the specified unique name
File dir = new File(options.uniqueName());
if (!dir.exists()) dir.mkdir();
file = dir+"/"+bounds.hashCode();
PrintWriter writer;
try {
writer = new PrintWriter(file+".elo");
String electrod = ElectrodPrinter.print(formula, bounds, rep);
writer.println(electrod);
writer.close();
rep.debug("New Electrod problem at "+dir+".");
rep.debug(electrod);
} catch (FileNotFoundException e) {
throw new AbortedException("Electrod problem generation failed.", e);
}
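// run the external 'electrod' binary on the generated .elo file; verbosity follows the debug flag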
ProcessBuilder builder = new ProcessBuilder("electrod",Options.isDebug()?"-vv":"-v",file+".elo");
builder.environment().put("PATH", builder.environment().get("PATH")+":/usr/local/bin:.");
int ret;
try {
Process p = builder.start();
BufferedReader output = new BufferedReader(new InputStreamReader(
p.getInputStream()));
BufferedReader error = new BufferedReader(new InputStreamReader(
p.getErrorStream()));
String oline = "";
while ((oline = output.readLine()) != null)
rep.debug(oline);
while ((oline = error.readLine()) != null)
rep.warning(oline);
ret = p.waitFor();
} catch (InterruptedException e) {
throw new AbortedException("Electrod problem interrupted.", e);
} catch (IOException e) {
throw new AbortedException("Electrod process failed.", e);
}
if (ret != 0)
rep.warning("Electrod exit code: "+ret);
else
rep.debug("Electrod ran successfully.");
File xml = new File(file+".xml");
if (!xml.exists())
throw new AbortedException("XML solution file not found: "+file+".xml.");
else {
rep.debug(file);
ElectrodReader rd = new ElectrodReader(bounds);
TemporalInstance res = rd.read(xml);
// TODO: get the stats from the header of the electrod solution
Solution sol;
// ElectrodReader#read returns null if unsat
if (res == null)
sol = Solution.unsatisfiable(new Statistics(0, 0, 0, 0, 0), null);
else
sol = Solution.satisfiable(new Statistics(0, 0, 0, 0, 0), res);
return sol;
}
}
/**
* {@inheritDoc}
*/
public void free() {}
/**
* {@inheritDoc}
*
* Electrod problems return a single solution, so this iterator yields that
* solution, followed by an unsatisfiable element when it is satisfiable.
*/
public Iterator<Solution> solveAll(Formula formula, PardinusBounds bounds) {
Solution s = solve(formula,bounds);
Solution[] ss;
if (s.sat())
// TODO: get the stats from the header of the electrod solution
ss = new Solution[]{s,Solution.unsatisfiable(new Statistics(0, 0, 0, 0, 0), null)};
else
ss = new Solution[]{s};
return Arrays.asList(ss).iterator();
}
}
|
package mmarquee.demo;
import mmarquee.automation.AutomationElement;
import mmarquee.automation.AutomationException;
import mmarquee.automation.AutomationTreeWalker;
import mmarquee.automation.AutomationTreeWalker.AutomationElementVisitor;
import mmarquee.automation.UIAutomation;
public class DemoTreeWalker extends TestBase {
final int recurse_level = 2;
public DemoTreeWalker() {
}
public void run() {
UIAutomation automation = UIAutomation.getInstance();
try {
AutomationTreeWalker walker = automation.getControlViewWalker();
AutomationElement root = automation.getRootElement();
AutomationElementVisitor logVisitor = new AutomationElementVisitor() {
int level = 0;
@Override
public boolean visit(AutomationTreeWalker walker, AutomationElement element) throws AutomationException {
String name = element.getName();
String className = element.getClassName();
String indent = level == 0 ? "" : String.format("%"+ level*2 + "s","");
String message = String.format("%s'%s' [%s]", indent, name, className);
logger.info(message);
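// recurse into child elements until the configured depth limit is reached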
if (recurse_level > level) {
level++;
walker.walk(this, element);
level--;
}
return true;
}
};
walker.walk(logVisitor, root);
} catch (Throwable ex) {
// Smother
logger.error("Exception thrown - " + ex.toString());
ex.printStackTrace();
}
}
}
|
package nl.mpi.kinnate;
import java.util.ArrayList;
public class GraphDataNode {
enum SymbolType {
// symbol terms are used here to try to keep things agnostic
square, triangle, circle
}
public enum RelationType {
sibling, ancestor, descendant, union
}
public static RelationType getOpposingRelationType(RelationType relationType) {
switch (relationType) {
case ancestor:
return GraphDataNode.RelationType.descendant;
case descendant:
return GraphDataNode.RelationType.ancestor;
case sibling:
return GraphDataNode.RelationType.sibling;
case union:
return GraphDataNode.RelationType.union;
}
return GraphDataNode.RelationType.sibling;
}
SymbolType symbolType;
boolean isEgo = false;
private String labelString;
private ArrayList<NodeRelation> relatedNodes = new ArrayList<NodeRelation>();
int xPos;
int yPos;
public class NodeRelation {
public GraphDataNode sourceNode;
public GraphDataNode linkedNode;
public int generationalDistance;
RelationType relationType;
}
public GraphDataNode(int symbolIndex, String labelStringLocal) {
switch (symbolIndex) {
case 0:
symbolType = SymbolType.square;
break;
case 1:
symbolType = SymbolType.circle;
break;
case 2:
symbolType = SymbolType.triangle;
break;
}
labelString = labelStringLocal;
}
public GraphDataNode(String labelStringLocal) {
labelString = labelStringLocal;
}
public String getLabel() {
return labelString;
}
ArrayList<String> unhandledLinkTypesArray = new ArrayList<String>();
// protected void calculateLinks(HashMap<String, GraphDataNode> graphDataNodeList) {
// if (this.imdiTreeObject != null) {
// this.imdiTreeObject.waitTillLoaded();
// for (ImdiTreeObject childNode : this.imdiTreeObject.getAllChildren()) {
// ImdiField[] currentField = childNode.getFields().get("Link");
// if (currentField != null && currentField.length > 0) {
// GraphDataNode.RelationType relationType = GraphDataNode.RelationType.sibling;
// ImdiField[] relationTypeField = childNode.getFields().get("Type"); //todo: this RELA field might not be the best nor the only one to gather relation types from
// if (relationTypeField != null && relationTypeField.length > 0) {
// String typeString = relationTypeField[0].getFieldValue();
// System.out.println("link type field: " + relationTypeField[0].getFieldValue());
// List<String> ancestorTerms = Arrays.asList(new String[]{"SUBN", "_HME", "WIFE", "CHIL", "HUSB", "REPO", "OBJE", "NOTE", "FAMC", "FAMS", "SOUR", "ASSO", "SUBM", "ANCI", "DESI", "ALIA"});
// if (("Kinnate.Gedcom.Entity." + ancestorTerms).contains(typeString)) {
// relationType = GraphDataNode.RelationType.ancestor;
// } else {
// unhandledLinkTypesArray.add(typeString);
//// if ("Father".equals(typeString)) {
//// relationType = GraphDataNode.RelationType.ancestor;
//// } else if ("Mother".equals(typeString)) {
//// relationType = GraphDataNode.RelationType.ancestor;
// System.out.println("link field: " + currentField[0].getFieldValue());
//// linkArray.add(currentField[0].getFieldValue());
// GraphDataNode linkedNode = graphDataNodeList.get(currentField[0].getFieldValue());
// if (linkedNode != null) {
// this.addRelatedNode(linkedNode, 0, relationType);
// if (unhandledLinkTypesArray.size() > 0) {
// System.err.println("unhandledLinkTypes: " + unhandledLinkTypesArray.toString());
// public GraphDataNode[] getLinks() {
// if (imdiTreeObject == null) {
// return linkStringsArray;
// } else {
// ArrayList<String> linkArray = new ArrayList<String>();
// imdiTreeObject.waitTillLoaded();
// for (ImdiTreeObject childNode : imdiTreeObject.getAllChildren()) {
//// System.out.println("getAllChildren: " + childNode.getUrlString());
// ImdiField[] currentField = childNode.getFields().get("Link");
// if (currentField != null && currentField.length > 0) {
// System.out.println("link field: " + currentField[0].getFieldValue());
// linkArray.add(currentField[0].getFieldValue());
// return linkArray.toArray(new String[]{});
public void addRelatedNode(GraphDataNode relatedNode, int generationalDistance, RelationType relationType) {
// note that the test gedcom file has multiple links for a given pair so it might be necessary to filter incoming links on a preferential basis
NodeRelation nodeRelation = new NodeRelation();
nodeRelation.sourceNode = this;
nodeRelation.linkedNode = relatedNode;
nodeRelation.generationalDistance = generationalDistance;
nodeRelation.relationType = relationType;
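// register the relation on both endpoints so either node can enumerate it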
relatedNodes.add(nodeRelation);
relatedNode.relatedNodes.add(nodeRelation);
}
public NodeRelation[] getNodeRelations() {
return relatedNodes.toArray(new NodeRelation[]{});
}
}
|
package nuclibook.server;
import nuclibook.constants.C;
import nuclibook.constants.RequestType;
import nuclibook.entity_utils.SecurityUtils;
import nuclibook.models.Staff;
import nuclibook.routes.*;
import org.apache.commons.configuration.ConfigurationException;
import spark.Session;
import spark.Spark;
import java.awt.*;
import java.math.BigInteger;
import java.security.SecureRandom;
/**
* This class creates and manages the server for the entire system.
* Its main jobs are to set up the server, route pages, and handle top-level security.
*/
public class LocalServer {
/**
* Create the server and perform initial configuration.
* @param args Any command line arguments; ignored in this application.
*/
public static void main(String... args) {
/*
SERVER SETTINGS
*/
//initialise constants
try {
C.initConstants();
} catch (ConfigurationException e) {
e.printStackTrace();
}
// static files folder
Spark.staticFileLocation("/static");
// page security
Spark.before((request, response) -> {
// get path
String path = request.pathInfo();
// get current session and user
Session session = request.session();
if (request.queryParams("token") != null) {
session = SecurityUtils.checkOneOffSession(request.queryParams("token"));
}
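// a one-off token (generated for CSV downloads below) lets a separate request reuse an authenticated session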
Staff user = SecurityUtils.getCurrentUser(session);
// require all POST requests to be authenticated with a CSRF token
if (request.requestMethod().toLowerCase().equals("post") && !SecurityUtils.checkCsrfToken(session, request.queryParams("csrf-token"))) {
Spark.halt(403, "Security token invalid.");
}
// check if they are accessing non-page
if (path.startsWith("/css")
|| path.startsWith("/images")
|| path.startsWith("/js")
|| path.startsWith("/font-awesome")) {
// nothing more to do - everything is fine
return;
}
// check for a password force-change
if (SecurityUtils.checkLoggedIn(session)
&& user != null
&& user.getDaysRemainingToPasswordChange() < 1
&& !path.startsWith("/profile")
&& !(path.equals("/entity-update") && request.queryParams("entity-type") != null && request.queryParams("entity-type").equals("staff-password-change"))) {
response.redirect("/profile?changepw=1&force=1");
Spark.halt("Redirecting.");
}
// check if they are accessing a non-secure page
if (path.startsWith("/login")) {
// nothing more to do - everything is fine
return;
}
// not authenticated?
if (!SecurityUtils.checkLoggedIn(session)) {
// send them back to the login page
response.redirect("/login");
Spark.halt("Redirecting.");
}
// CSV page
if (path.endsWith(".goto-csv")) {
if (Desktop.isDesktopSupported()) {
// generate token
SecureRandom random = new SecureRandom();
String token = new BigInteger(130, random).toString(32);
SecurityUtils.setOneOffToken(token, session);
// redirect
String toOpen = "http://localhost:4567" + path.replace("goto-csv", "csv") + "?token=" + token;
Spark.halt("<!--OPEN:" + toOpen + "-->Your file will download now. Please wait...<script type=\"text/javascript\">window.history.back();</script>");
}
}
});
// prevent viewing pages after logout
Spark.after((request, response) -> {
// get path
String path = request.pathInfo();
// don't apply this for resources
if (path.startsWith("/css")
|| path.startsWith("/images")
|| path.startsWith("/js")
|| path.startsWith("/font-awesome")) {
// nothing more to do - everything is fine
return;
}
response.header("Cache-Control", "no-cache, no-store, must-revalidate");
response.header("Pragma", "no-cache");
response.header("Expires", "0");
});
/*
ROUTES
*/
// basic pages
Spark.get("/", new DashboardRoute());
// day summary
Spark.get("/day-summary", new DaySummaryRoute());
// security
Spark.get("/access-denied", new AccessDeniedRoute());
Spark.get("/login", new LoginRoute(RequestType.GET));
Spark.post("/login", new LoginRoute(RequestType.POST));
Spark.get("/logout", new LogoutRoute());
Spark.get("/profile", new ProfileRoute());
Spark.get("/renew-session", new RenewSessionRoute());
// action logs
Spark.get("/action-log", new ActionLogRoute());
// entity CRUD action routes
Spark.post("/entity-update", new CrudCreateUpdateRoute());
Spark.post("/entity-delete", new CrudDeleteRoute());
// basic CRUD pages
Spark.get("/cameras", new CamerasRoute());
Spark.get("/camera-types", new CameraTypesRoute());
Spark.get("/patients", new PatientsRoute());
Spark.get("/staff", new StaffRoute());
Spark.get("/staff-roles", new StaffRolesRoute());
Spark.get("/therapies", new TherapiesRoute());
Spark.get("/tracers", new TracersRoute());
Spark.get("/generic-events", new GenericEventsRoute());
// staff absences and availabilities
Spark.get("/select-staff/:target:", new SelectStaffRoute());
Spark.get("/staff-absences/:staffid:", new StaffAbsencesRoute());
Spark.get("/staff-availabilities/:staffid:", new StaffAvailabilitiesRoute());
// bookings
Spark.get("/new-booking-1", new NewBookingRouteStage1());
Spark.post("/new-booking-2", new NewBookingRouteStage2());
Spark.post("/new-booking-3", new NewBookingRouteStage3());
Spark.get("/bookings", new BookingsRoute());
Spark.post("/booking-edit", new BookingEditRoute());
Spark.post("/booking-details/:bookingid:", new BookingDetailsRoute());
Spark.get("/booking-details/:bookingid:", new BookingDetailsRoute());
Spark.get("/booking-details/:bookingid:/:newstatus:", new BookingDetailsRoute());
// AJAX routes
Spark.get("/ajax/calendar-data", new AjaxCalendarDataRoute());
Spark.get("/ajax/patient-data/0", new AjaxPatientDataRoute(0));
Spark.get("/ajax/patient-data/1", new AjaxPatientDataRoute(1));
Spark.get("/ajax/action-log-data", new AjaxActionLogDataRoute());
// tracer orders
Spark.get("/tracer-orders", new TracerOrdersRoute());
Spark.get("/tracer-order-details/:tracerorderid:", new TracerOrderDetailsRoute());
Spark.get("/tracer-order-details/:tracerorderid:/:newstatus:", new TracerOrderDetailsRoute());
// patients
Spark.get("/patient-details/:patientid:", new PatientDetailsRoute());
// export
Spark.get("/export/:file:", new ExportRoute());
// import
Spark.post("/import", new ImportRoute());
}
/**
* This will stop the server and effectively kill the application in the event of a fatal error
* @param message The message to be delivered to the user
*/
public static void fatalError(String message) {
Spark.halt(500, "<html>" +
"<head>" +
"</head>" +
"<body>" +
"<p>A fatal error occurred: <em>" + message + "</em>.</p>" +
"<p>Please restart the server and try again.</p>" +
"</body>" +
"</html>");
Spark.stop();
}
}
|
package org.b3log.symphony.model;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.b3log.latke.ioc.LatkeBeanManagerImpl;
import org.b3log.latke.util.Strings;
import org.b3log.symphony.cache.TagCache;
import org.b3log.symphony.util.Symphonys;
import org.json.JSONObject;
public final class Tag {
/**
* Tag.
*/
public static final String TAG = "tag";
/**
* Tags.
*/
public static final String TAGS = "tags";
/**
* Key of tag title.
*/
public static final String TAG_TITLE = "tagTitle";
/**
* Key of tag URI.
*/
public static final String TAG_URI = "tagURI";
/**
* Key of tag icon path.
*/
public static final String TAG_ICON_PATH = "tagIconPath";
/**
* Key of tag CSS.
*/
public static final String TAG_CSS = "tagCSS";
/**
* Key of tag description.
*/
public static final String TAG_DESCRIPTION = "tagDescription";
/**
* Key of tag reference count.
*/
public static final String TAG_REFERENCE_CNT = "tagReferenceCount";
/**
* Key of tag comment count.
*/
public static final String TAG_COMMENT_CNT = "tagCommentCount";
/**
* Key of tag follower count.
*/
public static final String TAG_FOLLOWER_CNT = "tagFollowerCount";
/**
* Key of link count.
*/
public static final String TAG_LINK_CNT = "tagLinkCount";
/**
* Key of tag status.
*/
public static final String TAG_STATUS = "tagStatus";
/**
* Key of tag good count.
*/
public static final String TAG_GOOD_CNT = "tagGoodCnt";
/**
* Key of tag bad count.
*/
public static final String TAG_BAD_CNT = "tagBadCnt";
/**
* Key of tag seo title.
*/
public static final String TAG_SEO_TITLE = "tagSeoTitle";
/**
* Key of tag seo keywords.
*/
public static final String TAG_SEO_KEYWORDS = "tagSeoKeywords";
/**
* Key of tag seo description.
*/
public static final String TAG_SEO_DESC = "tagSeoDesc";
/**
* Key of tag random double value.
*/
public static final String TAG_RANDOM_DOUBLE = "tagRandomDouble";
//// Transient ////
/**
* Key of tag domains.
*/
public static final String TAG_T_DOMAINS = "tagDomains";
/**
* Key of tag count.
*/
public static final String TAG_T_COUNT = "tagCnt";
/**
* Key of tag id.
*/
public static final String TAG_T_ID = "tagId";
/**
* Key of tag description text.
*/
public static final String TAG_T_DESCRIPTION_TEXT = "tagDescriptionText";
/**
* Key of tag create time.
*/
public static final String TAG_T_CREATE_TIME = "tagCreateTime";
/**
* Key of tag creator thumbnail URL.
*/
public static final String TAG_T_CREATOR_THUMBNAIL_URL = "tagCreatorThumbnailURL";
/**
* Key of tag creator thumbnail update time.
*/
public static final String TAG_T_CREATOR_THUMBNAIL_UPDATE_TIME = "tagCreatorThumbnailUpdateTime";
/**
* Key of tag creator name.
*/
public static final String TAG_T_CREATOR_NAME = "tagCreatorName";
/**
* Key of tag participants.
*/
public static final String TAG_T_PARTICIPANTS = "tagParticipants";
/**
* Key of tag participant name.
*/
public static final String TAG_T_PARTICIPANT_NAME = "tagParticipantName";
/**
* Key of tag participant thumbnail URL.
*/
public static final String TAG_T_PARTICIPANT_THUMBNAIL_URL = "tagParticipantThumbnailURL";
/**
* Key of tag participant thumbnail update time.
*/
public static final String TAG_T_PARTICIPANT_THUMBNAIL_UPDATE_TIME = "tagParticipantThumbnailUpdateTime";
/**
* Key of tag participant URL.
*/
public static final String TAG_T_PPARTICIPANT_URL = "tagParticipantURL";
/**
* Key of related tags.
*/
public static final String TAG_T_RELATED_TAGS = "tagRelatedTags";
/**
* Key of tag title lower case.
*/
public static final String TAG_T_TITLE_LOWER_CASE = "tagTitleLowerCase";
/**
* Key of tag links.
*/
public static final String TAG_T_LINKS = "tagLinks";
/**
* Key of tag links count.
*/
public static final String TAG_T_LINKS_CNT = "tagLinksCnt";
//// Tag type constants
/**
* Tag type - creator.
*/
public static final int TAG_TYPE_C_CREATOR = 0;
/**
* Tag type - article.
*/
public static final int TAG_TYPE_C_ARTICLE = 1;
/**
* Tag type - user self.
*/
public static final int TAG_TYPE_C_USER_SELF = 2;
// Status constants
/**
* Tag status - valid.
*/
public static final int TAG_STATUS_C_VALID = 0;
/**
* Tag status - invalid.
*/
public static final int TAG_STATUS_C_INVALID = 1;
// Tag title constants
/**
* Title - Sandbox.
*/
public static final String TAG_TITLE_C_SANDBOX = "Sandbox";
// Validation
/**
* Max tag title length.
*/
public static final int MAX_TAG_TITLE_LENGTH = (null == Symphonys.getInt("tag.maxTagTitleLength"))
? 9 : Symphonys.getInt("tag.maxTagTitleLength");
/**
* Max tag count.
*/
public static final int MAX_TAG_COUNT = 4;
/**
* Tag title pattern string.
*/
public static final String TAG_TITLE_PATTERN_STR = "[\\u4e00-\\u9fa5,\\w,\\s,&,\\+,\\-,\\.]+";
/**
* Tag title pattern.
*/
public static final Pattern TAG_TITLE_PATTERN = Pattern.compile(TAG_TITLE_PATTERN_STR);
/**
* Normalized tag title mappings.
*/
private static final Map<String, Set<String>> NORMALIZE_MAPPINGS = new HashMap<>();
static {
NORMALIZE_MAPPINGS.put("JavaScript", new HashSet<>(Arrays.asList("JS")));
NORMALIZE_MAPPINGS.put("Elasticsearch", new HashSet<>(Arrays.asList("ES", "ES")));
NORMALIZE_MAPPINGS.put("golang", new HashSet<>(Arrays.asList("Go", "Go")));
}
/**
* Keeps only the leading tags of the specified tag string.
*
* @param tagStr the specified tags
* @param num the maximum number of leading tags to keep
* @return the leading tags
*/
public static String useHead(final String tagStr, final int num) {
final String[] tags = tagStr.split(",");
if (tags.length <= num) {
return tagStr;
}
final StringBuilder sb = new StringBuilder();
for (int i = 0; i < num; i++) {
sb.append(tags[i]).append(",");
}
sb.deleteCharAt(sb.length() - 1);
return sb.toString();
}
/**
* Formats the specified tags.
*
* <ul>
* <li>Trims every tag</li>
* <li>Deduplication</li>
* </ul>
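* <p>
* Illustrative only: with the normalization mappings above, an input such as
* "js; js" would typically collapse to "JavaScript" (whitespace stripped,
* separators unified, duplicates removed, then normalized), subject to the
* configured white list and icon tags.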
*
* @param tagStr the specified tags
* @return formatted tags string
*/
public static String formatTags(final String tagStr) {
// unify common separators (full-width ,、; and ASCII ';') into ','
final String tagStr1 = tagStr.replaceAll("\\s+", "").replaceAll(",", ",").replaceAll("、", ",").
replaceAll(";", ",").replaceAll(";", ",");
String[] tagTitles = tagStr1.split(",");
tagTitles = Strings.trimAll(tagTitles);
// deduplication
final Set<String> titles = new LinkedHashSet<>();
for (final String tagTitle : tagTitles) {
if (!exists(titles, tagTitle)) {
titles.add(tagTitle);
}
}
tagTitles = titles.toArray(new String[0]);
int count = 0;
final StringBuilder tagsBuilder = new StringBuilder();
for (final String tagTitle : tagTitles) {
String title = tagTitle.trim();
if (StringUtils.isBlank(title)) {
continue;
}
if (containsWhiteListTags(title)) {
tagsBuilder.append(title).append(",");
count++;
if (count >= MAX_TAG_COUNT) {
break;
}
continue;
}
if (StringUtils.length(title) > MAX_TAG_TITLE_LENGTH) {
continue;
}
if (!TAG_TITLE_PATTERN.matcher(title).matches()) {
continue;
}
title = normalize(title);
tagsBuilder.append(title).append(",");
count++;
if (count >= MAX_TAG_COUNT) {
break;
}
}
if (tagsBuilder.length() > 0) {
tagsBuilder.deleteCharAt(tagsBuilder.length() - 1);
}
return tagsBuilder.toString();
}
/**
* Checks whether the specified tag string contains any of the reserved tags.
*
* @param tagStr the specified tag string
* @return {@code true} if it contains, returns {@code false} otherwise
*/
public static boolean containsReservedTags(final String tagStr) {
for (final String reservedTag : Symphonys.RESERVED_TAGS) {
if (StringUtils.containsIgnoreCase(tagStr, reservedTag)) {
return true;
}
}
return false;
}
/**
* Checks whether the specified tag string contains any of the white list tags.
*
* @param tagStr the specified tag string
* @return {@code true} if it contains, returns {@code false} otherwise
*/
public static boolean containsWhiteListTags(final String tagStr) {
for (final String whiteListTag : Symphonys.WHITE_LIST_TAGS) {
if (StringUtils.equalsIgnoreCase(tagStr, whiteListTag)) {
return true;
}
}
return false;
}
/**
* Checks whether the specified title exists in the specified title set.
*
* @param titles the specified title set
* @param title the specified title to check
* @return {@code true} if exists, returns {@code false} otherwise
*/
private static boolean exists(final Set<String> titles, final String title) {
for (final String setTitle : titles) {
if (setTitle.equalsIgnoreCase(title)) {
return true;
}
}
return false;
}
/**
* Normalizes the specified title. For example, normalizes "JS" to "JavaScript".
*
* @param title the specified title
* @return normalized title
*/
private static String normalize(final String title) {
final TagCache cache = LatkeBeanManagerImpl.getInstance().getReference(TagCache.class);
final List<JSONObject> iconTags = cache.getIconTags(Integer.MAX_VALUE);
Collections.sort(iconTags, new Comparator<JSONObject>() {
@Override
public int compare(final JSONObject t1, final JSONObject t2) {
final String u1Title = t1.optString(Tag.TAG_T_TITLE_LOWER_CASE);
final String u2Title = t2.optString(Tag.TAG_T_TITLE_LOWER_CASE);
return u2Title.length() - u1Title.length();
}
});
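// icon tags are sorted longest-title-first so the most specific containment match wins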
for (final JSONObject iconTag : iconTags) {
final String iconTagTitle = iconTag.optString(Tag.TAG_TITLE);
if (iconTagTitle.length() < 2) {
break;
}
if (StringUtils.containsIgnoreCase(title, iconTagTitle)) {
return iconTagTitle;
}
}
for (final Map.Entry<String, Set<String>> entry : NORMALIZE_MAPPINGS.entrySet()) {
final Set<String> oddTitles = entry.getValue();
for (final String oddTitle : oddTitles) {
if (StringUtils.equalsIgnoreCase(title, oddTitle)) {
return entry.getKey();
}
}
}
return title;
}
/**
* Private constructor.
*/
private Tag() {
}
}
|
package org.basex.query;
import static org.basex.core.Text.*;
import static org.basex.query.QueryText.*;
import static org.basex.query.util.Err.*;
import static org.basex.util.Token.*;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import org.basex.core.Context;
import org.basex.core.Progress;
import org.basex.core.Prop;
import org.basex.data.Data;
import org.basex.data.FTPosData;
import org.basex.data.Nodes;
import org.basex.data.Result;
import org.basex.io.IO;
import org.basex.io.serial.Serializer;
import org.basex.io.serial.SerializerException;
import org.basex.io.serial.SerializerProp;
import org.basex.query.expr.Expr;
import org.basex.query.expr.ParseExpr;
import org.basex.query.item.DBNode;
import org.basex.query.item.Dat;
import org.basex.query.item.Dtm;
import org.basex.query.item.Item;
import org.basex.query.item.QNm;
import org.basex.query.item.SeqType;
import org.basex.query.item.Tim;
import org.basex.query.item.Uri;
import org.basex.query.item.Value;
import org.basex.query.iter.ItemCache;
import org.basex.query.iter.Iter;
import org.basex.query.up.Updates;
import org.basex.query.util.UserFuncs;
import org.basex.query.util.NSLocal;
import org.basex.query.util.Var;
import org.basex.query.util.Variables;
import org.basex.query.util.format.DecFormatter;
import org.basex.util.InputInfo;
import org.basex.util.Token;
import org.basex.util.TokenBuilder;
import org.basex.util.Util;
import org.basex.util.ft.FTLexer;
import org.basex.util.ft.FTOpt;
import org.basex.util.hash.TokenMap;
import org.basex.util.hash.TokenObjMap;
import org.basex.util.list.IntList;
public final class QueryContext extends Progress {
/** Functions. */
public final UserFuncs funcs = new UserFuncs();
/** Variables. */
public final Variables vars = new Variables();
/** Namespaces. */
public NSLocal ns = new NSLocal();
/** Query resources. */
public final QueryResources resource;
/** Database context. */
public final Context context;
/** Query string. */
public String query;
/** XQuery version flag. */
public boolean xquery3;
/** Cached stop word files. */
public HashMap<String, String> stop;
/** Cached thesaurus files. */
public HashMap<String, String> thes;
/** Modified properties. */
public HashMap<String, Object> props;
/** Root expression of the query. */
public Expr root;
/** Current context value. */
public Value value;
/** Current context position. */
public long pos;
/** Current context size. */
public long size;
/** Optional initial context set. */
Nodes nodes;
/** Current full-text options. */
public FTOpt ftopt = new FTOpt();
/** Current full-text token. */
public FTLexer fttoken;
/** Current Date. */
public Dat date;
/** Current DateTime. */
public Dtm dtm;
/** Current Time. */
public Tim time;
/** Decimal-format declarations. */
public final TokenObjMap<DecFormatter> decFormats =
new TokenObjMap<DecFormatter>();
/** Default function namespace. */
public byte[] nsFunc = FNURI;
/** Default element namespace. */
public byte[] nsElem = EMPTY;
/** Static Base URI. */
public Uri baseURI = Uri.EMPTY;
/** Default collation. */
public Uri collation = Uri.uri(URLCOLL);
/** Default boundary-space. */
public boolean spaces;
/** Empty Order mode. */
public boolean orderGreatest;
/** Preserve Namespaces. */
public boolean nsPreserve = true;
/** Inherit Namespaces. */
public boolean nsInherit = true;
/** Ordering mode. */
public boolean ordered;
/** Construction mode. */
public boolean construct;
/** Full-text position data (needed for highlighting of full-text results). */
public FTPosData ftpos;
/** Full-text token counter (needed for highlighting of full-text results). */
public byte ftoknum;
/** Copied nodes, resulting from transform expression. */
public final HashSet<Data> copiedNods = new HashSet<Data>();
/** Pending updates. */
public final Updates updates = new Updates();
/** Indicates if this query might perform updates. */
public boolean updating;
/** Compilation flag: current node has leaves. */
public boolean leaf;
/** Compilation flag: GFLWOR clause performs grouping. */
public boolean grouping;
/** Counter for variable IDs. */
public int varIDs;
/** Pre-declared modules, containing the file path and module uri. */
final TokenMap modDeclared = new TokenMap();
/** Parsed modules, containing the file path and module uri. */
final TokenMap modParsed = new TokenMap();
/** Serializer options. */
SerializerProp serProp;
/** Initial context value type. */
SeqType initType;
/** Initial context value. */
Expr initExpr;
/** Number of successive tail calls. */
public int tailCalls;
/** Maximum number of successive tail calls. */
public final int maxCalls;
/** String container for query background information. */
private final TokenBuilder info = new TokenBuilder();
/** Info flag. */
private final boolean inf;
/** Optimization flag. */
private boolean firstOpt = true;
/** Evaluation flag. */
private boolean firstEval = true;
/**
* Constructor.
* @param ctx database context
*/
public QueryContext(final Context ctx) {
resource = new QueryResources(this);
context = ctx;
nodes = ctx.current();
xquery3 = ctx.prop.is(Prop.XQUERY3);
inf = ctx.prop.is(Prop.QUERYINFO) || Util.debug;
if(ctx.query != null) baseURI = Uri.uri(token(ctx.query.url()));
maxCalls = ctx.prop.num(Prop.TAILCALLS);
}
/**
* Parses the specified query.
* @param q input query
* @throws QueryException query exception
*/
public void parse(final String q) throws QueryException {
root = new QueryParser(q, this).parse(base(), null);
query = q;
}
/**
* Parses the specified module.
* @param q input query
* @throws QueryException query exception
*/
public void module(final String q) throws QueryException {
new QueryParser(q, this).parse(base(), Uri.EMPTY);
}
/**
* Compiles and optimizes the expression.
* @throws QueryException query exception
*/
public void compile() throws QueryException {
// dump compilation info
if(inf) compInfo(NL + QUERYCOMP);
if(initExpr != null) {
// evaluate initial expression
try {
value = initExpr.value(this);
} catch(final QueryException ex) {
if(ex.err() != XPNOCTX) throw ex;
// only {@link ParseExpr} instances may cause this error
CTXINIT.thrw(((ParseExpr) initExpr).input, ex.getMessage());
}
} else if(nodes != null) {
// add full-text container reference
if(nodes.ftpos != null) ftpos = new FTPosData();
// cache the initial context nodes
resource.compile(nodes);
}
// if specified, convert context item to specified type
if(value != null && initType != null) {
value = initType.promote(value, this, null);
}
try {
// compile global functions.
// variables will be compiled if called for the first time
funcs.comp(this);
// compile the expression
root = root.comp(this);
} catch(final StackOverflowError ex) {
Util.debug(ex);
XPSTACK.thrw(null);
}
// dump resulting query
if(inf) info.add(NL + QUERYRESULT + funcs + root + NL);
}
/**
* Returns a result iterator.
* @return result iterator
* @throws QueryException query exception
*/
public Iter iter() throws QueryException {
try {
// evaluate lazily if no updates are possible
return updating ? value().iter() : iter(root);
} catch(final StackOverflowError ex) {
Util.debug(ex);
throw XPSTACK.thrw(null);
}
}
/**
* Returns the result value.
* @return result value
* @throws QueryException query exception
*/
public Value value() throws QueryException {
try {
final Value v = value(root);
if(updating) {
updates.applyUpdates(this);
if(context.data() != null) context.update();
}
return v;
} catch(final StackOverflowError ex) {
Util.debug(ex);
throw XPSTACK.thrw(null);
}
}
/**
* Evaluates the expression with the specified context set.
* @return resulting value
* @throws QueryException query exception
*/
Result eval() throws QueryException {
// evaluates the query
final Iter ir = iter();
final ItemCache ic = new ItemCache();
Item it;
// check if all results belong to the database of the input context
if(serProp == null && nodes != null) {
final IntList pre = new IntList();
while((it = ir.next()) != null) {
checkStop();
if(!(it instanceof DBNode)) break;
if(it.data() != nodes.data) break;
pre.add(((DBNode) it).pre);
}
// completed... return standard nodeset with full-text positions
final int ps = pre.size();
if(it == null) return ps == 0 ? ic :
new Nodes(pre.toArray(), nodes.data, ftpos).checkRoot();
// otherwise, add nodes to standard iterator
for(int p = 0; p < ps; ++p) ic.add(new DBNode(nodes.data, pre.get(p)));
ic.add(it);
}
// use standard iterator
while((it = ir.next()) != null) {
checkStop();
ic.add(it);
}
return ic;
}
/**
* Recursively serializes the query plan.
* @param ser serializer
* @throws IOException I/O exception
*/
protected void plan(final Serializer ser) throws IOException {
// only show root node if functions or variables exist
final boolean r = funcs.size() != 0 || vars.global().size != 0;
if(r) ser.openElement(PLAN);
funcs.plan(ser);
vars.plan(ser);
root.plan(ser);
if(r) ser.closeElement();
}
/**
* Evaluates the specified expression and returns an iterator.
* @param e expression to be evaluated
* @return iterator
* @throws QueryException query exception
*/
public Iter iter(final Expr e) throws QueryException {
checkStop();
return e.iter(this);
}
/**
* Evaluates the specified expression and returns the resulting value.
* @param e expression to be evaluated
* @return value
* @throws QueryException query exception
*/
public Value value(final Expr e) throws QueryException {
checkStop();
return e.value(this);
}
/**
* Returns the current data reference of the context value, or {@code null}.
* @return data reference
*/
public Data data() {
return value != null ? value.data() : null;
}
/**
* Creates a variable with a unique, non-clashing variable name.
* @param ii input info
* @param t type
* @return variable
*/
public Var uniqueVar(final InputInfo ii, final SeqType t) {
return Var.create(this, ii, new QNm(Token.token(varIDs)), t);
}
/**
* Copies properties of the specified context.
* @param ctx query context
*/
public void copy(final QueryContext ctx) {
baseURI = ctx.baseURI;
spaces = ctx.spaces;
construct = ctx.construct;
nsInherit = ctx.nsInherit;
nsPreserve = ctx.nsPreserve;
collation = ctx.collation;
nsElem = ctx.nsElem;
nsFunc = ctx.nsFunc;
orderGreatest = ctx.orderGreatest;
ordered = ctx.ordered;
}
/**
* Adds some optimization info.
* @param string evaluation info
* @param ext text text extensions
*/
public void compInfo(final String string, final Object... ext) {
if(!inf) return;
if(!firstOpt) info.add(QUERYSEP);
firstOpt = false;
info.addExt(string, ext).add(NL);
}
/**
* Adds some evaluation info.
* @param string evaluation info
*/
public void evalInfo(final byte[] string) {
if(!inf) return;
if(firstEval) info.add(NL).add(QUERYEVAL).add(NL);
info.add(QUERYSEP).add(string).add(NL);
firstEval = false;
}
/**
* Returns an IO representation of the base uri.
* @return IO reference
*/
public IO base() {
return baseURI != Uri.EMPTY ? IO.get(string(baseURI.atom())) : null;
}
/**
* Returns info on query compilation and evaluation.
* @return query info
*/
public String info() {
return info.toString();
}
/**
* Returns the serialization properties.
* @param opt if {@code true}, returns {@code null} when no properties are specified
* @return serialization properties
* @throws SerializerException serializer exception
*/
public SerializerProp serProp(final boolean opt) throws SerializerException {
// if available, use local query properties
if(serProp != null) return serProp;
final String serial = context.prop.get(Prop.SERIALIZER);
if(opt && serial.isEmpty()) return null;
// otherwise, apply global serialization option
return new SerializerProp(serial);
}
@Override
public String tit() {
return QUERYEVAL;
}
@Override
public String det() {
return QUERYEVAL;
}
@Override
public double prog() {
return 0;
}
@Override
public String toString() {
return Util.name(this) + '[' + base() + ']';
}
}
|
package org.basex.query.func;
import static org.basex.query.QueryTokens.*;
import static org.basex.query.QueryText.*;
import static javax.xml.datatype.DatatypeConstants.*;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.math.BigInteger;
import javax.xml.datatype.Duration;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
import org.basex.core.Main;
import org.basex.data.Serializer;
import org.basex.query.QueryContext;
import org.basex.query.QueryException;
import org.basex.query.expr.Arr;
import org.basex.query.expr.Expr;
import org.basex.query.item.Item;
import org.basex.query.item.Jav;
import org.basex.query.item.Type;
import org.basex.query.iter.Iter;
import org.basex.query.iter.SeqIter;
import org.basex.query.util.Err;
import org.basex.util.Token;
import org.w3c.dom.Attr;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentFragment;
import org.w3c.dom.Element;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
public final class FunJava extends Arr {
/** Input Java types. */
private static final Class<?>[] JAVA = {
String.class, boolean.class, Boolean.class, byte.class, Byte.class,
short.class, Short.class, int.class, Integer.class, long.class, Long.class,
float.class, Float.class, double.class, Double.class, BigDecimal.class,
BigInteger.class, QName.class, CharSequence.class, byte[].class,
Object[].class,
};
/** Resulting XQuery types. */
private static final Type[] XQUERY = {
Type.STR, Type.BLN, Type.BLN, Type.BYT, Type.BYT,
Type.SHR, Type.SHR, Type.INT, Type.INT, Type.LNG, Type.LNG,
Type.FLT, Type.FLT, Type.DBL, Type.DBL, Type.DEC,
Type.ITR, Type.QNM, Type.STR, Type.HEX, Type.SEQ
};
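  // Note: the two arrays above are parallel; JAVA[i] maps to XQUERY[i].
  // For example, int.class and Integer.class both map to Type.INT, and
  // byte[].class maps to Type.HEX (see type(Class) below).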
/** Java class. */
private final Class<?> cls;
/** Java method. */
private final String mth;
/**
* Constructor.
* @param c Java class
* @param m Java method/field
* @param a arguments
*/
public FunJava(final Class<?> c, final String m, final Expr[] a) {
super(a);
cls = c;
mth = m;
}
@Override
public Iter iter(final QueryContext ctx) throws QueryException {
final Item[] arg = new Item[expr.length];
for(int a = 0; a < expr.length; a++) {
arg[a] = expr[a].iter(ctx).finish();
if(arg[a].size(ctx) == 0) Err.empty(this);
}
Object result = null;
try {
result = mth.equals("new") ? constructor(arg) : method(arg);
} catch(final Exception ex) {
Main.debug(ex);
Err.or(FUNJAVA, info());
}
return result == null ? Iter.EMPTY : iter(result);
}
/**
* Calls a constructor.
* @param ar arguments
* @return resulting object
* @throws Exception exception
*/
private Object constructor(final Item[] ar) throws Exception {
for(final Constructor<?> con : cls.getConstructors()) {
final Object[] arg = args(con.getParameterTypes(), ar, true);
if(arg != null) return con.newInstance(arg);
}
throw new Exception();
}
/**
   * Retrieves a field or calls a method with the specified name.
* @param ar arguments
* @return resulting object
* @throws Exception exception
*/
private Object method(final Item[] ar) throws Exception {
// check if a field with the specified name exists
try {
final Field f = cls.getField(mth);
final boolean st = Modifier.isStatic(f.getModifiers());
if(ar.length == (st ? 0 : 1)) {
return f.get(st ? null : ((Jav) ar[0]).val);
}
} catch(final NoSuchFieldException ex) { /* ignored */ }
for(final Method meth : cls.getMethods()) {
if(!meth.getName().equals(mth)) continue;
final boolean st = Modifier.isStatic(meth.getModifiers());
final Object[] arg = args(meth.getParameterTypes(), ar, st);
if(arg != null) return meth.invoke(st ? null : ((Jav) ar[0]).val, arg);
}
throw new Exception();
}
/**
* Checks if the arguments conform with the specified parameters.
* @param params parameters
* @param args arguments
* @param stat static flag
* @return argument array or null
*/
private Object[] args(final Class<?>[] params, final Item[] args,
final boolean stat) {
final int s = stat ? 0 : 1;
final int l = args.length - s;
if(l != params.length) return null;
/** Function arguments. */
final Object[] val = new Object[l];
int a = 0;
for(final Class<?> par : params) {
final Type jtype = type(par);
if(jtype == null) return null;
final Item arg = args[s + a];
if(!arg.type.instance(jtype) && !jtype.instance(arg.type)) return null;
val[a++] = arg.java();
}
return val;
}
/**
* Returns an appropriate XQuery data type for the specified Java class.
* @param par Java type
* @return xquery type
*/
private static Type type(final Class<?> par) {
for(int j = 0; j < JAVA.length; j++) if(par == JAVA[j]) return XQUERY[j];
return Type.JAVA;
}
/**
* Returns an appropriate XQuery data type for the specified Java object.
* @param o object
* @return xquery type
*/
public static Type type(final Object o) {
final Type t = type(o.getClass());
if(t != Type.JAVA) return t;
if(o instanceof Element) return Type.ELM;
if(o instanceof Document) return Type.DOC;
if(o instanceof DocumentFragment) return Type.DOC;
if(o instanceof Attr) return Type.ATT;
if(o instanceof Comment) return Type.COM;
if(o instanceof ProcessingInstruction) return Type.PI;
if(o instanceof Text) return Type.TXT;
if(o instanceof Duration) {
final Duration d = (Duration) o;
return !d.isSet(YEARS) && !d.isSet(MONTHS) ? Type.DTD :
!d.isSet(HOURS) && !d.isSet(MINUTES) && !d.isSet(SECONDS) ? Type.YMD :
Type.DUR;
}
if(o instanceof XMLGregorianCalendar) {
final QName type = ((XMLGregorianCalendar) o).getXMLSchemaType();
if(type == DATE) return Type.DAT;
if(type == DATETIME) return Type.DTM;
if(type == TIME) return Type.TIM;
if(type == GYEARMONTH) return Type.YMO;
if(type == GMONTHDAY) return Type.MDA;
if(type == GYEAR) return Type.YEA;
if(type == GMONTH) return Type.MON;
if(type == GDAY) return Type.DAY;
}
return null;
}
/**
* Converts the specified object to an iterator.
* @param res object
* @return iterator
*/
private Iter iter(final Object res) {
if(!res.getClass().isArray()) return new Jav(res).iter();
final SeqIter seq = new SeqIter();
if(res instanceof boolean[]) {
for(final Object o : (boolean[]) res) seq.add(new Jav(o));
} else if(res instanceof char[]) {
for(final Object o : (char[]) res) seq.add(new Jav(o));
} else if(res instanceof byte[]) {
for(final Object o : (byte[]) res) seq.add(new Jav(o));
} else if(res instanceof short[]) {
for(final Object o : (short[]) res) seq.add(new Jav(o));
} else if(res instanceof int[]) {
for(final Object o : (int[]) res) seq.add(new Jav(o));
} else if(res instanceof long[]) {
for(final Object o : (long[]) res) seq.add(new Jav(o));
} else if(res instanceof float[]) {
for(final Object o : (float[]) res) seq.add(new Jav(o));
} else if(res instanceof double[]) {
for(final Object o : (double[]) res) seq.add(new Jav(o));
} else {
for(final Object o : (Object[]) res) seq.add(new Jav(o));
}
return seq;
}
@Override
public void plan(final Serializer ser) throws IOException {
ser.openElement(this, NAM, Token.token(cls + "." + mth));
for(final Expr arg : expr) arg.plan(ser);
ser.closeElement();
}
@Override
public String info() {
return cls.getName() + "." + mth + "(...)" + (mth.equals("new") ?
" constructor" : " method");
}
@Override
public String toString() {
return cls + "." + mth + "(" + toString(", ") + ")";
}
}
|
package org.cactoos.io;
import java.io.IOException;
import java.io.InputStream;
import org.cactoos.Input;
import org.cactoos.Scalar;
/**
* Length of Input.
*
* <p>There is no thread-safety guarantee.
*
* @author Yegor Bugayenko (yegor256@gmail.com)
* @version $Id$
* @since 0.1
*/
public final class LengthOfInput implements Scalar<Long> {
/**
* The input.
*/
private final Input source;
/**
* The buffer size.
*/
private final int size;
/**
* Ctor.
* @param input The input
*/
public LengthOfInput(final Input input) {
// @checkstyle MagicNumber (1 line)
this(input, 16 << 10);
}
/**
* Ctor.
* @param input The input
* @param max Buffer size
*/
public LengthOfInput(final Input input, final int max) {
this.source = input;
this.size = max;
}
@Override
public Long asValue() throws IOException {
try (final InputStream stream = this.source.stream()) {
final byte[] buf = new byte[this.size];
long length = 0L;
while (true) {
final int len = stream.read(buf);
if (len > 0) {
length += (long) len;
}
if (len != buf.length) {
break;
}
}
return length;
}
}
}
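/**
 * Minimal usage sketch (not part of the original class). It relies only on
 * what is visible above: {@link Input#stream()} returning an InputStream and
 * {@link LengthOfInput#asValue()} counting the bytes read. The anonymous
 * Input below is purely illustrative.
 */
final class LengthOfInputDemo {
    private LengthOfInputDemo() {
    }
    public static void main(final String... args) throws IOException {
        final byte[] data = "hello".getBytes();
        final Input input = new Input() {
            @Override
            public InputStream stream() {
                // Wrap a fixed byte array as the source to be measured.
                return new java.io.ByteArrayInputStream(data);
            }
        };
        // Prints 5: the number of bytes drained from the stream.
        System.out.println(new LengthOfInput(input).asValue());
    }
}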
|
package org.dynmap.hdmap;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.imageio.ImageIO;
import org.dynmap.Color;
import org.dynmap.DynmapPlugin;
import org.dynmap.Log;
import org.dynmap.utils.DynmapBufferedImage;
import org.dynmap.utils.MapIterator.BlockStep;
import org.dynmap.kzedmap.KzedMap;
import org.dynmap.utils.MapIterator;
/**
 * Loads and scales Minecraft texture packs for the HD map renderer: reads
 * terrain.png plus the optional biome-shading, water and lava images from a
 * texture pack zip (or a directory of the same name), falling back to the
 * "standard" pack when a file is missing. Optional resources include:
 *  BetterGlass/*.png - mod-based improved windows (future optional)
 */
public class TexturePack {
/* Loaded texture packs */
private static HashMap<String, TexturePack> packs = new HashMap<String, TexturePack>();
private static final String TERRAIN_PNG = "terrain.png";
private static final String GRASSCOLOR_PNG = "misc/grasscolor.png";
private static final String FOLIAGECOLOR_PNG = "misc/foliagecolor.png";
private static final String WATER_PNG = "misc/water.png";
private static final String CUSTOMLAVASTILL_PNG = "custom_lava_still.png";
private static final String CUSTOMLAVAFLOWING_PNG = "custom_lava_flowing.png";
private static final String CUSTOMWATERSTILL_PNG = "custom_water_still.png";
private static final String CUSTOMWATERFLOWING_PNG = "custom_water_flowing.png";
private static final String STANDARDTP = "standard";
/* Color modifier codes (x1000 for value in mapping code) */
private static final int COLORMOD_NONE = 0;
private static final int COLORMOD_GRASSTONED = 1;
private static final int COLORMOD_FOLIAGETONED = 2;
private static final int COLORMOD_WATERTONED = 3; /* Not used */
private static final int COLORMOD_ROT90 = 4;
private static final int COLORMOD_ROT180 = 5;
private static final int COLORMOD_ROT270 = 6;
private static final int COLORMOD_FLIPHORIZ = 7;
private static final int COLORMOD_SHIFTDOWNHALF = 8;
private static final int COLORMOD_SHIFTDOWNHALFANDFLIPHORIZ = 9;
private static final int COLORMOD_INCLINEDTORCH = 10;
private static final int COLORMOD_GRASSSIDE = 11;
private static final int COLORMOD_CLEARINSIDE = 12;
/* Special tile index values */
private static final int BLOCKINDEX_BLANK = -1;
private static final int BLOCKINDEX_REDSTONE_NSEW_TONE = 164;
private static final int BLOCKINDEX_REDSTONE_EW_TONE = 165;
private static final int BLOCKINDEX_REDSTONE_NSEW = 180;
private static final int BLOCKINDEX_REDSTONE_EW = 181;
private static final int BLOCKINDEX_STATIONARYWATER = 257;
private static final int BLOCKINDEX_MOVINGWATER = 258;
private static final int BLOCKINDEX_STATIONARYLAVA = 259;
private static final int BLOCKINDEX_MOVINGLAVA = 260;
private static final int MAX_BLOCKINDEX = 260;
private static final int BLOCKTABLELEN = MAX_BLOCKINDEX+1;
private static class LoadedImage {
int[] argb;
int width, height;
int trivial_color;
}
private int[][] terrain_argb;
private int terrain_width, terrain_height;
private int native_scale;
private static final int IMG_GRASSCOLOR = 0;
private static final int IMG_FOLIAGECOLOR = 1;
private static final int IMG_WATER = 2;
private static final int IMG_CUSTOMWATERMOVING = 3;
private static final int IMG_CUSTOMWATERSTILL = 4;
private static final int IMG_CUSTOMLAVAMOVING = 5;
private static final int IMG_CUSTOMLAVASTILL = 6;
private static final int IMG_CNT = 7;
private LoadedImage[] imgs = new LoadedImage[IMG_CNT];
private HashMap<Integer, TexturePack> scaled_textures;
public enum BlockTransparency {
OPAQUE, /* Block is opaque - blocks light - lit by light from adjacent blocks */
TRANSPARENT, /* Block is transparent - passes light - lit by light level in own block */
SEMITRANSPARENT /* Opaque block that doesn't block all rays (steps, slabs) - use light above for face lighting on opaque blocks */
}
public static class HDTextureMap {
private int faces[]; /* index in terrain.png of image for each face (indexed by BlockStep.ordinal()) */
private List<Integer> blockids;
private int databits;
private BlockTransparency bt;
private static HDTextureMap[] texmaps;
private static BlockTransparency transp[];
private static void initializeTable() {
texmaps = new HDTextureMap[16*BLOCKTABLELEN];
transp = new BlockTransparency[BLOCKTABLELEN];
HDTextureMap blank = new HDTextureMap();
for(int i = 0; i < texmaps.length; i++)
texmaps[i] = blank;
for(int i = 0; i < transp.length; i++)
transp[i] = BlockTransparency.OPAQUE;
}
private HDTextureMap() {
blockids = Collections.singletonList(Integer.valueOf(0));
databits = 0xFFFF;
faces = new int[] { BLOCKINDEX_BLANK, BLOCKINDEX_BLANK, BLOCKINDEX_BLANK, BLOCKINDEX_BLANK, BLOCKINDEX_BLANK, BLOCKINDEX_BLANK };
for(int i = 0; i < texmaps.length; i++) {
texmaps[i] = this;
}
}
public HDTextureMap(List<Integer> blockids, int databits, int[] faces, BlockTransparency trans) {
this.faces = faces;
this.blockids = blockids;
this.databits = databits;
this.bt = trans;
}
public void addToTable() {
/* Add entries to lookup table */
for(Integer blkid : blockids) {
for(int i = 0; i < 16; i++) {
if((databits & (1 << i)) != 0) {
texmaps[16*blkid + i] = this;
}
}
transp[blkid] = bt; /* Transparency is only blocktype based right now */
}
}
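        /* Lookup layout: texmaps holds 16 entries per block id, so the
         * index for a block is blkid*16 + blkdata (e.g. id 35, data 14
         * resolves to entry 574). */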
public static HDTextureMap getMap(int blkid, int blkdata) {
return texmaps[(blkid<<4) + blkdata];
}
public static BlockTransparency getTransparency(int blkid) {
return transp[blkid];
}
}
/** Get or load texture pack */
public static TexturePack getTexturePack(String tpname) {
TexturePack tp = packs.get(tpname);
if(tp != null)
return tp;
try {
tp = new TexturePack(tpname); /* Attempt to load pack */
packs.put(tpname, tp);
return tp;
} catch (FileNotFoundException fnfx) {
Log.severe("Error loading texture pack '" + tpname + "' - not found");
}
return null;
}
/**
* Constructor for texture pack, by name
*/
private TexturePack(String tpname) throws FileNotFoundException {
ZipFile zf = null;
File texturedir = getTexturePackDirectory();
File f = new File(texturedir, tpname);
try {
/* Try to open zip */
zf = new ZipFile(f);
/* Find and load terrain.png */
InputStream is;
ZipEntry ze = zf.getEntry(TERRAIN_PNG); /* Try to find terrain.png */
if(ze == null) {
/* Check for terrain.png under standard texture pack*/
File ff = new File(texturedir, STANDARDTP + "/" + TERRAIN_PNG);
is = new FileInputStream(ff);
}
else {
is = zf.getInputStream(ze); /* Get input stream for terrain.png */
}
loadTerrainPNG(is);
is.close();
/* Try to find and load misc/grasscolor.png */
ze = zf.getEntry(GRASSCOLOR_PNG);
if(ze == null) { /* Fall back to standard file */
/* Check for misc/grasscolor.png under standard texture pack*/
File ff = new File(texturedir, STANDARDTP + "/" + GRASSCOLOR_PNG);
is = new FileInputStream(ff);
}
else {
is = zf.getInputStream(ze);
}
loadBiomeShadingImage(is, IMG_GRASSCOLOR);
is.close();
/* Try to find and load misc/foliagecolor.png */
ze = zf.getEntry(FOLIAGECOLOR_PNG);
if(ze == null) {
/* Check for misc/foliagecolor.png under standard texture pack*/
File ff = new File(texturedir, STANDARDTP + "/" + FOLIAGECOLOR_PNG);
is = new FileInputStream(ff);
}
else {
is = zf.getInputStream(ze);
}
loadBiomeShadingImage(is, IMG_FOLIAGECOLOR);
is.close();
/* Try to find and load misc/water.png */
ze = zf.getEntry(WATER_PNG);
if(ze == null) {
File ff = new File(texturedir, STANDARDTP + "/" + WATER_PNG);
is = new FileInputStream(ff);
}
else {
is = zf.getInputStream(ze);
}
loadImage(is, IMG_WATER);
patchTextureWithImage(IMG_WATER, BLOCKINDEX_STATIONARYWATER);
patchTextureWithImage(IMG_WATER, BLOCKINDEX_MOVINGWATER);
is.close();
/* Optional files - process if they exist */
ze = zf.getEntry(CUSTOMLAVAFLOWING_PNG);
if(ze != null) {
is = zf.getInputStream(ze);
loadImage(is, IMG_CUSTOMLAVAMOVING);
patchTextureWithImage(IMG_CUSTOMLAVAMOVING, BLOCKINDEX_MOVINGLAVA);
}
ze = zf.getEntry(CUSTOMLAVASTILL_PNG);
if(ze != null) {
is = zf.getInputStream(ze);
loadImage(is, IMG_CUSTOMLAVASTILL);
patchTextureWithImage(IMG_CUSTOMLAVASTILL, BLOCKINDEX_STATIONARYLAVA);
}
ze = zf.getEntry(CUSTOMWATERFLOWING_PNG);
if(ze != null) {
is = zf.getInputStream(ze);
loadImage(is, IMG_CUSTOMWATERMOVING);
patchTextureWithImage(IMG_CUSTOMWATERMOVING, BLOCKINDEX_MOVINGWATER);
}
ze = zf.getEntry(CUSTOMWATERSTILL_PNG);
if(ze != null) {
is = zf.getInputStream(ze);
loadImage(is, IMG_CUSTOMWATERSTILL);
patchTextureWithImage(IMG_CUSTOMWATERSTILL, BLOCKINDEX_STATIONARYWATER);
}
zf.close();
return;
} catch (IOException iox) {
if(zf != null) {
try { zf.close(); } catch (IOException io) {}
}
}
/* Try loading terrain.png from directory of name */
FileInputStream fis = null;
try {
/* Open and load terrain.png */
f = new File(texturedir, tpname + "/" + TERRAIN_PNG);
if(!f.canRead()) {
f = new File(texturedir, STANDARDTP + "/" + TERRAIN_PNG);
}
fis = new FileInputStream(f);
loadTerrainPNG(fis);
fis.close();
/* Check for misc/grasscolor.png */
f = new File(texturedir, tpname + "/" + GRASSCOLOR_PNG);
if(!f.canRead()) {
f = new File(texturedir, STANDARDTP + "/" + GRASSCOLOR_PNG);
}
fis = new FileInputStream(f);
loadBiomeShadingImage(fis, IMG_GRASSCOLOR);
fis.close();
/* Check for misc/foliagecolor.png */
f = new File(texturedir, tpname + "/" + FOLIAGECOLOR_PNG);
if(!f.canRead()) {
f = new File(texturedir, STANDARDTP + "/" + FOLIAGECOLOR_PNG);
}
fis = new FileInputStream(f);
loadBiomeShadingImage(fis, IMG_FOLIAGECOLOR);
fis.close();
/* Check for misc/water.png */
f = new File(texturedir, tpname + "/" + WATER_PNG);
if(!f.canRead()) {
f = new File(texturedir, STANDARDTP + "/" + WATER_PNG);
}
fis = new FileInputStream(f);
loadImage(fis, IMG_WATER);
patchTextureWithImage(IMG_WATER, BLOCKINDEX_STATIONARYWATER);
patchTextureWithImage(IMG_WATER, BLOCKINDEX_MOVINGWATER);
fis.close();
/* Optional files - process if they exist */
f = new File(texturedir, tpname + "/" + CUSTOMLAVAFLOWING_PNG);
if(f.canRead()) {
fis = new FileInputStream(f);
loadImage(fis, IMG_CUSTOMLAVAMOVING);
patchTextureWithImage(IMG_CUSTOMLAVAMOVING, BLOCKINDEX_MOVINGLAVA);
fis.close();
}
f = new File(texturedir, tpname + "/" + CUSTOMLAVASTILL_PNG);
if(f.canRead()) {
fis = new FileInputStream(f);
loadImage(fis, IMG_CUSTOMLAVASTILL);
patchTextureWithImage(IMG_CUSTOMLAVASTILL, BLOCKINDEX_STATIONARYLAVA);
fis.close();
}
            f = new File(texturedir, tpname + "/" + CUSTOMWATERFLOWING_PNG);
if(f.canRead()) {
fis = new FileInputStream(f);
loadImage(fis, IMG_CUSTOMWATERMOVING);
patchTextureWithImage(IMG_CUSTOMWATERMOVING, BLOCKINDEX_MOVINGWATER);
fis.close();
}
            f = new File(texturedir, tpname + "/" + CUSTOMWATERSTILL_PNG);
if(f.canRead()) {
fis = new FileInputStream(f);
loadImage(fis, IMG_CUSTOMWATERSTILL);
patchTextureWithImage(IMG_CUSTOMWATERSTILL, BLOCKINDEX_STATIONARYWATER);
fis.close();
}
} catch (IOException iox) {
if(fis != null) {
try { fis.close(); } catch (IOException io) {}
}
Log.info("Cannot process " + f.getPath() + " - " + iox);
throw new FileNotFoundException();
}
}
/* Copy texture pack */
private TexturePack(TexturePack tp) {
this.terrain_argb = new int[tp.terrain_argb.length][];
System.arraycopy(tp.terrain_argb, 0, this.terrain_argb, 0, this.terrain_argb.length);
this.terrain_width = tp.terrain_width;
this.terrain_height = tp.terrain_height;
this.native_scale = tp.native_scale;
this.imgs = tp.imgs;
}
/* Load terrain.png */
private void loadTerrainPNG(InputStream is) throws IOException {
int i;
/* Load image */
BufferedImage img = ImageIO.read(is);
if(img == null) { throw new FileNotFoundException(); }
terrain_width = img.getWidth();
terrain_height = img.getHeight();
native_scale = terrain_width / 16;
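        /* terrain.png is a 16x16 grid of square tiles, so for example a
         * 256x256 image yields native_scale = 16 (16x16 pixels per tile). */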
terrain_argb = new int[BLOCKTABLELEN][];
for(i = 0; i < 256; i++) {
terrain_argb[i] = new int[native_scale*native_scale];
img.getRGB((i & 0xF)*native_scale, (i>>4)*native_scale, native_scale, native_scale, terrain_argb[i], 0, native_scale);
}
int[] blank = new int[native_scale*native_scale];
for(i = 256; i < BLOCKTABLELEN; i++) {
terrain_argb[i] = blank;
}
/* Fallbacks */
terrain_argb[BLOCKINDEX_STATIONARYLAVA] = terrain_argb[255];
terrain_argb[BLOCKINDEX_MOVINGLAVA] = terrain_argb[255];
/* Now, build redstone textures with active wire color (since we're not messing with that) */
Color tc = new Color();
for(i = 0; i < native_scale*native_scale; i++) {
if(terrain_argb[BLOCKINDEX_REDSTONE_NSEW_TONE][i] != 0) {
/* Overlay NSEW redstone texture with toned wire color */
tc.setARGB(terrain_argb[BLOCKINDEX_REDSTONE_NSEW_TONE][i]);
tc.blendColor(0xFFC00000); /* Blend in red */
terrain_argb[BLOCKINDEX_REDSTONE_NSEW][i] = tc.getARGB();
}
if(terrain_argb[BLOCKINDEX_REDSTONE_EW_TONE][i] != 0) {
                /* Overlay EW redstone texture with toned wire color */
tc.setARGB(terrain_argb[BLOCKINDEX_REDSTONE_EW_TONE][i]);
tc.blendColor(0xFFC00000); /* Blend in red */
terrain_argb[BLOCKINDEX_REDSTONE_EW][i] = tc.getARGB();
}
}
img.flush();
}
/* Load image into image array */
private void loadImage(InputStream is, int idx) throws IOException {
/* Load image */
BufferedImage img = ImageIO.read(is);
if(img == null) { throw new FileNotFoundException(); }
imgs[idx] = new LoadedImage();
imgs[idx].width = img.getWidth();
imgs[idx].height = img.getHeight();
imgs[idx].argb = new int[imgs[idx].width * imgs[idx].height];
img.getRGB(0, 0, imgs[idx].width, imgs[idx].height, imgs[idx].argb, 0, imgs[idx].width);
img.flush();
}
/* Load biome shading image into image array */
private void loadBiomeShadingImage(InputStream is, int idx) throws IOException {
loadImage(is, idx); /* Get image */
LoadedImage li = imgs[idx];
/* Get trivial color for biome-shading image */
int clr = li.argb[li.height*li.width*3/4 + li.width/2];
boolean same = true;
for(int j = 0; same && (j < li.height); j++) {
for(int i = 0; same && (i <= j); i++) {
if(li.argb[li.width*j+i] != clr)
same = false;
}
}
/* All the same - no biome lookup needed */
if(same) {
imgs[idx].argb = null;
li.trivial_color = clr;
}
else { /* Else, calculate color average for lower left quadrant */
int[] clr_scale = new int[4];
scaleTerrainPNGSubImage(li.width, 2, li.argb, clr_scale);
li.trivial_color = clr_scale[2];
}
}
/* Patch image into texture table */
private void patchTextureWithImage(int image_idx, int block_idx) {
/* Now, patch in to block table */
int new_argb[] = new int[native_scale*native_scale];
scaleTerrainPNGSubImage(imgs[image_idx].width, native_scale, imgs[image_idx].argb, new_argb);
terrain_argb[block_idx] = new_argb;
}
/* Get texture pack directory */
private static File getTexturePackDirectory() {
return new File(DynmapPlugin.dataDirectory, "texturepacks");
}
/**
* Resample terrain pack for given scale, and return copy using that scale
*/
public TexturePack resampleTexturePack(int scale) {
if(scaled_textures == null) scaled_textures = new HashMap<Integer, TexturePack>();
TexturePack stp = scaled_textures.get(scale);
if(stp != null)
return stp;
stp = new TexturePack(this); /* Make copy */
/* Scale terrain.png, if needed */
if(stp.native_scale != scale) {
stp.native_scale = scale;
stp.terrain_height = 16*scale;
stp.terrain_width = 16*scale;
scaleTerrainPNG(stp);
}
/* Remember it */
scaled_textures.put(scale, stp);
return stp;
}
/**
* Scale out terrain_argb into the terrain_argb of the provided destination, matching the scale of that destination
* @param tp
*/
private void scaleTerrainPNG(TexturePack tp) {
tp.terrain_argb = new int[terrain_argb.length][];
/* Terrain.png is 16x16 array of images : process one at a time */
for(int idx = 0; idx < terrain_argb.length; idx++) {
tp.terrain_argb[idx] = new int[tp.native_scale*tp.native_scale];
scaleTerrainPNGSubImage(native_scale, tp.native_scale, terrain_argb[idx], tp.terrain_argb[idx]);
}
}
private static void scaleTerrainPNGSubImage(int srcscale, int destscale, int[] src_argb, int[] dest_argb) {
int nativeres = srcscale;
int res = destscale;
Color c = new Color();
/* Same size, so just copy */
if(res == nativeres) {
System.arraycopy(src_argb, 0, dest_argb, 0, dest_argb.length);
}
/* If we're scaling larger source pixels into smaller pixels, each destination pixel
* receives input from 1 or 2 source pixels on each axis
*/
else if(res > nativeres) {
int weights[] = new int[res];
int offsets[] = new int[res];
/* LCM of resolutions is used as length of line (res * nativeres)
* Each native block is (res) long, each scaled block is (nativeres) long
* Each scaled block overlaps 1 or 2 native blocks: starting with native block 'offsets[]' with
* 'weights[]' of its (res) width in the first, and the rest in the second
*/
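            /* Worked example (illustrative): scaling nativeres=2 up to res=3
             * yields offsets={0,0,1} and weights={2,1,2}; destination pixel 1
             * therefore blends source pixels 0 and 1 with weight 1 each.
             */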
for(int v = 0, idx = 0; v < res*nativeres; v += nativeres, idx++) {
offsets[idx] = (v/res); /* Get index of the first native block we draw from */
if((v+nativeres-1)/res == offsets[idx]) { /* If scaled block ends in same native block */
weights[idx] = nativeres;
}
else { /* Else, see how much is in first one */
weights[idx] = (offsets[idx]*res + res) - v;
}
}
/* Now, use weights and indices to fill in scaled map */
for(int y = 0, off = 0; y < res; y++) {
int ind_y = offsets[y];
int wgt_y = weights[y];
for(int x = 0; x < res; x++, off++) {
int ind_x = offsets[x];
int wgt_x = weights[x];
int accum_red = 0;
int accum_green = 0;
int accum_blue = 0;
int accum_alpha = 0;
for(int xx = 0; xx < 2; xx++) {
int wx = (xx==0)?wgt_x:(nativeres-wgt_x);
if(wx == 0) continue;
for(int yy = 0; yy < 2; yy++) {
int wy = (yy==0)?wgt_y:(nativeres-wgt_y);
if(wy == 0) continue;
/* Accumulate */
c.setARGB(src_argb[(ind_y+yy)*nativeres + ind_x + xx]);
accum_red += c.getRed() * wx * wy;
accum_green += c.getGreen() * wx * wy;
accum_blue += c.getBlue() * wx * wy;
accum_alpha += c.getAlpha() * wx * wy;
}
}
                    /* Generate weighted components into color */
c.setRGBA(accum_red / (nativeres*nativeres), accum_green / (nativeres*nativeres),
accum_blue / (nativeres*nativeres), accum_alpha / (nativeres*nativeres));
dest_argb[(y*res) + x] = c.getARGB();
}
}
}
else { /* nativeres > res */
int weights[] = new int[nativeres];
int offsets[] = new int[nativeres];
/* LCM of resolutions is used as length of line (res * nativeres)
* Each native block is (res) long, each scaled block is (nativeres) long
* Each native block overlaps 1 or 2 scaled blocks: starting with scaled block 'offsets[]' with
* 'weights[]' of its (res) width in the first, and the rest in the second
*/
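            /* Worked example (illustrative): scaling nativeres=3 down to res=2
             * yields offsets={0,0,1} and weights={2,1,2}; source pixel 1 splits
             * its contribution evenly between destination pixels 0 and 1.
             */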
for(int v = 0, idx = 0; v < res*nativeres; v += res, idx++) {
offsets[idx] = (v/nativeres); /* Get index of the first scaled block we draw to */
if((v+res-1)/nativeres == offsets[idx]) { /* If native block ends in same scaled block */
weights[idx] = res;
}
else { /* Else, see how much is in first one */
weights[idx] = (offsets[idx]*nativeres + nativeres) - v;
}
}
int accum_red[] = new int[res*res];
int accum_green[] = new int[res*res];
int accum_blue[] = new int[res*res];
int accum_alpha[] = new int[res*res];
/* Now, use weights and indices to fill in scaled map */
for(int y = 0; y < nativeres; y++) {
int ind_y = offsets[y];
int wgt_y = weights[y];
for(int x = 0; x < nativeres; x++) {
int ind_x = offsets[x];
int wgt_x = weights[x];
c.setARGB(src_argb[(y*nativeres) + x]);
for(int xx = 0; xx < 2; xx++) {
int wx = (xx==0)?wgt_x:(res-wgt_x);
if(wx == 0) continue;
for(int yy = 0; yy < 2; yy++) {
int wy = (yy==0)?wgt_y:(res-wgt_y);
if(wy == 0) continue;
accum_red[(ind_y+yy)*res + (ind_x+xx)] += c.getRed() * wx * wy;
accum_green[(ind_y+yy)*res + (ind_x+xx)] += c.getGreen() * wx * wy;
accum_blue[(ind_y+yy)*res + (ind_x+xx)] += c.getBlue() * wx * wy;
accum_alpha[(ind_y+yy)*res + (ind_x+xx)] += c.getAlpha() * wx * wy;
}
}
}
}
/* Produce normalized scaled values */
for(int y = 0; y < res; y++) {
for(int x = 0; x < res; x++) {
int off = (y*res) + x;
c.setRGBA(accum_red[off]/(nativeres*nativeres), accum_green[off]/(nativeres*nativeres),
accum_blue[off]/(nativeres*nativeres), accum_alpha[off]/(nativeres*nativeres));
dest_argb[y*res + x] = c.getARGB();
}
}
}
}
public void saveTerrainPNG(File f) throws IOException {
int[] outbuf = new int[256*native_scale*native_scale];
for(int i = 0; i < 256; i++) {
for(int y = 0; y < native_scale; y++) {
System.arraycopy(terrain_argb[i],native_scale*y,outbuf,((i>>4)*native_scale+y)*terrain_width + (i & 0xF)*native_scale, native_scale);
}
}
BufferedImage img = DynmapBufferedImage.createBufferedImage(outbuf, terrain_width, terrain_height);
ImageIO.write(img, "png", f);
}
/**
* Load texture pack mappings
*/
public static void loadTextureMapping(File datadir) {
/* Start clean with texture packs - need to be loaded after mapping */
packs.clear();
/* Initialize map with blank map for all entries */
HDTextureMap.initializeTable();
/* Load block models */
InputStream in = TexturePack.class.getResourceAsStream("/texture.txt");
if(in != null) {
loadTextureFile(in, "texture.txt");
if(in != null) { try { in.close(); } catch (IOException x) {} in = null; }
}
else
Log.severe("Error loading texture.txt");
File custom = new File(datadir, "renderdata/custom-texture.txt");
if(custom.canRead()) {
try {
in = new FileInputStream(custom);
loadTextureFile(in, custom.getPath());
} catch (IOException iox) {
Log.severe("Error loading renderdata/custom-texture.txt - " + iox);
} finally {
if(in != null) { try { in.close(); } catch (IOException x) {} in = null; }
}
}
else {
try {
FileWriter fw = new FileWriter(custom);
fw.write("# The user is free to add new and custom texture mappings here - Dynmap's install will not overwrite it\n");
fw.close();
} catch (IOException iox) {
}
}
}
/**
* Load texture pack mappings from texture.txt file
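     *
     * An illustrative mapping line (keys taken from the parser below; the
     * numeric values are placeholders, not from a real texture.txt):
     *   block:id=1,data=*,allsides=2,topbottom=3,transparency=OPAQUE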
*/
private static void loadTextureFile(InputStream txtfile, String txtname) {
LineNumberReader rdr = null;
int cnt = 0;
try {
String line;
rdr = new LineNumberReader(new InputStreamReader(txtfile));
while((line = rdr.readLine()) != null) {
if(line.startsWith("block:")) {
ArrayList<Integer> blkids = new ArrayList<Integer>();
int databits = -1;
int faces[] = new int[] { -1, -1, -1, -1, -1, -1 };
line = line.substring(6);
BlockTransparency trans = BlockTransparency.OPAQUE;
String[] args = line.split(",");
for(String a : args) {
String[] av = a.split("=");
if(av.length < 2) continue;
if(av[0].equals("id")) {
blkids.add(Integer.parseInt(av[1]));
}
else if(av[0].equals("data")) {
if(databits < 0) databits = 0;
if(av[1].equals("*"))
databits = 0xFFFF;
else
databits |= (1 << Integer.parseInt(av[1]));
}
else if(av[0].equals("top") || av[0].equals("y-")) {
faces[BlockStep.Y_MINUS.ordinal()] = Integer.parseInt(av[1]);
}
else if(av[0].equals("bottom") || av[0].equals("y+")) {
faces[BlockStep.Y_PLUS.ordinal()] = Integer.parseInt(av[1]);
}
else if(av[0].equals("north") || av[0].equals("x+")) {
faces[BlockStep.X_PLUS.ordinal()] = Integer.parseInt(av[1]);
}
else if(av[0].equals("south") || av[0].equals("x-")) {
faces[BlockStep.X_MINUS.ordinal()] = Integer.parseInt(av[1]);
}
else if(av[0].equals("west") || av[0].equals("z-")) {
faces[BlockStep.Z_MINUS.ordinal()] = Integer.parseInt(av[1]);
}
else if(av[0].equals("east") || av[0].equals("z+")) {
faces[BlockStep.Z_PLUS.ordinal()] = Integer.parseInt(av[1]);
}
else if(av[0].equals("allfaces")) {
int id = Integer.parseInt(av[1]);
for(int i = 0; i < 6; i++) {
faces[i] = id;
}
}
else if(av[0].equals("allsides")) {
short id = Short.parseShort(av[1]);
faces[BlockStep.X_PLUS.ordinal()] = id;
faces[BlockStep.X_MINUS.ordinal()] = id;
faces[BlockStep.Z_PLUS.ordinal()] = id;
faces[BlockStep.Z_MINUS.ordinal()] = id;
}
else if(av[0].equals("topbottom")) {
faces[BlockStep.Y_MINUS.ordinal()] =
faces[BlockStep.Y_PLUS.ordinal()] = Integer.parseInt(av[1]);
}
                        else if(av[0].equals("transparency")) {
                            /* Enum.valueOf() never returns null - it throws on bad input */
                            try {
                                trans = BlockTransparency.valueOf(av[1]);
                            } catch (IllegalArgumentException badval) {
                                trans = BlockTransparency.OPAQUE;
                                Log.severe("Texture mapping has invalid transparency setting - " + av[1] + " - line " + rdr.getLineNumber() + " of " + txtname);
                            }
                        }
}
/* If no data bits, assume all */
if(databits < 0) databits = 0xFFFF;
/* If we have everything, build block */
if(blkids.size() > 0) {
HDTextureMap map = new HDTextureMap(blkids, databits, faces, trans);
map.addToTable();
cnt++;
}
else {
                        Log.severe("Texture mapping missing required parameters - line " + rdr.getLineNumber() + " of " + txtname);
}
}
else if(line.startsWith("#") || line.startsWith(";")) {
}
}
Log.verboseinfo("Loaded " + cnt + " texture mappings from " + txtname);
} catch (IOException iox) {
Log.severe("Error reading " + txtname + " - " + iox.toString());
} catch (NumberFormatException nfx) {
Log.severe("Format error - line " + rdr.getLineNumber() + " of " + txtname);
} finally {
if(rdr != null) {
try {
rdr.close();
rdr = null;
} catch (IOException e) {
}
}
}
}
/**
* Read color for given subblock coordinate, with given block id and data and face
*/
public final void readColor(final HDPerspectiveState ps, final MapIterator mapiter, final Color rslt, final int blkid, final int lastblocktype, final boolean biome_shaded) {
int blkdata = ps.getBlockData();
HDTextureMap map = HDTextureMap.getMap(blkid, blkdata);
BlockStep laststep = ps.getLastBlockStep();
int textid = map.faces[laststep.ordinal()]; /* Get index of texture source */
if(textid < 0) {
rslt.setTransparent();
return;
}
else if(textid < 1000) { /* If simple mapping */
int[] texture = terrain_argb[textid];
int[] xyz = ps.getSubblockCoord();
/* Get texture coordinates (U=horizontal(left=0),V=vertical(top=0)) */
int u = 0, v = 0;
switch(laststep) {
case X_MINUS: /* South face: U = East (Z-), V = Down (Y-) */
u = native_scale-xyz[2]-1; v = native_scale-xyz[1]-1;
break;
case X_PLUS: /* North face: U = West (Z+), V = Down (Y-) */
u = xyz[2]; v = native_scale-xyz[1]-1;
break;
case Z_MINUS: /* West face: U = South (X+), V = Down (Y-) */
u = xyz[0]; v = native_scale-xyz[1]-1;
break;
case Z_PLUS: /* East face: U = North (X-), V = Down (Y-) */
u = native_scale-xyz[0]-1; v = native_scale-xyz[1]-1;
break;
case Y_MINUS: /* U = East(Z-), V = South(X+) */
case Y_PLUS:
u = native_scale-xyz[2]-1; v = xyz[0];
break;
}
/* Read color from texture */
rslt.setARGB(texture[v*native_scale + u]);
return;
}
/* See if not basic block texture */
int textop = textid / 1000;
textid = textid % 1000;
/* If clear-inside op, get out early */
if(textop == COLORMOD_CLEARINSIDE) {
/* Check if previous block is same block type as we are: surface is transparent if it is */
if(blkid == lastblocktype) {
rslt.setTransparent();
return;
}
}
int[] texture = terrain_argb[textid];
int[] xyz = ps.getSubblockCoord();
/* Get texture coordinates (U=horizontal(left=0),V=vertical(top=0)) */
int u = 0, v = 0, tmp;
switch(laststep) {
case X_MINUS: /* South face: U = East (Z-), V = Down (Y-) */
u = native_scale-xyz[2]-1; v = native_scale-xyz[1]-1;
break;
case X_PLUS: /* North face: U = West (Z+), V = Down (Y-) */
u = xyz[2]; v = native_scale-xyz[1]-1;
break;
case Z_MINUS: /* West face: U = South (X+), V = Down (Y-) */
u = xyz[0]; v = native_scale-xyz[1]-1;
break;
case Z_PLUS: /* East face: U = North (X-), V = Down (Y-) */
u = native_scale-xyz[0]-1; v = native_scale-xyz[1]-1;
break;
case Y_MINUS: /* U = East(Z-), V = South(X+) */
case Y_PLUS:
u = native_scale-xyz[2]-1; v = xyz[0];
break;
}
        /* Handle U-V transforms before fetching color */
switch(textop) {
case COLORMOD_NONE:
case COLORMOD_GRASSTONED:
case COLORMOD_FOLIAGETONED:
case COLORMOD_WATERTONED:
break;
case COLORMOD_ROT90:
tmp = u; u = native_scale - v - 1; v = tmp;
break;
case COLORMOD_ROT180:
u = native_scale - u - 1; v = native_scale - v - 1;
break;
case COLORMOD_ROT270:
tmp = u; u = v; v = native_scale - tmp - 1;
break;
case COLORMOD_FLIPHORIZ:
u = native_scale - u - 1;
break;
case COLORMOD_SHIFTDOWNHALF:
if(v < native_scale/2) {
rslt.setTransparent();
return;
}
v -= native_scale/2;
break;
case COLORMOD_SHIFTDOWNHALFANDFLIPHORIZ:
if(v < native_scale/2) {
rslt.setTransparent();
return;
}
v -= native_scale/2;
u = native_scale - u - 1;
break;
case COLORMOD_INCLINEDTORCH:
if(v >= (3*native_scale/4)) {
rslt.setTransparent();
return;
}
v += native_scale/4;
if(u < native_scale/2) u = native_scale/2-1;
if(u > native_scale/2) u = native_scale/2;
break;
case COLORMOD_GRASSSIDE:
/* Check if snow above block */
if(mapiter.getBlockTypeIDAt(BlockStep.Y_PLUS) == 78) {
texture = terrain_argb[68]; /* Snow block */
textid = 68;
}
else { /* Else, check the grass color overlay */
int ovclr = terrain_argb[38][v*native_scale+u];
if((ovclr & 0xFF000000) != 0) { /* Hit? */
texture = terrain_argb[38]; /* Use it */
textop = COLORMOD_GRASSTONED; /* Force grass toning */
}
}
break;
case COLORMOD_CLEARINSIDE:
break;
}
/* Read color from texture */
rslt.setARGB(texture[v*native_scale + u]);
LoadedImage li;
/* Switch based on texture modifier */
switch(textop) {
case COLORMOD_GRASSTONED:
li = imgs[IMG_GRASSCOLOR];
if((li.argb == null) || (!biome_shaded)) {
rslt.blendColor(li.trivial_color);
}
else {
rslt.blendColor(biomeLookup(li.argb, li.width, mapiter.getRawBiomeRainfall(), mapiter.getRawBiomeTemperature()));
}
break;
case COLORMOD_FOLIAGETONED:
li = imgs[IMG_FOLIAGECOLOR];
if((li.argb == null) || (!biome_shaded)) {
rslt.blendColor(li.trivial_color);
}
else {
rslt.blendColor(biomeLookup(li.argb, li.width, mapiter.getRawBiomeRainfall(), mapiter.getRawBiomeTemperature()));
}
break;
}
}
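    /* Biome colormap lookup: the column index is (1-temp)*(width-1) and the
     * row index is (1-temp*rainfall)*(width-1), both clamped to the image.
     * For example (illustrative), with a 256-pixel-wide colormap, temp=0.5 and
     * rainfall=1.0 both truncate to 127, selecting argb[256*127 + 127]. */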
private static final int biomeLookup(int[] argb, int width, double rainfall, double temp) {
int w = width-1;
int t = (int)((1.0-temp)*w);
int h = (int)((1.0 - (temp*rainfall))*w);
if(h > w) h = w;
if(t > w) t = w;
return argb[width*h + t];
}
}
|
package org.icij.extract.cli;
import org.icij.extract.core.*;
import java.util.Locale;
import java.util.logging.Logger;
/**
* Extract
*
* @author Matthew Caruana Galizia <mcaruana@icij.org>
* @version 1.0.0-beta
* @since 1.0.0-beta
*/
public enum Command {
QUEUE(QueueCli.class),
WIPE_QUEUE(WipeQueueCli.class),
WIPE_REPORT(WipeReportCli.class),
DUMP_QUEUE(DumpQueueCli.class),
DUMP_REPORT(DumpReportCli.class),
LOAD_QUEUE(LoadQueueCli.class),
SPEW(SpewCli.class),
SOLR_COMMIT(SolrCommitCli.class),
SOLR_ROLLBACK(SolrRollbackCli.class),
SOLR_DELETE(SolrDeleteCli.class);
private final Class<?> klass;
private Command(Class<?> klass) {
this.klass = klass;
}
public String toString() {
return name().toLowerCase(Locale.ROOT).replace('_', '-');
}
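    // Round-trip example: SOLR_COMMIT.toString() yields "solr-commit", and
    // parse("solr-commit") / fromString("solr-commit") return SOLR_COMMIT.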
public Cli createCli(Logger logger) {
try {
return (Cli) klass.getDeclaredConstructor(Logger.class).newInstance(logger);
} catch (Throwable e) {
throw new IllegalArgumentException("Unexpected exception while constructing CLI.", e);
}
}
public static Command parse(String command) throws IllegalArgumentException {
try {
return fromString(command);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(String.format("\"%s\" is not a valid command.", command));
}
}
public static Command fromString(String command) {
return valueOf(command.toUpperCase(Locale.ROOT).replace('-', '_'));
}
}
|
package org.jtrfp.trcl.core;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class Features {
private static FeaturesImpl impl = new FeaturesImpl();
private static Features singleton;
public Features(){
        this(Collections.<FeatureFactory>emptyList());
}
@Autowired(required=true)
public Features(Collection<FeatureFactory> features){
for(FeatureFactory ff:features)
registerFeature(ff);
setSingleton(this);
}
public synchronized static Features getSingleton(){
if(singleton == null)
throw new IllegalStateException("Cannot call getSingleton() before Features constructor is called by Spring.");
return singleton;
}
public synchronized static void resetForTesting(){
impl = new FeaturesImpl();
setSingleton(new Features());
}
public synchronized static void registerFeature(FeatureFactory<?> factory){
impl.registerFeature(factory);
}
public synchronized static void init(Object obj){
impl.init(obj);
}//end init(...)
public synchronized static void destruct(Object obj){
impl.destruct(obj);
}//end destruct()
public synchronized static <T> T get(Object target, Class<T> featureClass){
return impl.get(target, featureClass);
}//end get(...)
public synchronized static <T> T getByPath(Object target, Class<T>lastClassInPath, Class<?> ... featurePathNotIncludingLastClass){
if(target == null)
target = getSingleton();
return impl.getByPath(target, lastClassInPath, featurePathNotIncludingLastClass);
}//end getByPath(...)
public synchronized static void getAllFeaturesOf(Object target, Set dest){
impl.getAllFeaturesOf(target,dest);
}
public static void setSingleton(Features singleton) {
Features.singleton = singleton;
}
}//end Features
|
package org.jtrfp.trcl.obj;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.math3.exception.MathArithmeticException;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.jfdt.UnrecognizedFormatException;
import org.jtrfp.jtrfp.FileLoadException;
import org.jtrfp.trcl.beh.AdjustAltitudeToPlayerBehavior;
import org.jtrfp.trcl.beh.AutoFiring;
import org.jtrfp.trcl.beh.AutoLeveling;
import org.jtrfp.trcl.beh.AutoLeveling.LevelingAxis;
import org.jtrfp.trcl.beh.Behavior;
import org.jtrfp.trcl.beh.Bobbing;
import org.jtrfp.trcl.beh.BuzzByPlayerSFX;
import org.jtrfp.trcl.beh.CollidesWithPlayer;
import org.jtrfp.trcl.beh.CollidesWithTerrain;
import org.jtrfp.trcl.beh.CustomDeathBehavior;
import org.jtrfp.trcl.beh.CustomNAVTargetableBehavior;
import org.jtrfp.trcl.beh.CustomPlayerWithinRangeBehavior;
import org.jtrfp.trcl.beh.DamageTrigger;
import org.jtrfp.trcl.beh.DamageableBehavior;
import org.jtrfp.trcl.beh.DamageableBehavior.SupplyNotNeededException;
import org.jtrfp.trcl.beh.DamagedByCollisionWithDEFObject;
import org.jtrfp.trcl.beh.DamagedByCollisionWithPlayer;
import org.jtrfp.trcl.beh.DamagedByCollisionWithSurface;
import org.jtrfp.trcl.beh.DeathBehavior;
import org.jtrfp.trcl.beh.DebrisOnDeathBehavior;
import org.jtrfp.trcl.beh.ExplodesOnDeath;
import org.jtrfp.trcl.beh.FireOnFrame;
import org.jtrfp.trcl.beh.HorizAimAtPlayerBehavior;
import org.jtrfp.trcl.beh.LeavesPowerupOnDeathBehavior;
import org.jtrfp.trcl.beh.LoopingPositionBehavior;
import org.jtrfp.trcl.beh.PositionLimit;
import org.jtrfp.trcl.beh.ProjectileFiringBehavior;
import org.jtrfp.trcl.beh.RandomSFXPlayback;
import org.jtrfp.trcl.beh.ResetsRandomlyAfterDeath;
import org.jtrfp.trcl.beh.SmartPlaneBehavior;
import org.jtrfp.trcl.beh.SpawnsRandomSmoke;
import org.jtrfp.trcl.beh.SpinAccellerationBehavior;
import org.jtrfp.trcl.beh.SpinAccellerationBehavior.SpinMode;
import org.jtrfp.trcl.beh.SteadilyRotating;
import org.jtrfp.trcl.beh.TerrainLocked;
import org.jtrfp.trcl.beh.TunnelRailed;
import org.jtrfp.trcl.beh.phy.AccelleratedByPropulsion;
import org.jtrfp.trcl.beh.phy.HasPropulsion;
import org.jtrfp.trcl.beh.phy.MovesByVelocity;
import org.jtrfp.trcl.beh.phy.PulledDownByGravityBehavior;
import org.jtrfp.trcl.beh.phy.RotationalDragBehavior;
import org.jtrfp.trcl.beh.phy.RotationalMomentumBehavior;
import org.jtrfp.trcl.beh.phy.VelocityDragBehavior;
import org.jtrfp.trcl.core.Features;
import org.jtrfp.trcl.core.ResourceManager;
import org.jtrfp.trcl.core.TRFactory;
import org.jtrfp.trcl.core.TRFactory.TR;
import org.jtrfp.trcl.ext.tr.GPUFactory.GPUFeature;
import org.jtrfp.trcl.ext.tr.SoundSystemFactory.SoundSystemFeature;
import org.jtrfp.trcl.file.BINFile.AnimationControl;
import org.jtrfp.trcl.file.DEFFile.EnemyDefinition;
import org.jtrfp.trcl.file.DEFFile.EnemyDefinition.EnemyLogic;
import org.jtrfp.trcl.file.DEFFile.EnemyPlacement;
import org.jtrfp.trcl.game.TVF3Game;
import org.jtrfp.trcl.game.TVF3Game.Difficulty;
import org.jtrfp.trcl.gpu.BINFileExtractor;
import org.jtrfp.trcl.gpu.BasicModelSource;
import org.jtrfp.trcl.gpu.BufferedModelTarget;
import org.jtrfp.trcl.gpu.InterpolatedAnimatedModelSource;
import org.jtrfp.trcl.gpu.Model;
import org.jtrfp.trcl.gpu.RotatedModelSource;
import org.jtrfp.trcl.miss.Mission;
import org.jtrfp.trcl.obj.Explosion.ExplosionType;
import org.jtrfp.trcl.shell.GameShellFactory.GameShell;
import org.jtrfp.trcl.snd.SoundEvent;
import org.jtrfp.trcl.snd.SoundSystem;
import org.jtrfp.trcl.snd.SoundTexture;
public class DEFObject extends WorldObject {
//PROPERTIES
public static final String ENEMY_DEFINITION = "enemyDefinition",
POSITION = "position",
HEADING = "heading",
TOP = "top";
private Double boundingHeight, boundingWidth;
private HitBox [] hitBoxes;
//private WorldObject ruinObject;
private ArrayList<WorldObject> subObjects = null;
private final EnemyLogic logic;
private final EnemyDefinition enemyDefinition;
private boolean mobile,canTurn,foliage,boss,
shieldGen,isRuin,spinCrash,ignoringProjectiles;
private Anchoring anchoring;
private RotatedModelSource rotatedModelSource;
public static final String [] BIG_EXP_SOUNDS = new String[]{"EXP3.WAV","EXP4.WAV","EXP5.WAV"};
public static final String [] MED_EXP_SOUNDS = new String[]{"EXP1.WAV","EXP2.WAV"};
private final ArrayList<Object> hardReferences = new ArrayList<Object>();
private GameShell gameShell;
////INTROSPECTOR
static {
try{
final Set<String> persistentProperties = new HashSet<String>();
persistentProperties.addAll(Arrays.asList(
ENEMY_DEFINITION,
POSITION,
HEADING,
TOP
));
BeanInfo info = Introspector.getBeanInfo(DEFObject.class);
PropertyDescriptor[] propertyDescriptors =
info.getPropertyDescriptors();
for (int i = 0; i < propertyDescriptors.length; ++i) {
PropertyDescriptor pd = propertyDescriptors[i];
if (!persistentProperties.contains(pd.getName())) {
pd.setValue("transient", Boolean.TRUE);
}
}
}catch(Exception e){e.printStackTrace();}
}//end static{}
public DEFObject(EnemyDefinition def, EnemyPlacement pl) throws FileLoadException, IllegalAccessException, IOException{
super();
this.enemyDefinition=def;
final TR tr = getTr();
if(def==null){
logic = null;
return;
}
anchoring=Anchoring.floating;
logic =def.getLogic();
mobile =true;
canTurn=true;
foliage=false;
boss =def.isObjectIsBoss();
final int numHitBoxes = def.getNumNewHBoxes();
final int [] rawHBoxData = def.getHboxVertices();
if(numHitBoxes!=0){
final HitBox [] boxes = new HitBox[numHitBoxes];
for(int i=0; i<numHitBoxes; i++){
final HitBox hb = new HitBox();
hb.setVertexID(rawHBoxData[i*2]);
hb.setSize (rawHBoxData[i*2+1] / TRFactory.crossPlatformScalar);
boxes[i]=hb;
}//end for(boxes)
setHitBoxes(boxes);
}//end if(hitboxes)
//Default Direction
setDirection(new ObjectDirection(pl.getRoll(),pl.getPitch(),pl.getYaw()+65536));
boolean customExplosion=false;
this.setModelOffset(
TRFactory.legacy2Modern(def.getPivotX()),
TRFactory.legacy2Modern(def.getPivotY()),
TRFactory.legacy2Modern(def.getPivotZ()));
if(logic == null)
return;
switch(logic){
case groundDumb:
mobile=false;
canTurn=false;
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case groundTargeting://Ground turrets
{mobile=false;
canTurn=true;
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
ProjectileFiringBehavior pfb;
Integer [] firingVertices = Arrays.copyOf(def.getFiringVertices(),def.getNumRandomFiringVertices());
addBehavior(pfb=new ProjectileFiringBehavior().
setProjectileFactory(tr.getResourceManager().
getProjectileFactories()[def.getWeapon().ordinal()]).
setFiringPositions(getModelSource(),firingVertices));
try{pfb.addSupply(9999999);}catch(SupplyNotNeededException e){}
addBehavior(new AutoFiring().
setProjectileFiringBehavior(pfb).
setPatternOffsetMillis((int)(Math.random()*2000)).
setMaxFiringDistance(TRFactory.mapSquareSize*3).
setSmartFiring(false).
setMaxFireVectorDeviation(.7).
setTimePerPatternEntry((int)(getFiringRateScalar()*(250*getFiringRateScalar()))));
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;}
case flyingDumb:
canTurn=false;
defaultModelAssignment();
break;
case groundTargetingDumb:
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case flyingSmart:
smartPlaneBehavior(tr,def,false);
defaultModelAssignment();
break;
case bankSpinDrill:
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
unhandled(def);
defaultModelAssignment();
break;
case sphereBoss:
projectileFiringBehavior();
mobile=true;
defaultModelAssignment();
break;
case flyingAttackRetreatSmart:
smartPlaneBehavior(tr,def,false);
//addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
defaultModelAssignment();
break;
case splitShipSmart://TODO
smartPlaneBehavior(tr,def,false);
//addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
defaultModelAssignment();
break;
case groundStaticRuin://Destroyed object is replaced with another using SimpleModel i.e. weapons bunker
mobile=false;
canTurn=false;
anchoring=Anchoring.terrain;
defaultModelAssignment();
defaultRuinObject(pl);
break;
case targetHeadingSmart:
mobile=false;//Belazure's crane bots
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case targetPitchSmart:
mobile=false;
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case coreBossSmart:
mobile=false;
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case cityBossSmart:
mobile=false;
projectileFiringBehavior();
defaultModelAssignment();
defaultBossNAVTargetingResponse();
addBehavior(new SteadilyRotating().setRotationPeriodMillis(1000));
break;
case staticFiringSmart:{
if(boss)
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));//ATMOS Boss uses this!
final ProjectileFiringBehavior pfb = new ProjectileFiringBehavior();
try{pfb.addSupply(99999999);}catch(SupplyNotNeededException e){}
Integer [] firingVertices = Arrays.copyOf(def.getFiringVertices(),def.getNumRandomFiringVertices());
pfb.
setProjectileFactory(tr.getResourceManager().getProjectileFactories()[def.getWeapon().ordinal()]).
setFiringPositions(getModelSource(),firingVertices);
addBehavior(pfb);
addBehavior(new AutoFiring().
setProjectileFiringBehavior(pfb).
setPatternOffsetMillis((int)(Math.random()*2000)).
setMaxFiringDistance(TRFactory.mapSquareSize*8).
setSmartFiring(true));
if(def.isObjectIsBoss())
defaultBossNAVTargetingResponse();
mobile=false;
canTurn=true;
defaultModelAssignment();
break;}
case sittingDuck:
canTurn=false;
mobile=false;
defaultModelAssignment();
break;
case tunnelAttack:{
final ProjectileFiringBehavior pfb = new ProjectileFiringBehavior();
try{pfb.addSupply(99999999);}catch(SupplyNotNeededException e){}
Integer [] firingVertices = Arrays.copyOf(def.getFiringVertices(),def.getNumRandomFiringVertices());
pfb.setProjectileFactory(tr.getResourceManager().getProjectileFactories()[def.getWeapon().ordinal()]).
setFiringPositions(getModelSource(),firingVertices);
addBehavior(pfb);
//addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
addBehavior(new AutoFiring().
setProjectileFiringBehavior(pfb).
setPatternOffsetMillis((int)(Math.random()*2000)).
setMaxFiringDistance(TRFactory.mapSquareSize*.2).
setSmartFiring(false).
setMaxFireVectorDeviation(.3).
setTimePerPatternEntry((int)(500*getFiringRateScalar())));
/*addBehavior(new Bobbing().
setPhase(Math.random()).
setBobPeriodMillis(10*1000+Math.random()*3000).setAmplitude(2000).
setAdditionalHeight(0));*/ //Conflicts with TunnelRailed
mobile=false;
defaultModelAssignment();
break;}
case takeoffAndEscape:
addBehavior(new MovesByVelocity());
addBehavior((Behavior)(new HasPropulsion().setMinPropulsion(0).setPropulsion(def.getThrustSpeed()/1.2)));
addBehavior(new AccelleratedByPropulsion().setEnable(false));
addBehavior(new VelocityDragBehavior().setDragCoefficient(.86));
addBehavior(new CustomPlayerWithinRangeBehavior(){
@Override
public void withinRange(){
DEFObject.this.
probeForBehavior(AccelleratedByPropulsion.class).
setThrustVector(Vector3D.PLUS_J).
setEnable(true);
}
}).setRange(TRFactory.mapSquareSize*10);
addBehavior(new LoopingPositionBehavior());
addBehavior(new ExplodesOnDeath(ExplosionType.Blast,BIG_EXP_SOUNDS[(int)(Math.random()*3)]));
customExplosion=true;
canTurn=false;
mobile=false;
defaultModelAssignment();
break;
case fallingAsteroid:
anchoring=Anchoring.floating;
fallingObjectBehavior();
customExplosion=true;
addBehavior(new ExplodesOnDeath(ExplosionType.BigExplosion,MED_EXP_SOUNDS[(int)(Math.random()*2)]));
//setVisible(false);
//addBehavior(new FallingDebrisBehavior(tr,model));
defaultModelAssignment();
break;
case cNome://Walky bot?
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case cNomeLegs://Walky bot?
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case cNomeFactory:
mobile=false;
defaultModelAssignment();
break;
case geigerBoss:
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
anchoring=Anchoring.terrain;
mobile=false;
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case volcanoBoss:
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
anchoring=Anchoring.terrain;
mobile=false;
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case volcano://Wat.
unhandled(def);
canTurn=false;
mobile=false;
anchoring=Anchoring.terrain;
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case missile://Silo?
mobile=false;//TODO
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case bob:
addBehavior(new Bobbing().setAdditionalHeight(TRFactory.mapSquareSize*1));
addBehavior(new SteadilyRotating());
addBehavior(new ExplodesOnDeath(ExplosionType.Blast,MED_EXP_SOUNDS[(int)(Math.random()*2)]));
possibleBobbingSpinAndCrashOnDeath(.5,def);
customExplosion=true;
anchoring=Anchoring.floating;
mobile=false;
canTurn=false;//ironic?
defaultModelAssignment();
break;
case alienBoss:
mobile=false;
alienModelAssignment();
alienBoss(pl);
bossWarningSiren();
break;
case canyonBoss1:
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
mobile=false;
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case canyonBoss2:
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
mobile=false;
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case lavaMan://Also terraform-o-bot
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
mobile=false;
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case arcticBoss:
//ARTIC / Ymir. Hangs from ceiling.
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
projectileFiringBehavior();
mobile=false;
anchoring=Anchoring.ceiling;
defaultModelAssignment();
defaultBossNAVTargetingResponse();
break;
case helicopter:
defaultModelAssignment();
break;
case tree:
canTurn=false;
mobile=false;
foliage=true;
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case ceilingStatic:
canTurn=false;
mobile=false;
setTop(Vector3D.MINUS_J);
anchoring=Anchoring.ceiling;
defaultModelAssignment();
break;
case bobAndAttack:{
addBehavior(new SteadilyRotating().setRotationPhase(2*Math.PI*Math.random()));
final ProjectileFiringBehavior pfb = new ProjectileFiringBehavior();
try{pfb.addSupply(99999999);}catch(SupplyNotNeededException e){}
pfb.setProjectileFactory(tr.getResourceManager().getProjectileFactories()[def.getWeapon().ordinal()]);
Integer [] firingVertices = Arrays.copyOf(def.getFiringVertices(),def.getNumRandomFiringVertices());
pfb.setFiringPositions(getModelSource(),firingVertices);
addBehavior(pfb);//Bob and attack don't have the advantage of movement, so give them the advantage of range.
addBehavior(new AutoFiring().
setProjectileFiringBehavior(pfb).
setPatternOffsetMillis((int)(Math.random()*2000)).
setMaxFiringDistance(TRFactory.mapSquareSize*17).
setSmartFiring(true));
addBehavior(new Bobbing().
setPhase(Math.random()).
setBobPeriodMillis(10*1000+Math.random()*3000));
addBehavior(new ExplodesOnDeath(ExplosionType.Blast,BIG_EXP_SOUNDS[(int)(Math.random()*3)]));
possibleBobbingSpinAndCrashOnDeath(.5,def);
if(isBoss())
defaultBossNAVTargetingResponse();
customExplosion=true;
mobile=false;
canTurn=false;
anchoring=Anchoring.floating;
defaultModelAssignment();
break;}
case forwardDrive:
canTurn=false;
anchoring=Anchoring.terrain;
defaultModelAssignment();
break;
case fallingStalag:
fallingObjectBehavior();
customExplosion=true;
addBehavior(new ExplodesOnDeath(ExplosionType.BigExplosion,MED_EXP_SOUNDS[(int)(Math.random()*2)]));
//canTurn=false;
//mobile=false;
anchoring=Anchoring.floating;
defaultModelAssignment();
break;
case attackRetreatBelowSky:
smartPlaneBehavior(tr,def,false);
anchoring=Anchoring.floating;
defaultModelAssignment();
break;
case attackRetreatAboveSky:
smartPlaneBehavior(tr,def,true);
anchoring=Anchoring.floating;
defaultModelAssignment();
break;
case bobAboveSky:
addBehavior(new Bobbing().setAdditionalHeight(TRFactory.mapSquareSize*5));
addBehavior(new SteadilyRotating());
possibleBobbingSpinAndCrashOnDeath(.5,def);
mobile=false;
canTurn=false;
anchoring=Anchoring.floating;
defaultModelAssignment();
break;
case factory:
canTurn=false;
mobile=false;
anchoring=Anchoring.floating;
defaultModelAssignment();
break;
case shootOnThrustFrame:
defaultModelAssignment();
projectileFiringBehavior();
addBehavior(new FireOnFrame().
setController(getModel().getController()).
setFrame(def.getThrustSpeed()).
setNumShots(5).
setTimeBetweenShotsMillis(200));
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()));
defaultBossNAVTargetingResponse();
break;
}//end switch(logic)
//Position Limit
{final PositionLimit posLimit = new PositionLimit();
posLimit.getPositionMaxima()[1]=tr.getWorld().sizeY;
posLimit.getPositionMinima()[1]=-tr.getWorld().sizeY;
addBehavior(posLimit);}
if(anchoring==Anchoring.terrain){
addBehavior(new CustomDeathBehavior(new Runnable(){
@Override
public void run(){
getGameShell().getGame().getCurrentMission().notifyGroundTargetDestroyed();
}
}));
addBehavior(new TerrainLocked());
}
else if(anchoring==Anchoring.ceiling){
addBehavior(new TerrainLocked().setLockedToCeiling(true));
}
else addBehavior(new CustomDeathBehavior(new Runnable(){
@Override
public void run(){
getGameShell().getGame().getCurrentMission().notifyAirTargetDestroyed();
}//end run()
}));
//Misc
addBehavior(new TunnelRailed(tr));//Centers in tunnel when appropriate
addBehavior(new DeathBehavior());
final int newHealth = (int)(getShieldScalar()*(pl.getStrength()+(spinCrash?16:0)));
addBehavior(new DamageableBehavior().
setHealth(newHealth).
setMaxHealth(newHealth).
setEnable(!boss));
setActive(!boss);
addBehavior(new DamagedByCollisionWithDEFObject());
if(!foliage)addBehavior(new DebrisOnDeathBehavior());
else{addBehavior(new CustomDeathBehavior(new Runnable(){
@Override
public void run(){
getGameShell().getGame().getCurrentMission().notifyFoliageDestroyed();
}}));}
if(canTurn||boss){
addBehavior(new RotationalMomentumBehavior());
addBehavior(new RotationalDragBehavior()).setDragCoefficient(.86);
addBehavior(new AutoLeveling());
}
if(foliage){
addBehavior(new ExplodesOnDeath(ExplosionType.Billow));
}else if((anchoring == Anchoring.terrain) && !customExplosion){
addBehavior(new ExplodesOnDeath(ExplosionType.BigExplosion,BIG_EXP_SOUNDS[(int)(Math.random()*3)]));
}else if(!customExplosion){
addBehavior(new ExplodesOnDeath(ExplosionType.Blast,MED_EXP_SOUNDS[(int)(Math.random()*2)]));
}
if(mobile){
addBehavior(new MovesByVelocity());
addBehavior(new HasPropulsion());
addBehavior(new AccelleratedByPropulsion());
addBehavior(new VelocityDragBehavior());
if(anchoring==Anchoring.terrain){}
else {//addBehavior(new BouncesOffSurfaces().setReflectHeading(false));
addBehavior(new CollidesWithTerrain().setAutoNudge(true).setNudgePadding(40000));
}
probeForBehavior(VelocityDragBehavior.class).setDragCoefficient(.86);
probeForBehavior(Propelled.class).setMinPropulsion(0);
probeForBehavior(Propelled.class).setPropulsion(getDEFSpeedScalar()*def.getThrustSpeed()/1.2);
addBehavior(new LoopingPositionBehavior());
}//end if(mobile)
if(def.getPowerup()!=null && Math.random()*100. < def.getPowerupProbability()){
addBehavior(new LeavesPowerupOnDeathBehavior(def.getPowerup()));}
addBehavior(new CollidesWithPlayer());
addBehavior(new DamagedByCollisionWithPlayer(8024,250));
proposeRandomYell();
}//end DEFObject
/*
@Override
public void destroy(){
if(ruinObject!=null){
//Give the ruinObject its own position because it is sharing positions with the original WorldObject,
//which is going to be sent to xyz=Double.INFINITY soon.
ruinObject.setPosition(Arrays.copyOf(getPosition(), 3));
ruinObject.setVisible(true);
ruinObject.setActive(true);}
super.destroy();
}*/
private void defaultModelAssignment() throws IllegalAccessException, FileLoadException, IOException{
setModel(getTr().getResourceManager().getBINModel(
enemyDefinition.getComplexModelFile(),
getTr().getGlobalPaletteVL(), null, null));
}
private void alienModelAssignment() throws FileLoadException, IOException, IllegalAccessException{
setModel(getTr().getResourceManager().getBINModel(
enemyDefinition.getSimpleModel(),
getTr().getGlobalPaletteVL(), null, null));
}
private void defaultRuinObject(EnemyPlacement pl) throws IOException, IllegalArgumentException, IllegalAccessException, FileLoadException{
//Spawn a second, powerup-free model using the simplemodel
//Model simpleModel=null;
//try{simpleModel = tr.getResourceManager().getBINModel(def.getSimpleModel(),tr.getGlobalPaletteVL(),null,tr.gpu.get().getGl());}
//catch(Exception e){e.printStackTrace();}
EnemyDefinition ed = new EnemyDefinition();
ed.setLogic(EnemyLogic.groundDumb);
ed.setDescription("auto-generated enemy rubble def");
ed.setPowerupProbability(0);
ed.setComplexModelFile(enemyDefinition.getSimpleModel());
EnemyPlacement simplePlacement = pl.clone();
// if(ed.getComplexModelFile()!=null){
final DEFObject ruin = new DEFObject(ed,simplePlacement);
ruin.setActive(false);
ruin.setVisible(false);
ruin.setRuin(true);
getSubObjects().add(ruin);
ruin.setPosition(new double[]{-999999999999999999.,-9999999999999999999.,-999999999999999999.});//Relevant nowhere
ruin.notifyPositionChange();
addBehavior(new CustomDeathBehavior(new Runnable(){
@Override
public void run() {
ruin.setPosition(Arrays.copyOf(getPosition(), 3));
ruin.notifyPositionChange();
ruin.setActive(true);
ruin.setVisible(true);
}}));
try{ruin.setDirection(new ObjectDirection(pl.getRoll(),pl.getPitch(),pl.getYaw()+65536));}
catch(MathArithmeticException e){e.printStackTrace();}
//}//end if(!null)
}//end defaultRuinObject(...)
private void proposeRandomYell(){
final String sfxFile = enemyDefinition.getBossYellSFXFile();
if(sfxFile != null && !sfxFile.toUpperCase().contentEquals("NULL")){
final SoundTexture soundTexture = getTr().getResourceManager().soundTextures.get(sfxFile);
final RandomSFXPlayback randomSFXPlayback = new RandomSFXPlayback()
.setSoundTexture(soundTexture)
.setDisableOnDeath(true)
.setVolumeScalar(SoundSystem.DEFAULT_SFX_VOLUME*1.5);
addBehavior(randomSFXPlayback);
}//end if(!NULL)
}//end proposeRandomYell()
private void projectileFiringBehavior(){
ProjectileFiringBehavior pfb;
Integer [] firingVertices = Arrays.copyOf(enemyDefinition.getFiringVertices(),enemyDefinition.getNumRandomFiringVertices());
addBehavior(pfb=new ProjectileFiringBehavior().
setProjectileFactory(getTr().getResourceManager().
getProjectileFactories()[enemyDefinition.getWeapon().ordinal()]).setFiringPositions(getModelSource(),firingVertices)
);
final String fireSfxFile = enemyDefinition.getBossFireSFXFile();
if(fireSfxFile != null && !fireSfxFile.toUpperCase().contentEquals("NULL"))
pfb.setFiringSFX(getTr().getResourceManager().soundTextures.get(fireSfxFile));
try{pfb.addSupply(99999999);}catch(SupplyNotNeededException e){}
final AutoFiring af;
addBehavior(af=new AutoFiring().
setProjectileFiringBehavior(pfb).
setPatternOffsetMillis((int)(Math.random()*2000)).
setMaxFiringDistance(TRFactory.mapSquareSize*5).
setSmartFiring(true).
setMaxFireVectorDeviation(2.).
setTimePerPatternEntry((int)(getFiringRateScalar()*(!boss?500:350))));
if(boss)af.setFiringPattern(new boolean []{true,true,true,true,false,false,true,false}).setAimRandomness(.07);
}
private void unhandled(EnemyDefinition def){
System.err.println("UNHANDLED DEF LOGIC: "+def.getLogic()+". MODEL="+def.getComplexModelFile()+" DESC="+def.getDescription());
}
private void fallingObjectBehavior(){
canTurn=false;
mobile=false;//Technically wrong but propulsion is unneeded.
//addBehavior(new PulledDownByGravityBehavior());
final MovesByVelocity mbv = new MovesByVelocity();
mbv.setVelocity(new Vector3D(3500,-100000,5000));
addBehavior(mbv);
//addBehavior(new VelocityDragBehavior().setDragCoefficient(.99)); // For some reason it falls like pine tar
addBehavior(new DamageableBehavior().setMaxHealth(10).setHealth(10));
addBehavior(new DeathBehavior());
addBehavior(new CollidesWithTerrain().setIgnoreCeiling(true));
addBehavior(new DamagedByCollisionWithSurface());
addBehavior(new RotationalMomentumBehavior()
.setEquatorialMomentum(.01).setLateralMomentum(.02).setPolarMomentum(.03));
{final DEFObject thisObject = this;
final TR thisTr = getTr();
addBehavior(new ResetsRandomlyAfterDeath()
.setMinWaitMillis(1000)
.setMaxWaitMillis(5000)
.setRunOnReset(new Runnable(){
@Override
public void run(){
final Vector3D centerPos = thisObject.probeForBehavior(DeathBehavior.class).getLocationOfLastDeath();
thisObject.probeForBehavior(MovesByVelocity.class).setVelocity(new Vector3D(7000,-200000,1000));
final double [] pos = thisObject.getPosition();
pos[0]=centerPos.getX()+Math.random()*TRFactory.mapSquareSize*3-TRFactory.mapSquareSize*1.5;
pos[1]=thisTr.getWorld().sizeY/2+thisTr.getWorld().sizeY*(Math.random())*.3;
pos[2]=centerPos.getZ()+Math.random()*TRFactory.mapSquareSize*3-TRFactory.mapSquareSize*1.5;
thisObject.notifyPositionChange();
}//end run()
}));}
}
private void possibleSpinAndCrashOnDeath(double probability, final EnemyDefinition def){
spinCrash=Math.random()<probability;
if(spinCrash){
final DamageTrigger spinAndCrash = new DamageTrigger(){
@Override
public void healthBelowThreshold(){// Spinout and crash
final WorldObject parent = getParent();
if(probeForBehavior(DamageableBehavior.class).getHealth()<1)
return;//No point; already dying.
//Trigger small boom
final TR tr = parent.getTr();
Features.get(tr,SoundSystemFeature.class).getPlaybackFactory().
create(tr.getResourceManager().soundTextures.get("EXP2.WAV"), new double[]{.5*SoundSystem.DEFAULT_SFX_VOLUME*2,.5*SoundSystem.DEFAULT_SFX_VOLUME*2});
addBehavior(new PulledDownByGravityBehavior().setEnable(true));
probeForBehavior(DamagedByCollisionWithSurface.class).setEnable(true);
probeForBehavior(CollidesWithTerrain.class).setNudgePadding(0);
probeForBehavior(DamageableBehavior.class).setAcceptsProjectileDamage(false);
probeForBehavior(ExplodesOnDeath.class).setExplosionType(ExplosionType.BigExplosion).setExplosionSound(BIG_EXP_SOUNDS[(int)(Math.random()*3)]);
if(def.getThrustSpeed()<800000){
probeForBehavior(HasPropulsion.class).setPropulsion(0);
probeForBehavior(VelocityDragBehavior.class).setEnable(false);
}
//Catastrophy
final double spinSpeedCoeff=Math.max(def.getThrustSpeed()!=0?def.getThrustSpeed()/1600000:.3,.4);
addBehavior(new SpinAccellerationBehavior().setSpinMode(SpinMode.LATERAL).setSpinAccelleration(.009*spinSpeedCoeff));
addBehavior(new SpinAccellerationBehavior().setSpinMode(SpinMode.EQUATORIAL).setSpinAccelleration(.006*spinSpeedCoeff));
addBehavior(new SpinAccellerationBehavior().setSpinMode(SpinMode.POLAR).setSpinAccelleration(.007*spinSpeedCoeff));
//TODO: Sparks, and other fun stuff.
addBehavior(new SpawnsRandomExplosionsAndDebris(parent.getTr()));
addBehavior(new SpawnsRandomSmoke(parent.getTr()));
}//end healthBelowThreshold
}.setThreshold(2048);
addBehavior(new DamagedByCollisionWithSurface().setCollisionDamage(65535).setEnable(false));
addBehavior(spinAndCrash);}
}
private void possibleBobbingSpinAndCrashOnDeath(double probability, EnemyDefinition def){
possibleSpinAndCrashOnDeath(probability,def);
if(spinCrash){
addBehavior(new CollidesWithTerrain());
addBehavior(new MovesByVelocity()).setEnable(false);
addBehavior(new HasPropulsion()).setEnable(false);
addBehavior(new AccelleratedByPropulsion()).setEnable(false);
addBehavior(new VelocityDragBehavior()).setEnable(false);
addBehavior(new RotationalMomentumBehavior()).setEnable(false);
addBehavior(new RotationalDragBehavior()).setDragCoefficient(.86);
final DamageTrigger spinAndCrashAddendum = new DamageTrigger(){
@Override
public void healthBelowThreshold(){
final WorldObject parent = getParent();
parent.probeForBehavior(MovesByVelocity.class).setEnable(true);
parent.probeForBehavior(HasPropulsion.class).setEnable(true);
parent.probeForBehavior(AccelleratedByPropulsion.class).setEnable(true);
parent.probeForBehavior(VelocityDragBehavior.class).setEnable(true);
parent.probeForBehavior(RotationalMomentumBehavior.class).setEnable(true);
parent.probeForBehavior(SteadilyRotating.class).setEnable(false);
parent.probeForBehavior(Bobbing.class).setEnable(false);
// parent.getBehavior().probeForBehavior(AutoFiring.class).setBerzerk(true)
// .setFiringPattern(new boolean[]{true}).setTimePerPatternEntry(100);
}};
addBehavior(spinAndCrashAddendum);
}//end if(spinCrash)
}//end possibleBobbingSpinAndCrashOnDeath
private void smartPlaneBehavior(TR tr, EnemyDefinition def, boolean retreatAboveSky){
final HorizAimAtPlayerBehavior haapb =new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer()).setLeftHanded(Math.random()>=.5);
addBehavior(haapb);
final AdjustAltitudeToPlayerBehavior aatpb = new AdjustAltitudeToPlayerBehavior(getGameShell().getGame().getPlayer()).setAccelleration(1000);
addBehavior(aatpb);
final ProjectileFiringBehavior pfb = new ProjectileFiringBehavior().setProjectileFactory(tr.getResourceManager().getProjectileFactories()[def.getWeapon().ordinal()]);
try{pfb.addSupply(99999999);}catch(SupplyNotNeededException e){}
Integer [] firingVertices = Arrays.copyOf(def.getFiringVertices(),def.getNumRandomFiringVertices());
pfb.setFiringPositions(getModelSource(),firingVertices);
addBehavior(pfb);
possibleSpinAndCrashOnDeath(.4,def);
if(spinCrash){
final DamageTrigger spinAndCrashAddendum = new DamageTrigger(){
@Override
public void healthBelowThreshold(){
final WorldObject parent = getParent();
final HasPropulsion hp = probeForBehavior(HasPropulsion.class);
hp.setPropulsion(hp.getPropulsion()/1);
probeForBehavior(AutoLeveling.class).
setLevelingAxis(LevelingAxis.HEADING).
setLevelingVector(Vector3D.MINUS_J).setRetainmentCoeff(.985,.985,.985);
}};
addBehavior(spinAndCrashAddendum);
}//end if(spinCrash)
AccelleratedByPropulsion escapeProp=null;
if(retreatAboveSky){
escapeProp = new AccelleratedByPropulsion();
escapeProp.setThrustVector(new Vector3D(0,.1,0)).setEnable(false);
addBehavior(escapeProp);}
final AutoFiring afb = new AutoFiring();
afb.setMaxFireVectorDeviation(.7);
afb.setFiringPattern(new boolean [] {true,false,false,false,true,true,false});
afb.setTimePerPatternEntry((int)(getFiringRateScalar()*(200+Math.random()*200)));
afb.setPatternOffsetMillis((int)(Math.random()*1000));
afb.setProjectileFiringBehavior(pfb);
try{
final TVF3Game tvf3 = (TVF3Game)getGameShell().getGame();
if(tvf3.getDifficulty() != Difficulty.EASY)
afb.setSmartFiring(true);
}catch(ClassCastException e){}//Not a TVF3 Game
addBehavior(afb);
final SpinAccellerationBehavior sab = (SpinAccellerationBehavior)new SpinAccellerationBehavior().setEnable(false);
addBehavior(sab);
addBehavior(new SmartPlaneBehavior(haapb,afb,sab,aatpb,escapeProp,retreatAboveSky));
addBehavior(new BuzzByPlayerSFX().setBuzzSounds(new String[]{
"FLYBY56.WAV","FLYBY60.WAV","FLYBY80.WAV","FLYBY81.WAV"}));
}//end smartPlaneBehavior()
private void alienBoss(EnemyPlacement pl) throws FileLoadException, IllegalAccessException, IOException{
addBehavior(new HorizAimAtPlayerBehavior(getGameShell().getGame().getPlayer())).setEnable(false);
projectileFiringBehavior();
setVisible(false);
final ResourceManager rm = getTr().getResourceManager();
setModel(rm.getBINModel(enemyDefinition.getSimpleModel(), getTr().getGlobalPaletteVL(), null, null));
final int towerShields = pl.getStrength();//Not sure exactly what should go here.
final int alienShields = pl.getStrength();
final int totalShields = towerShields + alienShields;
// BOSS
final DamageTrigger damageTrigger = new DamageTrigger(){
@Override
public void healthBelowThreshold() {
final Model oldModel = getModel();
try{setModel(rm.getBINModel(enemyDefinition.getComplexModelFile(), getTr().getGlobalPaletteVL(), null, null));}
catch(Exception e){e.printStackTrace();}
probeForBehavior(ProjectileFiringBehavior.class).setEnable(true);
probeForBehavior(HorizAimAtPlayerBehavior.class).setEnable(true);
final Vector3D pos = new Vector3D(getPosition());
getTr().getResourceManager().getExplosionFactory().triggerExplosion(pos, Explosion.ExplosionType.Blast);
final Vector3D dims = oldModel.getMaximumVertexDims();
final DebrisSystem debrisSystem = getTr().getResourceManager().getDebrisSystem();
for(int i=0; i<20; i++){
final Vector3D rPos = new Vector3D(
(Math.random()-.5)*dims.getX(),
(Math.random()-.5)*dims.getY(),
(Math.random()-.5)*dims.getZ()).
scalarMultiply(2).
add(new Vector3D(getPosition()));
final double velocity = 1000;
final Vector3D rVel = new Vector3D(
(Math.random()-.5)*velocity,
(Math.random()-.5)*velocity,
(Math.random()-.5)*velocity).
scalarMultiply(2);
debrisSystem.spawn(rPos, rVel);
}//end for(20)
getTr().getResourceManager().getDebrisSystem().spawn(pos, new Vector3D(Math.random()*10000,Math.random()*10000,Math.random()*10000));
}};
damageTrigger.setThreshold(alienShields);
addBehavior(damageTrigger);
//TOWER
final PropertyChangeListener alienPCL;
addPropertyChangeListener(ACTIVE, alienPCL = new PropertyChangeListener(){
@Override
public void propertyChange(PropertyChangeEvent evt) {
if(evt.getNewValue() == Boolean.TRUE){
probeForBehavior(DamageableBehavior.class).setMaxHealth(totalShields);
probeForBehavior(DamageableBehavior.class).setHealth(totalShields);
probeForBehavior(ProjectileFiringBehavior.class).setEnable(false);
DEFObject.this.setVisible(true);
}
}});
//DAMAGEABLE TOWER
addBehavior(new CustomNAVTargetableBehavior(new Runnable(){
@Override
public void run() {
probeForBehavior(DamageableBehavior.class).setEnable(true);
DEFObject.this.setIgnoringProjectiles(false);
}}));
hardReferences.add(alienPCL);
}//end alienBoss(...)
private void bossWarningSiren(){
final PropertyChangeListener alienPCL;
addPropertyChangeListener(ACTIVE, alienPCL = new PropertyChangeListener(){
@Override
public void propertyChange(PropertyChangeEvent evt) {
if(evt.getNewValue() == Boolean.TRUE){
final TR tr = getTr();
SoundSystem ss = Features.get(getTr(),SoundSystemFeature.class);
final SoundTexture st = tr.getResourceManager().soundTextures.get("WARNING.WAV");
final SoundEvent se = ss.getPlaybackFactory().create(st, new double[]{SoundSystem.DEFAULT_SFX_VOLUME,SoundSystem.DEFAULT_SFX_VOLUME});
ss.enqueuePlaybackEvent(se);
}
}});
hardReferences.add(alienPCL);
}//end bossWarningSiren()
private void defaultBossNAVTargetingResponse(){
addBehavior(new CustomNAVTargetableBehavior(new Runnable(){
@Override
public void run() {
probeForBehavior(DamageableBehavior.class).setEnable(true);
setIgnoringProjectiles(false);}
}));
bossWarningSiren();
}//end defaultBossNAVTargetingResponse()
@Override
public void setTop(Vector3D top){
super.setTop(top);
}
/**
* @return the logic
*/
public EnemyLogic getLogic() {
return logic;
}
/**
* @return the mobile
*/
public boolean isMobile() {
return mobile;
}
/**
* @return the canTurn
*/
public boolean isCanTurn() {
return canTurn;
}
/**
* @return the foliage
*/
public boolean isFoliage() {
return foliage;
}
/**
* @return the boss
*/
public boolean isBoss() {
return boss;
}
/**
* @return the groundLocked
*/
public boolean isGroundLocked() {
return anchoring==Anchoring.terrain;
}
/**
* @return the ignoringProjectiles
*/
public boolean isIgnoringProjectiles() {
return ignoringProjectiles;
}
/**
* @param ignoringProjectiles the ignoringProjectiles to set
*/
public void setIgnoringProjectiles(boolean ignoringProjectiles) {
this.ignoringProjectiles = ignoringProjectiles;
}
/**
* @return the isRuin
*/
public boolean isRuin() {
return isRuin;
}
/**
* @param isRuin the isRuin to set
*/
public void setRuin(boolean isRuin) {
this.isRuin = isRuin;
}
/**
* @return the shieldGen
*/
public boolean isShieldGen() {
return shieldGen;
}
/**
* @param shieldGen the shieldGen to set
*/
public void setShieldGen(boolean shieldGen) {
this.shieldGen = shieldGen;
}
@Override
public String toString(){
final StringBuilder attachedObjects = new StringBuilder();
attachedObjects.append("\n\tAttached objects: ");
for(WorldObject wo:getSubObjects())
attachedObjects.append("\n\t "+wo.toString()+" ");
return "DEFObject Model="+getModel()+" Logic="+logic+" Anchoring="+anchoring+
"\n\tmobile="+mobile+" isRuin="+isRuin+" foliage="+foliage+" boss="+boss+" spinCrash="+spinCrash+
"\n\tignoringProjectiles="+ignoringProjectiles+"\n"+
"\tRuinObject="+attachedObjects.toString();
}
enum Anchoring{
floating(false),
terrain(true),
ceiling(true);
private final boolean locked;
private Anchoring(boolean locked){
this.locked=locked;
}
public boolean isLocked()
{return locked;}
}//end Anchoring
public BasicModelSource getModelSource(){
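// Lazily assembles the model source chain: the BIN file is extracted into a
// BufferedModelTarget (or an InterpolatedAnimatedModelSource when the BIN is
// animated), then wrapped in a RotatedModelSource bound to this object's rotation.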
if(rotatedModelSource==null){//Assemble our decorator sandwich.
final String complexModel = enemyDefinition.getComplexModelFile();
if(complexModel==null)
return null;
final ResourceManager rm = getTr().getResourceManager();
BasicModelSource bmt = null;
final BINFileExtractor bfe = new BINFileExtractor(rm);
bfe.setDefaultTexture(Features.get(getTr(), GPUFeature.class).textureManager.get().getFallbackTexture());
try{bmt= new BufferedModelTarget();
bfe.extract(rm.getBinFileModel(enemyDefinition.getComplexModelFile()), (BufferedModelTarget)bmt);}
catch(UnrecognizedFormatException e){//Animated BIN
try{final AnimationControl ac = rm.getAnimationControlBIN(enemyDefinition.getComplexModelFile());
List<String> bins = ac.getBinFiles();
bmt = new InterpolatedAnimatedModelSource();
for(String name:bins){
BufferedModelTarget bufferedTarget = new BufferedModelTarget();
bfe.extract(rm.getBinFileModel(name),bufferedTarget);
((InterpolatedAnimatedModelSource)bmt).addModelFrame(bufferedTarget);}
((InterpolatedAnimatedModelSource)bmt).setDelayBetweenFramesMillis(ac.getDelay());
}
catch(Exception ee){ee.printStackTrace();}
}
catch(Exception e){e.printStackTrace();}
rotatedModelSource = new RotatedModelSource(bmt);
rotatedModelSource.setRotatableSource (this);
}
return rotatedModelSource;
}
/**
* @return the boundingHeight
*/
public double getBoundingHeight() {
if(boundingHeight == null){
calcBoundingDims();
}
return boundingHeight;
}//end getBoundingHeight()
public double getBoundingWidth() {
if(boundingWidth == null){
calcBoundingDims();
}
return boundingWidth;
}//end getBoundingWidth()
private void calcBoundingDims(){
final Model model = getModel();
Vector3D max = Vector3D.ZERO;
if(model!=null)
max = model.getMaximumVertexDims();
else{
max = new Vector3D((enemyDefinition.getBoundingBoxRadius()/TRFactory.crossPlatformScalar),(enemyDefinition.getBoundingBoxRadius()/TRFactory.crossPlatformScalar),0)
.scalarMultiply(1./1.5);
//max = Vector3D.ZERO;
}
boundingWidth =max.getX();
boundingHeight=max.getY();
}//end calcBoundingDims()
public static class HitBox{
private int vertexID;
private double size;
public int getVertexID() {
return vertexID;
}
public void setVertexID(int vertexID) {
this.vertexID = vertexID;
}
/**
*
* @return size in modern units
* @since Jul 6, 2016
*/
public double getSize() {
return size;
}
/**
* Size in modern units
* @param size
* @since Jul 6, 2016
*/
public void setSize(double size) {
this.size = size;
}
}//end HitBox
public HitBox[] getHitBoxes() {
return hitBoxes;
}
public void setHitBoxes(HitBox[] hitBoxes) {
this.hitBoxes = hitBoxes;
}
private double getFiringRateScalar(){
try{
final TVF3Game tvf3 = (TVF3Game)getGameShell().getGame();
return tvf3.getDifficulty().getFiringRateScalar();
}catch(ClassCastException e){return 1;}
}
private double getShieldScalar(){
try{
final TVF3Game tvf3 = (TVF3Game)getGameShell().getGame();
return tvf3.getDifficulty().getShieldScalar();
}catch(ClassCastException e){return 1;}
}
private double getDEFSpeedScalar(){
try{
final TVF3Game tvf3 = (TVF3Game)getGameShell().getGame();
return tvf3.getDifficulty().getDefSpeedScalar();
}catch(ClassCastException e){return 1;}
}
public ArrayList<WorldObject> getSubObjects() {
if(subObjects==null)
subObjects = new ArrayList<WorldObject>();
return subObjects;
}
protected void setSubObjects(ArrayList<WorldObject> attachedObjects) {
this.subObjects = attachedObjects;
}
public GameShell getGameShell() {
if(gameShell == null){
gameShell = Features.get(getTr(), GameShell.class);}
return gameShell;
}
public void setGameShell(GameShell gameShell) {
this.gameShell = gameShell;
}
public EnemyDefinition getEnemyDefinition() {
return enemyDefinition;
}
}//end DEFObject
|
package org.lantern.oauth;
import java.io.IOException;
import java.io.InputStream;
import javax.security.auth.login.CredentialException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.lantern.TokenResponseEvent;
import org.lantern.event.Events;
import org.lantern.event.RefreshTokenEvent;
import org.lantern.state.Model;
import org.lantern.state.ModelIo;
import org.lantern.state.Settings;
import org.lantern.util.HttpClientFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.auth.oauth2.ClientParametersAuthentication;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.auth.oauth2.CredentialRefreshListener;
import com.google.api.client.auth.oauth2.RefreshTokenRequest;
import com.google.api.client.auth.oauth2.TokenErrorResponse;
import com.google.api.client.auth.oauth2.TokenResponse;
import com.google.api.client.auth.oauth2.TokenResponseException;
import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets;
import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets.Details;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.apache.ApacheHttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.jackson.JacksonFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
* Utility methods for OAuth.
*/
@Singleton
public class OauthUtils {
private static final Logger LOG = LoggerFactory.getLogger(OauthUtils.class);
private final Model model;
private static volatile TokenResponse lastResponse;
private final HttpClientFactory httpClientFactory;
private static GoogleClientSecrets secrets = null;
private final RefreshToken refreshToken;
private final ModelIo modelIo;
public OauthUtils(final HttpClientFactory httpClientFactory,
final Model model, final RefreshToken refreshToken) {
this(httpClientFactory, model, refreshToken, null);
}
@Inject
public OauthUtils(final HttpClientFactory httpClientFactory,
final Model model, final RefreshToken refreshToken,
final ModelIo modelIo) {
this.httpClientFactory = httpClientFactory;
this.model = model;
this.refreshToken = refreshToken;
this.modelIo = modelIo;
LanternSaslGoogleOAuth2Mechanism.setOauthUtils(this);
}
public static String getRedirectUrl(int port) {
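// e.g. getRedirectUrl(7777) returns "http://localhost:7777/oauth2callback" (port value illustrative)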
return String.format("http://localhost:%1$s/oauth2callback", port);
}
/**
* Obtains the oauth tokens. Note the refresh token should already be
* set when this is called. This will attempt to obtain the tokens directly
* and will then use a proxy if necessary.
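*
* A minimal usage sketch (the oauthUtils variable name is illustrative;
* instances are normally provided by injection):
*
* <pre>
* TokenResponse tokens = oauthUtils.oauthTokens();
* String accessToken = tokens.getAccessToken();
* </pre>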
*
* @return The tokens.
* @throws IOException If we cannot access the tokens either directly or
* through a fallback proxy.
* @throws CredentialException If the user's credentials are invalid.
*/
public TokenResponse oauthTokens() throws IOException, CredentialException {
LOG.debug("Refreshing ACCESS token");
// Get the tokens with a direct request followed by a proxied request
// if the direct request fails.
final HttpFallbackFunc<TokenResponse> func =
new HttpFallbackFunc<TokenResponse>() {
@Override
public TokenResponse call(final HttpClient client,
final String refresh) throws IOException, CredentialException {
return oauthTokens(client, refresh);
}
};
return func.execute();
}
/**
* This class allows implementors to make HTTP calls that automatically
* first try to connect directly and then fallback to available proxies if
* direct connections don't work.
*
* @param <T> The return type of the underlying function that should be
* first tried directly and then with a fallback proxy.
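*
* A minimal usage sketch, mirroring how oauthTokens() and httpRequest() use
* this class elsewhere in OauthUtils (the endpoint URL is illustrative only):
*
* <pre>
* HttpFallbackFunc<String> func = new HttpFallbackFunc<String>() {
* public String call(HttpClient client, String refresh)
* throws IOException, CredentialException {
* return httpRequest(client, new HttpGet("https://example.com/api"));
* }
* };
* String body = func.execute();
* </pre>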
*/
private abstract class HttpFallbackFunc<T> {
public abstract T call(final HttpClient client, final String refresh)
throws IOException, CredentialException;
/**
* Execute the desired call with a fallback. If the fallback is used,
* the implemented call method will get invoked a second time. The
* fallback will be used if the direct attempt throws an exception.
*
* @return The implementor's return type.
* @throws IOException If there's an error running the function with
* both direct attempts and fallback proxy attempts.
* @throws CredentialException If the user's credentials are invalid.
*/
public T execute() throws IOException, CredentialException {
LOG.debug("Making oauth call -- will use fallback if necessary...");
// Note this call will block until a refresh token is available!
final String refresh = refreshToken.refreshToken();
final HttpClient client = httpClientFactory.newClient();
return call(client, refresh);
}
}
public TokenResponse oauthTokens(final HttpClient httpClient,
final String refresh)
throws IOException, CredentialException {
LOG.debug("Obtaining access token...");
if (lastResponse != null) {
LOG.debug("We have a cached response...");
final long now = System.currentTimeMillis();
if (now < model.getSettings().getExpiryTime()) {
LOG.debug("Access token hasn't expired yet");
return lastResponse;
} else {
LOG.debug("Access token expired!");
}
}
final ApacheHttpTransport httpTransport =
new ApacheHttpTransport(httpClient);
final GoogleClientSecrets creds = OauthUtils.loadClientSecrets();
final Details installed = creds.getInstalled();
try {
final ClientParametersAuthentication clientAuth =
new ClientParametersAuthentication(installed.getClientId(),
installed.getClientSecret());
final TokenResponse response =
new RefreshTokenRequest(httpTransport,
new JacksonFactory(),
new GenericUrl("https://accounts.google.com/o/oauth2/token"),
refresh)
.setClientAuthentication(clientAuth).execute();
final long expiry = response.getExpiresInSeconds();
LOG.info("Got expiry time: {}", expiry);
//LOG.info("Got response: {}", response);
final Settings set = this.model.getSettings();
final String accessTok = response.getAccessToken();
if (StringUtils.isNotBlank(accessTok)) {
set.setAccessToken(accessTok);
} else {
LOG.warn("Blank access token?");
}
set.setExpiryTime(System.currentTimeMillis() +
((expiry-10) * 1000));
set.setUseGoogleOAuth2(true);
// If the server sent us a new refresh token, store it.
final String tok = response.getRefreshToken();
if (StringUtils.isNotBlank(tok)) {
set.setRefreshToken(tok);
Events.asyncEventBus().post(new RefreshTokenEvent(refresh));
}
// Could be null for testing.
if (this.modelIo != null) {
this.modelIo.write();
}
lastResponse = response;
return lastResponse;
} catch (final TokenResponseException e) {
final String msg = e.getMessage();
final CredentialException ce;
if (msg != null && msg.contains("Bad Gateway")) {
LOG.debug("Looks like we have no proxies", e);
ce = new CredentialException("No proxies?");
} else {
LOG.error("Token error -- maybe revoked or unauthorized?", e);
ce = new CredentialException("Problem with token -- maybe revoked?");
}
ce.initCause(e);
throw ce;
} catch (final IOException e) {
LOG.warn("IO exception while trying to refresh token.", e);
throw e;
}
}
public String postRequest(final String endpoint, final String json)
throws IOException, CredentialException {
final HttpPost post = new HttpPost(endpoint);
post.setHeader("Content-Type", "application/json");
final HttpEntity requestEntity = new StringEntity(json, "UTF-8");
post.setEntity(requestEntity);
return httpRequest(post);
}
public String getRequest(final String endpoint) throws IOException,
CredentialException {
return httpRequest(new HttpGet(endpoint));
}
public String deleteRequest(final String endpoint) throws IOException,
CredentialException {
return httpRequest(new HttpDelete(endpoint));
}
private String httpRequest(final HttpRequestBase request)
throws IOException, CredentialException {
final HttpFallbackFunc<String> func = new HttpFallbackFunc<String>() {
@Override
public String call(final HttpClient client, final String refresh)
throws IOException, CredentialException {
return httpRequest(client, request);
}
};
return func.execute();
}
private String httpRequest(final HttpClient httpClient,
final HttpRequestBase request) throws IOException,
CredentialException {
configureOauth(httpClient, request);
try {
final org.apache.http.HttpResponse response = httpClient.execute(request);
final StatusLine line = response.getStatusLine();
final Header cl = response.getFirstHeader("Content-Length");
if (cl != null && cl.getValue().equals("0")) {
return "";
}
final int code = line.getStatusCode();
// Check for 204 No Content -- i.e. no entity body.
if (code == 204) {
return "";
}
final HttpEntity entity = response.getEntity();
final String body = IOUtils.toString(entity.getContent(), "UTF-8");
EntityUtils.consume(entity);
if (code < 200 || code > 299) {
LOG.warn("Bad response code: {}", code);
throw new IOException("Bad response code: "+code+"\n"+body);
}
return body;
} catch (final IOException e) {
throw e;
} finally {
request.reset();
}
}
private void configureOauth(final HttpClient httpClient,
final HttpRequestBase request) throws IOException,
CredentialException {
final String accessToken = accessToken(httpClient);
request.setHeader("Authorization", "Bearer "+accessToken);
request.setHeader("Accept-Charset", "UTF-8");
request.setHeader("Accept", "application/json");
}
public String accessToken(final HttpClient httpClient) throws IOException,
CredentialException {
final String refresh = this.refreshToken.refreshToken();
return oauthTokens(httpClient, refresh).getAccessToken();
}
public static synchronized GoogleClientSecrets loadClientSecrets() throws IOException {
if (secrets != null) {
return secrets;
}
InputStream is = null;
try {
is = OauthUtils.class.getResourceAsStream(
"/client_secrets_installed.json");
secrets = GoogleClientSecrets.load(new JacksonFactory(), is);
//LOG.debug("Secrets: {}", secrets);
return secrets;
} finally {
IOUtils.closeQuietly(is);
}
}
/**
* Utility method for making an OAuth request to a Google service.
*
* NOTE: Currently unused but an interesting technique for future
* reference.
*
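* A minimal usage sketch (the URL and token values are illustrative only):
*
* <pre>
* HttpResponse resp = OauthUtils.googleOauth(access, refresh,
* "https://www.googleapis.com/oauth2/v1/userinfo");
* String body = resp.parseAsString();
* </pre>
*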
* @param access The access token.
* @param refresh The refresh token.
* @param encodedUrl The URL to visit.
*
* @return The {@link HttpResponse}.
* @throws IOException If there's an error loading the client secrets or
* accessing the service.
*/
public static HttpResponse googleOauth(final String access,
final String refresh, final String encodedUrl) throws IOException{
final GoogleClientSecrets creds = OauthUtils.loadClientSecrets();
final CredentialRefreshListener refreshListener =
new CredentialRefreshListener() {
@Override
public void onTokenResponse(final Credential credential,
final TokenResponse tokenResponse) throws IOException {
LOG.info("Got token response...sending event");
Events.eventBus().post(new TokenResponseEvent(tokenResponse));
}
@Override
public void onTokenErrorResponse(final Credential credential,
final TokenErrorResponse tokenErrorResponse)
throws IOException {
LOG.warn("Error response:\n"+
tokenErrorResponse.toPrettyString());
}
};
final GoogleCredential gc = new GoogleCredential.Builder().
setTransport(new NetHttpTransport()).
setJsonFactory(new JacksonFactory()).
addRefreshListener(refreshListener).
setClientAuthentication(new ClientParametersAuthentication(
creds.getInstalled().getClientId(),
creds.getInstalled().getClientSecret())).build();
gc.setAccessToken(access);
gc.setRefreshToken(refresh);
final GenericUrl url = new GenericUrl(encodedUrl);
final HttpRequestFactory requestFactory =
gc.getTransport().createRequestFactory(gc);
return requestFactory.buildGetRequest(url).execute();
}
}
|
package org.myrobotlab.service;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.PullCommand;
import org.eclipse.jgit.api.errors.CanceledException;
import org.eclipse.jgit.api.errors.DetachedHeadException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidConfigurationException;
import org.eclipse.jgit.api.errors.InvalidRemoteException;
import org.eclipse.jgit.api.errors.NoHeadException;
import org.eclipse.jgit.api.errors.RefNotFoundException;
import org.eclipse.jgit.api.errors.TransportException;
import org.eclipse.jgit.api.errors.WrongRepositoryStateException;
import org.eclipse.jgit.errors.AmbiguousObjectException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.RevisionSyntaxException;
import org.eclipse.jgit.lib.BranchTrackingStatus;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.TextProgressMonitor;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.myrobotlab.codec.CodecJson;
import org.myrobotlab.framework.MrlException;
import org.myrobotlab.framework.Platform;
import org.myrobotlab.framework.ProcessData;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.ServiceType;
import org.myrobotlab.framework.Status;
import org.myrobotlab.lang.NameGenerator;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.net.Http;
import org.myrobotlab.service.Runtime.CmdOptions;
import org.slf4j.Logger;
import picocli.CommandLine;
/**
* <pre>
*
* Agent is responsible for managing running instances of myrobotlab. It
* can start, stop and update myrobotlab.
*
*
* FIXME - test switching branches and remaining on the branch for multiple updates
 * FIXME - test multiple instances on different branches
* FIXME - ws client connectivity and communication !!!
* FIXME - Cli client ws enabled !!
* FIXME - capability to update Agent from child
 * FIXME - move CmdLine definition to Runtime
 * FIXME - convert Runtime's cmdline processing to CmdOptions
 * FIXME - remove CmdLine
* FIXME !!! - remove stdin/stdout !!!! use sockets only
*
* FIXME - there are at least 3 different levels of updating
* 1. a global thread which only "checks" for updates
* 2. the possibility of just downloading an update (per instance)
* 3. the possibility of auto-restarting after a download is completed (per instance)
*
* FIXME - auto update log .. sparse log of only updates and their results ...
* FIXME - test changing version prefix .. e.g. 1.2.
 * FIXME - testing: test without a version, test with the remote inaccessible
 * FIXME - spawn must be synchronized - 2 threads (the timer and the user)
 * FIXME - test naming an instance
 * FIXME - test starting an old version
* FIXME - make hidden check latest version interval and make default interval check large
* FIXME - change Runtime's cli !!!
* FIXME - check user define services for Agent
*
* </pre>
*/
public class Agent extends Service {
private static final long serialVersionUID = 1L;
public final static Logger log = LoggerFactory.getLogger(Agent.class);
final Map<String, ProcessData> processes = new ConcurrentHashMap<String, ProcessData>();
transient static SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd.HHmmssSSS");
Platform platform = Platform.getLocalInstance();
transient WebGui webgui = null;
int port = 8887;
String address = "127.0.0.1";
String currentBranch;
String currentVersion;
/**
* auto update - automatically checks for updates and WILL update any running
* mrl instance automatically
*/
boolean autoUpdate = false;
/**
* autoCheckForUpdate - automatically checks for updates after some interval
* but does not automatically update - it publishes events when new updates
* are available but does not apply them
*/
boolean autoCheckForUpdate = false;
Set<String> possibleVersions = new TreeSet<String>();
final static String REMOTE_BUILDS_URL_HOME = "http://build.myrobotlab.org:8080/job/myrobotlab-multibranch/";
// for more info -
// myrobotlab-multibranch/job/develop/api/json
// WARNING Jenkins url api format for multi-branch pipelines is different from
// maven builds !
final static String REMOTE_BUILDS_URL = "job/%s/api/json?tree=builds[number,status,timestamp,id,result]";
final static String REMOTE_JAR_URL = "job/%s/%s/artifact/target/myrobotlab.jar";
final static String REMOTE_MULTI_BRANCH_JOBS = "api/json";
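// For example, for branch "develop" the builds query resolves to:
// http://build.myrobotlab.org:8080/job/myrobotlab-multibranch/job/develop/api/json?tree=builds[number,status,timestamp,id,result]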
boolean checkRemoteVersions = false;
/**
* command line options for the agent
*/
static CmdOptions options;
String versionPrefix = "1.1.";
static String banner = " _____ __________ ___. __ .____ ___. \n"
+ " / \\ ___.__.\\______ \\ ____\\_ |__ _____/ |_| | _____ \\_ |__ \n"
+ " / \\ / < | | | _
+ "/ Y \\___ | | | ( <_> ) \\_\\ ( <_> ) | | |___ / __ \\| \\_\\ \\\n" + "\\____|__ / ____| |____|_ /\\____/|___ /\\____/|__| |_______ (____ /___ /\n"
+ " \\/\\/ \\/ \\/ \\/ \\/ \\/ \n resistance is futile, we have cookies and robots ...";
/**
* singleton for security purposes
*/
transient static Agent agent;
/**
* development variable to force version "unknown" to be either greatest or
* smallest version for development
*/
private boolean unknownIsGreatest = false;
public static class WorkflowMultiBranchProject {
String name;
WorkflowJob[] jobs;
}
/**
* Jenkins data structure to describe jobs
*/
public static class WorkflowJob {
String name;
String url;
String color;
WorkflowRun lastSuccessfulBuild;
WorkflowRun[] builds;
}
/**
* Jenkins data structure to describe builds
*/
public static class WorkflowRun {
String id;
Integer number;
String result;
Long timestamp;
}
// FIXME - change this to hour for production ...
// long updateCheckIntervalMs = 60 * 60 * 1000; // every hour
long updateCheckIntervalMs = 60 * 1000; // every minute
List<Status> updateLog = new ArrayList<>();
/**
* Update thread - we cannot use addTask as a long update could pile up a
* large set of updates to process quickly in series. Instead, we have a
* simple single class which is always single threaded to process updates.
*
*/
class Updater implements Runnable {
transient Agent agent = null;
transient Thread thread = null;
ProcessData.stateType state = ProcessData.stateType.stopped;
public Updater(Agent agent) {
this.agent = agent;
}
@Override
public void run() {
state = ProcessData.stateType.running;
updateLog("info", "updater running");
autoUpdate = true;
broadcastState();
try {
while (true) {
state = ProcessData.stateType.sleeping;
updateLog("info", "updater sleeping");
Thread.sleep(updateCheckIntervalMs);
state = ProcessData.stateType.updating;
updateLog("info", "updater updating");
agent.update();
}
} catch (Exception e) {
log.info("updater threw", e);
}
log.info("updater stopping");
updateLog("info", "updater stopping");
autoUpdate = false;
state = ProcessData.stateType.stopped;
broadcastState();
}
synchronized public void start() {
if (state == ProcessData.stateType.stopped) {
thread = new Thread(this, getName() + ".updater");
thread.start();
updateLog("info", "updater starting");
} else {
log.warn("updater busy in state = {}", state);
}
}
synchronized public void stop() {
if (state != ProcessData.stateType.stopped) {
// we'll wait if it's in the middle of an update
while (state == ProcessData.stateType.updating) {
log.warn("updater currently updating, waiting for 5 seconds...");
sleep(5000);
}
// most likely the thread is a sleeping state
// so, we wake it up quickly to die ;)
thread.interrupt();
}
}
}
public static String BRANCHES_ROOT = "branches";
Updater updater;
public Agent(String n) throws IOException {
super(n);
updater = new Updater(this);
currentBranch = Platform.getLocalInstance().getBranch();
currentVersion = Platform.getLocalInstance().getVersion();
log.info("Agent {} Pid {} is alive", n, Platform.getLocalInstance().getPid());
// basic setup - minimally we make a directory
// and instance folder of the same branch & version as the
// agent jar
setup();
// user has decided to look for updates ..
if (autoUpdate || checkRemoteVersions) {
invoke("getVersions", currentBranch);
}
}
public String getDir(String branch, String version) {
if (branch == null) {
branch = Platform.getLocalInstance().getBranch();
}
if (version == null) {
try {
version = getLatestVersion(branch, autoUpdate);
} catch (Exception e) {
log.error("getDir threw", e);
}
}
return BRANCHES_ROOT + File.separator + branch + "-" + version;
}
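// For example, getDir("develop", "1.1.245") resolves to
// "branches" + File.separator + "develop-1.1.245" (version value illustrative),
// and getJarName(...) appends File.separator + "myrobotlab.jar" to that path.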
public String getJarName(String branch, String version) {
return getDir(branch, version) + File.separator + "myrobotlab.jar";
}
private void setup() throws IOException {
String agentBranch = Platform.getLocalInstance().getBranch();
String agentVersion = Platform.getLocalInstance().getVersion();
// location of the agent's branch (and version)
String agentVersionPath = getDir(agentBranch, agentVersion);
if (!new File(agentVersionPath).exists()) {
File branchDir = new File(agentVersionPath);
branchDir.mkdirs();
}
String agentMyRobotLabJar = getJarName(agentBranch, agentVersion);
if (!new File(agentMyRobotLabJar).exists()) {
String agentJar = new java.io.File(Agent.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getName();
if (!new File(agentJar).exists() || !agentJar.endsWith(".jar")) {
// not operating in released runtime mode - probably operating in ide
String ideTargetJar = new File(System.getProperty("user.dir") + File.separator + "target" + File.separator + "myrobotlab.jar").getAbsolutePath();
if (!new File(ideTargetJar).exists()) {
error("no source agent jar can be found checked:\n%s\n%s\nare you using ide? please package a build (mvn package -DskipTest)", agentJar, ideTargetJar);
} else {
agentJar = ideTargetJar;
}
}
log.info("on branch {} copying agent's current jar to appropriate location {} -> {}", currentBranch, agentJar, agentMyRobotLabJar);
Files.copy(Paths.get(agentJar), Paths.get(agentMyRobotLabJar), StandardCopyOption.REPLACE_EXISTING);
}
}
public void startWebGui() {
startWebGui(null);
}
public void startWebGui(String addressPort) {
if (addressPort == null) {
startWebGui(null, null);
return;
}
Integer port = null;
String address = null;
try {
port = Integer.parseInt(addressPort);
} catch (Exception e) {
}
try {
if (addressPort.contains(":")) {
String[] anp = addressPort.split(":");
port = Integer.parseInt(anp[1]);
addressPort = anp[0];
}
InetAddress ip = InetAddress.getByName(addressPort);
address = ip.getHostAddress();
} catch (Exception e2) {
}
startWebGui(address, port);
}
public void startWebGui(String address, Integer port) {
try {
if (webgui == null) {
if (address != null) {
this.address = address;
}
if (port != null) {
this.port = port;
} else {
port = 8887;
}
webgui = (WebGui) Runtime.create("webgui", "WebGui");
webgui.autoStartBrowser(false);
webgui.setPort(port);
webgui.setAddress(address);
webgui.startService();
} else {
log.info("webgui already started");
}
} catch (Exception e) {
log.error("startWebGui threw", e);
}
}
public void autoUpdate(boolean b) {
if (b) {
// addTask("update", 1000 * 60, 0, "update");
updater.start();
} else {
// purgeTask("update");
updater.stop();
}
}
/**
* FIXME !!! - if run as a scheduled task these can pile up !!! NOT GOOD - must
* have its own thread
*
* called by the autoUpdate task which is scheduled every minute to look for
* updates from the build server
*/
public void update() {
log.info("looking for updates");
for (String key : processes.keySet()) {
ProcessData process = processes.get(key);
if (!process.options.autoUpdate) {
log.info("not autoUpdate");
continue;
}
try {
// FIXME - if options.src != null GITHUB
if (options.src != null) {
log.info("USING SRC LOOKING FOR GITHUB FOR UPDATES");
String newVersion = getLatestSrc(process.options.branch);
if (newVersion != null && process.isRunning()) {
log.info("its running - we should restart");
restart(process.options.id);
log.info("restarted");
}
/*
CmdOptions options = new CmdOptions();
options.branch = branch;
options.version = version;
agent.spawn(options);
*/
} else {
/**<pre> OLD WAY - checking Jenkins ! FIXME - else JENKINS
// getRemoteVersions
log.info("getting version");
String version = getLatestVersion(process.options.branch, true);
if (version == null || version.equals(process.options.version)) {
log.info("same version {}", version);
continue;
}
// we have a possible update
log.info("WOOHOO ! updating to version {}", version);
process.options.version = version;
process.jarPath = new File(getJarName(process.options.branch, process.options.version)).getAbsolutePath();
getLatestJar(process.options.branch);
log.info("WOOHOO ! updated !");
if (process.isRunning()) {
log.info("its running - we should restart");
restart(process.options.id);
log.info("restarted");
}
</pre>
*/
}
} catch (Exception e) {
log.error("proccessing updates from scheduled task threw", e);
}
}
}
/**
* gets the latest jar if allowed to check remote ....
*
* @param branch
*/
public void getLatestJar(String branch) {
try {
// check for latest
String version = getLatestVersion(branch, true);
// check if branch and version exist locally
if (!existsLocally(branch, version)) {
log.info("found update - getting new jar {} {}", branch, version);
getJar(branch, version);
// download latest to the appropriate directory
// mkdirs
// download file
if (!verifyJar(branch, version)) {
// TODO: handle a failed verification (verifyJar currently always returns true)
}
log.info("successfully downloaded {} {}", branch, version);
}
} catch (Exception e) {
error(e);
}
}
// FIXME - implement :)
private boolean verifyJar(String branch, String version) {
return true;
}
synchronized public void getJar(String branch, String version) {
new File(getDir(branch, version)).mkdirs();
String build = getBuildId(version);
// this
Http.getSafePartFile(String.format(REMOTE_BUILDS_URL_HOME + REMOTE_JAR_URL, branch, build), getJarName(branch, version));
}
public String getBuildId(String version) {
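// e.g. a version of "1.1.245" (illustrative) yields the Jenkins build id "245"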
String[] parts = version.split("\\.");
return parts[2];
}
public String getLatestVersion(String branch, Boolean allowRemote) throws MrlException {
Set<String> versions = getVersions(branch, allowRemote);
return getLatestVersion(versions);
}
public String getLatestVersion(Set<String> versions) throws MrlException {
String latest = null;
for (String version : versions) {
if (latest == null) {
latest = version;
continue;
}
if (isGreaterThan(version, latest)) {
latest = version;
}
}
return latest;
}
/**
* checks to see if a branch / version jar exists on the local filesystem
*
* @param branch
* @param version
* @return
*/
public boolean existsLocally(String branch, String version) {
return new File(getJarName(branch, version)).exists();
}
/**
* if there is a single instance - just restart it ...
*
* @throws IOException
* e
* @throws URISyntaxException
* e
* @throws InterruptedException
* e
*
*/
public synchronized void restart(String id) throws IOException, URISyntaxException, InterruptedException {
log.info("restarting process {}", id);
kill(id); // FIXME - kill should include prepare to shutdown ...
sleep(2000);
spawnId(id);
}
/**
* For respawning an existing ProcessData object
*
* @param id
*/
public void spawnId(String id) {
try {
if (processes.containsKey(id)) {
spawn(processes.get(id));
} else {
log.error("agent does not know about process id {}", id);
}
} catch (Exception e) {
log.error("spawn({}) threw ", id, e);
}
}
/**
* Will spawn a new process of mrl using defaults.
*
* @return
* @throws IOException
* @throws URISyntaxException
* @throws InterruptedException
*/
public Process spawn() throws IOException, URISyntaxException, InterruptedException {
CmdOptions options = new CmdOptions();
new CommandLine(options).parseArgs(new String[] {});
return spawn(options);
}
/**
* Spawn a process given an argument command line in single-string form
*
* @param args
* @return
* @throws IOException
* @throws URISyntaxException
* @throws InterruptedException
*/
public Process spawn(String args) throws IOException, URISyntaxException, InterruptedException {
CmdOptions options = new CmdOptions();
new CommandLine(options).parseArgs(args.split(" "));
return spawn(options);
}
/**
* return a non-running process structure from an existing one with a new id
*
* @param id
* id
* @return process data
*
*/
public ProcessData copy(String id) {
if (!processes.containsKey(id)) {
log.error("cannot copy %s does not exist", id);
return null;
}
ProcessData pd = processes.get(id);
ProcessData pd2 = new ProcessData(pd);
pd2.startTs = null;
pd2.stopTs = null;
String[] parts = id.split("\\.");
if (parts.length == 4) {
try {
int instance = Integer.parseInt(parts[3]);
++instance;
// apply the incremented instance suffix to the new process id
pd2.options.id = parts[0] + "." + parts[1] + "." + parts[2] + "." + instance;
} catch (Exception e) {
}
} else {
pd2.options.id = id + ".0";
}
processes.put(pd2.options.id, pd2);
if (agent != null) {
agent.broadcastState();
}
return pd2;
}
/**
* Copies an existing ProcessData, makes a new id and spawns it.
*
* @param id
* @throws IOException
*/
public void copyAndStart(String id) throws IOException {
// returns a non running copy with new process id
// on the processes list
ProcessData pd2 = copy(id);
spawn(pd2);
if (agent != null) {
agent.broadcastState();
}
}
/**
* get the current branches being built in a Jenkins multi-branch pipeline job
*
* @return
*/
static public Set<String> getBranches() {
Set<String> possibleBranches = new TreeSet<String>();
try {
byte[] r = Http.get(REMOTE_BUILDS_URL_HOME + REMOTE_MULTI_BRANCH_JOBS);
if (r != null) {
String json = new String(r);
CodecJson decoder = new CodecJson();
WorkflowMultiBranchProject project = (WorkflowMultiBranchProject) decoder.decode(json, WorkflowMultiBranchProject.class);
for (WorkflowJob job : project.jobs) {
possibleBranches.add(job.name);
}
}
} catch (Exception e) {
log.error("getRemoteBranches threw", e);
}
return possibleBranches;
}
/**
* Used to compare semantic versions
*
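* For example (the "unknown" development value is handled separately via
* unknownIsGreatest):
*
* <pre>
* isGreaterThan("1.1.120", "1.1.19") // true - numeric, not lexicographic
* isGreaterThan("1.1.19", "1.1.120") // false
* </pre>
*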
* @param version1
* @param version2
* @return
* @throws MrlException
*/
boolean isGreaterThan(String version1, String version2) throws MrlException {
if (version1 == null) {
return false;
}
if (version2 == null) {
return true;
}
// special development behavior
if (version1.equals("unknown")) {
return (unknownIsGreatest) ? true : false;
}
if (version2.equals("unknown")) {
return !((unknownIsGreatest) ? true : false);
}
String[] parts1 = version1.split("\\.");
String[] parts2 = version2.split("\\.");
if (parts1.length != 3 || parts2.length != 3) {
throw new MrlException("invalid version isGreaterThan(%s, %s)", version1, version2);
}
for (int i = 0; i < 3; ++i) {
int v1 = Integer.parseInt(parts1[i]);
int v2 = Integer.parseInt(parts2[i]);
if (v1 != v2) {
return v1 > v2;
}
}
throw new MrlException("invalid isGreaterThan(%s, %s)", version1, version2);
}
/**
* This method gets all the version on a particular branch, if allowed remote
* access it will ask the build server what successful builds exist
*
* @param branch
* @param allowRemote
* @return
*/
synchronized public Set<String> getVersions(String branch, Boolean allowRemote) {
Set<String> versions = new TreeSet<String>();
versions.addAll(getLocalVersions(branch));
if (allowRemote) {
versions.addAll(getRemoteVersions(branch));
}
if (versions.size() != possibleVersions.size()) {
possibleVersions = versions;
broadcastState();
}
return versions;
}
/**
* Get remote versions from jenkins
*
* @param branch
* @return
*/
public Set<String> getRemoteVersions(String branch) {
Set<String> versions = new TreeSet<String>();
try {
byte[] data = Http.get(String.format(REMOTE_BUILDS_URL_HOME + REMOTE_BUILDS_URL, branch));
if (data != null) {
CodecJson decoder = new CodecJson();
String json = new String(data);
WorkflowJob job = (WorkflowJob) decoder.decode(json, WorkflowJob.class);
if (job.builds != null) {
for (WorkflowRun build : job.builds) {
if ("SUCCESS".equals(build.result)) {
versions.add(versionPrefix + build.id);
}
}
}
}
} catch (Exception e) {
error(e);
}
return versions;
}
/**
   * Checks the branches directory for the latest local version of the desired branch.
*
* @param branch
* @return
* @throws MrlException
*/
public String getLatestLocalVersion(String branch) throws MrlException {
Set<String> allLocal = getLocalVersions(branch);
String latest = null;
for (String version : allLocal) {
if (latest == null) {
latest = version;
continue;
}
if (isGreaterThan(version, latest)) {
latest = version;
}
}
return latest;
}
/**
   * A unique version is identified by both {branch}-{version}. This finds all
   * currently available local branch/version pairs.
*
* @return
*/
public Set<String> getLocalVersions() {
Set<String> versions = new TreeSet<>();
// get local file system versions
File branchDir = new File(BRANCHES_ROOT);
// get local existing versions
File[] listOfFiles = branchDir.listFiles();
for (int i = 0; i < listOfFiles.length; ++i) {
File file = listOfFiles[i];
if (file.isDirectory()) {
// if (file.getName().startsWith(branch)) {
// String version = file.getName().substring(branch.length() + 1);//
// getFileVersion(file.getName());
// if (version != null) {
        int pos = file.getName().lastIndexOf("-");
        if (pos < 1) {
          continue;
        }
        String branchAndVersion = file.getName().substring(0, pos) + " " + file.getName().substring(pos + 1);
versions.add(branchAndVersion);
}
}
return versions;
}
/**
* Get the local versions available for the selected branch.
*
* @param branch
* @return
*/
public Set<String> getLocalVersions(String branch) {
Set<String> versions = new TreeSet<>();
// get local file system versions
File branchDir = new File(BRANCHES_ROOT);
// get local existing versions
File[] listOfFiles = branchDir.listFiles();
for (int i = 0; i < listOfFiles.length; ++i) {
File file = listOfFiles[i];
if (file.isDirectory()) {
if (file.getName().startsWith(branch)) {
String version = file.getName().substring(branch.length() + 1);// getFileVersion(file.getName());
if (version != null) {
versions.add(version);
}
}
}
}
return versions;
}
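  /*
   * Illustrative example (assumption, consistent with the {branch}-{version} naming
   * described above): with a local layout of
   *
   *   branches/develop-1.1.465/
   *   branches/develop-1.1.470/
   *   branches/master-1.1.107/
   *
   * getLocalVersions("develop") returns {"1.1.465", "1.1.470"} and the no-arg
   * getLocalVersions() returns {"develop 1.1.465", "develop 1.1.470", "master 1.1.107"}.
   */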
/**
* get a list of all the processes currently governed by this Agent
*
* @return hash map, int to process data
*/
public Map<String, ProcessData> getProcesses() {
return processes;
}
/**
* Kills requested process.
*
* @param id
* @return
*/
public String kill(String id) {
// FIXME !!! - "ask" all child processes to kindly Runtime.shutdown via msgs
if (processes.containsKey(id)) {
if (agent != null) {
agent.info("terminating %s", id);
}
ProcessData process = processes.get(id);
process.process.destroy();
process.state = ProcessData.stateType.stopped;
if (process.monitor != null) {
process.monitor.interrupt();
process.monitor = null;
}
// remove(processes.get(name));
if (agent != null) {
agent.info("%s haz beeen terminated", id);
agent.broadcastState();
}
return id;
}
error("kill unknown process id {}", id);
return null;
}
/**
* kill all processes
*/
public void killAll() {
// FIXME !!! - "ask" all child processes to kindly Runtime.shutdown via msgs
for (String id : processes.keySet()) {
kill(id);
}
log.info("no survivors sir...");
if (agent != null) {
agent.broadcastState();
}
}
/**
* kills and clears
*
* @param id
*/
public void killAndRemove(String id) {
if (processes.containsKey(id)) {
kill(id);
processes.remove(id);
if (agent != null) {
agent.broadcastState();
}
}
}
/**
* list processes
*
* @return lp ?
*/
public String[] lp() {
Object[] objs = processes.keySet().toArray();
String[] pd = new String[objs.length];
for (int i = 0; i < objs.length; ++i) {
String id = (String) objs[i];
ProcessData p = processes.get(id);
pd[i] = String.format("%s - %s [%s - %s]", id, p.options.id, p.options.branch, p.options.version);
}
return pd;
}
/**
* Publishing point when a process dies.
*
* @param id
* @return
*/
public String publishTerminated(String id) {
log.info("publishTerminated - terminated {} - restarting", id);
if (!processes.containsKey(id)) {
log.error("processes {} not found");
return id;
}
    // if not forked - i.e. the Agent is not allowed to exist
    // without child instances - check whether we should shut down
if (!options.fork) {
// spin through instances - if I'm the only
// thing left - terminate
boolean processesStillRunning = false;
for (ProcessData pd : processes.values()) {
if (pd.isRunning() || pd.isRestarting()) {
processesStillRunning = true;
break;
}
}
if (!processesStillRunning) {
shutdown();
}
}
if (agent != null) {
agent.broadcastState();
}
return id;
}
/**
* Max complexity spawn - with all possible options - this will create a
* ProcessData object and send it to spawn. ProcessData contains all the
* unique data related to starting an instance.
*
* Convert command line parameter options into a ProcessData which can be
* spawned
*
* @param inOptions
* - cmd options
* @return a process
* @throws IOException
* @throws URISyntaxException
* @throws InterruptedException
*/
public Process spawn(CmdOptions inOptions) throws IOException, URISyntaxException, InterruptedException {
if (ProcessData.agent == null) {
ProcessData.agent = this;
}
// create a ProcessData then spawn it !
ProcessData pd = new ProcessData();
pd.options = inOptions;
CmdOptions options = pd.options;
if (options.id == null) {
options.id = NameGenerator.getName();
}
if (options.branch == null) {
options.branch = Platform.getLocalInstance().getBranch();
}
if (options.version == null) {
try {
options.version = getLatestVersion(options.branch, autoUpdate);
} catch (Exception e) {
log.error("getDir threw", e);
}
}
pd.jarPath = new File(getJarName(options.branch, options.version)).getAbsolutePath();
// javaExe
String fs = File.separator;
Platform platform = Platform.getLocalInstance();
String exeName = platform.isWindows() ? "javaw" : "java";
pd.javaExe = String.format("%s%sbin%s%s", System.getProperty("java.home"), fs, fs, exeName);
String jvmArgs = "-Djava.library.path=libraries/native -Djna.library.path=libraries/native -Dfile.encoding=UTF-8";
if (pd.options.memory != null) {
jvmArgs += String.format(" -Xms%s -Xmx%s ", pd.options.memory, pd.options.memory);
}
pd.jvm = jvmArgs.split(" ");
// user override
if (options.jvm != null) {
pd.jvm = options.jvm.split(" ");
}
if (options.services.size() == 0) {
options.services.add("log");
options.services.add("Log");
options.services.add("cli");
options.services.add("Cli");
options.services.add("gui");
options.services.add("SwingGui");
options.services.add("python");
options.services.add("Python");
}
return spawn(pd);
}
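  /*
   * Illustrative sketch (assumption - CmdOptions field names taken from their use in
   * this class): spawning an instance from explicit options rather than the command line.
   *
   *   CmdOptions opts = new CmdOptions();
   *   opts.id = "worker-1";          // otherwise NameGenerator assigns a name
   *   opts.branch = "develop";       // otherwise the agent's local branch is used
   *   opts.version = "1.1.465";      // otherwise the latest known version is used
   *   opts.services.add("webgui");   // name / type pairs, like the defaults above
   *   opts.services.add("WebGui");
   *   Process p = agent.spawn(opts);
   */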
public String setBranch(String branch) {
currentBranch = branch;
return currentBranch;
}
static public Map<String, String> setEnv(Map<String, String> env) {
Platform platform = Platform.getLocalInstance();
String platformId = platform.getPlatformId();
if (platform.isLinux()) {
String ldPath = String.format("'pwd'/libraries/native:'pwd'/libraries/native/%s:${LD_LIBRARY_PATH}", platformId);
env.put("LD_LIBRARY_PATH", ldPath);
} else if (platform.isMac()) {
String dyPath = String.format("'pwd'/libraries/native:'pwd'/libraries/native/%s:${DYLD_LIBRARY_PATH}", platformId);
env.put("DYLD_LIBRARY_PATH", dyPath);
} else if (platform.isWindows()) {
// this just borks the path in Windows - additionally (unlike Linux)
// - i don't think you need native code on the PATH
// and Windows does not have a LD_LIBRARY_PATH
// String path =
// String.format("PATH=%%CD%%\\libraries\\native;PATH=%%CD%%\\libraries\\native\\%s;%%PATH%%",
// platformId);
// env.put("PATH", path);
// we need to sanitize against a non-ascii username
// work around for Jython bug in 2.7.0...
env.put("APPDATA", "%%CD%%");
} else {
log.error("unkown operating system");
}
return env;
}
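  /*
   * Illustrative sketch (not original source): setEnv is applied to a ProcessBuilder's
   * mutable environment map before the child process is started, as spawn() does below.
   *
   *   ProcessBuilder builder = new ProcessBuilder("java", "-version");
   *   setEnv(builder.environment()); // adds LD_LIBRARY_PATH / DYLD_LIBRARY_PATH / APPDATA
   *   Process p = builder.start();
   */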
/**
* Kills all connected processes, then shuts down itself. FIXME - should send
* shutdown to other processes instead of killing them
*/
public void shutdown() {
log.info("terminating others");
killAll();
log.info("terminating self ... goodbye...");
Runtime.shutdown();
}
/**
* Constructs a command line from a ProcessData object which can directly be
* run to spawn a new instance of mrl
*
* FIXME is this ProcessData.toString()
*
* @param pd
* @return
*/
public String[] buildCmdLine(ProcessData pd) {
// command line to be returned
List<String> cmd = new ArrayList<String>();
cmd.add(pd.javaExe);
if (pd.jvm != null) {
for (int i = 0; i < pd.jvm.length; ++i) {
cmd.add(pd.jvm[i]);
}
}
cmd.add("-cp");
// step 1 - get current env data
Platform platform = Platform.getLocalInstance();
/**
   * The final spawn - all other parameter forms are converted into a ProcessData,
   * which this method uses to start the process.
*
* @param pd
* @return
* @throws IOException
*/
public synchronized Process spawn(ProcessData pd) throws IOException {
log.info("============== spawn begin ==============");
// this needs cmdLine
String[] cmdLine = buildCmdLine(pd);
ProcessBuilder builder = new ProcessBuilder(cmdLine);
// handle stderr as a direct pass through to System.err
builder.redirectErrorStream(true);
// setting working directory to wherever the jar is...
String spawnDir = new File(pd.jarPath).getParent();
builder.directory(new File(spawnDir));
log.info("cd {}", spawnDir);
StringBuilder spawning = new StringBuilder();
for (String c : cmdLine) {
spawning.append(c);
spawning.append(" ");
}
log.info("SPAWNING ! --> [{}]", spawning);
// environment variables setup
setEnv(builder.environment());
Process process = builder.start();
pd.process = process;
pd.startTs = System.currentTimeMillis();
pd.monitor = new ProcessData.Monitor(pd);
pd.monitor.start();
pd.state = ProcessData.stateType.running;
if (pd.options.id == null) {
log.error("id should not be null!");
}
if (processes.containsKey(pd.options.id)) {
if (agent != null) {
agent.info("restarting %s", pd.options.id);
}
} else {
if (agent != null) {
agent.info("starting new %s", pd.options.id);
}
processes.put(pd.options.id, pd);
}
log.info("Agent finished spawn {}", formatter.format(new Date()));
if (agent != null) {
Cli cli = Runtime.getCli();
cli.add(pd.options.id, process.getInputStream(), process.getOutputStream());
cli.attach(pd.options.id);
agent.broadcastState();
}
return process;
}
/**
* DEPRECATE ? spawn should do this checking ?
*
* @param id
* i
* @throws IOException
* e
* @throws URISyntaxException
* e
* @throws InterruptedException
* e
*
*/
public void start(String id) throws IOException, URISyntaxException, InterruptedException {
if (!processes.containsKey(id)) {
log.error("start process %s can not start - process does not exist", id);
return;
}
ProcessData p = processes.get(id);
if (p.isRunning()) {
log.warn("process %s already started", id);
return;
}
spawn(p);
}
/**
* This static method returns all the details of the class without it having
* to be constructed. It has description, categories, dependencies, and peer
* definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(Agent.class.getCanonicalName());
meta.addDescription("responsible for spawning a MRL process. Agent can also terminate, respawn and control the spawned process");
meta.addCategory("framework");
meta.setSponsor("GroG");
meta.setLicenseApache();
meta.includeServiceInOneJar(true);
return meta;
}
/**
* First method JVM executes when myrobotlab.jar is in jar form.
*
* --agent "--logLevel DEBUG --service webgui WebGui"
*
* @param args
* args
*/
public static void main(String[] args) {
try {
options = new CmdOptions();
// for Callable version ...
// int exitCode = new CommandLine(options).execute(args);
new CommandLine(options).parseArgs(args);
if (options.help) {
Runtime.mainHelp();
return;
}
// String[] agentArgs = new String[] { "--id", "agent-" +
// NameGenerator.getName(), "-l", "WARN"};
List<String> agentArgs = new ArrayList<>();
if (options.agent != null) {
agentArgs.addAll(Arrays.asList(options.agent.split(" ")));
} else {
agentArgs.add("--id");
agentArgs.add("agent-" + NameGenerator.getName());
agentArgs.add("-s");
agentArgs.add("agent");
agentArgs.add("Agent");
agentArgs.add("cli");
agentArgs.add("Cli");
agentArgs.add("security");
agentArgs.add("Security");
agentArgs.add("--log-level");
agentArgs.add(options.logLevel);
// agentArgs.add("webgui"); FIXME - soon .. but not yet ...
// agentArgs.add("WebGui");
}
Process p = null;
if (!options.noBanner) {
System.out.println(banner);
System.out.println("");
}
log.info("user args {}", Arrays.toString(args));
log.info("agent args {}", Arrays.toString(agentArgs.toArray()));
Runtime.main(agentArgs.toArray(new String[agentArgs.size()]));
agent = (Agent) Runtime.getService("agent");
/*
* if (agent == null) { agent = (Agent) Runtime.start("agent", "Agent");
* agent.options = options; }
*/
if (options.listVersions) {
System.out.println("available local versions");
for (String bv : agent.getLocalVersions()) {
System.out.println(bv);
}
agent.shutdown();
}
if ("".equals(options.version)) {
Map<String, String> manifest = Platform.getManifest();
System.out.println("manifest");
for (String name : manifest.keySet()) {
System.out.println(String.format("%s=%s", name, manifest.get(name)));
}
agent.shutdown();
}
Platform platform = Platform.getLocalInstance();
if (options.branch == null) {
options.branch = platform.getBranch();
}
if (options.version == null) {
options.version = platform.getVersion();
}
agent.setBranch(options.branch);
agent.setVersion(options.version);
// FIXME - have a list versions ... command line !!!
// FIXME - the most common use case is the version of the spawned instance
// if that is the case its needed to determine what is the "proposed"
// branch & version if no
// special command parameters were given
// FIXME HELP !!!! :D
// if (cmdline.containsKey("-h") || cmdline.containsKey("--help")) {
// // FIXME - add all possible command descriptions ..
// System.out.println(String.format("%s branch %s version %s",
// platform.getBranch(), platform.getPlatformId(),
// platform.getVersion()));
// return;
if (options.webgui != null) {
agent.startWebGui(options.webgui);
}
// the user set auto-update to true
if (options.autoUpdate) {
// options.fork = true;
// lets check and get the latest jar if there is new one
if (options.src == null) {
// get the latest from Jenkins
agent.getLatestJar(agent.getBranch());
} else {
// get the latest from GitHub
agent.getLatestSrc(agent.getBranch());
}
// the "latest" should have been downloaded
options.version = agent.getLatestLocalVersion(agent.getBranch());
}
// FIXME - use wsclient for remote access
if (options.client != null) {
Runtime.start("cli", "Cli");
return;
}
// TODO - build command line ...
// FIXME - if another instances is spawned agent should wait for all
// instances to stop
// list of flags we want to by-pass spawning
      if ((options.fork && options.services.size() > 0) || !options.fork) {
p = agent.spawn(options); // <-- agent's is now in charge of first
}
// we start a timer to process future updates
if (options.autoUpdate) {
// if you update a single process
// it kills it and restarts - and this will kill the
// agent unless its forked
options.fork = true;
agent.autoUpdate(true);
}
if (options.install != null) {
// wait for mrl instance to finish installing
// then shutdown (addendum: check if supporting other processes)
p.waitFor();
agent.shutdown();
}
} catch (Exception e) {
log.error("unsuccessful spawn", e);
}
}
public String getLatestSrc(String branch) throws WrongRepositoryStateException, InvalidConfigurationException, DetachedHeadException, InvalidRemoteException, CanceledException, RefNotFoundException, NoHeadException, TransportException, IOException, GitAPIException {
Runtime.getInstance();
Agent agent = (Agent) Runtime.start("agent", "Agent");
RevCommit latestCommit = agent.gitPull(branch);
if (latestCommit != null) {
      String version = agent.mvn(null, branch, (long) latestCommit.getCommitTime()); // commit time is already epoch seconds
return version;
}
return null;
}
public String getBranch() {
return currentBranch;
}
public String getVersion() {
return currentVersion;
}
public String setVersion(String version) {
currentVersion = version;
return version;
}
public String mvn(String branch) {
return mvn(null, branch, null);
}
// mvn -DskipTests -Dbuild.number=71 -DGitBranch=develop clean package -o
public String mvn(String src, String branch, Long buildNumber) {
try {
if (src == null) {
src = branch + ".src";
}
if (buildNumber == null) {
// epoch minute build time number
buildNumber = System.currentTimeMillis() / 1000;
}
String version = versionPrefix + buildNumber;
Platform platform = Platform.getLocalInstance();
List<String> cmd = new ArrayList<>();
String pathToPom = src + File.separator + "pom.xml";
cmd.add((platform.isWindows()) ? "cmd" : "bash");
cmd.add((platform.isWindows()) ? "/c" : "-c");
cmd.add((platform.isWindows()) ? "mvn" : "mvn"); // huh .. thought it was
// mvn.bat
cmd.add("-DskipTests");
cmd.add("-Dbuild.number=" + buildNumber);
cmd.add("-DGitBranch=" + branch);
cmd.add("compile");
cmd.add("prepare-package");
cmd.add("package");
// cmd.add("-f");
// cmd.add(pathToPom);
// cmd.add("-o"); // offline
StringBuilder sb = new StringBuilder();
for (String c : cmd) {
sb.append(c);
sb.append(" ");
}
// src path ..
log.info("build [{}]", sb);
// ProcessBuilder pb = new
// ProcessBuilder("mvn","exec:java","-Dexec.mainClass="+"FunnyClass");
ProcessBuilder pb = new ProcessBuilder(cmd);
pb.directory(new File(src));
// handle stderr as a direct pass through to System.err
pb.redirectErrorStream(true);
// pb.environment().putAll(System.getenv());
pb.inheritIO().start().waitFor();
// FIXME LOOK FOR --> "BUILD FAILURE"
String newJar = src + File.separator + "target" + File.separator + "myrobotlab.jar";
String newJarLoc = getJarName(branch, version);
File p = new File(newJarLoc).getAbsoluteFile().getParentFile();
p.mkdirs();
Files.move(Paths.get(newJar), Paths.get(newJarLoc));
return buildNumber + "";
} catch (Exception e) {
log.error("mvn threw", e);
}
return null;
}
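  /*
   * Illustrative example (derived from the code above, assuming versionPrefix "1.1."):
   * mvn(null, "develop", 465L) assembles roughly
   *
   *   bash -c mvn -DskipTests -Dbuild.number=465 -DGitBranch=develop compile prepare-package package
   *
   * (cmd /c on Windows), runs it in the develop.src checkout, and then moves
   * develop.src/target/myrobotlab.jar to the location returned by getJarName("develop", "1.1.465").
   */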
public RevCommit gitPull(String branch) throws WrongRepositoryStateException, InvalidConfigurationException, DetachedHeadException, InvalidRemoteException, CanceledException,
RefNotFoundException, NoHeadException, TransportException, IOException, GitAPIException {
return gitPull(null, branch);
}
public RevCommit gitPull(String src, String branch) throws IOException, WrongRepositoryStateException, InvalidConfigurationException, DetachedHeadException, InvalidRemoteException,
CanceledException, RefNotFoundException, NoHeadException, TransportException, GitAPIException {
if (branch == null) {
branch = currentBranch;
}
if (src == null) {
src = branch + ".src";
}
List<String> branches = new ArrayList<String>();
branches.add("refs/heads/" + branch);
File repoParentFolder = new File(src);
Git git = null;
TextProgressMonitor textmonitor = new TextProgressMonitor(new PrintWriter(System.out));
Repository repo = null;
if (!repoParentFolder.exists()) {
// String branch = "master";
git = Git.cloneRepository().setProgressMonitor(textmonitor).setURI("https://github.com/MyRobotLab/myrobotlab.git").setDirectory(new File(src)).setBranchesToClone(branches)
.setBranch("refs/heads/" + branch).call();
} else {
// Open an existing repository
String gitDir = repoParentFolder.getAbsolutePath() + "/.git";
repo = new FileRepositoryBuilder().setGitDir(new File(gitDir)).build();
git = new Git(repo);
}
repo = git.getRepository();
/**
* <pre>
* CheckoutCommand checkout = git.checkout().setCreateBranch(true).setName(branch).setUpstreamMode(CreateBranchCommand.SetupUpstreamMode.TRACK).setStartPoint("origin/" + branch)
* .call();
* </pre>
*/
// git.pull().setCredentialsProvider(user).call();
// FIXME if currentBranch != branch - then checkout .. set current branch
if (!branch.equals(currentBranch)) {
git.branchCreate().setForce(true).setName(branch).setStartPoint("origin/" + branch).call();
git.checkout().setName(branch).call();
}
// FIXME - if auto-update or auto-fetch ie .. remote allowed and cache remote changes
git.fetch().setProgressMonitor(new TextProgressMonitor(new PrintWriter(System.out))).call();
List<RevCommit> localLogs = getLogs(git, "origin/" + branch, 1);
List<RevCommit> remoteLogs = getLogs(git, "remotes/origin/" + branch, 1);
BranchTrackingStatus status = BranchTrackingStatus.of(repo, branch);
RevCommit localCommit = localLogs.get(0);
RevCommit remoteCommit = remoteLogs.get(0);
// if (localCommit.getCommitTime() < remoteCommit.getCommitTime()) {
if (status.getBehindCount() > 0) {
log.info("local ts {}, remote {} - {} updating", localCommit.getCommitTime(), remoteCommit.getCommitTime(), remoteCommit.getFullMessage());
PullCommand pullCmd = git.pull();
pullCmd.setProgressMonitor(textmonitor);
pullCmd.call();
return remoteCommit;
} else {
log.info("no new commits on branch {}", branch);
}
return null;
}
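  /*
   * Illustrative sketch (not original source): the pull decision above uses JGit's
   * BranchTrackingStatus after a fetch - the local branch is only pulled when it is
   * behind its remote tracking branch.
   *
   *   BranchTrackingStatus status = BranchTrackingStatus.of(repo, "develop");
   *   if (status != null && status.getBehindCount() > 0) { // null if no upstream is configured
   *     git.pull().call();
   *   }
   */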
private List<RevCommit> getLogs(Git git, String ref, int maxCount)
throws RevisionSyntaxException, NoHeadException, MissingObjectException, IncorrectObjectTypeException, AmbiguousObjectException, GitAPIException, IOException {
List<RevCommit> ret = new ArrayList<>();
Repository repository = git.getRepository();
Iterable<RevCommit> logs = git.log().setMaxCount(maxCount).add(repository.resolve(ref)).call();
int count = 0;
for (RevCommit rev : logs) {
System.out.println(
rev /*
* + ", name: " + rev.getName() + ", id: " + rev.getId().getName()
*/);
count++;
ret.add(rev);
}
System.out.println("Had " + count + " " + ref);
return ret;
}
/**
* Populate all the files to update, if the system should update.
*
* @throws GitAPIException
* @throws IOException
* @throws IncorrectObjectTypeException
* @throws AmbiguousObjectException
* @throws RevisionSyntaxException
*/
/*
* private List<DiffEntry> populateDiff(Git git) throws GitAPIException,
* RevisionSyntaxException, AmbiguousObjectException,
* IncorrectObjectTypeException, IOException {
*
* git.fetch().setProgressMonitor(new TextProgressMonitor(new
* PrintWriter(System.out))).call(); Repository repo = git.getRepository();
* ObjectId fetchHead = repo.resolve("FETCH_HEAD^{tree}"); ObjectId head =
* repo.resolve("HEAD^{tree}");
*
* ObjectReader reader = repo.newObjectReader(); CanonicalTreeParser
* oldTreeIter = new CanonicalTreeParser(); oldTreeIter.reset(reader, head);
* CanonicalTreeParser newTreeIter = new CanonicalTreeParser();
* newTreeIter.reset(reader, fetchHead); List<DiffEntry> diffs =
* git.diff().setShowNameAndStatusOnly(true) .setNewTree(newTreeIter)
* .setOldTree(oldTreeIter) .call();
*
* return diffs; }
*/
// FIXME - move to enums for status level !
public void updateLog(String level, String msg) {
if (updateLog.size() > 100) {
updateLog.remove(updateLog.size() - 1);
}
if ("info".equals(level)) {
updateLog.add(Status.info((new Date()).toString() + " " + msg));
} else if ("error".equals(level)) {
updateLog.add(Status.error((new Date()).toString() + " " + msg));
}
}
}
|
package org.myrobotlab.service;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import org.myrobotlab.cmdline.CmdLine;
import org.myrobotlab.codec.CodecJson;
import org.myrobotlab.codec.CodecUtils;
import org.myrobotlab.framework.MrlException;
import org.myrobotlab.framework.Platform;
import org.myrobotlab.framework.ProcessData;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.ServiceType;
import org.myrobotlab.framework.Status;
import org.myrobotlab.framework.repo.GitHub;
import org.myrobotlab.framework.repo.Repo;
import org.myrobotlab.framework.repo.ServiceData;
import org.myrobotlab.image.Util;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.Logging;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.net.Http;
import org.slf4j.Logger;
import com.google.gson.internal.LinkedTreeMap;
/**
*
* @author GroG
*
* Agent is responsible for managing running instances of myrobotlab. It
* can start, stop and update myrobotlab.
*
*
* FIXME - all update functionality will need to be moved to Runtime it
* should take parameters such that it will be possible at some point to
* do an update from a child process and update the agent :)
*
 *         FIXME - testing test - without version test - remote inaccessible
* FIXME - spawn must be synchronized 2 threads (the timer and the user)
*
*
*/
public class Agent extends Service {
private static final long serialVersionUID = 1L;
public final static Logger log = LoggerFactory.getLogger(Agent.class);
final Map<String, ProcessData> processes = new ConcurrentHashMap<String, ProcessData>();
transient static SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd.HHmmssSSS");
Platform platform = Platform.getLocalInstance();
/**
   * command line to be relayed to the first process the Agent spawns
*/
static CmdLine cmdline;
transient WebGui webgui = null;
int port = 8887;
String address = "127.0.0.1";
/**
* command line for the Agent process
*/
static CmdLine agentCmdline;
String branchAgent = Platform.getLocalInstance().getBranch();
String branchLast = null;
static String branchRequested = null;
String versionAgent = Platform.getLocalInstance().getVersion();
String versionLast = null;
String versionLatest = null;
static String versionRequested = null;
/**
   * auto update - automatically checks for updates and WILL update any running
   * mrl instance
*/
static boolean autoUpdate = false;
/**
   * autoCheckForUpdate - automatically checks for updates after some interval
   * but does not apply them - it publishes events when new updates are
   * available but does not update
*/
static boolean autoCheckForUpdate = false;
static HashSet<String> possibleVersions = new HashSet<String>();
// for more info -
@Deprecated /* not needed - use more general urls with filter functions */
final static String REMOTE_LAST_SUCCESSFUL_BUILD_JAR = "http://build.myrobotlab.org:8080/job/myrobotlab-multibranch/job/%s/lastSuccessfulBuild/artifact/target/myrobotlab.jar";
final static String REMOTE_BUILDS_URL = "http://build.myrobotlab.org:8080/job/myrobotlab-multibranch/job/%s/api/json?tree=builds[number,status,timestamp,id,result]";
final static String REMOTE_JAR_URL = "http://build.myrobotlab.org:8080/job/myrobotlab-multibranch/job/%s/%s/artifact/target/myrobotlab.jar";
@Deprecated /* not needed - use more general urls with filter functions */
final static String REMOTE_LAST_SUCCESSFUL_VERSION = "http://build.myrobotlab.org:8080/job/myrobotlab-multibranch/job/%s/api/json?tree=lastSuccessfulBuild[number,status,timestamp,id,result]";
boolean checkRemoteVersions = false;
String versionPrefix = "1.1.";
/**
* singleton for security purposes
*/
static Agent agent;
String rootBranchDir = "branches";
/**
* development variable to force version "unknown" to be either greatest or
* smallest version for development
*/
private static boolean unknownIsGreatest = false;
public static class WorkflowJob {
WorkflowRun lastSuccessfulBuild;
WorkflowRun[] builds;
}
public static class WorkflowRun {
String id;
Integer number;
String result;
Long timestamp;
}
public static String BRANCHES_ROOT = "branches";
public Agent(String n) throws IOException {
super(n);
log.info("Agent {} Pid {} is alive", n, Platform.getLocalInstance().getPid());
if (branchRequested == null) {
branchRequested = branchAgent;
}
// basic setup - minimally we make a directory
// and instance folder of the same branch & version as the
// agent jar
setup();
// user has decided to look for updates ..
if (autoUpdate || checkRemoteVersions) {
invoke("getPossibleVersions", branchAgent);
}
}
public String getDir(String branch, String version) {
if (branch == null) {
branch = branchAgent; // FIXME - or lastBranch ? or currentBranch !!!
}
if (version == null) {
try {
version = getLatestVersion(branch, autoUpdate);
} catch (Exception e) {
log.error("getDir threw", e);
}
}
return BRANCHES_ROOT + File.separator + branch + "-" + version;
}
public String getFilePath(String branch, String version) {
return getDir(branch, version) + File.separator + "myrobotlab.jar";
}
private void setup() throws IOException {
    // FIXME - this stuff needs to be outside the constructor !!!
// initialize perhaps ? setup ? oneTime ? initialInstall ?
// location of the agent's branch (and version)
String agentVersionPath = getDir(branchAgent, versionAgent);
if (!new File(agentVersionPath).exists()) {
File branchDir = new File(agentVersionPath);
branchDir.mkdirs();
}
String agentMyRobotLabJar = getFilePath(branchAgent, versionAgent);
if (!new File(agentMyRobotLabJar).exists()) {
String agentJar = new java.io.File(Agent.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getName();
if (!new File(agentJar).exists() || !agentJar.endsWith(".jar")) {
// not operating in released runtime mode - probably operating in ide
String ideTargetJar = new File(System.getProperty("user.dir") + File.separator + "target" + File.separator + "myrobotlab.jar").getAbsolutePath();
if (!new File(ideTargetJar).exists()) {
error("no source agent jar can be found checked:\n%s\n%s\nare you using ide? please package a build (mvn package -DskipTest)", agentJar, ideTargetJar);
} else {
agentJar = ideTargetJar;
}
}
log.info("on branch {} copying agent's current jar to appropriate location {} -> {}", branchRequested, agentJar, agentMyRobotLabJar);
Files.copy(Paths.get(agentJar), Paths.get(agentMyRobotLabJar), StandardCopyOption.REPLACE_EXISTING);
}
}
public void startWebGui() {
try {
if (webgui == null) {
webgui = (WebGui) Runtime.create("webadmin", "WebGui");
webgui.autoStartBrowser(false);
webgui.setPort(port);
webgui.setAddress(address);
webgui.startService();
} else {
log.info("webgui already started");
}
} catch (Exception e) {
log.error("startWebGui threw", e);
}
}
public void autoUpdate(boolean b) {
if (b) {
addTask("processUpdates", 1000 * 60, 0, "processUpdates");
} else {
purgeTask("processUpdates");
}
}
public void processUpdates() {
log.error("processUpdates implement me ...");
}
/**
* max complexity method to process and update
*
* @throws IOException
* e
* @throws URISyntaxException
* e
* @throws InterruptedException
* e
* @throws MrlException
*
*/
synchronized public void processUpdates(String id, String branch, String version, Boolean allowRemote)
throws IOException, URISyntaxException, InterruptedException, MrlException {
getLatest(branch);
/**
* <pre>
* for all running instances - see if they can be updated ... on their
* appropriate branch - restart if necessary
*/
for (String key : processes.keySet()) {
ProcessData process = processes.get(key);
if (!process.autoUpdate) {
continue;
}
if (!branchAgent.equals(process.branch)) {
log.info("skipping update of {} because its on branch {}", process.id, process.branch);
continue;
}
if (version.equals(process.version)) {
log.info("skipping update of {} {} because its already version {}", process.id, process.name, process.version);
continue;
}
// FIXME - it would be nice to send a SIG_TERM to
// the process before we kill the jvm
// process.process.getOutputStream().write("/Runtime/releaseAll".getBytes());
process.version = version;
if (process.isRunning()) {
restart(process.id);
}
}
}
public void getLatest(String branch) {
try {
// check for updates
String version = getLatestVersion(branch, autoUpdate);
// check if branch and version exist locally
if (!existsLocally(branch, version)) {
getJar(branch, version); // FIXME - make part file .unconfirmed
// download latest to the appropriate directory
// mkdirs
// download file
if (!verifyJar(branch, version)) {
// removeJar(branch, version + ".unconfirmed");
}
}
} catch (Exception e) {
error(e);
}
}
// FIXME - implement
private boolean verifyJar(String branch, String version) {
return true;
}
public void getJar(String branch, String version) {
new File(getDir(branch, version)).mkdirs();
String build = getBuildId(version);
Http.get(String.format(REMOTE_JAR_URL, branch, build), getFilePath(branch, version));
}
public String getBuildId(String version) {
String[] parts = version.split("\\.");
return parts[2];
}
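  /*
   * Illustrative example (derived from the code above): for version "1.1.465" the build
   * id is "465", so getJar("develop", "1.1.465") downloads
   *
   *   http://build.myrobotlab.org:8080/job/myrobotlab-multibranch/job/develop/465/artifact/target/myrobotlab.jar
   *
   * into branches/develop-1.1.465/myrobotlab.jar (see getDir / getFilePath).
   */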
public String getLatestVersion(String branch, Boolean allowRemote) throws MrlException {
Set<String> versions = getVersions(branch, allowRemote);
return getLatestVersion(versions);
}
public String getLatestVersion(Set<String> versions) throws MrlException {
String latest = null;
for (String version : versions) {
if (latest == null) {
latest = version;
continue;
}
if (isGreaterThan(version, latest)) {
latest = version;
}
}
return latest;
}
/**
* checks to see if a branch / version jar exists on the local filesystem
*
* @param branch
* @param version
* @return
*/
public boolean existsLocally(String branch, String version) {
return new File(getFilePath(branch, version)).exists();
}
/**
* if there is a single instance - just restart it ...
*
* @throws IOException
* e
* @throws URISyntaxException
* e
* @throws InterruptedException
* e
*
*/
public synchronized void restart(String id) throws IOException, URISyntaxException, InterruptedException {
log.info("restarting process {}", id);
// ProcessData pd2 = copy(id);
// pd.setRestarting();
kill(id); // FIXME - kill should include prepare to shutdown ...
sleep(2000);
spawn2(id);
}
private void spawn2(String id) {
try {
if (processes.containsKey(id)) {
spawn2(processes.get(id));
} else {
log.error("agent does not know about process id {}", id);
}
} catch (Exception e) {
log.error("spawn2({}) threw ", id, e);
}
}
/**
* return a non-running process structure from an existing one with a new id
*
* @param id
* id
* @return process data
*
*/
public ProcessData copy(String id) {
if (!processes.containsKey(id)) {
log.error("cannot copy %s does not exist", id);
return null;
}
ProcessData pd = processes.get(id);
ProcessData pd2 = new ProcessData(pd);
pd2.startTs = null;
pd2.stopTs = null;
String[] parts = id.split("\\.");
if (parts.length == 4) {
try {
int instance = Integer.parseInt(parts[3]);
++instance;
} catch (Exception e) {
}
} else {
pd2.id = id + ".0";
}
processes.put(pd2.id, pd2);
if (agent != null) {
agent.broadcastState();
}
return pd2;
}
public void copyAndStart(String id) throws IOException {
// returns a non running copy with new process id
// on the processes list
ProcessData pd2 = copy(id);
spawn2(pd2);
if (agent != null) {
agent.broadcastState();
}
}
public void downloadLatest(String branch) throws IOException {
String version = getLatestRemoteVersion(branch);
log.info("downloading version {} /{}", version, branch);
byte[] myrobotlabjar = getLatestRemoteJar(branch);
if (myrobotlabjar == null) {
throw new IOException("could not download");
}
log.info("{} bytes", myrobotlabjar.length);
/*
* File archive = new File(String.format("%s/archive", branch));
* archive.mkdirs();
*/
FileOutputStream fos = new FileOutputStream(String.format("%s/myrobotlab.%s.jar", branch, version));
fos.write(myrobotlabjar);
fos.close();
}
/**
* gets id from name
*
* @param name
* name
* @return integer
*
*/
public String getId(String name) {
for (String pid : processes.keySet()) {
if (pid.equals(name)) {
return processes.get(pid).id;
}
}
return null;
}
// FIXME - should just be be saveRemoteJar() - but shouldn't be from
// multiple threads
static public byte[] getLatestRemoteJar(String branch) {
return Http.get(String.format(REMOTE_LAST_SUCCESSFUL_BUILD_JAR, branch));
}
public String getLatestRemoteVersion(String branch) {
try {
byte[] data = Http.get(String.format(REMOTE_LAST_SUCCESSFUL_VERSION, branch));
if (data != null) {
String json = new String(data);
CodecJson decoder = new CodecJson();
WorkflowJob job = (WorkflowJob) decoder.decode(json, WorkflowJob.class);
return versionPrefix + job.lastSuccessfulBuild.id;
}
} catch (Exception e) {
log.error("getLatestRemoteVersion threw", e);
}
return null;
}
/**
* gets name from id
*
* @param id
* e
* @return string
*/
public String getName(String id) {
for (String pid : processes.keySet()) {
if (pid.equals(id)) {
return processes.get(pid).name;
}
}
return null;
}
/**
   * FIXME this should be the build server not github ... github has no artifacts
*
* @return
*/
static public Set<String> getBranches() {
Set<String> possibleBranches = new HashSet<String>();
try {
// TODO - all http gets use HttpClient static methods and promise
// for asynchronous
// get gitHub's branches
byte[] r = Http.get(GitHub.BRANCHES);
if (r != null) {
String branches = new String(r);
CodecJson decoder = new CodecJson();
// decoder.decodeArray(Branch)
Object[] array = decoder.decodeArray(branches);
for (int i = 0; i < array.length; ++i) {
@SuppressWarnings("unchecked")
LinkedTreeMap<String, String> m = (LinkedTreeMap<String, String>) array[i];
if (m.containsKey("name")) {
possibleBranches.add(m.get("name").toString());
}
}
}
} catch (Exception e) {
log.error("getRemoteBranches threw", e);
}
return possibleBranches;
}
static boolean isGreaterThan(String version1, String version2) throws MrlException {
if (version1 == null) {
return false;
}
if (version2 == null) {
return true;
}
// special development behavior
if (version1.equals("unknown")) {
      return unknownIsGreatest;
}
if (version2.equals("unknown")) {
      return !unknownIsGreatest;
}
String[] parts1 = version1.split("\\.");
String[] parts2 = version2.split("\\.");
if (parts1.length != 3 || parts2.length != 3) {
throw new MrlException("invalid version isGreaterThan(%s, %s)", version1, version2);
}
for (int i = 0; i < 3; ++i) {
int v1 = Integer.parseInt(parts1[i]);
int v2 = Integer.parseInt(parts2[i]);
if (v1 != v2) {
return v1 > v2;
}
}
throw new MrlException("invalid isGreaterThan(%s, %s)", version1, version2);
}
/**
   * This method gets all the versions on a particular branch; if remote access
   * is allowed, it will ask the build server which successful builds exist
*
* @param branch
* @param allowRemote
* @return
*/
synchronized public Set<String> getVersions(String branch, Boolean allowRemote) {
Set<String> versions = new HashSet<String>();
versions.addAll(getLocalVersions(branch));
if (allowRemote) {
versions.addAll(getRemoteVersions(branch));
}
invoke("publishVersions", versions);
return versions;
}
public Set<String> publishVersions(HashSet<String> versions) {
return versions;
}
public Set<String> getRemoteVersions(String branch) {
Set<String> versions = new HashSet<String>();
try {
byte[] data = Http.get(String.format(REMOTE_BUILDS_URL, branch));
if (data != null) {
CodecJson decoder = new CodecJson();
String json = new String(data);
WorkflowJob job = (WorkflowJob) decoder.decode(json, WorkflowJob.class);
if (job.builds != null) {
for (WorkflowRun build : job.builds) {
if ("SUCCESS".equals(build.result)) {
versions.add(versionPrefix + build.id);
}
}
}
}
} catch (Exception e) {
error(e);
}
return versions;
}
public Set<String> getLocalVersions(String branch) {
Set<String> versions = new HashSet<>();
// get local file system versions
File branchDir = new File(BRANCHES_ROOT);
// get local existing versions
File[] listOfFiles = branchDir.listFiles();
for (int i = 0; i < listOfFiles.length; ++i) {
File file = listOfFiles[i];
if (file.isDirectory()) {
if (file.getName().startsWith(branch)) {
String version = file.getName().substring(branch.length() + 1);// getFileVersion(file.getName());
if (version != null) {
versions.add(version);
}
}
}
}
return versions;
}
static public String getFileVersion(String name) {
if (!name.startsWith("myrobotlab.")) {
return null;
}
String[] parts = name.split("\\.");
if (parts.length != 5) {
return null;
}
String version = String.format("%s.%s.%s", parts[1], parts[2], parts[3]);
return version;
}
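  /*
   * Illustrative example (derived from the code above): a file named
   * "myrobotlab.1.1.465.jar" splits into 5 parts and yields "1.1.465"; any other shape,
   * e.g. "myrobotlab.jar" or "develop-1.1.465", returns null.
   */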
/**
* get a list of all the processes currently governed by this Agent
*
* @return hash map, int to process data
*/
public Map<String, ProcessData> getProcesses() {
return processes;
}
// by id (or by pid?)
public String kill(String id) {
// FIXME !!! - "ask" all child processes to kindly Runtime.shutdown via msgs
if (processes.containsKey(id)) {
if (agent != null) {
agent.info("terminating %s", id);
}
ProcessData process = processes.get(id);
process.process.destroy();
process.state = ProcessData.STATE_STOPPED;
if (process.monitor != null) {
process.monitor.interrupt();
process.monitor = null;
}
// remove(processes.get(name));
if (agent != null) {
agent.info("%s haz beeen terminated", id);
agent.broadcastState();
}
return id;
} else {
try {
// FIXME make operating system independent
String cmd = "taskkill /F /PID " + id;
java.lang.Runtime.getRuntime().exec(cmd);
} catch (Exception e) {
log.error("kill threw", e);
}
}
log.warn("%s? no sir, I don't know that punk...", id);
return null;
}
/*
* BAD IDEA - data type ambiguity is a drag public Integer kill(String name) {
* return kill(getId(name)); }
*/
public void killAll() {
// FIXME !!! - "ask" all child processes to kindly Runtime.shutdown via msgs
for (String id : processes.keySet()) {
kill(id);
}
log.info("no survivors sir...");
if (agent != null) {
agent.broadcastState();
}
}
public void killAndRemove(String id) {
if (processes.containsKey(id)) {
kill(id);
processes.remove(id);
if (agent != null) {
agent.broadcastState();
}
}
}
/**
* list processes
*
* @return lp ?
*/
public String[] lp() {
Object[] objs = processes.keySet().toArray();
String[] pd = new String[objs.length];
for (int i = 0; i < objs.length; ++i) {
      String id = (String) objs[i];
ProcessData p = processes.get(id);
pd[i] = String.format("%s - %s [%s - %s]", id, p.name, p.branch, p.version);
}
return pd;
}
public String publishTerminated(String id) {
log.info("publishTerminated - terminated %s %s - restarting", id, getName(id));
if (!processes.containsKey(id)) {
log.error("processes {} not found");
return id;
}
    // if not forked - i.e. the Agent is not allowed to exist
    // without child instances - check whether we should shut down
if (!cmdline.containsKey("-fork")) {
// spin through instances - if I'm the only
// thing left - terminate
boolean processesStillRunning = false;
for (ProcessData pd : processes.values()) {
if (pd.isRunning() || pd.isRestarting()) {
processesStillRunning = true;
break;
}
}
if (!processesStillRunning) {
shutdown();
}
}
if (agent != null) {
agent.broadcastState();
}
return id;
}
/**
* This is a great idea & test - because we want complete control over
* environment and dependencies - the ability to purge completely - and start
* from the beginning - but it should be in another service and not part of
* the Agent. The 'Test' service could use Agent as a peer
*
* @return list of status
*
*/
public List<Status> serviceTest() {
List<Status> ret = new ArrayList<Status>();
// CLEAN FOR TEST METHOD
// FIXME DEPRECATE !!!
// RUNTIME is responsible for running services
// REPO is responsible for possible services
// String[] serviceTypeNames =
// Runtime.getInstance().getServiceTypeNames();
HashSet<String> skipTest = new HashSet<String>();
skipTest.add("org.myrobotlab.service.Runtime");
skipTest.add("org.myrobotlab.service.OpenNi");
/*
* skipTest.add("org.myrobotlab.service.Agent");
* skipTest.add("org.myrobotlab.service.Incubator");
* skipTest.add("org.myrobotlab.service.InMoov"); // just too big and
* complicated at the moment skipTest.add("org.myrobotlab.service.Test");
* skipTest.add("org.myrobotlab.service.Cli"); // ?? No ?
*/
long installTime = 0;
Repo repo = Runtime.getInstance().getRepo();
ServiceData serviceData = ServiceData.getLocalInstance();
List<ServiceType> serviceTypes = serviceData.getServiceTypes();
ret.add(Status.info("serviceTest will test %d services", serviceTypes.size()));
long startTime = System.currentTimeMillis();
ret.add(Status.info("startTime", "%d", startTime));
for (int i = 0; i < serviceTypes.size(); ++i) {
ServiceType serviceType = serviceTypes.get(i);
// TODO - option to disable
if (!serviceType.isAvailable()) {
continue;
}
// serviceType = "org.myrobotlab.service.OpenCV";
if (skipTest.contains(serviceType.getName())) {
log.info("skipping %s", serviceType.getName());
continue;
}
try {
// agent.serviceTest(); // WTF?
// status.addInfo("perparing clean environment for %s",
// serviceType);
// clean environment
// FIXME - optimize clean
// SUPER CLEAN - force .repo to clear !!
// repo.clearRepo();
// less clean but faster
// repo.clearLibraries();
// repo.clearServiceData();
// comment all out for dirty
// install Test dependencies
long installStartTime = System.currentTimeMillis();
repo.install("org.myrobotlab.service.Test");
repo.install(serviceType.getName());
installTime += System.currentTimeMillis() - installStartTime;
// clean test.json part file
// spawn a test - attach to cli - test 1 service end to end
// ,"-invoke", "test","test","org.myrobotlab.service.Clock"
Process process = spawn(
new String[] { "-runtimeName", "testEnv", "-service", "test", "Test", "-logLevel", "WARN", "-noEnv", "-invoke", "test", "test", serviceType.getName() });
process.waitFor();
// destroy - start again next service
// wait for partFile report .. test.json
// NOT NEEDED - foreign process has ended
byte[] data = FileIO.loadPartFile("test.json", 60000);
if (data != null) {
String test = new String(data);
Status testResult = CodecUtils.fromJson(test, Status.class);
if (testResult.isError()) {
ret.add(testResult);
}
} else {
Status.info("could not get results");
}
// destroy env
kill(getId("testEnv"));
} catch (Exception e) {
ret.add(Status.error(e));
continue;
}
}
ret.add(Status.info("installTime", "%d", installTime));
ret.add(Status.info("installTime %d", installTime));
ret.add(Status.info("testTimeMs %d", System.currentTimeMillis() - startTime));
ret.add(Status.info("testTimeMinutes %d", TimeUnit.MILLISECONDS.toMinutes(System.currentTimeMillis() - startTime)));
ret.add(Status.info("endTime %d", System.currentTimeMillis()));
try {
FileIO.savePartFile(new File("fullTest.json"), CodecUtils.toJson(ret).getBytes());
} catch (Exception e) {
log.error("serviceTest threw", e);
}
return ret;
}
public Process spawn(String[] args) throws IOException, URISyntaxException, InterruptedException {
return spawn(null, null, args);
}
public String setBranch(String branch) {
branchRequested = branch;
return branchRequested;
}
static public Map<String, String> setEnv(Map<String, String> env) {
Platform platform = Platform.getLocalInstance();
String platformId = platform.getPlatformId();
if (platform.isLinux()) {
String ldPath = String.format("'pwd'/libraries/native:'pwd'/libraries/native/%s:${LD_LIBRARY_PATH}", platformId);
env.put("LD_LIBRARY_PATH", ldPath);
} else if (platform.isMac()) {
String dyPath = String.format("'pwd'/libraries/native:'pwd'/libraries/native/%s:${DYLD_LIBRARY_PATH}", platformId);
env.put("DYLD_LIBRARY_PATH", dyPath);
} else if (platform.isWindows()) {
// this just borks the path in Windows - additionally (unlike Linux)
// - i don't think you need native code on the PATH
// and Windows does not have a LD_LIBRARY_PATH
// String path =
// String.format("PATH=%%CD%%\\libraries\\native;PATH=%%CD%%\\libraries\\native\\%s;%%PATH%%",
// platformId);
// env.put("PATH", path);
// we need to sanitize against a non-ascii username
// work around for Jython bug in 2.7.0...
env.put("APPDATA", "%%CD%%");
} else {
log.error("unkown operating system");
}
return env;
}
public void shutdown() {
// FIXME !!! - "ask" all child processes to kindly Runtime.shutdown via msgs
log.info("terminating others");
killAll();
log.info("terminating self ... goodbye...");
// Runtime.exit();
Runtime.shutdown();
}
public synchronized Process spawn() throws IOException, URISyntaxException, InterruptedException {
return spawn(null, null, new String[] {});
}
public synchronized Process spawn(String branch, String version, String[] in) throws IOException, URISyntaxException, InterruptedException {
return spawn(getFilePath(branch, version), in);
}
public Process spawn(String jarPath, String[] in) throws IOException, URISyntaxException, InterruptedException {
File jarPathDir = new File(jarPath);
ProcessData pd = new ProcessData(agent, jarPathDir.getAbsolutePath(), in, branchAgent, versionAgent);
CmdLine cmdline = new CmdLine(in);
if (cmdline.hasSwitch("-autoUpdate")) {
autoUpdate(true);
}
log.info("Agent starting spawn {}", formatter.format(new Date()));
log.info("in args {}", Arrays.toString(in));
return spawn2(pd);
}
/**
* max complexity spawn
* @param pd
* @return
* @throws IOException
*/
public synchronized Process spawn2(ProcessData pd) throws IOException {
log.info("============== spawn begin ==============");
String runtimeName = pd.name;
// this needs cmdLine
String[] cmdLine = pd.buildCmdLine();
StringBuffer sb = new StringBuffer();
for (int i = 0; i < cmdLine.length; ++i) {
sb.append(cmdLine[i]);
sb.append(" ");
}
log.info("spawning -> [{}]", sb.toString());
ProcessBuilder builder = new ProcessBuilder(cmdLine);
// handle stderr as a direct pass through to System.err
builder.redirectErrorStream(true);
// setting working directory to wherever the jar is...
String spawnDir = new File(pd.jarPath).getParent();
builder.directory(new File(spawnDir));
log.info("in {} spawning -> [{}]", spawnDir, sb.toString());
// environment variables setup
setEnv(builder.environment());
Process process = builder.start();
pd.process = process;
pd.startTs = System.currentTimeMillis();
pd.monitor = new ProcessData.Monitor(pd);
pd.monitor.start();
pd.state = ProcessData.STATE_RUNNING;
if (pd.id == null) {
log.error("id should not be null!");
}
if (processes.containsKey(pd.id)) {
if (agent != null) {
agent.info("restarting %s %s", pd.id, pd.name);
}
} else {
if (agent != null) {
agent.info("starting new %s %s", pd.id, pd.name);
}
processes.put(pd.id, pd);
}
// FIXME !!! - remove stdin/stdout !!!! use sockets only
// attach our cli to the latest instance
// which I assume is the beginning main thread doing a write to std::out
// and it blocking before anything else can happen
log.info("Agent finished spawn {}", formatter.format(new Date()));
if (agent != null) {
Cli cli = Runtime.getCli();
cli.add(runtimeName, process.getInputStream(), process.getOutputStream());
cli.attach(runtimeName);
agent.broadcastState();
}
return process;
}
/**
* DEPRECATE ? spawn2 should do this checking ?
*
* @param id
* i
* @throws IOException
* e
* @throws URISyntaxException
* e
* @throws InterruptedException
* e
*
*/
public void start(String id) throws IOException, URISyntaxException, InterruptedException {
if (!processes.containsKey(id)) {
log.error("start process %s can not start - process does not exist", id);
return;
}
ProcessData p = processes.get(id);
if (p.isRunning()) {
log.warn("process %s already started", id);
return;
}
spawn2(p);
}
public void update() throws IOException {
Platform platform = Platform.getLocalInstance();
update(platform.getBranch());
}
public void update(String branch) throws IOException {
log.info("update({})", branch);
// so we need to get the version of the jar contained in the {branch}
// directory ..
FileIO.extract(String.format("%s/myrobotlab.jar", branch), Util.getResourceDir() + "/version.txt", String.format("%s/version.txt", branch));
String currentVersion = FileIO.toString(String.format("%s/version.txt", branch));
if (currentVersion == null) {
log.error("{}/version.txt current version is null", branch);
return;
}
// and figure
String latestVersion = getLatestRemoteVersion(branch);
if (latestVersion == null) {
log.error("s3 version.txt current version is null", branch);
return;
}
if (!latestVersion.equals(currentVersion)) {
log.info("latest %s > current %s - updating", latestVersion, currentVersion);
downloadLatest(branch);
}
// FIXME - restart processes
// if (updateRestartProcesses) {
}
/**
* This static method returns all the details of the class without it having
* to be constructed. It has description, categories, dependencies, and peer
* definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(Agent.class.getCanonicalName());
meta.addDescription("responsible for spawning a MRL process. Agent can also terminate, respawn and control the spawned process");
meta.addCategory("framework");
meta.setSponsor("GroG");
meta.setLicenseApache();
meta.addDependency("commons-cli", "commons-cli", "1.4");
meta.includeServiceInOneJar(true);
return meta;
}
public void startService() {
super.startService();
// addTask(1000, "scanForMsgs");
}
/**
* First method JVM executes when myrobotlab.jar is in jar form.
*
* -agent "-logLevel DEBUG -service webgui WebGui"
*
* @param args
* args
*/
// FIXME - test when internet is not available
// FIXME - test over multiple running processes
// FIXME - add -help
// TODO - add jvm memory other runtime info
// FIXME - a way to route parameters from command line to Agent vs Runtime -
// the current concept is ok - but it does not work ..
// make it work if necessary prefix everything by -agent-<...>
// FIXME - replace by PicoCli !!!
public static void main(String[] args) {
try {
Logging logging = LoggingFactory.getInstance();
// FIXME convert to picocmd or apachecli
// -agent \"-params -service ... \" string encoded
cmdline = new CmdLine(args);
logging.setLevel(cmdline.getSafeArgument("-logLevel", 0, "INFO"));
log.info("agent cmdline [{}] will be relayed ", cmdline);
Platform platform = Platform.getLocalInstance();
if (cmdline.containsKey("--autoUpdate")) {
autoUpdate = true;
}
if (cmdline.containsKey("--branch")) {
branchRequested = cmdline.getArgument("--branch", 0);
}
if (cmdline.containsKey("--version")) {
versionRequested = cmdline.getArgument("--version", 0);
}
// FIXME - have a list versions ... command line !!!
// FIXME - the most common use case is the version of the spawned instance
// if that is the case its needed to determine what is the "proposed"
// branch & version if no
// special command parameters were given
if (cmdline.containsKey("-h") || cmdline.containsKey("--help")) {
// FIXME - add all possible command descriptions ..
System.out.println(String.format("%s branch %s version %s", platform.getBranch(), platform.getPlatformId(), platform.getVersion()));
return;
}
// "agent" command line - must be in quotes since the rest of the command
// line
// is relayed to the service
// Start with the default cmdline for the agent
String[] agentArgs = new String[] { "-isAgent", "-id", String.format("agent.%s.%s", formatter.format(new Date()), Platform.getLocalInstance().getPid()) };
if (cmdline.containsKey("-agent")) {
String str = cmdline.getArgument("-agent", 0);
String[] tmp = str.split(" ");
agentArgs = new String[tmp.length + 1];
for (int i = 0; i < agentArgs.length - 1; ++i) {
agentArgs[i] = tmp[i];
}
// -isAgent parameter is REQUIRED for Agent
agentArgs[agentArgs.length - 1] = "-isAgent";
}
agentCmdline = new CmdLine(agentArgs);
Process p = null;
log.info("agent args [{}]", agentCmdline);
Runtime.setLogLevel("WARN");
// agents runtime
Runtime.main(agentArgs);
if (agent == null) {
agent = (Agent) Runtime.start("agent", "Agent");
}
if (cmdline.containsKey("-webadmin") || cmdline.containsKey("--webadmin")) {
agent.startWebGui();
// webgui.setAddress("127.0.0.1"); - for security...
}
if (cmdline.containsKey("--autoUpdate")) {
// FIXME - call directly - update if possible - then spawn
// agent.processUpdates(null, branchRequested, versionRequested,
// autoUpdate);
agent.getLatest(branchRequested);
agent.autoUpdate(true);
}
// FIXME - use wsclient for remote access
if (!cmdline.containsKey("--client")) {
p = agent.spawn(args); // <-- agent's is now in charge of first
} else {
Runtime.start("cli", "Cli");
}
// deprecate non-standard -install use --install short-version would be -i
if (cmdline.containsKey("-install") || cmdline.containsKey("--install")) {
// wait for mrl instance to finish installing
// then shutdown (addendum: check if supporting other processes)
p.waitFor();
agent.shutdown();
}
} catch (Exception e) {
log.error("unsuccessful spawn", e);
}
}
}
|
package org.nuxeo.common;
import java.io.File;
import java.util.Properties;
/**
* @author <a href="mailto:bs@nuxeo.com">Bogdan Stefanescu</a>
*/
public class Environment {
/**
* Constants that identifies possible hosts for the framework.
*/
public static final String JBOSS_HOST = "JBoss";
// Jetty or GF3 embedded
public static final String NXSERVER_HOST = "NXServer";
public static final String TOMCAT_HOST = "Tomcat";
public static String NUXEO_HOME_DIR = "nuxeo.home.dir";
public static String NUXEO_DATA_DIR = "nuxeo.data.dir";
public static String NUXEO_LOG_DIR = "nuxeo.log.dir";
public static String NUXEO_TMP_DIR = "nuxeo.tmp.dir";
public static String NUXEO_CONFIG_DIR = "nuxeo.config.dir";
public static String NUXEO_WEB_DIR = "nuxeo.web.dir";
/**
* the home directory
*
* @deprecated never defined
*/
public static final String HOME_DIR = "org.nuxeo.app.home";
/**
* the web root
*
* @deprecated never defined
*/
public static final String WEB_DIR = "org.nuxeo.app.web";
/**
* the config directory
*
* @deprecated never defined
*/
public static final String CONFIG_DIR = "org.nuxeo.app.config";
/**
* the data directory
*
* @deprecated never defined
*/
public static final String DATA_DIR = "org.nuxeo.app.data";
/**
* the log directory
*
* @deprecated never defined
*/
public static final String LOG_DIR = "org.nuxeo.app.log";
// the application layout (optional)
// directory containing nuxeo runtime osgi bundles
public static final String BUNDLES_DIR = "nuxeo.osgi.app.bundles";
public static final String BUNDLES = "nuxeo.osgi.bundles";
private static Environment DEFAULT;
public static void setDefault(Environment env) {
DEFAULT = env;
}
public static Environment getDefault() {
return DEFAULT;
}
protected final File home;
protected File data;
protected File log;
protected File config;
protected File web;
protected File temp;
protected final Properties properties;
protected String[] args;
protected boolean isAppServer;
protected String hostAppName;
protected String hostAppVersion;
public Environment(File home) {
this(home, null);
}
public Environment(File home, Properties properties) {
this.home = home;
this.properties = new Properties();
if (properties != null) {
loadProperties(properties);
}
this.properties.put(HOME_DIR, this.home.getAbsolutePath());
}
public File getHome() {
return home;
}
public boolean isApplicationServer() {
return isAppServer;
}
public void setIsApplicationServer(boolean isAppServer) {
this.isAppServer = isAppServer;
}
public String getHostApplicationName() {
return hostAppName;
}
public String getHostApplicationVersion() {
return hostAppVersion;
}
public void setHostApplicationName(String name) {
hostAppName = name;
}
public void setHostApplicationVersion(String version) {
hostAppVersion = version;
}
public File getTemp() {
if (temp == null) {
temp = new File(home, "tmp");
}
return temp;
}
public void setTemp(File temp) {
this.temp = temp;
this.properties.put(NUXEO_TMP_DIR, temp.getAbsolutePath());
}
public File getConfig() {
if (config == null) {
config = new File(home, "config");
}
return config;
}
public void setConfig(File config) {
this.config = config;
}
public File getLog() {
if (log == null) {
log = new File(home, "log");
}
return log;
}
public void setLog(File log) {
this.log = log;
this.properties.put(NUXEO_LOG_DIR, log.getAbsolutePath());
}
public File getData() {
if (data == null) {
data = new File(home, "data");
}
return data;
}
public void setData(File data) {
this.data = data;
this.properties.put(NUXEO_DATA_DIR, data.getAbsolutePath());
}
public File getWeb() {
if (web == null) {
web = new File(home, "web");
}
return web;
}
public void setWeb(File web) {
this.web = web;
}
public String[] getCommandLineArguments() {
return args;
}
public void setCommandLineArguments(String[] args) {
this.args = args;
}
public String getProperty(String key) {
return properties.getProperty(key);
}
public String getProperty(String key, String defaultValue) {
String val = properties.getProperty(key);
return val == null ? defaultValue : val;
}
public void setProperty(String key, String value) {
properties.put(key, value);
}
public Properties getProperties() {
return properties;
}
public void loadProperties(Properties properties) {
this.properties.putAll(properties);
}
public boolean isJBoss() {
return JBOSS_HOST.equals(hostAppName);
}
public boolean isJetty() {
return NXSERVER_HOST.equals(hostAppName);
}
public boolean isTomcat() {
return TOMCAT_HOST.equals(hostAppName);
}
}
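/*
* A minimal usage sketch (the paths and host choice are illustrative only):
* configure an Environment for a home directory and publish it as the
* process-wide default.
*/
class EnvironmentUsageSketch {
public static void main(String[] args) {
Environment env = new Environment(new File("/opt/nuxeo"));
env.setData(new File("/var/lib/nuxeo/data"));
env.setHostApplicationName(Environment.TOMCAT_HOST);
Environment.setDefault(env);
// directories that were not set explicitly default to subdirectories of home
System.out.println(Environment.getDefault().getLog()); // <home>/log
System.out.println(env.getProperty(Environment.NUXEO_DATA_DIR)); // /var/lib/nuxeo/data
System.out.println(env.isTomcat()); // true
}
}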
|
package org.weakref.jmx;
public class JmxException extends RuntimeException
{
private static final long serialVersionUID = 1L;
public enum JmxCause
{
CONFIG,
MALFORMED_OBJECT_NAME,
INSTANCE_ALREADY_EXISTS,
INSTANCE_NOT_FOUND,
MBEAN_REGISTRATION
}
private final JmxCause jmxCause;
JmxException(final JmxCause jmxCause, final String message, final Object ... args)
{
super(String.format(message, args));
this.jmxCause = jmxCause;
}
JmxException(final JmxCause jmxCause, final Throwable cause, final String message, final Object ... args)
{
super(String.format(message, args), cause);
this.jmxCause = jmxCause;
}
public JmxCause getJmxCause()
{
return jmxCause;
}
}
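/*
* A minimal sketch (kept in the same package because the constructors are
* package-private): raise a lookup failure and branch on its machine-readable
* cause. The object name is illustrative only.
*/
class JmxExceptionUsageSketch
{
static void reportMissing(String objectName)
{
try {
throw new JmxException(JmxException.JmxCause.INSTANCE_NOT_FOUND,
"no MBean registered under %s", objectName);
}
catch (JmxException e) {
if (e.getJmxCause() == JmxException.JmxCause.INSTANCE_NOT_FOUND) {
System.err.println(e.getMessage());
}
}
}
}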
|
package org.wiztools.restclient;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.MalformedInputException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.wiztools.restclient.xml.XMLException;
import org.wiztools.restclient.xml.XMLUtil;
/**
*
* @author schandran
*/
public class Util {
public static boolean isStrEmpty(final String str) {
if (str == null || "".equals(str.trim())) {
return true;
}
return false;
}
public static String getNullStrIfNull(final String str) {
return str == null ? "" : str;
}
public static String getStackTrace(final Throwable aThrowable) {
String errorMsg = aThrowable.getMessage();
final Writer result = new StringWriter();
final PrintWriter printWriter = new PrintWriter(result);
aThrowable.printStackTrace(printWriter);
return errorMsg + "\n" + result.toString();
}
public static String getHTMLListFromList(List<String> ll) {
StringBuffer sb = new StringBuffer();
sb.append("<html><ul>");
for (String str : ll) {
sb.append("<li>").append(str).append("</li>");
}
sb.append("</ul></html>");
return sb.toString();
}
public static String inputStream2String(final InputStream in) throws IOException {
if (in == null) {
return "";
}
StringBuffer out = new StringBuffer();
byte[] b = new byte[4096];
Charset charset = Charset.forName(ENCODE);
for (int n; (n = in.read(b)) != -1;) {
CharsetDecoder decoder = charset.newDecoder();
CharBuffer charBuffer;
try{
// Decode the chunk that was just read (once per read, not once per byte).
charBuffer = decoder.decode(ByteBuffer.wrap(b, 0, n));
}
catch(MalformedInputException ex){
throw new IOException("File not in supported encoding (" + ENCODE + ")");
}
charBuffer.rewind(); // Bring the buffer's pointer to 0
out.append(charBuffer.toString());
}
return out.toString();
}
private static final String ENCODE = "UTF-8";
public static String parameterEncode(Map<String, String> params) {
StringBuffer sb = new StringBuffer();
for (String key : params.keySet()) {
try {
String value = params.get(key);
String encodedKey = URLEncoder.encode(key, ENCODE);
String encodedValue = URLEncoder.encode(value, ENCODE);
sb.append(encodedKey).append("=").append(encodedValue).append("&");
} catch (UnsupportedEncodingException ex) {
assert true : "Encoder UTF-8 supported in all Java platforms.";
}
}
sb.deleteCharAt(sb.length() - 1);
return sb.toString();
}
public static String getStringFromFile(File f) throws FileNotFoundException, IOException {
InputStream is = null;
try {
is = new FileInputStream(f);
return inputStream2String(is);
} finally {
if(is != null){
is.close();
}
}
}
public static String getMimeType(File f) {
String type = null;
URLConnection uc = null;
try {
URL u = f.toURI().toURL();
uc = u.openConnection();
type = uc.getContentType();
} catch (Exception e) {
// Could not determine the type; log the trace and fall through to return null.
e.printStackTrace();
}
finally{
if(uc != null){
// No method like uc.close() !!
}
}
return type;
}
public static void createReqResArchive(RequestBean request, ResponseBean response, File zipFile)
throws IOException, XMLException {
File requestFile = File.createTempFile("req-", ".xml");
File responseFile = File.createTempFile("res-", ".xml");
XMLUtil.writeRequestXML(request, requestFile);
XMLUtil.writeResponseXML(response, responseFile);
Map<String, File> files = new HashMap<String, File>();
files.put("request.rcq", requestFile);
files.put("response.rcs", responseFile);
byte[] buf = new byte[BUFF_SIZE];
ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipFile));
boolean isSuccess = false;
try{
for (String entryName: files.keySet()) {
File entryFile = files.get(entryName);
FileInputStream fis = new FileInputStream(entryFile);
zos.putNextEntry(new ZipEntry(entryName));
int len;
while ((len = fis.read(buf)) > 0) {
zos.write(buf, 0, len);
}
zos.closeEntry();
fis.close();
}
isSuccess = true;
}
finally{
IOException ioe = null;
if(zos != null){
try{
zos.close();
}
catch(IOException ex){
isSuccess = false;
ioe = ex;
}
}
if(!isSuccess){ // Failed: delete half-written zip file
zipFile.delete();
}
requestFile.delete();
responseFile.delete();
if(ioe != null){
throw ioe;
}
}
}
private static final int BUFF_SIZE = 1024 * 4;
public static ReqResBean getReqResArchive(File zipFile)
throws FileNotFoundException, IOException, XMLException {
ReqResBean encpBean = new ReqResBean();
// BufferedOutputStream dest = null;
FileInputStream fis = new FileInputStream(zipFile);
ZipInputStream zis = new ZipInputStream(new BufferedInputStream(fis));
ZipEntry entry;
try{
boolean isReqRead = false;
boolean isResRead = false;
while ((entry = zis.getNextEntry()) != null) {
int count;
byte data[] = new byte[BUFF_SIZE];
File tmpFile = File.createTempFile(entry.getName(), "");
try{
FileOutputStream fos = new FileOutputStream(tmpFile);
BufferedOutputStream dest = new BufferedOutputStream(fos, BUFF_SIZE);
while ((count = zis.read(data, 0, BUFF_SIZE)) != -1) {
dest.write(data, 0, count);
}
dest.flush();
dest.close();
if (entry.getName().equals("request.rcq")) {
RequestBean reqBean = XMLUtil.getRequestFromXMLFile(tmpFile);
encpBean.setRequestBean(reqBean);
isReqRead = true;
}
else if(entry.getName().equals("response.rcs")){
ResponseBean resBean = XMLUtil.getResponseFromXMLFile(tmpFile);
encpBean.setResponseBean(resBean);
isResRead = true;
}
}
finally{
tmpFile.delete();
}
}
if((!isReqRead) || (!isResRead)){
throw new IOException("Archive does not have request.rcq/response.rcs!");
}
}
finally{
zis.close();
}
return encpBean;
}
public static final int getStatusCodeFromStatusLine(final String statusLine){
int retVal = -1;
final String STATUS_PATTERN = "[^\\s]+\\s([0-9]{3})\\s.*";
Pattern p = Pattern.compile(STATUS_PATTERN);
Matcher m = p.matcher(statusLine);
if(m.matches()){
retVal = Integer.parseInt(m.group(1));
}
return retVal;
}
}
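/*
* A minimal usage sketch of the static helpers above; the parameters and the
* status line are illustrative only.
*/
class UtilUsageSketch {
public static void main(String[] args) {
Map<String, String> params = new HashMap<String, String>();
params.put("q", "rest client");
params.put("lang", "en");
System.out.println(Util.parameterEncode(params)); // e.g. q=rest+client&lang=en (map order may vary)
System.out.println(Util.getStatusCodeFromStatusLine("HTTP/1.1 404 Not Found")); // 404
System.out.println(Util.isStrEmpty("   ")); // true
}
}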
|
package pointGroups.gui;
import java.awt.BorderLayout;
import javax.swing.JPanel;
import pointGroups.geometry.Edge;
import pointGroups.geometry.Point;
import pointGroups.geometry.Schlegel;
import pointGroups.gui.event.EventDispatcher;
import pointGroups.gui.event.types.SchlegelResultEvent;
import pointGroups.gui.event.types.SchlegelResultHandler;
import pointGroups.util.jreality.JRealityUtility;
import de.jreality.scene.Geometry;
public class SchlegelView
extends JPanel
implements SchlegelResultHandler
{
private static final long serialVersionUID = -3642299900579728806L;
private final EventDispatcher dispatcher = EventDispatcher.get();
protected final UiViewer uiViewer = new UiViewer(this);
protected Schlegel lastSchlegel;
public SchlegelView() {
super();
setLayout(new BorderLayout());
dispatcher.addHandler(SchlegelResultHandler.class, this);
}
public void dispose() {
uiViewer.dispose();
}
@Override
public void onSchlegelResultEvent(SchlegelResultEvent event) {
lastSchlegel = event.getResult();
Point[] points = lastSchlegel.points;
Edge<Integer, Integer>[] edges = lastSchlegel.edgesViaIndices;
Geometry geom = JRealityUtility.generateGraph(points, edges);
uiViewer.setGeometry(geom);
}
}
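/*
* A minimal usage sketch: the view registers itself with the shared
* EventDispatcher in its constructor, so embedding it in any Swing container
* is enough for it to start rendering incoming Schlegel results. The frame
* title and size are illustrative only.
*/
class SchlegelViewUsageSketch {
public static void main(String[] args) {
javax.swing.JFrame frame = new javax.swing.JFrame("Schlegel viewer");
SchlegelView view = new SchlegelView();
frame.add(view);
frame.setSize(640, 480);
frame.setDefaultCloseOperation(javax.swing.JFrame.DISPOSE_ON_CLOSE);
frame.setVisible(true);
}
}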
|
package rdfanalyzer.spark;
import java.util.List;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.json.JSONObject;
public class EntryPoint {
/**
* Reads the suggested entry points from initially computed files.
*
* @param graph
* The name of the graph to query from.
* @param method
* The ranking method which should be used to determine the top
* ranked items.
* @param num
* How many suggestions to return.
*
* @return A JSONObject mapping the URIs of the neighbors to a JSONObject of
* their properties for each neighbor.
*/
public static JSONObject getSuggestions(String graph, String method, int num) {
if (num <= 0) {
throw new IllegalArgumentException("Requested number of suggestions must be greater than zero.");
}
JSONObject suggestions = new JSONObject();
for (String suggestion : querySuggestions(graph + method, num)) {
// Convert the suggestion String back to a JSONObject.
JSONObject jsonNeighbor = new JSONObject(suggestion);
// Add element to suggestions. Format "URI" => {properties}
suggestions.put(jsonNeighbor.getString("URI"), jsonNeighbor);
}
return suggestions;
}
/**
* Queries the top-ranked suggestions from the graph.
*
* @param graph
* The name of the ranking graph to query from (graph name
* concatenated with the ranking method).
* @param num
* How many suggestions to return.
*
* @return A List of JSON represented suggestions.
*/
private static List<String> querySuggestions(String graph, int num) {
DataFrame graphFrame = Service.sqlCtx().parquetFile(Configuration.storage() + graph + ".parquet");
graphFrame.cache().registerTempTable("RankingGraph");
// Only select valid URIs from the data.
DataFrame resultsFrame = Service.sqlCtx()
.sql("SELECT * FROM RankingGraph WHERE node LIKE '<%' ORDER BY importance DESC LIMIT " + num);
@SuppressWarnings("serial")
List<String> neighbors = resultsFrame.javaRDD().map(new Function<Row, String>() {
@Override
public String call(Row row) {
return convertSQLRowToJSON(row);
}
}).collect();
return neighbors;
}
/**
* Converts a SQL row with a suggested node into a JSONObject, represented
* as a String for serializability.
*
* @param row
* The SQL row to convert.
* @return The String representation of a JSONObject with the suggested
* nodes properties.
*/
private static String convertSQLRowToJSON(Row row) {
JSONObject suggestion = new JSONObject();
double importance = row.getDouble(0);
String URI = row.getString(1);
suggestion.put("URI", URI);
suggestion.put("name", RDFgraph.shortenURI(URI));
suggestion.put("importance", importance);
return suggestion.toString();
}
}
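/*
* A minimal usage sketch. The graph and ranking-method names are illustrative
* only; together they must match a precomputed "<graph><method>.parquet" file
* in the configured storage location, and a Spark SQL context must be
* reachable through Service.sqlCtx().
*/
class EntryPointUsageSketch {
public static void main(String[] args) {
// ask for the five top-ranked entry points of the hypothetical graph/method pair
JSONObject suggestions = EntryPoint.getSuggestions("ExampleGraph", "PageRank", 5);
System.out.println(suggestions.toString());
}
}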
|
package seedu.scheduler.ui;
import java.util.Date;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.layout.HBox;
import seedu.scheduler.model.entry.ReadOnlyEntry;
public class EntryCard extends UiPart{
private static final String FXML = "EntryListCard.fxml";
@FXML
private HBox cardPane;
@FXML
private Label name;
@FXML
private Label id;
@FXML
private Label startTime;
@FXML
private Label date;
@FXML
private Label endDate;
@FXML
private Label endTime;
@FXML
private Label tags;
private ReadOnlyEntry entry;
private int displayedIndex;
//@@author A0139956L
public static final String COMPLETED_INDICATION = "-fx-background-color: #ccffcc;";
public static final String OVERDUE_INDICATION = "-fx-background-color: #ffcce6;";
//@@author
public EntryCard(){
}
public static EntryCard load(ReadOnlyEntry entry, int displayedIndex){
EntryCard card = new EntryCard();
card.entry = entry;
card.displayedIndex = displayedIndex;
return UiPartLoader.loadUiPart(card);
}
//@@author A0139956L
@FXML
public void initialize() {
name.setText(entry.getName().fullName);
hideFieldsAccordingToType(entry);
indicatingColourByCondition(entry);
id.setText(displayedIndex + ". ");
date.setText("Start Date : " + entry.getDate().value);
startTime.setText("Start Time : " + entry.getStartTime().value);
endDate.setText("End Date : " + entry.getEndDate().value);
endTime.setText("End Time : " + entry.getEndTime().value);
tags.setText(entry.tagsString());
}
//@@author
public HBox getLayout() {
return cardPane;
}
@Override
public void setNode(Node node) {
cardPane = (HBox)node;
}
@Override
public String getFxmlPath() {
return FXML;
}
//@@author A0139956L
public void hideFieldsAccordingToType(ReadOnlyEntry entry) {
//deadline task
if (endDateInput(entry)) {
startTime.setVisible(false);
endTime.setVisible(false);
date.setVisible(false);
}
if (startDateInput(entry)) {
startTime.setVisible(false);
endTime.setVisible(false);
endDate.setVisible(false);
}
if(startAndEndDateInput(entry)) {
startTime.setVisible(false);
endTime.setVisible(false);
}
//floating task
if (floatTask(entry)) {
startTime.setVisible(false);
endTime.setVisible(false);
date.setVisible(false);
endDate.setVisible(false);
}
if (startTimeInput(entry)) {
endTime.setVisible(false);
endDate.setVisible(false);
}
if (endTimeInput(entry)) {
startTime.setVisible(false);
date.setVisible(false);
}
}
private boolean startAndEndDateInput(ReadOnlyEntry entry) {
return entry.getStartTime().toString().contains("empty")
&& entry.getEndTime().toString().contains("empty");
}
private boolean endDateInput(ReadOnlyEntry entry) {
return entry.getStartTime().toString().contains("empty")
&& entry.getEndTime().toString().contains("empty")
&& entry.getDate().toString().contains("empty");
}
private boolean startDateInput(ReadOnlyEntry entry) {
return entry.getStartTime().toString().contains("empty")
&& entry.getEndTime().toString().contains("empty")
&& entry.getEndDate().toString().contains("empty");
}
private boolean floatTask(ReadOnlyEntry entry) {
return entry.getStartTime().toString().contains("empty")
&& entry.getEndTime().toString().contains("empty")
&& entry.getDate().toString().contains("empty")
&& entry.getEndDate().toString().contains("empty");
}
private boolean startTimeInput(ReadOnlyEntry entry) {
return entry.getEndTime().toString().contains("empty")
&& entry.getEndDate().toString().contains("empty");
}
private boolean endTimeInput(ReadOnlyEntry entry) {
return entry.getStartTime().toString().contains("empty")
&& entry.getDate().toString().contains("empty");
}
public void indicatingColourByCondition(ReadOnlyEntry entry) {
//get today date from system
DateFormat df = new SimpleDateFormat("dd-MM-yyyy");
Date today = new Date();
//System.out.println("System today: " + df.format(today));
//put startDate and endDate into Date
String startDate = entry.getDate().toString();
String endDate = entry.getEndDate().toString();
String endTime = entry.getEndTime().toString();
DateFormat sdf = new SimpleDateFormat("dd-MM-yyyy");
DateFormat edf = new SimpleDateFormat("dd-MM-yyyy");
Date startDateObj;
Date endDateObj;
//if only entry startDate overdue
try {
startDateObj = sdf.parse(startDate);
if (onlyStartDateInput(entry, today, startDateObj)) {
cardPane.setStyle(OVERDUE_INDICATION);
}
} catch (ParseException e1) {
}
//if only entry endDate overdue
try {
endDateObj = edf.parse(endDate);
if (onlyEndDateInput(entry, today, endDateObj)) {
cardPane.setStyle(OVERDUE_INDICATION);
}
} catch (ParseException e) {
}
//endDate is today and endTime is overdue
try {
endDateObj = edf.parse(endDate);
if (edf.format(endDateObj).equals(df.format(today))) { // compare at day precision; 'today' also carries a time-of-day
try {
if (checkEndTimeOverdueOrNot(endTime)) {
cardPane.setStyle(OVERDUE_INDICATION);
}
}
catch (ParseException e) {
}
}
} catch (ParseException e) {
}
//if entry completed
if (entry.tagsString().contains("Completed")) {
cardPane.setStyle(COMPLETED_INDICATION);
}
}
private boolean onlyEndDateInput(ReadOnlyEntry entry, Date today, Date endDateObj) {
return endDateObj.before(today)
&& entry.getStartTime().toString().contains("empty")
&& entry.getEndTime().toString().contains("empty")
&& entry.getDate().toString().contains("empty");
}
public static boolean checkEndTimeOverdueOrNot(String endTime) throws ParseException {
boolean endTimeOverdueOrnot = false;
Calendar cal = Calendar.getInstance();
cal.set(Calendar.HOUR_OF_DAY, Integer.parseInt(endTime.substring(0, 2)));
cal.set(Calendar.MINUTE, Integer.parseInt(endTime.substring(3)));
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
if (Calendar.getInstance().after(cal)) {
System.out.println("it's overdue");
endTimeOverdueOrnot = true;
} else {
System.out.println("it's not overdue");
}
return endTimeOverdueOrnot;
}
private boolean onlyStartDateInput(ReadOnlyEntry entry, Date today, Date startDateObj) {
return startDateObj.before(today)
&& entry.getStartTime().toString().contains("empty")
&& entry.getEndTime().toString().contains("empty")
&& entry.getEndDate().toString().contains("empty");
}
}
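/*
* A minimal sketch of the static overdue-time helper above; it assumes the
* "HH:mm" layout the card reads (hours in the first two characters, minutes
* after the separator). The time shown is illustrative only.
*/
class EntryCardUsageSketch {
public static void main(String[] args) throws ParseException {
// prints true once the local clock has passed 18:30 today, false before that
System.out.println(EntryCard.checkEndTimeOverdueOrNot("18:30"));
}
}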
|
package tbax.baxshops.items;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.DyeColor;
import org.bukkit.Material;
import org.bukkit.block.banner.Pattern;
import org.bukkit.block.banner.PatternType;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.BannerMeta;
import org.bukkit.inventory.meta.ItemMeta;
import org.jetbrains.annotations.NotNull;
import tbax.baxshops.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.util.*;
@SuppressWarnings("JavaDoc")
public final class ItemUtil
{
private static final String MINECRAFT_VERSION;
private static final Method AS_NMS_COPY;
private static final Method GET_NAME;
static {
String name = Bukkit.getServer().getClass().getPackage().getName();
MINECRAFT_VERSION = name.substring(name.lastIndexOf('.') + 1);
Method nmsCpyMthd = null;
Method getNmMthd = null;
try {
Class<?> itemStackCls = Class.forName("net.minecraft.server." + MINECRAFT_VERSION + ".ItemStack");
nmsCpyMthd = Class.forName("org.bukkit.craftbukkit." + MINECRAFT_VERSION + ".inventory.CraftItemStack")
.getMethod("asNMSCopy", ItemStack.class);
getNmMthd = itemStackCls.getMethod("getName");
}
catch (ReflectiveOperationException e) {
e.printStackTrace();
}
AS_NMS_COPY = nmsCpyMthd;
GET_NAME = getNmMthd;
}
/**
* An array of items that can be damaged
*/
private static final Map<Material, Short> damageable = new HashMap<>();
/**
* A list of enchantment names
*/
private static final Map<Enchantment, Enchantable> enchants = new HashMap<>();
private ItemUtil()
{
}
public static List<BaxEntry> getItemFromAlias(String input, BaxShop shop)
{
String[] words = input.toUpperCase().split("_");
String normalizedInput = input.replace('_', ' ').toUpperCase();
int maxMatch = -1;
List<BaxEntry> entries = new ArrayList<>();
for(BaxEntry entry : shop) {
String entryName = entry.getName().toUpperCase();
if (Objects.equals(entryName, normalizedInput)) {
return Collections.singletonList(entry); // 100% match
}
else {
String[] entryWords = entryName.split(" ");
int matches = getMatches(entryWords, words);
if (matches == maxMatch) {
entries.add(entry);
}
else if (matches > maxMatch) {
entries.clear();
entries.add(entry);
maxMatch = matches;
}
}
}
return entries;
}
private static int getMatches(String[] array1, String[] array2)
{
int matches = 0;
for(String word1 : array1) {
for(String word2 : array2) {
if (word1.equals(word2)) {
++matches;
}
}
}
return matches;
}
/**
* Gets the name of an item.
*
* @param entry the shop entry
* @return the item's name
*/
public static String getName(BaxEntry entry)
{
return ItemUtil.getName(entry.getItemStack());
}
/**
* Gets the name of an item.
*
* @param item an item stack
* @return the item's name
*/
public static String getName(ItemStack item)
{
if (item.getType() == Material.ENCHANTED_BOOK) {
Map<Enchantment, Integer> enchants = EnchantMap.getEnchants(item);
if (enchants != null)
return EnchantMap.fullListString(enchants);
}
else if (isOminousBanner(item)) {
return ChatColor.GOLD + "Ominous Banner";
}
item = item.clone();
ItemMeta meta = item.getItemMeta();
meta.setDisplayName(null);
item.setItemMeta(meta);
try {
Object nmsCopy = AS_NMS_COPY.invoke(null, item);
Object txtObj = GET_NAME.invoke(nmsCopy);
try {
return (String) txtObj;
}
catch (ClassCastException e) {
return (String)txtObj.getClass().getMethod("getText").invoke(txtObj);
}
}
catch (ReflectiveOperationException | ClassCastException e) {
ShopPlugin.logWarning("Could not get item name for " + item.getType());
return item.getType().toString();
}
}
public static boolean isOminousBanner(@NotNull ItemStack stack)
{
if (stack.getType() != Material.WHITE_BANNER)
return false;
BannerMeta bannerMeta = (BannerMeta)stack.getItemMeta();
return bannerMeta.getPatterns().containsAll(ominousPatterns());
}
private static List<Pattern> ominousPatterns()
{
Pattern[] patterns = new Pattern[8];
patterns[0] = new Pattern(DyeColor.CYAN, PatternType.RHOMBUS_MIDDLE);
patterns[1] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.STRIPE_BOTTOM);
patterns[2] = new Pattern(DyeColor.GRAY, PatternType.STRIPE_CENTER);
patterns[3] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.BORDER);
patterns[4] = new Pattern(DyeColor.BLACK, PatternType.STRIPE_MIDDLE);
patterns[5] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.HALF_HORIZONTAL);
patterns[6] = new Pattern(DyeColor.LIGHT_GRAY, PatternType.CIRCLE_MIDDLE);
patterns[7] = new Pattern(DyeColor.BLACK, PatternType.BORDER);
return Arrays.asList(patterns);
}
public static String getEnchantName(Enchantment enchant)
{
Enchantable enchantable = enchants.get(enchant);
if (enchantable == null)
return Format.toFriendlyName(enchant.toString());
return enchantable.getName();
}
/**
* Determines if a material can be damaged
* @param item
* @return
*/
public static boolean isDamageable(Material item)
{
return damageable.containsKey(item);
}
/**
* Gets the maximum damage for an item. This assumes damageability
* has been confirmed with isDamageable()
* @param item
* @return
*/
public static short getMaxDamage(Material item)
{
return damageable.get(item);
}
/**
* Loads the damageable items list from the damageable.txt resource.
* @param plugin
*/
public static void loadDamageable(ShopPlugin plugin)
{
InputStream stream = plugin.getResource("damageable.txt");
if (stream == null) {
return;
}
int i = 1;
try {
BufferedReader br = new BufferedReader(new InputStreamReader(stream));
String line;
while ((line = br.readLine()) != null) {
if (line.length() == 0 || line.charAt(0) == '#') { // skip blank lines and comments (assuming '#' starts a comment)
continue;
}
Scanner scanner = new Scanner(line);
Material material = Material.getMaterial(scanner.next());
short maxDamage = scanner.nextShort();
damageable.put(material, maxDamage);
i++;
}
stream.close();
}
catch (IOException e) {
plugin.getLogger().warning("Failed to readFromDisk damageable: " + e.toString());
}
catch (NoSuchElementException e) {
plugin.getLogger().info("loadDamageable broke at line: " + i);
e.printStackTrace();
}
}
/**
* Loads the enchantment names in enchants.txt
* @param plugin
*/
public static void loadEnchants(ShopPlugin plugin)
{
try (InputStream stream = plugin.getResource("enchants.yml")) {
YamlConfiguration enchantConfig = YamlConfiguration.loadConfiguration(new InputStreamReader(stream));
List<Map<?, ?>> section = enchantConfig.getMapList("enchants");
for (Map<?, ?> enchantMap : section) {
Enchantment enchantment = Enchantment.getByName((String) enchantMap.get("enchantment"));
String name = (String) enchantMap.get("name");
boolean levels = (Boolean) enchantMap.get("levels");
enchants.put(enchantment, new Enchantable(name, levels));
}
}
catch (IOException e) {
plugin.getLogger().warning("Failed to readFromDisk enchants: " + e.toString());
}
}
public static boolean hasEnchantLevels(Enchantment enchantment)
{
return getEnchantable(enchantment).hasLevels();
}
public static Enchantable getEnchantable(Enchantment enchantment)
{
Enchantable enchantable = enchants.get(enchantment);
if (enchantable == null)
return new Enchantable(Format.toFriendlyName(enchantment.toString()), true);
return enchantable;
}
}
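/*
* A minimal usage sketch (assumes it runs inside a live Bukkit server, since
* ItemUtil's static initializer reflects into the server's internal classes).
* The plugin reference and material are illustrative only.
*/
final class ItemUtilUsageSketch
{
static void describe(ShopPlugin plugin, Material material)
{
// populate the lookup tables from the bundled resources first
ItemUtil.loadDamageable(plugin);
ItemUtil.loadEnchants(plugin);
if (ItemUtil.isDamageable(material)) {
plugin.getLogger().info(material + " max damage: " + ItemUtil.getMaxDamage(material));
}
}
}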
|
package io.spacedog.client;
import java.util.Optional;
import org.junit.Assert;
import io.spacedog.sdk.SpaceDog;
import io.spacedog.utils.Backends;
import io.spacedog.utils.Credentials;
import io.spacedog.utils.Passwords;
import io.spacedog.utils.SpaceFields;
import io.spacedog.utils.SpaceParams;
import io.spacedog.utils.Utils;
public class SpaceTest extends Assert implements SpaceFields, SpaceParams {
public static SpaceDog getOrSignUp(SpaceDog dog,
String username, String password, String email) {
Optional<Credentials> credentials = dog.credentials().getByUsername(username);
return credentials.isPresent()
? SpaceDog.fromCredentials(credentials.get())
: dog.signUp(username, password, email);
}
public static SpaceDog signUp(SpaceDog backend, String username, String password) {
return SpaceDog.backend(backend.backendId()).signUp(username, password, "platform@spacedog.io");
}
public static SpaceDog signUp(String backendId, String username, String password) {
return SpaceDog.backend(backendId).signUp(username, password, "platform@spacedog.io");
}
public static SpaceDog signUp(SpaceDog backend, String username, String password, String email) {
return SpaceDog.backend(backend.backendId()).signUp(username, password, email);
}
public static SpaceDog createTempUser(SpaceDog superadmin, String username) {
return createTempUser(superadmin.backendId(), username);
}
public static SpaceDog createTempUser(String backendId, String username) {
String password = Passwords.random();
Credentials credentials = SpaceDog.backend(backendId)
.credentials().create(username, password, "platform@spacedog.io");
return SpaceDog.fromCredentials(credentials).password(password);
}
public static SpaceDog createAdminCredentials(SpaceDog backend, String username, String password, String email) {
Credentials credentials = SpaceDog.backend(backend.backendId())
.credentials().create(username, password, email, true);
return SpaceDog.fromCredentials(credentials).password(password);
}
public static void superdogDeletesCredentials(String backendId, String username) {
SpaceDog superdog = superdog(backendId);
Optional<Credentials> optional = superdog.credentials().getByUsername(username);
if (optional.isPresent())
superdog.credentials().delete(optional.get().id());
}
public static SpaceDog resetTestBackend() {
return resetBackend("test", "test", "hi test");
}
public static SpaceDog resetTest2Backend() {
return resetBackend("test2", "test2", "hi test2");
}
public static SpaceDog resetBackend(String backendId, String username, String password) {
return resetBackend(backendId, username, password, "platform@spacedog.io");
}
public static SpaceDog resetBackend(String backendId, String username, String password,
String email) {
return SpaceDog.backend(backendId).username(username).password(password).email(email)
.backend().delete()
.dog().signUpBackend();
}
public static void prepareTest() {
prepareTestInternal(true);
}
public static void prepareTest(boolean forTesting) {
prepareTestInternal(forTesting);
}
private static void prepareTestInternal(boolean forTesting) {
SpaceRequest.setForTestingDefault(forTesting);
StackTraceElement grandParentStackTraceElement = Utils.getGrandParentStackTraceElement();
Utils.info();
Utils.info("--- %s.%s() ---",
grandParentStackTraceElement.getClassName(),
grandParentStackTraceElement.getMethodName());
}
public static void deleteAll(String type, SpaceDog backend) {
SpaceRequest.delete("/1/data/" + type).adminAuth(backend).go(200);
}
public static void setRole(SpaceDog admin, SpaceDog user, String role) {
admin.credentials().setRole(user.id(), role);
}
public static SpaceDog superdog() {
return superdog(Backends.rootApi());
}
public static SpaceDog superdog(SpaceDog dog) {
return superdog(dog.backendId());
}
public static SpaceDog superdog(String backendId) {
SpaceEnv env = SpaceEnv.defaultEnv();
return SpaceDog.backend(backendId)
.username(env.get("spacedog.superdog.username"))
.password(env.get("spacedog.superdog.password"));
}
}
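/*
* A minimal sketch of how a concrete test could use the helpers above. It
* assumes a reachable SpaceDog test backend configured through SpaceEnv; the
* test and user names are illustrative only.
*/
class SpaceTestUsageSketch extends SpaceTest {
@org.junit.Test
public void createsAndCleansUpTempUser() {
prepareTest();
SpaceDog superadmin = resetTestBackend();
SpaceDog user = createTempUser(superadmin, "alice");
assertNotNull(user);
// clean up the throwaway credentials afterwards
superdogDeletesCredentials(superadmin.backendId(), "alice");
}
}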
|
package joliex.gwt.client;
import com.google.gwt.user.client.rpc.IsSerializable;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
public class Value implements Serializable, IsSerializable
{
public enum Type implements IsSerializable {
UNDEFINED, STRING, INT, DOUBLE, LONG, BOOLEAN, BYTEARRAY
}
// This field must not be final or GWT will not serialize it correctly
private Map< String, ValueVector > children = new HashMap< String, ValueVector >();
private String valueObject = null;
private Type type = Type.UNDEFINED;
public Value()
{}
public Value( String value )
{
setValue( value );
}
public Value( Integer value )
{
setValue( value );
}
public Value( Double value )
{
setValue( value );
}
// Added by Balint Maschio
public Value( Long value )
{
setValue( value );
}
public Value( Boolean value )
{
setValue( value );
}
public Value( ByteArray value )
{
setValue( value );
}
public boolean isString()
{
return type == Type.STRING;
}
public boolean isInt()
{
return type == Type.INT;
}
public boolean isDouble()
{
return type == Type.DOUBLE;
}
// Added by Balint Maschio
public boolean isLong()
{
return type == Type.LONG;
}
// Added by Balint Maschio
public boolean isBool()
{
return type == Type.BOOLEAN;
}
public boolean isByteArray()
{
return type == Type.BYTEARRAY;
}
public boolean isDefined()
{
return type != Type.UNDEFINED;
}
public ValueVector getChildren( String id )
{
ValueVector v = children.get( id );
if ( v == null ) {
v = new ValueVector();
children.put( id, v );
}
return v;
}
public boolean hasChildren()
{
return !children.isEmpty();
}
public boolean hasChildren( String id )
{
return children.get( id ) != null;
}
public void deepCopy( Value otherValue )
{
valueObject = otherValue.valueObject;
type = otherValue.type;
ValueVector myVector;
Value myValue;
for( Entry< String, ValueVector > entry : otherValue.children.entrySet() ) {
myVector = new ValueVector();
for( Value v : entry.getValue() ) {
myValue = new Value();
myValue.deepCopy( v );
myVector.add( myValue ); // add the copied node, not the original
}
children.put( entry.getKey(), myVector );
}
}
public String strValue()
{
if ( valueObject == null )
return new String();
return valueObject;
}
public int intValue()
{
if ( valueObject == null )
return 0;
return Integer.valueOf( valueObject );
}
public double doubleValue()
{
if ( valueObject == null )
return 0.0;
return Double.valueOf( valueObject );
}
// Added by Balint Maschio
public long longValue()
{
if ( valueObject == null )
return 0L;
return Long.valueOf( valueObject );
}
public boolean boolValue()
{
if ( valueObject == null )
return false;
return Boolean.valueOf( valueObject );
}
public ByteArray byteArrayValue() {
ByteArray r = null;
if ( valueObject == null ) {
byte[] resp = new byte[0];
return new ByteArray( resp );
} else {
char[] chars = valueObject.toCharArray();
byte[] byteArrayToReturn= new byte[chars.length * 2 ]; //bytes per char = 2
for (int i = 0; i < chars.length; i++)
{
for (int j = 0; j < 2; j++)
byteArrayToReturn[i * 2 + j] = (byte) (chars[i] >>> (8 * (1 - j)));
}
return new ByteArray( byteArrayToReturn );
}
}
public Value getNewChild( String childId )
{
ValueVector vec = getChildren( childId );
Value retVal = new Value();
vec.add( retVal );
return retVal;
}
public Map< String, ValueVector > children()
{
return children;
}
public Value getFirstChild( String id )
{
return getChildren( id ).first();
}
public final void setValue( String obj )
{
valueObject = obj;
type = Type.STRING;
}
public final void setValue( Integer obj )
{
valueObject = obj.toString();
type = Type.INT;
}
public final void setValue( Double obj )
{
valueObject = obj.toString();
type = Type.DOUBLE;
}
// Added by Balint Maschio
public final void setValue( Long obj )
{
valueObject = obj.toString();
type = Type.LONG;
}
public final void setValue( Boolean obj )
{
valueObject = obj.toString();
type = Type.BOOLEAN;
}
public final void setValue( ByteArray obj )
{
valueObject = obj.toString();
type = Type.BYTEARRAY;
}
}
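/*
* A minimal usage sketch: build a small value tree and take a deep copy of it.
* The child names and values are illustrative only.
*/
class ValueUsageSketch
{
static Value buildExample()
{
Value root = new Value();
root.getNewChild( "name" ).setValue( "Alice" );
root.getNewChild( "age" ).setValue( Integer.valueOf( 30 ) );
// deepCopy duplicates every node, so later edits to root do not affect the copy
Value copy = new Value();
copy.deepCopy( root );
return copy;
}
}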
|
package com.couchbase.cblite.phonegap;
import android.content.Context;
import android.text.TextUtils;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import com.couchbase.lite.DatabaseOptions;
import com.couchbase.lite.Document;
import com.couchbase.lite.DocumentChange;
import com.couchbase.lite.Query;
import com.couchbase.lite.QueryEnumerator;
import com.couchbase.lite.QueryRow;
import com.couchbase.lite.UnsavedRevision;
import com.couchbase.lite.android.AndroidContext;
import com.couchbase.lite.Database;
import com.couchbase.lite.Manager;
import com.couchbase.lite.auth.Authenticator;
import com.couchbase.lite.auth.AuthenticatorFactory;
import com.couchbase.lite.replicator.Replication;
import com.couchbase.lite.View;
import com.couchbase.lite.javascript.JavaScriptReplicationFilterCompiler;
import com.couchbase.lite.javascript.JavaScriptViewCompiler;
import com.couchbase.lite.util.Log;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
public class CBLite extends CordovaPlugin {
private static Manager dbmgr = null;
private static HashMap<String, Database> dbs = null;
private static HashMap<String, Replication> replications = null;
private static HashMap<String, Database.ChangeListener> changeListeners = null;
private static HashMap<String, Replication.ChangeListener> replicationListeners = null;
private static int runnerCount = 0;
final static int MAX_THREADS = 3;
private static ObjectMapper mapper = new ObjectMapper();
public CBLite() {
super();
System.out.println("CBLite() constructor called");
}
@Override
public void initialize(CordovaInterface cordova, CordovaWebView webView) {
System.out.println("initialize() called");
super.initialize(cordova, webView);
try {
View.setCompiler(new JavaScriptViewCompiler());
Database.setFilterCompiler(new JavaScriptReplicationFilterCompiler());
dbmgr = startCBLite(this.cordova.getActivity());
} catch (final Exception e) {
e.printStackTrace();
}
}
@Override
public void onReset() {
//cancel change listeners
if (changeListeners != null && dbs != null) {
// remove each listener from the database it was registered on
for (Map.Entry<String, Database.ChangeListener> entry : changeListeners.entrySet()) {
Database db = dbs.get(entry.getKey());
if (db != null) db.removeChangeListener(entry.getValue());
}
}
if (replicationListeners != null && replications != null) {
// listener keys mirror the replication keys ("<dbName>_push" / "<dbName>_pull")
for (Map.Entry<String, Replication.ChangeListener> entry : replicationListeners.entrySet()) {
Replication replication = replications.get(entry.getKey());
if (replication != null) replication.removeChangeListener(entry.getValue());
}
}
//cancel replications
if (replications != null) {
for (Replication replication : replications.values()) {
replication.stop();
}
}
if (dbs != null) dbs.clear();
if (changeListeners != null) changeListeners.clear();
if (replicationListeners != null) replicationListeners.clear();
if (replications != null) replications.clear();
runnerCount = 0;
}
@Override
public boolean execute(String action, JSONArray args, CallbackContext callback) {
//UTIL
if (action.equals("changesDatabase")) changesDatabase(args, callback);
else if (action.equals("changesReplication")) changesReplication(args, callback);
else if (action.equals("compact")) compact(args, callback);
else if (action.equals("info")) info(args, callback);
else if (action.equals("initDb")) initDb(args, callback);
else if (action.equals("lastSequence")) lastSequence(args, callback);
else if (action.equals("replicateFrom")) replicateFrom(args, callback);
else if (action.equals("replicateTo")) replicateTo(args, callback);
else if (action.equals("reset")) reset(args, callback);
else if (action.equals("stopReplication")) stopReplication(args, callback);
else if (action.equals("sync")) sync(args, callback);
//READ
else if (action.equals("allDocs")) allDocs(args, callback);
else if (action.equals("get")) get(args, callback);
else if (action.equals("getDocRev")) getDocRev(args, callback);
//WRITE
else if (action.equals("putAttachment")) putAttachment(args, callback);
else if (action.equals("upsert")) upsert(args, callback);
return true;
}
private void changesDatabase(final JSONArray args, final CallbackContext callback) {
PluginResult result = new PluginResult(PluginResult.Status.NO_RESULT);
result.setKeepCallback(true);
callback.sendPluginResult(result);
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
final String dbName = args.getString(0);
if (changeListeners == null) {
changeListeners = new HashMap<String, Database.ChangeListener>();
}
if (dbs.get(dbName) != null) {
changeListeners.put(dbName, new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
List<DocumentChange> changes = event.getChanges();
long lastSequence = dbs.get(dbName).getLastSequenceNumber();
for (DocumentChange change : changes) {
PluginResult result = new PluginResult(PluginResult.Status.OK,
"{\"id\":" + "\"" + change.getDocumentId() + "\"" + ",\"is_delete\":" + change.isDeletion() + ",\"seq_num\":" + lastSequence + "}");
result.setKeepCallback(true);
callback.sendPluginResult(result);
}
}
});
dbs.get(dbName).addChangeListener(changeListeners.get(dbName));
}
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void changesReplication(final JSONArray args, final CallbackContext callback) {
PluginResult result = new PluginResult(PluginResult.Status.NO_RESULT);
result.setKeepCallback(true);
callback.sendPluginResult(result);
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
if (replicationListeners == null) {
replicationListeners = new HashMap<String, Replication.ChangeListener>();
}
if (dbs.get(dbName) != null) {
replicationListeners.put(dbName + "_push", new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
Replication.ReplicationStatus status = event.getStatus();
PluginResult result = new PluginResult(PluginResult.Status.OK, "push replication: " + status.toString());
result.setKeepCallback(true);
callback.sendPluginResult(result);
}
});
replicationListeners.put(dbName + "_pull", new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
Replication.ReplicationStatus status = event.getStatus();
PluginResult result = new PluginResult(PluginResult.Status.OK, "pull replication: " + status.toString());
result.setKeepCallback(true);
callback.sendPluginResult(result);
}
});
replications.get(dbName + "_push").addChangeListener(replicationListeners.get(dbName + "_push"));
replications.get(dbName + "_pull").addChangeListener(replicationListeners.get(dbName + "_pull"));
}
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void compact(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
dbs.get(dbName).compact();
callback.success("attachment saved!");
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void info(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
callback.success(dbs.get(dbName).getDocumentCount());
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void initDb(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
if (dbs == null) dbs = new HashMap<String, Database>();
DatabaseOptions options = new DatabaseOptions();
options.setCreate(true);
options.setStorageType(Manager.FORESTDB_STORAGE);
dbs.put(dbName, dbmgr.openDatabase(dbName, options));
callback.success("CBL db init success");
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void lastSequence(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
callback.success((int) dbs.get(dbName).getLastSequenceNumber());
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void replicateFrom(JSONArray args, CallbackContext callback) {
}
private void replicateTo(JSONArray args, CallbackContext callback) {
}
private void reset(JSONArray args, CallbackContext callback) {
this.onReset();
}
private void stopReplication(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
Database db = dbs.get(dbName);
if (db != null) {
for (Replication replication : db.getAllReplications()) replication.stop();
callback.success("true");
} else callback.error("false");
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void sync(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
URL syncUrl = new URL(args.getString(1));
String user = args.getString(2);
String pass = args.getString(3);
if (replications == null) replications = new HashMap<String, Replication>();
Replication push = dbs.get(dbName).createPushReplication(syncUrl);
Replication pull = dbs.get(dbName).createPullReplication(syncUrl);
Authenticator auth = AuthenticatorFactory.createBasicAuthenticator(user, pass);
push.setAuthenticator(auth);
pull.setAuthenticator(auth);
push.setContinuous(true);
pull.setContinuous(true);
push.start();
pull.start();
replications.put(dbName + "_push", push);
replications.put(dbName + "_pull", pull);
callback.success("true");
} catch (Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void allDocs(final JSONArray args, final CallbackContext callback) {
PluginResult firstResult = new PluginResult(PluginResult.Status.NO_RESULT);
firstResult.setKeepCallback(true);
callback.sendPluginResult(firstResult);
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
final String dbName = args.getString(0);
final int totalDocs = dbs.get(dbName).getDocumentCount();
final int batch = 500;
final int segments = batch > totalDocs ? 1 : totalDocs / batch;
final ArrayList<Integer> skipList = new ArrayList<Integer>();
final AtomicInteger numCompleted = new AtomicInteger();
for (int i = 0; i <= segments; i++) skipList.add(i * batch);
ExecutorService executor = Executors.newFixedThreadPool(MAX_THREADS);
for (Integer skipCount : skipList) {
final int innerSkip = skipCount;
Future<Boolean> isComplete = executor.submit(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
Query query = dbs.get(dbName).createAllDocumentsQuery();
query.setAllDocsMode(Query.AllDocsMode.ALL_DOCS);
query.setPrefetch(true);
query.setLimit(batch);
query.setSkip(innerSkip);
try {
QueryEnumerator allDocsQuery = query.run();
final ArrayList<String> responseBuffer = new ArrayList<String>();
for (Iterator<QueryRow> it = allDocsQuery; it.hasNext(); ) {
QueryRow row = it.next();
responseBuffer.add(mapper.writeValueAsString(row.asJSONDictionary()));
}
PluginResult result = new PluginResult(PluginResult.Status.OK, "[" + TextUtils.join(",", responseBuffer) + "]");
result.setKeepCallback(true);
callback.sendPluginResult(result);
numCompleted.incrementAndGet();
} catch (Exception e) {
PluginResult result = new PluginResult(PluginResult.Status.ERROR, e.getMessage());
result.setKeepCallback(false);
callback.sendPluginResult(result);
return false;
}
return true;
}
});
runnerCount += 1;
if (runnerCount >= MAX_THREADS) {
isComplete.get();
runnerCount = 0;
}
}
executor.submit(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
while (numCompleted.get() < skipList.size()) {
Thread.sleep(1000);
}
PluginResult finalResult = new PluginResult(PluginResult.Status.OK, "");
finalResult.setKeepCallback(false);
callback.sendPluginResult(finalResult);
runnerCount = 0;
return true;
}
});
} catch (Exception e) {
PluginResult result = new PluginResult(PluginResult.Status.ERROR, e.getMessage());
result.setKeepCallback(false);
callback.sendPluginResult(result);
}
}
});
}
private void get(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
String id = args.getString(1);
Boolean isLocal = args.getBoolean(2);
if (isLocal) {
Map<String, Object> localDoc = dbs.get(dbName).getExistingLocalDocument(id);
if (localDoc != null) {
callback.success(mapper.writeValueAsString(localDoc));
} else callback.error("null");
} else {
Document doc = dbs.get(dbName).getExistingDocument(id);
if (doc != null) {
String jsonString = mapper.writeValueAsString(doc.getProperties());
callback.success(jsonString);
} else callback.error("null");
}
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void getDocRev(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
String id = args.getString(1);
Document doc = dbs.get(dbName).getExistingDocument(id);
if (doc != null) {
callback.success(doc.getCurrentRevisionId());
} else callback.error("null");
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void putAttachment(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
String filePath = cordova.getActivity().getApplicationContext().getFilesDir() + "/" + args.getString(5) + "/" + args.getString(2);
FileInputStream stream = new FileInputStream(filePath);
Document doc = dbs.get(dbName).getDocument(args.getString(1));
UnsavedRevision newRev = doc.getCurrentRevision().createRevision();
newRev.setAttachment(args.getString(3), args.getString(4), stream);
newRev.save();
callback.success("attachment saved!");
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
private void upsert(final JSONArray args, final CallbackContext callback) {
cordova.getThreadPool().execute(new Runnable() {
public void run() {
try {
String dbName = args.getString(0);
String id = args.getString(1);
String jsonString = args.getString(2);
Boolean isLocal = args.getBoolean(3);
ObjectMapper mapper = new ObjectMapper();
if (isLocal) {
Map<String, Object> mapDoc = mapper.readValue(jsonString, new TypeReference<Map<String, Object>>() {
});
dbs.get(dbName).putLocalDocument(id, mapDoc);
callback.success("local upsert successful");
} else {
Document doc = dbs.get(dbName).getExistingDocument(id);
Map<String, Object> mapDoc = mapper.readValue(jsonString, new TypeReference<Map<String, Object>>() {
});
if (doc != null) doc.putProperties(mapDoc);
else {
Document newDoc = dbs.get(dbName).getDocument(id);
newDoc.putProperties(mapDoc);
}
callback.success("upsert successful");
}
} catch (final Exception e) {
callback.error(e.getMessage());
}
}
});
}
//PLUGIN BOILER PLATE
private Manager startCBLite(Context context) {
try {
// Manager.enableLogging(Log.TAG, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_SYNC, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_QUERY, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_VIEW, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_CHANGE_TRACKER, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_BLOB_STORE, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_DATABASE, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_LISTENER, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_MULTI_STREAM_WRITER, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_REMOTE_REQUEST, Log.VERBOSE);
// Manager.enableLogging(Log.TAG_ROUTER, Log.VERBOSE);
dbmgr = new Manager(new AndroidContext(context), Manager.DEFAULT_OPTIONS);
} catch (IOException e) {
throw new RuntimeException(e);
}
return dbmgr;
}
@Override
public void onResume(boolean multitasking) {
System.out.println("CBLite.onResume() called");
}
@Override
public void onPause(boolean multitasking) {
System.out.println("CBLite.onPause() called");
}
}
|
package com.db.net.http;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.Socket;
import java.net.URL;
import java.security.KeyStore;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import com.db.logging.Logger;
import com.db.logging.LoggerManager;
import com.db.net.ssl.TrustAllSSLManager;
/**
* This client is used to connect to a web server that supports
* http (HyperText Transfer Protocol).
*
* @author Dave Longley
*/
public class HttpWebClient
{
/**
* The URL to connect to.
*/
protected URL mUrl;
/**
* The SSL socket factory for creating SSL sockets.
*/
protected SSLSocketFactory mSSLSocketFactory;
/**
* The default user-agent name for this client.
*/
public static final String DEFAULT_USER_AGENT =
"Digital Bazaar Http Client 1.0";
/**
* Creates a new http web client with no specified URL to connect to.
*/
public HttpWebClient()
{
this((URL)null);
}
/**
* Creates a new HttpWebClient with the given host and port.
*
* @param host the host to connect to.
* @param port the port to connect on.
*
* @throws MalformedURLException
*/
public HttpWebClient(String host, int port) throws MalformedURLException
{
this(host + ":" + port);
}
/**
* Creates a new HttpWebClient with the given URL.
*
* @param url the URL to connect to.
*
* @throws MalformedURLException
*/
public HttpWebClient(String url) throws MalformedURLException
{
// set the URL
setUrl(url);
}
/**
* Creates a new HttpWebClient with the given URL.
*
* @param url the URL to connect to.
*/
public HttpWebClient(URL url)
{
// set the URL
setUrl(url);
}
/**
* Creates the internal SSL socket factory.
*
* @param keyManagers the key managers for the factory.
* @param trustManagers the trust managers for the factory.
*/
protected void createSSLSocketFactory(
KeyManager[] keyManagers, TrustManager[] trustManagers)
{
try
{
// create ssl context
SSLContext sslContext = SSLContext.getInstance("TLS");
// initialize ssl context
sslContext.init(keyManagers, trustManagers, null);
// create the ssl factory
mSSLSocketFactory = sslContext.getSocketFactory();
}
catch(Throwable t)
{
getLogger().debug(getClass(),
"could not create SSL socket factory.");
getLogger().debug(getClass(), LoggerManager.getStackTrace(t));
// set ssl factory to null
mSSLSocketFactory = null;
}
}
/**
* Gets the SSL socket factory.
*
* @return the SSL socket factory.
*/
protected SSLSocketFactory getSSLSocketFactory()
{
if(mSSLSocketFactory == null)
{
// create default SSL socket factory
// use trust all manager
TrustManager[] trustManagers =
new TrustManager[]{new TrustAllSSLManager()};
// create SSLSocketFactory
createSSLSocketFactory(null, trustManagers);
}
return mSSLSocketFactory;
}
/**
* Gets an http web connection to the specified URL.
*
* @param url the URL to connect to.
*
* @return the http web connection or null if the connection could
* not be made.
*/
protected HttpWebConnection getWebConnection(URL url)
{
HttpWebConnection hwc = null;
try
{
// create an unconnected socket
Socket socket = new Socket();
// connect the socket with a timeout of 10 seconds
InetSocketAddress address = new InetSocketAddress(
url.getHost(), url.getPort());
socket.connect(address, 10000);
if(url.getProtocol().equals("https"))
{
// wrap the socket with an SSL socket
socket = getSSLSocketFactory().createSocket(
socket, url.getHost(), url.getPort(), true);
// set the enabled cipher suites
String[] suites = getSSLSocketFactory().getSupportedCipherSuites();
((SSLSocket)socket).setEnabledCipherSuites(suites);
}
// create web connection
hwc = new HttpWebConnection(socket);
getLogger().debug(getClass(), "connected to: " + url.toString());
}
catch(Throwable t)
{
getLogger().debug(getClass(),
"could not establish an http web connection" +
",url='" + url.toString() + "'" +
",reason=" + t);
getLogger().debug(getClass(), LoggerManager.getStackTrace(t));
if(hwc != null)
{
hwc.disconnect();
}
hwc = null;
}
return hwc;
}
/**
* Attempts to connect to the stored endpoint address.
*
* If a connection cannot be established, the connection will be retried
* multiple times.
*
* @return the HttpWebConnection to the endpoint address or null if failure.
*/
public synchronized HttpWebConnection connect()
{
HttpWebConnection rval = null;
rval = connect(getUrl());
return rval;
}
/**
* Attempts to connect to the passed URL. If a connection cannot be
* established, the connection will be retried multiple times.
*
* @param url the URL to connect to.
*
* @return the web connection to the URL or null if failure.
*
* @throws MalformedURLException
*/
public synchronized HttpWebConnection connect(String url)
throws MalformedURLException
{
return connect(new URL(url));
}
/**
* Attempts to connect to the passed URL. If a connection cannot be
* established, the connection will be retried multiple times.
*
* @param url the URL to connect to.
*
* @return the web connection to the URL or null if failure.
*/
public synchronized HttpWebConnection connect(URL url)
{
HttpWebConnection wc = null;
if(url != null)
{
getLogger().debug(getClass(),
"trying to establish an http web connection to '" +
url + "'...");
// try to get a web connection
wc = getWebConnection(url);
// try twice more if a web connection could not be established
for(int i = 0; i < 2 && wc == null &&
!Thread.currentThread().isInterrupted(); i++)
{
wc = getWebConnection(url);
}
if(wc != null)
{
getLogger().debug(getClass(),
"http web connection established," +
"ip=" + wc.getHost() + ":" + wc.getRemotePort());
}
else
{
getLogger().error(getClass(),
"could not establish an http web connection!,url=" + url);
}
}
else
{
getLogger().error(getClass(),
"could not establish an http web connection! " +
"URL was null or blank!");
}
return wc;
}
/**
* Sends an http web request from the client to the server.
*
* @param request the http web request to send to the server.
* @param is the input stream to read the request body from.
*
* @return true if the request was successfully sent, false if not.
*/
public boolean sendRequest(HttpWebRequest request, InputStream is)
{
boolean rval = false;
if(request.sendHeader())
{
rval = request.sendBody(is);
}
return rval;
}
/**
* Sends an http web request from the client to the server.
*
* @param request the http web request to send to the server.
* @param body the body to send along with the request.
*
* @return true if the request was successfully sent, false if not.
*/
public boolean sendRequest(HttpWebRequest request, String body)
{
boolean rval = false;
if(request.sendHeader())
{
rval = request.sendBody(body);
}
return rval;
}
/**
* Sends an http web request from the client to the server.
*
* @param request the http web request to send to the server.
* @param body the body to send along with the request.
*
* @return true if the request was successfully sent, false if not.
*/
public boolean sendRequest(HttpWebRequest request, byte[] body)
{
boolean rval = false;
if(request.sendHeader())
{
rval = request.sendBody(body);
}
return rval;
}
/**
* Sends an http web request from the client to the server.
*
* @param request the http web request to send to the server.
*
* @return true if the request was successfully sent, false if not.
*/
public boolean sendRequest(HttpWebRequest request)
{
boolean rval = false;
rval = request.sendHeader();
return rval;
}
/**
* Receives the response header from the server.
*
* @param response the http response to read with.
*
* @return true if the response header could be read, false if not.
*/
public boolean receiveResponseHeader(HttpWebResponse response)
{
boolean rval = false;
// keep reading while HTTP 1xx
boolean received = false;
while((received = response.receiveHeader()) &&
response.getHeader().getStatusCode().startsWith("1"))
{
// read and discard body if status code is 1xx
response.receiveBody();
}
// set whether or not the header was read
if(received)
{
rval = !response.getHeader().getStatusCode().startsWith("1");
}
return rval;
}
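   /*
    * A minimal usage sketch for the request/response methods above (illustrative only; the URL
    * "http://example.com/" is hypothetical, and the header setters mirror those used in getFile() below):
    *
    *    HttpWebClient client = new HttpWebClient("http://example.com/");
    *    HttpWebConnection connection = client.connect();
    *    if(connection != null)
    *    {
    *       HttpWebRequest request = new HttpWebRequest(connection);
    *       request.getHeader().setMethod("GET");
    *       request.getHeader().setPath("/");
    *       request.getHeader().setVersion("HTTP/1.1");
    *       request.getHeader().setHost(client.getUrl().getHost() + ":" + client.getUrl().getPort());
    *       request.getHeader().setUserAgent(DEFAULT_USER_AGENT);
    *       request.getHeader().setConnection("close");
    *       if(client.sendRequest(request))
    *       {
    *          HttpWebResponse response = request.createHttpWebResponse();
    *          if(client.receiveResponseHeader(response))
    *          {
    *             // read the body, e.g. response.receiveBody(someOutputStream)
    *          }
    *       }
    *       connection.disconnect();
    *    }
    */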
/**
* A convenience method for performing an HTTP GET to retrieve a file.
*
* @param url the url for the file.
* @param directory the directory to store the file in.
*
* @return the file if it was received or null if the file could not
* be received.
*
* @throws MalformedURLException
*/
public File getFile(String url, File directory)
throws MalformedURLException
{
return getFile(new URL(url), directory);
}
/**
* A convenience method for performing an HTTP GET to retrieve a file.
*
* @param url the URL for the file.
* @param directory the directory to store the file in.
*
* @return the file if it was received or null if the file could not
* be received.
*/
public File getFile(URL url, File directory)
{
File rval = null;
// get a web connection
HttpWebConnection connection = connect(url);
if(connection != null)
{
// create http web request
HttpWebRequest request = new HttpWebRequest(connection);
request.getHeader().setMethod("GET");
request.getHeader().setPath(url.getPath());
request.getHeader().setVersion("HTTP/1.1");
request.getHeader().setHost(url.getHost() + ":" + url.getPort());
request.getHeader().setUserAgent(DEFAULT_USER_AGENT);
request.getHeader().setConnection("close");
// send request
if(sendRequest(request))
{
// receive response header
HttpWebResponse response = request.createHttpWebResponse();
if(receiveResponseHeader(response))
{
// see if response was OK
if(response.getHeader().hasOKStatusCode())
{
// get the file name
String filename = response.getHeader().
getContentDispositionValue("filename");
if(filename == null)
{
filename = "tempfile.tmp";
}
// get full path of file to write to
String path =
directory.getAbsolutePath() + File.separator + filename;
// create file output stream reference
FileOutputStream fos = null;
try
{
// create file output stream for writing to the file
fos = new FileOutputStream(path);
// receive response body
response.receiveBody(fos);
// close the file output stream
fos.close();
// file received
rval = new File(path);
}
catch(IOException e)
{
                        getLogger().error(getClass(),
                           "An exception occurred while receiving a file!," +
                           "exception=" + e);
                        getLogger().debug(getClass(), LoggerManager.getStackTrace(e));
}
try
{
// ensure file output stream is closed
if(fos != null)
{
fos.close();
}
}
catch(IOException ignore)
{
}
}
}
}
// disconnect
connection.disconnect();
}
return rval;
}
/**
* Sets an ssl certificate (from a keystore) to trust. Uses
* the default algorithm for the certificate (SunX509).
*
* @param keystore the name of the keystore file.
* @param password the password to unlock the keystore.
*
* @return true if the ssl certificate was successfully loaded,
* false if not.
*/
public boolean setTrustedSSLKeystore(String keystore, String password)
{
return setTrustedSSLKeystore(keystore, password, "SunX509");
}
/**
* Sets an ssl certificate (from a keystore) to trust.
*
* @param keystore the name of the keystore file.
* @param password the password to unlock the keystore.
* @param algorithm the algorithm for the keystore.
*
* @return true if the ssl certificate was successfully loaded,
* false if not.
*/
public synchronized boolean setTrustedSSLKeystore(
String keystore, String password, String algorithm)
{
boolean rval = false;
try
{
// load keystore
KeyStore ks = KeyStore.getInstance("JKS");
ks.load(new FileInputStream(keystore), password.toCharArray());
// get key managers
KeyManagerFactory kmf = KeyManagerFactory.getInstance(algorithm);
kmf.init(ks, password.toCharArray());
// get trust managers
String alg = TrustManagerFactory.getDefaultAlgorithm();
TrustManagerFactory tmf = TrustManagerFactory.getInstance(alg);
tmf.init(ks);
// create SSL socket factory
createSSLSocketFactory(kmf.getKeyManagers(), tmf.getTrustManagers());
rval = true;
}
catch(Throwable t)
{
getLogger().error(getClass(),
"Could not load keystore!, keystore=" + keystore);
         getLogger().debug(getClass(), LoggerManager.getStackTrace(t));
}
return rval;
}
/**
* Gets the path to the web service.
*
* @return the path to the web service.
*/
public synchronized String getWebServicePath()
{
String rval = null;
if(getUrl() != null)
{
// get the URL path
rval = getUrl().getPath();
}
return rval;
}
/**
* Sets the URL to connect to.
*
* @param url the URL to connect to.
*
* @throws MalformedURLException
*/
public void setUrl(String url) throws MalformedURLException
{
setUrl(new URL(url));
}
/**
* Sets the URL to connect to.
*
* @param url the URL to connect to.
*/
public synchronized void setUrl(URL url)
{
mUrl = url;
}
/**
* Gets the URL to connect to.
*
* @return the URL to connect to.
*/
public synchronized URL getUrl()
{
return mUrl;
}
/**
* Gets the logger for this http web client.
*
* @return the logger for this http web client.
*/
public Logger getLogger()
{
return LoggerManager.getLogger("dbnet");
}
}
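/*
 * A brief usage sketch for the convenience methods above (illustrative only; the URL, keystore name and
 * password are hypothetical, and getFile(String, File) is declared to throw MalformedURLException):
 *
 *    HttpWebClient client = new HttpWebClient();
 *    client.setTrustedSSLKeystore("client.keystore", "password"); // optional; otherwise a trust-all manager is used for https
 *    File file = client.getFile("https://example.com/some/file", new File("/tmp"));
 *    if(file != null)
 *    {
 *       // the file was saved in /tmp, named from the Content-Disposition header or "tempfile.tmp"
 *    }
 */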
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package ch.unizh.ini.jaer.projects.virtualslotcar;
import com.sun.opengl.util.GLUT;
import java.awt.Point;
import java.awt.geom.Point2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import javax.media.opengl.GL;
import javax.media.opengl.GLAutoDrawable;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.event.BasicEvent;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.event.OutputEventIterator;
import net.sf.jaer.eventprocessing.FilterChain;
import net.sf.jaer.eventprocessing.filter.BackgroundActivityFilter;
import net.sf.jaer.eventprocessing.tracking.*;
import net.sf.jaer.graphics.FrameAnnotater;
import net.sf.jaer.util.filter.LowpassFilter;
/**
* Tracks two slot cars using a SlotcarTrack model and determines which car is which.
*
*
* @author tobi
*/
public class TwoCarTracker extends RectangularClusterTracker implements FrameAnnotater, PropertyChangeListener, CarTracker {
private boolean onlyFollowTrack = getBoolean("onlyFollowTrack", true);
private float relaxToTrackFactor = getFloat("relaxToTrackFactor", 0.05f);
private float distanceFromTrackMetricTauMs = getFloat("distanceFromTrackMetricTauMs", 200);
private int minSegmentsToBeCarCluster = getInt("minSegmentsToBeCarCluster", 4);
private SlotcarTrack track;
private TwoCarCluster currentCarCluster = null, lastValidCarCluster = null;
private NearbyTrackEventFilter nearbyTrackFilter = null;
private float maxDistanceFromTrackPoint = getFloat("maxDistanceFromTrackPoint", 15); // pixels - need to set in track model
public TwoCarTracker(AEChip chip) {
super(chip);
setPropertyTooltip("onlyFollowTrack", "If set, clusters will only follow the track. If false, clusters can follow car off the track.");
setPropertyTooltip("relaxToTrackFactor", "Tracking will normally only parallel the track. This factor control how much the cluster converges onto the track, i.e., the allowed normal motion as fraction of the parallel motion.");
setPropertyTooltip("distanceFromTrackMetricTauMs", "Each car cluster distance from track model is lowpass filtered with this time constant in ms; the closest one is chosen as the computer controlled car");
setPropertyTooltip("minSegmentsToBeCarCluster", "a CarCluster needs to pass at least this many segments to be marked as the car cluster");
setPropertyTooltip("maxDistanceFromTrackPoint", "Maximum allowed distance in pixels from track spline point to find nearest spline point; if currentTrackPos=-1 increase maxDistanceFromTrackPoint");
// set reasonable defaults
if (!isPreferenceStored("maxNumClusters")) {
setMaxNumClusters(2);
}
if (!isPreferenceStored("highwayPerspectiveEnabled")) {
setHighwayPerspectiveEnabled(false);
}
if (!isPreferenceStored("enableClusterExitPurging")) {
setEnableClusterExitPurging(false);
}
if (!isPreferenceStored("dynamicSizeEnabled")) {
setDynamicSizeEnabled(false);
}
if (!isPreferenceStored("aspectRatio")) {
setAspectRatio(.78f);
}
if (!isPreferenceStored("clusterSize")) {
setClusterSize(.07f);
}
if (!isPreferenceStored("dontMergeEver")) {
setDontMergeEver(true);
}
if (!isPreferenceStored("angleFollowsVelocity")) {
setAngleFollowsVelocity(true);
}
if (!isPreferenceStored("useVelocity")) {
setUseVelocity(true);
}
if (!isPreferenceStored("useNearestCluster")) {
setUseNearestCluster(true);
}
if (!isPreferenceStored("pathsEnabled")) {
setPathsEnabled(true);
}
if (!isPreferenceStored("pathLength")) {
setPathLength(50);
}
if (!isPreferenceStored("velocityTauMs")) {
setVelocityTauMs(15);
}
if (!isPreferenceStored("showClusterVelocity")) {
setShowClusterVelocity(true);
}
if (!isPreferenceStored("colorClustersDifferentlyEnabled")) {
setColorClustersDifferentlyEnabled(true);
}
FilterChain filterChain = new FilterChain(chip);
filterChain.add(new BackgroundActivityFilter(chip));
nearbyTrackFilter = new NearbyTrackEventFilter(chip);
filterChain.add(nearbyTrackFilter);
setEnclosedFilterChain(filterChain);
}
@Override
public Cluster createCluster() {
return new TwoCarCluster();
}
@Override
public Cluster createCluster(BasicEvent ev) {
return new TwoCarCluster(ev);
}
@Override
public Cluster createCluster(Cluster one, Cluster two) {
return new TwoCarCluster(one, two);
}
@Override
public Cluster createCluster(BasicEvent ev, OutputEventIterator itr) {
return new TwoCarCluster(ev, itr);
}
@Override
protected void updateClusterLocations(int t) {
return; // don't move clusters between events to avoid clusters running off of track. TODO this may mess up the prediction step and perhaps we should predict *along* the track
}
/** The method that actually does the tracking.
*
* @param in the event packet.
* @return a possibly filtered event packet passing only events contained in the tracked and visible Clusters, depending on filterEventsEnabled.
*/
@Override
synchronized protected EventPacket<? extends BasicEvent> track(EventPacket<BasicEvent> in) {
boolean updatedClusterList = false;
out = getEnclosedFilterChain().filterPacket(in);
// record cluster locations before packet is processed
for (Cluster c : clusters) {
c.getLastPacketLocation().setLocation(c.location);
}
        // for each event, assign it to the clusters probabilistically according to the distance of the event from each cluster
// if its too far from any cluster, make a new cluster if we can
for (BasicEvent ev : in) {
addEventToClustersOrSpawnNewCluster(ev);
updatedClusterList = maybeCallUpdateObservers(in, ev.timestamp); // callback to update()
if (isLogDataEnabled()) {
logData(ev, in);
}
}
        // TODO update here again, relying on the fact that lastEventTimestamp was set by a possible previous update according to
        // schedule; otherwise we would get a double update of velocityPPT using the same dt
        if (!updatedClusterList && in.getSize() > 0) { // make sure we have at least one event here to get a timestamp
            updateClusterList(in, in.getLastTimestamp()); // update the list at least once per packet
}
if (track == null) {
log.warning("null track - perhaps deserialization failed or no track was saved?");
return null;
}
        // Now accumulate votes for the computer-controlled (CC) car cluster: each visible cluster can win a vote for being
        // closest (on average) to the track model, for being the oldest, and for being nearest to the previously chosen car
        // cluster; the cluster with the most votes becomes the CC cluster.
        // First iterate over clusters to find the distance of each from the track model,
        // accumulating the results in a LowpassFilter for each cluster.
TwoCarCluster closestAvgToTrack = null, oldest = null, nearestLast = null;
float minDistFromTrack = Float.MAX_VALUE, maxAge = Integer.MIN_VALUE, minFromLast = Float.MAX_VALUE;
for (ClusterInterface c : clusters) {
TwoCarCluster cc = (TwoCarCluster) c;
cc.computerControlledCar = false; // mark all false
cc.updateState();
if(!cc.isVisible()) continue;
if (cc.avgDistanceFromTrack < minDistFromTrack) {
minDistFromTrack = cc.avgDistanceFromTrack;
closestAvgToTrack = cc;
}
if (cc.getLifetime() > maxAge) {
maxAge = cc.getLifetime();
oldest = cc;
}
if (currentCarCluster != null) {
float d;
if ((d = (float) cc.getLocation().distanceSq(currentCarCluster.getLocation())) < minFromLast) {
minFromLast = d;
nearestLast = cc;
}
}
}
TwoCarCluster compControlled = null;
int[] votes = new int[getNumClusters()];
int idx = 0;
for (ClusterInterface c : clusters) {
TwoCarCluster cc = (TwoCarCluster) c;
if (cc == nearestLast) {
votes[idx]++;
}
if (cc == closestAvgToTrack) {
votes[idx]++;
}
if (cc == oldest) {
votes[idx]++;
}
            idx++; // advance to this cluster's vote bin (without this, all votes land in bin 0)
        }
int maxVote = 0, maxbin = 0;
for (int i = 0; i < votes.length; i++) {
if (votes[i] > maxVote) {
maxVote = votes[i];
maxbin = i;
}
}
if (clusters.size() > 0 && maxVote>0) {
compControlled = (TwoCarCluster) clusters.get(maxbin);
compControlled.computerControlledCar = true;
if (compControlled != currentCarCluster) {
log.info("Switched CarCluster from " + currentCarCluster + " \n to \n" + compControlled);
}
currentCarCluster = (TwoCarCluster) compControlled;
} else {
currentCarCluster = null;
}
if (currentCarCluster != null) {
lastValidCarCluster = currentCarCluster;
}
return out;
}
/** Returns the putative car cluster.
*
* @return the car cluster, or null if there is no good cluster
*/
@Override
public TwoCarCluster findCarCluster() {
return lastValidCarCluster;
}
private void addEventToClustersOrSpawnNewCluster(BasicEvent ev) {
class ClusterDistance {
Cluster cluster;
float distance;
public ClusterDistance(Cluster cluster, float distance) {
this.cluster = cluster;
this.distance = distance;
}
}
        ArrayList<ClusterDistance> addList = new ArrayList<ClusterDistance>();
for (Cluster c : clusters) {
float dist;
if ((dist = c.distanceTo(ev)) < c.getRadius()) {
addList.add(new ClusterDistance(c, dist));
}
}
if (addList.size() > 0) {
            // we have a list of clusters that all contain the event;
            // partition the event randomly among them.
int r = random.nextInt(addList.size());
addList.get(r).cluster.addEvent(ev);
} else if (clusters.size() < getMaxNumClusters()) {
            // start a new cluster, but only if the event is in range of the track
if (track == null || track.findClosestIndex(new Point(ev.x, ev.y), track.getPointTolerance(), true) != -1) {
Cluster newCluster = null;
newCluster = createCluster(ev); // new Cluster(ev);
clusters.add(newCluster);
}
}
}
@Override
public synchronized void annotate(GLAutoDrawable drawable) {
// super.annotate(drawable);
for (Cluster c : clusters) {
TwoCarCluster cc = (TwoCarCluster) c;
if (isShowAllClusters() || cc.isVisible()) {
cc.draw(drawable);
}
}
}
/**
* @return the minSegmentsToBeCarCluster
*/
public int getMinSegmentsToBeCarCluster() {
return minSegmentsToBeCarCluster;
}
/**
* @param minSegmentsToBeCarCluster the minSegmentsToBeCarCluster to set
*/
public void setMinSegmentsToBeCarCluster(int minSegmentsToBeCarCluster) {
this.minSegmentsToBeCarCluster = minSegmentsToBeCarCluster;
putInt("minSegmentsToBeCarCluster", minSegmentsToBeCarCluster);
}
/** The cluster used for tracking cars. It extends the RectangularClusterTracker.Cluster with segment index and crashed status fields.
*
*/
public class TwoCarCluster extends RectangularClusterTracker.Cluster implements CarCluster {
private final int SEGMENT_HISTORY_LENGTH = 50; // number of segments to keep track of in past
private final int NUM_SEGMENTS_TO_BE_MARKED_RUNNING = 30;
int segmentIdx = -1; // current segment
boolean crashed = false; // flag that we crashed
boolean wasRunningSuccessfully = false; // marked true when car has been running successfully over segments for a while
float lastDistanceFromTrack = 0, avgDistanceFromTrack = 0; // instantaneous and lowpassed distance from track model
int birthSegmentIdx = -1; // which segment we were born on
int numSegmentIncreases = 0; // how many times segment number increased
int crashSegment = -1; // where we crashed
int[] segmentHistory = new int[SEGMENT_HISTORY_LENGTH]; // ring buffer of segment history
private float relaxToTrackFactor;
float segmentSpeedSPS = 0;
private int lastSegmentChangeTimestamp = 0;
{
Arrays.fill(segmentHistory, Integer.MIN_VALUE);
}
int segmentHistoryPointer = 0; // ring pointer, points to next location in ring buffer
LowpassFilter distFilter = new LowpassFilter();
{
distFilter.setTauMs(distanceFromTrackMetricTauMs);
}
boolean computerControlledCar = false;
public TwoCarCluster(BasicEvent ev, OutputEventIterator outItr) {
super(ev, outItr);
}
public TwoCarCluster(Cluster one, Cluster two) {
super(one, two);
}
public TwoCarCluster(BasicEvent ev) {
super(ev);
}
public TwoCarCluster() {
super();
}
        /** Overrides the updatePosition method to only allow movement along the track model, between spline points of the track.
         * Events move the cluster mostly parallel to the nearest track segment; only a small fraction (relaxToTrackFactor)
         * of the normal component is applied, so the cluster gradually converges onto the track.
         * @param event the event
         * @param m the mixing factor, 0 to not move, 1 to move cluster to location of event.
         */
@Override
protected void updatePosition(BasicEvent event, float m) {
if (track == null) {
super.updatePosition(event, m);
return;
} else {
int idx = updateSegmentInfo(event.timestamp);
// move cluster, but only along the track
Point2D.Float v = findClosestTrackSegmentVector();
if (v == null) {
if (!onlyFollowTrack) {
super.updatePosition(event, m);
}
return;
}
float vnorm = (float) v.distance(0, 0);
if (vnorm < 1) {
log.warning("track segment vector is zero; track has idential track points. Edit the track to remove these identical points."); // warn about idential track points
}
float ex = event.x - location.x;
float ey = event.y - location.y;
float proj = m * (v.x * ex + v.y * ey) / vnorm;
                // use a local relax factor (the enclosing tracker's relaxToTrackFactor scaled by the mixing factor) so the
                // cluster moves mostly parallel to the track but still relaxes slowly onto it
                final float relax = TwoCarTracker.this.relaxToTrackFactor * m;
                location.x += (proj * v.x) + relax * ex;
                location.y += (proj * v.y) + relax * ey;
}
}
@Override
public void draw(GLAutoDrawable drawable) {
super.draw(drawable);
final float BOX_LINE_WIDTH = 8f; // in chip
GL gl = drawable.getGL();
// set color and line width of cluster annotation
if (computerControlledCar) {
int x = (int) getLocation().x;
int y = (int) getLocation().y;
int sy = (int) radiusY; // sx sy are (half) size of rectangle
int sx = (int) radiusX;
gl.glColor3f(.8f, .8f, .8f);
gl.glLineWidth(BOX_LINE_WIDTH);
// draw cluster rectangle
drawBox(gl, x, y, sx, sy, getAngle());
} else {
float[] rgb = getColor().getRGBColorComponents(null);
gl.glColor3fv(rgb, 0);
}
gl.glRasterPos3f(location.x, location.y - 4, 0);
chip.getCanvas().getGlut().glutBitmapString(
GLUT.BITMAP_HELVETICA_18,
String.format("dist=%.1f segSp=%.1f", distFilter.getValue(), segmentSpeedSPS));
}
private int updateSegmentInfo(int lastTimestamp) {
if (track == null) {
return -1;
}
int idx = track.findClosestIndex(location, 0, true);
if (birthSegmentIdx == -1 && idx != -1) {
birthSegmentIdx = idx;
}
if (idx != segmentIdx) {
segmentHistory[segmentHistoryPointer] = idx;
segmentHistoryPointer = (segmentHistoryPointer + 1) % SEGMENT_HISTORY_LENGTH; // LENGTH=2,pointer =0, 1, 0, 1, etc
}
if (idx > segmentIdx) {
numSegmentIncreases++;
if (numSegmentIncreases > NUM_SEGMENTS_TO_BE_MARKED_RUNNING) {
wasRunningSuccessfully = true;
}
if (this.lastSegmentChangeTimestamp == 0) {
segmentSpeedSPS = Float.NaN;
} else {
int dt = lastTimestamp - this.lastSegmentChangeTimestamp; // TODO handle dt<0
segmentSpeedSPS = (Float.isInfinite(segmentSpeedSPS) || Float.isNaN(segmentSpeedSPS)) ? 1e6f / dt : .95f * segmentSpeedSPS + .05f * 1e6f / dt;
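                    // The line above is an exponential moving average (weight 0.05 on the newest sample) of the segment-crossing
                    // rate; assuming microsecond timestamps (hence the 1e6 factor), 1e6f/dt is the instantaneous rate in segments per second.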
}
this.lastSegmentChangeTimestamp = lastTimestamp;
}
segmentIdx = idx;
return idx;
}
private Point2D.Float findClosestTrackSegmentVector() {
if (segmentIdx != -1) {
return track.segmentVectors.get(segmentIdx);
} else {
return null;
}
}
public String toString() {
return "CarCluster segmentIdx=" + segmentIdx + " segmentSpeedSPS=" + segmentSpeedSPS + " crashed=" + crashed + " numSegmentIncreases=" + numSegmentIncreases + " wasRunningSuccessfully=" + wasRunningSuccessfully + " " + super.toString();
}
/**
* @return the segmentIdx
*/
@Override
public int getSegmentIdx() {
return segmentIdx;
}
/**
* @param segmentIdx the segmentIdx to set
*/
@Override
public void setSegmentIdx(int segmentIdx) {
this.segmentIdx = segmentIdx;
}
/**
* @return the crashed
*/
@Override
public boolean isCrashed() {
return crashed;
}
/**
* @param crashed the crashed to set
*/
@Override
public void setCrashed(boolean crashed) {
this.crashed = crashed;
}
private void determineIfcrashed() {
// final float SPEED_FOR_CRASH = 10;
if (getSpeedPPS() > getThresholdVelocityForVisibleCluster() || !wasRunningSuccessfully || getLifetime() < 300000) {
crashed = false;
return;
}
crashed=false;
// looks over segment history to find last index of increasing sequence of track points - this is crash point
// march up the history (first point is then the oldest) until we stop increasing (counting wraparound as an increase).
// the last point of increase is the crash location.
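            // Illustrative (hypothetical) history, newest first: 12  -1  11  10  9  8  7 ...
            // The -1 (off-track) sample resets the count; the run 11,10,9,8,7 contains SEGMENTS_BEFORE_CRASH (5)
            // consecutive non-increasing segments going back in time, so the crash is reported at segment 11,
            // the newest segment of that run.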
final int LOOKING_FOR_LAST = 0, COUNTING = 1, FOUND_CRASH = 2;
final int SEGMENTS_BEFORE_CRASH = 5; // need this many upwards to see a crash
int state = LOOKING_FOR_LAST;
int crashSeg = -1;
int count = 0;
int lastValidSeg = Integer.MAX_VALUE;
int startSeg = -1;
StringBuilder sb = new StringBuilder("Pre-crash segment history, counting backwards in time = ");
search:
for (int i = 0; i < SEGMENT_HISTORY_LENGTH; i++) { // for all the recorded segments
int segPointer = segmentHistoryPointer - i - 1;
if (segPointer < 0) {
segPointer = SEGMENT_HISTORY_LENGTH + segPointer; // wrap back on ring buffer
}
int thisSeg = segmentHistory[segPointer];
sb.append(Integer.toString(thisSeg)).append(" ");
switch (state) {
case LOOKING_FOR_LAST:
if (thisSeg == Integer.MIN_VALUE) {
break search; // done with valid points that have had something put in them
}
if (thisSeg == -1) {
continue; // not initialized yet or in crash state
}
lastValidSeg = thisSeg;
startSeg = thisSeg;
count = 1;
state = COUNTING;
break;
case COUNTING:
if (thisSeg == Integer.MIN_VALUE) {
state = LOOKING_FOR_LAST;
count = 0;
break search;
} else if (thisSeg == -1) {
state = LOOKING_FOR_LAST;
count = 0;
continue;
                        } else if ((thisSeg <= lastValidSeg) // if this segment is less than or equal to the last one, accounting for jiggle around nearby points
                                || ( // OR wrap backwards:
                                thisSeg > track.getNumPoints() - SEGMENTS_BEFORE_CRASH // this seg at end of track
                                && lastValidSeg < SEGMENTS_BEFORE_CRASH // previous was at start of track
                                )) { // normal decrement or wraparound
count++; // then count this
lastValidSeg = thisSeg;
if (count >= SEGMENTS_BEFORE_CRASH) {
state = FOUND_CRASH;
crashSeg = startSeg; // mark this one as crash point
break search;
}
} else { // either backwards or -1 (off track)
state = LOOKING_FOR_LAST;
count = 0;
startSeg = thisSeg;
}
break;
case FOUND_CRASH:
break search;
default:
throw new Error("invalid state=" + state);
}
}
switch (state) {
case LOOKING_FOR_LAST:
sb.append("could't find last crash segment, using lastValidSeg=" + lastValidSeg);
crashSegment = lastValidSeg;
break;
case COUNTING:
sb.append("could't find last crash segment, using startSeg=" + startSeg);
crashSegment = startSeg;
break;
case FOUND_CRASH:
sb.append("\ndetermined crash was at segment " + crashSeg);
crashSegment = crashSeg;
crashed=true;
break;
default:
sb.append("\ninvalid state=" + state);
}
sb.append(" for ").append(this.toString());
log.info(sb.toString());
        } // determineIfcrashed
private void updateState() {
lastDistanceFromTrack = track.findDistanceToTrack(getLocation());
avgDistanceFromTrack = distFilter.filter(lastDistanceFromTrack, getLastEventTimestamp());
determineIfcrashed();
}
} // TwoCarCluster
/**
* @param track the track to set
*/
public final void setTrack(SlotcarTrack track) {
SlotcarTrack old = this.track;
this.track = track;
nearbyTrackFilter.setTrack(track);
        if (this.track != old) {
            if (this.track != null) {
                this.track.setPointTolerance(maxDistanceFromTrackPoint);
                log.info("new track with " + track.getNumPoints() + " points"); // logged inside the null check to avoid an NPE
            }
            getSupport().firePropertyChange("track", old, this.track);
        }
}
/**
* @return the onlyFollowTrack
*/
public boolean isOnlyFollowTrack() {
return onlyFollowTrack;
}
/**
* @param onlyFollowTrack the onlyFollowTrack to set
*/
public void setOnlyFollowTrack(boolean onlyFollowTrack) {
this.onlyFollowTrack = onlyFollowTrack;
putBoolean("onlyFollowTrack", onlyFollowTrack);
}
/**
* @return the relaxToTrackFactor
*/
public float getRelaxToTrackFactor() {
return relaxToTrackFactor;
}
/**
* @param relaxToTrackFactor the relaxToTrackFactor to set
*/
public void setRelaxToTrackFactor(float relaxToTrackFactor) {
if (relaxToTrackFactor > 1) {
relaxToTrackFactor = 1;
} else if (relaxToTrackFactor < 0) {
relaxToTrackFactor = 0;
}
this.relaxToTrackFactor = relaxToTrackFactor;
putFloat("relaxToTrackFactor", relaxToTrackFactor);
}
/**
* @return the distanceFromTrackMetricTauMs
*/
public float getDistanceFromTrackMetricTauMs() {
return distanceFromTrackMetricTauMs;
}
/**
* @param distanceFromTrackMetricTauMs the distanceFromTrackMetricTauMs to set
*/
public void setDistanceFromTrackMetricTauMs(float distanceFromTrackMetricTauMs) {
this.distanceFromTrackMetricTauMs = distanceFromTrackMetricTauMs;
putFloat("distanceFromTrackMetricTauMs", distanceFromTrackMetricTauMs);
}
@Override
public void propertyChange(PropertyChangeEvent evt) {
        if (SlotcarTrack.EVENT_TRACK_CHANGED.equals(evt.getPropertyName())) { // compare strings with equals(), not ==
try {
track = (SlotcarTrack) evt.getNewValue();
setTrack(track);
} catch (Exception e) {
log.warning("caught " + e + " when handling property change");
}
}
}
/**
* @return the maxDistanceFromTrackPoint
*/
public float getMaxDistanceFromTrackPoint() {
return maxDistanceFromTrackPoint;
}
/**
* @param maxDistanceFromTrackPoint the maxDistanceFromTrackPoint to set
*/
public void setMaxDistanceFromTrackPoint(float maxDistanceFromTrackPoint) {
        float old = this.maxDistanceFromTrackPoint;
        this.maxDistanceFromTrackPoint = maxDistanceFromTrackPoint; // store the new value even if no track is set yet
        // Define tolerance for track model
        if (track != null) {
            track.setPointTolerance(maxDistanceFromTrackPoint);
        } else {
            log.warning("cannot set point tolerance on track yet - track is null; it will be applied when a track is set");
        }
putFloat("maxDistanceFromTrackPoint", maxDistanceFromTrackPoint);
getSupport().firePropertyChange("maxDistanceFromTrackPoint", old, maxDistanceFromTrackPoint);
}
}
|
package com.mebigfatguy.fbcontrib.detect;
import org.apache.bcel.Repository;
import org.apache.bcel.classfile.AnnotationEntry;
import org.apache.bcel.classfile.Attribute;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.Constant;
import org.apache.bcel.classfile.ConstantPool;
import org.apache.bcel.classfile.ConstantUtf8;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Method;
import org.apache.bcel.classfile.RuntimeVisibleAnnotations;
import org.apache.bcel.classfile.Unknown;
import org.apache.bcel.generic.Type;
import com.mebigfatguy.fbcontrib.utils.TernaryPatcher;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.BytecodeScanningDetector;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.ba.ClassContext;
import edu.umd.cs.findbugs.classfile.DescriptorFactory;
/** looks for odd uses of the Assert class of the JUnit framework */
public class JUnitAssertionOddities extends BytecodeScanningDetector
{
private enum State {SAW_NOTHING, SAW_IF_ICMPNE, SAW_ICONST_1, SAW_GOTO, SAW_ICONST_0, SAW_EQUALS};
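    // The State machine below tracks the bytecode pattern IF_ICMPNE; ICONST_1; GOTO; ICONST_0, which is how javac
    // compiles an int comparison used as a boolean value (e.g. assertTrue(x == y)); the SAW_EQUALS state marks a
    // preceding call to equals(Object). Either pattern immediately before assertTrue() suggests assertEquals() instead.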
private static final String RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations";
private static final String TESTCASE_CLASS = "junit.framework.TestCase";
private static final String TEST_CLASS = "org.junit.Test";
private static final String TEST_ANNOTATION_SIGNATURE = "Lorg/junit/Test;";
private static final String OLD_ASSERT_CLASS = "junit/framework/Assert";
private static final String NEW_ASSERT_CLASS = "org/junit/Assert";
private BugReporter bugReporter;
private JavaClass testCaseClass;
private JavaClass testAnnotationClass;
private OpcodeStack stack;
private boolean isTestCaseDerived;
private boolean isAnnotationCapable;
private State state;
/**
* constructs a JOA detector given the reporter to report bugs on
     * @param bugReporter the sink of bug reports
*/
public JUnitAssertionOddities(BugReporter bugReporter) {
this.bugReporter = bugReporter;
try {
testCaseClass = Repository.lookupClass(TESTCASE_CLASS);
} catch (ClassNotFoundException cnfe) {
testCaseClass = null;
bugReporter.reportMissingClass(DescriptorFactory.createClassDescriptor(TESTCASE_CLASS));
}
try {
testAnnotationClass = Repository.lookupClass(TEST_CLASS);
} catch (ClassNotFoundException cnfe) {
testAnnotationClass = null;
bugReporter.reportMissingClass(DescriptorFactory.createClassDescriptor(TEST_CLASS));
}
}
/**
* override the visitor to see if this class could be a test class
*
* @param classContext the context object of the currently parsed class
*/
@Override
public void visitClassContext(ClassContext classContext) {
try {
JavaClass cls = classContext.getJavaClass();
isTestCaseDerived = ((testCaseClass != null) && cls.instanceOf(testCaseClass));
isAnnotationCapable = (cls.getMajor() >= 5) && (testAnnotationClass != null);
if (isTestCaseDerived || isAnnotationCapable) {
stack = new OpcodeStack();
super.visitClassContext(classContext);
}
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
} finally {
stack = null;
}
}
@Override
public void visitCode(Code obj) {
Method m = getMethod();
boolean isTestMethod = isTestCaseDerived && m.getName().startsWith("test");
if (!isTestMethod && isAnnotationCapable) {
Attribute[] atts = m.getAttributes();
for (Attribute att : atts) {
ConstantPool cp = att.getConstantPool();
Constant c = cp.getConstant(att.getNameIndex());
if (c instanceof ConstantUtf8) {
String name = ((ConstantUtf8) c).getBytes();
if (RUNTIME_VISIBLE_ANNOTATIONS.equals(name)) {
if (att instanceof Unknown) {
Unknown unAtt = (Unknown)att;
byte[] bytes = unAtt.getBytes();
                            // the first annotation's type_index is a u2 at offset 2 (after the u2 num_annotations count)
                            int constantPoolIndex = ((bytes[2] & 0xFF) << 8) | (bytes[3] & 0xFF);
c = cp.getConstant(constantPoolIndex);
if (c instanceof ConstantUtf8) {
name = ((ConstantUtf8) c).getBytes();
if (TEST_ANNOTATION_SIGNATURE.equals(name)) {
isTestMethod = true;
break;
}
}
} else if (att instanceof RuntimeVisibleAnnotations) {
RuntimeVisibleAnnotations rva = (RuntimeVisibleAnnotations) att;
AnnotationEntry[] entries = rva.getAnnotationEntries();
for (AnnotationEntry entry : entries) {
if (TEST_ANNOTATION_SIGNATURE.equals(entry.getAnnotationType())) {
isTestMethod = true;
break;
}
}
}
}
}
}
}
if (isTestMethod) {
stack.resetForMethodEntry(this);
state = State.SAW_NOTHING;
super.visitCode(obj);
}
}
@Override
public void sawOpcode(int seen) {
String userValue = null;
try {
stack.mergeJumps(this);
if (seen == INVOKESTATIC) {
String clsName = getClassConstantOperand();
if (OLD_ASSERT_CLASS.equals(clsName) || NEW_ASSERT_CLASS.equals(clsName)) {
String methodName = getNameConstantOperand();
if ("assertEquals".equals(methodName)) {
String signature = getSigConstantOperand();
Type[] argTypes = Type.getArgumentTypes(signature);
if (argTypes.length == 2) {
if (argTypes[0].equals(Type.STRING) && argTypes[1].equals(Type.STRING))
return;
if (stack.getStackDepth() >= 2) {
OpcodeStack.Item item1 = stack.getStackItem(1);
Object cons1 = item1.getConstant();
if ((cons1 != null) && (argTypes[argTypes.length-1].equals(Type.BOOLEAN)) && (argTypes[argTypes.length-2].equals(Type.BOOLEAN))) {
bugReporter.reportBug(new BugInstance(this, "JAO_JUNIT_ASSERTION_ODDITIES_BOOLEAN_ASSERT", NORMAL_PRIORITY)
.addClass(this)
.addMethod(this)
.addSourceLine(this));
return;
}
OpcodeStack.Item item0 = stack.getStackItem(0);
if (item0.getConstant() != null) {
bugReporter.reportBug(new BugInstance(this, "JAO_JUNIT_ASSERTION_ODDITIES_ACTUAL_CONSTANT", NORMAL_PRIORITY)
.addClass(this)
.addMethod(this)
.addSourceLine(this));
return;
}
if (argTypes[0].equals(Type.OBJECT) && argTypes[1].equals(Type.OBJECT)) {
if ("Ljava/lang/Double;".equals(item0.getSignature()) && "Ljava/lang/Double;".equals(item1.getSignature())) {
bugReporter.reportBug(new BugInstance(this, "JAO_JUNIT_ASSERTION_ODDITIES_INEXACT_DOUBLE", NORMAL_PRIORITY)
.addClass(this)
.addMethod(this)
.addSourceLine(this));
return;
}
}
}
}
} else if ("assertNotNull".equals(methodName)) {
if (stack.getStackDepth() > 0) {
if ("valueOf".equals(stack.getStackItem(0).getUserValue())) {
bugReporter.reportBug(new BugInstance(this, "JAO_JUNIT_ASSERTION_ODDITIES_IMPOSSIBLE_NULL", NORMAL_PRIORITY)
.addClass(this)
.addMethod(this)
.addSourceLine(this));
}
}
} else if ("assertTrue".equals(methodName)) {
if ((state == State.SAW_ICONST_0) || (state == State.SAW_EQUALS)) {
bugReporter.reportBug(new BugInstance(this, "JAO_JUNIT_ASSERTION_ODDITIES_USE_ASSERT_EQUALS", NORMAL_PRIORITY)
.addClass(this)
.addMethod(this)
.addSourceLine(this));
}
}
} else {
String methodName = getNameConstantOperand();
String sig = getSigConstantOperand();
if (clsName.startsWith("java/lang/")
&& "valueOf".equals(methodName)
&& (sig.indexOf(")Ljava/lang/") >= 0)) {
userValue = "valueOf";
}
}
} else if (seen == ATHROW) {
if (stack.getStackDepth() > 0) {
OpcodeStack.Item item = stack.getStackItem(0);
String throwClass = item.getSignature();
if ("Ljava/lang/AssertionError;".equals(throwClass)) {
bugReporter.reportBug(new BugInstance(this, "JAO_JUNIT_ASSERTION_ODDITIES_ASSERT_USED", NORMAL_PRIORITY)
.addClass(this)
.addMethod(this)
.addSourceLine(this));
}
}
}
switch (state) {
case SAW_NOTHING:
case SAW_EQUALS:
if (seen == IF_ICMPNE)
state = State.SAW_IF_ICMPNE;
else
state = State.SAW_NOTHING;
break;
case SAW_IF_ICMPNE:
if (seen == ICONST_1)
state = State.SAW_ICONST_1;
else
state = State.SAW_NOTHING;
break;
case SAW_ICONST_1:
if (seen == GOTO)
state = State.SAW_GOTO;
else
state = State.SAW_NOTHING;
break;
case SAW_GOTO:
if (seen == ICONST_0)
state = State.SAW_ICONST_0;
else
state = State.SAW_NOTHING;
break;
default:
state = State.SAW_NOTHING;
break;
}
if (seen == INVOKEVIRTUAL) {
String methodName = getNameConstantOperand();
String sig = getSigConstantOperand();
if ("equals".equals(methodName) && "(Ljava/lang/Object;)Z".equals(sig)) {
state = State.SAW_EQUALS;
}
}
} finally {
TernaryPatcher.pre(stack, seen);
stack.sawOpcode(this, seen);
TernaryPatcher.post(stack, seen);
if ((userValue != null) && (stack.getStackDepth() > 0)) {
OpcodeStack.Item item = stack.getStackItem(0);
item.setUserValue(userValue);
}
}
}
}
|
package com.morph.engine.graphics.shaders;
import com.morph.engine.core.Game;
import com.morph.engine.graphics.GLRenderingEngine;
import com.morph.engine.graphics.Texture;
import com.morph.engine.graphics.Uniforms;
import com.morph.engine.graphics.components.RenderData;
import com.morph.engine.math.Matrix4f;
import com.morph.engine.physics.components.Transform;
public class GUITintShaderUniforms extends Uniforms {
private Matrix4f mvp;
private Texture diffuse;
@Override
public void defineUniforms(int shader) {
addUniform("mvp", shader);
addUniform("diffuse", shader);
addUniform("diffuseColor", shader);
}
@Override
public void setUniforms(Transform t, RenderData data) {
mvp = t.getTransformationMatrix();
diffuse = data.getTexture(0);
setUniformMatrix4fv("mvp", mvp.mul(Game.screenOrtho).getTranspose());
setUniform1i("diffuse", 0);
setUniform4f("diffuseColor", data.getTint());
diffuse.bind();
}
@Override
public void unbind(Transform t, RenderData data) {
diffuse.unbind();
}
}
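/*
 * For reference (an assumption, not taken from this file): a GLSL shader paired with these uniforms would
 * presumably declare something like
 *
 *    uniform mat4 mvp;            // set via setUniformMatrix4fv("mvp", ...)
 *    uniform sampler2D diffuse;   // texture unit 0, set via setUniform1i("diffuse", 0)
 *    uniform vec4 diffuseColor;   // tint, set via setUniform4f("diffuseColor", ...)
 *
 * matching the names registered in defineUniforms() above.
 */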
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.slidellrobotics.reboundrumble.commands;
import com.slidellrobotics.reboundrumble.RobotMap;
import com.slidellrobotics.reboundrumble.subsystems.TrackingCamera;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.image.NIVisionException;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
*
* @author Allister Wright
*/
public class ProcessImage extends CommandBase {
static int stepNo =1;
private static double lastTime = 0;
private static double thisTime = 0;
private static double timeLapse = 0;
    double targetHeight = TrackingCamera.targetHeight;          // Create a few necessary local variables
    double targetWidth = TrackingCamera.targetWidth;            // for concise code and calculations.
double targetHeightFt; // Target Height in Feet
double targetWidthFt; // Target Width in Feet
double imageHeight; // Total Height in Pixels
    double imageWidth;                                           // Total Width in Pixels
double verticalFOV; // Vertical Field of View in Feet
double horizontalFOV; // Horizontal Field of View in Feet
double verticalViewingAngle; // Vertical Camera Viewing Angle
double horizontalViewingAngle; // Horizontal Camera Viewing Angle
double horizontalRattle; // Horizontal off-centerness of center of goal
double verticalRattle; // Vertical off-centerness of center of goal
double centerDistance = 0; // Distance Variable to be used in firing Calculation
double offCenterPixels = 0;
double offCenterFt = 0;
double verticalDistanceResult = 0;
double horizontalDistanceResult = 0;
double trueDistance = 0;
double d = 0;
    double pi = 3.14159265;
public ProcessImage() {
// Use requires() here to declare subsystem dependencies
// eg. requires(chassis);
requires(camera);
requires(lazySusan);
requires(leftShootingMotors);
requires(rightShootingMotors);
}
// Called just before this Command runs the first time
protected void initialize() {
stepNo = 1;
}
// Called repeatedly when this Command is scheduled to run
protected void execute() {
thisTime = Timer.getFPGATimestamp();
timeLapse = thisTime - lastTime;
if(timeLapse >= 1.0) {
getImage();
if(stepNo == 6) {
if(TrackingCamera.reports != null) {
selectGoal();
findAngle();
findDistance();
} else { // If no goals are found
System.out.println("Goal Selection and Analysis Aborted"); // Print a notifier
}
stepNo++;
}
}
}
// Make this return true when this Command no longer needs to run execute()
protected boolean isFinished() {
//todo this can just be set to true
if(stepNo == 7) {
return true;
} else if(stepNo > 7) {
System.out.println("Error Exit");
return true;
} else {
return false;
}
}
// Called once after isFinished returns true
protected void end() {
try {
if (TrackingCamera.pic != null) {
TrackingCamera.pic.free();
}
if (TrackingCamera.convexHullImage != null) {
TrackingCamera.convexHullImage.free();
}
if (TrackingCamera.thresholdHSL != null) {
TrackingCamera.thresholdHSL.free();
}
if (TrackingCamera.boundImage != null) {
TrackingCamera.boundImage.free();
}
} catch (Exception ex) {
System.out.println("Memory: "+ex);
}
lastTime = thisTime;
}
// Called when another command which requires one or more of the same
// subsystems is scheduled to run
protected void interrupted() {
end();
}
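    // Image-processing pipeline, one step per call (stepNo 1-5): capture image -> HSL threshold ->
    // convex-hull fill -> particle filter -> ordered particle analysis reports. Step 6 (in execute())
    // then selects a goal, aims the lazy susan, and computes distance and launch speed.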
private void getImage() {
System.out.println("getImage");
try {
switch(stepNo){
case 1:
System.out.println("Running Case: 1");
TrackingCamera.pic = camera.getImageFromCamera();
System.out.println("Ran Case: 1.1");
if(TrackingCamera.pic != null) {
TrackingCamera.totalWidth = TrackingCamera.pic.getWidth();
System.out.println("Ran Case: 1.2");
TrackingCamera.totalHeight = TrackingCamera.pic.getHeight();
System.out.println("Ran Case: 1.3");
} else {
break;
}
stepNo++;
break;
case 2:
System.out.println("Running Case: 2");
TrackingCamera.thresholdHSL = TrackingCamera.pic.thresholdHSL(150, 185, 244, 255, 2, 20); //Sets a Blue light threshold
System.out.println("Ran Case: 2.1");
stepNo++;
break;
case 3:
System.out.println("Running Case: 3");
TrackingCamera.convexHullImage = TrackingCamera.thresholdHSL.convexHull(false); //Fills in the bounding boxes for the targets
System.out.println("Ran Case: 3.1");
stepNo++;
break;
case 4:
System.out.println("Running Case: 4");
TrackingCamera.boundImage = TrackingCamera.convexHullImage.particleFilter(TrackingCamera.cc);
System.out.println("Ran Case: 4.1");
stepNo++;
break;
case 5:
System.out.println("Running Case: 5");
TrackingCamera.reports = TrackingCamera.boundImage.getOrderedParticleAnalysisReports();
System.out.println("Ran Case: 5.1");
System.out.println("Reports: "+TrackingCamera.reports.length);
System.out.println("Ran Case: 5.2");
stepNo++;
break;
}
} catch (NIVisionException ex) {
System.out.println(ex);
stepNo = 60;
} catch (Exception ex) {
System.out.println(ex);
stepNo = 60;
}
}
private void selectGoal() {
System.out.println("selectGoal");
TrackingCamera.targetGoal = null;
if (TrackingCamera.reports.length == 0) {
return;
} if (TrackingCamera.reports.length == 1) {
System.out.println("Not enough goals");
TrackingCamera.targetGoal = TrackingCamera.reports[0];
} else {
//Gus says: is this right. it doesn't look right to me...
TrackingCamera.leftGoal = TrackingCamera.reports[0]; //Recognizes the
TrackingCamera.rightGoal = TrackingCamera.reports[0]; //middle goals.
int maxIndex = TrackingCamera.reports.length;
if (maxIndex > 4) {
maxIndex=4;
}
            for(int i = 1; i < maxIndex; i++) { // i < maxIndex: reports[maxIndex] would be out of bounds
                if(TrackingCamera.reports[i].center_mass_x < TrackingCamera.leftGoal.center_mass_x) {
                    TrackingCamera.leftGoal = TrackingCamera.reports[i];
                } if(TrackingCamera.reports[i].center_mass_x > TrackingCamera.rightGoal.center_mass_x) { // compare against the current right goal
                    TrackingCamera.rightGoal = TrackingCamera.reports[i];
                }
            }
}
//We have four goals in view index 1 is the left and index 2 is right
double leftWidth = TrackingCamera.leftGoal.boundingRectWidth; //Finds the widths of
double rightWidth = TrackingCamera.rightGoal.boundingRectWidth; //both middle goals.
if (leftWidth <= rightWidth) {
TrackingCamera.targetGoal = TrackingCamera.rightGoal; //Decides which goal we are
} else { //closer to and targets it.
TrackingCamera.targetGoal = TrackingCamera.leftGoal;
}
System.out.println("Target Selected");
}
}
private void findAngle() {
System.out.println("findAngle");
if (TrackingCamera.targetGoal == null){
return;
}
TrackingCamera.horCenter = (TrackingCamera.totalWidth / 2); //Finds the pixel value of the horizontal center
TrackingCamera.targetLocale = TrackingCamera.targetGoal.center_mass_x; //Finds the center of our target
TrackingCamera.targetDiff = Math.abs(TrackingCamera.targetLocale - TrackingCamera.horCenter); // see how far away we are
//TODO: tune the 10 pixels to the right number
//there is always going to be a little error, but we want some small window
//where the lazy suzan stops moving to we can make an accurate shot.
System.out.println("Targe Diff: "+TrackingCamera.targetDiff);
if (TrackingCamera.targetDiff < 15) {
//lazySusan.setRelay(RobotMap.susanOff); //turn off
} else if (TrackingCamera.targetLocale < TrackingCamera.horCenter) { //and if we are facing right
//lazySusan.setRelay(RobotMap.susanLeft); //turn left
lazySusan.setSetpointRelative(-5);
} else { //if we face left
//lazySusan.setRelay(RobotMap.susanRight); //turn right
lazySusan.setSetpointRelative(+5);
}
}
private void findDistance() {
System.out.println("findDistance");
if (TrackingCamera.targetGoal == null){ // If no target is found
//leftShootingMotors.setSetpoint(1000); // Set Left shooting Motors to about Half Speed
//rightShootingMotors.setSetpoint(1000); // Set Right Shooting Motors to about Half Speed
System.out.println("No target set"); // Debug Print Statement
System.out.println("Checkpoint 10a");
return;
}
verticalViewingAngle = 47; // Defines the Viewing
horizontalViewingAngle = 47; // Angles of our camera
imageHeight = 480; // Image Height
targetHeight = TrackingCamera.targetGoal.boundingRectHeight; // Sets the height of our target.
targetHeightFt = 1.5; // Defines goal's constant ft height
imageWidth = 640; // Image Width
targetWidth = TrackingCamera.targetGoal.boundingRectWidth; // Sets the width of our target.
targetWidthFt = 2.0; // Defines goal's constant ft width
verticalFOV = imageHeight*(targetHeightFt/targetHeight); // Gets the Foot Value of our Vertical Field of View.
horizontalFOV = imageWidth*(targetWidthFt/targetWidth); // Gets the ft value of our horizontal Field of View.
        verticalRattle = Math.abs(TrackingCamera.targetGoal.center_mass_y - (imageHeight/2));      // Finds the vertical off-centerness.
horizontalRattle = Math.abs(TrackingCamera.targetGoal.center_mass_x - (imageWidth/2)); // Finds the horizontal off-centerness.
        verticalDistanceResult = Math.sqrt(4.0/3.0)*(verticalFOV/2)/Math.tan(Math.toRadians(verticalViewingAngle/2));       // Result of the vertically-based calculation (4.0/3.0 avoids integer division; Math.tan() expects radians, the viewing angle is in degrees).
        horizontalDistanceResult = Math.sqrt(3.0/4.0)*(horizontalFOV/2)/Math.tan(Math.toRadians(horizontalViewingAngle/2)); // Result of the horizontally-based calculation (3/4 as an int division evaluated to 0, zeroing this result).
centerDistance = (verticalDistanceResult + horizontalDistanceResult) / 2; // Take the average to try get a more accurate measurement.
offCenterPixels = Math.sqrt((verticalRattle*verticalRattle) + (horizontalRattle*horizontalRattle)); // Finds the Linear Distance from the Center of the Image to the Center of the Goal.
        offCenterFt = offCenterPixels*Math.sqrt((verticalFOV*verticalFOV)+(horizontalFOV*horizontalFOV))/Math.sqrt((imageHeight*imageHeight)+(imageWidth*imageWidth)); // Converts the above calculated pixel offset into feet using the diagonal feet-per-pixel scale.
        trueDistance = Math.sqrt((centerDistance*centerDistance)+(offCenterFt*offCenterFt)); // Find the Linear Distance from the Lens of our Camera to the Center of our Goal.
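        // Worked example (hypothetical numbers): a target 100 pixels tall gives verticalFOV = 480*(1.5/100) = 7.2 ft,
        // so verticalDistanceResult ~= sqrt(4.0/3.0)*(7.2/2)/tan(23.5 deg) ~= 9.6 ft; averaging with the horizontal
        // estimate and folding in the off-center term above yields trueDistance.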
//if distance to target is invalid, just set it to some number
if (TrackingCamera.distanceToTarget > 60 || TrackingCamera.distanceToTarget <= 0) {
TrackingCamera.distanceToTarget = 60;
}
d = trueDistance; // See below Calculation for conciseness
        TrackingCamera.launchSpeed = 60 * (d / Math.sqrt((11.0 / 6 - d) / -16.1) / (2.0 / 3 * pi)); // Calcs the required rpm for firing; 11.0/6 and 2.0/3 are written as doubles because 11/6 and 2/3 as int divisions evaluate to 1 and 0 (a divide-by-zero). 2.0/3*pi is presumably the wheel circumference in feet.
leftShootingMotors.setSetpoint(TrackingCamera.launchSpeed); // Sets the shooting Left Shooting Motors
rightShootingMotors.setSetpoint(TrackingCamera.launchSpeed); // Sets the Right Shooting Motors
/* A String of Debug Print Statements */
System.out.println();
System.out.println("Vertcal Distance Result: "+verticalDistanceResult);
System.out.println("Horizontal Distance Result: "+horizontalDistanceResult);
System.out.println("Central Distance: "+centerDistance);
System.out.println("True Distance: "+d);
System.out.println("Camera Launch Speed: "+TrackingCamera.launchSpeed);
System.out.println();
SmartDashboard.putDouble("Vertical Distance Result", verticalDistanceResult);
SmartDashboard.putDouble("Horizontal Distance Result", horizontalDistanceResult);
SmartDashboard.putDouble("Center Point Distance", centerDistance);
SmartDashboard.putDouble("Distance", d);
SmartDashboard.putDouble("Camera Launch Speed", TrackingCamera.launchSpeed);
}
}
|
package net.formula97.andorid.car_kei_bo;
public final class R {
public static final class array {
public static final int distanceUnit=0x7f060000;
public static final int priceUnit=0x7f060001;
public static final int volumeUnit=0x7f060002;
}
public static final class attr {
}
public static final class drawable {
public static final int ic_launcher=0x7f020000;
}
public static final class id {
public static final int EditText_odometer=0x7f09001d;
public static final int LinearLayout1=0x7f090013;
public static final int LinearLayout_vert_parent=0x7f090015;
public static final int RelativeLayout1=0x7f090000;
public static final int ScrollView1=0x7f090014;
public static final int TextView1=0x7f090001;
public static final int btn_add_mileage=0x7f090039;
public static final int button_addCar=0x7f09000a;
public static final int button_addFuelRecord=0x7f090011;
public static final int button_addRefuelRecord=0x7f09002d;
public static final int button_cancelAddRefuelRecord=0x7f09002e;
public static final int button_cancel_addCar=0x7f09000b;
public static final int button_editDate=0x7f090026;
public static final int button_editTime=0x7f090029;
public static final int checkBox_SetDefault=0x7f090003;
public static final int ctxitem_add_mileage=0x7f090045;
public static final int ctxitem_delete_car=0x7f090047;
public static final int ctxitem_set_default_car=0x7f090046;
public static final int ctxitem_show_mileage=0x7f090044;
public static final int editText_amountOfOil=0x7f090019;
public static final int editText_comments=0x7f09002b;
public static final int editText_dateOfRefuel=0x7f090025;
public static final int editText_timeOfRefuel=0x7f090028;
public static final int editText_unitPrice=0x7f090021;
public static final int linearLayout_horiz=0x7f09002c;
public static final int listView_CarList=0x7f090012;
public static final int ll_horizParent=0x7f09000e;
public static final int ll_vertParent=0x7f09000d;
public static final int lv_mileagelist=0x7f09003a;
public static final int optionsmenu_addcar=0x7f090049;
public static final int optionsmenu_carlist=0x7f090048;
public static final int optionsmenu_closeAPP=0x7f09004a;
public static final int spinner_carName=0x7f090017;
public static final int spinner_distanceUnit=0x7f090007;
public static final int spinner_priceUnit=0x7f090005;
public static final int spinner_volumeUnit=0x7f090009;
public static final int tableRow_1=0x7f090016;
public static final int tableRow_2=0x7f090018;
public static final int tableRow_3=0x7f09001b;
public static final int tableRow_4=0x7f09001f;
public static final int tableRow_5=0x7f090023;
public static final int tableRow_6=0x7f090027;
public static final int textView1=0x7f090004;
public static final int textView2=0x7f090006;
public static final int textView3=0x7f090008;
public static final int textView4=0x7f09001c;
public static final int textView5=0x7f09002a;
public static final int textView6=0x7f090020;
public static final int textView8=0x7f090024;
public static final int textView_CarListTitleContainer=0x7f09000c;
public static final int textView_distanceUnit=0x7f09001e;
public static final int textView_moneyUnit=0x7f090022;
public static final int textView_oilUnit=0x7f09001a;
public static final int textview_addCarName=0x7f090002;
public static final int tv_element_CarName=0x7f09003b;
public static final int tv_element_CarName2=0x7f09002f;
public static final int tv_element_FuelMileageLabel=0x7f090031;
public static final int tv_element_RunningCostsLabel=0x7f090034;
public static final int tv_element_carname_suffix=0x7f090030;
public static final int tv_label_defaultcar=0x7f09000f;
public static final int tv_label_value_defaultcar=0x7f090010;
public static final int tv_spinner_carname=0x7f090043;
public static final int tv_title_dateOfRefuel=0x7f090037;
public static final int tv_title_unit_volume=0x7f090038;
public static final int tv_unit_amountOfOil=0x7f090041;
public static final int tv_unit_fuelMileage=0x7f09003d;
public static final int tv_unit_fuelMileage2=0x7f090033;
public static final int tv_unit_runningCosts=0x7f09003f;
public static final int tv_unit_runningCosts2=0x7f090036;
public static final int tv_value_FuelMileage=0x7f09003c;
public static final int tv_value_FuelMileage2=0x7f090032;
public static final int tv_value_RunningCosts=0x7f09003e;
public static final int tv_value_RunningCosts2=0x7f090035;
public static final int tv_value_amountOfOil=0x7f090042;
public static final int tv_value_dateOfRefuel=0x7f090040;
}
public static final class layout {
public static final int addcar=0x7f030000;
public static final int carlist=0x7f030001;
public static final int configuration=0x7f030002;
public static final int fuelmileageadd=0x7f030003;
public static final int fuelmileagelist=0x7f030004;
public static final int listviewelement_carlist=0x7f030005;
public static final int listviewelemnt_mileagelist=0x7f030006;
public static final int spinnerelement_fuelmileageadd=0x7f030007;
}
public static final class menu {
public static final int context_carlist=0x7f080000;
public static final int optionsmenu=0x7f080001;
}
public static final class raw {
public static final int ddl_create_table=0x7f050000;
}
public static final class string {
public static final int adbuilder_confirm_deletecar=0x7f07002f;
public static final int adbuilder_confirm_deletemileage=0x7f070030;
public static final int adbuilder_confirm_reorg_db=0x7f070031;
public static final int adtitle_detail_of_refuel=0x7f070039;
public static final int app_name=0x7f070000;
public static final int config_title_distance=0x7f07001f;
public static final int config_title_price=0x7f07001e;
public static final int config_title_volume=0x7f070020;
public static final int ctxitem_add_mileage=0x7f070026;
public static final int ctxitem_delete_car=0x7f070029;
public static final int ctxitem_edit_car_preference=0x7f070028;
public static final int ctxitem_set_default_car=0x7f070027;
public static final int ctxitem_show_mileage=0x7f070025;
public static final int ctxmenutitle_carlist=0x7f07002a;
public static final int label_UNIT_distance_kirometer=0x7f070012;
public static final int label_UNIT_distance_mile=0x7f070013;
public static final int label_UNIT_fuel_gallon=0x7f070015;
public static final int label_UNIT_fuel_litter=0x7f070014;
public static final int label_UNIT_money_jpy=0x7f070016;
public static final int label_UNIT_money_usd=0x7f070017;
public static final int label_amountOfOil=0x7f07000c;
public static final int label_btn_addCar=0x7f070009;
public static final int label_btn_addRefuelRecord=0x7f070010;
public static final int label_btn_addfuelmileagerecord=0x7f07000b;
public static final int label_btn_cancel=0x7f07000a;
public static final int label_btn_edit=0x7f070034;
public static final int label_carName=0x7f070006;
public static final int label_cb_setdfault=0x7f070007;
public static final int label_comments=0x7f070033;
public static final int label_dateOfRefuel=0x7f07000f;
public static final int label_fuelmileage=0x7f070018;
public static final int label_hint_inputYourCarName=0x7f070008;
public static final int label_mileagelist_suffix=0x7f07002e;
public static final int label_odometer=0x7f07000d;
public static final int label_runningcost=0x7f070019;
public static final int label_spinner_distance=0x7f070023;
public static final int label_spinner_price=0x7f070022;
public static final int label_spinner_volume=0x7f070024;
public static final int label_timeOfRefuel=0x7f070032;
public static final int label_unitPrice=0x7f07000e;
public static final int lael_btn_cancel_AddRefuelRecord=0x7f070011;
public static final int opmenu_addcar=0x7f07001d;
public static final int opmenu_call_preference=0x7f07001b;
public static final int opmenu_carlist=0x7f07001c;
public static final int opmenu_close_app=0x7f07001a;
public static final int title_addcar=0x7f070003;
public static final int title_carlist=0x7f070002;
public static final int title_config=0x7f070001;
public static final int title_fuelMileageAdd=0x7f070004;
public static final int title_fuelMileageList=0x7f070005;
public static final int toastmsg_addcar1=0x7f07002b;
public static final int toastmsg_addcar2=0x7f07002c;
public static final int toastmsg_addcar3=0x7f07002d;
public static final int toastmsg_addmileage1=0x7f070035;
public static final int toastmsg_addmileage2=0x7f070036;
public static final int toastmsg_addmileage3=0x7f070037;
public static final int toastmsg_addmileage4=0x7f070038;
public static final int tv_label_defaultcar=0x7f070021;
}
public static final class xml {
public static final int config=0x7f040000;
}
}
|
package edu.psu.compbio.seqcode.projects.akshay.MultiSeq;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.LBFGSCopy;
import weka.core.Utils;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.LBFGSCopy.ExceptionWithIflag;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.SeqUnwinder.ClassRelationStructure;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.SeqUnwinder.ClassRelationStructure.Node;
public class LOneTh extends Optimizer {
// Fixed ADMM parameters
/** Relaxation parameter (to help faster convergence) */
public final double ADMM_ALPHA = 1.9;
/** Absolute feasibility tolerance for the primal and dual feasibility conditions */
public final double ADMM_ABSTOL = 1E-2;
/** Relative feasibility tolerance for the primal and dual feasibility conditions */
public final double ADMM_RELTOL = 1E-2;
// Fixed SeqUnwinder parameters
/** Tolerance for internal node convergence */
public final double NODES_tol = 1E-2;
// Tunable ADMM parameters
/** The maximum number of allowed iterations for the ADMM algorithm */
public int ADMM_maxItr = 30;
/** Augmented Lagrangian parameter rho */
public double ADMM_pho = 0.001;
public double ADMM_pho_fold = 1.0;
/** The maximum allowed value for pho */
public double ADMM_pho_max = 100000000;
/** Number of threads to run ADMM on */
public int ADMM_numThreads = 5;
// BFGS parameters
/** The maximum number of allowed iterations for the L-BFGS algorithm */
public int BGFS_maxIts=-1;
// SeqUnwinder parameters
/** The total number of predictors/features in the model (does not include the intercept term) */
protected int numPredictors;
/** Total number of classes to be predicted */
protected int numClasses;
/** Relationships between the different nodes in SeqUnwinder */
protected ClassRelationStructure classStructure;
/** Total number of nodes (internal nodes and classes) in SeqUnwinder */
protected int numNodes;
/** The L1 regularization parameter in the SeqUnwinder model */
protected double regularization;
/** Maximum number of iterations to update internal nodes. For small number of node levels (usually 3 to 4), we might need very few iterations*/
protected int NODES_maxItr=10;
/** Internal flag that indicates whether ADMM has been run before */
protected boolean ranADMM=false;
// SeqUnwinder and ADMM variables
/** Current feature weights for all the nodes in the SeqUnwinder (also contains the intercept term). Dimension :- (numPredictors+1)*numNodes */
public double[] sm_x;
/** Current feature weights for all the leaf nodes (classes) in the SeqUnwinder (also contains the intercept term). Dimension :- (numPredictors+1)*numClasses */
public double[] x;
// SeqUnwinder training data
/** Training data (Instances)*/
public double[][] data;
/** Weights of the instances */
protected double[] weights;
/** Instance class membership */
protected int[] cls;
// Misc
/** Boolean variable indicating debug mode */
protected boolean sm_Debug;
// Setters
public void setBGFSmaxItrs(int m){BGFS_maxIts=m;}
public void setADMMmaxItrs(int m){ADMM_maxItr = m;}
public void setSeqUnwinderMaxIts(int m){NODES_maxItr = m;}
public void setInstanceWeights(double[] w){weights=w;}
public void setClsMembership(int[] c){cls=c;}
public void setNumPredictors(int p){numPredictors=p;}
public void setNumClasses(int c){numClasses = c;}
public void setClassStructure(ClassRelationStructure rel){classStructure = rel; setNumNodes(rel.allNodes.size());}
public void setNumNodes(int n){numNodes = n;}
public void setRidge(double r){regularization = r;}
public void setDebugMode(boolean debug){sm_Debug =debug;}
public void setPho(double ph){ADMM_pho = ph;}
public void set_numThreads(int nt){ADMM_numThreads = nt;}
// Getters
public double[] getX(){return x;}
public double[] getsmX(){return sm_x;}
public LOneTh(double[] xinit, double[] sm_xinit, double[][] d) {
x = xinit;
sm_x = sm_xinit;
data=d;
}
public void execute() throws Exception{
for(int it=0; it<NODES_maxItr; it++){
System.err.println("Running SeqUnwinder for Iteration: "+ it);
double[] sm_x_old = new double[sm_x.length];
for(int i=0; i<sm_x.length; i++){
sm_x_old[i] = sm_x[i];
}
// First, run admm on leaf nodes
ADMMrunner admm = new ADMMrunner();
admm.execute();
// Now update the internal nodes
updateInternalNodes();
// Check Convergence
boolean converged = true;
for(Node n : classStructure.allNodes.values()){
double diff = 0.0;
for(int w=0; w<(numPredictors+1); w++){
diff += Math.pow(sm_x_old[n.nodeIndex*(numPredictors+1)+w]-sm_x[n.nodeIndex*(numPredictors+1)+w],2);
}
diff = Math.sqrt(diff);
if(sm_Debug){
System.err.println("Hierarchy update diff: Node: "+n.nodeIndex + " diff is: "+ diff);
double tmp = NODES_tol*getL2NormX(n.nodeIndex);
System.err.println("Target diff : Node: " + n.nodeIndex + " is "+ tmp);
}
if( diff > NODES_tol*getL2NormX(n.nodeIndex)){
converged=false;
break;
}
}
if(converged){
System.err.println();
System.err.println("SeqUnwinder has converged after "+it+1+" iterations !!");
break;
}
}
}
// Slave methods
private void updateInternalNodes(){
// First update odd-layer nodes
for(int l=1; l<classStructure.numLayers; l+=2){
for(Node n : classStructure.layers.get(l)){// Get nodes in this layer
updateNode(n);
}
}
// Now update even layer nodes except the leaf node
for(int l=2; l<classStructure.numLayers; l+=2){
for(Node n : classStructure.layers.get(l)){// Get nodes in this layer
updateNode(n);
}
}
}
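/**
 * Updates an internal node: each non-intercept weight is set to the median of the
 * corresponding weights of the node's parents and children (for an even number of
 * neighbours, the two middle values are averaged).
 */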
private void updateNode(Node n){
int dim = numPredictors+1;
int nOffset = n.nodeIndex*dim;
// Note: the intercept term is not included
for(int w=1; w<dim; w++){
List<Double> xs = new ArrayList<Double>();
for(int pid : n.parents){
xs.add(sm_x[pid*dim+w]);
}
for(int cid : n.children){
xs.add(sm_x[cid*dim+w]);
}
Collections.sort(xs);
sm_x[nOffset+w] = (xs.size() % 2 == 0) ? (xs.get(xs.size()/2) + xs.get((xs.size()/2)-1))/2 : xs.get(xs.size()/2);
// int midInd = xs.size()/2;
// sm_x[nOffset+w] = xs.get(midInd);
}
}
private double getL2NormX(int nodeIndex){
double norm=0;
int dim = numPredictors+1;
int offset = nodeIndex*dim;
for(int w=0; w<dim; w++){
norm += Math.pow(sm_x[offset+w], 2);
}
return Math.sqrt(norm);
}
// To clear memory
public void clearOptimizer(){
data=null;
sm_x=null;
x=null;
}
public class ADMMrunner {
// Threaded variables
/**
* Hashmap holding the data block that goes into each thread.
* Keys are the thread ids (for eg:- Thread2) and value is the data-block
*/
public HashMap<String,double[][]> t_Data = new HashMap<String,double[][]>();
/**
* Hashmap holding the computed x's from each thread at iteration "t+1".
* Keys are the thread ids (for eg:- Thread2)
*/
public HashMap<String,double[]> t_x = new HashMap<String,double[]>();
/** Hashmap holding the u's from each thread at iteration "t" */
public HashMap<String,double[]> t_u = new HashMap<String,double[]>();
/** The weights of input instances for the data blocks that go into each thread */
public HashMap<String,double[]> t_weights = new HashMap<String,double[]>();
/** The class assignment of input instances for the data blocks that go into each thread*/
public HashMap<String, int[]> t_cls = new HashMap<String,int[]>();
/** All the threads vote their status on the line search (or the x-update) */
public boolean[] finished_linesrch;
/** Tracks the convergence of ADMM */
public AtomicBoolean ADMMconverged = new AtomicBoolean(false);
/** Finished running the current z-step */
public AtomicBoolean updatedZ = new AtomicBoolean(false);
//ADMM consensus variables
/** Current values (t) of z (z-step in ADMM). Dimension :- (numPredictors+1)*numNodes*numNodes */
public double[] z; // for the Z-step
/** Value of z at previous iteration (t-1). Needed to assess convergence Dimension :- (numPredictors+1)*numNodes*numNodes */
public double[] zold;
/** Current values of the augmented lagrange dual variables (t). Dimension :- (numPredictors+1)*numNodes*numNodes */
public double[] u;
/** Stores the primal residuals over the course of the ADMM algorithm Dimension:- [ADMM_maxItr][numNodes*numNodes] */
public double[][] history_primal;
/** Stores the dual residuals over the course of the ADMM algorithm Dimension:- [ADMM_maxItr][numNodes*numNodes] */
public double[][] history_dual;
public double[][] history_xnorm;
public double[][] history_unorm;
public double[][] history_znorm;
/** Tracking the iterations of ADMM */
public AtomicInteger ADMM_currItr_value = new AtomicInteger(0);
// Initialize
public void initZandU(){
int dim = numPredictors+1;
z= new double[numNodes*numNodes*dim];
zold = new double[numNodes*numNodes*dim];
u= new double[numNodes*numNodes*dim];
history_primal = new double[ADMM_maxItr][numNodes*numNodes];
history_dual = new double[ADMM_maxItr][numNodes*numNodes];
history_xnorm = new double[ADMM_maxItr][numNodes];
history_unorm = new double[ADMM_maxItr][numNodes*numNodes];
history_znorm = new double[ADMM_maxItr][numNodes*numNodes];
}
public ADMMrunner() {
finished_linesrch = new boolean[ADMM_numThreads];
int blockSize = data.length/ADMM_numThreads;
for(int i=0; i<blockSize*ADMM_numThreads; i++){
int threadID = i % ADMM_numThreads;
String threadName = "Thread"+threadID;
if(t_Data.containsKey(threadName)){
t_weights.get(threadName)[i/ADMM_numThreads] = weights[i];
t_cls.get(threadName)[i/ADMM_numThreads] = cls[i];
for(int j=0; j<data[0].length; j++){
t_Data.get(threadName)[i/ADMM_numThreads][j] = data[i][j];
}
}else{
t_Data.put(threadName, new double[blockSize][data[0].length]);
t_weights.put(threadName, new double[blockSize]);
t_weights.get(threadName)[i/ADMM_numThreads] = weights[i];
t_cls.put(threadName, new int[blockSize]);
t_cls.get(threadName)[i/ADMM_numThreads] = cls[i];
for(int j=0; j< data[0].length; j++){
t_Data.get(threadName)[i/ADMM_numThreads][j] = data[i][j];
}
}
}
// Initialize t_x
for(int i=0; i< ADMM_numThreads; i++){
String threadName = "Thread"+i;
if(t_x.containsKey(threadName)){
for(int j=0; j<x.length; j++){
t_x.get(threadName)[j] = x[j];
}
}else{
t_x.put(threadName, new double[x.length]);
for(int j=0; j<x.length; j++){
t_x.get(threadName)[j] = x[j];
}
}
}
// Initialize u, z and zold
initZandU();
//Initialize t_u
for(int i=0; i<ADMM_numThreads; i++){
String threadName = "Thread"+i;
if(t_u.containsKey(threadName)){
for(int j=0; j<u.length; j++){
t_u.get(threadName)[j] = u[j];
}
}else{
t_u.put(threadName, new double[u.length]);
for(int j=0; j<u.length; j++){
t_u.get(threadName)[j] = u[j];
}
}
}
}
//Update methods
public void updateUbar(){
synchronized(t_u){
for(int i=0; i<u.length; i++){
u[i] = 0;
for(String tname: t_u.keySet()){
u[i] += t_u.get(tname)[i];
}
u[i] = u[i]/ADMM_numThreads;
}
}
}
public void updateXbar(){
synchronized(t_x){
for(int i=0; i<x.length; i++){
x[i] = 0;
for(String tname: t_x.keySet()){
x[i] += t_x.get(tname)[i];
}
x[i] = x[i]/ADMM_numThreads;
}
}
}
public void updateResiduals(int itr){
int dim = numPredictors + 1;
// First calculate and update the primal residual at the current iteration
for(Node n : classStructure.leafs){
double r_t = 0.0;
double s_t =0.0;
if(n.parents.size() > 0){
for(int pid : n.parents){
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(String thname: t_x.keySet()){
for(int w=0; w<dim; w++){
r_t += Math.pow(t_x.get(thname)[n.nodeIndex*dim+w]-z[zOffset+w]-sm_x[pid*dim+w], 2);
}
}
for(int w=0; w<dim; w++){
s_t += Math.pow(ADMM_pho*(z[zOffset+w] - zold[zOffset+w]), 2);
}
s_t = Math.sqrt(s_t*ADMM_numThreads);
history_primal[itr][n.nodeIndex*numNodes+pid] = Math.sqrt(r_t);
history_dual[itr][n.nodeIndex*numNodes+pid] = s_t;
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(String thname: t_x.keySet()){
for(int w=0; w<dim; w++){
r_t += Math.pow(t_x.get(thname)[n.nodeIndex*dim+w]-z[zOffset+w], 2);
}
}
for(int w=0; w<dim; w++){
s_t += Math.pow(ADMM_pho*(z[zOffset+w] - zold[zOffset+w]), 2);
}
s_t = Math.sqrt(s_t*ADMM_numThreads);
history_primal[itr][n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(r_t);
history_dual[itr][n.nodeIndex*numNodes+n.nodeIndex] = s_t;
}
} // Over all the leaf nodes
}
public void updateUnorm(int itr){
for(Node n : classStructure.leafs){
int dim = numPredictors+1;
if(n.parents.size() > 0){
for(int pid: n.parents){
double unorm = 0.0;
int uOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=0; w<dim; w++){
unorm += Math.pow(u[uOffset+w], 2);
}
history_unorm[itr][n.nodeIndex*numNodes+pid] = Math.sqrt(unorm);
}
}else{
double unorm = 0;
int uOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
unorm += Math.pow(u[uOffset+w], 2);
}
history_unorm[itr][n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(unorm);
}
}
}
public void updateXnorm(int itr){
int dim = numPredictors+1;
for(Node n : classStructure.leafs){
int xOffset = n.nodeIndex*dim;
double xnorm = 0.0;
for(int w=0; w<dim; w++){
xnorm += Math.pow(x[xOffset+w], 2);
}
history_xnorm[itr][n.nodeIndex] = Math.sqrt(xnorm);
}
}
public void updateZnorm(int itr){
for(Node n : classStructure.leafs){
int dim = numPredictors+1;
if(n.parents.size() > 0){
for(int pid: n.parents){
double znorm = 0.0;
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=0; w<dim; w++){
znorm += Math.pow(z[zOffset+w], 2);
}
history_znorm[itr][n.nodeIndex*numNodes+pid] = Math.sqrt(znorm);
}
}else{
double znorm = 0;
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
znorm += Math.pow(z[zOffset+w], 2);
}
history_znorm[itr][n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(znorm);
}
}
}
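/**
 * Adaptive penalty update (residual balancing): if the summed primal residual exceeds
 * mu times the summed dual residual, pho is increased by a factor of tao (capped at
 * ADMM_pho_max); if the dual residual dominates, pho is decreased. ADMM_pho_fold
 * records the change so the scaled dual variables u can be rescaled in the worker threads.
 */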
public void updatePhoAndFold(int A_itr){
double primal_residuals=0; // Sum of all the primal residuals
double dual_residuals=0; // Sum of all the dual residuals
double mu = 10; // maintains the primal and dual residuals within a factor of mu of one another
double tao=2; // the factor by which pho is increased or decreased at each iteration
for(int i=0; i<(numNodes*numNodes); i++){
primal_residuals += history_primal[A_itr][i];
dual_residuals += history_dual[A_itr][i];
}
if(primal_residuals > mu*dual_residuals){ // if the primal residual exceeds the dual residual by more than a factor of mu, increase pho
double old_pho = ADMM_pho;
ADMM_pho = Math.min(ADMM_pho_max,ADMM_pho*tao);
ADMM_pho_fold = ADMM_pho/old_pho;
}else if(dual_residuals > primal_residuals*mu){
ADMM_pho = ADMM_pho/tao;
ADMM_pho_fold = 1/tao;
}else {
ADMM_pho_fold = 1.0;
}
}
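/**
 * z-step: element-wise soft-thresholding, i.e. the proximal operator of the L1 penalty.
 * Each entry is shrunk towards zero by at most pho:
 * z[i] <- sign(z[i]) * max(|z[i]| - pho, 0),
 * where pho is passed in as 2*regularization/(ADMM_pho*ADMM_numThreads).
 */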
public void updateZ(double pho){
for(int i=0; i<z.length; i++){
z[i] = z[i] - Math.signum(z[i])*Math.min(pho, Math.abs(z[i]));
//z[i] = Math.max(0, xrel[i]-pho) - Math.max(0, -xrel[i] - pho);
}
}
//Has ADMM converged
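// The stopping rule follows the standard ADMM criterion: each primal/dual residual is
// compared against an ABSTOL term plus a RELTOL term scaled by the relevant x, z and u norms.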
public boolean hasADMMConverged(int itr){
boolean converged =true;
int dim = numPredictors + 1;
double[] primal_tol = new double[numNodes*numNodes];
double[] dual_tol = new double[numNodes*numNodes];
for(Node n : classStructure.leafs){
double xnorm = history_xnorm[itr][n.nodeIndex];
if(n.parents.size() > 0){ // If this node has parents
for(int pid : n.parents){ // Over all the parents of this node
int zOffset = (n.nodeIndex*numNodes)+(pid);
double znorm = history_znorm[itr][zOffset];
double unorm = history_unorm[itr][zOffset];
double cnorm = getL2NormX(pid);
primal_tol[n.nodeIndex*numNodes+pid] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*Math.max(xnorm, Math.max(znorm, cnorm));
dual_tol[n.nodeIndex*numNodes+pid] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*ADMM_pho*unorm;
}
}else{
int zOffset = (n.nodeIndex*numNodes)+(n.nodeIndex);
double znorm = history_znorm[itr][zOffset];
double unorm = history_unorm[itr][zOffset];
primal_tol[n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*Math.max(xnorm, znorm);
dual_tol[n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*ADMM_pho*unorm;
}
}
for(int i=0; i<primal_tol.length; i++){
if(history_primal[itr][i] > primal_tol[i])
converged=false;
if(history_dual[itr][i] > dual_tol[i])
converged=false;
if(!converged)
break;
}
return converged;
}
public boolean finshedLineSrch(){
boolean ret = true;
synchronized(finished_linesrch){
for(int i=0; i<finished_linesrch.length; i++){
if(!finished_linesrch[i]){
ret = false;
break;
}
}
}
return ret;
}
// Runs the ADMM algorithm
public void execute(){
int dim = numPredictors+1;
// Initiate the threads
Thread[] threads = new Thread[ADMM_numThreads];
for(int i=0; i<ADMM_numThreads; i++){
String thname = "Thread"+i;
ADMMrun th = new ADMMrun(t_Data.get(thname), t_weights.get(thname), t_x.get(thname), t_cls.get(thname), t_u.get(thname), thname);
Thread t = new Thread(th, thname);
t.start();
threads[i] = t;
}
while(ADMM_currItr_value.get() < ADMM_maxItr){
if(sm_Debug)
System.err.print(". "+ ADMM_currItr_value.get() + " .");
// Update pho
if(ADMM_currItr_value.get() >0 && !ranADMM && ADMM_pho < ADMM_pho_max)
updatePhoAndFold(ADMM_currItr_value.get()-1);
if(sm_Debug)
System.err.print(" "+ADMM_pho+" ");
// Make sure all finished_linesrch[] entries are false before releasing the other threads and putting the current thread to sleep
synchronized(finished_linesrch){
for(int i=0; i<finished_linesrch.length; i++){
finished_linesrch[i] = false;
}
}
// Now atomically set the "updatedZ" flag to true to trigger the x-update
updatedZ.set(true);
//Periodically check if all threads have finished line search
while(!finshedLineSrch()){
try {
Thread.sleep(2000);
} catch (InterruptedException e) {}
}
//Now check for convergence at previous iteration
if(ADMM_currItr_value.get()>0){
ADMMconverged.set(hasADMMConverged(ADMM_currItr_value.get()-1));
}
// Print the primal and dual residuals
if(ADMM_currItr_value.get()>0){
if(sm_Debug && !ADMMconverged.get()){
double primal = 0.0;
double dual = 0.0;
for(int i=0; i<(numNodes*numNodes); i++){
primal += history_primal[ADMM_currItr_value.get()-1][i];
dual += history_dual[ADMM_currItr_value.get()-1][i];
}
System.err.println("Primal residual "+ primal + " , Dual residual "+ dual);
}else{
System.err.println();
System.err.println("ADMM has converged after "+ADMM_currItr_value.get()+" iterations !!");
updatedZ.set(true);
ranADMM=true;
break;
}
}
// Now update z
// First copy z to zold
for(int i=0; i<z.length; i++){
zold[i] = z[i];
}
//Now pool the estimates of x_t+1 from all the threads
updateXbar();
//Also, pool the estimates of u_t from all the threads
updateUbar();
// Also update norms
if(ADMM_currItr_value.get()>0)
updateUnorm(ADMM_currItr_value.get()-1);
updateXnorm(ADMM_currItr_value.get());
// Calculate over-relaxed xhat
double[] xhat = new double[z.length];
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size()>0){
for(int pid : n.parents){
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
int pOffset = pid*dim;
for(int w=0; w<dim; w++){
xhat[zOffset+w] = ADMM_ALPHA*(x[nOffset+w])+(1-ADMM_ALPHA)*zold[zOffset+w]-ADMM_ALPHA*sm_x[pOffset+w];
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
xhat[zOffset+w] = ADMM_ALPHA*x[nOffset+w]+(1-ADMM_ALPHA)*zold[zOffset+w];
}
}
}
for(int i=0; i<z.length;i++){
z[i] = xhat[i]+u[i];
}
// Z-update
updateZ((2*regularization)/(ADMM_pho*ADMM_numThreads));
updateZnorm(ADMM_currItr_value.get());
// Calculate and update the primal and dual residuals
updateResiduals(ADMM_currItr_value.get());
ADMM_currItr_value.incrementAndGet();
}
// Wait till all the threads terminate
boolean anyrunning = true;
while (anyrunning) {
anyrunning = false;
try {
Thread.sleep(1000);
} catch (InterruptedException e) { }
for (int i = 0; i < threads.length; i++) {
if (threads[i].isAlive()) {
anyrunning = true;
break;
}
}
}
// Now copy the leaf node weights (i.e x) to sm_x
for(Node n: classStructure.leafs){
int nOffset = n.nodeIndex*dim;
for(int w=0; w<dim; w++){
sm_x[nOffset+w] = x[nOffset+w];
}
}
}
public class ADMMrun implements Runnable{
/** Portion of the data this thread runs on */
public double[][] t_b_Data;
/** the corresponding weights of the dataset */
public double[] t_b_weights;
/** Learned weight vector */
public double[] t_b_x;
/** The current u of this thread */
public double[] t_b_u;
public int[] t_b_cls;
public OptObject oO = new OptObject();
public String threadName;
public ADMMrun(double[][] dat, double[] t_wts, double[] predictors, int[] cl, double[] admm_u, String tname) {
t_b_Data = dat;
t_b_weights = t_wts;
t_b_cls = cl;
threadName =tname;
t_b_x = new double[predictors.length];
for(int i=0; i<predictors.length; i++)
t_b_x[i] = predictors[i];
t_b_u = new double[admm_u.length];
for(int i=0; i<admm_u.length; i++)
t_b_u[i] = admm_u[i];
}
@Override
public void run() {
while(!ADMMconverged.get()){
while(!updatedZ.get()){ //Wait till the z-step has finished
try {
Thread.sleep(1000);
} catch (InterruptedException e){}
if(ADMM_currItr_value.get() >= ADMM_maxItr)
break;
}
if(ADMM_currItr_value.get() >= ADMM_maxItr)
break;
// update t_b_u
// first calculate xhat
// Calculate over-relaxed x:- xrel
int dim = numPredictors + 1;
double[] t_b_xhat = new double[z.length];
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size()>0){
for(int pid : n.parents){
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
int pOffset = pid*dim;
for(int w=0; w<dim; w++){
t_b_xhat[zOffset+w] = ADMM_ALPHA*(t_b_x[nOffset+w])+(1-ADMM_ALPHA)*zold[zOffset+w]-ADMM_ALPHA*sm_x[pOffset+w];
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
//xrel[zOffset+w] = ADMM_ALPHA*sm_x[nOffset+w]+(1-ADMM_ALPHA)*zold[zOffset+w]+u[zOffset+w];
t_b_xhat[zOffset+w] = ADMM_ALPHA*t_b_x[nOffset+w]+(1-ADMM_ALPHA)*zold[zOffset+w];
}
}
}
for(int i=0; i<u.length; i++){
t_b_u[i] = t_b_u[i] + t_b_xhat[i] - z[i];
}
// Correct u
for(int i=0; i< t_b_u.length; i++){
t_b_u[i] = t_b_u[i]/ADMM_pho_fold;
}
// t_b_x update
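// The local x-update minimises this block's weighted multinomial logistic loss plus the
// quadratic ADMM penalty (see OptObject) using L-BFGS; iflag[0] == 1 means L-BFGS wants
// the objective and gradient re-evaluated at the new iterate.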
int[] iflag = new int[1];
double obj= oO.objectiveFunction(t_b_x);
double[] grad = oO.evaluateGradient(t_b_x);
int m = 5;
double[] diag = new double[t_b_x.length];
int[] iprint = new int[2];
double eps = 0.1;
double xtol = 10e-16;
LBFGSCopy lineFinder = new LBFGSCopy();
try {
lineFinder.lbfgs(t_b_x.length, m, t_b_x, obj, grad, false, diag, iprint, eps, xtol, iflag);
} catch (ExceptionWithIflag e1) {
e1.printStackTrace();
}
while(iflag[0] == 1 ){
//re-evaluate the objective and the gradient
obj = oO.objectiveFunction(t_b_x);
grad = oO.evaluateGradient(t_b_x);
try {
lineFinder.lbfgs(t_b_x.length, m, t_b_x, obj, grad, false, diag, iprint, eps, xtol, iflag);
} catch (ExceptionWithIflag e) {
e.printStackTrace();
}
}
// This could be a weak link in the code. I'm assuming the other threads have initiated the line search,
// which they should have. However, if a thread reaches this point too soon (which is highly unlikely),
// the other threads wouldn't have initiated the line search yet.
// Can't think of a good way to make this foolproof at the moment.
updatedZ.set(false); // Atomically set to false
synchronized(t_x){
for(int i=0; i<t_b_x.length; i++){
t_x.get(threadName)[i] = t_b_x[i];
}
}
synchronized(t_u){
for(int i=0; i<t_b_u.length; i++){
t_u.get(threadName)[i] = t_b_u[i];
}
}
synchronized(finished_linesrch){
finished_linesrch[getThreadId()] = true;
}
}
}
//Gettors
public int getThreadId(){return Integer.parseInt(threadName.substring(6));}
/**
* This class implements two things:
* It calculates the gradient for the x-update sub-problem (needed by the L-BFGS routine).
* It calculates the overall objective function for the x-update sub-problem (needed by the L-BFGS routine).
* @author akshaykakumanu
*
*/
public class OptObject {
public OptObject() {
}
/**
* Calculates the gradient
* @param currx
* @return
*/
public double[] evaluateGradient(double[] c_x){
double[] grad = new double[c_x.length];
int dim = numPredictors + 1; // Number of variables per class
for (int i = 0; i < t_b_cls.length; i++) { // ith instance
double[] num = new double[numClasses]; // numerator of
// [-log(1+sum(exp))]'
int index;
for (int offset = 0; offset < numClasses; offset++) { // Which part of x
double exp = 0.0;
index = offset * dim;
for (int j = 0; j < dim; j++) {
exp += t_b_Data[i][j]*c_x[index + j];
}
num[offset] = exp;
}
double max = num[Utils.maxIndex(num)];
double denom=0.0;
for (int offset = 0; offset < numClasses; offset++) {
num[offset] = Math.exp(num[offset] - max);
denom += num[offset];
}
Utils.normalize(num, denom);
// Update denominator of the gradient of -log(Posterior)
double firstTerm;
for (int offset = 0; offset < numClasses; offset++) { // Which
// part of x
index = offset * dim;
firstTerm = t_b_weights[i] * num[offset];
for (int q = 0; q < dim; q++) {
grad[index + q] += firstTerm * t_b_Data[i][q];
}
}
for (int p = 0; p < dim; p++) {
grad[t_b_cls[i] * dim + p] -= t_b_weights[i] * t_b_Data[i][p];
}
}
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size() > 0){
for(int pid : n.parents){
int pOffset = pid*dim;
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=1; w<dim; w++){
grad[nOffset+w] += ADMM_pho*(c_x[nOffset+w]-sm_x[pOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=1; w<dim; w++){
grad[nOffset+w] += ADMM_pho*(c_x[nOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}
if(sm_Debug){
//System.err.println(grad[20]);
//System.err.println(grad[dim+20]);
//System.err.println(grad[2*dim+20]);
}
return grad;
}
/**
* Calculates the objective function
* @param currx
* @return
*/
public double objectiveFunction(double[] c_x){
double nll=0.0;
int dim = numPredictors+1;
for (int i = 0; i < t_b_cls.length; i++) { // ith instance
double[] exp = new double[numClasses];
int index;
for (int offset = 0; offset < numClasses; offset++) {
index = offset * dim;
for (int j = 0; j < dim; j++) {
exp[offset] += t_b_Data[i][j] * c_x[index + j];
}
}
double max = exp[Utils.maxIndex(exp)];
double denom = 0;
double num = exp[t_b_cls[i]] - max;
for (int offset = 0; offset < numClasses; offset++) {
denom += Math.exp(exp[offset] - max);
}
nll -= t_b_weights[i] * (num - Math.log(denom)); // Weighted NLL
}
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size() >0){
for(int pid : n.parents){
int pOffset = pid*dim;
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=1; w<dim; w++){
nll += (ADMM_pho/2)*(c_x[nOffset+w]-sm_x[pOffset+w]-z[zOffset+w]+t_b_u[zOffset+w])*(c_x[nOffset+w]-sm_x[pOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=1; w<dim; w++){
nll += (ADMM_pho/2)*(c_x[nOffset+w]-z[zOffset+w]+t_b_u[zOffset+w])*(c_x[nOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}
if(sm_Debug){
//System.err.println("Negative Log Likelihood: "+nll);
}
return nll;
}
}
}
}
}
|
package edu.psu.compbio.seqcode.projects.akshay.MultiSeq;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.LBFGSCopy;
import weka.core.Utils;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.LBFGSCopy.ExceptionWithIflag;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.SeqUnwinder.ClassRelationStructure;
import edu.psu.compbio.seqcode.projects.akshay.MultiSeq.SeqUnwinder.ClassRelationStructure.Node;
public class LOneTh extends Optimizer {
// Fixed ADMM parameters
/** Relaxation parameter (to help faster convergence) */
public final double ADMM_ALPHA = 1.9;
/** Absolute feasibility tolerance for the primal and dual feasibility conditions */
public final double ADMM_ABSTOL = 1E-2;
/** Relative feasibility tolerance for the primal and dual feasibility conditions */
public final double ADMM_RELTOL = 1E-2;
// Fixed SeqUnwinder parameters
/** Tolerance for internal node convergence */
public final double NODES_tol = 1E-2;
// Tunable ADMM parameters
/** The maximum number of allowed iterations for the ADMM algorithm */
public int ADMM_maxItr = 30;
/** Augmented Lagrangian parameter rho */
public double ADMM_pho = 0.001;
public double ADMM_pho_fold = 1.0;
/** The maximum allowed value for pho */
public double ADMM_pho_max = 100000000;
/** Number of threads to run ADMM on */
public int ADMM_numThreads = 5;
// BFGS parameters
/** The maximum number of allowed iterations for the L-BFGS algorithm */
public int BGFS_maxIts=-1;
// SeqUnwinder parameters
/** The total number of predictors/features in the model (does not include the intercept term) */
protected int numPredictors;
/** Total number of classes to be predicted */
protected int numClasses;
/** Relationships between the different nodes in SeqUnwinder */
protected ClassRelationStructure classStructure;
/** Total number of nodes (internal nodes and classes) in SeqUnwinder */
protected int numNodes;
/** The L1 regularization parameter in the SeqUnwinder model */
protected double regularization;
/** Maximum number of iterations to update internal nodes. For small number of node levels (usually 3 to 4), we might need very few iterations*/
protected int NODES_maxItr=10;
/** Internal flag that indicates whether ADMM has been run before */
protected boolean ranADMM=false;
// SeqUnwinder and ADMM variables
/** Current feature weights for all the nodes in the SeqUnwinder (also contains the intercept term). Dimension :- (numPredictors+1)*numNodes */
public double[] sm_x;
/** Current feature weights for all the leaf nodes (classes) in the SeqUnwinder (also contains the intercept term). Dimension :- (numPredictors+1)*numClasses */
public double[] x;
// SeqUnwinder training data
/** Training data (Instances)*/
public double[][] data;
/** Weights of the instances */
protected double[] weights;
/** Instance class membership */
protected int[] cls;
// Misc
/** Boolean variable indicating debug mode */
protected boolean sm_Debug;
// Setters
public void setBGFSmaxItrs(int m){BGFS_maxIts=m;}
public void setADMMmaxItrs(int m){ADMM_maxItr = m;}
public void setSeqUnwinderMaxIts(int m){NODES_maxItr = m;}
public void setInstanceWeights(double[] w){weights=w;}
public void setClsMembership(int[] c){cls=c;}
public void setNumPredictors(int p){numPredictors=p;}
public void setNumClasses(int c){numClasses = c;}
public void setClassStructure(ClassRelationStructure rel){classStructure = rel; setNumNodes(rel.allNodes.size());}
public void setNumNodes(int n){numNodes = n;}
public void setRidge(double r){regularization = r;}
public void setDebugMode(boolean debug){sm_Debug =debug;}
public void setPho(double ph){ADMM_pho = ph;}
public void set_numThreads(int nt){ADMM_numThreads = nt;}
// Getters
public double[] getX(){return x;}
public double[] getsmX(){return sm_x;}
public LOneTh(double[] xinit, double[] sm_xinit, double[][] d) {
x = xinit;
sm_x = sm_xinit;
data=d;
}
public void execute() throws Exception{
for(int it=0; it<NODES_maxItr; it++){
System.err.println("Running SeqUnwinder for Iteration: "+ it);
double[] sm_x_old = new double[sm_x.length];
for(int i=0; i<sm_x.length; i++){
sm_x_old[i] = sm_x[i];
}
// First, run admm on leaf nodes
ADMMrunner admm = new ADMMrunner();
admm.execute();
// Now update the internal nodes
updateInternalNodes();
// Check Convergence
boolean converged = true;
for(Node n : classStructure.allNodes.values()){
double diff = 0.0;
for(int w=0; w<(numPredictors+1); w++){
diff += Math.pow(sm_x_old[n.nodeIndex*(numPredictors+1)+w]-sm_x[n.nodeIndex*(numPredictors+1)+w],2);
}
diff = Math.sqrt(diff);
if(sm_Debug)
System.err.println("Hierarchy update diff: Node: "+n.nodeIndex + " diff is: "+ diff);
if( diff > (Math.sqrt(numPredictors)*NODES_tol + NODES_tol*getL2NormX(n.nodeIndex))){
converged=false;
break;
}
}
if(converged){
System.err.println();
System.err.println("SeqUnwinder has converged after "+it+1+" iterations !!");
break;
}
}
}
// Slave methods
private void updateInternalNodes(){
// First update odd-layer nodes
for(int l=1; l<classStructure.numLayers; l+=2){
for(Node n : classStructure.layers.get(l)){// Get nodes in this layer
updateNode(n);
}
}
// Now update even layer nodes except the leaf node
for(int l=2; l<classStructure.numLayers; l+=2){
for(Node n : classStructure.layers.get(l)){// Get nodes in this layer
updateNode(n);
}
}
}
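/**
 * Updates an internal node: each non-intercept weight is set to the median of the
 * corresponding weights of the node's parents and children (for an even number of
 * neighbours, the two middle values are averaged).
 */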
private void updateNode(Node n){
int dim = numPredictors+1;
int nOffset = n.nodeIndex*dim;
// Note: the intercept term is not included
for(int w=1; w<dim; w++){
List<Double> xs = new ArrayList<Double>();
for(int pid : n.parents){
xs.add(sm_x[pid*dim+w]);
}
for(int cid : n.children){
xs.add(sm_x[cid*dim+w]);
}
Collections.sort(xs);
sm_x[nOffset+w] = (xs.size() % 2 == 0) ? (xs.get(xs.size()/2) + xs.get((xs.size()/2)-1))/2 : xs.get(xs.size()/2);
// int midInd = xs.size()/2;
// sm_x[nOffset+w] = xs.get(midInd);
}
}
private double getL2NormX(int nodeIndex){
double norm=0;
int dim = numPredictors+1;
int offset = nodeIndex*dim;
for(int w=0; w<dim; w++){
norm += Math.pow(sm_x[offset+w], 2);
}
return Math.sqrt(norm);
}
// To clear memory
public void clearOptimizer(){
data=null;
sm_x=null;
x=null;
}
public class ADMMrunner {
// Threaded variables
/**
* Hashmap holding the data block that goes into each thread.
* Keys are the thread ids (for eg:- Thread2) and value is the data-block
*/
public HashMap<String,double[][]> t_Data = new HashMap<String,double[][]>();
/**
* Hashmap holding the computed x's from each thread at iteration "t+1".
* Keys are the thread ids (for eg:- Thread2)
*/
public HashMap<String,double[]> t_x = new HashMap<String,double[]>();
/** Hashmap holding the u's from each thread at iteration "t" */
public HashMap<String,double[]> t_u = new HashMap<String,double[]>();
/** The weights of input instances for the data blocks that go into each thread */
public HashMap<String,double[]> t_weights = new HashMap<String,double[]>();
/** The class assignment of input instances for the data blocks that go into each thread*/
public HashMap<String, int[]> t_cls = new HashMap<String,int[]>();
/** All the threads vote their status on the line search (or the x-update) */
public boolean[] finished_linesrch;
/** Tracks the convergence of ADMM */
public AtomicBoolean ADMMconverged = new AtomicBoolean(false);
/** Finished running the current z-step */
public AtomicBoolean updatedZ = new AtomicBoolean(false);
//ADMM consensus variables
/** Current values (t) of z (z-step in ADMM). Dimension :- (numPredictors+1)*numNodes*numNodes */
public double[] z; // for the Z-step
/** Value of z at previous iteration (t-1). Needed to assess convergence Dimension :- (numPredictors+1)*numNodes*numNodes */
public double[] zold;
/** Current values of the augmented lagrange dual variables (t). Dimension :- (numPredictors+1)*numNodes*numNodes */
public double[] u;
/** Stores the primal residuals over the course of the ADMM algorithm Dimension:- [ADMM_maxItr][numNodes*numNodes] */
public double[][] history_primal;
/** Stores the dual residuals over the course of the ADMM algorithm Dimension:- [ADMM_maxItr][numNodes*numNodes] */
public double[][] history_dual;
public double[][] history_xnorm;
public double[][] history_unorm;
public double[][] history_znorm;
/** Tracking the iterations of ADMM */
public AtomicInteger ADMM_currItr_value = new AtomicInteger(0);
// Initialize
public void initZandU(){
int dim = numPredictors+1;
z= new double[numNodes*numNodes*dim];
zold = new double[numNodes*numNodes*dim];
u= new double[numNodes*numNodes*dim];
history_primal = new double[ADMM_maxItr][numNodes*numNodes];
history_dual = new double[ADMM_maxItr][numNodes*numNodes];
history_xnorm = new double[ADMM_maxItr][numNodes];
history_unorm = new double[ADMM_maxItr][numNodes*numNodes];
history_znorm = new double[ADMM_maxItr][numNodes*numNodes];
}
public ADMMrunner() {
finished_linesrch = new boolean[ADMM_numThreads];
int blockSize = data.length/ADMM_numThreads;
for(int i=0; i<blockSize*ADMM_numThreads; i++){
int threadID = i % ADMM_numThreads;
String threadName = "Thread"+threadID;
if(t_Data.containsKey(threadName)){
t_weights.get(threadName)[i/ADMM_numThreads] = weights[i];
t_cls.get(threadName)[i/ADMM_numThreads] = cls[i];
for(int j=0; j<data[0].length; j++){
t_Data.get(threadName)[i/ADMM_numThreads][j] = data[i][j];
}
}else{
t_Data.put(threadName, new double[blockSize][data[0].length]);
t_weights.put(threadName, new double[blockSize]);
t_weights.get(threadName)[i/ADMM_numThreads] = weights[i];
t_cls.put(threadName, new int[blockSize]);
t_cls.get(threadName)[i/ADMM_numThreads] = cls[i];
for(int j=0; j< data[0].length; j++){
t_Data.get(threadName)[i/ADMM_numThreads][j] = data[i][j];
}
}
}
// Initialize t_x
for(int i=0; i< ADMM_numThreads; i++){
String threadName = "Thread"+i;
if(t_x.containsKey(threadName)){
for(int j=0; j<x.length; j++){
t_x.get(threadName)[j] = x[j];
}
}else{
t_x.put(threadName, new double[x.length]);
for(int j=0; j<x.length; j++){
t_x.get(threadName)[j] = x[j];
}
}
}
// Initialize u, z and zold
initZandU();
//Initialize t_u
for(int i=0; i<ADMM_numThreads; i++){
String threadName = "Thread"+i;
if(t_u.containsKey(threadName)){
for(int j=0; j<u.length; j++){
t_u.get(threadName)[j] = u[j];
}
}else{
t_u.put(threadName, new double[u.length]);
for(int j=0; j<u.length; j++){
t_u.get(threadName)[j] = u[j];
}
}
}
}
//Update methods
public void updateUbar(){
synchronized(t_u){
for(int i=0; i<u.length; i++){
u[i] = 0;
for(String tname: t_u.keySet()){
u[i] += t_u.get(tname)[i];
}
u[i] = u[i]/ADMM_numThreads;
}
}
}
public void updateXbar(){
synchronized(t_x){
for(int i=0; i<x.length; i++){
x[i] = 0;
for(String tname: t_x.keySet()){
x[i] += t_x.get(tname)[i];
}
x[i] = x[i]/ADMM_numThreads;
}
}
}
public void updateResiduals(int itr){
int dim = numPredictors + 1;
// First calculate and update the primal residual at the current iteration
for(Node n : classStructure.leafs){
double r_t = 0.0;
double s_t =0.0;
if(n.parents.size() > 0){
for(int pid : n.parents){
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(String thname: t_x.keySet()){
for(int w=0; w<dim; w++){
r_t += Math.pow(t_x.get(thname)[n.nodeIndex*dim+w]-z[zOffset+w]-sm_x[pid*dim+w], 2);
}
}
for(int w=0; w<dim; w++){
s_t += Math.pow(ADMM_pho*(z[zOffset+w] - zold[zOffset+w]), 2);
}
s_t = Math.sqrt(s_t*ADMM_numThreads);
history_primal[itr][n.nodeIndex*numNodes+pid] = Math.sqrt(r_t);
history_dual[itr][n.nodeIndex*numNodes+pid] = s_t;
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(String thname: t_x.keySet()){
for(int w=0; w<dim; w++){
r_t += Math.pow(t_x.get(thname)[n.nodeIndex*dim+w]-z[zOffset+w], 2);
}
}
for(int w=0; w<dim; w++){
s_t += Math.pow(ADMM_pho*(z[zOffset+w] - zold[zOffset+w]), 2);
}
s_t = Math.sqrt(s_t*ADMM_numThreads);
history_primal[itr][n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(r_t);
history_dual[itr][n.nodeIndex*numNodes+n.nodeIndex] = s_t;
}
} // Over all the leaf nodes
}
public void updateUnorm(int itr){
for(Node n : classStructure.leafs){
int dim = numPredictors+1;
if(n.parents.size() > 0){
for(int pid: n.parents){
double unorm = 0.0;
int uOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=0; w<dim; w++){
unorm += Math.pow(u[uOffset+w], 2);
}
history_unorm[itr][n.nodeIndex*numNodes+pid] = Math.sqrt(unorm);
}
}else{
double unorm = 0;
int uOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
unorm += Math.pow(u[uOffset+w], 2);
}
history_unorm[itr][n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(unorm);
}
}
}
public void updateXnorm(int itr){
int dim = numPredictors+1;
for(Node n : classStructure.leafs){
int xOffset = n.nodeIndex*dim;
double xnorm = 0.0;
for(int w=0; w<dim; w++){
xnorm += Math.pow(x[xOffset+w], 2);
}
history_xnorm[itr][n.nodeIndex] = Math.sqrt(xnorm);
}
}
public void updateZnorm(int itr){
for(Node n : classStructure.leafs){
int dim = numPredictors+1;
if(n.parents.size() > 0){
for(int pid: n.parents){
double znorm = 0.0;
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=0; w<dim; w++){
znorm += Math.pow(z[zOffset+w], 2);
}
history_znorm[itr][n.nodeIndex*numNodes+pid] = Math.sqrt(znorm);
}
}else{
double znorm = 0;
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
znorm += Math.pow(z[zOffset+w], 2);
}
history_znorm[itr][n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(znorm);
}
}
}
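/**
 * Adaptive penalty update (residual balancing): if the summed primal residual exceeds
 * mu times the summed dual residual, pho is increased by a factor of tao (capped at
 * ADMM_pho_max); if the dual residual dominates, pho is decreased. ADMM_pho_fold
 * records the change so the scaled dual variables u can be rescaled in the worker threads.
 */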
public void updatePhoAndFold(int A_itr){
double primal_residuals=0; // Sum of all the primal residuals
double dual_residuals=0; // Sum of all the dual residuals
double mu = 10; // maintains the primal and dual residuals within a factor of mu of one another
double tao=2; // the factor by which pho is increased or decreased at each iteration
for(int i=0; i<(numNodes*numNodes); i++){
primal_residuals += history_primal[A_itr][i];
dual_residuals += history_dual[A_itr][i];
}
if(primal_residuals > mu*dual_residuals){ // if the primal residual exceeds the dual residual by more than a factor of mu, increase pho
double old_pho = ADMM_pho;
ADMM_pho = Math.min(ADMM_pho_max,ADMM_pho*tao);
ADMM_pho_fold = ADMM_pho/old_pho;
}else if(dual_residuals > primal_residuals*mu){
ADMM_pho = ADMM_pho/tao;
ADMM_pho_fold = 1/tao;
}else {
ADMM_pho_fold = 1.0;
}
}
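/**
 * z-step: element-wise soft-thresholding, i.e. the proximal operator of the L1 penalty.
 * Each entry is shrunk towards zero by at most pho:
 * z[i] <- sign(z[i]) * max(|z[i]| - pho, 0),
 * where pho is passed in as 2*regularization/(ADMM_pho*ADMM_numThreads).
 */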
public void updateZ(double pho){
for(int i=0; i<z.length; i++){
z[i] = z[i] - Math.signum(z[i])*Math.min(pho, Math.abs(z[i]));
//z[i] = Math.max(0, xrel[i]-pho) - Math.max(0, -xrel[i] - pho);
}
}
//Has ADMM converged
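// The stopping rule follows the standard ADMM criterion: each primal/dual residual is
// compared against an ABSTOL term plus a RELTOL term scaled by the relevant x, z and u norms.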
public boolean hasADMMConverged(int itr){
boolean converged =true;
int dim = numPredictors + 1;
double[] primal_tol = new double[numNodes*numNodes];
double[] dual_tol = new double[numNodes*numNodes];
for(Node n : classStructure.leafs){
double xnorm = history_xnorm[itr][n.nodeIndex];
if(n.parents.size() > 0){ // If this node has parents
for(int pid : n.parents){ // Over all the parents of this node
int zOffset = (n.nodeIndex*numNodes)+(pid);
double znorm = history_znorm[itr][zOffset];
double unorm = history_unorm[itr][zOffset];
double cnorm = getL2NormX(pid);
primal_tol[n.nodeIndex*numNodes+pid] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*Math.max(xnorm, Math.max(znorm, cnorm));
dual_tol[n.nodeIndex*numNodes+pid] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*ADMM_pho*unorm;
}
}else{
int zOffset = (n.nodeIndex*numNodes)+(n.nodeIndex);
double znorm = history_znorm[itr][zOffset];
double unorm = history_unorm[itr][zOffset];
primal_tol[n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*Math.max(xnorm, znorm);
dual_tol[n.nodeIndex*numNodes+n.nodeIndex] = Math.sqrt(dim)*ADMM_ABSTOL + Math.sqrt(ADMM_numThreads)*ADMM_RELTOL*ADMM_pho*unorm;
}
}
for(int i=0; i<primal_tol.length; i++){
if(history_primal[itr][i] > primal_tol[i])
converged=false;
if(history_dual[itr][i] > dual_tol[i])
converged=false;
if(!converged)
break;
}
return converged;
}
public boolean finshedLineSrch(){
boolean ret = true;
synchronized(finished_linesrch){
for(int i=0; i<finished_linesrch.length; i++){
if(!finished_linesrch[i]){
ret = false;
break;
}
}
}
return ret;
}
// Runs the ADMM algorithm
public void execute(){
int dim = numPredictors+1;
// Initiate the threads
Thread[] threads = new Thread[ADMM_numThreads];
for(int i=0; i<ADMM_numThreads; i++){
String thname = "Thread"+i;
ADMMrun th = new ADMMrun(t_Data.get(thname), t_weights.get(thname), t_x.get(thname), t_cls.get(thname), t_u.get(thname), thname);
Thread t = new Thread(th, thname);
t.start();
threads[i] = t;
}
while(ADMM_currItr_value.get() < ADMM_maxItr){
if(sm_Debug)
System.err.print(". "+ ADMM_currItr_value.get() + " .");
// Update pho
if(ADMM_currItr_value.get() >0 && !ranADMM && ADMM_pho < ADMM_pho_max)
updatePhoAndFold(ADMM_currItr_value.get()-1);
if(sm_Debug)
System.err.print(" "+ADMM_pho+" ");
// Make sure all finished_linesrch[] entries are false before releasing the other threads and putting the current thread to sleep
synchronized(finished_linesrch){
for(int i=0; i<finished_linesrch.length; i++){
finished_linesrch[i] = false;
}
}
// Now atomically set the "updatedZ" flag to true to trigger the x-update
updatedZ.set(true);
//Periodically check if all threads have finished line search
while(!finshedLineSrch()){
try {
Thread.sleep(2000);
} catch (InterruptedException e) {}
}
//Now check for convergence at previous iteration
if(ADMM_currItr_value.get()>0){
ADMMconverged.set(hasADMMConverged(ADMM_currItr_value.get()-1));
}
// Print the primal and dual residuals
if(ADMM_currItr_value.get()>0){
if(sm_Debug && !ADMMconverged.get()){
double primal = 0.0;
double dual = 0.0;
for(int i=0; i<(numNodes*numNodes); i++){
primal += history_primal[ADMM_currItr_value.get()-1][i];
dual += history_dual[ADMM_currItr_value.get()-1][i];
}
System.err.println("Primal residual "+ primal + " , Dual residual "+ dual);
}else{
System.err.println();
System.err.println("ADMM has converged after "+ADMM_currItr_value.get()+" iterations !!");
updatedZ.set(true);
ranADMM=true;
break;
}
}
// Now update z
// First copy z to zold
for(int i=0; i<z.length; i++){
zold[i] = z[i];
}
//Now pool the estimates of x_t+1 from all the threads
updateXbar();
//Also, pool the estimates of u_t from all the threads
updateUbar();
// Also update norms
if(ADMM_currItr_value.get()>0)
updateUnorm(ADMM_currItr_value.get()-1);
updateXnorm(ADMM_currItr_value.get());
// Calculate over-relaxed xhat
double[] xhat = new double[z.length];
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size()>0){
for(int pid : n.parents){
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
int pOffset = pid*dim;
for(int w=0; w<dim; w++){
xhat[zOffset+w] = ADMM_ALPHA*(x[nOffset+w])+(1-ADMM_ALPHA)*zold[zOffset+w]-ADMM_ALPHA*sm_x[pOffset+w];
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
xhat[zOffset+w] = ADMM_ALPHA*x[nOffset+w]+(1-ADMM_ALPHA)*zold[zOffset+w];
}
}
}
for(int i=0; i<z.length;i++){
z[i] = xhat[i]+u[i];
}
// Z-update
updateZ((2*regularization)/(ADMM_pho*ADMM_numThreads));
updateZnorm(ADMM_currItr_value.get());
// Calculate and update the primal and dual residuals
updateResiduals(ADMM_currItr_value.get());
ADMM_currItr_value.incrementAndGet();
}
// Wait till all the threads terminate
boolean anyrunning = true;
while (anyrunning) {
anyrunning = false;
try {
Thread.sleep(1000);
} catch (InterruptedException e) { }
for (int i = 0; i < threads.length; i++) {
if (threads[i].isAlive()) {
anyrunning = true;
break;
}
}
}
// Now copy the leaf node weights (i.e x) to sm_x
for(Node n: classStructure.leafs){
int nOffset = n.nodeIndex*dim;
for(int w=0; w<dim; w++){
sm_x[nOffset+w] = x[nOffset+w];
}
}
}
public class ADMMrun implements Runnable{
/** Portion of the data this thread runs on */
public double[][] t_b_Data;
/** the corresponding weights of the dataset */
public double[] t_b_weights;
/** Learned weight vector */
public double[] t_b_x;
/** The current u of this thread */
public double[] t_b_u;
public int[] t_b_cls;
public OptObject oO = new OptObject();
public String threadName;
public ADMMrun(double[][] dat, double[] t_wts, double[] predictors, int[] cl, double[] admm_u, String tname) {
t_b_Data = dat;
t_b_weights = t_wts;
t_b_cls = cl;
threadName =tname;
t_b_x = new double[predictors.length];
for(int i=0; i<predictors.length; i++)
t_b_x[i] = predictors[i];
t_b_u = new double[admm_u.length];
for(int i=0; i<admm_u.length; i++)
t_b_u[i] = admm_u[i];
}
@Override
public void run() {
while(!ADMMconverged.get()){
while(!updatedZ.get()){ //Wait till the z-step has finished
try {
Thread.sleep(1000);
} catch (InterruptedException e){}
if(ADMM_currItr_value.get() >= ADMM_maxItr)
break;
}
if(ADMM_currItr_value.get() >= ADMM_maxItr)
break;
// update t_b_u
// first calculate xhat
// Calculate over-relaxed x:- xrel
int dim = numPredictors + 1;
double[] t_b_xhat = new double[z.length];
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size()>0){
for(int pid : n.parents){
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
int pOffset = pid*dim;
for(int w=0; w<dim; w++){
t_b_xhat[zOffset+w] = ADMM_ALPHA*(t_b_x[nOffset+w])+(1-ADMM_ALPHA)*zold[zOffset+w]-ADMM_ALPHA*sm_x[pOffset+w];
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=0; w<dim; w++){
//xrel[zOffset+w] = ADMM_ALPHA*sm_x[nOffset+w]+(1-ADMM_ALPHA)*zold[zOffset+w]+u[zOffset+w];
t_b_xhat[zOffset+w] = ADMM_ALPHA*t_b_x[nOffset+w]+(1-ADMM_ALPHA)*zold[zOffset+w];
}
}
}
for(int i=0; i<u.length; i++){
t_b_u[i] = t_b_u[i] + t_b_xhat[i] - z[i];
}
// Correct u
for(int i=0; i< t_b_u.length; i++){
t_b_u[i] = t_b_u[i]/ADMM_pho_fold;
}
// t_b_x update
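// The local x-update minimises this block's weighted multinomial logistic loss plus the
// quadratic ADMM penalty (see OptObject) using L-BFGS; iflag[0] == 1 means L-BFGS wants
// the objective and gradient re-evaluated at the new iterate.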
int[] iflag = new int[1];
double obj= oO.objectiveFunction(t_b_x);
double[] grad = oO.evaluateGradient(t_b_x);
int m = 5;
double[] diag = new double[t_b_x.length];
int[] iprint = new int[2];
double eps = 0.1;
double xtol = 10e-16;
LBFGSCopy lineFinder = new LBFGSCopy();
try {
lineFinder.lbfgs(t_b_x.length, m, t_b_x, obj, grad, false, diag, iprint, eps, xtol, iflag);
} catch (ExceptionWithIflag e1) {
e1.printStackTrace();
}
while(iflag[0] == 1 ){
//re-evaluate the objective and the gradient
obj = oO.objectiveFunction(t_b_x);
grad = oO.evaluateGradient(t_b_x);
try {
lineFinder.lbfgs(t_b_x.length, m, t_b_x, obj, grad, false, diag, iprint, eps, xtol, iflag);
} catch (ExceptionWithIflag e) {
e.printStackTrace();
}
}
// This could be a weak link in the code. I'm assuming the other threads have initiated the line search,
// which they should have. However, if a thread reaches this point too soon (which is highly unlikely),
// the other threads wouldn't have initiated the line search yet.
// Can't think of a good way to make this foolproof at the moment.
updatedZ.set(false); // Atomically set to false
synchronized(t_x){
for(int i=0; i<t_b_x.length; i++){
t_x.get(threadName)[i] = t_b_x[i];
}
}
synchronized(t_u){
for(int i=0; i<t_b_u.length; i++){
t_u.get(threadName)[i] = t_b_u[i];
}
}
synchronized(finished_linesrch){
finished_linesrch[getThreadId()] = true;
}
}
}
//Gettors
public int getThreadId(){return Integer.parseInt(threadName.substring(6));}
/**
* This class implements two things:
* It calculates the gradient for the x-update sub-problem (needed by the L-BFGS routine).
* It calculates the overall objective function for the x-update sub-problem (needed by the L-BFGS routine).
* @author akshaykakumanu
*
*/
public class OptObject {
public OptObject() {
}
/**
* Calculates the gradient
* @param currx
* @return
*/
public double[] evaluateGradient(double[] c_x){
double[] grad = new double[c_x.length];
int dim = numPredictors + 1; // Number of variables per class
for (int i = 0; i < t_b_cls.length; i++) { // ith instance
double[] num = new double[numClasses]; // numerator of
// [-log(1+sum(exp))]'
int index;
for (int offset = 0; offset < numClasses; offset++) { // Which part of x
double exp = 0.0;
index = offset * dim;
for (int j = 0; j < dim; j++) {
exp += t_b_Data[i][j]*c_x[index + j];
}
num[offset] = exp;
}
double max = num[Utils.maxIndex(num)];
double denom=0.0;
for (int offset = 0; offset < numClasses; offset++) {
num[offset] = Math.exp(num[offset] - max);
denom += num[offset];
}
Utils.normalize(num, denom);
// Update denominator of the gradient of -log(Posterior)
double firstTerm;
for (int offset = 0; offset < numClasses; offset++) { // Which
// part of x
index = offset * dim;
firstTerm = t_b_weights[i] * num[offset];
for (int q = 0; q < dim; q++) {
grad[index + q] += firstTerm * t_b_Data[i][q];
}
}
for (int p = 0; p < dim; p++) {
grad[t_b_cls[i] * dim + p] -= t_b_weights[i] * t_b_Data[i][p];
}
}
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size() > 0){
for(int pid : n.parents){
int pOffset = pid*dim;
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=1; w<dim; w++){
grad[nOffset+w] += ADMM_pho*(c_x[nOffset+w]-sm_x[pOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=1; w<dim; w++){
grad[nOffset+w] += ADMM_pho*(c_x[nOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}
if(sm_Debug){
//System.err.println(grad[20]);
//System.err.println(grad[dim+20]);
//System.err.println(grad[2*dim+20]);
}
return grad;
}
/**
* Calculates the objective function
* @param currx
* @return
*/
public double objectiveFunction(double[] c_x){
double nll=0.0;
int dim = numPredictors+1;
for (int i = 0; i < t_b_cls.length; i++) { // ith instance
double[] exp = new double[numClasses];
int index;
for (int offset = 0; offset < numClasses; offset++) {
index = offset * dim;
for (int j = 0; j < dim; j++) {
exp[offset] += t_b_Data[i][j] * c_x[index + j];
}
}
double max = exp[Utils.maxIndex(exp)];
double denom = 0;
double num = exp[t_b_cls[i]] - max;
for (int offset = 0; offset < numClasses; offset++) {
denom += Math.exp(exp[offset] - max);
}
nll -= t_b_weights[i] * (num - Math.log(denom)); // Weighted NLL
}
for(Node n : classStructure.leafs){
int nOffset = n.nodeIndex*dim;
if(n.parents.size() >0){
for(int pid : n.parents){
int pOffset = pid*dim;
int zOffset = (n.nodeIndex*numNodes*dim)+(pid*dim);
for(int w=1; w<dim; w++){
nll += (ADMM_pho/2)*(c_x[nOffset+w]-sm_x[pOffset+w]-z[zOffset+w]+t_b_u[zOffset+w])*(c_x[nOffset+w]-sm_x[pOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}else{
int zOffset = (n.nodeIndex*numNodes*dim)+(n.nodeIndex*dim);
for(int w=1; w<dim; w++){
nll += (ADMM_pho/2)*(c_x[nOffset+w]-z[zOffset+w]+t_b_u[zOffset+w])*(c_x[nOffset+w]-z[zOffset+w]+t_b_u[zOffset+w]);
}
}
}
if(sm_Debug){
//System.err.println("Negative Log Likelihood: "+nll);
}
return nll;
}
}
}
}
}
|
package es.tid.tests;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import es.tid.bgp.bgp4.update.tlv.node_link_prefix_descriptor_subTLVs.IGPRouterIDNodeDescriptorSubTLV;
import es.tid.of.DataPathID;
import es.tid.pce.pcep.constructs.GeneralizedBandwidthSSON;
import es.tid.pce.pcep.constructs.Path;
import es.tid.pce.pcep.constructs.SwitchEncodingType;
import es.tid.pce.pcep.objects.BandwidthRequested;
import es.tid.pce.pcep.objects.BitmapLabelSet;
import es.tid.pce.pcep.objects.EndPointsIPv4;
import es.tid.pce.pcep.objects.Metric;
import es.tid.pce.pcep.objects.ObjectiveFunction;
import es.tid.pce.pcep.objects.PceIdIPv4;
import es.tid.rsvp.objects.subobjects.EROSubobject;
import es.tid.rsvp.objects.subobjects.IPv4prefixEROSubobject;
public class TestPCEPCommons {
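/**
 * Recursively fills every non-static field of the given object through its setters,
 * using reflection: primitives get dummy values, arrays get a fresh 5-element instance,
 * and a number of known PCEP/RSVP/BGP-LS types get hand-built test instances.
 */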
public static void createAllFields(Object object){
try {
//System.out.println("Looking at "+object.getClass().getName() );
List<Field> fieldListNS = new ArrayList<Field>();
List<Field> fieldList= Arrays.asList(object.getClass().getDeclaredFields());
//System.out.println("XXXX "+fieldList.size());
for (Field field : fieldList) {
if (!java.lang.reflect.Modifier.isStatic(field.getModifiers())) {
fieldListNS.add(field);
Type ty=field.getGenericType();
//System.out.println("Type: "+ty);
if (ty instanceof Class){
Object o=null;
Class c =(Class)ty;
Method method = object.getClass().getMethod("set"+field.getName().replaceFirst(field.getName().substring(0, 1), field.getName().substring(0, 1).toUpperCase()),field.getType());
//System.out.println("mi "+method.getName());
if (c.isPrimitive()){
fillPrimitive(object,method,ty);
} else if (c.isArray()){
Class c2=c.getComponentType();
//System.out.println("c2: "+c2.getName());
o = Array.newInstance(c2, 5);
method.invoke(object, o);
//System.out.println("FIXME: ES UN ARRAY OJO ");
}
else {
//System.out.println("me "+method.getName());
if (c.getName().equals("String")) {
//System.out.println("FIXME: String");
o="TEST";
} else if (c.getName().equals("java.net.Inet4Address")) {
o=Inet4Address.getByName("1.1.1.1");
}else if (c.getName().equals("java.net.Inet6Address")) {
o=Inet6Address.getByName("1080:0:0:0:8:800:200C:417A");
}
else if (c.getName().equals("es.tid.pce.pcep.objects.EndPoints")) {
o = new EndPointsIPv4();
createAllFields(o);
}else if (c.getName().equals("es.tid.pce.pcep.objects.Bandwidth")) {
o= new BandwidthRequested();
createAllFields(o);
}else if (c.getName().equals("es.tid.pce.pcep.constructs.GeneralizedBandwidth")) {
o= new GeneralizedBandwidthSSON();
createAllFields(o);
}
else if (c.getName().equals("es.tid.pce.pcep.objects.LabelSet")) {
o= new BitmapLabelSet();
createAllFields(o);
}
else if (c.getName().equals("es.tid.pce.pcep.objects.PceId")){
o= new PceIdIPv4();
createAllFields(o);
}else if (c.getName().equals("es.tid.of.DataPathID")){
o= new DataPathID();
((DataPathID)o).setDataPathID("11:22:00:AA:33:BB:11:11");
} else if (c.getName().equals("es.tid.bgp.bgp4.update.tlv.node_link_prefix_descriptor_subTLVs.IGPRouterIDNodeDescriptorSubTLV")){
o= new IGPRouterIDNodeDescriptorSubTLV();
Inet4Address in=(Inet4Address) Inet4Address.getByName("1.1.1.1");
((IGPRouterIDNodeDescriptorSubTLV)o).setIgp_router_id_type(IGPRouterIDNodeDescriptorSubTLV.IGP_ROUTER_ID_TYPE_OSPF_NON_PSEUDO);
((IGPRouterIDNodeDescriptorSubTLV)o).setIpv4Address_ospf(in);
}
else {
//System.out.println("yyyy "+c.getName());
o = ((Class)ty).newInstance();
createAllFields(o);
}
method.invoke(object, o);
}
}else if (ty instanceof ParameterizedType){
ParameterizedType pt=(ParameterizedType)ty;
Type rt=pt.getRawType();
Type at=pt.getActualTypeArguments()[0];
if (rt instanceof Class){
Class ca=(Class)rt;
if (ca.getName().equals("java.util.LinkedList")){
String name="get"+field.getName().replaceFirst(field.getName().substring(0, 1), field.getName().substring(0, 1).toUpperCase());
String name2="set"+field.getName().replaceFirst(field.getName().substring(0, 1), field.getName().substring(0, 1).toUpperCase());
//System.out.println("name "+name);
//System.out.println("name2 "+name2);
//Method method = object.getClass().getMethod("get"+field.getName().replaceFirst(field.getName().substring(0, 1), field.getName().substring(0, 1).toUpperCase()));
Method method = object.getClass().getMethod(name);
Method method2 = object.getClass().getMethod(name2,ca);
Object res=method.invoke(object);
Method[] methods =res.getClass().getDeclaredMethods();
if (((Class)at).getName().equals("es.tid.rsvp.objects.subobjects.EROSubobject")) {
LinkedList<EROSubobject> llero = new LinkedList<EROSubobject>();
IPv4prefixEROSubobject eroso = new IPv4prefixEROSubobject();
Inet4Address in=(Inet4Address) Inet4Address.getByName("1.1.1.1");
eroso.setIpv4address(in);
eroso.setPrefix(16);
llero.add(eroso);
method2.invoke(object, llero);
} else if (((Class)at).getName().equals("es.tid.rsvp.objects.subobjects.RROSubobject")) {
System.out.println("FIXME: es.tid.rsvp.objects.subobjects.RROSubobject");
}else if (((Class)at).getName().equals("es.tid.pce.pcep.objects.subobjects.XROSubobject")) {
System.out.println("FIXME: es.tid.pce.pcep.objects.subobjects.XROSubobject");
}else if (((Class)at).getName().equals("es.tid.pce.pcep.tlvs.PCEPTLV")) {
System.out.println("FIXME: es.tid.pce.pcep.tlvs.PCEPTLV");
}
else if (((Class)at).getName().equals("es.tid.pce.pcep.objects.Metric")) {
LinkedList<Metric> ll=new LinkedList<Metric>();
Object o = ((Class)at).newInstance();
createAllFields(o);
ll.add((Metric)o);
method2.invoke(object,ll);
}
else if (((Class)at).getName().equals("es.tid.pce.pcep.objects.ObjectiveFunction")) {
LinkedList<ObjectiveFunction> ll2=new LinkedList<ObjectiveFunction>();
Object o = ((Class)at).newInstance();
createAllFields(o);
ll2.add((ObjectiveFunction)o);
method2.invoke(object,ll2);
}
else if (((Class)at).getName().equals("es.tid.pce.pcep.constructs.SwitchEncodingType")) {
LinkedList<SwitchEncodingType> ll=new LinkedList<SwitchEncodingType>();
Object o = ((Class)at).newInstance();
createAllFields(o);
ll.add((SwitchEncodingType)o);
method2.invoke(object,ll);
}
else if (((Class)at).getName().equals("es.tid.pce.pcep.constructs.Path")) {
LinkedList<Path> ll=new LinkedList<Path>();
Object o = ((Class)at).newInstance();
createAllFields(o);
ll.add((Path)o);
method2.invoke(object,ll);
}
else if (((Class) at).isPrimitive()){
System.out.println("FIXME: PRIMITIVE "+ ((Class)at).getName());
}
else {
if (((Class)at).getName().equals("java.lang.Integer")) {
LinkedList<Integer> ll=new LinkedList<Integer>();
method2.invoke(object,ll);
Integer in=new Integer(3);
ll.add(in);
}else if (((Class)at).getName().equals("java.lang.Long")) {
Long in=new Long(5);
methods[0].invoke(res, in);
}else if (((Class)at).getName().equals("java.net.Inet4Address")) {
Inet4Address in=(Inet4Address) Inet4Address.getByName("1.1.1.1");
methods[0].invoke(res, in);
}else {
//Object ll= pt.getRawType(). .newInstance();
Object ll= ca.newInstance();
System.out.println("FIXME in java 7: "+((Class)at).getName());
Object o = ((Class)at).newInstance();
createAllFields(o);
//Method method3 = ll.getClass().getMethod("add");
//method3.invoke(ll, o);
Method[] methodss =ll.getClass().getDeclaredMethods();
methodss[0].invoke(ll, o);
method2.invoke(object,ll);
}
}
}
}
}
}
}
}
catch (Exception e) {
e.printStackTrace();
}
}
public static void fillPrimitive(Object object, Method method,Type tyy) {
try {
Class ty=(Class)tyy;
if (ty.getName().equals("int")){
method.invoke(object, 0);
}else if (ty.getName().equals("boolean")){
method.invoke(object,true);
}
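            // Other primitive types (long, float, double, char, ...) are simply not set
            // here and keep their default values.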
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
|
package fr.adrienbrault.idea.symfony2plugin.ui;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.ui.AnActionButton;
import com.intellij.ui.AnActionButtonRunnable;
import com.intellij.ui.AnActionButtonUpdater;
import com.intellij.ui.ToolbarDecorator;
import com.intellij.ui.table.TableView;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.ElementProducer;
import com.intellij.util.ui.ListTableModel;
import fr.adrienbrault.idea.symfony2plugin.Settings;
import fr.adrienbrault.idea.symfony2plugin.TwigHelper;
import fr.adrienbrault.idea.symfony2plugin.templating.path.TwigNamespaceSetting;
import fr.adrienbrault.idea.symfony2plugin.templating.path.TwigPath;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.TableModelEvent;
import javax.swing.event.TableModelListener;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class TwigSettingsForm implements Configurable {
private JPanel panel1;
private JPanel panelTableView;
private JButton resetToDefault;
private TableView<TwigPath> tableView;
private Project project;
private boolean changed = false;
private ListTableModel<TwigPath> modelList;
public TwigSettingsForm(@NotNull Project project) {
this.project = project;
}
private void attachItems(boolean includeSettings) {
        // @TODO: remove this check once the init stuff is moved out of the constructor
        // don't load in a project-less context
if(this.project == null) {
return;
}
List<TwigPath> sortableLookupItems = new ArrayList<TwigPath>();
sortableLookupItems.addAll(TwigHelper.getTwigNamespaces(this.project, includeSettings));
Collections.sort(sortableLookupItems);
for (TwigPath twigPath : sortableLookupItems) {
            // don't add the managed instance directly; use a clone
this.modelList.addRow(twigPath.clone());
}
}
@Nls
@Override
public String getDisplayName() {
return "Twig";
}
@Nullable
@Override
public String getHelpTopic() {
return null;
}
@Nullable
@Override
public JComponent createComponent() {
this.tableView = new TableView<TwigPath>();
this.modelList = new ListTableModel<TwigPath>(
new NamespaceColumn(),
new PathColumn(project),
new TypeColumn(),
new CustomColumn(),
new DisableColumn()
);
this.attachItems(true);
this.tableView.setModelAndUpdateColumns(this.modelList);
this.modelList.addTableModelListener(new TableModelListener() {
@Override
public void tableChanged(TableModelEvent e) {
TwigSettingsForm.this.changed = true;
}
});
resetToDefault.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
super.mouseClicked(e);
TwigSettingsForm.this.resetList();
List<TwigPath> sortableLookupItems = new ArrayList<TwigPath>();
sortableLookupItems.addAll(TwigHelper.getTwigNamespaces(TwigSettingsForm.this.project, false));
Collections.sort(sortableLookupItems);
for (TwigPath twigPath : sortableLookupItems) {
                    // don't add the managed instance directly; use a clone
                    // @TODO: setting the state to enabled should not happen here
TwigSettingsForm.this.modelList.addRow(twigPath.clone().setEnabled(true));
}
}
});
ToolbarDecorator tablePanel = ToolbarDecorator.createDecorator(this.tableView, new ElementProducer<TwigPath>() {
@Override
public TwigPath createElement() {
//IdeFocusManager.getInstance(TwigSettingsForm.this.project).requestFocus(TwigNamespaceDialog.getWindows(), true);
                return null;
}
@Override
public boolean canCreateElement() {
                return true;
}
});
tablePanel.setEditAction(new AnActionButtonRunnable() {
@Override
public void run(AnActionButton anActionButton) {
TwigSettingsForm.this.openTwigPathDialog(TwigSettingsForm.this.tableView.getSelectedObject());
}
});
tablePanel.setAddAction(new AnActionButtonRunnable() {
@Override
public void run(AnActionButton anActionButton) {
TwigSettingsForm.this.openTwigPathDialog(null);
}
});
tablePanel.setEditActionUpdater(new AnActionButtonUpdater() {
@Override
public boolean isEnabled(AnActionEvent e) {
TwigPath twigPath = TwigSettingsForm.this.tableView.getSelectedObject();
return twigPath != null && twigPath.isCustomPath();
}
});
tablePanel.setRemoveActionUpdater(new AnActionButtonUpdater() {
@Override
public boolean isEnabled(AnActionEvent e) {
TwigPath twigPath = TwigSettingsForm.this.tableView.getSelectedObject();
return twigPath != null && twigPath.isCustomPath();
}
});
tablePanel.disableUpAction();
tablePanel.disableDownAction();
this.panelTableView.add(tablePanel.createPanel());
return this.panel1;
}
@Override
public boolean isModified() {
return this.changed;
}
@Override
public void apply() throws ConfigurationException {
List<TwigNamespaceSetting> twigPaths = new ArrayList<TwigNamespaceSetting>();
for(TwigPath twigPath :this.tableView.getListTableModel().getItems()) {
            // only custom and disabled paths need to be saved
if((!twigPath.isEnabled() && twigPath.getRelativePath(this.project) != null) || twigPath.isCustomPath()) {
twigPaths.add(new TwigNamespaceSetting(twigPath.getNamespace(), twigPath.getRelativePath(this.project), twigPath.isEnabled(), twigPath.getNamespaceType(), twigPath.isCustomPath()));
}
}
getSettings().twigNamespaces = twigPaths;
this.changed = false;
}
private Settings getSettings() {
return Settings.getInstance(this.project);
}
private void resetList() {
// clear list, easier?
while(this.modelList.getRowCount() > 0) {
this.modelList.removeRow(0);
}
}
@Override
public void reset() {
this.resetList();
this.attachItems(true);
this.changed = false;
}
@Override
public void disposeUIResources() {
this.resetList();
}
private class NamespaceColumn extends ColumnInfo<TwigPath, String> {
public NamespaceColumn() {
super("Namespace");
}
@Nullable
@Override
public String valueOf(TwigPath twigPath) {
return twigPath.getNamespace();
}
}
private class PathColumn extends ColumnInfo<TwigPath, String> {
private Project project;
public PathColumn(Project project) {
super("Path");
this.project = project;
}
@Nullable
@Override
public String valueOf(TwigPath twigPath) {
return twigPath.getRelativePath(this.project);
}
}
private class CustomColumn extends ColumnInfo<TwigPath, String> {
public CustomColumn() {
super("Parser");
}
@Nullable
@Override
public String valueOf(TwigPath twigPath) {
return twigPath.isCustomPath() ? "Custom" : "Internal";
}
}
private class TypeColumn extends ColumnInfo<TwigPath, String> {
public TypeColumn() {
super("Type");
}
@Nullable
@Override
public String valueOf(TwigPath twigPath) {
return twigPath.getNamespaceType().toString();
}
}
private abstract class BooleanColumn extends ColumnInfo<TwigPath, Boolean>
{
public BooleanColumn(String name) {
super(name);
}
public boolean isCellEditable(TwigPath groupItem)
{
return true;
}
public Class getColumnClass()
{
return Boolean.class;
}
}
private class DisableColumn extends BooleanColumn {
public DisableColumn() {
super("on");
}
public Boolean valueOf(TwigPath twigPath) {
return twigPath.isEnabled();
}
public void setValue(TwigPath twigPath, Boolean value){
twigPath.setEnabled(value);
TwigSettingsForm.this.tableView.getListTableModel().fireTableDataChanged();
}
public int getWidth(JTable table) {
return 50;
}
}
private void openTwigPathDialog(@Nullable TwigPath twigPath) {
TwigNamespaceDialog twigNamespaceDialog;
if(twigPath == null) {
twigNamespaceDialog = new TwigNamespaceDialog(project, this.tableView);
} else {
twigNamespaceDialog = new TwigNamespaceDialog(project, this.tableView, twigPath);
}
Dimension dim = new Dimension();
dim.setSize(500, 190);
twigNamespaceDialog.setTitle("Twig Namespace");
twigNamespaceDialog.setMinimumSize(dim);
twigNamespaceDialog.pack();
twigNamespaceDialog.setLocationRelativeTo(TwigSettingsForm.this.panel1);
twigNamespaceDialog.setVisible(true);
}
}
|
package gate.plugin.learningframework.engines;
import cc.mallet.fst.CRF;
import cc.mallet.fst.CRFOptimizableByLabelLikelihood;
import cc.mallet.fst.CRFTrainerByLabelLikelihood;
import cc.mallet.fst.CRFTrainerByStochasticGradient;
import cc.mallet.fst.CRFTrainerByThreadedLabelLikelihood;
import cc.mallet.fst.CRFTrainerByValueGradients;
import cc.mallet.fst.MEMM;
import cc.mallet.fst.MEMMTrainer;
import cc.mallet.fst.SumLatticeDefault;
import cc.mallet.fst.Transducer;
import cc.mallet.fst.TransducerTrainer;
import cc.mallet.fst.ViterbiWriter;
import cc.mallet.optimize.Optimizable;
import cc.mallet.types.FeatureVectorSequence;
import cc.mallet.types.Instance;
import cc.mallet.types.InstanceList;
import gate.Annotation;
import gate.AnnotationSet;
import gate.plugin.learningframework.EvaluationMethod;
import gate.plugin.learningframework.ModelApplication;
import gate.plugin.learningframework.data.CorpusRepresentationMalletSeq;
import static gate.plugin.learningframework.engines.Engine.FILENAME_MODEL;
import gate.plugin.learningframework.features.TargetType;
import gate.util.GateRuntimeException;
import java.io.File;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.log4j.Logger;
import static gate.plugin.learningframework.LFUtils.newURL;
/**
*
* @author Johann Petrak
*/
public class EngineMBMalletSeq extends EngineMBMallet {
private static Logger logger = Logger.getLogger(EngineMBMalletSeq.class);
@Override
public void initializeAlgorithm(Algorithm algorithm, String parms) {
    // DOES NOTHING?
}
@Override
  public void trainModel(File dataDirectory, String instanceType, String options) {
InstanceList trainingData = corpusRepresentation.getRepresentationMallet();
Transducer td = trainModel(trainingData,options);
model = td;
updateInfo();
}
public static TransducerTrainer createTrainer(InstanceList trainingData, Info info, String options) {
TransducerTrainer transtrainer = null;
// NOTE: Training of the CRF is very flexible in Mallet and not everything is clear to me
// yet. Unfortunately, there is practically no documentation available.
// There is some useful example code around:
// src/cc/mallet/examples/TrainCRF.java - very basic example
    // src/cc/mallet/fst/SimpleTagger.java - more detailed: also shows multithreaded training!
// NOTE: the name can come from an algorithm selected for classification OR an algorithm
// selected for actual sequence tagging. This is why we check the literal name here
// instead of something derived from the Algorithm enum class.
// NOTE on supported trainers: we only support trainers here which are not
// too complex to set up and which can be used with the normal succession of
// how training works in the LF.
// Mallet also supports a lot of additional things, e.g. regularization
// on unlabeled data, but this cannot be used here.
String alg = info.algorithmName;
System.err.println("DEBUG: our algorithm name is "+alg);
if(alg.startsWith("MALLET_SEQ_CRF")) {
CRF crf = new CRF(trainingData.getPipe(), null);
Parms parms = new Parms(options,
"S:states:s",
"o:orders:s",
"f:ofully:b",
"a:addstart:b",
"v:logViterbiPaths:i",
"t:threads:i",
"sg:stochasticGradient:b",
"wdd:weightDimDensely:b",
"usw:useSparseWeights:b",
"ssut:setSomeUnsupportedTrick:b");
String states = (String)parms.getValueOrElse("states", "fully-connected");
switch (states) {
case "fully-connected":
crf.addFullyConnectedStatesForLabels();
break;
case "as-in":
crf.addStatesForLabelsConnectedAsIn(trainingData);
break;
case "fully-threequarter":
crf.addFullyConnectedStatesForThreeQuarterLabels(trainingData);
break;
case "half":
crf.addStatesForHalfLabelsConnectedAsIn(trainingData);
break;
case "order-n":
int[] orders = new int[]{1};
String ordersparm = (String)parms.getValueOrElse("orders", "0:1");
if(ordersparm.equals("1")) {
orders = new int[]{1};
} else if(ordersparm.equals("0:1")) {
orders = new int[]{0,1};
} else if(ordersparm.equals("0:1:2")) {
orders = new int[]{0,1,2};
} else if(ordersparm.equals("0")) {
orders = new int[]{0};
} else if(ordersparm.equals("1:2")) {
orders = new int[]{1,2};
} else if(ordersparm.equals("2")) {
orders = new int[]{2};
} else {
throw new GateRuntimeException("Invalid value for parameter orders: "+ordersparm);
}
boolean ofully = (Boolean)parms.getValueOrElse("ofully", false);
crf.addOrderNStates(trainingData, orders, null, null, null, null, ofully);
break;
default:
throw new GateRuntimeException("Unknown value for parameter states: "+states);
}
boolean addStart = (boolean) parms.getValueOrElse("addstart", true);
if(addStart) crf.addStartState();
boolean wdd = (boolean) parms.getValueOrElse("weightDimDensely", false);
if(wdd) crf.setWeightsDimensionDensely();
// initialize model's weights
// TODO: make this conditional on a parm, how does this relate to
// weightDimDensely??
// !!! This should probably be the same parameter!!!
// TODO: second parm should depend on the unsupported trick option!
crf.setWeightsDimensionAsIn(trainingData, false);
// now depending on which trainer we want we need to do slightly different
// things
if(alg.equals("MALLET_SEQ_CRF")) { // By[Thread]LabelLikelihood
// if threads parameter is specified and >0, we use ByThreadLabelLikelihood
int threads = (int) parms.getValueOrElse("threads", 0);
boolean usw = (boolean) parms.getValueOrElse("useSparseWeights", false);
boolean ssut = (boolean) parms.getValueOrElse("setSomeUnsupportedTrick", false);
if(threads<=0) {
CRFTrainerByLabelLikelihood tr = new CRFTrainerByLabelLikelihood(crf);
if(usw) tr.setUseSparseWeights(true);
if(ssut) tr.setUseSomeUnsupportedTrick(true);
transtrainer = tr;
} else {
CRFTrainerByThreadedLabelLikelihood tr =
new CRFTrainerByThreadedLabelLikelihood(crf, threads);
if(usw) tr.setUseSparseWeights(true);
if(ssut) tr.setUseSomeUnsupportedTrick(true);
transtrainer = tr;
}
} else if(alg.equals("MALLET_SEQ_CRF_SG")) {
        // TODO: instead of all trainingData, use a sample?
        // TODO: allow specifying a learning rate instead of passing trainingData?
        CRFTrainerByStochasticGradient crft =
            new CRFTrainerByStochasticGradient(crf, trainingData);
        transtrainer = crft;
} else if(alg.equals("MALLET_SEQ_CRF_VG")) {
// CRFOptimizableBy* objects (terms in the objective function)
// objective 1: label likelihood objective
CRFOptimizableByLabelLikelihood optLabel
= new CRFOptimizableByLabelLikelihood(crf, trainingData);
Optimizable.ByGradientValue[] opts
= new Optimizable.ByGradientValue[]{optLabel};
// by default, use L-BFGS as the optimizer
CRFTrainerByValueGradients crfTrainer = new CRFTrainerByValueGradients(crf, opts);
crfTrainer.setMaxResets(0);
transtrainer = crfTrainer;
} else {
throw new GateRuntimeException("Not yet supported: "+alg);
}
// TODO: if we want to output the viterbi paths:
int logVit = (int) parms.getValueOrElse("logViterbiPaths", 0);
if(logVit==0) logVit=Integer.MAX_VALUE;
final int lv = logVit;
ViterbiWriter viterbiWriter = new ViterbiWriter(
"LF_debug", // output file prefix
new InstanceList[] { trainingData },
new String[] { "train" }) {
@Override
public boolean precondition (TransducerTrainer tt) {
return tt.getIteration() % lv == 0;
}
};
transtrainer.addEvaluator(viterbiWriter);
} else if(alg.equals("MALLET_SEQ_MEMM")) {
// TODO:
MEMM memm = new MEMM(trainingData.getDataAlphabet(),trainingData.getTargetAlphabet());
transtrainer = new MEMMTrainer(memm);
} else {
// Nothing else supported!
throw new GateRuntimeException("EngineMalletSeq: unknown/unsupported algorithm: "+alg);
}
return transtrainer;
}
@Override
protected void loadAndSetCorpusRepresentation(URL directory) {
if(corpusRepresentation==null)
corpusRepresentation = CorpusRepresentationMalletSeq.load(directory);
}
public Transducer trainModel(InstanceList trainingData, String options) {
TransducerTrainer trainer = createTrainer(trainingData, info, options);
Parms parms = new Parms(options,"i:iterations:i","V:verbose:b");
boolean verbose = (boolean)parms.getValueOrElse("verbose", false);
int iters = (int) parms.getValueOrElse("iterations", 0);
if(iters==0) iters = Integer.MAX_VALUE;
trainer.train(trainingData, iters);
if(verbose)
trainer.getTransducer().print();
Transducer td = trainer.getTransducer();
return td;
}
@Override
public List<ModelApplication> applyModel(
AnnotationSet instanceAS, AnnotationSet inputAS, AnnotationSet sequenceAS,
String parms) {
// stop growth
CorpusRepresentationMalletSeq data = (CorpusRepresentationMalletSeq)corpusRepresentation;
data.stopGrowth();
List<ModelApplication> gcs = new ArrayList<ModelApplication>();
Transducer crf = (Transducer)model;
for(Annotation sequenceAnn : sequenceAS) {
int sequenceSpanId = sequenceAnn.getId();
Instance inst = data.getInstanceForSequence(
instanceAS, sequenceAnn, inputAS, null, null, TargetType.NONE, null, null);
//Always put the instance through the same pipe used for training.
inst = crf.getInputPipe().instanceFrom(inst);
SumLatticeDefault sl = new SumLatticeDefault(crf,
(FeatureVectorSequence) inst.getData());
List<Annotation> instanceAnnotations = gate.Utils.getContainedAnnotations(
instanceAS, sequenceAnn).inDocumentOrder();
//Sanity check that we're mapping the probs back onto the right anns.
//This being wrong might follow from errors reading in the data to mallet inst.
if (instanceAnnotations.size() != ((FeatureVectorSequence) inst.getData()).size()) {
logger.warn("LearningFramework: CRF output length: "
+ ((FeatureVectorSequence) inst.getData()).size()
+ ", GATE instances: " + instanceAnnotations.size()
+ ". Can't assign.");
} else {
int i = 0;
for (Annotation instanceAnn : instanceAnnotations) {
i++;
String bestLabel = null;
double bestProb = 0.0;
//For each label option ..
// NOTE: for CRF we had this code:
//for (int j = 0; j < crf.getOutputAlphabet().size(); j++) {
// String label = crf.getOutputAlphabet().lookupObject(j).toString();
// but for Transducer we do not have the getOutputAlphabet method so we use
// model.getInputPipe().getTargetAlphabet() instead (this seems to be what
// is used inside CRF anyway.)
for (int j = 0; j < crf.getInputPipe().getTargetAlphabet().size(); j++) {
String label = crf.getInputPipe().getTargetAlphabet().lookupObject(j).toString();
//Get the probability of being in state j at position i+1
//Note that the plus one is because the labels are on the
//transitions. Positions are between transitions.
double marg = sl.getGammaProbability(i, crf.getState(j));
if (marg > bestProb) {
bestLabel = label;
bestProb = marg;
}
}
ModelApplication gc = new ModelApplication(
instanceAnn, bestLabel, bestProb, sequenceSpanId);
gcs.add(gc);
}
}
}
data.startGrowth();
return gcs;
}
@Override
protected void loadModel(URL directory, String parms) {
URL modelFile = newURL(directory, FILENAME_MODEL);
Transducer classifier;
try (InputStream is = modelFile.openStream();
ObjectInputStream ois = new ObjectInputStream(is)) {
classifier = (CRF) ois.readObject();
model=classifier;
} catch (Exception ex) {
throw new GateRuntimeException("Could not load Mallet model", ex);
}
}
@Override
// NOTE: this evaluates only the classification problem generated from the original chunking problem,
// so as for classification, we get accuracy estimates, not precision/recall/F-measure.
// We do not have anything in the LearningFramework for doing F-measure evaluation, this has to
// be done outside of the LF in some kind of wrapper or script that invokes the proper LF methods.
public EvaluationResult evaluate(String algorithmParameters, EvaluationMethod evaluationMethod, int numberOfFolds, double trainingFraction, int numberOfRepeats) {
EvaluationResult ret = null;
Parms parms = new Parms(algorithmParameters,"s:seed:i");
int seed = (Integer)parms.getValueOrElse("seed", 1);
if(evaluationMethod == EvaluationMethod.CROSSVALIDATION) {
InstanceList.CrossValidationIterator cvi = corpusRepresentation.getRepresentationMallet().crossValidationIterator(numberOfFolds, seed);
if(algorithm instanceof AlgorithmClassification) {
double sumOfAccs = 0.0;
while(cvi.hasNext()) {
InstanceList[] il = cvi.nextSplit();
InstanceList trainSet = il[0];
InstanceList testSet = il[1];
Transducer crf = trainModel(trainSet, algorithmParameters);
sumOfAccs += crf.averageTokenAccuracy(testSet);
}
EvaluationResultClXval e = new EvaluationResultClXval();
e.internalEvaluationResult = null;
e.accuracyEstimate = sumOfAccs/numberOfFolds;
e.nrFolds = numberOfFolds;
ret = e;
} else {
throw new GateRuntimeException("Mallet evaluation: not available for regression!");
}
} else {
if(algorithm instanceof AlgorithmClassification) {
Random rnd = new Random(seed);
double sumOfAccs = 0.0;
for(int i = 0; i<numberOfRepeats; i++) {
InstanceList[] sets = corpusRepresentation.getRepresentationMallet().split(rnd,
new double[]{trainingFraction, 1-trainingFraction});
Transducer crf = trainModel(sets[0], algorithmParameters);
sumOfAccs += crf.averageTokenAccuracy(sets[1]);
}
EvaluationResultClHO e = new EvaluationResultClHO();
e.internalEvaluationResult = null;
e.accuracyEstimate = sumOfAccs/numberOfRepeats;
e.trainingFraction = trainingFraction;
e.nrRepeats = numberOfRepeats;
ret = e;
} else {
throw new GateRuntimeException("Mallet evaluation: not available for regression!");
}
}
return ret;
}
}
|
package me.coley.recaf;
import me.coley.recaf.bytecode.AccessFlag;
import me.coley.recaf.bytecode.InsnUtil;
import org.objectweb.asm.tree.*;
import me.coley.recaf.parse.assembly.Assembly;
import me.coley.recaf.parse.assembly.exception.ExceptionWrapper;
import org.junit.jupiter.api.Test;
import java.lang.reflect.Field;
import static org.junit.jupiter.api.Assertions.*;
import static org.objectweb.asm.Opcodes.*;
/**
* Tests for Assembler
*
* @author Matt
*/
// TODO: Tests for compiling:
// - TableSwitchInsnNode
// - LookupSwitchInsnNode
// - LineNumberNode
// TODO: Tests for verifying bad syntax doesn't compile
public class AssemblerTest {
private final Assembly asm = new Assembly();
@Test
public void testIndividualInsns() {
asm.setMethodDeclaration(ACC_PUBLIC, "name", "()V");
asm.setDoVerify(false);
checkInsnMatch("ALOAD 0", new VarInsnNode(ALOAD, 0));
checkInsnMatch("ALOAD this", new VarInsnNode(ALOAD, 0));
checkInsnMatch("BIPUSH 10", new IntInsnNode(BIPUSH, 10));
checkInsnMatch("NEW java/io/InputStream", new TypeInsnNode(NEW, "java/io/InputStream"));
checkInsnMatch("LDC \"String\"", new LdcInsnNode("String"));
checkInsnMatch("LDC 10L", new LdcInsnNode(10L));
checkInsnMatch("LDC 10D", new LdcInsnNode(10D));
checkInsnMatch("LDC 10F", new LdcInsnNode(10F));
checkInsnMatch("IINC 1 + 1", new IincInsnNode(1, 1));
checkInsnMatch("IINC 1 - 1", new IincInsnNode(1, -1));
checkInsnMatch("MULTIANEWARRAY java/lang/String 2",
new MultiANewArrayInsnNode("java/lang/String", 2));
checkInsnMatch("GETFIELD java/lang/System.out Ljava/io/PrintStream;",
new FieldInsnNode(GETFIELD, "java/lang/System", "out", "Ljava/io/PrintStream;"));
checkInsnMatch("INVOKEVIRTUAL java/io/PrintStream.println(Ljava/lang/String;)V",
new MethodInsnNode(INVOKEVIRTUAL, "java/io/PrintStream", "println", "(Ljava/lang/String;)V"));
}
@Test
public void verifyExpectedVarTypes() {
// Setting up some example scenario, a class for a painting.
// The method will draw a color at a location with some given amount of blur.
/*
public void draw(int x, int y, double blur, Color color) {
Pixel pixel = getPixel(x, y);
pixel.setColor(color, blur);
}
*/
asm.setHostType("example/Painting");
asm.setMethodDeclaration(ACC_PUBLIC, "draw", "(IIDLexample/Color;)V");
asm.setDoVerify(true);
asm.setDoGenerateLocals(true);
String[] lines = new String[] {
"ALOAD this",
"ILOAD p1x",
"ILOAD p2y",
"INVOKESPECIAL example/Painting.getPixel(II)Lexample/Pixel;",
"ASTORE pixel",
"ALOAD pixel",
"ALOAD p4color",
"DLOAD p3blur",
"INVOKEVIRTUAL example/Painting.setColor(Lexample/Color;D)V",
"RETURN"
};
assertTrue(asm.parseInstructions(lines));
// Test locals
LocalVariableNode lThis = InsnUtil.getLocal(asm.getMethod(), 0);
assertNotNull(lThis);
assertEquals("this", lThis.name);
assertEquals("Lexample/Painting;", lThis.desc);
LocalVariableNode lX = InsnUtil.getLocal(asm.getMethod(), 1);
assertNotNull(lX);
assertEquals("x", lX.name);
assertEquals("I", lX.desc);
LocalVariableNode lY = InsnUtil.getLocal(asm.getMethod(), 2);
assertNotNull(lY);
assertEquals("y", lY.name);
assertEquals("I", lY.desc);
LocalVariableNode lBlur = InsnUtil.getLocal(asm.getMethod(), 3);
assertNotNull(lBlur);
assertEquals("blur", lBlur.name);
assertEquals("D", lBlur.desc);
		// Index is 5, not 4, because "blur", which is a double, takes 2 local variable slots
LocalVariableNode lColor = InsnUtil.getLocal(asm.getMethod(), 5);
assertNotNull(lColor);
assertEquals("color", lColor.name);
assertEquals("Lexample/Color;", lColor.desc);
// "pixel" should be 6 because it is the next open space after 5
// Method-locals should start indexing just after the highest parameter value.
asm.getMethod().localVariables.forEach(lv -> {
System.out.println(lv.index + ":" + lv.name + ":" + lv.desc);
});
LocalVariableNode lPixel = InsnUtil.getLocal(asm.getMethod(), 6);
assertNotNull(lPixel);
assertEquals("pixel", lPixel.name);
assertEquals("Lexample/Pixel;", lPixel.desc);
}
@Test
public void testVerifyPopNoStack() {
asm.setMethodDeclaration(ACC_PUBLIC, "name", "()V");
asm.setDoVerify(true);
// One value on the stack, but two values are popped off.
String[] lines = new String[] {
"ICONST_0",
"POP",
"POP",
"RETURN"
};
// Parse should fail due to verification on 3rd line
assertFalse(asm.parseInstructions(lines));
ExceptionWrapper wrapper = asm.getExceptions().get(0);
		assertEquals(3, wrapper.line);
assertTrue(wrapper.exception.toString().contains("Cannot pop operand off an empty stack"));
}
@Test
public void testVerifyNoReturn() {
asm.setMethodDeclaration(ACC_PUBLIC, "name", "()V");
asm.setDoVerify(true);
String[][] cases = new String[][] {
// Do-nothing void methods still require a RETURN at the end.
new String[] {
"NOP"
},
// The jump can skip past the RETURN
new String[] {
"ICONST_0",
"IFEQ after",
"LABEL before",
"RETURN",
"LABEL after"
}
};
for (String[] lines : cases) {
assertFalse(asm.parseInstructions(lines));
ExceptionWrapper wrapper = asm.getExceptions().get(0);
			assertEquals(-1, wrapper.line);
assertTrue(wrapper.exception.toString().contains("fall off"));
}
}
private <T extends AbstractInsnNode> T getInsn(String line) {
asm.parseInstructions(new String[] {line});
if (!asm.getExceptions().isEmpty()) {
asm.getExceptions().forEach(
wrap -> wrap.printStackTrace());
fail("Parse failure");
}
return (T) asm.getMethod().instructions.get(0);
}
private void checkInsnMatch(String line, AbstractInsnNode expected) {
AbstractInsnNode insn = getInsn(line);
assertEquals(expected.getOpcode(), insn.getOpcode());
reflectEquals(expected, insn);
}
private void reflectEquals(Object expected, Object actual) {
assertNotNull(expected);
assertNotNull(actual);
Class<?> ce = expected.getClass();
Class<?> ca = actual.getClass();
assertEquals(ce, ca);
for (int i = 0; i < ce.getDeclaredFields().length; i++) {
Field f1 = ce.getDeclaredFields()[i];
// Skip static, final, and non-publics
if (AccessFlag.isStatic(f1.getModifiers()) ||
AccessFlag.isFinal(f1.getModifiers()) ||
!AccessFlag.isPublic(f1.getModifiers())) {
continue;
}
			Field f2 = ca.getDeclaredFields()[i];
try {
f1.setAccessible(true);
f2.setAccessible(true);
assertEquals(f1.get(expected), f2.get(actual));
} catch(ReflectiveOperationException e) {
fail(e);
}
}
}
}
|
package org.xins.server;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.oro.text.regex.MalformedPatternException;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import org.xins.common.MandatoryArgumentChecker;
import org.xins.common.ProgrammingError;
import org.xins.common.Utils;
import org.xins.common.collections.CollectionUtils;
import org.xins.common.collections.ProtectedPropertyReader;
import org.xins.common.text.TextUtils;
/**
* Abstraction of a calling convention. A calling convention determines how an
* HTTP request is converted to a XINS request and how a XINS response is
* converted back to an HTTP response.
*
* <p>Calling convention implementations are thread-safe. Hence if a calling
* convention does not have any configuration parameters per instance, then
* the <em>Singleton</em> pattern can be applied.
*
* @version $Revision$ $Date$
* @author Anthony Goubard (<a href="mailto:anthony.goubard@nl.wanadoo.com">anthony.goubard@nl.wanadoo.com</a>)
* @author Ernst de Haan (<a href="mailto:ernst.dehaan@nl.wanadoo.com">ernst.dehaan@nl.wanadoo.com</a>)
*/
abstract class CallingConvention
extends Object {
// Class fields
/**
* Fully-qualified name of this class.
*/
private static final String CLASSNAME = CallingConvention.class.getName();
/**
* Perl 5 pattern compiler.
*/
private static final Perl5Compiler PATTERN_COMPILER = new Perl5Compiler();
/**
* Pattern matcher.
*/
private static final Perl5Matcher PATTERN_MATCHER = new Perl5Matcher();
/**
* The pattern which normal parameter names should match, as a character
* string.
*/
private static final String PATTERN_STRING = "[a-z][a-z0-9_]*";
/**
* The compiled pattern which normal parameter names should match.
*/
private static final Pattern PATTERN;
// Class functions
/**
* Initializes this class. This function compiles {@link #PATTERN_STRING}
* to a {@link Pattern} and then stores that in {@link #PATTERN}.
*/
static {
final String THIS_METHOD = "<clinit>()";
try {
PATTERN = PATTERN_COMPILER.compile(
PATTERN_STRING,
Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK);
} catch (MalformedPatternException exception) {
final String SUBJECT_CLASS = PATTERN_COMPILER.getClass().getName();
final String SUBJECT_METHOD = "compile(java.lang.String,int)";
final String DETAIL = "The pattern \""
+ PATTERN_STRING
+ "\" is considered malformed.";
throw Utils.logProgrammingError(CLASSNAME,
THIS_METHOD,
SUBJECT_CLASS,
SUBJECT_METHOD,
DETAIL,
exception);
}
}
/**
* Determines the name of the function to be called based on the parameters
* in the specified <code>HttpServletRequest</code>.
*
* @param httpRequest
* the {@link HttpServletRequest} to get the parameters from, cannot be
* <code>null</code>.
*
* @throws NullPointerException
* if <code>httpRequest == null</code>.
*
* @throws FunctionNotSpecifiedException
* if the function name is not specified.
*
* @throws InvalidRequestException
* if both the parameter <code>"_function"</code> and the parameter
* <code>"function"</code> are specified, but they have different
* values.
*/
static String determineFunction(HttpServletRequest httpRequest)
throws FunctionNotSpecifiedException, InvalidRequestException {
// Determine function name
return determineFunction(httpRequest.getParameter("_function"),
httpRequest.getParameter("function"));
}
/**
* Determines the name of the function to be called based on the specified
* values for the parameters <code>"_function"</code> and
* <code>"function"</code>.
*
* @param withUnderScore
* the value of the parameter <code>"_function"</code>.
*
* @param withoutUnderScore
* the value of the parameter <code>"function"</code>.
*
* @throws FunctionNotSpecifiedException
* if the function name is not specified in either of the parameters.
*
* @throws InvalidRequestException
* if both the parameter <code>"_function"</code> and the parameter
* <code>"function"</code> are specified, but they have different
* values.
*/
static String determineFunction(String withUnderScore,
String withoutUnderScore)
throws FunctionNotSpecifiedException, InvalidRequestException {
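      // Illustrative behaviour of the rules below (hypothetical function names):
      //   (null, "GetUser")      -> "GetUser"
      //   ("GetUser", null)      -> "GetUser"
      //   ("GetUser", "GetUser") -> "GetUser"
      //   ("GetUser", "Other")   -> InvalidRequestException
      //   (null, null)           -> FunctionNotSpecifiedException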
String functionName;
// Function name is not specified
if (TextUtils.isEmpty(withUnderScore)
&& TextUtils.isEmpty(withoutUnderScore)) {
throw new FunctionNotSpecifiedException();
// Only "function" is specified
} else if (TextUtils.isEmpty(withUnderScore)) {
functionName = withoutUnderScore;
// Only "_function" is specified
} else if (TextUtils.isEmpty(withoutUnderScore)) {
functionName = withUnderScore;
// Both "function" and "_function" are specified, and they are equal
} else if (withUnderScore.equals(withoutUnderScore)) {
functionName = withUnderScore;
// Both "function" and "_function" are specified, but they are different
} else {
final String DETAIL = "_function="
+ TextUtils.quote(withUnderScore)
+ "; function="
+ TextUtils.quote(withoutUnderScore);
throw new InvalidRequestException(DETAIL, null);
}
return functionName;
}
/**
* Removes all parameters that should not be passed to a function. If the
* set of parameters passed is <code>null</code>, then nothing is done.
*
* @param parameters
* the {@link ProtectedPropertyReader} containing the set of parameters
* to investigate, or <code>null</code>.
*
* @param secretKey
* the secret key required to be able to modify the parameters, can be
* <code>null</code>.
*/
void cleanUpParameters(ProtectedPropertyReader parameters,
Object secretKey) {
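      // Example of the effect of this method (hypothetical parameter set):
      //   { "function"="GetUser", "name"="x", "Bad Name"="y", "empty"="" }
      //   -> only "name"="x" survives: "function" is always dropped, "Bad Name" fails
      //      the PATTERN check, and entries with empty values are removed.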
// TODO: Should we not let the diagnostic context ID through?
// If the set of parameters passed is null, then nothing is done.
if (parameters == null) {
return;
}
// Get an list of the parameter names
ArrayList names = CollectionUtils.list(parameters.getNames());
// Loop through all parameters
for (int i = 0; i < names.size(); i++) {
// Determine parameter name and value
String name = (String) names.get(i);
String value = parameters.get(name);
// If the name or value is empty, then remove the parameter
if (TextUtils.isEmpty(name) || TextUtils.isEmpty(value)) {
parameters.set(secretKey, name, null);
// XXX: If the parameter name is "function", then remove it
} else if ("function".equals(name)) {
parameters.set(secretKey, name, null);
// If the pattern is not matched, then log and remove it
} else if (! PATTERN_MATCHER.matches(name, PATTERN)) {
// FIXME: Log this
parameters.set(secretKey, name, null);
}
}
}
// Constructors
/**
* Constructs a new <code>CallingConvention</code>.
*/
protected CallingConvention() {
// empty
}
// Fields
// Methods
final FunctionRequest convertRequest(HttpServletRequest httpRequest)
throws IllegalArgumentException,
InvalidRequestException,
FunctionNotSpecifiedException {
final String THIS_METHOD = "convertRequest("
+ HttpServletRequest.class.getName()
+ ')';
// Check preconditions
MandatoryArgumentChecker.check("httpRequest", httpRequest);
final String SUBJECT_CLASS = getClass().getName(); // XXX: Cache?
final String SUBJECT_METHOD = "convertRequestImpl("
+ HttpServletRequest.class.getName()
+ ')'; // XXX: Cache?
// Delegate to the implementation method
FunctionRequest xinsRequest;
try {
xinsRequest = convertRequestImpl(httpRequest);
// Filter any thrown exceptions
} catch (Throwable t) {
if (t instanceof InvalidRequestException) {
throw (InvalidRequestException) t;
} else if (t instanceof FunctionNotSpecifiedException) {
throw (FunctionNotSpecifiedException) t;
} else {
throw Utils.logProgrammingError(CLASSNAME,
THIS_METHOD,
SUBJECT_CLASS,
SUBJECT_METHOD,
null,
t);
}
}
// Make sure the returned value is not null
if (xinsRequest == null) {
// FIXME: Use Utils.logProgrammingError
Log.log_3050(SUBJECT_CLASS, SUBJECT_METHOD, "Method returned null.");
throw new ProgrammingError(SUBJECT_CLASS + '.' + SUBJECT_METHOD + " returned null.");
}
return xinsRequest;
}
/**
* Converts an HTTP request to a XINS request (implementation method). This
* method should only be called from class {@link CallingConvention}. Only
    * then is it guaranteed that the <code>httpRequest</code> argument is not
* <code>null</code>.
*
* @param httpRequest
* the HTTP request, will not be <code>null</code>.
*
* @return
* the XINS request object, should not be <code>null</code>.
*
* @throws InvalidRequestException
    *    if the request is considered to be invalid.
*
* @throws FunctionNotSpecifiedException
* if the request does not indicate the name of the function to execute.
*/
protected abstract FunctionRequest convertRequestImpl(HttpServletRequest httpRequest)
throws InvalidRequestException,
FunctionNotSpecifiedException;
final void convertResult(FunctionResult xinsResult,
HttpServletResponse httpResponse)
throws IllegalArgumentException, IOException {
// Check preconditions
MandatoryArgumentChecker.check("xinsResult", xinsResult,
"httpResponse", httpResponse);
// Delegate to the implementation method
try {
convertResultImpl(xinsResult, httpResponse);
// Filter any thrown exceptions
} catch (Throwable exception) {
if (exception instanceof IOException) {
throw (IOException) exception;
} else {
final String THIS_METHOD = "convertResult("
+ FunctionResult.class.getName()
+ ','
+ HttpServletResponse.class.getName()
+ ')';
final String SUBJECT_CLASS = getClass().getName();
final String SUBJECT_METHOD = "convertResultImpl("
+ HttpServletRequest.class.getName()
+ ')';
throw Utils.logProgrammingError(CLASSNAME,
THIS_METHOD,
SUBJECT_CLASS,
SUBJECT_METHOD,
null,
exception);
}
}
}
/**
* Converts a XINS result to an HTTP response (implementation method).
*
* @param xinsResult
* the XINS result object that should be converted to an HTTP response,
* will not be <code>null</code>.
*
* @param httpResponse
* the HTTP response object to configure, will not be <code>null</code>.
*
* @throws IOException
* if calling any of the methods in <code>httpResponse</code> causes an
* I/O error.
*/
protected abstract void convertResultImpl(FunctionResult xinsResult,
HttpServletResponse httpResponse)
throws IOException;
// XXX: Replace IOException with more appropriate exception?
}
|
package org.takes.rq;
import com.google.common.base.Joiner;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.RestResponse;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.util.Arrays;
import java.util.HashSet;
import org.apache.commons.lang.StringUtils;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.takes.Request;
import org.takes.Response;
import org.takes.Take;
import org.takes.http.FtRemote;
import org.takes.rs.RsText;
@SuppressWarnings("PMD.TooManyMethods")
public final class RqMultipartTest {
/**
     * Carriage return / line feed (CRLF) constant.
*/
private static final String CRLF = "\r\n";
/**
* Content disposition.
*/
private static final String DISPOSITION = "Content-Disposition";
/**
* RqMultipart.Base can satisfy equals contract.
* @throws IOException if some problem inside
*/
@Test
public void satisfiesEqualsContract() throws IOException {
final Request req = new RqMultipart.Fake(
new RqFake(),
new RqWithHeader(
new RqFake("", "", "449 N Wolfe Rd, Sunnyvale, CA 94085"),
RqMultipartTest.DISPOSITION, "form-data; name=\"t-1\""
),
new RqWithHeader(
new RqFake("", "", ""),
RqMultipartTest.DISPOSITION,
"form-data; name=\"data\"; filename=\"a.bin\""
)
);
MatcherAssert.assertThat(
new RqMultipart.Base(req),
Matchers.equalTo(new RqMultipart.Base(req))
);
}
/**
* RqMultipart.Base can throw exception on no closing boundary found.
* @throws IOException if some problem inside
*/
@Test(expected = IOException.class)
public void throwsExceptionOnNoClosingBoundaryFound() throws IOException {
new RqMultipart.Base(
new RqFake(
Arrays.asList(
"POST /h?a=4 HTTP/1.1",
"Host: rtw.example.com",
"Content-Type: multipart/form-data; boundary=AaB01x",
"Content-Length: 100007"
),
Joiner.on(RqMultipartTest.CRLF).join(
"--AaB01x",
"Content-Disposition: form-data; fake=\"t2\"",
"",
"447 N Wolfe Rd, Sunnyvale, CA 94085",
"Content-Transfer-Encoding: uwf-8"
)
)
);
}
/**
* RqMultipart.Fake can throw exception on no name
* at Content-Disposition header.
* @throws IOException if some problem inside
*/
@Test(expected = IOException.class)
public void throwsExceptionOnNoNameAtContentDispositionHeader()
throws IOException {
new RqMultipart.Fake(
new RqWithHeader(
new RqFake("", "", "340 N Wolfe Rd, Sunnyvale, CA 94085"),
RqMultipartTest.DISPOSITION, "form-data; fake=\"t-3\""
)
);
}
/**
* RqMultipart.Base can throw exception on no boundary
* at Content-Type header.
* @throws IOException if some problem inside
*/
@Test(expected = IOException.class)
public void throwsExceptionOnNoBoundaryAtContentTypeHeader()
throws IOException {
new RqMultipart.Base(
new RqFake(
Arrays.asList(
"POST /h?s=3 HTTP/1.1",
"Host: wwo.example.com",
"Content-Type: multipart/form-data; boundaryAaB03x",
"Content-Length: 100005"
),
""
)
);
}
/**
* RqMultipart.Base can throw exception on invalid Content-Type header.
* @throws IOException if some problem inside
*/
@Test(expected = IOException.class)
public void throwsExceptionOnInvalidContentTypeHeader() throws IOException {
new RqMultipart.Base(
new RqFake(
Arrays.asList(
"POST /h?r=3 HTTP/1.1",
"Host: www.example.com",
"Content-Type: multipart; boundary=AaB03x",
"Content-Length: 100004"
),
""
)
);
}
/**
* RqMultipart.Base can parse http body.
* @throws IOException If some problem inside
*/
@Test
public void parsesHttpBody() throws IOException {
final RqMultipart multi = new RqMultipart.Fake(
new RqFake(),
new RqWithHeader(
new RqFake("", "", "40 N Wolfe Rd, Sunnyvale, CA 94085"),
DISPOSITION, "form-data; name=\"t4\""
),
new RqWithHeader(
new RqFake("", "", ""),
DISPOSITION,
"form-data; name=\"data\"; filename=\"a.bin\""
)
);
MatcherAssert.assertThat(
new RqHeaders.Base(
multi.part("t4").iterator().next()
).header(DISPOSITION),
Matchers.hasItem("form-data; name=\"t4\"")
);
MatcherAssert.assertThat(
new RqPrint(
new RqHeaders.Base(
multi.part("t4").iterator().next()
)
).printBody(),
Matchers.allOf(
Matchers.startsWith("40 N"),
Matchers.endsWith("CA 94085")
)
);
}
/**
* RqMultipart.Fake can return empty iterator on invalid part request.
* @throws IOException If some problem inside
*/
@Test
public void returnsEmptyIteratorOnInvalidPartRequest() throws IOException {
final RqMultipart multi = new RqMultipart.Fake(
new RqFake(),
new RqWithHeader(
new RqFake("", "", "443 N Wolfe Rd, Sunnyvale, CA 94085"),
DISPOSITION, "form-data; name=\"t5\""
),
new RqWithHeader(
new RqFake("", "", ""),
DISPOSITION,
"form-data; name=\"data\"; filename=\"a.zip\""
)
);
MatcherAssert.assertThat(
multi.part("fake").iterator().hasNext(),
Matchers.is(false)
);
}
/**
* RqMultipart.Fake can return correct name set.
* @throws IOException If some problem inside
*/
@Test
public void returnsCorrectNamesSet() throws IOException {
final RqMultipart multi = new RqMultipart.Fake(
new RqFake(),
new RqWithHeader(
new RqFake("", "", "441 N Wolfe Rd, Sunnyvale, CA 94085"),
DISPOSITION, "form-data; name=\"address\""
),
new RqWithHeader(
new RqFake("", "", ""),
DISPOSITION,
"form-data; name=\"data\"; filename=\"a.bin\""
)
);
MatcherAssert.assertThat(
multi.names(),
Matchers.<Iterable<String>>equalTo(
new HashSet<String>(Arrays.asList("address", "data"))
)
);
}
/**
* RqMultipart.Base can return correct part length.
* @throws IOException If some problem inside
*/
@Test
public void returnsCorrectPartLength() throws IOException {
final int length = 5000;
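        // Multipart framing: each part starts with "--<boundary>" and the body is
        // terminated by the closing delimiter "--<boundary>--" (RFC 2046).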
final Request req = new RqFake(
Arrays.asList(
"POST /post?u=3 HTTP/1.1",
"Host: www.example.com",
"Content-Type: multipart/form-data; boundary=zzz"
),
Joiner.on(RqMultipartTest.CRLF).join(
"--zzz",
"Content-Disposition: form-data; name=\"x-1\"",
"",
StringUtils.repeat("X", length),
"--zzz
)
);
MatcherAssert.assertThat(
new RqMultipart.Smart(
new RqMultipart.Base(req)
).single("x-1").body().available(),
Matchers.equalTo(length)
);
}
/**
* RqMultipart.Base can work in integration mode.
* @throws IOException if some problem inside
*/
@Test
public void consumesHttpRequest() throws IOException {
final Take take = new Take() {
@Override
public Response act(final Request req) throws IOException {
return new RsText(
new RqPrint(
new RqMultipart.Smart(
new RqMultipart.Base(req)
).single("f-1")
).printBody()
);
}
};
new FtRemote(take).exec(
// @checkstyle AnonInnerLengthCheck (50 lines)
new FtRemote.Script() {
@Override
public void exec(final URI home) throws IOException {
new JdkRequest(home)
.method("POST")
.header(
"Content-Type",
"multipart/form-data; boundary=AaB0zz"
)
.body()
.set(
Joiner.on(RqMultipartTest.CRLF).join(
"--AaB0zz",
"Content-Disposition: form-data; name=\"f-1\"",
"",
"my picture",
"--AaB0zz
)
)
.back()
.fetch()
.as(RestResponse.class)
.assertStatus(HttpURLConnection.HTTP_OK)
.assertBody(Matchers.containsString("pic"));
}
}
);
}
/**
* RqMultipart.Base can handle a big request in an acceptable time.
* @throws IOException If some problem inside
* @checkstyle MagicNumberCheck (2 lines)
*/
@Test(timeout = 10000)
public void handlesRequestInTime() throws IOException {
final int length = 100000000;
final File temp = File.createTempFile("handlesRequestInTime", ".tmp");
final BufferedWriter bwr = new BufferedWriter(new FileWriter(temp));
bwr.write(
Joiner.on(RqMultipartTest.CRLF).join(
"--zzz",
"Content-Disposition: form-data; name=\"test\"",
"",
""
)
);
for (int ind = 0; ind < length; ++ind) {
bwr.write("X");
}
bwr.write(RqMultipartTest.CRLF);
bwr.write("--zzz
bwr.write(RqMultipartTest.CRLF);
bwr.close();
final long start = System.currentTimeMillis();
final Request req = new RqFake(
Arrays.asList(
"POST /post?u=3 HTTP/1.1",
"Host: example.com",
"Content-Type: multipart/form-data; boundary=zzz"
),
new FileInputStream(temp)
);
MatcherAssert.assertThat(
new RqMultipart.Smart(
new RqMultipart.Base(req)
).single("test").body().available(),
Matchers.equalTo(length)
);
MatcherAssert.assertThat(
System.currentTimeMillis() - start,
//@checkstyle MagicNumberCheck (1 line)
Matchers.lessThan(3000L)
);
temp.delete();
}
/**
* RqMultipart.Base doesn't distort the content.
* @throws IOException If some problem inside
* @checkstyle MagicNumberCheck (3 lines)
*/
@Test(timeout = 10000)
public void notDistortContent() throws IOException {
final int length = 1000000;
final File temp = File.createTempFile("notDistortContent", ".tmp");
final BufferedWriter bwr = new BufferedWriter(new FileWriter(temp));
final byte byt = 0x7f;
bwr.write(
Joiner.on(RqMultipartTest.CRLF).join(
"--zzz1",
"Content-Disposition: form-data; name=\"test1\"",
"",
""
)
);
for (int idx = 0; idx < length; ++idx) {
// @checkstyle MagicNumberCheck (1 line)
bwr.write(idx % byt);
}
bwr.write(RqMultipartTest.CRLF);
bwr.write("--zzz1
bwr.write(RqMultipartTest.CRLF);
bwr.close();
final Request req = new RqFake(
Arrays.asList(
"POST /post?u=3 HTTP/1.1",
"Host: exampl.com",
"Content-Type: multipart/form-data; boundary=zzz1"
),
new FileInputStream(temp)
);
final InputStream stream = new RqMultipart.Smart(
new RqMultipart.Base(req)
).single("test1").body();
MatcherAssert.assertThat(
stream.available(),
Matchers.equalTo(length)
);
for (int idx = 0; idx < length; ++idx) {
MatcherAssert.assertThat(
String.format("byte %d not matched", idx),
stream.read(),
Matchers.equalTo(idx % byt)
);
}
temp.delete();
}
}
|
package org.antlr.intellij.plugin;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.ExternalAnnotator;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import org.antlr.intellij.plugin.parsing.RunANTLROnGrammarFile;
import org.antlr.intellij.plugin.psi.MyPsiUtils;
import org.antlr.runtime.ANTLRReaderStream;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.Token;
import org.antlr.v4.Tool;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.GrammarSyntaxMessage;
import org.antlr.v4.tool.LeftRecursionCyclesMessage;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.ToolMessage;
import org.antlr.v4.tool.ast.GrammarAST;
import org.antlr.v4.tool.ast.GrammarRootAST;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.stringtemplate.v4.ST;
import java.io.File;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
public class ANTLRv4ExternalAnnotator extends ExternalAnnotator<PsiFile, List<ANTLRv4ExternalAnnotator.Issue>> {
// NOTE: can't use instance var as only 1 instance
public static final Logger LOG = Logger.getInstance("ANTLR ANTLRv4ExternalAnnotator");
public static class Issue {
String annotation;
List<Token> offendingTokens = new ArrayList<Token>();
ANTLRMessage msg;
public Issue(ANTLRMessage msg) { this.msg = msg; }
}
/** Called first; return file; idea 12 */
@Nullable
public PsiFile collectionInformation(@NotNull PsiFile file) {
LOG.info("collectionInformation "+file.getVirtualFile());
return file;
}
/** Called first; return file; idea 13; can't use @Override */
@Nullable
public PsiFile collectInformation(@NotNull PsiFile file) {
LOG.info("collectionInformation "+file.getVirtualFile());
return file;
}
/** Called 2nd; run antlr on file */
@Nullable
@Override
public List<ANTLRv4ExternalAnnotator.Issue> doAnnotate(final PsiFile file) {
String fileContents = file.getText();
List<String> args = RunANTLROnGrammarFile.getANTLRArgsAsList(file.getProject(), file.getVirtualFile());
final Tool antlr = new Tool(args.toArray(new String[args.size()]));
if ( !args.contains("-lib") ) {
// getContainingDirectory() must be identified as a read operation on file system
ApplicationManager.getApplication().runReadAction(new Runnable() {
@Override
public void run() {
antlr.libDirectory = file.getContainingDirectory().toString();
}
});
}
final FindVocabFileRunnable findVocabAction = new FindVocabFileRunnable(file);
ApplicationManager.getApplication().runReadAction(findVocabAction);
if ( findVocabAction.vocabName!=null ) { // need to generate other file?
// for now, just turn off undef token warnings
}
antlr.removeListeners();
AnnotatorToolListener listener = new AnnotatorToolListener(findVocabAction.vocabName);
antlr.addListener(listener);
try {
StringReader sr = new StringReader(fileContents);
ANTLRReaderStream in = new ANTLRReaderStream(sr);
in.name = file.getName();
GrammarRootAST ast = antlr.parse(file.getName(), in);
if ( ast==null || ast.hasErrors ) return Collections.emptyList();
Grammar g = antlr.createGrammar(ast);
VirtualFile vfile = file.getVirtualFile();
if ( vfile==null ) {
LOG.error("doAnnotate no virtual file for "+file);
return listener.issues;
}
g.fileName = vfile.getPath();
antlr.process(g, false);
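			// the second argument presumably disables code generation; only analysis
			// (and its error reporting) is wanted here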
for (Issue issue : listener.issues) {
processIssue(file, issue);
}
}
catch (Exception e) {
LOG.error("antlr can't process "+file.getName(), e);
}
return listener.issues;
}
/** Called 3rd */
@Override
public void apply(@NotNull PsiFile file,
List<ANTLRv4ExternalAnnotator.Issue> issues,
@NotNull AnnotationHolder holder)
{
for (int i = 0; i < issues.size(); i++) {
Issue issue = issues.get(i);
for (int j = 0; j < issue.offendingTokens.size(); j++) {
Token t = issue.offendingTokens.get(j);
if ( t instanceof CommonToken ) {
CommonToken ct = (CommonToken)t;
int startIndex = ct.getStartIndex();
int stopIndex = ct.getStopIndex();
TextRange range = new TextRange(startIndex, stopIndex + 1);
switch (issue.msg.getErrorType().severity) {
case ERROR:
case ERROR_ONE_OFF:
case FATAL:
holder.createErrorAnnotation(range, issue.annotation);
break;
case WARNING:
holder.createWarningAnnotation(range, issue.annotation);
break;
case WARNING_ONE_OFF:
case INFO:
							holder.createWeakWarningAnnotation(range, issue.annotation);
							break;
						default:
							break;
}
}
}
}
super.apply(file, issues, holder);
}
public void processIssue(final PsiFile file, Issue issue) {
File grammarFile = new File(file.getVirtualFile().getPath());
File issueFile = new File(issue.msg.fileName);
if ( !grammarFile.getName().equals(issueFile.getName()) ) {
return; // ignore errors from external files
}
if ( issue.msg instanceof GrammarSemanticsMessage ) {
Token t = ((GrammarSemanticsMessage)issue.msg).offendingToken;
issue.offendingTokens.add(t);
}
else if ( issue.msg instanceof LeftRecursionCyclesMessage ) {
List<String> rulesToHighlight = new ArrayList<String>();
LeftRecursionCyclesMessage lmsg = (LeftRecursionCyclesMessage)issue.msg;
Collection<? extends Collection<Rule>> cycles =
(Collection<? extends Collection<Rule>>)lmsg.getArgs()[0];
for (Collection<Rule> cycle : cycles) {
for (Rule r : cycle) {
rulesToHighlight.add(r.name);
GrammarAST nameNode = (GrammarAST)r.ast.getChild(0);
issue.offendingTokens.add(nameNode.getToken());
}
}
}
else if ( issue.msg instanceof GrammarSyntaxMessage ) {
Token t = issue.msg.offendingToken;
issue.offendingTokens.add(t);
}
else if ( issue.msg instanceof ToolMessage ) {
issue.offendingTokens.add(issue.msg.offendingToken);
}
Tool antlr = new Tool();
ST msgST = antlr.errMgr.getMessageTemplate(issue.msg);
String outputMsg = msgST.render();
if (antlr.errMgr.formatWantsSingleLineMessage()) {
outputMsg = outputMsg.replace('\n', ' ');
}
issue.annotation = outputMsg;
}
protected static class FindVocabFileRunnable implements Runnable {
public String vocabName;
private final PsiFile file;
public FindVocabFileRunnable(PsiFile file) {
this.file = file;
}
@Override
public void run() {
vocabName = MyPsiUtils.findTokenVocabIfAny((ANTLRv4FileRoot) file);
}
}
}
|
package rocks.matchmaker;
import example.ast.FilterNode;
import example.ast.ProjectNode;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.stream.Collectors.toList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static rocks.matchmaker.Capture.newCapture;
import static rocks.matchmaker.Extractor.assuming;
import static rocks.matchmaker.Matcher.any;
import static rocks.matchmaker.Matcher.match;
import static rocks.matchmaker.Property.$;
@SuppressWarnings("WeakerAccess")
public class MatcherTest {
Matcher<ProjectNode> Project = match(ProjectNode.class);
Property<ProjectNode> source = $(ProjectNode::getSource);
Matcher<FilterNode> Filter = match(FilterNode.class);
@Test
void trivial_matchers() {
//any
assertMatch(any(), 42);
assertMatch(any(), "John Doe");
//class based
assertMatch(match(Integer.class), 42);
assertMatch(match(Number.class), 42);
assertNoMatch(match(Integer.class), "John Doe");
//predicate-based
assertMatch(match(Integer.class, (x1) -> x1 > 0), 42);
assertNoMatch(match(Integer.class, (x) -> x > 0), -1);
}
@Test
void match_object() {
assertMatch(Project, new ProjectNode(null));
assertNoMatch(Project, new FilterNode(null));
}
@Test
void property_matchers() {
PropertyMatcher<String, Integer> lengthOne = $(String::length).matching(match(Integer.class, (x) -> x == 1));
assertMatch(match(String.class).with(lengthOne), "a");
assertNoMatch(match(String.class).with(lengthOne), "aa");
}
@Test
void match_nested_properties() {
Matcher<ProjectNode> matcher = Project
.with($(ProjectNode::getSource).matching(Filter));
assertMatch(matcher, new ProjectNode(new FilterNode(null)));
assertNoMatch(matcher, new FilterNode(null));
assertNoMatch(matcher, new ProjectNode(null));
assertNoMatch(matcher, new ProjectNode(new ProjectNode(null)));
}
@Test
void capturing_matches_in_a_typesafe_manner() {
Capture<ProjectNode> child = newCapture();
Capture<FilterNode> filter = newCapture();
Matcher<ProjectNode> matcher = Project
.with(source.matching(Project.as(child)
.with(source.matching(Filter.as(filter)))));
ProjectNode tree = new ProjectNode(new ProjectNode(new FilterNode(null)));
Match<ProjectNode> match = assertMatch(matcher, tree);
//notice the concrete type despite no casts:
ProjectNode capturedChild = match.capture(child);
assertEquals(tree.getSource(), capturedChild);
assertEquals(((ProjectNode) tree.getSource()).getSource(), match.capture(filter));
}
@Test
void evidence_backed_matching_using_extractors() {
Matcher<List<String>> stringWithVowels = match(assuming(String.class, (x) -> {
Stream<String> characters = x.chars().mapToObj(c -> String.valueOf((char) c));
List<String> vowels = characters.filter(c -> "aeiouy".contains(c.toLowerCase())).collect(toList());
return Match.of(vowels).filter(l -> !l.isEmpty());
}));
Capture<List<String>> vowels = newCapture();
Match<List<String>> match = assertMatch(stringWithVowels.as(vowels), "John Doe", asList("o", "o", "e"));
assertEquals(match.value(), match.capture(vowels));
assertNoMatch(stringWithVowels, "pqrst");
}
@Test
void no_match_means_no_captures() {
Capture<Void> impossible = newCapture();
Matcher<Void> matcher = match(Void.class).as(impossible);
Match<Void> match = matcher.match(42);
assertTrue(match.isEmpty());
Throwable throwable = assertThrows(NoSuchElementException.class, () -> match.capture(impossible));
assertTrue(() -> throwable.getMessage().contains("empty match"));
}
@Test
void unknown_capture_is_an_error() {
Matcher<?> matcher = any();
Capture<?> unknownCapture = newCapture();
Match<?> match = matcher.match(42);
Throwable throwable = assertThrows(IllegalArgumentException.class, () -> match.capture(unknownCapture));
assertTrue(() -> throwable.getMessage().contains("This capture is unknown to this matcher"));
//TODO make the error message indicate which capture was used, once captures are human-discernible.
}
@Test
void null_not_matched_by_default() {
assertNoMatch(any(), null);
assertNoMatch(match(Integer.class), null);
//nulls can be matched using a custom extractor for now
Extractor<Object> nullAcceptingExtractor = (x) -> Match.of(x);
assertMatch(match(nullAcceptingExtractor), null);
}
private <T> Match<T> assertMatch(Matcher<T> matcher, T expectedMatch) {
return assertMatch(matcher, expectedMatch, expectedMatch);
}
private <T, R> Match<R> assertMatch(Matcher<R> matcher, T matchedAgainst, R expectedMatch) {
Match<R> match = matcher.match(matchedAgainst);
assertEquals(expectedMatch, match.value());
return match;
}
private <T> void assertNoMatch(Matcher<T> matcher, Object expectedNoMatch) {
Match<T> match = matcher.match(expectedNoMatch);
assertEquals(Match.empty(), match);
}
}
|
package uk.me.sa.cursus.app;
import static eu.lp0.cursus.db.data.Gender.FEMALE;
import static eu.lp0.cursus.db.data.Gender.MALE;
import static uk.me.sa.cursus.app.MainTest.Country.ARMBONIA;
import static uk.me.sa.cursus.app.MainTest.Country.EARBONIA;
import static uk.me.sa.cursus.app.MainTest.Country.ELBONIA;
import static uk.me.sa.cursus.app.MainTest.Country.KNEEBONIA;
import static uk.me.sa.cursus.app.MainTest.Country.NORTH_LEGBONIA;
import static uk.me.sa.cursus.app.MainTest.Country.SOUTH_LEGBONIA;
import static uk.me.sa.cursus.app.MainTest.Country.TOEBONIA;
import java.sql.SQLException;
import java.util.Arrays;
import com.google.common.base.CaseFormat;
import eu.lp0.cursus.app.Main;
import eu.lp0.cursus.db.Database;
import eu.lp0.cursus.db.DatabaseSession;
import eu.lp0.cursus.db.InvalidDatabaseException;
import eu.lp0.cursus.db.dao.ClassDAO;
import eu.lp0.cursus.db.dao.CursusDAO;
import eu.lp0.cursus.db.dao.EventDAO;
import eu.lp0.cursus.db.dao.PilotDAO;
import eu.lp0.cursus.db.dao.RaceAttendeeDAO;
import eu.lp0.cursus.db.dao.RaceDAO;
import eu.lp0.cursus.db.dao.RaceNumberDAO;
import eu.lp0.cursus.db.dao.SeriesDAO;
import eu.lp0.cursus.db.data.Class;
import eu.lp0.cursus.db.data.Gender;
import eu.lp0.cursus.db.data.Pilot;
import eu.lp0.cursus.db.data.RaceNumber;
import eu.lp0.cursus.db.data.Series;
import eu.lp0.cursus.util.Background;
import eu.lp0.cursus.util.Messages;
public class MainTest extends Main {
private static final ClassDAO classDAO = new ClassDAO();
@SuppressWarnings("unused")
private static final CursusDAO cursusDAO = new CursusDAO();
@SuppressWarnings("unused")
private static final EventDAO eventDAO = new EventDAO();
private static final PilotDAO pilotDAO = new PilotDAO();
@SuppressWarnings("unused")
private static final RaceDAO raceDAO = new RaceDAO();
@SuppressWarnings("unused")
private static final RaceAttendeeDAO raceAttendeeDAO = new RaceAttendeeDAO();
@SuppressWarnings("unused")
private static final RaceNumberDAO raceNumberDAO = new RaceNumberDAO();
private static final SeriesDAO seriesDAO = new SeriesDAO();
public enum Country {
ELBONIA ("ELB"), //$NON-NLS-1$
KNEEBONIA ("K"), //$NON-NLS-1$
NORTH_LEGBONIA ("NL"), //$NON-NLS-1$
SOUTH_LEGBONIA ("SL"), //$NON-NLS-1$
EARBONIA ("H"), //$NON-NLS-1$
ARMBONIA ("A"), //$NON-NLS-1$
TOEBONIA ("TO"); //$NON-NLS-1$
public final String org;
public final String name;
private Country(String org) {
this.org = org;
this.name = CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, name()).replaceAll("([A-Z])", " $1"); //$NON-NLS-1$ //$NON-NLS-2$
}
}
public static void main(String[] args) {
Background.execute(new MainTest(args));
}
public MainTest(String[] args) {
super(args);
}
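// Seeds the otherwise-empty database with a fixed set of sample classes and pilots.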
@Override
protected Database createEmptyDatabase() throws InvalidDatabaseException, SQLException {
Database db = super.createEmptyDatabase();
db.startSession();
try {
DatabaseSession.begin();
Series series = seriesDAO.find(Messages.getString(Database.UNTITLED_SERIES));
Class class1 = makeClass(series, "Class 1"); //$NON-NLS-1$
Class class2 = makeClass(series, "Class 2"); //$NON-NLS-1$
Class class3 = makeClass(series, "Class 3"); //$NON-NLS-1$
Class class4 = makeClass(series, "Class 4"); //$NON-NLS-1$
Class class5 = makeClass(series, "Class 5"); //$NON-NLS-1$
@SuppressWarnings("unused")
Class class6 = makeClass(series, "Class 6"); //$NON-NLS-1$
@SuppressWarnings("unused")
Class class7 = makeClass(series, "Class 7"); //$NON-NLS-1$
@SuppressWarnings("unused")
Class class8 = makeClass(series, "Class 8"); //$NON-NLS-1$
@SuppressWarnings("unused")
Class class9 = makeClass(series, "Class 9"); //$NON-NLS-1$
Class classA = makeClass(series, "Class A"); //$NON-NLS-1$
Class classB = makeClass(series, "Class B"); //$NON-NLS-1$
Class classC = makeClass(series, "Class C"); //$NON-NLS-1$
Class classD = makeClass(series, "Class D"); //$NON-NLS-1$
Class classE = makeClass(series, "Class E"); //$NON-NLS-1$
makePilot(series, "Alice", FEMALE, ELBONIA, ELBONIA, 1, class1, classA); //$NON-NLS-1$
makePilot(series, "Arthur", MALE, EARBONIA, EARBONIA, 69, class1, classB); //$NON-NLS-1$
makePilot(series, "Bob", MALE, KNEEBONIA, KNEEBONIA, 2, class1, classA); //$NON-NLS-1$
makePilot(series, "Carol", FEMALE, ELBONIA, KNEEBONIA, 1, class1, classB); //$NON-NLS-1$
makePilot(series, "Charlie", MALE, KNEEBONIA, ELBONIA, 2, class1, classC); //$NON-NLS-1$
makePilot(series, "Carlos", MALE, ELBONIA, KNEEBONIA, 3, class1, classC); //$NON-NLS-1$
makePilot(series, "Chuck", MALE, KNEEBONIA, ELBONIA, 3, class2, classD); //$NON-NLS-1$
makePilot(series, "Dave", MALE, NORTH_LEGBONIA, NORTH_LEGBONIA, 30, class2, classE); //$NON-NLS-1$
makePilot(series, "Dan", MALE, NORTH_LEGBONIA, NORTH_LEGBONIA, 50, class2, classE); //$NON-NLS-1$
makePilot(series, "Eve", FEMALE, SOUTH_LEGBONIA, SOUTH_LEGBONIA, 400, class2, classD); //$NON-NLS-1$
makePilot(series, "John", MALE, TOEBONIA, TOEBONIA, 38, class3, classA); //$NON-NLS-1$
makePilot(series, "Jane", FEMALE, TOEBONIA, TOEBONIA, 26, class4, classB); //$NON-NLS-1$
makePilot(series, "Mallory", MALE, SOUTH_LEGBONIA, SOUTH_LEGBONIA, 406, class3, classB); //$NON-NLS-1$
makePilot(series, "Merlin", MALE, ARMBONIA, ARMBONIA, 1, class4, classA); //$NON-NLS-1$
makePilot(series, "Peggy", FEMALE, SOUTH_LEGBONIA, SOUTH_LEGBONIA, 401, class3, classA); //$NON-NLS-1$
makePilot(series, "Trent", MALE, EARBONIA, EARBONIA, 77, class4, classB); //$NON-NLS-1$
makePilot(series, "Trudy", FEMALE, EARBONIA, EARBONIA, 84, class3, classB); //$NON-NLS-1$
makePilot(series, "Victor", MALE, EARBONIA, EARBONIA, 91, class4, classA); //$NON-NLS-1$
makePilot(series, "Walter", MALE, EARBONIA, EARBONIA, 4, class5); //$NON-NLS-1$
DatabaseSession.commit();
} finally {
db.endSession();
}
return db;
}
private Pilot makePilot(Series series, String name, Gender gender, Country country, Country org, int raceNo, Class... classes) {
Pilot pilot = new Pilot(series, name, gender, country.name);
pilot.setRaceNumber(new RaceNumber(pilot, org.org, raceNo));
if (classes != null) {
pilot.getClasses().addAll(Arrays.asList(classes));
}
pilotDAO.persist(pilot);
return pilot;
}
private Class makeClass(Series series, String name) {
Class cls = new Class(series, name);
classDAO.persist(cls);
return cls;
}
}
|
package net.wendal.nutzbook.module;
import static net.wendal.nutzbook.util.RedisInterceptor.jedis;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import javax.servlet.http.HttpServletResponse;
import org.apache.shiro.authz.annotation.RequiresUser;
import org.nutz.dao.Cnd;
import org.nutz.ioc.aop.Aop;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.lang.Encoding;
import org.nutz.lang.Strings;
import org.nutz.lang.random.R;
import org.nutz.mvc.Scope;
import org.nutz.mvc.annotation.At;
import org.nutz.mvc.annotation.Attr;
import org.nutz.mvc.annotation.Ok;
import org.nutz.mvc.view.RawView;
import org.nutz.mvc.view.ViewWrapper;
import net.wendal.nutzbook.bean.OAuthUser;
import net.wendal.nutzbook.bean.User;
@IocBean
@At("/ngrok")
public class NgrokModule extends BaseModule {
@RequiresUser
@At
@Ok("->:/yvr/links/ngrok")
public Object me(@Attr(scope = Scope.SESSION, value = "me") int userId) {
String token = getAuthToken(userId);
if (token == null) {
return new ViewWrapper(new RawView(null), ",github");
}
return null;
}
@RequiresUser
@At("/config/download")
@Ok("raw:xml")
public Object getConfigureFile(@Attr(scope = Scope.SESSION, value = "me") int userId, HttpServletResponse resp) throws UnsupportedEncodingException {
String token = getAuthToken(userId);
if (token == null) {
return HTTP_403;
}
String filename = URLEncoder.encode("ngrok.yml", Encoding.UTF8);
resp.setHeader("Content-Disposition", "attachment; filename=\"" + filename + "\"");
String[] lines = new String[]{
"server_addr: nutz.cn:4443",
"trust_host_root_certs: true",
"auth_token: " + token,
""
};
return Strings.join("\r\n", lines);
}
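// Looks up the user's ngrok auth token in Redis, lazily generating and caching one on
// first use. Only users linked to a GitHub OAuth account (or the first two user ids)
// are issued a token.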
@Aop("redis")
public String getAuthToken(int userId) {
int count = dao.count(OAuthUser.class, Cnd.where("providerId", "=", "github").and("userId", "=", userId));
if (count != 1 && userId > 2) {
return null;
}
User user = dao.fetch(User.class, userId);
String token = jedis().hget("ngrok2", ""+userId);
if (token == null) {
token = R.UU32();
jedis().hset("ngrok2", ""+userId, token);
jedis().hset("ngrok", token, user.getName() + ".ngrok");
}
return token;
}
}
|
package arekkuusu.solar.common.block.tile;
import arekkuusu.solar.api.entanglement.relativity.IRelativeTile;
import arekkuusu.solar.api.entanglement.relativity.RelativityHandler;
import arekkuusu.solar.common.block.BlockQelaion;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import net.minecraft.block.state.IBlockState;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraftforge.common.capabilities.Capability;
import javax.annotation.Nullable;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
public class TileQelaion extends TileRelativeBase {
private List<EnumFacing> facings = Lists.newArrayList();
private int facingIndex;
private UUID nodes;
private int nodeIndex;
@Override
public boolean hasCapability(Capability<?> capability, @Nullable EnumFacing facing) {
markDirty();
return hasAccess(capability, facing, 0);
}
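// Capability lookup order: try each configured facing first, then fall back to the
// entangled node list (round-robin via nodeIndex); the level counter caps recursion
// so chained Qelaion tiles cannot loop forever.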
public boolean hasAccess(Capability<?> capability, @Nullable EnumFacing from, int level) {
if(level < 0 || level++ > 4) return false; // Cap recursion depth so chained Qelaion tiles cannot loop forever.
if(from != null && facings.contains(from.getOpposite())) return false;
ImmutableList<TileQelaion> nodes;
if(facingIndex < facings.size()) {
return hasFacing(capability, facingIndex, level);
} else if((nodes = getNodeList()).isEmpty() && !facings.isEmpty()) {
return hasFacing(capability, 0, level);
} else if(!nodes.isEmpty()) {
if(nodeIndex + 1 > nodes.size()) nodeIndex = 0;
if(!nodes.get(nodeIndex).hasAccess(capability, null, level)) {
nodeIndex++;
return false;
}
return true;
}
return super.hasCapability(capability, from);
}
private boolean hasFacing(Capability<?> capability, int index, int level) {
if(fromFacing(capability, index, level) == null) {
if(++facingIndex > facings.size()) {
facingIndex = 0;
}
return false;
} else {
return true;
}
}
@Nullable
@Override
public <T> T getCapability(Capability<T> capability, @Nullable EnumFacing facing) {
markDirty();
return access(capability, facing, 0);
}
@Nullable
public <T> T access(Capability<T> capability, @Nullable EnumFacing from, int level) {
if(level < 0 || level++ > 4) return null; // Cap recursion depth so chained Qelaion tiles cannot loop forever.
if(from != null && facings.contains(from.getOpposite())) return null;
ImmutableList<TileQelaion> nodes;
if(facingIndex < facings.size()) {
return fromFacing(capability, facingIndex++, level);
} else if((nodes = getNodeList()).isEmpty() && !facings.isEmpty()) {
facingIndex = 0;
return fromFacing(capability, facingIndex++, level);
} else if(!nodes.isEmpty()) {
if(nodeIndex + 1 > nodes.size()) nodeIndex = 0;
facingIndex = 0;
return nodes.get(nodeIndex++).access(capability, null, level);
}
return super.getCapability(capability, from);
}
@Nullable
private <T> T fromFacing(Capability<T> capability, int index, int level) {
EnumFacing facing = facings.get(index);
BlockPos pos = getPos().offset(facing);
IBlockState state = world.getBlockState(pos);
if(state.getBlock().hasTileEntity(state)) {
TileEntity tile = world.getTileEntity(pos);
if(tile != null) {
return tile instanceof TileQelaion ?
((TileQelaion) tile).access(capability, facing, level)
: tile.getCapability(capability, facing.getOpposite());
}
}
return null;
}
public ImmutableList<TileQelaion> getNodeList() {
return nodes != null ? ImmutableList.copyOf(
RelativityHandler.getRelatives(nodes).stream()
.filter(IRelativeTile::isLoaded)
.map(n -> (TileQelaion) n)
.collect(Collectors.toList())
) : ImmutableList.of();
}
@Nullable
public UUID getNodes() {
return nodes;
}
public void setNodes(@Nullable UUID nodes) {
IBlockState state = world.getBlockState(getPos());
world.setBlockState(getPos(), state.withProperty(BlockQelaion.HAS_NODE, nodes != null));
this.nodes = nodes;
markDirty();
}
public ImmutableList<EnumFacing> getFacings() {
return ImmutableList.copyOf(facings);
}
public void putFacing(EnumFacing facing) {
if(facings.contains(facing)) {
facings.remove(facing);
} else facings.add(facing);
updatePosition(world, getPos());
markDirty();
}
@Override
void readNBT(NBTTagCompound compound) {
super.readNBT(compound);
if(compound.hasUniqueId("nodes")) {
nodes = compound.getUniqueId("nodes");
}
if(compound.hasKey("facingIndex")) {
facingIndex = compound.getInteger("facingIndex");
}
if(compound.hasKey("nodeIndex")) {
nodeIndex = compound.getInteger("nodeIndex");
}
facings.clear();
NBTTagList list = compound.getTagList("facings", 10);
for(int i = 0; i < list.tagCount(); i++) {
NBTTagCompound tag = list.getCompoundTagAt(i);
facings.add(EnumFacing.byName(tag.getString("facing")));
}
}
@Override
void writeNBT(NBTTagCompound compound) {
super.writeNBT(compound);
NBTTagList list = new NBTTagList();
facings.forEach(facing -> {
NBTTagCompound tag = new NBTTagCompound();
tag.setString("facing", facing.getName());
list.appendTag(tag);
});
compound.setTag("facings", list);
if(nodes != null) {
compound.setUniqueId("nodes", nodes);
}
compound.setInteger("facingIndex", facingIndex);
compound.setInteger("nodeIndex", nodeIndex);
}
}
|
package net.zephyrizing.http_server;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
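// Minimal blocking HTTP server: accepts one connection at a time, parses only the
// request line (method, path, protocol version) and always replies with a bare 200 OK.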
public class HttpServer {
public static void main(String[] args) throws Exception {
int portNumber;
if (args.length == 1) {
portNumber = Integer.parseInt(args[0]);
} else {
portNumber = 5000;
}
System.err.println("Starting server on port " + portNumber);
try (ServerSocket listenSocket = new ServerSocket(portNumber)) {
System.err.println("Listening for clients...");
while (true) {
try (Socket socket = listenSocket.accept();
PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
BufferedReader in = new BufferedReader(
new InputStreamReader(socket.getInputStream()));
BufferedReader stdIn = new BufferedReader(
new InputStreamReader(System.in))) {
System.err.println("Connected to client.");
String request = in.readLine();
if (request == null) {
continue; // client closed the connection before sending a request line
}
String[] params = request.split(" ");
assert(params.length == 3);
String method = params[0];
String path = params[1];
String protocolVersion = params[2];
System.out.format("Client requested to %s file %s over %s.\n",
method, path, protocolVersion);
out.format("%s 200 OK\r\n", protocolVersion);
}
}
}
}
}
|
package ch.uzh.csg.reimbursement.dto;
import lombok.Data;
@Data
public class SearchExpenseDto {
private String lastName;
private String role;
private String accountingText;
}
|
package com.KOIFish.FishStock.backend;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.KOIFish.FishStock.beans.FishStockUser;
@Component(value="facade")
public class FishStockFacade {
@Autowired
private FishStockUserDAO userDAO;
public void setUserDAO(FishStockUserDAO userDAO) { this.userDAO = userDAO; }
@Autowired
private FishStockSessionGiver sessionGiver;
public void setSessionGiver(FishStockSessionGiver sessionGiver) { this.sessionGiver = sessionGiver; }
public FishStockFacade() { super(); }
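// Each lookup opens a fresh Hibernate session, runs inside its own transaction,
// rolls back on a RuntimeException and always closes the session afterwards.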
public FishStockUser getUserByUsername(String username) {
Session session = null;
Transaction tx = null;
FishStockUser result = null;
try {
session = sessionGiver.getNewSession();
tx = session.beginTransaction();
result = userDAO.getUserByUsername(session, username);
tx.commit();
}
catch (RuntimeException e) {
if (tx != null) {
tx.rollback();
}
}
finally {
if (session != null) {
session.disconnect();
session.close();
}
}
return result;
}
public FishStockUser getUserById(Integer id) {
Session session = null;
Transaction tx = null;
FishStockUser result = null;
try {
session = sessionGiver.getNewSession();
tx = session.beginTransaction();
result = userDAO.getUserById(session, id);
tx.commit();
}
catch (RuntimeException e) {
if (tx != null) {
tx.rollback();
}
}
finally {
if (session != null) {
session.disconnect();
session.close();
}
}
return result;
}
}
|
package com.akiban.sql.optimizer.rule;
import com.akiban.ais.model.Column;
import com.akiban.server.error.UnsupportedSQLException;
import com.akiban.sql.optimizer.plan.*;
import com.akiban.sql.optimizer.plan.JoinNode.JoinType;
import com.akiban.server.expression.std.Comparison;
import com.akiban.ais.model.Group;
import com.akiban.ais.model.Join;
import com.akiban.ais.model.JoinColumn;
import com.akiban.ais.model.UserTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/** Use join conditions to identify which tables are part of the same group.
*/
public class GroupJoinFinder extends BaseRule
{
private static final Logger logger = LoggerFactory.getLogger(GroupJoinFinder.class);
@Override
protected Logger getLogger() {
return logger;
}
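// The passes run in this order: normalize WHERE conditions, find group joins,
// reorder inner joins, move join conditions back down, then isolate table groups.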
@Override
public void apply(PlanContext plan) {
List<JoinIsland> islands = new JoinIslandFinder().find(plan.getPlan());
moveAndNormalizeWhereConditions(islands);
findGroupJoins(islands);
reorderJoins(islands);
moveJoinConditions(islands);
isolateGroups(islands);
}
static class JoinIslandFinder implements PlanVisitor, ExpressionVisitor {
List<JoinIsland> result = new ArrayList<JoinIsland>();
public List<JoinIsland> find(PlanNode root) {
root.accept(this);
return result;
}
@Override
public boolean visitEnter(PlanNode n) {
return visit(n);
}
@Override
public boolean visitLeave(PlanNode n) {
return true;
}
@Override
public boolean visit(PlanNode n) {
if (n instanceof Joinable) {
Joinable joinable = (Joinable)n;
PlanWithInput output = joinable.getOutput();
if (!(output instanceof Joinable)) {
result.add(new JoinIsland(joinable, output));
}
}
return true;
}
@Override
public boolean visitEnter(ExpressionNode n) {
return visit(n);
}
@Override
public boolean visitLeave(ExpressionNode n) {
return true;
}
@Override
public boolean visit(ExpressionNode n) {
return true;
}
}
// A subtree of joins.
static class JoinIsland {
Joinable root;
PlanWithInput output;
ConditionList whereConditions;
List<TableGroupJoin> whereJoins;
public JoinIsland(Joinable root, PlanWithInput output) {
this.root = root;
this.output = output;
if (output instanceof Select)
whereConditions = ((Select)output).getConditions();
}
}
// First pass: find all the WHERE conditions above inner joins
// and move the inner joins' conditions up there, since they are equivalent.
// While there, normalize comparisons.
protected void moveAndNormalizeWhereConditions(List<JoinIsland> islands) {
for (JoinIsland island : islands) {
if (island.whereConditions != null) {
moveInnerJoinConditions(island.root, island.whereConditions);
normalizeColumnComparisons(island.whereConditions);
}
normalizeColumnComparisons(island.root);
}
}
// So long as there are INNER joins, move their conditions up to
// the top-level join.
protected void moveInnerJoinConditions(Joinable joinable,
ConditionList whereConditions) {
if (joinable.isInnerJoin()) {
JoinNode join = (JoinNode)joinable;
ConditionList joinConditions = join.getJoinConditions();
if (joinConditions != null) {
whereConditions.addAll(joinConditions);
joinConditions.clear();
}
moveInnerJoinConditions(join.getLeft(), whereConditions);
moveInnerJoinConditions(join.getRight(), whereConditions);
}
}
// Make comparisons involving a single column have
// the form <col> <op> <expr>, with the child on the left in the
// case of two columns, which is what we may then recognize as a
// group join.
protected void normalizeColumnComparisons(ConditionList conditions) {
if (conditions == null) return;
Collection<ConditionExpression> newExpressions = new ArrayList<ConditionExpression>();
for (Iterator<ConditionExpression> iterator = conditions.iterator(); iterator.hasNext(); ) {
ConditionExpression cond = iterator.next();
if (cond instanceof ComparisonCondition) {
ComparisonCondition ccond = (ComparisonCondition)cond;
ExpressionNode left = ccond.getLeft();
ExpressionNode right = ccond.getRight();
if (right.isColumn()) {
ColumnSource rightTable = ((ColumnExpression)right).getTable();
if (left.isColumn()) {
ColumnSource leftTable = ((ColumnExpression)left).getTable();
if (compareColumnSources(leftTable, rightTable) < 0) {
ccond.reverse();
}
}
else {
ccond.reverse();
}
boolean conditionIsObsolete = normalizeGroupJoinCondition(ccond, newExpressions);
if (conditionIsObsolete)
iterator.remove();
}
}
}
conditions.addAll(newExpressions);
}
private boolean normalizeGroupJoinCondition(ComparisonCondition ccond, Collection<? super ConditionExpression> out)
{
boolean conditionIsObsolete = false;
if (ccond.getOperation().equals(Comparison.EQ)) {
ExpressionNode leftRaw = ccond.getLeft();
ExpressionNode rightRaw = ccond.getRight();
if (leftRaw instanceof ColumnExpression && rightRaw instanceof ColumnExpression) {
ColumnExpression ccondLeft = (ColumnExpression) leftRaw;
ColumnExpression ccondRight = (ColumnExpression) rightRaw;
boolean conditionOnDifferentTables =
ccondLeft.getColumn().getUserTable() != ccondRight.getColumn().getUserTable();
for (ColumnExpression leftColExpr : ccondLeft.getEquivalentsPlusSelf()) {
for (ColumnExpression rightColExpr : ccondRight.getEquivalentsPlusSelf()) {
Column leftColumn = leftColExpr.getColumn();
Column rightColumn = rightColExpr.getColumn();
UserTable leftTable = leftColumn.getUserTable();
UserTable rightTable = rightColumn.getUserTable();
Join parentJoin = leftTable.getParentJoin();
if (parentJoin != null && parentJoin.getParent() != null
&& parentJoin.getParent().equals(rightTable))
{
// found a parent-child relationship
for (JoinColumn joinColumn : parentJoin.getJoinColumns()) {
Column parentCol = joinColumn.getParent();
Column childCol = joinColumn.getChild();
// look for a group join condition that isn't the original one
if (leftColumn.equals(childCol) && rightColumn.equals(parentCol)
&& !(leftColumn == ccondLeft.getColumn() && rightColumn == ccondRight.getColumn()))
{
// create a new comparison condition that's in canonical form
ComparisonCondition canonical = new ComparisonCondition(
Comparison.EQ,
leftColExpr,
rightColExpr,
ccond.getSQLtype(),
ccond.getSQLsource()
);
out.add(canonical);
conditionIsObsolete |= conditionOnDifferentTables;
logger.debug("rewriting {} as {}", ccond, canonical);
}
}
}
}
}
}
}
return conditionIsObsolete;
}
// Normalize join's conditions and any below it.
protected void normalizeColumnComparisons(Joinable joinable) {
if (joinable.isJoin()) {
JoinNode join = (JoinNode)joinable;
normalizeColumnComparisons(join.getJoinConditions());
normalizeColumnComparisons(join.getLeft());
normalizeColumnComparisons(join.getRight());
}
}
// Third pass: put adjacent inner joined tables together in
// left-deep ascending-ordinal order. E.g. (CO)I.
protected void reorderJoins(List<JoinIsland> islands) {
for (JoinIsland island : islands) {
Joinable nroot = reorderJoins(island.root);
if (island.root != nroot) {
island.output.replaceInput(island.root, nroot);
island.root = nroot;
}
}
}
protected Joinable reorderJoins(Joinable joinable) {
if (countInnerJoins(joinable) > 1) {
List<Joinable> joins = new ArrayList<Joinable>();
getInnerJoins(joinable, joins);
for (int i = 0; i < joins.size(); i++) {
joins.set(i, reorderJoins(joins.get(i)));
}
return orderInnerJoins(joins);
}
else if (joinable.isJoin()) {
JoinNode join = (JoinNode)joinable;
join.setLeft(reorderJoins(join.getLeft()));
join.setRight(reorderJoins(join.getRight()));
if (compareJoinables(join.getLeft(), join.getRight()) > 0)
join.reverse();
}
return joinable;
}
// Make inner joins into a tree of group-tree / non-table.
protected Joinable orderInnerJoins(List<Joinable> joinables) {
Map<TableGroup,List<TableSource>> groups =
new HashMap<TableGroup,List<TableSource>>();
List<Joinable> nonTables = new ArrayList<Joinable>();
for (Joinable joinable : joinables) {
if (joinable instanceof TableSource) {
TableSource table = (TableSource)joinable;
TableGroup group = table.getGroup();
List<TableSource> entry = groups.get(group);
if (entry == null) {
entry = new ArrayList<TableSource>();
groups.put(group, entry);
}
entry.add(table);
}
else
nonTables.add(joinable);
}
joinables.clear();
// Make order of groups predictable.
List<TableGroup> keys = new ArrayList<TableGroup>(groups.keySet());
Collections.sort(keys, tableGroupComparator);
for (TableGroup gkey : keys) {
List<TableSource> group = groups.get(gkey);
Collections.sort(group, tableSourceComparator);
joinables.add(constructLeftInnerJoins(group));
}
joinables.addAll(nonTables);
if (joinables.size() > 1)
return constructRightInnerJoins(joinables);
else
return joinables.get(0);
}
// Group flattening is left-recursive.
protected Joinable constructLeftInnerJoins(List<? extends Joinable> joinables) {
Joinable result = joinables.get(0);
for (int i = 1; i < joinables.size(); i++) {
result = new JoinNode(result, joinables.get(i), JoinType.INNER);
}
return result;
}
// Nested loop joins are right-recursive.
protected Joinable constructRightInnerJoins(List<? extends Joinable> joinables) {
int size = joinables.size();
Joinable result = joinables.get(--size);
while (size > 0) {
result = new JoinNode(joinables.get(--size), result, JoinType.INNER);
}
return result;
}
// Second pass: find join conditions corresponding to group joins.
protected void findGroupJoins(List<JoinIsland> islands) {
for (JoinIsland island : islands) {
List<TableGroupJoin> whereJoins = new ArrayList<TableGroupJoin>();
findGroupJoins(island.root, new ArrayDeque<JoinNode>(),
island.whereConditions, whereJoins);
island.whereJoins = whereJoins;
}
for (JoinIsland island : islands) {
findSingleGroups(island.root);
}
}
protected void findGroupJoins(Joinable joinable,
Deque<JoinNode> outputJoins,
ConditionList whereConditions,
List<TableGroupJoin> whereJoins) {
if (joinable.isTable()) {
TableSource table = (TableSource)joinable;
for (JoinNode output : outputJoins) {
ConditionList conditions = output.getJoinConditions();
TableGroupJoin tableJoin = findParentJoin(table, conditions);
if (tableJoin != null) {
output.setGroupJoin(tableJoin);
return;
}
}
TableGroupJoin tableJoin = findParentJoin(table, whereConditions);
if (tableJoin != null) {
whereJoins.add(tableJoin); // Position after reordering.
return;
}
}
else if (joinable.isJoin()) {
JoinNode join = (JoinNode)joinable;
Joinable right = join.getRight();
outputJoins.push(join);
if (join.isInnerJoin()) {
findGroupJoins(join.getLeft(), outputJoins, whereConditions, whereJoins);
findGroupJoins(join.getRight(), outputJoins, whereConditions, whereJoins);
}
else {
Deque<JoinNode> singleJoin = new ArrayDeque<JoinNode>(1);
singleJoin.push(join);
// In a LEFT OUTER JOIN, the outer half is allowed to
// take from higher conditions.
if (join.getJoinType() == JoinType.LEFT)
findGroupJoins(join.getLeft(), outputJoins, whereConditions, whereJoins);
else
findGroupJoins(join.getLeft(), singleJoin, null, null);
if (join.getJoinType() == JoinType.RIGHT)
findGroupJoins(join.getRight(), outputJoins, whereConditions, whereJoins);
else
findGroupJoins(join.getRight(), singleJoin, null, null);
}
outputJoins.pop();
}
}
// Find a condition among the given conditions that matches the
// parent join for the given table.
protected TableGroupJoin findParentJoin(TableSource childTable,
ConditionList conditions) {
if ((conditions == null) || conditions.isEmpty()) return null;
TableNode childNode = childTable.getTable();
Join groupJoin = childNode.getTable().getParentJoin();
if (groupJoin == null) return null;
TableNode parentNode = childNode.getTree().getNode(groupJoin.getParent());
if (parentNode == null) return null;
List<JoinColumn> joinColumns = groupJoin.getJoinColumns();
int ncols = joinColumns.size();
Map<TableSource,List<ComparisonCondition>> parentTables =
new HashMap<TableSource,List<ComparisonCondition>>();
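// parentTables maps each candidate parent table to the equality conditions matched
// so far, indexed by position in the group join's column list.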
for (ConditionExpression condition : conditions) {
if (condition instanceof ComparisonCondition) {
ComparisonCondition ccond = (ComparisonCondition)condition;
if (ccond.getOperation() == Comparison.EQ) {
ExpressionNode left = ccond.getLeft();
ExpressionNode right = ccond.getRight();
if (left.isColumn() && right.isColumn()) {
ColumnExpression lcol = (ColumnExpression)left;
ColumnExpression rcol = (ColumnExpression)right;
if (lcol.getTable() == childTable) {
ColumnSource rightSource = rcol.getTable();
if (rightSource instanceof TableSource) {
TableSource rightTable = (TableSource)rightSource;
if (rightTable.getTable() == parentNode) {
for (int i = 0; i < ncols; i++) {
JoinColumn joinColumn = joinColumns.get(i);
if ((joinColumn.getChild() == lcol.getColumn()) &&
(joinColumn.getParent() == rcol.getColumn())) {
List<ComparisonCondition> entry =
parentTables.get(rightTable);
if (entry == null) {
entry = new ArrayList<ComparisonCondition>(Collections.<ComparisonCondition>nCopies(ncols, null));
parentTables.put(rightTable, entry);
}
entry.set(i, ccond);
}
}
}
}
}
}
}
}
}
TableSource parentTable = null;
List<ComparisonCondition> groupJoinConditions = null;
for (Map.Entry<TableSource,List<ComparisonCondition>> entry : parentTables.entrySet()) {
boolean found = true;
for (ComparisonCondition elem : entry.getValue()) {
if (elem == null) {
found = false;
break;
}
}
if (found) {
if (parentTable == null) {
parentTable = entry.getKey();
groupJoinConditions = entry.getValue();
}
else {
// TODO: What we need is something
// earlier to decide that the primary
// keys are equated and so share the
// references somehow.
ConditionExpression c1 = groupJoinConditions.get(0);
ConditionExpression c2 = entry.getValue().get(0);
if (conditions.indexOf(c1) > conditions.indexOf(c2)) {
// Make the order predictable for tests.
ConditionExpression temp = c1;
c1 = c2;
c2 = temp;
}
throw new UnsupportedSQLException("Found two possible parent joins",
c2.getSQLsource());
}
}
}
if (parentTable == null) return null;
TableGroup group = parentTable.getGroup();
if (group == null) {
group = childTable.getGroup();
if (group == null)
group = new TableGroup(groupJoin.getGroup());
}
else if (childTable.getGroup() != null) {
group.merge(childTable.getGroup());
}
// TODO: Avoid duplicate group joins. Really, they should be
// recognized but only one allowed to Flatten and the other
// forced to use a nested loop, but still with BranchLookup.
for (TableSource otherChild : group.getTables()) {
if ((otherChild.getTable() == childTable.getTable()) &&
(otherChild != childTable))
return null;
}
return new TableGroupJoin(group, parentTable, childTable,
groupJoinConditions, groupJoin);
}
protected void findSingleGroups(Joinable joinable) {
if (joinable.isTable()) {
TableSource table = (TableSource)joinable;
if (table.getGroup() == null) {
table.setGroup(new TableGroup(table.getTable().getTable().getGroup()));
}
}
else if (joinable.isJoin()) {
JoinNode join = (JoinNode)joinable;
Joinable right = join.getRight();
findSingleGroups(join.getLeft());
findSingleGroups(join.getRight());
}
}
// Fourth pass: move the WHERE conditions back to their actual
// joins, which may be different from the ones they were on in the
// original query.
protected void moveJoinConditions(List<JoinIsland> islands) {
for (JoinIsland island : islands) {
if (!island.whereJoins.isEmpty())
moveJoinConditions(island.root, null,
island.whereConditions, island.whereJoins);
}
}
protected void moveJoinConditions(Joinable joinable, JoinNode output,
ConditionList whereConditions,
List<TableGroupJoin> whereJoins) {
if (joinable.isTable()) {
if (output != null) {
TableSource table = (TableSource)joinable;
TableGroupJoin tableJoin = table.getParentJoin();
if (whereJoins.contains(tableJoin)) {
output.setGroupJoin(tableJoin);
List<ComparisonCondition> joinConditions = tableJoin.getConditions();
// Move down from WHERE conditions to join conditions.
if (output.getJoinConditions() == null)
output.setJoinConditions(new ConditionList());
output.getJoinConditions().addAll(joinConditions);
whereConditions.removeAll(joinConditions);
}
}
}
else if (joinable.isJoin()) {
JoinNode join = (JoinNode)joinable;
moveJoinConditions(join.getLeft(), join, whereConditions, whereJoins);
moveJoinConditions(join.getRight(), join, whereConditions, whereJoins);
}
}
// Fifth pass: wrap contiguous group joins in separate joinable.
// We have done our best with the inner joins to make this possible,
// but some outer joins may require that a TableGroup be broken up into
// multiple TableJoins.
protected void isolateGroups(List<JoinIsland> islands) {
for (JoinIsland island : islands) {
TableGroup group = isolateGroups(island.root);
if (group != null) {
Joinable nroot = getTableJoins(island.root, group);
island.output.replaceInput(island.root, nroot);
island.root = nroot;
}
}
}
protected TableGroup isolateGroups(Joinable joinable) {
if (joinable.isTable()) {
TableSource table = (TableSource)joinable;
assert (table.getGroup() != null);
return table.getGroup();
}
if (!joinable.isJoin())
return null;
// Both sides must be matching groups.
JoinNode join = (JoinNode)joinable;
Joinable left = join.getLeft();
Joinable right = join.getRight();
TableGroup leftGroup = isolateGroups(left);
TableGroup rightGroup = isolateGroups(right);
if ((leftGroup == rightGroup) && (leftGroup != null))
return leftGroup;
if (leftGroup != null)
join.setLeft(getTableJoins(left, leftGroup));
if (rightGroup != null)
join.setRight(getTableJoins(right, rightGroup));
// Make arbitrary joins LEFT not RIGHT.
if (join.getJoinType() == JoinType.RIGHT)
join.reverse();
return null;
}
// Make a new TableGroup, recording what it contains.
protected TableJoins getTableJoins(Joinable joins, TableGroup group) {
TableJoins tableJoins = new TableJoins(joins, group);
getTableJoinsTables(joins, tableJoins);
return tableJoins;
}
protected void getTableJoinsTables(Joinable joinable, TableJoins tableJoins) {
if (joinable.isJoin()) {
JoinNode join = (JoinNode)joinable;
getTableJoinsTables(join.getLeft(), tableJoins);
getTableJoinsTables(join.getRight(), tableJoins);
}
else {
assert joinable.isTable();
tableJoins.addTable((TableSource)joinable);
}
}
static final Comparator<TableGroup> tableGroupComparator = new Comparator<TableGroup>() {
@Override
public int compare(TableGroup tg1, TableGroup tg2) {
Group g1 = tg1.getGroup();
Group g2 = tg2.getGroup();
if (g1 != g2)
return g1.getName().compareTo(g2.getName());
return tg1.getMinOrdinal() - tg2.getMinOrdinal();
}
};
static final Comparator<TableSource> tableSourceComparator = new Comparator<TableSource>() {
@Override
public int compare(TableSource t1, TableSource t2) {
return compareTableSources(t1, t2);
}
};
protected static int compareColumnSources(ColumnSource c1, ColumnSource c2) {
if (c1 instanceof TableSource) {
if (!(c2 instanceof TableSource))
return +1;
return compareTableSources((TableSource)c1, (TableSource)c2);
}
else if (c2 instanceof TableSource)
return -1;
else
return 0;
}
protected static int compareTableSources(TableSource ts1, TableSource ts2) {
TableNode t1 = ts1.getTable();
UserTable ut1 = t1.getTable();
Group g1 = ut1.getGroup();
TableGroup tg1 = ts1.getGroup();
TableNode t2 = ts2.getTable();
UserTable ut2 = t2.getTable();
Group g2 = ut2.getGroup();
TableGroup tg2 = ts2.getGroup();
if (g1 != g2)
return g1.getName().compareTo(g2.getName());
if (tg1 == tg2) // Including null because not yet computed.
return t1.getOrdinal() - t2.getOrdinal();
return tg1.getMinOrdinal() - tg2.getMinOrdinal();
}
// Return size of directly-reachable subtree of all simple inner joins.
protected static int countInnerJoins(Joinable joinable) {
if (!isSimpleInnerJoin(joinable))
return 0;
return 1 +
countInnerJoins(((JoinNode)joinable).getLeft()) +
countInnerJoins(((JoinNode)joinable).getRight());
}
// Accumulate operands of directly-reachable subtree of simple inner joins.
protected static void getInnerJoins(Joinable joinable, Collection<Joinable> into) {
if (!isSimpleInnerJoin(joinable))
into.add(joinable);
else {
getInnerJoins(((JoinNode)joinable).getLeft(), into);
getInnerJoins(((JoinNode)joinable).getRight(), into);
}
}
// Can this inner join be reordered?
// TODO: If there are inner joins with conditions that didn't get
// moved by the first pass, leave them alone. That will miss
// opportunities. Need to have a way to accumulate those
// conditions and put them into the join tree.
protected static boolean isSimpleInnerJoin(Joinable joinable) {
return (joinable.isInnerJoin() && !((JoinNode)joinable).hasJoinConditions());
}
protected static int compareJoinables(Joinable j1, Joinable j2) {
if (j1.isTable() && j2.isTable())
return compareTableSources((TableSource)j1, (TableSource)j2);
Group g1 = singleGroup(j1);
Group g2 = singleGroup(j2);
if (g1 == null) {
if (g2 != null)
return -1;
else
return 0;
}
else if (g2 == null)
return +1;
if (g1 != g2)
return g1.getName().compareTo(g2.getName());
int[] range1 = ordinalRange(j1);
int[] range2 = ordinalRange(j2);
if (range1[1] < range2[0])
return -1;
else if (range1[0] > range2[1])
return +1;
else
return 0;
}
protected static Group singleGroup(Joinable j) {
if (j.isTable())
return ((TableSource)j).getTable().getGroup();
else if (j.isJoin()) {
JoinNode join = (JoinNode)j;
Group gl = singleGroup(join.getLeft());
Group gr = singleGroup(join.getRight());
if (gl == gr)
return gl;
else
return null;
}
else
return null;
}
protected static int[] ordinalRange(Joinable j) {
if (j.isTable()) {
int ord = ((TableSource)j).getTable().getOrdinal();
return new int[] { ord, ord };
}
else if (j.isJoin()) {
JoinNode join = (JoinNode)j;
int[] ol = ordinalRange(join.getLeft());
int[] or = ordinalRange(join.getRight());
if (ol[0] > or[0])
ol[0] = or[0];
if (ol[1] < or[1])
ol[1] = or[1];
return ol;
}
else
return new int[] { -1, -1 };
}
}
|
package com.btk5h.skriptmirror.skript;
import com.btk5h.skriptmirror.SkriptMirror;
import org.bukkit.Bukkit;
import org.bukkit.event.Event;
import org.bukkit.event.EventPriority;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;
import org.bukkit.plugin.EventExecutor;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import ch.njol.skript.Skript;
import ch.njol.skript.SkriptConfig;
import ch.njol.skript.lang.Literal;
import ch.njol.skript.lang.SkriptEvent;
import ch.njol.skript.lang.SkriptParser;
public class EvtByReflection extends SkriptEvent {
static {
Skript.registerEvent("Bukkit Event", EvtByReflection.class, BukkitEvent.class,
"%strings% [(at|on|with) priority %-number%]");
}
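// One listener per priority re-fires every registered Bukkit event wrapped in a
// BukkitEvent, letting Skript triggers react to arbitrary event classes by name.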
private static class PriorityListener implements Listener {
private EventPriority priority;
private Set<Class<? extends Event>> events = new HashSet<>();
public PriorityListener(int priority) {
this.priority = EventPriority.values()[priority];
}
public EventPriority getPriority() {
return priority;
}
public Set<Class<? extends Event>> getEvents() {
return events;
}
}
private static EventExecutor executor =
(listener, event) -> Bukkit.getPluginManager()
.callEvent(new BukkitEvent(event, ((PriorityListener) listener).getPriority()));
private static PriorityListener[] listeners = new PriorityListener[]{
new PriorityListener(0),
new PriorityListener(1),
new PriorityListener(2),
new PriorityListener(3),
new PriorityListener(4),
new PriorityListener(5)
};
static class BukkitEvent extends Event {
private static final HandlerList handlers = new HandlerList();
private final Event event;
private final EventPriority priority;
public BukkitEvent(Event event, EventPriority priority) {
this.event = event;
this.priority = priority;
}
public Event getEvent() {
return event;
}
public EventPriority getPriority() {
return priority;
}
public static HandlerList getHandlerList() {
return handlers;
}
@Override
public HandlerList getHandlers() {
return handlers;
}
}
private static Set<Class<? extends Event>> events = new HashSet<>();
private static void registerEvent(Class<? extends Event> event, EventPriority priority) {
PriorityListener listener = listeners[priority.ordinal()];
Set<Class<? extends Event>> events = listener.getEvents();
if (!events.contains(event)) {
events.add(event);
Bukkit.getPluginManager()
.registerEvent(event, listener, priority, executor, SkriptMirror.getInstance());
}
}
private Class<? extends Event>[] classes;
private EventPriority priority;
@SuppressWarnings("unchecked")
@Override
public boolean init(Literal<?>[] args, int matchedPattern, SkriptParser.ParseResult parseResult) {
String[] events = ((Literal<String>) args[0]).getArray();
classes = (Class<? extends Event>[]) Array.newInstance(Class.class, events.length);
for (int i = 0; i < events.length; i++) {
String event = events[i];
try {
Class<?> eventClass = Class.forName(event);
if (!Event.class.isAssignableFrom(eventClass)) {
Skript.error(event + " is not an event.");
return false;
}
classes[i] = (Class<? extends Event>) eventClass;
} catch (ClassNotFoundException e) {
Skript.error(event + " refers to a non-existent class.");
return false;
}
}
if (args[1] == null) {
priority = SkriptConfig.defaultEventPriority.value();
} else {
int priorityIndex = ((Literal<Number>) args[1]).getSingle().intValue();
priorityIndex = Math.max(0, Math.min(priorityIndex, listeners.length - 1));
priority = listeners[priorityIndex].getPriority();
}
for (Class<? extends Event> cls : classes) {
registerEvent(cls, priority);
}
return true;
}
@Override
public boolean check(Event e) {
Class<? extends Event> eventClass = ((BukkitEvent) e).getEvent().getClass();
if (priority == ((BukkitEvent) e).getPriority()) {
for (Class<? extends Event> cls : classes) {
if (cls == eventClass) {
return true;
}
}
}
return false;
}
@Override
public String toString(Event e, boolean debug) {
return Arrays.toString(classes) + " priority " + priority;
}
}
|
package com.celements.web.plugin.cmd;
import static com.google.common.base.Strings.*;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.celements.web.utils.Html2Text;
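// Converts HTML to plain text via the Html2Text parser; the non-deprecated entry point
// wraps any failure, including StackOverflowError, in a ConvertToPlainTextException.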
public class PlainTextCommand {
private static final Logger LOGGER = LoggerFactory.getLogger(PlainTextCommand.class);
public String convertHtmlToPlainText(String htmlContent) throws ConvertToPlainTextException {
try {
return internalConvert(htmlContent);
} catch (StackOverflowError | Exception exp) {
throw new ConvertToPlainTextException("Fail to convertToPlainText.", exp);
}
}
/**
* @deprecated instead use <code>convertHtmlToPlainText</code>
*/
@Deprecated
public String convertToPlainText(String htmlContent) {
try {
return internalConvert(htmlContent);
} catch (IOException ioExp) {
LOGGER.error("Fail to convertToPlainText: ", ioExp);
}
return "";
}
private String internalConvert(String htmlContent) throws IOException {
Reader in = new StringReader(nullToEmpty(htmlContent));
Html2Text parser = new Html2Text();
parser.parse(in);
in.close();
return parser.getText();
}
}
|
package com.cookingfox.chefling.command;
import com.cookingfox.chefling.ContainerInterface;
import com.cookingfox.chefling.Factory;
import com.cookingfox.chefling.LifeCycle;
import com.cookingfox.chefling.exception.*;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.*;
/**
* Implementation of {@link ContainerInterface#create(Class)}.
*/
public class CreateCommand extends AbstractCommand {
// CONSTRUCTOR
/**
* @see AbstractCommand#AbstractCommand(ContainerInterface, Map, Map)
*/
public CreateCommand(ContainerInterface container, Map<Class, Object> instances, Map<Class, Object> mappings) {
super(container, instances, mappings);
}
// PUBLIC METHODS
/**
* @see ContainerInterface#create(Class)
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> type) throws ContainerException {
Object mapping = mappings.get(type);
T instance;
if (mapping instanceof Class) {
// create instance using mapped type
instance = create((Class<T>) mapping);
} else if (type.isInstance(mapping)) {
// mapping is instance
instance = (T) mapping;
} else if (mapping instanceof Factory) {
// use factory to create instance
instance = resolveUsingFactory((Factory<T>) mapping, type);
} else {
// no mapping: create instance using provided type
instance = createInstance(type);
}
// call life cycle onCreate
if (instance instanceof LifeCycle) {
((LifeCycle) instance).onCreate();
}
return instance;
}
// PROTECTED METHODS
/**
* Creates a new instance of `type`, attempting to resolve its full dependency tree.
*
* @param type The type to instantiate.
* @param <T> Ensures the returned object is cast to the expected type.
* @return New instance of the type.
* @throws ContainerException
*/
@SuppressWarnings("unchecked")
protected <T> T createInstance(Class<T> type) throws ContainerException {
Constructor constructor = getDefaultConstructor(type);
Class[] parameterTypes = constructor.getParameterTypes();
Object[] parameters = new Object[parameterTypes.length];
// gather constructor parameters based on their types
for (int i = 0; i < parameterTypes.length; i++) {
parameters[i] = container.get(parameterTypes[i]);
}
try {
// create a new instance, passing the constructor parameters
return (T) constructor.newInstance(parameters);
} catch (Exception e) {
throw new TypeInstantiationException(type, e);
}
}
/**
* Get the default constructor for this type.
*
* @param type The type to get the constructor for.
* @return Constructor, if a resolvable one can be found.
* @throws TypeNotAllowedException
*/
protected Constructor getDefaultConstructor(Class type) throws TypeNotAllowedException {
isInstantiable(type);
Constructor[] constructors = type.getDeclaredConstructors();
ResolvabilityResult firstResult = getResolvabilityResult(constructors[0]);
// if first constructor is resolvable, return it immediately
if (firstResult.isResolvable()) {
return firstResult.constructor;
}
// map of resolvable results, by number of parameters: we favor a constructor with a small
// number of parameters, because the chances are higher that it is resolvable.
TreeMap<Integer, List<ResolvabilityResult>> resultMap = new TreeMap<Integer, List<ResolvabilityResult>>();
// inspect constructor resolvability
for (Constructor constructor : constructors) {
// create a resolvability result for this constructor
ResolvabilityResult result = getResolvabilityResult(constructor);
List<ResolvabilityResult> resultList = resultMap.get(result.numParameters);
if (resultList == null) {
resultList = new LinkedList<ResolvabilityResult>();
}
resultList.add(result);
resultMap.put(result.numParameters, resultList);
}
// select resolvable constructor
for (Map.Entry<Integer, List<ResolvabilityResult>> entry : resultMap.entrySet()) {
for (ResolvabilityResult result : entry.getValue()) {
if (result.isResolvable()) {
// constructor is resolvable: return it
return result.constructor;
}
}
}
// build the error message
StringBuilder errorBuilder = new StringBuilder();
errorBuilder.append("it does not have constructors that are resolvable by the Container:\n\n");
// create resolvability report for unresolvable type
for (Map.Entry<Integer, List<ResolvabilityResult>> entry : resultMap.entrySet()) {
List<ResolvabilityResult> resultList = entry.getValue();
// add error report entry for every resolvability result
for (int i = 0; i < resultList.size(); i++) {
addErrorReportEntry(errorBuilder, resultList.get(i), type);
if (i < resultList.size() - 1) {
errorBuilder.append("\n");
}
}
}
throw new TypeNotInstantiableException(type, errorBuilder.toString());
}
/**
* Create a "resolvability" result: check all constructor parameters to see whether they are
* resolvable by the Container.
*
* @param constructor The constructor to check.
* @return The result.
*/
protected ResolvabilityResult getResolvabilityResult(Constructor constructor) {
Class[] parameterTypes = constructor.getParameterTypes();
int numParameters = parameterTypes.length;
ResolvabilityResult result = new ResolvabilityResult(constructor, numParameters);
// check whether the constructor parameters are resolvable
for (int i = 0; i < numParameters; i++) {
Class parameterType = parameterTypes[i];
// container has a mapping for this parameter type: ok!
if (container.has(parameterType)) {
continue;
}
try {
// is this type instantiable?
isInstantiable(parameterType);
} catch (TypeNotAllowedException e) {
// not instantiable: store in result
result.unresolvable.add(new UnresolvableParameter(i, e));
}
}
return result;
}
/**
* Add an error report for an unresolvable constructor.
*
* @param errorBuilder The string builder for the error message.
* @param result The resolvability result.
* @param type The type we are attempting to instantiate.
*/
protected void addErrorReportEntry(StringBuilder errorBuilder, ResolvabilityResult result, Class type) {
// add name of this constructor
String modifierName = Modifier.toString(result.getModifiers());
errorBuilder.append(String.format("[%s] %s ( ", modifierName, type.getSimpleName()));
Class[] parameterTypes = result.constructor.getParameterTypes();
// add parameter types to constructor signature
for (int i = 0; i < parameterTypes.length; i++) {
Class parameterType = parameterTypes[i];
errorBuilder.append(parameterType.getName());
if (i < parameterTypes.length - 1) {
errorBuilder.append(", ");
}
}
errorBuilder.append(" )\n");
if (!result.isPublic()) {
errorBuilder.append(String.format("The constructor is %s\n", modifierName));
} else if (!result.unresolvable.isEmpty()) {
// loop through unresolvable parameters and print their exception messages
for (UnresolvableParameter notResolvable : result.unresolvable) {
errorBuilder.append(String.format("Parameter #%d: %s\n",
notResolvable.parameterIndex + 1, notResolvable.exception.getMessage()));
}
}
}
/**
* Resolves a type using a Factory instance. Throws if the returned value is null or invalid.
*
* @param factory The Factory object.
* @param type The expected type that the Factory should return.
* @param <T> Ensures the return value is cast to expected type.
* @return The created instance.
* @throws ContainerException
*/
protected <T> T resolveUsingFactory(Factory<T> factory, Class<T> type) throws ContainerException {
T instance = factory.create(container);
if (instance == null) {
throw new FactoryReturnedNullException(type);
} else if (!type.isInstance(instance)) {
throw new FactoryReturnedUnexpectedValueException(type, instance);
}
return instance;
}
// INTERNAL CLASSES
/**
* Represents information for an unresolvable constructor parameter.
*/
protected static class UnresolvableParameter {
public int parameterIndex;
public Exception exception;
public UnresolvableParameter(int parameterIndex, Exception exception) {
this.parameterIndex = parameterIndex;
this.exception = exception;
}
}
/**
* Represents information for a constructor's resolvability.
*/
protected static class ResolvabilityResult {
public Constructor constructor;
int numParameters;
public final ArrayList<UnresolvableParameter> unresolvable = new ArrayList<UnresolvableParameter>();
public ResolvabilityResult(Constructor constructor, int numParameters) {
this.constructor = constructor;
this.numParameters = numParameters;
}
public int getModifiers() {
return constructor.getModifiers();
}
public boolean isPublic() {
return Modifier.isPublic(getModifiers());
}
public boolean isResolvable() {
return isPublic() && unresolvable.isEmpty();
}
}
}
|
package com.github.AsaiYusuke.SushMock;
import java.io.File;
import java.io.IOException;
import org.apache.sshd.server.SshServer;
import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider;
import com.github.AsaiYusuke.SushMock.shell.CachedAuthenticator;
import com.github.AsaiYusuke.SushMock.shell.ProxyShellFactory;
import com.github.AsaiYusuke.SushMock.util.Constants;
import com.github.AsaiYusuke.SushMock.util.Constants.ExecutionType;
public class SushMockServer {
// volatile so that a shutdown() call from another thread is visible to the main polling loop
public static volatile boolean isRunning;
public static void main(String[] args)
throws IOException, InterruptedException {
CommandOption option = new CommandOption();
option.parse(args);
if (option.getExecutionType() == ExecutionType.Help) {
return;
}
SshServer sshd = SshServer.setUpDefaultServer();
sshd.setPort(option.getListenPort());
sshd.setKeyPairProvider(new SimpleGeneratorHostKeyProvider(
new File(Constants.DefaultKeyFile)));
CachedAuthenticator authenticator = new CachedAuthenticator();
sshd.setPasswordAuthenticator(authenticator);
sshd.setPublickeyAuthenticator(authenticator);
sshd.setShellFactory(new ProxyShellFactory(authenticator));
sshd.start();
System.out.println("started");
isRunning = true;
while (isRunning) {
Thread.sleep(1000);
}
System.out.println("end");
}
public static void shutdown() {
isRunning = false;
}
}
|
package com.github.brandtabbott;
public class BlockingPoolException extends RuntimeException {
private static final long serialVersionUID = 6589942387653952899L;
public BlockingPoolException() {
super();
}
public BlockingPoolException(String message) {
super(message);
}
public BlockingPoolException(Throwable cause) {
super(cause);
}
public BlockingPoolException(String message, Throwable cause) {
super(message, cause);
}
public BlockingPoolException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
|
package com.github.sabomichal.immutablexjc;
import java.beans.Introspector;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.logging.Level;
import org.xml.sax.ErrorHandler;
import com.sun.codemodel.JBlock;
import com.sun.codemodel.JClass;
import com.sun.codemodel.JClassAlreadyExistsException;
import com.sun.codemodel.JCodeModel;
import com.sun.codemodel.JConditional;
import com.sun.codemodel.JDefinedClass;
import com.sun.codemodel.JExpr;
import com.sun.codemodel.JExpression;
import com.sun.codemodel.JFieldVar;
import com.sun.codemodel.JInvocation;
import com.sun.codemodel.JMethod;
import com.sun.codemodel.JMod;
import com.sun.codemodel.JType;
import com.sun.codemodel.JVar;
import com.sun.tools.xjc.BadCommandLineException;
import com.sun.tools.xjc.Options;
import com.sun.tools.xjc.Plugin;
import com.sun.tools.xjc.outline.ClassOutline;
import com.sun.tools.xjc.outline.FieldOutline;
import com.sun.tools.xjc.outline.Outline;
/**
* IMMUTABLE-XJC plugin implementation.
*
* @author <a href="mailto:sabo.michal@gmail.com">Michal Sabo</a>
*/
public final class PluginImpl extends Plugin {
private static final String BUILDER_OPTION_NAME = "-imm-builder";
private static final String CCONSTRUCTOR_OPTION_NAME = "-imm-cc";
private static final String WITHIFNOTNULL_OPTION_NAME = "-imm-ifnotnull";
private static final String NOPUBLICCONSTRUCTOR_OPTION_NAME = "-imm-nopubconstructor";
private static final String LEAVECOLLECTIONS_OPTION_NAME = "-imm-leavecollections";
private static final String UNSET_PREFIX = "unset";
private static final String SET_PREFIX = "set";
private static final String MESSAGE_PREFIX = "IMMUTABLE-XJC";
private static final String OPTION_NAME = "immutable";
private static final JType[] NO_ARGS = new JType[0];
private ResourceBundle resourceBundle = ResourceBundle.getBundle(PluginImpl.class.getCanonicalName());
private boolean createBuilder;
private boolean createCConstructor;
private boolean createWithIfNotNullMethod;
private boolean createBuilderWithoutPublicConstructor;
private boolean leaveCollectionsMutable;
private Options options;
@Override
public boolean run(final Outline model, final Options options, final ErrorHandler errorHandler) {
boolean success = true;
this.options = options;
this.log(Level.INFO, "title");
for (ClassOutline clazz : model.getClasses()) {
JDefinedClass implClass = clazz.implClass;
FieldOutline[] declaredFields = clazz.getDeclaredFields();
FieldOutline[] superclassFields = getSuperclassFields(clazz);
int declaredFieldsLength = declaredFields != null ? declaredFields.length : 0;
int superclassFieldsLength = superclassFields.length;
if (declaredFieldsLength + superclassFieldsLength > 0) {
if (addStandardConstructor(implClass, declaredFields, superclassFields) == null) {
log(Level.WARNING, "couldNotAddStdCtor", implClass.binaryName());
}
}
if (declaredFieldsLength + superclassFieldsLength > 0) {
if (createBuilderWithoutPublicConstructor
|| (createBuilder && declaredFieldsLength + superclassFieldsLength > 8)) {
if (addPropertyConstructor(implClass, declaredFields, superclassFields, JMod.NONE) == null) {
log(Level.WARNING, "couldNotAddPropertyCtor", implClass.binaryName());
}
}
else {
if (addPropertyConstructor(implClass, declaredFields, superclassFields, JMod.PUBLIC) == null) {
log(Level.WARNING, "couldNotAddPropertyCtor", implClass.binaryName());
}
}
}
// implClass.direct("// " + getMessage("title"));
makeClassFinal(implClass);
removeSetters(implClass);
makePropertiesPrivate(implClass);
makePropertiesFinal(implClass, declaredFields);
if (createBuilder) {
if (!clazz.implClass.isAbstract()) {
JDefinedClass builderClass;
if ((builderClass = addBuilderClass(clazz, declaredFields, superclassFields)) == null) {
log(Level.WARNING, "couldNotAddClassBuilder", implClass.binaryName());
}
if (createCConstructor && builderClass != null) {
if (addCopyConstructor(clazz.implClass, builderClass, declaredFields, superclassFields) == null) {
log(Level.WARNING, "couldNotAddCopyCtor", implClass.binaryName());
}
}
}
}
}
// if superclass is a JAXB bound class or an abstract class, revert setting it final
for (ClassOutline clazz : model.getClasses()) {
if (clazz.getSuperClass() != null) {
clazz.getSuperClass().implClass.mods().setFinal(false);
}
else if (clazz.implClass.isAbstract()) {
clazz.implClass.mods().setFinal(false);
}
}
this.options = null;
return success;
}
@Override
public String getOptionName() {
return OPTION_NAME;
}
@Override
public String getUsage() {
final String n = System.getProperty("line.separator", "\n");
StringBuilder retval = new StringBuilder(" -");
retval.append(OPTION_NAME);
retval.append(" : ");
retval.append(getMessage("usage"));
retval.append(n);
retval.append(" ");
retval.append(BUILDER_OPTION_NAME);
retval.append(" : ");
retval.append(getMessage("builderUsage"));
retval.append(n);
retval.append(" ");
retval.append(CCONSTRUCTOR_OPTION_NAME);
retval.append(" : ");
retval.append(getMessage("cConstructorUsage"));
retval.append(n);
retval.append(" ");
retval.append(WITHIFNOTNULL_OPTION_NAME);
retval.append(" : ");
retval.append(getMessage("withIfNotNullUsage"));
retval.append(n);
retval.append(" ");
retval.append(NOPUBLICCONSTRUCTOR_OPTION_NAME);
retval.append(" : ");
retval.append(getMessage("builderWithoutPublicConstructor"));
retval.append(n);
retval.append(" ");
retval.append(LEAVECOLLECTIONS_OPTION_NAME);
retval.append(" : ");
retval.append(getMessage("leaveCollectionsMutable"));
retval.append(n);
return retval.toString();
}
@Override
public int parseArgument(final Options opt, final String[] args, final int i) throws BadCommandLineException, IOException {
if (args[i].startsWith(BUILDER_OPTION_NAME)) {
this.createBuilder = true;
return 1;
}
if (args[i].startsWith(CCONSTRUCTOR_OPTION_NAME)) {
this.createCConstructor = true;
return 1;
}
if (args[i].startsWith(WITHIFNOTNULL_OPTION_NAME)) {
this.createWithIfNotNullMethod = true;
return 1;
}
if (args[i].startsWith(NOPUBLICCONSTRUCTOR_OPTION_NAME)) {
this.createBuilderWithoutPublicConstructor = true;
return 1;
}
if (args[i].startsWith(LEAVECOLLECTIONS_OPTION_NAME)) {
this.leaveCollectionsMutable = true;
return 1;
}
return 0;
}
private String getMessage(final String key, final Object... args) {
return MessageFormat.format(resourceBundle.getString(key), args);
}
private JDefinedClass addBuilderClass(ClassOutline clazz, FieldOutline[] declaredFields, FieldOutline[] superclassFields) {
JDefinedClass builderClass = generateBuilderClass(clazz.implClass);
if (builderClass == null) {
return null;
}
for (FieldOutline field : declaredFields) {
addProperty(builderClass, field);
JMethod unconditionalWithMethod = addWithMethod(builderClass, field);
if (createWithIfNotNullMethod) {
addWithIfNotNullMethod(builderClass, field, unconditionalWithMethod);
}
if (field.getPropertyInfo().isCollection()) {
addAddMethod(builderClass, field);
}
}
for (FieldOutline field : superclassFields) {
addProperty(builderClass, field);
JMethod unconditionalWithMethod = addWithMethod(builderClass, field);
if (createWithIfNotNullMethod) {
addWithIfNotNullMethod(builderClass, field, unconditionalWithMethod);
}
if (field.getPropertyInfo().isCollection()) {
addAddMethod(builderClass, field);
}
}
addNewBuilder(clazz, builderClass);
if (createCConstructor) {
addNewBuilderCc(clazz, builderClass);
}
addBuildMethod(clazz.implClass, builderClass, declaredFields, superclassFields);
return builderClass;
}
private JVar addProperty(JDefinedClass clazz, FieldOutline field) {
JType jType = getJavaType(field);
if (field.getPropertyInfo().isCollection()) {
return clazz.field(JMod.PRIVATE, jType, field.getPropertyInfo().getName(false),
getNewCollectionExpression(field.parent().implClass.owner(), jType));
}
else {
return clazz.field(JMod.PRIVATE, jType, field.getPropertyInfo().getName(false));
}
}
private JMethod addBuildMethod(JDefinedClass clazz, JDefinedClass builderClass, FieldOutline[] declaredFields,
FieldOutline[] superclassFields) {
JMethod method = builderClass.method(JMod.PUBLIC, clazz, "build");
JInvocation constructorInvocation = JExpr._new(clazz);
for (FieldOutline field : superclassFields) {
constructorInvocation.arg(JExpr.ref(field.getPropertyInfo().getName(false)));
}
for (FieldOutline field : declaredFields) {
constructorInvocation.arg(JExpr.ref(field.getPropertyInfo().getName(false)));
}
method.body()._return(constructorInvocation);
return method;
}
private void addNewBuilder(ClassOutline clazz, JDefinedClass builderClass) {
boolean superClassWithSameName = false;
ClassOutline superclass = clazz.getSuperClass();
while (superclass != null) {
if (superclass.implClass.name().equals(clazz.implClass.name())) {
superClassWithSameName = true;
}
superclass = superclass.getSuperClass();
}
if (!superClassWithSameName) {
JMethod method = clazz.implClass.method(JMod.PUBLIC | JMod.STATIC, builderClass,
Introspector.decapitalize(clazz.implClass.name()) + "Builder");
method.body()._return(JExpr._new(builderClass));
}
}
private void addNewBuilderCc(ClassOutline clazz, JDefinedClass builderClass) {
boolean superClassWithSameName = false;
ClassOutline superclass = clazz.getSuperClass();
while (superclass != null) {
if (superclass.implClass.name().equals(clazz.implClass.name())) {
superClassWithSameName = true;
}
superclass = superclass.getSuperClass();
}
if (!superClassWithSameName) {
JMethod method = clazz.implClass.method(JMod.PUBLIC | JMod.STATIC, builderClass,
Introspector.decapitalize(clazz.implClass.name()) + "Builder");
JVar param = method.param(JMod.FINAL, clazz.implClass, "o");
method.body()._return(JExpr._new(builderClass).arg(param));
}
}
private JMethod addPropertyConstructor(JDefinedClass clazz, FieldOutline[] declaredFields, FieldOutline[] superclassFields,
int constAccess) {
JMethod ctor = clazz.getConstructor(getFieldTypes(declaredFields, superclassFields));
if (ctor == null) {
ctor = this.generatePropertyConstructor(clazz, declaredFields, superclassFields, constAccess);
}
else {
this.log(Level.WARNING, "standardCtorExists");
}
return ctor;
}
private JMethod addStandardConstructor(final JDefinedClass clazz, FieldOutline[] declaredFields,
FieldOutline[] superclassFields) {
JMethod ctor = clazz.getConstructor(NO_ARGS);
if (ctor == null) {
ctor = this.generateStandardConstructor(clazz, declaredFields, superclassFields);
}
else {
this.log(Level.WARNING, "standardCtorExists");
}
return ctor;
}
private JMethod addCopyConstructor(final JDefinedClass clazz, final JDefinedClass builderClass,
FieldOutline[] declaredFields, FieldOutline[] superclassFields) {
JMethod ctor = generateCopyConstructor(clazz, builderClass, declaredFields, superclassFields);
if (ctor != null) {
createConstructor(builderClass, JMod.PUBLIC);
}
return ctor;
}
private JMethod addWithMethod(JDefinedClass builderClass, FieldOutline field) {
String fieldName = field.getPropertyInfo().getName(true);
JMethod method = builderClass.method(JMod.PUBLIC, builderClass, "with" + fieldName);
generatePropertyAssignment(method, field);
method.body()._return(JExpr.direct("this"));
return method;
}
private JMethod addWithIfNotNullMethod(JDefinedClass builderClass, FieldOutline field, JMethod unconditionalWithMethod) {
if (field.getRawType().isPrimitive())
return null;
String fieldName = field.getPropertyInfo().getName(true);
JMethod method = builderClass.method(JMod.PUBLIC, builderClass, "with" + fieldName + "IfNotNull");
JVar param = generateMethodParameter(method, field);
JBlock block = method.body();
JConditional conditional = block._if(param.eq(JExpr._null()));
conditional._then()._return(JExpr.direct("this"));
conditional._else()._return(JExpr.invoke(unconditionalWithMethod).arg(param));
return method;
}
private JMethod addAddMethod(JDefinedClass builderClass, FieldOutline field) {
List<JClass> typeParams = ((JClass) getJavaType(field)).getTypeParameters();
if (!typeParams.iterator().hasNext()) {
return null;
}
JMethod method = builderClass.method(JMod.PUBLIC, builderClass, "add" + field.getPropertyInfo().getName(true));
JBlock block = method.body();
String fieldName = field.getPropertyInfo().getName(false);
JVar param = method.param(JMod.FINAL, typeParams.iterator().next(), fieldName);
JInvocation invocation = JExpr.refthis(fieldName).invoke("add").arg(param);
block.add(invocation);
block._return(JExpr.direct("this"));
return method;
}
private JDefinedClass generateBuilderClass(JDefinedClass clazz) {
JDefinedClass builderClass = null;
String builderClassName = clazz.name() + "Builder";
try {
builderClass = clazz._class(JMod.PUBLIC | JMod.STATIC, builderClassName);
}
catch (JClassAlreadyExistsException e) {
this.log(Level.WARNING, "builderClassExists", builderClassName);
}
return builderClass;
}
private void replaceCollectionGetter(FieldOutline field, final JMethod getter) {
JDefinedClass clazz = field.parent().implClass;
// remove the old getter
clazz.methods().remove(getter);
// and create a new one
JMethod newGetter = field.parent().implClass.method(getter.mods().getValue(), getter.type(), getter.name());
JBlock block = newGetter.body();
JVar ret = block.decl(getJavaType(field), "ret");
JCodeModel codeModel = field.parent().implClass.owner();
JVar param = generateMethodParameter(getter, field);
JConditional conditional = block._if(param.eq(JExpr._null()));
conditional._then().assign(ret, getEmptyCollectionExpression(codeModel, param));
conditional._else().assign(ret, getUnmodifiableWrappedExpression(codeModel, param));
block._return(ret);
newGetter.javadoc().append("Returns unmodifiable collection.");
}
private void generatePropertyAssignment(final JMethod method, FieldOutline fieldOutline) {
generatePropertyAssignment(method, fieldOutline, false);
}
private void generatePropertyAssignment(final JMethod method, FieldOutline fieldOutline, boolean wrapUnmodifiable) {
JBlock block = method.body();
JCodeModel codeModel = fieldOutline.parent().implClass.owner();
String fieldName = fieldOutline.getPropertyInfo().getName(false);
JVar param = generateMethodParameter(method, fieldOutline);
if (fieldOutline.getPropertyInfo().isCollection() && !leaveCollectionsMutable) {
if (wrapUnmodifiable) {
JConditional conditional = block._if(param.eq(JExpr._null()));
conditional._then().assign(JExpr.refthis(fieldName), JExpr._null());
conditional._else().assign(JExpr.refthis(fieldName),
getDefensiveCopyExpression(codeModel, getJavaType(fieldOutline), param));
}
else {
block.assign(JExpr.refthis(fieldName), JExpr.ref(fieldName));
}
replaceCollectionGetter(fieldOutline, getGetterProperty(fieldOutline));
}
else {
block.assign(JExpr.refthis(fieldName), JExpr.ref(fieldName));
}
}
private JVar generateMethodParameter(final JMethod method, FieldOutline fieldOutline) {
String fieldName = fieldOutline.getPropertyInfo().getName(false);
JType javaType = getJavaType(fieldOutline);
return method.param(JMod.FINAL, javaType, fieldName);
}
private JExpression getDefensiveCopyExpression(JCodeModel codeModel, JType jType, JVar param) {
List<JClass> typeParams = ((JClass) jType).getTypeParameters();
JClass typeParameter = null;
if (typeParams.iterator().hasNext()) {
typeParameter = typeParams.iterator().next();
}
JClass newClass = null;
if (param.type().erasure().equals(codeModel.ref(Collection.class))) {
newClass = codeModel.ref(ArrayList.class);
}
else if (param.type().erasure().equals(codeModel.ref(List.class))) {
newClass = codeModel.ref(ArrayList.class);
}
else if (param.type().erasure().equals(codeModel.ref(Map.class))) {
newClass = codeModel.ref(HashMap.class);
}
else if (param.type().erasure().equals(codeModel.ref(Set.class))) {
newClass = codeModel.ref(HashSet.class);
}
else if (param.type().erasure().equals(codeModel.ref(SortedMap.class))) {
newClass = codeModel.ref(TreeMap.class);
}
else if (param.type().erasure().equals(codeModel.ref(SortedSet.class))) {
newClass = codeModel.ref(TreeSet.class);
}
if (newClass != null && typeParameter != null) {
newClass = newClass.narrow(typeParameter);
}
return newClass == null ? JExpr._null() : JExpr._new(newClass).arg(param);
}
private JExpression getUnmodifiableWrappedExpression(JCodeModel codeModel, JVar param) {
if (param.type().erasure().equals(codeModel.ref(Collection.class))) {
return codeModel.ref(Collections.class).staticInvoke("unmodifiableCollection").arg(param);
}
else if (param.type().erasure().equals(codeModel.ref(List.class))) {
return codeModel.ref(Collections.class).staticInvoke("unmodifiableList").arg(param);
}
else if (param.type().erasure().equals(codeModel.ref(Map.class))) {
return codeModel.ref(Collections.class).staticInvoke("unmodifiableMap").arg(param);
}
else if (param.type().erasure().equals(codeModel.ref(Set.class))) {
return codeModel.ref(Collections.class).staticInvoke("unmodifiableSet").arg(param);
}
else if (param.type().erasure().equals(codeModel.ref(SortedMap.class))) {
return codeModel.ref(Collections.class).staticInvoke("unmodifiableSortedMap").arg(param);
}
else if (param.type().erasure().equals(codeModel.ref(SortedSet.class))) {
return codeModel.ref(Collections.class).staticInvoke("unmodifiableSortedSet").arg(param);
}
return param;
}
private JExpression getEmptyCollectionExpression(JCodeModel codeModel, JVar param) {
if (param.type().erasure().equals(codeModel.ref(Collection.class))) {
return codeModel.ref(Collections.class).staticInvoke("emptyList");
}
else if (param.type().erasure().equals(codeModel.ref(List.class))) {
return codeModel.ref(Collections.class).staticInvoke("emptyList");
}
else if (param.type().erasure().equals(codeModel.ref(Map.class))) {
return codeModel.ref(Collections.class).staticInvoke("emptyMap");
}
else if (param.type().erasure().equals(codeModel.ref(Set.class))) {
return codeModel.ref(Collections.class).staticInvoke("emptySet");
}
else if (param.type().erasure().equals(codeModel.ref(SortedMap.class))) {
return JExpr._new(codeModel.ref(TreeMap.class));
}
else if (param.type().erasure().equals(codeModel.ref(SortedSet.class))) {
return JExpr._new(codeModel.ref(TreeSet.class));
}
return param;
}
private JExpression getNewCollectionExpression(JCodeModel codeModel, JType jType) {
List<JClass> typeParams = ((JClass) jType).getTypeParameters();
JClass typeParameter = null;
if (typeParams.iterator().hasNext()) {
typeParameter = typeParams.iterator().next();
}
JClass newClass = null;
if (jType.erasure().equals(codeModel.ref(Collection.class))) {
newClass = codeModel.ref(ArrayList.class);
}
else if (jType.erasure().equals(codeModel.ref(List.class))) {
newClass = codeModel.ref(ArrayList.class);
}
else if (jType.erasure().equals(codeModel.ref(Map.class))) {
newClass = codeModel.ref(HashMap.class);
}
else if (jType.erasure().equals(codeModel.ref(Set.class))) {
newClass = codeModel.ref(HashSet.class);
}
else if (jType.erasure().equals(codeModel.ref(SortedMap.class))) {
newClass = codeModel.ref(TreeMap.class);
}
else if (jType.erasure().equals(codeModel.ref(SortedSet.class))) {
newClass = codeModel.ref(TreeSet.class);
}
if (newClass != null && typeParameter != null) {
newClass = newClass.narrow(typeParameter);
}
return newClass == null ? JExpr._null() : JExpr._new(newClass);
}
private void generateDefaultPropertyAssignment(JMethod method, FieldOutline fieldOutline) {
JBlock block = method.body();
String propertyName = fieldOutline.getPropertyInfo().getName(false);
block.assign(JExpr.refthis(propertyName), defaultValue(getJavaType(fieldOutline), fieldOutline));
}
private JExpression defaultValue(JType javaType, FieldOutline fieldOutline) {
if (javaType.isPrimitive()) {
if (fieldOutline.parent().parent().getCodeModel().BOOLEAN.equals(javaType)) {
return JExpr.lit(false);
}
else if (fieldOutline.parent().parent().getCodeModel().SHORT.equals(javaType)) {
return JExpr.cast(fieldOutline.parent().parent().getCodeModel().SHORT, JExpr.lit(0));
}
else {
return JExpr.lit(0);
}
}
return JExpr._null();
}
private JMethod generatePropertyConstructor(JDefinedClass clazz, FieldOutline[] declaredFields,
FieldOutline[] superclassFields, int constAccess) {
final JMethod ctor = createConstructor(clazz, constAccess);
if (superclassFields.length > 0) {
JInvocation superInvocation = ctor.body().invoke("super");
for (FieldOutline fieldOutline : superclassFields) {
superInvocation.arg(JExpr.ref(fieldOutline.getPropertyInfo().getName(false)));
generateMethodParameter(ctor, fieldOutline);
}
}
for (FieldOutline fieldOutline : declaredFields) {
generatePropertyAssignment(ctor, fieldOutline, true);
}
return ctor;
}
private JMethod generateStandardConstructor(final JDefinedClass clazz, FieldOutline[] declaredFields,
FieldOutline[] superclassFields) {
final JMethod ctor = createConstructor(clazz, JMod.PROTECTED);
ctor.javadoc().add("Used by JAX-B");
if (superclassFields.length > 0) {
JInvocation superInvocation = ctor.body().invoke("super");
for (FieldOutline fieldOutline : superclassFields) {
superInvocation.arg(defaultValue(getJavaType(fieldOutline), fieldOutline));
}
}
for (FieldOutline fieldOutline : declaredFields) {
generateDefaultPropertyAssignment(ctor, fieldOutline);
}
return ctor;
}
private JMethod generateCopyConstructor(final JDefinedClass clazz, final JDefinedClass builderClass,
FieldOutline[] declaredFields, FieldOutline[] superclassFields) {
final JMethod ctor = createConstructor(builderClass, JMod.PUBLIC);
final JVar o = ctor.param(JMod.FINAL, clazz, "o");
ctor.body()._if(o.eq(JExpr._null()))._then()._throw(JExpr._new(builderClass.owner().ref(NullPointerException.class))
.arg("Cannot create a copy of '" + builderClass.name() + "' from 'null'."));
JCodeModel codeModel = clazz.owner();
for (FieldOutline field : superclassFields) {
String propertyName = field.getPropertyInfo().getName(false);
JMethod getter = getPropertyGetter(field);
if (field.getPropertyInfo().isCollection()) {
JVar tmpVar = ctor.body().decl(0, getJavaType(field), "_" + propertyName, JExpr.invoke(o, getter));
JConditional conditional = ctor.body()._if(tmpVar.eq(JExpr._null()));
conditional._then().assign(JExpr.refthis(propertyName),
getNewCollectionExpression(codeModel, getJavaType(field)));
conditional._else().assign(JExpr.refthis(propertyName),
getDefensiveCopyExpression(codeModel, getJavaType(field), tmpVar));
}
else {
ctor.body().assign(JExpr.refthis(propertyName), JExpr.invoke(o, getter));
}
}
for (FieldOutline field : declaredFields) {
String propertyName = field.getPropertyInfo().getName(false);
if (field.getPropertyInfo().isCollection()) {
JVar tmpVar = ctor.body().decl(0, getJavaType(field), "_" + propertyName, JExpr.ref(o, propertyName));
JConditional conditional = ctor.body()._if(tmpVar.eq(JExpr._null()));
conditional._then().assign(JExpr.refthis(propertyName),
getNewCollectionExpression(codeModel, getJavaType(field)));
conditional._else().assign(JExpr.refthis(propertyName),
getDefensiveCopyExpression(codeModel, getJavaType(field), tmpVar));
}
else {
ctor.body().assign(JExpr.refthis(propertyName), JExpr.ref(o, propertyName));
}
}
return ctor;
}
private JMethod getPropertyGetter(final FieldOutline f) {
final JDefinedClass clazz = f.parent().implClass;
final String name = f.getPropertyInfo().getName(true);
JMethod getter = clazz.getMethod("get" + name, NO_ARGS);
if (getter == null) {
getter = clazz.getMethod("is" + name, NO_ARGS);
}
return getter;
}
private JMethod createConstructor(final JDefinedClass clazz, final int visibility) {
return clazz.constructor(visibility);
}
private JType getJavaType(FieldOutline field) {
return field.getRawType();
}
private JType[] getFieldTypes(FieldOutline[] declaredFields, FieldOutline[] superclassFields) {
JType[] fieldTypes = new JType[declaredFields.length + superclassFields.length];
int i = 0;
for (FieldOutline fieldOutline : superclassFields) {
fieldTypes[i++] = fieldOutline.getPropertyInfo().baseType;
}
for (FieldOutline fieldOutline : declaredFields) {
fieldTypes[i++] = fieldOutline.getPropertyInfo().baseType;
}
return fieldTypes;
}
private JMethod getGetterProperty(final FieldOutline fieldOutline) {
final JDefinedClass clazz = fieldOutline.parent().implClass;
final String name = fieldOutline.getPropertyInfo().getName(true);
JMethod getter = clazz.getMethod("get" + name, NO_ARGS);
if (getter == null) {
getter = clazz.getMethod("is" + name, NO_ARGS);
}
return getter;
}
private void log(final Level level, final String key, final Object... args) {
final String message = "[" + MESSAGE_PREFIX + "] [" + level.getLocalizedName() + "] " + getMessage(key, args);
int logLevel = Level.WARNING.intValue();
if (this.options != null && !this.options.quiet) {
if (this.options.verbose) {
logLevel = Level.INFO.intValue();
}
if (this.options.debugMode) {
logLevel = Level.ALL.intValue();
}
}
if (level.intValue() >= logLevel) {
if (level.intValue() <= Level.INFO.intValue()) {
System.out.println(message);
}
else {
System.err.println(message);
}
}
}
private void makeClassFinal(JDefinedClass clazz) {
clazz.mods().setFinal(true);
}
private void makePropertiesPrivate(JDefinedClass clazz) {
for (JFieldVar field : clazz.fields().values()) {
field.mods().setPrivate();
}
}
private void makePropertiesFinal(JDefinedClass clazz, FieldOutline[] declaredFields) {
for (FieldOutline fieldOutline : declaredFields) {
String fieldName = fieldOutline.getPropertyInfo().getName(false);
clazz.fields().get(fieldName).mods()
.setFinal(!(leaveCollectionsMutable && fieldOutline.getPropertyInfo().isCollection()));
}
}
private void removeSetters(JDefinedClass clazz) {
Collection<JMethod> methods = clazz.methods();
Iterator<JMethod> it = methods.iterator();
while (it.hasNext()) {
JMethod method = it.next();
String methodName = method.name();
if (methodName.startsWith(SET_PREFIX) || methodName.startsWith(UNSET_PREFIX)) {
it.remove();
}
}
}
private FieldOutline[] getSuperclassFields(ClassOutline clazz) {
// first get all superclasses
List<ClassOutline> superclasses = new ArrayList<ClassOutline>();
ClassOutline superclass = clazz.getSuperClass();
while (superclass != null) {
superclasses.add(superclass);
superclass = superclass.getSuperClass();
}
// get all fields in class reverse order
List<FieldOutline> superclassFields = new ArrayList<FieldOutline>();
Collections.reverse(superclasses);
for (ClassOutline classOutline : superclasses) {
superclassFields.addAll(Arrays.asList(classOutline.getDeclaredFields()));
}
return superclassFields.toArray(new FieldOutline[superclassFields.size()]);
}
}
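// Sketch of the output shape this plugin produces (illustration only, not generated from a real
// schema; the "Address" type and its "street" property are hypothetical): the generated class is
// made final, setters are removed, a protected no-arg constructor is kept for JAXB, a property
// constructor is added, and with -imm-builder a nested builder with with*/build() methods is created.
/*
public final class Address {
private final String street;
protected Address() { this.street = null; }
public Address(final String street) { this.street = street; }
public static AddressBuilder addressBuilder() { return new AddressBuilder(); }
public static class AddressBuilder {
private String street;
public AddressBuilder withStreet(final String street) { this.street = street; return this; }
public Address build() { return new Address(street); }
}
}
*/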
|
package com.github.sbugat.rundeck.plugins;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.dtolabs.rundeck.core.plugins.Plugin;
import com.dtolabs.rundeck.plugins.ServiceNameConstants;
import com.dtolabs.rundeck.plugins.descriptions.PluginDescription;
import com.dtolabs.rundeck.plugins.descriptions.PluginProperty;
import com.dtolabs.rundeck.plugins.notification.NotificationPlugin;
/**
* Rundeck slack plugin class.
*
* @author Sylvain Bugat
*
*/
@Plugin(service = ServiceNameConstants.Notification, name = "SlackNotificationPlugin")
@PluginDescription(title = "Slack")
public class SlackPlugin implements NotificationPlugin {
static final String SLACK_SUCCESS_COLOR = "good";
static final String SLACK_FAILED_COLOR = "danger";
private final Logger logger = Logger.getLogger(SlackPlugin.class.getName());
@PluginProperty(title = "Incoming WebHook URL", description = "Slack incoming WebHook URL", required = true)
private String slackIncomingWebHookUrl;
@PluginProperty(title = "WebHook channel", description = "Override default WebHook channel (#channel")
private String slackOverrideDefaultWebHookChannel;
@PluginProperty(title = "WebHook name", description = "Override default WebHook name")
private String slackOverrideDefaultWebHookName;
@PluginProperty(title = "WebHook emoji", description = "Override default WebHook icon (:emoji:)")
private String slackOverrideDefaultWebHookEmoji;
private URLTools uRLTools = new URLTools();
@Override
public boolean postNotification(final String trigger, @SuppressWarnings("rawtypes") final Map executionData, @SuppressWarnings("rawtypes") final Map config) {
@SuppressWarnings("unchecked")
final Map<String, String> jobMap = (Map<String, String>) executionData.get("job");
final String jobName;
if (null != jobMap) {
jobName = jobMap.get("name");
} else {
jobName = null;
}
logger.log(Level.FINE, "Start to send Slack notification to WebHook URL {0} for the job {1} with trigger {2}", new Object[] { slackIncomingWebHookUrl, jobName, trigger });
HttpURLConnection connection = null;
try {
// Prepare the connection to Slack
connection = uRLTools.openURLConnection(slackIncomingWebHookUrl);
connection.setRequestMethod("POST");
connection.setRequestProperty("charset", StandardCharsets.UTF_8.name());
connection.setUseCaches(false);
connection.setDoInput(true);
connection.setDoOutput(true);
// Send the WebHook message
final String messagePayload = getMessage(trigger, executionData);
try (final DataOutputStream dataOutputStream = new DataOutputStream(connection.getOutputStream())) {
dataOutputStream.writeBytes("payload=" + URLEncoder.encode(messagePayload, StandardCharsets.UTF_8.name()));
}
// Get the HTTP response code
final int httpResponseCode = connection.getResponseCode();
if (HttpURLConnection.HTTP_OK != httpResponseCode) {
if (HttpURLConnection.HTTP_NOT_FOUND == httpResponseCode) {
logger.log(Level.SEVERE, "Invalid Slack WebHook URL {0} when sending {1} job notification with trigger {2}", new Object[] { slackIncomingWebHookUrl, jobName, trigger });
} else {
logger.log(Level.SEVERE, "Error sending {0} job notification with trigger {1}, http code: {2}", new Object[] { jobName, trigger, httpResponseCode });
logger.log(Level.SEVERE, "Error sending {0} job notification with trigger {1}, http code: {2}, payload:{3}", new Object[] { jobName, trigger, httpResponseCode, messagePayload });
}
return false;
}
} catch (final MalformedURLException e) {
logger.log(Level.SEVERE, "Malformed Slack WebHook URL {0} when sending {1} job notification with trigger {2}", new Object[] { slackIncomingWebHookUrl, jobName, trigger });
return false;
} catch (final IOException e) {
logger.log(Level.SEVERE, e.getMessage());
logger.log(Level.FINE, e.getMessage(), e);
return false;
} finally {
if (null != connection) {
connection.disconnect();
}
}
return true;
}
/**
* Return the complete message to send.
*
* @param trigger execution status
* @param executionData current execution state
* @return complete message
*/
private String getMessage( final String trigger, @SuppressWarnings("rawtypes") final Map executionData ) {
final StringBuilder messageBuilder = new StringBuilder();
messageBuilder.append('{');
messageBuilder.append( getOptions() );
messageBuilder.append( getAttachmentsPart(trigger, executionData) );
messageBuilder.append('}');
return messageBuilder.toString();
}
/**
* Return a message with overridden options.
*
* @return optional message with channel, username and emoji to use
*/
private String getOptions() {
final StringBuilder stringBuilder = new StringBuilder();
if (null != slackOverrideDefaultWebHookChannel) {
stringBuilder.append("\"channel\":");
stringBuilder.append("\"" + slackOverrideDefaultWebHookChannel + "\",");
}
if (null != slackOverrideDefaultWebHookName) {
stringBuilder.append("\"username\":");
stringBuilder.append("\"" + slackOverrideDefaultWebHookName + "\",");
}
if (null != slackOverrideDefaultWebHookEmoji) {
stringBuilder.append("\"icon_emoji\":");
stringBuilder.append("\"" + slackOverrideDefaultWebHookEmoji + "\",");
}
return stringBuilder.toString();
}
/**
* Return a Slack message with the job execution data.
*
* @param trigger execution status
* @param executionData current execution state
*
* @return attachments part
*/
private static CharSequence getAttachmentsPart(final String trigger, @SuppressWarnings("rawtypes") final Map executionData) {
// Success and starting execution are good (green)
final String statusColor;
if ("success".equals(trigger) || "start".equals(trigger)) {
statusColor = SLACK_SUCCESS_COLOR;
} else {
statusColor = SLACK_FAILED_COLOR;
}
// Attachment begin and title
final StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("\"attachments\":[");
stringBuilder.append("{");
stringBuilder.append("\"title\":\"" + getTitlePart(executionData) + "\",");
stringBuilder.append("\"text\":\"" + getDurationPart(executionData) + getDownloadOptionPart(executionData) + "\",");
stringBuilder.append("\"color\":\"" + statusColor + "\"");
// Job options section
stringBuilder.append(getJobOptionsPart(executionData));
stringBuilder.append('}');
// Failed nodes section
stringBuilder.append(getFailedNodesAttachment(executionData, statusColor));
stringBuilder.append(']');
return stringBuilder;
}
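// Example of the attachments fragment assembled above (illustrative values only; the URL, job
// name, user and option are hypothetical):
// "attachments":[{"title":"<https://rundeck.example/project/execution/show/42|#42 - SUCCEEDED - MyJob> - ...",
// "text":"Launched by admin at 01/02/20 10:00 (duration: 1m05s)","color":"good",
// "fields":[{"title":"option1","value":"value1","short":true}]},
// { ...optional failed-nodes attachment appended by getFailedNodesAttachment... }]
// getMessage(...) wraps this fragment, together with getOptions(), in a single JSON object.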
private static CharSequence getDownloadOptionPart(@SuppressWarnings("rawtypes") final Map executionData) {
final StringBuilder downloadOptionBuilder = new StringBuilder();
// Context map containing additional information
@SuppressWarnings("unchecked")
final Map<String, Map<String, String>> contextMap = (Map<String, Map<String, String>>) executionData.get("context");
if (null == contextMap) {
return downloadOptionBuilder;
}
final Map<String, String> jobContextMap = contextMap.get("job");
// Download link if the job fails
boolean download = false;
if (!"running".equals(executionData.get("status")) && !"success".equals(executionData.get("status"))) {
downloadOptionBuilder.append("\n<" + jobContextMap.get("serverUrl") + "/" + executionData.get("project") + "/execution/downloadOutput/" + executionData.get("id") + "|Download log ouput>");
download = true;
}
final Map<String, String> optionContextMap = contextMap.get("option");
// Option header
if (null != optionContextMap && !optionContextMap.isEmpty()) {
if (!download) {
downloadOptionBuilder.append("\nJob options:");
} else {
downloadOptionBuilder.append(", job options:");
}
}
return downloadOptionBuilder;
}
private static CharSequence getDurationPart(@SuppressWarnings("rawtypes") final Map executionData) {
final StringBuilder durationBuilder = new StringBuilder();
final Long startTime = (Long) executionData.get("dateStartedUnixtime");
if (null == startTime) {
return durationBuilder;
}
final DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, Locale.getDefault());
durationBuilder.append("Launched by ");
durationBuilder.append(executionData.get("user"));
durationBuilder.append(" at ");
durationBuilder.append(dateFormat.format(new Date(startTime.longValue())));
if ("aborted".equals(executionData.get("status")) && null != executionData.get("abortedby")) {
durationBuilder.append(executionData.get("status"));
durationBuilder.append(" by ");
durationBuilder.append(executionData.get("abortedby"));
}
if (!"running".equals(executionData.get("status"))) {
if ("timedout".equals(executionData.get("status"))) {
durationBuilder.append(", timed-out");
} else {
durationBuilder.append(", ended");
}
if (null != executionData.get("dateEndedUnixtime")) {
final long endTime = ((Long) executionData.get("dateEndedUnixtime")).longValue();
durationBuilder.append(" at ");
durationBuilder.append(dateFormat.format(new Date(endTime)));
durationBuilder.append(" (duration: ");
durationBuilder.append(formatDuration(endTime - startTime));
durationBuilder.append(')');
}
}
return durationBuilder;
}
private static CharSequence getTitlePart(@SuppressWarnings("rawtypes") final Map executionData) {
final StringBuilder titleBuilder = new StringBuilder();
@SuppressWarnings("unchecked")
final Map<String, String> jobMap = (Map<String, String>) executionData.get("job");
if (null == jobMap) {
return titleBuilder;
}
// Context map containing additional information
@SuppressWarnings("unchecked")
final Map<String, Map<String, String>> contextMap = (Map<String, Map<String, String>>) executionData.get("context");
if (null == contextMap) {
return titleBuilder;
}
final Map<String, String> jobContextMap = contextMap.get("job");
if (null == jobContextMap) {
return titleBuilder;
}
titleBuilder.append('<');
titleBuilder.append(executionData.get("href"));
titleBuilder.append("|
titleBuilder.append(executionData.get("id"));
titleBuilder.append(" - ");
final String status;
if (null != executionData.get("status")) {
status = ((String) executionData.get("status")).toUpperCase();
} else {
status = null;
}
titleBuilder.append(status);
if ("aborted".equals(executionData.get("status")) && null != executionData.get("abortedby")) {
titleBuilder.append(" by ");
titleBuilder.append(executionData.get("abortedby"));
}
titleBuilder.append(" - ");
titleBuilder.append(jobMap.get("name"));
titleBuilder.append("> - <");
titleBuilder.append(jobContextMap.get("serverUrl"));
titleBuilder.append('/');
titleBuilder.append(executionData.get("project"));
titleBuilder.append('|');
titleBuilder.append(executionData.get("project"));
titleBuilder.append("> - ");
if (null != jobMap.get("group") && !jobMap.get("group").isEmpty()) {
final StringBuilder rootGroups = new StringBuilder();
for (final String group : jobMap.get("group").split("/")) {
rootGroups.append('/');
rootGroups.append(group);
titleBuilder.append('<');
titleBuilder.append(jobContextMap.get("serverUrl"));
titleBuilder.append('/');
titleBuilder.append(executionData.get("project"));
titleBuilder.append("/jobs");
titleBuilder.append(rootGroups);
titleBuilder.append('|');
titleBuilder.append(group);
titleBuilder.append(">/");
}
}
titleBuilder.append('<');
titleBuilder.append(jobMap.get("href"));
titleBuilder.append('|');
titleBuilder.append(jobMap.get("name"));
titleBuilder.append('>');
return titleBuilder;
}
private static CharSequence getJobOptionsPart(@SuppressWarnings("rawtypes") final Map executionData) {
final StringBuilder messageBuilder = new StringBuilder();
// Context map containing additional information
@SuppressWarnings("unchecked")
final Map<String, Map<String, String>> contextMap = (Map<String, Map<String, String>>) executionData.get("context");
if (null == contextMap) {
return messageBuilder;
}
final Map<String, String> optionContextMap = contextMap.get("option");
final Map<String, String> secureOptionContextMap = contextMap.get("secureOption");
// Options part, secure options values are not displayed
if (null != optionContextMap && !optionContextMap.isEmpty()) {
messageBuilder.append(",\"fields\":[");
boolean firstOption = true;
for (final Map.Entry<String, String> mapEntry : optionContextMap.entrySet()) {
if (!firstOption) {
messageBuilder.append(',');
}
messageBuilder.append("{");
messageBuilder.append("\"title\":\"" + mapEntry.getKey() + "\",");
messageBuilder.append("\"value\":\"");
if (null != secureOptionContextMap && null != secureOptionContextMap.get(mapEntry.getKey())) {
messageBuilder.append("***********");
} else {
messageBuilder.append(mapEntry.getValue());
}
messageBuilder.append("\",");
messageBuilder.append("\"short\":true");
messageBuilder.append("}");
firstOption = false;
}
messageBuilder.append("]");
}
return messageBuilder;
}
/**
* Construct the failed nodes section.
*
* @param executionData current execution state
* @param statusColor status color to display
* @return char sequence containing the formatted section
*/
private static CharSequence getFailedNodesAttachment(@SuppressWarnings("rawtypes") final Map executionData, final String statusColor) {
final StringBuilder messageBuilder = new StringBuilder();
@SuppressWarnings("unchecked")
final List<String> failedNodeList = (List<String>) executionData.get("failedNodeList");
@SuppressWarnings("unchecked")
final Map<String, Integer> nodeStatus = (Map<String, Integer>) executionData.get("nodestatus");
final int totalNodes;
if (null != nodeStatus && null != nodeStatus.get("total")) {
totalNodes = nodeStatus.get("total").intValue();
} else {
totalNodes = 0;
}
// Failed-nodes section, added only when at least one node failed and more than one node was executed
if (null != failedNodeList && !failedNodeList.isEmpty() && totalNodes > 1) {
messageBuilder.append(",{");
messageBuilder.append("\"fallback\":\"Failed nodes list\",");
messageBuilder.append("\"text\":\"Failed nodes:\",");
messageBuilder.append("\"color\":\"");
messageBuilder.append(statusColor);
messageBuilder.append("\",");
messageBuilder.append("\"fields\":[");
// Format a list with all failed nodes
boolean firstNode = true;
for (final String failedNode : failedNodeList) {
if (!firstNode) {
messageBuilder.append(',');
}
messageBuilder.append("{");
messageBuilder.append("\"title\":\"");
messageBuilder.append(failedNode);
messageBuilder.append("\",");
messageBuilder.append("\"short\":true");
messageBuilder.append("}");
firstNode = false;
}
messageBuilder.append(']');
messageBuilder.append('}');
}
return messageBuilder;
}
/**
* Format a millisecond duration to a readable formatted String.
*
* @param milliseconds a positive duration in milliseconds to convert
* @return A string of the form "XdYh" or "XhYm" or "XmYs" or "Xs".
*/
public static CharSequence formatDuration(final long milliseconds) {
long millisecondsReminder = milliseconds;
final long days = TimeUnit.MILLISECONDS.toDays(millisecondsReminder);
if (days > 0) {
millisecondsReminder -= TimeUnit.DAYS.toMillis(days);
final long hours = TimeUnit.MILLISECONDS.toHours(millisecondsReminder);
return String.format("%dd%02dh", Long.valueOf(days), Long.valueOf(hours));
}
final long hours = TimeUnit.MILLISECONDS.toHours(millisecondsReminder);
if (hours > 0) {
millisecondsReminder -= TimeUnit.HOURS.toMillis(hours);
final long minutes = TimeUnit.MILLISECONDS.toMinutes(millisecondsReminder);
return String.format("%dh%02dm", Long.valueOf(hours), Long.valueOf(minutes));
}
final long minutes = TimeUnit.MILLISECONDS.toMinutes(millisecondsReminder);
if (minutes > 0) {
millisecondsReminder -= TimeUnit.MINUTES.toMillis(minutes);
final Long seconds = Long.valueOf(TimeUnit.MILLISECONDS.toSeconds(millisecondsReminder));
return String.format("%dm%02ds", Long.valueOf(minutes), seconds);
}
final Long seconds = Long.valueOf(TimeUnit.MILLISECONDS.toSeconds(millisecondsReminder));
return String.format("%ds", seconds);
}
}
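// Illustration only (not part of the original plugin): a tiny driver that exercises the public
// formatDuration helper to show the output forms documented on that method.
class SlackPluginFormatDurationExample {
public static void main(final String[] args) {
System.out.println(SlackPlugin.formatDuration(42_000L)); // "42s"
System.out.println(SlackPlugin.formatDuration(125_000L)); // "2m05s"
System.out.println(SlackPlugin.formatDuration(11_220_000L)); // "3h07m"
System.out.println(SlackPlugin.formatDuration(93_600_000L)); // "1d02h"
}
}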
|
package com.gmail.nossr50.commands.admin;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import com.gmail.nossr50.datatypes.McMMOPlayer;
import com.gmail.nossr50.datatypes.PlayerProfile;
import com.gmail.nossr50.locale.LocaleLoader;
import com.gmail.nossr50.util.Permissions;
import com.gmail.nossr50.util.Users;
public class McgodCommand implements CommandExecutor {
@Override
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
PlayerProfile profile;
switch (args.length) {
case 0:
if (!(sender instanceof Player)) {
return false;
}
profile = Users.getPlayer((Player) sender).getProfile();
if (profile == null) {
sender.sendMessage(LocaleLoader.getString("Commands.DoesNotExist"));
return true;
}
if (profile.getGodMode()) {
sender.sendMessage(LocaleLoader.getString("Commands.GodMode.Disabled"));
}
else {
sender.sendMessage(LocaleLoader.getString("Commands.GodMode.Enabled"));
}
profile.toggleGodMode();
return true;
case 1:
if (!Permissions.hasPermission(sender, "mcmmo.commands.mcgod.others")) {
sender.sendMessage(command.getPermissionMessage());
return true;
}
McMMOPlayer mcMMOPlayer = Users.getPlayer(args[0]);
// If the mcMMOPlayer doesn't exist, create a temporary profile and
// check if it's present in the database. If it's not, abort the process.
if (mcMMOPlayer == null) {
profile = new PlayerProfile(args[0], false);
if (!profile.isLoaded()) {
sender.sendMessage(LocaleLoader.getString("Commands.DoesNotExist"));
return true;
}
}
else {
profile = mcMMOPlayer.getProfile();
Player player = mcMMOPlayer.getPlayer();
// Check if the player is online before we try to send them a message.
if (player.isOnline()) {
if (profile.getGodMode()) {
player.sendMessage(LocaleLoader.getString("Commands.GodMode.Disabled"));
}
else {
player.sendMessage(LocaleLoader.getString("Commands.GodMode.Enabled"));
}
}
}
profile.toggleGodMode();
return true;
default:
return false;
}
}
}
|
package com.jenjinstudios.io.concurrency;
import com.jenjinstudios.io.ExecutionContext;
import com.jenjinstudios.io.Message;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.function.Consumer;
/**
* Queues incoming and outgoing messages in a thread-safe manner.
*
* @author Caleb Brinkman
*/
public class MessageQueue<T extends ExecutionContext>
{
private final Collection<Consumer<T>> recurringTasks = new LinkedList<>();
private final Collection<Message> incoming = new LinkedList<>();
private final Collection<Message> outgoing = new LinkedList<>();
private final Collection<Throwable> errors = new LinkedList<>();
/**
* Indicate that a message has been received and add it to the incoming queue.
*
* @param message The message that has been received.
*/
public void messageReceived(Message message) {
if (message != null) {
synchronized (incoming) {
incoming.add(message);
}
}
}
/**
* Get all messages that have been received since the last time this method was called, and clear the internal
* list.
*
* @return A List of messages that have been received since the last time this method was called.
*/
public List<Message> getIncomingAndClear() {
List<Message> temp;
synchronized (incoming) {
temp = new LinkedList<>(incoming);
incoming.clear();
}
return temp;
}
/**
* Queue an outgoing message.
*
* @param message The message to be queued.
*/
public void queueOutgoing(Message message) {
synchronized (outgoing) {
outgoing.add(message);
}
}
/**
* Get all messages that have been queued since the last time this method was called, and clear the internal
* list.
*
* @return A List of messages that have been queued since the last time this method was called.
*/
public List<Message> getOutgoingAndClear() {
List<Message> temp;
synchronized (outgoing) {
temp = new LinkedList<>(outgoing);
outgoing.clear();
}
return temp;
}
/**
* Indicate that an exception has occurred and store the exception in the queue.
*
* @param throwable The exception.
*/
public void errorEncountered(Throwable throwable) {
synchronized (errors) {
errors.add(throwable);
}
}
/**
* Get any errors reported to this queue and clear the internal list.
*
* @return A List of errors reported to this queue.
*/
public List<Throwable> getErrorsAndClear() {
List<Throwable> temp;
synchronized (errors) {
temp = new LinkedList<>(errors);
errors.clear();
}
return temp;
}
public Collection<Consumer<T>> getRecurringTasks() { return Collections.unmodifiableCollection(recurringTasks); }
}
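// Usage sketch (added for illustration; not part of the original source). It relies only on the
// methods declared above; the concrete ExecutionContext subtype is left generic.
class MessageQueueUsageExample
{
static <C extends ExecutionContext> void drainOnce(final MessageQueue<C> queue) {
// Drain all messages received since the last call; the queue clears its internal list under the same lock.
for (final Message message : queue.getIncomingAndClear()) {
// dispatch the message to its handler here
}
// Surface any errors collected by reader or writer threads since the last drain.
for (final Throwable error : queue.getErrorsAndClear()) {
error.printStackTrace();
}
}
}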
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.microsoft.graph.tasks;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.core.IBaseClient;
import com.microsoft.graph.options.Option;
import java.io.IOException;
import java.io.InputStream;
import java.security.InvalidParameterException;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
/**
* ChunkedUpload service provider
*
* @param <UploadType> the upload item type
*/
public class LargeFileUploadTask<UploadType> {
/**
* The default chunk size for upload. Currently set to 5 MiB.
*/
private static final int DEFAULT_CHUNK_SIZE = 5 * 1024 * 1024;
/**
* The required chunk size increment by OneDrive service, which is 320 KiB
*/
private static final int REQUIRED_CHUNK_SIZE_INCREMENT = 320 * 1024;
/**
* The maximum chunk size for a single upload allowed by OneDrive service.
* Currently the value is 60 MiB.
*/
private static final int MAXIMUM_CHUNK_SIZE = 60 * 1024 * 1024;
/**
* The client
*/
private final IBaseClient<?> client;
/**
* The input stream
*/
private final InputStream inputStream;
/**
* The upload session URL
*/
private final String uploadUrl;
/**
* The stream size
*/
private final long streamSize;
/**
* The upload response handler
*/
private final LargeFileUploadResponseHandler<UploadType> responseHandler;
/**
* The counter for how many bytes have been read from input stream
*/
private long readSoFar;
/**
* Creates the ChunkedUploadProvider
*
* @param uploadSession the initial upload session
* @param client the Graph client
* @param inputStream the input stream
* @param streamSize the stream size
* @param uploadTypeClass the upload type class
*/
public LargeFileUploadTask(@Nonnull final IUploadSession uploadSession,
@Nonnull final IBaseClient<?> client,
@Nonnull final InputStream inputStream,
final long streamSize,
@Nonnull final Class<UploadType> uploadTypeClass) {
Objects.requireNonNull(uploadSession, "Upload session is null.");
if (streamSize <= 0) {
throw new InvalidParameterException("Stream size should larger than 0.");
}
this.client = Objects.requireNonNull(client, "Graph client is null.");
this.readSoFar = 0;
this.inputStream = Objects.requireNonNull(inputStream, "Input stream is null.");
this.streamSize = streamSize;
this.uploadUrl = uploadSession.getUploadUrl();
this.responseHandler = new LargeFileUploadResponseHandler<UploadType>(uploadTypeClass, uploadSession.getClass());
}
/**
* Uploads content to remote upload session based on the input stream
*
* @param options the upload options
* @param chunkSize the customized chunk size
* @param progressCallback the callback for upload progress
* @return a future with the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public CompletableFuture<LargeFileUploadResult<UploadType>> uploadAsync(@Nullable final int chunkSize, @Nullable final List<Option> options, @Nullable final IProgressCallback progressCallback)
throws IOException {
int internalChunkSize = chunkSize;
if (internalChunkSize == 0) {
internalChunkSize = DEFAULT_CHUNK_SIZE;
}
if (internalChunkSize % REQUIRED_CHUNK_SIZE_INCREMENT != 0) {
throw new IllegalArgumentException("Chunk size must be a multiple of 320 KiB");
}
if (internalChunkSize > MAXIMUM_CHUNK_SIZE) {
throw new IllegalArgumentException("Please set chunk size smaller than 60 MiB");
}
byte[] buffer = new byte[internalChunkSize];
while (this.readSoFar < this.streamSize) {
int buffRead = 0;
// inner loop is to work-around the case where read buffer size is limited to less than chunk size by a global setting
while (buffRead < internalChunkSize) {
int read = 0;
read = this.inputStream.read(buffer, buffRead, internalChunkSize - buffRead);
if (read == -1) {
break;
}
buffRead += read;
}
final LargeFileUploadRequest<UploadType> request =
new LargeFileUploadRequest<>(this.uploadUrl, this.client, options, buffer, buffRead,
this.readSoFar, this.streamSize);
final LargeFileUploadResponse<UploadType> response = request.upload(this.responseHandler);
// TODO: upload should return a future, use sendfuture instead and the futures should be chained with completableFuture.then apply
if (response.uploadCompleted()) {
if(progressCallback != null) {
progressCallback.progress(this.streamSize, this.streamSize);
}
final LargeFileUploadResult<UploadType> result = new LargeFileUploadResult<UploadType>();
if (response.getItem() != null) {
result.responseBody = response.getItem();
}
if (response.getLocation() != null) {
result.location = response.getLocation();
}
return completedFuture(result);
} else if (response.chunkCompleted()) {
if(progressCallback != null) {
progressCallback.progress(this.readSoFar, this.streamSize);
}
} else if (response.hasError()) {
return failedFuture(response.getError());
}
this.readSoFar += buffRead;
}
return failedFuture(new ClientException("Upload did not complete", null));
}
private CompletableFuture<LargeFileUploadResult<UploadType>> completedFuture(final LargeFileUploadResult<UploadType> result) { // CompletableFuture.completedFuture(result.getItem()); missing on android
final CompletableFuture<LargeFileUploadResult<UploadType>> fut = new CompletableFuture<LargeFileUploadResult<UploadType>>();
fut.complete(result);
return fut;
}
private CompletableFuture<LargeFileUploadResult<UploadType>> failedFuture(ClientException ex) { // CompletableFuture.failedFuture not available on android
final CompletableFuture<LargeFileUploadResult<UploadType>> fut = new CompletableFuture<LargeFileUploadResult<UploadType>>();
fut.completeExceptionally(ex);
return fut;
}
/**
* Uploads content to remote upload session based on the input stream
*
* @return a future with the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public CompletableFuture<LargeFileUploadResult<UploadType>> uploadAsync()
throws IOException {
return uploadAsync(0);
}
/**
* Uploads content to remote upload session based on the input stream
*
* @param chunkSize the customized chunk size
* @return a future with the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public CompletableFuture<LargeFileUploadResult<UploadType>> uploadAsync(@Nullable final int chunkSize)
throws IOException {
return uploadAsync(chunkSize, null);
}
/**
* Uploads content to remote upload session based on the input stream
*
* @param chunkSize the customized chunk size
* @param options the upload options
* @return a future with the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public CompletableFuture<LargeFileUploadResult<UploadType>> uploadAsync(final int chunkSize, @Nullable final List<Option> options)
throws IOException {
return uploadAsync(chunkSize, options, null);
}
/**
* Uploads content to remote upload session based on the input stream
*
* @param chunkSize the customized chunk size
* @param options the upload options
* @param progressCallback the callback for upload progress
* @return the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public LargeFileUploadResult<UploadType> upload(final int chunkSize, @Nullable final List<Option> options, @Nullable final IProgressCallback progressCallback)
throws IOException {
try {
return uploadAsync(chunkSize, options, progressCallback).get();
} catch (InterruptedException ex) {
throw new ClientException("The request was interrupted", ex);
} catch (ExecutionException ex) {
throw new ClientException("Error while executing the request", ex);
}
}
/**
* Uploads content to remote upload session based on the input stream
*
* @param chunkSize the customized chunk size
* @param options the upload options
* @return the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public LargeFileUploadResult<UploadType> upload(final int chunkSize, @Nullable final List<Option> options)
throws IOException {
return upload(chunkSize, options, null);
}
/**
* Uploads content to remote upload session based on the input stream
*
* @param chunkSize the customized chunk size
* @return the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public LargeFileUploadResult<UploadType> upload(final int chunkSize)
throws IOException {
return upload(chunkSize, null);
}
/**
* Uploads content to remote upload session based on the input stream
*
* @return the result
* @throws IOException the IO exception that occurred during upload
*/
@Nonnull
public LargeFileUploadResult<UploadType> upload()
throws IOException {
return upload(0);
}
}
|
package com.minimajack.v8.code.impl;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.minimajack.v8.code.ProjectTreeSearcher;
import com.minimajack.v8.metadata.external.V8MetaData;
import com.minimajack.v8.metadata.external.V8MetaDataDescription;
import com.minimajack.v8.metadata.external.attributes.AttributesSection;
import com.minimajack.v8.metadata.external.forms.FormDescription;
import com.minimajack.v8.metadata.external.forms.FormsSection;
import com.minimajack.v8.metadata.external.qualifier.Qualifiers;
import com.minimajack.v8.metadata.external.qualifier.QualityTransformer;
import com.minimajack.v8.metadata.external.tabularsection.TabularSections;
import com.minimajack.v8.metadata.external.template.TemplateDescription;
import com.minimajack.v8.metadata.external.template.TemplateSections;
import com.minimajack.v8.metadata.external.transformer.MetadataSection;
import com.minimajack.v8.metadata.external.transformer.SectionTransformer;
import com.minimajack.v8.metadata.external.type.Types;
import com.minimajack.v8.metadata.external.type.TypesTransformer;
import com.minimajack.v8.metadata.root.V8Root;
import com.minimajack.v8.project.FileType;
import com.minimajack.v8.project.Project;
import com.minimajack.v8.project.ProjectTree;
import com.minimajack.v8.utility.V8Reader;
public class MetadataProcessor
extends ProjectTreeSearcher
{
private Path path;
final Logger logger = LoggerFactory.getLogger( MetadataProcessor.class );
private static final UUID EXTERNAL_DATA_PROCESSOR = UUID.fromString( "c3831ec8-d8d5-4f93-8a22-f9bfae07327f" );
private static final String TEMPLATES_PATH = "Templates";
private static final String FORM_PATH = "Forms";
private static final String METADATA_FILE = "metadata.mdo";
public MetadataProcessor( Path path )
{
super( path );
this.path = path;
}
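// Instance initializer: runs on every MetadataProcessor construction and (re-)registers the external metadata transformers with V8Reader.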
{
new V8Reader();
V8Reader.init();
V8Reader.registerTransformer( MetadataSection.class, new SectionTransformer() );
V8Reader.registerTransformer( Qualifiers.class, new QualityTransformer() );
V8Reader.registerTransformer( Types.class, new TypesTransformer() );
}
@Override
public ProjectTree process( ProjectTree tree )
{
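// The root container references the top-level metadata entry by GUID; read both to find the external data processor descriptions.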
V8Root root = V8Reader.read( V8Root.class, getFileBuffer( tree, "root" ) );
V8MetaData md = V8Reader.read( V8MetaData.class, getFileBuffer( tree, root.guid.toString() ) );
for ( V8MetaDataDescription v8MD : md.mdd )
{
if ( v8MD.type.equals( EXTERNAL_DATA_PROCESSOR ) )
{
processExternalDataProcessor( tree, v8MD );
}
}
return tree;
}
private void processExternalDataProcessor( ProjectTree tree, V8MetaDataDescription v8MD )
{
for ( MetadataSection section : v8MD.innerType.sections )
{
if ( section instanceof FormsSection )
{
logger.debug( "FormSections size: {}", ( (FormsSection) section ).forms.size() );
processForms( tree, (FormsSection) section );
}
else if ( section instanceof TabularSections )
{
logger.debug( "TabularSections size: {}", ( (TabularSections) section ).tabularSections.size() );
}
else if ( section instanceof TemplateSections )
{
logger.debug( "TemplateSection size: {}", ( (TemplateSections) section ).templates.size() );
processTemplates( tree, (TemplateSections) section );
}
else if ( section instanceof AttributesSection )
{
logger.debug( "Attributes size: {}", ( (AttributesSection) section ).descr.size() );
}
else
{
logger.warn( "Not implement section {}", section.getClass() );
}
}
}
private void processTemplates( ProjectTree tree, TemplateSections templateSection )
{
for ( UUID template : templateSection.templates )
{
String templateUUID = template.toString();
TemplateDescription description = getTemplateDescription( tree, templateUUID );
String destinationDir = path.toString() + File.separator + Project.SRC_PATH + File.separator
+ TEMPLATES_PATH + File.separator + description.templateInnerDescription.msn.name + File.separator;
moveToFolder( tree, templateUUID, destinationDir + METADATA_FILE );
moveLinkedContainerToFolder( tree, templateUUID + ".0", destinationDir );
}
}
private void processForms( ProjectTree tree, FormsSection formSection )
{
for ( UUID form : formSection.forms )
{
String formUUID = form.toString();
FormDescription description = getFormDescription( tree, formUUID );
String destinationDir = path.toString() + File.separator + Project.SRC_PATH + File.separator + FORM_PATH
+ File.separator + description.formInnerDescription.md.ffmd.v8mn.name + File.separator;
moveToFolder( tree, formUUID, destinationDir + METADATA_FILE );
moveLinkedContainerToFolder( tree, formUUID + ".0", destinationDir );
}
}
private TemplateDescription getTemplateDescription( ProjectTree tree, String template )
{
TemplateDescription description = null;
try
{
description = V8Reader.read( TemplateDescription.class, getFileBuffer( tree, template ) );
}
catch ( Exception e )
{
logger.warn( "Error while parsing template {}", template );
}
return description;
}
private FormDescription getFormDescription( ProjectTree tree, String form )
{
FormDescription description = null;
try
{
description = V8Reader.read( FormDescription.class, getFileBuffer( tree, form ) );
}
catch ( Exception e )
{
logger.warn( "Error while parsing form {}", form );
}
return description;
}
private void moveLinkedContainerToFolder( ProjectTree tree, String name, String dest )
{
ProjectTree pt = this.findFileByName( tree, name );
if ( pt != null && pt.type != FileType.ERROR )
{
if ( pt.type.equals( FileType.CONTAINER ) )
{
Path p = pt.getRawPath();
String destination = path.relativize( Paths.get( dest ) ).toString();
for ( ProjectTree child : pt.child )
{
Path simplename = p.relativize( child.getRawPath() );
Path absolute = Paths.get( this.path.toString() + File.separator + destination + File.separator
+ simplename.toString() );
moveToFolder( child, child.name, absolute.toString() );
}
Paths.get( path.toAbsolutePath() + File.separator + pt.getRawPath().toString() ).toFile().delete();
pt.setPath( destination );
}
else if ( pt.type.equals( FileType.FILE ) )
{
moveToFolder( tree, name, dest + File.separator + pt.getName() );
}
}
}
private void moveToFolder( ProjectTree tree, String name, String dest )
{
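// Note: unlike moveLinkedContainerToFolder, this method assumes the entry exists in the tree; a null result from findFileByName would fail here.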
ProjectTree pt = this.findFileByName( tree, name );
Path p = Paths.get( path.toString() + File.separator + pt.getPath() );
File file = p.toFile();
File destName = new File( dest );
destName.getParentFile().mkdirs();
if ( destName.exists() )
{
destName.delete();
}
if ( file.renameTo( destName ) )
{
pt.setPath( path.relativize( destName.toPath().toAbsolutePath() ).toString() );
}
else
{
logger.warn( "Can't move {} to {} ", p, dest );
}
}
}
|
package com.mysterioustrousers.firehose;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import com.google.gson.annotations.SerializedName;
import org.apache.commons.lang3.StringUtils;
public class AgentSettings extends FHObject {
@SerializedName("seconds_from_utc")
private int _availabilitySecondsFromUTC;
@SerializedName("time_zone_name")
private String _availabilityTimeZoneId;
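// Note: the DND (do-not-disturb) window appears to be the inverse of the availability window, so dnd_end_hour_utc maps to the availability start hour and dnd_start_hour_utc to the availability end hour.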
@SerializedName("dnd_end_hour_utc")
private int _availabilityStartHourUTC;
@SerializedName("dnd_start_hour_utc")
private int _availabilityEndHourUTC;
@SerializedName("dnd_is_manually_turned_on")
private boolean _manuallyUnavailable;
@SerializedName("digest_days")
private List<Integer> _digestDays; // TODO: private List<DayOfWeek> _digestDays;
public AgentSettings() {
super();
this.setAvailabilitySecondsFromUTC(0);
this.setAvailabilityTimeZoneId(null);
this.setAvailabilityStartHourUTC(0);
this.setAvailabilityEndHourUTC(0);
this.setManuallyUnavailable(false);
this.setDigestDays(new ArrayList<Integer>());
}
// region Getters & Setters
public int getAvailabilityStartHourForTimeZone() {
int ret = this.getAvailabilityStartHourUTC() + this.getAvailabilityHoursFromUTC();
if (ret >= 24) {
ret -= 24;
} else if (ret < 0) {
ret += 24;
}
return ret;
}
public void setAvailabilityStartHourForTimeZone(int startHour) {
int newHour = startHour - this.getAvailabilityHoursFromUTC();
if (newHour >= 24) {
newHour -= 24;
} else if (newHour < 0) {
newHour += 24;
}
_availabilityStartHourUTC = newHour;
}
public int getAvailabilityStartHourUTC() {
return _availabilityStartHourUTC;
}
public void setAvailabilityStartHourUTC(int startHourUTC) {
_availabilityStartHourUTC = startHourUTC;
}
public int getAvailabilityEndHourForTimeZone() {
int ret = this.getAvailabilityEndHourUTC() + this.getAvailabilityHoursFromUTC();
if (ret >= 24) {
ret -= 24;
} else if (ret < 0) {
ret += 24;
}
return ret;
}
public void setAvailabilityEndHourForTimeZone(int endHour) {
int newHour = endHour - this.getAvailabilityHoursFromUTC();
if (newHour >= 24) {
newHour -= 24;
} else if (newHour < 0) {
newHour += 24;
}
_availabilityEndHourUTC = newHour;
}
public int getAvailabilityEndHourUTC() {
return _availabilityEndHourUTC;
}
public void setAvailabilityEndHourUTC(int endHourUTC) {
_availabilityEndHourUTC = endHourUTC;
}
public boolean isManuallyUnavailable() {
return _manuallyUnavailable;
}
public void setManuallyUnavailable(boolean manuallyUnavailable) {
_manuallyUnavailable = manuallyUnavailable;
}
public int getAvailabilitySecondsFromUTC() {
return _availabilitySecondsFromUTC;
}
public void setAvailabilitySecondsFromUTC(int secondsFromUTC) {
_availabilitySecondsFromUTC = secondsFromUTC;
}
public int getAvailabilityMinutesFromUTC() {
return this.getAvailabilitySecondsFromUTC() / 60;
}
public int getAvailabilityHoursFromUTC() {
return this.getAvailabilityMinutesFromUTC() / 60;
}
public String getAvailabilityTimeZoneId() {
return StringUtils.defaultIfBlank(_availabilityTimeZoneId, Calendar.getInstance().getTimeZone().getID());
}
public void setAvailabilityTimeZoneId(String timeZoneId) {
_availabilityTimeZoneId = timeZoneId;
Calendar utcCal = Calendar.getInstance();
utcCal.setTimeZone(TimeZone.getTimeZone("UTC"));
long milliseconds = StringUtils.isBlank(_availabilityTimeZoneId) ? 0 : TimeZone.getTimeZone(_availabilityTimeZoneId).getOffset(utcCal.getTimeInMillis());
this.setAvailabilitySecondsFromUTC((int)(milliseconds / 1000));
}
public boolean isAvailable() {
if (this.isManuallyUnavailable() || this.getAvailabilityStartHourForTimeZone() == this.getAvailabilityEndHourForTimeZone()) {
return false;
}
Calendar cal = Calendar.getInstance();
cal.setTimeZone(TimeZone.getTimeZone(this.getAvailabilityTimeZoneId()));
int currentHour = cal.get(Calendar.HOUR_OF_DAY);
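// When the start hour is later than the end hour, the availability window is intended to wrap past midnight.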
if (this.getAvailabilityStartHourForTimeZone() > this.getAvailabilityEndHourForTimeZone()) {
return currentHour < this.getAvailabilityStartHourForTimeZone() || currentHour > this.getAvailabilityEndHourForTimeZone();
}
return currentHour > this.getAvailabilityStartHourForTimeZone() && currentHour < this.getAvailabilityEndHourForTimeZone();
}
public List<Integer> getDigestDays() {
return _digestDays;
}
public void setDigestDays(List<Integer> digestDays) {
_digestDays = digestDays;
}
/*
public List<DayOfWeek> getDigestDays() {
return _digestDays;
}
public void setDigestDays(List<DayOfWeek> digestDays) {
_digestDays = digestDays;
}
public void setDigestDays(DayOfWeek... digestDays) {
// TODO: Implement
}
*/
// endregion
}
|
package com.nibr.oncology.util.realwordid;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.sql.SQLException;
@Component
public final class RealWordApp {
private static final Logger logger = LogManager.getLogger(RealWordApp.class);
private final UniqueWordFetcher uniqueWordFetcher;
private final DictionaryDao dictionaryDao;
@Autowired
public RealWordApp(final UniqueWordFetcher uniqueWordFetcher, DictionaryDao dictionaryDao) {
this.uniqueWordFetcher = uniqueWordFetcher;
this.dictionaryDao = dictionaryDao;
}
public static void main(String[] args) throws IOException, SQLException {
// should use the parameterless constructor, as the other one invokes refresh, which we certainly don't want
// because it automatically triggers property injection and our CLI property is not ready yet
final AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext();
// setup configuration
applicationContext.register(Config.class);
// setup all the dependencies (refresh) and make them run (start)
applicationContext.refresh();
applicationContext.start();
try {
RealWordApp realWordIdUtil = applicationContext.getBean(RealWordApp.class);
// add CLI property source
OptionParser parser = new OptionParser();
parser.accepts("rebuildDatabase", "Rebuild word table.");
parser.accepts("removeUsedWordList", "Rebuild used word table.");
parser.accepts("removeWordFromDataBase", "Remove the returned word from the database");
parser.accepts("randomWord").withOptionalArg().ofType( Integer.class ).defaultsTo(5);
OptionSet options = parser.parse(args);
//PropertySource ps = new JOptCommandLinePropertySource(options);
//applicationContext.getEnvironment().getPropertySources().addLast(ps);
//logger.debug(applicationContext.getEnvironment());
if(options.has("randomWord")){
int wordSize = Integer.parseInt(options.valueOf("randomWord").toString());
boolean removeFromDataBase = options.has("removeWordFromDataBase");
realWordIdUtil.printRandomWord(wordSize, removeFromDataBase);
}
if(options.has("rebuildDatabase")){
realWordIdUtil.rebuildDataBase();
}
if(!options.hasOptions() || options.has("help")) {
parser.printHelpOn(System.out);
System.exit(0);
}
} finally {
applicationContext.close();
}
}
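// Example invocation (hypothetical jar name, for illustration only):
//   java -jar realwordid.jar --randomWord=6 --removeWordFromDataBase
//   java -jar realwordid.jar --rebuildDatabase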
private void rebuildDataBase() throws IOException, SQLException {
dictionaryDao.rebuildUsedWordsTable();
dictionaryDao.createDataBase();
}
private void printRandomWord(int wordSize, boolean removeFromDataBase) {
logger.info("The word is " + uniqueWordFetcher.getUniqueWord(wordSize, removeFromDataBase));
}
}
|
// checkstyle: Checks Java source code for adherence to a set of rules.
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.puppycrawl.tools.checkstyle.api;
import java.util.BitSet;
import antlr.CommonASTWithHiddenTokens;
import antlr.Token;
import antlr.collections.AST;
import com.puppycrawl.tools.checkstyle.utils.TokenUtils;
public final class DetailAST extends CommonASTWithHiddenTokens {
/** For Serialisation that will never happen. */
private static final long serialVersionUID = -2580884815577559874L;
/** Constant to indicate if not calculated the child count */
private static final int NOT_INITIALIZED = Integer.MIN_VALUE;
/** The line number **/
private int lineNo = NOT_INITIALIZED;
/** The column number **/
private int columnNo = NOT_INITIALIZED;
/** Number of children */
private int childCount = NOT_INITIALIZED;
/** The parent token */
private DetailAST parent;
/** Previous sibling */
private DetailAST previousSibling;
/**
* All token types in this branch.
* Token 'x' (where x is an int) is in this branch
* if branchTokenTypes.get(x) is true.
*/
private BitSet branchTokenTypes;
@Override
public void initialize(Token tok) {
super.initialize(tok);
lineNo = tok.getLine();
// expect columns to start @ 0
columnNo = tok.getColumn() - 1;
}
@Override
public void initialize(AST ast) {
final DetailAST da = (DetailAST) ast;
setText(da.getText());
setType(da.getType());
lineNo = da.getLineNo();
columnNo = da.getColumnNo();
hiddenAfter = da.getHiddenAfter();
hiddenBefore = da.getHiddenBefore();
}
@Override
public void setFirstChild(AST ast) {
childCount = NOT_INITIALIZED;
super.setFirstChild(ast);
if (ast != null) {
((DetailAST) ast).setParent(this);
}
}
@Override
public void setNextSibling(AST ast) {
super.setNextSibling(ast);
if (ast != null && parent != null) {
((DetailAST) ast).setParent(parent);
}
if (ast != null) {
((DetailAST) ast).previousSibling = this;
}
}
/**
* Add previous sibling.
* @param ast
* DetailAST object.
*/
public void addPreviousSibling(DetailAST ast) {
if (ast != null) {
ast.setParent(parent);
final DetailAST previousSiblingNode = previousSibling;
if (previousSiblingNode != null) {
ast.previousSibling = previousSiblingNode;
previousSiblingNode.setNextSibling(ast);
}
else if (parent != null) {
parent.setFirstChild(ast);
}
ast.setNextSibling(this);
previousSibling = ast;
}
}
/**
* Add next sibling.
* @param ast
* DetailAST object.
*/
public void addNextSibling(DetailAST ast) {
if (ast != null) {
ast.setParent(parent);
final DetailAST nextSibling = getNextSibling();
if (nextSibling != null) {
ast.setNextSibling(nextSibling);
nextSibling.previousSibling = ast;
}
ast.previousSibling = this;
setNextSibling(ast);
}
}
@Override
public void addChild(AST ast) {
super.addChild(ast);
if (ast != null) {
((DetailAST) ast).setParent(this);
getFirstChild().setParent(this);
}
}
/**
* Returns the number of child nodes one level below this node. That is, it
* does not recurse down the tree.
* @return the number of child nodes
*/
public int getChildCount() {
// lazy init
if (childCount == NOT_INITIALIZED) {
childCount = 0;
AST child = getFirstChild();
while (child != null) {
childCount += 1;
child = child.getNextSibling();
}
}
return childCount;
}
/**
* Set the parent token.
* @param parent the parent token
*/
void setParent(DetailAST parent) {
this.parent = parent;
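// Propagating through the next sibling recursively sets the same parent on every following sibling.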
final DetailAST nextSibling = getNextSibling();
if (nextSibling != null) {
nextSibling.setParent(parent);
nextSibling.previousSibling = this;
}
}
/**
* Returns the parent token.
* @return the parent token
*/
public DetailAST getParent() {
return parent;
}
/** @return the line number **/
public int getLineNo() {
int resultNo = -1;
if (lineNo == NOT_INITIALIZED) {
// an inner AST that has been initialized
// with initialize(String text)
resultNo = findLineNo(getFirstChild());
if (resultNo < 0) {
resultNo = findLineNo(getNextSibling());
}
}
if (resultNo < 0) {
resultNo = lineNo;
}
return resultNo;
}
/**
* Set line number.
* @param lineNo
* line number.
*/
public void setLineNo(int lineNo) {
this.lineNo = lineNo;
}
/** @return the column number **/
public int getColumnNo() {
int resultNo = -1;
if (columnNo == NOT_INITIALIZED) {
// an inner AST that has been initialized
// with initialize(String text)
resultNo = findColumnNo(getFirstChild());
if (resultNo < 0) {
resultNo = findColumnNo(getNextSibling());
}
}
if (resultNo < 0) {
resultNo = columnNo;
}
return resultNo;
}
/**
* Set column number.
* @param columnNo
* column number.
*/
public void setColumnNo(int columnNo) {
this.columnNo = columnNo;
}
/** @return the last child node */
public DetailAST getLastChild() {
DetailAST ast = getFirstChild();
while (ast != null && ast.getNextSibling() != null) {
ast = ast.getNextSibling();
}
return ast;
}
/**
* Finds column number in the first non-comment node.
*
* @param ast DetailAST node.
* @return Column number if non-comment node exists, -1 otherwise.
*/
private static int findColumnNo(DetailAST ast) {
int resultNo = -1;
DetailAST node = ast;
while (node != null) {
// comment node can't be start of any java statement/definition
if (TokenUtils.isCommentType(node.getType())) {
node = node.getNextSibling();
}
else {
resultNo = node.getColumnNo();
break;
}
}
return resultNo;
}
/**
* Finds line number in the first non-comment node.
*
* @param ast DetailAST node.
* @return Line number if non-comment node exists, -1 otherwise.
*/
private static int findLineNo(DetailAST ast) {
int resultNo = -1;
DetailAST node = ast;
while (node != null) {
// comment node can't be start of any java statement/definition
if (TokenUtils.isCommentType(node.getType())) {
node = node.getNextSibling();
}
else {
resultNo = node.getLineNo();
break;
}
}
return resultNo;
}
/**
* @return the token types that occur in the branch as a sorted set.
*/
private BitSet getBranchTokenTypes() {
// lazy init
if (branchTokenTypes == null) {
branchTokenTypes = new BitSet();
branchTokenTypes.set(getType());
// add union of all children
DetailAST child = getFirstChild();
while (child != null) {
final BitSet childTypes = child.getBranchTokenTypes();
branchTokenTypes.or(childTypes);
child = child.getNextSibling();
}
}
return branchTokenTypes;
}
/**
* Checks if this branch of the parse tree contains a token
* of the provided type.
* @param type a TokenType
* @return true if and only if this branch (including this node)
* contains a token of type {@code type}.
*/
public boolean branchContains(int type) {
return getBranchTokenTypes().get(type);
}
/**
* Returns the number of direct child tokens that have the specified type.
* @param type the token type to match
* @return the number of matching token
*/
public int getChildCount(int type) {
int count = 0;
for (AST ast = getFirstChild(); ast != null; ast = ast.getNextSibling()) {
if (ast.getType() == type) {
count++;
}
}
return count;
}
/**
* Returns the previous sibling or null if no such sibling exists.
* @return the previous sibling or null if no such sibling exists.
*/
public DetailAST getPreviousSibling() {
return previousSibling;
}
/**
* Returns the first child token that makes a specified type.
* @param type the token type to match
* @return the matching token, or null if no match
*/
public DetailAST findFirstToken(int type) {
DetailAST retVal = null;
for (DetailAST ast = getFirstChild(); ast != null; ast = ast.getNextSibling()) {
if (ast.getType() == type) {
retVal = ast;
break;
}
}
return retVal;
}
@Override
public String toString() {
return super.toString() + "[" + getLineNo() + "x" + getColumnNo() + "]";
}
@Override
public DetailAST getNextSibling() {
return (DetailAST) super.getNextSibling();
}
@Override
public DetailAST getFirstChild() {
return (DetailAST) super.getFirstChild();
}
}
|
package com.raphydaphy.vitality.spell;
import java.util.AbstractMap.SimpleEntry;
import com.raphydaphy.vitality.api.essence.Essence;
import com.raphydaphy.vitality.api.spell.Spell;
import com.raphydaphy.vitality.api.wand.WandEnums.CoreType;
import com.raphydaphy.vitality.api.wand.WandEnums.TipType;
import com.raphydaphy.vitality.api.wand.WandHelper;
import com.raphydaphy.vitality.proxy.ClientProxy;
import com.raphydaphy.vitality.registry.ModItems;
import com.raphydaphy.vitality.util.VitalData;
import net.minecraft.client.resources.I18n;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.FMLCommonHandler;
public class SpellExcavation extends Spell {
public SpellExcavation() {
super("excavation", new Essence[] {}, ModItems.SPELL_EXCAVATION, 3, 3, 1, 10, false);
}
public static final Spell INSTANCE = new SpellExcavation();
@Override
public boolean onCastPre(ItemStack wand, EntityPlayer player, World world, BlockPos pos, EnumHand hand,
EnumFacing side, float hitX, float hitY, float hitZ) {
SimpleEntry<CoreType, TipType> pair = WandHelper.getUsefulInfo(wand);
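// Scale the base cooldown, cost and potency by the wand core and tip multipliers.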
int cooldown = (int) (pair.getKey().getCooldownMultiplier() * this.cooldown);
int cost = (int) (pair.getValue().getCostMultiplier() * this.cost);
int potency = (int) (pair.getKey().getPotencyMultiplier() * this.potency);
if (WandHelper.canUseEssence(wand, cost, pair.getKey().getCoreType())) {
player.setActiveHand(hand);
return true;
} else if (world.isRemote) {
ClientProxy.setActionText(I18n.format("vitality.wand.notenoughessence.name"),
pair.getKey().getCoreType().getColor());
}
return false;
}
@Override
public boolean onCast(ItemStack wand, EntityPlayer player, World world, BlockPos pos, EnumHand hand,
EnumFacing side, float hitX, float hitY, float hitZ) {
return true;
}
@Override
public void onCastPost(ItemStack wand, EntityPlayer player, World world, BlockPos pos, EnumHand hand,
EnumFacing side, float hitX, float hitY, float hitZ) {
player.getCooldownTracker().setCooldown(wand.getItem(), cooldown);
}
@Override
public boolean isEssenceValid(Essence essence) {
return true;
}
@Override
public boolean onCastTick(ItemStack wand, EntityPlayer player, int count) {
EntityPlayerMP realPlayer = FMLCommonHandler.instance().getMinecraftServerInstance().getPlayerList().getPlayerByUUID(player.getUniqueID());
BlockPos pos = player.rayTrace(6, 8).getBlockPos();
System.out.println("ticky");
realPlayer.interactionManager.updateBlockRemoving();
realPlayer.interactionManager.tryHarvestBlock(pos);
return true;
}
@Override
public void onCastTickSuccess(ItemStack wand, EntityPlayer player, int count) {
// TODO Auto-generated method stub
}
}
|
package com.redhat.ceylon.compiler.js;
import java.util.ArrayList;
import java.util.Map;
import java.util.Objects;
import com.redhat.ceylon.compiler.typechecker.model.Class;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Method;
import com.redhat.ceylon.compiler.typechecker.model.Module;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.Scope;
import com.redhat.ceylon.compiler.typechecker.model.TypeAlias;
import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.compiler.typechecker.model.TypeParameter;
import com.redhat.ceylon.compiler.typechecker.model.Util;
import com.redhat.ceylon.compiler.typechecker.model.Value;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.ValueLiteral;
public class MetamodelHelper {
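/** Writes the JS expression for the open type (declaration model) of the given declaration. */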
static void generateOpenType(final Node that, Declaration d, final GenerateJsVisitor gen) {
final Module m = d.getUnit().getPackage().getModule();
if (d instanceof TypeParameter == false) {
if (JsCompiler.isCompilingLanguageModule()) {
gen.out("$init$Open");
} else {
gen.out(gen.getClAlias(), "Open");
}
}
if (d instanceof com.redhat.ceylon.compiler.typechecker.model.Interface) {
gen.out("Interface$jsint");
} else if (d instanceof com.redhat.ceylon.compiler.typechecker.model.Class) {
gen.out("Class$jsint");
} else if (d instanceof Method) {
gen.out("Function");
} else if (d instanceof Value) {
gen.out("Value$jsint");
} else if (d instanceof com.redhat.ceylon.compiler.typechecker.model.IntersectionType) {
gen.out("Intersection");
} else if (d instanceof com.redhat.ceylon.compiler.typechecker.model.UnionType) {
gen.out("Union");
} else if (d instanceof TypeParameter) {
generateOpenType(that, ((TypeParameter)d).getDeclaration(),gen);
gen.out(".getTypeParameterDeclaration('", d.getName(), "')");
return;
} else if (d instanceof com.redhat.ceylon.compiler.typechecker.model.NothingType) {
gen.out("NothingType");
} else if (d instanceof TypeAlias) {
gen.out("Alias$jsint(");
if (JsCompiler.isCompilingLanguageModule()) {
gen.out(")(");
}
if (d.isMember()) {
//Make the chain to the top-level container
ArrayList<Declaration> parents = new ArrayList<Declaration>(2);
Declaration pd = (Declaration)d.getContainer();
while (pd!=null) {
parents.add(0,pd);
pd = pd.isMember()?(Declaration)pd.getContainer():null;
}
for (Declaration _d : parents) {
gen.out(gen.getNames().name(_d), ".$$.prototype.");
}
}
gen.out(gen.getNames().name(d), ")");
return;
}
//TODO optimize for local declarations
if (JsCompiler.isCompilingLanguageModule()) {
gen.out("()");
}
gen.out("(", gen.getClAlias());
final String pkgname = d.getUnit().getPackage().getNameAsString();
if (Objects.equals(that.getUnit().getPackage().getModule(), d.getUnit().getPackage().getModule())) {
gen.out("lmp$(ex$,'");
} else {
gen.out("fmp$('", m.getNameAsString(), "','", m.getVersion(), "','");
}
gen.out("ceylon.language".equals(pkgname) ? "$" : pkgname, "'),");
if (d.isMember()) {
outputPathToDeclaration(that, d, gen);
}
if (d instanceof Value) {
if (!d.isMember()) gen.qualify(that, d);
gen.out("$prop$", gen.getNames().getter(d), ")");
} else {
if (d.isAnonymous()) {
final String oname = gen.getNames().objectName(d);
if (d.isToplevel()) {
gen.qualify(that, d);
}
gen.out("$init$", oname);
if (!d.isToplevel()) {
gen.out("()");
}
} else {
if (!d.isMember()) gen.qualify(that, d);
gen.out(gen.getNames().name(d));
}
gen.out(")");
}
}
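/** Writes the JS expression for an applied (closed) type literal, e.g. a class, interface or other type model. */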
static void generateClosedTypeLiteral(final Tree.TypeLiteral that, final GenerateJsVisitor gen) {
final ProducedType ltype = that.getType().getTypeModel();
final TypeDeclaration td = ltype.getDeclaration();
final Map<TypeParameter,ProducedType> targs = that.getType().getTypeModel().getTypeArguments();
if (td instanceof com.redhat.ceylon.compiler.typechecker.model.Class) {
if (Util.getContainingClassOrInterface(td.getContainer()) == null) {
gen.out(gen.getClAlias(), "$init$AppliedClass$meta$model()(");
} else {
gen.out(gen.getClAlias(), "$init$AppliedMemberClass$meta$model()(");
}
TypeUtils.outputQualifiedTypename(null, gen.isImported(gen.getCurrentPackage(), td), ltype, gen, false);
gen.out(",");
TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArguments(), gen, false,
that.getTypeModel().getVarianceOverrides());
if (targs != null && !targs.isEmpty()) {
gen.out(",undefined,");
TypeUtils.printTypeArguments(that, targs, gen, false,
that.getType().getTypeModel().getVarianceOverrides());
}
gen.out(")");
} else if (td instanceof com.redhat.ceylon.compiler.typechecker.model.Interface) {
if (td.isToplevel()) {
gen.out(gen.getClAlias(), "$init$AppliedInterface$jsint()(");
} else {
gen.out(gen.getClAlias(), "$init$AppliedMemberInterface$meta$model()(");
}
TypeUtils.outputQualifiedTypename(null, gen.isImported(gen.getCurrentPackage(), td), ltype, gen, false);
gen.out(",");
TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArguments(), gen, false,
that.getTypeModel().getVarianceOverrides());
if (targs != null && !targs.isEmpty()) {
gen.out(",undefined,");
TypeUtils.printTypeArguments(that, targs, gen, false,
that.getType().getTypeModel().getVarianceOverrides());
}
gen.out(")");
} else if (td instanceof com.redhat.ceylon.compiler.typechecker.model.NothingType) {
gen.out(gen.getClAlias(),"getNothingType$meta$model()");
} else if (that instanceof Tree.AliasLiteral) {
gen.out("/*TODO: applied alias*/");
} else if (that instanceof Tree.TypeParameterLiteral) {
gen.out("/*TODO: applied type parameter*/");
} else {
gen.out(gen.getClAlias(), "typeLiteral$meta({Type$typeLiteral:");
TypeUtils.typeNameOrList(that, ltype, gen, false);
gen.out("})");
}
}
static void generateMemberLiteral(final Tree.MemberLiteral that, final GenerateJsVisitor gen) {
final com.redhat.ceylon.compiler.typechecker.model.ProducedReference ref = that.getTarget();
final ProducedType ltype = that.getType() == null ? null : that.getType().getTypeModel();
final Declaration d = ref.getDeclaration();
final Class anonClass = d.isMember()&&d.getContainer() instanceof Class && ((Class)d.getContainer()).isAnonymous()?(Class)d.getContainer():null;
if (that instanceof Tree.FunctionLiteral || d instanceof Method) {
gen.out(gen.getClAlias(), d.isMember()&&anonClass==null?"AppliedMethod$meta$model(":"AppliedFunction$meta$model(");
if (ltype == null) {
if (anonClass != null) {
gen.qualify(that, anonClass);
gen.out(gen.getNames().objectName(anonClass), ".");
} else {
gen.qualify(that, d);
}
} else {
if (ltype.getDeclaration().isMember()) {
outputPathToDeclaration(that, ltype.getDeclaration(), gen);
} else {
gen.qualify(that, ltype.getDeclaration());
}
gen.out(gen.getNames().name(ltype.getDeclaration()));
gen.out(".$$.prototype.");
}
if (d instanceof Value) {
gen.out("$prop$", gen.getNames().getter(d), ",");
} else {
gen.out(gen.getNames().name(d),",");
}
if (d.isMember()&&anonClass==null) {
if (that.getTypeArgumentList()!=null) {
gen.out("[");
boolean first=true;
for (ProducedType targ : that.getTypeArgumentList().getTypeModels()) {
if (first)first=false;else gen.out(",");
gen.out(gen.getClAlias(),"typeLiteral$meta({Type$typeLiteral:");
TypeUtils.typeNameOrList(that, targ, gen, false);
gen.out("})");
}
gen.out("]");
gen.out(",");
} else {
gen.out("undefined,");
}
TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArguments(), gen, false,
that.getTypeModel().getVarianceOverrides());
} else {
TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArguments(), gen, false,
that.getTypeModel().getVarianceOverrides());
if (anonClass != null) {
gen.out(",");
gen.qualify(that, anonClass);
gen.out(gen.getNames().objectName(anonClass));
}
if (ref.getTypeArguments() != null && !ref.getTypeArguments().isEmpty()) {
if (anonClass == null) {
gen.out(",undefined");
}
gen.out(",");
TypeUtils.printTypeArguments(that, ref.getTypeArguments(), gen, false,
ref.getType().getVarianceOverrides());
}
}
gen.out(")");
} else if (that instanceof ValueLiteral || d instanceof Value) {
Value vd = (Value)d;
if (vd.isMember() && anonClass==null) {
gen.out(gen.getClAlias(), "$init$AppliedAttribute$meta$model()('");
gen.out(d.getName(), "',");
} else {
gen.out(gen.getClAlias(), "$init$AppliedValue$jsint()(");
if (anonClass == null) {
gen.out("undefined");
} else {
gen.qualify(that, anonClass);
gen.out(gen.getNames().objectName(anonClass));
}
gen.out(",");
}
if (ltype == null) {
if (anonClass != null) {
gen.qualify(that, anonClass);
gen.out(gen.getNames().objectName(anonClass), ".");
} else {
gen.qualify(that, d);
}
} else {
gen.qualify(that, ltype.getDeclaration());
gen.out(gen.getNames().name(ltype.getDeclaration()));
gen.out(".$$.prototype.");
}
if (d instanceof Value) {
gen.out("$prop$", gen.getNames().getter(d),",");
} else {
gen.out(gen.getNames().name(d),",");
}
TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArguments(), gen, false,
that.getTypeModel().getVarianceOverrides());
gen.out(")");
} else {
gen.out(gen.getClAlias(), "/*TODO:closed member literal*/typeLiteral$meta({Type$typeLiteral:");
gen.out("{t:");
if (ltype == null) {
gen.qualify(that, d);
} else {
gen.qualify(that, ltype.getDeclaration());
gen.out(gen.getNames().name(ltype.getDeclaration()));
gen.out(".$$.prototype.");
}
if (d instanceof Value) {
gen.out("$prop$", gen.getNames().getter(d));
} else {
gen.out(gen.getNames().name(d));
}
if (ltype != null && ltype.getTypeArguments() != null && !ltype.getTypeArguments().isEmpty()) {
gen.out(",a:");
TypeUtils.printTypeArguments(that, ltype.getTypeArguments(), gen, false,
ltype.getVarianceOverrides());
}
gen.out("}})");
}
}
static void findModule(final Module m, final GenerateJsVisitor gen) {
gen.out(gen.getClAlias(), "getModules$meta().find('",
m.getNameAsString(), "','", m.getVersion(), "')");
}
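/** Writes the qualification path (prototype chain) from the top-level container down to the given member declaration. */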
static void outputPathToDeclaration(final Node that, final Declaration d, final GenerateJsVisitor gen) {
final Declaration parent = Util.getContainingDeclaration(d);
if (!gen.opts.isOptimize() && parent instanceof TypeDeclaration && Util.contains((Scope)parent, that.getScope())) {
gen.out(gen.getNames().self((TypeDeclaration)parent), ".");
} else {
Declaration _md = d;
final ArrayList<Declaration> parents = new ArrayList<>(3);
while (_md.isMember()) {
_md=Util.getContainingDeclaration(_md);
parents.add(0, _md);
}
boolean first=true;
boolean imported=false;
for (Declaration _d : parents) {
if (first){
imported = gen.qualify(that, _d);
first=false;
}
if (_d.isAnonymous()) {
final String oname = gen.getNames().objectName(_d);
if (_d.isToplevel()) {
gen.out(oname, ".");
} else {
gen.out("$init$", oname, "().$$.prototype.");
}
} else {
if (!imported)gen.out("$init$");
gen.out(gen.getNames().name(_d), imported?".$$.prototype.":"().$$.prototype.");
}
imported=true;
}
}
}
}
|
package com.universeprojects.web;
import com.universeprojects.common.shared.log.Logger;
import com.universeprojects.common.shared.util.Strings;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* This filter takes care of routing incoming requests to appropriate page controllers.
* The system only needs this single filter to function, regardless of how many controllers are defined.
*/
public class PageControllerFilter implements Filter {
private final static Logger log = Logger.getLogger(PageControllerFilter.class);
private String uriPrefix;
protected ServletContext servletContext;
@Override
public void init(FilterConfig filterConfig) throws ServletException {
this.servletContext = filterConfig.getServletContext();
String filterName = filterConfig.getFilterName();
// Verify the init parameters, that were supposed to be set in web.xml
String uriPrefix = filterConfig.getInitParameter("uriPrefix");
if (Strings.isEmpty(uriPrefix)) {
throw new RuntimeException("Init parameter \"uriPrefix\" for filter " + filterName + " must be set in web.xml");
}
if (!uriPrefix.startsWith("/") || !uriPrefix.endsWith("/")) {
throw new RuntimeException("Init parameter \"uriPrefix\" for filter " + filterName + " must begin and end with '/'");
}
this.uriPrefix = uriPrefix;
String baseScanPackage = filterConfig.getInitParameter("baseScanPackage");
if (Strings.isEmpty(baseScanPackage)) {
throw new RuntimeException("Init parameter \"baseScanPackage\" for filter " + filterName + " must be set in web.xml");
}
// This will detect & register all controllers that are on the classpath
ControllerRegistry.INSTANCE.initialize(baseScanPackage);
}
@Override
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
if (!(servletRequest instanceof HttpServletRequest) || !(servletResponse instanceof HttpServletResponse)) {
filterChain.doFilter(servletRequest, servletResponse);
return;
}
HttpServletRequest request = (HttpServletRequest) servletRequest;
HttpServletResponse response = (HttpServletResponse) servletResponse;
log.debug("Processing GET request");
// TODO: When the page is loaded for the first time, static resources are requested separately (CSS, JS, images)
// TODO: I think that these N requests are routed to this method. If confirmed, the extra calls should be ignored!
PageController controller = getController(request);
if (controller == null) {
// controller not found for this URL - abort
filterChain.doFilter(request, response);
return;
}
// TODO: For now, pass the request/response along to the controller
// TODO: In the future, create a suitable abstraction to limit the controller's power
controller.setupThreadLocal(request, response, servletContext);
String targetJspResourcePath = null;
try {
targetJspResourcePath = controller.processRequest(request, response);
} finally {
controller.clearThreadLocal();
}
if (Strings.isEmpty(targetJspResourcePath)) {
// NULL or "empty" return value indicates that we do not want to display the target page,
// due to something that was established during the processing of this request. It is assumed
// that the request object now knows what to do next (URL redirect, HTTP error, etc.)
return;
}
// The request has been processed by the controller without issues, and now we're ready to display the target JSP
request.getRequestDispatcher(targetJspResourcePath).forward(request, response);
}
/**
* (Helper method)
* Looks for a controller that matches the context path in the request URL
* - If a controller is found, it is returned to the caller
* - If the URL does not match the expected controller format, or no controller is registered
*   for the page name, NULL is returned to the caller. The caller then should do no further processing
*/
private PageController getController(HttpServletRequest request) throws IOException {
String requestURI = request.getRequestURI();
if (!requestURI.equals(uriPrefix) && !requestURI.matches(uriPrefix + PAGE_NAME_REGEX)) {
// If the URL doesn't match the expected format for a controller, let it continue down the filter chain
return null;
}
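// Strip the URI prefix to obtain the bare page name used for the controller lookup.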
String pageName = requestURI.substring(uriPrefix.length());
//Dev.check(!Strings.isEmpty(pageName), "Looks like someone messed with page-controller URL routing"); // regression-check
PageController controller = ControllerRegistry.INSTANCE.getController(pageName);
if (controller == null) {
return null;
}
return controller;
}
/** @return TRUE if the given page name is valid, FALSE otherwise */
static boolean isValidPageName(String pageName) {
if (Strings.isEmpty(pageName)) {
return false;
}
return pageName.matches(PAGE_NAME_REGEX);
}
/**
* This regex defines the accepted page name format, as follows:
*
* 1. Consisting of alpha-numeric characters 'A-Z', 'a-z', '0-9'
* 2. May consist of multiple words, separated by '-', '_', or '/'
*/
private static final String PAGE_NAME_REGEX = "([A-Za-z0-9]+[\\-_/]?)*[A-Za-z0-9]+";
@Override
public void destroy() {
// nada
}
}
|
package com.witchworks.common.item.baubles;
import baubles.api.BaublesApi;
import baubles.api.IBauble;
import baubles.api.cap.IBaublesItemHandler;
import com.witchworks.common.item.ItemMod;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ActionResult;
import net.minecraft.util.EnumActionResult;
import net.minecraft.util.EnumHand;
import net.minecraft.world.World;
import net.minecraftforge.items.ItemHandlerHelper;
public abstract class ItemBauble extends ItemMod implements IBauble {
public ItemBauble(String id) {
super(id);
setMaxStackSize(1);
}
@SuppressWarnings ("deprecation")
public ActionResult<ItemStack> onItemRightClick(ItemStack stack, World world, EntityPlayer player, EnumHand hand) {
ItemStack toEquip = stack.copy();
toEquip.setCount(1);
if(canEquip(toEquip, player)) {
if(world.isRemote)
return ActionResult.newResult(EnumActionResult.SUCCESS, stack);
IBaublesItemHandler baubles = BaublesApi.getBaublesHandler(player);
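// Try each bauble slot in turn; if the slot accepts the item, equip it and hand back whatever was previously in the slot.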
for(int i = 0; i < baubles.getSlots(); i++) {
if(baubles.isItemValidForSlot(i, toEquip, player)) {
ItemStack stackInSlot = baubles.getStackInSlot(i);
if(stackInSlot.isEmpty() || ((IBauble) stackInSlot.getItem()).canUnequip(stackInSlot, player)) {
baubles.setStackInSlot(i, toEquip);
stack.shrink(1);
if(!stackInSlot.isEmpty()) {
((IBauble) stackInSlot.getItem()).onUnequipped(stackInSlot, player);
if(stack.isEmpty()) {
return ActionResult.newResult(EnumActionResult.SUCCESS, stackInSlot);
} else {
ItemHandlerHelper.giveItemToPlayer(player, stackInSlot);
}
}
return ActionResult.newResult(EnumActionResult.SUCCESS, stack);
}
}
}
}
return ActionResult.newResult(EnumActionResult.PASS, stack);
}
@Override
public void onWornTick(ItemStack itemstack, EntityLivingBase player) {
}
@Override
public void onEquipped(ItemStack itemstack, EntityLivingBase player) {
}
@Override
public void onUnequipped(ItemStack itemstack, EntityLivingBase player) {
}
@Override
public boolean canEquip(ItemStack itemstack, EntityLivingBase player) {
return true;
}
@Override
public boolean canUnequip(ItemStack itemstack, EntityLivingBase player) {
return true;
}
@Override
public boolean willAutoSync(ItemStack itemstack, EntityLivingBase player) {
return false;
}
}
|
package com.youcruit.mailchimp.client.http;
import java.net.URI;
import java.util.Locale;
public enum APIBaseUrl {
US1, US2, US3, US4, US5, US6, US7, US8, US9, US10, US11, US12, US13;
private static final String API_BASE_URL = "https://%s.api.mailchimp.com/3.0";
public URI getUri() {
return URI.create(String.format(Locale.US, API_BASE_URL, this.name().toLowerCase()));
}
}
|
package crazypants.enderio.machine.buffer;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.IIcon;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import cpw.mods.fml.common.registry.GameRegistry;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import crazypants.enderio.EnderIO;
import crazypants.enderio.GuiHandler;
import crazypants.enderio.ModObject;
import crazypants.enderio.machine.AbstractMachineBlock;
import crazypants.enderio.machine.MachineRecipeInput;
import crazypants.enderio.machine.MachineRecipeRegistry;
import crazypants.enderio.machine.painter.BasicPainterTemplate;
import crazypants.enderio.machine.painter.PainterUtil;
import crazypants.enderio.network.PacketHandler;
import crazypants.util.IFacade;
public class BlockBuffer extends AbstractMachineBlock<TileBuffer> implements IFacade {
public static BlockBuffer create() {
PacketHandler.INSTANCE.registerMessage(PacketBufferIO.class, PacketBufferIO.class, PacketHandler.nextID(), Side.SERVER);
BlockBuffer res = new BlockBuffer();
res.init();
return res;
}
private static final String[] textureNames = new String[] { "blockBufferItem", "blockBufferPower", "blockBufferOmni", "blockBufferCreative" };
@SideOnly(Side.CLIENT)
private IIcon[] textures;
private BlockBuffer() {
super(ModObject.blockBuffer, TileBuffer.class);
}
@Override
protected void init() {
GameRegistry.registerBlock(this, BlockItemBuffer.class, modObject.unlocalisedName);
GameRegistry.registerTileEntity(teClass, modObject.unlocalisedName + "TileEntity");
EnderIO.guiHandler.registerGuiHandler(getGuiId(), this);
MachineRecipeRegistry.instance.registerRecipe(ModObject.blockPainter.unlocalisedName, new PainterTemplate());
}
@Override
public Object getServerGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z) {
TileEntity te = world.getTileEntity(x, y, z);
if(te instanceof TileBuffer) {
return new ContainerBuffer(player.inventory, (TileBuffer) te);
}
return null;
}
@Override
public Object getClientGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z) {
TileEntity te = world.getTileEntity(x, y, z);
if(te instanceof TileBuffer) {
return new GuiBuffer(player.inventory, (TileBuffer) te);
}
return null;
}
@Override
protected int getGuiId() {
return GuiHandler.GUI_ID_BUFFER;
}
@Override
@SideOnly(Side.CLIENT)
public void registerBlockIcons(IIconRegister iIconRegister) {
super.registerBlockIcons(iIconRegister);
textures = new IIcon[textureNames.length];
for (int i = 0; i < textureNames.length; i++) {
textures[i] = iIconRegister.registerIcon("enderio:" + textureNames[i]);
}
}
@Override
protected String getMachineFrontIconKey(boolean active) {
return getSideIconKey(active);
}
@Override
@SideOnly(Side.CLIENT)
public IIcon getIcon(int blockSide, int blockMeta) {
return blockSide > 1 ? textures[blockMeta] : super.getIcon(blockSide, blockMeta);
}
@Override
@SideOnly(Side.CLIENT)
public IIcon getIcon(IBlockAccess world, int x, int y, int z, int blockSide) {
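// When the buffer has been painted, render the source block's texture instead of the default buffer icons.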
TileEntity te = world.getTileEntity(x, y, z);
if(te instanceof TileBuffer) {
TileBuffer tef = (TileBuffer) te;
if(tef.getSourceBlock() != null) {
return tef.getSourceBlock().getIcon(blockSide, tef.getSourceBlockMetadata());
} else if(blockSide > 1) {
return textures[world.getBlockMetadata(x, y, z)];
}
}
return super.getIcon(world, x, y, z, blockSide);
}
@Override
public void onBlockPlacedBy(World world, int x, int y, int z, EntityLivingBase entity, ItemStack stack) {
if(entity instanceof EntityPlayer) {
TileEntity te = world.getTileEntity(x, y, z);
if(te instanceof TileBuffer) {
TileBuffer ta = (TileBuffer) te;
if(stack.stackTagCompound != null) {
ta.readCommon(stack.stackTagCompound);
}
world.markBlockForUpdate(x, y, z);
}
}
}
@Override
public int damageDropped(int meta) {
return meta;
}
// TODO refactor machines so all have this functionality
@Override
public ItemStack getPickBlock(MovingObjectPosition target, World world, int x, int y, int z) {
return createDrop((TileBuffer) world.getTileEntity(x, y, z));
}
private ItemStack createDrop(TileBuffer te) {
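// Persist the buffer's configuration in the dropped item's NBT so it survives being broken and re-placed.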
ItemStack stack = new ItemStack(this, 1, BlockItemBuffer.Type.get(te).ordinal());
stack.stackTagCompound = new NBTTagCompound();
te.writeCommon(stack.stackTagCompound);
return stack;
}
public ItemStack createItemStackForSourceBlock(ItemStack machine, Block block, int sourceMeta) {
PainterUtil.setSourceBlock(machine, block, sourceMeta);
return machine;
}
public final class PainterTemplate extends BasicPainterTemplate {
public PainterTemplate() {
super(BlockBuffer.this);
}
@Override
public ResultStack[] getCompletedResult(float chance, MachineRecipeInput... inputs) {
ItemStack paintSource = MachineRecipeInput.getInputForSlot(1, inputs);
if(paintSource == null) {
return new ResultStack[0];
}
ItemStack target = MachineRecipeInput.getInputForSlot(0, inputs);
target = target.copy();
target.stackSize = 1;
return new ResultStack[] { new ResultStack(createItemStackForSourceBlock(target, Block.getBlockFromItem(paintSource.getItem()),
paintSource.getItemDamage())) };
}
}
@Override
public int getFacadeMetadata(IBlockAccess world, int x, int y, int z, int side) {
TileEntity te = world.getTileEntity(x, y, z);
if(te instanceof TileBuffer) {
return ((TileBuffer) te).getSourceBlockMetadata();
}
return 0;
}
@Override
public Block getFacade(IBlockAccess world, int x, int y, int z, int side) {
TileEntity te = world.getTileEntity(x, y, z);
if(te instanceof TileBuffer) {
return ((TileBuffer) te).getSourceBlock();
}
return this;
}
}
|
package de.ehex.foss.gematik.specifications;
import static de.ehex.foss.gematik.specifications.PTStBs.gemProdT_CM_KOMLE_PTV1_2_0;
import static de.ehex.foss.gematik.specifications.PTStBs.gemProdT_FD_KOMLE_PTV1_2_0;
import static de.ehex.foss.gematik.specifications.PTStBs.gemProdT_FD_VSDM_PTV1_5_0_1;
import static de.ehex.foss.gematik.specifications.PTStBs.gemProdT_Intermediaer_VSDM_PTV150_V100;
import static de.ehex.foss.gematik.specifications.PTStBs.gemProdT_Kon_PTV260_V100_LDAPProxy;
import static de.ehex.foss.gematik.specifications.PTStBs.gemProdT_X_509_TSP_nonQES_eGK_PTV1_6_0_V1_2_1;
import static de.ehex.foss.gematik.specifications.gemErrata_R1_4_6.AFOs.ARV_706_3_SPEC_SST_STAMPEL_AFO_0010;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4384;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4387;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4642;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4643;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4646;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4647;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4648;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4649;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4650;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4655;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2323;
import static de.ehex.foss.gematik.specifications.gemSpec_TSL.AFOs.TIP1_A_5120;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.unmodifiableSet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* According to the currently released specifications, this enumeration lists all (currently considered) gematik EVT
* scopes.
*
* @author Stefan Gasterstädt
* @since September 27th, 2016
*/
public enum EVTScopes implements TestScope {
KOMLE_CM(gemProdT_CM_KOMLE_PTV1_2_0, emptyList(), emptyList()),
KOMLE_FD(gemProdT_FD_KOMLE_PTV1_2_0, emptyList(), emptyList()),
LDAP_PROXY(gemProdT_Kon_PTV260_V100_LDAPProxy, emptyList(), emptyList()),
/**
* TODO: Put a note why
* <em>GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4655, TIP1_A_5120</em>
* is/are included in the test scope.
*/
OCSP_EGK(gemProdT_X_509_TSP_nonQES_eGK_PTV1_6_0_V1_2_1, emptyList(), asList(ARV_706_3_SPEC_SST_STAMPEL_AFO_0010, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4655, TIP1_A_5120)),
/**
* TODO: Put a note why <em>VSDM_A_2323</em> is/are excluded from testing.
*
* TODO: Put a note why <em>GS_A_4384, GS_A_4387</em> is/are included in the test scope.
*/
VSDM_FD(gemProdT_FD_VSDM_PTV1_5_0_1, asList(VSDM_A_2323), asList(ARV_706_3_SPEC_SST_STAMPEL_AFO_0010, GS_A_4384, GS_A_4387)),
INTERMEDIAER_VSDM(gemProdT_Intermediaer_VSDM_PTV150_V100, emptyList(), singletonList(ARV_706_3_SPEC_SST_STAMPEL_AFO_0010)),
;
private Set<AFO> testAFOs;
private EVTScopes(final TestableSpecification baseSpec, final List<AFO> excludedAFOs, final List<AFO> includedAFOs) {
final List<AFO> tmpExcludedAFOs = new ArrayList<>(excludedAFOs);
tmpExcludedAFOs.retainAll(includedAFOs);
if (!tmpExcludedAFOs.isEmpty()) {
throw new IllegalArgumentException("Folgende AFOs sind als excluded und included angegeben: " + tmpExcludedAFOs);
}
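// Start from the base specification's testable AFOs, remove the explicitly excluded ones, then add the explicitly included ones.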
final Set<AFO> resultSet = new HashSet<>();
resultSet.addAll(baseSpec.getTestableAFOs());
resultSet.removeAll(excludedAFOs);
resultSet.addAll(includedAFOs);
this.testAFOs = unmodifiableSet(resultSet);
}
@Override
public Set<AFO> getTestableAFOs() {
return this.testAFOs;
}
}
|
package de.proteinms.omxparser.util;
import com.compomics.util.Util;
import com.compomics.util.experiment.biology.AminoAcidPattern;
import com.compomics.util.experiment.biology.PTM;
import com.compomics.util.experiment.biology.Peptide;
import com.compomics.util.experiment.identification.Advocate;
import com.compomics.util.experiment.io.identifications.IdfileReader;
import com.compomics.util.experiment.identification.PeptideAssumption;
import com.compomics.util.experiment.identification.matches.ModificationMatch;
import com.compomics.util.experiment.identification.matches.SpectrumMatch;
import com.compomics.util.experiment.massspectrometry.Charge;
import com.compomics.util.experiment.massspectrometry.Spectrum;
import com.compomics.util.experiment.personalization.ExperimentObject;
import com.compomics.util.gui.waiting.WaitingHandler;
import com.compomics.util.protein.Header;
import de.proteinms.omxparser.OmssaOmxFile;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
public class OMSSAIdfileReader extends ExperimentObject implements IdfileReader {
/**
* The inspected OMSSA omx file.
*/
private File identificationFile;
/**
* The modification file mods.xml.
*/
private File modsFile;
/**
* The modification file usermods.xml.
*/
private File userModsFile;
/**
* The instance of the inspected omx file.
*/
private OmssaOmxFile omxFile;
/**
* Constructor for the reader.
*/
public OMSSAIdfileReader() {
}
/**
* Constructor for the reader.
*
* @param idFile the inspected file
*/
public OMSSAIdfileReader(File idFile) {
this.identificationFile = idFile;
omxFile = new OmssaOmxFile(idFile.getPath(), false);
}
/**
* Get the file name.
*
* @return the file name
*/
public String getFileName() {
if (modsFile != null && userModsFile != null) {
return identificationFile.getName().concat(", ").concat(modsFile.getName()).concat(", ").concat(userModsFile.getName());
} else if (modsFile != null) {
return identificationFile.getName().concat(", ").concat(modsFile.getName());
} else if (userModsFile != null) {
return identificationFile.getName().concat(", ").concat(userModsFile.getName());
} else {
return identificationFile.getName();
}
}
public String getExtension() {
return ".omx";
}
@Override
public HashSet<SpectrumMatch> getAllSpectrumMatches(WaitingHandler waitingHandler) throws IOException, IllegalArgumentException, Exception {
HashSet<SpectrumMatch> assignedSpectra = new HashSet<SpectrumMatch>();
HashMap<String, LinkedList<MSPepHit>> peptideToProteinMap = omxFile.getPeptideToProteinMap();
List<MSResponse> msSearchResponse = omxFile.getParserResult().MSSearch_response.MSResponse;
List<MSRequest> msRequest = omxFile.getParserResult().MSSearch_request.MSRequest;
int searchResponseSize = msSearchResponse.size();
if (waitingHandler != null) {
waitingHandler.setMaxSecondaryProgressValue(searchResponseSize);
}
for (int i = 0; i < searchResponseSize; i++) {
Map<Integer, MSHitSet> msHitSetMap = msSearchResponse.get(i).MSResponse_hitsets.MSHitSet;
String tempFile = msRequest.get(i).MSRequest_settings.MSSearchSettings.MSSearchSettings_infiles.MSInFile.MSInFile_infile;
for (int spectrumIndex : msHitSetMap.keySet()) {
MSHitSet msHitSet = msHitSetMap.get(spectrumIndex);
List<MSHits> hitSet = msHitSet.MSHitSet_hits.MSHits;
if (hitSet.size() > 0) {
HashMap<Double, ArrayList<MSHits>> hitMap = new HashMap<Double, ArrayList<MSHits>>();
for (MSHits msHits : hitSet) {
if (!hitMap.containsKey(msHits.MSHits_evalue)) {
hitMap.put(msHits.MSHits_evalue, new ArrayList<MSHits>());
}
hitMap.get(msHits.MSHits_evalue).add(msHits);
}
ArrayList<Double> eValues = new ArrayList<Double>(hitMap.keySet());
Collections.sort(eValues);
String tempName;
int tempIndex = spectrumIndex + 1;
if (msHitSet.MSHitSet_ids.MSHitSet_ids_E.isEmpty()) {
tempName = tempIndex + "";
} else {
tempName = msHitSet.MSHitSet_ids.MSHitSet_ids_E.get(0);
}
String name = fixMgfTitle(tempName);
SpectrumMatch currentMatch = new SpectrumMatch(Spectrum.getSpectrumKey(Util.getFileName(tempFile), name));
currentMatch.setSpectrumNumber(tempIndex);
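// Hits are ranked by ascending e-value; hits sharing an e-value get the same rank,
// and the rank then advances by the size of that group.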
int rank = 1;
for (double eValue : eValues) {
for (MSHits msHits : hitMap.get(eValue)) {
currentMatch.addHit(Advocate.OMSSA, getPeptideAssumption(msHits, rank, peptideToProteinMap));
}
rank += hitMap.get(eValue).size();
}
assignedSpectra.add(currentMatch);
}
}
if (waitingHandler != null) {
if (waitingHandler.isRunCanceled()) {
break;
}
waitingHandler.setSecondaryProgressValue(i);
}
}
return assignedSpectra;
}
private PeptideAssumption getPeptideAssumption(MSHits currentMsHit, int rank,
HashMap<String, LinkedList<MSPepHit>> peptideToProteinMap) {
Charge charge = new Charge(Charge.PLUS, currentMsHit.MSHits_charge);
ArrayList<String> proteins = new ArrayList<String>();
for (MSPepHit msPepHit : (List<MSPepHit>) peptideToProteinMap.get(currentMsHit.MSHits_pepstring)) { // There might be redundancies in the map.
Boolean taken = false;
String accession = getProteinAccession(msPepHit.MSPepHit_defline);
if (accession == null) {
accession = msPepHit.MSPepHit_accession;
}
for (String protein : proteins) {
if (protein.compareTo(accession) == 0) {
taken = true;
break;
}
}
if (!taken) {
proteins.add(accession);
}
}
List<MSModHit> msModHits = currentMsHit.MSHits_mods.MSModHit;
ArrayList<ModificationMatch> modificationsFound = new ArrayList<ModificationMatch>();
// inspect variable modifications
for (MSModHit msModHit : msModHits) {
int msMod = msModHit.MSModHit_modtype.MSMod;
PTM currentPTM = new PTM(-1, msMod + "", -1, new AminoAcidPattern()); //@TODO: add more information to rescue the PTM when the omssa index is wrong
int location = msModHit.MSModHit_site + 1;
modificationsFound.add(new ModificationMatch(currentPTM.getName(), true, location));
}
Peptide thePeptide = new Peptide(currentMsHit.MSHits_pepstring, proteins, modificationsFound);
return new PeptideAssumption(thePeptide, rank, Advocate.OMSSA, charge, currentMsHit.MSHits_evalue, getFileName());
}
/**
* Parses omssa description to have the accession.
*
* @param description the protein description
* @return the protein accession
*/
private String getProteinAccession(String description) {
try {
Header header = Header.parseFromFASTA(description);
if (header.getAccession() != null) {
return header.getAccession();
} else {
return null;
}
} catch (Exception e) {
return description.substring(1);
}
}
/**
* Returns the fixed mgf title.
*
* @param spectrumTitle
* @return the fixed mgf title
*/
private String fixMgfTitle(String spectrumTitle) {
// a special fix for mgf files with titles containing url encoding, e.g.: %3b instead of ;
try {
spectrumTitle = URLDecoder.decode(spectrumTitle, "utf-8");
} catch (UnsupportedEncodingException e) {
System.out.println("An exception was thrown when trying to decode an mgf tile!");
e.printStackTrace();
}
//System.out.println("before: " + spectrumTitle);
// a special fix for mgf files with titles containing the escape character '\'
spectrumTitle = spectrumTitle.replaceAll("\\\\\"", "\\\""); // change \" into "
spectrumTitle = spectrumTitle.replaceAll("\\\\\\\\", "\\\\"); // change \\ into \
//System.out.println("after: " + spectrumTitle);
return spectrumTitle;
}
@Override
public void close() throws IOException {
omxFile = null;
}
}
|
package de.tblsoft.solr.pipeline.filter;
import com.google.common.base.Strings;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import de.tblsoft.solr.elastic.AliasManager;
import de.tblsoft.solr.http.ElasticHelper;
import de.tblsoft.solr.http.HTTPHelper;
import de.tblsoft.solr.pipeline.AbstractFilter;
import de.tblsoft.solr.pipeline.bean.Document;
import de.tblsoft.solr.pipeline.bean.Field;
import de.tblsoft.solr.util.IOUtils;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
public class ElasticWriter extends AbstractFilter {
private static Logger LOG = LoggerFactory.getLogger(ElasticWriter.class);
private Gson gson;
private String type;
private String location;
private String elasticMappingLocation;
private boolean delete;
private String idField;
private Boolean hashId = false;
private List<Document> buffer = new ArrayList<Document>();
private int bufferSize = 10000;
private long currentBufferContentSize = 0;
private long maxBufferContentSize = 3000000L;
private boolean detectNumberValues = true;
private boolean failOnError = true;
private String indexUrl;
private Integer housekeepingCount;
private String housekeppingStrategy;
private Boolean housekeepingEnabled = false;
private String alias;
@Override
public void init() {
alias = getProperty("alias", null);
housekeepingEnabled = getPropertyAsBoolean("housekeepingEnabled", housekeepingEnabled);
housekeepingCount = getPropertyAsInt("housekeepingCount", 5);
housekeppingStrategy = getProperty("housekeppingStrategy", "linear");
bufferSize = getPropertyAsInt("bufferSize", 10000);
location = getProperty("location", null);
verify(location, "For the ElasticWriter a location must be defined.");
failOnError = getPropertyAsBoolean("failOnError", Boolean.TRUE);
delete = getPropertyAsBoolean("delete", Boolean.TRUE);
detectNumberValues = getPropertyAsBoolean("detectNumberValues", Boolean.TRUE);
elasticMappingLocation = getProperty("elasticMappingLocation", null);
idField = getProperty("idField", null);
hashId = getPropertyAsBoolean("hashId", false);
GsonBuilder builder = new GsonBuilder();
gson = builder.create();
try {
if(housekeepingEnabled) {
indexUrl = AliasManager.getElasticUrlWithDatePattern(location);
} else {
indexUrl = ElasticHelper.getIndexUrl(location);
}
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
if (delete && !"elasticupdate".equals(type)) {
HTTPHelper.delete(indexUrl);
}
if (elasticMappingLocation != null) {
try {
String absoluteElasticMappingLocation = IOUtils.getAbsoluteFile(
getBaseDir(), elasticMappingLocation);
String mappingJson = IOUtils.getString(absoluteElasticMappingLocation);
String mappingUrl = ElasticHelper.getIndexUrl(indexUrl);
LOG.debug("mapping url: {} mappingJson: {}", mappingUrl, mappingJson );
HTTPHelper.put(mappingUrl, mappingJson, "application/json");
} catch (URISyntaxException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
super.init();
}
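/**
 * Converts a field value to a Long when number detection is enabled and the value parses as a long;
 * otherwise (including fields explicitly typed as "string" and decimal values) the raw string is returned.
 */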
static Object transformDatatype(Field field, boolean detectNumberValues) {
String value = field.getValue();
if(!detectNumberValues) {
return value;
}
if(field.getDatatype() != null && "string".equals(field.getDatatype())) {
return value;
}
if (NumberUtils.isNumber(value)) {
try {
Long intValue = Long.valueOf(value);
return intValue;
} catch (NumberFormatException e) {
return value;
}
}
return value;
}
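/**
 * Flushes the buffered documents as a single Elasticsearch _bulk request: for every document one
 * action line ("index" with index, type and id) followed by one JSON source line. Errors either
 * abort the run or are logged and skipped, depending on failOnError.
 */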
void procesBuffer() {
if(buffer.size() == 0) {
return;
}
StringBuilder bulkRequest = new StringBuilder();
try {
for (Document document : buffer) {
Map<String, Object> jsonDocument = mapToJson(document, detectNumberValues);
if (jsonDocument.isEmpty()) {
continue;
}
String id;
if (Strings.isNullOrEmpty(idField)) {
id = UUID.randomUUID().toString();
} else if(hashId) {
id= DigestUtils.md5Hex(document.getFieldValue(idField));
} else {
id = document.getFieldValue(idField);
}
String index = ElasticHelper.getIndexFromUrl(indexUrl);
String type = ElasticHelper.getTypeFromUrl(location);
String bulkMethod = createBulkMethod("index", index, type, id);
String json = gson.toJson(jsonDocument);
bulkRequest.append(bulkMethod).append(" \n");
bulkRequest.append(json).append(" \n");
}
String bulkUrl = ElasticHelper.getBulkUrl(indexUrl);
HTTPHelper.post(bulkUrl, bulkRequest.toString(), "application/json");
} catch (Exception e) {
LOG.info("There was an error processing the bulk request: " + e.getMessage());
LOG.info(bulkRequest.toString());
if(failOnError) {
throw new RuntimeException(e);
} else {
LOG.info("Continue processing ... ");
}
}
}
@Override
public void document(Document document) {
long documentSize = document.getSize();
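// Flush the buffer before adding the document if either the document count limit
// or the accumulated content size limit would be exceeded.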
if (buffer.size() >= bufferSize || currentBufferContentSize + documentSize > maxBufferContentSize ) {
procesBuffer();
LOG.debug("bufferContentSize: " + currentBufferContentSize + " bufferSize: " + buffer.size());
buffer = new ArrayList<>();
currentBufferContentSize = 0;
}
buffer.add(document);
currentBufferContentSize = currentBufferContentSize + documentSize;
super.document(document);
}
public static String mapToJsonString(List<Document> documentList, boolean detectNumberValues) {
List<Map<String, Object>> documentMap = new ArrayList<Map<String, Object>>();
for (Document document : documentList) {
documentMap.add(mapToJson(document, detectNumberValues));
}
Gson gson = new GsonBuilder().setPrettyPrinting().create();
String json = gson.toJson(documentMap);
return json;
}
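/**
 * Expands a dotted field name into nested maps, skipping the first segment, which mapToJson uses
 * as the top-level key. For example, createExpandedValue("a.b.c", v) yields {"b": {"c": v}} and is
 * stored under the key "a".
 */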
static Map createExpandedValue(String flatName, Object value) {
Map<String, Object> last = new HashMap<String, Object>();
Map<String, Object> result = last;
String[] parts = flatName.split("\\.");
for(int i = 1; i < parts.length; i++) {
String part = parts[i];
if(i == parts.length-1) {
last.put(part, value);
}
else {
Map<String, Object> lastMap = new HashMap<String, Object>();
last.put(part, lastMap);
last = lastMap;
}
}
return result;
}
static Map<String, Object> mapToJson(Document document, boolean detectNumberValues) {
Map<String, Object> jsonDocument = new HashMap<String, Object>();
for (Field field : document.getFields()) {
List<String> values = field.getValues();
if (values == null || values.isEmpty()) {
continue;
}
boolean fieldIsFlat= field.getName().contains(".");
String fieldName = field.getName();
Object fieldValue = field.getValues();
if (values.size() == 1) {
fieldValue = transformDatatype(field, detectNumberValues);
}
if(fieldIsFlat) {
fieldValue = createExpandedValue(fieldName, fieldValue);
fieldName = StringUtils.substringBefore(fieldName, ".");
}
jsonDocument.put(fieldName, fieldValue);
}
return jsonDocument;
}
public String createBulkMethod(String method, String index, String type,
String id) {
String bulkMethod = "{ \"" + method + "\" : { \"_index\" : \"" + index
+ "\", \"_type\" : \"" + type + "\" } }";
return bulkMethod;
}
public void housekeeping() {
String prefix = AliasManager.getIndexPrefixByUrl(indexUrl);
if(Strings.isNullOrEmpty(prefix)) {
throw new RuntimeException("Could not extract prefix from url: " + indexUrl);
}
List<String> indexes = AliasManager.getIndexesByPrefix(indexUrl,prefix);
Collections.sort(indexes);
try {
String alias = ElasticHelper.getIndexFromUrl(location);
AliasManager.switchAlias(location, alias, indexes, indexes.get(indexes.size()-1));
} catch (Exception e) {
LOG.info("There was an error switching the alias: " + e.getMessage());
}
int indexesToDeleteCount = indexes.size() - housekeepingCount;
if(indexesToDeleteCount < 0) {
indexesToDeleteCount = 0;
}
List<String> indexToDeleteList = indexes.subList(0,indexesToDeleteCount);
for(String indexToDelete: indexToDeleteList) {
try {
String deleteUrl = ElasticHelper.getIndexUrl(location, indexToDelete);
HTTPHelper.delete(deleteUrl);
} catch (Exception e) {
LOG.info("There was an error deleting the index: " + indexToDelete);
}
}
}
@Override
public void end() {
procesBuffer();
if(housekeepingEnabled) {
housekeeping();
}
if(alias != null) {
try {
String index = ElasticHelper.getIndexFromUrl(indexUrl);
AliasManager.switchAlias(indexUrl, alias, new ArrayList<>(), index);
} catch (Exception e) {
LOG.info("Error switching alias, because: " + e.getMessage());
}
}
super.end();
}
}
|
package exnihiloadscensio.compatibility.jei;
import java.util.List;
import com.google.common.collect.Lists;
import exnihiloadscensio.blocks.BlockSieve.MeshType;
import exnihiloadscensio.blocks.ENBlocks;
import exnihiloadscensio.compatibility.jei.barrel.fluidtransform.FluidTransformRecipe;
import exnihiloadscensio.compatibility.jei.barrel.fluidtransform.FluidTransformRecipeCategory;
import exnihiloadscensio.compatibility.jei.barrel.fluidtransform.FluidTransformRecipeHandler;
import exnihiloadscensio.compatibility.jei.hammer.HammerRecipe;
import exnihiloadscensio.compatibility.jei.hammer.HammerRecipeCategory;
import exnihiloadscensio.compatibility.jei.hammer.HammerRecipeHandler;
import exnihiloadscensio.compatibility.jei.sieve.SieveRecipe;
import exnihiloadscensio.compatibility.jei.sieve.SieveRecipeCategory;
import exnihiloadscensio.compatibility.jei.sieve.SieveRecipeHandler;
import exnihiloadscensio.items.ENItems;
import exnihiloadscensio.registries.FluidTransformRegistry;
import exnihiloadscensio.registries.HammerRegistry;
import exnihiloadscensio.registries.SieveRegistry;
import exnihiloadscensio.registries.types.FluidTransformer;
import exnihiloadscensio.util.BlockInfo;
import exnihiloadscensio.util.ItemInfo;
import mezz.jei.api.IJeiRuntime;
import mezz.jei.api.IModPlugin;
import mezz.jei.api.IModRegistry;
import mezz.jei.api.ISubtypeRegistry;
import mezz.jei.api.JEIPlugin;
import mezz.jei.api.ingredients.IModIngredientRegistration;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fluids.FluidRegistry;
@JEIPlugin
public class CompatJEI implements IModPlugin
{
@Override
public void registerItemSubtypes(ISubtypeRegistry subtypeRegistry)
{
}
@Override
public void registerIngredients(IModIngredientRegistration registry)
{
}
@Override
public void register(IModRegistry registry)
{
registry.addRecipeCategories(new SieveRecipeCategory(registry.getJeiHelpers().getGuiHelper()));
registry.addRecipeHandlers(new SieveRecipeHandler());
List<SieveRecipe> sieveRecipes = Lists.newArrayList();
for (BlockInfo info : SieveRegistry.getRegistry().keySet())
{
for (MeshType type : MeshType.values())
{
if (type.getID() != 0 && info.getBlockState() != null) // Bad configs strike back!
{
SieveRecipe recipe = new SieveRecipe(info.getBlockState(), type);
// If there's an input block, mesh, and at least one output
if(recipe.getInputs().size() > 2 && recipe.getOutputs().size() > 0)
{
sieveRecipes.add(recipe);
}
}
}
}
registry.addRecipes(sieveRecipes);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENBlocks.sieve), SieveRecipeCategory.UID);
registry.addRecipeCategories(new HammerRecipeCategory(registry.getJeiHelpers().getGuiHelper()));
registry.addRecipeHandlers(new HammerRecipeHandler());
List<HammerRecipe> hammerRecipes = Lists.newArrayList();
for (ItemInfo info : HammerRegistry.getRegistry().keySet())
{
if (info.getItem() != null)
{
@SuppressWarnings("deprecation")
IBlockState block = Block.getBlockFromItem(info.getItem()).getStateFromMeta(info.getMeta());
HammerRecipe recipe = new HammerRecipe(block);
// If there's an input block, and at least one output
if(recipe.getInputs().size() > 1 && recipe.getOutputs().size() > 0)
{
hammerRecipes.add(recipe);
}
}
}
registry.addRecipes(hammerRecipes);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENItems.hammerWood), HammerRecipeCategory.UID);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENItems.hammerGold), HammerRecipeCategory.UID);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENItems.hammerStone), HammerRecipeCategory.UID);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENItems.hammerIron), HammerRecipeCategory.UID);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENItems.hammerDiamond), HammerRecipeCategory.UID);
registry.addRecipeCategories(new FluidTransformRecipeCategory(registry.getJeiHelpers().getGuiHelper()));
registry.addRecipeHandlers(new FluidTransformRecipeHandler());
List<FluidTransformRecipe> fluidTransformRecipes = Lists.newArrayList();
for (FluidTransformer transformer : FluidTransformRegistry.getRegistry())
{
// Make sure both fluids are registered
if (FluidRegistry.isFluidRegistered(transformer.getInputFluid()) && FluidRegistry.isFluidRegistered(transformer.getOutputFluid()))
{
FluidTransformRecipe recipe = new FluidTransformRecipe(transformer);
// If there's a bucket + 1 block (and an output, for consistency)
if(recipe.getInputs().size() > 2 && recipe.getOutputs().size() > 0)
{
fluidTransformRecipes.add(recipe);
}
}
}
registry.addRecipes(fluidTransformRecipes);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENBlocks.barrelWood), FluidTransformRecipeCategory.UID);
registry.addRecipeCategoryCraftingItem(new ItemStack(ENBlocks.barrelStone), FluidTransformRecipeCategory.UID);
}
@Override
public void onRuntimeAvailable(IJeiRuntime jeiRuntime)
{
}
}
|
package genepi.imputationserver.steps.vcf;
import genepi.hadoop.command.Command;
import genepi.io.FileUtil;
import genepi.io.text.LineReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import java.util.zip.GZIPOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.broadinstitute.variant.vcf.VCFFileReader;
public class VcfFileUtil {
public static VcfFile load(String vcfFilename, int chunksize,
String tabixPath) throws IOException {
Set<Integer> chunks = new HashSet<Integer>();
Set<String> chromosomes = new HashSet<String>();
int noSnps = 0;
int noSamples = 0;
try {
VCFFileReader reader = new VCFFileReader(new File(vcfFilename),
false);
noSamples = reader.getFileHeader().getGenotypeSamples().size();
reader.close();
LineReader lineReader = new LineReader(vcfFilename);
boolean phased = true;
while (lineReader.next()) {
String line = lineReader.get();
if (!line.startsWith("
String tiles[] = line.split("\t", 6);
if (tiles.length < 3) {
throw new IOException(
"The provided VCF file is no tab-delimited");
}
String chromosome = tiles[0];
int position = Integer.parseInt(tiles[1]);
if (phased) {
boolean containsSlash = tiles[5].contains("/");
if (containsSlash) {
phased = false;
}
}
// TODO: check that all are phased
// context.getGenotypes().get(0).isPhased();
chromosomes.add(chromosome);
if (chromosomes.size() > 1) {
throw new IOException(
"The provided VCF file contains more than one chromosome. Please split your input VCF file by chromosome");
}
String ref = tiles[3];
String alt = tiles[4];
if (ref.equals(alt)) {
throw new IOException(
"The provided VCF file is malformed at variation "
+ tiles[2] + ": reference allele ("
+ ref + ") and alternate allele ("
+ alt + ") are the same.");
}
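// Positions are grouped into windows of 'chunksize' base pairs; a position that falls exactly
// on a chunk boundary is assigned to the previous chunk.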
int chunk = position / chunksize;
if (position % chunksize == 0) {
chunk = chunk - 1;
}
chunks.add(chunk);
noSnps++;
}
}
lineReader.close();
reader.close();
// create index
if (tabixPath != null) {
Command tabix = new Command(tabixPath);
tabix.setParams("-p", "vcf", vcfFilename);
tabix.saveStdErr("tabix.output");
int returnCode = tabix.execute();
if (returnCode != 0) {
throw new IOException(
"The provided VCF file is malformed. Error during index creation: "
+ FileUtil.readFileAsString("tabix.output"));
}
}
VcfFile pair = new VcfFile();
pair.setVcfFilename(vcfFilename);
pair.setIndexFilename(vcfFilename + ".tbi");
pair.setNoSnps(noSnps);
pair.setNoSamples(noSamples);
pair.setChunks(chunks);
pair.setChromosomes(chromosomes);
pair.setPhased(phased);
return pair;
} catch (Exception e) {
throw new IOException(e.getMessage());
}
}
public static Set<String> validChromosomes = new HashSet<String>();
static {
validChromosomes.add("1");
validChromosomes.add("2");
validChromosomes.add("3");
validChromosomes.add("4");
validChromosomes.add("5");
validChromosomes.add("6");
validChromosomes.add("7");
validChromosomes.add("8");
validChromosomes.add("9");
validChromosomes.add("10");
validChromosomes.add("11");
validChromosomes.add("12");
validChromosomes.add("13");
validChromosomes.add("14");
validChromosomes.add("15");
validChromosomes.add("16");
validChromosomes.add("17");
validChromosomes.add("18");
validChromosomes.add("19");
validChromosomes.add("20");
validChromosomes.add("21");
validChromosomes.add("22");
}
public static boolean isAutosomal(String chromosome) {
return validChromosomes.contains(chromosome);
}
public static void mergeGz(String local, String hdfs, String ext)
throws FileNotFoundException, IOException {
GZIPOutputStream out = new GZIPOutputStream(new FileOutputStream(local));
merge(out, hdfs, ext);
}
public static void merge(OutputStream out, String hdfs, String ext)
throws IOException {
Configuration conf = new Configuration();
FileSystem fileSystem = FileSystem.get(conf);
Path pathFolder = new Path(hdfs);
FileStatus[] files = fileSystem.listStatus(pathFolder);
List<String> filenames = new Vector<String>();
if (files != null) {
// filters by extension and sorts by filename
for (FileStatus file : files) {
if (!file.isDir()
&& !file.getPath().getName().startsWith("_")
&& (ext == null || file.getPath().getName()
.endsWith(ext))) {
filenames.add(file.getPath().toString());
}
}
Collections.sort(filenames);
boolean firstFile = true;
boolean firstLine = true;
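// Header lines (starting with '#') are copied from the first file only;
// data lines from every file are appended in sorted filename order.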
for (String filename : filenames) {
Path path = new Path(filename);
FSDataInputStream in = fileSystem.open(path);
LineReader reader = new LineReader(in);
while (reader.next()) {
String line = reader.get();
if (line.startsWith("
if (firstFile) {
if (!firstLine) {
out.write('\n');
}
out.write(line.getBytes());
firstLine = false;
}
} else {
if (!firstLine) {
out.write('\n');
}
out.write(line.getBytes());
firstLine = false;
}
}
in.close();
firstFile = false;
}
out.close();
}
}
}
|
package gov.nasa.jpl.mbee.generator;
import gov.nasa.jpl.mbee.lib.Utils;
import gov.nasa.jpl.mbee.viewedit.PresentationElement;
import gov.nasa.jpl.mbee.viewedit.PresentationElement.PEType;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.JSONParser;
import com.nomagic.magicdraw.core.Application;
import com.nomagic.uml2.ext.jmi.helpers.ModelHelper;
import com.nomagic.uml2.ext.jmi.helpers.StereotypesHelper;
import com.nomagic.uml2.ext.magicdraw.classes.mddependencies.Dependency;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.AggregationKindEnum;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Class;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Classifier;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Constraint;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Element;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.ElementValue;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Expression;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.InstanceSpecification;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.InstanceValue;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.LiteralString;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.NamedElement;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Package;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Property;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Slot;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Type;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.TypedElement;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.ValueSpecification;
import com.nomagic.uml2.ext.magicdraw.mdprofiles.Stereotype;
import com.nomagic.uml2.impl.ElementsFactory;
public class ViewInstanceUtils {
private Classifier paraC = Utils.getOpaqueParaClassifier();
private Classifier tparaC = Utils.getParaClassifier();
private Classifier tableC = Utils.getOpaqueTableClassifier();
private Classifier listC = Utils.getOpaqueListClassifier();
private Classifier imageC = Utils.getOpaqueImageClassifier();
private Classifier sectionC = Utils.getOpaqueSectionClassifier();
private Classifier tsectionC = Utils.getSectionClassifier();
private Stereotype presentsS = Utils.getPresentsStereotype();
private Stereotype productS = Utils.getProductStereotype();
private Stereotype viewS = Utils.getViewClassStereotype();
private Property generatedFromView = Utils.getGeneratedFromViewProperty();
private Property generatedFromElement = Utils.getGeneratedFromElementProperty();
private ElementsFactory ef = Application.getInstance().getProject().getElementsFactory();
public ViewInstanceInfo getCurrentInstances(Element viewOrSection, Element view) {
List<InstanceSpecification> tables = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> lists = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> sections = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> paras = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> images = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> manuals = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> all = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> extraRef = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> unused = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> opaque = new ArrayList<InstanceSpecification>();
List<InstanceSpecification> extraManualRef = new ArrayList<InstanceSpecification>();
Package viewInstancePackage = findViewInstancePackage(view);
ViewInstanceInfo res = new ViewInstanceInfo(all, images, tables, lists, paras, sections, manuals, extraRef, extraManualRef, unused, opaque);
Expression e = getViewOrSectionExpression(viewOrSection);
boolean isView = !(viewOrSection instanceof InstanceSpecification);
if (e == null) {
return res;
}
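// Walk the operands of the view/section expression and bucket each referenced instance:
// opaque instances generated by this view (matched by classifier and the generatedFromView slot),
// the view documentation instance, manually created instances, and instances that are also
// referenced from other expressions.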
for (ValueSpecification vs: e.getOperand()) {
if (vs instanceof InstanceValue) {
InstanceSpecification is = ((InstanceValue)vs).getInstance();
if (is == null)
continue;
if (!is.getClassifier().isEmpty()) {
List<Classifier> iscs = is.getClassifier();
boolean viewinstance = false;
if (iscs.contains(paraC) || iscs.contains(tableC) || iscs.contains(listC) ||
iscs.contains(imageC) || iscs.contains(sectionC)) {
for (Element el: is.getOwnedElement()) {
if (el instanceof Slot && ((Slot)el).getDefiningFeature().getName().equals("generatedFromView") &&
!((Slot)el).getValue().isEmpty() && ((Slot)el).getValue().get(0) instanceof ElementValue &&
((ElementValue)((Slot)el).getValue().get(0)).getElement() == view) {
viewinstance = true;
}
}
if (!viewinstance) {
for (InstanceValue iv: is.get_instanceValueOfInstance()) {
if (iv != vs && iv.getOwner() != null) { //an opaque instance that's referenced from somewhere else
extraRef.add(is);
break;
}
}
}
}
if ((iscs.contains(paraC) || iscs.contains(tparaC)) && isView && is.getSpecification() instanceof LiteralString) {
try {
JSONObject ob = (JSONObject)(new JSONParser()).parse(((LiteralString)is.getSpecification()).getValue());
if (view.getID().equals(ob.get("source")) && "documentation".equals(ob.get("sourceProperty"))) {
viewinstance = false; //a view doc instance
res.setViewDocHack(is);
}
} catch (Exception x) {}
}
if (viewinstance) {//instance generated by current view
if (iscs.contains(paraC))
paras.add(is);
else if (iscs.contains(tableC))
tables.add(is);
else if (iscs.contains(listC))
lists.add(is);
else if (iscs.contains(imageC))
images.add(is);
else if (iscs.contains(sectionC))
sections.add(is);
opaque.add(is);
} else {
manuals.add(is);
for (InstanceValue iv: is.get_instanceValueOfInstance()) {
if (iv != vs && iv.getOwner() != null) { //a non opaque instance being referenced from somewhere else
extraManualRef.add(is);
break;
}
}
}
all.add(is);
}
}
}
if (isView) {
Package viewp = viewInstancePackage;
if (viewp != null) {
for (Element el: viewp.getOwnedElement()) {
if (el instanceof InstanceSpecification && ((InstanceSpecification)el).get_instanceValueOfInstance().isEmpty()) {
unused.add((InstanceSpecification)el); //but this might be a manual instance that's referenced by higher project?
}
}
}
}
return res;
}
public boolean isSection(InstanceSpecification is) {
if (is.getClassifier().contains(sectionC) || is.getClassifier().contains(tsectionC))
return true;
return false;
}
public boolean isInSomeViewPackage(InstanceSpecification is) {
Element owner = is.getOwner();
if (owner instanceof Package) {
for (Element e: Utils.collectDirectedRelatedElementsByRelationshipStereotype(owner, presentsS, 2, false, 1)) {
if (StereotypesHelper.hasStereotypeOrDerived(e, viewS))
return true;
}
}
return false;
}
public static Expression getViewOrSectionExpression(Element viewOrSection) {
if (viewOrSection instanceof InstanceSpecification) {
if (((InstanceSpecification)viewOrSection).getSpecification() instanceof Expression)
return (Expression)((InstanceSpecification)viewOrSection).getSpecification();
} else if (viewOrSection instanceof Class) {
Constraint c = Utils.getViewConstraint(viewOrSection);
if (c != null && c.getSpecification() instanceof Expression)
return (Expression)c.getSpecification();
}
return null;
}
public Package findViewInstancePackage(Element view) {
List<Element> results = Utils.collectDirectedRelatedElementsByRelationshipStereotype(view, presentsS, 1, false, 1);
if (!results.isEmpty() && results.get(0) instanceof Package) {
return (Package)results.get(0);
}
return null;
}
public List<Package> findCorrectViewInstancePackageOwners(Element view) {
Type viewt = (Type)view;
List<Package> parentPack = new ArrayList<Package>();
if (StereotypesHelper.hasStereotypeOrDerived(view, productS)) {
Element owner = view.getOwner();
while (!(owner instanceof Package)) {
owner = owner.getOwner();
}
parentPack.add((Package)owner);
} else {
for (TypedElement t: viewt.get_typedElementOfType()) {
if (t instanceof Property && ((Property)t).getAggregation().equals(AggregationKindEnum.COMPOSITE) &&
StereotypesHelper.hasStereotypeOrDerived(t.getOwner(), viewS)) {
Package parent = findViewInstancePackage(t.getOwner());
if (parent != null)
parentPack.add(parent);
}
}
if (parentPack.isEmpty()) {
Element owner = view.getOwner();
while (!(owner instanceof Package)) {
owner = owner.getOwner();
}
parentPack.add((Package)owner);
}
}
return parentPack;
}
public Package createViewInstancePackage(Element view, Package owner) {
Package viewPackage = ef.createPackageInstance();
viewPackage.setName(((NamedElement)view).getName() + " Instances");
viewPackage.setOwner(owner);
Dependency d = ef.createDependencyInstance();
d.setOwner(viewPackage);
ModelHelper.setSupplierElement(d, viewPackage);
ModelHelper.setClientElement(d, view);
StereotypesHelper.addStereotype(d, presentsS);
return viewPackage;
}
public boolean needLockForEdit(PresentationElement pe) {
InstanceSpecification is = pe.getInstance();
if (is == null || (pe.isManual() && !pe.isViewDocHack()))
return false;
if (pe.isViewDocHack())
return true;
ValueSpecification oldvs = is.getSpecification();
//check classifier
if (pe.getNewspec() != null && !pe.getNewspec().get("type").equals("Section")) {
if (oldvs instanceof LiteralString && ((LiteralString)oldvs).getValue() != null) {
try {
JSONObject oldob = (JSONObject)JSONValue.parse(((LiteralString)oldvs).getValue());
if (oldob == null || !oldob.equals(pe.getNewspec()))
return true;
} catch (Exception ex) {
return true;
}
} else
return true;
} else if (pe.getType().equals(PEType.SECTION)) {
if (!(is.getSpecification() instanceof Expression))
return true;
List<InstanceSpecification> list = new ArrayList<InstanceSpecification>();
for (PresentationElement cpe: pe.getChildren()) {
if (cpe.getInstance() == null)
return true;
list.add(cpe.getInstance());
}
List<ValueSpecification> model = ((Expression)is.getSpecification()).getOperand();
if (model.size() != list.size())
return true;
for (int i = 0; i < model.size(); i++) {
ValueSpecification modelvs = model.get(i);
if (!(modelvs instanceof InstanceValue) || ((InstanceValue)modelvs).getInstance() != list.get(i)) {
return true;
}
}
}
return false;
}
public InstanceSpecification updateOrCreateInstance(PresentationElement pe, Package owner) {
InstanceSpecification is = pe.getInstance();
if (is != null && pe.isManual() && !pe.isViewDocHack())
return is;
if (is == null) {
is = ef.createInstanceSpecificationInstance();
if (!pe.isViewDocHack()) {
Slot s = ef.createSlotInstance();
s.setOwner(is);
s.setDefiningFeature(generatedFromView);
ElementValue ev = ef.createElementValueInstance();
ev.setElement(pe.getView());
s.getValue().add(ev);
if (pe.getType() == PEType.SECTION && pe.getLoopElement() != null) {
Slot ss = ef.createSlotInstance();
ss.setOwner(is);
ss.setDefiningFeature(generatedFromElement);
ElementValue ev2 = ef.createElementValueInstance();
ev2.setElement(pe.getLoopElement());
ss.getValue().add(ev2);
}
}
}
JSONObject newspec = pe.getNewspec();
Classifier classifier = null;
String name = "<>";
if (pe.isViewDocHack()) {
newspec = new JSONObject();
newspec.put("source", is.getID());
newspec.put("type", "Paragraph");
newspec.put("sourceProperty", "documentation");
String transclude = "<p> </p><p><mms-transclude-doc data-mms-eid=\"" + pe.getView().getID() + "\">[cf." + ((NamedElement)pe.getView()).getName() +".doc]</mms-transclude-doc></p><p> </p>";
ModelHelper.setComment(is, transclude);
name = "View Documentation";
classifier = tparaC;
} else {
if (pe.getType() == PEType.PARA)
classifier = paraC;
else if (pe.getType() == PEType.TABLE)
classifier = tableC;
else if (pe.getType() == PEType.LIST)
classifier = listC;
else if (pe.getType() == PEType.IMAGE)
classifier = imageC;
else if (pe.getType() == PEType.SECTION)
classifier = sectionC;
name = pe.getName();
if (name == null || name.isEmpty())
name = "<>";
}
if (newspec != null) {
ValueSpecification string = is.getSpecification();
if (!(string instanceof LiteralString))
string = ef.createLiteralStringInstance();
string.setOwner(is);
((LiteralString)string).setValue(newspec.toJSONString());
is.setSpecification(string);
}
is.setName(name);
is.getClassifier().clear();
is.getClassifier().add(classifier);
if (pe.getType() == PEType.SECTION) { //assume all children pe have instance, caller should walk bottom up
ValueSpecification expression = is.getSpecification();
if (!(expression instanceof Expression))
expression = ef.createExpressionInstance();
expression.setOwner(is);
List<InstanceValue> ivs = new ArrayList<InstanceValue>();
for (PresentationElement spe: pe.getChildren()) {
InstanceValue iv = ef.createInstanceValueInstance();
iv.setInstance(spe.getInstance());
ivs.add(iv);
}
((Expression)expression).getOperand().clear();
((Expression)expression).getOperand().addAll(ivs);
}
is.setOwner(owner);
pe.setInstance(is);
return is;
}
//return bfs view order
public List<Element> getViewProcessOrder(Element start, Map<Element, List<Element>> view2view) {
List<Element> res = new ArrayList<Element>();
Queue<Element> toProcess = new LinkedList<Element>();
toProcess.add(start);
while (!toProcess.isEmpty()) {
Element next = toProcess.remove();
res.add(next);
if (view2view.containsKey(next))
toProcess.addAll(view2view.get(next));
}
return res;
}
public Constraint getOrCreateViewConstraint(Element view) {
Constraint c = Utils.getViewConstraint(view);
if (c != null)
return c;
c = ef.createConstraintInstance();
Application.getInstance().getProject().getCounter().setCanResetIDForObject(true);
c.setID(view.getID() + "_vc");
c.setOwner(view);
c.getConstrainedElement().add(view);
return c;
}
public void updateOrCreateConstraint(Element view, List<PresentationElement> pes) {
Constraint c = getOrCreateViewConstraint(view);
ValueSpecification expression = c.getSpecification();
if (!(expression instanceof Expression))
expression = ef.createExpressionInstance();
expression.setOwner(c);
List<InstanceValue> ivs = new ArrayList<InstanceValue>();
for (PresentationElement spe: pes) {
InstanceValue iv = ef.createInstanceValueInstance();
iv.setInstance(spe.getInstance());
ivs.add(iv);
}
((Expression)expression).getOperand().clear();
((Expression)expression).getOperand().addAll(ivs);
}
public boolean needLockForEditConstraint(Element view, List<PresentationElement> pes) {
Constraint c = Utils.getViewConstraint(view);
if (c == null)
return false;
ValueSpecification vs = c.getSpecification();
if (vs == null || !(vs instanceof Expression))
return true;
Expression ex = (Expression)vs;
List<InstanceSpecification> list = new ArrayList<InstanceSpecification>();
for (PresentationElement cpe: pes) {
if (cpe.getInstance() == null)
return true;
list.add(cpe.getInstance());
}
List<ValueSpecification> model = ex.getOperand();
if (model.size() != list.size())
return true;
for (int i = 0; i < model.size(); i++) {
ValueSpecification modelvs = model.get(i);
if (!(modelvs instanceof InstanceValue) || ((InstanceValue)modelvs).getInstance() != list.get(i)) {
return true;
}
}
return false;
}
public Package getOrCreateUnusedInstancePackage() {
Package rootPackage = Utils.getRootElement();
String viewInstID = Utils.getProject().getPrimaryProject() .getProjectID().replace("PROJECT", "View_Instances");
String unusedId = Utils.getProject().getPrimaryProject() .getProjectID().replace("PROJECT", "Unused_View_Instances");
Package viewInst = (Package)Application.getInstance().getProject().getElementByID(viewInstID);
Package unusedViewInst = (Package)Application.getInstance().getProject().getElementByID(unusedId);
if (unusedViewInst != null)
return unusedViewInst;
Application.getInstance().getProject().getCounter().setCanResetIDForObject(true);
if (viewInst == null) {
viewInst = ef.createPackageInstance();
viewInst.setID(viewInstID);
viewInst.setName("View Instances");
viewInst.setOwner(rootPackage);
}
unusedViewInst = ef.createPackageInstance();
unusedViewInst.setID(unusedId);
unusedViewInst.setName("Unused View Instances");
unusedViewInst.setOwner(viewInst);
return unusedViewInst;
}
}
|
package info.faceland.loot.listeners;
import static info.faceland.loot.utils.inventory.InventoryUtil.broadcast;
import static info.faceland.loot.utils.inventory.InventoryUtil.getFirstColor;
import com.tealcube.minecraft.bukkit.shade.apache.commons.lang3.StringUtils;
import com.tealcube.minecraft.bukkit.shade.apache.commons.lang3.math.NumberUtils;
import com.tealcube.minecraft.bukkit.shade.google.common.base.CharMatcher;
import com.tealcube.minecraft.bukkit.shade.google.common.collect.Sets;
import info.faceland.loot.LootPlugin;
import info.faceland.loot.api.enchantments.EnchantmentTome;
import info.faceland.loot.api.items.CustomItem;
import info.faceland.loot.api.items.ItemGenerationReason;
import info.faceland.loot.api.sockets.SocketGem;
import info.faceland.loot.api.tier.Tier;
import info.faceland.loot.data.ItemRarity;
import info.faceland.loot.data.UniqueLoot;
import info.faceland.loot.events.LootDropEvent;
import info.faceland.loot.items.prefabs.IdentityTome;
import info.faceland.loot.items.prefabs.SocketExtender;
import info.faceland.loot.items.prefabs.UnidentifiedItem;
import info.faceland.loot.items.prefabs.UpgradeScroll;
import info.faceland.loot.items.prefabs.UpgradeScroll.ScrollType;
import info.faceland.loot.math.LootRandom;
import info.faceland.loot.utils.inventory.MaterialUtil;
import io.pixeloutlaw.minecraft.spigot.hilt.HiltItemStack;
import java.util.List;
import java.util.UUID;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.Item;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.inventory.ItemFlag;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.metadata.FixedMetadataValue;
public class LootDropListener implements Listener {
private final LootPlugin plugin;
private final LootRandom random;
private final String itemFoundFormat;
public LootDropListener(LootPlugin plugin) {
this.plugin = plugin;
this.random = new LootRandom();
this.itemFoundFormat = plugin.getSettings().getString("language.broadcast.found-item", "");
}
@EventHandler(priority = EventPriority.HIGHEST)
public void onLootDrop(LootDropEvent event) {
double dropMultiplier = event.getQuantityMultiplier();
double rarityMultiplier = event.getQualityMultiplier();
int mobLevel = event.getMonsterLevel();
UUID looterUUID = event.getLooterUUID();
Player killer = Bukkit.getPlayer(looterUUID);
if (StringUtils.isNotBlank(event.getUniqueEntity())) {
if (plugin.getUniqueDropsManager().getData(event.getUniqueEntity()) != null) {
UniqueLoot loot = plugin.getUniqueDropsManager().getData(event.getUniqueEntity());
dropMultiplier *= loot.getQuantityMultiplier();
rarityMultiplier *= loot.getQualityMultiplier();
doUniqueDrops(loot, event.getLocation(), killer);
}
}
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.normal-drop", 0D)) {
Tier tier = plugin.getTierManager().getRandomTier();
ItemRarity rarity;
if (rarityMultiplier == 1D) {
rarity = plugin.getRarityManager().getRandomRarity();
} else {
rarity = plugin.getRarityManager().getRandomRarityWithBonus(rarityMultiplier);
}
HiltItemStack his = plugin.getNewItemBuilder()
.withTier(tier)
.withRarity(rarity)
.withLevel(Math.max(1, Math.min(mobLevel - 2 + random.nextIntRange(0, 5), 100)))
.withItemGenerationReason(ItemGenerationReason.MONSTER)
.build();
int qualityBonus = 1;
double qualityChance = plugin.getSettings().getDouble("config.random-quality-chance", 0.1);
double multiQualityChance = plugin.getSettings()
.getDouble("config.multi-quality-chance", 0.1);
if (random.nextDouble() <= qualityChance) {
while (random.nextDouble() <= multiQualityChance && qualityBonus < 5) {
qualityBonus++;
}
upgradeItemQuality(his, qualityBonus);
}
int upgradeBonus = 1;
double upgradeChance = plugin.getSettings().getDouble("config.random-upgrade-chance", 0.1);
double multiUpgradeChance = plugin.getSettings()
.getDouble("config.multi-upgrade-chance", 0.1);
if (random.nextDouble() <= upgradeChance) {
while (random.nextDouble() <= multiUpgradeChance && upgradeBonus < 9) {
upgradeBonus++;
}
upgradeItem(his, upgradeBonus);
}
boolean broadcast = rarity.isBroadcast() || upgradeBonus > 4 || qualityBonus > 2;
dropItem(event.getLocation(), his, killer, broadcast);
}
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.craft-mat", 0D)) {
Object[] matArr = plugin.getCraftMatManager().getCraftMaterials().keySet().toArray();
Material m = (Material) matArr[random.nextInt(matArr.length)];
int quality = 2;
while (random.nextDouble() <= plugin.getSettings()
.getDouble("config.drops.material-quality-up", 0.1D) &&
quality < 3) {
quality++;
}
double materialLevel = mobLevel - (mobLevel * 0.3 * random.nextDouble());
HiltItemStack his = MaterialUtil.buildMaterial(
m, plugin.getCraftMatManager().getCraftMaterials().get(m), (int) materialLevel, quality);
his.setAmount(1 + random.nextInt(2));
dropItem(event.getLocation(), his, killer, false);
}
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.socket-gem", 0D)) {
SocketGem sg;
if (plugin.getSettings().getBoolean("config.beast.beast-mode-activate", false)) {
sg = plugin.getSocketGemManager().getRandomSocketGemByLevel(mobLevel);
} else {
sg = plugin.getSocketGemManager().getRandomSocketGem(true, event.getDistance());
}
HiltItemStack his = sg.toItemStack(1);
dropItem(event.getLocation(), his, killer, sg.isBroadcast());
}
if (plugin.getSettings().getBoolean("config.custom-enchanting", true)) {
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.enchant-gem", 0D)) {
EnchantmentTome es = plugin.getEnchantmentStoneManager()
.getRandomEnchantmentStone(true, event.getDistance());
HiltItemStack his = es.toItemStack(1);
dropItem(event.getLocation(), his, killer, es.isBroadcast());
}
}
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.upgrade-scroll", 0D)) {
UpgradeScroll us = new UpgradeScroll(UpgradeScroll.ScrollType.random(true));
ScrollType scrollType = us.getScrollType();
boolean broadcast = scrollType == ScrollType.ANCIENT || scrollType == ScrollType.AWAKENED ||
scrollType == ScrollType.FLAWLESS || scrollType == ScrollType.DIM ||
scrollType == ScrollType.SHINING || scrollType == ScrollType.ILLUMINATING ||
scrollType == ScrollType.RADIANT;
dropItem(event.getLocation(), us, null, broadcast);
}
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.identity-tome", 0D)) {
HiltItemStack his = new IdentityTome();
dropItem(event.getLocation(), his, killer, false);
}
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.custom-item", 0D)) {
CustomItem ci;
if (plugin.getSettings().getBoolean("config.beast.beast-mode-activate", false)) {
ci = plugin.getCustomItemManager().getRandomCustomItemByLevel(mobLevel);
} else {
ci = plugin.getCustomItemManager()
.getRandomCustomItem(true, event.getDistance());
}
HiltItemStack his = ci.toItemStack(1);
int qualityBonus = 1;
if (ci.canBeQuality()) {
double qualityChance = plugin.getSettings().getDouble("config.random-quality-chance", 0.1);
double multiQualityChance = plugin.getSettings()
.getDouble("config.multi-quality-chance", 0.1);
if (random.nextDouble() <= qualityChance) {
while (random.nextDouble() <= multiQualityChance && qualityBonus < 5) {
qualityBonus++;
}
his = upgradeItemQuality(his, qualityBonus);
}
}
boolean broadcast = ci.isBroadcast() || qualityBonus > 2;
dropItem(event.getLocation(), his, killer, broadcast);
}
if (random.nextDouble() < plugin.getSettings().getDouble("config.drops.socket-extender", 0D)) {
HiltItemStack his = new SocketExtender();
dropItem(event.getLocation(), his, killer, true);
}
// NOTE: Drop bonus should not be applied to Unidentified Items!
if (random.nextDouble() < dropMultiplier * plugin.getSettings()
.getDouble("config.drops.unidentified-item", 0D)) {
Material m = Material.WOOD_SWORD;
HiltItemStack his;
if (plugin.getSettings().getBoolean("config.beast.beast-mode-activate", false)) {
his = new UnidentifiedItem(m, Math.min(mobLevel, 100));
} else {
his = new UnidentifiedItem(m, -1);
}
ItemMeta itemMeta = his.getItemMeta();
itemMeta.addItemFlags(ItemFlag.HIDE_ATTRIBUTES);
his.setItemMeta(itemMeta);
dropItem(event.getLocation(), his, null, false);
}
}
private void doUniqueDrops(UniqueLoot uniqueLoot, Location location, Player killer) {
for (String gemString : uniqueLoot.getGemMap().keySet()) {
if (uniqueLoot.getGemMap().get(gemString) > random.nextDouble()) {
SocketGem gem = plugin.getSocketGemManager().getSocketGem(gemString);
if (gem == null) {
continue;
}
HiltItemStack his = gem.toItemStack(1);
dropItem(location, his, killer, gem.isBroadcast());
}
}
for (String tomeString : uniqueLoot.getTomeMap().keySet()) {
if (uniqueLoot.getTomeMap().get(tomeString) > random.nextDouble()) {
EnchantmentTome tome = plugin.getEnchantmentStoneManager().getEnchantmentStone(tomeString);
if (tome == null) {
continue;
}
HiltItemStack his = tome.toItemStack(1);
dropItem(location, his, killer, tome.isBroadcast());
}
}
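// For each custom item table, make a weighted random pick: roll a value in [0, total weight)
// and take the first entry whose cumulative weight reaches the roll. A NO_DROP_WEIGHT entry
// represents the chance of dropping nothing.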
for (String tableName : uniqueLoot.getCustomItemMap().keySet()) {
double totalWeight = 0;
for (double weight : uniqueLoot.getCustomItemMap().get(tableName).values()) {
totalWeight += weight;
}
totalWeight *= random.nextDouble();
double currentWeight = 0;
for (String customName : uniqueLoot.getCustomItemMap().get(tableName).keySet()) {
currentWeight += uniqueLoot.getCustomItemMap().get(tableName).get(customName);
if (currentWeight >= totalWeight) {
if ("NO_DROP_WEIGHT".equalsIgnoreCase(customName)) {
break;
}
CustomItem ci = plugin.getCustomItemManager().getCustomItem(customName);
if (ci == null) {
break;
}
HiltItemStack his = ci.toItemStack(1);
dropItem(location, his, killer, ci.isBroadcast());
break;
}
}
}
}
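// Raises the first "+N" stat line in the item lore by upgradeBonus and prefixes the display name
// with the bonus; bonuses of +7 or more also add a hidden Durability enchantment for the glow effect.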
private HiltItemStack upgradeItem(HiltItemStack his, int upgradeBonus) {
boolean succeed = false;
List<String> lore = his.getLore();
for (int i = 0; i < lore.size(); i++) {
String s = lore.get(i);
String ss = ChatColor.stripColor(s);
if (!ss.startsWith("+")) {
continue;
}
succeed = true;
String loreLev = CharMatcher.DIGIT.or(CharMatcher.is('-')).retainFrom(ss);
int loreLevel = NumberUtils.toInt(loreLev);
lore.set(i, s.replace("+" + loreLevel, "+" + (loreLevel + upgradeBonus)));
String name = getFirstColor(his.getName()) + ("+" + upgradeBonus) + " " + his.getName();
his.setName(name);
break;
}
if (succeed) {
his.setLore(lore);
if (upgradeBonus > 6) {
his.addUnsafeEnchantment(Enchantment.DURABILITY, 1);
his.setItemFlags(Sets.newHashSet(ItemFlag.HIDE_ENCHANTS, ItemFlag.HIDE_ATTRIBUTES));
}
}
return his;
}
private HiltItemStack upgradeItemQuality(HiltItemStack his, int upgradeBonus) {
boolean succeed = false;
List<String> lore = his.getLore();
for (int i = 0; i < lore.size(); i++) {
String s = lore.get(i);
String ss = ChatColor.stripColor(s);
if (!ss.startsWith("+")) {
continue;
}
succeed = true;
String loreLev = CharMatcher.DIGIT.or(CharMatcher.is('-')).retainFrom(ss);
int loreLevel = NumberUtils.toInt(loreLev);
lore.set(i, s.replace("+" + loreLevel, "+" + (loreLevel + upgradeBonus)));
String qualityEnhanceName = plugin.getSettings()
.getString("language.quality." + upgradeBonus, "");
String name = getFirstColor(his.getName()) + qualityEnhanceName + " " + his.getName();
his.setName(name);
break;
}
if (succeed) {
his.setLore(lore);
}
return his;
}
private void dropItem(Location loc, HiltItemStack itemStack, Player looter, boolean broadcast) {
Item drop = loc.getWorld().dropItemNaturally(loc, itemStack);
if (looter != null) {
applyOwnerMeta(drop, looter.getUniqueId());
if (broadcast) {
broadcast(looter, itemStack, itemFoundFormat);
}
}
}
private void applyOwnerMeta(Item drop, UUID owner) {
drop.setMetadata("loot-owner", new FixedMetadataValue(plugin, owner));
drop.setMetadata("loot-time", new FixedMetadataValue(plugin, System.currentTimeMillis()));
}
}
|
package info.gameboxx.gameboxx.components;
import info.gameboxx.gameboxx.exceptions.OptionAlreadyExistsException;
import info.gameboxx.gameboxx.game.Game;
import info.gameboxx.gameboxx.components.internal.GameComponent;
import info.gameboxx.gameboxx.game.GameSession;
import info.gameboxx.gameboxx.util.SoundEffect;
import info.gameboxx.gameboxx.util.Str;
import org.bukkit.entity.Player;
import org.bukkit.scheduler.BukkitRunnable;
/**
* Adding this component will add a countdown before the game starts.
*/
//TODO: Method to start/stop/reset the countdown.
public class CountdownGC extends GameComponent {
public static final long TICKS_IN_SECOND = 20L;
private int countdown = 30;
private int seconds;
private int mainInterval;
private int startSecondInterval;
private SoundEffect sound;
private String message;
private CountdownRunnable runnable;
/**
* @see GameComponent
* @param seconds The amount of seconds to count down from.
* @param mainInterval The interval to send a message and play a sound. (Recommended at 10)
* @param startSecondInterval At which time the second countdown starts.
* @param sound The {@link SoundEffect} to play when the countdown triggers. (may be null for no sound)
* @param message The message to broadcast when the countdown triggers.
* Use the {seconds} placeholder in the message for displaying the time!
*/
public CountdownGC(Game game, int seconds, int mainInterval, int startSecondInterval,
SoundEffect sound, String message) {
super(game);
addDependency(PlayersCP.class);
this.seconds = seconds;
this.countdown = seconds;
this.mainInterval = mainInterval;
this.startSecondInterval = startSecondInterval;
this.sound = sound;
this.message = message;
this.runnable = new CountdownRunnable();
}
@Override
public void registerOptions() throws OptionAlreadyExistsException {}
@Override
public CountdownGC newInstance(GameSession session) {
return (CountdownGC) new CountdownGC(getGame(), seconds, mainInterval, startSecondInterval,
sound, message).setSession(session);
}
/**
* Executes a single countdown tick and cancels the runnable once the countdown reaches zero.
*/
public void count() {
if (countdown <= 0) {
countdown = 0;
runnable.cancel();
// TODO: Start the session.
return;
}
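// Announce whenever the remaining time is a multiple of the main interval,
// or on every tick once it has dropped to startSecondInterval or below.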
if (countdown % mainInterval == 0 || countdown <= startSecondInterval) {
sound.play(getDependency(PlayersCP.class).getOnlinePlayers());
//TODO: Have a message component or put this somewhere else.
for (Player player : getDependency(PlayersCP.class).getOnlinePlayers()) {
player.sendMessage(Str.color(message));
}
}
countdown--;
}
/**
* Get the remaining countdown time in seconds.
* @return The remaining time on the countdown in seconds.
*/
public int getCountdown() {
return countdown;
}
/**
* Force override the countdown time.
* There is no need to manually decrease the countdown time unless you want to force decrease it.
* @param countdown The countdown time in seconds to set.
*/
public void setCountdown(int countdown) {
this.countdown = countdown;
}
/**
* Get the amount in seconds to start the countdown from.
* @return Seconds.
*/
public int getSeconds() {
return seconds;
}
/**
* Set the amount in seconds to start the countdown from.
* @param seconds The seconds to start the countdown from.
*/
public void setSeconds(int seconds) {
this.seconds = seconds;
}
/**
* Get the main interval between counts.
* @return The interval between counts in seconds.
*/
public int getMainInterval() {
return mainInterval;
}
/**
* Set the main interval between counts.
* It will be used like: (time % interval == 0)
* The higher it is the less frequent it will count.
* @param mainInterval The interval between counts in seconds.
*/
public void setMainInterval(int mainInterval) {
this.mainInterval = mainInterval;
}
/**
* Get the start time in seconds when to start the second interval.
* @return The start time for the second interval.
*/
public int getStartSecondInterval() {
return startSecondInterval;
}
/**
* Set the start time in seconds when to start the second interval.
* For example if you set it to 5 it would count like 30, 20, 10, 5, 4, 3, 2, 1...
* @param startSecondInterval The start time for the second interval.
*/
public void setStartSecondInterval(int startSecondInterval) {
this.startSecondInterval = startSecondInterval;
}
/**
* Get the {@link SoundEffect} to play on each count.
* @return The {@link SoundEffect} to play on each count. May be null if there is no sound to play!
*/
public SoundEffect getSound() {
return sound;
}
/**
* Set the {@link SoundEffect} to play on each count.
* @param sound The {@link SoundEffect} to play on each count. Set to null to have no sound play.
*/
public void setSound(SoundEffect sound) {
this.sound = sound;
}
/**
* Get the message that will be broadcasted on each count.
* Before displaying the message replace {seconds} with the remaining seconds on the countdown.
* @return The message that will be broadcasted.
*/
public String getMessage() {
return message;
}
/**
* Set the message that will be broadcasted on each count.
* Use the {seconds} placeholder in the message for displaying the time!
* @param message The message that will be broadcasted.
*/
public void setMessage(String message) {
this.message = message;
}
/**
* Starts the countdown associated with this class.
*/
public void startCountdown() {
runnable.runTaskTimer(getAPI(), 0L, TICKS_IN_SECOND);
}
private class CountdownRunnable extends BukkitRunnable {
@Override
public void run() {
count();
}
}
}
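/*
* Usage sketch (not part of the original class): a rough example of wiring this component
* into a game. The null sound argument is allowed per the constructor javadoc, and the
* `game` and `session` objects are assumed to come from the surrounding framework.
*
* CountdownGC countdown = new CountdownGC(game, 30, 10, 5, null,
* "Game starts in {seconds} seconds!");
* CountdownGC instance = countdown.newInstance(session);
* instance.startCountdown();
*/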
|
package land.face.strife.managers;
import static org.bukkit.attribute.Attribute.GENERIC_FOLLOW_RANGE;
import com.tealcube.minecraft.bukkit.shade.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import land.face.strife.StrifePlugin;
import land.face.strife.data.StrifeMob;
import land.face.strife.data.UniqueEntity;
import land.face.strife.data.ability.EntityAbilitySet;
import land.face.strife.data.ability.EntityAbilitySet.TriggerAbilityType;
import land.face.strife.stats.StrifeStat;
import land.face.strife.util.ItemUtil;
import land.face.strife.util.LogUtil;
import land.face.strife.util.StatUtil;
import me.libraryaddict.disguise.DisguiseAPI;
import me.libraryaddict.disguise.disguisetypes.Disguise;
import me.libraryaddict.disguise.disguisetypes.DisguiseType;
import me.libraryaddict.disguise.disguisetypes.FlagWatcher;
import me.libraryaddict.disguise.disguisetypes.MiscDisguise;
import me.libraryaddict.disguise.disguisetypes.MobDisguise;
import me.libraryaddict.disguise.disguisetypes.PlayerDisguise;
import me.libraryaddict.disguise.disguisetypes.RabbitType;
import me.libraryaddict.disguise.disguisetypes.watchers.FoxWatcher;
import me.libraryaddict.disguise.disguisetypes.watchers.RabbitWatcher;
import org.bukkit.Location;
import org.bukkit.attribute.Attribute;
import org.bukkit.attribute.AttributeModifier;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Fox;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Phantom;
import org.bukkit.entity.Rabbit;
import org.bukkit.entity.Slime;
import org.bukkit.entity.Zombie;
import org.bukkit.metadata.FixedMetadataValue;
public class UniqueEntityManager {
private final StrifePlugin plugin;
private final Map<String, UniqueEntity> loadedUniquesMap;
private final Map<UniqueEntity, Disguise> cachedDisguises;
public UniqueEntityManager(StrifePlugin plugin) {
this.plugin = plugin;
this.loadedUniquesMap = new HashMap<>();
this.cachedDisguises = new HashMap<>();
}
public UniqueEntity getUnique(String uniqueId) {
return loadedUniquesMap.getOrDefault(uniqueId, null);
}
public Map<String, UniqueEntity> getLoadedUniquesMap() {
return loadedUniquesMap;
}
public void addUniqueEntity(String key, UniqueEntity uniqueEntity) {
loadedUniquesMap.put(key, uniqueEntity);
}
public boolean isLoadedUnique(String name) {
return loadedUniquesMap.containsKey(name);
}
public StrifeMob spawnUnique(String unique, Location location) {
UniqueEntity uniqueEntity = loadedUniquesMap.get(unique);
if (uniqueEntity == null) {
plugin.getLogger().warning("Attempted to spawn non-existing unique: " + unique);
return null;
}
return spawnUnique(uniqueEntity, location);
}
StrifeMob spawnUnique(UniqueEntity uniqueEntity, Location location) {
if (uniqueEntity.getType() == null) {
LogUtil.printWarning("Null entity type: " + uniqueEntity.getName());
return null;
}
LogUtil.printDebug("Spawning unique entity " + uniqueEntity.getId());
assert uniqueEntity.getType().getEntityClass() != null;
Entity entity = Objects.requireNonNull(location.getWorld())
.spawn(location, uniqueEntity.getType().getEntityClass(),
e -> e.setMetadata("UNIQUE_ID", new FixedMetadataValue(plugin, uniqueEntity.getId())));
if (!entity.isValid()) {
LogUtil.printWarning(
"Attempted to spawn unique " + uniqueEntity.getName() + " but entity is invalid?");
return null;
}
LivingEntity le = (LivingEntity) entity;
le.setRemoveWhenFarAway(true);
if (le instanceof Zombie) {
((Zombie) le).setBaby(uniqueEntity.isBaby());
} else if (le instanceof Slime) {
int size = uniqueEntity.getSize();
if (size < 1) {
size = 2 + (int) (Math.random() * 3);
}
((Slime) le).setSize(size);
} else if (le instanceof Phantom) {
int size = uniqueEntity.getSize();
if (size < 1) {
size = 1 + (int) (Math.random() * 3);
}
((Phantom) le).setSize(size);
} else if (le instanceof Rabbit) {
((Rabbit) le).setRabbitType(Rabbit.Type.THE_KILLER_BUNNY);
((Rabbit) le).setAdult();
}
if (uniqueEntity.getFollowRange() != -1) {
if (le.getAttribute(GENERIC_FOLLOW_RANGE) != null) {
le.getAttribute(GENERIC_FOLLOW_RANGE)
.setBaseValue(uniqueEntity.getFollowRange());
}
if (le instanceof Zombie && uniqueEntity.isRemoveFollowMods()) {
for (AttributeModifier mod : le.getAttribute(GENERIC_FOLLOW_RANGE).getModifiers()) {
le.getAttribute(GENERIC_FOLLOW_RANGE).removeModifier(mod);
}
}
}
if (le.getAttribute(Attribute.GENERIC_KNOCKBACK_RESISTANCE) != null && uniqueEntity
.isKnockbackImmune()) {
le.getAttribute(Attribute.GENERIC_KNOCKBACK_RESISTANCE).setBaseValue(100);
}
if (le.getEquipment() != null) {
le.getEquipment().clear();
ItemUtil.delayedEquip(uniqueEntity.getEquipment(), le);
}
le.setCustomName(uniqueEntity.getName());
le.setCustomNameVisible(uniqueEntity.isShowName());
int mobLevel = uniqueEntity.getBaseLevel();
if (mobLevel == -1) {
mobLevel = StatUtil.getMobLevel(le);
}
Map<StrifeStat, Float> stats = new HashMap<>();
if (mobLevel != 0) {
stats.putAll(plugin.getMonsterManager().getBaseStats(le, mobLevel));
}
stats = StatUpdateManager.combineMaps(stats, uniqueEntity.getAttributeMap());
StrifeMob strifeMob = plugin.getStrifeMobManager().setEntityStats(le, stats);
if (uniqueEntity.isAllowMods()) {
plugin.getMobModManager().doModApplication(strifeMob);
}
strifeMob.setUniqueEntityId(uniqueEntity.getId());
strifeMob.setFactions(uniqueEntity.getFactions());
strifeMob.setDespawnOnUnload(true);
strifeMob.setCharmImmune(uniqueEntity.isCharmImmune());
if (uniqueEntity.isBurnImmune()) {
le.setMetadata("NO_BURN", new FixedMetadataValue(plugin, true));
}
if (uniqueEntity.isFallImmune()) {
le.setMetadata("NO_FALL", new FixedMetadataValue(plugin, true));
}
if (uniqueEntity.isIgnoreSneak()) {
le.setMetadata("IGNORE_SNEAK", new FixedMetadataValue(plugin, true));
}
if (StringUtils.isNotBlank(uniqueEntity.getMount())) {
StrifeMob mountMob = spawnUnique(uniqueEntity.getMount(), location);
if (mountMob != null) {
mountMob.getEntity().addPassenger(strifeMob.getEntity());
}
}
plugin.getStatUpdateManager().updateAttributes(strifeMob);
strifeMob.setAbilitySet(new EntityAbilitySet(uniqueEntity.getAbilitySet()));
plugin.getAbilityManager().abilityCast(strifeMob, TriggerAbilityType.PHASE_SHIFT);
plugin.getParticleTask().addParticle(le, uniqueEntity.getStrifeParticle());
if (cachedDisguises.containsKey(uniqueEntity)) {
DisguiseAPI.disguiseToAll(le, cachedDisguises.get(uniqueEntity));
}
plugin.getAbilityManager().startAbilityTimerTask(strifeMob);
return strifeMob;
}
public void cacheDisguise(UniqueEntity uniqueEntity, String disguiseType, String playerName,
String typeData) {
DisguiseType type = DisguiseType.valueOf(disguiseType);
if (type == DisguiseType.PLAYER) {
if (StringUtils.isBlank(playerName)) {
playerName = "Pur3p0w3r";
}
PlayerDisguise playerDisguise = new PlayerDisguise(uniqueEntity.getName(), playerName);
cachedDisguises.put(uniqueEntity, playerDisguise);
return;
}
if (type.isMob()) {
MobDisguise mobDisguise = new MobDisguise(type);
if (StringUtils.isNotBlank(typeData)) {
FlagWatcher watcher = mobDisguise.getWatcher();
try {
switch (type) {
case FOX:
Fox.Type foxType = Fox.Type.valueOf(typeData);
((FoxWatcher) watcher).setType(foxType);
break;
case RABBIT:
RabbitType rabbitType = RabbitType.valueOf(typeData);
((RabbitWatcher) watcher).setType(rabbitType);
break;
}
} catch (Exception e) {
LogUtil.printWarning("Cannot load type " + typeData + " for " + uniqueEntity.getId());
}
}
mobDisguise.setShowName(true);
mobDisguise.setReplaceSounds(true);
cachedDisguises.put(uniqueEntity, mobDisguise);
} else if (type.isMisc()) {
MiscDisguise miscDisguise = new MiscDisguise(type);
miscDisguise.setShowName(true);
miscDisguise.setReplaceSounds(true);
cachedDisguises.put(uniqueEntity, miscDisguise);
}
}
}
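/*
* Usage sketch (not part of the original class): registering and spawning a unique.
* The key, `uniqueEntity` and `spawnLocation` values are hypothetical; in practice the
* plugin's own manager instance would be used rather than constructing a new one.
*
* UniqueEntityManager manager = new UniqueEntityManager(plugin);
* manager.addUniqueEntity("SKELETON_KING", uniqueEntity);
* StrifeMob boss = manager.spawnUnique("SKELETON_KING", spawnLocation);
* if (boss == null) {
* plugin.getLogger().warning("SKELETON_KING failed to spawn");
* }
*/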
|
package me.deftware.client.framework.fonts;
import me.deftware.client.framework.main.Bootstrap;
import me.deftware.client.framework.utils.ChatColor;
import me.deftware.client.framework.utils.TexUtil;
import me.deftware.client.framework.utils.Texture;
import org.apache.commons.lang3.ArrayUtils;
import org.lwjgl.opengl.GL11;
import javax.annotation.Nonnull;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.util.HashMap;
@SuppressWarnings("Duplicates")
public class BitmapFont implements EMCFont{
protected int lastRenderedWidth;
protected int lastRenderedHeight;
protected String fontName;
protected int fontSize, shadowSize = 1;
protected boolean bold;
protected boolean italics;
protected boolean underlined;
protected boolean striked;
protected boolean moving;
protected boolean antialiased;
protected boolean memorysaving;
protected Font stdFont;
protected HashMap<Character, Texture> bitmapStore = new HashMap<>();
public BitmapFont(@Nonnull String fontName, int fontSize, int modifiers) {
this.fontName = fontName;
this.fontSize = fontSize;
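//modifiers is a bit mask: 1=bold, 2=italics, 4=underlined, 8=striked, 16=moving, 32=antialiased, 64=memorysaving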
this.bold = ((modifiers & 1) != 0);
this.italics = ((modifiers & 2) != 0);
this.underlined = ((modifiers & 4) != 0);
this.striked = ((modifiers & 8) != 0);
this.moving = ((modifiers & 16) != 0);
this.antialiased = ((modifiers & 32) != 0);
this.memorysaving = ((modifiers & 64) != 0);
prepareStandardFont();
lastRenderedWidth = 0;
lastRenderedHeight = 0;
}
protected void prepareStandardFont(){
if (!bold && !italics) {
this.stdFont = new Font(fontName, Font.PLAIN, fontSize);
} else {
if (bold && italics) {
this.stdFont = new Font(fontName, Font.BOLD | java.awt.Font.ITALIC, fontSize);
} else if (!bold) { //One of them must be false by now so we have to check only one
this.stdFont = new Font(fontName, Font.ITALIC, fontSize);
} else {
this.stdFont = new Font(fontName, Font.BOLD, fontSize);
}
}
}
@Override
public int initialize(Color color, String extras) {
if(extras == null)
extras= "";
char[] additionalCharacters = extras.toCharArray();
//Generate the font bitmaps
//Lowercase alphabet
for (char lowercaseAlphabet = 'a'; lowercaseAlphabet <= 'z'; lowercaseAlphabet++) {
characterGenerate(lowercaseAlphabet, color);
}
//Uppercase alphabet
for (char uppercaseAlphabet = 'A'; uppercaseAlphabet <= 'Z'; uppercaseAlphabet++) {
characterGenerate(uppercaseAlphabet, color);
}
//Numbers
for (char numeric = 48; numeric <= 57; numeric++) { //0 - 9 in ASCII
characterGenerate(numeric, color);
}
char[] specialCharacters = {'!', '#', '$', '%', '&', '\'', '(', ')', '*', '+', ',', '-', '.', '/',
':', ';', '<', '=', '>', '?', '@', '[', '\\', ']', '^', '_', '`', '{', '|', '}', '~', '"'};
if(additionalCharacters.length > 0)
specialCharacters = ArrayUtils.addAll(specialCharacters, additionalCharacters);
//Additional and special characters
for (int additional = 0; additional < specialCharacters.length; additional++) {
characterGenerate(specialCharacters[additional], color);
}
return 0;
}
protected void characterGenerate(char character, Color color){
Texture bitmapTexture;
String letterBuffer = String.valueOf(character);
int textwidth = getStringWidth(letterBuffer);
int textheight = getStringHeight(letterBuffer);
BufferedImage characterTexture = new BufferedImage(textwidth,textheight, BufferedImage.TYPE_INT_ARGB);
Graphics2D graphics = characterTexture.createGraphics();
graphics.setFont(stdFont);
graphics.setColor(color);
if (antialiased) {
graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
graphics.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
}
graphics.drawString(letterBuffer, 0, textheight - textheight / 4);
graphics.dispose();
bitmapTexture = new Texture(textwidth, textheight, true);
bitmapTexture.fillFromBufferedImageFlip(characterTexture);
bitmapTexture.update();
bitmapStore.put(character, bitmapTexture);
}
/**
* Unimplemented in BitmapFont
* @param text
* @param color
* @return 0
*/
@Override
public int generateString(String text, Color color) {
return 0;
}
@Override
public int drawString(int x, int y, String text) {
drawString(x, y, text, null);
return 0;
}
@Override
public int drawString(int x, int y, String text, Color color) {
char[] buffer = text.toCharArray();
int offset = 0;
for(int character = 0; character < buffer.length; character++){
if(buffer[character] == ' ') {
offset += getStringWidth(" ");
continue;
}
else if(!bitmapStore.containsKey(buffer[character])) {
buffer[character] = '?';
}
TexUtil.prepareAndPushMatrix(); //GL PART
if(color != null) {
GL11.glColor4ub((byte) color.getRed(), (byte) color.getGreen(), (byte) color.getBlue(), (byte) color.getAlpha());
}
Texture texture = bitmapStore.get(buffer[character]);
texture.updateTexture();
texture.bind(GL11.GL_ONE_MINUS_SRC_ALPHA);
int width = texture.getWidth();
int height = texture.getHeight();
TexUtil.renderAndPopMatrix(x + offset, y, width, height); //GL PART
offset += width;
}
lastRenderedWidth = offset;
return 0;
}
@Override
public int drawStringWithShadow(int x, int y, String text) {
drawStringWithShadow(x, y, text, Color.white);
return 0;
}
@Override
public int drawStringWithShadow(int x, int y, String text, Color color) {
drawString(x + shadowSize, y + shadowSize, text, Color.black);
drawString(x, y, text, color);
return 0;
}
@Override
public int drawCenteredString(int x, int y, String text) {
drawCenteredString(x, y, text, Color.white);
return 0;
}
@Override
public int drawCenteredString(int x, int y, String text, Color color) {
drawString(x - (getStringWidth(ChatColor.stripColor(text)) / 2), y - (getStringHeight(ChatColor.stripColor(text)) / 2), text, color);
return 0;
}
@Override
public int drawCenteredStringWithShadow(int x, int y, String text) {
drawCenteredStringWithShadow(x, y, text, Color.white);
return 0;
}
@Override
public int drawCenteredStringWithShadow(int x, int y, String text, Color color) {
drawCenteredString(x + shadowSize , y + shadowSize, text, Color.black);
drawCenteredString(x, y, text, color);
return 0;
}
@Override
public int drawOnScreen(int x, int y) {
return 0;
}
@Override
public int getStringWidth(String text) {
FontMetrics fontMetrics = new Canvas().getFontMetrics(stdFont);
return fontMetrics.charsWidth(text.toCharArray(), 0, text.length());
}
@Override
public int getStringHeight(String text) {
FontMetrics fontMetrics = new Canvas().getFontMetrics(stdFont);
return fontMetrics.getHeight();
}
@Override
public int getLastRenderedHeight() {
return lastRenderedHeight;
}
@Override
public int getLastRenderedWidth() {
return lastRenderedWidth;
}
@Override
public void clearCache() {
//Bootstrap.logger.error("Calling clearCache() on BitmapFont is forbidden!");
}
@Override
public void destroy() {
for (Character key : bitmapStore.keySet()) {
bitmapStore.get(key).destroy();
}
bitmapStore.clear();
}
@Override
public String getFontName() {
return fontName;
}
@Override
public void setFontName(String fontName) {
this.fontName = fontName;
prepareStandardFont();
}
@Override
public int getFontSize() {
return fontSize;
}
@Override
public void setFontSize(int fontSize) {
this.fontSize = fontSize;
prepareStandardFont();
}
@Override
public int getShadowSize() {
return shadowSize;
}
@Override
public void setShadowSize(int shadowSize) {
this.shadowSize = shadowSize;
}
@Override
public boolean isBold() {
return bold;
}
@Override
public void setBold(boolean bold) {
this.bold = bold;
}
@Override
public boolean isItalics() {
return italics;
}
@Override
public void setItalics(boolean italics) {
this.italics = italics;
}
@Override
public boolean isUnderlined() {
return underlined;
}
@Override
public void setUnderlined(boolean underlined) {
this.underlined = underlined;
}
@Override
public boolean isStriked() {
return striked;
}
@Override
public void setStriked(boolean striked) {
this.striked = striked;
}
@Override
public boolean isMoving() {
return moving;
}
@Override
public void setMoving(boolean moving) {
this.moving = moving;
}
@Override
public boolean isAntialiased() {
return antialiased;
}
@Override
public void setAntialiased(boolean antialiased) {
this.antialiased = antialiased;
}
@Override
public boolean isMemorysaving() {
return memorysaving;
}
@Override
public void setMemorysaving(boolean memorysaving) {
this.memorysaving = memorysaving;
}
}
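/*
* Usage sketch (not part of the original class): building and drawing with a bitmap font.
* The modifier value 1 | 32 requests a bold, antialiased font per the bit mask decoded in
* the constructor; the font name, size and coordinates are illustrative.
*
* BitmapFont font = new BitmapFont("Arial", 18, 1 | 32);
* font.initialize(Color.WHITE, ""); // pre-renders the glyph textures
* font.drawStringWithShadow(10, 10, "Hello world", Color.WHITE);
*/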
|
package me.nallar.javatransformer.api;
import com.github.javaparser.JavaParser;
import com.github.javaparser.ParseException;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.TypeDeclaration;
import lombok.*;
import me.nallar.javatransformer.internal.ByteCodeInfo;
import me.nallar.javatransformer.internal.SourceInfo;
import me.nallar.javatransformer.internal.util.CachingSupplier;
import me.nallar.javatransformer.internal.util.JVMUtil;
import me.nallar.javatransformer.internal.util.NodeUtil;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.tree.ClassNode;
import java.io.*;
import java.net.*;
import java.nio.charset.*;
import java.nio.file.*;
import java.nio.file.attribute.*;
import java.util.*;
import java.util.function.*;
import java.util.zip.*;
@Getter
@Setter
@ToString
public class JavaTransformer {
private final List<Transformer> transformers = new ArrayList<>();
private final SimpleMultiMap<String, Transformer> classTransformers = new SimpleMultiMap<>();
private final Map<String, byte[]> transformedFiles = new HashMap<>();
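/**
* Reads the remaining bytes of the given stream into a byte array, growing the
* buffer as needed and trimming it to the number of bytes actually read.
*/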
private static byte[] readFully(InputStream is) {
byte[] output = {};
int position = 0;
while (true) {
int bytesToRead;
if (position >= output.length) {
bytesToRead = output.length + 4096;
if (output.length < position + bytesToRead) {
output = Arrays.copyOf(output, position + bytesToRead);
}
} else {
bytesToRead = output.length - position;
}
int bytesRead;
try {
bytesRead = is.read(output, position, bytesToRead);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
if (bytesRead < 0) {
if (output.length != position) {
output = Arrays.copyOf(output, position);
}
break;
}
position += bytesRead;
}
return output;
}
/**
* Used to get the path of the jar/folder containing a class
*
* @param clazz Class to get path to
* @return Path to class
*/
public static Path pathFromClass(Class<?> clazz) {
try {
return Paths.get(clazz.getProtectionDomain().getCodeSource().getLocation().toURI());
} catch (URISyntaxException e) {
throw new TransformationException(e);
}
}
public Map<String, List<Transformer>> getClassTransformers() {
return Collections.unmodifiableMap(classTransformers.map);
}
public void save(@NonNull Path path) {
switch (PathType.of(path)) {
case JAR:
saveJar(path);
break;
case FOLDER:
saveFolder(path);
break;
}
}
public void load(@NonNull Path path) {
load(path, true);
}
public void parse(@NonNull Path path) {
load(path, false);
}
private void load(@NonNull Path path, boolean saveTransformedResults) {
switch (PathType.of(path)) {
case JAR:
loadJar(path, saveTransformedResults);
break;
case FOLDER:
loadFolder(path, saveTransformedResults);
break;
}
}
public void transform(@NonNull Path load, @NonNull Path save) {
load(load, true);
save(save);
clear();
}
private void loadFolder(Path input, boolean saveTransformedResults) {
try {
Files.walkFileTree(input, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
val relativeName = input.relativize(file).toString();
val supplier = transformBytes(() -> {
try {
return Files.readAllBytes(file);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}, relativeName);
saveTransformedResult(relativeName, supplier, saveTransformedResults);
return FileVisitResult.CONTINUE;
}
});
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private void loadJar(Path p, boolean saveTransformedResults) {
ZipEntry entry;
try (ZipInputStream is = new ZipInputStream(new BufferedInputStream(new FileInputStream(p.toFile())))) {
while ((entry = is.getNextEntry()) != null) {
saveTransformedResult(entry.getName(), transformBytes(() -> readFully(is), entry.getName()), saveTransformedResults);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private void saveTransformedResult(String relativeName, Supplier<byte[]> supplier, boolean saveTransformedResults) {
if (saveTransformedResults)
transformedFiles.put(relativeName, supplier.get());
}
private void saveFolder(Path output) {
transformedFiles.forEach(((fileName, bytes) -> {
Path outputFile = output.resolve(fileName);
try {
if (Files.exists(outputFile)) {
throw new IOException("Output file already exists: " + outputFile);
}
Files.createDirectories(outputFile.getParent());
Files.write(outputFile, bytes);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}));
}
private void saveJar(Path jar) {
try (ZipOutputStream os = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(jar.toFile())))) {
transformedFiles.forEach(((relativeName, bytes) -> {
try {
os.putNextEntry(new ZipEntry(relativeName));
os.write(bytes);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public void clear() {
transformedFiles.clear();
}
public void addTransformer(@NonNull Transformer.TargetedTransformer t) {
if (transformers.contains(t)) {
throw new IllegalArgumentException("Transformer " + t + " has already been added");
}
for (String name : t.getTargetClasses()) {
classTransformers.put(name, t);
}
}
public void addTransformer(@NonNull String s, @NonNull Transformer t) {
if (classTransformers.get(s).contains(t)) {
throw new IllegalArgumentException("Transformer " + t + " has already been added for class " + s);
}
classTransformers.put(s, t);
}
public void addTransformer(@NonNull Transformer t) {
if (t instanceof Transformer.TargetedTransformer) {
addTransformer((Transformer.TargetedTransformer) t);
return;
}
if (transformers.contains(t)) {
throw new IllegalArgumentException("Transformer " + t + " has already been added");
}
transformers.add(t);
}
public Supplier<byte[]> transformJava(@NonNull Supplier<byte[]> data, @NonNull String name) {
if (!shouldTransform(name))
return data;
CachingSupplier<ClassOrInterfaceDeclaration> supplier = CachingSupplier.of(() -> {
byte[] bytes = data.get();
CompilationUnit cu;
try {
cu = JavaParser.parse(new ByteArrayInputStream(bytes));
} catch (ParseException e) {
throw new TransformationException(e);
}
List<String> tried = new ArrayList<>();
String packageName = NodeUtil.qualifiedName(cu.getPackage().getName());
for (TypeDeclaration typeDeclaration : cu.getTypes()) {
if (!(typeDeclaration instanceof ClassOrInterfaceDeclaration)) {
continue;
}
ClassOrInterfaceDeclaration classDeclaration = (ClassOrInterfaceDeclaration) typeDeclaration;
String shortClassName = classDeclaration.getName();
String fullName = packageName + '.' + shortClassName;
if (fullName.equalsIgnoreCase(name)) {
return classDeclaration;
}
tried.add(fullName);
}
throw new Error("Couldn't find any class or interface declaration matching expected name " + name
+ "\nTried: " + tried
+ "\nClass data: " + new String(bytes, Charset.forName("UTF-8")));
});
transformClassInfo(new SourceInfo(supplier, name));
return supplier.isCached() ? () -> supplier.get().getParentNode().toString().getBytes(Charset.forName("UTF-8")) : data;
}
public Supplier<byte[]> transformClass(@NonNull Supplier<byte[]> data, @NonNull String name) {
if (!shouldTransform(name))
return data;
Holder<ClassReader> readerHolder = new Holder<>();
CachingSupplier<ClassNode> supplier = CachingSupplier.of(() -> {
ClassNode node = new ClassNode();
ClassReader reader = new ClassReader(data.get());
reader.accept(node, ClassReader.EXPAND_FRAMES);
readerHolder.value = reader;
return node;
});
transformClassInfo(new ByteCodeInfo(supplier, name));
if (!supplier.isCached())
return data;
return () -> {
ClassWriter classWriter = new ClassWriter(readerHolder.value, 0);
supplier.get().accept(classWriter);
return classWriter.toByteArray();
};
}
private void transformClassInfo(ClassInfo editor) {
transformers.forEach((x) -> x.transform(editor));
classTransformers.get(editor.getName()).forEach((it) -> it.transform(editor));
}
private boolean shouldTransform(String className) {
return !transformers.isEmpty() || !classTransformers.get(className).isEmpty();
}
Supplier<byte[]> transformBytes(Supplier<byte[]> dataSupplier, String relativeName) {
boolean isClass = relativeName.endsWith(".class");
boolean isSource = relativeName.endsWith(".java");
if (isClass || isSource) {
String className = JVMUtil.fileNameToClassName(relativeName);
// package-info files do not contain classes
if (className.endsWith(".package-info"))
return dataSupplier;
if (isClass)
return transformClass(dataSupplier, className);
return transformJava(dataSupplier, className);
}
return dataSupplier;
}
private enum PathType {
JAR,
FOLDER;
static PathType of(Path p) {
if (!p.getFileName().toString().contains(".")) {
if (Files.exists(p) && !Files.isDirectory(p)) {
throw new TransformationException("Path " + p + " should be a directory or not already exist");
}
return FOLDER;
}
if (Files.isDirectory(p)) {
throw new TransformationException("Path " + p + " should be a file or not already exist");
}
return JAR;
}
}
private static class SimpleMultiMap<K, T> {
private final Map<K, List<T>> map = new HashMap<>();
public void put(K key, T value) {
List<T> values = map.get(key);
if (values == null) {
values = new ArrayList<>();
map.put(key, values);
}
values.add(value);
}
public List<T> get(K key) {
List<T> values = map.get(key);
return values == null ? Collections.emptyList() : values;
}
public String toString() {
return map.toString();
}
}
private static class Holder<T> {
public T value;
}
}
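/*
* Usage sketch (not part of the original class): transforming every class and source file
* in a jar and writing the result to a new jar. `MyTransformer` is a hypothetical
* Transformer implementation; the rest is the public API defined above.
*
* JavaTransformer transformer = new JavaTransformer();
* transformer.addTransformer(new MyTransformer());
* transformer.transform(Paths.get("input.jar"), Paths.get("output.jar"));
*/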
|
package me.unrealization.jeeves.jsonModels;
public class EdsmModels
{
public static class EDStatus
{
public String lastUpdate;
public String type;
public String message;
public String status;
}
public static class CommanderLocation
{
public String msgnum;
public String msg;
public String system;
public String firstDiscover;
public String date;
public String systemId;
public String systemId64;
public EdsmModels.SystemInfo.Coordinates coordinates;
public String isDocked;
public String dateLastActivity;
public String url;
}
public static class SystemBodies
{
public static class Body
{
public static class Ring
{
public String name;
public String type;
public String mass;
public String innerRadius;
public String outerRadius;
}
public static class Materials
{
public String Carbon;
public String Iron;
public String Nickel;
public String Phosphorus;
public String Sulphur;
public String Chromium;
public String Germanium;
public String Manganese;
public String Vanadium;
public String Zinc;
public String Zirconium;
public String Arsenic;
public String Niobium;
public String Selenium;
public String Tungsten;
public String Cadmium;
public String Mercury;
public String Molybdenum;
public String Ruthenium;
public String Tin;
public String Yttrium;
public String Antimony;
public String Polonium;
public String Technetium;
public String Tellurium;
}
public String id;
public String id64;
public String name;
public String type;
public String subType;
public String offset;
public String distanceToArrival;
public String isMainStar;
public String isScoopable;
public String age;
public String luminosity;
public String absoluteMagnitude;
public String solarMasses;
public String solarRadius;
public String surfaceTemperature;
public String isLandable;
public String gravity;
public String earthMasses;
public String radius;
public String volcanismType;
public String atmosphereType;
public String terraformingState;
public String orbitalPeriod;
public String semiMajorAxis;
public String orbitalEccentricity;
public String orbitalInclination;
public String argOfPeriapsis;
public String rotationalPeriod;
public String rotationalPeriodTidallyLocked;
public String axialTilt;
public EdsmModels.SystemBodies.Body.Ring[] belts;
public EdsmModels.SystemBodies.Body.Ring[] rings;
public EdsmModels.SystemBodies.Body.Materials materials;
public String updateTime;
}
public String id;
public String id64;
public String name;
public EdsmModels.SystemBodies.Body[] bodies;
}
public static class SystemStations
{
public static class Station
{
public static class UpdateTime
{
public String information;
public String market;
public String shipyard;
public String outfitting;
}
public String id;
public String marketId;
public String name;
public String type;
public String distanceToArrival;
public String allegiance;
public String government;
public String economy;
public String haveMarket;
public String haveShipyard;
public String[] otherServices;
public EdsmModels.SystemFactions.Faction controllingFaction;
public EdsmModels.SystemStations.Station.UpdateTime updateTime;
}
public String id;
public String id64;
public String name;
public EdsmModels.SystemStations.Station[] stations;
}
public static class SystemFactions
{
public static class Faction
{
public static class State
{
public String state;
public String trend;
}
public String id;
public String name;
public String allegiance;
public String government;
public String influence;
public String state;
public EdsmModels.SystemFactions.Faction.State[] recoveringStates;
public EdsmModels.SystemFactions.Faction.State[] pendingStates;
public String isPlayer;
public String lastUpdate;
}
public String id;
public String id64;
public String name;
public EdsmModels.SystemFactions.Faction controllingFaction;
public EdsmModels.SystemFactions.Faction[] factions;
}
public static class SystemInfo
{
public static class Coordinates
{
public String x;
public String y;
public String z;
}
public static class SystemInformation
{
public String allegiance;
public String government;
public String faction;
public String factionState;
public String population;
public String reserve;
public String security;
public String economy;
}
public static class PrimaryStar
{
public String type;
public String name;
public String isScoopable;
}
public String name;
public String id;
public String id64;
public EdsmModels.SystemInfo.Coordinates coords;
public String requirePermit;
//public EdsmModels.SystemInfo.SystemInformation information;
public EdsmModels.SystemInfo.PrimaryStar primaryStar;
}
}
|
package mil.nga.geopackage.extension.style;
import java.util.ArrayList;
import java.util.List;
import mil.nga.geopackage.db.GeoPackageDataType;
import mil.nga.geopackage.extension.related.simple.SimpleAttributesTable;
import mil.nga.geopackage.user.custom.UserCustomColumn;
import mil.nga.geopackage.user.custom.UserCustomTable;
/**
* Style Table
*
* @author osbornb
* @since 3.1.1
*/
public class StyleTable extends SimpleAttributesTable {
/**
* Table name
*/
public static final String TABLE_NAME = "nga_style";
/**
* Feature Style name
*/
public static final String COLUMN_NAME = "name";
/**
* Feature Style description
*/
public static final String COLUMN_DESCRIPTION = "description";
/**
* Geometry color in hex format #RRGGBB or #RGB
*/
public static final String COLUMN_COLOR = "color";
/**
* Geometry color opacity inclusively between 0.0 and 1.0
*/
public static final String COLUMN_OPACITY = "opacity";
/**
* Geometry line stroke or point width greater than or equal to 0.0
*/
public static final String COLUMN_WIDTH = "width";
/**
* Closed geometry fill color in hex format #RRGGBB or #RGB
*/
public static final String COLUMN_FILL_COLOR = "fill_color";
/**
* Closed geometry fill color opacity inclusively between 0.0 and 1.0
*/
public static final String COLUMN_FILL_OPACITY = "fill_opacity";
/**
* Constructor
*/
public StyleTable() {
super(TABLE_NAME, createColumns());
}
/**
* Constructor
*
* @param table
* user custom table
*/
protected StyleTable(UserCustomTable table) {
super(table);
}
/**
* Create the style columns
*
* @return columns
*/
private static List<UserCustomColumn> createColumns() {
List<UserCustomColumn> columns = new ArrayList<>();
columns.addAll(createRequiredColumns());
int index = columns.size();
columns.add(UserCustomColumn.createColumn(index++, COLUMN_NAME,
GeoPackageDataType.TEXT, false, null));
columns.add(UserCustomColumn.createColumn(index++, COLUMN_DESCRIPTION,
GeoPackageDataType.TEXT, false, null));
columns.add(UserCustomColumn.createColumn(index++, COLUMN_COLOR,
GeoPackageDataType.TEXT, false, null));
columns.add(UserCustomColumn.createColumn(index++, COLUMN_OPACITY,
GeoPackageDataType.DOUBLE, false, null));
columns.add(UserCustomColumn.createColumn(index++, COLUMN_WIDTH,
GeoPackageDataType.DOUBLE, false, null));
columns.add(UserCustomColumn.createColumn(index++, COLUMN_FILL_COLOR,
GeoPackageDataType.TEXT, false, null));
columns.add(UserCustomColumn.createColumn(index++, COLUMN_FILL_OPACITY,
GeoPackageDataType.DOUBLE, false, null));
return columns;
}
/**
* Get the name column index
*
* @return name column index
*/
public int getNameColumnIndex() {
return getColumnIndex(COLUMN_NAME);
}
/**
* Get the name column
*
* @return name column
*/
public UserCustomColumn getNameColumn() {
return getColumn(COLUMN_NAME);
}
/**
* Get the description column index
*
* @return description column index
*/
public int getDescriptionColumnIndex() {
return getColumnIndex(COLUMN_DESCRIPTION);
}
/**
* Get the description column
*
* @return description column
*/
public UserCustomColumn getDescriptionColumn() {
return getColumn(COLUMN_DESCRIPTION);
}
/**
* Get the color column index
*
* @return color column index
*/
public int getColorColumnIndex() {
return getColumnIndex(COLUMN_COLOR);
}
/**
* Get the color column
*
* @return color column
*/
public UserCustomColumn getColorColumn() {
return getColumn(COLUMN_COLOR);
}
/**
* Get the opacity column index
*
* @return opacity column index
*/
public int getOpacityColumnIndex() {
return getColumnIndex(COLUMN_OPACITY);
}
/**
* Get the opacity column
*
* @return opacity column
*/
public UserCustomColumn getOpacityColumn() {
return getColumn(COLUMN_OPACITY);
}
/**
* Get the width column index
*
* @return width column index
*/
public int getWidthColumnIndex() {
return getColumnIndex(COLUMN_WIDTH);
}
/**
* Get the width column
*
* @return width column
*/
public UserCustomColumn getWidthColumn() {
return getColumn(COLUMN_WIDTH);
}
/**
* Get the fill color column index
*
* @return fill color column index
*/
public int getFillColorColumnIndex() {
return getColumnIndex(COLUMN_FILL_COLOR);
}
/**
* Get the fill color column
*
* @return fill color column
*/
public UserCustomColumn getFillColorColumn() {
return getColumn(COLUMN_FILL_COLOR);
}
/**
* Get the fill opacity column index
*
* @return fill opacity column index
*/
public int getFillOpacityColumnIndex() {
return getColumnIndex(COLUMN_FILL_OPACITY);
}
/**
* Get the fill opacity column
*
* @return fill opacity column
*/
public UserCustomColumn getFillOpacityColumn() {
return getColumn(COLUMN_FILL_OPACITY);
}
}
|
package net.galaxygaming.dispenser.event;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import net.galaxygaming.dispenser.GameDispenser;
import net.galaxygaming.dispenser.game.Game;
import net.galaxygaming.dispenser.game.GameManager;
import net.galaxygaming.dispenser.game.GameType;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.event.Event;
import org.bukkit.event.EventException;
import org.bukkit.event.EventHandler;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockDamageEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.block.SignChangeEvent;
import org.bukkit.event.entity.EntityEvent;
import org.bukkit.event.player.PlayerEvent;
import org.bukkit.plugin.EventExecutor;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
public class EventManager {
/** Singleton instance */
private static final EventManager instance = new EventManager();
private final Map<GameType, Set<Listener>> listeners;
private GameDispenser plugin;
private EventManager() {
listeners = Maps.newHashMap();
}
public void setup(GameDispenser plugin) {
this.plugin = plugin;
plugin.getServer().getPluginManager().registerEvents(new Events(), plugin);
}
@Override
public EventManager clone() throws CloneNotSupportedException {
throw new CloneNotSupportedException();
}
public void registerListener(Listener listener, GameType type) {
Set<Method> methods;
try {
Method[] publicMethods = listener.getClass().getMethods();
methods = new HashSet<Method>(publicMethods.length, Float.MAX_VALUE);
for (Method method : publicMethods) {
methods.add(method);
}
for (Method method : listener.getClass().getDeclaredMethods()) {
methods.add(method);
}
} catch (NoClassDefFoundError e) {
plugin.getLogger().severe(type.toString() + " has failed to register events for " + listener.getClass() + " because " + e.getMessage() + " does not exist.");
return;
}
for (final Method method : methods) {
final EventHandler eh = method.getAnnotation(EventHandler.class);
if (eh == null) continue;
if (method.getParameterTypes().length == 0) {
plugin.getLogger().severe(type.toString() + " attempted to register an invalid EventHandler method signature '" + method.toGenericString() + "' in " + listener.getClass());
continue;
}
final Class<?> checkClass = method.getParameterTypes()[0];
final Class<? extends Game> gameClass;
if (method.getParameterTypes().length == 2 && Game.class.isAssignableFrom(method.getParameterTypes()[1])
&& (EntityEvent.class.isAssignableFrom(checkClass) || PlayerEvent.class.isAssignableFrom(checkClass)
|| BlockBreakEvent.class.isAssignableFrom(checkClass) || BlockPlaceEvent.class.isAssignableFrom(checkClass)
|| BlockDamageEvent.class.isAssignableFrom(checkClass) || SignChangeEvent.class.isAssignableFrom(checkClass))) {
gameClass = method.getParameterTypes()[1].asSubclass(Game.class);
} else if (method.getParameterTypes().length != 1 || !Event.class.isAssignableFrom(checkClass)) {
plugin.getLogger().severe(type.toString() + " attempted to register an invalid EventHandler method signature '" + method.toGenericString() + "' in " + listener.getClass());
continue;
} else {
gameClass = null;
}
final Class<? extends Event> eventClass = checkClass.asSubclass(Event.class);
method.setAccessible(true);
EventExecutor executor = new EventExecutor() {
public void execute(Listener listener, Event event) throws EventException {
try {
if (!eventClass.isAssignableFrom(event.getClass())) {
return;
}
if (gameClass != null) {
Player player = null;
if (event instanceof PlayerEvent) {
player = ((PlayerEvent) event).getPlayer();
} else if (event instanceof EntityEvent) {
Entity entity = ((EntityEvent) event).getEntity();
if (entity instanceof Player) {
player = (Player) entity;
}
} else if (event instanceof BlockBreakEvent) {
player = ((BlockBreakEvent) event).getPlayer();
} else if (event instanceof BlockPlaceEvent) {
player = ((BlockPlaceEvent) event).getPlayer();
} else if (event instanceof BlockDamageEvent) {
player = ((BlockDamageEvent) event).getPlayer();
} else if (event instanceof SignChangeEvent) {
player = ((SignChangeEvent) event).getPlayer();
}
if (player == null) {
return;
}
Game game = GameManager.getInstance().getGameForPlayer(player, gameClass);
if (game == null)
return;
method.invoke(listener, event, gameClass.cast(game));
} else {
method.invoke(listener, event);
}
} catch (InvocationTargetException e) {
throw new EventException(e.getCause());
} catch (Throwable e) {
throw new EventException(e);
}
}
};
Set<Listener> listenerSet = listeners.get(type);
if (listenerSet == null) {
listenerSet = Sets.newHashSet();
listeners.put(type, listenerSet);
}
listenerSet.add(listener);
plugin.getServer().getPluginManager().registerEvent(eventClass, listener, eh.priority(), executor, plugin, eh.ignoreCancelled());
}
}
public void unregisterListener(Listener listener) {
HandlerList.unregisterAll(listener);
for (Entry<GameType, Set<Listener>> entry : listeners.entrySet()) {
if (entry.getValue() != null) {
entry.getValue().remove(listener);
}
}
}
public void unregisterListeners(GameType type) {
Set<Listener> listenerSet = listeners.get(type);
if (listenerSet != null) {
for (Listener listener : listenerSet) {
HandlerList.unregisterAll(listener);
}
}
listeners.remove(type);
}
public static EventManager getInstance() {
return instance;
}
}
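/*
* Usage sketch (not part of the original class): registerListener() accepts handlers with
* either a plain (Event) signature or a two-argument (SupportedEvent, Game subclass)
* signature; the latter are only invoked when the player involved is in a game of that
* type. `MyGame` and `myGameType` are hypothetical.
*
* public class MyGameListener implements Listener {
* @EventHandler
* public void onBlockBreak(BlockBreakEvent event, MyGame game) {
* // only fires for players currently in a MyGame instance
* }
* }
* EventManager.getInstance().registerListener(new MyGameListener(), myGameType);
*/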
|
package opendap.wcs.v2_0;
import opendap.PathBuilder;
import opendap.wcs.srs.SimpleSrs;
import org.apache.commons.codec.binary.Hex;
import org.apache.http.client.CredentialsProvider;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collection;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
/**
* This catalog doesn't maintain a persistent catalog per se; it uses incoming requests to access
* remote services and build a coverage description for the requested coverage on demand.
*/
public class DynamicServiceCatalog implements WcsCatalog{
private Logger _log;
private boolean _intialized;
private String _cacheDir;
private ReentrantReadWriteLock _cacheLock;
// private ConcurrentHashMap<String,SimpleSrs> _defaultSRS;
private CredentialsProvider _credsProvider;
private ConcurrentHashMap<String,DynamicService> _dynamicServices;
public DynamicServiceCatalog(){
_intialized = false;
_log = LoggerFactory.getLogger(getClass());
_cacheLock = new ReentrantReadWriteLock();
_dynamicServices = new ConcurrentHashMap<>();
}
@Override
public void init(Element config, String cacheDir, String resourcePath) throws Exception {
if(_intialized)
return;
Element e1;
String msg;
XMLOutputter xmlo = new XMLOutputter(Format.getPrettyFormat());
// Sort out access credentials for getting things from places
// that require such...
_credsProvider = null;
e1 = config.getChild("Credentials");
if(e1!=null){
// There was a Credentials thing in the config, lets try it...
String filename = e1.getTextTrim();
try {
_credsProvider = opendap.http.Util.getNetRCCredentialsProvider(filename, true);
}
catch (IOException ioe){
_log.error("init() - The file '{}' cannot be processed as a .netrc file. " +
"msg: {}",filename,ioe.getMessage());
}
}
if(_credsProvider==null){
_log.warn("Looking in default location for .netrc");
try {
_credsProvider = opendap.http.Util.getNetRCCredentialsProvider();
} catch (IOException e) {
msg = "Unable to load authentication credentials from defult location. " +
"Try specifying the credentials location if credentials are required.";
_log.warn(msg);
}
}
e1 = config.getChild("CacheDirectory");
if(e1==null){
String defaultCacheDir = cacheDir + this.getClass().getSimpleName();
File defaultCatDir = new File(defaultCacheDir);
if(!defaultCatDir.exists()){
if(!defaultCatDir.mkdirs()){
msg = "Default Cache Directory ("+defaultCacheDir+")does not exist and cannot be " +
"created. Could not find CoveragesDirectory element in " +
"configuration element: "+ xmlo.outputString(config);
_log.error(msg);
throw new IOException(msg);
}
}
_cacheDir = defaultCacheDir;
}
else {
_cacheDir = e1.getTextTrim();
}
_log.debug("WCS-2.0 Cache Directory: " + _cacheDir);
List<Element> dynamicServices = config.getChildren("DynamicService");
for(Element dsElement:dynamicServices) {
DynamicService dynamicService = new DynamicService(dsElement);
DynamicService previous = _dynamicServices.put(dynamicService.getName(),dynamicService);
if(previous!=null){
//FIXME Do we care that something was in the way? I think so...
_log.warn("The addtion of the DynamicService: {} bumped this instance from the map:{}",
dynamicService.toString(),previous.toString());
}
}
_intialized = true;
}
private String anyId2CacheId(String someId) throws WcsException {
if(someId==null)
return null;
try {
MessageDigest digest = MessageDigest.getInstance("SHA-256");
byte[] hash = digest.digest(someId.getBytes(StandardCharsets.UTF_8));
String cacheId = Hex.encodeHexString( hash );
return cacheId;
} catch (NoSuchAlgorithmException e) {
throw new WcsException("Oops! No SHA-256 hashing available. msg: "+ e.getMessage(),WcsException.NO_APPLICABLE_CODE, getClass().getClass().getCanonicalName()+".getCacheId()");
}
}
/**
* Thread safe DMR acquisition and caching.
* @param coverageId The Coverage ID (wcs:Identifier) whose DMR should be retrieved.
* @return The root Element of the cached (or newly retrieved) DMR document, or null if the coverage has no data access URL.
*/
private Element getCachedDMR(String coverageId) throws IOException, JDOMException, InterruptedException, WcsException {
_log.debug("getCachedDMR() - BEGIN coverageId: {}",coverageId);
String datasetUrl = getDataAccessUrl(coverageId);
_log.debug("getCachedDMR() - DAP Dataset URL: {}",datasetUrl);
if(datasetUrl==null)
return null;
String dmrUrl = datasetUrl + ".dmr.xml";
_log.debug("getCachedDMR() - DMR URL: {}",dmrUrl);
String cacheId = anyId2CacheId(dmrUrl);
_log.debug("getCachedDMR() - cacheId: {}",cacheId);
File cacheFile = new File(_cacheDir,cacheId);
// TODO Improve by adding a shared read lock. jhrg 9/18/17
WriteLock writeLock = _cacheLock.writeLock();
writeLock.lock();
try {
if(cacheFile.exists()){
_log.debug("getCachedDMR() - Reading cached DMR.");
Element dmrElement = opendap.xml.Util.getDocumentRoot(cacheFile);
dmrElement.setAttribute("name",coverageId);
return dmrElement;
}
else {
_log.debug("getCachedDMR() - Retrieving DMR from DAP service");
FileOutputStream fos = new FileOutputStream(cacheFile);
opendap.http.Util.writeRemoteContent(dmrUrl, _credsProvider, fos);
fos.close();
Element dmrElement = opendap.xml.Util.getDocumentRoot(cacheFile);
// TODO QC the dmrElement to be sure it's not a DAP error object and then maybe uncache it if it's an error.
dmrElement.setAttribute("name",coverageId);
return dmrElement;
}
}
finally {
writeLock.unlock();
}
}
@Override
public boolean hasCoverage(String coverageId) throws InterruptedException {
try {
if(getCachedDMR(coverageId) != null)
return true;
} catch (IOException | JDOMException | WcsException e) {
_log.debug("hasCoverage() - Unable to locate coverage! Caught a(n) "+
e.getClass().getName()+" msg: " + e.getMessage());
}
return false;
}
/**
* TODO This method should be set up to utilize a cached instance of the DynamicCoverageDescription object, as getCachedDMR() does for the DMR document.
* @param coverageId The Coverage ID (wcs:Identifier)
* @return
* @throws InterruptedException
* @throws WcsException
*/
@Override
public CoverageDescription getCoverageDescription(String coverageId) throws InterruptedException, WcsException {
try {
Element dmr = getCachedDMR(coverageId);
if(dmr==null)
return null;
DynamicService dynamicService = getLongestMatchingDynamicService(coverageId);
if(dynamicService==null)
return null;
DynamicCoverageDescription coverageDescription = new DynamicCoverageDescription(dmr,dynamicService);
return coverageDescription;
} catch (JDOMException | IOException e) {
_log.error("getCoverageDescription() - FAILED to get CoverageDescription for id: {} msg: {}"+
coverageId, e.getMessage());
}
return null;
}
@Override
public Element getCoverageDescriptionElement(String coverageId) throws InterruptedException, WcsException {
return getCoverageDescription(coverageId).getCoverageDescriptionElement();
}
@Override
public Element getCoverageSummaryElement(String coverageId) throws InterruptedException, WcsException {
CoverageDescription cDesc = getCoverageDescription(coverageId);
if(cDesc!=null){
return cDesc.getCoverageSummary();
/*
Element covSum = new Element("CoverageSummary",WCS.WCS_NS);
Element coverageID = cDesc.getCoverageIdElement();
covSum.addContent(coverageID);
Element coverageSubtype = new Element("CoverageSubtype",WCS.WCS_NS);
covSum.addContent(coverageSubtype);
coverageSubtype.addContent(cDesc.getCoverageDescriptionElement());
return covSum;
*/
}
return null;
}
@Override
public Collection<Element> getCoverageSummaryElements() throws InterruptedException, WcsException {
Vector<Element> results = new Vector<>();
CoverageDescription cDesc = getCoverageDescription("foo");
if (cDesc != null) {
}
return results;
}
@Override
public Collection<Element> getDatasetSeriesSummaryElements() throws InterruptedException, WcsException {
return new Vector<>();
}
public String getDmrUrl(String coverageId) throws InterruptedException {
String datasetUrl = getDataAccessUrl(coverageId);
if(datasetUrl==null)
return null;
return datasetUrl + ".dmr.xml";
}
/**
* Compares the passed coverageId with the collection of DynamicService instances and returns the one,
* if any, with the longest matching name.
*
* @param coverageId The Coverage ID (wcs:Identifier) to match.
* @return The DynamicService whose name is the longest prefix of coverageId, or null if none match.
*/
public DynamicService getLongestMatchingDynamicService(String coverageId){
String longestMatchingDynamicServiceName=null;
DynamicService match = null;
for(DynamicService dynamicService:_dynamicServices.values()){
String dsName = dynamicService.getName();
if(coverageId.startsWith(dsName)){
_log.debug("CoverageId '{}' matchs DynamicService name '{}'",coverageId,dsName);
if(longestMatchingDynamicServiceName==null){
longestMatchingDynamicServiceName=dsName;
match = dynamicService;
}
else if(longestMatchingDynamicServiceName.length() < dsName.length()) {
longestMatchingDynamicServiceName = dsName;
match = dynamicService;
}
_log.debug("DynamicService '{}' matched.",match.getName());
}
}
return match;
}
@Override
public String getDataAccessUrl(String coverageId) throws InterruptedException {
_log.debug("getDataAccessUrl() - BEGIN coverageId: {}",coverageId);
DynamicService dynamicService = getLongestMatchingDynamicService(coverageId);
_log.debug("getDataAccessUrl() - DynamicService instance: {}",dynamicService);
if(dynamicService==null)
return null;
String resourceId = coverageId.replace(dynamicService.getName(),"");
PathBuilder pb = new PathBuilder(dynamicService.getDapServiceUrl().toString());
pb.pathAppend(resourceId);
pb.append("");
return pb.toString();
}
@Override
public long getLastModified() {
return 0;
}
@Override
public void destroy() {
}
@Override
public void update() throws Exception {
}
@Override
public EOCoverageDescription getEOCoverageDescription(String id) throws WcsException {
return null;
}
@Override
public EODatasetSeries getEODatasetSeries(String id) throws WcsException {
return null;
}
@Override
public boolean hasEoCoverage(String id) {
return false;
}
public SimpleSrs getDefaultSrs(String coverageId){
DynamicService dynamicService = getLongestMatchingDynamicService(coverageId);
if(dynamicService==null)
return null;
return dynamicService.getSrs();
}
}
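/*
* Mapping sketch (not part of the original class): getDataAccessUrl() strips the longest
* matching DynamicService name from the coverageId and appends the remainder to that
* service's DAP service URL. With a hypothetical DynamicService named "mds" whose DAP URL
* is "http://server/opendap", the coverageId "mds/granule.nc" would resolve to
* "http://server/opendap/granule.nc", and getCachedDMR() would fetch and cache
* "http://server/opendap/granule.nc.dmr.xml".
*/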
|
package org.adligo.i.log.client;
import java.util.HashMap;
import org.adligo.i.util.client.I_Map;
import org.adligo.i.util.client.MapFactory;
import org.adligo.j2se.util.MapWrapper;
import org.adligo.tests.ATest;
public class SimpleLogTests extends ATest implements I_LogOutput {
private String currentLog = "";
private String newMessage = "";
private String errTrace;
private Exception x = null;
public void setUp() {
try {
Exception t = new Exception("Ex");
throw t;
} catch (Exception y) {
x = y;
}
StackTraceElement[] elements = x.getStackTrace();
StringBuffer sb = new StringBuffer();
sb.append(" <java.lang.Exception: Ex>\n");
for (int i = 0; i < elements.length; i++) {
sb.append("\t at ");
sb.append(elements[i].toString());
sb.append("\n");
}
errTrace = sb.toString();
}
@Override
public void write(String p) {
newMessage = p;
assertEquals("Sould match", currentLog,p);
}
@SuppressWarnings("unchecked")
public void testOutputs() {
SimpleLog.setOut(this);
SimpleLog log = new SimpleLog("TestLog", new MapWrapper(new HashMap()));
log.setLevel(I_LogDelegate.LOG_LEVEL_FATAL);
assertFatal(log);
log.setLevel(I_LogDelegate.LOG_LEVEL_ERROR);
assertError(log);
log.setLevel(I_LogDelegate.LOG_LEVEL_WARN);
assertWarn(log);
log.setLevel(I_LogDelegate.LOG_LEVEL_INFO);
assertInfo(log);
log.setLevel(I_LogDelegate.LOG_LEVEL_DEBUG);
assertDebug(log);
log.setLevel(I_LogDelegate.LOG_LEVEL_TRACE);
assertTrace(log);
I_Map map = new MapWrapper(new HashMap());
setDefaultLog(log, map, "FATAL");
assertFatal(log);
setDefaultLog(log, map, "ERROR");
assertError(log);
setDefaultLog(log, map, "WARN");
assertWarn(log);
setDefaultLog(log, map, "INFO");
assertInfo(log);
setDefaultLog(log, map, "DEBUG");
assertDebug(log);
setDefaultLog(log, map, "TRACE");
assertTrace(log);
setDefaultLog(log, map, "fatal");
assertFatal(log);
setDefaultLog(log, map, "error");
assertError(log);
setDefaultLog(log, map, "warn");
assertWarn(log);
setDefaultLog(log, map, "info");
assertInfo(log);
setDefaultLog(log, map, "debug");
assertDebug(log);
setDefaultLog(log, map, "trace");
assertTrace(log);
setDefaultLog(log, map, "fatal");
setTestLog(log, map, "fatal");
assertFatal(log);
setTestLog(log, map, "error");
assertError(log);
setTestLog(log, map, "warn");
assertWarn(log);
setTestLog(log, map, "info");
assertInfo(log);
setTestLog(log, map, "debug");
assertDebug(log);
setTestLog(log, map, "trace");
assertTrace(log);
setTestLog(log, map, "FATAL");
assertFatal(log);
setTestLog(log, map, "ERROR");
assertError(log);
setTestLog(log, map, "WARN");
assertWarn(log);
setTestLog(log, map, "INFO");
assertInfo(log);
setTestLog(log, map, "DEBUG");
assertDebug(log);
setTestLog(log, map, "TRACE");
assertTrace(log);
SimpleLog.setOut(new SystemErrOutput());
}
public void setTestLog(SimpleLog log, I_Map map, String level) {
map.put("TestLog", level);
log.setLogLevel(map);
}
public void setDefaultLog(SimpleLog log, I_Map map, String level) {
map.put("defaultlog", level);
log.setLogLevel(map);
}
/**
* fix for defaultlog
* set to something besides INFO
*
*/
public void testgetStringProperty() {
LogPlatform.setDebug(true);
I_Map props = MapFactory.create();
props.put("defaultlog", "DEBUG");
short result = SimpleLog.getLogLevel(props, "org.adligo");
assertEquals(I_LogDelegate.LOG_LEVEL_DEBUG, result);
props.put("org", "WARN");
result = SimpleLog.getLogLevel(props, "org.adligo");
assertEquals(I_LogDelegate.LOG_LEVEL_WARN, result);
props.put("org.adligo", "INFO");
result = SimpleLog.getLogLevel(props, "org.adligo");
assertEquals(I_LogDelegate.LOG_LEVEL_INFO, result);
props.put("defaultlog", "FATAL");
result = SimpleLog.getLogLevel(props, "com.bar");
assertEquals(I_LogDelegate.LOG_LEVEL_FATAL, result);
result = SimpleLog.getLogLevel(props, "org.adligo");
assertEquals(I_LogDelegate.LOG_LEVEL_INFO, result);
}
public void assertFatal(SimpleLog log) {
setupNextBlock();
log.trace("hey");
assertOutputNotCalled();
log.trace("hey", x);
assertOutputNotCalled();
setupNextBlock();
log.debug("hey");
assertOutputNotCalled();
log.debug("hey", x);
assertOutputNotCalled();
setupNextBlock();
log.info("hey");
assertOutputNotCalled();
log.info("hey", x);
assertOutputNotCalled();
setupNextBlock();
log.warn("hey");
assertOutputNotCalled();
log.warn("hey", x);
assertOutputNotCalled();
setupNextBlock();
log.error("hey");
assertOutputNotCalled();
log.error("hey", x);
assertOutputNotCalled();
setupNextMessage("[FATAL] TestLog - hey" + errTrace);
log.fatal("hey", x);
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey");
log.fatal("hey");
assertOutputCalled();
}
public void assertError(SimpleLog log) {
setupNextBlock();
log.trace("hey");
assertOutputNotCalled();
setupNextBlock();
log.debug("hey");
assertOutputNotCalled();
setupNextBlock();
log.info("hey");
assertOutputNotCalled();
setupNextBlock();
log.warn("hey");
assertOutputNotCalled();
setupNextMessage("[ERROR] TestLog - hey");
log.error("hey");
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey" + errTrace);
log.error("hey", x);
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey");
log.fatal("hey");
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey" + errTrace);
log.fatal("hey", x);
assertOutputCalled();
}
public void assertWarn(SimpleLog log) {
setupNextBlock();
log.trace("hey");
assertOutputNotCalled();
setupNextBlock();
log.debug("hey");
assertOutputNotCalled();
setupNextBlock();
log.info("hey");
assertOutputNotCalled();
setupNextMessage("[WARN] TestLog - hey");
log.warn("hey");
assertOutputCalled();
setupNextMessage("[WARN] TestLog - hey" + errTrace);
log.warn("hey", x);
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey");
log.error("hey");
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey" + errTrace);
log.error("hey", x);
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey");
log.fatal("hey");
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey" + errTrace);
log.fatal("hey", x);
assertOutputCalled();
}
public void assertInfo(SimpleLog log) {
setupNextBlock();
log.trace("hey");
assertOutputNotCalled();
setupNextBlock();
log.debug("hey");
assertOutputNotCalled();
setupNextMessage("[INFO] TestLog - hey");
log.info("hey");
assertOutputCalled();
setupNextMessage("[INFO] TestLog - hey" + errTrace);
log.info("hey", x);
assertOutputCalled();
setupNextMessage("[WARN] TestLog - hey");
log.warn("hey");
assertOutputCalled();
setupNextMessage("[WARN] TestLog - hey" + errTrace);
log.warn("hey", x);
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey");
log.error("hey");
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey" + errTrace);
log.error("hey", x);
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey");
log.fatal("hey");
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey" + errTrace);
log.fatal("hey", x);
assertOutputCalled();
}
public void assertDebug(SimpleLog log) {
setupNextBlock();
log.trace("hey");
assertOutputNotCalled();
setupNextMessage("[DEBUG] TestLog - hey");
log.debug("hey");
assertOutputCalled();
setupNextMessage("[DEBUG] TestLog - hey" + errTrace);
log.debug("hey", x);
assertOutputCalled();
setupNextMessage("[INFO] TestLog - hey");
log.info("hey");
assertOutputCalled();
setupNextMessage("[INFO] TestLog - hey" + errTrace);
log.info("hey", x);
assertOutputCalled();
setupNextMessage("[WARN] TestLog - hey");
log.warn("hey");
assertOutputCalled();
setupNextMessage("[WARN] TestLog - hey" + errTrace);
log.warn("hey", x);
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey");
log.error("hey");
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey" + errTrace);
log.error("hey", x);
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey");
log.fatal("hey");
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey" + errTrace);
log.fatal("hey", x);
assertOutputCalled();
}
public void assertTrace(SimpleLog log) {
setupNextMessage("[TRACE] TestLog - hey");
log.trace("hey");
assertOutputCalled();
setupNextMessage("[TRACE] TestLog - hey" + errTrace);
log.trace("hey", x);
assertOutputCalled();
setupNextMessage("[DEBUG] TestLog - hey");
log.debug("hey");
assertOutputCalled();
setupNextMessage("[DEBUG] TestLog - hey" + errTrace);
log.debug("hey", x);
assertOutputCalled();
setupNextMessage("[INFO] TestLog - hey");
log.info("hey");
assertOutputCalled();
setupNextMessage("[INFO] TestLog - hey" + errTrace);
log.info("hey", x);
assertOutputCalled();
setupNextMessage("[WARN] TestLog - hey");
log.warn("hey");
assertOutputCalled();
setupNextMessage("[WARN] TestLog - hey" + errTrace);
log.warn("hey", x);
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey");
log.error("hey");
assertOutputCalled();
setupNextMessage("[ERROR] TestLog - hey" + errTrace);
log.error("hey", x);
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey");
log.fatal("hey");
assertOutputCalled();
setupNextMessage("[FATAL] TestLog - hey" + errTrace);
log.fatal("hey", x);
assertOutputCalled();
}
public void assertOutputCalled() {
assertTrue("Should have logged something", !"".equals(newMessage));
}
public void assertOutputNotCalled() {
assertTrue("Should NOT have logged something", "".equals(newMessage));
}
public void setupNextBlock() {
newMessage = "";
currentLog = "";
}
public void setupNextMessage(String p) {
newMessage = "";
currentLog = p;
}
protected String getErrTrace() {
return errTrace;
}
protected void setErrTrace(String errTrace) {
this.errTrace = errTrace;
}
@SuppressWarnings("unchecked")
public void testGetLogLevels() {
I_Map props = new MapWrapper(new HashMap());
props.put("defaultlog", "WARN");
props.put(SimpleLogTests.class.getName(), "INFO");
short level = SimpleLog.getLogLevel(props, SimpleLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_INFO, level);
level = SimpleLog.getLogLevel(props, DeferredLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_WARN, level);
props.put("defaultlog", "DEBUG");
level = SimpleLog.getLogLevel(props, SimpleLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_INFO, level);
level = SimpleLog.getLogLevel(props, DeferredLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_DEBUG, level);
props.put("defaultlog", "WARN");
props.put("org.adligo.i.log.client", "DEBUG");
level = SimpleLog.getLogLevel(props, DeferredLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_DEBUG, level);
props.remove("org.adligo.i.log.client");
level = SimpleLog.getLogLevel(props, DeferredLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_WARN, level);
props.put("org.adligo.i", "DEBUG");
level = SimpleLog.getLogLevel(props, DeferredLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_DEBUG, level);
props.put("org.adligo.i.log", "WARN");
level = SimpleLog.getLogLevel(props, DeferredLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_WARN, level);
props.put("org.adligo.i.log", "TRACE");
level = SimpleLog.getLogLevel(props, DeferredLogTests.class.getName());
assertEquals(DeferredLog.LOG_LEVEL_TRACE, level);
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package tonegod.gui.controls.extras;
import com.jme3.font.LineWrapMode;
import com.jme3.input.event.KeyInputEvent;
import com.jme3.input.event.MouseButtonEvent;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector4f;
import java.util.ArrayList;
import java.util.List;
import tonegod.gui.controls.buttons.ButtonAdapter;
import tonegod.gui.controls.buttons.CheckBox;
import tonegod.gui.controls.form.Form;
import tonegod.gui.controls.lists.SelectBox;
import tonegod.gui.controls.scrolling.ScrollArea;
import tonegod.gui.controls.text.Label;
import tonegod.gui.controls.text.TextField;
import tonegod.gui.controls.windows.Panel;
import tonegod.gui.controls.windows.Window;
import tonegod.gui.core.Element;
import tonegod.gui.core.Screen;
import tonegod.gui.core.utils.BitmapTextUtil;
/**
*
* @author t0neg0d
*/
public abstract class ChatBoxExt extends Panel {
private ScrollArea saChatArea;
private TextField tfChatInput;
private ButtonAdapter btnChatSendMsg;
private ButtonAdapter btnChatFilter;
private float btnChatFilterHeight = 20;
private SelectBox sbDefaultChannel;
private float saContentPadding;
private boolean showSendButton = true;
private boolean showFilterButton = true;
private boolean showChannelLabels = true;
private Form chatForm;
private Window filters = null;
private ScrollArea filtersScrollArea = null;
float filterLineHeight;
float controlSpacing, controlSize, buttonWidth, scrollSize;
Vector4f indents;
private int sendKey;
private int chatHistorySize = 30;
protected List<ChatMessage> chatMessages = new ArrayList<ChatMessage>();
protected List<ChatChannel> channels = new ArrayList<ChatChannel>();
private String defaultCommand;
List<Label> displayMessages = new ArrayList<Label>();
/**
* Creates a new instance of the ChatBox control
*
* @param screen The screen control the Element is to be added to
* @param UID A unique String identifier for the Element
* @param position A Vector2f containing the x/y position of the Element
*/
public ChatBoxExt(Screen screen, String UID, Vector2f position) {
this(screen, UID, position,
screen.getStyle("Window").getVector2f("defaultSize"),
screen.getStyle("Window").getVector4f("resizeBorders"),
screen.getStyle("Window").getString("defaultImg")
);
}
/**
* Creates a new instance of the ChatBox control
*
* @param screen The screen control the Element is to be added to
* @param UID A unique String identifier for the Element
* @param position A Vector2f containing the x/y position of the Element
* @param dimensions A Vector2f containing the width/height dimensions of the Element
*/
public ChatBoxExt(Screen screen, String UID, Vector2f position, Vector2f dimensions) {
this(screen, UID, position, dimensions,
screen.getStyle("Window").getVector4f("resizeBorders"),
screen.getStyle("Window").getString("defaultImg")
);
}
/**
* Creates a new instance of the ChatBox control
*
* @param screen The screen control the Element is to be added to
* @param UID A unique String identifier for the Element
* @param position A Vector2f containing the x/y position of the Element
* @param dimensions A Vector2f containing the width/height dimensions of the Element
* @param resizeBorders A Vector4f containing the border information used when resizing the default image (x = N, y = W, z = E, w = S)
* @param defaultImg The default image to use for the ChatBox
*/
public ChatBoxExt(Screen screen, String UID, Vector2f position, Vector2f dimensions, Vector4f resizeBorders, String defaultImg) {
super(screen, UID, position, dimensions, resizeBorders, defaultImg);
this.setIsMovable(true);
this.setIsResizable(true);
this.setScaleNS(false);
this.setScaleEW(false);
chatForm = new Form(screen);
saContentPadding = screen.getStyle("ChatBox").getFloat("contentPadding");
indents = screen.getStyle("Window").getVector4f("contentIndents");
controlSpacing = screen.getStyle("Common").getFloat("defaultControlSpacing");
controlSize = screen.getStyle("Common").getFloat("defaultControlSize");
buttonWidth = screen.getStyle("Button").getVector2f("defaultSize").x;
scrollSize = screen.getStyle("ScrollArea#VScrollBar").getFloat("defaultControlSize");
saChatArea = new ScrollArea(screen, UID + ":ChatArea",
new Vector2f(
indents.y,
indents.x
),
new Vector2f(
getWidth()-indents.y-indents.z,
getHeight()-controlSize-(controlSpacing*2)-indents.x-indents.w
),
false
) {
@Override
public void controlResizeHook() {
float totalHeight = 0;
int index = 0;
for (Label l : displayMessages) {
l.setHeight(l.getTextElement().getHeight());
totalHeight += l.getHeight();
index++;
}
if (totalHeight > saChatArea.getHeight()) {
saChatArea.getScrollableArea().setHeight(totalHeight+(saChatArea.getPadding()*2));
}
totalHeight = 0;
for (Label l : displayMessages) {
totalHeight += l.getHeight();
l.setX(saContentPadding);
l.setWidth(saChatArea.getWidth()-(saContentPadding*2));
l.setY(saChatArea.getScrollableArea().getHeight()-totalHeight);
}
if (getVScrollBar() != null) {
getVScrollBar().setThumbScale();
}
adjustWidthForScroll();
}
};
saChatArea.setIsResizable(false);
saChatArea.setScaleEW(true);
saChatArea.setScaleNS(true);
saChatArea.setClippingLayer(saChatArea);
saChatArea.getScrollableArea().setIgnoreMouse(true);
saChatArea.getScrollableArea().setDockS(true);
saChatArea.setPadding(2);
saChatArea.setText("");
addChild(saChatArea);
btnChatFilter = new ButtonAdapter(
screen,
UID + ":ChatFilter",
new Vector2f(indents.y,getHeight()-controlSize-indents.w),
new Vector2f(controlSize, controlSize)
) {
@Override
public void onButtonMouseLeftUp(MouseButtonEvent evt, boolean isToggled) {
if (filters == null) {
filters = new Window(
screen,
getElementParent().getUID()+":FilterWindow",
new Vector2f(screen.getWidth()/2-225,screen.getHeight()/2-175),
new Vector2f(450,350)
);
filters.setWindowTitle("Chat Filters");
filters.setIsResizable(false);
filtersScrollArea = new ScrollArea(
screen,
filters.getUID() + ":ScrollArea",
new Vector2f(
indents.y,
indents.x+filters.getDragBarHeight()+controlSpacing
),
new Vector2f(
filters.getWidth()-indents.y-indents.z,
filters.getHeight()-indents.x-indents.w-filters.getDragBarHeight()-screen.getStyle("Window").getFloat("buttonAreaHeight")-(controlSpacing*2)
),
false
);
filtersScrollArea.getScrollableArea().setIgnoreMouse(true);
filters.addChild(filtersScrollArea);
ButtonAdapter btnFiltersClose = new ButtonAdapter(
screen,
filters.getUID() + ":btnClose",
new Vector2f(filters.getWidth()-buttonWidth-indents.z,filters.getHeight()-controlSize-controlSpacing-indents.w)
) {
@Override
public void onButtonMouseLeftUp(MouseButtonEvent evt, boolean isToggled) {
filters.hideWindow();
}
};
btnFiltersClose.setText("Close");
btnFiltersClose.setDockS(true);
btnFiltersClose.setDockE(true);
filters.addChild(btnFiltersClose);
screen.addElement(filters);
}
showFiltersWindow();
}
};
// btnChatFilter.setFontSize(16);
btnChatFilter.setDockS(true);
btnChatFilter.setDockW(true);
btnChatFilter.setScaleEW(false);
btnChatFilter.setScaleNS(false);
btnChatFilter.setText("F");
chatForm.addFormElement(btnChatFilter);
addChild(btnChatFilter);
sbDefaultChannel = new SelectBox(
screen,
UID + ":DefaultChannel",
new Vector2f(indents.y+controlSize, getHeight()-controlSize-indents.w),
new Vector2f(120, controlSize)
) {
@Override
public void onChange(int selectedIndex, Object value) {
// throw new UnsupportedOperationException("Not supported yet.");
}
};
sbDefaultChannel.setDockS(true);
sbDefaultChannel.setDockW(true);
sbDefaultChannel.setScaleEW(false);
sbDefaultChannel.setScaleNS(false);
chatForm.addFormElement(sbDefaultChannel);
addChild(sbDefaultChannel);
tfChatInput = new TextField(
screen,
UID + ":ChatInput",
new Vector2f(indents.y+sbDefaultChannel.getWidth()+(controlSize*2), getHeight()-controlSize-indents.w),
new Vector2f(getWidth()-sbDefaultChannel.getWidth()-(controlSize*2)-indents.y-indents.z-buttonWidth, controlSize)
) {
@Override
public void controlKeyPressHook(KeyInputEvent evt, String text) {
if (evt.getKeyCode() == sendKey) {
if (tfChatInput.getText().length() > 0) {
// strip the trailing character this key press appended to the input before sending
tfChatInput.setText(tfChatInput.getText().substring(0,tfChatInput.getText().length()-1));
sendMsg();
}
}
}
};
tfChatInput.setScaleEW(true);
tfChatInput.setScaleNS(false);
tfChatInput.setDockS(true);
tfChatInput.setDockW(true);
btnChatSendMsg = new ButtonAdapter(
screen,
UID + ":ChatSendMsg",
new Vector2f(getWidth()-indents.z-buttonWidth, getHeight()-controlSize-indents.w),
new Vector2f(buttonWidth,controlSize)
) {
@Override
public void onButtonMouseLeftUp(MouseButtonEvent evt, boolean toggled) {
sendMsg();
}
};
btnChatSendMsg.setScaleEW(false);
btnChatSendMsg.setScaleNS(false);
btnChatSendMsg.setDockS(true);
btnChatSendMsg.setDockE(true);
btnChatSendMsg.setText("Send");
chatForm.addFormElement(btnChatSendMsg);
addChild(btnChatSendMsg);
chatForm.addFormElement(tfChatInput);
addChild(tfChatInput);
populateEffects("Window");
}
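/*
* Usage sketch (assumes an existing Screen instance named screen; ChatBoxExt is
* abstract, so onSendMsg must be implemented by the caller):
*
* ChatBoxExt chat = new ChatBoxExt(screen, "Chat", new Vector2f(15, 15)) {
* @Override
* public void onSendMsg(Object command, String msg) {
* // route the message to your network layer / game logic here
* }
* };
* screen.addElement(chat);
*/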
public ScrollArea getChatArea() {
return saChatArea;
}
private void sendMsg() {
if (tfChatInput.getText().length() > 0) {
String command = (String)sbDefaultChannel.getSelectedListItem().getValue();
onSendMsg(command, tfChatInput.getText());
tfChatInput.setText("");
}
}
/**
* Call this method to display a message
* @param command The object associated with the appropriate ChatChannel
* @param msg The String message to display
*/
public void receiveMsg(Object command, String msg) {
// System.out.println(command);
ChatChannel channel = null;
if (command instanceof String)
channel = getChannelByStringCommand((String)command);
else
channel = getChannelByCommand(command);
chatMessages.add(new ChatMessage(channel, msg));
updateChatHistory();
}
private void updateChatHistory() {
if (chatMessages.size() > chatHistorySize) {
chatMessages.remove(0);
}
rebuildChat();
}
private void rebuildChat() {
String displayText = "";
int index = 0;
saChatArea.getScrollableArea().removeAllChildren();
saChatArea.getScrollableArea().setY(0);
saChatArea.getScrollableArea().setHeight(saChatArea.getHeight());
displayMessages.clear();
float totalHeight = 0;
for (ChatMessage cm : chatMessages) {
if (!cm.getChannel().getIsFiltered()) {
Label l = createMessageLabel(index, cm);
displayMessages.add(l);
saChatArea.addScrollableChild(l);
l.setHeight(l.getTextElement().getHeight());
totalHeight += l.getHeight();
index++;
}
}
saChatArea.getScrollableArea().setHeight(totalHeight+(saChatArea.getPadding()*2));
totalHeight = 0;
for (Label l : displayMessages) {
totalHeight += l.getHeight();
l.setX(saContentPadding);
l.setWidth(saChatArea.getWidth()-(saContentPadding*2));
l.setY(saChatArea.getScrollableArea().getHeight()-totalHeight);
}
saChatArea.scrollToBottom();
}
private Label createMessageLabel(int index, ChatMessage cm) {
String s = cm.getMsg();
Label l = new Label(
screen,
getUID() + ":Label" + index,
new Vector2f(0, 0),
new Vector2f(saChatArea.getWidth(),25)
);
l.setTextWrap(LineWrapMode.Word);
l.setScaleEW(true);
l.setScaleNS(false);
l.setDockN(true);
l.setDockW(true);
l.setIsResizable(false);
l.setIsMovable(false);
l.setIgnoreMouse(true);
l.setClippingLayer(saChatArea);
l.setClipPadding(saContentPadding);
l.setFontColor(cm.getChannel().getColor());
l.setFontSize(saChatArea.getFontSize());
String channelLabel = "";
if (showChannelLabels) channelLabel = "[" + cm.getChannel().getName() + "] ";
l.setText(channelLabel + s);
l.setHeight(l.getTextElement().getHeight());
l.setIgnoreMouse(true);
return l;
}
/**
* Sets the keyboard key code that sends the current message (in place of the Send button)
* @param sendKey The key code to listen for (e.g. a com.jme3.input.KeyInput constant)
*/
public void setSendKey(int sendKey) {
this.sendKey = sendKey;
}
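/*
* Example (assumes jME3's com.jme3.input.KeyInput constants and a ChatBoxExt
* instance named chat):
*
* chat.setSendKey(KeyInput.KEY_RETURN); // pressing Enter now sends the message
*/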
/**
* Abstract event method called when the user sends a message
* @param command The Object associated with the appropriate ChatChannel for the message
* @param msg The String message to display
*/
public abstract void onSendMsg(Object command, String msg);
/**
* Adds a ChatChannel that messages are displayed under and filtered by
* @param UID The unique string identifier of the ChatChannel
* @param name The ChatChannel display name
* @param command The command associated with the ChatChannel (e.g. /group /say /ooc etc)
* @param filterDisplayText The text to display for this ChatChannel in the Chat Filters window
* @param color The ColorRGBA to use when displaying messages associated with the ChatChannel
* @param visibleToUser Whether the channel is listed in the channel SelectBox for the user
*/
public final void addChatChannel(String UID, String name, Object command, String filterDisplayText, ColorRGBA color, boolean visibleToUser) {
channels.add(new ChatChannel(UID, name, command, filterDisplayText, color, visibleToUser));
if (visibleToUser) {
this.sbDefaultChannel.addListItem(name, command);
this.sbDefaultChannel.pack();
}
}
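/*
* Example (hypothetical channel values, mirroring the signature above; chat is
* an existing ChatBoxExt instance):
*
* chat.addChatChannel("say", "Say", "/say", "Show say messages", ColorRGBA.White, true);
* chat.addChatChannel("ooc", "OOC", "/ooc", "Show out-of-character messages", ColorRGBA.Orange, true);
*
* // when a message arrives for a channel, pass its command and text:
* chat.receiveMsg("/say", "Hello there!");
*/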
public void removeChatChannel(String name) {
ChatChannel channel = getChannelByName(name);
if (channel != null) {
channels.remove(channel);
this.sbDefaultChannel.removeListItem(name);
this.sbDefaultChannel.pack();
}
}
private ChatChannel getChannelByCommand(Object command) {
ChatChannel c = null;
for (ChatChannel channel : channels) {
if (channel.getCommand() == command) {
c = channel;
break;
}
}
return c;
}
private ChatChannel getChannelByStringCommand(String command) {
ChatChannel c = null;
for (ChatChannel channel : channels) {
if (((String)channel.getCommand()).equals(command)) {
c = channel;
break;
}
}
return c;
}
private ChatChannel getChannelByName(String name) {
ChatChannel c = null;
for (ChatChannel channel : channels) {
if (channel.getName().equals(name)) {
c = channel;
break;
}
}
return c;
}
/**
* Hides/shows the chat filter button that opens the Chat Filters window
* @param showFilterButton true to show the button, false to hide it
*/
public void showFilterButton(boolean showFilterButton) {
if (showFilterButton) {
if (btnChatFilter.getParent() == null) {
this.attachChild(btnChatFilter);
chatForm.addFormElement(btnChatFilter);
sbDefaultChannel.setX(indents.y+controlSize);
tfChatInput.setX(indents.y+sbDefaultChannel.getWidth()+(controlSize*2));
if (showSendButton)
tfChatInput.setWidth(getWidth()-sbDefaultChannel.getWidth()-(controlSize*2)-indents.y-indents.z-buttonWidth);
else
tfChatInput.setWidth(getWidth()-sbDefaultChannel.getWidth()-(controlSize*2)-indents.y-indents.z);
}
} else {
if (btnChatFilter.getParent() != null) {
btnChatFilter.removeFromParent();
chatForm.removeFormElement(btnChatFilter);
sbDefaultChannel.setX(indents.y);
tfChatInput.setX(indents.y+sbDefaultChannel.getWidth()+controlSize);
if (showSendButton)
tfChatInput.setWidth(getWidth()-sbDefaultChannel.getWidth()-controlSize-indents.y-indents.z-buttonWidth);
else
tfChatInput.setWidth(getWidth()-sbDefaultChannel.getWidth()-controlSize-indents.y-indents.z);
}
}
this.showFilterButton = showFilterButton;
}
/**
* Hides/shows the Send button
* @param showSendButton true to show the button, false to hide it
*/
public void showSendButton(boolean showSendButton) {
if (showSendButton) {
if (btnChatSendMsg.getParent() == null) {
this.attachChild(btnChatSendMsg);
chatForm.addFormElement(btnChatSendMsg);
tfChatInput.setWidth(tfChatInput.getWidth()-btnChatSendMsg.getWidth());
}
} else {
if (btnChatSendMsg.getParent() != null) {
btnChatSendMsg.removeFromParent();
chatForm.removeFormElement(btnChatSendMsg);
tfChatInput.setWidth(tfChatInput.getWidth()+btnChatSendMsg.getWidth());
}
}
this.showSendButton = showSendButton;
}
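/*
* Example: a minimal chat bar driven only by the Enter key (hypothetical usage
* of the two toggles above plus setSendKey):
*
* chat.showFilterButton(false);
* chat.showSendButton(false);
* chat.setSendKey(KeyInput.KEY_RETURN);
*/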
public void setShowChannelLabels(boolean showChannelLabels) {
this.showChannelLabels = showChannelLabels;
}
public class ChatMessage {
private ChatChannel channel;
private String msg;
public ChatMessage(ChatChannel channel, String msg) {
this.channel = channel;
this.msg = msg;
}
public ChatChannel getChannel() {
return channel;
}
public String getMsg() {
return this.msg;
}
}
public class ChatChannel {
private String UID;
private String name;
private String filterDisplayText;
private Object command;
private ColorRGBA color;
private boolean visibleToUser;
private boolean isFiltered = false;
public ChatChannel(String UID, String name, Object command, String filterDisplayText, ColorRGBA color, boolean visibleToUser) {
this.UID = UID;
this.name = name;
this.command = command;
this.filterDisplayText = filterDisplayText;
this.color = color;
this.visibleToUser = visibleToUser;
}
public String getUID() { return this.UID; }
public String getName() {
return this.name;
}
public Object getCommand() {
return this.command;
}
public ColorRGBA getColor() {
return this.color;
}
public boolean getVisibleToUser() { return visibleToUser; }
public void setIsFiltered(boolean isFiltered) { this.isFiltered = isFiltered; }
public boolean getIsFiltered() { return this.isFiltered; }
public String getFilterDisplayText() { return filterDisplayText; }
}
/**
* Called by the Chat Filters window to filter/unfilter a channel and rebuild the chat display.
* @param channel The ChatChannel to update
* @param filter true to hide the channel's messages, false to show them
*/
public void setChannelFiltered(ChatChannel channel, boolean filter) {
channel.setIsFiltered(filter);
rebuildChat();
}
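/*
* Example (assumes a ChatChannel reference kept by the caller, since the
* channel lookup helpers in this class are private):
*
* chat.setChannelFiltered(someChannel, true); // hide the channel's messages
* chat.setChannelFiltered(someChannel, false); // show them again
*/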
protected void showFiltersWindow() {
Element scrollableArea = filtersScrollArea.getScrollableArea();
filtersScrollArea.setPadding(2);
scrollableArea.removeAllChildren();
scrollableArea.setY(filtersScrollArea.getHeight());
scrollableArea.setHeight(0);
boolean init = true;
String finalString = "";
float currentHeight = 0;
int index = 0;
filterLineHeight = BitmapTextUtil.getTextLineHeight(scrollableArea, "Xg");
for (ChatChannel channel : channels) {
if (!channel.getFilterDisplayText().equals("")) {
if (init) {
finalString = " " + channel.getFilterDisplayText() + " ";
init = false;
} else {
finalString += "\n " + channel.getFilterDisplayText() + " ";
}
currentHeight += filterLineHeight;
}
}
currentHeight += scrollableArea.getTextPadding()*2;
scrollableArea.setHeight(currentHeight);
scrollableArea.setWidth(getWidth());
scrollableArea.setText(finalString);
// scrollableArea.setTextPosition(0,-filterLineHeight);
index = 0;
for (ChatChannel channel : channels) {
this.addCheckBox(index, channel);
index++;
}
filtersScrollArea.scrollToTop();
filters.showWindow();
}
private void addCheckBox(int index, ChatChannel channel) {
CheckBox checkbox = new CheckBox(screen, filtersScrollArea.getUID() + ":CheckBox:" + index,
new Vector2f(8,filtersScrollArea.getTextPadding()+(index*filterLineHeight))
) {
@Override
public void onButtonMouseLeftUp(MouseButtonEvent evt, boolean isToggled) {
((ChatChannel)getElementUserData()).setIsFiltered(!isToggled);
rebuildChat();
}
};
checkbox.setElementUserData(channel);
checkbox.setScaleEW(false);
checkbox.setScaleNS(false);
checkbox.setDockS(true);
checkbox.setDockW(true);
checkbox.setIsResizable(false);
checkbox.setIsMovable(false);
checkbox.setIgnoreMouse(false);
checkbox.setClippingLayer(filtersScrollArea);
checkbox.setClipPadding(filtersScrollArea.getScrollableArea().getTextPadding());
if (!channel.getIsFiltered())
checkbox.setIsChecked(true);
filtersScrollArea.addScrollableChild(checkbox);
// if (!getIsVisible())
// checkbox.hide();
}
/**
* Sets the ToolTip text to display for mouse focus of the TextField input
* @param tip
*/
public void setToolTipTextInput(String tip) {
this.tfChatInput.setToolTipText(tip);
}
/**
* Sets the ToolTip text to display for mouse focus of the Send button
* @param tip
*/
public void setToolTipSendButton(String tip) {
this.btnChatSendMsg.setToolTipText(tip);
}
}
|
package net.coobird.thumbnailator;
import java.awt.Point;
import net.coobird.thumbnailator.filters.Caption;
import net.coobird.thumbnailator.filters.ImageFilter;
import net.coobird.thumbnailator.filters.Watermark;
/**
* An enum of predefined {@link Position}s.
* <p>
* Primary use of this enum is for selecting a position to place watermarks
* (using the {@link Watermark} class), captions (using the {@link Caption}
* class) and other {@link ImageFilter}s.
*
* @author coobird
*
*/
public enum Positions implements Position
{
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be placed at the top left-hand corner of the enclosing
* image.
*/
TOP_LEFT()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = insetLeft;
int y = insetTop;
return new Point(x, y);
}
},
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be horizontally centered at the top of the enclosing image.
*/
TOP_CENTER()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = (enclosingWidth / 2) - (width / 2);
int y = insetTop;
return new Point(x, y);
}
},
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be placed at the top right-hand corner of the enclosing
* image.
*/
TOP_RIGHT()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = enclosingWidth - width - insetRight;
int y = insetTop;
return new Point(x, y);
}
},
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be placed vertically centered at the left-hand side of
* the enclosing image.
*/
CENTER_LEFT()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = insetLeft;
int y = (enclosingHeight / 2) - (height / 2);
return new Point(x, y);
}
},
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* horizontally and vertically centered in the enclosing image.
*/
CENTER()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = (enclosingWidth / 2) - (width / 2);
int y = (enclosingHeight / 2) - (height / 2);
return new Point(x, y);
}
},
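// Worked example: centering a 100x50 image inside a 400x300 enclosure gives
// x = 400/2 - 100/2 = 150 and y = 300/2 - 50/2 = 125 (insets are not used here).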
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be placed vertically centered at the right-hand side of
* the enclosing image.
*/
CENTER_RIGHT()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = enclosingWidth - width - insetRight;
int y = (enclosingHeight / 2) - (height / 2);
return new Point(x, y);
}
},
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be placed at the bottom left-hand corner of the enclosing
* image.
*/
BOTTOM_LEFT()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = insetLeft;
int y = enclosingHeight - height - insetBottom;
return new Point(x, y);
}
},
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be horizontally centered at the bottom of the enclosing
* image.
*/
BOTTOM_CENTER()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = (enclosingWidth / 2) - (width / 2);
int y = enclosingHeight - height - insetBottom;
return new Point(x, y);
}
},
/**
* Calculates the {@link Point} at which an enclosed image should be placed
* if it is to be placed at the bottom right-hand corner of the enclosing
* image.
*/
BOTTOM_RIGHT()
{
public Point calculate(int enclosingWidth, int enclosingHeight,
int width, int height, int insetLeft, int insetRight,
int insetTop, int insetBottom)
{
int x = enclosingWidth - width - insetRight;
int y = enclosingHeight - height - insetBottom;
return new Point(x, y);
}
},
;
}
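/*
* Direct use of a constant (follows directly from the calculate methods above):
*
* Point p = Positions.BOTTOM_RIGHT.calculate(400, 300, 100, 50, 0, 10, 0, 10);
* // p.x = 400 - 100 - 10 = 290, p.y = 300 - 50 - 10 = 240
*
* Typical higher-level use, assuming Thumbnailator's Thumbnails builder API
* (not shown in this file):
*
* Thumbnails.of(sourceImage)
* .size(400, 300)
* .watermark(Positions.BOTTOM_RIGHT, watermarkImage, 0.5f)
* .toFile("out.png");
*/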
|