// NOTE(review): removed a stray export artifact ("answer stringlengths ... / |---|")
// that preceded the package declaration and would not compile as Java.
package org.zstack.rest;
import okhttp3.*;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.reflections.Reflections;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.util.AntPathMatcher;
import org.springframework.web.util.UriComponentsBuilder;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusEventListener;
import org.zstack.core.retry.Retry;
import org.zstack.core.retry.RetryCondition;
import org.zstack.header.Component;
import org.zstack.header.MapField;
import org.zstack.header.apimediator.ApiMediatorConstant;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.identity.SessionInventory;
import org.zstack.header.identity.SuppressCredentialCheck;
import org.zstack.header.message.*;
import org.zstack.header.query.APIQueryMessage;
import org.zstack.header.query.APIQueryReply;
import org.zstack.header.query.QueryCondition;
import org.zstack.header.query.QueryOp;
import org.zstack.header.rest.*;
import org.zstack.rest.sdk.DocumentGenerator;
import org.zstack.rest.sdk.SdkFile;
import org.zstack.rest.sdk.SdkTemplate;
import org.zstack.utils.*;
import org.zstack.utils.gson.JSONObjectUtil;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.path.PathUtil;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.net.URLDecoder;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static java.util.Arrays.asList;
public class RestServer implements Component, CloudBusEventListener {
private static final CLogger logger = Utils.getLogger(RestServer.class);
// dedicated logger (name "api.request") used for request/response/webhook tracing
private static final Logger requestLogger = LogManager.getLogger("api.request");
// per-thread context of the HTTP request currently being served; set in handle()
// and callWebHook(), read by sendResponse()/writeResponse()/sendMessage()
private static ThreadLocal<RequestInfo> requestInfo = new ThreadLocal<>();
// shared client for delivering webhook callbacks
private static final OkHttpClient http = new OkHttpClient();
private MediaType JSON = MediaType.parse("application/json; charset=utf-8");
@Autowired
private CloudBus bus;
@Autowired
private AsyncRestApiStore asyncStore;
@Autowired
private RESTFacade restf;
static class RequestInfo {
    // don't save session to database as JSON
    // it's not JSON-dumpable
    transient HttpSession session;
    String remoteHost;
    String requestUrl;
    HttpHeaders headers = new HttpHeaders();

    public RequestInfo(HttpServletRequest req) {
        session = req.getSession();
        remoteHost = req.getRemoteHost();

        // snapshot every request header into the Spring HttpHeaders container
        Enumeration names = req.getHeaderNames();
        while (names.hasMoreElements()) {
            String headerName = names.nextElement().toString();
            headers.add(headerName, req.getHeader(headerName));
        }

        try {
            requestUrl = URLDecoder.decode(req.getRequestURI(), "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new CloudRuntimeException(e);
        }
    }
}
// ant-style pattern (e.g. "{API_VERSION}/{ASYNC_JOB_PATH}/{uuid}") matched in handle()
// to route async-job status polling to handleJobQuery()
private static final String ASYNC_JOB_PATH_PATTERN = String.format("%s/%s/{uuid}", RestConstants.API_VERSION, RestConstants.ASYNC_JOB_PATH);
/**
 * Generates REST documentation templates under the given path by delegating
 * to the Groovy-based documentation generator script.
 */
public static void generateDocTemplate(String path, DocumentGenerator.DocMode mode) {
    DocumentGenerator generator = GroovyUtils.newInstance("scripts/RestDocumentationGenerator.groovy");
    generator.generateDocTemplates(path, mode);
}
/**
 * Renders markdown documentation from templates under {@code path};
 * output goes to ~/zstack-markdown.
 */
public static void generateMarkdownDoc(String path) {
    String outputDir = PathUtil.join(System.getProperty("user.home"), "zstack-markdown");
    DocumentGenerator generator = GroovyUtils.newInstance("scripts/RestDocumentationGenerator.groovy");
    generator.generateMarkDown(path, outputDir);
}
/**
 * Generates the Java SDK source files into ~/zstack-sdk/java.
 * Each concrete @RestRequest-annotated API class is rendered through the
 * Groovy SdkApiTemplate; shared data structures are rendered afterwards.
 */
public static void generateJavaSdk() {
    String path = PathUtil.join(System.getProperty("user.home"), "zstack-sdk/java");
    File folder = new File(path);
    if (!folder.exists()) {
        folder.mkdirs();
    }

    try {
        Class clz = GroovyUtils.getClass("scripts/SdkApiTemplate.groovy", RestServer.class.getClassLoader());

        Set<Class<?>> apiClasses = Platform.getReflections().getTypesAnnotatedWith(RestRequest.class)
                .stream().filter(it -> it.isAnnotationPresent(RestRequest.class)).collect(Collectors.toSet());

        List<SdkFile> allFiles = new ArrayList<>();
        for (Class apiClz : apiClasses) {
            // abstract API classes have no SDK counterpart
            if (Modifier.isAbstract(apiClz.getModifiers())) {
                continue;
            }

            SdkTemplate apiTemplate = (SdkTemplate) clz.getConstructor(Class.class).newInstance(apiClz);
            allFiles.addAll(apiTemplate.generate());
        }

        // data structures shared by the generated API files
        SdkTemplate dataTemplate = GroovyUtils.newInstance("scripts/SdkDataStructureGenerator.groovy", RestServer.class.getClassLoader());
        allFiles.addAll(dataTemplate.generate());

        for (SdkFile f : allFiles) {
            String fpath = PathUtil.join(path, f.getFileName());
            FileUtils.writeStringToFile(new File(fpath), f.getContent());
        }
    } catch (Exception e) {
        logger.warn(e.getMessage(), e);
        throw new CloudRuntimeException(e);
    }
}
@Override
public boolean handleEvent(Event e) {
    if (!(e instanceof APIEvent)) {
        return false;
    }

    // mark the pending async job as complete; a non-null RequestData with a
    // webhook set means the caller registered a callback URL for the result
    RequestData d = asyncStore.complete((APIEvent) e);
    if (d != null && d.webHook != null) {
        try {
            callWebHook(d);
        } catch (Throwable t) {
            throw new CloudRuntimeException(t);
        }
    }

    return false;
}
// Thrown inside the webhook delivery code to signal that the HTTP callback
// should be retried (see the @RetryCondition in callWebHook: 15 attempts,
// 2s interval). Plain RuntimeException pass-through constructors.
static class WebHookRetryException extends RuntimeException {
    public WebHookRetryException() {
    }

    public WebHookRetryException(String message) {
        super(message);
    }

    public WebHookRetryException(String message, Throwable cause) {
        super(message, cause);
    }

    public WebHookRetryException(Throwable cause) {
        super(cause);
    }

    public WebHookRetryException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
// Delivers the final result of an async API to the webhook URL the caller
// registered. Posts the serialized ApiResponse with job-uuid/job-success
// headers; retried on IO errors or non-2xx responses.
private void callWebHook(RequestData d) throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    // restore the original request's context on this thread; writeResponse()
    // reads it from the ThreadLocal (e.g. for the JSON-schema header check)
    requestInfo.set(d.requestInfo);

    AsyncRestQueryResult ret = asyncStore.query(d.apiMessage.getId());

    ApiResponse response = new ApiResponse();

    // task is done
    APIEvent evt = ret.getResult();
    if (evt.isSuccess()) {
        RestResponseWrapper w = responseAnnotationByClass.get(evt.getClass());
        if (w == null) {
            throw new CloudRuntimeException(String.format("cannot find RestResponseWrapper for the class[%s]", evt.getClass()));
        }

        writeResponse(response, w, ret.getResult());
    } else {
        response.setError(evt.getError());
    }

    String body = JSONObjectUtil.toJsonString(response);

    HttpUrl url = HttpUrl.parse(d.webHook);
    Request.Builder rb = new Request.Builder().url(url)
            .post(RequestBody.create(JSON, body))
            .addHeader(RestConstants.HEADER_JOB_UUID, d.apiMessage.getId())
            .addHeader(RestConstants.HEADER_JOB_SUCCESS, String.valueOf(evt.isSuccess()));
    Request request = rb.build();

    // deliver with retry: WebHookRetryException triggers up to 15 attempts,
    // 2 seconds apart (see @RetryCondition below)
    new Retry<Void>() {
        String __name__ = String.format("call-webhook-%s", d.webHook);

        @Override
        @RetryCondition(onExceptions = {WebHookRetryException.class}, times = 15, interval = 2)
        protected Void call() {
            try {
                if (requestLogger.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder(String.format("Call Web-Hook[%s] (to %s%s)", d.webHook, d.requestInfo.remoteHost, d.requestInfo.requestUrl));
                    sb.append(String.format(" Body: %s", body));
                    requestLogger.trace(sb.toString());
                }

                // any status outside [200, 300) is treated as a retryable failure;
                // try-with-resources closes the okhttp Response body
                try (Response r = http.newCall(request).execute()) {
                    if (r.code() < 200 || r.code() >= 300) {
                        throw new WebHookRetryException(String.format("failed to post to the webhook[%s], %s",
                                d.webHook, r.toString()));
                    }
                }
            } catch (IOException e) {
                throw new WebHookRetryException(e);
            }

            return null;
        }
    }.run();
}
/**
 * Routing metadata for one API message class: its @RestRequest annotation,
 * URL path(s), URL-variable -> field mapping, and (for actions) the action
 * name used as the JSON body key. Also converts query-string parameters to
 * API field values for GET/DELETE requests.
 */
class Api {
    Class apiClass;
    Class apiResponseClass;
    RestRequest requestAnnotation;
    RestResponse responseAnnotation;

    // URL variable name -> API field name, parsed from @RestRequest.mappingFields
    Map<String, String> requestMappingFields;

    String path;
    List<String> optionalPaths = new ArrayList<>();
    // for isAction() APIs: the uncapitalized class name without API/Msg, used
    // as the key of the action parameter in the PUT body
    String actionName;

    // all non-static, non-@APINoSee fields of the API class, keyed by name
    Map<String, Field> allApiClassFields = new HashMap<>();

    @Override
    public String toString() {
        return String.format("%s-%s", requestAnnotation.method(), "null".equals(requestAnnotation.path()) ? apiClass.getName() : path);
    }

    Api(Class clz, RestRequest at) {
        apiClass = clz;
        requestAnnotation = at;
        apiResponseClass = at.responseClass();
        path = String.format("%s%s", RestConstants.API_VERSION, at.path());

        if (at.mappingFields().length > 0) {
            requestMappingFields = new HashMap<>();

            for (String mf : at.mappingFields()) {
                String[] kv = mf.split("=");
                if (kv.length != 2) {
                    throw new CloudRuntimeException(String.format("bad requestMappingField[%s] of %s", mf, apiClass));
                }

                requestMappingFields.put(kv[0].trim(), kv[1].trim());
            }
        }

        responseAnnotation = (RestResponse) apiResponseClass.getAnnotation(RestResponse.class);
        DebugUtils.Assert(responseAnnotation != null, String.format("%s must be annotated with @RestResponse", apiResponseClass));

        Collections.addAll(optionalPaths, at.optionalPaths());
        optionalPaths = optionalPaths.stream().map(p -> String.format("%s%s", RestConstants.API_VERSION, p)).collect(Collectors.toList());

        if (at.isAction()) {
            // e.g. APIStartVmInstanceMsg -> startVmInstance
            actionName = StringUtils.removeStart(apiClass.getSimpleName(), "API");
            actionName = StringUtils.removeEnd(actionName, "Msg");
            actionName = StringUtils.uncapitalize(actionName);
        }

        if (!at.isAction() && requestAnnotation.parameterName().isEmpty() && requestAnnotation.method() == HttpMethod.PUT) {
            throw new CloudRuntimeException(String.format("Invalid @RestRequest of %s, either isAction must be set to true or" +
                    " parameterName is set to a non-empty string", apiClass.getName()));
        }

        List<Field> fs = FieldUtils.getAllFields(apiClass);
        fs = fs.stream().filter(f -> !f.isAnnotationPresent(APINoSee.class) && !Modifier.isStatic(f.getModifiers())).collect(Collectors.toList());

        for (Field f : fs) {
            allApiClassFields.put(f.getName(), f);

            // GET parameters arrive as strings, so validate that every field is
            // representable in a query string
            if (requestAnnotation.method() == HttpMethod.GET) {
                if (APIQueryMessage.class.isAssignableFrom(apiClass)) {
                    // query messages are specially handled
                    continue;
                }

                if (Collection.class.isAssignableFrom(f.getType())) {
                    Class gtype = FieldUtils.getGenericType(f);
                    if (gtype == null) {
                        throw new CloudRuntimeException(String.format("%s.%s is of collection type but doesn't not have" +
                                " a generic type", apiClass, f.getName()));
                    }

                    if (!gtype.getName().startsWith("java.")) {
                        throw new CloudRuntimeException(String.format("%s.%s is of collection type with a generic type" +
                                "[%s] not belonging to JDK", apiClass, f.getName(), gtype));
                    }
                } else if (Map.class.isAssignableFrom(f.getType())) {
                    throw new CloudRuntimeException(String.format("%s.%s is of map type, however, the GET method doesn't" +
                            " support query parameters of map type", apiClass, f.getName()));
                }
            }
        }
    }

    // returns the API field name a URL variable maps to, or null if unmapped
    String getMappingField(String key) {
        if (requestMappingFields == null) {
            return null;
        }

        return requestMappingFields.get(key);
    }

    // Handles a query parameter of the form "fieldName.key", targeting a
    // @MapField-annotated map field of the API class; accumulates into params.
    private void mapQueryParameterToApiFieldValue(String name, String[] vals, Map<String, Object> params) throws RestException {
        // BUGFIX: split with a limit of 2 so map keys that themselves contain
        // '.' are kept intact, and reject malformed names (e.g. "tags.") with
        // 400 instead of crashing with ArrayIndexOutOfBoundsException (500)
        String[] pairs = name.split("\\.", 2);
        if (pairs.length != 2 || pairs[1].isEmpty()) {
            throw new RestException(HttpStatus.BAD_REQUEST.value(),
                    String.format("Invalid map query parameter[%s], expected format is fieldName.key", name));
        }

        String fname = pairs[0];
        String key = pairs[1];

        Field f = allApiClassFields.get(fname);
        if (f == null) {
            logger.warn(String.format("unknown map query parameter[%s], ignore", name));
            return;
        }

        MapField at = f.getAnnotation(MapField.class);
        DebugUtils.Assert(at != null, String.format("%s::%s must be annotated by @MapField", apiClass, fname));

        Map m = (Map) params.get(fname);
        if (m == null) {
            m = new HashMap();
            params.put(fname, m);
        }

        if (m.containsKey(key)) {
            throw new RestException(HttpStatus.BAD_REQUEST.value(),
                    String.format("duplicate map query parameter[%s], there has been a parameter with the same map key", name));
        }

        if (Collection.class.isAssignableFrom(at.valueType())) {
            m.put(key, asList(vals));
        } else {
            if (vals.length > 1) {
                throw new RestException(HttpStatus.BAD_REQUEST.value(),
                        String.format("Invalid query parameter[%s], only one value is allowed for the parameter but" +
                                " multiple values found", name));
            }

            m.put(key, vals[0]);
        }
    }

    // Converts query-string value(s) to the typed value of the named API field;
    // returns null for unknown parameters (caller logs and ignores them).
    Object queryParameterToApiFieldValue(String name, String[] vals) throws RestException {
        Field f = allApiClassFields.get(name);
        if (f == null) {
            return null;
        }

        if (Collection.class.isAssignableFrom(f.getType())) {
            Class gtype = FieldUtils.getGenericType(f);
            List lst = new ArrayList();
            for (String v : vals) {
                lst.add(TypeUtils.stringToValue(v, gtype));
            }

            return lst;
        } else {
            if (vals.length > 1) {
                throw new RestException(HttpStatus.BAD_REQUEST.value(),
                        String.format("Invalid query parameter[%s], only one value is allowed for the parameter but" +
                                " multiple values found", name));
            }

            return TypeUtils.stringToValue(vals[0], f.getType());
        }
    }
}
// Carries an HTTP status code plus error text; thrown by the request-handling
// methods and converted into the HTTP response by handle().
class RestException extends Exception {
    private int statusCode;
    private String error;

    public RestException(int statusCode, String error) {
        this.statusCode = statusCode;
        this.error = error;
    }
}
/**
 * Caches the @RestResponse mapping of an API reply/event class:
 * response JSON key -> reply property name, parsed from fieldsTo().
 * The special single value "all" maps every visible field to itself.
 */
class RestResponseWrapper {
    RestResponse annotation;
    Map<String, String> responseMappingFields = new HashMap<>();
    Class apiResponseClass;

    public RestResponseWrapper(RestResponse annotation, Class apiResponseClass) {
        this.annotation = annotation;
        this.apiResponseClass = apiResponseClass;

        String[] fieldsTo = annotation.fieldsTo();
        if (fieldsTo.length == 0) {
            return;
        }

        responseMappingFields = new HashMap<>();

        if (fieldsTo.length == 1 && "all".equals(fieldsTo[0])) {
            // map every non-static, non-@APINoSee field to a key of the same name
            FieldUtils.getAllFields(apiResponseClass).stream()
                    .filter(f -> !f.isAnnotationPresent(APINoSee.class) && !Modifier.isStatic(f.getModifiers()))
                    .forEach(f -> responseMappingFields.put(f.getName(), f.getName()));
            return;
        }

        for (String mf : fieldsTo) {
            String[] kv = mf.split("=");
            switch (kv.length) {
            case 2:
                // "jsonKey=propertyName"
                responseMappingFields.put(kv[0].trim(), kv[1].trim());
                break;
            case 1:
                // bare name maps to itself
                responseMappingFields.put(kv[0].trim(), kv[0].trim());
                break;
            default:
                throw new CloudRuntimeException(String.format("bad mappingFields[%s] of %s", mf, apiResponseClass));
            }
        }
    }
}
// Subscribes to all APIEvents so handleEvent() can complete async jobs
// and trigger webhooks when their results arrive on the bus.
void init() throws IllegalAccessException, InstantiationException {
    bus.subscribeEvent(this, new APIEvent());
}
// matches request paths against the ant-style patterns registered in 'apis'
private AntPathMatcher matcher = new AntPathMatcher();
// path -> Api, or path -> Collection of Api when several APIs share one path
// (handle() checks 'instanceof Api' to tell the two apart)
private Map<String, Object> apis = new HashMap<>();
// API event/reply class -> its cached @RestResponse mapping
private Map<Class, RestResponseWrapper> responseAnnotationByClass = new HashMap<>();
/**
 * Reads the whole request body and snapshots the headers into a Spring
 * HttpEntity. Any failure is logged and rethrown as CloudRuntimeException.
 */
private HttpEntity<String> toHttpEntity(HttpServletRequest req) {
    try {
        // BUGFIX: try-with-resources guarantees the reader is closed even if
        // reading throws; the original closed it only on the success path
        String body;
        try (java.io.Reader reader = req.getReader()) {
            body = IOUtils.toString(reader);
        }

        HttpHeaders header = new HttpHeaders();
        for (Enumeration e = req.getHeaderNames(); e.hasMoreElements() ;) {
            String name = e.nextElement().toString();
            header.add(name, req.getHeader(name));
        }

        return new HttpEntity<>(body, header);
    } catch (Exception e) {
        logger.warn(e.getMessage(), e);
        throw new CloudRuntimeException(e);
    }
}
/**
 * Writes the HTTP status and body to the servlet response, tracing the
 * outgoing response first when request logging is enabled.
 */
private void sendResponse(int statusCode, String body, HttpServletResponse rsp) throws IOException {
    if (requestLogger.isTraceEnabled()) {
        RequestInfo info = requestInfo.get();
        String trace = String.format("[ID: %s] Response to %s (%s),", info.session.getId(), info.remoteHost, info.requestUrl)
                + String.format(" Status Code: %s,", statusCode)
                + String.format(" Body: %s", body == null || body.isEmpty() ? null : body);
        requestLogger.trace(trace);
    }

    rsp.setStatus(statusCode);
    rsp.getWriter().write(body == null ? "" : body);
}
/**
 * Returns the request URI, UTF-8 URL-decoded, with the servlet context path
 * stripped off when one is present.
 */
private String getDecodedUrl(HttpServletRequest req) {
    String uri = req.getRequestURI();
    if (req.getContextPath() != null) {
        uri = StringUtils.removeStart(uri, req.getContextPath());
    }

    try {
        return URLDecoder.decode(uri, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        throw new CloudRuntimeException(e);
    }
}
// Entry point for every REST request: records the per-thread request context,
// traces the request, then routes it to the async-job poller, a unique API,
// or a set of APIs sharing the path. RestExceptions become their status code;
// anything else becomes a 500.
void handle(HttpServletRequest req, HttpServletResponse rsp) throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    // downstream code (sendResponse/writeResponse/sendMessage) reads this ThreadLocal
    requestInfo.set(new RequestInfo(req));
    rsp.setCharacterEncoding("utf-8");

    String path = getDecodedUrl(req);
    HttpEntity<String> entity = toHttpEntity(req);

    if (requestLogger.isTraceEnabled()) {
        StringBuilder sb = new StringBuilder(String.format("[ID: %s, Method: %s] Request from %s (to %s), ",
                req.getSession().getId(), req.getMethod(),
                req.getRemoteHost(), URLDecoder.decode(req.getRequestURI(), "UTF-8")));
        sb.append(String.format(" Headers: %s,", JSONObjectUtil.toJsonString(entity.getHeaders())));
        if (req.getQueryString() != null && !req.getQueryString().isEmpty()) {
            sb.append(String.format(" Query: %s,", URLDecoder.decode(req.getQueryString(), "UTF-8")));
        }
        sb.append(String.format(" Body: %s", entity.getBody().isEmpty() ? null : entity.getBody()));

        requestLogger.trace(sb.toString());
    }

    // async-job status polling has a dedicated handler
    if (matcher.match(ASYNC_JOB_PATH_PATTERN, path)) {
        handleJobQuery(req, rsp);
        return;
    }

    // exact path match first, then fall back to ant-style pattern matching
    Object api = apis.get(path);
    if (api == null) {
        for (String p : apis.keySet()) {
            if (matcher.match(p, path)) {
                api = apis.get(p);
                break;
            }
        }
    }

    if (api == null) {
        sendResponse(HttpStatus.NOT_FOUND.value(), String.format("no api mapping to %s", path), rsp);
        return;
    }

    try {
        // a path maps either to a single Api or to a Collection of Apis
        if (api instanceof Api) {
            handleUniqueApi((Api) api, entity, req, rsp);
        } else {
            handleNonUniqueApi((Collection)api, entity, req, rsp);
        }
    } catch (RestException e) {
        sendResponse(e.statusCode, e.error, rsp);
    } catch (Throwable e) {
        logger.warn(String.format("failed to handle API to %s", path), e);
        sendResponse(HttpStatus.INTERNAL_SERVER_ERROR.value(), e.getMessage(), rsp);
    }
}
// Serves GET {API_VERSION}/{ASYNC_JOB_PATH}/{uuid}: reports the status of an
// async API job — 404 expired, 202 still processing, 200 success, 503 failure.
private void handleJobQuery(HttpServletRequest req, HttpServletResponse rsp) throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    if (!req.getMethod().equals(HttpMethod.GET.name())) {
        sendResponse(HttpStatus.METHOD_NOT_ALLOWED.value(), "only GET method is allowed for querying job status", rsp);
        return;
    }

    // the job uuid is the final path segment (see ASYNC_JOB_PATH_PATTERN)
    Map<String, String> vars = matcher.extractUriTemplateVariables(ASYNC_JOB_PATH_PATTERN, getDecodedUrl(req));
    String uuid = vars.get("uuid");
    AsyncRestQueryResult ret = asyncStore.query(uuid);

    if (ret.getState() == AsyncRestState.expired) {
        sendResponse(HttpStatus.NOT_FOUND.value(), "the job has been expired", rsp);
        return;
    }

    ApiResponse response = new ApiResponse();

    if (ret.getState() == AsyncRestState.processing) {
        // still running: 202 with an empty response body
        sendResponse(HttpStatus.ACCEPTED.value(), response, rsp);
        return;
    }

    // task is done
    APIEvent evt = ret.getResult();
    if (evt.isSuccess()) {
        RestResponseWrapper w = responseAnnotationByClass.get(evt.getClass());
        if (w == null) {
            throw new CloudRuntimeException(String.format("cannot find RestResponseWrapper for the class[%s]", evt.getClass()));
        }

        writeResponse(response, w, ret.getResult());
        sendResponse(HttpStatus.OK.value(), response, rsp);
    } else {
        response.setError(evt.getError());
        sendResponse(HttpStatus.SERVICE_UNAVAILABLE.value(), response, rsp);
    }
}
/**
 * Serializes an ApiResponse and sends it; an empty response produces an
 * empty body rather than "{}".
 */
private void sendResponse(int statusCode, ApiResponse response, HttpServletResponse rsp) throws IOException {
    String body = response.isEmpty() ? "" : JSONObjectUtil.toJsonString(response);
    sendResponse(statusCode, body, rsp);
}
/**
 * Dispatches a request whose path is shared by several APIs. The HTTP method
 * selects which one: POST -> the creational API, PUT -> the action API whose
 * actionName appears in the body, GET -> the query API, DELETE -> the delete
 * API; anything else is 405.
 */
private void handleNonUniqueApi(Collection<Api> apis, HttpEntity<String> entity, HttpServletRequest req, HttpServletResponse rsp) throws RestException, InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException, IOException {
    Map m = JSONObjectUtil.toObject(entity.getBody(), LinkedHashMap.class);
    final Api api;
    String parameterName = null;

    switch (req.getMethod()) {
    case "POST": {
        // create API
        Optional<Api> candidate = apis.stream().filter(a -> a.requestAnnotation.method().name().equals("POST")).findAny();
        if (!candidate.isPresent()) {
            throw new RestException(HttpStatus.INTERNAL_SERVER_ERROR.value(), String.format("No creational API found" +
                    " for the path[%s]", req.getRequestURI()));
        }
        api = candidate.get();
        break;
    }
    case "PUT": {
        // action API: pick the one whose action name is a key of the body
        Optional<Api> candidate = apis.stream().filter(a -> m.containsKey(a.actionName)).findAny();
        if (!candidate.isPresent()) {
            throw new RestException(HttpStatus.BAD_REQUEST.value(), String.format("the body doesn't contain action mapping" +
                    " to the URL[%s]", getDecodedUrl(req)));
        }
        api = candidate.get();
        parameterName = api.actionName;
        break;
    }
    case "GET": {
        // query API
        Optional<Api> candidate = apis.stream().filter(a -> a.requestAnnotation.method().name().equals("GET")).findAny();
        if (!candidate.isPresent()) {
            throw new RestException(HttpStatus.INTERNAL_SERVER_ERROR.value(), String.format("No query API found" +
                    " for the path[%s]", req.getRequestURI()));
        }
        api = candidate.get();
        break;
    }
    case "DELETE": {
        // DELETE API
        Optional<Api> candidate = apis.stream().filter(a -> a.requestAnnotation.method().name().equals("DELETE")).findAny();
        if (!candidate.isPresent()) {
            throw new RestException(HttpStatus.INTERNAL_SERVER_ERROR.value(), String.format("No delete API found" +
                    " for the path[%s]", req.getRequestURI()));
        }
        api = candidate.get();
        break;
    }
    default:
        throw new RestException(HttpStatus.METHOD_NOT_ALLOWED.value(), String.format("The method[%s] is not allowed for" +
                " the path[%s]", req.getMethod(), req.getRequestURI()));
    }

    parameterName = parameterName == null ? api.requestAnnotation.parameterName() : parameterName;
    handleApi(api, m, parameterName, entity, req, rsp);
}
// Turns an HTTP request into an APIMessage and sends it on the bus:
// authenticates (unless @SuppressCredentialCheck), builds the parameter map
// from the query string (GET/DELETE) or the named body entry (POST/PUT),
// validates boolean fields, applies job-uuid/session/tags, and fills fields
// parsed from URL template variables.
private void handleApi(Api api, Map body, String parameterName, HttpEntity<String> entity, HttpServletRequest req, HttpServletResponse rsp) throws RestException, IllegalAccessException, InstantiationException, InvocationTargetException, NoSuchMethodException, IOException {
    if (body == null) {
        // for some POST request, the body may be null, for example, attach primary storage to a cluster
        body = new HashMap();
    }

    String sessionId = null;
    if (!api.apiClass.isAnnotationPresent(SuppressCredentialCheck.class)) {
        String auth = entity.getHeaders().getFirst("Authorization");
        if (auth == null) {
            throw new RestException(HttpStatus.BAD_REQUEST.value(), "missing header 'Authorization'");
        }

        auth = auth.trim();
        if (!auth.startsWith(RestConstants.HEADER_OAUTH)) {
            throw new RestException(HttpStatus.BAD_REQUEST.value(), String.format("Authorization type must be '%s'", RestConstants.HEADER_OAUTH));
        }

        // the session uuid follows the "OAuth" prefix
        sessionId = auth.replaceFirst("OAuth", "").trim();
    }

    // query APIs use their own parameter convention; handled separately
    if (APIQueryMessage.class.isAssignableFrom(api.apiClass)) {
        handleQueryApi(api, sessionId, req, rsp);
        return;
    }

    Object parameter;
    if (req.getMethod().equals(HttpMethod.GET.toString()) || req.getMethod().equals(HttpMethod.DELETE.toString())) {
        // GET uses query string to pass parameters
        Map<String, Object> m = new HashMap<>();
        Map<String, String[]> queryParameters = req.getParameterMap();
        for (Map.Entry<String, String[]> e : queryParameters.entrySet()) {
            String k = e.getKey();
            String[] vals = e.getValue();

            if (k.contains(".")) {
                // this is a map parameter
                api.mapQueryParameterToApiFieldValue(k, vals, m);
            } else {
                Object val = api.queryParameterToApiFieldValue(k, vals);
                if (val == null) {
                    logger.warn(String.format("unknown query parameter[%s], ignored", k));
                    continue;
                }

                m.put(k, val);
            }
        }

        parameter = m;
    } else {
        // POST/PUT carry the parameters under a named key of the JSON body
        parameter = body.get(parameterName);
    }

    APIMessage msg;
    if (parameter == null) {
        msg = (APIMessage) api.apiClass.newInstance();
    } else {
        // check boolean type parameters
        for (Field f : api.apiClass.getDeclaredFields()) {
            if (f.getType().isAssignableFrom(boolean.class)) {
                Object booleanObject = ((Map) parameter).get(f.getName());
                if (booleanObject == null) {
                    continue;
                }

                String booleanValue = booleanObject.toString();
                // reject anything other than "true"/"false" (case-insensitive)
                if (!(booleanValue.equalsIgnoreCase("true") ||
                        booleanValue.equalsIgnoreCase("false"))) {
                    throw new RestException(HttpStatus.BAD_REQUEST.value(),
                            String.format("Invalid value for boolean field [%s]," +
                                            " [%s] is not a valid boolean string[true, false].",
                                    f.getName(), booleanValue));
                }
            }
        }

        msg = JSONObjectUtil.rehashObject(parameter, (Class<APIMessage>) api.apiClass);
    }

    // the caller may pin the job uuid via header; must be a 32-char uuid without '-'
    if (requestInfo.get().headers.containsKey(RestConstants.HEADER_JOB_UUID)) {
        String jobUuid = requestInfo.get().headers.get(RestConstants.HEADER_JOB_UUID).get(0);
        if (jobUuid.length() != 32) {
            throw new RestException(HttpStatus.BAD_REQUEST.value(), String.format("Invalid header[%s], it" +
                    " must be a UUID with '-' stripped", RestConstants.HEADER_JOB_UUID));
        }

        msg.setId(jobUuid);
    }

    if (sessionId != null) {
        SessionInventory session = new SessionInventory();
        session.setUuid(sessionId);
        msg.setSession(session);
    }

    // system/user tags are only read from the body for methods that carry one
    if (!req.getMethod().equals(HttpMethod.GET.toString()) && !req.getMethod().equals(HttpMethod.DELETE.toString())) {
        Object systemTags = body.get("systemTags");
        if (systemTags != null) {
            msg.setSystemTags((List<String>) systemTags);
        }

        Object userTags = body.get("userTags");
        if (userTags != null) {
            msg.setUserTags((List<String>) userTags);
        }
    }

    Map<String, String> vars = matcher.extractUriTemplateVariables(api.path, getDecodedUrl(req));
    for (Map.Entry<String, String> e : vars.entrySet()) {
        // set fields parsed from the URL
        String key = e.getKey();
        String mappingKey = api.getMappingField(key);
        PropertyUtils.setProperty(msg, mappingKey == null ? key : mappingKey, e.getValue());
    }

    msg.setServiceId(ApiMediatorConstant.SERVICE_ID);
    sendMessage(msg, api, rsp);
}
// Maps the operator literal used in the 'q=' query parameter to the internal
// QueryOp name. handleQueryApi iterates this map in insertion order to detect
// which operator a condition uses, so the order below is significant.
// (diamond operator added; the field was declared with a raw LinkedHashMap)
private static final LinkedHashMap<String, String> QUERY_OP_MAPPING = new LinkedHashMap<>();

static {
    // DO NOT change the order
    // an operator contained by another operator must be placed
    // after the containing operator. For example, "=" is contained
    // by "!=" so it must sit after "!="
    QUERY_OP_MAPPING.put("!=", QueryOp.NOT_EQ.toString());
    QUERY_OP_MAPPING.put(">=", QueryOp.GT_AND_EQ.toString());
    QUERY_OP_MAPPING.put("<=", QueryOp.LT_AND_EQ.toString());
    QUERY_OP_MAPPING.put("!?=", QueryOp.NOT_IN.toString());
    QUERY_OP_MAPPING.put("!~=", QueryOp.NOT_LIKE.toString());
    QUERY_OP_MAPPING.put("~=", QueryOp.LIKE.toString());
    QUERY_OP_MAPPING.put("?=", QueryOp.IN.toString());
    QUERY_OP_MAPPING.put("=", QueryOp.EQ.toString());
    QUERY_OP_MAPPING.put(">", QueryOp.GT.toString());
    QUERY_OP_MAPPING.put("<", QueryOp.LT.toString());
    QUERY_OP_MAPPING.put("is null", QueryOp.IS_NULL.toString());
    QUERY_OP_MAPPING.put("not null", QueryOp.NOT_NULL.toString());
}
/**
 * Builds an APIQueryMessage from the request. GET /resource/{uuid} becomes a
 * single uuid=... condition; otherwise the query string is parsed: limit,
 * start, count, groupBy, replyWithCount, sort (+/- prefix), q (conditions,
 * operator detected via QUERY_OP_MAPPING) and fields.
 */
private void handleQueryApi(Api api, String sessionId, HttpServletRequest req, HttpServletResponse rsp) throws IllegalAccessException, InstantiationException, RestException, IOException, NoSuchMethodException, InvocationTargetException {
    Map<String, String[]> vars = req.getParameterMap();
    APIQueryMessage msg = (APIQueryMessage) api.apiClass.newInstance();

    SessionInventory session = new SessionInventory();
    session.setUuid(sessionId);
    msg.setSession(session);
    msg.setServiceId(ApiMediatorConstant.SERVICE_ID);

    Map<String, String> urlvars = matcher.extractUriTemplateVariables(api.path, getDecodedUrl(req));
    String uuid = urlvars.get("uuid");
    if (uuid != null) {
        // this is a GET /xxxx/uuid
        // return the resource directly
        QueryCondition qc = new QueryCondition();
        qc.setName("uuid");
        qc.setOp("=");
        qc.setValue(uuid);
        msg.getConditions().add(qc);

        sendMessage(msg, api, rsp);
        return;
    }

    // a query with conditions
    for (Map.Entry<String, String[]> e : vars.entrySet()) {
        String varname = e.getKey().trim();
        String varvalue = e.getValue()[0].trim();

        if ("limit".equals(varname)) {
            try {
                msg.setLimit(Integer.valueOf(varvalue));
            } catch (NumberFormatException ex) {
                throw new RestException(HttpStatus.BAD_REQUEST.value(), "Invalid query parameter. 'limit' must be an integer");
            }
        } else if ("start".equals(varname)) {
            try {
                msg.setStart(Integer.valueOf(varvalue));
            } catch (NumberFormatException ex) {
                throw new RestException(HttpStatus.BAD_REQUEST.value(), "Invalid query parameter. 'start' must be an integer");
            }
        } else if ("count".equals(varname)) {
            msg.setCount(Boolean.valueOf(varvalue));
        } else if ("groupBy".equals(varname)) {
            msg.setGroupBy(varvalue);
        } else if ("replyWithCount".equals(varname)) {
            msg.setReplyWithCount(Boolean.valueOf(varvalue));
        } else if ("sort".equals(varname)) {
            // a leading '+'/'-' selects ascending/descending; default is ascending
            if (varvalue.startsWith("+")) {
                msg.setSortDirection("asc");
                varvalue = StringUtils.stripStart(varvalue, "+");
            } else if (varvalue.startsWith("-")) {
                msg.setSortDirection("desc");
                varvalue = StringUtils.stripStart(varvalue, "-");
            } else {
                msg.setSortDirection("asc");
            }

            msg.setSortBy(varvalue);
        } else if ("q".equals(varname)) {
            // BUGFIX: was '"q".startsWith(varname)' — reversed arguments, which
            // also matched an empty parameter name; exact equality is intended
            String[] conds = e.getValue();

            for (String cond : conds) {
                String OP = null;
                String delimiter = null;
                // QUERY_OP_MAPPING is ordered so longer operators win (e.g. "!=" before "=")
                for (String op : QUERY_OP_MAPPING.keySet()) {
                    if (cond.contains(op)) {
                        OP = QUERY_OP_MAPPING.get(op);
                        delimiter = op;
                        break;
                    }
                }

                if (OP == null) {
                    // BUGFIX: the operator list was wrapped in asList(...), which
                    // printed a nested "[[...]]" in the error message
                    throw new RestException(HttpStatus.BAD_REQUEST.value(), String.format("Invalid query parameter." +
                            " The '%s' in the parameter[q] doesn't contain any query operator. Valid query operators are" +
                            " %s", cond, QUERY_OP_MAPPING.keySet()));
                }

                QueryCondition qc = new QueryCondition();
                String[] ks = StringUtils.splitByWholeSeparator(cond, delimiter, 2);
                if (OP.equals(QueryOp.IS_NULL.toString()) || OP.equals(QueryOp.NOT_NULL.toString())) {
                    // unary operators carry no value
                    String cname = ks[0].trim();
                    qc.setName(cname);
                    qc.setOp(OP);
                } else {
                    if (ks.length != 2) {
                        // BUGFIX: report the operator literal (e.g. "!="), not its
                        // internal name (e.g. "NOT_EQ"), in the error message
                        throw new RestException(HttpStatus.BAD_REQUEST.value(), String.format("Invalid query parameter." +
                                " The '%s' in parameter[q] is not a key-value pair split by %s", cond, delimiter));
                    }

                    String cname = ks[0].trim();
                    String cvalue = ks[1]; // don't trim the value, a space is valid in some conditions
                    qc.setName(cname);
                    qc.setOp(OP);
                    qc.setValue(cvalue);
                }

                msg.getConditions().add(qc);
            }
        } else if ("fields".equals(varname)) {
            List<String> fs = new ArrayList<>();
            for (String f : varvalue.split(",")) {
                fs.add(f.trim());
            }

            if (fs.isEmpty()) {
                throw new RestException(HttpStatus.BAD_REQUEST.value(), String.format("Invalid query parameter. 'fields'" +
                        " contains zero field"));
            }

            msg.setFields(fs);
        }
    }

    if (msg.getConditions() == null) {
        // no condition specified, query all
        msg.setConditions(new ArrayList<>());
    }

    sendMessage(msg, api, rsp);
}
/**
 * Handles a path that maps to exactly one API. For action APIs the body key
 * is the action name; otherwise it is @RestRequest.parameterName().
 */
private void handleUniqueApi(Api api, HttpEntity<String> entity, HttpServletRequest req, HttpServletResponse rsp) throws RestException, IllegalAccessException, InstantiationException, InvocationTargetException, NoSuchMethodException, IOException {
    Map body = JSONObjectUtil.toObject(entity.getBody(), LinkedHashMap.class);
    String parameterName = api.requestAnnotation.isAction()
            ? api.actionName
            : api.requestAnnotation.parameterName();
    handleApi(api, body, parameterName, entity, req, rsp);
}
// Populates an ApiResponse from a reply/event according to its @RestResponse
// mapping: either one property dumped under allTo(), or each mapped field.
private void writeResponse(ApiResponse response, RestResponseWrapper w, Object replyOrEvent) throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    if (!w.annotation.allTo().equals("")) {
        // allTo: expose a single property of the reply under the given key
        response.put(w.annotation.allTo(),
                PropertyUtils.getProperty(replyOrEvent, w.annotation.allTo()));
    } else {
        // copy each mapped field: response key -> reply property name
        for (Map.Entry<String, String> e : w.responseMappingFields.entrySet()) {
            response.put(e.getKey(),
                    PropertyUtils.getProperty(replyOrEvent, e.getValue()));
        }
    }

    // TODO: fix hard code hack
    if (APIQueryReply.class.isAssignableFrom(w.apiResponseClass)) {
        Object total = PropertyUtils.getProperty(replyOrEvent, "total");
        if (total != null) {
            response.put("total", total);
        }
    }

    // attach a JSON schema when the client asked for one via header
    if (requestInfo.get().headers.containsKey(RestConstants.HEADER_JSON_SCHEMA)
            // set schema anyway if it's a query API
            || APIQueryReply.class.isAssignableFrom(w.apiResponseClass)) {
        response.setSchema(new JsonSchemaBuilder(response).build());
    }
}
/**
 * Converts a synchronous MessageReply into the HTTP response:
 * 503 with the error on failure, 200 with the mapped fields on success.
 */
private void sendReplyResponse(MessageReply reply, Api api, HttpServletResponse rsp) throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    ApiResponse response = new ApiResponse();

    if (!reply.isSuccess()) {
        response.setError(reply.getError());
        sendResponse(HttpStatus.SERVICE_UNAVAILABLE.value(), JSONObjectUtil.toJsonString(response), rsp);
        return;
    }

    // the api succeeded
    writeResponse(response, responseAnnotationByClass.get(api.apiResponseClass), reply);
    sendResponse(HttpStatus.OK.value(), response, rsp);
}
/**
 * Delivers an APIMessage onto the message bus and writes the HTTP response.
 * Synchronous APIs block on the bus call; asynchronous APIs are persisted to the
 * async store, fired, and answered with 202 Accepted plus a Location header
 * pointing at the async-job polling URL.
 */
private void sendMessage(APIMessage msg, Api api, HttpServletResponse rsp) throws IOException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    if (msg instanceof APISyncCallMessage) {
        // synchronous call: block until the reply arrives, then translate it
        MessageReply reply = bus.call(msg);
        sendReplyResponse(reply, api, rsp);
    } else {
        // asynchronous call: record the request (and optional webhook) so the
        // result can be fetched or pushed later
        RequestData d = new RequestData();
        d.apiMessage = msg;
        d.requestInfo = requestInfo.get();
        List<String> webHook = requestInfo.get().headers.get(RestConstants.HEADER_WEBHOOK);
        if (webHook != null && !webHook.isEmpty()) {
            // only the first webhook header value is honored
            d.webHook = webHook.get(0);
        }
        asyncStore.save(d);
        // Location: <base>/<api version>/<async job path>/<message id>
        UriComponentsBuilder ub = UriComponentsBuilder.fromHttpUrl(restf.getBaseUrl());
        ub.path(RestConstants.API_VERSION);
        ub.path(RestConstants.ASYNC_JOB_PATH);
        ub.path("/" + msg.getId());
        ApiResponse response = new ApiResponse();
        response.setLocation(ub.build().toUriString());
        bus.send(msg);
        sendResponse(HttpStatus.ACCEPTED.value(), response, rsp);
    }
}
/**
 * Component lifecycle startup: scan @RestRequest-annotated API classes and build
 * the routing tables used to dispatch incoming HTTP requests.
 */
@Override
public boolean start() {
    build();
    return true;
}
/**
 * Replaces every {var} placeholder in a URL template with its value from the
 * token map.
 *
 * @param url    URL template containing {name} placeholders
 * @param tokens placeholder name -> replacement value
 * @return the URL with all placeholders substituted
 * @throws CloudRuntimeException if a placeholder has no entry in the map
 */
private String substituteUrl(String url, Map<String, String> tokens) {
    Pattern pattern = Pattern.compile("\\{(.+?)\\}");
    Matcher matcher = pattern.matcher(url);
    StringBuffer buffer = new StringBuffer();
    while (matcher.find()) {
        String varName = matcher.group(1);
        String replacement = tokens.get(varName);
        if (replacement == null) {
            throw new CloudRuntimeException(String.format("cannot find value for URL variable[%s]", varName));
        }
        // quoteReplacement() keeps '$' and '\' in the value from being treated
        // as regex group references (replaces the old workaround of appending
        // an empty replacement and then the raw value)
        matcher.appendReplacement(buffer, Matcher.quoteReplacement(replacement));
    }
    matcher.appendTail(buffer);
    return buffer.toString();
}
/**
 * Extracts the placeholder names, in order of appearance, from a URL template
 * such as /backup-storage/{uuid}/actions.
 */
private List<String> getVarNamesFromUrl(String url) {
    List<String> names = new ArrayList<>();
    Matcher matcher = Pattern.compile("\\{(.+?)\\}").matcher(url);
    while (matcher.find()) {
        names.add(matcher.group(1));
    }
    return names;
}
/**
 * Normalizes a URL template so that templates differing only in variable names
 * compare equal: /backup-storage/{backupStorageUuid}/actions and
 * /backup-storage/{uuid}/actions both become /backup-storage/{0}/actions.
 */
private String normalizePath(String p) {
    List<String> varNames = getVarNamesFromUrl(p);
    if (varNames.isEmpty()) {
        // no placeholders, nothing to normalize
        return p;
    }
    Map<String, String> replacements = new HashMap<>();
    int index = 0;
    for (String varName : varNames) {
        replacements.put(varName, String.format("{%s}", index++));
    }
    return substituteUrl(p, replacements);
}
/**
 * Validates one API class's @RestRequest configuration and appends a
 * human-readable description of the first detected violation (if any) to
 * errorApiList. Checked rules: action APIs must not set parameterName and must
 * use PUT; a custom parameterName is incompatible with PUT/DELETE and with GET.
 */
private void collectRestRequestErrConfigApi(List<String> errorApiList, Class apiClass, RestRequest apiRestRequest) {
    boolean hasCustomParameterName = !RESTConstant.DEFAULT_PARAMETER_NAME.equals(apiRestRequest.parameterName());
    if (apiRestRequest.isAction() && hasCustomParameterName) {
        errorApiList.add(String.format("[%s] RestRequest config error, Setting parameterName is not allowed when isAction set true", apiClass.getName()));
    } else if (apiRestRequest.isAction() && HttpMethod.PUT != apiRestRequest.method()) {
        errorApiList.add(String.format("[%s] RestRequest config error, method can only be set to HttpMethod.PUT when isAction set true", apiClass.getName()));
    } else if (hasCustomParameterName && (HttpMethod.PUT == apiRestRequest.method() || HttpMethod.DELETE == apiRestRequest.method())) {
        errorApiList.add(String.format("[%s] RestRequest config error, method is not allowed to set to HttpMethod.PUT(HttpMethod.DELETE) when parameterName set a value", apiClass.getName()));
    } else if (HttpMethod.GET == apiRestRequest.method() && hasCustomParameterName) {
        errorApiList.add(String.format("[%s] RestRequest config error, Setting parameterName is not allowed when method set HttpMethod.GET", apiClass.getName()));
    }
}
/**
 * Scans all @RestRequest-annotated API message classes and builds the routing
 * tables used at request time:
 *  - 'apis': normalized URL path -> Api (or a List of Api when several APIs
 *    share the same normalized path, e.g. multiple actions on one URL)
 *  - 'responseAnnotationByClass': API response class -> its @RestResponse wrapper
 * Also validates each annotation's configuration and fails fast (RuntimeException)
 * when any API is misconfigured or when two non-action APIs collide on the same
 * method + path, or two actions share an action name.
 */
private void build() {
    Reflections reflections = Platform.getReflections();
    Set<Class<?>> classes = reflections.getTypesAnnotatedWith(RestRequest.class).stream()
            .filter(it -> it.isAnnotationPresent(RestRequest.class)).collect(Collectors.toSet());
    List<String> errorApiList = new ArrayList();
    for (Class clz : classes) {
        RestRequest at = (RestRequest) clz.getAnnotation(RestRequest.class);
        Api api = new Api(clz, at);
        collectRestRequestErrConfigApi(errorApiList, clz, at);
        // an API may be reachable through its primary path plus optional paths
        List<String> paths = new ArrayList<>();
        if (!"null".equals(api.path)) {
            paths.add(api.path);
        }
        paths.addAll(api.optionalPaths);
        for (String path : paths) {
            String normalizedPath = normalizePath(path);
            // a fresh Api per path so each entry carries its own concrete path
            api = new Api(clz, at);
            api.path = path;
            if (!apis.containsKey(normalizedPath)) {
                apis.put(normalizedPath, api);
            } else {
                Object c = apis.get(normalizedPath);
                List lst;
                if (c instanceof Api) {
                    // merge to a list
                    lst = new ArrayList();
                    lst.add(c);
                    apis.put(normalizedPath, lst);
                } else {
                    lst = (List) c;
                }
                lst.add(api);
            }
        }
        responseAnnotationByClass.put(api.apiResponseClass, new RestResponseWrapper(api.responseAnnotation, api.apiResponseClass));
    }
    // fallback wrapper for plain APIEvent responses: empty allTo/fieldsTo mapping
    responseAnnotationByClass.put(APIEvent.class, new RestResponseWrapper(new RestResponse(){
        @Override
        public Class<? extends Annotation> annotationType() {
            return null;
        }
        @Override
        public String allTo() {
            return "";
        }
        @Override
        public String[] fieldsTo() {
            return new String[0];
        }
    }, APIEvent.class));
    if (errorApiList.size() > 0){
        logger.error(String.format("Error Api list : %s", errorApiList));
        throw new RuntimeException(String.format("Error Api list : %s", errorApiList));
    }
    // below codes are checking if there
    // are duplicated APIs
    for (Object o : apis.values()) {
        if (!(o instanceof List)) {
            continue;
        }
        List<Api> as = (List<Api>) o;
        // non-action APIs on the same path must differ in HTTP method/path string
        List<Api> nonActions = as.stream().filter(a -> !a.requestAnnotation.isAction()).collect(Collectors.toList());
        Map<String, Api> set = new HashMap<>();
        for (Api a : nonActions) {
            Api old = set.get(a.toString());
            if (old != null) {
                throw new CloudRuntimeException(String.format("duplicate rest API[%s, %s], they both have the same" +
                        " HTTP methods and paths, and both are not actions. %s", a.apiClass, old.apiClass, a.toString()));
            }
            set.put(a.toString(), a);
        }
        // action APIs on the same path must have distinct action names
        List<Api> actions = as.stream().filter(a -> a.requestAnnotation.isAction()).collect(Collectors.toList());
        set = new HashMap<>();
        for (Api a : actions) {
            Api old = set.get(a.actionName);
            if (old != null) {
                throw new CloudRuntimeException(String.format("duplicate rest API[%s, %s], they are both actions with the" +
                        " same action name[%s]", a.apiClass, old.apiClass, a.actionName));
            }
            set.put(a.actionName, a);
        }
    }
}
/**
 * Component lifecycle teardown; the REST server holds no resources that need
 * explicit cleanup here.
 */
@Override
public boolean stop() {
    return true;
}
} |
package org.xbill.DNS;
import java.io.*;
import java.net.*;
import org.xbill.DNS.utils.*;
/**
 * A simple clone of the java.net.InetAddress class, using dnsjava routines.
 * Name lookups go through the static {@code dns} helper (A records for
 * forward lookups, PTR records for reverse lookups).
 *
 * @author Brian Wellington
 */
public final class Address {

/* Utility class; not instantiable. */
private
Address() {}

/**
 * Convert a string containing an IPv4 address in dotted-quad notation to an
 * array of 4 integers.
 * @param s The string
 * @return The four octet values, or null when the string is not a valid
 *         dotted-quad address (wrong character, octet &gt; 255, wrong number
 *         of octets, empty octet, or more than 3 digits per octet)
 */
public static int []
toArray(String s) {
	int numDigits;
	int currentOctet;
	int [] values = new int[4];
	int length = s.length();

	currentOctet = 0;
	numDigits = 0;
	for (int i = 0; i < length; i++) {
		char c = s.charAt(i);
		if(c >= '0' && c <= '9') {
			/* Can't have more than 3 digits per octet. */
			if (numDigits == 3)
				return null;
			numDigits++;
			values[currentOctet] *= 10;
			values[currentOctet] += (c - '0');
			/* 255 is the maximum value for an octet. */
			if (values[currentOctet] > 255)
				return null;
		} else if (c == '.') {
			/* Can't have more than 3 dots. */
			if (currentOctet == 3)
				return null;
			/* Two consecutive dots are bad. */
			if (numDigits == 0)
				return null;
			currentOctet++;
			numDigits = 0;
		} else
			return null;
	}
	/* Must have 4 octets. */
	if (currentOctet != 3)
		return null;
	/* The fourth octet can't be empty. */
	if (numDigits == 0)
		return null;
	return values;
}

/**
 * Determines if a string contains a valid IPv4 address.
 * @param s The string
 * @return Whether the string contains a valid IP address
 */
public static boolean
isDottedQuad(String s) {
	int [] address = Address.toArray(s);
	return (address != null);
}

/**
 * Converts a byte array containing an IPv4 address into a dotted quad string.
 * @param addr The byte array (must hold at least 4 bytes; only the first 4 are used)
 * @return The string representation
 */
public static String
toDottedQuad(byte [] addr) {
	return ((addr[0] & 0xFF) + "." + (addr[1] & 0xFF) + "." +
		(addr[2] & 0xFF) + "." + (addr[3] & 0xFF));
}

/**
 * Determines the IP address of a host. Dotted-quad input bypasses DNS and is
 * handed straight to InetAddress; otherwise the first A record is used.
 * @param name The hostname to look up
 * @return The first matching IP address
 * @exception UnknownHostException The hostname does not have any addresses
 */
public static InetAddress
getByName(String name) throws UnknownHostException {
	if (isDottedQuad(name))
		return InetAddress.getByName(name);
	Record [] records = dns.getRecords(name, Type.A);
	if (records == null)
		throw new UnknownHostException("unknown host");
	ARecord a = (ARecord) records[0];
	return a.getAddress();
}

/**
 * Determines all IP addresses of a host via its A records.
 * @param name The hostname to look up
 * @return All matching IP addresses
 * @exception UnknownHostException The hostname does not have any addresses
 */
public static InetAddress []
getAllByName(String name) throws UnknownHostException {
	if (isDottedQuad(name))
		return InetAddress.getAllByName(name);
	Record [] records = dns.getRecords(name, Type.A);
	if (records == null)
		throw new UnknownHostException("unknown host");
	InetAddress [] addrs = new InetAddress[records.length];
	for (int i = 0; i < records.length; i++) {
		ARecord a = (ARecord) records[i];
		addrs[i] = a.getAddress();
	}
	return addrs;
}

/**
 * Determines the hostname for an address via a reverse (PTR) lookup; only the
 * first PTR record is consulted.
 * @param addr The address to look up
 * @return The associated host name
 * @exception UnknownHostException There is no hostname for the address
 */
public static String
getHostName(InetAddress addr) throws UnknownHostException {
	Record [] records = dns.getRecordsByAddress(addr.getHostAddress(),
						    Type.PTR);
	if (records == null)
		throw new UnknownHostException("unknown address");
	PTRRecord ptr = (PTRRecord) records[0];
	return ptr.getTarget().toString();
}

}
package ui;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.webapp.WebAppContext;
import org.junit.*;
import org.openqa.selenium.By;
import org.openqa.selenium.NoSuchElementException;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URL;
import static com.codeborne.selenide.Condition.*;
import static com.codeborne.selenide.Selenide.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeNoException;
public class RedPenDemoTest {
private static String redpenServerUrl;
private static Server server;
@BeforeClass
public static void beforeClass() throws Exception {
PhantomJSInstaller.ensureInstalled(System.getProperty("user.home") + File.separator + "phantomjs");
// Run tests in PhantomJS by default if browser property is not set
if (System.getProperty("browser") == null) {
System.setProperty("browser", "phantomjs");
}
Socket socket = new Socket();
for (int port = 8080; port < 65535; port++) {
redpenServerUrl = String.format("http://localhost:%s/", port);
try {
socket.connect(new InetSocketAddress(port), 200);
socket.close();
// something is listening on the port
// ensure that is RedPen
open(redpenServerUrl);
try {
$("#redpen-editor");
}catch(NoSuchElementException ignored){
}
} catch (IOException e) {
// nothing is listening on the port
WebAppContext context = new WebAppContext();
File webapp = new File("redpen-server/src/main/webapp/");
if (!webapp.exists()) {
// working directory is redpen-server
webapp = new File("src/main/webapp/");
}
context.setWar(webapp.getAbsolutePath());
context.setContextPath("/");
server = new Server(port);
server.setHandler(context);
server.start();
break;
}
}
}
@AfterClass
public static void afterClass() throws Exception {
if (server != null) {
server.stop();
}
try {
// ensure phantomjs to quit
WebDriverRunner.getWebDriver().quit();
} catch (IllegalStateException | UnsatisfiedLinkError ignored) {
}
}
@Before
public void loadRedPen() throws IOException {
System.out.println(redpenServerUrl);
try {
new URL(redpenServerUrl).openConnection().connect();
open(redpenServerUrl);
} catch (IllegalStateException e) {
assumeNoException("Please install " + System.getProperty("browser") + " for UI tests to run", e);
}
}
@Test
public void redpenEditorIsPrepopulated() throws Exception {
String value = $("#redpen-editor").getAttribute("class");
assertEquals("redpen-superimposed-editor-panel", value);
}
@Test
public void userCanChooseSampleTexts() throws Exception {
if ($(".navbar-toggle").isDisplayed())
$(".navbar-toggle").click();
$("#themes").click();
$(By.linkText("JAPANESE TEXT")).click();
String value = $("#redpen-editor").shouldBe(visible).val();
assertTrue(value.startsWith(""));
}
@Test
public void userCanClearTheText() throws Exception {
$("[title='Clear text']").click();
Thread.sleep(1000); // probably this can workaround timing issue
$("#redpen-editor").shouldBe(empty);
}
@Test
public void textIsValidatedAsItEntered() throws Exception {
$("#redpen-editor").val("Hello Wodrl");
$("#redpen-errors").shouldHave(text("RedPen found 1 error"));
ElementsCollection errors = $$(".redpen-error-list .redpen-error-message").shouldHaveSize(1);
errors.get(0).shouldHave(text("Found possibly misspelled word \"Wodrl\"."));
errors.get(0).find(".redpen-error-validator").shouldHave(text("Spelling"));
}
@Test
public void validatorsCanBeDisabled() throws Exception {
$("input[type=checkbox][value=Spelling]").click();
$("#redpen-editor").val("Hello Wodrl");
$("#redpen-errors").shouldHave(text("RedPen found 0 errors"));
}
@Test
public void validatorPropertiesCanBeChanged() throws Exception {
SelenideElement validatorProperties = $(".redpen-validator-properties[name=SentenceLength]");
validatorProperties.click();
$(".popover-title").should(appear).shouldHave(text("SentenceLength properties"));
$(".popover-content input[type=text]").shouldHave(value("max_len=120")).val("max_len=10");
$(".popover-content button[type=submit]").click();
validatorProperties.shouldHave(text("max_len=10"));
$("#redpen-editor").val("This is a very long sentence of over ten words.");
$("#redpen-errors").shouldHave(text("RedPen found 2 errors"));
$$(".redpen-error-message").get(0).shouldHave(text("The length of the sentence (47) exceeds the maximum of 10."), text("SentenceLength"));
$$(".redpen-error-message").get(1).shouldHave(text("\"very\" is considered a weak expression."), text("WeakExpression"));
}
} |
import ge.edu.freeuni.sdp.iot.simulator.bath.jaxb.VentSwitchPostRequest;
import ge.edu.freeuni.sdp.iot.simulator.bath.service.BathroomService;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.junit.Test;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.Response;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import static org.junit.Assert.assertEquals;
/**
 * Jersey in-memory container test for the bathroom vent-switch endpoint.
 */
public class VentSwitchTest extends JerseyTest {
    @Override
    protected Application configure() {
        // deploy only the BathroomService resource into the test container
        return new ResourceConfig(BathroomService.class);
    }

    @Test
    public void testAddMeasurement(){
        // build a JSON payload switching the vent ON and POST it to the endpoint
        VentSwitchPostRequest ventSwitchPostRequest = new VentSwitchPostRequest();
        ventSwitchPostRequest.setStatus("ON");
        Entity<VentSwitchPostRequest> switchEntity = Entity.entity(ventSwitchPostRequest,
                MediaType.APPLICATION_JSON);
        Response response = target("bath/vent-switch").request(MediaType.APPLICATION_JSON)
                .post(switchEntity, Response.class);
        // NOTE(review): the status assertion is commented out, so this test only
        // verifies that the POST completes without throwing. If re-enabled, mind
        // JUnit's assertEquals(expected, actual) order — the arguments below are
        // reversed relative to that convention.
        // assertEquals(response.getStatus(), Response.Status.OK.getStatusCode());
    }
}
package risk.data;
import risk.components.Map;
import risk.utils.command.*;
import risk.utils.listeners.PlayerChangedListener;
import risk.utils.listeners.StateChangeListener;
import risk.utils.states.IState;
import risk.utils.states.NewState;
import risk.utils.states.State;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
/**
 * Central game model: loads a .map file into territories/continents, owns the
 * player roster and the game state machine, and drives the initial territory
 * selection phase.
 */
public class Game {
    // territory name -> Territory, built up from the map file commands
    private HashMap<String, Territory> territories;
    // continent name -> Continent
    private HashMap<String, Continent> continents;
    private Player[] players;
    private Map map; // Map reference (risk.components.Map UI component, not java.util.Map)
    private State state = new State(new NewState());
    private ArrayList<PlayerChangedListener> listeners = new ArrayList<>();
    // territories not yet claimed during the selection phase
    private ArrayList<Territory> leftTerritories;
    // counts selection turns; index into 'players' is turn % players.length
    private int turn = 0;
    // the player whose selection is currently awaited
    private Player currentSelector = null;

    /** Creates a game using the default "world.map" map file. */
    public Game() {
        this("world.map");
    }

    /** Creates a game from a map file, reusing the Map component of an existing game. */
    public Game(String mapFile, Game initiator) {
        this(mapFile);
        this.map = initiator.map;
        this.state = new State(new NewState());
    }

    /**
     * 1. load map file and create Map data
     *
     * Parses "src/assets/&lt;mapFile&gt;" command by command (patch-of, capital-of,
     * neighbors-of, continent) and then seats five players.
     */
    public Game(String mapFile) {
        territories = new HashMap<>();
        continents = new HashMap<>();

        // loading the map file
        CommandParser parser = new CommandParser("src/assets/" + mapFile);
        for (Command cmd : parser.getCommands()) {
            try {
                switch (cmd.getCommandName()) {
                    case "patch-of":
                        this.patchOf(new PatchOfCommand(cmd));
                        break;
                    case "capital-of":
                        this.capitalOf(new CapitalOfCommand(cmd));
                        break;
                    case "neighbors-of":
                        this.neighborsOf(new NeighborsOfCommand(cmd));
                        break;
                    case "continent":
                        this.continent(new ContinentCommand(cmd));
                        break;
                    default:
                        System.out.println("invalid command: `" + cmd.original + "`");
                }
            } catch (InvalidCommandException e) {
                // malformed commands are reported and skipped, not fatal
                System.out.println(e);
            }
        }

        // set Players
        // NOTE(review): players[1] is a Person labeled "Computer 2" — looks like
        // either the class or the name is unintended; confirm with game setup.
        this.players = new Player[5];
        this.players[0] = new Computer("Computer 1", new Color(255, 99, 72));
        this.players[1] = new Person("Computer 2", new Color(44, 255, 144));
        this.players[2] = new Computer("Computer 3", new Color(179, 77, 255));
        this.players[3] = new Computer("Computer 4", new Color(255, 210, 90));
        this.players[4] = new Computer("Computer 5", new Color(90, 119, 121));
    }

    /** Prints each player's reinforcement count to stdout (debug helper). */
    public void showReinforcement() {
        for (Player player : this.players) {
            System.out.println(player.getName() + ": " + player.getReinforcementCount(this));
        }
    }

    public void addPlayerChangedListener(PlayerChangedListener listener) {
        this.listeners.add(listener);
    }

    /** Begins the territory-selection phase: all territories up for grabs, state advances. */
    public void selectMap() {
        this.leftTerritories = new ArrayList<>(this.territories.values());
        this.turn = 0;
        this.state.next();
        this.setNextPerson();
    }

    /**
     * Advances the selection phase to the next player. Computer players pick
     * immediately (recursing until a human's turn or no territories remain);
     * for a human the listeners are notified and selection waits for input.
     */
    public void setNextPerson() {
        if (this.leftTerritories.isEmpty()) {
            // all territories claimed: advance the state machine
            this.state.next();
            return;
        }
        this.currentSelector = this.players[this.turn % this.players.length];
        if (this.currentSelector instanceof Computer) {
            Territory territory = this.findTerritory(this.currentSelector.chooseCountry(leftTerritories));
            territory.setPlayer(this.currentSelector, 1);
            this.turn += 1;
            this.map.repaint();
            this.setNextPerson();
        } else {
            this.turn += 1;
            this.updatePlayer(this.currentSelector);
        }
    }

    /** Notifies all registered listeners that the given player changed. */
    private void updatePlayer(Player changedPlayer) {
        for (PlayerChangedListener listener : this.listeners) {
            listener.playerChanged(changedPlayer);
        }
    }

    public void addStateChangeListener(StateChangeListener listener) {
        this.state.addStateChangeListener(listener);
    }

    public Player getCurrentPlayer() {
        return this.currentSelector;
    }

    public void setMap(Map map) {
        this.map = map;
    }

    public Collection<Territory> getTerritories() {
        return this.territories.values();
    }

    /** Applies a patch-of command: attaches a polygon patch to the named territory. */
    private void patchOf(PatchOfCommand cmd) {
        Territory tmp = this.findTerritory(cmd.getCountry());
        tmp.addPatch( new Patch(cmd.getCountry(), cmd.getPoints()) );
        this.territories.put(cmd.getCountry(), tmp);
    }

    /** Applies a capital-of command: records the territory's capital coordinates. */
    private void capitalOf(CapitalOfCommand cmd) {
        Territory tmp = this.findTerritory(cmd.getCountry());
        tmp.setCapital(new Point(cmd.getX(), cmd.getY()));
        this.territories.put(cmd.getCountry(), tmp);
    }

    /** Applies a neighbors-of command: links the territory with each neighbor both ways. */
    private void neighborsOf(NeighborsOfCommand cmd) {
        Territory home = this.findTerritory(cmd.getCountry());
        for (String neighbor : cmd.getNeighbors()) {
            Territory other = this.findTerritory(neighbor);
            home.addNeighbor(other);
            other.addNeighbor(home);
            this.territories.put(neighbor, other);
        }
        this.territories.put(cmd.getCountry(), home);
    }

    /** Applies a continent command: groups member territories with a bonus value. */
    private void continent(ContinentCommand cmd) {
        ArrayList<Territory> territories = new ArrayList<>();
        for (String country : cmd.getCountries()) {
            territories.add(this.findTerritory(country));
        }
        Continent tmp = new Continent(cmd.getContinent(), territories, cmd.getBonus());
        this.continents.put(cmd.getContinent(), tmp);
    }

    public IState getState() {
        return this.state.getState();
    }

    /**
     * Looks up a territory by name. When absent, returns a NEW Territory that
     * is NOT registered in the map — callers are expected to put() it back
     * (as the command handlers above do).
     */
    public Territory findTerritory(String country) {
        if (this.territories.containsKey(country)) {
            return this.territories.get(country);
        } else {
            return new Territory(country);
        }
    }

    public ArrayList<Territory> getLeftTerritories() {
        return this.leftTerritories;
    }

    public void setLeftTerritories(ArrayList<Territory> leftTerritories) {
        this.leftTerritories = leftTerritories;
    }

    /** Re-registers a territory after external mutation. */
    public void updateTerritory(Territory tmp) {
        this.territories.put(tmp.getName(), tmp);
    }

    public Collection<Continent> getContinents() {
        return this.continents.values();
    }
}
package springs;
import masses.Mass;
public class Muscle extends Spring {
private double mAmplitude;
private double mLength;
private int mCount;
public static final int DEFAULT_PERIOD = 60;
public static final double AMPLITUDE_INCREMENT = 0.05;
public Muscle (Mass m1, Mass m2, double amplitude) {
super(m1, m2);
mAmplitude = amplitude;
mLength = super.getLength();
}
public Muscle (Mass m1, Mass m2, double length, double amplitude) {
super(m1, m2, length);
mAmplitude = amplitude;
mLength = length;
}
public Muscle (Mass m1, Mass m2, double length, double k, double amplitude) {
super(m1, m2, length, k);
mAmplitude = amplitude;
mLength = length;
}
@Override
public void move () {
setLength(mLength + mAmplitude
* Math.cos((mCount * Math.PI) / (DEFAULT_PERIOD / 2)));
mCount++;
}
public void increaseAmplitude () {
mAmplitude += AMPLITUDE_INCREMENT;
}
public void decreaseAmplitude () {
mAmplitude -= AMPLITUDE_INCREMENT;
}
} |
package stack;
import java.util.Stack;
//TAG: Google
//TAG: Uber
//TAG: Stack
//Difficulty: Easy
/**
 * 155. Min Stack — push, pop, top, and getMin, each in O(1).
 *
 * Approach: one stack holds all values; a second stack mirrors every value that
 * is a new (or tied) minimum, so its top is always the current minimum. Ties
 * are mirrored too (peek() >= x), which makes duplicate minimums pop correctly.
 * On pop, the min stack is popped whenever the removed value equals its top.
 *
 * top() and getMin() return 0 on an empty stack; pop() on empty is a no-op.
 *
 * Time: O(1) per operation; Space: O(n) for the auxiliary stack.
 *
 * Usage:
 *   MinStack obj = new MinStack();
 *   obj.push(x); obj.pop();
 *   int t = obj.top(); int m = obj.getMin();
 */
public class MinStack {

    private final Stack<Integer> values = new Stack<>();
    private final Stack<Integer> minimums = new Stack<>();

    /** Creates an empty min-stack. */
    public MinStack() {
    }

    /** Pushes x; mirrors it onto the min stack when it is a new or tied minimum. */
    public void push(int x) {
        values.push(x);
        if (minimums.isEmpty() || minimums.peek() >= x) {
            minimums.push(x);
        }
    }

    /** Removes the top element (no-op when empty), keeping the min stack in sync. */
    public void pop() {
        if (values.isEmpty()) {
            return;
        }
        Integer removed = values.pop();
        if (removed.equals(minimums.peek())) {
            minimums.pop();
        }
    }

    /** Returns the top element, or 0 when the stack is empty. */
    public int top() {
        return values.isEmpty() ? 0 : values.peek();
    }

    /** Returns the current minimum, or 0 when the stack is empty. */
    public int getMin() {
        return minimums.isEmpty() ? 0 : minimums.peek();
    }
}
package opendap.semantics.IRISail;
import net.sf.saxon.s9api.SaxonApiException;
import org.jdom.Element;
import org.slf4j.Logger;
import java.util.*;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.MalformedURLException;
import java.net.URLConnection;
import java.util.List;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.query.BindingSet;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.openrdf.sail.inferencer.fc.ForwardChainingRDFSInferencer;
import org.openrdf.sail.memory.MemoryStore;
public class RdfPersistence {
private Logger log;
// the semantic repository this instance keeps in sync
private IRISailRepository owlse2;
private Vector<String> repositoryContexts;
// import URL -> true when the serving host was found down/unusable (skip list)
private HashMap<String, Boolean> downService;
private Vector<String> imports; //list of imported and skipped

/**
 * Creates a persistence helper bound to the given repository; all collections
 * start empty and the logger is keyed to the runtime class.
 */
public RdfPersistence(IRISailRepository repository) {
    log = org.slf4j.LoggerFactory.getLogger(this.getClass());
    owlse2 = repository;
    repositoryContexts = new Vector<String>();
    downService = new HashMap<String, Boolean>();
    imports = new Vector<String>();
}
public void updateSemanticRepository(Vector<String> importURLs)
throws InterruptedException,RepositoryException {
Vector<String> dropList = new Vector<String>();
Vector<String> startingPoints = new Vector<String>();
boolean isNewRepository = true;
Date startTime = new Date();
log.info("
log.info("updateSemanticRepository() Start.");
log.debug(RepositoryUtility.showContexts(owlse2));
RepositoryConnection con = null;
try {
for (String startingPointUrl : importURLs){
startingPoints.add(startingPointUrl); // startingpoint from input file
}
Vector<String> newStartingPoints = null;
Vector<String> startingPointsToDrop = null;
try {
con = owlse2.getConnection();
if (con.isOpen()) {
log.info("Connection is OPEN!");
isNewRepository = RepositoryUtility.isNewRepository(con);
newStartingPoints = RepositoryUtility.findNewStartingPoints(con,startingPoints);
dropList.addAll(findUnneededRDFDocuments(con));
startingPointsToDrop = RepositoryUtility.findChangedStartingPoints(con, startingPoints);
dropList.addAll(startingPointsToDrop);
dropList.addAll(findChangedRDFDocuments(con));
}
} catch (RepositoryException e) {
log.error("Caught RepositoryException updateSemanticRepository(Vector<String> importURLs)" +
e.getMessage());
}
catch (QueryEvaluationException e) {
log.error("Caught QueryEvaluationException updateSemanticRepository(Vector<String> importURLs)" +
e.getMessage());
} catch (MalformedQueryException e) {
log.error("Caught MalformedQueryException updateSemanticRepository(Vector<String> importURLs)" +
e.getMessage());
} finally {
if (con != null)
con.close();
log.info("Connection is Closed!");
}
log.debug(RepositoryUtility.showContexts(owlse2));
if (isNewRepository) {
try {
RepositoryUtility.addInternalStartingPoint(owlse2);
con = owlse2.getConnection();
RepositoryUtility.addStartingPoints(con, owlse2.getValueFactory(), newStartingPoints);
}
finally {
if (con != null)
con.close();
}
log.debug("Updating repository ...");
boolean modelChanged = true;
//if(updateIriRepository()){
while(modelChanged){
log.debug("Repository update complete. Changes detected.");
log.debug("Running construct rules ...");
ingestSwrlRules();
log.debug("Finished running construct rules.");
modelChanged = updateIriRepository();
}
//} else{
// log.debug("Repository update complete. No changes detected, rules not rerun..");
log.debug(RepositoryUtility.showContexts(owlse2));
} else {
if (!dropList.isEmpty()) {
dropList.addAll(findExternalInferencingContexts());
try {
con = owlse2.getConnection();
log.debug("Dropping starting points ...");
RepositoryUtility.dropStartingPoints(con, owlse2.getValueFactory(), startingPointsToDrop);
}
finally {
if (con != null)
con.close();
}
log.debug("Finished dropping starting point.");
log.debug(RepositoryUtility.showContexts(owlse2));
dropContexts(dropList);
log.debug(RepositoryUtility.showContexts(owlse2));
}
if (!newStartingPoints.isEmpty()) {
try {
con = owlse2.getConnection();
log.debug("Adding new starting point ...");
RepositoryUtility.addStartingPoints(con, owlse2.getValueFactory(), newStartingPoints);
log.debug("Finished adding nrew starting point.");
}
finally {
if (con != null)
con.close();
}
log.debug(RepositoryUtility.showContexts(owlse2));
}
log.debug("Updating repository ...");
boolean modelChanged = updateIriRepository();
if(modelChanged || !dropList.isEmpty()) {
log.debug("Running construct rules ...");
ingestSwrlRules();
log.debug("Finished running construct rules.");
modelChanged = updateIriRepository();
while(modelChanged){
log.debug(RepositoryUtility.showContexts(owlse2));
log.debug("Repository update complete. Changes detected.");
log.debug("Running construct rules ...");
ingestSwrlRules();
log.debug("Finished running construct rules.");
log.debug(RepositoryUtility.showContexts(owlse2));
modelChanged = updateIriRepository();
}
} else{
log.debug("Repository update complete. No changes detected, rules not rerun..");
log.debug(RepositoryUtility.showContexts(owlse2));
}
}
} catch (RepositoryException e) {
log.error("Caught RepositoryException in main(): "
+ e.getMessage());
}
long elapsedTime = new Date().getTime() - startTime.getTime();
log.info("Imports Evaluated. Elapsed time: " + elapsedTime + "ms");
log.info("updateSemanticRepository2() End.");
log.info("
}
/*******************************************
 * Update repository
 *
 * Repeatedly ingests RDF documents until the repository reports that no more
 * documents are needed.
 *
 * @return true when at least one ingest pass ran (i.e. the repository changed)
 */
private boolean updateIriRepository() {
    Vector<String> neededDocs = new Vector<String>();
    findNeededRDFDocuments(neededDocs);
    if (neededDocs.isEmpty()) {
        return false;
    }
    do {
        addNeededRDFDocuments(neededDocs);
        findNeededRDFDocuments(neededDocs);
    } while (!neededDocs.isEmpty());
    return true;
}
/*
* Add all rdfcache:RDFDocuments that are needed
*/
// private void addNeededRDFDocuments(RepositoryConnection con) {
private void addNeededRDFDocuments(Vector<String> rdfDocs) {
URI uriaddress;
long inferStartTime, inferEndTime;
inferStartTime = new Date().getTime();
String importURL = "";
RepositoryConnection con = null;
int notimport = 0;
try {
con = owlse2.getConnection();
log.debug("rdfDocs.size=" + rdfDocs.size());
notimport = 0;
while (!rdfDocs.isEmpty()) {
importURL = rdfDocs.remove(0).toString();
log.debug("Checking import URL: " + importURL);
URL myurl = new URL(importURL);
HttpURLConnection hc = (HttpURLConnection) myurl
.openConnection();
String contentType = hc.getContentType();
log.debug("Connected to import URL: " + importURL);
int rsCode = -1;
try {
rsCode = hc.getResponseCode();
} catch (IOException e) {
log.error("Unable to get HTTP status code for " + importURL
+ " Caught IOException! Msg: " + e.getMessage());
}
log.debug("Got HTTP status code: " + rsCode);
if (downService.containsValue(importURL)
&& downService.get(importURL)) {
log.error("Server error, Skip " + importURL);
} else if (rsCode == -1) {
log.error("Unable to get an HTTP status code for resource "
+ importURL + " WILL NOT IMPORT!");
downService.put(importURL, true);
} else if (rsCode > 500) { // server error
if (rsCode == 503) {
log.error("Error 503 Skipping " + importURL);
} else{
log.error("Server Error? Received HTTP Status code "
+ rsCode + " for URL: " + importURL);
}
downService.put(importURL, true);
} else if (rsCode == 304) {
log.info("Not modified " + importURL);
downService.put(importURL, true);
} else if (rsCode == 404) {
log.error("Received HTTP 404 status for resource: "
+ importURL);
downService.put(importURL, true);
} else if (rsCode == 403) {
log.error("Received HTTP 403 status for resource: "
+ importURL);
downService.put(importURL, true);
} else {
log.debug("Import URL appears valid ( " + importURL + " )");
//@todo make this a more robust
String urlsufix = importURL.substring(
(importURL.length() - 4), importURL.length());
if (urlsufix.equals(".owl") || urlsufix.equals(".rdf")) {
uriaddress = new URIImpl(importURL);
URL url;
url = new URL(importURL);
log.info("Importing URL " + url);
con.add(url, importURL, RDFFormat.RDFXML,
(Resource) uriaddress);
owlse2.setLTMODContext(importURL, con); // set last modified
// time of the context
owlse2.setContentTypeContext(importURL,contentType, con);
log.info("Finished importing URL " + url);
} else if (importURL.substring((importURL.length() - 4),
importURL.length()).equals(".xsd")) {
uriaddress = new URIImpl(importURL);
ByteArrayInputStream inStream;
log.info("Transforming URL " + importURL);
inStream = new ByteArrayInputStream(owlse2
.transformXSD(importURL).toByteArray());
log.info("Finished transforming URL " + importURL);
log.debug("Importing URL " + importURL);
con.add(inStream, importURL, RDFFormat.RDFXML,
(Resource) uriaddress);
owlse2.setLTMODContext(importURL, con); // set last modified
// time for the context
owlse2.setContentTypeContext(importURL,contentType, con);
log.debug("Finished importing URL " + importURL);
} else {
//urlc.setRequestProperty("Accept",
// "application/rdf+xml,application/xml,text/xml,*/*");
// urlc.setRequestProperty("Accept",
// "application/rdf+xml, application/xml;
// q=0.9,text/xml; q=0.9, */*; q=0.2");
try {
InputStream inStream = hc.getInputStream();
uriaddress = new URIImpl(importURL);
if (contentType.equalsIgnoreCase("text/xml")||contentType.equalsIgnoreCase("application/xml")
|| contentType.equalsIgnoreCase("application/rdf+xml")) {
con.add(inStream, importURL, RDFFormat.RDFXML,
(Resource) uriaddress);
log.info("Imported xml = " + importURL);
}else{
notimport++;
log.info("Skip " + importURL);
log.info("Total skipped = " + notimport);
}
owlse2.setLTMODContext(importURL, con);
owlse2.setContentTypeContext(importURL,contentType, con);
} catch (IOException e) {
log.error("Caught an IOException! in urlc.getInputStream() Msg: "
+ e.getMessage());
}
}
}
imports.add(importURL); //files touched will not import again
} // while (!rdfDocs.isEmpty()
} catch (IOException e) {
log.error("Caught an IOException! Msg: " + e.getMessage());
} catch (SaxonApiException e) {
log.error("Caught a SaxsonException! Msg: " + e.getMessage());
} catch (RDFParseException e) {
log.error("Caught an RDFParseException! Msg: " + e.getMessage());
} catch (RepositoryException e) {
log.error("Caught an RepositoryException! Msg: " + e.getMessage());
} finally {
try {
imports.add(importURL); //skip this file
con.close();
} catch (RepositoryException e) {
log.error("Caught an RepositoryException! in addNeededRDFDocuments() Msg: "
+ e.getMessage());
}
inferEndTime = new Date().getTime();
double inferTime = (inferEndTime - inferStartTime) / 1000.0;
log.debug("Import takes " + inferTime + " seconds");
}
}
/**
 * Drops every context named in {@code dropList} from the repository, along
 * with its cached last_modified record in the cache context.
 * Repository errors are logged rather than propagated.
 *
 * @param dropList context URIs to remove from the repository.
 */
public void dropContexts(Vector<String> dropList) {
    RepositoryConnection con = null;
    log.debug("Dropping changed RDFDocuments and external inferencing contexts...");
    try {
        con = owlse2.getConnection();
        Thread thread = Thread.currentThread();

        log.info("Deleting contexts in drop list ...");
        ValueFactory f = owlse2.getValueFactory();

        for (String drop : dropList) {
            log.info("Dropping URI: " + drop);
            String pred = RepositoryUtility.internalStartingPoint +"#last_modified";
            String contURL = RepositoryUtility.internalStartingPoint + "#cachecontext";
            URI sbj = f.createURI(drop);
            URI predicate = f.createURI(pred);
            URI cont = f.createURI(contURL);

            log.info("Removing context: " + sbj);
            // Remove both statements *in* the context and statements *about* it.
            con.remove((Resource) null, null, null, (Resource) sbj);
            con.remove((Resource) sbj, null, null);

            log.info("Removing last_modified: " + sbj);
            con.remove(sbj, predicate, null, cont); // remove last_modified

            log.info("Finished removing context: " + sbj);
        }
        if (thread.isInterrupted()) {
            log.warn("dropContexts(): WARNING! Thread "
                    + thread.getName() + " was interrupted!");
            return;
        }
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    }
    finally {
        // Guard: getConnection() may have thrown, leaving con null
        // (the original unconditionally called con.close() and could NPE).
        if (con != null) {
            try {
                con.close();
            } catch (RepositoryException e) {
                log.error("Caught RepositoryException! while closing connection: "
                        + e.getMessage());
            }
        }
    }
    log.debug("Finished dropping changed RDFDocuments and external inferencing contexts.");
}
/**
 * Locate all of the contexts generated by external inferencing (construct
 * rule) activities.
 *
 * A context is considered externally inferred when it is neither the cache
 * context nor the starting-points context and has no last_modified record.
 *
 * @return A list of contexts that were generated by construct rules
 *         (i.e. external inferencing).
 */
private Vector<String> findExternalInferencingContexts() {
    RepositoryConnection con = null;
    TupleQueryResult result = null;
    Vector<String> externalInferencing = new Vector<String>();

    log.debug("Finding ExternalInferencing ...");
    try {
        con = owlse2.getConnection();

        String queryString = "select distinct crule from context crule {} prop {} "
                + "WHERE crule != rdfcache:cachecontext "
                + "AND crule != rdfcache:startingPoints "
                + "AND NOT EXISTS (SELECT time FROM CONTEXT rdfcache:cachecontext "
                + "{crule} rdfcache:last_modified {time}) "
                + "using namespace "
                + "rdfcache = <"+ RepositoryUtility.rdfCacheNamespace+">";

        log.debug("queryString: " + queryString);

        TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL,
                queryString);

        result = tupleQuery.evaluate();

        if (result != null) {
            while (result.hasNext()) {
                BindingSet bindingSet = result.next();
                Value firstValue = bindingSet.getValue("crule");
                if (!externalInferencing.contains(firstValue.stringValue())) {
                    externalInferencing.add(firstValue.stringValue());
                    log.debug("Adding to external inferencing list: " + firstValue.toString());
                }
            }
        } else {
            log.debug("No construct rule found!");
        }
    } catch (QueryEvaluationException e) {
        log.error("Caught an QueryEvaluationException! Msg: "
                + e.getMessage());
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    } catch (MalformedQueryException e) {
        log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
    }
    finally {
        if (result != null) {
            try {
                result.close();
            } catch (QueryEvaluationException e) {
                log.error("Caught a QueryEvaluationException! Msg: "
                        + e.getMessage());
            }
        }
        // Guard: getConnection() may have thrown, leaving con null
        // (the original unconditionally called con.close() and could NPE).
        if (con != null) {
            try {
                con.close();
            } catch (RepositoryException e) {
                log.error("Caught RepositoryException! in dropExternalInferencing() Msg: "
                        + e.getMessage());
            }
        }
    }

    log.info("Located "
            + externalInferencing.size()+ " context generated by external inferencing (construct rules).");

    return externalInferencing;
}
/*
* Find all rdfcache:RDFDocuments that are needed
*/
// private void findNeededRDFDocuments(RepositoryConnection con) {
private void findNeededRDFDocuments(Vector<String> rdfDocs) {
TupleQueryResult result = null;
List<String> bindingNames;
RepositoryConnection con = null;
try {
con = owlse2.getConnection();
String queryString = "(SELECT doc "
+ "FROM {doc} rdf:type {rdfcache:StartingPoint} "
+ "union "
+ "SELECT doc "
+ "FROM {tp} rdf:type {rdfcache:StartingPoint}; rdfcache:dependsOn {doc}) "
+ "MINUS "
+ "SELECT doc "
+ "FROM CONTEXT rdfcache:cachecontext {doc} rdfcache:last_modified {lastmod} "
+ "USING NAMESPACE "
+ "rdfcache = <"+ RepositoryUtility.rdfCacheNamespace+">";
log.debug("Query for NeededRDFDocuments: " + queryString);
TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL,
queryString);
result = tupleQuery.evaluate();
while (result.hasNext()) {
BindingSet bindingSet = result.next();
Value firstValue = bindingSet.getValue("doc");
String doc = firstValue.stringValue();
if (!rdfDocs.contains(doc) && !imports.contains(doc)
&& !downService.containsValue(doc)
&& doc.startsWith("http:
rdfDocs.add(doc);
log.debug("Adding to rdfDocs: " + doc);
}
}
} catch (QueryEvaluationException e) {
log.error("Caught an QueryEvaluationException! Msg: "
+ e.getMessage());
} catch (RepositoryException e) {
log.error("Caught RepositoryException! Msg: " + e.getMessage());
} catch (MalformedQueryException e) {
log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
}
finally {
if (result != null) {
try {
result.close();
} catch (QueryEvaluationException e) {
log.error("Caught a QueryEvaluationException! Msg: "
+ e.getMessage());
}
}
try {
con.close();
} catch (RepositoryException e) {
log.error("Caught a RepositoryException! in findNeededRDFDocuments() Msg: "
+ e.getMessage());
}
}
log.info("Number of needed files identified: "
+ rdfDocs.size());
}
/*
 * Find all rdfcache:RDFDocuments that are not needed and do not belong to
 * rdfcache:StartingPoints and add them to the drop-list.
 *
 * Note: the connection is owned by the caller and is NOT closed here.
 */
private Vector<String> findUnneededRDFDocuments(RepositoryConnection con) {
    TupleQueryResult result = null;
    Vector<String> unneededRdfDocs = new Vector<String>();

    log.debug("Locating unneeded RDF files left over from last update ...");

    try {
        // Cached documents (those with a last_modified record), minus
        // StartingPoints and their dependencies.
        String queryString = "(SELECT doc "
                + "FROM CONTEXT rdfcache:cachecontext "
                + "{doc} rdfcache:last_modified {lmt} "
                + "MINUS "
                + "SELECT doc "
                + "FROM {doc} rdf:type {rdfcache:StartingPoint}) "
                + "MINUS "
                + "SELECT doc "
                + "FROM {tp} rdf:type {rdfcache:StartingPoint}; rdfcache:dependsOn {doc} "
                + "USING NAMESPACE "
                + "rdfcache = <"+ RepositoryUtility.rdfCacheNamespace+">";

        log.debug("queryUnneededRDFDocuments: " + queryString);

        TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL,
                queryString);

        result = tupleQuery.evaluate();

        if (result != null) {
            while (result.hasNext()) {
                BindingSet bindingSet = result.next();
                Value firstValue = bindingSet.getValue("doc");
                if (!unneededRdfDocs.contains(firstValue.stringValue())) {
                    unneededRdfDocs.add(firstValue.stringValue());
                    log.debug("Found unneeded RDF Document: "
                            + firstValue.toString());
                }
            }
        } else {
            log.debug("No query result!");
        }
    } catch (QueryEvaluationException e) {
        log.error("Caught an QueryEvaluationException! Msg: "
                + e.getMessage());
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    } catch (MalformedQueryException e) {
        log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
    }
    finally {
        if (result != null) {
            try {
                result.close();
            } catch (QueryEvaluationException e) {
                log.error("Caught a QueryEvaluationException! Msg: "
                        + e.getMessage());
            }
        }
    }

    log.info("Identified " + unneededRdfDocs.size()+ " unneeded RDF documents.");
    return unneededRdfDocs;
}
/*
 * Find all rdfcache:RDFDocuments that have changed and add them to the
 * drop-list.
 *
 * A document is "changed" when owlse2.olderContext() reports its cached
 * copy is older than the remote resource.
 * Note: the connection is owned by the caller and is NOT closed here.
 */
private Vector<String> findChangedRDFDocuments(RepositoryConnection con) {
    TupleQueryResult result = null;
    Vector<String> changedRdfDocuments = new Vector<String>();

    log.debug("Locating changeded files ...");

    try {
        String queryString = "SELECT doc,lastmod "
                + "FROM CONTEXT rdfcache:cachecontext "
                + "{doc} rdfcache:last_modified {lastmod} "
                + "USING NAMESPACE "
                + "rdfcache = <"+ RepositoryUtility.rdfCacheNamespace+">";

        log.debug("queryChangedRDFDocuments: " + queryString);

        TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL,
                queryString);

        result = tupleQuery.evaluate();

        if (result != null) {
            while (result.hasNext()) {
                BindingSet bindingSet = result.next();
                Value firstValue = bindingSet.getValue("doc");
                String importURL = firstValue.stringValue();

                if (owlse2.olderContext(importURL) && !changedRdfDocuments.contains(importURL)) {
                    changedRdfDocuments.add(importURL);
                    log.debug("Found changed RDF document: " + importURL);
                }
            }
        } else {
            log.debug("No query result!");
        }
    } catch (QueryEvaluationException e) {
        log.error("Caught an QueryEvaluationException! Msg: "
                + e.getMessage());
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    } catch (MalformedQueryException e) {
        log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
    }
    finally {
        if (result != null) {
            try {
                result.close();
            } catch (QueryEvaluationException e) {
                log.error("Caught a QueryEvaluationException! Msg: "
                        + e.getMessage());
            }
        }
    }

    log.info("Number of changed RDF documents detected: "
            + changedRdfDocuments.size());
    return changedRdfDocuments;
}
/**
 * Shuts down the semantic repository. Failures are logged, not propagated.
 */
public void destroy() {
    try {
        log.info("Shutting Down Semantic Repository.");
        owlse2.shutDown();
        log.info("Semantic Repository Has Been Shutdown.");
    } catch (RepositoryException e) {
        // Include the exception message so a failed shutdown can be diagnosed
        // (the original dropped it).
        log.error("destroy(): Failed to shutdown Semantic Repository. Msg: "
                + e.getMessage());
    }
}
/**
 * Builds the list of RDF documents to import from the configuration element.
 *
 * Reads two kinds of children from {@code config}:
 * "dataset" elements (URLs ending in .ddx/.dds/.das are rewritten to the
 * corresponding .rdf URL) and "RdfImport" elements (used verbatim).
 *
 * @param config the configuration element to read.
 * @return the list of RDF document URLs to import.
 */
private Vector<String> getRdfImports(Element config) {
    Vector<String> rdfImports = new Vector<String>();
    Element e;
    String s;

    /**
     * Load individual dataset references
     */
    Iterator i = config.getChildren("dataset").iterator();
    String datasetURL;
    while (i.hasNext()) {
        e = (Element) i.next();
        datasetURL = e.getTextNormalize();
        if (!datasetURL.endsWith(".rdf")) {
            // Use short-circuit || (the original used non-short-circuit |).
            if (datasetURL.endsWith(".ddx") || datasetURL.endsWith(".dds")
                    || datasetURL.endsWith(".das")) {
                datasetURL = datasetURL.substring(0, datasetURL
                        .lastIndexOf("."));
            }
            datasetURL += ".rdf";
        }
        rdfImports.add(datasetURL);
        log.info("Added dataset reference " + datasetURL
                + " to RDF imports list.");
    }

    /**
     * Load RDF Imports
     */
    i = config.getChildren("RdfImport").iterator();
    while (i.hasNext()) {
        e = (Element) i.next();
        s = e.getTextNormalize();
        rdfImports.add(s);
        log.info("Added reference " + s + " to RDF imports list.");
    }

    return rdfImports;
}
/**
 * Enumerates every context currently in the repository, logging each one
 * and recording it in {@code repositoryContexts}.
 *
 * @param con an open repository connection (owned by the caller).
 * @throws RepositoryException When there are problems working with the repository.
 */
private void processContexts(RepositoryConnection con)
        throws RepositoryException {
    // retrieve context
    RepositoryResult<Resource> contextID = con.getContextIDs();
    int contextTol = 0;
    try {
        while (contextID != null && contextID.hasNext()) {
            String ctstr = contextID.next().toString();
            log.info("Context: " + ctstr);
            owlse2.printRDFContext(ctstr);
            repositoryContexts.add(ctstr);
            owlse2.printLTMODContext(ctstr);
            contextTol++;
        }
    } finally {
        // Close in a finally so an exception in the loop cannot leak the
        // result; null-guard matches the loop's own null tolerance.
        if (contextID != null) {
            contextID.close(); // needed to release resources
        }
    }
    log.info("Found " + contextTol + " Contexts");
}
/**
 * Runs the repository's construct (SWRL) rules to materialize inferred
 * statements.
 *
 * @throws RepositoryException When there are problems working with the repository.
 */
private void ingestSwrlRules() throws RepositoryException {
    log.info("Running runConstruct ..");
    owlse2.runConstruct();
    log.info("Complete running runConstruct ..");
}
/**
 * Removes the given starting point URLs from the repository.
 *
 * Opens a connection on {@code repo}, delegates the actual removal to the
 * three-argument overload, and always closes the connection afterwards.
 * Repository errors are logged rather than propagated.
 *
 * @param repo              the repository to operate on.
 * @param startingPointUrls the starting point URLs to drop.
 */
public void dropStartingPoints(SailRepository repo, Vector<String> startingPointUrls) {
    RepositoryConnection connection = null;
    try {
        connection = repo.getConnection();
        dropStartingPoints(connection, repo.getValueFactory(), startingPointUrls);
    }
    catch (RepositoryException e) {
        log.error(e.getClass().getName()+": Failed to open repository connection. Msg: "
                + e.getMessage());
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (RepositoryException e) {
                log.error(e.getClass().getName()+": Failed to close repository connection. Msg: "
                        + e.getMessage());
            }
        }
    }
}
/**
 * Removes each starting point URL from the rdfcache:startingPoints context
 * (i.e. deletes the triple {@code <url> rdf:type rdfcache:StartingPoint}).
 * Errors are logged rather than propagated.
 *
 * @param con               an open repository connection (owned by the caller).
 * @param valueFactory      factory used to build the URIs for the removal.
 * @param startingPointUrls the starting point URLs to drop.
 */
public void dropStartingPoints(RepositoryConnection con, ValueFactory valueFactory, Vector<String> startingPointUrls) {
    String pred = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";

    URI startingPointValue = null;
    URI isa = valueFactory.createURI(pred);
    URI context = valueFactory.createURI(RepositoryUtility.rdfCacheNamespace+"startingPoints");
    URI startingPointType = valueFactory.createURI(RepositoryUtility.rdfCacheNamespace+"StartingPoint");

    try {
        for (String importURL : startingPointUrls) {
            // Validate the URL; a bad URL throws MalformedURLException and is
            // logged below. The URL object itself is not otherwise used.
            new URL(importURL);

            startingPointValue = valueFactory.createURI(importURL);
            con.remove((Resource) startingPointValue, isa, (Value) startingPointType, (Resource) context);

            log.info("Removed starting point " + importURL + " from the repository. (N-Triple: <" + startingPointValue + "> <" + isa
                    + "> " + "<" + startingPointType + "> " + "<" + context + "> )");
        }
    // Fixed: these messages previously said "addStartingPoints" - a
    // copy/paste slip that misattributed errors to the wrong method.
    } catch (RepositoryException e) {
        log.error("In dropStartingPoints, caught an RepositoryException! Msg: "
                + e.getMessage());
    } catch (MalformedURLException e) {
        log.error("In dropStartingPoints, caught an MalformedURLException! Msg: "
                + e.getMessage());
    } catch (IOException e) {
        log.error("In dropStartingPoints, caught an IOException! Msg: "
                + e.getMessage());
    }
}
} |
package opendap.semantics.IRISail;
import org.slf4j.Logger;
import java.util.*;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
public class RdfPersistence {
private static Logger log = org.slf4j.LoggerFactory.getLogger(RdfPersistence.class);
/**
* @param repository The repository on which to operate.
* @param startingPointUrls The list pof starting point URLs from the configuration file (aka "THE starting point")
* @return Returns true if the update results in changes to the repository.
* @throws InterruptedException If the thread of execution is interrupted.
* @throws RepositoryException When there are problems working with the repository.
*/
public static boolean updateSemanticRepository(IRISailRepository repository, Vector<String> startingPointUrls)
throws InterruptedException, RepositoryException {
Vector<String> dropList = new Vector<String>();
Vector<String> startingPoints = new Vector<String>();
boolean isNewRepository = true;
boolean repositoryHasBeenChanged = false;
RdfImporter rdfImporter = new RdfImporter();
Date startTime = new Date();
log.info("
log.info("updateSemanticRepository() Start.");
log.debug(RepositoryUtility.showContexts(repository));
RepositoryConnection con = null;
try {
for (String startingPointUrl : startingPointUrls) {
startingPoints.add(startingPointUrl); // starting point from input file
}
Vector<String> newStartingPoints = new Vector<String>();
Vector<String> startingPointsToDrop = null;
try {
con = repository.getConnection();
if (con.isOpen()) {
log.info("Connection is OPEN!");
newStartingPoints = RepositoryUtility.findNewStartingPoints(con, startingPoints);
dropList.addAll(findUnneededRDFDocuments(con));
startingPointsToDrop = RepositoryUtility.findChangedStartingPoints(con, startingPoints);
dropList.addAll(startingPointsToDrop);
dropList.addAll(findChangedRDFDocuments(con));
}
} catch (RepositoryException e) {
log.error("Caught RepositoryException updateSemanticRepository(Vector<String> startingPointUrls)" +
e.getMessage());
} finally {
if (con != null)
con.close();
log.info("Connection is Closed!");
}
log.debug(RepositoryUtility.showContexts(repository));
boolean modelChanged = false;
if (!dropList.isEmpty()) {
log.debug("Add external inferencing contexts to dropList");
dropList.addAll(findExternalInferencingContexts(repository));
log.debug("Dropping starting points ...");
RepositoryUtility.dropStartingPoints(repository, startingPointsToDrop);
log.debug("Finished dropping starting points.");
log.debug(RepositoryUtility.showContexts(repository));
log.debug("Dropping contexts.");
dropContexts(repository, dropList);
log.debug(RepositoryUtility.showContexts(repository));
modelChanged = true;
}
if (!newStartingPoints.isEmpty()) {
log.debug("Adding new starting point ...");
RepositoryUtility.addStartingPoints(repository, newStartingPoints);
log.debug("Finished adding new starting point.");
log.debug(RepositoryUtility.showContexts(repository));
modelChanged = true;
}
if(rdfImporter.importReferencedRdfDocs(repository)){
modelChanged = true;
}
if (modelChanged) {
log.debug("Updating repository ...");
while (modelChanged) {
log.debug("Repository changes detected.");
log.debug(RepositoryUtility.showContexts(repository));
log.debug("Running construct rules ...");
repository.runConstruct();
log.debug("Finished running construct rules.");
log.debug(RepositoryUtility.showContexts(repository));
modelChanged = rdfImporter.importReferencedRdfDocs(repository);
}
repositoryHasBeenChanged = true;
} else {
log.debug("Repository update complete. No changes detected, rules not rerun..");
log.debug(RepositoryUtility.showContexts(repository));
}
} catch (RepositoryException e) {
log.error("Caught RepositoryException in main(): "
+ e.getMessage());
}
long elapsedTime = new Date().getTime() - startTime.getTime();
log.info("Imports Evaluated. Elapsed time: " + elapsedTime + "ms");
log.info("updateSemanticRepository() End.");
log.info("
return repositoryHasBeenChanged;
}
/**
 * Drops every context listed in {@code dropList} from the repository,
 * together with its cached last_modified record.
 * Errors are logged rather than propagated; the connection is always closed.
 *
 * @param repository the repository to operate on.
 * @param dropList   context URIs to remove.
 */
public static void dropContexts(IRISailRepository repository, Vector<String> dropList) {
    log.debug("Dropping changed RDFDocuments and external inferencing contexts...");
    RepositoryConnection con = null;
    try {
        con = repository.getConnection();

        log.info("Deleting contexts in drop list ...");
        ValueFactory f = repository.getValueFactory();

        // These two URIs are identical for every dropped context, so build
        // them once outside the loop.
        URI lastModified = f.createURI(RepositoryUtility.internalStartingPoint + "#last_modified");
        URI cacheContext = f.createURI(RepositoryUtility.internalStartingPoint + "#cachecontext");

        for (String contextUrl : dropList) {
            log.info("Dropping URI: " + contextUrl);
            URI sbj = f.createURI(contextUrl);

            log.info("Removing context: " + sbj);
            con.remove((Resource) null, null, null, (Resource) sbj);
            con.remove(sbj, null, null);

            log.info("Removing last_modified: " + sbj);
            con.remove(sbj, lastModified, null, cacheContext); // remove last_modified

            log.info("Finished removing context: " + sbj);
        }

        Thread current = Thread.currentThread();
        if (current.isInterrupted()) {
            log.warn("dropContexts(): WARNING! Thread "
                    + current.getName() + " was interrupted!");
            return;
        }
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    }
    finally {
        try {
            if (con != null)
                con.close();
        } catch (RepositoryException e) {
            log.error("Caught RepositoryException! while closing connection: "
                    + e.getMessage());
        }
    }
    log.debug("Finished dropping changed RDFDocuments and external inferencing contexts.");
}
/**
 * Locate all of the contexts generated by external inferencing (construct
 * rule) activities.
 *
 * A context is considered externally inferred when it is neither the cache
 * context nor the starting-points context and has no last_modified record.
 *
 * @param repository The repository to operate on.
 * @return A list of contexts that were generated by construct rules
 *         (i.e. external inferencing).
 */
private static Vector<String> findExternalInferencingContexts(IRISailRepository repository) {
    RepositoryConnection con = null;
    TupleQueryResult result = null;
    Vector<String> externalInferencing = new Vector<String>();

    log.debug("Finding ExternalInferencing ...");
    try {
        con = repository.getConnection();

        String queryString = "select distinct crule from context crule {} prop {} "
                + "WHERE crule != rdfcache:cachecontext "
                + "AND crule != rdfcache:startingPoints "
                + "AND NOT EXISTS (SELECT time FROM CONTEXT rdfcache:cachecontext "
                + "{crule} rdfcache:last_modified {time}) "
                + "using namespace "
                + "rdfcache = <" + RepositoryUtility.rdfCacheNamespace + ">";

        log.debug("queryString: " + queryString);

        TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL,
                queryString);

        result = tupleQuery.evaluate();

        if (result != null) {
            while (result.hasNext()) {
                BindingSet bindingSet = result.next();
                Value firstValue = bindingSet.getValue("crule");
                if (!externalInferencing.contains(firstValue.stringValue())) {
                    externalInferencing.add(firstValue.stringValue());
                    log.debug("Adding to external inferencing list: " + firstValue.toString());
                }
            }
        } else {
            log.debug("No construct rule found!");
        }
    } catch (QueryEvaluationException e) {
        log.error("Caught an QueryEvaluationException! Msg: "
                + e.getMessage());
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    } catch (MalformedQueryException e) {
        log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
    }
    finally {
        if (result != null) {
            try {
                result.close();
            } catch (QueryEvaluationException e) {
                log.error("Caught a QueryEvaluationException! Msg: "
                        + e.getMessage());
            }
        }
        // Guard: getConnection() may have thrown, leaving con null
        // (the original unconditionally called con.close() and could NPE).
        if (con != null) {
            try {
                con.close();
            } catch (RepositoryException e) {
                log.error("Caught RepositoryException! in dropExternalInferencing() Msg: "
                        + e.getMessage());
            }
        }
    }

    log.info("Located "
            + externalInferencing.size() + " context generated by external inferencing (construct rules).");

    return externalInferencing;
}
/*
 * Find all rdfcache:RDFDocuments that are not needed and do not belong to
 * rdfcache:StartingPoints and add them to the drop-list.
 *
 * Note: the connection is owned by the caller and is NOT closed here.
 */
private static Vector<String> findUnneededRDFDocuments(RepositoryConnection con) {
    TupleQueryResult queryResult = null;
    Vector<String> unneededRdfDocs = new Vector<String>();

    log.debug("Locating unneeded RDF files left over from last update ...");

    try {
        // Cached documents (those with a last_modified record), minus
        // StartingPoints and their dependencies.
        String queryString = "(SELECT doc "
                + "FROM CONTEXT rdfcache:cachecontext "
                + "{doc} rdfcache:last_modified {lmt} "
                + "MINUS "
                + "SELECT doc "
                + "FROM {doc} rdf:type {rdfcache:StartingPoint}) "
                + "MINUS "
                + "SELECT doc "
                + "FROM {tp} rdf:type {rdfcache:StartingPoint}; rdfcache:dependsOn {doc} "
                + "USING NAMESPACE "
                + "rdfcache = <" + RepositoryUtility.rdfCacheNamespace + ">";

        log.debug("queryUnneededRDFDocuments: " + queryString);

        TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL, queryString);
        queryResult = tupleQuery.evaluate();

        if (queryResult == null) {
            log.debug("No query result!");
        } else {
            while (queryResult.hasNext()) {
                Value docValue = queryResult.next().getValue("doc");
                String doc = docValue.stringValue();
                if (!unneededRdfDocs.contains(doc)) {
                    unneededRdfDocs.add(doc);
                    log.debug("Found unneeded RDF Document: "
                            + docValue.toString());
                }
            }
        }
    } catch (QueryEvaluationException e) {
        log.error("Caught an QueryEvaluationException! Msg: "
                + e.getMessage());
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    } catch (MalformedQueryException e) {
        log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
    }
    finally {
        if (queryResult != null) {
            try {
                queryResult.close();
            } catch (QueryEvaluationException e) {
                log.error("Caught a QueryEvaluationException! Msg: "
                        + e.getMessage());
            }
        }
    }

    log.info("Identified " + unneededRdfDocs.size() + " unneeded RDF documents.");
    return unneededRdfDocs;
}
/*
 * Find all rdfcache:RDFDocuments that have changed and add them to the
 * drop-list.
 *
 * A document is "changed" when RepositoryUtility.olderContext() reports its
 * cached copy is older than the remote resource.
 * Note: the connection is owned by the caller and is NOT closed here.
 */
private static Vector<String> findChangedRDFDocuments(RepositoryConnection con) {
    TupleQueryResult queryResult = null;
    Vector<String> changedRdfDocuments = new Vector<String>();

    log.debug("Locating changeded files ...");

    try {
        String queryString = "SELECT doc,lastmod "
                + "FROM CONTEXT rdfcache:cachecontext "
                + "{doc} rdfcache:last_modified {lastmod} "
                + "USING NAMESPACE "
                + "rdfcache = <" + RepositoryUtility.rdfCacheNamespace + ">";

        log.debug("queryChangedRDFDocuments: " + queryString);

        TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL, queryString);
        queryResult = tupleQuery.evaluate();

        if (queryResult == null) {
            log.debug("No query result!");
        } else {
            while (queryResult.hasNext()) {
                String importURL = queryResult.next().getValue("doc").stringValue();

                if (RepositoryUtility.olderContext(con, importURL)
                        && !changedRdfDocuments.contains(importURL)) {
                    changedRdfDocuments.add(importURL);
                    log.debug("Found changed RDF document: " + importURL);
                }
            }
        }
    } catch (QueryEvaluationException e) {
        log.error("Caught an QueryEvaluationException! Msg: "
                + e.getMessage());
    } catch (RepositoryException e) {
        log.error("Caught RepositoryException! Msg: " + e.getMessage());
    } catch (MalformedQueryException e) {
        log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
    }
    finally {
        if (queryResult != null) {
            try {
                queryResult.close();
            } catch (QueryEvaluationException e) {
                log.error("Caught a QueryEvaluationException! Msg: "
                        + e.getMessage());
            }
        }
    }

    log.info("Number of changed RDF documents detected: "
            + changedRdfDocuments.size());
    return changedRdfDocuments;
}
} |
package org.apache.fop.svg;
import org.apache.fop.pdf.*;
import org.apache.fop.fonts.*;
import org.apache.fop.render.pdf.FontSetup;
import org.apache.fop.layout.*;
import org.apache.fop.apps.FOPException;
import java.awt.Graphics;
import java.awt.Font;
import java.awt.Image;
import java.awt.Color;
import java.io.OutputStream;
import java.io.IOException;
import org.apache.batik.ext.awt.g2d.GraphicContext;
/**
* This class is a wrapper for the <tt>PDFGraphics2D</tt> that
* is used to create a full document around the pdf rendering from
* <tt>PDFGraphics2D</tt>.
*
* @author <a href="mailto:keiron@aftexsw.com">Keiron Liddle</a>
* @version $Id$
* @see org.apache.fop.svg.PDFGraphics2D
*/
public class PDFDocumentGraphics2D extends PDFGraphics2D {
OutputStream stream;
PDFStream pdfStream;
int width;
int height;
/**
* Create a new PDFDocumentGraphics2D.
* This is used to create a new pdf document of the given height
* and width.
* The resulting document is written to the stream after rendering.
*
* @param textAsShapes set this to true so that text will be rendered
* using curves and not the font.
* @param stream the stream that the final document should be written to.
* @param width the width of the document
* @param height the height of the document
*/
public PDFDocumentGraphics2D(boolean textAsShapes,
OutputStream stream, int width, int height) {
super(textAsShapes);
if(!textAsShapes) {
FontInfo fontInfo = new FontInfo();
FontSetup.setup(fontInfo);
try {
fontState = new FontState(fontInfo, "Helvetica", "normal", "normal", 12, 0);
} catch(FOPException e) {
}
}
standalone = true;
this.stream = stream;
this.pdfDoc = new PDFDocument();
this.pdfDoc.setProducer("FOP SVG Renderer");
pdfStream = this.pdfDoc.makeStream();
this.width = width;
this.height = height;
currentFontName = "";
currentFontSize = 0;
currentYPosition = 0;
currentXPosition = 0;
// fontState = fs;
currentStream.write("1 0 0 -1 0 " + height + " cm\n");
// end part
/*
FontSetup.addToResources(this.pdfDoc, fontInfo);
*/
}
/**
* Set the dimensions of the svg document that will be drawn.
* This is useful if the dimensions of the svg document are different
* from the pdf document that is to be created.
* The result is scaled so that the svg fits correctly inside the pdf document.
*/
public void setSVGDimension(float w, float h) {
currentStream.write("" + PDFNumber.doubleOut(width / w) + " 0 0 " + PDFNumber.doubleOut(height / h) + " 0 0 cm\n");
}
/**
* Set the background of the pdf document.
* This is used to set the background for the pdf document
* Rather than leaving it as the default white.
*/
public void setBackgroundColor(Color col) {
Color c = col;
currentColour = new PDFColor(c.getRed(), c.getGreen(), c.getBlue());
currentStream.write("q\n");
currentStream.write(currentColour.getColorSpaceOut(true));
currentStream.write("0 0 " + width + " " + height + " re\n");
currentStream.write("f\n");
currentStream.write("Q\n");
}
/**
* The rendering process has finished.
* This should be called after the rendering has completed as there is
* no other indication it is complete.
* This will then write the results to the output stream.
*/
public void finish() throws IOException {
pdfStream.add(getString());
PDFResources pdfResources = this.pdfDoc.getResources();
PDFPage currentPage =
this.pdfDoc.makePage(pdfResources, pdfStream, width,
height, null);
this.pdfDoc.output(stream);
}
public void setGraphicContext(GraphicContext c) {
gc = c;
}
/**
* This constructor supports the create method
*/
public PDFDocumentGraphics2D(PDFDocumentGraphics2D g) {
super(g);
}
/**
 * Creates a new <code>Graphics</code> object that is
 * a copy of this <code>Graphics</code> object.
 * @return a new graphics context that is a copy of
 * this graphics context.
 */
public Graphics create() {
// Delegate to the copy constructor so all pdf drawing state is carried over.
return new PDFDocumentGraphics2D(this);
}
}
package org.apache.xerces.impl.xpath.regex;
import java.util.Vector;
import java.util.Hashtable;
/**
* This class represents a node in parse tree.
*
* @version $Id$
*/
class Token implements java.io.Serializable {
static final boolean COUNTTOKENS = true;
static int tokens = 0;
static final int CHAR = 0; // Literal char
static final int DOT = 11;
static final int CONCAT = 1;
static final int UNION = 2; // X|Y|Z
static final int CLOSURE = 3;
static final int RANGE = 4; // [a-zA-Z] etc.
static final int NRANGE = 5; // [^a-zA-Z] etc.
static final int PAREN = 6; // (X) or (?:X)
static final int EMPTY = 7;
static final int ANCHOR = 8; // ^ $ \b \B \< \> \A \Z \z
static final int NONGREEDYCLOSURE = 9;
static final int STRING = 10; // strings
static final int BACKREFERENCE = 12; // back references
static final int LOOKAHEAD = 20;
static final int NEGATIVELOOKAHEAD = 21;
static final int LOOKBEHIND = 22;
static final int NEGATIVELOOKBEHIND = 23;
static final int INDEPENDENT = 24;
static final int MODIFIERGROUP = 25; // (?ims-ims:...)
static final int CONDITION = 26; // (?(...)yes|no)
static final int UTF16_MAX = 0x10ffff;
int type;
static Token token_dot;
static Token token_0to9;
static Token token_wordchars;
static Token token_not_0to9;
static Token token_not_wordchars;
static Token token_spaces;
static Token token_not_spaces;
static Token token_empty;
static Token token_linebeginning;
static Token token_linebeginning2;
static Token token_lineend;
static Token token_stringbeginning;
static Token token_stringend;
static Token token_stringend2;
static Token token_wordedge;
static Token token_not_wordedge;
static Token token_wordbeginning;
static Token token_wordend;
static {
// Shared singleton tokens, built once at class-load time.
Token.token_empty = new Token(Token.EMPTY);
// Anchors: the character identifies the anchor kind (see ANCHOR in toString()).
Token.token_linebeginning = Token.createAnchor('^');
Token.token_linebeginning2 = Token.createAnchor('@');
Token.token_lineend = Token.createAnchor('$');
Token.token_stringbeginning = Token.createAnchor('A');
Token.token_stringend = Token.createAnchor('z');
Token.token_stringend2 = Token.createAnchor('Z');
Token.token_wordedge = Token.createAnchor('b');
Token.token_not_wordedge = Token.createAnchor('B');
Token.token_wordbeginning = Token.createAnchor('<');
Token.token_wordend = Token.createAnchor('>');
Token.token_dot = new Token(Token.DOT);
// \d : [0-9]
Token.token_0to9 = Token.createRange();
Token.token_0to9.addRange('0', '9');
// \w : [0-9A-Z_a-z]
Token.token_wordchars = Token.createRange();
Token.token_wordchars.addRange('0', '9');
Token.token_wordchars.addRange('A', 'Z');
Token.token_wordchars.addRange('_', '_');
Token.token_wordchars.addRange('a', 'z');
// \s : [\t\n\f\r ]
Token.token_spaces = Token.createRange();
Token.token_spaces.addRange('\t', '\t');
Token.token_spaces.addRange('\n', '\n');
Token.token_spaces.addRange('\f', '\f');
Token.token_spaces.addRange('\r', '\r');
Token.token_spaces.addRange(' ', ' ');
// \D, \W, \S are the complements of the ranges above.
Token.token_not_0to9 = Token.complementRanges(Token.token_0to9);
Token.token_not_wordchars = Token.complementRanges(Token.token_wordchars);
Token.token_not_spaces = Token.complementRanges(Token.token_spaces);
}
// ---- Factory methods. When COUNTTOKENS is on, each counts the token
// ---- it creates in the static `tokens` counter.
// Look-around node: type is one of LOOKAHEAD/NEGATIVELOOKAHEAD/
// LOOKBEHIND/NEGATIVELOOKBEHIND/INDEPENDENT.
static Token.ParenToken createLook(int type, Token child) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.ParenToken(type, child, 0);
}
// Capturing (pnumber > 0) or non-capturing (pnumber == 0) group around child.
static Token.ParenToken createParen(Token child, int pnumber) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.ParenToken(Token.PAREN, child, pnumber);
}
// Greedy repetition X* (min/max set later via setMin/setMax).
static Token.ClosureToken createClosure(Token tok) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.ClosureToken(Token.CLOSURE, tok);
}
// Non-greedy repetition X*?.
static Token.ClosureToken createNGClosure(Token tok) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.ClosureToken(Token.NONGREEDYCLOSURE, tok);
}
// Binary concatenation node XY.
static Token.ConcatToken createConcat(Token tok1, Token tok2) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.ConcatToken(tok1, tok2);
}
// N-ary concatenation; reuses UnionToken storage with CONCAT semantics.
static Token.UnionToken createConcat() {
if (COUNTTOKENS) Token.tokens ++;
return new Token.UnionToken(Token.CONCAT);
}
// Alternation node X|Y|Z.
static Token.UnionToken createUnion() {
if (COUNTTOKENS) Token.tokens ++;
return new Token.UnionToken(Token.UNION);
}
// The shared zero-width EMPTY singleton (not counted; never allocated here).
static Token createEmpty() {
return Token.token_empty;
}
// Positive character class [ ... ].
static RangeToken createRange() {
if (COUNTTOKENS) Token.tokens ++;
return new RangeToken(Token.RANGE);
}
// Negated character class [^ ... ].
static RangeToken createNRange() {
if (COUNTTOKENS) Token.tokens ++;
return new RangeToken(Token.NRANGE);
}
// Single literal character.
static Token.CharToken createChar(int ch) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.CharToken(Token.CHAR, ch);
}
// Anchor node; private — callers use the shared singletons built in
// the static initializer.
static private Token.CharToken createAnchor(int ch) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.CharToken(Token.ANCHOR, ch);
}
// Back reference \refno (string payload unused).
static Token.StringToken createBackReference(int refno) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.StringToken(Token.BACKREFERENCE, null, refno);
}
// Literal string node.
static Token.StringToken createString(String str) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.StringToken(Token.STRING, str, 0);
}
// (?ims-ims:X): `add` turns options on, `mask` turns options off.
static Token.ModifierToken createModifierGroup(Token child, int add, int mask) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.ModifierToken(child, add, mask);
}
// Conditional (?(refno/condition)yespat|nopat).
static Token.ConditionToken createCondition(int refno, Token condition,
Token yespat, Token nopat) {
if (COUNTTOKENS) Token.tokens ++;
return new Token.ConditionToken(refno, condition, yespat, nopat);
}
/**
 * Creates a token of the given node type (one of the constants above).
 */
protected Token(int type) {
this.type = type;
}
/**
 * A number of children.
 */
int size() {
return 0;
}
// Child accessor; leaf tokens have none. Composite subclasses override.
Token getChild(int index) {
return null;
}
// The mutators below throw by default; only the relevant subclasses
// (UnionToken, RangeToken, ClosureToken, ...) override them.
void addChild(Token tok) {
throw new RuntimeException("Not supported.");
}
// for RANGE or NRANGE
protected void addRange(int start, int end) {
throw new RuntimeException("Not supported.");
}
protected void sortRanges() {
throw new RuntimeException("Not supported.");
}
protected void compactRanges() {
throw new RuntimeException("Not supported.");
}
protected void mergeRanges(Token tok) {
throw new RuntimeException("Not supported.");
}
protected void subtractRanges(Token tok) {
throw new RuntimeException("Not supported.");
}
protected void intersectRanges(Token tok) {
throw new RuntimeException("Not supported.");
}
// Returns a range token matching every character `tok` does not.
static Token complementRanges(Token tok) {
return RangeToken.complementRanges(tok);
}
void setMin(int min) { // for CLOSURE
}
void setMax(int max) { // for CLOSURE
}
int getMin() { // for CLOSURE
return -1;
}
int getMax() { // for CLOSURE
return -1;
}
int getReferenceNumber() { // for STRING
return 0;
}
String getString() { // for STRING
return null;
}
int getParenNumber() {
return 0;
}
int getChar() {
return -1;
}
public String toString() {
return this.toString(0);
}
// Regex-syntax rendering; subclasses override. The base class can only
// render DOT; everything else prints as the empty string.
public String toString(int options) {
return this.type == Token.DOT ? "." : "";
}
/**
 * How many characters are needed?
 * Returns the minimum number of characters this token can match.
 */
final int getMinLength() {
switch (this.type) {
case CONCAT:
// Concatenation: sum over all children.
int sum = 0;
for (int i = 0; i < this.size(); i ++)
sum += this.getChild(i).getMinLength();
return sum;
case CONDITION:
case UNION:
// Alternation: the shortest alternative wins.
if (this.size() == 0)
return 0;
int ret = this.getChild(0).getMinLength();
for (int i = 1; i < this.size(); i ++) {
int min = this.getChild(i).getMinLength();
if (min < ret) ret = min;
}
return ret;
case CLOSURE:
case NONGREEDYCLOSURE:
// X{min,...}: min repetitions of the child; a negative min (X*) means 0.
if (this.getMin() >= 0)
return this.getMin() * this.getChild(0).getMinLength();
return 0;
case EMPTY:
case ANCHOR:
// Zero-width constructs.
return 0;
case DOT:
case CHAR:
case RANGE:
case NRANGE:
return 1;
case INDEPENDENT:
case PAREN:
case MODIFIERGROUP:
// Transparent wrappers around a single child.
return this.getChild(0).getMinLength();
case BACKREFERENCE:
// A back reference may match the empty string.
return 0;
case STRING:
return this.getString().length();
case LOOKAHEAD:
case NEGATIVELOOKAHEAD:
case LOOKBEHIND:
case NEGATIVELOOKBEHIND:
// Zero-width assertions.
return 0;
default:
throw new RuntimeException("Token#getMinLength(): Invalid Type: "+this.type);
}
}
// Maximum number of characters this token can match; -1 means unbounded.
final int getMaxLength() {
switch (this.type) {
case CONCAT:
int sum = 0;
for (int i = 0; i < this.size(); i ++) {
int d = this.getChild(i).getMaxLength();
if (d < 0) return -1; // any unbounded child makes the concat unbounded
sum += d;
}
return sum;
case CONDITION:
case UNION:
// Alternation: the longest alternative wins; -1 (unbounded) is absorbing.
if (this.size() == 0)
return 0;
int ret = this.getChild(0).getMaxLength();
for (int i = 1; ret >= 0 && i < this.size(); i ++) {
int max = this.getChild(i).getMaxLength();
if (max < 0) { // infinity
ret = -1;
break;
}
if (max > ret) ret = max;
}
return ret;
case CLOSURE:
case NONGREEDYCLOSURE:
if (this.getMax() >= 0)
// When this.child.getMaxLength() < 0,
// this returns minus value
return this.getMax() * this.getChild(0).getMaxLength();
return -1;
case EMPTY:
case ANCHOR:
return 0;
case CHAR:
return 1;
case DOT:
case RANGE:
case NRANGE:
// 2, not 1: a matched "character" may occupy two UTF-16 code units
// (surrogate pair) — presumably; TODO confirm against the matcher.
return 2;
case INDEPENDENT:
case PAREN:
case MODIFIERGROUP:
return this.getChild(0).getMaxLength();
case BACKREFERENCE:
// The referenced group's length is unknown at analysis time.
return -1;
case STRING:
return this.getString().length();
case LOOKAHEAD:
case NEGATIVELOOKAHEAD:
case LOOKBEHIND:
case NEGATIVELOOKBEHIND:
// Zero-width assertions.
return 0;
default:
throw new RuntimeException("Token#getMaxLength(): Invalid Type: "+this.type);
}
}
static final int FC_CONTINUE = 0;
static final int FC_TERMINAL = 1;
static final int FC_ANY = 2;
/**
 * Tests whether every bit of <code>flag</code> is present in
 * <code>options</code>.
 */
private static final boolean isSet(int options, int flag) {
    // All bits of the flag must survive the mask for it to count as set.
    int masked = options & flag;
    return masked == flag;
}
/**
 * Accumulates into <code>result</code> the set of characters that can
 * begin a match of this token.
 * Returns FC_TERMINAL when this token fully determines the first
 * character, FC_CONTINUE when the following token must also be consulted
 * (this token may match empty), and FC_ANY when any character may start
 * a match.
 */
final int analyzeFirstCharacter(RangeToken result, int options) {
switch (this.type) {
case CONCAT:
// Walk children until one pins down (or widens to ANY) the first char.
int ret = FC_CONTINUE;
for (int i = 0; i < this.size(); i ++)
if ((ret = this.getChild(i).analyzeFirstCharacter(result, options)) != FC_CONTINUE)
break;
return ret;
case UNION:
if (this.size() == 0)
return FC_CONTINUE;
/*
 * a|b|c -> FC_TERMINAL
 * a|.|c -> FC_ANY
 * a|b| -> FC_CONTINUE
 */
int ret2 = FC_CONTINUE;
boolean hasEmpty = false;
for (int i = 0; i < this.size(); i ++) {
ret2 = this.getChild(i).analyzeFirstCharacter(result, options);
if (ret2 == FC_ANY)
break;
else if (ret2 == FC_CONTINUE)
hasEmpty = true;
}
return hasEmpty ? FC_CONTINUE : ret2;
case CONDITION:
// Combine the yes-pattern and (if present) the no-pattern.
int ret3 = this.getChild(0).analyzeFirstCharacter(result, options);
if (this.size() == 1) return FC_CONTINUE;
if (ret3 == FC_ANY) return ret3;
int ret4 = this.getChild(1).analyzeFirstCharacter(result, options);
if (ret4 == FC_ANY) return ret4;
return ret3 == FC_CONTINUE || ret4 == FC_CONTINUE ? FC_CONTINUE : FC_TERMINAL;
case CLOSURE:
case NONGREEDYCLOSURE:
// X* may match empty, so the following token always matters.
this.getChild(0).analyzeFirstCharacter(result, options);
return FC_CONTINUE;
case EMPTY:
case ANCHOR:
return FC_CONTINUE;
case CHAR:
// Add the literal char; with IGNORE_CASE also add both case foldings
// (BMP characters only).
int ch = this.getChar();
result.addRange(ch, ch);
if (ch < 0x10000 && isSet(options, RegularExpression.IGNORE_CASE)) {
ch = Character.toUpperCase((char)ch);
result.addRange(ch, ch);
ch = Character.toLowerCase((char)ch);
result.addRange(ch, ch);
}
return FC_TERMINAL;
case DOT:
// NOTE(review): both branches return FC_CONTINUE; the precise
// range computation is commented out below.
if (isSet(options, RegularExpression.SINGLE_LINE)) {
return FC_CONTINUE;
} else {
return FC_CONTINUE;
/*
result.addRange(0, RegularExpression.LINE_FEED-1);
result.addRange(RegularExpression.LINE_FEED+1, RegularExpression.CARRIAGE_RETURN-1);
result.addRange(RegularExpression.CARRIAGE_RETURN+1,
RegularExpression.LINE_SEPARATOR-1);
result.addRange(RegularExpression.PARAGRAPH_SEPARATOR+1, UTF16_MAX);
return 1;
*/
}
case RANGE:
if (isSet(options, RegularExpression.IGNORE_CASE)) {
result.mergeRanges(((RangeToken)this).getCaseInsensitiveToken());
} else {
result.mergeRanges(this);
}
return FC_TERMINAL;
case NRANGE:
// Negated class: merge the complement of the range set.
if (isSet(options, RegularExpression.IGNORE_CASE)) {
result.mergeRanges(Token.complementRanges(((RangeToken)this).getCaseInsensitiveToken()));
} else {
result.mergeRanges(Token.complementRanges(this));
}
return FC_TERMINAL;
case INDEPENDENT:
case PAREN:
return this.getChild(0).analyzeFirstCharacter(result, options);
case MODIFIERGROUP:
// (?ims-ims:X): apply the group's option changes before recursing.
options |= ((ModifierToken)this).getOptions();
options &= ~((ModifierToken)this).getOptionsMask();
return this.getChild(0).analyzeFirstCharacter(result, options);
case BACKREFERENCE:
// The referenced text is unknown here, so any character may start it.
result.addRange(0, UTF16_MAX);
return FC_ANY;
case STRING:
// First character of the literal; compose a supplementary code point
// from a leading surrogate pair if present.
int cha = this.getString().charAt(0);
int ch2;
if (REUtil.isHighSurrogate(cha)
&& this.getString().length() >= 2
&& REUtil.isLowSurrogate((ch2 = this.getString().charAt(1))))
cha = REUtil.composeFromSurrogates(cha, ch2);
result.addRange(cha, cha);
if (cha < 0x10000 && isSet(options, RegularExpression.IGNORE_CASE)) {
cha = Character.toUpperCase((char)cha);
result.addRange(cha, cha);
cha = Character.toLowerCase((char)cha);
result.addRange(cha, cha);
}
return FC_TERMINAL;
case LOOKAHEAD:
case NEGATIVELOOKAHEAD:
case LOOKBEHIND:
case NEGATIVELOOKBEHIND:
// Zero-width assertions do not consume a character.
return FC_CONTINUE;
default:
throw new RuntimeException("Token#analyzeHeadCharacter(): Invalid Type: "+this.type);
}
}
/**
 * Compares the literal length of this STRING token against another
 * STRING token's literal.
 *
 * @param tok the token to compare against; may be null
 * @return true if this token's string is strictly shorter than tok's
 * @throws RuntimeException if either token is not a STRING token
 */
private final boolean isShorterThan(Token tok) {
    if (tok == null) return false;
    // Validate this token first, matching the original evaluation order.
    if (this.type != STRING)
        throw new RuntimeException("Internal Error: Illegal type: "+this.type);
    int myLength = this.getString().length();
    if (tok.type != STRING)
        throw new RuntimeException("Internal Error: Illegal type: "+tok.type);
    int otherLength = tok.getString().length();
    return myLength < otherLength;
}
/**
 * Result holder for findFixedString(): the literal STRING token found
 * (or null) together with the modifier options in effect at that point.
 */
static class FixedStringContainer {
Token token = null;
int options = 0;
FixedStringContainer() {
}
}
/**
 * Searches this subtree for a literal STRING token that every match must
 * contain, leaving the result (or null) in <code>container</code>.
 * Presumably used to pre-filter candidate match positions — confirm
 * against the caller in RegularExpression.
 */
final void findFixedString(FixedStringContainer container, int options) {
switch (this.type) {
case CONCAT:
// Keep the longest fixed string found among the children.
Token prevToken = null;
int prevOptions = 0;
for (int i = 0; i < this.size(); i ++) {
this.getChild(i).findFixedString(container, options);
if (prevToken == null || prevToken.isShorterThan(container.token)) {
prevToken = container.token;
prevOptions = container.options;
}
}
container.token = prevToken;
container.options = prevOptions;
return;
case UNION:
case CLOSURE:
case NONGREEDYCLOSURE:
case EMPTY:
case ANCHOR:
case RANGE:
case DOT:
case NRANGE:
case BACKREFERENCE:
case LOOKAHEAD:
case NEGATIVELOOKAHEAD:
case LOOKBEHIND:
case NEGATIVELOOKBEHIND:
case CONDITION:
// These constructs do not guarantee any fixed literal.
container.token = null;
return;
case CHAR: // Ignore CHAR tokens.
container.token = null;
return;
case STRING:
container.token = this;
container.options = options;
return;
case INDEPENDENT:
case PAREN:
// Transparent wrappers: recurse into the single child.
this.getChild(0).findFixedString(container, options);
return;
case MODIFIERGROUP:
// Apply the group's option changes before recursing.
options |= ((ModifierToken)this).getOptions();
options &= ~((ModifierToken)this).getOptionsMask();
this.getChild(0).findFixedString(container, options);
return;
default:
throw new RuntimeException("Token#findFixedString(): Invalid Type: "+this.type);
}
}
// Single-character match test. Only subclasses that represent character
// matchers override this; the base implementation is an internal-error guard.
boolean match(int ch) {
throw new RuntimeException("NFAArrow#match(): Internal error: "+this.type);
}
private final static Hashtable categories = new Hashtable();
private final static Hashtable categories2 = new Hashtable();
private static final String[] categoryNames = {
"Cn", "Lu", "Ll", "Lt", "Lm", "Lo", "Mn", "Me", "Mc", "Nd",
"Nl", "No", "Zs", "Zl", "Zp", "Cc", "Cf", null, "Co", "Cs",
"Pd", "Ps", "Pe", "Pc", "Po", "Sm", "Sc", "Sk", "So",
"Pi", "Pf", // 29, 30
"L", "M", "N", "Z", "C", "P", "S", // 31-37
};
// Schema Rec. {Datatypes} - Punctuation
static final int CHAR_INIT_QUOTE = 29; // Pi - initial quote
static final int CHAR_FINAL_QUOTE = 30; // Pf - final quote
static final int CHAR_LETTER = 31;
static final int CHAR_MARK = 32;
static final int CHAR_NUMBER = 33;
static final int CHAR_SEPARATOR = 34;
static final int CHAR_OTHER = 35;
static final int CHAR_PUNCTUATION = 36;
static final int CHAR_SYMBOL = 37;
//blockNames in UNICODE 3.1 that supported by XML Schema REC
private static final String[] blockNames = {
/*0000..007F;*/ "Basic Latin",
/*0080..00FF;*/ "Latin-1 Supplement",
/*0100..017F;*/ "Latin Extended-A",
/*0180..024F;*/ "Latin Extended-B",
/*0250..02AF;*/ "IPA Extensions",
/*02B0..02FF;*/ "Spacing Modifier Letters",
/*0300..036F;*/ "Combining Diacritical Marks",
/*0370..03FF;*/ "Greek",
/*0400..04FF;*/ "Cyrillic",
/*0530..058F;*/ "Armenian",
/*0590..05FF;*/ "Hebrew",
/*0600..06FF;*/ "Arabic",
/*0700..074F;*/ "Syriac",
/*0780..07BF;*/ "Thaana",
/*0900..097F;*/ "Devanagari",
/*0980..09FF;*/ "Bengali",
/*0A00..0A7F;*/ "Gurmukhi",
/*0A80..0AFF;*/ "Gujarati",
/*0B00..0B7F;*/ "Oriya",
/*0B80..0BFF;*/ "Tamil",
/*0C00..0C7F;*/ "Telugu",
/*0C80..0CFF;*/ "Kannada",
/*0D00..0D7F;*/ "Malayalam",
/*0D80..0DFF;*/ "Sinhala",
/*0E00..0E7F;*/ "Thai",
/*0E80..0EFF;*/ "Lao",
/*0F00..0FFF;*/ "Tibetan",
/*1000..109F;*/ "Myanmar",
/*10A0..10FF;*/ "Georgian",
/*1100..11FF;*/ "Hangul Jamo",
/*1200..137F;*/ "Ethiopic",
/*13A0..13FF;*/ "Cherokee",
/*1400..167F;*/ "Unified Canadian Aboriginal Syllabics",
/*1680..169F;*/ "Ogham",
/*16A0..16FF;*/ "Runic",
/*1780..17FF;*/ "Khmer",
/*1800..18AF;*/ "Mongolian",
/*1E00..1EFF;*/ "Latin Extended Additional",
/*1F00..1FFF;*/ "Greek Extended",
/*2000..206F;*/ "General Punctuation",
/*2070..209F;*/ "Superscripts and Subscripts",
/*20A0..20CF;*/ "Currency Symbols",
/*20D0..20FF;*/ "Combining Marks for Symbols",
/*2100..214F;*/ "Letterlike Symbols",
/*2150..218F;*/ "Number Forms",
/*2190..21FF;*/ "Arrows",
/*2200..22FF;*/ "Mathematical Operators",
/*2300..23FF;*/ "Miscellaneous Technical",
/*2400..243F;*/ "Control Pictures",
/*2440..245F;*/ "Optical Character Recognition",
/*2460..24FF;*/ "Enclosed Alphanumerics",
/*2500..257F;*/ "Box Drawing",
/*2580..259F;*/ "Block Elements",
/*25A0..25FF;*/ "Geometric Shapes",
/*2600..26FF;*/ "Miscellaneous Symbols",
/*2700..27BF;*/ "Dingbats",
/*2800..28FF;*/ "Braille Patterns",
/*2E80..2EFF;*/ "CJK Radicals Supplement",
/*2F00..2FDF;*/ "Kangxi Radicals",
/*2FF0..2FFF;*/ "Ideographic Description Characters",
/*3000..303F;*/ "CJK Symbols and Punctuation",
/*3040..309F;*/ "Hiragana",
/*30A0..30FF;*/ "Katakana",
/*3100..312F;*/ "Bopomofo",
/*3130..318F;*/ "Hangul Compatibility Jamo",
/*3190..319F;*/ "Kanbun",
/*31A0..31BF;*/ "Bopomofo Extended",
/*3200..32FF;*/ "Enclosed CJK Letters and Months",
/*3300..33FF;*/ "CJK Compatibility",
/*3400..4DB5;*/ "CJK Unified Ideographs Extension A",
/*4E00..9FFF;*/ "CJK Unified Ideographs",
/*A000..A48F;*/ "Yi Syllables",
/*A490..A4CF;*/ "Yi Radicals",
/*AC00..D7A3;*/ "Hangul Syllables",
/*E000..F8FF;*/ "Private Use",
/*F900..FAFF;*/ "CJK Compatibility Ideographs",
/*FB00..FB4F;*/ "Alphabetic Presentation Forms",
/*FB50..FDFF;*/ "Arabic Presentation Forms-A",
/*FE20..FE2F;*/ "Combining Half Marks",
/*FE30..FE4F;*/ "CJK Compatibility Forms",
/*FE50..FE6F;*/ "Small Form Variants",
/*FE70..FEFE;*/ "Arabic Presentation Forms-B",
/*FEFF..FEFF;*/ "Specials",
/*FF00..FFEF;*/ "Halfwidth and Fullwidth Forms",
//missing Specials add manually
/*10300..1032F;*/ "Old Italic", // 84
/*10330..1034F;*/ "Gothic",
/*10400..1044F;*/ "Deseret",
/*1D000..1D0FF;*/ "Byzantine Musical Symbols",
/*1D100..1D1FF;*/ "Musical Symbols",
/*1D400..1D7FF;*/ "Mathematical Alphanumeric Symbols",
/*20000..2A6D6;*/ "CJK Unified Ideographs Extension B",
/*2F800..2FA1F;*/ "CJK Compatibility Ideographs Supplement",
/*E0000..E007F;*/ "Tags",
//missing 2 private use add manually
};
//ADD THOSE MANUALLY
//F0000..FFFFD; "Private Use",
//100000..10FFFD; "Private Use"
//FFF0..FFFD; "Specials",
static final String blockRanges =
"\u0000\u007F\u0080\u00FF\u0100\u017F\u0180\u024F\u0250\u02AF\u02B0\u02FF\u0300\u036F"
+"\u0370\u03FF\u0400\u04FF\u0530\u058F\u0590\u05FF\u0600\u06FF\u0700\u074F\u0780\u07BF"
+"\u0900\u097F\u0980\u09FF\u0A00\u0A7F\u0A80\u0AFF\u0B00\u0B7F\u0B80\u0BFF\u0C00\u0C7F\u0C80\u0CFF"
+"\u0D00\u0D7F\u0D80\u0DFF\u0E00\u0E7F\u0E80\u0EFF\u0F00\u0FFF\u1000\u109F\u10A0\u10FF\u1100\u11FF"
+"\u1200\u137F\u13A0\u13FF\u1400\u167F\u1680\u169F\u16A0\u16FF\u1780\u17FF\u1800\u18AF\u1E00\u1EFF"
+"\u1F00\u1FFF\u2000\u206F\u2070\u209F\u20A0\u20CF\u20D0\u20FF\u2100\u214F\u2150\u218F\u2190\u21FF\u2200\u22FF"
+"\u2300\u23FF\u2400\u243F\u2440\u245F\u2460\u24FF\u2500\u257F\u2580\u259F\u25A0\u25FF\u2600\u26FF\u2700\u27BF"
+"\u2800\u28FF\u2E80\u2EFF\u2F00\u2FDF\u2FF0\u2FFF\u3000\u303F\u3040\u309F\u30A0\u30FF\u3100\u312F\u3130\u318F"
+"\u3190\u319F\u31A0\u31BF\u3200\u32FF\u3300\u33FF\u3400\u4DB5\u4E00\u9FFF\uA000\uA48F\uA490\uA4CF"
+"\uAC00\uD7A3\uE000\uF8FF\uF900\uFAFF\uFB00\uFB4F\uFB50\uFDFF"
+"\uFE20\uFE2F\uFE30\uFE4F\uFE50\uFE6F\uFE70\uFEFE\uFEFF\uFEFF\uFF00\uFFEF";
static final int[] nonBMPBlockRanges = {
0x10300, 0x1032F,
0x10330, 0x1034F,
0x10400, 0x1044F,
0x1D000, 0x1D0FF,
0x1D100, 0x1D1FF,
0x1D400, 0x1D7FF,
0x20000, 0x2A6D6,
0x2F800, 0x2FA1F,
0xE0000, 0xE007F
};
private static final int NONBMP_BLOCK_START = 84;
/**
 * Returns the RangeToken for a named character class (Unicode general
 * category, block name, or POSIX-style alias), building the whole name
 * table lazily on first use. <code>positive</code> selects the range
 * itself (categories) or its complement (categories2).
 *
 * NOTE(review): the size()==0 check happens outside the synchronized
 * block and entries are put() incrementally inside it, so another thread
 * could observe a partially populated table — confirm all callers
 * initialize on a single thread.
 */
static protected RangeToken getRange(String name, boolean positive) {
if (Token.categories.size() == 0) {
synchronized (Token.categories) {
// One range per general-category index (plus the aggregate groups).
Token[] ranges = new Token[Token.categoryNames.length];
for (int i = 0; i < ranges.length; i ++) {
ranges[i] = Token.createRange();
}
int type;
// Classify every BMP character: add it under its specific category,
// then (after the switch remaps `type`) under its major group.
for (int i = 0; i < 0x10000; i ++) {
type = Character.getType((char)i);
if (type == Character.START_PUNCTUATION ||
type == Character.END_PUNCTUATION) {
//build table of Pi values
if (i == 0x00AB || i == 0x2018 || i == 0x201B || i == 0x201C ||
i == 0x201F || i == 0x2039) {
type = CHAR_INIT_QUOTE;
}
//build table of Pf values
if (i == 0x00BB || i == 0x2019 || i == 0x201D || i == 0x203A ) {
type = CHAR_FINAL_QUOTE;
}
}
ranges[type].addRange(i, i);
switch (type) {
case Character.UPPERCASE_LETTER:
case Character.LOWERCASE_LETTER:
case Character.TITLECASE_LETTER:
case Character.MODIFIER_LETTER:
case Character.OTHER_LETTER:
type = CHAR_LETTER;
break;
case Character.NON_SPACING_MARK:
case Character.COMBINING_SPACING_MARK:
case Character.ENCLOSING_MARK:
type = CHAR_MARK;
break;
case Character.DECIMAL_DIGIT_NUMBER:
case Character.LETTER_NUMBER:
case Character.OTHER_NUMBER:
type = CHAR_NUMBER;
break;
case Character.SPACE_SEPARATOR:
case Character.LINE_SEPARATOR:
case Character.PARAGRAPH_SEPARATOR:
type = CHAR_SEPARATOR;
break;
case Character.CONTROL:
case Character.FORMAT:
case Character.SURROGATE:
case Character.PRIVATE_USE:
case Character.UNASSIGNED:
type = CHAR_OTHER;
break;
case Character.CONNECTOR_PUNCTUATION:
case Character.DASH_PUNCTUATION:
case Character.START_PUNCTUATION:
case Character.END_PUNCTUATION:
case CHAR_INIT_QUOTE:
case CHAR_FINAL_QUOTE:
case Character.OTHER_PUNCTUATION:
type = CHAR_PUNCTUATION;
break;
case Character.MATH_SYMBOL:
case Character.CURRENCY_SYMBOL:
case Character.MODIFIER_SYMBOL:
case Character.OTHER_SYMBOL:
type = CHAR_SYMBOL;
break;
default:
throw new RuntimeException("org.apache.xerces.utils.regex.Token#getRange(): Unknown Unicode category: "+type);
}
ranges[type].addRange(i, i);
} // for all characters
// NOTE(review): the supplementary plane is added to Cn here AND again
// in the loop below when i == Character.UNASSIGNED — presumably a
// harmless duplicate; verify RangeToken tolerates overlapping adds.
ranges[Character.UNASSIGNED].addRange(0x10000, Token.UTF16_MAX);
for (int i = 0; i < ranges.length; i ++) {
if (Token.categoryNames[i] != null) {
if (i == Character.UNASSIGNED) { // Unassigned
ranges[i].addRange(0x10000, Token.UTF16_MAX);
}
Token.categories.put(Token.categoryNames[i], ranges[i]);
Token.categories2.put(Token.categoryNames[i],
Token.complementRanges(ranges[i]));
}
}
//REVISIT: do we really need to support block names as in Unicode 3.1
// or we can just create all the names in IsBLOCKNAME format (XML Schema REC)?
// Register every Unicode block name, both verbatim and as the
// space-stripped "IsBlockName" alias used by XML Schema.
StringBuffer buffer = new StringBuffer(50);
for (int i = 0; i < Token.blockNames.length; i ++) {
Token r1 = Token.createRange();
int location;
if (i < NONBMP_BLOCK_START) {
// BMP blocks: bounds are packed pairwise into blockRanges.
location = i*2;
int rstart = Token.blockRanges.charAt(location);
int rend = Token.blockRanges.charAt(location+1);
//DEBUGING
//System.out.println(n+" " +Integer.toHexString(rstart)
// +"-"+ Integer.toHexString(rend));
r1.addRange(rstart, rend);
} else {
// Supplementary-plane blocks come from nonBMPBlockRanges.
location = (i - NONBMP_BLOCK_START) * 2;
r1.addRange(Token.nonBMPBlockRanges[location],
Token.nonBMPBlockRanges[location + 1]);
}
String n = Token.blockNames[i];
// Pieces that could not be expressed in the packed tables.
if (n.equals("Specials"))
r1.addRange(0xfff0, 0xfffd);
if (n.equals("Private Use")) {
r1.addRange(0xF0000,0xFFFFD);
r1.addRange(0x100000,0x10FFFD);
}
Token.categories.put(n, r1);
Token.categories2.put(n, Token.complementRanges(r1));
buffer.setLength(0);
buffer.append("Is");
if (n.indexOf(' ') >= 0) {
// Strip spaces: "Basic Latin" -> "IsBasicLatin".
for (int ci = 0; ci < n.length(); ci ++)
if (n.charAt(ci) != ' ') buffer.append((char)n.charAt(ci));
}
else {
buffer.append(n);
}
Token.setAlias(buffer.toString(), n, true);
}
// TR#18 1.2
Token.setAlias("ASSIGNED", "Cn", false);
Token.setAlias("UNASSIGNED", "Cn", true);
Token all = Token.createRange();
all.addRange(0, Token.UTF16_MAX);
Token.categories.put("ALL", all);
Token.categories2.put("ALL", Token.complementRanges(all));
Token.registerNonXS("ASSIGNED");
Token.registerNonXS("UNASSIGNED");
Token.registerNonXS("ALL");
// POSIX-style classes (non-XML-Schema extensions).
Token isalpha = Token.createRange();
isalpha.mergeRanges(ranges[Character.UPPERCASE_LETTER]);
isalpha.mergeRanges(ranges[Character.LOWERCASE_LETTER]);
isalpha.mergeRanges(ranges[Character.OTHER_LETTER]);
Token.categories.put("IsAlpha", isalpha);
Token.categories2.put("IsAlpha", Token.complementRanges(isalpha));
Token.registerNonXS("IsAlpha");
Token isalnum = Token.createRange();
isalnum.mergeRanges(isalpha); // Lu Ll Lo
isalnum.mergeRanges(ranges[Character.DECIMAL_DIGIT_NUMBER]);
Token.categories.put("IsAlnum", isalnum);
Token.categories2.put("IsAlnum", Token.complementRanges(isalnum));
Token.registerNonXS("IsAlnum");
Token isspace = Token.createRange();
isspace.mergeRanges(Token.token_spaces);
isspace.mergeRanges(ranges[CHAR_SEPARATOR]);
Token.categories.put("IsSpace", isspace);
Token.categories2.put("IsSpace", Token.complementRanges(isspace));
Token.registerNonXS("IsSpace");
Token isword = Token.createRange();
isword.mergeRanges(isalnum); // Lu Ll Lo Nd
isword.addRange('_', '_');
Token.categories.put("IsWord", isword);
Token.categories2.put("IsWord", Token.complementRanges(isword));
Token.registerNonXS("IsWord");
Token isascii = Token.createRange();
isascii.addRange(0, 127);
Token.categories.put("IsASCII", isascii);
Token.categories2.put("IsASCII", Token.complementRanges(isascii));
Token.registerNonXS("IsASCII");
// IsGraph/IsXDigit are defined via their complements, so the
// positive table holds the complemented range.
Token isnotgraph = Token.createRange();
isnotgraph.mergeRanges(ranges[CHAR_OTHER]);
isnotgraph.addRange(' ', ' ');
Token.categories.put("IsGraph", Token.complementRanges(isnotgraph));
Token.categories2.put("IsGraph", isnotgraph);
Token.registerNonXS("IsGraph");
Token isxdigit = Token.createRange();
isxdigit.addRange('0', '9');
isxdigit.addRange('A', 'F');
isxdigit.addRange('a', 'f');
Token.categories.put("IsXDigit", Token.complementRanges(isxdigit));
Token.categories2.put("IsXDigit", isxdigit);
Token.registerNonXS("IsXDigit");
Token.setAlias("IsDigit", "Nd", true);
Token.setAlias("IsUpper", "Lu", true);
Token.setAlias("IsLower", "Ll", true);
Token.setAlias("IsCntrl", "C", true);
Token.setAlias("IsPrint", "C", false);
Token.setAlias("IsPunct", "P", true);
Token.registerNonXS("IsDigit");
Token.registerNonXS("IsUpper");
Token.registerNonXS("IsLower");
Token.registerNonXS("IsCntrl");
Token.registerNonXS("IsPrint");
Token.registerNonXS("IsPunct");
// Lower-case POSIX names map to the Is* classes above.
Token.setAlias("alpha", "IsAlpha", true);
Token.setAlias("alnum", "IsAlnum", true);
Token.setAlias("ascii", "IsASCII", true);
Token.setAlias("cntrl", "IsCntrl", true);
Token.setAlias("digit", "IsDigit", true);
Token.setAlias("graph", "IsGraph", true);
Token.setAlias("lower", "IsLower", true);
Token.setAlias("print", "IsPrint", true);
Token.setAlias("punct", "IsPunct", true);
Token.setAlias("space", "IsSpace", true);
Token.setAlias("upper", "IsUpper", true);
Token.setAlias("word", "IsWord", true); // Perl extension
Token.setAlias("xdigit", "IsXDigit", true);
Token.registerNonXS("alpha");
Token.registerNonXS("alnum");
Token.registerNonXS("ascii");
Token.registerNonXS("cntrl");
Token.registerNonXS("digit");
Token.registerNonXS("graph");
Token.registerNonXS("lower");
Token.registerNonXS("print");
Token.registerNonXS("punct");
Token.registerNonXS("space");
Token.registerNonXS("upper");
Token.registerNonXS("word");
Token.registerNonXS("xdigit");
} // synchronized
} // if null
RangeToken tok = positive ? (RangeToken)Token.categories.get(name)
: (RangeToken)Token.categories2.get(name);
//if (tok == null) System.out.println(name);
return tok;
}
/**
 * Looks up a named range, optionally restricted to XML Schema mode.
 * When <code>xs</code> is true, names registered as non-XML-Schema
 * extensions are treated as unknown and null is returned.
 */
static protected RangeToken getRange(String name, boolean positive, boolean xs) {
    RangeToken found = Token.getRange(name, positive);
    if (found == null)
        return null;
    // Hide non-schema category names when schema mode is requested.
    return (xs && Token.isRegisterNonXS(name)) ? null : found;
}
// Names of categories/aliases that are extensions not defined by
// XML Schema; lazily created by registerNonXS().
static Hashtable nonxs = null;
/**
 * Records a category/alias name as a non-XML-Schema extension.
 * This method is called by only getRange().
 * So this method need not MT-safe.
 */
static protected void registerNonXS(String name) {
if (Token.nonxs == null)
Token.nonxs = new Hashtable();
Token.nonxs.put(name, name);
}
/**
 * Tells whether the given name was registered via registerNonXS() as a
 * non-XML-Schema extension. A null table means nothing was registered.
 */
static protected boolean isRegisterNonXS(String name) {
    return Token.nonxs != null && Token.nonxs.containsKey(name);
}
/**
 * Registers <code>newName</code> as an alias of an existing category:
 * the positive and complemented range tables are copied straight for a
 * positive alias, or crossed for a negative one.
 */
private static void setAlias(String newName, String name, boolean positive) {
    Token positiveRange = (Token)Token.categories.get(name);
    Token complementRange = (Token)Token.categories2.get(name);
    if (positive) {
        Token.categories.put(newName, positiveRange);
        Token.categories2.put(newName, complementRange);
    } else {
        // A negative alias swaps the two tables.
        Token.categories.put(newName, complementRange);
        Token.categories2.put(newName, positiveRange);
    }
}
static final String viramaString =
"\u094D"// ;DEVANAGARI SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u09CD"//;BENGALI SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0A4D"//;GURMUKHI SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0ACD"//;GUJARATI SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0B4D"//;ORIYA SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0BCD"//;TAMIL SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0C4D"//;TELUGU SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0CCD"//;KANNADA SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0D4D"//;MALAYALAM SIGN VIRAMA;Mn;9;ON;;;;;N;;;;;
+"\u0E3A"//;THAI CHARACTER PHINTHU;Mn;9;ON;;;;;N;THAI VOWEL SIGN PHINTHU;;;;
+"\u0F84";//;TIBETAN MARK HALANTA;Mn;9;ON;;;;;N;TIBETAN VIRAMA;;;;
static private Token token_grapheme = null;
/**
 * Builds (lazily, once) the extended-grapheme pattern: an optional base
 * character followed by any number of combining marks or virama+letter
 * pairs.
 *
 * @return the shared grapheme pattern token
 */
static synchronized Token getGraphemePattern() {
    if (Token.token_grapheme != null)
        return Token.token_grapheme;
    Token base_char = Token.createRange();  // [{ASSIGNED}]-[{M},{C}]
    base_char.mergeRanges(Token.getRange("ASSIGNED", true));
    base_char.subtractRanges(Token.getRange("M", true));
    base_char.subtractRanges(Token.getRange("C", true));
    Token virama = Token.createRange();
    for (int i = 0; i < Token.viramaString.length(); i ++) {
        int ch = viramaString.charAt(i);
        // BUG FIX: add the virama character itself, not the loop index.
        // The original code called addRange(i, i), leaving `ch` unused and
        // putting the indices 0..10 into the range instead of the viramas.
        virama.addRange(ch, ch);
    }
    Token combiner_wo_virama = Token.createRange();
    combiner_wo_virama.mergeRanges(Token.getRange("M", true));
    combiner_wo_virama.addRange(0x1160, 0x11ff); // hangul_medial and hangul_final
    combiner_wo_virama.addRange(0xff9e, 0xff9f); // extras
    Token left = Token.createUnion();       // base_char?
    left.addChild(base_char);
    left.addChild(Token.token_empty);
    Token foo = Token.createUnion();
    foo.addChild(Token.createConcat(virama, Token.getRange("L", true)));
    foo.addChild(combiner_wo_virama);
    foo = Token.createClosure(foo);
    foo = Token.createConcat(left, foo);
    Token.token_grapheme = foo;
    return Token.token_grapheme;
}
/**
* Combing Character Sequence in Perl 5.6.
*/
static private Token token_ccs = null;
/**
 * Builds (lazily, once) the Perl 5.6 "combining character sequence"
 * pattern: one non-mark character followed by any number of marks,
 * i.e. \PM\pM*.
 *
 * @return the shared combining-character-sequence token
 */
static synchronized Token getCombiningCharacterSequence() {
    if (Token.token_ccs == null) {
        Token marks = Token.createClosure(Token.getRange("M", true)); // \pM*
        Token.token_ccs = Token.createConcat(Token.getRange("M", false), marks); // \PM + \pM*
    }
    return Token.token_ccs;
}
/**
 * Parse-tree node holding either a literal string (STRING) or a
 * back reference (BACKREFERENCE, where only refNumber is meaningful).
 */
static class StringToken extends Token implements java.io.Serializable {
    String string;
    int refNumber;

    StringToken(int type, String str, int n) {
        super(type);
        this.string = str;
        this.refNumber = n;
    }

    int getReferenceNumber() {              // for STRING
        return this.refNumber;
    }

    String getString() {                    // for STRING
        return this.string;
    }

    public String toString(int options) {
        // Back references print as \N; literals are meta-quoted.
        return this.type == BACKREFERENCE
                ? "\\"+this.refNumber
                : REUtil.quoteMeta(this.string);
    }
}
/**
 * Parse-tree node for the concatenation of exactly two children.
 */
static class ConcatToken extends Token implements java.io.Serializable {
Token child;
Token child2;
ConcatToken(Token t1, Token t2) {
super(Token.CONCAT);
this.child = t1;
this.child2 = t2;
}
int size() {
return 2;
}
Token getChild(int index) {
return index == 0 ? this.child : this.child2;
}
public String toString(int options) {
String ret;
// Pretty-print X followed by X* as X+ (and X followed by X*? as X+?).
if (this.child2.type == CLOSURE && this.child2.getChild(0) == this.child) {
ret = this.child.toString(options)+"+";
} else if (this.child2.type == NONGREEDYCLOSURE && this.child2.getChild(0) == this.child) {
ret = this.child.toString(options)+"+?";
} else
ret = this.child.toString(options)+this.child2.toString(options);
return ret;
}
}
/**
 * Parse-tree node for a single character: either a literal (CHAR) or an
 * anchor (ANCHOR, where chardata identifies the anchor kind, e.g. '^').
 */
static class CharToken extends Token implements java.io.Serializable {
int chardata;
CharToken(int type, int ch) {
super(type);
this.chardata = ch;
}
int getChar() {
return this.chardata;
}
public String toString(int options) {
String ret;
switch (this.type) {
case CHAR:
switch (this.chardata) {
// Regex metacharacters must be escaped when printed.
case '|': case '*': case '+': case '?':
case '(': case ')': case '.': case '[':
case '{': case '\\':
ret = "\\"+(char)this.chardata;
break;
case '\f': ret = "\\f"; break;
case '\n': ret = "\\n"; break;
case '\r': ret = "\\r"; break;
case '\t': ret = "\\t"; break;
case 0x1b: ret = "\\e"; break;
//case 0x0b: ret = "\\v"; break;
default:
if (this.chardata >= 0x10000) {
// Supplementary code points print as \vXXXXXX (6 hex digits).
String pre = "0"+Integer.toHexString(this.chardata);
ret = "\\v"+pre.substring(pre.length()-6, pre.length());
} else
ret = ""+(char)this.chardata;
}
break;
case ANCHOR:
// ^ and $ print bare; every other anchor prints as \<char>.
if (this == Token.token_linebeginning || this == Token.token_lineend)
ret = ""+(char)this.chardata;
else
ret = "\\"+(char)this.chardata;
break;
default:
ret = null;
}
return ret;
}
boolean match(int ch) {
// Only literal CHAR tokens can be matched against a character.
if (this.type == CHAR) {
return ch == this.chardata;
} else
throw new RuntimeException("NFAArrow#match(): Internal error: "+this.type);
}
}
/**
 * Parse-tree node for a repetition: type CLOSURE for greedy closures,
 * NONGREEDYCLOSURE for reluctant ones. A min/max of -1 means the bound
 * is unset on that side.
 */
static class ClosureToken extends Token implements java.io.Serializable {
    int min;
    int max;
    Token child;

    ClosureToken(int type, Token tok) {
        super(type);
        this.child = tok;
        this.setMin(-1);
        this.setMax(-1);
    }

    int size() {
        return 1;
    }

    Token getChild(int index) {
        return this.child;
    }

    final void setMin(int min) {
        this.min = min;
    }

    final void setMax(int max) {
        this.max = max;
    }

    final int getMin() {
        return this.min;
    }

    final int getMax() {
        return this.max;
    }

    public String toString(int options) {
        int lo = this.getMin();
        int hi = this.getMax();
        // Choose the quantifier text, then append "?" for the
        // non-greedy variant.
        String quantifier;
        if (lo < 0 && hi < 0) {
            quantifier = "*";
        } else if (lo == hi) {
            quantifier = "{" + lo + "}";
        } else if (lo >= 0 && hi >= 0) {
            quantifier = "{" + lo + "," + hi + "}";
        } else if (lo >= 0) {
            quantifier = "{" + lo + ",}";
        } else {
            // Inconsistent bounds (e.g. min unset but max set).
            throw new RuntimeException("Token#toString(): "
                + (this.type == CLOSURE ? "CLOSURE " : "NONGREEDYCLOSURE ")
                + lo + ", " + hi);
        }
        String suffix = this.type == CLOSURE ? "" : "?";
        return this.child.toString(options) + quantifier + suffix;
    }
}
/**
 * Parse-tree node for grouping constructs: capturing and non-capturing
 * parentheses, lookahead/lookbehind assertions, and independent
 * (atomic) groups.
 */
static class ParenToken extends Token implements java.io.Serializable {
    Token child;
    int parennumber;

    ParenToken(int type, Token tok, int paren) {
        super(type);
        this.child = tok;
        this.parennumber = paren;
    }

    int size() {
        return 1;
    }

    Token getChild(int index) {
        return this.child;
    }

    /** Capture-group number; 0 denotes a non-capturing group. */
    int getParenNumber() {
        return this.parennumber;
    }

    public String toString(int options) {
        // Pick the opening delimiter for this group type; all of them
        // close with a plain ")".
        String open = null;
        switch (this.type) {
          case PAREN:
            open = this.parennumber == 0 ? "(?:" : "(";
            break;
          case LOOKAHEAD:
            open = "(?=";
            break;
          case NEGATIVELOOKAHEAD:
            open = "(?!";
            break;
          case LOOKBEHIND:
            open = "(?<=";
            break;
          case NEGATIVELOOKBEHIND:
            open = "(?<!";
            break;
          case INDEPENDENT:
            open = "(?>";
            break;
        }
        // Unknown types render as null, matching historical behavior.
        return open == null ? null : open + this.child.toString(options) + ")";
    }
}
/**
 * Parse-tree node for a conditional pattern:
 * (?(condition)yes-pattern|no-pattern)
 */
static class ConditionToken extends Token implements java.io.Serializable {
    int refNumber;
    Token condition;
    Token yes;
    Token no;

    ConditionToken(int refno, Token cond, Token yespat, Token nopat) {
        super(Token.CONDITION);
        this.refNumber = refno;
        this.condition = cond;
        this.yes = yespat;
        this.no = nopat;
    }

    int size() {
        // The no-pattern is optional.
        return this.no == null ? 1 : 2;
    }

    Token getChild(int index) {
        if (index == 0) return this.yes;
        if (index == 1) return this.no;
        throw new RuntimeException("Internal Error: "+index);
    }

    public String toString(int options) {
        StringBuffer sb = new StringBuffer();
        if (this.refNumber > 0) {
            // Condition is a back reference to a capture group.
            sb.append("(?(").append(this.refNumber).append(')');
        } else if (this.condition.type == Token.ANCHOR) {
            sb.append("(?(").append(this.condition).append(')');
        } else {
            // Condition is itself a lookaround group.
            sb.append("(?").append(this.condition);
        }
        sb.append(this.yes);
        if (this.no != null) {
            sb.append('|').append(this.no);
        }
        sb.append(')');
        return sb.toString();
    }
}
/**
 * Parse-tree node for an inline modifier group: (?ims-ims: ... ).
 * "add" holds the option bits switched on, "mask" those switched off.
 */
static class ModifierToken extends Token implements java.io.Serializable {
    Token child;
    int add;
    int mask;

    ModifierToken(Token tok, int add, int mask) {
        super(Token.MODIFIERGROUP);
        this.child = tok;
        this.add = add;
        this.mask = mask;
    }

    int size() {
        return 1;
    }

    Token getChild(int index) {
        return this.child;
    }

    /** Option bits enabled inside this group. */
    int getOptions() {
        return this.add;
    }

    /** Option bits disabled inside this group. */
    int getOptionsMask() {
        return this.mask;
    }

    public String toString(int options) {
        String enabled = this.add == 0 ? "" : REUtil.createOptionString(this.add);
        String disabled = this.mask == 0 ? "" : REUtil.createOptionString(this.mask);
        return "(?" + enabled + disabled + ":" + this.child.toString(options) + ")";
    }
}
/**
 * Parse-tree node with a variable number of children, used for both
 * UNION (alternation) and CONCAT. For CONCAT, adjacent CHAR/STRING
 * children are merged into a single STRING child as they are added.
 */
static class UnionToken extends Token implements java.io.Serializable {
    // Child tokens; created lazily on first addChild().
    Vector children;

    UnionToken(int type) {
        super(type);
    }

    /**
     * Append a child. For CONCAT, nested CONCATs are flattened and
     * consecutive CHAR/STRING children are coalesced into one STRING.
     *
     * @param tok child to add; null is silently ignored
     */
    void addChild(Token tok) {
        if (tok == null) return;
        if (this.children == null) this.children = new Vector();
        if (this.type == UNION) {
            // Alternation branches are never merged.
            this.children.addElement(tok);
            return;
        }
        // This is CONCAT, and new child is CONCAT.
        if (tok.type == CONCAT) {
            // Flatten: adopt the nested CONCAT's children directly.
            for (int i = 0; i < tok.size(); i ++)
                this.addChild(tok.getChild(i)); // Recursion
            return;
        }
        int size = this.children.size();
        if (size == 0) {
            this.children.addElement(tok);
            return;
        }
        Token previous = (Token)this.children.elementAt(size-1);
        // Only merge when both the last child and the new one are
        // literal text (CHAR or STRING).
        if (!((previous.type == CHAR || previous.type == STRING)
              && (tok.type == CHAR || tok.type == STRING))) {
            this.children.addElement(tok);
            return;
        }
        //System.err.println("Merge '"+previous+"' and '"+tok+"'.");
        StringBuffer buffer;
        // A CHAR may expand to a surrogate pair, hence max length 2.
        int nextMaxLength = (tok.type == CHAR ? 2 : tok.getString().length());
        if (previous.type == CHAR) {        // Replace previous token by STRING
            buffer = new StringBuffer(2 + nextMaxLength);
            int ch = previous.getChar();
            if (ch >= 0x10000)
                buffer.append(REUtil.decomposeToSurrogates(ch));
            else
                buffer.append((char)ch);
            // Placeholder STRING; its text is filled in at the end.
            previous = Token.createString(null);
            this.children.setElementAt(previous, size-1);
        } else {                            // STRING
            buffer = new StringBuffer(previous.getString().length() + nextMaxLength);
            buffer.append(previous.getString());
        }
        if (tok.type == CHAR) {
            int ch = tok.getChar();
            if (ch >= 0x10000)
                buffer.append(REUtil.decomposeToSurrogates(ch));
            else
                buffer.append((char)ch);
        } else {
            buffer.append(tok.getString());
        }
        ((StringToken)previous).string = new String(buffer);
    }

    int size() {
        return this.children == null ? 0 : this.children.size();
    }

    Token getChild(int index) {
        return (Token)this.children.elementAt(index);
    }

    public String toString(int options) {
        String ret;
        if (this.type == CONCAT) {
            if (this.children.size() == 2) {
                // Same X X* => X+ shorthand as ConcatToken.
                Token ch = this.getChild(0);
                Token ch2 = this.getChild(1);
                if (ch2.type == CLOSURE && ch2.getChild(0) == ch) {
                    ret = ch.toString(options)+"+";
                } else if (ch2.type == NONGREEDYCLOSURE && ch2.getChild(0) == ch) {
                    ret = ch.toString(options)+"+?";
                } else
                    ret = ch.toString(options)+ch2.toString(options);
            } else {
                StringBuffer sb = new StringBuffer();
                for (int i = 0;  i < this.children.size();  i ++) {
                    sb.append(((Token)this.children.elementAt(i)).toString(options));
                }
                ret = new String(sb);
            }
            return ret;
        }
        // UNION: "X|" with a trailing EMPTY branch prints as "X?",
        // a leading EMPTY branch as "X??".
        if (this.children.size() == 2 && this.getChild(1).type == EMPTY) {
            ret = this.getChild(0).toString(options)+"?";
        } else if (this.children.size() == 2
                   && this.getChild(0).type == EMPTY) {
            ret = this.getChild(1).toString(options)+"??";
        } else {
            StringBuffer sb = new StringBuffer();
            sb.append(((Token)this.children.elementAt(0)).toString(options));
            for (int i = 1;  i < this.children.size();  i ++) {
                sb.append((char)'|');
                sb.append(((Token)this.children.elementAt(i)).toString(options));
            }
            ret = new String(sb);
        }
        return ret;
    }
}
} |
package org.bouncycastle.asn1.cms;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1Set;
import org.bouncycastle.asn1.DEREncodableVector;
import org.bouncycastle.asn1.DERObjectIdentifier;
public class AttributeTable
{
private Hashtable attributes = new Hashtable();
public AttributeTable(
Hashtable attrs)
{
attributes = copyTable(attrs);
}
public AttributeTable(
DEREncodableVector v)
{
for (int i = 0; i != v.size(); i++)
{
Attribute a = Attribute.getInstance(v.get(i));
addAttribute(a.getAttrType(), a);
}
}
public AttributeTable(
ASN1Set s)
{
for (int i = 0; i != s.size(); i++)
{
Attribute a = Attribute.getInstance(s.getObjectAt(i));
addAttribute(a.getAttrType(), a);
}
}
private void addAttribute(
DERObjectIdentifier oid,
Attribute a)
{
Object value = attributes.get(oid);
if (value == null)
{
attributes.put(oid, a);
}
else
{
Vector v;
if (value instanceof Attribute)
{
v = new Vector();
v.addElement(value);
v.addElement(a);
}
else
{
v = (Vector)value;
v.addElement(a);
}
attributes.put(oid, v);
}
}
/**
* Return the first attribute matching the OBJECT IDENTIFIER oid.
*
* @param oid type of attribute required.
* @return first attribute found of type oid.
*/
public Attribute get(
DERObjectIdentifier oid)
{
Object value = attributes.get(oid);
if (value instanceof Vector)
{
return (Attribute)((Vector)value).elementAt(0);
}
return (Attribute)value;
}
/**
* Return all the attributes matching the OBJECT IDENTIFIER oid. The vector will be
* empty if there are no attributes of the required type present.
*
* @param oid type of attribute required.
* @return a vector of all the attributes found of type oid.
*/
public ASN1EncodableVector getAll(
DERObjectIdentifier oid)
{
ASN1EncodableVector v = new ASN1EncodableVector();
Object value = attributes.get(oid);
if (value instanceof Vector)
{
Enumeration e = ((Vector)value).elements();
while (e.hasMoreElements())
{
v.add((Attribute)e.nextElement());
}
}
else if (value != null)
{
v.add((Attribute)value);
}
return v;
}
public Hashtable toHashtable()
{
return copyTable(attributes);
}
public ASN1EncodableVector toASN1EncodableVector()
{
ASN1EncodableVector v = new ASN1EncodableVector();
Enumeration e = attributes.elements();
while (e.hasMoreElements())
{
Object value = e.nextElement();
if (value instanceof Vector)
{
Enumeration en = ((Vector)value).elements();
while (en.hasMoreElements())
{
v.add(Attribute.getInstance(en.nextElement()));
}
}
else
{
v.add(Attribute.getInstance(value));
}
}
return v;
}
private Hashtable copyTable(
Hashtable in)
{
Hashtable out = new Hashtable();
Enumeration e = in.keys();
while (e.hasMoreElements())
{
Object key = e.nextElement();
out.put(key, in.get(key));
}
return out;
}
} |
package org.concord.datagraph.ui;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.GradientPaint;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Point;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.Line2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import org.concord.framework.data.stream.DataConsumer;
import org.concord.framework.data.stream.DataListener;
import org.concord.framework.data.stream.DataProducer;
import org.concord.framework.data.stream.DataStreamEvent;
import org.concord.graph.engine.CoordinateSystem;
import org.concord.graph.engine.DefaultControllable;
import org.concord.graph.engine.Graphable;
import org.concord.graph.util.engine.DrawingObject;
import org.concord.graph.util.ui.ImageStamp;
/**
* DrawingShape
* Class name and description
*
* Date created: Mar 24, 2005
*
* @author imoncada<p>
*
*/
public class DataFlowingLine extends DefaultControllable
implements DrawingObject, DataListener, DataConsumer
{
float forwardThreshold;
float reverseThreshold;
Color forwardColor1, forwardColor2;
Color reverseColor1, reverseColor2;
Color stopColor;
boolean flowing = false;
Color color1, color2;
int channelNumber = 0;
private Stroke stroke;
private float cycleDistance;
private float cycleOffset;
// this should be generalized to handle any drawable
// but currently there isn't a way to get drawbles
// locations.
private ImageStamp image1;
private ImageStamp image2;
private DrawingObject[] allSelectedDrawingObjects;
/**
* @see org.concord.graph.util.engine.DrawingObject#setColor(java.awt.Color)
*/
public void setFowardColor1(Color color)
{
this.forwardColor1 = color;
notifyChange();
}
/**
* @see org.concord.graph.util.engine.DrawingObject#getColor()
*/
public Color getForwardColor1()
{
if (forwardColor1 == null) return Color.black;
return forwardColor1;
}
/**
* @see org.concord.graph.util.engine.DrawingObject#setColor(java.awt.Color)
*/
public void setForwardColor2(Color color)
{
this.forwardColor2 = color;
notifyChange();
}
/**
* @see org.concord.graph.util.engine.DrawingObject#getColor()
*/
public Color getForwardColor2()
{
if (forwardColor2 == null) return Color.green;
return forwardColor1;
}
/**
* @see org.concord.graph.util.engine.DrawingObject#setColor(java.awt.Color)
*/
public void setReverseColor1(Color color)
{
this.reverseColor1 = color;
notifyChange();
}
/**
* @see org.concord.graph.util.engine.DrawingObject#getColor()
*/
public Color getReverseColor1()
{
if (reverseColor1 == null) return Color.black;
return reverseColor1;
}
/**
* @see org.concord.graph.util.engine.DrawingObject#setColor(java.awt.Color)
*/
public void setReverseColor2(Color color)
{
this.reverseColor2 = color;
notifyChange();
}
/**
* @see org.concord.graph.util.engine.DrawingObject#getColor()
*/
public Color getReverseColor2()
{
if (reverseColor2 == null) return Color.red;
return reverseColor1;
}
public void setImage1(ImageStamp image)
{
image1 = image;
}
public void setImage2(ImageStamp image)
{
image2 = image;
}
public void setCycleDistance(float distance)
{
cycleDistance = distance;
}
public void setCycleOffset(float offset)
{
cycleOffset = offset;
}
public Stroke getStroke()
{
return stroke;
}
public void setStroke(Stroke stroke)
{
this.stroke = stroke;
}
/**
* @see org.concord.graph.engine.Graphable#getCopy()
*/
public Graphable getCopy()
{
return null;
}
/*
* return a point a given a distance from the start
* point on the line. The direction of the line is
* from start point to the end point.
*
* I know this can be done with matrix calcs, but
* I don't see and java untils for this.
*/
public Point2D getLinePoint(double distance, Point2D start,
Point2D end, Point2D linePoint)
{
// find the lines theta:
double xOffset = end.getX() - start.getX();
double yOffset = end.getY() - start.getY();
double theta = Math.atan2(yOffset, xOffset);
double ptY = distance*Math.sin(theta);
double ptX = distance*Math.cos(theta);
if(linePoint == null){
linePoint = new Point2D.Double();
}
linePoint.setLocation(start.getX() + ptX, start.getY() + ptY);
return linePoint;
}
public void start()
{
// This is over kill creating a new thread for each instance
// It would be nice if the graph provided a framework for this
// but in the meantime we
Thread runner = new Thread(){
float currCycleOffset = 0f;
public void run()
{
while(true) {
setCycleOffset(currCycleOffset);
currCycleOffset += 2;
notifyChange();
try {
Thread.sleep(100);
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
runner.start();
}
/**
* @see org.concord.graph.engine.Drawable#draw(java.awt.Graphics2D)
*/
public void draw(Graphics2D g)
{
Paint oldPaint = g.getPaint();
Stroke oldStroke = g.getStroke();
Shape oldClip = g.getClip();
CoordinateSystem cs = graphArea.getCoordinateSystem();
Point2D start = cs.transformToDisplay(image1.getLocation());
Point2D end = cs.transformToDisplay(image2.getLocation());
if(flowing) {
Point2D cycleStart = getLinePoint(cycleOffset, start, end, null);
Point2D cycleEnd = getLinePoint(cycleDistance, cycleStart, end, null);
Paint cyclePaint = new GradientPaint(cycleStart, color1, cycleEnd, color2, true);
// probably we need to save the old gradient
g.setPaint(cyclePaint);
//Stroke lineStroke = getStroke();
if(stroke == null) {
stroke = new BasicStroke(20, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND);;
}
g.setStroke(stroke);
} else {
g.setColor(stopColor);
}
Line2D line = new Line2D.Double(start, end);
graphArea.clipGraphics(g);
g.draw(line);
g.setPaint(oldPaint);
g.setStroke(oldStroke);
g.setClip(oldClip);
}
/* (non-Javadoc)
* @see org.concord.graph.engine.MouseSensitive#isPointInProximity(java.awt.Point)
*/
public boolean isPointInProximity(Point p)
{
return false;
}
/* (non-Javadoc)
* @see org.concord.graph.util.engine.DrawingObject#getDrawingDragMode()
*/
public int getDrawingDragMode()
{
return DRAWING_DRAG_MODE_NONE;
}
public boolean erase(Rectangle2D rectDisplay)
{
// TODO Auto-generated method stub
return false;
}
public Color getColor()
{
// TODO Auto-generated method stub
return null;
}
public Rectangle2D getBoundingRectangle()
{
//TODO change to return actual bounding box
Rectangle2D fakeRect = new Rectangle2D.Double();
return fakeRect;
}
public boolean isResizeEnabled()
{
// TODO Auto-generated method stub
return false;
}
public boolean setDrawingDragMode(int mode)
{
// TODO Auto-generated method stub
return false;
}
public void setColor(Color color)
{
// TODO Auto-generated method stub
}
public void addDataProducer(DataProducer source)
{
source.addDataListener(this);
}
public void removeDataProducer(DataProducer source)
{
source.removeDataListener(this);
}
public void dataReceived(DataStreamEvent dataEvent)
{
int numSamples = dataEvent.getNumSamples();
//int channelsPerSample = dataEvent.getDataDescription().getChannelsPerSample();
int nextSampleOff = dataEvent.getDataDescription().getNextSampleOffset();
float value = dataEvent.data[(numSamples-1)*nextSampleOff+channelNumber];
if(value > forwardThreshold) {
flowing = true;
color1 = getForwardColor1();
color2 = getForwardColor2();
} else if(value < reverseThreshold) {
flowing = true;
color1 = getReverseColor1();
color2 = getReverseColor2();
} else {
flowing = false;
}
}
public void dataStreamEvent(DataStreamEvent dataEvent)
{
// TODO Auto-generated method stub
}
public boolean isInsideBox(Rectangle2D box) {
// TODO Auto-generated method stub
return false;
}
public boolean isMouseReceiving() {
// TODO Auto-generated method stub
return false;
}
public boolean mousePressed(Point p) {
for (int i = 0; i < allSelectedDrawingObjects.length; i++) {
if (!allSelectedDrawingObjects[i].equals(this)){
allSelectedDrawingObjects[i].setCurrentLocationAsOriginal();
}
}
return super.mousePressed(p);
}
public boolean mouseDragged(Point p) {
for (int i = 0; i < allSelectedDrawingObjects.length; i++) {
if (!allSelectedDrawingObjects[i].equals(this)){
allSelectedDrawingObjects[i].moveInRelation(originalLocation, location);
}
}
return super.mouseDragged(p);
}
public boolean mouseEntered(Point p) {
// TODO Auto-generated method stub
return false;
}
public boolean mouseExited(Point p) {
// TODO Auto-generated method stub
return false;
}
public boolean mouseMoved(Point p) {
// TODO Auto-generated method stub
return false;
}
public void setAllSelectedDrawingObjects(DrawingObject[] objects) {
allSelectedDrawingObjects = objects;
}
public void moveInRelation(Point2D start2d, Point2D end2d) {
Point2D.Double start = (Point2D.Double)start2d;
Point2D.Double end = (Point2D.Double)end2d;
double deltaX = end.x - start.x;
double deltaY = end.y - start.y;
Point2D newP = new Point2D.Double(originalLocation.getX() + deltaX,
originalLocation.getY() + deltaY);
if (validateNewLocation(newP)){
setLocation(newP);
//originalLocation = newP;
}
}
public void setCurrentLocationAsOriginal(){
originalLocation.setLocation(this.location);
}
} |
package org.dita.dost.reader;
import static org.dita.dost.util.Constants.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Stack;
import java.util.Map.Entry;
import org.apache.xerces.xni.grammars.XMLGrammarPool;
import org.dita.dost.exception.DITAOTException;
import org.dita.dost.exception.DITAOTXMLErrorHandler;
import org.dita.dost.log.MessageBean;
import org.dita.dost.log.MessageUtils;
import org.dita.dost.module.GenMapAndTopicListModule.KeyDef;
import org.dita.dost.util.CatalogUtils;
import org.dita.dost.util.DITAAttrUtils;
import org.dita.dost.util.FileUtils;
import org.dita.dost.util.FilterUtils;
import org.dita.dost.util.OutputUtils;
import org.dita.dost.util.StringUtils;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
/**
* This class extends AbstractReader, used to parse relevant dita topics
* and ditamap files for GenMapAndTopicListModule.
*
* <p><strong>Not thread-safe</strong>. Instances can be reused by calling
* {@link #reset()} between calls to {@link #parse(File)}.</p>
*
* @version 1.0 2004-11-25
*
* @author Wu, Zhi Qiang
*/
public final class GenListModuleReader extends AbstractXMLReader {
/** XMLReader instance for parsing dita file */
private XMLReader reader = null;
/** Map of XML catalog info */
private Map<String, String> catalogMap = null;
/** Filter utils */
private FilterUtils filterUtils;
/** Basedir of the current parsing file */
private String currentDir = null;
/** Flag for conref in parsing file */
private boolean hasConRef = false;
/** Flag for href in parsing file */
private boolean hasHref = false;
/** Flag for keyref in parsing file */
private boolean hasKeyRef = false;
/** Flag for whether parsing file contains coderef */
private boolean hasCodeRef = false;
/** Set of all the non-conref and non-copyto targets refered in current parsing file */
private final Set<String> nonConrefCopytoTargets;
/** Set of conref targets refered in current parsing file */
private final Set<String> conrefTargets;
/** Set of href nonConrefCopytoTargets refered in current parsing file */
private final Set<String> hrefTargets;
/** Set of href targets with anchor appended */
private final Set<String> hrefTopicSet;
/** Set of chunk targets */
private final Set<String> chunkTopicSet;
/** Set of subject schema files */
private final Set<String> schemeSet;
/** Set of subsidiary files */
private final Set<String> subsidiarySet;
/** Set of sources of those copy-to that were ignored */
private final Set<String> ignoredCopytoSourceSet;
/** Map of copy-to target to source */
private final Map<String, String> copytoMap;
/** Map of key definitions */
private final Map<String, KeyDef> keysDefMap;
//Added on 20100826 for bug:3052913 start
/** Map to store multi-level keyrefs */
private final Map<String, String>keysRefMap;
//Added on 20100826 for bug:3052913 end
/** Flag for conrefpush */
private boolean hasconaction = false;
/** Flag used to mark if parsing entered into excluded element */
private boolean insideExcludedElement = false;
/** Used to record the excluded level */
private int excludedLevel = 0;
/** foreign/unknown nesting level */
private int foreignLevel = 0;
/** chunk nesting level */
private int chunkLevel = 0;
//Added by William on 2010-06-17 for bug:3016739 start
/** mark topics in reltables */
private int relTableLevel = 0;
//Added by William on 2010-06-17 for bug:3016739 end
/** chunk to-navigation level */
private int chunkToNavLevel = 0;
/** Topic group nesting level */
private int topicGroupLevel = 0;
/** Flag used to mark if current file is still valid after filtering */
private boolean isValidInput = false;
/** Contains the attribution specialization from props */
private String[][] props;
/** Set of outer dita files */
private final Set<String> outDitaFilesSet;
private String rootDir = null;
private String currentFile=null;
private String rootFilePath=null;
//Added on 2010-08-24 for bug:3086552 start
private boolean setSystemid = true;
//Added on 2010-08-24 for bug:3086552 end
/** Stack for @processing-role value */
private final Stack<String> processRoleStack;
/** Depth inside a @processing-role parent */
private int processRoleLevel;
/** Topics with processing role of "resource-only" */
private final Set<String> resourceOnlySet;
/** Topics with processing role of "normal" */
private final Set<String> crossSet;
private final Set<String> schemeRefSet;
/** Subject scheme document root */
//private Document schemeRoot = null;
/** Current processing node */
//private Element currentElement = null;
/** Relationship graph between subject schema */
private Map<String, Set<String>> relationGraph = null;
//Added by William on 2009-06-25 for req #12014 start
/** StringBuffer to store <exportanchors> elements */
private StringBuffer result = new StringBuffer();
/** Flag to show whether a file has <exportanchors> tag */
private boolean hasExport = false;
/** For topic/dita files whether a </file> tag should be added */
private boolean shouldAppendEndTag = false;
/** Store the href of topicref tag */
private String topicHref = "";
/** Topicmeta set for merge multiple exportanchors into one.
* Each topicmeta/prolog can define many exportanchors */
private final Set<String> topicMetaSet;
/** Refered topic id */
private String topicId = "";
/** Map to store plugin id */
private final Map<String, Set<String>> pluginMap = new HashMap<String, Set<String>>();
/** Transtype */
private String transtype;
//Added by William on 2010-03-01 for update onlytopicinmap option start
/** Map to store referenced branches. */
private final Map<String, List<String>> vaildBranches;
/** Int to mark referenced nested elements. */
private int level;
/** Topicref stack */
private final Stack<String> topicrefStack;
/** Store the primary ditamap file name. */
private String primaryDitamap = "";
//Added by William on 2010-03-01 for update onlytopicinmap option end.
//Added by William on 2010-06-01 for bug:3005748 start
/** Get DITAAttrUtil */
private final DITAAttrUtils ditaAttrUtils = DITAAttrUtils.getInstance();
//Added by William on 2010-06-01 for bug:3005748 end
//Added by William on 2010-06-09 for bug:3013079 start
/** Store the external/peer keydefs */
private final Map<String, String> exKeysDefMap;
//Added by William on 2010-06-09 for bug:3013079 end
/**
 * Get transtype.
 * @return the currently configured transformation type
 */
public String getTranstype() {
    return this.transtype;
}

/**
 * Set transtype.
 * @param transtype the transformation type to use
 */
public void setTranstype(final String transtype) {
    this.transtype = transtype;
}

/**
 * Get the plugin id map.
 * @return map from plugin id to the values collected for it
 */
public Map<String, Set<String>> getPluginMap() {
    return this.pluginMap;
}

/**
 * Get the collected export-anchor information.
 * @return buffer holding the generated {@code <exportanchors>} output
 */
public StringBuffer getResult() {
    return this.result;
}
//Added by William on 2009-06-25 for req #12014 end
/**
 * Constructor: allocates all target/definition collections and wires up
 * the shared XML reader with this instance as content and lexical
 * handler. Fails fast if no SAX parser can be created.
 */
public GenListModuleReader() {
    // Target/reference collections, sized by expected magnitude.
    nonConrefCopytoTargets = new HashSet<String>(INT_64);
    hrefTargets = new HashSet<String>(INT_32);
    hrefTopicSet = new HashSet<String>(INT_32);
    chunkTopicSet = new HashSet<String>(INT_32);
    schemeSet = new HashSet<String>(INT_32);
    schemeRefSet = new HashSet<String>(INT_32);
    conrefTargets = new HashSet<String>(INT_32);
    copytoMap = new HashMap<String, String>(INT_16);
    subsidiarySet = new HashSet<String>(INT_16);
    ignoredCopytoSourceSet = new HashSet<String>(INT_16);
    outDitaFilesSet=new HashSet<String>(INT_64);
    // Key definition bookkeeping.
    keysDefMap = new HashMap<String, KeyDef>();
    keysRefMap = new HashMap<String, String>();
    exKeysDefMap = new HashMap<String, String>();
    // @processing-role tracking.
    processRoleLevel = 0;
    processRoleStack = new Stack<String>();
    resourceOnlySet = new HashSet<String>(INT_32);
    crossSet = new HashSet<String>(INT_32);
    //store the topicmeta element
    topicMetaSet = new HashSet<String>(INT_16);
    // Referenced-branch tracking for onlytopicinmap.
    vaildBranches = new HashMap<String, List<String>>(INT_32);
    level = 0;
    topicrefStack = new Stack<String>();
    //schemeRoot = null;
    //currentElement = null;
    props = null;
    try {
        reader = StringUtils.getXMLReader();
    } catch (final SAXException e) {
        throw new RuntimeException("Unable to create XML parser: " + e.getMessage(), e);
    }
    reader.setContentHandler(this);
    try {
        // Lexical handler is needed to see comments/DTD events.
        reader.setProperty(LEXICAL_HANDLER_PROPERTY,this);
    } catch (final SAXNotRecognizedException e1) {
        logger.logException(e1);
    } catch (final SAXNotSupportedException e1) {
        logger.logException(e1);
    }
}
/**
 * Set content filter.
 *
 * @param filterUtils filter utilities applied while parsing
 */
public void setFilterUtils(final FilterUtils filterUtils) {
    this.filterUtils = filterUtils;
}
/**
 * Init xml reader used for pipeline parsing: records the root input
 * file/directory, configures validation and namespace features,
 * installs the grammar pool and XML catalog, and picks an entity
 * resolver.
 *
 * @param ditaDir ditaDir
 * @param validate whether validate input file
 * @param rootFile input file
 * @param arg_setSystemid whether to set the system id on input sources
 * @throws SAXException parsing exception
 */
public void initXMLReader(final String ditaDir,final boolean validate,final String rootFile, final boolean arg_setSystemid) throws SAXException {
    //final DITAOTJavaLogger javaLogger=new DITAOTJavaLogger();
    //to check whether the current parsing file's href value is out of inputmap.dir
    rootDir=new File(rootFile).getAbsoluteFile().getParent();
    rootDir = FileUtils.removeRedundantNames(rootDir);
    rootFilePath=new File(rootFile).getAbsolutePath();
    rootFilePath = FileUtils.removeRedundantNames(rootFilePath);
    reader.setFeature(FEATURE_NAMESPACE_PREFIX, true);
    if(validate==true){
        reader.setFeature(FEATURE_VALIDATION, true);
        reader.setFeature(FEATURE_VALIDATION_SCHEMA, true);
    }else{
        // Warn that validation is off.
        final String msg=MessageUtils.getMessage("DOTJ037W").toString();
        logger.logWarn(msg);
    }
    // Share the grammar pool across parses to avoid re-reading DTDs.
    final XMLGrammarPool grammarPool = GrammarPoolManager.getGrammarPool();
    setGrammarPool(reader, grammarPool);
    CatalogUtils.setDitaDir(ditaDir);
    catalogMap = CatalogUtils.getCatalog(ditaDir);
    //Added on 2010-08-24 for bug:3086552 start
    setSystemid= arg_setSystemid;
    //Added on 2010-08-24 for bug:3086552 end
    // Prefer the XML catalog resolver when available; otherwise fall
    // back to this reader's own entity resolution.
    try {
        Class.forName(RESOLVER_CLASS);
        reader.setEntityResolver(CatalogUtils.getCatalogResolver());
    }catch (final ClassNotFoundException e){
        reader.setEntityResolver(this);
    }
}
/**
 * Reset the internal variables so the reader can parse another file.
 * Note: resourceOnlySet and crossSet are deliberately preserved — they
 * accumulate processing-role information across the whole phase.
 */
public void reset() {
    // Per-file flags.
    hasKeyRef = false;
    hasConRef = false;
    hasHref = false;
    hasCodeRef = false;
    currentDir = null;
    insideExcludedElement = false;
    // Per-file nesting counters.
    excludedLevel = 0;
    foreignLevel = 0;
    chunkLevel = 0;
    relTableLevel = 0;
    chunkToNavLevel = 0;
    topicGroupLevel = 0;
    isValidInput = false;
    hasconaction = false;
    // Per-file collections.
    nonConrefCopytoTargets.clear();
    hrefTargets.clear();
    hrefTopicSet.clear();
    chunkTopicSet.clear();
    conrefTargets.clear();
    copytoMap.clear();
    ignoredCopytoSourceSet.clear();
    outDitaFilesSet.clear();
    keysDefMap.clear();
    keysRefMap.clear();
    exKeysDefMap.clear();
    schemeSet.clear();
    schemeRefSet.clear();
    //clear level
    level = 0;
    //clear stack
    topicrefStack.clear();
    //@processing-role
    processRoleLevel = 0;
    processRoleStack.clear();
    //reset utils
    ditaAttrUtils.reset();
    /*
     * Don't clean up these sets, we need them through
     * the whole phase to determine a topic's processing-role.
     */
    //resourceOnlySet.clear();
    //crossSet.clear();
}
/**
 * Whether the last parsed file contains a conref.
 *
 * @return true if a conref was seen and false otherwise
 */
public boolean hasConRef() {
    return this.hasConRef;
}

/**
 * Whether the last parsed file contains a keyref.
 *
 * @return true if a keyref was seen and false otherwise
 */
public boolean hasKeyRef(){
    return this.hasKeyRef;
}

/**
 * Whether the last parsed file contains a coderef.
 *
 * @return true if a coderef was seen and false otherwise
 */
public boolean hasCodeRef(){
    return this.hasCodeRef;
}

/**
 * Whether the last parsed file contains an href.
 *
 * @return true if an href was seen and false otherwise
 */
public boolean hasHref() {
    return this.hasHref;
}
/**
 * Get all referenced targets except pure copy-to targets.
 *
 * @return union of non-conref/non-copyto targets, conref targets,
 *         copy-to values, ignored copy-to sources and, when the old
 *         generate-copy-outer solution is active, out-of-dir coderef
 *         files
 */
public Set<String> getNonCopytoResult() {
    final Set<String> result = new HashSet<String>(INT_128);
    result.addAll(this.nonConrefCopytoTargets);
    result.addAll(this.conrefTargets);
    result.addAll(this.copytoMap.values());
    result.addAll(this.ignoredCopytoSourceSet);
    // coderef targets that fall outside the input directory
    addCoderefFiles(result);
    return result;
}
/**
 * Add coderef-referenced files that live outside the input directory.
 * Only active for /generateout:3 (the OLDSOLUTION generation mode).
 *
 * @param nonCopytoSet set that receives the qualifying out files
 */
private void addCoderefFiles(final Set<String> nonCopytoSet) {
    // the mode does not change while iterating, so check it once
    final boolean oldSolution =
            OutputUtils.getGeneratecopyouter() == OutputUtils.Generate.OLDSOLUTION;
    if (!oldSolution) {
        return;
    }
    for (final String filename : subsidiarySet) {
        if (isOutFile(filename)) {
            nonCopytoSet.add(filename);
        }
    }
}
/**
 * Get the href targets collected while parsing.
 *
 * @return the internal hrefTargets set (not a copy)
 */
public Set<String> getHrefTargets() {
    return this.hrefTargets;
}
/**
 * Get the conref targets collected while parsing.
 *
 * @return the internal conrefTargets set (not a copy)
 */
public Set<String> getConrefTargets() {
    return this.conrefTargets;
}
/**
 * Get the subsidiary (coderef / DITA-foreign data) targets.
 *
 * @return the internal subsidiarySet (not a copy)
 */
public Set<String> getSubsidiaryTargets() {
    return this.subsidiarySet;
}
/**
 * Get the dita files that resolve outside the input directory.
 *
 * @return the internal outDitaFilesSet (not a copy)
 */
public Set<String> getOutDitaFilesSet() {
    return this.outDitaFilesSet;
}
/**
 * Get the targets that are neither conref nor copy-to references.
 *
 * @return the internal nonConrefCopytoTargets set (not a copy)
 */
public Set<String> getNonConrefCopytoTargets() {
    return this.nonConrefCopytoTargets;
}
/**
 * Get the copy-to sources that were ignored because their target
 * was already claimed by another copy-to.
 *
 * @return the internal ignoredCopytoSourceSet (not a copy)
 */
public Set<String> getIgnoredCopytoSourceSet() {
    return this.ignoredCopytoSourceSet;
}
/**
 * Get the copy-to map (copy-to target -> source href).
 *
 * @return the internal copytoMap (not a copy)
 */
public Map<String, String> getCopytoMap() {
    return this.copytoMap;
}
/**
 * Get the key definitions collected from @keys attributes.
 *
 * @return the internal keysDefMap (key name -> definition, not a copy)
 */
public Map<String, KeyDef> getKeysDMap() {
    return this.keysDefMap;
}
/**
 * Get the key definitions whose targets are external or peer
 * resources (bug 3013079).
 *
 * @return the internal exKeysDefMap (not a copy)
 */
public Map<String, String> getExKeysDefMap() {
    return this.exKeysDefMap;
}
/**
 * Set the relative directory of the file currently being parsed.
 *
 * @param dir relative directory, may be {@code null} for the root map
 */
public void setCurrentDir(final String dir) {
    this.currentDir = dir;
}
/**
 * Check whether the current file is still valid after filtering:
 * a map (or specialization) for ditamaps, a title for topics.
 *
 * @return {@code true} if valid, {@code false} otherwise
 */
public boolean isValidInput() {
    return this.isValidInput;
}
/**
 * Check whether the current file contains a conaction
 * ("mark" or "pushreplace") attribute.
 *
 * @return {@code true} if a conaction was seen, {@code false} otherwise
 */
public boolean hasConaction() {
    return this.hasconaction;
}
/**
 * Parse an input XML file with the configured XMLReader.
 *
 * Fix: the original opened a FileInputStream and never closed it,
 * leaking a file handle per parsed file; the stream is now closed in
 * a finally block.
 *
 * @param file file to parse
 * @throws SAXException SAXException
 * @throws IOException IOException
 * @throws FileNotFoundException FileNotFoundException
 */
public void parse(final File file) throws FileNotFoundException, IOException, SAXException {
    currentFile = file.getAbsolutePath();
    reader.setErrorHandler(new DITAOTXMLErrorHandler(file.getName()));
    //Added on 2010-08-24 for bug:3086552 start
    final FileInputStream in = new FileInputStream(file);
    try {
        final InputSource is = new InputSource(in);
        //Set the system ID so relative references inside the file resolve
        if (setSystemid) {
            //is.setSystemId(URLUtil.correct(file).toString());
            is.setSystemId(file.toURI().toURL().toString());
        }
        //Added on 2010-08-24 for bug:3086552 end
        reader.parse(is);
    } finally {
        try {
            in.close();
        } catch (final IOException ignored) {
            // closing failure is non-fatal: parsing already completed or
            // the primary exception is already propagating
        }
    }
}
/**
 * SAX callback for start tags. This is the main collection point of
 * the reader: it tracks @processing-role scopes, records href /
 * conref / copy-to / keys references, builds the subject-scheme
 * relation graph, maintains nesting counters (foreign, chunk,
 * reltable, chunk-to-navigation, topicgroup), applies filtering, and
 * handles the /onlytopicinmap branch selection.
 *
 * NOTE(review): statement order here is significant — several early
 * returns deliberately skip the later bookkeeping.
 */
@Override
public void startElement(final String uri, final String localName, final String qName,
        final Attributes atts) throws SAXException {
    String domains = null;
    final Properties params = new Properties();
    //Added by William on 2010-06-01 for bug:3005748 start
    final String printValue = atts.getValue(ATTRIBUTE_NAME_PRINT);
    //increase element level for nested tags.
    ditaAttrUtils.increasePrintLevel(printValue);
    //Exclude the topic if it is needed.
    if(ditaAttrUtils.needExcludeForPrintAttri(transtype)){
        return;
    }
    //Added by William on 2010-06-01 for bug:3005748 end
    final String processingRole = atts.getValue(ATTRIBUTE_NAME_PROCESSING_ROLE);
    final String href = atts.getValue(ATTRIBUTE_NAME_HREF);
    final String scope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
    // Track @processing-role: an explicit attribute starts a new scope;
    // otherwise the enclosing scope (top of processRoleStack) applies.
    if (processingRole != null) {
        processRoleStack.push(processingRole);
        processRoleLevel++;
        if (ATTR_SCOPE_VALUE_EXTERNAL.equals(scope)) {
            // external resources are not recorded
        } else if (ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(processingRole)) {
            if (href != null) {
                resourceOnlySet.add(FileUtils.resolveFile(currentDir, href));
            }
        } else if (ATTR_PROCESSING_ROLE_VALUE_NORMAL.equalsIgnoreCase(processingRole)) {
            if (href != null) {
                crossSet.add(FileUtils.resolveFile(currentDir, href));
            }
        }
    } else if (processRoleLevel > 0) {
        processRoleLevel++;
        if (ATTR_SCOPE_VALUE_EXTERNAL.equals(scope)) {
            // external resources are not recorded
        } else if (ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equalsIgnoreCase(
                processRoleStack.peek())) {
            if (href != null) {
                resourceOnlySet.add(FileUtils.resolveFile(currentDir, href));
            }
        } else if (ATTR_PROCESSING_ROLE_VALUE_NORMAL.equalsIgnoreCase(
                processRoleStack.peek())) {
            if (href != null) {
                crossSet.add(FileUtils.resolveFile(currentDir, href));
            }
        }
    } else {
        // no processing-role anywhere: treat as a normal cross reference
        if (href != null) {
            crossSet.add(FileUtils.resolveFile(currentDir, href));
        }
    }
    final String classValue = atts.getValue(ATTRIBUTE_NAME_CLASS);
    //Added by William on 2009-06-24 for req #12014 start
    //has class attribute
    if(classValue!=null){
        //when meets topic tag
        if(TOPIC_TOPIC.matches(classValue)){
            topicId = atts.getValue(ATTRIBUTE_NAME_ID);
            //replace placeholder with first topic id
            //Get relative file name
            final String filename = FileUtils.getRelativePathFromMap(
                    rootFilePath, currentFile);
            if(result.indexOf(filename + QUESTION) != -1){
                result = new StringBuffer(result.toString().replace(filename + QUESTION, topicId));
            }
        }
        // WEK: As of 14 Dec 2009, transtype is sometimes null, not sure under what conditions.
        // System.out.println(" + [DEBUG] transtype=" + transtype);
        //get plugin id only when transtype = eclipsehelp and this is the root map
        if(FileUtils.isDITAMapFile(currentFile)&&
                rootFilePath.equals(currentFile)&&
                MAP_MAP.matches(classValue)&&
                INDEX_TYPE_ECLIPSEHELP.equals(transtype)){
            String pluginId = atts.getValue(ATTRIBUTE_NAME_ID);
            if(pluginId == null){
                pluginId = "org.sample.help.doc";
            }
            final Set<String> set = StringUtils.restoreSet(pluginId);
            pluginMap.put("pluginId", set);
        }
        //merge multiple exportanchors into one
        //Each <topicref> can only have one <topicmeta>.
        //Each <topic> can only have one <prolog>
        //and <metadata> can have more than one exportanchors
        if (INDEX_TYPE_ECLIPSEHELP.equals(transtype)) {
            if (MAP_TOPICMETA.matches(classValue)
                    || TOPIC_PROLOG.matches(classValue)) {
                topicMetaSet.add(qName);
            }
            // If the file has <exportanchors> tags only transtype =
            // eclipsehelp
            if (DELAY_D_EXPORTANCHORS.matches(classValue)) {
                hasExport = true;
                // If current file is a ditamap file
                if (FileUtils.isDITAMapFile(currentFile)) {
                    // if dita file's extension name is ".xml"
                    String editedHref = "";
                    if (topicHref.endsWith(FILE_EXTENSION_XML)) {
                        // change the extension to ".dita" for later
                        // compare
                        editedHref = topicHref.replace(
                                FILE_EXTENSION_XML,
                                FILE_EXTENSION_DITA);
                    } else {
                        editedHref = topicHref;
                    }
                    // editedHref = editedHref.replace(File.separator, "/");
                    // create file element in the StringBuffer
                    result.append("<file name=\"" + editedHref + "\">");
                    // if <exportanchors> is defined in topicmeta(topicref),
                    // there is only one topic id
                    result.append("<topicid name=\"" + topicId + "\"/>");
                    // If current file is topic file
                } else if (FileUtils.isDITATopicFile(currentFile)) {
                    String filename = FileUtils.getRelativePathFromMap(
                            rootFilePath, currentFile);
                    // if dita file's extension name is ".xml"
                    if (filename.endsWith(FILE_EXTENSION_XML)) {
                        // change the extension to ".dita" for later
                        // compare
                        filename = filename.replace(
                                FILE_EXTENSION_XML,
                                FILE_EXTENSION_DITA);
                    }
                    // filename = FileUtils.normalizeDirectory(currentDir,
                    // filename);
                    filename = filename.replace(WINDOWS_SEPARATOR,
                            UNIX_SEPARATOR);
                    // create file element in the StringBuffer
                    result.append("<file name=\"" + filename + "\">");
                    // if <exportanchors> is defined in metadata(topic),
                    // there can be many topic ids
                    result.append("<topicid name=\"" + topicId + "\">");
                    // endDocument() must emit the matching </file>
                    shouldAppendEndTag = true;
                }
                // meet <anchorkey> tag
            } else if (DELAY_D_ANCHORKEY.matches(classValue)) {
                // create keyref element in the StringBuffer
                // TODO in topic file is no keys
                final String keyref = atts
                        .getValue(ATTRIBUTE_NAME_KEYREF);
                result.append("<keyref name=\"" + keyref + "\"/>");
                // meet <anchorid> tag
            } else if (DELAY_D_ANCHORID.matches(classValue)) {
                // create keyref element in the StringBuffer
                final String id = atts.getValue(ATTRIBUTE_NAME_ID);
                // If current file is a ditamap file
                // The id can only be element id within a topic
                if (FileUtils.isDITAMapFile(currentFile)) {
                    // only for dita format
                    /*
                     * if(!"".equals(topicHref)){ String absolutePathToFile
                     * = FileUtils.resolveFile((new
                     * File(rootFilePath)).getParent(),topicHref); //whether
                     * the id is a topic id
                     * if(FileUtils.isDITAFile(absolutePathToFile)){ found =
                     * DelayConrefUtils
                     * .getInstance().findTopicId(absolutePathToFile, id); }
                     * //other format file }else{ found = false; }
                     */
                    // id shouldn't be same as topic id in the case of duplicate insert
                    if (!topicId.equals(id)) {
                        result.append("<id name=\"" + id + "\"/>");
                    }
                } else if (FileUtils.isDITATopicFile(currentFile)) {
                    // id shouldn't be same as topic id in the case of duplicate insert
                    if (!topicId.equals(id)) {
                        // topic id found
                        result.append("<id name=\"" + id + "\"/>");
                    }
                }
            }
        }
    }
    //Added by William on 2009-06-24 for req #12014 end
    // Generate Scheme relationship graph
    if (classValue != null) {
        if (SUBJECTSCHEME_SUBJECTSCHEME.matches(classValue)) {
            if (this.relationGraph == null) {
                this.relationGraph = new LinkedHashMap<String, Set<String>>();
            }
            //Make it easy to do the BFS later.
            Set<String> children = this.relationGraph.get("ROOT");
            if (children == null || children.isEmpty()) {
                children = new LinkedHashSet<String>();
            }
            children.add(this.currentFile);
            this.relationGraph.put("ROOT", children);
            schemeRefSet.add(FileUtils.getRelativePathFromMap(rootFilePath, currentFile));
        } else if (SUBJECTSCHEME_SCHEMEREF.matches(classValue)) {
            Set<String> children = this.relationGraph.get(this.currentFile);
            if (children == null) {
                children = new LinkedHashSet<String>();
                this.relationGraph.put(currentFile, children);
            }
            if (href != null) {
                children.add(FileUtils.resolveFile(rootDir, href));
            }
        }
    }
    if(foreignLevel > 0){
        //if it is an element nested in foreign/unknown element
        //do not parse it
        foreignLevel ++;
        return;
    } else if(classValue != null &&
            (TOPIC_FOREIGN.matches(classValue) ||
                    TOPIC_UNKNOWN.matches(classValue))){
        foreignLevel ++;
    }
    // chunk nesting counter
    if(chunkLevel > 0) {
        chunkLevel++;
    } else if(atts.getValue(ATTRIBUTE_NAME_CHUNK) != null) {
        chunkLevel++;
    }
    //Added by William on 2010-6-17 for bug:3016739 start
    if(relTableLevel > 0) {
        relTableLevel ++;
    } else if(classValue != null &&
            MAP_RELTABLE.matches(classValue)){
        relTableLevel++;
    }
    //Added by William on 2010-6-17 for bug:3016739 end
    if(chunkToNavLevel > 0) {
        chunkToNavLevel++;
    } else if(atts.getValue(ATTRIBUTE_NAME_CHUNK) != null
            && atts.getValue(ATTRIBUTE_NAME_CHUNK).indexOf("to-navigation") != -1){
        chunkToNavLevel++;
    }
    if(topicGroupLevel > 0) {
        topicGroupLevel++;
    } else if (atts.getValue(ATTRIBUTE_NAME_CLASS) != null
            && atts.getValue(ATTRIBUTE_NAME_CLASS).contains(MAPGROUP_D_TOPICGROUP.matcher)) {
        topicGroupLevel++;
    }
    // warn about elements without a class attribute (non-DITA root)
    if(classValue==null && !ELEMENT_NAME_DITA.equals(localName)){
        params.clear();
        params.put("%1", localName);
        logger.logInfo(MessageUtils.getMessage("DOTJ030I", params).toString());
    }
    if (classValue != null && TOPIC_TOPIC.matches(classValue)){
        domains = atts.getValue(ATTRIBUTE_NAME_DOMAINS);
        if(domains==null){
            params.clear();
            params.put("%1", localName);
            logger.logInfo(MessageUtils.getMessage("DOTJ029I", params).toString());
        } else {
            props = StringUtils.getExtProps(domains);
        }
    }
    if (insideExcludedElement) {
        ++excludedLevel;
        return;
    }
    // Ignore element that has been filtered out.
    if (filterUtils.needExclude(atts, props)) {
        insideExcludedElement = true;
        ++excludedLevel;
        return;
    }
    /*
     * For ditamap, set it to valid if element <map> or extended from
     * <map> was found, this kind of element's class attribute must
     * contains 'map/map';
     * For topic files, set it to valid if element <title> or extended
     * from <title> was found, this kind of element's class attribute
     * must contains 'topic/title'.
     */
    if (classValue != null) {
        if ((MAP_MAP.matches(classValue))
                || (TOPIC_TITLE.matches(classValue))) {
            isValidInput = true;
        }else if (TOPIC_OBJECT.matches(classValue)){
            parseAttribute(atts, ATTRIBUTE_NAME_DATA);
        }
    }
    //Added by William on 2010-03-02 for /onlytopicinmap update start.
    //onlyTopicInMap is on.
    if(OutputUtils.getOnlyTopicInMap() && this.canResolved()){
        //topicref(only defined in ditamap file.)
        if(MAP_TOPICREF.matches(classValue)){
            //get href attribute value.
            final String hrefValue = atts.getValue(ATTRIBUTE_NAME_HREF);
            //get conref attribute value.
            final String conrefValue = atts.getValue(ATTRIBUTE_NAME_CONREF);
            //has href attribute and refer to ditamap file.
            if(!StringUtils.isEmptyString(hrefValue)){
                //exclude external resources
                final String attrScope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
                if ("external".equalsIgnoreCase(attrScope)
                        || "peer".equalsIgnoreCase(attrScope)
                        || hrefValue.indexOf(COLON_DOUBLE_SLASH) != -1
                        || hrefValue.startsWith(SHARP)) {
                    return;
                }
                //normalize href value.
                final File target=new File(hrefValue);
                //calculate relative path for href value.
                String fileName = null;
                if(target.isAbsolute()){
                    fileName = FileUtils.getRelativePathFromMap(rootFilePath,hrefValue);
                }
                fileName = FileUtils.normalizeDirectory(currentDir, hrefValue);
                //change '\' to '/' for comparison.
                fileName = fileName.replace(WINDOWS_SEPARATOR,
                        UNIX_SEPARATOR);
                final boolean canParse = parseBranch(atts, hrefValue, fileName);
                if(!canParse){
                    return;
                }else{
                    topicrefStack.push(localName);
                }
            }else if(!StringUtils.isEmptyString(conrefValue)){
                //exclude external resources
                final String attrScope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
                if ("external".equalsIgnoreCase(attrScope)
                        || "peer".equalsIgnoreCase(attrScope)
                        || conrefValue.indexOf(COLON_DOUBLE_SLASH) != -1
                        || conrefValue.startsWith(SHARP)) {
                    return;
                }
                //normalize href value.
                final File target=new File(conrefValue);
                //calculate relative path for href value.
                String fileName = null;
                if(target.isAbsolute()){
                    fileName = FileUtils.getRelativePathFromMap(rootFilePath,conrefValue);
                }
                fileName = FileUtils.normalizeDirectory(currentDir, conrefValue);
                //change '\' to '/' for comparison.
                fileName = fileName.replace(WINDOWS_SEPARATOR,
                        UNIX_SEPARATOR);
                final boolean canParse = parseBranch(atts, conrefValue, fileName);
                if(!canParse){
                    return;
                }else{
                    topicrefStack.push(localName);
                }
            }
        }
    }
    //Added by William on 2010-03-02 for /onlytopicinmap update end.
    // record every reference-bearing attribute of this element
    parseAttribute(atts, ATTRIBUTE_NAME_CONREF);
    parseAttribute(atts, ATTRIBUTE_NAME_HREF);
    parseAttribute(atts, ATTRIBUTE_NAME_COPY_TO);
    parseAttribute(atts, ATTRIBUTE_NAME_IMG);
    parseAttribute(atts, ATTRIBUTE_NAME_CONACTION);
    parseAttribute(atts, ATTRIBUTE_NAME_KEYS);
    parseAttribute(atts, ATTRIBUTE_NAME_CONKEYREF);
    parseAttribute(atts, ATTRIBUTE_NAME_KEYREF);
}
/**
 * Decide whether a branch (topicref subtree) should be parsed under
 * /onlytopicinmap.
 *
 * In the primary ditamap every branch is parsed; in a sub-ditamap a
 * branch is parsed only if it (or an ancestor, tracked via
 * {@code level}) was referenced from an already-parsed file.
 *
 * @param atts {@link Attributes} of the topicref element
 * @param hrefValue href (or conref) attribute value
 * @param fileName normalized file name (without '#')
 * @return {@code true} when the branch should be parsed
 */
private boolean parseBranch(final Attributes atts, final String hrefValue, final String fileName) {
    //current file is primary ditamap file.
    //parse every branch.
    final String currentFileRelative = FileUtils.getRelativePathFromMap(
            rootFilePath, currentFile);
    if(currentDir == null && currentFileRelative.equals(primaryDitamap)){
        //add branches into map
        addReferredBranches(hrefValue, fileName);
        return true;
    }else{
        //current file is a sub-ditamap one.
        //get branch's id
        final String id = atts.getValue(ATTRIBUTE_NAME_ID);
        //this branch is not referenced
        if(level == 0 && StringUtils.isEmptyString(id)){
            //There is occasion that the whole ditamap should be parsed
            final boolean found = searchBrachesMap(id);
            if(found){
                //Add this branch into map for parsing.
                addReferredBranches(hrefValue, fileName);
                //update level
                level++;
                return true;
            }else{
                return false;
            }
            //this branch is a descendant of a referenced one
        }else if(level != 0){
            //Add this branch into map for parsing.
            addReferredBranches(hrefValue, fileName);
            //update level
            level++;
            return true;
            //This branch has an id but is a new one
        }else if(!StringUtils.isEmptyString(id)){
            //search branches map.
            final boolean found = searchBrachesMap(id);
            //branch is referenced
            if(found){
                //Add this branch into map for parsing.
                addReferredBranches(hrefValue, fileName);
                //update level
                level ++;
                return true;
            }else{
                //this branch is not referenced
                return false;
            }
        }else{
            return false;
        }
    }
}
/**
 * Search the valid-branches map with a branch id and the current
 * file name.
 *
 * An empty id list recorded for the file means "the whole map is
 * referenced", so any branch in it qualifies.
 *
 * @param id branch id to look for
 * @return {@code true} if the branch (or its whole map) is referenced
 */
private boolean searchBrachesMap(final String id) {
    // relative path of the current file, used as the map key
    final String currentFileRelative = FileUtils.getRelativePathFromMap(
            rootFilePath, currentFile);
    if (!vaildBranches.containsKey(currentFileRelative)) {
        // current file is not referenced at all
        return false;
    }
    final List<String> branchIdList = vaildBranches.get(currentFileRelative);
    // referenced explicitly by id, or the whole map is referenced
    return branchIdList.contains(id) || branchIdList.isEmpty();
}
/**
 * Record a referenced branch in the valid-branches map.
 *
 * When the href carries a fragment ('#'), the branch id is appended
 * to the file's id list; otherwise an empty list is stored, which
 * means the whole map is referenced.
 *
 * @param hrefValue raw href (or conref) value, possibly with '#id'
 * @param fileName normalized file name used as the map key
 */
private void addReferredBranches(final String hrefValue, final String fileName) {
    if (hrefValue.contains(SHARP)) {
        // href value has a branch id: register it under the file name
        final String branchId = hrefValue.substring(hrefValue.lastIndexOf(SHARP) + 1);
        List<String> branchIdList = vaildBranches.get(fileName);
        if (branchIdList == null) {
            branchIdList = new ArrayList<String>();
            vaildBranches.put(fileName, branchIdList);
        }
        branchIdList.add(branchId);
    } else {
        // no branch id: the empty list marks the whole map as referenced
        vaildBranches.put(fileName, new ArrayList<String>());
    }
}
/**
* Clean up.
*/
@Override
public void endDocument() throws SAXException {
if (processRoleLevel > 0) {
processRoleLevel
processRoleStack.pop();
}
//Added by William on 2009-07-15 for req #12014 start
if(FileUtils.isDITATopicFile(currentFile) && shouldAppendEndTag){
result.append("</file>");
//should reset
shouldAppendEndTag = false;
}
//Added by William on 2009-07-15 for req #12014 end
//update keysDefMap for multi-level keys for bug:3052913
checkMultiLevelKeys(keysDefMap, keysRefMap);
}
/**
 * Mark the whole document as resource-only when the current file is
 * a ditamap referenced exclusively with
 * {@code @processing-role="resource-only"} (and never as a normal
 * cross reference).
 */
@Override
public void startDocument() throws SAXException {
    final String href = FileUtils.getRelativePathFromMap(rootFilePath, currentFile);
    final boolean resourceOnlyMap = FileUtils.isDITAMapFile(currentFile)
            && resourceOnlySet.contains(href)
            && !crossSet.contains(href);
    if (resourceOnlyMap) {
        processRoleLevel++;
        processRoleStack.push(ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY);
    }
}
@Override
public void endElement(final String uri, final String localName, final String qName) throws SAXException {
//subject scheme
//if (currentElement != null && currentElement != schemeRoot.getDocumentElement()) {
// currentElement = (Element)currentElement.getParentNode();
//@processing-role
if (processRoleLevel > 0) {
if (processRoleLevel == processRoleStack.size()) {
processRoleStack.pop();
}
processRoleLevel
}
if (foreignLevel > 0){
foreignLevel
return;
}
if (chunkLevel > 0) {
chunkLevel
}
//Added by William on 2010-06-17 for bug:3016739 start
if (relTableLevel > 0) {
relTableLevel
}
//Added by William on 2010-06-17 for bug:3016739 end
if (chunkToNavLevel > 0) {
chunkToNavLevel
}
if (topicGroupLevel > 0) {
topicGroupLevel
}
if (insideExcludedElement) {
// end of the excluded element, mark the flag as false
if (excludedLevel == 1) {
insideExcludedElement = false;
}
--excludedLevel;
}
//Added by William on 2009-06-24 for req #12014 start
//<exportanchors> over should write </file> tag
if(topicMetaSet.contains(qName) && hasExport){
//If current file is a ditamap file
if(FileUtils.isDITAMapFile(currentFile)){
result.append("</file>");
//If current file is topic file
}else if(FileUtils.isDITATopicFile(currentFile)){
result.append("</topicid>");
}
hasExport = false;
topicMetaSet.clear();
}
//Added by William on 2009-06-24 for req #12014 start
//Added by William on 2010-03-02 for /onlytopicinmap update start
if(!topicrefStack.isEmpty() && localName.equals(topicrefStack.peek())){
level
topicrefStack.pop();
}
//Added by William on 2010-03-02 for /onlytopicinmap update end
//Added by William on 2010-06-01 for bug:3005748 start
//decrease element level.
ditaAttrUtils.decreasePrintLevel();
//Added by William on 2010-06-01 for bug:3005748 end
}
/**
 * Resolve the publicId/systemId used in XMLCatalog.
 *
 * Improvement: each catalog lookup is performed once instead of twice
 * (the original called {@code catalogMap.get(...)} both in the test
 * and in the branch body).
 *
 * @see org.dita.dost.reader.AbstractXMLReader#resolveEntity(String, String)
 * @param publicId publicId in doctype declarations
 * @param systemId systemId in doctype declarations
 * @return the resolved InputSource, or {@code null} for default resolution
 * @throws java.io.IOException if dita-catalog.xml is not available
 * @exception org.xml.sax.SAXException if dita-catalog.xml is not in valid format.
 */
@Override
public InputSource resolveEntity(final String publicId, final String systemId)
        throws SAXException, IOException {
    final String publicEntry = catalogMap.get(publicId);
    if (publicEntry != null) {
        final File dtdFile = new File(publicEntry);
        return new InputSource(dtdFile.getAbsolutePath());
    }
    final String systemEntry = catalogMap.get(systemId);
    if (systemEntry != null) {
        final File schemaFile = new File(systemEntry);
        return new InputSource(schemaFile.getAbsolutePath());
    }
    // unknown id: let the parser fall back to its default resolution
    return null;
}
/**
* Parse the input attributes for needed information.
*/
private void parseAttribute(final Attributes atts, final String attrName) throws SAXException {
String attrValue = atts.getValue(attrName);
String filename = null;
final String attrClass = atts.getValue(ATTRIBUTE_NAME_CLASS);
final String attrScope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
final String attrFormat = atts.getValue(ATTRIBUTE_NAME_FORMAT);
final String attrType = atts.getValue(ATTRIBUTE_NAME_TYPE);
//Added on 20100830 for bug:3052156 start
final String codebase = atts.getValue(ATTRIBUTE_NAME_CODEBASE);
//Added on 20100830 for bug:3052156 end
if (attrValue == null) {
return;
}
// @conkeyref will be resolved to @conref in Debug&Fileter step
if (ATTRIBUTE_NAME_CONREF.equals(attrName) || ATTRIBUTE_NAME_CONKEYREF.equals(attrName)) {
hasConRef = true;
} else if (ATTRIBUTE_NAME_HREF.equals(attrName)) {
if(attrClass != null &&
PR_D_CODEREF.matches(attrClass) ){
//if current element is <coderef> or its specialization
//set hasCodeRef to true
hasCodeRef = true;
}else{
hasHref = true;
}
} else if(ATTRIBUTE_NAME_KEYREF.equals(attrName)){
hasKeyRef = true;
}
// collect the key definitions
if(ATTRIBUTE_NAME_KEYS.equals(attrName) && attrValue.length() != 0){
String target = atts.getValue(ATTRIBUTE_NAME_HREF);
final String keyRef = atts.getValue(ATTRIBUTE_NAME_KEYREF);
final String copy_to = atts.getValue(ATTRIBUTE_NAME_COPY_TO);
if (!StringUtils.isEmptyString(copy_to)) {
target = copy_to;
}
//Added on 20100825 for bug:3052904 start
//avoid NullPointException
if(target == null){
target = "";
}
//Added on 20100825 for bug:3052904 end
//store the target
final String temp = target;
// Many keys can be defined in a single definition, like keys="a b c", a, b and c are seperated by blank.
for(final String key: attrValue.split(" ")){
if(!keysDefMap.containsKey(key) && !key.equals("")){
if(target != null && target.length() != 0){
if(attrScope!=null && (attrScope.equals("external") || attrScope.equals("peer"))){
//Added by William on 2010-06-09 for bug:3013079 start
//store external or peer resources.
exKeysDefMap.put(key, target);
//Added by William on 2010-06-09 for bug:3013079 end
keysDefMap.put(key, new KeyDef(key, target, null));
}else{
String tail = "";
if(target.indexOf(SHARP) != -1){
tail = target.substring(target.indexOf(SHARP));
target = target.substring(0, target.indexOf(SHARP));
}
if(new File(target).isAbsolute()) {
target = FileUtils.getRelativePathFromMap(rootFilePath, target);
}
target = FileUtils.normalizeDirectory(currentDir, target);
keysDefMap.put(key, new KeyDef(key, target + tail, null));
}
}else if(!StringUtils.isEmptyString(keyRef)){
//store multi-level keys.
keysRefMap.put(key, keyRef);
}else{
// target is null or empty, it is useful in the future when consider the content of key definition
keysDefMap.put(key, new KeyDef(key, null, null));
}
}else{
final Properties prop = new Properties();
prop.setProperty("%1", key);
prop.setProperty("%2", target);
// DOTJ045W also exists
logger.logInfo(MessageUtils.getMessage("DOTJ045I", prop).toString());
}
//restore target
target = temp;
}
}
/*
if (attrValue.startsWith(SHARP)
|| attrValue.indexOf(COLON_DOUBLE_SLASH) != -1){
return;
}
*/
/*
* SF Bug 2724090, broken links in conref'ed footnotes.
*
* NOTE: Need verification.
if (attrValue.startsWith(SHARP)) {
attrValue = currentFile;
}
*/
//external resource is filtered here.
if ("external".equalsIgnoreCase(attrScope)
|| "peer".equalsIgnoreCase(attrScope)
|| attrValue.indexOf(COLON_DOUBLE_SLASH) != -1
|| attrValue.startsWith(SHARP)) {
return;
}
//Added by William on 2010-01-05 for bug:2926417 start
if(attrValue.startsWith("file:/") && attrValue.indexOf("file:
attrValue = attrValue.substring("file:/".length());
//Unix like OS
if(UNIX_SEPARATOR.equals(File.separator)){
attrValue = UNIX_SEPARATOR + attrValue;
}
}
//Added by William on 2010-01-05 for bug:2926417 end
final File target=new File(attrValue);
if(target.isAbsolute() &&
!ATTRIBUTE_NAME_DATA.equals(attrName)){
attrValue=FileUtils.getRelativePathFromMap(rootFilePath,attrValue);
//for object tag bug:3052156
}else if(ATTRIBUTE_NAME_DATA.equals(attrName)){
if(!StringUtils.isEmptyString(codebase)){
filename = FileUtils.normalizeDirectory(codebase, attrValue);
}else{
filename = FileUtils.normalizeDirectory(currentDir, attrValue);
}
}else{
//noraml process.
filename = FileUtils.normalizeDirectory(currentDir, attrValue);
}
if (filename != null) {
try{
filename = URLDecoder.decode(filename, UTF8);
}catch(final UnsupportedEncodingException e){
}
}
if (MAP_TOPICREF.matches(attrClass)) {
if (ATTR_TYPE_VALUE_SUBJECT_SCHEME.equalsIgnoreCase(attrType)) {
schemeSet.add(filename);
}
//Added by William on 2009-06-24 for req #12014 start
//only transtype = eclipsehelp
if(INDEX_TYPE_ECLIPSEHELP.equals(transtype)){
//For only format of the href is dita topic
if (attrFormat == null ||
ATTR_FORMAT_VALUE_DITA.equalsIgnoreCase(attrFormat)){
if(attrName.equals(ATTRIBUTE_NAME_HREF)){
topicHref = filename;
topicHref = topicHref.replace(WINDOWS_SEPARATOR, UNIX_SEPARATOR);
//attrValue has topicId
if(attrValue.lastIndexOf(SHARP) != -1){
//get the topicId position
final int position = attrValue.lastIndexOf(SHARP);
topicId = attrValue.substring(position + 1);
}else{
//get the first topicId(vaild href file)
if(FileUtils.isDITAFile(topicHref)){
//topicId = MergeUtils.getInstance().getFirstTopicId(topicHref, (new File(rootFilePath)).getParent(), true);
//to be unique
topicId = topicHref + QUESTION;
}
}
}
}else{
topicHref = "";
topicId = "";
}
}
//Added by William on 2009-06-24 for req #12014 end
}
//files referred by coderef won't effect the uplevels, code has already returned.
if (("DITA-foreign".equals(attrType) &&
ATTRIBUTE_NAME_DATA.equals(attrName))
|| attrClass!=null && PR_D_CODEREF.matches(attrClass)){
subsidiarySet.add(filename);
return;
}
/*
* Collect non-conref and non-copyto targets
*/
if (filename != null && FileUtils.isValidTarget(filename.toLowerCase()) &&
(StringUtils.isEmptyString(atts.getValue(ATTRIBUTE_NAME_COPY_TO)) ||
!FileUtils.isTopicFile(atts.getValue(ATTRIBUTE_NAME_COPY_TO).toLowerCase()) ||
(atts.getValue(ATTRIBUTE_NAME_CHUNK)!=null && atts.getValue(ATTRIBUTE_NAME_CHUNK).contains("to-content")) )
&& !ATTRIBUTE_NAME_CONREF.equals(attrName)
&& !ATTRIBUTE_NAME_COPY_TO.equals(attrName) &&
(canResolved() || FileUtils.isSupportedImageFile(filename.toLowerCase()))) {
//edited by william on 2009-08-06 for bug:2832696 start
if(attrFormat!=null){
nonConrefCopytoTargets.add(filename + STICK + attrFormat);
}else{
nonConrefCopytoTargets.add(filename);
}
//nonConrefCopytoTargets.add(filename);
//edited by william on 2009-08-06 for bug:2832696 end
}
//outside ditamap files couldn't cause warning messages, it is stopped here
if (attrFormat != null &&
!ATTR_FORMAT_VALUE_DITA.equalsIgnoreCase(attrFormat)){
//The format of the href is not dita topic
//The logic after this "if" clause is not related to files other than dita topic.
//Therefore, we need to return here to filter out those files in other format.
return;
}
/*
* Collect only href target topic files for index extracting.
*/
if (ATTRIBUTE_NAME_HREF.equals(attrName)
&& FileUtils.isTopicFile(filename) && canResolved()) {
hrefTargets.add(new File(filename).getPath());
toOutFile(new File(filename).getPath());
//use filename instead(It has already been resolved before-hand) bug:3058124
//String pathWithoutID = FileUtils.resolveFile(currentDir, attrValue);
if (chunkLevel > 0 && chunkToNavLevel == 0 && topicGroupLevel == 0 && relTableLevel == 0) {
chunkTopicSet.add(filename);
} else {
hrefTopicSet.add(filename);
}
}
//Added on 20100827 for bug:3052156 start
//add a warning message for outer files refered by @data
/*if(ATTRIBUTE_NAME_DATA.equals(attrName)){
toOutFile(new File(filename).getPath());
}*/
//Added on 20100827 for bug:3052156 end
/*
* Collect only conref target topic files.
*/
if (ATTRIBUTE_NAME_CONREF.equals(attrName)
&& FileUtils.isDITAFile(filename)) {
conrefTargets.add(filename);
toOutFile(new File(filename).getPath());
}
// Collect copy-to (target,source) into hash map
if (ATTRIBUTE_NAME_COPY_TO.equals(attrName)
&& FileUtils.isTopicFile(filename)) {
final String href = atts.getValue(ATTRIBUTE_NAME_HREF);
if (StringUtils.isEmptyString(href)) {
final StringBuffer buff = new StringBuffer();
buff.append("[WARN]: Copy-to task [href=\"\" copy-to=\"");
buff.append(filename);
buff.append("\"] was ignored.");
logger.logWarn(buff.toString());
} else if (copytoMap.get(filename) != null){
//edited by Alan on Date:2009-11-02 for Work Item:#1590 start
/*StringBuffer buff = new StringBuffer();
buff.append("Copy-to task [href=\"");
buff.append(href);
buff.append("\" copy-to=\"");
buff.append(filename);
buff.append("\"] which points to another copy-to target");
buff.append(" was ignored.");
javaLogger.logWarn(buff.toString());*/
final Properties prop = new Properties();
prop.setProperty("%1", href);
prop.setProperty("%2", filename);
logger.logWarn(MessageUtils.getMessage("DOTX065W", prop).toString());
//edited by Alan on Date:2009-11-02 for Work Item:#1590 end
ignoredCopytoSourceSet.add(href);
} else if (!(atts.getValue(ATTRIBUTE_NAME_CHUNK) != null && atts.getValue(ATTRIBUTE_NAME_CHUNK).contains("to-content"))){
copytoMap.put(filename, FileUtils.normalizeDirectory(currentDir, href));
}
final String pathWithoutID = FileUtils.resolveFile(currentDir, attrValue);
if (chunkLevel > 0 && chunkToNavLevel == 0 && topicGroupLevel == 0) {
chunkTopicSet.add(pathWithoutID);
} else {
hrefTopicSet.add(pathWithoutID);
}
}
/*
* Collect the conaction source topic file
*/
if(ATTRIBUTE_NAME_CONACTION.equals(attrName)){
if(attrValue.equals("mark")||attrValue.equals("pushreplace")){
hasconaction = true;
}
}
}
//Added on 20100826 for bug:3052913 start
/**
 * Collect, depth-first, every key that (directly or transitively)
 * references the given key through keysRefMap.
 *
 * @param key the key being referenced
 * @param keysRefMap map of key name to the key it references
 * @return list of all referencing keys
 */
private List<String> getKeysList(final String key, final Map<String, String> keysRefMap) {
    final List<String> collected = new ArrayList<String>();
    for (final Map.Entry<String, String> entry : keysRefMap.entrySet()) {
        // this entry points at the key we are resolving
        if (entry.getValue().equals(key)) {
            final String referencingKey = entry.getKey();
            collected.add(referencingKey);
            // the referencing key may itself be referenced: recurse
            if (keysRefMap.containsValue(referencingKey)) {
                collected.addAll(getKeysList(referencingKey, keysRefMap));
            }
        }
    }
    return collected;
}
/**
 * Update keysDefMap so that multi-level key references resolve: every key that
 * (transitively) references a defined key is mapped to that key's definition.
 */
private void checkMultiLevelKeys(final Map<String, KeyDef> keysDefMap,
        final Map<String, String> keysRefMap) {
    // Collect new entries in a separate map to avoid a
    // ConcurrentModificationException while iterating keysDefMap.
    final Map<String, KeyDef> resolved = new HashMap<String, KeyDef>();
    for (final Map.Entry<String, KeyDef> entry : keysDefMap.entrySet()) {
        final String definedKey = entry.getKey();
        // Some reference points at this definition: map every
        // transitively-referencing key to the same definition.
        if (keysRefMap.containsValue(definedKey)) {
            for (final String multiLevelKey : getKeysList(definedKey, keysRefMap)) {
                resolved.put(multiLevelKey, entry.getValue());
            }
        }
    }
    // Merge the resolved multi-level entries back into keysDefMap.
    keysDefMap.putAll(resolved);
}
//Added on 20100826 for bug:3052913 end
/**
 * Check whether a relative path escapes the input directory tree.
 *
 * @param toCheckPath relative path to check
 * @return true when the path begins with ".." (points outside the root)
 */
private boolean isOutFile(final String toCheckPath) {
    return toCheckPath.startsWith("..");
}
/**
 * Check whether the file currently being processed is a DITA map.
 *
 * @return true when the redundancy-stripped current file name is a DITA map file
 */
private boolean isMapFile() {
    return FileUtils.isDITAMapFile(FileUtils.removeRedundantNames(currentFile));
}
/**
 * Decide whether the current file may be resolved: either the
 * "only topic in map" option is off, or the current file is itself a map.
 */
private boolean canResolved() {
    return !OutputUtils.getOnlyTopicInMap() || isMapFile();
}
/**
 * Add a referenced file to the out-of-directory DITA file set,
 * but only when resolution is permitted for the current file.
 *
 * @param hrefedFile the referenced file to record
 */
private void addToOutFilesSet(final String hrefedFile) {
    if (!canResolved()) {
        return;
    }
    outDitaFilesSet.add(hrefedFile);
}
/*
private Element createElement(String uri, String qName,
Attributes atts) {
if (schemeRoot != null) {
Element element = schemeRoot.createElementNS(uri, qName);
for (int i = 0; i < atts.getLength(); i++) {
element.setAttribute(atts.getQName(i), atts.getValue(i));
}
return element;
}
return null;
}
*/
/**
 * Handle a referenced file that may lie outside the input directory.
 * Depending on the configured outer-file control this either fails the parse
 * (message DOTJ035F), logs a warning (message DOTJ036W), or silently records
 * the file via {@link #addToOutFilesSet(String)}.
 *
 * @param filename path of the referenced file, relative to the DITA input file
 * @throws SAXException wrapping a DITAOTException when the outer control is FAIL
 */
private void toOutFile(final String filename) throws SAXException {
    //the filename is a relative path from the dita input file
    // %1 = absolute referenced file, %2 = referencing file, used by the messages below
    final Properties prop=new Properties();
    prop.put("%1", FileUtils.normalizeDirectory(rootDir, filename));
    prop.put("%2", FileUtils.removeRedundantNames(currentFile));
    // Only the NOT_GENERATEOUTTER / GENERATEOUTTER modes enforce the outer-file policy.
    if ((OutputUtils.getGeneratecopyouter() == OutputUtils.Generate.NOT_GENERATEOUTTER)
            || (OutputUtils.getGeneratecopyouter() == OutputUtils.Generate.GENERATEOUTTER)) {
        if (isOutFile(filename)) {
            // FAIL: abort parsing with message DOTJ035F wrapped in a SAXParseException.
            if (OutputUtils.getOutterControl().equals(OutputUtils.OutterControl.FAIL)){
                final MessageBean msgBean=MessageUtils.getMessage("DOTJ035F", prop);
                throw new SAXParseException(null,null,new DITAOTException(msgBean,null,msgBean.toString()));
            }
            // WARN: log message DOTJ036W and continue processing.
            if (OutputUtils.getOutterControl().equals(OutputUtils.OutterControl.WARN)){
                final String message=MessageUtils.getMessage("DOTJ036W",prop).toString();
                logger.logWarn(message);
            }
            addToOutFilesSet(filename);
        }
    }
}
/**
 * Get the set of DITA files referenced from outside the input directory.
 * @return out file set
 */
public Set<String> getOutFilesSet(){
    return outDitaFilesSet;
}
/**
 * Get the set of topics referenced by href.
 * @return the hrefTopicSet
 */
public Set<String> getHrefTopicSet() {
    return hrefTopicSet;
}
/**
 * Get the set of topics referenced for chunking.
 * @return the chunkTopicSet
 */
public Set<String> getChunkTopicSet() {
    return chunkTopicSet;
}
/**
 * Get scheme set.
 * @return scheme set
 */
public Set<String> getSchemeSet() {
    return this.schemeSet;
}
/**
 * Get scheme ref set.
 * @return scheme ref set
 */
public Set<String> getSchemeRefSet() {
    return this.schemeRefSet;
}
/**
 * List of files with "@processing-role=resource-only".
 *
 * NOTE: this getter mutates state — it removes all crossSet entries from
 * resourceOnlySet before returning it, so repeated calls are not side-effect free.
 *
 * @return the resource-only set
 */
public Set<String> getResourceOnlySet() {
    resourceOnlySet.removeAll(crossSet);
    return resourceOnlySet;
}
// NOTE(review): getSchemeRoot() has been commented out; the fragment below is
// kept only as a record of the former API.
//public Document getSchemeRoot() {
//    return schemeRoot;
/**
 * Get the relationship graph between maps and topics.
 * NOTE(review): "Grap" is a typo in the public method name; kept for
 * backward compatibility with existing callers.
 * @return relationship graph
 */
public Map<String, Set<String>> getRelationshipGrap() {
    return this.relationGraph;
}
/**
 * Get the catalog map.
 * @return the catalogMap
 */
public Map<String, String> getCatalogMap() {
    return catalogMap;
}
/**
 * Get the primary (root) ditamap being processed.
 * @return the primary ditamap path
 */
public String getPrimaryDitamap() {
    return primaryDitamap;
}
/**
 * Set the primary (root) ditamap being processed.
 * @param primaryDitamap the primary ditamap path
 */
public void setPrimaryDitamap(final String primaryDitamap) {
    this.primaryDitamap = primaryDitamap;
}
} |
package org.exist.storage.txn;
import net.jcip.annotations.ThreadSafe;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.exist.EXistException;
import org.exist.security.PermissionDeniedException;
import org.exist.storage.*;
import org.exist.storage.journal.JournalException;
import org.exist.storage.journal.JournalManager;
import org.exist.storage.sync.Sync;
import org.exist.util.LockException;
import org.exist.xmldb.XmldbURI;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
/**
* The Transaction Manager provides methods to begin, commit, and abort
* transactions.
*
* This implementation of the transaction manager is non-blocking lock-free.
* It makes use of several CAS variables to ensure thread-safe concurrent
* access. The most important of which is {@link #state} which indicates
* either:
* 1) the number of active transactions
* 2) that the Transaction Manager is executing system
* tasks ({@link #STATE_SYSTEM}), during which time no
* other transactions are active.
* 3) that the Transaction Manager has (or is)
* been shutdown ({@link #STATE_SHUTDOWN}).
*
* NOTE: the Transaction Manager may optimistically briefly enter
* the state {@link #STATE_SYSTEM} to block the initiation of
* new transactions and then NOT execute system tasks if it
* detects concurrent active transactions.
*
* System tasks are mutually exclusive with any other operation
* including shutdown. When shutdown is requested, if system tasks
* are executing, then the thread will spin until they are finished.
*
* There's only one TransactionManager per database instance, it can be
* accessed via {@link BrokerPool#getTransactionManager()}.
*
* @author Adam Retter <adam@evolvedbinary.com>
* @author wolf
*/
@ThreadSafe
public class TransactionManager implements BrokerPoolService {
private static final Logger LOG = LogManager.getLogger(TransactionManager.class);
private final BrokerPool pool;
private final Optional<JournalManager> journalManager;
private final SystemTaskManager systemTaskManager;
/**
* The next transaction id
*/
private final AtomicLong nextTxnId = new AtomicLong();
/**
* Currently active transactions and their operations journal write count.
* Key is the transaction id
* Value is the transaction's operations journal write count.
*/
private final ConcurrentHashMap<Long, TxnCounter> transactions = new ConcurrentHashMap<>();
/**
* State for when the Transaction Manager has been shutdown.
*/
private static final int STATE_SHUTDOWN = -2;
/**
* State for when the Transaction Manager has executing system tasks.
*/
private static final int STATE_SYSTEM = -1;
/**
* State for when the Transaction Manager is idle, i.e. no active transactions.
*/
private static final int STATE_IDLE = 0;
/**
* State of the transaction manager.
*
* Will be either {@link #STATE_SHUTDOWN}, {@link #STATE_SYSTEM},
* {@link #STATE_IDLE} or a non-zero positive integer which
* indicates the number of active transactions.
*/
private final AtomicInteger state = new AtomicInteger(STATE_IDLE);
/**
* Id of the thread which is executing system tasks when
* the {@link #state} == {@link #STATE_SYSTEM}. This
* is used for reentrancy when system tasks need to
* make transactional operations.
*/
private final AtomicLong systemThreadId = new AtomicLong(-1);
/**
* Constructs a transaction manager for a Broker Pool.
*
* @param pool the broker pool
* @param journalManager the journal manager
* @param systemTaskManager the system task manager
*/
public TransactionManager(final BrokerPool pool, final Optional<JournalManager> journalManager,
final SystemTaskManager systemTaskManager) {
this.pool = pool;
this.journalManager = journalManager;
this.systemTaskManager = systemTaskManager;
}
private static void throwShutdownException() {
//TODO(AR) API should be revised in future so that this is a TransactionException
throw new RuntimeException("Transaction Manager is shutdown");
}
/**
* Create a new transaction.
*
* @return the new transaction
*/
public Txn beginTransaction() {
try {
// CAS loop
while (true) {
final int localState = state.get();
// can NOT begin transaction when shutdown!
if (localState == STATE_SHUTDOWN) {
throwShutdownException();
}
// must NOT begin transaction when another thread is processing system tasks!
if (localState == STATE_SYSTEM) {
final long thisThreadId = Thread.currentThread().getId();
if (systemThreadId.compareAndSet(thisThreadId, thisThreadId)) {
// our thread is executing system tasks, allow reentrancy from our thread!
// done... return from CAS loop!
return doBeginTransaction();
} else {
// spin whilst another thread executes the system tasks
// sleep a small time to save CPU
Thread.sleep(10);
continue;
}
}
// if we are operational and are not preempted by another thread, begin transaction
if (localState >= STATE_IDLE && state.compareAndSet(localState, localState + 1)) {
// done... return from CAS loop!
return doBeginTransaction();
}
}
} catch (final InterruptedException e) {
// thrown by Thread.sleep
Thread.currentThread().interrupt();
//TODO(AR) API should be revised in future so that this is a TransactionException
throw new RuntimeException(e);
}
}
private Txn doBeginTransaction() {
final long txnId = nextTxnId.getAndIncrement();
if (journalManager.isPresent()) {
try {
journalManager.get().journal(new TxnStart(txnId));
} catch (final JournalException e) {
LOG.error("Failed to create transaction. Error writing to Journal", e);
}
}
/*
* NOTE: we intentionally increment the txn counter here
* to set the counter to 1 to represent the TxnStart,
* as that will not be done
* by {@link JournalManager#journal(Loggable)} or
* {@link Journal#writeToLog(loggable)}.
*/
transactions.put(txnId, new TxnCounter().increment());
final Txn txn = new Txn(this, txnId);
// TODO(AR) ultimately we should be doing away with DBBroker#setCurrentTransaction
try(final DBBroker broker = pool.getBroker()) {
broker.setCurrentTransaction(txn);
} catch(final EXistException ee) {
LOG.fatal(ee.getMessage(), ee);
throw new RuntimeException(ee);
}
return txn;
}
/**
* Commit a transaction.
*
* @param txn the transaction to commit.
*
* @throws TransactionException if the transaction could not be committed.
*/
public void commit(final Txn txn) throws TransactionException {
Objects.requireNonNull(txn);
if(txn instanceof Txn.ReusableTxn) {
txn.commit();
return;
}
//we can only commit something which is in the STARTED state
if (txn.getState() != Txn.State.STARTED) {
return;
}
// CAS loop
try {
while (true) {
final int localState = state.get();
// can NOT commit transaction when shutdown!
if (localState == STATE_SHUTDOWN) {
throwShutdownException();
}
// must NOT commit transaction when another thread is processing system tasks!
if (localState == STATE_SYSTEM) {
final long thisThreadId = Thread.currentThread().getId();
if (systemThreadId.compareAndSet(thisThreadId, thisThreadId)) {
// our thread is executing system tasks, allow reentrancy from our thread!
doCommitTransaction(txn);
// done... exit CAS loop!
return;
} else {
// spin whilst another thread executes the system tasks
// sleep a small time to save CPU
Thread.sleep(10);
continue;
}
}
// if we are have active transactions and are not preempted by another thread, commit transaction
if (localState > STATE_IDLE && state.compareAndSet(localState, localState - 1)) {
doCommitTransaction(txn);
// done... exit CAS loop!
return;
}
}
} catch (final InterruptedException e) {
// thrown by Thread.sleep
Thread.currentThread().interrupt();
//TODO(AR) API should be revised in future so that this is a TransactionException
throw new RuntimeException(e);
}
}
private void doCommitTransaction(final Txn txn) throws TransactionException {
if (journalManager.isPresent()) {
try {
journalManager.get().journalGroup(new TxnCommit(txn.getId()));
} catch (final JournalException e) {
throw new TransactionException("Failed to write commit record to journal: " + e.getMessage(), e);
}
}
txn.signalCommit();
txn.releaseAll();
transactions.remove(txn.getId());
processSystemTasks();
if (LOG.isDebugEnabled()) {
LOG.debug("Committed transaction: " + txn.getId());
}
}
/**
* Abort a transaction.
*
* @param txn the transaction to abort.
*/
public void abort(final Txn txn) {
Objects.requireNonNull(txn);
//we can only abort something which is in the STARTED state
if (txn.getState() != Txn.State.STARTED) {
return;
}
// CAS loop
try {
while (true) {
final int localState = state.get();
// can NOT abort transaction when shutdown!
if (localState == STATE_SHUTDOWN) {
throwShutdownException();
}
// must NOT abort transaction when another thread is processing system tasks!
if (localState == STATE_SYSTEM) {
final long thisThreadId = Thread.currentThread().getId();
if (systemThreadId.compareAndSet(thisThreadId, thisThreadId)) {
// our thread is executing system tasks, allow reentrancy from our thread!
doAbortTransaction(txn);
// done... exit CAS loop!
return;
} else {
// spin whilst another thread executes the system tasks
// sleep a small time to save CPU
Thread.sleep(10);
continue;
}
}
// if we are have active transactions and are not preempted by another thread, abort transaction
if (localState > STATE_IDLE && state.compareAndSet(localState, localState - 1)) {
doAbortTransaction(txn);
// done... exit CAS loop!
return;
}
}
} catch (final InterruptedException e) {
// thrown by Thread.sleep
Thread.currentThread().interrupt();
//TODO(AR) API should be revised in future so that this is a TransactionException
throw new RuntimeException(e);
}
}
private void doAbortTransaction(final Txn txn) {
if (journalManager.isPresent()) {
try {
journalManager.get().journalGroup(new TxnAbort(txn.getId()));
} catch (final JournalException e) {
//TODO(AR) should revise the API in future to throw TransactionException
LOG.error("Failed to write abort record to journal: " + e.getMessage(), e);
}
}
txn.signalAbort();
txn.releaseAll();
transactions.remove(txn.getId());
processSystemTasks();
if (LOG.isDebugEnabled()) {
LOG.debug("Aborted transaction: " + txn.getId());
}
}
/**
* Close the transaction.
*
* Ensures that the transaction has either been committed or aborted.
*
* @param txn the transaction to close
*/
public void close(final Txn txn) {
Objects.requireNonNull(txn);
//if the transaction is already closed, do nothing
if (txn.getState() == Txn.State.CLOSED) {
return;
}
try {
//if the transaction is started, then we should auto-abort the uncommitted transaction
if (txn.getState() == Txn.State.STARTED) {
LOG.warn("Transaction was not committed or aborted, auto aborting!");
abort(txn);
}
// TODO(AR) ultimately we should be doing away with DBBroker#setCurrentTransaction
try(final DBBroker broker = pool.getBroker()) {
broker.setCurrentTransaction(null);
} catch(final EXistException ee) {
LOG.fatal(ee.getMessage(), ee);
throw new RuntimeException(ee);
}
} finally {
txn.setState(Txn.State.CLOSED); //transaction is now closed!
}
}
/**
* Keep track of a new operation within the given transaction.
*
* @param txnId the transaction id.
*/
public void trackOperation(final long txnId) {
transactions.get(txnId).increment();
}
/**
* Create a new checkpoint. A checkpoint fixes the current database state. All dirty pages
* are written to disk and the journal file is cleaned.
*
* This method is called from
* {@link org.exist.storage.BrokerPool#sync(DBBroker, Sync)} within pre-defined periods. It
* should not be called from somewhere else. The database needs to
* be in a stable state (all transactions completed, no operations running).
*
* @param switchFiles Indicates whether a new journal file should be started
*
* @throws TransactionException if an error occurs whilst writing the checkpoint.
*/
public void checkpoint(final boolean switchFiles) throws TransactionException {
if (state.get() == STATE_SHUTDOWN) {
throwShutdownException();
}
if(journalManager.isPresent()) {
try {
final long txnId = nextTxnId.getAndIncrement();
journalManager.get().checkpoint(txnId, switchFiles);
} catch(final JournalException e) {
throw new TransactionException(e.getMessage(), e);
}
}
}
/**
* @deprecated This mixes concerns and should not be here!
*/
@Deprecated
public void reindex(final DBBroker broker) throws IOException {
broker.pushSubject(broker.getBrokerPool().getSecurityManager().getSystemSubject());
try(final Txn transaction = beginTransaction()) {
broker.reindexCollection(transaction, XmldbURI.ROOT_COLLECTION_URI);
commit(transaction);
} catch (final PermissionDeniedException | LockException | TransactionException e) {
LOG.error("Exception during reindex: " + e.getMessage(), e);
} finally {
broker.popSubject();
}
}
@Override
public void shutdown() {
try {
while (true) {
final int localState = state.get();
if (localState == STATE_SHUTDOWN) {
// already shutdown!
return;
}
// can NOT shutdown whilst system tasks are executing
if (localState == STATE_SYSTEM) {
// spin whilst another thread executes the system tasks
// sleep a small time to save CPU
Thread.sleep(10);
continue;
}
if (state.compareAndSet(localState, STATE_SHUTDOWN)) {
// CAS above guarantees that only a single thread will ever enter this block once!
final int uncommitted = uncommittedTransaction();
final boolean checkpoint = uncommitted == 0;
final long txnId = nextTxnId.getAndIncrement();
if (journalManager.isPresent()) {
journalManager.get().shutdown(txnId, checkpoint);
}
transactions.clear();
if (LOG.isDebugEnabled()) {
LOG.debug("Shutting down transaction manager. Uncommitted transactions: " + transactions.size());
}
// done... exit CAS loop!
return;
}
}
} catch (final InterruptedException e) {
// thrown by Thread.sleep
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
private int uncommittedTransaction() {
final Integer uncommittedCount = transactions.reduce(1000,
(txnId, txnCounter) -> {
if (txnCounter.getCount() > 0) {
LOG.warn("Found an uncommitted transaction with id " + txnId + ". Pending operations: " + txnCounter.getCount());
return 1;
} else {
return 0;
}
},
(a, b) -> a + b
);
if (uncommittedCount == null) {
return 0;
}
if (uncommittedCount > 0) {
LOG.warn("There are uncommitted transactions. A recovery run may be triggered upon restart.");
}
return uncommittedCount;
}
public void triggerSystemTask(final SystemTask task) {
systemTaskManager.addSystemTask(task);
processSystemTasks();
}
private void processSystemTasks() {
// no new transactions can begin, commit, or abort whilst processing system tasks
// only process system tasks if there are no active transactions, i.e. the state == IDLE
if (state.compareAndSet(STATE_IDLE, STATE_SYSTEM)) {
// CAS above guarantees that only a single thread will ever enter this block at once
try {
this.systemThreadId.set(Thread.currentThread().getId());
// we have to check that `transactions` is empty
// otherwise we might be in SYSTEM state but `abort` or `commit`
// functions are still finishing
if (transactions.isEmpty()) {
systemTaskManager.processTasks();
}
} finally {
this.systemThreadId.set(-1);
// restore IDLE state
state.set(STATE_IDLE);
}
}
}
/**
* Keep track of the number of operations processed within a transaction.
* This is used to determine if there are any uncommitted transactions
* during shutdown.
*/
private static final class TxnCounter {
/**
* The counter variable is declared volatile as it is only ever
* written from one thread (via {@link #increment()} which is
* the `transaction` for which it is maintaining a count, whilst
* it is read from (potentially) a different thread
* (via {@link #getCount()} when {@link TransactionManager#shutdown()}
* calls {@link TransactionManager#uncommittedTransaction()}.
*/
private volatile long counter = 0;
public TxnCounter increment() {
counter++;
return this;
}
public long getCount() {
return counter;
}
}
} |
package com.orm;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteStatement;
import android.text.TextUtils;
import android.util.Log;
import com.orm.dsl.Table;
import com.orm.util.NamingHelper;
import com.orm.util.ReflectionUtil;
import com.orm.util.QueryBuilder;
import java.lang.String;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import static com.orm.SugarContext.getSugarContext;
public class SugarRecord {
protected Long id = null;
public static <T> void deleteAll(Class<T> type) {
SugarDb db = getSugarContext().getSugarDb();
SQLiteDatabase sqLiteDatabase = db.getDB();
sqLiteDatabase.delete(NamingHelper.toSQLName(type), null, null);
}
public static <T> void deleteAll(Class<T> type, String whereClause, String... whereArgs) {
SugarDb db = getSugarContext().getSugarDb();
SQLiteDatabase sqLiteDatabase = db.getDB();
sqLiteDatabase.delete(NamingHelper.toSQLName(type), whereClause, whereArgs);
}
@SuppressWarnings("deprecation")
public static <T> void saveInTx(T... objects) {
saveInTx(Arrays.asList(objects));
}
@SuppressWarnings("deprecation")
public static <T> void saveInTx(Collection<T> objects) {
SQLiteDatabase sqLiteDatabase = getSugarContext().getSugarDb().getDB();
try {
sqLiteDatabase.beginTransaction();
sqLiteDatabase.setLockingEnabled(false);
for (T object: objects) {
SugarRecord.save(object);
}
sqLiteDatabase.setTransactionSuccessful();
} catch (Exception e) {
Log.i("Sugar", "Error in saving in transaction " + e.getMessage());
} finally {
sqLiteDatabase.endTransaction();
sqLiteDatabase.setLockingEnabled(true);
}
}
@SuppressWarnings("deprecation")
public static <T> void deleteInTx(T... objects) {
deleteInTx(Arrays.asList(objects));
}
@SuppressWarnings("deprecation")
public static <T> void deleteInTx(Collection<T> objects) {
SQLiteDatabase sqLiteDatabase = getSugarContext().getSugarDb().getDB();
try {
sqLiteDatabase.beginTransaction();
sqLiteDatabase.setLockingEnabled(false);
for (T object : objects) {
SugarRecord.delete(object);
}
sqLiteDatabase.setTransactionSuccessful();
} catch (Exception e) {
Log.i("Sugar", "Error in deleting in transaction " + e.getMessage());
} finally {
sqLiteDatabase.endTransaction();
sqLiteDatabase.setLockingEnabled(true);
}
}
public static <T> List<T> listAll(Class<T> type) {
return find(type, null, null, null, null, null);
}
public static <T> List<T> listAll(Class<T> type, String orderBy) {
return find(type, null, null, null, orderBy, null);
}
public static <T> T findById(Class<T> type, Long id) {
List<T> list = find(type, "id=?", new String[]{String.valueOf(id)}, null, null, "1");
if (list.isEmpty()) return null;
return list.get(0);
}
public static <T> T findById(Class<T> type, Integer id) {
return findById(type, Long.valueOf(id));
}
public static <T> List<T> findById(Class<T> type, String[] ids) {
String whereClause = "id IN (" + QueryBuilder.generatePlaceholders(ids.length) + ")";
return find(type, whereClause, ids);
}
public static <T> T first(Class<T>type){
return findById(type, 0);
}
public static <T> T last(Class<T>type){
return findById(type, count(type) - 1);
}
public static <T> Iterator<T> findAll(Class<T> type) {
return findAsIterator(type, null, null, null, null, null);
}
public static <T> Iterator<T> findAsIterator(Class<T> type, String whereClause, String... whereArgs) {
return findAsIterator(type, whereClause, whereArgs, null, null, null);
}
public static <T> Iterator<T> findWithQueryAsIterator(Class<T> type, String query, String... arguments) {
SugarDb db = getSugarContext().getSugarDb();
SQLiteDatabase sqLiteDatabase = db.getDB();
Cursor c = sqLiteDatabase.rawQuery(query, arguments);
return new CursorIterator<T>(type, c);
}
public static <T> Iterator<T> findAsIterator(Class<T> type, String whereClause, String[] whereArgs, String groupBy, String orderBy, String limit) {
SugarDb db = getSugarContext().getSugarDb();
SQLiteDatabase sqLiteDatabase = db.getDB();
Cursor c = sqLiteDatabase.query(NamingHelper.toSQLName(type), null, whereClause, whereArgs,
groupBy, null, orderBy, limit);
return new CursorIterator<T>(type, c);
}
public static <T> List<T> find(Class<T> type, String whereClause, String... whereArgs) {
return find(type, whereClause, whereArgs, null, null, null);
}
public static <T> List<T> findWithQuery(Class<T> type, String query, String... arguments) {
SugarDb db = getSugarContext().getSugarDb();
SQLiteDatabase sqLiteDatabase = db.getDB();
T entity;
List<T> toRet = new ArrayList<T>();
Cursor c = sqLiteDatabase.rawQuery(query, arguments);
try {
while (c.moveToNext()) {
entity = type.getDeclaredConstructor().newInstance();
SugarRecord.inflate(c, entity);
toRet.add(entity);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
c.close();
}
return toRet;
}
public static void executeQuery(String query, String... arguments) {
getSugarContext().getSugarDb().getDB().execSQL(query, arguments);
}
public static <T> List<T> find(Class<T> type, String whereClause, String[] whereArgs, String groupBy, String orderBy, String limit) {
SugarDb db = getSugarContext().getSugarDb();
SQLiteDatabase sqLiteDatabase = db.getDB();
T entity;
List<T> toRet = new ArrayList<T>();
Cursor c = sqLiteDatabase.query(NamingHelper.toSQLName(type), null, whereClause, whereArgs,
groupBy, null, orderBy, limit);
try {
while (c.moveToNext()) {
entity = type.getDeclaredConstructor().newInstance();
SugarRecord.inflate(c, entity);
toRet.add(entity);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
c.close();
}
return toRet;
}
public static <T> long count(Class<?> type) {
return count(type, null, null, null, null, null);
}
public static <T> long count(Class<?> type, String whereClause, String[] whereArgs) {
return count(type, whereClause, whereArgs, null, null, null);
}
public static <T> long count(Class<?> type, String whereClause, String[] whereArgs, String groupBy, String orderBy, String limit) {
SugarDb db = getSugarContext().getSugarDb();
SQLiteDatabase sqLiteDatabase = db.getDB();
long toRet = -1;
String filter = (!TextUtils.isEmpty(whereClause)) ? " where " + whereClause : "";
SQLiteStatement sqliteStatement;
try {
sqliteStatement = sqLiteDatabase.compileStatement("SELECT count(*) FROM " + NamingHelper.toSQLName(type) + filter);
} catch (SQLiteException e) {
e.printStackTrace();
return toRet;
}
if (whereArgs != null) {
for (int i = whereArgs.length; i != 0; i
sqliteStatement.bindString(i, whereArgs[i - 1]);
}
}
try {
toRet = sqliteStatement.simpleQueryForLong();
} finally {
sqliteStatement.close();
}
return toRet;
}
public static long save(Object object) {
return save(getSugarContext().getSugarDb().getDB(), object);
}
static long save(SQLiteDatabase db, Object object) {
List<Field> columns = ReflectionUtil.getTableFields(object.getClass());
ContentValues values = new ContentValues(columns.size());
Field idField = null;
for (Field column : columns) {
ReflectionUtil.addFieldValueToColumn(values, column, object);
if (column.getName() == "id") {
idField = column;
}
}
long id = db.insertWithOnConflict(NamingHelper.toSQLName(object.getClass()), null, values,
SQLiteDatabase.CONFLICT_REPLACE);
if (object.getClass().isAnnotationPresent(Table.class)) {
if (idField != null) {
idField.setAccessible(true);
try {
idField.set(object, new Long(id));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
} else if (SugarRecord.class.isAssignableFrom(object.getClass())) {
((SugarRecord) object).setId(id);
}
Log.i("Sugar", object.getClass().getSimpleName() + " saved : " + id);
return id;
}
private static void inflate(Cursor cursor, Object object) {
List<Field> columns = ReflectionUtil.getTableFields(object.getClass());
for (Field field : columns) {
field.setAccessible(true);
Class<?> fieldType = field.getType();
if (fieldType.isAnnotationPresent(Table.class) ||
SugarRecord.class.isAssignableFrom(fieldType)) {
try {
long id = cursor.getLong(cursor.getColumnIndex(NamingHelper.toSQLName(field)));
field.set(object, (id > 0) ? findById(fieldType, id) : null);
} catch (IllegalAccessException e) {
e.printStackTrace();
}
} else {
ReflectionUtil.setFieldValueFromCursor(cursor, field, object);
}
}
}
public void delete() {
Long id = getId();
Class<?> type = getClass();
if (id != null && id > 0L) {
SQLiteDatabase db = getSugarContext().getSugarDb().getDB();
db.delete(NamingHelper.toSQLName(type), "Id=?", new String[]{id.toString()});
Log.i("Sugar", type.getSimpleName() + " deleted : " + id.toString());
} else {
Log.i("Sugar", "Cannot delete object: " + type.getSimpleName() + " - object has not been saved");
return;
}
}
public static void delete(Object object) {
Class<?> type = object.getClass();
if (type.isAnnotationPresent(Table.class)) {
try {
Field field = type.getDeclaredField("id");
field.setAccessible(true);
Long id = (Long) field.get(object);
if (id != null && id > 0L) {
SQLiteDatabase db = getSugarContext().getSugarDb().getDB();
db.delete(NamingHelper.toSQLName(type), "Id=?", new String[]{id.toString()});
Log.i("Sugar", type.getSimpleName() + " deleted : " + id.toString());
} else {
Log.i("Sugar", "Cannot delete object: " + object.getClass().getSimpleName() + " - object has not been saved");
return;
}
} catch (NoSuchFieldException e) {
Log.i("Sugar", "Cannot delete object: " + object.getClass().getSimpleName() + " - annotated object has no id");
return;
} catch (IllegalAccessException e) {
Log.i("Sugar", "Cannot delete object: " + object.getClass().getSimpleName() + " - can't access id");
return;
}
} else if (SugarRecord.class.isAssignableFrom(type)) {
((SugarRecord) object).delete();
} else {
Log.i("Sugar", "Cannot delete object: " + object.getClass().getSimpleName() + " - not persisted");
return;
}
}
public long save() {
return save(getSugarContext().getSugarDb().getDB(), this);
}
@SuppressWarnings("unchecked")
void inflate(Cursor cursor) {
inflate(cursor, this);
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
static class CursorIterator<E> implements Iterator<E> {
Class<E> type;
Cursor cursor;
public CursorIterator(Class<E> type, Cursor cursor) {
this.type = type;
this.cursor = cursor;
}
@Override
public boolean hasNext() {
return cursor != null && !cursor.isClosed() && !cursor.isAfterLast();
}
@Override
public E next() {
E entity = null;
if (cursor == null || cursor.isAfterLast()) {
throw new NoSuchElementException();
}
if (cursor.isBeforeFirst()) {
cursor.moveToFirst();
}
try {
entity = type.getDeclaredConstructor().newInstance();
SugarRecord.inflate(cursor, entity);
} catch (Exception e) {
e.printStackTrace();
} finally {
cursor.moveToNext();
if (cursor.isAfterLast()) {
cursor.close();
}
}
return entity;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
} |
package org.nusco.swimmers.creature.genetics;
import java.util.Random;
public class DNA {
    public static final int MIRROR_ORGAN = 0b00000001;
    public static final double MUTATION_RATE = 0.03;

    private int[] genes;

    public DNA(int... genes) {
        this.genes = normalize(genes);
    }

    /** Clamp every gene into 0..255: reduce modulo 256, then drop the sign. */
    private int[] normalize(int[] genes) {
        final int[] clamped = new int[genes.length];
        for (int i = 0; i < clamped.length; i++) {
            clamped[i] = Math.abs(genes[i] % 256);
        }
        return clamped;
    }

    public int[] getGenes() {
        return genes;
    }

    /**
     * Produce a copy of this DNA in which each gene independently
     * mutates with probability {@link #MUTATION_RATE}.
     */
    public DNA mutate() {
        final int[] offspring = new int[genes.length];
        for (int i = 0; i < offspring.length; i++) {
            offspring[i] = (Math.random() < MUTATION_RATE) ? mutate(genes, i) : genes[i];
        }
        return new DNA(offspring);
    }

    /** Shift gene i by a uniformly random amount in [-20, 19]. */
    private int mutate(int[] genes, int i) {
        final int shift = ((int) (Math.random() * 40)) - 20;
        return genes[i] + shift;
    }

    /** Generate a fresh random genome, printing the seed so runs can be replayed. */
    public static DNA random() {
        long seed = new Random().nextLong();
        System.out.println("Generate DNA based on seed: " + seed);
        return random(seed);
    }

    /** The fixed founding genome: always generated from the same seed. */
    public static DNA ancestor() {
        return DNA.random(9018779372573137080L);
    }

    private static DNA random(long seed) {
        final Random random = new Random(seed);
        final int genomeSize = 60 * Byte.SIZE;
        final int[] genes = new int[genomeSize];
        for (int i = 0; i < genes.length; i++) {
            genes[i] = rnd(0, 255, random);
        }
        return new DNA(genes);
    }

    // NOTE(review): produces values in [min, max) — max itself is never
    // returned; confirm whether 255 was meant to be reachable.
    private static int rnd(int min, int max, Random random) {
        return (int) (random.nextDouble() * (max - min)) + min;
    }
}
package org.oasis.plugin.responders;
import java.awt.Dimension;
import java.awt.Toolkit;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import fitnesse.FitNesseContext;
import fitnesse.authentication.SecureOperation;
import fitnesse.authentication.SecureReadOperation;
import fitnesse.authentication.SecureResponder;
import fitnesse.http.Request;
import fitnesse.http.Response;
import fitnesse.http.SimpleResponse;
import fitnesse.wiki.MockingPageCrawler;
import fitnesse.wiki.PageCrawler;
import fitnesse.wiki.PageData;
import fitnesse.wiki.PathParser;
import fitnesse.wiki.WikiPage;
import fitnesse.wiki.WikiPagePath;
@SuppressWarnings("serial")
public class SqlResponder implements SecureResponder{
protected PageData pageData;
protected WikiPage page;
protected WikiPagePath path;
protected PageCrawler crawler;
protected WikiPage root;
protected Request request;
protected String content;
public SqlResponder(){
}
public class GetJDBC extends JPanel {
private String driver = null;
private String url = null;
private String username = null;
private String password = null;
private String query = null;
JFrame parentFrame;
SimpleResponse response;
public GetJDBC(SimpleResponse response){
this.response=response;
setDriver();
setUrl();
setUserName();
setPassword();
setQuery();
String encryptedPassword = encryptString(password);
if(driver==null || driver=="" || url==null || url=="" || username==null || username=="" || password==null || password=="" || query==null || query=="")
response.setContent("Parameter is missing.");
else
response.setContent("| set driver | " + driver + " | " + "set url | " + url + " | " + "set username | " + username + " | " + " set password | " + encryptedPassword + " | " + " set query | " + query + " | ");
}
public void setDriver(){
parentFrame = createOnTopJFrameParent();
Object[] options = {"SQLServer", "MySQL", "Oracle"};
driver = (String)JOptionPane.showInputDialog(parentFrame, "Select the Driver", "Select the Driver", JOptionPane.PLAIN_MESSAGE, null, options, "SQLServer");
parentFrame.dispose();
}
public void setUrl(){
if(driver == "MySQL"){
url = "jdbc:mysql:
driver = "com.mysql.jdbc.Driver";
}
else if(driver == "SQLServer"){
url = "jdbc:sqlserver:
driver = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
}
else{
url = "jdbc:oracle:thin:@";
driver = "oracle.jdbc.driver.OracleDriver";
}
parentFrame = createOnTopJFrameParent();
url = url + (String)JOptionPane.showInputDialog(parentFrame, "Please enter the database URL:", "");
parentFrame.dispose();
}
public void setUserName(){
parentFrame = createOnTopJFrameParent();
username = (String)JOptionPane.showInputDialog(parentFrame, "Please enter the username:", "");
parentFrame.dispose();
}
public void setPassword(){
parentFrame = createOnTopJFrameParent();
password = (String)JOptionPane.showInputDialog(parentFrame, "Please enter the password:", "");
parentFrame.dispose();
}
public void setQuery(){
parentFrame = createOnTopJFrameParent();
query = (String)JOptionPane.showInputDialog(parentFrame, "Please enter the query:", "");
parentFrame.dispose();
}
private String encryptString(String password) {
if (password != null) {
Cipher aes = null;
try {
aes = Cipher.getInstance("AES/ECB/PKCS5Padding");
aes.init(Cipher.ENCRYPT_MODE, generateKey());
byte[] ciphertext = aes.doFinal(password.getBytes());
password = "decrypt:" + bytesToHex(ciphertext);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return password;
}
public String bytesToHex(byte[] bytes) {
final char[] hexArray = "0123456789ABCDEF".toCharArray();
char[] hexChars = new char[bytes.length * 2];
int v;
for ( int j = 0; j < bytes.length; j++ ) {
v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
public SecretKeySpec generateKey() {
String passStr="XebiumIsConnectedToTheSikuliBone";
SecretKey tmp = null;
try{
String saltStr = "fijsd@#saltr9jsfizxnv";
byte[] salt = saltStr.getBytes();
int iterations = 43210;
SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
tmp = factory.generateSecret(new PBEKeySpec(passStr.toCharArray(), salt, iterations, 128));
}
catch(Exception e){
e.printStackTrace();
}
return new SecretKeySpec(tmp.getEncoded(), "AES");
}
}
public Response makeResponse(FitNesseContext context, Request request) {
boolean nonExistent = request.hasInput("nonExistent");
return doMakeResponse(context, request, nonExistent);
}
public Response makeResponseForNonExistentPage(FitNesseContext context, Request request) {
return doMakeResponse(context, request, true);
}
protected Response doMakeResponse(FitNesseContext context, Request request, boolean firstTimeForNewPage) {
initializeResponder(context.root, request);
SimpleResponse response = new SimpleResponse();
String resource = request.getResource();
WikiPagePath path = PathParser.parse(resource);
PageCrawler crawler = context.root.getPageCrawler();
page = crawler.getPage(path, new MockingPageCrawler());
pageData = page.getData();
content = createPageContent();
String loadedFile = (String) request.getInput("sql");
if (request.hasInput("jdbc")) {
GetJDBC getJDBC = new GetJDBC(response);
//response.setContent(jdbc);
//html += "get image " + img + " ";
}
else if (request.hasInput("test")){
response.setContent("test is successful");
}
else {
response.setContent("not valid input var");
}
//captureScreen();
//response.setContent(html);
response.setMaxAge(0);
return response;
}
public SecureOperation getSecureOperation() {
return new SecureReadOperation();
}
protected String createPageContent() {
return pageData.getContent();
}
protected void initializeResponder(WikiPage root, Request request) {
this.root = root;
this.request = request;
}
private static JFrame createOnTopJFrameParent() {
@SuppressWarnings("serial")
//had to create a jframe parent for the JFileChooser to have foreground focus.
JFrame frame = new JFrame("parent"){
@Override
public void setVisible(final boolean visible) {
// make sure that frame is marked as not disposed if it is asked to be visible
if (visible) {
//setDisposed(false);
}
// let's handle visibility...
if (!visible || !isVisible()) { // have to check this condition simply because super.setVisible(true) invokes toFront if frame was already visible
super.setVisible(visible);
}
// ...and bring frame to the front.. in a strange and weird way
if (visible) {
int state = super.getExtendedState();
state &= ~JFrame.ICONIFIED;
super.setExtendedState(state);
super.setAlwaysOnTop(true);
super.toFront();
super.requestFocus();
super.setAlwaysOnTop(false);
}
}
};
Dimension dim = Toolkit.getDefaultToolkit().getScreenSize();
frame.setLocation(dim.width/2-frame.getSize().width/2, dim.height/2-frame.getSize().height/2);
frame.setSize(frame.WIDTH+300, 130);
frame.setVisible(true);
return frame;
}
} |
package org.objectweb.proactive.examples.mpi;
import org.apache.log4j.Logger;
import org.objectweb.proactive.ProActive;
import org.objectweb.proactive.core.ProActiveException;
import org.objectweb.proactive.core.config.ProActiveConfiguration;
import org.objectweb.proactive.core.descriptor.data.ProActiveDescriptor;
import org.objectweb.proactive.core.descriptor.data.VirtualNode;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
/**
 * This example uses a simple MPI program (cpi) which calculates
 * an approximation of the number PI on localhost.
 * One purpose is the possibility of launching this program several times
 * consecutively, simply by calling the startMPI() method on the virtual node
 * with which the MPI program is associated.
 * It permits managing as many MPI programs as the user defines virtual nodes.
 *
 */
public class Cpi {
static public void main(String[] args) {
Logger logger = ProActiveLogger.getLogger(Loggers.EXAMPLES);
if (args.length != 2) {
logger.error("Usage: java " + Cpi.class.getName() +
" <number of iterations> <deployment file>");
System.exit(0);
}
ProActiveConfiguration.load();
VirtualNode vnCpi;
ProActiveDescriptor pad = null;
int count;
int exitValue;
try {
pad = ProActive.getProactiveDescriptor("file:" + args[1]);
count = new Integer(args[0]).intValue();
int initValue = count-1;
// gets virtual node
vnCpi = pad.getVirtualNode("CPI");
// activates VN
vnCpi.activate();
while ((count
logger.info(" -> Iteration [" + (initValue-count) + "]");
exitValue = vnCpi.startMPI();
if (exitValue != 0){
logger.error("ERROR : try to run \"lamboot\" command");
break;
}else{
logger.info(" MPI code returned value "+exitValue );
}
}
vnCpi.killAll(false);
System.exit(0);
} catch (ProActiveException e) {
// TODO Auto-generated catch block
e.printStackTrace();
logger.error("Pb when reading descriptor");
}
}
} |
package org.opencms.relations;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsProperty;
import org.opencms.file.CmsPropertyDefinition;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsResourceFilter;
import org.opencms.file.CmsVfsResourceNotFoundException;
import org.opencms.file.types.CmsResourceTypeFolder;
import org.opencms.lock.CmsLock;
import org.opencms.main.CmsException;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.util.CmsStringUtil;
import org.opencms.util.CmsUUID;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.apache.commons.logging.Log;
import com.google.common.collect.Lists;
/**
* Provides several simplified methods for manipulating category relations.<p>
*
* @since 6.9.2
*
* @see CmsCategory
*/
public class CmsCategoryService {
/** The centralized path for categories. */
public static final String CENTRALIZED_REPOSITORY = "/system/categories/";
/** The folder for the local category repositories. */
public static final String REPOSITORY_BASE_FOLDER = "/.categories/";
/** The log object for this class. */
private static final Log LOG = CmsLog.getLog(CmsCategoryService.class);
/** The singleton instance. */
private static CmsCategoryService m_instance;
/**
 * Returns the singleton instance.<p>
 *
 * Lazy and unsynchronized: concurrent first calls could create more than one
 * instance, which is harmless here since the service keeps no per-instance
 * state beyond the shared singleton field.<p>
 *
 * @return the singleton instance
 */
public static CmsCategoryService getInstance() {
if (m_instance == null) {
m_instance = new CmsCategoryService();
}
return m_instance;
}
/**
 * Relates the given resource to the given category and, recursively, to every
 * ancestor category.<p>
 *
 * Does nothing if the resource already carries the category. The resource has
 * to be locked.<p>
 *
 * @param cms the current cms context
 * @param resourceName the site relative path to the resource to add
 * @param category the category to add the resource to
 *
 * @throws CmsException if something goes wrong
 */
public void addResourceToCategory(CmsObject cms, String resourceName, CmsCategory category) throws CmsException {
    CmsResource resource = cms.readResource(resourceName, CmsResourceFilter.IGNORE_EXPIRATION);
    if (readResourceCategories(cms, resource).contains(category)) {
        // already assigned, nothing to do
        return;
    }
    String categorySitePath = cms.getRequestContext().removeSiteRoot(category.getRootPath());
    cms.addRelationToResource(resourceName, categorySitePath, CmsRelationType.CATEGORY.getName());
    // walk upwards: strip a trailing slash, then recurse into the parent category
    String parentPath = category.getPath();
    if (parentPath.endsWith("/")) {
        parentPath = parentPath.substring(0, parentPath.length() - 1);
    }
    int lastSlash = parentPath.lastIndexOf('/');
    if (lastSlash > 0) {
        addResourceToCategory(cms, resourceName, parentPath.substring(0, lastSlash + 1));
    }
}
/**
 * Adds the resource with the given site path to the category identified by
 * the given category path.<p>
 *
 * Only the most global category matching the given category path for the
 * given resource is used.<p>
 *
 * The resource has to be locked.<p>
 *
 * @param cms the current cms context
 * @param resourceName the site relative path to the resource to add
 * @param categoryPath the path of the category to add the resource to
 *
 * @throws CmsException if something goes wrong
 */
public void addResourceToCategory(CmsObject cms, String resourceName, String categoryPath) throws CmsException {
    addResourceToCategory(cms, resourceName, readCategory(cms, categoryPath, resourceName));
}
/**
 * Removes the given resource from all categories.<p>
 *
 * @param cms the cms context
 * @param resourcePath the resource to reset the categories for
 *
 * @throws CmsException if something goes wrong
 */
public void clearCategoriesForResource(CmsObject cms, String resourcePath) throws CmsException {
    // delete every outgoing CATEGORY relation of the resource
    cms.deleteRelationsFromResource(
        resourcePath,
        CmsRelationFilter.TARGETS.filterType(CmsRelationType.CATEGORY));
}
/**
 * Assigns all categories of one resource to another resource.<p>
 *
 * The resource the categories are copied to has to be locked.<p>
 *
 * @param cms the CmsObject used for reading and writing
 * @param fromResource the resource to copy the categories from
 * @param toResourceSitePath the full site path of the resource to copy the categories to
 *
 * @throws CmsException if copying the categories fails
 */
public void copyCategories(CmsObject cms, CmsResource fromResource, String toResourceSitePath) throws CmsException {
    for (CmsCategory category : readResourceCategories(cms, fromResource)) {
        addResourceToCategory(cms, toResourceSitePath, category);
    }
}
/**
 * Creates a new category folder.<p>
 *
 * The repository is chosen in this order: the parent's repository if a parent
 * is given, otherwise the closest repository to the reference path if one is
 * given, otherwise the centralized repository.<p>
 *
 * @param cms the current cms context
 * @param parent the parent category or <code>null</code> for a new top level category
 * @param name the name of the new category
 * @param title the title
 * @param description the description
 * @param referencePath the reference path for the category repository
 *
 * @return the newly created category
 *
 * @throws CmsException if something goes wrong
 */
public CmsCategory createCategory(
    CmsObject cms,
    CmsCategory parent,
    String name,
    String title,
    String description,
    String referencePath)
throws CmsException {

    List<CmsProperty> properties = new ArrayList<CmsProperty>();
    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(title)) {
        properties.add(new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, title, null));
    }
    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(description)) {
        properties.add(new CmsProperty(CmsPropertyDefinition.PROPERTY_DESCRIPTION, description, null));
    }
    // pick the repository the new category folder will live in
    String repositoryPath;
    if (parent != null) {
        repositoryPath = parent.getRootPath();
    } else if (referencePath == null) {
        repositoryPath = CmsCategoryService.CENTRALIZED_REPOSITORY;
    } else {
        List<String> repositories = getCategoryRepositories(cms, referencePath);
        // take the last (most local) one
        repositoryPath = repositories.get(repositories.size() - 1);
    }
    String folderPath = cms.getRequestContext().removeSiteRoot(internalCategoryRootPath(repositoryPath, name));
    CmsResource resource;
    try {
        resource = cms.createResource(folderPath, CmsResourceTypeFolder.RESOURCE_TYPE_ID, null, properties);
    } catch (CmsVfsResourceNotFoundException e) {
        // the centralized repository may be missing: create it, then retry once
        cms.createResource(CmsCategoryService.CENTRALIZED_REPOSITORY, CmsResourceTypeFolder.RESOURCE_TYPE_ID);
        resource = cms.createResource(folderPath, CmsResourceTypeFolder.RESOURCE_TYPE_ID, null, properties);
    }
    return getCategory(cms, resource);
}
/**
 * Deletes the category identified by the given path.<p>
 *
 * Only the most global category matching the given category path for the
 * given reference path is affected.<p>
 *
 * This method will try to lock the involved resource.<p>
 *
 * @param cms the current cms context
 * @param categoryPath the path of the category to delete
 * @param referencePath the reference path to find the category repositories
 *
 * @throws CmsException if something goes wrong
 */
public void deleteCategory(CmsObject cms, String categoryPath, String referencePath) throws CmsException {
    CmsCategory category = readCategory(cms, categoryPath, referencePath);
    String sitePath = cms.getRequestContext().removeSiteRoot(category.getRootPath());
    // make sure we own a lock on the category folder before deleting it
    CmsLock lock = cms.getLock(sitePath);
    if (lock.isNullLock()) {
        cms.lockResource(sitePath);
    } else if (lock.isLockableBy(cms.getRequestContext().getCurrentUser())) {
        cms.changeLock(sitePath);
    }
    cms.deleteResource(sitePath, CmsResource.DELETE_PRESERVE_SIBLINGS);
}
/**
 * Creates a category object from the given folder resource.<p>
 *
 * The resource name is used as fallback title, the description defaults to
 * the empty string.<p>
 *
 * @param cms the cms context
 * @param resource the resource
 *
 * @return a category object
 *
 * @throws CmsException if something goes wrong
 */
public CmsCategory getCategory(CmsObject cms, CmsResource resource) throws CmsException {
    CmsProperty titleProp = cms.readPropertyObject(resource, CmsPropertyDefinition.PROPERTY_TITLE, false);
    CmsProperty descriptionProp = cms.readPropertyObject(resource, CmsPropertyDefinition.PROPERTY_DESCRIPTION, false);
    String title = titleProp.getValue(resource.getName());
    String description = descriptionProp.getValue("");
    return new CmsCategory(
        resource.getStructureId(),
        resource.getRootPath(),
        title,
        description,
        getRepositoryBaseFolderName(cms));
}
/**
 * Creates a category object from the given category root path.<p>
 *
 * @param cms the cms context
 * @param categoryRootPath the category root path
 *
 * @return a category object
 *
 * @throws CmsException if something goes wrong
 */
public CmsCategory getCategory(CmsObject cms, String categoryRootPath) throws CmsException {
    String sitePath = cms.getRequestContext().removeSiteRoot(categoryRootPath);
    return getCategory(cms, cms.readResource(sitePath));
}
/**
 * Returns all category repositories for the given reference path, from the
 * most global (the centralized repository) to the most local one.<p>
 *
 * @param cms the cms context
 * @param referencePath the reference path
 *
 * @return a list of root paths
 */
public List<String> getCategoryRepositories(CmsObject cms, String referencePath) {
    List<String> result = new ArrayList<String>();
    if (referencePath == null) {
        result.add(CmsCategoryService.CENTRALIZED_REPOSITORY);
        return result;
    }
    // start at the folder containing (or equal to) the reference path
    String current = referencePath;
    if (!CmsResource.isFolder(current)) {
        current = CmsResource.getParentFolder(current);
    }
    if (CmsStringUtil.isEmptyOrWhitespaceOnly(current)) {
        current = "/";
    }
    String categoryBase = getRepositoryBaseFolderName(cms);
    // collect every existing local repository while climbing to the root
    while (current != null) {
        String repositoryPath = internalCategoryRootPath(current, categoryBase);
        if (cms.existsResource(repositoryPath)) {
            result.add(repositoryPath);
        }
        current = CmsResource.getParentFolder(current);
    }
    result.add(CmsCategoryService.CENTRALIZED_REPOSITORY);
    // the order is important in case of conflicts
    Collections.reverse(result);
    return result;
}
/**
 * Returns the category repositories base folder name, normalized to the
 * form <code>/name/</code>.<p>
 *
 * Resolution order: the default-file property of the centralized repository,
 * then the workplace manager setting, then {@link #REPOSITORY_BASE_FOLDER}.<p>
 *
 * @param cms the cms context
 *
 * @return the category repositories base folder name
 */
public String getRepositoryBaseFolderName(CmsObject cms) {
    String folderName = "";
    try {
        folderName = cms.readPropertyObject(
            CmsCategoryService.CENTRALIZED_REPOSITORY,
            CmsPropertyDefinition.PROPERTY_DEFAULT_FILE,
            false).getValue();
    } catch (CmsException e) {
        if (LOG.isErrorEnabled()) {
            LOG.error(e.getLocalizedMessage(), e);
        }
    }
    if (CmsStringUtil.isEmptyOrWhitespaceOnly(folderName)) {
        folderName = OpenCms.getWorkplaceManager().getCategoryFolder();
    }
    if (CmsStringUtil.isEmptyOrWhitespaceOnly(folderName)) {
        folderName = REPOSITORY_BASE_FOLDER;
    }
    // normalize to leading and trailing slash
    if (!folderName.endsWith("/")) {
        folderName = folderName + "/";
    }
    if (!folderName.startsWith("/")) {
        folderName = "/" + folderName;
    }
    return folderName;
}
/**
 * Localizes a list of categories by reading locale-specific properties for
 * their title and description, if possible.<p>
 *
 * Neither the input list nor the categories in it are modified.<p>
 *
 * @param cms the CMS context to use for reading resources
 * @param categories the list of categories
 * @param locale the locale to use
 *
 * @return the list of localized categories
 */
public List<CmsCategory> localizeCategories(CmsObject cms, List<CmsCategory> categories, Locale locale) {
    List<CmsCategory> localized = new ArrayList<CmsCategory>(categories.size());
    Iterator<CmsCategory> it = categories.iterator();
    while (it.hasNext()) {
        localized.add(localizeCategory(cms, it.next(), locale));
    }
    return localized;
}
/**
 * Localizes a single category by reading its locale-specific properties for
 * title and description, if possible.<p>
 *
 * On any failure the original category is returned unchanged.<p>
 *
 * @param cms the CMS context to use for reading resources
 * @param category the category to localize
 * @param locale the locale to use
 *
 * @return the localized category
 */
public CmsCategory localizeCategory(CmsObject cms, CmsCategory category, Locale locale) {
    try {
        CmsResource categoryRes = cms.readResource(category.getId(), CmsResourceFilter.IGNORE_EXPIRATION);
        String localizedTitle = cms.readPropertyObject(
            categoryRes,
            CmsPropertyDefinition.PROPERTY_TITLE,
            false,
            locale).getValue();
        String localizedDescription = cms.readPropertyObject(
            categoryRes,
            CmsPropertyDefinition.PROPERTY_DESCRIPTION,
            false,
            locale).getValue();
        return new CmsCategory(category, localizedTitle, localizedDescription);
    } catch (Exception e) {
        LOG.error("Could not read localized category: " + e.getLocalizedMessage(), e);
        return category;
    }
}
/**
 * Renames/moves a category from the old path to the new one, keeping it in
 * its original repository.<p>
 *
 * This method will try to lock the involved resource.<p>
 *
 * @param cms the current cms context
 * @param oldCatPath the path of the category to move
 * @param newCatPath the new category path
 * @param referencePath the reference path to find the category
 *
 * @throws CmsException if something goes wrong
 */
public void moveCategory(CmsObject cms, String oldCatPath, String newCatPath, String referencePath)
throws CmsException {

    CmsCategory category = readCategory(cms, oldCatPath, referencePath);
    String source = cms.getRequestContext().removeSiteRoot(category.getRootPath());
    // make sure we own a lock on the category folder before moving it
    CmsLock lock = cms.getLock(source);
    if (lock.isNullLock()) {
        cms.lockResource(source);
    } else if (lock.isLockableBy(cms.getRequestContext().getCurrentUser())) {
        cms.changeLock(source);
    }
    String destination = cms.getRequestContext().removeSiteRoot(
        internalCategoryRootPath(category.getBasePath(), newCatPath));
    cms.moveResource(source, destination);
}
/**
 * Returns all categories given some search parameters.<p>
 *
 * @param cms the current cms context
 * @param parentCategoryPath the path of the parent category to get the categories for
 * @param includeSubCats if to include all categories, or first level child categories only
 * @param referencePath the reference path to find all the category repositories
 *
 * @return a list of {@link CmsCategory} objects
 *
 * @throws CmsException if something goes wrong
 */
public List<CmsCategory> readCategories(
    CmsObject cms,
    String parentCategoryPath,
    boolean includeSubCats,
    String referencePath)
throws CmsException {

    return readCategoriesForRepositories(
        cms,
        parentCategoryPath,
        includeSubCats,
        getCategoryRepositories(cms, referencePath));
}
/**
 * Returns all categories found in the given repositories.<p>
 *
 * @param cms the current cms context
 * @param parentCategoryPath the path of the parent category to get the categories for
 * @param includeSubCats if to include all categories, or first level child categories only
 * @param repositories a list of root paths
 * @return a sorted list of {@link CmsCategory} objects
 * @throws CmsException if something goes wrong
 */
public List<CmsCategory> readCategoriesForRepositories(
    CmsObject cms,
    String parentCategoryPath,
    boolean includeSubCats,
    List<String> repositories)
throws CmsException {

    String categoryPath = (parentCategoryPath == null) ? "" : parentCategoryPath;
    // a set filters out duplicates from overlapping repositories
    Set<CmsCategory> found = new HashSet<CmsCategory>();
    for (String repository : repositories) {
        try {
            found.addAll(
                internalReadSubCategories(cms, internalCategoryRootPath(repository, categoryPath), includeSubCats));
        } catch (CmsVfsResourceNotFoundException e) {
            // the category need not be defined in every repository, skip it
        }
    }
    List<CmsCategory> sorted = new ArrayList<CmsCategory>(found);
    Collections.sort(sorted);
    return sorted;
}
/**
 * Reads the category identified by the given category path for the given
 * reference path, checking the repositories from the most global one down.<p>
 *
 * @param cms the current cms context
 * @param categoryPath the path of the category to read
 * @param referencePath the reference path to find all the category repositories
 *
 * @return the first matching category
 *
 * @throws CmsException if something goes wrong, in particular
 *         {@link CmsVfsResourceNotFoundException} if the category exists in
 *         no repository
 */
public CmsCategory readCategory(CmsObject cms, String categoryPath, String referencePath) throws CmsException {
    List<String> repositories = getCategoryRepositories(cms, referencePath);
    for (int i = 0; i < repositories.size(); i++) {
        try {
            return getCategory(cms, internalCategoryRootPath(repositories.get(i), categoryPath));
        } catch (CmsVfsResourceNotFoundException e) {
            // only rethrow once every repository has been tried
            if (i == (repositories.size() - 1)) {
                throw e;
            }
        }
    }
    // unreachable: the loop either returns or throws
    return null;
}
/**
 * Reads the resources for a category identified by the given category path,
 * using the default resource filter.<p>
 *
 * @param cms the current cms context
 * @param categoryPath the path of the category to read the resources for
 * @param recursive <code>true</code> if including sub-categories
 * @param referencePath the reference path to find all the category repositories
 *
 * @return a list of {@link CmsResource} objects
 *
 * @throws CmsException if something goes wrong
 */
public List<CmsResource> readCategoryResources(
    CmsObject cms,
    String categoryPath,
    boolean recursive,
    String referencePath)
throws CmsException {

    return readCategoryResources(cms, categoryPath, recursive, referencePath, CmsResourceFilter.DEFAULT);
}
/**
 * Reads the resources for a category identified by the given category path.<p>
 *
 * @param cms the current cms context
 * @param categoryPath the path of the category to read the resources for
 * @param recursive <code>true</code> if including sub-categories
 * @param referencePath the reference path to find all the category repositories
 * @param resFilter the resource filter to use
 *
 * @return a sorted list of {@link CmsResource} objects
 *
 * @throws CmsException if something goes wrong
 */
public List<CmsResource> readCategoryResources(
    CmsObject cms,
    String categoryPath,
    boolean recursive,
    String referencePath,
    CmsResourceFilter resFilter)
throws CmsException {

    CmsRelationFilter relationFilter = CmsRelationFilter.SOURCES.filterType(CmsRelationType.CATEGORY);
    if (recursive) {
        relationFilter = relationFilter.filterIncludeChildren();
    }
    CmsCategory category = readCategory(cms, categoryPath, referencePath);
    String categorySitePath = cms.getRequestContext().removeSiteRoot(category.getRootPath());
    // a set filters duplicate sources from multiple relations
    Set<CmsResource> found = new HashSet<CmsResource>();
    for (CmsRelation relation : cms.getRelationsForResource(categorySitePath, relationFilter)) {
        try {
            found.add(relation.getSource(cms, resFilter));
        } catch (CmsException e) {
            // the relation source does not match the resource filter, skip it
            if (LOG.isDebugEnabled()) {
                LOG.debug(e.getLocalizedMessage(), e);
            }
        }
    }
    List<CmsResource> sorted = new ArrayList<CmsResource>(found);
    Collections.sort(sorted);
    return sorted;
}
/**
 * Reads the categories for a resource.<p>
 *
 * @param cms the current cms context
 * @param resource the resource to get the categories for
 *
 * @return the categories list
 *
 * @throws CmsException if something goes wrong
 */
public List<CmsCategory> readResourceCategories(CmsObject cms, CmsResource resource) throws CmsException {
    // read-only mode: no relation repair
    return internalReadResourceCategories(cms, resource, false);
}
/**
 * Reads the categories for a resource identified by the given resource name.<p>
 *
 * @param cms the current cms context
 * @param resourceName the path of the resource to get the categories for
 *
 * @return the categories list
 *
 * @throws CmsException if something goes wrong
 */
public List<CmsCategory> readResourceCategories(CmsObject cms, String resourceName) throws CmsException {
    CmsResource resource = cms.readResource(resourceName);
    return internalReadResourceCategories(cms, resource, false);
}
/**
 * Removes a resource identified by the given resource name from the given
 * category (and its child categories).<p>
 *
 * The resource has to be previously locked.<p>
 *
 * @param cms the current cms context
 * @param resourceName the site relative path to the resource to remove
 * @param category the category to remove the resource from
 *
 * @throws CmsException if something goes wrong
 */
public void removeResourceFromCategory(CmsObject cms, String resourceName, CmsCategory category)
throws CmsException {

    CmsResource categoryFolder = cms.readResource(
        cms.getRequestContext().removeSiteRoot(category.getRootPath()));
    // match only CATEGORY relations pointing at this folder (and children)
    CmsRelationFilter filter = CmsRelationFilter.TARGETS
        .filterType(CmsRelationType.CATEGORY)
        .filterResource(categoryFolder)
        .filterIncludeChildren();
    cms.deleteRelationsFromResource(resourceName, filter);
}
/**
 * Removes a resource identified by the given resource name from the category
 * identified by the given category path.<p>
 *
 * The resource has to be previously locked.<p>
 *
 * @param cms the current cms context
 * @param resourceName the site relative path to the resource to remove
 * @param categoryPath the path of the category to remove the resource from
 *
 * @throws CmsException if something goes wrong
 */
public void removeResourceFromCategory(CmsObject cms, String resourceName, String categoryPath)
throws CmsException {

    removeResourceFromCategory(cms, resourceName, readCategory(cms, categoryPath, resourceName));
}
/**
 * Repairs broken category relations of the given resource.<p>
 *
 * Broken relations can be caused by renaming/moving a category folder,
 * changing the category repositories base folder name, or conflicting
 * category folders across several repositories.<p>
 *
 * The resource has to be previously locked.<p>
 *
 * @param cms the cms context
 * @param resource the resource to repair
 *
 * @throws CmsException if something goes wrong
 */
public void repairRelations(CmsObject cms, CmsResource resource) throws CmsException {
    // reading in repair mode rewrites any relation that no longer matches
    internalReadResourceCategories(cms, resource, true);
}
/**
 * Repairs broken category relations of the resource with the given site
 * path.<p>
 *
 * Broken relations can be caused by renaming/moving a category folder,
 * changing the category repositories base folder name, or conflicting
 * category folders across several repositories.<p>
 *
 * The resource has to be previously locked.<p>
 *
 * @param cms the cms context
 * @param resourceName the site relative path to the resource to repair
 *
 * @throws CmsException if something goes wrong
 */
public void repairRelations(CmsObject cms, String resourceName) throws CmsException {
    repairRelations(cms, cms.readResource(resourceName));
}
/**
 * Composes the category root path by joining the repository path and the
 * category path with exactly one slash between them.<p>
 *
 * @param basePath the category repository path
 * @param categoryPath the category path
 *
 * @return the category root path
 */
private String internalCategoryRootPath(String basePath, String categoryPath) {
    boolean baseEndsWithSlash = basePath.endsWith("/");
    boolean pathStartsWithSlash = categoryPath.startsWith("/");
    if (pathStartsWithSlash && baseEndsWithSlash) {
        // drop the duplicate separator
        return basePath + categoryPath.substring(1);
    }
    if (!pathStartsWithSlash && !baseEndsWithSlash) {
        // insert the missing separator
        return basePath + "/" + categoryPath;
    }
    return basePath + categoryPath;
}
/**
 * Reads/Repairs the categories for a resource identified by the given resource name.<p>
 *
 * In read mode ({@code repair == false}) the category relations of the resource are
 * resolved and returned sorted; broken relations are only logged. In repair mode
 * ({@code repair == true}) broken or stale relations are deleted and re-created
 * against the correct category folders, and the result list is NOT sorted.<p>
 *
 * For reparation, the resource has to be previously locked.<p>
 *
 * @param cms the current cms context
 * @param resource the resource to get the categories for
 * @param repair if to repair broken relations
 *
 * @return the categories list
 *
 * @throws CmsException if something goes wrong
 */
private List<CmsCategory> internalReadResourceCategories(CmsObject cms, CmsResource resource, boolean repair)
throws CmsException {

    List<CmsCategory> result = new ArrayList<CmsCategory>();
    String baseFolder = null;
    Iterator<CmsRelation> itRelations = cms.getRelationsForResource(
        resource,
        CmsRelationFilter.TARGETS.filterType(CmsRelationType.CATEGORY)).iterator();
    // the repository base folder name is only needed when a repair may actually occur
    if (repair && itRelations.hasNext()) {
        baseFolder = getRepositoryBaseFolderName(cms);
    }
    String resourceName = cms.getSitePath(resource);
    boolean repaired = false;
    while (itRelations.hasNext()) {
        CmsRelation relation = itRelations.next();
        try {
            // the relation target still exists as a folder
            CmsResource res = relation.getTarget(cms, CmsResourceFilter.DEFAULT_FOLDERS);
            CmsCategory category = getCategory(cms, res);
            if (!repair) {
                result.add(category);
            } else {
                // re-resolve the category by path; a differing id means the relation
                // points at a shadowed/stale folder in another repository
                CmsCategory actualCat = readCategory(cms, category.getPath(), resourceName);
                if (!category.getId().equals(actualCat.getId())) {
                    // repair broken categories caused by creation/deletion of
                    // category folders across several repositories
                    CmsRelationFilter filter = CmsRelationFilter.TARGETS.filterType(
                        CmsRelationType.CATEGORY).filterResource(res);
                    cms.deleteRelationsFromResource(resourceName, filter);
                    repaired = true;
                    // set the right category
                    String catPath = cms.getRequestContext().removeSiteRoot(actualCat.getRootPath());
                    cms.addRelationToResource(resourceName, catPath, CmsRelationType.CATEGORY.getName());
                }
                result.add(actualCat);
            }
        } catch (CmsException e) {
            // the relation target could not be read (moved or deleted category folder)
            if (!repair) {
                if (LOG.isErrorEnabled()) {
                    LOG.error(e.getLocalizedMessage(), e);
                }
            } else {
                // repair broken categories caused by moving category folders
                // could also happen when deleting an assigned category folder
                if (LOG.isDebugEnabled()) {
                    LOG.debug(e.getLocalizedMessage(), e);
                }
                CmsRelationFilter filter = CmsRelationFilter.TARGETS.filterType(
                    CmsRelationType.CATEGORY).filterPath(relation.getTargetPath());
                if (!relation.getTargetId().isNullUUID()) {
                    filter = filter.filterStructureId(relation.getTargetId());
                }
                cms.deleteRelationsFromResource(resourceName, filter);
                repaired = true;
                // try to set the right category again
                try {
                    CmsCategory actualCat = readCategory(
                        cms,
                        CmsCategory.getCategoryPath(relation.getTargetPath(), baseFolder),
                        resourceName);
                    addResourceToCategory(cms, resourceName, actualCat);
                    result.add(actualCat);
                } catch (CmsException ex) {
                    // the category really no longer exists: the relation stays removed
                    // NOTE(review): logs the OUTER exception's message with the inner
                    // exception's stack trace — confirm `e` vs `ex` here is intentional
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(e.getLocalizedMessage(), ex);
                    }
                }
            }
        }
    }
    if (!repair) {
        Collections.sort(result);
    } else if (repaired) {
        // be sure that no higher level category is missing
        Iterator<CmsCategory> it = result.iterator();
        while (it.hasNext()) {
            CmsCategory category = it.next();
            addResourceToCategory(cms, resourceName, category.getPath());
        }
    }
    return result;
}
/**
 * Returns all sub categories of the given one, including sub sub categories if needed.<p>
 *
 * @param cms the current cms context
 * @param rootPath the base category's root path (this category is not part of the result)
 * @param includeSubCats flag to indicate if sub categories should also be read
 *
 * @return a list of {@link CmsCategory} objects
 *
 * @throws CmsException if something goes wrong
 */
private List<CmsCategory> internalReadSubCategories(CmsObject cms, String rootPath, boolean includeSubCats)
throws CmsException {

    // read all folders below the base category, recursively if requested
    List<CmsResource> folders = cms.readResources(
        cms.getRequestContext().removeSiteRoot(rootPath),
        CmsResourceFilter.DEFAULT.addRequireType(CmsResourceTypeFolder.RESOURCE_TYPE_ID),
        includeSubCats);
    List<CmsCategory> subCategories = new ArrayList<CmsCategory>();
    for (CmsResource folder : folders) {
        subCategories.add(getCategory(cms, folder));
    }
    return subCategories;
}
} |
package org.lp20.aikuma.model;
import android.util.Log;
import org.lp20.aikuma.Aikuma;
import org.lp20.aikuma.util.FileIO;
import org.lp20.aikuma.util.StandardDateFormat;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Date;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.FileUtils;
import org.json.simple.JSONObject;
import org.json.simple.JSONArray;
/**
* The class that stores the metadata of a recording, including it's UUID,
* creator's UUID, name, date, originalUUID (if applicable), and languages.
*
* @author Oliver Adams <oliver.adams@gmail.com>
* @author Florian Hanke <florian.hanke@gmail.com>
*/
public class Recording {
/**
* Constructs a new Recording using a specified UUID, name, date,
* languages, originalUUID and sample rate.
*
* @param uuid the recording's UUID.
* @param name The recording's name.
* @param date The date of creation.
* @param languages The languages associated with the recording
* @param speakersIds The UUIDs of the speakers associated with the
* recording
* @param androidID The android ID of the device that created the
* recording
* @param originalUUID The UUID of the recording that this recording
* is a respeaking of
* @param sampleRate The sample rate of the recording.
* @param durationMsec The duration of the recording in milliseconds.
*/
public Recording(UUID uuid, String name, Date date,
List<Language> languages, List<String> speakersIds,
String androidID, UUID originalUUID, long sampleRate,
int durationMsec) {
setUUID(uuid);
setName(name);
setDate(date);
setLanguages(languages);
setSpeakersIds(speakersIds);
setAndroidID(androidID);
setOriginalUUID(originalUUID);
setSampleRate(sampleRate);
setDurationMsec(durationMsec);
}
public UUID getUUID() {
return uuid;
}
/**
* Returns a File that refers to the actual recording file.
*
* @return The file the recording is stored in.
*/
public File getFile() {
return new File(getRecordingsPath(), getUUID() + ".wav");
}
/**
* Name accessor; returns an empty string if the name is null
*
* @return The name of the recording.
*/
public String getName() {
if (name != null) {
return name;
} else {
return "";
}
}
public Date getDate() {
return date;
}
public List<Language> getLanguages() {
return languages;
}
/**
* Returns the first language code as a string, or an empty string if there
* is none.
*
* @return The language code of the first language associated with the
* recording.
*/
public String getFirstLangCode() {
if (getLanguages().size() > 0) {
return getLanguages().get(0).getCode();
} else {
return "";
}
}
/**
* Returns the name and language of the recording in a single string.
*
* @return The name and langugage of the recording in a string.
*/
public String getNameAndLang() {
if (getFirstLangCode().equals("")) {
return getName();
} else {
return getName() + " (" + getFirstLangCode() + ")";
}
}
/**
* speakersIds accessor.
*
* @return A list of UUIDs representing the speakers of the recording.
*/
public List<String> getSpeakersIds() {
return speakersIds;
}
/**
* Returns true if the Recording has at least one language; false otherwise.
*
* @return true if the Recording has at least one language; false otherwise.
*/
public boolean hasALanguage() {
if (this.languages.size() == 0) {
return false;
}
return true;
}
/**
* androidID accessor
*
* @return The Andorid of the device that made the recording.
*/
public String getAndroidID() {
return androidID;
}
/**
* originalUUID accessor.
*
* @return The UUID of the original recording that this is a respeaking
* of.
*/
public UUID getOriginalUUID() {
if (originalUUID == null) {
throw new IllegalStateException(
"Cannot call getOriginalUUID when originalUUID is null." +
" Call isOriginal().");
}
return originalUUID;
}
/**
* sampleRate accessor
*
* @return The sample rate of the recording as a long.
*/
public long getSampleRate() {
return sampleRate;
}
/**
* durationMsec accessor
*
* @return The duration of the recording in milliseconds as an int.
*/
public int getDurationMsec() {
return durationMsec;
}
/**
* Returns true if the Recording is an original; false if respeaking
*
* @return True if the recording is an original.
*/
public boolean isOriginal() {
if (originalUUID == null) {
return true;
} else {
return false;
}
}
/**
* Encode the Recording as a corresponding JSONObject.
*
* @return A JSONObject instance representing the Recording;
*/
public JSONObject encode() {
JSONObject encodedRecording = new JSONObject();
encodedRecording.put("uuid", this.uuid.toString());
encodedRecording.put("name", this.name);
encodedRecording.put("date", new StandardDateFormat().format(this.date));
encodedRecording.put("languages", Language.encodeList(languages));
JSONArray speakersIdsArray = new JSONArray();
for (String id : speakersIds) {
speakersIdsArray.add(id.toString());
}
encodedRecording.put("speakersIds", speakersIdsArray);
encodedRecording.put("androidID", this.androidID);
encodedRecording.put("sampleRate", getSampleRate());
encodedRecording.put("durationMsec", getDurationMsec());
if (this.originalUUID == null) {
encodedRecording.put("originalUUID", null);
} else {
encodedRecording.put("originalUUID", this.originalUUID.toString());
}
return encodedRecording;
}
/**
* Write the Recording to file in a subdirectory of the recordings and move
* the recording WAV data to that directory
*
* @throws IOException If the recording metadata could not be written.
*/
public void write() throws IOException {
// Determine the recording identifier
// make the directory
// Grab the user ID.
// Make the whole identifier
// Write the file.
JSONObject encodedRecording = this.encode();
FileIO.writeJSONObject(new File(
getRecordingsPath(), this.getUUID().toString() + ".json"),
encodedRecording);
}
/**
* Deletes the JSON File associated with the recording.
*
* @return true if successful; false otherwise.
*/
public boolean delete() {
File file = new File(getRecordingsPath(), this.getUUID().toString() +
".json");
if (!isOriginal()) {
File mapFile = new File(getRecordingsPath(),
this.getUUID().toString() + ".map");
boolean result;
result = mapFile.delete();
if (!result) {
return false;
}
}
return file.delete();
}
/**
* Read a recording from the file containing JSON describing the Recording
*
* @param uuid The uuid of the recording to be read.
* @return A Recording object corresponding to the uuid.
* @throws IOException If the recording metadata cannot be read.
*/
public static Recording read(UUID uuid) throws IOException {
JSONObject jsonObj = FileIO.readJSONObject(
new File(getRecordingsPath(), uuid.toString() + ".json"));
String uuidString = (String) jsonObj.get("uuid");
if (uuidString == null) {
throw new IOException("Null UUID in the JSON file.");
}
UUID readUUID = UUID.fromString(uuidString);
if (!readUUID.equals(uuid)) {
throw new IOException("UUID of the filename is different to UUID" +
"in the file's JSON");
}
String name = (String) jsonObj.get("name");
String dateString = (String) jsonObj.get("date");
if (dateString == null) {
throw new IOException("Null date in the JSON file.");
}
Date date;
try {
date = new StandardDateFormat().parse(dateString);
} catch (ParseException e) {
throw new IOException(e);
}
JSONArray languageArray = (JSONArray) jsonObj.get("languages");
if (languageArray == null) {
throw new IOException("Null languages in the JSON file.");
}
List<Language> languages = Language.decodeJSONArray(languageArray);
JSONArray speakerIdArray = (JSONArray) jsonObj.get("speakersIds");
if (speakerIdArray == null) {
throw new IOException("Null speakersIds in the JSON file.");
}
List<String> speakersIds = Speaker.decodeJSONArray(speakerIdArray);
String androidID = (String) jsonObj.get("androidID");
if (androidID == null) {
throw new IOException("Null androidID in the JSON file.");
}
UUID originalUUID;
if (jsonObj.get("originalUUID") == null) {
originalUUID = null;
} else {
originalUUID = UUID.fromString((String) jsonObj.get("originalUUID"));
}
long sampleRate;
if (jsonObj.get("sampleRate") == null) {
sampleRate = -1;
} else {
sampleRate = (Long) jsonObj.get("sampleRate");
}
int durationMsec;
if (jsonObj.get("durationMsec") == null) {
durationMsec = -1;
Log.i("duration", "reading: null");
} else {
durationMsec = ((Long) jsonObj.get("durationMsec")).intValue();
Log.i("duration", "reading: " + durationMsec);
}
Recording recording = new Recording(
uuid, name, date, languages, speakersIds, androidID,
originalUUID, sampleRate, (Integer) durationMsec);
return recording;
}
/**
* Returns a list of all the respeakings of this Recording.
*
* @return A list of all the respeakings of the recording.
*/
public List<Recording> getRespeakings() {
List<Recording> allRecordings = readAll();
List<Recording> respeakings = new ArrayList();
for (Recording recording : allRecordings) {
if (!recording.isOriginal()) {
if (recording.getOriginalUUID().equals(getUUID())) {
respeakings.add(recording);
}
}
}
return respeakings;
}
/**
* Read all recordings from file
*
* @return A list of the users found in the users directory.
*/
public static List<Recording> readAll() {
String[] recordingUUIDsArray =
getRecordingsPath().list(new FilenameFilter() {
public boolean accept(File dir, String filename) {
return filename.endsWith(".json");
}
});
List<String> recordingUUIDs;
// Get a list of all the UUIDs of users in the "recordings" directory.
if (recordingUUIDsArray != null) {
recordingUUIDs = Arrays.asList(recordingUUIDsArray);
} else {
return new ArrayList<Recording>();
}
for (int i = 0; i < recordingUUIDs.size(); i++) {
recordingUUIDs.set(i,
FilenameUtils.removeExtension(recordingUUIDs.get(i)));
}
// Get the recordings data from the metadata.json files.
List<Recording> recordings = new ArrayList<Recording>();
for (String recordingUUID : recordingUUIDs) {
try {
recordings.add(Recording.read(UUID.fromString(recordingUUID)));
} catch (IOException e) {
// Couldn't read that recording for whateve rreason (perhaps
// json file wasn't formatted correctly). Let's just ignore
// that user.
}
}
return recordings;
}
/**
* Compares the given object with the Recording, and returns true if the
* Recordings uuid, name, date, languages, androidID and originalUUID are
* equal
*
* @param obj The object to be compared.
* @return true if the uuid, name, date, languages, androidID and
* originalUUID are equal; false otherwise.
*/
public boolean equals(Object obj) {
if (obj == null) {return false;}
if (obj == this) {return true;}
if (obj.getClass() != getClass()) {return false;}
Recording rhs = (Recording) obj;
return new EqualsBuilder()
.append(uuid, rhs.uuid)
.append(name, rhs.name)
.append(date, rhs.date)
.append(languages, rhs.languages)
.append(speakersIds, rhs.speakersIds)
.append(androidID, rhs.androidID)
.append(originalUUID, rhs.originalUUID)
.append(sampleRate, rhs.sampleRate)
.isEquals();
}
// Sets the UUID; it cannot be null.
private void setUUID(UUID uuid) {
if (uuid == null) {
throw new IllegalArgumentException(
"Recording UUID cannot be null.");
}
this.uuid = uuid;
}
/**
* Name mutator.
*/
private void setName(String name) {
this.name = name;
}
// Sets the date; the date cannot be null.
private void setDate(Date date) {
if (date == null) {
throw new IllegalArgumentException(
"Recording date cannot be null.");
}
this.date = date;
}
// Sets the languages
private void setLanguages(List<Language> languages) {
if (languages == null) {
throw new IllegalArgumentException(
"Recording languages cannot be null. " +
"Set as an empty List<Language> instead.");
}
this.languages = languages;
}
/**
* Add's another language to the Recording's language list
*
* @param language The language to be added to the Recording's list of
* languages.
*/
private void addLanguage(Language language) {
if (language == null) {
throw new IllegalArgumentException(
"A language for the recording cannot be null");
}
this.languages.add(language);
}
// Sets the speakers UUID, but won't accept a null list (empty lists are
// fine).
private void setSpeakersIds(List<String> speakersIds) {
if (speakersIds == null) {
throw new IllegalArgumentException(
"Recording speakersIds cannot be null. " +
"Set as an empty List<String> instead.");
}
this.speakersIds = speakersIds;
}
/**
* Add's another speaker to the Recording's speaker list
*
* @param speaker The speaker to be added to the Recording's list of
* speaker.
*/
private void addSpeakerId(Speaker speaker) {
if (speaker == null) {
throw new IllegalArgumentException(
"A speaker for the recording cannot be null");
}
this.speakersIds.add(speaker.getId());
}
// Sets the android ID but won't accept a null string.
private void setAndroidID(String androidID) {
if (androidID == null) {
throw new IllegalArgumentException(
"The androidID for the recording cannot be null");
}
this.androidID = androidID;
}
private void setOriginalUUID(UUID originalUUID) {
this.originalUUID = originalUUID;
}
private void setSampleRate(long sampleRate) {
this.sampleRate = sampleRate;
}
private void setDurationMsec(int durationMsec) {
this.durationMsec = durationMsec;
}
/**
* Get the applications recordings directory
*
* @return A File representing the path of the recordings directory
*/
public static File getRecordingsPath() {
File path = new File(FileIO.getAppRootPath(), "recordings");
path.mkdirs();
return path;
}
/**
* Get the applications recording directory that isn't synced.
*
* @return A File representing the path of the recordings directory in the
* no-sync Aikuma directory.
*/
public static File getNoSyncRecordingsPath() {
File path = new File(FileIO.getNoSyncPath(), "recordings");
path.mkdirs();
return path;
}
/**
* Indicates that this recording is allowed to be synced by moving it to a
* directory that the SyncUtil synchronizes.
*
* @param uuid The UUID of the recording to sync.
* @throws IOException If it cannot be moved to the synced directory.
*/
public static void enableSync(UUID uuid) throws IOException {
File wavFile = new File(getNoSyncRecordingsPath(), uuid + ".wav");
FileUtils.moveFileToDirectory(wavFile, getRecordingsPath(), false);
}
/**
* The recording's UUID.
*/
private UUID uuid;
/**
* The recording's name.
*/
private String name;
/**
* The recording's date.
*/
private Date date;
/**
* The languages of the recording.
*/
private List<Language> languages;
/**
* The speakers of the recording.
*/
private List<String> speakersIds;
/**
* The Android ID of the device that the recording was made on.
*/
private String androidID;
/**
* The UUID of the original of the recording if it is a respeaking.
*/
private UUID originalUUID;
/**
* The sample rate of the recording in Hz
*/
private long sampleRate;
/**
* The duration of the recording in seconds (floored)
*/
private int durationMsec;
} |
package org.openid4java.consumer;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.cookie.CookiePolicy;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openid4java.message.*;
import org.openid4java.association.Association;
import org.openid4java.association.DiffieHellmanSession;
import org.openid4java.association.AssociationException;
import org.openid4java.association.AssociationSessionType;
import org.openid4java.discovery.Identifier;
import org.openid4java.discovery.Discovery;
import org.openid4java.discovery.DiscoveryException;
import org.openid4java.discovery.DiscoveryInformation;
import org.openid4java.server.NonceGenerator;
import org.openid4java.server.IncrementalNonceGenerator;
import org.openid4java.server.RealmVerifier;
import org.openid4java.OpenIDException;
import org.openid4java.util.HttpClientFactory;
import javax.crypto.spec.DHParameterSpec;
import java.net.*;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.*;
/**
* Manages OpenID communications with an OpenID Provider (Server).
* <p>
* The Consumer site needs to have the same instance of this class throughout
* the lifecycle of a OpenID authentication session.
*
* @author Marius Scurtescu, Johnny Bufu
*/
public class ConsumerManager
{
// Class logger; DEBUG is cached once since the level is checked on hot paths.
private static Log _log = LogFactory.getLog(ConsumerManager.class);
private static final boolean DEBUG = _log.isDebugEnabled();

/**
 * Discovery process manager.
 */
private Discovery _discovery = new Discovery();

/**
 * Store for keeping track of the established associations.
 */
private ConsumerAssociationStore _associations = new InMemoryConsumerAssociationStore();

/**
 * Consumer-side nonce generator, needed for compatibility with OpenID 1.1.
 */
private NonceGenerator _consumerNonceGenerator = new IncrementalNonceGenerator();

/**
 * Private association used for signing consumer nonces when operating in
 * compatibility (v1.x) mode.
 */
private Association _privateAssociation;

/**
 * Verifier for the nonces in authentication responses;
 * prevents replay attacks.
 */
private NonceVerifier _nonceVerifier = new InMemoryNonceVerifier(60);

/**
 * Handles HTTP calls to the Server / OpenID Provider.
 */
private HttpClient _httpClient;

/**
 * Maximum number of attempts for establishing an association.
 */
private int _maxAssocAttempts = 4;

/**
 * Flag for enabling or disabling stateless mode.
 */
private boolean _allowStateless = true;

/**
 * The lowest encryption level session accepted for association sessions.
 */
private AssociationSessionType _minAssocSessEnc
        = AssociationSessionType.NO_ENCRYPTION_SHA1MAC;

/**
 * The preferred association session type; will be attempted first.
 */
private AssociationSessionType _prefAssocSessEnc;

/**
 * Parameters (modulus and generator) for the Diffie-Hellman sessions.
 */
private DHParameterSpec _dhParams = DiffieHellmanSession.getDefaultParameter();

/**
 * Timeout (in seconds) for keeping track of failed association attempts.
 * Default 5 minutes.
 */
private int _failedAssocExpire = 300;

/**
 * Interval before the expiration of an association (in seconds)
 * in which the association should not be used, in order to avoid
 * the expiration from occurring in the middle of an authentication
 * transaction. Default: 300s.
 */
private int _preExpiryAssocLockInterval = 300;

/**
 * Flag for generating checkid_immediate authentication requests.
 */
private boolean _immediateAuth = false;

/**
 * Used to verify realms against return_to URLs.
 */
private RealmVerifier _realmVerifier;

/**
 * Connect timeout for HTTP calls in milliseconds. Default 10s
 */
private int _connectTimeout = 10000;

/**
 * Socket (read) timeout for HTTP calls in milliseconds. Default 10s.
 */
private int _socketTimeout = 10000;

/**
 * Maximum number of redirects to be followed. Default 0.
 */
private int _maxRedirects = 0;
/**
 * Instantiates a ConsumerManager with default settings.
 *
 * @throws ConsumerException if the private association used for signing
 *                           consumer nonces cannot be generated
 */
public ConsumerManager() throws ConsumerException
{
    // global httpclient configuration parameters
    _httpClient = HttpClientFactory.getInstance(
            _maxRedirects, Boolean.FALSE, _socketTimeout, _connectTimeout,
            CookiePolicy.IGNORE_COOKIES);

    _realmVerifier = new RealmVerifier();
    // don't verify own (RP) identity, disable RP discovery
    _realmVerifier.setEnforceRpId(false);

    // prefer the strongest association type the JCE provider supports
    if (Association.isHmacSha256Supported())
        _prefAssocSessEnc = AssociationSessionType.DH_SHA256;
    else
        _prefAssocSessEnc = AssociationSessionType.DH_SHA1;

    try
    {
        // initialize the private association for compat consumer nonces
        _privateAssociation = Association.generate(
                getPrefAssocSessEnc().getAssociationType(), "", 0);
    }
    catch (AssociationException e)
    {
        // preserve the underlying cause instead of discarding it
        throw new ConsumerException(
                "Cannot initialize private association, " +
                "needed for consumer nonces.", e);
    }
}
/**
 * Returns discovery process manager.
 *
 * @return discovery process manager.
 */
public Discovery getDiscovery()
{
    return _discovery;
}

/**
 * Sets discovery process manager.
 *
 * @param discovery discovery process manager.
 */
public void setDiscovery(Discovery discovery)
{
    _discovery = discovery;
}

/**
 * Gets the association store that holds established associations with
 * OpenID providers.
 *
 * @return the configured association store
 * @see ConsumerAssociationStore
 */
public ConsumerAssociationStore getAssociations()
{
    return _associations;
}

/**
 * Configures the ConsumerAssociationStore that will be used to store the
 * associations established with OpenID providers.
 *
 * @param associations ConsumerAssociationStore implementation
 * @see ConsumerAssociationStore
 */
public void setAssociations(ConsumerAssociationStore associations)
{
    this._associations = associations;
}

/**
 * Gets the NonceVerifier implementation used to keep track of the nonces
 * that have been seen in authentication response messages.
 *
 * @return the configured nonce verifier
 * @see NonceVerifier
 */
public NonceVerifier getNonceVerifier()
{
    return _nonceVerifier;
}

/**
 * Configures the NonceVerifier that will be used to keep track of the
 * nonces in the authentication response messages.
 *
 * @param nonceVerifier NonceVerifier implementation
 * @see NonceVerifier
 */
public void setNonceVerifier(NonceVerifier nonceVerifier)
{
    this._nonceVerifier = nonceVerifier;
}
/**
 * Sets the Diffie-Hellman base parameters that will be used for encoding
 * the MAC key exchange.
 * <p>
 * If not provided the default set specified by the Diffie-Hellman algorithm
 * will be used.
 *
 * @param dhParams Object encapsulating modulus and generator numbers
 * @see DHParameterSpec DiffieHellmanSession
 */
public void setDHParams(DHParameterSpec dhParams)
{
    this._dhParams = dhParams;
}

/**
 * Gets the Diffie-Hellman base parameters (modulus and generator).
 *
 * @return the configured Diffie-Hellman parameters
 * @see DHParameterSpec DiffieHellmanSession
 */
public DHParameterSpec getDHParams()
{
    return _dhParams;
}
/**
 * Maximum number of attempts (HTTP calls) the RP is willing to make
 * for trying to establish an association with the OP.
 *
 * Default: 4;
 * 0 = don't use associations
 *
 * Associations and stateless mode cannot be both disabled at the same time.
 *
 * @param maxAssocAttempts the maximum number of association attempts; 0 disables associations
 * @throws ConsumerException declared but never thrown here
 *
 * NOTE(review): despite the declared ConsumerException, disabling both
 * associations and stateless mode actually raises IllegalArgumentException —
 * confirm this asymmetry is intentional before relying on the checked type.
 */
public void setMaxAssocAttempts(int maxAssocAttempts)
        throws ConsumerException
{
    if (maxAssocAttempts > 0 || _allowStateless)
        this._maxAssocAttempts = maxAssocAttempts;
    else
        throw new IllegalArgumentException(
                "Associations and stateless mode " +
                "cannot be both disabled at the same time.");

    if (_maxAssocAttempts == 0) _log.info("Associations disabled.");
}

/**
 * Gets the value configured for the maximum number of association attempts
 * that will be performed for a given OpenID provider.
 * <p>
 * If an association cannot be established after this number of attempts the
 * ConsumerManager will fallback to stateless mode, provided the
 * #allowStateless preference is enabled.
 * <p>
 * See also: {@link #allowStateless(boolean)} {@link #statelessAllowed()}
 *
 * @return the configured maximum number of association attempts
 */
public int getMaxAssocAttempts()
{
    return _maxAssocAttempts;
}
/**
 * Flag used to enable / disable the use of stateless mode.
 * <p>
 * Default: enabled.
 * <p>
 * Associations and stateless mode cannot be both disabled at the same time.
 *
 * @deprecated
 * @see #setAllowStateless(boolean)
 */
public void allowStateless(boolean allowStateless)
{
    setAllowStateless(allowStateless);
}

/**
 * Flag used to enable / disable the use of stateless mode.
 * <p>
 * Default: enabled.
 * <p>
 * Associations and stateless mode cannot be both disabled at the same time.
 *
 * @param allowStateless true to allow falling back to stateless mode
 */
public void setAllowStateless(boolean allowStateless)
{
    // guard clause: refuse the change only when stateless mode is already off
    // AND associations are disabled (both off at once is not permitted)
    if (!_allowStateless && _maxAssocAttempts <= 0)
        throw new IllegalArgumentException(
                "Associations and stateless mode " +
                "cannot be both disabled at the same time.");

    this._allowStateless = allowStateless;
}

/**
 * Returns true if the ConsumerManager is configured to fallback to
 * stateless mode when failing to associate with an OpenID Provider.
 *
 * @deprecated
 * @see isAllowStateless()
 */
public boolean statelessAllowed()
{
    return isAllowStateless();
}

/**
 * Returns true if the ConsumerManager is configured to fallback to
 * stateless mode when failing to associate with an OpenID Provider.
 *
 * @return true if stateless mode is allowed
 */
public boolean isAllowStateless()
{
    return _allowStateless;
}
/**
 * Configures the minimum level of encryption accepted for association
 * sessions.
 * <p>
 * Default: no-encryption session, SHA1 MAC association.
 * <p>
 * See also: {@link #allowStateless(boolean)}
 *
 * @param minAssocSessEnc the lowest association session type to accept
 */
public void setMinAssocSessEnc(AssociationSessionType minAssocSessEnc)
{
    this._minAssocSessEnc = minAssocSessEnc;
}

/**
 * Gets the minimum level of encryption that will be accepted for
 * association sessions.
 * <p>
 * Default: no-encryption session, SHA1 MAC association
 *
 * @return the lowest accepted association session type
 */
public AssociationSessionType getMinAssocSessEnc()
{
    return _minAssocSessEnc;
}

/**
 * Sets the preferred encryption type for the association sessions.
 * <p>
 * Default: DH-SHA256
 *
 * @param prefAssocSessEnc the association session type to attempt first
 */
public void setPrefAssocSessEnc(AssociationSessionType prefAssocSessEnc)
{
    this._prefAssocSessEnc = prefAssocSessEnc;
}

/**
 * Gets the preferred encryption type for the association sessions.
 *
 * @return the association session type attempted first
 */
public AssociationSessionType getPrefAssocSessEnc()
{
    return _prefAssocSessEnc;
}
/**
 * Sets the expiration timeout (in seconds) for keeping track of failed
 * association attempts.
 * <p>
 * If an association cannot be established with an OP, subsequent
 * authentication requests to that OP will not try to establish an
 * association within the timeout period configured here.
 * <p>
 * Default: 300s
 * 0 = disabled (attempt to establish an association with every
 *     authentication request)
 *
 * @param _failedAssocExpire time in seconds to remember failed
 *                           association attempts
 */
public void setFailedAssocExpire(int _failedAssocExpire)
{
    this._failedAssocExpire = _failedAssocExpire;
}

/**
 * Gets the timeout (in seconds) configured for keeping track of failed
 * association attempts.
 * <p>
 * See also: {@link #setFailedAssocExpire(int)}
 *
 * @return the failed-association expiry, in seconds
 */
public int getFailedAssocExpire()
{
    return _failedAssocExpire;
}

/**
 * Gets the interval before the expiration of an association
 * (in seconds) in which the association should not be used,
 * in order to avoid the expiration from occurring in the middle
 * of a authentication transaction. Default: 300s.
 *
 * @return the pre-expiry lock interval, in seconds
 */
public int getPreExpiryAssocLockInterval()
{
    return _preExpiryAssocLockInterval;
}

/**
 * Sets the interval before the expiration of an association
 * (in seconds) in which the association should not be used,
 * in order to avoid the expiration from occurring in the middle
 * of a authentication transaction. Default: 300s.
 *
 * @param preExpiryAssocLockInterval The number of seconds for the
 *                                   pre-expiry lock interval.
 */
public void setPreExpiryAssocLockInterval(int preExpiryAssocLockInterval)
{
    this._preExpiryAssocLockInterval = preExpiryAssocLockInterval;
}

/**
 * Configures the authentication request mode:
 * checkid_immediate (true) or checkid_setup (false).
 * <p>
 * Default: false / checkid_setup
 *
 * @param _immediateAuth true for checkid_immediate requests
 */
public void setImmediateAuth(boolean _immediateAuth)
{
    this._immediateAuth = _immediateAuth;
}

/**
 * Returns true if the ConsumerManager is configured to attempt
 * checkid_immediate authentication requests.
 * <p>
 * Default: false
 *
 * @return true if checkid_immediate mode is enabled
 */
public boolean isImmediateAuth()
{
    return _immediateAuth;
}
/**
 * Gets the RealmVerifier used to verify realms against return_to URLs.
 *
 * @return the configured realm verifier
 */
public RealmVerifier getRealmVerifier()
{
    return _realmVerifier;
}

/**
 * Sets the RealmVerifier used to verify realms against return_to URLs.
 *
 * @param realmVerifier the realm verifier to use
 */
public void setRealmVerifier(RealmVerifier realmVerifier)
{
    this._realmVerifier = realmVerifier;
}

/**
 * Gets the max age (in seconds) configured for keeping track of nonces.
 * <p>
 * Nonces older than the max age will be removed from the store and
 * authentication responses will be considered failures.
 *
 * @return the nonce max age, in seconds, as reported by the nonce verifier
 */
public long getMaxNonceAge()
{
    return _nonceVerifier.getMaxAge();
}
/**
 * Does discover on an identifier. It delegates the call to its
 * discovery manager.
 *
 * @param identifier the user-supplied OpenID identifier to discover
 *
 * @return A List of {@link DiscoveryInformation} objects.
 *         The list could be empty if no discovery information can
 *         be retrieved. (Raw List kept for interface compatibility.)
 *
 * @throws DiscoveryException if the discovery process runs into errors.
 */
public List discover(String identifier) throws DiscoveryException
{
    return _discovery.discover(identifier);
}
/**
* Configures a private association for signing consumer nonces.
* <p>
* Consumer nonces are needed to prevent replay attacks in compatibility
* mode, because OpenID 1.x Providers to not attach nonces to
* authentication responses.
* <p>
* One way for the Consumer to know that a consumer nonce in an
* authentication response was indeed issued by itself (and thus prevent
* denial of service attacks), is by signing them.
*
* @param assoc The association to be used for signing consumer nonces;
* signing can be deactivated by setting this to null.
* Signing is enabled by default.
*/
public void setPrivateAssociation(Association assoc)
throws ConsumerException
{
if (assoc == null)
throw new ConsumerException(
"Cannot set null private association, " +
"needed for consumer nonces.");
_privateAssociation = assoc;
}
/**
* Gets the private association used for signing consumer nonces.
*
* @see #setPrivateAssociation(org.openid4java.association.Association)
*/
public Association getPrivateAssociation()
{
return _privateAssociation;
}
public void setConnectTimeout(int connectTimeout)
{
_connectTimeout = connectTimeout;
_httpClient.getHttpConnectionManager()
.getParams().setConnectionTimeout(_connectTimeout);
}
public void setSocketTimeout(int socketTimeout)
{
_socketTimeout = socketTimeout;
_httpClient.getParams().setSoTimeout(_socketTimeout);
}
public void setMaxRedirects(int maxRedirects)
{
_maxRedirects = maxRedirects;
_httpClient.getParams().setParameter(
"http.protocol.max-redirects", new Integer(_maxRedirects));
}
/**
* Makes a HTTP call to the specified URL with the parameters specified
* in the Message.
*
* @param url URL endpoint for the HTTP call
* @param request Message containing the parameters
* @param response ParameterList that will hold the parameters received in
* the HTTP response
* @return the status code of the HTTP call
*/
private int call(String url, Message request, ParameterList response)
throws MessageException
{
int responseCode = -1;
// build the post message with the parameters from the request
PostMethod post = new PostMethod(url);
try
{
// can't follow redirects on a POST (w/o user intervention)
//post.setFollowRedirects(true);
post.setRequestEntity(new StringRequestEntity(
request.wwwFormEncoding(),
"application/x-www-form-urlencoded", "UTF-8"));
// place the http call to the OP
if (DEBUG) _log.debug("Performing HTTP POST on " + url);
responseCode = _httpClient.executeMethod(post);
String postResponse = post.getResponseBodyAsString();
response.copyOf(ParameterList.createFromKeyValueForm(postResponse));
if (DEBUG) _log.debug("Retrived response:\n" + postResponse);
}
catch (IOException e)
{
_log.error("Error talking to " + url +
" response code: " + responseCode, e);
}
finally
{
post.releaseConnection();
}
return responseCode;
}
/**
* Tries to establish an association with on of the service endpoints in
* the list of DiscoveryInformation.
* <p>
* Iterates over the items in the discoveries parameter a maximum of
* #_maxAssocAttempts times trying to esablish an association.
*
* @param discoveries The DiscoveryInformation list obtained by
* performing dicovery on the User-supplied OpenID
* identifier. Should be ordered by the priority
* of the service endpoints.
* @return The DiscoveryInformation instance with which
* an association was established, or the one
* with the highest priority if association failed.
*
* @see Discovery#discover(org.openid4java.discovery.Identifier)
*/
public DiscoveryInformation associate(List discoveries)
{
DiscoveryInformation discovered;
Association assoc;
int attemptsLeft = _maxAssocAttempts;
Iterator itr = discoveries.iterator();
while (itr.hasNext() && attemptsLeft > 0)
{
discovered = (DiscoveryInformation) itr.next();
attemptsLeft -= associate(discovered, attemptsLeft);
// check if an association was established
assoc = _associations.load(discovered.getOPEndpoint().toString());
if ( assoc != null &&
! Association.FAILED_ASSOC_HANDLE.equals(assoc.getHandle()))
return discovered;
}
if (discoveries.size() > 0)
{
// no association established, return the first service endpoint
DiscoveryInformation d0 = (DiscoveryInformation) discoveries.get(0);
_log.warn("Association failed; using first entry: " +
d0.getOPEndpoint());
return d0;
}
else
{
_log.error("Association attempt, but no discovey endpoints provided.");
return null;
}
}
/**
* Tries to establish an association with the OpenID Provider.
* <p>
* The resulting association information will be kept on storage for later
* use at verification stage.
*
* @param discovered DiscoveryInformation obtained during the discovery
* @return The number of association attempts performed.
*/
private int associate(DiscoveryInformation discovered, int maxAttempts)
{
if (_maxAssocAttempts == 0) return 0; // associations disabled
URL opUrl = discovered.getOPEndpoint();
String opEndpoint = opUrl.toString();
_log.info("Trying to associate with " + opEndpoint +
" attempts left: " + maxAttempts);
// check if there's an already established association
Association a = _associations.load(opEndpoint);
if (a != null && a.getHandle() != null)
{
_log.info("Found an existing association: " + a.getHandle());
return 0;
}
String handle = Association.FAILED_ASSOC_HANDLE;
// build a list of association types, with the preferred one at the end
LinkedHashMap requests = new LinkedHashMap();
if (discovered.isVersion2())
{
requests.put(AssociationSessionType.NO_ENCRYPTION_SHA1MAC, null);
requests.put(AssociationSessionType.NO_ENCRYPTION_SHA256MAC, null);
requests.put(AssociationSessionType.DH_SHA1, null);
requests.put(AssociationSessionType.DH_SHA256, null);
}
else
{
requests.put(AssociationSessionType.NO_ENCRYPTION_COMPAT_SHA1MAC, null);
requests.put(AssociationSessionType.DH_COMPAT_SHA1, null);
}
if (_prefAssocSessEnc.isVersion2() == discovered.isVersion2())
requests.put(_prefAssocSessEnc, null);
// build a stack of Association Request objects
// and keep only the allowed by the configured preferences
// the most-desirable entry is always at the top of the stack
Stack reqStack = new Stack();
Iterator iter = requests.keySet().iterator();
while(iter.hasNext())
{
AssociationSessionType type = (AssociationSessionType) iter.next();
// create the appropriate Association Request
AssociationRequest newReq = createAssociationRequest(type, opUrl);
if (newReq != null) reqStack.push(newReq);
}
// perform the association attempts
int attemptsLeft = maxAttempts;
LinkedHashMap alreadyTried = new LinkedHashMap();
while (attemptsLeft > 0 && ! reqStack.empty())
{
try
{
attemptsLeft
AssociationRequest assocReq =
(AssociationRequest) reqStack.pop();
if (DEBUG)
_log.debug("Trying association type: " + assocReq.getType());
// was this association / session type attempted already?
if (alreadyTried.keySet().contains(assocReq.getType()))
{
if (DEBUG) _log.debug("Already tried.");
continue;
}
// mark the current request type as already tried
alreadyTried.put(assocReq.getType(), null);
ParameterList respParams = new ParameterList();
int status = call(opEndpoint, assocReq, respParams);
// process the response
if (status == HttpStatus.SC_OK) // success response
{
AssociationResponse assocResp;
assocResp = AssociationResponse
.createAssociationResponse(respParams);
// valid association response
Association assoc =
assocResp.getAssociation(assocReq.getDHSess());
handle = assoc.getHandle();
AssociationSessionType respType = assocResp.getType();
if ( respType.equals(assocReq.getType()) ||
// v1 OPs may return a success no-encryption resp
( ! discovered.isVersion2() &&
respType.getHAlgorithm() == null &&
createAssociationRequest(respType,opUrl) != null))
{
// store the association and do no try alternatives
_associations.save(opEndpoint, assoc);
_log.info("Associated with " + discovered.getOPEndpoint()
+ " handle: " + assoc.getHandle());
break;
}
else
_log.info("Discarding association response, " +
"not matching consumer criteria");
}
else if (status == HttpStatus.SC_BAD_REQUEST) // error response
{
_log.info("Association attempt failed.");
// retrieve fallback sess/assoc/encryption params set by OP
// and queue a new attempt
AssociationError assocErr =
AssociationError.createAssociationError(respParams);
AssociationSessionType opType =
AssociationSessionType.create(
assocErr.getSessionType(),
assocErr.getAssocType());
if (alreadyTried.keySet().contains(opType))
continue;
// create the appropriate Association Request
AssociationRequest newReq =
createAssociationRequest(opType, opUrl);
if (newReq != null)
{
if (DEBUG) _log.debug("Retrieved association type " +
"from the association error: " +
newReq.getType());
reqStack.push(newReq);
}
}
}
catch (OpenIDException e)
{
_log.error("Error encountered during association attempt.", e);
}
}
// store OPs with which an association could not be established
// so that association attempts are not performed with each auth request
if (Association.FAILED_ASSOC_HANDLE.equals(handle)
&& _failedAssocExpire > 0)
_associations.save(opEndpoint,
Association.getFailedAssociation(_failedAssocExpire));
return maxAttempts - attemptsLeft;
}
    /**
     * Constructs an Association Request message of the specified session and
     * association type, taking into account the user preferences (encryption
     * level, default Diffie-Hellman parameters).
     *
     * @param type      The type of the association (session and association)
     * @param opUrl     The OP for which the association request is created
     * @return          An AssociationRequest message ready to be sent back
     *                  to the OpenID Provider, or null if an association
     *                  of the requested type cannot be built.
     */
    private AssociationRequest createAssociationRequest(
            AssociationSessionType type, URL opUrl)
    {
        try
        {
            // refuse types weaker than the configured minimum encryption level
            if (_minAssocSessEnc.isBetter(type))
                return null;

            AssociationRequest assocReq = null;

            DiffieHellmanSession dhSess;
            if (type.getHAlgorithm() != null) // DH session
            {
                dhSess = DiffieHellmanSession.create(type, _dhParams);
                // both the DH session type and the HMAC association type
                // must be supported by the local crypto provider
                if (DiffieHellmanSession.isDhSupported(type)
                    && Association.isHmacSupported(type.getAssociationType()))
                    assocReq = AssociationRequest.createAssociationRequest(type, dhSess);
            }
            // no-encryption sessions are only allowed over HTTPS
            else if ( opUrl.getProtocol().equals("https") && // no-enc sess
                      Association.isHmacSupported(type.getAssociationType()))
                assocReq = AssociationRequest.createAssociationRequest(type);

            if (assocReq == null)
                _log.warn("Could not create association of type: " + type);

            return assocReq;
        }
        catch (OpenIDException e)
        {
            // any protocol-level failure is treated as "cannot build request"
            _log.error("Error trying to create association request.", e);
            return null;
        }
    }
/**
* Builds a authentication request message for the user specified in the
* discovery information provided as a parameter.
* <p>
* If the discoveries parameter contains more than one entry, it will
* iterate over them trying to establish an association. If an association
* cannot be established, the first entry is used with stateless mode.
*
* @see #associate(java.util.List)
* @param discoveries The DiscoveryInformation list obtained by
* performing dicovery on the User-supplied OpenID
* identifier. Should be ordered by the priority
* of the service endpoints.
* @param returnToUrl The URL on the Consumer site where the OpenID
* Provider will return the user after generating
* the authentication response. <br>
* Null if the Consumer does not with to for the
* End User to be returned to it (something else
* useful will have been performed via an
* extension). <br>
* Must not be null in OpenID 1.x compatibility
* mode.
* @return Authentication request message to be sent to the
* OpenID Provider.
*/
public AuthRequest authenticate(List discoveries,
String returnToUrl)
throws ConsumerException, MessageException
{
return authenticate(discoveries, returnToUrl, returnToUrl);
}
/**
* Builds a authentication request message for the user specified in the
* discovery information provided as a parameter.
* <p>
* If the discoveries parameter contains more than one entry, it will
* iterate over them trying to establish an association. If an association
* cannot be established, the first entry is used with stateless mode.
*
* @see #associate(java.util.List)
* @param discoveries The DiscoveryInformation list obtained by
* performing dicovery on the User-supplied OpenID
* identifier. Should be ordered by the priority
* of the service endpoints.
* @param returnToUrl The URL on the Consumer site where the OpenID
* Provider will return the user after generating
* the authentication response. <br>
* Null if the Consumer does not with to for the
* End User to be returned to it (something else
* useful will have been performed via an
* extension). <br>
* Must not be null in OpenID 1.x compatibility
* mode.
* @param realm The URL pattern that will be presented to the
* user when he/she will be asked to authorize the
* authentication transaction. Must be a super-set
* of the @returnToUrl.
* @return Authentication request message to be sent to the
* OpenID Provider.
*/
public AuthRequest authenticate(List discoveries,
String returnToUrl, String realm)
throws ConsumerException, MessageException
{
// try to associate with one OP in the discovered list
DiscoveryInformation discovered = associate(discoveries);
return authenticate(discovered, returnToUrl, realm);
}
/**
* Builds a authentication request message for the user specified in the
* discovery information provided as a parameter.
*
* @param discovered A DiscoveryInformation endpoint from the list
* obtained by performing dicovery on the
* User-supplied OpenID identifier.
* @param returnToUrl The URL on the Consumer site where the OpenID
* Provider will return the user after generating
* the authentication response. <br>
* Null if the Consumer does not with to for the
* End User to be returned to it (something else
* useful will have been performed via an
* extension). <br>
* Must not be null in OpenID 1.x compatibility
* mode.
* @return Authentication request message to be sent to the
* OpenID Provider.
*/
public AuthRequest authenticate(DiscoveryInformation discovered,
String returnToUrl)
throws MessageException, ConsumerException
{
return authenticate(discovered, returnToUrl, returnToUrl);
}
    /**
     * Builds a authentication request message for the user specified in the
     * discovery information provided as a parameter.
     *
     * @param discovered    A DiscoveryInformation endpoint from the list
     *                      obtained by performing dicovery on the
     *                      User-supplied OpenID identifier.
     * @param returnToUrl   The URL on the Consumer site where the OpenID
     *                      Provider will return the user after generating
     *                      the authentication response. <br>
     *                      Null if the Consumer does not with to for the
     *                      End User to be returned to it (something else
     *                      useful will have been performed via an
     *                      extension). <br>
     *                      Must not be null in OpenID 1.x compatibility
     *                      mode.
     * @param realm         The URL pattern that will be presented to the
     *                      user when he/she will be asked to authorize the
     *                      authentication transaction. Must be a super-set
     *                      of the @returnToUrl.
     * @return              Authentication request message to be sent to the
     *                      OpenID Provider.
     * @throws ConsumerException if no discovery information is provided, or
     *                      if no association is available while stateless
     *                      mode is disabled.
     */
    public AuthRequest authenticate(DiscoveryInformation discovered,
                                    String returnToUrl, String realm)
            throws MessageException, ConsumerException
    {
        if (discovered == null)
            throw new ConsumerException("Authentication cannot continue: " +
                    "no discovery information provided.");

        Association assoc =
                _associations.load(discovered.getOPEndpoint().toString());

        // is the association about to expire?
        // if so, drop it now rather than risk it expiring mid-transaction
        if ( assoc != null &&
             ! Association.FAILED_ASSOC_HANDLE.equals(assoc.getHandle()) &&
             assoc.getExpiry().getTime() - System.currentTimeMillis() < _preExpiryAssocLockInterval * 1000)
        {
            _log.info("Association " + assoc.getHandle() +
                      " is within " + _preExpiryAssocLockInterval + " seconds of expiration, removing.");
            _associations.remove(
                discovered.getOPEndpoint().toString(),
                assoc.getHandle());
            assoc = null;
        }

        // no usable association: try to establish a fresh one
        if (assoc == null)
        {
            associate(discovered, _maxAssocAttempts);
            assoc = _associations.load(discovered.getOPEndpoint().toString());
        }

        // fall back to the "failed" marker handle (stateless mode) if
        // association could not be established
        String handle = assoc != null ?
                assoc.getHandle() : Association.FAILED_ASSOC_HANDLE;

        // get the Claimed ID
        String claimedId;
        if (discovered.hasClaimedIdentifier())
            claimedId = discovered.getClaimedIdentifier().getIdentifier();
        else
            claimedId = AuthRequest.SELECT_ID;

        // set the Delegate ID (aka OP-specific identifier)
        String delegate = claimedId;
        if (discovered.hasDelegateIdentifier())
            delegate = discovered.getDelegateIdentifier();

        // stateless mode disabled ?
        if ( !_allowStateless && Association.FAILED_ASSOC_HANDLE.equals(handle))
            throw new ConsumerException("Authentication cannot be performed: " +
                    "no association available and stateless mode is disabled");

        _log.info("Creating authentication request for" +
                " OP-endpoint: " + discovered.getOPEndpoint() +
                " claimedID: " + claimedId +
                " OP-specific ID: " + delegate);

        AuthRequest authReq = AuthRequest.createAuthRequest(claimedId, delegate,
                ! discovered.isVersion2(), returnToUrl, handle, realm, _realmVerifier);

        authReq.setOPEndpoint(discovered.getOPEndpoint());

        // v1 responses carry no server nonce; embed a signed consumer nonce
        // in the return_to URL for replay protection
        if (! discovered.isVersion2())
            authReq.setReturnTo(insertConsumerNonce(authReq.getReturnTo()));

        // ignore the immediate flag for OP-directed identifier selection
        if (! AuthRequest.SELECT_ID.equals(claimedId))
            authReq.setImmediate(_immediateAuth);

        authReq.validate();

        return authReq;
    }
    /**
     * Performs verification on the Authentication Response (assertion)
     * received from the OpenID Provider.
     * <p>
     * Three verification steps are performed:
     * <ul>
     *  <li> nonce:                  the same assertion will not be accepted more
     *                               than once
     *  <li> signatures:             verifies that the message was indeed sent
     *                               by the OpenID Provider that was contacted
     *                               earlier after discovery
     *  <li> discovered information: the information contained in the assertion
     *                               matches the one obtained during the
     *                               discovery (the OpenID Provider is
     *                               authoritative for the claimed identifier;
     *                               the received assertion is not meaningful
     *                               otherwise
     * </ul>
     *
     * @param receivingUrl  The URL where the Consumer (Relying Party) has
     *                      accepted the incoming message.
     * @param response      ParameterList of the authentication response
     *                      being verified.
     * @param discovered    Previously discovered information (which can
     *                      therefore be trusted) obtained during the discovery
     *                      phase; this should be stored and retrieved by the RP
     *                      in the user's session.
     *
     * @return              A VerificationResult, containing a verified
     *                      identifier; the verified identifier is null if
     *                      the verification failed).
     */
    public VerificationResult verify(String receivingUrl,
                                     ParameterList response,
                                     DiscoveryInformation discovered)
            throws MessageException, DiscoveryException, AssociationException
    {
        VerificationResult result = new VerificationResult();
        _log.info("Verifying authentication response...");

        // non-immediate negative response
        // (user cancelled at the OP; no identifier to verify)
        if ( "cancel".equals(response.getParameterValue("openid.mode")) )
        {
            result.setAuthResponse(AuthFailure.createAuthFailure(response));
            _log.info("Received auth failure.");
            return result;
        }

        // immediate negative response
        // (v2 uses setup_needed; v1 signals it via user_setup_url on id_res)
        if ( "setup_needed".equals(response.getParameterValue("openid.mode")) ||
             ("id_res".equals(response.getParameterValue("openid.mode"))
             && response.hasParameter("openid.user_setup_url") ) )
        {
            AuthImmediateFailure fail =
                    AuthImmediateFailure.createAuthImmediateFailure(response);
            result.setAuthResponse(fail);
            result.setOPSetupUrl(fail.getUserSetupUrl());
            _log.info("Received auth immediate failure.");
            return result;
        }

        AuthSuccess authResp = AuthSuccess.createAuthSuccess(response);
        _log.info("Received positive auth response.");

        authResp.validate();

        result.setAuthResponse(authResp);

        // [1/4] return_to verification
        if (! verifyReturnTo(receivingUrl, authResp))
        {
            result.setStatusMsg("Return_To URL verification failed.");
            _log.error("Return_To URL verification failed.");
            return result;
        }

        // [2/4] : discovered info verification
        // (re-assigned: may perform fresh discovery in stateless mode)
        discovered = verifyDiscovered(authResp, discovered);
        if (discovered == null || ! discovered.hasClaimedIdentifier())
        {
            result.setStatusMsg("Discovered information verification failed.");
            _log.error("Discovered information verification failed.");
            return result;
        }

        // [3/4] : nonce verification
        if (! verifyNonce(authResp, discovered))
        {
            result.setStatusMsg("Nonce verification failed.");
            _log.error("Nonce verification failed.");
            return result;
        }

        // [4/4] : signature verification
        // (sets the verified identifier on success)
        return (verifySignature(authResp, discovered, result));
    }
    /**
     * Verifies that the URL where the Consumer (Relying Party) received the
     * authentication response matches the value of the "openid.return_to"
     * parameter in the authentication response.
     * <p>
     * The comparison checks protocol, authority and path (tolerating a
     * trailing slash difference), then requires every query parameter of the
     * return_to URL to appear with the same values in the receiving URL.
     *
     * @param receivingUrl  The URL where the Consumer received the
     *                      authentication response.
     * @param response      The authentication response.
     * @return              True if the two URLs match, false otherwise.
     */
    public boolean verifyReturnTo(String receivingUrl, AuthSuccess response)
    {
        if (DEBUG)
            _log.debug("Verifying return URL; receiving: " + receivingUrl +
                    "\nmessage: " + response.getReturnTo());

        URL receiving;
        URL returnTo;
        try
        {
            receiving = new URL(receivingUrl);
            returnTo = new URL(response.getReturnTo());
        }
        catch (MalformedURLException e)
        {
            _log.error("Invalid return URL.", e);
            return false;
        }

        // [1/2] schema, authority (includes port) and path

        // deal manually with the trailing slash in the path:
        // both paths are normalized to end with '/' before comparison
        StringBuffer receivingPath = new StringBuffer(receiving.getPath());
        if ( receivingPath.length() > 0 &&
             receivingPath.charAt(receivingPath.length() -1) != '/')
            receivingPath.append('/');

        StringBuffer returnToPath = new StringBuffer(returnTo.getPath());
        if ( returnToPath.length() > 0 &&
             returnToPath.charAt(returnToPath.length() -1) != '/')
            returnToPath.append('/');

        if ( ! receiving.getProtocol().equals(returnTo.getProtocol()) ||
             ! receiving.getAuthority().equals(returnTo.getAuthority()) ||
             ! receivingPath.toString().equals(returnToPath.toString()) )
        {
            if (DEBUG)
                _log.debug("Return URL schema, authority or " +
                           "path verification failed.");
            return false;
        }

        // [2/2] query parameters
        try
        {
            Map returnToParams = extractQueryParams(returnTo);
            Map receivingParams = extractQueryParams(receiving);

            // no query string on return_to: nothing more to check
            if (returnToParams == null) return true;

            if (receivingParams == null)
            {
                if (DEBUG)
                    _log.debug("Return URL query parameters verification failed.");
                return false;
            }

            // every return_to parameter must be present in the receiving URL
            // with exactly the same multi-set of values
            Iterator iter = returnToParams.keySet().iterator();
            while (iter.hasNext())
            {
                String key = (String) iter.next();
                List receivingValues = (List) receivingParams.get(key);
                List returnToValues = (List) returnToParams.get(key);

                if ( receivingValues == null ||
                     receivingValues.size() != returnToValues.size() ||
                     ! receivingValues.containsAll( returnToValues ) )
                {
                    if (DEBUG)
                        _log.debug("Return URL query parameters verification failed.");
                    return false;
                }
            }
        }
        catch (UnsupportedEncodingException e)
        {
            _log.error("Error verifying return URL query parameters.", e);
            return false;
        }

        return true;
    }
/**
* Returns a Map(key, List(values)) with the URL's query params, or null if
* the URL doesn't have a query string.
*/
public Map extractQueryParams(URL url) throws UnsupportedEncodingException
{
if (url.getQuery() == null) return null;
Map paramsMap = new HashMap();
List paramList = Arrays.asList(url.getQuery().split("&"));
Iterator iter = paramList.iterator();
while (iter.hasNext())
{
String keyValue = (String) iter.next();
int equalPos = keyValue.indexOf("=");
String key = equalPos > -1 ?
URLDecoder.decode(keyValue.substring(0, equalPos), "UTF-8") :
URLDecoder.decode(keyValue, "UTF-8");
String value;
if (equalPos <= -1)
value = null;
else if (equalPos + 1 > keyValue.length())
value = "";
else
value = URLDecoder.decode(keyValue.substring(equalPos + 1), "UTF-8");
List existingValues = (List) paramsMap.get(key);
if (existingValues == null)
{
List newValues = new ArrayList();
newValues.add(value);
paramsMap.put(key, newValues);
}
else
existingValues.add(value);
}
return paramsMap;
}
/**
* Verifies the nonce in an authentication response.
*
* @param authResp The authentication response containing the nonce
* to be verified.
* @param discovered The discovery information associated with the
* authentication transaction.
* @return True if the nonce is valid, false otherwise.
*/
public boolean verifyNonce(AuthSuccess authResp,
DiscoveryInformation discovered)
{
String nonce = authResp.getNonce();
if (nonce == null) // compatibility mode
nonce = extractConsumerNonce(authResp.getReturnTo());
if (nonce == null) return false;
// using the same nonce verifier for both server and consumer nonces
return (NonceVerifier.OK == _nonceVerifier.seen(
discovered.getOPEndpoint().toString(), nonce));
}
/**
* Inserts a consumer-side nonce as a custom parameter in the return_to
* parameter of the authentication request.
* <p>
* Needed for preventing replay attack when running compatibility mode.
* OpenID 1.1 OpenID Providers do not generate nonces in authentication
* responses.
*
* @param returnTo The return_to URL to which a custom nonce
* parameter will be added.
* @return The return_to URL containing the nonce.
*/
public String insertConsumerNonce(String returnTo)
{
String nonce = _consumerNonceGenerator.next();
returnTo += (returnTo.indexOf('?') != -1) ? '&' : '?';
try
{
returnTo += "openid.rpnonce=" + URLEncoder.encode(nonce, "UTF-8");
returnTo += "&openid.rpsig=" +
URLEncoder.encode(_privateAssociation.sign(returnTo),
"UTF-8");
_log.info("Inserted consumer nonce: " + nonce);
if (DEBUG) _log.debug("return_to:" + returnTo);
}
catch (Exception e)
{
_log.error("Error inserting consumre nonce.", e);
return null;
}
return returnTo;
}
/**
* Extracts the consumer-side nonce from the return_to parameter in
* authentication response from a OpenID 1.1 Provider.
*
* @param returnTo return_to URL from the authentication response
* @return The nonce found in the return_to URL, or null if
* it wasn't found.
*/
public String extractConsumerNonce(String returnTo)
{
if (DEBUG)
_log.debug("Extracting consumer nonce...");
String nonce = null;
String signature = null;
URL returnToUrl;
try
{
returnToUrl = new URL(returnTo);
}
catch (MalformedURLException e)
{
_log.error("Invalid return_to: " + returnTo, e);
return null;
}
String query = returnToUrl.getQuery();
String[] params = query.split("&");
for (int i=0; i < params.length; i++)
{
String keyVal[] = params[i].split("=", 2);
try
{
if (keyVal.length == 2 && "openid.rpnonce".equals(keyVal[0]))
{
nonce = URLDecoder.decode(keyVal[1], "UTF-8");
if (DEBUG) _log.debug("Extracted consumer nonce: " + nonce);
}
if (keyVal.length == 2 && "openid.rpsig".equals(keyVal[0]))
{
signature = URLDecoder.decode(keyVal[1], "UTF-8");
if (DEBUG) _log.debug("Extracted consumer nonce signature: "
+ signature);
}
}
catch (UnsupportedEncodingException e)
{
_log.error("Error extracting consumer nonce / signarure.", e);
return null;
}
}
// check the signature
if (signature == null)
{
_log.error("Null consumer nonce signature.");
return null;
}
String signed = returnTo.substring(0, returnTo.indexOf("&openid.rpsig="));
if (DEBUG) _log.debug("Consumer signed text:\n" + signed);
try
{
if (_privateAssociation.verifySignature(signed, signature))
{
_log.info("Consumer nonce signature verified.");
return nonce;
}
else
{
_log.error("Consumer nonce signature failed.");
return null;
}
}
catch (AssociationException e)
{
_log.error("Error verifying consumer nonce signature.", e);
return null;
}
}
/**
* Verifies the dicovery information matches the data received in a
* authentication response from an OpenID Provider.
*
* @param authResp The authentication response to be verified.
* @param discovered The discovery information obtained earlier during
* the discovery stage, associated with the
* identifier(s) in the request. Stateless operation
* is assumed if null.
* @return The discovery information associated with the
* claimed identifier, that can be used further in
* the verification process. Null if the discovery
* on the claimed identifier does not match the data
* in the assertion.
*/
private DiscoveryInformation verifyDiscovered(AuthSuccess authResp,
DiscoveryInformation discovered)
throws DiscoveryException
{
if (authResp == null || authResp.getIdentity() == null)
{
_log.info("Assertion is not about an identifier");
return null;
}
if (authResp.isVersion2())
return verifyDiscovered2(authResp, discovered);
else
return verifyDiscovered1(authResp, discovered);
}
/**
* Verifies the discovered information associated with a OpenID 1.x
* response.
*
* @param authResp The authentication response to be verified.
* @param discovered The discovery information obtained earlier during
* the discovery stage, associated with the
* identifier(s) in the request. Stateless operation
* is assumed if null.
* @return The discovery information associated with the
* claimed identifier, that can be used further in
* the verification process. Null if the discovery
* on the claimed identifier does not match the data
* in the assertion.
*/
private DiscoveryInformation verifyDiscovered1(AuthSuccess authResp,
DiscoveryInformation discovered)
throws DiscoveryException
{
if ( authResp == null || authResp.isVersion2() ||
authResp.getIdentity() == null )
{
if (DEBUG)
_log.error("Invalid authentication response: " +
"cannot verify v1 discovered information");
return null;
}
// asserted identifier in the AuthResponse
String assertId = authResp.getIdentity();
if ( discovered != null && ! discovered.isVersion2() &&
discovered.getClaimedIdentifier() != null )
{
// statefull mode
if (DEBUG)
_log.debug("Verifying discovered information " +
"for OpenID1 assertion about ClaimedID: " +
discovered.getClaimedIdentifier().getIdentifier());
String discoveredId = discovered.hasDelegateIdentifier() ?
discovered.getDelegateIdentifier() :
discovered.getClaimedIdentifier().getIdentifier();
if (assertId.equals(discoveredId))
return discovered;
}
// stateless, bare response, or the user changed the ID at the OP
_log.info("Proceeding with stateless mode / bare response verification...");
DiscoveryInformation firstServiceMatch = null;
// assuming openid.identity is the claimedId
// (delegation can't work with stateless/bare resp v1 operation)
if (DEBUG) _log.debug(
"Performing discovery on the ClaimedID in the assertion: " + assertId);
List discoveries = _discovery.discover(assertId);
Iterator iter = discoveries.iterator();
while (iter.hasNext())
{
DiscoveryInformation service = (DiscoveryInformation) iter.next();
if (service.isVersion2() || // only interested in v1
! service.hasClaimedIdentifier() || // need a claimedId
service.hasDelegateIdentifier() || // not allowing delegates
! assertId.equals(service.getClaimedIdentifier()))
continue;
if (DEBUG) _log.debug("Found matching service: " + service);
// keep the first endpoint that matches
if (firstServiceMatch == null)
firstServiceMatch = service;
Association assoc = _associations.load(
service.getOPEndpoint().toString(),
authResp.getHandle());
// don't look further if there is an association with this endpoint
if (assoc != null)
{
if (DEBUG)
_log.debug("Found existing association for " + service +
" Not looking for another service endpoint.");
return service;
}
}
if (firstServiceMatch == null)
_log.error("No service element found to match " +
"the identifier in the assertion.");
return firstServiceMatch;
}
/**
* Verifies the discovered information associated with a OpenID 2.0
* response.
*
* @param authResp The authentication response to be verified.
* @param discovered The discovery information obtained earlier during
* the discovery stage, associated with the
* identifier(s) in the request. Stateless operation
* is assumed if null.
* @return The discovery information associated with the
* claimed identifier, that can be used further in
* the verification process. Null if the discovery
* on the claimed identifier does not match the data
* in the assertion.
*/
private DiscoveryInformation verifyDiscovered2(AuthSuccess authResp,
DiscoveryInformation discovered)
throws DiscoveryException
{
// An OpenID 2.0 assertion must carry both an identity and a claimed ID.
if (authResp == null || ! authResp.isVersion2() ||
authResp.getIdentity() == null || authResp.getClaimed() == null)
{
if (DEBUG)
_log.debug("Discovered information doesn't match " +
"auth response / version");
return null;
}
// asserted identifier in the AuthResponse
String assertId = authResp.getIdentity();
// claimed identifier in the AuthResponse
Identifier respClaimed =
Discovery.parseIdentifier(authResp.getClaimed(), true);
// the OP endpoint sent in the response
String respEndpoint = authResp.getOpEndpoint();
if (DEBUG)
_log.debug("Verifying discovered information for OpenID2 assertion " +
"about ClaimedID: " + respClaimed.getIdentifier());
// was the claimed identifier in the assertion previously discovered?
if (discovered != null && discovered.hasClaimedIdentifier() &&
discovered.getClaimedIdentifier().equals(respClaimed) )
{
// OP-endpoint, OP-specific ID and protocol version must match
// (the OP-specific ID is the delegate when one is present).
String opSpecific = discovered.hasDelegateIdentifier() ?
discovered.getDelegateIdentifier() :
discovered.getClaimedIdentifier().getIdentifier();
if ( opSpecific.equals(assertId) &&
discovered.isVersion2() &&
discovered.getOPEndpoint().toString().equals(respEndpoint))
{
if (DEBUG) _log.debug(
"ClaimedID in the assertion was previously discovered: "
+ respClaimed);
return discovered;
}
}
// stateless, bare response, or the user changed the ID at the OP;
// fall back to fresh discovery on the claimed identifier.
DiscoveryInformation firstServiceMatch = null;
// perform discovery on the claim identifier in the assertion
if(DEBUG) _log.debug(
"Performing discovery on the ClaimedID in the assertion: "
+ respClaimed);
List discoveries = _discovery.discover(respClaimed);
// find the newly discovered service endpoint that matches the assertion
// - OP endpoint, OP-specific ID and protocol version must match
// - prefer (first = highest priority) endpoint with an association
if (DEBUG)
_log.debug("Looking for a service element to match " +
"the ClaimedID and OP endpoint in the assertion...");
Iterator iter = discoveries.iterator();
while (iter.hasNext())
{
DiscoveryInformation service = (DiscoveryInformation) iter.next();
// skip OP-identifier entries; they carry no claimed identifier
if (DiscoveryInformation.OPENID2_OP.equals(service.getVersion()))
continue;
String opSpecific = service.hasDelegateIdentifier() ?
service.getDelegateIdentifier() :
service.getClaimedIdentifier().getIdentifier();
if ( ! opSpecific.equals(assertId) ||
! service.isVersion2() ||
! service.getOPEndpoint().toString().equals(respEndpoint) )
continue;
// keep the first endpoint that matches
if (firstServiceMatch == null)
{
if (DEBUG) _log.debug("Found matching service: " + service);
firstServiceMatch = service;
}
Association assoc = _associations.load(
service.getOPEndpoint().toString(),
authResp.getHandle());
// don't look further if there is an association with this endpoint
if (assoc != null)
{
if (DEBUG)
_log.debug("Found existing association, " +
"not looking for another service endpoint.");
return service;
}
}
// no endpoint had an association; return the highest-priority match
// (or null if nothing matched at all)
if (firstServiceMatch == null)
_log.error("No service element found to match " +
"the ClaimedID / OP-endpoint in the assertion.");
return firstServiceMatch;
}
/**
* Verifies the signature in a authentication response message.
*
* Verification is done locally when an association with the OP endpoint
* is available, otherwise directly with the OP (check_authentication).
*
* @param authResp Authentication response to be verified.
* @param discovered The discovery information obtained earlier during
* the discovery stage.
* @param result The verification result being built up by the caller.
* @return The same {@code result} object: its verified identifier
* is set when signature verification succeeded, and its
* status message describes the failure otherwise.
*/
private VerificationResult verifySignature(AuthSuccess authResp,
DiscoveryInformation discovered,
VerificationResult result)
throws AssociationException, MessageException, DiscoveryException
{
if (discovered == null || authResp == null)
{
_log.error("Can't verify signature: " +
"null assertion or discovered information.");
result.setStatusMsg("Can't verify signature: " +
"null assertion or discovered information.");
return result;
}
Identifier claimedId = discovered.isVersion2() ?
Discovery.parseIdentifier(authResp.getClaimed()) : //may have frag
discovered.getClaimedIdentifier(); //assert id may be delegate in v1
String handle = authResp.getHandle();
URL op = discovered.getOPEndpoint();
Association assoc = _associations.load(op.toString(), handle);
if (assoc != null) // association available, local verification
{
_log.info("Found association: " + assoc.getHandle() +
" verifying signature locally...");
String text = authResp.getSignedText();
String signature = authResp.getSignature();
if (assoc.verifySignature(text, signature))
{
// success: record the verified identifier on the result
result.setVerifiedId(claimedId);
if (DEBUG) _log.debug("Local signature verification succeeded.");
}
else if (DEBUG) _log.debug("Local signature verification failed.");
}
else // no association, verify with the OP
{
_log.info("No association found, " +
"contacting the OP for direct verification...");
VerifyRequest vrfy = VerifyRequest.createVerifyRequest(authResp);
ParameterList responseParams = new ParameterList();
int respCode = call(op.toString(), vrfy, responseParams);
if (HttpStatus.SC_OK == respCode)
{
VerifyResponse vrfyResp =
VerifyResponse.createVerifyResponse(responseParams);
vrfyResp.validate();
if (vrfyResp.isSignatureVerified())
{
// process the optional invalidate_handle first
String invalidateHandle = vrfyResp.getInvalidateHandle();
if (invalidateHandle != null)
_associations.remove(op.toString(), invalidateHandle);
result.setVerifiedId(claimedId);
if (DEBUG)
_log.debug("Direct signature verification succeeded " +
"with OP: " + op);
}
else
{
if (DEBUG)
_log.debug("Direct signature verification failed " +
"with OP: " + op);
result.setStatusMsg("Direct signature verification failed.");
}
}
else
{
// non-200 response: decode the OP's direct error message
DirectError err = DirectError.createDirectError(responseParams);
if (DEBUG) _log.debug("Error verifying signature with the OP: "
+ op + " error message: " + err.keyValueFormEncoding());
result.setStatusMsg("Error verifying signature with the OP: "
+ err.getErrorMsg());
}
}
// a non-null verified identifier is the success indicator
Identifier verifiedID = result.getVerifiedId();
if (verifiedID != null)
_log.info("Verification succeeded for: " + verifiedID);
else
_log.error("Verification failed for: " + verifiedID
+ " reason: " + result.getStatusMsg());
return result;
}
} |
package de.cyberkatze.iroot;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import org.apache.cordova.LOG;
import java.io.File;
import java.util.List;
public class InternalRootDetection {
public boolean isRooted(final Context context) {
return isExistBuildTags()
|| isExistSuperUserApk()
|| isExistSUPath()
|| checkDirPermissions()
|| checkExecutingCommands()
|| checkInstalledPackages(context)
|| checkOTACertificates()
|| isRunningOnEmulator();
}
/**
* Checks whether any of the system directories are writable or the /data directory is readable.
* This test will usually result in a false negative on rooted devices.
*/
private boolean checkDirPermissions() {
boolean isWritableDir;
boolean isReadableDataDir;
for (String dirName : Constants.PATHS_THAT_SHOULD_NOT_BE_WRITABLE) {
final File currentDir = new File(dirName);
isWritableDir = currentDir.exists() && currentDir.canWrite();
isReadableDataDir = (dirName.equals("/data") && currentDir.canRead());
if (isWritableDir || isReadableDataDir) {
return true;
}
}
return false;
}
/**
* Checking rooted or not by 'android.os.Build.TAGS' contains test-keys.
*/
private boolean isExistBuildTags() {
try {
String buildTags = Constants.ANDROID_OS_BUILD_TAGS;
LOG.d(Constants.LOG_TAG, "buildTags: " + buildTags);
return (buildTags != null) && buildTags.contains("test-keys");
} catch (Exception e) {
LOG.e(Constants.LOG_TAG, e.getMessage());
}
return false;
}
/**
* Checks whether the Superuser.apk is present in the system applications.
*/
private boolean isExistSuperUserApk() {
for (String path : Constants.SUPER_USER_APK_FILES) {
final File suAPK = new File(path);
if (suAPK.exists()) {
return true;
}
}
return false;
}
/**
* Checking if SU path exist (case sensitive).
*/
private boolean isExistSUPath() {
for (String path : Constants.SU_PATHES) {
if (new File(path).exists()) {
return true;
}
}
return false;
}
/**
* Checks for installed packages which are known to be present on rooted devices.
*
* @param context Used for accessing the package manager.
*/
private boolean checkInstalledPackages(final Context context) {
final PackageManager pm = context.getPackageManager();
final List<PackageInfo> installedPackages = pm.getInstalledPackages(0);
int rootOnlyAppCount = 0;
for (PackageInfo packageInfo : installedPackages) {
final String packageName = packageInfo.packageName;
LOG.d(Constants.LOG_TAG, "[checkInstalledPackages] Check package [" + packageName + "]");
try {
LOG.d(Constants.LOG_TAG, "[checkInstalledPackages] PackageManager.getPackageInfo: " + pm.getPackageInfo(packageName, PackageManager.GET_ACTIVITIES));
LOG.d(Constants.LOG_TAG, "[checkInstalledPackages] PackageManager.getPackageInfo: [" + packageName + "] installed");
} catch (PackageManager.NameNotFoundException e) {
LOG.d(Constants.LOG_TAG, "[checkInstalledPackages] PackageManager.getPackageInfo: [" + packageName + "] not installed");
}
if (Constants.BLACKLISTED_PACKAGES.contains(packageName)) {
LOG.d(Constants.LOG_TAG, "[checkInstalledPackages] Package [" + packageName + "] found in BLACKLISTED_PACKAGES");
return true;
}
if (Constants.ROOT_ONLY_APPLICATIONS.contains(packageName)) {
LOG.d(Constants.LOG_TAG, "[checkInstalledPackages] Package [" + packageName + "] found in ROOT_ONLY_APPLICATIONS");
rootOnlyAppCount += 1;
}
// Check to see if the Cydia Substrate exists.
if (Constants.CYDIA_SUBSTRATE_PACKAGE.equals(packageName)) {
LOG.d(Constants.LOG_TAG, "[checkInstalledPackages] Package [" + packageName + "] found in CYDIA_SUBSTRATE_PACKAGE");
rootOnlyAppCount += 1;
}
}
return rootOnlyAppCount > 2; // todo: why?
}
/**
* Check to see if the file /etc/security/otacerts.zip exists.
*/
private boolean checkOTACertificates() {
String otacerts = Constants.OTA_CERTIFICATES_PATH;
return new File(otacerts).exists();
}
private boolean checkExecutingCommands() {
return Utils.canExecuteCommand("/system/xbin/which su")
|| Utils.canExecuteCommand("/system/bin/which su")
|| Utils.canExecuteCommand("which su");
}
public boolean isRunningOnEmulator() {
Utils.getDeviceInfo();
return Build.FINGERPRINT.startsWith("generic")
// ||Build.FINGERPRINT.startsWith("unknown") // Meizu Mx Pro will return unknown, so comment it!
|| Build.MODEL.contains("google_sdk")
|| Build.MODEL.contains("Emulator")
|| Build.MODEL.contains("Android SDK built for x86")
|| Build.BOARD.equals("QC_Reference_Phone") //bluestacks
|| Build.HOST.startsWith("Build") //MSI App Player
|| Build.MANUFACTURER.contains("Genymotion")
|| Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic")
|| "google_sdk".equals(Build.PRODUCT);
}
// TODO: https://github.com/tansiufang54/fncgss/blob/master/app/src/main/java/co/id/franknco/controller/RootUtil.java#L126
// private boolean checkServerSocket() {
// try {
// ServerSocket ss = new ServerSocket(81);
// ss.close();
// return true;
// } catch (Exception e) {
// // not sure
// return false;
} |
package org.pentaho.di.trans.steps.janino;
import java.math.BigDecimal;
import java.util.Date;
import org.codehaus.janino.ExpressionEvaluator;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Calculate new field values using pre-defined functions.
*
* @author Matt
* @since 8-sep-2005
*/
public class Janino extends BaseStep implements StepInterface
{
private JaninoMeta meta;
private JaninoData data;
public Janino(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans)
{
super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
}
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
meta=(JaninoMeta)smi;
data=(JaninoData)sdi;
Object[] r=getRow(); // get row, set busy!
if (r==null) // no more input to be expected...
{
setOutputDone();
return false;
}
if (first)
{
first = false;
data.outputRowMeta = getInputRowMeta().clone();
meta.getFields(data.outputRowMeta, getStepname(), null, null, this);
// Calculate replace indexes...
data.replaceIndex = new int[meta.getFormula().length];
for (int i=0;i<meta.getFormula().length;i++) {
JaninoMetaFunction fn = meta.getFormula()[i];
if (!Const.isEmpty(fn.getReplaceField())) {
data.replaceIndex[i] = getInputRowMeta().indexOfValue(fn.getReplaceField());
if (data.replaceIndex[i]<0) {
throw new KettleException("Unknown field specified to replace with a formula result: ["+fn.getReplaceField()+"]");
}
} else {
data.replaceIndex[i] = -1;
}
}
}
if (log.isRowLevel()) log.logRowlevel(toString(), "Read row #"+getLinesRead()+" : "+r);
Object[] outputRowData = calcFields(getInputRowMeta(), r);
putRow(data.outputRowMeta, outputRowData); // copy row to possible alternate rowset(s).
if (log.isRowLevel()) log.logRowlevel(toString(), "Wrote row #"+getLinesWritten()+" : "+r);
if (checkFeedback(getLinesRead())) logBasic("Linenr "+getLinesRead());
return true;
}
private Object[] calcFields(RowMetaInterface rowMeta, Object[] r) throws KettleValueException
{
try
{
Object[] outputRowData = RowDataUtil.createResizedCopy(r, data.outputRowMeta.size());
int tempIndex = rowMeta.size();
// Initialize evaluators etc. Only do it once.
if (data.expressionEvaluators==null) {
data.expressionEvaluators = new ExpressionEvaluator[meta.getFormula().length];
String[] parameterNames = new String[data.outputRowMeta.size()];
Class<?>[] parameterTypes = new Class[data.outputRowMeta.size()];
for (int i=0;i<data.outputRowMeta.size();i++) {
switch(data.outputRowMeta.getValueMeta(i).getType()) {
case ValueMetaInterface.TYPE_STRING : parameterTypes[i] = String.class; break;
case ValueMetaInterface.TYPE_NUMBER : parameterTypes[i] = Double.class; break;
case ValueMetaInterface.TYPE_INTEGER : parameterTypes[i] = Long.class; break;
case ValueMetaInterface.TYPE_DATE : parameterTypes[i] = Date.class; break;
case ValueMetaInterface.TYPE_BIGNUMBER : parameterTypes[i] = BigDecimal.class; break;
case ValueMetaInterface.TYPE_BOOLEAN : parameterTypes[i] = Boolean.class; break;
case ValueMetaInterface.TYPE_BINARY : parameterTypes[i] = byte[].class; break;
default: parameterTypes[i] = String.class; break;
}
parameterNames[i] = data.outputRowMeta.getValueMeta(i).getName();
}
for (int i=0;i<meta.getFormula().length;i++) {
JaninoMetaFunction fn = meta.getFormula()[i];
if (!Const.isEmpty( fn.getFieldName())) {
// Create the expression evaluator: is relatively slow so we do it only for the first row...
data.expressionEvaluators[i] = new ExpressionEvaluator();
data.expressionEvaluators[i].setParameters(parameterNames, parameterTypes);
data.expressionEvaluators[i].setReturnType(Object.class);
data.expressionEvaluators[i].cook(fn.getFormula());
} else {
throw new KettleException("Unable to find field name for formula ["+Const.NVL(fn.getFormula(), "")+"]");
}
}
}
for (int i=0;i<meta.getFormula().length;i++)
{
JaninoMetaFunction fn = meta.getFormula()[i];
// This method can only accept the specified number of values...
Object[] rowData = new Object[data.outputRowMeta.size()];
System.arraycopy(outputRowData, 0, rowData, 0, rowData.length);
Object formulaResult = data.expressionEvaluators[i].evaluate(rowData);
// Calculate the return type on the first row...
if (data.returnType[i]<0) {
if (formulaResult instanceof String) {
data.returnType[i] = JaninoData.RETURN_TYPE_STRING;
if (fn.getValueType()!=ValueMetaInterface.TYPE_STRING) {
throw new KettleValueException("Please specify a String type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else if (formulaResult instanceof Integer) {
data.returnType[i] = JaninoData.RETURN_TYPE_INTEGER;
if (fn.getValueType()!=ValueMetaInterface.TYPE_INTEGER) {
throw new KettleValueException("Please specify an Integer type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else if (formulaResult instanceof Long) {
data.returnType[i] = JaninoData.RETURN_TYPE_LONG;
if (fn.getValueType()!=ValueMetaInterface.TYPE_INTEGER) {
throw new KettleValueException("Please specify an Integer type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else if (formulaResult instanceof Number) {
data.returnType[i] = JaninoData.RETURN_TYPE_NUMBER;
if (fn.getValueType()!=ValueMetaInterface.TYPE_NUMBER) {
throw new KettleValueException("Please specify a Number type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else if (formulaResult instanceof Date) {
data.returnType[i] = JaninoData.RETURN_TYPE_DATE;
if (fn.getValueType()!=ValueMetaInterface.TYPE_DATE) {
throw new KettleValueException("Please specify a Date type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else if (formulaResult instanceof BigDecimal) {
data.returnType[i] = JaninoData.RETURN_TYPE_BIGDECIMAL;
if (fn.getValueType()!=ValueMetaInterface.TYPE_BIGNUMBER) {
throw new KettleValueException("Please specify a BigNumber type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else if (formulaResult instanceof byte[]) {
data.returnType[i] = JaninoData.RETURN_TYPE_BYTE_ARRAY;
if (fn.getValueType()!=ValueMetaInterface.TYPE_BINARY) {
throw new KettleValueException("Please specify a Binary type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else if (formulaResult instanceof Boolean) {
data.returnType[i] = JaninoData.RETURN_TYPE_BOOLEAN;
if (fn.getValueType()!=ValueMetaInterface.TYPE_BOOLEAN) {
throw new KettleValueException("Please specify a Boolean type to parse ["+formulaResult.getClass().getName()+"] for field ["+fn.getFieldName()+"] as a result of formula ["+fn.getFormula()+"]");
}
} else {
data.returnType[i] = JaninoData.RETURN_TYPE_STRING;
}
}
Object value;
if (formulaResult==null) {
value=null;
} else {
switch(data.returnType[i]) {
case JaninoData.RETURN_TYPE_STRING : value = formulaResult.toString(); break;
case JaninoData.RETURN_TYPE_NUMBER : value = new Double(((Number)formulaResult).doubleValue()); break;
case JaninoData.RETURN_TYPE_INTEGER : value = new Long( ((Integer)formulaResult).intValue() ); break;
case JaninoData.RETURN_TYPE_LONG : value = (Long)formulaResult; break;
case JaninoData.RETURN_TYPE_DATE : value = (Date)formulaResult; break;
case JaninoData.RETURN_TYPE_BIGDECIMAL : value = (BigDecimal)formulaResult; break;
case JaninoData.RETURN_TYPE_BYTE_ARRAY : value = (byte[])formulaResult; break;
case JaninoData.RETURN_TYPE_BOOLEAN : value = (Boolean)formulaResult; break;
default: value = null;
}
}
// We're done, store it in the row with all the data, including the temporary data...
if (data.replaceIndex[i]<0) {
outputRowData[tempIndex++] = value;
} else {
outputRowData[data.replaceIndex[i]] = value;
}
}
return outputRowData;
}
catch(Exception e)
{
throw new KettleValueException(e);
}
}
public boolean init(StepMetaInterface smi, StepDataInterface sdi)
{
meta=(JaninoMeta)smi;
data=(JaninoData)sdi;
if (super.init(smi, sdi))
{
// Add init code here.
// Return data type discovery is expensive, let's discover them one time only.
data.returnType = new int[meta.getFormula().length];
for (int i=0;i<meta.getFormula().length;i++) {
data.returnType[i] = -1;
}
return true;
}
return false;
}
// Run is were the action happens!
public void run()
{
try
{
logBasic("Starting to run...");
while (processRow(meta, data) && !isStopped());
}
catch(Exception e)
{
logError("Unexpected error in "+" : "+e.toString());
logError(Const.getStackTracker(e));
setErrors(1);
stopAll();
}
finally
{
dispose(meta, data);
logSummary();
markStop();
}
}
} |
package api.web.gw2.mapping.v2.guild.id.treasury;
import api.web.gw2.mapping.core.IdValue;
import api.web.gw2.mapping.core.QuantityValue;
import api.web.gw2.mapping.v2.APIv2;
import java.util.Set;
@APIv2(endpoint = "v2/guid/:id/treasury", requiresAuthentication = true, scope = "guilds") // NOI18N.
public interface Treasury {
/**
* Gets the id of this item this treasury.
* @return An {@code int} > 0.
*/
@IdValue
int getItemId();
/**
* Gets the number of this item in the treasury.
* @return An {@code int} ≥ 0.
*/
@QuantityValue
int getCount();
/**
* Gets the set of guild upgrades which need this treasury item.
* @return A non-modifiable {@code Set<TreasuryUpgrade>}, never {@code null}.
*/
Set<TreasuryUpgrade> getNeededBy();
} |
package cgeo.geocaching;
import cgeo.geocaching.activity.ShowcaseViewBuilder;
import cgeo.geocaching.connector.ConnectorFactory;
import cgeo.geocaching.connector.ILoggingManager;
import cgeo.geocaching.connector.ImageResult;
import cgeo.geocaching.connector.LogResult;
import cgeo.geocaching.connector.trackable.TrackableConnector;
import cgeo.geocaching.connector.trackable.TrackableLoggingManager;
import cgeo.geocaching.enumerations.LoadFlags;
import cgeo.geocaching.enumerations.LogType;
import cgeo.geocaching.enumerations.LogTypeTrackable;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.gcvote.GCVote;
import cgeo.geocaching.gcvote.GCVoteRatingBarUtil;
import cgeo.geocaching.gcvote.GCVoteRatingBarUtil.OnRatingChangeListener;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.twitter.Twitter;
import cgeo.geocaching.ui.dialog.DateDialog;
import cgeo.geocaching.ui.dialog.Dialogs;
import cgeo.geocaching.utils.AsyncTaskWithProgressText;
import cgeo.geocaching.utils.CalendarUtils;
import cgeo.geocaching.utils.Formatter;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.LogTemplateProvider;
import cgeo.geocaching.utils.LogTemplateProvider.LogContext;
import cgeo.geocaching.utils.RxUtils;
import com.github.amlcurran.showcaseview.targets.ActionItemTarget;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import android.R.string;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.Intent;
import android.os.Bundle;
import android.util.SparseArray;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import butterknife.Bind;
import butterknife.ButterKnife;
import rx.Observable;
import rx.android.app.AppObservable;
import rx.functions.Action1;
import rx.functions.Func0;
import rx.functions.Func1;
public class LogCacheActivity extends AbstractLoggingActivity implements DateDialog.DateDialogParent {
private static final String SAVED_STATE_RATING = "cgeo.geocaching.saved_state_rating";
private static final String SAVED_STATE_TYPE = "cgeo.geocaching.saved_state_type";
private static final String SAVED_STATE_DATE = "cgeo.geocaching.saved_state_date";
private static final String SAVED_STATE_IMAGE = "cgeo.geocaching.saved_state_image";
private static final int SELECT_IMAGE = 101;
private LayoutInflater inflater = null;
private Geocache cache = null;
private String geocode = null;
private String text = null;
private List<LogType> possibleLogTypes = new ArrayList<>();
private final Set<TrackableLog> trackables = new HashSet<>();
protected @Bind(R.id.tweet) CheckBox tweetCheck;
protected @Bind(R.id.tweet_box) LinearLayout tweetBox;
protected @Bind(R.id.log_password_box) LinearLayout logPasswordBox;
private SparseArray<TrackableLog> actionButtons;
private ILoggingManager loggingManager;
// Data to be saved while reconfiguring
private float rating;
private LogType typeSelected;
private Calendar date;
private Image image;
private boolean sendButtonEnabled;
/**
 * Callback run once the logging manager has loaded the cache's log data.
 * Populates the trackable inventory and the possible log types, corrects
 * the selected type if it is no longer allowed, then re-enables the UI.
 */
public void onLoadFinished() {
if (loggingManager.hasLoaderError()) {
showErrorLoadingData();
return;
}
trackables.addAll(loggingManager.getTrackables());
possibleLogTypes = loggingManager.getPossibleLogTypes();
// no allowed log type at all -> data is unusable
if (possibleLogTypes.isEmpty()) {
showErrorLoadingData();
return;
}
// previously selected type may have become invalid; fall back to the
// first allowed one and notify the user
if (!possibleLogTypes.contains(typeSelected)) {
typeSelected = possibleLogTypes.get(0);
setType(typeSelected);
showToast(res.getString(R.string.info_log_type_changed));
}
initializeRatingBar();
enablePostButton(true);
initializeTrackablesAction();
updateTrackablesList();
showProgress(false);
}
/** Shows a "could not load log data" toast and hides the progress indicator. */
private void showErrorLoadingData() {
showToast(res.getString(R.string.err_log_load_data));
showProgress(false);
}
/**
 * Pre-selects the VISITED action on every trackable in the inventory when
 * the corresponding auto-visit setting is enabled; otherwise does nothing.
 */
private void initializeTrackablesAction() {
    if (!Settings.isTrackableAutoVisit()) {
        return;
    }
    for (final TrackableLog trackableLog : trackables) {
        trackableLog.action = LogTypeTrackable.VISITED;
    }
}
/**
 * Rebuilds the trackable inventory list in the UI: one row per trackable
 * with brand icon, tracking code, name and an action button. Also wires up
 * the "change all" button when more than one trackable is shown.
 * Does nothing when the inventory is empty.
 */
private void updateTrackablesList() {
if (CollectionUtils.isEmpty(trackables)) {
return;
}
// lazily obtain the inflater on first use
if (inflater == null) {
inflater = getLayoutInflater();
}
actionButtons = new SparseArray<>();
final LinearLayout inventoryView = ButterKnife.findById(this, R.id.inventory);
// rebuild from scratch on every call
inventoryView.removeAllViews();
for (final TrackableLog tb : getSortedTrackables()) {
final LinearLayout inventoryItem = (LinearLayout) inflater.inflate(R.layout.logcache_trackable_item, inventoryView, false);
final ImageView brandView = ButterKnife.findById(inventoryItem, R.id.trackable_image_brand);
brandView.setImageResource(tb.brand.getIconResource());
final TextView codeView = ButterKnife.findById(inventoryItem, R.id.trackcode);
codeView.setText(tb.trackCode);
final TextView nameView = ButterKnife.findById(inventoryItem, R.id.name);
nameView.setText(tb.name);
final TextView actionButton = ButterKnife.findById(inventoryItem, R.id.action);
// the button id doubles as the key to look up the trackable later
actionButton.setId(tb.id);
actionButtons.put(actionButton.getId(), tb);
actionButton.setText(tb.action.getLabel() + " ");
actionButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View view) {
selectTrackableAction(view);
}
});
// prefer the geocode for navigation; fall back to the tracking code
final String tbCode = (StringUtils.isNotEmpty(tb.geocode) ? tb.geocode : tb.trackCode);
inventoryItem.setClickable(true);
ButterKnife.findById(inventoryItem, R.id.info).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View view) {
final Intent trackablesIntent = new Intent(LogCacheActivity.this, TrackableActivity.class);
trackablesIntent.putExtra(Intents.EXTRA_GEOCODE, tbCode);
trackablesIntent.putExtra(Intents.EXTRA_BRAND, tb.brand.getId());
startActivity(trackablesIntent);
}
});
inventoryView.addView(inventoryItem);
}
// reveal the inventory box once it has content
if (inventoryView.getChildCount() > 0) {
ButterKnife.findById(this, R.id.inventory_box).setVisibility(View.VISIBLE);
}
// "change all" only makes sense for more than one trackable
if (inventoryView.getChildCount() > 1) {
final LinearLayout inventoryChangeAllView = ButterKnife.findById(this, R.id.inventory_changeall);
final Button changeButton = ButterKnife.findById(inventoryChangeAllView, R.id.changebutton);
changeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View view) {
selectAllTrackablesAction();
}
});
inventoryChangeAllView.setVisibility(View.VISIBLE);
}
}
/**
 * Returns a copy of the trackable inventory sorted case-insensitively by
 * trackable name. The underlying set is left untouched.
 */
private ArrayList<TrackableLog> getSortedTrackables() {
    final Comparator<TrackableLog> byName = new Comparator<TrackableLog>() {
        @Override
        public int compare(final TrackableLog left, final TrackableLog right) {
            // String.CASE_INSENSITIVE_ORDER orders exactly as compareToIgnoreCase
            return String.CASE_INSENSITIVE_ORDER.compare(left.name, right.name);
        }
    };
    final ArrayList<TrackableLog> sorted = new ArrayList<>(trackables);
    Collections.sort(sorted, byName);
    return sorted;
}
/**
 * Records whether the "post log" action should be enabled; the actual menu
 * item state is derived from this flag elsewhere.
 */
private void enablePostButton(final boolean enabled) {
sendButtonEnabled = enabled;
}
/**
 * Activity setup: resolves the cache from the intent extras, restores any
 * saved instance state or previously saved offline log, wires up the type,
 * date and log-text controls, and asynchronously loads generic trackable
 * inventories. Statement order matters (defaults before restore, views
 * after cache resolution).
 */
@Override
public void onCreate(final Bundle savedInstanceState) {
onCreate(savedInstanceState, R.layout.logcache_activity);
// Get parameters from intent and basic cache information from database
final Bundle extras = getIntent().getExtras();
if (extras != null) {
geocode = extras.getString(Intents.EXTRA_GEOCODE);
// fall back to resolving the geocode from a cache guid
if (StringUtils.isBlank(geocode)) {
final String cacheid = extras.getString(Intents.EXTRA_ID);
if (StringUtils.isNotBlank(cacheid)) {
geocode = DataStore.getGeocodeForGuid(cacheid);
}
}
}
cache = DataStore.loadCache(geocode, LoadFlags.LOAD_CACHE_OR_DB);
invalidateOptionsMenuCompatible();
possibleLogTypes = cache.getPossibleLogTypes();
// title shows the cache name when available, otherwise the geocode
if (StringUtils.isNotBlank(cache.getName())) {
setTitle(res.getString(R.string.log_new_log) + ": " + cache.getName());
} else {
setTitle(res.getString(R.string.log_new_log) + ": " + cache.getGeocode());
}
initializeRatingBar();
// initialize with default values
setDefaultValues();
// Restore previous state
if (savedInstanceState != null) {
rating = savedInstanceState.getFloat(SAVED_STATE_RATING);
typeSelected = LogType.getById(savedInstanceState.getInt(SAVED_STATE_TYPE));
date.setTimeInMillis(savedInstanceState.getLong(SAVED_STATE_DATE));
image = savedInstanceState.getParcelable(SAVED_STATE_IMAGE);
} else {
// If log had been previously saved, load it now, otherwise initialize signature as needed
final LogEntry log = DataStore.loadLogOffline(geocode);
if (log != null) {
typeSelected = log.type;
date.setTime(new Date(log.date));
text = log.log;
} else if (StringUtils.isNotBlank(Settings.getSignature())
&& Settings.isAutoInsertSignature()
&& StringUtils.isBlank(currentLogText())) {
insertIntoLog(LogTemplateProvider.applyTemplates(Settings.getSignature(), new LogContext(cache, null)), false);
}
}
if (image == null) {
image = Image.NONE;
}
// posting stays disabled until the logging manager finishes loading
enablePostButton(false);
final Button typeButton = ButterKnife.findById(this, R.id.type);
typeButton.setText(typeSelected.getL10n());
typeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View view) {
selectLogType();
}
});
final Button dateButton = ButterKnife.findById(this, R.id.date);
setDate(date);
dateButton.setOnClickListener(new DateListener());
final EditText logView = ButterKnife.findById(this, R.id.log);
// only prefill the log text if the user has not typed anything yet
if (StringUtils.isBlank(currentLogText()) && StringUtils.isNotBlank(text)) {
logView.setText(text);
Dialogs.moveCursorToEnd(logView);
}
tweetCheck.setChecked(true);
updateTweetBox(typeSelected);
updateLogPasswordBox(typeSelected);
loggingManager = cache.getLoggingManager(this);
loggingManager.init();
// Load Generic Trackables
AppObservable.bindActivity(this,
// Obtain the actives connectors
Observable.from(ConnectorFactory.getGenericTrackablesConnectors())
.flatMap(new Func1<TrackableConnector, Observable<TrackableLog>>() {
@Override
public Observable<TrackableLog> call(final TrackableConnector trackableConnector) {
return Observable.defer(new Func0<Observable<TrackableLog>>() {
@Override
public Observable<TrackableLog> call() {
return trackableConnector.trackableLogInventory();
}
}).subscribeOn(RxUtils.networkScheduler);
}
}).toList()
).subscribe(new Action1<List<TrackableLog>>() {
@Override
public void call(final List<TrackableLog> trackableLogs) {
// Store trackables
trackables.addAll(trackableLogs);
// Update the UI
initializeTrackablesAction();
updateTrackablesList();
}
});
requestKeyboardForLogging();
}
/**
 * Shows the GCVote rating bar when voting is possible for this cache and
 * stores the chosen star value in {@code rating} on every change.
 */
private void initializeRatingBar() {
if (GCVote.isVotingPossible(cache)) {
GCVoteRatingBarUtil.initializeRatingBar(cache, getWindow().getDecorView().getRootView(), new OnRatingChangeListener() {
@Override
public void onRatingChanged(final float stars) {
rating = stars;
}
});
}
}
/**
 * Resets all user-editable log fields (date, rating, type, text, image)
 * to their defaults for this cache.
 */
private void setDefaultValues() {
date = Calendar.getInstance();
rating = GCVote.NO_RATING;
typeSelected = cache.getDefaultLogType();
// it this is an attended event log, use the event date by default instead of the current date
if (cache.isEventCache() && CalendarUtils.isPastEvent(cache) && typeSelected == LogType.ATTENDED) {
date.setTime(cache.getHiddenDate());
}
text = null;
image = Image.NONE;
}
/**
 * Discards the offline log stored for this cache, resets the whole form
 * (type, date, log text and log password) and informs the user via a toast.
 */
private void clearLog() {
    cache.clearOfflineLog();
    setDefaultValues();
    setType(typeSelected);
    setDate(date);
    ButterKnife.<EditText>findById(this, R.id.log).setText(StringUtils.EMPTY);
    ButterKnife.<EditText>findById(this, R.id.log_password).setText(StringUtils.EMPTY);
    showToast(res.getString(R.string.info_log_cleared));
}
/** Saves the current log as a (non-forced) offline draft before the activity finishes. */
@Override
public void finish() {
    saveLog(false);
    super.finish();
}
/** Saves the current log as a (non-forced) offline draft whenever the activity is stopped. */
@Override
public void onStop() {
    saveLog(false);
    super.onStop();
}
/**
 * Persists the transient form state (rating, selected log type, chosen date
 * and attached image) across configuration changes / process death.
 */
@Override
protected void onSaveInstanceState(final Bundle outState) {
    super.onSaveInstanceState(outState);
    outState.putDouble(SAVED_STATE_RATING, rating);
    outState.putInt(SAVED_STATE_TYPE, typeSelected.id);
    outState.putLong(SAVED_STATE_DATE, date.getTimeInMillis());
    outState.putParcelable(SAVED_STATE_IMAGE, image);
}
/**
 * Stores the given date and mirrors it on the date button as a short,
 * verbally formatted date.
 */
@Override
public void setDate(final Calendar dateIn) {
    date = dateIn;
    final Button dateButton = ButterKnife.findById(this, R.id.date);
    dateButton.setText(Formatter.formatShortDateVerbally(date.getTimeInMillis()));
}
/**
 * Applies the given log type: remembers the selection, updates the type
 * button caption and toggles the tweet / log-password boxes accordingly.
 */
public void setType(final LogType type) {
    typeSelected = type;
    final Button typeButton = ButterKnife.findById(this, R.id.type);
    typeButton.setText(type.getL10n());
    updateTweetBox(type);
    updateLogPasswordBox(type);
}
/** Shows the tweet option only for "found it" logs when twitter is enabled and its login is valid. */
private void updateTweetBox(final LogType type) {
    final boolean showTweet = type == LogType.FOUND_IT
            && Settings.isUseTwitter()
            && Settings.isTwitterLoginValid();
    tweetBox.setVisibility(showTweet ? View.VISIBLE : View.GONE);
}
/** Shows the log-password field only for "found it" logs on caches that require one. */
private void updateLogPasswordBox(final LogType type) {
    final boolean showPassword = type == LogType.FOUND_IT && cache.isLogPasswordRequired();
    logPasswordBox.setVisibility(showPassword ? View.VISIBLE : View.GONE);
}
/** Click listener for the date button: opens a cancelable date picker pre-set to the current date. */
private class DateListener implements View.OnClickListener {
    @Override
    public void onClick(final View arg0) {
        final DateDialog dateDialog = DateDialog.getInstance(date);
        dateDialog.setCancelable(true);
        dateDialog.show(getSupportFragmentManager(), "date_dialog");
    }
}
/**
 * Background task that posts the cache log via the logging manager. On
 * success it updates the local database (found state, log list, offline log),
 * optionally tweets, submits a GCVote rating, posts generic trackable logs
 * and uploads the attached image. On failure the user may retry, dismiss or
 * save the log offline.
 */
private class Poster extends AsyncTaskWithProgressText<String, StatusCode> {
    public Poster(final Activity activity, final String progressMessage) {
        // Dialog title depends on whether an image has to be uploaded along with the log.
        super(activity, res.getString(image.isEmpty() ?
                R.string.log_posting_log :
                R.string.log_saving_and_uploading), progressMessage);
    }
    @Override
    protected StatusCode doInBackgroundInternal(final String[] logTexts) {
        // logTexts[0] = log text, optional logTexts[1] = log password.
        final String log = logTexts[0];
        final String logPwd = logTexts.length > 1 ? logTexts[1] : null;
        try {
            final LogResult logResult = loggingManager.postLog(typeSelected, date, log, logPwd, new ArrayList<>(trackables));
            if (logResult.getPostLogResult() == StatusCode.NO_ERROR) {
                // update geocache in DB
                if (typeSelected.isFoundLog()) {
                    cache.setFound(true);
                    cache.setVisitedDate(date.getTimeInMillis());
                }
                DataStore.saveChangedCache(cache);
                // update logs in DB: prepend the freshly posted log
                final List<LogEntry> newLogs = new ArrayList<>(cache.getLogs());
                final LogEntry logNow = new LogEntry(date.getTimeInMillis(), typeSelected, log);
                logNow.friend = true;
                newLogs.add(0, logNow);
                DataStore.saveLogs(cache.getGeocode(), newLogs);
                // update offline log in DB
                cache.clearOfflineLog();
                if (typeSelected == LogType.FOUND_IT) {
                    // Tweet only when the box is both checked and actually visible.
                    if (tweetCheck.isChecked() && tweetBox.getVisibility() == View.VISIBLE) {
                        publishProgress(res.getString(R.string.log_posting_twitter));
                        Twitter.postTweetCache(geocode, logNow);
                    }
                }
                if (GCVote.isValidRating(rating) && GCVote.isVotingPossible(cache)) {
                    publishProgress(res.getString(R.string.log_posting_gcvote));
                    if (GCVote.setRating(cache, rating)) {
                        cache.setMyVote(rating);
                        DataStore.saveChangedCache(cache);
                    } else {
                        showToast(res.getString(R.string.err_gcvote_send_rating));
                    }
                }
                // Posting Generic Trackables
                for (final TrackableConnector connector: ConnectorFactory.getGenericTrackablesConnectors()) {
                    final TrackableLoggingManager manager = connector.getTrackableLoggingManager((AbstractLoggingActivity) activity);
                    if (manager != null) {
                        // Filter trackables logs by action and brand
                        final Set<TrackableLog> trackablesMoved = new HashSet<>();
                        for (final TrackableLog trackableLog : trackables) {
                            if (trackableLog.action != LogTypeTrackable.DO_NOTHING && trackableLog.brand == connector.getBrand()) {
                                trackablesMoved.add(trackableLog);
                            }
                        }
                        // Posting trackables logs
                        int trackableLogcounter = 1;
                        for (final TrackableLog trackableLog : trackablesMoved) {
                            publishProgress(res.getString(R.string.log_posting_generic_trackable, trackableLog.brand.getLabel(), trackableLogcounter, trackablesMoved.size()));
                            manager.postLog(cache, trackableLog, date, log);
                            trackableLogcounter++;
                        }
                    }
                }
                if (!image.isEmpty()) {
                    publishProgress(res.getString(R.string.log_posting_image));
                    final ImageResult imageResult = loggingManager.postLogImage(logResult.getLogId(), image);
                    final String uploadedImageUrl = imageResult.getImageUri();
                    if (StringUtils.isNotEmpty(uploadedImageUrl)) {
                        image = image.buildUpon()
                                .setUrl(uploadedImageUrl)
                                .build();
                        logNow.addLogImage(image);
                        DataStore.saveLogs(cache.getGeocode(), newLogs);
                    }
                    // NOTE(review): when an image is attached the overall status becomes
                    // the image post result, even though the log itself was posted fine
                    // — confirm this is intended.
                    return imageResult.getPostResult();
                }
            }
            return logResult.getPostLogResult();
        } catch (final RuntimeException e) {
            Log.e("LogCacheActivity.Poster.doInBackgroundInternal", e);
        }
        return StatusCode.LOG_POST_ERROR;
    }
    @Override
    protected void onPostExecuteInternal(final StatusCode status) {
        if (status == StatusCode.NO_ERROR) {
            showToast(res.getString(R.string.info_log_posted));
            // No need to save the log when quitting if it has been posted.
            text = currentLogText();
            finish();
        } else if (status == StatusCode.LOG_SAVED) {
            showToast(res.getString(R.string.info_log_saved));
            finish();
        } else {
            // Posting failed: offer retry / cancel / save-offline.
            Dialogs.confirmPositiveNegativeNeutral(activity, R.string.info_log_post_failed,
                    res.getString(R.string.info_log_post_failed_reason, status.getErrorString(res)),
                    R.string.info_log_post_retry, // Positive Button
                    string.cancel, // Negative Button
                    R.string.info_log_post_save, // Neutral Button
                    // Positive button: Retry
                    new OnClickListener() {
                        @Override
                        public void onClick(final DialogInterface dialog, final int which) {
                            sendLogInternal();
                        }
                    },
                    // Negative button: dismiss popup
                    null,
                    // Neutral Button: SaveLog
                    new OnClickListener() {
                        @Override
                        public void onClick(final DialogInterface dialogInterface, final int i) {
                            saveLog(true);
                            finish();
                        }
                    });
        }
    }
}
/**
 * Stores the current log text as an offline log for this cache.
 *
 * @param force when {@code true} always store, even if the text is empty or unchanged
 */
private void saveLog(final boolean force) {
    final String log = currentLogText();
    // Do not erase the saved log if the user has removed all the characters
    // without using "Clear". This may be a manipulation mistake, and erasing
    // again will be easy using "Clear" while retyping the text may not be.
    if (force || (StringUtils.isNotEmpty(log) && !StringUtils.equals(log, text))) {
        cache.logOffline(this, log, date, typeSelected);
        Settings.setLastCacheLog(log);
    }
    text = log;
}
/** @return the text currently entered in the log input field */
private String currentLogText() {
    return ButterKnife.<EditText>findById(this, R.id.log).getText().toString();
}
/** @return the text currently entered in the log password field */
private String currentLogPassword() {
    return ButterKnife.<EditText>findById(this, R.id.log_password).getText().toString();
}
/** @return a {@link LogContext} for the cache being logged (second argument is null here) */
@Override
protected LogContext getLogContext() {
    return new LogContext(cache, null);
}
/**
 * Shows a dialog that assigns one common log action to every trackable in the
 * inventory at once, then refreshes the trackables list.
 */
private void selectAllTrackablesAction() {
    final List<LogTypeTrackable> tbLogTypeValues = LogTypeTrackable.getLogTypeTrackableForLogCache();
    final Builder alert = new AlertDialog.Builder(this);
    alert.setTitle(res.getString(R.string.log_tb_changeall));
    alert.setItems(getTBLogTypes(tbLogTypeValues), new OnClickListener() {
        @Override
        public void onClick(final DialogInterface dialog, final int position) {
            final LogTypeTrackable logType = tbLogTypeValues.get(position);
            for (final TrackableLog tb : trackables) {
                tb.action = logType;
                Log.i("Trackable " + tb.trackCode + " (" + tb.name + ") has new action: #" + logType);
            }
            updateTrackablesList();
            dialog.dismiss();
        }
    });
    alert.create().show();
}
/** Maps the given trackable log types to their localized labels, preserving order. */
private static String[] getTBLogTypes(final List<LogTypeTrackable> tbLogTypeValues) {
    final String[] labels = new String[tbLogTypeValues.size()];
    int i = 0;
    for (final LogTypeTrackable logType : tbLogTypeValues) {
        labels[i++] = logType.getLabel();
    }
    return labels;
}
/**
 * Shows a single-choice dialog with all currently possible log types;
 * selecting one applies it via {@code setType}.
 */
private void selectLogType() {
    // use a local copy of the possible types, as that one might be modified in the background by the loader
    final List<LogType> possible = new ArrayList<>(possibleLogTypes);
    final String[] choices = new String[possible.size()];
    int i = 0;
    for (final LogType logType : possible) {
        choices[i++] = logType.getL10n();
    }
    final Builder alert = new AlertDialog.Builder(this);
    alert.setSingleChoiceItems(choices, possible.indexOf(typeSelected), new OnClickListener() {
        @Override
        public void onClick(final DialogInterface dialog, final int position) {
            setType(possible.get(position));
            dialog.dismiss();
        }
    });
    alert.create().show();
}
/**
 * Shows a dialog to choose the log action for the single trackable whose
 * action button was tapped, then refreshes the trackables list.
 */
private void selectTrackableAction(final View view) {
    final TrackableLog trackableLog = actionButtons.get(view.getId());
    final List<LogTypeTrackable> tbLogTypeValues = LogTypeTrackable.getLogTypeTrackableForLogCache();
    final Builder alert = new AlertDialog.Builder(this);
    alert.setTitle(trackableLog.name);
    alert.setItems(getTBLogTypes(tbLogTypeValues), new OnClickListener() {
        @Override
        public void onClick(final DialogInterface dialog, final int position) {
            final LogTypeTrackable logType = tbLogTypeValues.get(position);
            trackableLog.action = logType;
            Log.i("Trackable " + trackableLog.trackCode + " (" + trackableLog.name + ") has new action: #" + logType);
            updateTrackablesList();
            dialog.dismiss();
        }
    });
    alert.create().show();
}
/** Opens the image selection activity, pre-filled with the currently attached image. */
private void selectImage() {
    final Intent selectImageIntent = new Intent(this, ImageSelectActivity.class);
    selectImageIntent.putExtra(Intents.EXTRA_IMAGE, image);
    startActivityForResult(selectImageIntent, SELECT_IMAGE);
}
/**
 * Receives the image chosen in the image selection activity. Any result code
 * other than OK or CANCELED is treated as a failed capture and reported.
 */
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
    if (requestCode == SELECT_IMAGE) {
        if (resultCode == RESULT_OK) {
            image = data.getParcelableExtra(Intents.EXTRA_IMAGE);
        } else if (resultCode != RESULT_CANCELED) {
            // Image capture failed, advise user
            showToast(getResources().getString(R.string.err_select_logimage_failed));
        }
    }
}
/** Dispatches toolbar actions: send log, attach image, save draft, or clear the form. */
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
    final int itemId = item.getItemId();
    if (itemId == R.id.menu_send) {
        sendLogAndConfirm();
        return true;
    }
    if (itemId == R.id.menu_image) {
        selectImage();
        return true;
    }
    if (itemId == R.id.save) {
        saveLog(true);
        finish();
        return true;
    }
    if (itemId == R.id.clear) {
        clearLog();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Validates the log before posting (posting enabled, date not in the future)
 * and sends it, asking for confirmation first when the selected log type
 * requires it.
 */
private void sendLogAndConfirm() {
    if (!sendButtonEnabled) {
        Dialogs.message(this, R.string.log_post_not_possible);
        return;
    }
    if (CalendarUtils.isFuture(date)) {
        Dialogs.message(this, R.string.log_date_future_not_allowed);
        return;
    }
    if (!typeSelected.mustConfirmLog()) {
        sendLogInternal();
        return;
    }
    Dialogs.confirm(this, R.string.confirm_log_title, res.getString(R.string.confirm_log_message, typeSelected.getL10n()), new OnClickListener() {
        @Override
        public void onClick(final DialogInterface dialog, final int which) {
            sendLogInternal();
        }
    });
}
/** Starts the Poster task with the current log text and password, and remembers the text as the last log. */
private void sendLogInternal() {
    new Poster(this, res.getString(R.string.log_saving)).execute(currentLogText(), currentLogPassword());
    Settings.setLastCacheLog(currentLogText());
}
/**
 * Builds the options menu; the image item is only visible when the connector
 * supports log images. Also triggers the showcase overlay.
 */
@Override
public boolean onCreateOptionsMenu(final Menu menu) {
    super.onCreateOptionsMenu(menu);
    menu.findItem(R.id.menu_image).setVisible(cache.supportsLogImages());
    menu.findItem(R.id.save).setVisible(true);
    menu.findItem(R.id.clear).setVisible(true);
    presentShowcase();
    return true;
}
/** @return a showcase overlay pointing at the "send" action item */
@Override
public ShowcaseViewBuilder getShowcase() {
    return new ShowcaseViewBuilder(this)
            .setTarget(new ActionItemTarget(this, R.id.menu_send))
            .setContent(R.string.showcase_logcache_title, R.string.showcase_logcache_text);
}
/**
 * Builds an intent that opens this activity for the given cache.
 *
 * @param context the calling activity
 * @param cacheId the internal cache id
 * @param geocode the geocode of the cache to log
 */
public static Intent getLogCacheIntent(final Activity context, final String cacheId, final String geocode) {
    return new Intent(context, LogCacheActivity.class)
            .putExtra(Intents.EXTRA_ID, cacheId)
            .putExtra(Intents.EXTRA_GEOCODE, geocode);
}
/** @return the last cache log text persisted in the settings */
@Override
protected String getLastLog() {
    return Settings.getLastCacheLog();
}
} |
package cgeo.geocaching.export;
import cgeo.geocaching.LogEntry;
import cgeo.geocaching.R;
import cgeo.geocaching.Settings;
import cgeo.geocaching.cgCache;
import cgeo.geocaching.cgWaypoint;
import cgeo.geocaching.cgeoapplication;
import cgeo.geocaching.activity.ActivityMixin;
import cgeo.geocaching.activity.Progress;
import cgeo.geocaching.enumerations.CacheAttribute;
import cgeo.geocaching.enumerations.LoadFlags;
import cgeo.geocaching.geopoint.Geopoint;
import cgeo.geocaching.utils.BaseUtils;
import cgeo.geocaching.utils.Log;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Environment;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.TextView;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
class GpxExport extends AbstractExport {
private static final SimpleDateFormat dateFormatZ = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
/** Creates the GPX export entry, labeled with the localized GPX export name. */
protected GpxExport() {
    super(getString(R.string.export_gpx));
}
/**
 * Exports the given caches: directly with default parameters when no activity
 * is available (no UI possible), otherwise after showing the options dialog.
 *
 * @param caches   the caches to export
 * @param activity optional activity used for the configuration dialog and progress UI
 */
@Override
public void export(final List<cgCache> caches, final Activity activity) {
    if (activity == null) {
        // No activity given, so no user interaction possible.
        // Start export with default parameters.
        new ExportTask(caches, activity).execute((Void) null);
    } else {
        // Show configuration dialog
        new ExportOptionsDialog(caches, activity).show();
    }
}
/**
 * A dialog to allow the user to set options for the export.
 *
 * Currently the only available option is opening the share menu after a
 * successful export.
 */
private class ExportOptionsDialog extends AlertDialog {
    public ExportOptionsDialog(final List<cgCache> caches, final Activity activity) {
        super(activity);
        View layout = activity.getLayoutInflater().inflate(R.layout.gpx_export_dialog, null);
        setView(layout);
        // Tell the user where the GPX file will be written.
        final TextView text = (TextView) layout.findViewById(R.id.info);
        text.setText(getString(R.string.export_gpx_info, Settings.getGpxExportDir()));
        // "Share after export" checkbox; the preference is persisted immediately on toggle.
        final CheckBox shareOption = (CheckBox) layout.findViewById(R.id.share);
        shareOption.setChecked(Settings.getShareAfterExport());
        shareOption.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Settings.setShareAfterExport(shareOption.isChecked());
            }
        });
        // Close the dialog and start the actual export task.
        ((Button) layout.findViewById(R.id.export)).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dismiss();
                new ExportTask(caches, activity).execute((Void) null);
            }
        });
    }
}
private class ExportTask extends AsyncTask<Void, Integer, Boolean> {
private final List<cgCache> caches; // caches to export
private final Activity activity; // may be null: no progress UI / toasts then
private final Progress progress = new Progress(); // progress dialog wrapper
private File exportFile; // target .gpx file; assigned in doInBackground
private Writer gpx; // writer for the gpx file; assigned in doInBackground
/**
 * Instantiates and configures the task for exporting the caches as GPX.
 *
 * @param caches
 *            The {@link List} of {@link cgCache} to be exported
 * @param activity
 *            optional: Show a progress bar and toasts
 */
public ExportTask(final List<cgCache> caches, final Activity activity) {
    this.caches = caches;
    this.activity = activity;
}
/** Shows a horizontal progress dialog sized to the number of caches — only when an activity is present. */
@Override
protected void onPreExecute() {
    if (null != activity) {
        progress.show(activity, null, getString(R.string.export) + ": " + getName(), ProgressDialog.STYLE_HORIZONTAL, null);
        progress.setMaxProgressAndReset(caches.size());
    }
}
@Override
protected Boolean doInBackground(Void... params) {
// quick check for being able to write the GPX file
if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
return false;
}
try {
final File exportLocation = new File(Settings.getGpxExportDir());
exportLocation.mkdirs();
final SimpleDateFormat fileNameDateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
exportFile = new File(Settings.getGpxExportDir() + File.separatorChar + "export_" + fileNameDateFormat.format(new Date()) + ".gpx");
gpx = new BufferedWriter(new FileWriter(exportFile));
gpx.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
gpx.write("<gpx version=\"1.0\" creator=\"c:geo - http:
for (int i = 0; i < caches.size(); i++) {
// reload the cache. otherwise logs, attributes and other detailed information is not available
final cgCache cache = cgeoapplication.getInstance().loadCache(caches.get(i).getGeocode(), LoadFlags.LOAD_ALL_DB_ONLY);
gpx.write("<wpt ");
gpx.write("lat=\"");
gpx.write(Double.toString(cache.getCoords().getLatitude()));
gpx.write("\" ");
gpx.write("lon=\"");
gpx.write(Double.toString(cache.getCoords().getLongitude()));
gpx.write("\">");
final Date hiddenDate = cache.getHiddenDate();
if (hiddenDate != null) {
gpx.write("<time>");
gpx.write(StringEscapeUtils.escapeXml(dateFormatZ.format(hiddenDate)));
gpx.write("</time>");
}
gpx.write("<name>");
gpx.write(StringEscapeUtils.escapeXml(cache.getGeocode()));
gpx.write("</name>");
gpx.write("<desc>");
gpx.write(StringEscapeUtils.escapeXml(cache.getName()));
gpx.write("</desc>");
gpx.write("<url>");
gpx.write(cache.getUrl());
gpx.write("</url>");
gpx.write("<urlname>");
gpx.write(StringEscapeUtils.escapeXml(cache.getName()));
gpx.write("</urlname>");
gpx.write("<sym>");
gpx.write(cache.isFound() ? "Geocache Found" : "Geocache");
gpx.write("</sym>");
gpx.write("<type>");
gpx.write(StringEscapeUtils.escapeXml("Geocache|" + cache.getType().pattern));
gpx.write("</type>");
gpx.write("<groundspeak:cache ");
gpx.write("id=\"");
gpx.write(cache.getCacheId());
gpx.write("\" available=\"");
gpx.write(!cache.isDisabled() ? "True" : "False");
gpx.write("\" archived=\"");
gpx.write(cache.isArchived() ? "True" : "False");
gpx.write("\" ");
gpx.write("xmlns:groundspeak=\"http:
gpx.write("<groundspeak:name>");
gpx.write(StringEscapeUtils.escapeXml(cache.getName()));
gpx.write("</groundspeak:name>");
gpx.write("<groundspeak:placed_by>");
gpx.write(StringEscapeUtils.escapeXml(cache.getOwnerDisplayName()));
gpx.write("</groundspeak:placed_by>");
gpx.write("<groundspeak:owner>");
gpx.write(StringEscapeUtils.escapeXml(cache.getOwnerUserId()));
gpx.write("</groundspeak:owner>");
gpx.write("<groundspeak:type>");
gpx.write(StringEscapeUtils.escapeXml(cache.getType().pattern));
gpx.write("</groundspeak:type>");
gpx.write("<groundspeak:container>");
gpx.write(StringEscapeUtils.escapeXml(cache.getSize().id));
gpx.write("</groundspeak:container>");
writeAttributes(cache);
gpx.write("<groundspeak:difficulty>");
gpx.write(Float.toString(cache.getDifficulty()));
gpx.write("</groundspeak:difficulty>");
gpx.write("<groundspeak:terrain>");
gpx.write(Float.toString(cache.getTerrain()));
gpx.write("</groundspeak:terrain>");
gpx.write("<groundspeak:country>");
gpx.write(StringEscapeUtils.escapeXml(cache.getLocation()));
gpx.write("</groundspeak:country>");
gpx.write("<groundspeak:state></groundspeak:state>"); // c:geo cannot manage 2 separate fields, so we export as country
gpx.write("<groundspeak:short_description html=\"");
gpx.write(BaseUtils.containsHtml(cache.getShortDescription()) ? "True" : "False");
gpx.write("\">");
gpx.write(StringEscapeUtils.escapeXml(cache.getShortDescription()));
gpx.write("</groundspeak:short_description>");
gpx.write("<groundspeak:long_description html=\"");
gpx.write(BaseUtils.containsHtml(cache.getDescription()) ? "True" : "False");
gpx.write("\">");
gpx.write(StringEscapeUtils.escapeXml(cache.getDescription()));
gpx.write("</groundspeak:long_description>");
gpx.write("<groundspeak:encoded_hints>");
gpx.write(StringEscapeUtils.escapeXml(cache.getHint()));
gpx.write("</groundspeak:encoded_hints>");
writeLogs(cache);
gpx.write("</groundspeak:cache>");
gpx.write("</wpt>");
writeWaypoints(cache);
publishProgress(i + 1);
}
gpx.write("</gpx>");
gpx.close();
} catch (Exception e) {
Log.e("GpxExport.ExportTask export", e);
if (gpx != null) {
try {
gpx.close();
} catch (IOException ee) {
}
}
// delete partial gpx file on error
if (exportFile.exists()) {
exportFile.delete();
}
return false;
}
return true;
}
/**
 * Writes the waypoints of the cache. Server-side (origin) waypoints keep their
 * own prefix; user-defined waypoints get fresh zero-padded numeric prefixes
 * above the highest origin prefix to avoid clashes.
 */
private void writeWaypoints(final cgCache cache) throws IOException {
    final List<cgWaypoint> ownWaypoints = new ArrayList<cgWaypoint>();
    final List<cgWaypoint> originWaypoints = new ArrayList<cgWaypoint>();
    for (cgWaypoint wp : cache.getWaypoints()) {
        if (wp.isUserDefined()) {
            ownWaypoints.add(wp);
        } else {
            originWaypoints.add(wp);
        }
    }
    int maxPrefix = 0;
    for (cgWaypoint wp : originWaypoints) {
        final String prefix = wp.getPrefix();
        try {
            maxPrefix = Math.max(Integer.parseInt(prefix), maxPrefix);
        } catch (NumberFormatException ex) {
            Log.e("Unexpected origin waypoint prefix='" + prefix + "'", ex);
        }
        writeCacheWaypoint(wp, prefix);
    }
    for (cgWaypoint wp : ownWaypoints) {
        maxPrefix++;
        writeCacheWaypoint(wp, StringUtils.leftPad(String.valueOf(maxPrefix), 2, '0'));
    }
}
/**
 * Writes one waypoint entry for a cache waypoint.
 *
 * @param wp
 *            the waypoint to write
 * @param prefix
 *            the (possibly renumbered) waypoint prefix used in the GPX name
 * @throws IOException
 *             if writing to the GPX stream fails
 */
private void writeCacheWaypoint(final cgWaypoint wp, final String prefix) throws IOException {
    gpx.write("<wpt lat=\"");
    final Geopoint coords = wp.getCoords();
    // Unknown coordinates are written as empty lat/lon attributes.
    gpx.write(coords != null ? Double.toString(coords.getLatitude()) : ""); // TODO: check whether is the best way to handle unknown waypoint coordinates
    gpx.write("\" lon=\"");
    gpx.write(coords != null ? Double.toString(coords.getLongitude()) : "");
    gpx.write("\">");
    gpx.write("<name>");
    // Name = prefix + waypoint geocode without its first two characters.
    gpx.write(StringEscapeUtils.escapeXml(prefix));
    gpx.write(StringEscapeUtils.escapeXml(wp.getGeocode().substring(2)));
    gpx.write("</name>");
    gpx.write("<cmt>");
    gpx.write(StringEscapeUtils.escapeXml(wp.getNote()));
    gpx.write("</cmt>");
    gpx.write("<desc>");
    gpx.write(StringEscapeUtils.escapeXml(wp.getName()));
    gpx.write("</desc>");
    gpx.write("<sym>");
    gpx.write(StringEscapeUtils.escapeXml(wp.getWaypointType().toString())); //TODO: Correct identifier string
    gpx.write("</sym>");
    gpx.write("<type>Waypoint|");
    gpx.write(StringEscapeUtils.escapeXml(wp.getWaypointType().toString())); //TODO: Correct identifier string
    gpx.write("</type>");
    gpx.write("</wpt>");
}
/**
 * Writes the {@code <groundspeak:logs>} section for the cache; skipped
 * entirely when there are no logs.
 */
private void writeLogs(final cgCache cache) throws IOException {
    if (cache.getLogs().size() <= 0) {
        return;
    }
    gpx.write("<groundspeak:logs>");
    for (LogEntry log : cache.getLogs()) {
        gpx.write("<groundspeak:log id=\"");
        gpx.write(Integer.toString(log.id));
        gpx.write("\">");
        // Log date in the GPX UTC timestamp format.
        gpx.write("<groundspeak:date>");
        gpx.write(StringEscapeUtils.escapeXml(dateFormatZ.format(new Date(log.date))));
        gpx.write("</groundspeak:date>");
        gpx.write("<groundspeak:type>");
        gpx.write(StringEscapeUtils.escapeXml(log.type.type));
        gpx.write("</groundspeak:type>");
        gpx.write("<groundspeak:finder id=\"\">");
        gpx.write(StringEscapeUtils.escapeXml(log.author));
        gpx.write("</groundspeak:finder>");
        gpx.write("<groundspeak:text encoded=\"False\">");
        gpx.write(StringEscapeUtils.escapeXml(log.log));
        gpx.write("</groundspeak:text>");
        gpx.write("</groundspeak:log>");
    }
    gpx.write("</groundspeak:logs>");
}
/**
 * Writes the {@code <groundspeak:attributes>} section for the cache; skipped
 * entirely when the cache has no attributes.
 */
private void writeAttributes(final cgCache cache) throws IOException {
    if (!cache.hasAttributes()) {
        return;
    }
    //TODO: Attribute conversion required: English verbose name, gpx-id
    gpx.write("<groundspeak:attributes>");
    for (String attribute : cache.getAttributes()) {
        final CacheAttribute attr = CacheAttribute.getByGcRawName(CacheAttribute.trimAttributeName(attribute));
        final boolean enabled = CacheAttribute.isEnabled(attribute);
        gpx.write("<groundspeak:attribute id=\"");
        gpx.write(Integer.toString(attr.id));
        gpx.write("\" inc=\"");
        gpx.write(enabled ? '1' : '0');
        gpx.write("\">");
        gpx.write(StringEscapeUtils.escapeXml(attr.getL10n(enabled)));
        gpx.write("</groundspeak:attribute>");
    }
    gpx.write("</groundspeak:attributes>");
}
/**
 * Dismisses the progress dialog and reports the outcome; on success it
 * optionally opens the system share chooser for the exported file.
 */
@Override
protected void onPostExecute(Boolean result) {
    if (null != activity) {
        progress.dismiss();
        if (result) {
            ActivityMixin.showToast(activity, getName() + ' ' + getString(R.string.export_exportedto) + ": " + exportFile.toString());
            if (Settings.getShareAfterExport()) {
                Intent shareIntent = new Intent();
                shareIntent.setAction(Intent.ACTION_SEND);
                shareIntent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(exportFile));
                shareIntent.setType("application/xml");
                activity.startActivity(Intent.createChooser(shareIntent, getString(R.string.export_gpx_to)));
            }
        } else {
            ActivityMixin.showToast(activity, getString(R.string.export_failed));
        }
    }
}
/** Forwards the number of caches exported so far to the progress dialog (UI only when an activity is present). */
@Override
protected void onProgressUpdate(Integer... status) {
    if (null != activity) {
        progress.setProgress(status[0]);
    }
}
}
} |
package cn.cerc.mis.tools;
import lombok.extern.slf4j.Slf4j;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
/**
* summer-dbCurl
*/
@Slf4j
@Deprecated
public class CURL {
private static int connectTimeOut = 5000; // connect timeout in milliseconds
private static int readTimeOut = 10000; // read timeout in milliseconds
private static String requestEncoding = "UTF-8"; // charset used to URL-encode request parameters
/**
 * Performs an HTTP GET without extra parameters, decoding the response as UTF-8.
 *
 * @param reqUrl the request URL
 * @return the response body, or {@code null} on I/O failure
 */
public static String doGet(String reqUrl) {
    return CURL.doGet(reqUrl, new HashMap<String, Object>(), "UTF-8");
}
/**
 * Performs an HTTP GET against {@code reqUrl} with the given parameters,
 * decoding the response body with {@code recvEncoding}.
 *
 * NOTE(review): the URL-encoded parameters are written to the request BODY
 * via {@code setDoOutput(true)}; with HttpURLConnection this effectively
 * turns the request into a POST despite {@code setRequestMethod("GET")} —
 * confirm against callers before changing.
 *
 * @param reqUrl       the request URL
 * @param parameters   request parameters, joined as {@code key=value&...}
 * @param recvEncoding charset used to decode the response body
 * @return the response body, or {@code null} on I/O failure
 */
public static String doGet(String reqUrl, Map<String, Object> parameters, String recvEncoding) {
    HttpURLConnection url_con = null;
    String responseContent = null;
    try {
        StringBuffer params = new StringBuffer();
        for (Iterator<?> iter = parameters.entrySet().iterator(); iter.hasNext(); ) {
            Entry<?, ?> element = (Entry<?, ?>) iter.next();
            params.append(element.getKey().toString());
            params.append("=");
            params.append(URLEncoder.encode(element.getValue().toString(), CURL.requestEncoding));
            params.append("&");
        }
        if (params.length() > 0) {
            // strip the trailing '&'
            params = params.deleteCharAt(params.length() - 1);
        }
        URL url = new URL(reqUrl);
        url_con = (HttpURLConnection) url.openConnection();
        url_con.setRequestMethod("GET");
        System.setProperty("sun.net.client.defaultConnectTimeout", String.valueOf(CURL.connectTimeOut));// jdk1.4,
        System.setProperty("sun.net.client.defaultReadTimeout", String.valueOf(CURL.readTimeOut)); // jdk1.4,
        // url_con.setConnectTimeout(5000);//jdk
        // url_con.setReadTimeout(5000);//jdk 1.5,
        url_con.setDoOutput(true);
        byte[] b = params.toString().getBytes();
        url_con.getOutputStream().write(b, 0, b.length);
        url_con.getOutputStream().flush();
        url_con.getOutputStream().close();
        InputStream in = url_con.getInputStream();
        BufferedReader rd = new BufferedReader(new InputStreamReader(in, recvEncoding));
        String tempLine = rd.readLine();
        StringBuffer temp = new StringBuffer();
        String crlf = System.getProperty("line.separator");
        // Re-join the response lines with the platform line separator.
        while (tempLine != null) {
            temp.append(tempLine);
            temp.append(crlf);
            tempLine = rd.readLine();
        }
        responseContent = temp.toString();
        rd.close();
        in.close();
    } catch (IOException e) {
        log.error("", e);
    } finally {
        if (url_con != null) {
            url_con.disconnect();
        }
    }
    return responseContent;
}
/**
 * Performs an HTTP GET against {@code reqUrl}. Any query string in the URL is
 * split off, its values re-URL-encoded and written to the request body
 * (setDoOutput); the response is decoded with {@code recvEncoding}.
 *
 * NOTE(review): writing a body on a "GET" effectively turns the request into
 * a POST with HttpURLConnection — preserved as-is, confirm against callers.
 *
 * @param reqUrl       the request URL, optionally with a query string
 * @param recvEncoding charset used to decode the response body
 * @return the response body, or {@code null} on I/O failure
 */
public static String doGet(String reqUrl, String recvEncoding) {
    HttpURLConnection url_con = null;
    String responseContent = null;
    try {
        StringBuffer params = new StringBuffer();
        String queryUrl = reqUrl;
        int paramIndex = reqUrl.indexOf("?");
        if (paramIndex > 0) {
            queryUrl = reqUrl.substring(0, paramIndex);
            String parameters = reqUrl.substring(paramIndex + 1, reqUrl.length());
            String[] paramArray = parameters.split("&");
            for (int i = 0; i < paramArray.length; i++) {
                String string = paramArray[i];
                int index = string.indexOf("=");
                if (index > 0) {
                    String parameter = string.substring(0, index);
                    String value = string.substring(index + 1, string.length());
                    params.append(parameter);
                    params.append("=");
                    params.append(URLEncoder.encode(value, CURL.requestEncoding));
                    params.append("&");
                }
            }
            // FIX: only strip the trailing '&' when a parameter was actually
            // appended; the original called deleteCharAt(-1) for URLs such as
            // "http://host/path?" and threw StringIndexOutOfBoundsException.
            if (params.length() > 0) {
                params.deleteCharAt(params.length() - 1);
            }
        }
        URL url = new URL(queryUrl);
        url_con = (HttpURLConnection) url.openConnection();
        url_con.setRequestMethod("GET");
        System.setProperty("sun.net.client.defaultConnectTimeout", String.valueOf(CURL.connectTimeOut));// jdk1.4,
        System.setProperty("sun.net.client.defaultReadTimeout", String.valueOf(CURL.readTimeOut)); // jdk1.4,
        url_con.setDoOutput(true);
        byte[] b = params.toString().getBytes();
        url_con.getOutputStream().write(b, 0, b.length);
        url_con.getOutputStream().flush();
        url_con.getOutputStream().close();
        InputStream in = url_con.getInputStream();
        BufferedReader rd = new BufferedReader(new InputStreamReader(in, recvEncoding));
        String tempLine = rd.readLine();
        StringBuffer temp = new StringBuffer();
        String crlf = System.getProperty("line.separator");
        // Re-join the response lines with the platform line separator.
        while (tempLine != null) {
            temp.append(tempLine);
            temp.append(crlf);
            tempLine = rd.readLine();
        }
        responseContent = temp.toString();
        rd.close();
        in.close();
    } catch (IOException e) {
        log.error("", e);
    } finally {
        if (url_con != null) {
            url_con.disconnect();
        }
    }
    return responseContent;
}
/**
 * Sends an HTTP POST with the given form parameters (entries with a null
 * value are skipped), URL-encoded with {@link #getRequestEncoding()}.
 *
 * @param reqUrl       the request URL
 * @param parameters   form parameters
 * @param recvEncoding charset used to decode the response body
 * @return the response body
 * @throws RuntimeException if the request encoding is unsupported (cause preserved)
 */
public static String doPost(String reqUrl, Map<String, String> parameters, String recvEncoding) {
    try {
        StringBuffer params = new StringBuffer();
        for (Entry<String, String> element : parameters.entrySet()) {
            String val = element.getValue();
            if (val != null) {
                params.append(element.getKey());
                params.append("=");
                params.append(URLEncoder.encode(val, CURL.requestEncoding));
                params.append("&");
            }
        }
        if (params.length() > 0) {
            params.deleteCharAt(params.length() - 1);
        }
        return doPost(reqUrl, params, recvEncoding);
    } catch (UnsupportedEncodingException e) {
        log.error(e.getMessage(), e);
        // FIX: keep the original message but preserve the cause for diagnosis
        // instead of discarding the stack trace.
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Sends an HTTP POST with an already URL-encoded parameter string as the
 * request body and decodes the response with {@code recvEncoding}.
 *
 * @param reqUrl       the request URL
 * @param params       the raw, already encoded body ({@code key=value&...})
 * @param recvEncoding charset used to decode the response body
 * @return the response body
 * @throws RuntimeException on any I/O failure (cause preserved)
 */
public static String doPost(String reqUrl, StringBuffer params, String recvEncoding) {
    HttpURLConnection url_con = null;
    String responseContent = null;
    try {
        URL url = new URL(reqUrl);
        url_con = (HttpURLConnection) url.openConnection();
        url_con.setRequestMethod("POST");
        url_con.setConnectTimeout(CURL.connectTimeOut);
        url_con.setReadTimeout(CURL.readTimeOut);
        url_con.setDoOutput(true);
        // NOTE(review): getBytes() uses the platform charset, not
        // requestEncoding; harmless for pure-ASCII (URL-encoded) bodies —
        // confirm callers never pass raw non-ASCII before changing.
        byte[] b = params.toString().getBytes();
        url_con.getOutputStream().write(b, 0, b.length);
        url_con.getOutputStream().flush();
        url_con.getOutputStream().close();
        InputStream in = url_con.getInputStream();
        BufferedReader rd = new BufferedReader(new InputStreamReader(in, recvEncoding));
        String tempLine = rd.readLine();
        StringBuffer tempStr = new StringBuffer();
        String crlf = System.getProperty("line.separator");
        // Re-join the response lines with the platform line separator.
        while (tempLine != null) {
            tempStr.append(tempLine);
            tempStr.append(crlf);
            tempLine = rd.readLine();
        }
        responseContent = tempStr.toString();
        rd.close();
        in.close();
    } catch (IOException e) {
        log.error(e.getMessage(), e);
        // FIX: keep the original message but preserve the cause for diagnosis.
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        if (url_con != null) {
            url_con.disconnect();
        }
    }
    return responseContent;
}
// return ()
public static int getConnectTimeOut() {
return CURL.connectTimeOut;
}
public static void setConnectTimeOut(int connectTimeOut) {
CURL.connectTimeOut = connectTimeOut;
}
public static int getReadTimeOut() {
return CURL.readTimeOut;
}
/** @param readTimeOut read timeout in milliseconds for subsequent requests. */
public static void setReadTimeOut(int readTimeOut) {
    CURL.readTimeOut = readTimeOut;
}
/**
 * @return the charset name used to URL-encode request parameters.
 */
public static String getRequestEncoding() {
    // Qualified access for consistency with the sibling accessors
    // (the setter below assigns CURL.requestEncoding).
    return CURL.requestEncoding;
}
/** @param requestEncoding charset name used to URL-encode request parameters. */
public static void setRequestEncoding(String requestEncoding) {
    CURL.requestEncoding = requestEncoding;
}
} |
package cec.net;
import java.util.ArrayList;
import java.util.logging.Logger;
import cec.net.ChangeSetFields;
import cec.net.ChangeSetState;
import cec.net.CommunicationChangeSet;
/**
 * Responsible for creating a new change set and updating the values on the
 * server side.  For each field the client claims to have changed, the
 * server's CURRENT value is compared with the client's BEFORE value; any
 * mismatch means the client was working from stale data, so the change set
 * is rejected and the server's values are sent back.  If every BEFORE value
 * matches, the change set is accepted.
 */
public class ServerMeetingMerger {
    static Logger logger = Logger.getLogger(ServerMeetingMerger.class.getName());
    static {
        // Route log records through the package-level logger's handlers.
        logger.setParent( Logger.getLogger( ServerMeetingMerger.class.getPackage().getName() ) );
    }

    /**
     * Builds the response change set for a client's requested changes.
     *
     * @param serverCurrent the server's current meeting state
     * @param clientsBefore the change set the client built, carrying the
     *                      client's "before" snapshot per field
     * @return a CHANGE_ACCEPTED change set (empty) when all "before" values
     *         match the server, otherwise a CHANGE_REJECTED change set
     *         carrying the server's current values for the conflicting fields
     */
    public CommunicationChangeSet getChangeSet(MeetingDataWrapper serverCurrent, CommunicationChangeSet clientsBefore) {
        // Start pessimistically as REJECTED; replaced with ACCEPTED below when
        // no conflicting field is found.
        CommunicationChangeSet ccs = new CommunicationChangeSet(ChangeSetState.CHANGE_REJECTED, clientsBefore.getId());
        ArrayList<Change> changes = clientsBefore.getChanges();
        for(Change c: changes) {
            // NOTE(review): in every branch below, addChange is passed the
            // server's CURRENT value as BOTH the before and after argument,
            // and the log label "Servers' Before" also prints the current
            // value — looks suspicious; confirm this is intended.
            if(c.field.equals(ChangeSetFields.ATTENDEES))
                if (!serverCurrent.meetingObj.getAttendees().equals(c.before)){
                    ccs.addChange(ChangeSetFields.ATTENDEES,serverCurrent.meetingObj.getAttendees(), serverCurrent.meetingObj.getAttendees());
                    logger.info("Field: "+ ChangeSetFields.ATTENDEES + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getAttendees());
                }
            /*if(c.field.equals(ChangeSetFields.BODY))
                if (!serverCurrent.meetingObj.getBody().equals(c.before)){
                    ccs.addChange(ChangeSetFields.BODY,serverCurrent.meetingObj.getBody(), serverCurrent.meetingObj.getBody());
                    logger.info("Field: "+ ChangeSetFields.BODY + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getBody());
                }*/
            if(c.field.equals(ChangeSetFields.SUBJECT))
                if (!serverCurrent.meetingObj.getSubject().equals(c.before)){
                    ccs.addChange(ChangeSetFields.SUBJECT, serverCurrent.meetingObj.getSubject(), serverCurrent.meetingObj.getSubject());
                    logger.info("Field: "+ ChangeSetFields.SUBJECT + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getSubject());
                }
            if(c.field.equals(ChangeSetFields.PLACE))
                if (!serverCurrent.meetingObj.getPlace().equals(c.before)){
                    ccs.addChange(ChangeSetFields.PLACE, serverCurrent.meetingObj.getPlace(), serverCurrent.meetingObj.getPlace());
                    logger.info("Field: "+ ChangeSetFields.PLACE + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getPlace());
                }
            if(c.field.equals(ChangeSetFields.START_DATE))
                if (!serverCurrent.meetingObj.getStartDate().equals(c.before)){
                    ccs.addChange(ChangeSetFields.START_DATE, serverCurrent.meetingObj.getStartDate(), serverCurrent.meetingObj.getStartDate());
                    logger.info("Field: "+ ChangeSetFields.START_DATE + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getStartDate());
                }
            if(c.field.equals(ChangeSetFields.START_TIME))
                if (!serverCurrent.meetingObj.getStartTime().equals(c.before)){
                    ccs.addChange(ChangeSetFields.START_TIME, serverCurrent.meetingObj.getStartTime(), serverCurrent.meetingObj.getStartTime());
                    logger.info("Field: "+ ChangeSetFields.START_TIME + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getStartTime());
                }
            if(c.field.equals(ChangeSetFields.END_DATE))
                if (!serverCurrent.meetingObj.getEndDate().equals(c.before)){
                    ccs.addChange(ChangeSetFields.END_DATE, serverCurrent.meetingObj.getEndDate(), serverCurrent.meetingObj.getEndDate());
                    logger.info("Field: "+ ChangeSetFields.END_DATE + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getEndDate());
                }
            if(c.field.equals(ChangeSetFields.END_TIME))
                if (!serverCurrent.meetingObj.getEndTime().equals(c.before)){
                    ccs.addChange(ChangeSetFields.END_TIME, serverCurrent.meetingObj.getEndTime(), serverCurrent.meetingObj.getEndTime());
                    logger.info("Field: "+ ChangeSetFields.END_TIME + " Client's Before: "+c.before + " Servers' Before: " + serverCurrent.meetingObj.getEndTime());
                }
        }
        //If client's Meeting before's state = Server's current Meeting object > Accept the changes
        if (ccs.getChanges().size() == 0){
            ccs = new CommunicationChangeSet(ChangeSetState.CHANGE_ACCEPTED, clientsBefore.getId());
            logger.info("Meeting change request ACCEPTED for clients before meeting Id : " +clientsBefore.getId() );
        }else{
            logger.info("Meeting change request NOT ACCEPTED for this client before meeting Id : " +clientsBefore.getId() );
        }
        return ccs;
    }
}
package tdc2014.temporal;
import java.time.LocalDate;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAdjuster;
import java.util.stream.Stream;
public class DiasUteisAdjuster implements TemporalAdjuster {
private long quantidadeDias;
private final DiaUtilQuery diaUtilQuery;
public DiasUteisAdjuster() {
this(1);
}
public DiasUteisAdjuster(final long quantidadeDias) {
this.quantidadeDias = quantidadeDias;
this.diaUtilQuery = new DiaUtilQuery(FeriadosFactory.create());
}
@Override
public Temporal adjustInto(final Temporal temporal) {
final LocalDate from = LocalDate.from(temporal);
return Stream.iterate(from, day -> day.plusDays(1))
.filter(day -> day.query(diaUtilQuery))
.skip(quantidadeDias - 1)
.findFirst().get();
}
} |
package br.com.blackhubos.eventozero.handlers;
import java.io.File;
import java.util.Vector;
import org.bukkit.plugin.Plugin;
import br.com.blackhubos.eventozero.EventoZero;
import br.com.blackhubos.eventozero.shop.Shop;
import br.com.blackhubos.eventozero.util.Framework.Configuration;
public class ShopHandler
{
private final Vector<Shop> shops;
public ShopHandler()
{
this.shops = new Vector<>();
}
public void loadShops(final Plugin plugin)
{
final File file = new File(plugin.getDataFolder() + File.separator + "shop" + File.separator + "shops.yml");
final Configuration configuration = new Configuration(file);
for (final String key : configuration.getConfigurationSection("shops").getKeys(false))
{
// final Shop shop = new Shop(configuration.getString("shops." + key + ".name"), new ItemFactory(configuration.getString("shops." + key + ".icon")).getItem());
for (final String s : configuration.getStringList("shops." + key + ".items"))
{
if (EventoZero.getKitHandler().getKitByName(s.replace("{|}", "")) != null)
{
// shop.addItem(EventoZero.getKitHandler().getKitByName(s.replace("{|}", "")));
}
else
{
}
}
}
}
} |
package net.sf.saffron.sql2rel;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import net.sf.saffron.jdbc.SaffronJdbcConnection;
import net.sf.saffron.oj.OJPlannerFactory;
import openjava.mop.*;
import openjava.ptree.ClassDeclaration;
import openjava.ptree.MemberDeclarationList;
import openjava.ptree.ModifierList;
import org.eigenbase.oj.*;
import org.eigenbase.oj.util.JavaRexBuilder;
import org.eigenbase.oj.util.OJUtil;
import org.eigenbase.rel.RelNode;
import org.eigenbase.relopt.RelOptConnection;
import org.eigenbase.relopt.RelOptPlanWriter;
import org.eigenbase.sql.SqlNode;
import org.eigenbase.sql.validate.*;
import org.eigenbase.sql.parser.SqlParseException;
import org.eigenbase.sql.parser.SqlParser;
import org.eigenbase.sql.fun.*;
import org.eigenbase.sql2rel.SqlToRelConverter;
import org.eigenbase.util.*;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.regex.Pattern;
/**
 * Unit test for {@link SqlToRelConverter}.
 *
 * <p>Each test parses a SQL statement, validates it, converts it to a
 * relational expression, and compares the textual plan against an expected
 * string.  Methods whose names start with an underscore are disabled
 * (JUnit 3 only runs methods whose names start with "test"); the comments
 * above them explain why.</p>
 *
 * @version $Id$
 */
public class ConverterTest extends TestCase
{
    // Platform line separator; expected plans are assembled with it so the
    // comparison works on every OS.
    private static final String NL = System.getProperty("line.separator");
    // Shared, lazily created context (JDBC connection + OJ environment);
    // see getTestContext().
    private static TestContext testContext;
    // Rewrites volatile connection-registry ids in plans to the stable
    // placeholder "{con}" so expected strings are deterministic.
    private static final Pattern pattern =
        Pattern.compile(
            "net.sf.saffron.oj.OJConnectionRegistry.instance.get\\( \"[0-9]+\" \\)");
    // Rewrites the fully spelled-out sales-schema cast expression to the
    // stable placeholder "{sales}".
    private static final Pattern pattern2 =
        Pattern.compile(
            "\\(\\(sales.SalesInMemory\\) \\(\\(net.sf.saffron.jdbc.SaffronJdbcConnection.MyConnection\\) \\{con\\}\\).target\\)");

    protected void setUp()
        throws Exception
    {
        super.setUp();
        // Create a type factory.
        // (Both factories are thread-local singletons; install them only if
        // no earlier test on this thread already did.)
        OJTypeFactory typeFactory =
            OJUtil.threadTypeFactory();
        if (typeFactory == null) {
            typeFactory = new OJTypeFactoryImpl();
            OJUtil.setThreadTypeFactory(typeFactory);
        }
        // And a planner factory.
        if (OJPlannerFactory.threadInstance() == null) {
            OJPlannerFactory.setThreadInstance(new OJPlannerFactory());
        }
    }

    /**
     * Parses {@code sql}, validates and converts it, then asserts that the
     * explained plan — after normalizing connection/schema references via
     * {@link #pattern} and {@link #pattern2} — equals {@code plan}.
     */
    private void check(
        String sql,
        String plan)
    {
        TestContext testContext = getTestContext();
        final SqlNode sqlQuery;
        try {
            sqlQuery = new SqlParser(sql).parseQuery();
        } catch (SqlParseException e) {
            // Surface parse failures as test failures, not errors.
            throw new AssertionFailedError(e.toString());
        }
        final SqlValidator validator =
            SqlValidatorUtil.newValidator(
                SqlStdOperatorTable.instance(),
                testContext.seeker,
                testContext.connection.getRelOptSchema().getTypeFactory());
        final SqlToRelConverter converter =
            new SqlToRelConverter(validator,
                testContext.connection.getRelOptSchema(), testContext.env,
                OJPlannerFactory.threadInstance().newPlanner(),
                testContext.connection,
                new JavaRexBuilder(testContext.connection.getRelOptSchema()
                    .getTypeFactory()));
        final RelNode rel = converter.convertQuery(sqlQuery, true, true);
        assertTrue(rel != null);
        // Render the plan without id prefixes so output is stable.
        final StringWriter sw = new StringWriter();
        final RelOptPlanWriter planWriter =
            new RelOptPlanWriter(new PrintWriter(sw));
        planWriter.setIdPrefix(false);
        rel.explain(planWriter);
        planWriter.flush();
        String actual = sw.toString();
        String actual2 = pattern.matcher(actual).replaceAll("{con}");
        String actual3 = pattern2.matcher(actual2).replaceAll("{sales}");
        TestUtil.assertEqualsVerbose(plan, actual3);
    }

    // Lazily creates the shared context.  Not synchronized; JUnit 3 runs
    // these tests on a single thread.
    static TestContext getTestContext()
    {
        if (testContext == null) {
            testContext = new TestContext();
        }
        return testContext;
    }

    /**
     * Contains context shared between unit tests.
     *
     * <p>Lots of nasty stuff to set up the Openjava environment, should be
     * removed when we're not dependent upon Openjava.</p>
     */
    static class TestContext
    {
        private final SqlValidatorCatalogReader seeker;
        private final Connection jdbcConnection;
        private final RelOptConnection connection;
        Environment env;
        // Makes each generated temp class name unique; see getTempClassName().
        private int executionCount;

        TestContext()
        {
            try {
                Class.forName("net.sf.saffron.jdbc.SaffronJdbcDriver");
            } catch (ClassNotFoundException e) {
                throw Util.newInternal(e, "Error loading JDBC driver");
            }
            try {
                jdbcConnection =
                    DriverManager.getConnection(
                        "jdbc:saffron:schema=sales.SalesInMemory");
            } catch (SQLException e) {
                throw Util.newInternal(e);
            }
            connection =
                ((SaffronJdbcConnection) jdbcConnection).saffronConnection;
            seeker =
                new SqlToRelConverter.SchemaCatalogReader(
                    connection.getRelOptSchema(),
                    false);

            // Nasty OJ stuff
            env = OJSystem.env;

            String packageName = getTempPackageName();
            String className = getTempClassName();
            env = new FileEnvironment(env, packageName, className);
            ClassDeclaration decl =
                new ClassDeclaration(new ModifierList(ModifierList.PUBLIC),
                    className, null, null, new MemberDeclarationList());
            OJClass clazz = new OJClass(env, null, decl);
            env.record(
                clazz.getName(),
                clazz);
            env = new ClosedEnvironment(clazz.getEnvironment());

            // Ensure that the thread has factories for types and planners. (We'd
            // rather that the client sets these.)
            OJTypeFactory typeFactory =
                OJUtil.threadTypeFactory();
            if (typeFactory == null) {
                typeFactory = new OJTypeFactoryImpl();
                OJUtil.setThreadTypeFactory(typeFactory);
            }
            if (OJPlannerFactory.threadInstance() == null) {
                OJPlannerFactory.setThreadInstance(
                    new OJPlannerFactory());
            }

            OJUtil.threadDeclarers.set(clazz);
        }

        protected static String getClassRoot()
        {
            return SaffronProperties.instance().classDir.get(true);
        }

        protected String getTempClassName()
        {
            return "Dummy_"
                + Integer.toHexString(this.hashCode() + executionCount++);
        }

        protected static String getJavaRoot()
        {
            return SaffronProperties.instance().javaDir.get();
        }

        protected String getTempPackageName()
        {
            return SaffronProperties.instance().packageName.get();
        }
    }

    public void testIntegerLiteral()
    {
        check("select 1 from \"emps\"",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void testStringLiteral()
    {
        check("select 'foo' from \"emps\"",
            "ProjectRel(EXPR$0=[_ISO-8859-1'foo'])"
            + NL
            + "  ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void testSelectListAlias()
    {
        check("select 1 as one, 'foo' foo, 1 bar from \"emps\"",
            "ProjectRel(ONE=[1], FOO=[_ISO-8859-1'foo'], BAR=[1])"
            + NL
            + "  ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void testSelectListColumns()
    {
        check("select \"emps\".\"gender\", \"empno\", \"deptno\" as \"d\" from \"emps\"",
            "ProjectRel(gender=[$3], empno=[$0], d=[$2])" + NL
            + "  ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void testFromList()
    {
        // "FROM x, y" == "x INNER JOIN y ON true"
        check("select 1 from \"emps\", \"depts\"",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  JoinRel(condition=[true], joinType=[inner])" + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])"
            + NL);
    }

    public void testFromAlias()
    {
        check("select 1 from \"emps\" as \"e\"",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void testFromJoin()
    {
        check("select 1 from \"emps\" join \"depts\" on \"emps\".\"deptno\" = \"depts\".\"deptno\"",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  JoinRel(condition=[=($2, $6)], joinType=[inner])" + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])"
            + NL);
    }

    // todo: Enable when validator can handle USING
    public void _testFromLeftJoinUsing()
    {
        check("select 1 from \"emps\" left join \"depts\" using (\"deptno\")",
            "?");
    }

    public void testFromFullJoin()
    {
        check("select 1 from \"emps\" full join \"depts\" on \"emps\".\"deptno\" = \"depts\".\"deptno\"",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  JoinRel(condition=[=($2, $6)], joinType=[full])" + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])" + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])" + NL);
    }

    public void testFromJoin3()
    {
        check("select 1 from \"emps\" "
            + "join \"depts\" on \"emps\".\"deptno\" = \"depts\".\"deptno\" "
            + "join (select * from \"emps\" where \"gender\" = 'F') as \"femaleEmps\" on \"femaleEmps\".\"empno\" = \"emps\".\"empno\"",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  JoinRel(condition=[=($8, $0)], joinType=[inner])" + NL
            + "    JoinRel(condition=[=($2, $6)], joinType=[inner])" + NL
            + "      ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])" + NL
            + "      ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])" + NL
            + "    ProjectRel(empno=[$0], name=[$1], deptno=[$2], gender=[$3], city=[$4], slacker=[$5])" + NL
            + "      FilterRel(condition=[=($3, _ISO-8859-1'F')])" + NL
            + "        ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])" + NL);
    }

    // todo: Enable when validator can handle NATURAL JOIN
    public void _testFromNaturalRightJoin()
    {
        check("select 1 from \"emps\" natural right join \"depts\"", "?");
    }

    public void testWhereSimple()
    {
        check("select 1 from \"emps\" where \"gender\" = 'F'",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  FilterRel(condition=[=($3, _ISO-8859-1'F')])"
            + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void testWhereAnd()
    {
        check("select 1 from \"emps\" where \"gender\" = 'F' and \"deptno\" = 10",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  FilterRel(condition=[AND(=($3, _ISO-8859-1'F'), =($2, 10))])"
            + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void _testOrder()
    {
        check("select * from \"emps\" order by empno asc, salary, deptno desc",
            "?");
    }

    public void _testOrderOrdinal()
    {
        check("select * from \"emps\" order by 3 asc, salary, 1 desc", "?");
    }

    public void _testOrderLiteral()
    {
        check("select * from \"emps\" order by 'A string', salary, true, null desc",
            "?");
    }

    public void _testFromQuery()
    {
        check("select 1 from (select * from \"emps\")",
            "ProjectRel(EXPR$0=[1])" + NL
            + "  ProjectRel(city=[emps.city], gender=[emps.gender], name=[emps.name], deptno=[emps.deptno], empno=[emps.empno])"
            + NL
            + "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL);
    }

    public void testQueryInSelect()
    {
        check("select \"gender\", (select \"name\" from \"depts\" where \"deptno\" = \"e\".\"deptno\") from \"emps\" as \"e\"",
            TestUtil.fold(new String[]{
                "ProjectRel(gender=[$3], EXPR$1=[IS NULL($6)])",
                "  CorrelatorRel(condition=[true], joinType=[inner], correlations=[[var0=offset2]])",
                "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])",
                "    ProjectRel(name=[$1])",
                "      FilterRel(condition=[=($0, $cor0.deptno)])",
                "        ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])",
                ""}));
    }

    public void testExistsUncorrelated()
    {
        check("select * from \"emps\" where exists (select 1 from \"depts\")",
            TestUtil.fold(new String[]{
                "ProjectRel(empno=[$0], name=[$1], deptno=[$2], gender=[$3], city=[$4], slacker=[$5])",
                "  FilterRel(condition=[IS NULL($7)])",
                "    JoinRel(condition=[true], joinType=[left])",
                "      ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])",
                "      ProjectRel(EXPR$0=[$0], $indicator=[true])",
                "        ProjectRel(EXPR$0=[1])",
                "          ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])",
                ""}));
    }

    // todo: implement IN
    public void _testQueryInWhereUncorrelated()
    {
        check("select * from \"depts\" where \"deptno\" in (select \"deptno\" from \"depts\")",
            "");
    }

    public void testExistsCorrelated()
    {
        check("select * from \"emps\" "
            + "where exists (select 1 + 2 from \"depts\" where \"deptno\" > 10) "
            + "or exists (select 'foo' from \"emps\" where \"gender\" = 'Pig')",
            TestUtil.fold(new String[]{
                "ProjectRel(empno=[$0], name=[$1], deptno=[$2], gender=[$3], city=[$4], slacker=[$5])",
                "  FilterRel(condition=[OR(IS NULL($7), IS NULL($9))])",
                "    JoinRel(condition=[true], joinType=[left])",
                "      JoinRel(condition=[true], joinType=[left])",
                "        ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])",
                "        ProjectRel(EXPR$0=[$0], $indicator=[true])",
                "          ProjectRel(EXPR$0=[+(1, 2)])",
                "            FilterRel(condition=[>($0, 10)])",
                "              ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])",
                "      ProjectRel(EXPR$0=[$0], $indicator=[true])",
                "        ProjectRel(EXPR$0=[_ISO-8859-1'foo'])",
                "          FilterRel(condition=[=($3, _ISO-8859-1'Pig')])",
                "            ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])",
                ""}));
    }

    public void testUnion()
    {
        check("select 1 from \"emps\" union select 2 from \"depts\"",
            TestUtil.fold(new String[] {
                "UnionRel(all=[false])",
                "  ProjectRel(EXPR$0=[1])",
                "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])",
                "  ProjectRel(EXPR$0=[2])",
                "    ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])",
                ""}));
    }

    public void testUnionAll()
    {
        check("select 1 from \"emps\" union all select 2 from \"depts\"",
            TestUtil.fold(new String[] {
                "UnionRel(all=[true])",
                "  ProjectRel(EXPR$0=[1])",
                "    ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])",
                "  ProjectRel(EXPR$0=[2])",
                "    ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])",
                ""}));
    }

    public void testUnionInFrom()
    {
        check("select * from (select 1 as \"i\", 3 as \"j\" from \"emps\" union select 2, 5 from \"depts\") where \"j\" > 4",
            TestUtil.fold(new String[] {
                "ProjectRel(i=[$0], j=[$1])",
                "  FilterRel(condition=[>($1, 4)])",
                "    UnionRel(all=[false])",
                "      ProjectRel(i=[1], j=[3])",
                "        ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])",
                "      ProjectRel(EXPR$0=[2], EXPR$1=[5])",
                "        ExpressionReaderRel(expression=[Java((sales.Dept[]) {sales}.depts)])",
                ""}));
    }

    public void testJoinOfValues()
    {
        // NOTE jvs 15-Nov-2003:  I put this test in when I fixed a
        // converter bug; the individual rows were getting registered as
        // leaves, rather than the entire VALUES expression (as required to
        // get the join references correct).
        check("select * from (values (1), (2)), (values (3))",
            "ProjectRel(EXPR$0=[$0], EXPR$00=[$1])" + NL
            + "  JoinRel(condition=[true], joinType=[inner])" + NL
            + "    ProjectRel(EXPR$0=[$0])" + NL
            + "      UnionRel(all=[true])" + NL
            + "        ProjectRel(EXPR$0=[1])" + NL + "          OneRowRel"
            + NL + "        ProjectRel(EXPR$0=[2])" + NL
            + "          OneRowRel" + NL + "    ProjectRel(EXPR$0=[$0])" + NL
            + "      ProjectRel(EXPR$0=[3])" + NL + "        OneRowRel" + NL);
    }

    // todo: implement EXISTS
    public void _testComplexCorrelation()
    {
        // This query is an example of relational division: it finds all of
        // the genders which exist in all departments.
        check("select distinct \"gender\" from \"emps\" as \"e1\" "
            + "where not exists (" + "  select * from \"depts\" as \"d\" "
            + "  where not exists (" + "    select * from \"emps\" as \"e2\" "
            + "    where \"e1\".\"gender\" = \"e2\".\"gender\" "
            + "    and \"e2\".\"deptno\" = \"d\".\"deptno\"))", "");
    }

    // FIXME jvs 15-Nov-2003:  I disabled this because it was failing and the
    // expected output looks very wrong.
    public void _testInList()
    {
        check("select * from \"emps\" where \"deptno\" in (10,20,30)",
            "ProjectRel(city=[$input0.$f0.city], gender=[$input0.$f0.gender], name=[$input0.$f0.name], deptno=[$input0.$f0.deptno], empno=[$input0.$f0.empno])"
            + NL + "  FilterRel(condition=[$input1.$f0])" + NL
            + "    JoinRel(condition=[true], joinType=[left])" + NL
            + "      ExpressionReaderRel(expression=[Java((sales.Emp[]) {sales}.emps)])"
            + NL + "      ProjectRel($indicator=[true])" + NL
            + "        FilterRel(condition=[$input0.deptno == $input0.$f0])"
            + NL + "          UnionRel(all=[true])" + NL
            + "            ProjectRel(EXPR$0=[10])" + NL
            + "              OneRowRel" + NL
            + "            ProjectRel(EXPR$0=[20])" + NL
            + "              OneRowRel" + NL
            + "            ProjectRel(EXPR$0=[30])" + NL
            + "              OneRowRel" + NL);
    }

    // todo: make parser handle IN VALUES
    // REVIEW jvs 15-Nov-2003:  I'm not sure what you're after here.  You
    // should be able to put VALUES inside parentheses, and in that case it's
    // producing a row, not a table of scalars (same as everywhere else).  If
    // you want to construct a table, you do it like this:
    // ROW(X,Y,Z) IN (VALUES (1,2,3), (4,5,6)); in this case the
    // VALUES evaluates to a table of two rows with three columns.  I think.
    public void _testInValues()
    {
        check("select * from \"emps\" where \"deptno\" in values (10,20,30)",
            "");
    }

    // todo: make parser handle (1,deptno)
    public void _testInCompound()
    {
        check("select * from \"emps\" where (1,deptno) in (select 1,deptno from \"depts\")",
            "");
    }

    // TODO jvs 15-Nov-2003:  Parser handles this monstrosity OK, but converter
    // chokes.  The only reason I thought of it is that I was reading
    // CommonParser.jj and noticed that it allowed an arbitrary number of IN's
    // in sequence.  At first I thought it was a mistake, but then I remembered
    // boolean values.
    public void _testInSquared()
    {
        check("select * from \"depts\" where deptno in (10) in (true)", "");
    }
}

// End ConverterTest.java
package nl.mpi.kinnate.ui;
import java.awt.Color;
import java.awt.Point;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import javax.swing.JTextPane;
import javax.swing.text.Style;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyledDocument;
import nl.mpi.kinnate.kintypestrings.KinType;
import nl.mpi.kinnate.kintypestrings.ParserHighlight;
import nl.mpi.kinnate.svg.DataStoreSvg;
public class KinTypeStringInput extends JTextPane {
private String previousKinTypeStrings = null;
private String lastDefaultString;
private Color defaultColour = Color.GRAY;
protected Style styleComment;
protected Style styleKinType;
protected Style styleQuery;
protected Style styleParamater;
protected Style styleError;
protected Style styleUnknown;
private ParserHighlight[] parserHighlight = null;
DataStoreSvg dataStore;
public KinTypeStringInput(DataStoreSvg dataStore) {
this.dataStore = dataStore;
this.setToolTipText("");
this.setDragEnabled(true);
// set the styles for the kin type string text
styleComment = this.addStyle("Comment", null);
// StyleConstants.setForeground(styleComment, new Color(247,158,9));
StyleConstants.setForeground(styleComment, Color.GRAY);
styleKinType = this.addStyle("KinType", null);
StyleConstants.setForeground(styleKinType, new Color(43, 32, 161));
styleQuery = this.addStyle("Query", null);
StyleConstants.setForeground(styleQuery, new Color(183, 7, 140));
styleParamater = this.addStyle("Parameter", null);
StyleConstants.setForeground(styleParamater, new Color(103, 7, 200));
styleError = this.addStyle("Error", null);
// StyleConstants.setForeground(styleError, new Color(172,3,57));
StyleConstants.setForeground(styleError, Color.RED);
styleUnknown = this.addStyle("Unknown", null);
StyleConstants.setForeground(styleUnknown, Color.BLACK);
this.addFocusListener(new FocusListener() {
public void focusGained(FocusEvent e) {
clearDefaultKinTypeInput();
}
public void focusLost(FocusEvent e) {
checkKinTypeInput();
}
});
this.setForeground(defaultColour);
this.setText(getDefaultText());
}
private String getDefaultText() {
StringBuilder defaultString = new StringBuilder();
defaultString.append("# The kin type strings entered in this text area will determine the diagram drawn below. For more information see the sample diagram \"Freeform Diagram Syntax\".\n");
defaultString.append("# <KinType>:<id>;<label>;<label...>;<DOB>-<DOD>:<KinType...>");
// defaultString.append("# The tool tip over this text will give more information and details of any syntax errors.\n");
// defaultString.append("# The extended format includes data between colons after they kin type (the < and > indicate a value that can be entered).\n");
// defaultString.append("# :<id>;<label>;<label...>;<DOB>-<DOD>:\n");
// defaultString.append("# The available kin types are as follows:\n");
// for (KinType kinType : dataStore.getKinTypeDefinitions()) {
// defaultString.append("
// defaultString.append(kinType.getCodeString());
// defaultString.append(" = ");
// defaultString.append(kinType.getDisplayString());
// defaultString.append("\n");
lastDefaultString = defaultString.toString();
return lastDefaultString;
}
public void setDefaultText() {
// this.setForeground(defaultColour);
this.setText(getDefaultText());
StyledDocument styledDocument = this.getStyledDocument();
styledDocument.setCharacterAttributes(0, styledDocument.getLength(), this.getStyle("Comment"), true);
this.setCaretPosition(0);
}
public void clearDefaultKinTypeInput() {
if (this.getText().equals(lastDefaultString)) {
this.setText("");
}
}
public void checkKinTypeInput() {
if (this.getText().length() == 0) {
setDefaultText();
}
}
public String[] getCurrentStrings() {
previousKinTypeStrings = this.getText();
return this.getText().split("\n");
}
public boolean hasChanges() {
return (previousKinTypeStrings == null || !previousKinTypeStrings.equals(this.getText()));
}
protected void highlightKinTypeStrings(ParserHighlight[] parserHighlight, String[] kinTypeStrings) {
this.parserHighlight = parserHighlight;
StyledDocument styledDocument = this.getStyledDocument();
int lineStart = 0;
for (int lineCounter = 0; lineCounter < parserHighlight.length; lineCounter++) {
ParserHighlight currentHighlight = parserHighlight[lineCounter];
// int lineStart = styledDocument.getParagraphElement(lineCounter).getStartOffset();
// int lineEnd = styledDocument.getParagraphElement(lineCounter).getEndOffset();
int lineEnd = lineStart + kinTypeStrings[lineCounter].length();
styledDocument.setCharacterAttributes(lineStart, lineEnd, this.getStyle("Unknown"), true);
while (currentHighlight.highlight != null) {
int startPos = lineStart + currentHighlight.startChar;
int charCount = lineEnd - lineStart;
if (currentHighlight.nextHighlight.highlight != null) {
charCount = currentHighlight.nextHighlight.startChar - currentHighlight.startChar;
}
if (currentHighlight.highlight != null) {
String styleName = currentHighlight.highlight.name();
styledDocument.setCharacterAttributes(startPos, charCount, this.getStyle(styleName), true);
}
currentHighlight = currentHighlight.nextHighlight;
}
lineStart += kinTypeStrings[lineCounter].length() + 1;
}
}
@Override
public Point getToolTipLocation(MouseEvent event) {
if (parserHighlight != null && !previousKinTypeStrings.isEmpty()) {
int textPosition = this.viewToModel(event.getPoint());
final String[] lineStrings = previousKinTypeStrings.substring(0, textPosition).split("\n");
int linePosition = lineStrings.length;
int lineChar = lineStrings[linePosition - 1].length();
ParserHighlight currentHighlight = parserHighlight[linePosition - 1];
while (currentHighlight.highlight != null && currentHighlight.nextHighlight.highlight != null && currentHighlight.nextHighlight.startChar <= lineChar) {
currentHighlight = currentHighlight.nextHighlight;
}
// this.setToolTipText("loc: " + textPosition + " line: " + linePosition + " char: " + lineChar + " startChar: " + currentHighlight.startChar + " : " + currentHighlight.tooltipText);
this.setToolTipText(currentHighlight.tooltipText);
} else {
this.setToolTipText(null);
}
return super.getToolTipLocation(event);
}
} |
package com.eclipserunner.model;
import static com.eclipserunner.Messages.Message_uncategorized;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.ui.IViewPart;
/**
* Class implementing {@link ITreeContentProvider} acts as a model for launch configuration tree.
* By default provides "uncategorized" category.
*
* @author vachacz
*/
public class LaunchTreeContentProvider implements ITreeContentProvider, ICategoryChangeListener, IRunnerModel {
private static LaunchTreeContentProvider model = new LaunchTreeContentProvider();
private List<IModelChangeListener> modelChangeListeners = new ArrayList<IModelChangeListener>();
private Set<ILaunchConfigurationCategory> launchConfigurationCategorySet;
private ILaunchConfigurationCategory uncategorizedCategory;
private IViewPart viewPart;
private LaunchTreeContentProvider() {
uncategorizedCategory = new LaunchConfigurationCategory();
uncategorizedCategory.setName(Message_uncategorized);
uncategorizedCategory.addCategoryChangeListener(this);
launchConfigurationCategorySet = new HashSet<ILaunchConfigurationCategory>();
launchConfigurationCategorySet.add(uncategorizedCategory);
}
public static LaunchTreeContentProvider getDefault() {
return model;
}
public Set<ILaunchConfigurationCategory> getLaunchConfigurationCategorySet() {
return launchConfigurationCategorySet;
}
public void addLaunchConfiguration(ILaunchConfiguration configuration) {
uncategorizedCategory.add(configuration);
fireModelChangedEvent();
}
public Object[] getChildren(Object object) {
if (launchConfigurationCategorySet.contains(object)) {
ILaunchConfigurationCategory launchConfigrationCategory = (ILaunchConfigurationCategory) object;
return launchConfigrationCategory.getLaunchConfigurationSet().toArray();
}
return null;
}
public Object getParent(Object object) {
if (object instanceof ILaunchConfiguration) {
return getParentCategory((ILaunchConfiguration) object);
}
return null;
}
public ILaunchConfigurationCategory getParentCategory(ILaunchConfiguration launchConfiguration) {
for (ILaunchConfigurationCategory category : launchConfigurationCategorySet) {
if (category.contains(launchConfiguration)) {
return category;
}
}
return null;
}
public boolean hasChildren(Object parent) {
if (launchConfigurationCategorySet.contains(parent)) {
ILaunchConfigurationCategory launchConfigrationCategory = (ILaunchConfigurationCategory) parent;
return !launchConfigrationCategory.isEmpty();
}
return false;
}
public Object[] getElements(Object parent) {
if (parent.equals(viewPart.getViewSite())) {
return launchConfigurationCategorySet.toArray();
}
return getChildren(parent);
}
public void dispose() {
}
public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
}
public void setViewPart(IViewPart viewPart) {
this.viewPart = viewPart;
}
/**
 * Creates a new named category, registers this provider for its change
 * events, adds it to the model and notifies model listeners.
 *
 * @return the newly created category
 */
public ILaunchConfigurationCategory addLaunchConfigurationCategory(String name) {
    LaunchConfigurationCategory created = new LaunchConfigurationCategory();
    created.setName(name);
    created.addCategoryChangeListener(this);
    launchConfigurationCategorySet.add(created);
    fireModelChangedEvent();
    return created;
}
/** Registers a listener notified on every model change. */
public void addChangeListener(IModelChangeListener listener) {
    modelChangeListeners.add(listener);
}
/** Unregisters a previously added model-change listener. */
public void removeChangeListener(IModelChangeListener listener) {
    modelChangeListeners.remove(listener);
}
/** Notifies every registered listener that the model changed. */
private void fireModelChangedEvent() {
    for (IModelChangeListener subscriber : modelChangeListeners) {
        subscriber.modelChanged();
    }
}
/** @return the built-in "uncategorized" category (always present). */
public ILaunchConfigurationCategory getUncategorizedCategory() {
    return uncategorizedCategory;
}
/**
 * Removes a category from the model, first moving its launch configurations
 * back to the built-in "uncategorized" category so they are not lost.
 * <p>
 * The original code left this move commented out because iterating the
 * category's own set while removing from it threw
 * ConcurrentModificationException; iterating over a snapshot copy avoids
 * that while restoring the intended behaviour.
 */
public void removeCategory(ILaunchConfigurationCategory category) {
    // Snapshot copy: we mutate the category's set inside the loop.
    for (ILaunchConfiguration launchConfiguration
            : new HashSet<ILaunchConfiguration>(category.getLaunchConfigurationSet())) {
        category.remove(launchConfiguration);
        uncategorizedCategory.add(launchConfiguration);
    }
    launchConfigurationCategorySet.remove(category);
    fireModelChangedEvent();
}
/** Category-change callback: any category change is a model change. */
public void categoryChanged() {
    fireModelChangedEvent();
}
/**
 * Looks up a category by exact name.
 *
 * @return the first category whose name equals {@code name}, or null
 */
public ILaunchConfigurationCategory getLaunchConfigurationCategory(String name) {
    for (ILaunchConfigurationCategory candidate : launchConfigurationCategorySet) {
        if (candidate.getName().equals(name)) {
            return candidate;
        }
    }
    return null;
}
} |
package org.reldb.rel.client;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.util.Stack;
import java.util.Vector;
import org.reldb.rel.client.Error;
import org.reldb.rel.client.connection.CrashHandler;
import org.reldb.rel.client.connection.CrashHandlerDefault;
import org.reldb.rel.client.connection.stream.ClientFromURL;
import org.reldb.rel.client.connection.stream.ClientLocalConnection;
import org.reldb.rel.client.connection.stream.InputStreamInterceptor;
import org.reldb.rel.client.connection.stream.StreamReceiverClient;
import org.reldb.rel.client.parser.ResponseAdapter;
import org.reldb.rel.client.parser.ResponseToHTML;
import org.reldb.rel.client.parser.ResponseToHTMLProgressive;
import org.reldb.rel.client.parser.core.ParseException;
import org.reldb.rel.client.parser.core.ResponseParser;
import org.reldb.rel.exceptions.DatabaseFormatVersionException;
/**
* Connection to a Rel database.
*
* @author dave
*/
public class Connection implements AutoCloseable {
    // Database URL; each request opens its own client against this URL.
    private String dbURL;
    // NOTE(review): never assigned in this class as shown, so
    // getServerAnnouncement() always returns "" — confirm whether a subclass
    // or removed code was meant to populate it.
    private String serverAnnouncement = "";
    // Handler invoked by the connection layer on crashes.
    private CrashHandler crashHandler;
    // Extra JARs made available for database development.
    private String[] additionalJars;
    // A server error line is recognised by this prefix (see ErrorMessageTrap).
    private final static String errorPrefix = "ERROR:";
    /**
     * Character accumulator that can be discarded ("cleared") and re-created.
     * ErrorMessageTrap uses it to collect a candidate error line.
     */
    private static class Snippet {
        private StringBuffer buffer = new StringBuffer();
        /** Discard the accumulation; isClear() becomes true. */
        public void clear() {
            buffer = null;
        }
        /** Begin a fresh, empty accumulation. */
        public void create() {
            buffer = new StringBuffer();
        }
        /** True when no accumulation is in progress. */
        public boolean isClear() {
            return (buffer == null);
        }
        /** Append one character, given as an int read from a stream. */
        public void append(int n) {
            buffer.append((char)n);
        }
        /** Accumulated text, or "" when clear. */
        public String toString() {
            if (buffer == null)
                return "";
            return buffer.toString();
        }
    }
    /** Creates new connection, with additional JAR support for database development. */
    public Connection(String dbURL, boolean createDbAllowed, CrashHandler crashHandler, String[] additionalJars) throws MalformedURLException, IOException, DatabaseFormatVersionException, ClassNotFoundException, NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        this.dbURL = dbURL;
        this.crashHandler = crashHandler;
        this.additionalJars = additionalJars;
        // Make sure it exists.  Opens and immediately closes a client purely to
        // validate the URL (and create the database when createDbAllowed is set).
        ClientFromURL.openConnection(dbURL, createDbAllowed, crashHandler, additionalJars).close();
    }
    /** Creates new connection. */
    public Connection(String dbURL, boolean createDbAllowed, CrashHandler crashHandler) throws MalformedURLException, IOException, DatabaseFormatVersionException, ClassNotFoundException, NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        this(dbURL, createDbAllowed, crashHandler, new String[0]);
    }
    /** Creates new connection using CrashHandlerDefault. */
    public Connection(String dbURL, boolean createDbAllowed) throws MalformedURLException, IOException, DatabaseFormatVersionException, ClassNotFoundException, NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        this(dbURL, createDbAllowed, new CrashHandlerDefault());
    }
    /** Creates new connection. Error thrown if database doesn't exist. */
    public Connection(String dbURL) throws MalformedURLException, IOException, DatabaseFormatVersionException, ClassNotFoundException, NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        this(dbURL, false);
    }
    // No per-Connection resources to release: each request opens and closes
    // its own StreamReceiverClient.
    @Override
    public void close() throws Exception {
    }
    /** Attempts update of a database.
     * @throws IOException
     * @throws DatabaseFormatVersionException */
    public static void convertToLatestFormat(String dbURL, PrintStream conversionOutput, String[] additionalJars) throws DatabaseFormatVersionException, IOException {
        ClientLocalConnection.convertToLatestFormat(dbURL, conversionOutput, additionalJars);
    }
    /** @return the database URL this connection targets. */
    public String getDbURL() {
        return dbURL;
    }
    /** @return the crash handler installed at construction. */
    public CrashHandler getCrashHandler() {
        return crashHandler;
    }
    /** @return additional JARs supplied at construction (possibly empty). */
    public String[] getAdditionalJars() {
        return additionalJars;
    }
    /** A unit of work executed against an open client (send or completion hook). */
    private abstract class Action {
        public abstract void run(StreamReceiverClient client) throws IOException;
    };
    /** Receives every character of the raw server response. */
    public interface CharacterListener {
        public void receive(int character);
    }
    // Vector (synchronized) because listeners are invoked from parser threads.
    private Vector<CharacterListener> characterListeners = new Vector<CharacterListener>();
    /** Add listener which receives every character in the response. */
    public void addCharacterListener(CharacterListener listener) {
        characterListeners.addElement(listener);
    }
    /** Remove listener which receives every character in the response. */
    public void removeCharacterListener(CharacterListener listener) {
        characterListeners.removeElement(listener);
    }
    /** Override to obtain every character received in the response. If super.capturedResponseStream(character) is not
     * called in an overridden capturedResponseStream, addCharacterListener becomes a no-op. */
    protected void capturedResponseStream(int character) {
        for (CharacterListener listener: characterListeners)
            listener.receive(character);
    }
    /**
     * Stream interceptor that watches the response for a line beginning with
     * {@link #errorPrefix}.  After each newline it starts accumulating; if the
     * accumulated text stops matching the "ERROR:" prefix it is discarded and
     * accumulation resumes at the next newline.  toString() therefore yields
     * the most recent candidate error line (or "").
     */
    private class ErrorMessageTrap extends InputStreamInterceptor {
        private Snippet errorMessageTrap = new Snippet();
        ErrorMessageTrap(InputStream input) {
            super(input);
        }
        public void interceptedRead(int r) {
            // Forward every character to registered CharacterListeners first.
            capturedResponseStream(r);
            if (errorMessageTrap.isClear()) {
                if (r == '\n')
                    errorMessageTrap.create();
            } else {
                errorMessageTrap.append(r);
                String possibleErrorStr = errorMessageTrap.toString();
                // Abandon the candidate as soon as it provably isn't an error line.
                if (possibleErrorStr.length() >= errorPrefix.length() && !possibleErrorStr.startsWith(errorPrefix))
                    errorMessageTrap.clear();
            }
        }
        public String toString() {
            return errorMessageTrap.toString();
        }
    }
    private void launchTransmitter(final StreamReceiverClient client, final Action action) {
        // Transmit needs to run in separate thread for local connection, or the pipe will deadlock.
        Thread sendRunner = new Thread() {
            public void run() {
                try {
                    action.run(client);
                } catch (IOException e) {
                    e.printStackTrace();
                    return;
                }
            }
        };
        sendRunner.start();
    }
    /**
     * Opens a fresh client, starts a parser thread that builds the result
     * Value tree from the server response, then starts the transmitter for
     * sendAction.  Returns the Response immediately; its result is filled in
     * asynchronously.  receiveComplete (may be null) runs after parsing, just
     * before the client is closed.
     */
    private Response launchParser(final Action sendAction, final Action receiveComplete) {
        final Response response = new Response();
        final StreamReceiverClient client;
        try {
            client = ClientFromURL.openConnection(dbURL, false, crashHandler, additionalJars);
        } catch (Exception e) {
            // Connection failure is reported through the Response, not thrown.
            response.setResult(new Error(e.toString()));
            return response;
        }
        Thread parseRunner = new Thread() {
            public void run() {
                ErrorMessageTrap errorMessageTrap;
                ResponseParser parser;
                try {
                    errorMessageTrap = new ErrorMessageTrap(client.getServerResponseInputStream());
                    parser = new ResponseParser(errorMessageTrap);
                } catch (Throwable e1) {
                    e1.printStackTrace();
                    return;
                }
                // The handler assembles nested Values using two stacks: one for
                // values being built, one for headings being built.
                parser.setResponseHandler(new ResponseAdapter() {
                    Stack<Value> valueReceiver = new Stack<Value>();
                    Stack<Heading> headingReceiver = new Stack<Heading>();
                    // Pop the finished value; attach it to its parent, or make
                    // it the overall result when it is the outermost value.
                    private void endData() {
                        Value value = valueReceiver.pop();
                        if (valueReceiver.size() > 0)
                            valueReceiver.peek().addValue(value, false);
                        else
                            response.setResult(value);
                    }
                    public void beginHeading(String typeName) {
                        Heading heading = new Heading(typeName);
                        headingReceiver.push(heading);
                    }
                    public Heading endHeading() {
                        Heading heading = headingReceiver.pop();
                        // Nested heading becomes an attribute type of its parent.
                        if (headingReceiver.size() > 0)
                            headingReceiver.peek().addAttributeType(heading);
                        return heading;
                    }
                    public void attributeName(String name) {
                        headingReceiver.peek().addAttributeName(name);
                    }
                    public void typeReference(String name) {
                        headingReceiver.peek().addAttributeType(new ScalarType(name));
                    }
                    public void beginScalar(int depth) {
                        valueReceiver.push(new Scalar());
                    }
                    public void endScalar(int depth) {
                        endData();
                    }
                    public void beginPossrep(String name) {
                        valueReceiver.push(new Selector(name));
                    }
                    public void endPossrep() {
                        endData();
                    }
                    public void primitive(String value, boolean quoted) {
                        valueReceiver.peek().addValue(new Scalar(value, quoted), quoted);
                    }
                    public void beginContainerBody(int depth, Heading heading, String typeName) {
                        Tuples tuples = (heading == null) ? new Tuples(typeName) : new Tuples(heading);
                        // Top-level container is published immediately so callers
                        // can begin consuming tuples as they stream in.
                        if (depth == 0)
                            response.setResult(tuples);
                        valueReceiver.push(tuples);
                    }
                    public void endContainer(int depth) {
                        Tuples tuples = (Tuples)valueReceiver.peek();
                        // Null tuple marks end-of-stream for consumers.
                        tuples.insertNullTuple();
                        endData();
                    }
                    public void beginTuple(int depth) {
                        valueReceiver.push(new Tuple());
                    }
                    public void endTuple(int depth) {
                        endData();
                    }
                    public void attributeNameInTuple(int depth, String name) {
                        ((Tuple)valueReceiver.peek()).addAttributeName(name);
                    }
                });
                try {
                    parser.parse();
                } catch (ParseException e) {
                    // Debug client-side response parser problems here.
                    // System.out.println("Connection: " + e);
                    // An unparseable response is assumed to be a server error
                    // line captured by the trap.
                    response.setResult(new Error(errorMessageTrap.toString()));
                }
                try {
                    if (receiveComplete != null)
                        receiveComplete.run(client);
                    client.close();
                } catch (IOException e) {
                    System.out.println("Connection: run failed: " + e);
                    e.printStackTrace();
                }
            }
        };
        parseRunner.start();
        // Transmitter is started only after the parser is ready to consume,
        // avoiding a pipe deadlock on local connections.
        launchTransmitter(client, sendAction);
        return response;
    }
    /** Callback sink for responses rendered progressively as HTML. */
    public interface HTMLReceiver {
        public void emitInitialHTML(String s);
        public void endInitialHTML();
        public void emitProgressiveHTML(String s);
        public void endProgressiveHTMLRow();
    }
    /**
     * Like launchParser(), but renders the response to HTML through the given
     * receiver instead of building a Value tree.
     */
    private void launchParserToHTML(final Action action, final HTMLReceiver htmlReceiver) {
        final StreamReceiverClient client;
        try {
            client = ClientFromURL.openConnection(dbURL, false, crashHandler, additionalJars);
        } catch (Exception e) {
            // " " is a non-breaking space so the message survives HTML collapse.
            htmlReceiver.emitInitialHTML("Unable to open connection: " + e.toString().replace(" ", " "));
            return;
        }
        Thread parseRunner = new Thread() {
            public void run() {
                ErrorMessageTrap errorMessageTrap;
                ResponseToHTML parser;
                try {
                    errorMessageTrap = new ErrorMessageTrap(client.getServerResponseInputStream());
                    // Progressive renderer forwards its output to the receiver.
                    parser = new ResponseToHTMLProgressive(errorMessageTrap) {
                        public void emitInitialHTML(String s) {
                            htmlReceiver.emitInitialHTML(s);
                        }
                        public void endInitialHTML() {
                            htmlReceiver.endInitialHTML();
                        }
                        public void emitProgressiveHTML(String s) {
                            htmlReceiver.emitProgressiveHTML(s);
                        }
                        public void endProgressiveHTMLRow() {
                            htmlReceiver.endProgressiveHTMLRow();
                        }
                    };
                } catch (Throwable e1) {
                    e1.printStackTrace();
                    return;
                }
                try {
                    parser.parse();
                } catch (ParseException e) {
                    // Debug client-side response parser problems here.
                    // System.out.println("Connection: " + e);
                    htmlReceiver.emitInitialHTML(errorMessageTrap.toString().replace(" ", " "));
                }
                try {
                    client.close();
                } catch (IOException e) {
                    System.out.println("Connection: close failed: " + e);
                    e.printStackTrace();
                }
            }
        };
        parseRunner.start();
        launchTransmitter(client, action);
    }
    /** Simple mutable flag used as a wait/notify rendezvous in execute(). */
    private static class Indicator {
        boolean indicated = false;
        public void setIndicated(boolean indicated) {this.indicated = indicated;}
        boolean isIndicated() {return this.indicated;}
    }
    /** Execute query and return Response.  Blocks until the server has finished responding. */
    public Response execute(final String input) throws IOException {
        final Indicator finished = new Indicator();
        Response response = launchParser(
            new Action() {
                public void run(StreamReceiverClient client) throws IOException {
                    client.sendExecute(input);
                }
            },
            // Completion hook, run on the parser thread after parsing: flags
            // completion and wakes the caller waiting below.
            new Action() {
                public void run(StreamReceiverClient client) throws IOException {
                    synchronized (finished) {
                        finished.setIndicated(true);
                        finished.notify();
                    }
                }
            }
        );
        // Standard guarded wait: loop protects against spurious wakeups.
        synchronized (finished) {
            while (!finished.isIndicated())
                try {
                    finished.wait();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
        }
        return response;
    }
    /** Get server announcement. */
    public String getServerAnnouncement() {
        return serverAnnouncement;
    }
    /** Evaluate query and return Response.  Non-blocking: the result arrives asynchronously. */
    public Response evaluate(final String input) throws IOException {
        return launchParser(new Action() {
            public void run(StreamReceiverClient client) throws IOException {
                client.sendEvaluate(input);
            }
        }, null);
    }
    /** Evaluate query and emit response as HTML. */
    public void evaluate(final String input, final HTMLReceiver htmlReceiver) {
        launchParserToHTML(new Action() {
            public void run(StreamReceiverClient client) throws IOException {
                client.sendEvaluate(input);
            }
        }, htmlReceiver);
    }
    /** Outcome wrapper for exec(): distinguishes connection failure, server error, and success. */
    public static class ExecuteResult {
        private Response response;
        public ExecuteResult(Response response) {
            this.response = response;
        }
        /** True on connection failure (null response) or server Error result. */
        public boolean failed() {
            return (response == null || response.getResult() instanceof Error);
        }
        /** Human-readable error, trimmed at the <EOT> marker when present. */
        public String getErrorMessage() {
            if (response == null)
                return "Connection failed.";
            if (response.getResult() instanceof Error) {
                String error = ((Error)response.getResult()).getErrorMsg();
                int EOTposition = error.indexOf("<EOT>");
                if (EOTposition >= 0)
                    error = error.substring(0, EOTposition);
                return error;
            }
            return "Unknown error.";
        }
    }
    /** Execute query.  IOExceptions are converted to a failed ExecuteResult. */
    public ExecuteResult exec(String query) {
        try {
            return new ExecuteResult(execute(query));
        } catch (IOException e1) {
            return new ExecuteResult(null);
        }
    }
    /**
     * Evaluate query, waiting up to queryWaitMilliseconds for the result.
     * Returns null (with console diagnostics) on I/O failure, server error,
     * or timeout.
     */
    public Value eval(String query, int queryWaitMilliseconds) {
        Value response;
        try {
            response = evaluate(query).awaitResult(queryWaitMilliseconds);
        } catch (IOException e) {
            System.out.println("Connection: Error: " + e);
            e.printStackTrace();
            return null;
        }
        if (response instanceof Error) {
            Error error = (Error)response;
            System.out.println("Connection: Query evaluate returns error. " + query + "\n" + error.getErrorMsg());
            return null;
        }
        if (response == null) {
            System.out.println("Connection: Unable to obtain query results.");
            return null;
        }
        return response;
    }
    /** Evaluate query that returns tuples.  Returns null on failure (see eval). */
    public Tuples getTuples(String query, int queryWaitMilliseconds) {
        return (Tuples)eval(query, queryWaitMilliseconds);
    }
}
package nl.mpi.kinnate.ui;
import java.awt.Color;
import java.awt.Point;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import javax.swing.JTextPane;
import javax.swing.text.Style;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyledDocument;
import nl.mpi.kinnate.kintypestrings.ParserHighlight;
public class KinTypeStringInput extends JTextPane {
private String previousKinTypeStrings = null;
private String defaultString;
private Color defaultColour = Color.GRAY;
protected Style styleComment;
protected Style styleKinType;
protected Style styleQuery;
protected Style styleParamater;
protected Style styleError;
protected Style styleUnknown;
private ParserHighlight[] parserHighlight = null;
public KinTypeStringInput(String defaultString) {
this.defaultString = defaultString;
this.setToolTipText("");
// set the styles for the kin type string text
styleComment = this.addStyle("Comment", null);
// StyleConstants.setForeground(styleComment, new Color(247,158,9));
StyleConstants.setForeground(styleComment, Color.GRAY);
styleKinType = this.addStyle("KinType", null);
StyleConstants.setForeground(styleKinType, new Color(43, 32, 161));
styleQuery = this.addStyle("Query", null);
StyleConstants.setForeground(styleQuery, new Color(183, 7, 140));
styleParamater = this.addStyle("Parameter", null);
StyleConstants.setForeground(styleParamater, new Color(103, 7, 200));
styleError = this.addStyle("Error", null);
// StyleConstants.setForeground(styleError, new Color(172,3,57));
StyleConstants.setForeground(styleError, Color.RED);
styleUnknown = this.addStyle("Unknown", null);
StyleConstants.setForeground(styleUnknown, Color.BLACK);
this.addFocusListener(new FocusListener() {
public void focusGained(FocusEvent e) {
clearDefaultKinTypeInput();
}
public void focusLost(FocusEvent e) {
checkKinTypeInput();
}
});
this.setText(defaultString);
this.setForeground(defaultColour);
}
public void setDefaultText() {
this.setText(defaultString);
this.setForeground(defaultColour);
}
public void clearDefaultKinTypeInput() {
if (this.getText().equals(defaultString)) {
this.setText("");
// kinTypeStringInput.setForeground(Color.BLACK);
}
}
public void checkKinTypeInput() {
if (this.getText().length() == 0) {
this.setText(defaultString);
this.setForeground(defaultColour);
}
}
public String[] getCurrentStrings() {
previousKinTypeStrings = this.getText();
return this.getText().split("\n");
}
public boolean hasChanges() {
return (previousKinTypeStrings == null || !previousKinTypeStrings.equals(this.getText()));
}
protected void highlightKinTerms(ParserHighlight[] parserHighlight, String[] kinTypeStrings) {
this.parserHighlight = parserHighlight;
StyledDocument styledDocument = this.getStyledDocument();
int lineStart = 0;
for (int lineCounter = 0; lineCounter < parserHighlight.length; lineCounter++) {
ParserHighlight currentHighlight = parserHighlight[lineCounter];
// int lineStart = styledDocument.getParagraphElement(lineCounter).getStartOffset();
// int lineEnd = styledDocument.getParagraphElement(lineCounter).getEndOffset();
int lineEnd = lineStart + kinTypeStrings[lineCounter].length();
styledDocument.setCharacterAttributes(lineStart, lineEnd, this.getStyle("Unknown"), true);
while (currentHighlight.highlight != null) {
int startPos = lineStart + currentHighlight.startChar;
int charCount = lineEnd - lineStart;
if (currentHighlight.nextHighlight.highlight != null) {
charCount = currentHighlight.nextHighlight.startChar - currentHighlight.startChar;
}
if (currentHighlight.highlight != null) {
String styleName = currentHighlight.highlight.name();
styledDocument.setCharacterAttributes(startPos, charCount, this.getStyle(styleName), true);
}
currentHighlight = currentHighlight.nextHighlight;
}
lineStart += kinTypeStrings[lineCounter].length() + 1;
}
}
@Override
public Point getToolTipLocation(MouseEvent event) {
if (parserHighlight != null) {
int textPosition = this.viewToModel(event.getPoint());
final String[] lineStrings = previousKinTypeStrings.substring(0, textPosition).split("\n");
int linePosition = lineStrings.length;
int lineChar = lineStrings[linePosition - 1].length();
ParserHighlight currentHighlight = parserHighlight[linePosition - 1];
while (currentHighlight.highlight != null && currentHighlight.nextHighlight.highlight != null && currentHighlight.nextHighlight.startChar <= lineChar) {
currentHighlight = currentHighlight.nextHighlight;
}
// this.setToolTipText("loc: " + textPosition + " line: " + linePosition + " char: " + lineChar + " startChar: " + currentHighlight.startChar + " : " + currentHighlight.tooltipText);
this.setToolTipText(currentHighlight.tooltipText);
} else {
this.setToolTipText(null);
}
return super.getToolTipLocation(event);
}
} |
package com.godroup.plugins.movetogroup;
import java.io.File;
import java.util.ArrayList;
import static java.util.Arrays.asList;
import java.util.Collection;
import java.util.List;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import static java.util.stream.Collectors.toList;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectInformation;
import org.netbeans.api.project.ProjectUtils;
import org.netbeans.api.project.ui.OpenProjects;
import org.netbeans.api.project.ui.ProjectGroup;
import org.openide.util.Exceptions;
import org.openide.util.NbPreferences;
import org.openide.util.Utilities;
import org.openide.windows.TopComponent;
public final class ProjectGroupUtils {
/**
* Get active project group name.
*
* @return
*/
public static String getActiveProjectGroup() {
ProjectGroup activeProjectGroup = OpenProjects.getDefault().getActiveProjectGroup();
if (activeProjectGroup == null) {
return null;
}
return activeProjectGroup.getName();
}
/**
* Get selected project information list.
*
* @return
*/
public static List<ProjectInformation> getCurrentProjectList() {
Collection<? extends Project> currentProjects = TopComponent.getRegistry().getActivated().getLookup().lookupAll(Project.class);
if (currentProjects == null) {
return null;
}
return currentProjects.stream().map(t -> ProjectUtils.getInformation(t)).collect(toList());
}
/**
* Add project path to group, and close selected project(s).
*
* @param groupId
* @param projectInfo
* @return
*/
public static boolean moveToGroup(String groupId, ProjectInformation projectInfo) {
String preferPath = "org/netbeans/modules/projectui/groups/" + groupId;
Preferences groupNode = getPreferences(preferPath);
if (null != groupNode) {
// Append new project path
String oldPath = groupNode.get("path", null);
String newPath = Utilities.toURI(new File(projectInfo.getProject().getProjectDirectory().getPath())).toString();
if (oldPath != null) {
newPath = oldPath + " " + newPath;
}
groupNode.put("path", newPath.replace("null ", ""));
// Close current project.
OpenProjects.getDefault().close(new Project[]{projectInfo.getProject()});
return true;
}
return false;
}
/**
* Get all project groups.
*
* @return String[0]=groupId, String[1]=groupName
*/
public static List<String[]> getProjectGroups() {
Preferences groupNode = getPreferences("org/netbeans/modules/projectui/groups");
if (groupNode != null) {
try {
List<String> childrenNames = asList(groupNode.childrenNames());
return childrenNames.stream().map(t -> {
String groupId = t;
String groupName = groupId;
Preferences childGroupNode = getPreferences("org/netbeans/modules/projectui/groups/" + groupId);
if (childGroupNode != null) {
groupName = childGroupNode.get("name", null);
}
return new String[] {groupId, groupName};
}).collect(toList());
} catch (BackingStoreException ex) {
Exceptions.printStackTrace(ex);
}
}
return new ArrayList<>(0);
}
/**
* Get the preference for the given node path.
*
* @param path configuration path like "org/netbeans/modules/projectui"
* @return {@link Preferences} or null
*/
private static Preferences getPreferences(String path) {
try {
if (NbPreferences.root().nodeExists(path)) {
return NbPreferences.root().node(path);
}
} catch (BackingStoreException ex) {
Exceptions.printStackTrace(ex);
}
return null;
}
} |
// Package
package com.hp.hpl.jena.ontology.impl.test;
// Imports
import java.io.*;
import java.util.*;
import com.hp.hpl.jena.graph.*;
import com.hp.hpl.jena.graph.impl.*;
import com.hp.hpl.jena.mem.GraphMem;
import com.hp.hpl.jena.ontology.*;
import com.hp.hpl.jena.rdf.model.*;
import com.hp.hpl.jena.vocabulary.OWL;
import junit.framework.*;
/**
* <p>
* Unit tests that are derived from user bug reports
* </p>
*
* @author Ian Dickinson, HP Labs
* (<a href="mailto:Ian.Dickinson@hp.com" >email</a>)
* @version CVS $Id: TestBugReports.java,v 1.9 2003-08-20 09:41:44 ian_dickinson Exp $
*/
public class TestBugReports
extends TestCase
{
// Constants
// NOTE(review): several string literals in this class are truncated mid-literal
// (unterminated quotes) — e.g. the NS constant below and the RDF/XML snippets
// in test_hk_01 and test_sjooseng_01.  This looks like extraction/encoding
// damage, not author intent; the original literals must be restored from the
// upstream Jena source before this file compiles.
public static String NS = "http://example.org/test
// Static variables
// Instance variables
/** Standard JUnit 3 constructor: test name is passed to TestCase. */
public TestBugReports( String name ) {
super( name );
}
// Constructors
// External signature methods
/**
* Bug report by Mariano Rico Almodvar [Mariano.Rico@uam.es] on June 16th. Said to raise exception.
*/
public void test_mra_01() {
OntModel m = ModelFactory.createOntologyModel(
OntModelSpec.DAML_MEM,
null,
null);
// NOTE(review): the two URI literals below are truncated (see class note).
String myDicURI = "http://somewhere/myDictionaries/1.0
String damlURI = "http://www.daml.org/2001/03/daml+oil
m.setNsPrefix("DAML", damlURI);
String c1_uri = myDicURI + "C1";
OntClass c1 = m.createClass(c1_uri);
DatatypeProperty p1 = m.createDatatypeProperty( myDicURI + "P1");
p1.setDomain(c1);
// Serialising to a buffer is the point of the test: the bug was an
// exception raised during RDF/XML-ABBREV output.
ByteArrayOutputStream strOut = new ByteArrayOutputStream();
m.write(strOut,"RDF/XML-ABBREV", myDicURI);
//m.write(System.out,"RDF/XML-ABBREV", myDicURI);
}
/** Bug report from Holger Knublauch on July 25th 2003. Cannot convert owl:Class to an OntClass */
public void test_hk_01() {
// synthesise a mini-document
// NOTE(review): base and the xmlns/resource literals below are truncated
// (see class note).
String base = "http://jena.hpl.hp.com/test
String doc = "<rdf:RDF" +
" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns
" xmlns:owl=\"http://www.w3.org/2002/07/owl
" <owl:Ontology rdf:about=\"\">" +
" <owl:imports rdf:resource=\"http:
" </owl:Ontology>" +
"</rdf:RDF>";
// read in the base ontology, which includes the owl language definition
// note OWL_MEM => no reasoner is used
OntModel m = ModelFactory.createOntologyModel( OntModelSpec.OWL_MEM, null );
m.read( new ByteArrayInputStream( doc.getBytes() ), base );
// we need a resource corresponding to OWL Class but in m
Resource owlClassRes = m.getResource( OWL.Class.getURI() );
// now can we see this as an OntClass?
OntClass c = (OntClass) owlClassRes.as( OntClass.class );
assertNotNull( "OntClass c should not be null", c );
//(OntClass) (ontModel.getProfile().CLASS()).as(OntClass.class);
}
/** Bug report from Hoger Knublauch on Aug 19th 2003. NPE when setting all distinct members */
public void test_hk_02() {
OntModelSpec spec = new OntModelSpec(OntModelSpec.OWL_MEM);
spec.setReasoner(null);
OntModel ontModel = ModelFactory.createOntologyModel(spec, null); // ProfileRegistry.OWL_LANG);
ontModel.createAllDifferent();
assertTrue(ontModel.listAllDifferent().hasNext());
AllDifferent allDifferent = (AllDifferent)ontModel.listAllDifferent().next();
//allDifferent.setDistinct(ontModel.createList());
assertFalse(allDifferent.listDistinctMembers().hasNext());
}
/** Bug report from Holger Knublauch on Aug 19th, 2003. Initialisation error */
public void test_hk_03() {
OntModelSpec spec = new OntModelSpec(OntModelSpec.OWL_MEM);
spec.setReasoner(null);
OntModel ontModel = ModelFactory.createOntologyModel(spec, null);
OntProperty property = ontModel.createObjectProperty("http://www.aldi.de#property");
// Creating the restriction is the test: the bug was an initialisation
// error thrown here (the local is intentionally unused).
MinCardinalityRestriction testClass = ontModel.createMinCardinalityRestriction( null, property, 42);
}
/**
* Bug report by federico.carbone@bt.com, 30-July-2003. A literal can be
* turned into an individual.
*/
public void test_fc_01() {
OntModel m = ModelFactory.createOntologyModel();
ObjectProperty p = m.createObjectProperty( NS + "p" );
Restriction r = m.createRestriction( p );
HasValueRestriction hv = r.convertToHasValueRestriction( m.createLiteral( 1 ) );
RDFNode n = hv.getHasValue();
assertFalse( "Should not be able to convert literal to individual", n.canAs( Individual.class ) );
}
/**
* Bug report by Christoph Kunze (Christoph.Kunz@iao.fhg.de). 18/Aug/03
* No transaction support in ontmodel.
*/
public void test_ck_01() {
// Graph with an instrumented transaction handler so we can observe
// begin/abort/commit propagating through the OntModel wrapper.
Graph g = new GraphMem() {
TransactionHandler m_t = new MockTransactionHandler();
public TransactionHandler getTransactionHandler() {return m_t;}
};
Model m0 = ModelFactory.createModelForGraph( g );
OntModel m1 = ModelFactory.createOntologyModel( OntModelSpec.OWL_LITE_MEM, m0 );
assertFalse( "Transaction not started yet", ((MockTransactionHandler) m1.getGraph().getTransactionHandler()).m_inTransaction );
m1.begin();
assertTrue( "Transaction started", ((MockTransactionHandler) m1.getGraph().getTransactionHandler()).m_inTransaction );
m1.abort();
assertFalse( "Transaction aborted", ((MockTransactionHandler) m1.getGraph().getTransactionHandler()).m_inTransaction );
assertTrue( "Transaction aborted", ((MockTransactionHandler) m1.getGraph().getTransactionHandler()).m_aborted);
m1.begin();
assertTrue( "Transaction started", ((MockTransactionHandler) m1.getGraph().getTransactionHandler()).m_inTransaction );
m1.commit();
assertFalse( "Transaction committed", ((MockTransactionHandler) m1.getGraph().getTransactionHandler()).m_inTransaction );
assertTrue( "Transaction committed", ((MockTransactionHandler) m1.getGraph().getTransactionHandler()).m_committed);
}
/**
* Bug report by sjooseng [sjooseng@hotmail.com]. CCE in listOneOf in Enumerated
* Class with DAML profile.
*/
public void test_sjooseng_01() {
// NOTE(review): most lines of this embedded RDF/XML document are
// truncated mid-literal (see class note).
String source =
"<rdf:RDF xmlns:daml='http://www.daml.org/2001/03/daml+oil
" xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns
" xmlns:rdfs='http://www.w3.org/2000/01/rdf-schema
" <daml:Class rdf:about='http://localhost:8080/kc2c
" <daml:subClassOf>" +
" <daml:Restriction>" +
" <daml:onProperty rdf:resource='http://localhost:8080/kc2c
" <daml:hasClass>" +
" <daml:Class>" +
" <daml:oneOf rdf:parseType=\"daml:collection\">" +
" <daml:Thing rdf:about='http://localhost:8080/kc2c
" <daml:Thing rdf:about='http://localhost:8080/kc2c
" </daml:oneOf>" +
" </daml:Class>" +
" </daml:hasClass>" +
" </daml:Restriction>" +
" </daml:subClassOf>" +
" </daml:Class>" +
" <daml:ObjectProperty rdf:about='http://localhost:8080/kc2c
" <rdfs:label>p1</rdfs:label>" +
" </daml:ObjectProperty>" +
"</rdf:RDF>" ;
OntModel m = ModelFactory.createOntologyModel( ProfileRegistry.DAML_LANG );
m.read( new ByteArrayInputStream( source.getBytes() ), "http://localhost:8080/kc2c" );
OntClass kc1 = m.getOntClass( "http://localhost:8080/kc2c
boolean found = false;
// Walk the superclasses looking for the restriction; the original bug
// was a ClassCastException inside listOneOf on the enumerated class.
Iterator it = kc1.listSuperClasses( false );
while ( it.hasNext() ) {
OntClass oc = (OntClass)it.next();
if ( oc.isRestriction() ) {
Restriction r = oc.asRestriction();
if ( r.isSomeValuesFromRestriction() ) {
SomeValuesFromRestriction sr = r.asSomeValuesFromRestriction();
OntClass sc = (OntClass) sr.getSomeValuesFrom();
if ( sc.isEnumeratedClass() ) {
EnumeratedClass ec = sc.asEnumeratedClass();
assertEquals( "Enumeration size should be 2", 2, ec.getOneOf().size() );
found = true;
}
}
}
}
assertTrue( found );
}
// Internal implementation methods
// Inner class definitions
/**
* Transaction handler that records begin/abort/commit calls so tests can
* assert on transaction state transitions.
*/
class MockTransactionHandler extends SimpleTransactionHandler {
boolean m_inTransaction = false;
boolean m_aborted = false;
boolean m_committed = false;
public void begin() {m_inTransaction = true;}
public void abort() {m_inTransaction = false; m_aborted = true;}
public void commit() {m_inTransaction = false; m_committed = true;}
}
}
package org.displaytag.util;
import java.net.URLEncoder;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.lang.StringUtils;
/**
* A RequestHelper object is used to read parameters from the request. Main feature are handling of numeric parameters
* and the ability to create Href objects from the current request.
*
* @author fgiust
* @version $Revision$ ($Author$)
* @see org.displaytag.util.Href
*/
public class RequestHelper
{
/**
* logger.
*/
private static Log log = LogFactory.getLog(RequestHelper.class);
/**
* original HttpServletRequest.
*/
private HttpServletRequest request;
/**
* Construct a new RequestHelper for the given request.
*
* @param servletRequest HttpServletRequest
*/
public RequestHelper(HttpServletRequest servletRequest)
{
this.request = servletRequest;
}
/**
* Read a String parameter from the request.
*
* @param key String parameter name
* @return String parameter value
*/
public final String getParameter(String key)
{
// actually simply return the parameter, this behaviour could be changed
return this.request.getParameter(key);
}
/**
* Read a Integer parameter from the request.
*
* @param key String parameter name
* @return Integer parameter value or null if the parameter is not found or it can't be transformed to an Integer
*/
public final Integer getIntParameter(String key)
{
String value = this.request.getParameter(key);
if (value != null)
{
try
{
return new Integer(value);
}
catch (NumberFormatException e)
{
// It's ok to ignore, simply return null
log.debug("Invalid \"" + key + "\" parameter from request: value=\"" + value + "\"");
}
}
return null;
}
/**
* Returns a Map containing all the parameters in the request.
*
* @return Map
*/
public final Map getParameterMap()
{
Map map = new HashMap();
// get the parameters names
Enumeration parametersName = this.request.getParameterNames();
while (parametersName.hasMoreElements())
{
// ... get the value
String paramName = (String) parametersName.nextElement();
// put key/value in the map
String[] values = this.request.getParameterValues(paramName);
for (int i = 0; i < values.length; i++)
{
// values[i] = URLEncoder.encode(values[i], "UTF-8");
// deprecated in java 1.4, but still need this for jre 1.3 compatibility
values[i] = URLEncoder.encode(StringUtils.defaultString(values[i]));
}
map.put(paramName, values);
}
// return the Map
return map;
}
/**
* return the current Href for the request (base url and parameters).
*
* @return Href
*/
public final Href getHref()
{
Href href = new Href(this.request.getRequestURI());
href.setParameterMap(getParameterMap());
return href;
}
} |
package com.jcwhatever.nucleus.utils.items;
import com.jcwhatever.nucleus.Nucleus;
import com.jcwhatever.nucleus.managed.items.serializer.IItemStackDeserializer;
import com.jcwhatever.nucleus.managed.items.serializer.IItemStackSerializer;
import com.jcwhatever.nucleus.managed.items.serializer.InvalidItemStackStringException;
import com.jcwhatever.nucleus.utils.CollectionUtils;
import com.jcwhatever.nucleus.utils.PreCon;
import com.jcwhatever.nucleus.utils.materials.Materials;
import com.jcwhatever.nucleus.utils.materials.NamedMaterialData;
import com.jcwhatever.nucleus.utils.potions.PotionNames;
import com.jcwhatever.nucleus.utils.text.TextUtils;
import org.bukkit.Color;
import org.bukkit.Material;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.LeatherArmorMeta;
import org.bukkit.inventory.meta.PotionMeta;
import org.bukkit.material.MaterialData;
import org.bukkit.potion.Potion;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.regex.Pattern;
/**
* {@link ItemStack} utilities.
*/
public final class ItemStackUtils {
private static final Pattern PATTERN_REPLACE_UNDERSCORE = Pattern.compile("_", Pattern.LITERAL);
private ItemStackUtils() {}
public static final ItemStack AIR = new ItemStack(Material.AIR);
/**
* Used to specify if a display name for an {@link ItemStack} is required or
* optional.
*/
public enum DisplayNameOption {
/**
* Name is required and the items material name can be used as a substitute
* if a display name is not set.
*/
REQUIRED,
/**
* Name is optional and null is expected if the display name is not set.
*/
OPTIONAL
}
/**
* Convert an object to an {@link ItemStack} if possible.
*
* <p>The following are valid arguments that can be converted:</p>
* <ul>
* <li>{@link ItemStack}</li>
* <li>{@link Material}</li>
* <li>{@link MaterialData}</li>
* <li>The name or alternate name of a material. Valid names
* are from {@link NamedMaterialData}.</li>
* <li>A item stack string serialized by an {@link IItemStackSerializer}.</li>
* </ul>
*
* @param object The object to retrieve an {@link ItemStack} from.
*
* @return An {@link ItemStack} or null if failed.
*/
public static ItemStack getItemStack(Object object) {
ItemStack result = null;
if (object instanceof ItemStack) {
result = (ItemStack) object;
}
else if (object instanceof String) {
String str = (String)object;
MaterialData data = NamedMaterialData.get(str);
if (data != null) {
result = data.toItemStack();
}
else {
try {
ItemStack[] itemStacks = parse(str);
if (itemStacks != null && itemStacks.length > 0)
result = itemStacks[0];
} catch (InvalidItemStackStringException ignore) {}
}
}
else if (object instanceof Material) {
result = new ItemStack((Material)object);
}
else if (object instanceof MaterialData) {
result = ((MaterialData)object).toItemStack();
}
if (result != null && result.getAmount() <= 0) {
result.setAmount(1);
}
return result;
}
/**
* Set the lore on an {@link ItemStack}.
*
* @param stack The {@link ItemStack}.
* @param lore The lore to set.
*/
public static void setLore(ItemStack stack, @Nullable List<String> lore) {
PreCon.notNull(stack);
ItemMeta meta = stack.getItemMeta();
if (meta == null) {
return;
}
meta.setLore(lore);
stack.setItemMeta(meta);
}
/**
* Set the lore on an {@link ItemStack}.
*
* @param stack The item stack.
* @param lore The lore to set.
*/
public static void setLore(ItemStack stack, @Nullable String lore) {
PreCon.notNull(stack);
ItemMeta meta = stack.getItemMeta();
if (meta == null) {
return;
}
List<String> list = new ArrayList<String>(5);
if (lore != null)
list.add(lore);
meta.setLore(list);
stack.setItemMeta(meta);
}
/**
* Gets the lore from an {@link ItemStack}.
*
* @param stack The {@link ItemStack}.
*/
public static List<String> getLore(ItemStack stack) {
PreCon.notNull(stack);
ItemMeta meta = stack.getItemMeta();
if (meta == null)
return CollectionUtils.unmodifiableList();
List<String> result = meta.getLore();
if (result == null)
return CollectionUtils.unmodifiableList();
return CollectionUtils.unmodifiableList(result);
}
/**
* Repair an item stack
*
* @param item The item stack.
*/
public static void repair(ItemStack item) {
PreCon.notNull(item);
if (!Materials.isRepairable(item.getType()))
return;
item.setDurability((short)0);
}
/**
* Repair an array of {@link ItemStack}.
*
* @param items The array of {@link ItemStack} to repair.
*/
public static void repair(ItemStack[] items) {
PreCon.notNull(items);
if (items.length == 0)
return;
for (ItemStack item : items) {
repair(item);
}
}
/**
* Gets the display name of an {@link ItemStack}. Returns empty string if
* the item has no display name.
*
* @param stack The {@link ItemStack} to get a display name from.
* @param nameResult Specify how a missing display name should be returned.
*/
@Nullable
public static String getDisplayName(ItemStack stack, DisplayNameOption nameResult) {
PreCon.notNull(stack);
ItemMeta meta = stack.getItemMeta();
if (meta == null || !meta.hasDisplayName()) {
switch (nameResult) {
case REQUIRED:
if (meta instanceof PotionMeta) {
Potion potion = new Potion(stack.getDurability());
return PotionNames.getSimple(potion);
}
String materialName = NamedMaterialData.getAlternate(stack.getData());
if (materialName == null) {
materialName = stack.getType().name().toLowerCase();
}
String spaced = PATTERN_REPLACE_UNDERSCORE.matcher(materialName).replaceAll(" ");
return TextUtils.titleCase(spaced);
case OPTIONAL:
// fall through
default:
return null;
}
}
return meta.getDisplayName();
}
/**
* Sets the display name of an {@link ItemStack}.
*
* @param stack The {@link ItemStack}.
* @param displayName The display name.
*/
public static void setDisplayName(ItemStack stack, @Nullable String displayName) {
PreCon.notNull(stack);
ItemMeta meta = stack.getItemMeta();
meta.setDisplayName(displayName);
stack.setItemMeta(meta);
}
/**
* Sets 32 bit color of an {@link ItemStack}, if possible.
*
* @param item The item stack.
* @param red The red component
* @param green The green component
* @param blue The blue component
*
* @return True if color changed.
*/
public static boolean setColor(ItemStack item, int red, int green, int blue){
PreCon.notNull(item);
return setColor(item, Color.fromRGB(red, green, blue));
}
/**
* Sets RGB Color of an {@link ItemStack}, if possible.
*
* @param item The item stack.
* @param color The 32 bit RGB integer color.
*
* @return True if color changed.
*/
public static boolean setColor(ItemStack item, int color){
PreCon.notNull(item);
return setColor(item, Color.fromRGB(color));
}
/**
* Sets RGB color of an {@link ItemStack}, if possible.
*
* @param item The {@link ItemStack}.
* @param color The color to set.
*
* @return True if color changed.
*/
public static boolean setColor(ItemStack item, Color color){
PreCon.notNull(item);
PreCon.notNull(color);
ItemMeta meta = item.getItemMeta();
if (meta instanceof LeatherArmorMeta) {
LeatherArmorMeta laMeta = (LeatherArmorMeta)meta;
laMeta.setColor(color);
item.setItemMeta(laMeta);
return true;
}
return false;
}
/**
* Gets the 32-bit color of an {@link ItemStack} if it has any.
*
* @param item The {@link ItemStack}.
*
* @return The {@link Color} or null if item does not have 32-bit color.
*/
@Nullable
public static Color getColor(ItemStack item) {
PreCon.notNull(item);
ItemMeta meta = item.getItemMeta();
if (meta instanceof LeatherArmorMeta) {
LeatherArmorMeta laMeta = (LeatherArmorMeta)meta;
return laMeta.getColor();
}
return null;
}
/**
* Parses serialized item string to {@link ItemStack} array.
*
* @param itemString The {@link ItemStack} string to parse.
*
* @throws InvalidItemStackStringException
*
* @return Null if the string could not be parsed.
*
* @see IItemStackSerializer
* @see IItemStackDeserializer
*/
@Nullable
public static ItemStack[] parse(String itemString) throws InvalidItemStackStringException {
if (itemString == null || itemString.length() == 0)
return new ItemStack[0];
IItemStackDeserializer parser;
parser = Nucleus.getItemSerialization().parse(itemString);
return parser.getArray();
}
/**
* Serialize a collection of {@link ItemStack} into a string.
*
* @param stacks The {@link ItemStack} collection.
*/
public static String serialize(Collection<ItemStack> stacks) {
PreCon.notNull(stacks);
return Nucleus.getItemSerialization()
.createSerializer(stacks.size()).appendAll(stacks).toString();
}
/**
* Serialize an {@link ItemStack} array into a string.
*
* @param stacks The {@link ItemStack}'s to serialize.
*/
public static String serialize(ItemStack... stacks) {
PreCon.notNull(stacks);
return Nucleus.getItemSerialization()
.createSerializer(stacks.length).appendAll(stacks).toString();
}
} |
package com.jwetherell.algorithms.data_structures;
// A Trie that maps CharSequence keys to int values, stored one character per
// node. Integer.MIN_VALUE is the sentinel meaning "no value stored here", so
// a stored value of Integer.MIN_VALUE is indistinguishable from "absent".
// NOTE(review): relies on Node/root inherited from Trie (not visible in this
// file); comments about their behavior are inferred from usage here.
public class TrieMap<C extends CharSequence> extends Trie<C> {

    public TrieMap() {
        // the root node carries no character of its own
        root = new MapNode<C>(null);
    }

    // Adds a key/value pair. Returns true if the value was stored, false when
    // the key already has a value (existing values are never overwritten).
    public boolean add(C key, int value) {
        int length = (key.length()-1);
        Node<C> prev = root;
        // walk (and create where missing) nodes for all but the last character
        for (int i=0; i<length; i++) {
            Node<C> n = null;
            Character c = key.charAt(i);
            int index = prev.childIndex(c);
            if (index>=0) {
                n = prev.getChild(index);
            } else {
                // no child for this character yet; add an intermediate node
                n = new MapNode<C>(c);
                prev.children.add(n);
            }
            prev = n;
        }
        // the node for the last character holds the key's value
        MapNode<C> n = null;
        Character c = key.charAt(length);
        int index = prev.childIndex(c);
        if (index>=0) {
            n = (MapNode<C>) prev.getChild(index);
            // only store when the node holds no value yet (MIN_VALUE sentinel)
            if (n.value==Integer.MIN_VALUE) {
                n.character = c;
                n.string = key;
                n.value = value;
                return true;
            } else {
                return false;
            }
        } else {
            n = new MapNode<C>(c,key,value);
            prev.children.add(n);
            return true;
        }
    }

    // Looks up the value stored for a key; returns Integer.MIN_VALUE when the
    // key is absent.
    public int get(String key) {
        if (root==null) return Integer.MIN_VALUE;

        MapNode<C> n = (MapNode<C>) root;
        int length = (key.length()-1);
        // follow one child per character; bail out as soon as a link is missing
        for (int i=0; i<=length; i++) {
            char c = key.charAt(i);
            int index = n.childIndex(c);
            if (index>=0) {
                n = (MapNode<C>) n.getChild(index);
            } else {
                return Integer.MIN_VALUE;
            }
        }
        if (n!=null) return n.value;
        return Integer.MIN_VALUE;
    }

    // Trie node that additionally stores an int value for complete keys.
    protected static class MapNode<C extends CharSequence> extends Node<C> {

        // Integer.MIN_VALUE marks "no value stored at this node"
        protected int value = Integer.MIN_VALUE;

        protected MapNode(Character character) {
            super(character);
        }

        protected MapNode(Character character, C string, int value) {
            super(character,string);
            this.value = value;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public String toString() {
            StringBuilder builder = new StringBuilder();
            if (value!=Integer.MIN_VALUE) builder.append("key=").append(string).append(" value=").append(value).append("\n");
            for (Node<C> c : children) {
                builder.append(c.toString());
            }
            return builder.toString();
        }
    }

    // Pretty-printer that dumps a TrieMap's structure to stdout.
    public static class TrieMapPrinter {

        public static <C extends CharSequence> void printNode(Node<C> root) {
            print(root, "", true);
        }

        protected static <C extends CharSequence> void print(Node<C> node, String prefix, boolean isTail) {
            // NOTE(review): the branch-drawing string literals below appear to
            // have lost their box-drawing characters (both ternary branches now
            // look like plain spaces); left byte-identical here -- confirm
            // against the upstream source before "fixing".
            MapNode<C> hashNode = (MapNode<C>) node;
            System.out.println(prefix + (isTail ? " " : " ") + ((node.string!=null)?("("+node.character+") "+node.string+" = "+hashNode.value):node.character));
            if (node.children != null) {
                for (int i = 0; i < node.children.size() - 1; i++) {
                    print(node.children.get(i), prefix + (isTail ? " " : " "), false);
                }
                if (node.children.size() >= 1) {
                    print(node.children.get(node.children.size() - 1), prefix + (isTail ?" " : " "), true);
                }
            }
        }
    }
}
package com.milkenknights.burgundyballista;
import edu.wpi.first.wpilibj.Talon;
/**
*
* @author Jake
*/
public class DriveSubsystem extends Subsystem {
JStick xbox;
Drive drive;
SolenoidPair driveGear;
boolean normalDriveGear;
boolean slowMode;
public DriveSubsystem(RobotConfig config) {
xbox = JStickMultiton.getJStick(1);
drive = new Drive(new Talon(config.getAsInt("tLeftWheel")),
new Talon(config.getAsInt("tRightWheel")));
// this solenoid pair is TRUE if the robot is in high gear
driveGear = new SolenoidPair(config.getAsInt("sDriveGearHigh"),
config.getAsInt("sDriveGearLow"), true, false, true);
}
public void teleopPeriodic() {
if (xbox.isReleased(JStick.XBOX_LB)) {
driveGear.toggle();
normalDriveGear = driveGear.get();
}
if (xbox.isReleased(JStick.XBOX_Y)) {
slowMode =! slowMode;
if (slowMode) {
driveGear.set(false);
} else {
driveGear.set(normalDriveGear);
}
}
double power = xbox.getAxis(JStick.XBOX_LSY);
double turn = xbox.getAxis(JStick.XBOX_RSX);
boolean trigDown = Math.abs(xbox.getAxis(JStick.XBOX_TRIG)) > 0.5;
if (slowMode) {
power = power * .5;
}
drive.cheesyDrive(power, turn, trigDown);
}
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.untamedears.JukeAlert.storage;
import com.untamedears.JukeAlert.JukeAlert;
import com.untamedears.JukeAlert.chat.ChatFiller;
import com.untamedears.JukeAlert.group.GroupMediator;
import com.untamedears.JukeAlert.manager.ConfigManager;
import com.untamedears.JukeAlert.model.LoggedAction;
import com.untamedears.JukeAlert.model.Snitch;
import com.untamedears.JukeAlert.tasks.GetSnitchInfoTask;
import com.untamedears.JukeAlert.util.SparseQuadTree;
import com.untamedears.citadel.entity.Faction;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
/**
 * Persistence layer for JukeAlert: owns the database connection, the prepared
 * statements for the snitch and snitch-detail tables, and the batched writer
 * used to record snitch events.
 *
 * @author Dylan Holmes
 */
public class JukeAlertLogger {

    // plugin configuration (database credentials, table prefix, paging)
    private ConfigManager configManager;
    // database wrapper; the statements below are prepared once connected
    private Database db;
    // name of the main snitch table (prefix + "snitchs")
    private String snitchsTbl;
    // name of the per-event detail table (prefix + "snitch_details")
    private String snitchDetailsTbl;
    private PreparedStatement getSnitchIdFromLocationStmt;
    private PreparedStatement getAllSnitchesStmt;
    private PreparedStatement getLastSnitchID;
    private PreparedStatement getSnitchLogStmt;
    private PreparedStatement deleteSnitchLogStmt;
    private PreparedStatement insertSnitchLogStmt;
    private PreparedStatement insertNewSnitchStmt;
    private PreparedStatement deleteSnitchStmt;
    private PreparedStatement updateGroupStmt;
    private PreparedStatement updateCuboidVolumeStmt;
    private PreparedStatement updateSnitchNameStmt;
    private PreparedStatement updateSnitchGroupStmt;
    // number of log lines returned per page of snitch info
    private int logsPerPage;
    // highest known snitch id, seeded from the table's auto_increment counter
    private int lastSnitchID;
    // The following are used by SnitchEnumerator
    protected JukeAlert plugin;
    protected GroupMediator groupMediator;
    protected PreparedStatement getAllSnitchesByWorldStmt;
/**
 * Connects to the database described in the plugin config, creates the
 * tables and prepared statements, seeds the last-snitch-id counter, and
 * schedules a repeating flush of the batched log writer (every 2 seconds).
 */
public JukeAlertLogger() {
    plugin = JukeAlert.getInstance();
    configManager = plugin.getConfigManager();
    groupMediator = plugin.getGroupMediator();

    String host = configManager.getHost();
    int port = configManager.getPort();
    String dbname = configManager.getDatabase();
    String username = configManager.getUsername();
    String password = configManager.getPassword();
    String prefix = configManager.getPrefix();

    snitchsTbl = prefix + "snitchs";
    snitchDetailsTbl = prefix + "snitch_details";

    db = new Database(host, port, dbname, username, password, prefix, this.plugin.getLogger());
    boolean connected = db.connect();
    if (connected) {
        genTables();
        initializeStatements();
        initializeLastSnitchId();
    } else {
        this.plugin.getLogger().log(Level.SEVERE, "Could not connect to the database! Fill out your config.yml!");
    }
    logsPerPage = configManager.getLogsPerPage();

    // periodically flush pending snitch log inserts to the database
    Bukkit.getScheduler().runTaskTimer(plugin, new Runnable() {
        @Override
        public void run() {
            saveAllSnitches();
        }
    }, 40, 40); // 2 sec
}
/**
 * Returns the database wrapper used by this logger.
 *
 * @return the backing {@code Database}
 */
public Database getDb() {
    return this.db;
}
/**
 * Table generator
 *
 * <p>Creates the snitch table and the snitch-detail (event log) table if they
 * do not exist yet. The DDL is MySQL-specific (AUTO_INCREMENT, inline INDEX).</p>
 */
private void genTables() {
    //Snitches
    db.execute("CREATE TABLE IF NOT EXISTS `" + snitchsTbl + "` ("
            + "`snitch_id` int(10) unsigned NOT NULL AUTO_INCREMENT,"
            + "`snitch_world` varchar(40) NOT NULL,"
            + "`snitch_name` varchar(40) NOT NULL,"
            + "`snitch_x` int(10) NOT NULL,"
            + "`snitch_y` int(10) NOT NULL,"
            + "`snitch_z` int(10) NOT NULL,"
            + "`snitch_group` varchar(40) NOT NULL,"
            + "`snitch_cuboid_x` int(10) NOT NULL,"
            + "`snitch_cuboid_y` int(10) NOT NULL,"
            + "`snitch_cuboid_z` int(10) NOT NULL,"
            + "`snitch_should_log` BOOL,"
            + "PRIMARY KEY (`snitch_id`),"
            + "INDEX `idx_y` (`snitch_y` ASC));");
    //Snitch Details
    // need to know:
    // action: (killed, block break, block place, etc), can't be null
    // person who initiated the action (player name), can't be null
    // victim of action (player name, entity), can be null
    // x, (for things like block place, bucket empty, etc, NOT the snitch x,y,z) can be null
    // y, can be null
    // z, can be null
    // block_id, can be null (block id for block place, block use, block break, etc)
    //
    // NOTE(review): the foreign key below references the literal table name
    // `snitchs`, not the prefixed snitchsTbl value -- this likely fails when a
    // non-empty table prefix is configured; confirm before changing.
    db.execute("CREATE TABLE IF NOT EXISTS `" + snitchDetailsTbl + "` ("
            + "`snitch_details_id` int(10) unsigned NOT NULL AUTO_INCREMENT,"
            + "`snitch_id` int(10) unsigned NOT NULL," // reference to the column in the main snitches table
            + "`snitch_log_time` datetime,"
            + "`snitch_logged_action` tinyint unsigned NOT NULL,"
            + "`snitch_logged_initiated_user` varchar(16) NOT NULL,"
            + "`snitch_logged_victim_user` varchar(16), "
            + "`snitch_logged_x` int(10), "
            + "`snitch_logged_Y` int(10), "
            + "`snitch_logged_z` int(10), "
            + "`snitch_logged_materialid` smallint unsigned," // can be either a block, item, etc
            + "PRIMARY KEY (`snitch_details_id`),"
            + "INDEX `idx_snitch_id` (`snitch_id` ASC),"
            + "CONSTRAINT `fk_snitchs_snitch_id` FOREIGN KEY (`snitch_id`)"
            + " REFERENCES `snitchs` (`snitch_id`) ON DELETE CASCADE ON UPDATE CASCADE);");
}
/**
 * Prepares a fresh INSERT statement for the snitch-details table.
 *
 * <p>Presumably used by {@code JukeInfoBatch}, which needs its own statement
 * instance rather than the shared {@code insertSnitchLogStmt} -- TODO confirm
 * against JukeInfoBatch.</p>
 *
 * @return a new PreparedStatement with 9 positional parameters
 */
public PreparedStatement getNewInsertSnitchLogStmt() {
    return db.prepareStatement(String.format(
            "INSERT INTO %s (snitch_id, snitch_log_time, snitch_logged_action, snitch_logged_initiated_user,"
            + " snitch_logged_victim_user, snitch_logged_x, snitch_logged_y, snitch_logged_z, snitch_logged_materialid) "
            + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)",
            snitchDetailsTbl));
}
/**
 * Prepares all shared statements against the connected database. Must only
 * be called after {@code db.connect()} succeeds and tables exist.
 */
private void initializeStatements() {
    getAllSnitchesStmt = db.prepareStatement(String.format(
            "SELECT * FROM %s", snitchsTbl));
    getAllSnitchesByWorldStmt = db.prepareStatement(String.format(
            "SELECT * FROM %s WHERE snitch_world = ?", snitchsTbl));
    // MySQL-specific: table status exposes the Auto_increment counter used to
    // seed lastSnitchID
    getLastSnitchID = db.prepareStatement(String.format(
            "SHOW TABLE STATUS LIKE '%s'", snitchsTbl));
    // statement to get LIMIT entries OFFSET from a number from the snitchesDetailsTbl based on a snitch_id from the main snitchesTbl
    // LIMIT ?,? means offset followed by max rows to return
    getSnitchLogStmt = db.prepareStatement(String.format(
            "SELECT * FROM %s"
            + " WHERE snitch_id=? ORDER BY snitch_log_time DESC LIMIT ?,?",
            snitchDetailsTbl));
    // statement to get the ID of a snitch in the main snitchsTbl based on a Location (x,y,z, world)
    getSnitchIdFromLocationStmt = db.prepareStatement(String.format("SELECT snitch_id FROM %s"
            + " WHERE snitch_x=? AND snitch_y=? AND snitch_z=? AND snitch_world=?", snitchsTbl));
    // statement to insert a log entry into the snitchesDetailsTable
    insertSnitchLogStmt = db.prepareStatement(String.format(
            "INSERT INTO %s (snitch_id, snitch_log_time, snitch_logged_action, snitch_logged_initiated_user,"
            + " snitch_logged_victim_user, snitch_logged_x, snitch_logged_y, snitch_logged_z, snitch_logged_materialid) "
            + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)",
            snitchDetailsTbl));
    insertNewSnitchStmt = db.prepareStatement(String.format(
            "INSERT INTO %s (snitch_world, snitch_name, snitch_x, snitch_y, snitch_z, snitch_group, snitch_cuboid_x, snitch_cuboid_y, snitch_cuboid_z)"
            + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)",
            snitchsTbl));
    // deletes all log rows for one snitch (not the snitch itself)
    deleteSnitchLogStmt = db.prepareStatement(String.format(
            "DELETE FROM %s WHERE snitch_id=?",
            snitchDetailsTbl));
    // deletes the snitch row itself, addressed by location
    deleteSnitchStmt = db.prepareStatement(String.format(
            "DELETE FROM %s WHERE snitch_world=? AND snitch_x=? AND snitch_y=? AND snitch_z=?",
            snitchsTbl));
    updateGroupStmt = db.prepareStatement(String.format(
            "UPDATE %s SET snitch_group=? WHERE snitch_world=? AND snitch_x=? AND snitch_y=? AND snitch_z=?",
            snitchsTbl));
    updateCuboidVolumeStmt = db.prepareStatement(String.format(
            "UPDATE %s SET snitch_cuboid_x=?, snitch_cuboid_y=?, snitch_cuboid_z=?"
            + " WHERE snitch_world=? AND snitch_x=? AND snitch_y=? AND snitch_z=?",
            snitchsTbl));
    updateSnitchNameStmt = db.prepareStatement(String.format(
            "UPDATE %s SET snitch_name=?"
            + " WHERE snitch_id=?",
            snitchsTbl));
    updateSnitchGroupStmt = db.prepareStatement(String.format(
            "UPDATE %s SET snitch_group=?"
            + " WHERE snitch_id=?",
            snitchsTbl));
}
/**
 * Seeds {@code lastSnitchID} from the snitch table's auto-increment counter.
 * Leaves it at -1 (and logs SEVERE) when the value cannot be determined.
 */
private void initializeLastSnitchId() {
    lastSnitchID = -1;
    try {
        ResultSet rsKey = getLastSnitchID.executeQuery();
        try {
            if (rsKey.next()) {
                lastSnitchID = rsKey.getInt("Auto_increment");
            }
        } finally {
            // previously leaked; always release the result set
            rsKey.close();
        }
    } catch (SQLException ex) {
        lastSnitchID = -1;
    }
    if (lastSnitchID == -1) {
        this.plugin.getLogger().log(Level.SEVERE, "Could not determine the last snitch id!");
    }
}
/**
 * Builds a human-readable key identifying a snitch by its block location.
 *
 * @param loc the snitch location
 * @return a string of the form {@code "World: w X: x Y: y Z: z"}
 */
public static String snitchKey(final Location loc) {
    StringBuilder key = new StringBuilder();
    key.append("World: ").append(loc.getWorld().getName());
    key.append(" X: ").append(loc.getBlockX());
    key.append(" Y: ").append(loc.getBlockY());
    key.append(" Z: ").append(loc.getBlockZ());
    return key.toString();
}
/**
 * Streams the snitches of a single world out of the database one row at a
 * time, materializing each row into a {@code Snitch} lazily.
 *
 * <p>NOTE(review): the underlying ResultSet is never explicitly closed; it
 * is only dropped when exhausted or on error.</p>
 */
protected class SnitchEnumerator implements Enumeration<Snitch> {

    JukeAlertLogger logger_;
    World world_;
    // live cursor over the per-world query; null once exhausted or failed
    ResultSet rs_;
    // prefetched next element; null signals the end of the enumeration
    Snitch next_;

    public SnitchEnumerator(JukeAlertLogger logger, World world) {
        logger_ = logger;
        world_ = world;
        try {
            getAllSnitchesByWorldStmt.setString(1, world_.getName());
            rs_ = getAllSnitchesByWorldStmt.executeQuery();
            // prefetch the first element so hasMoreElements() is accurate
            next_ = getNextSnitch();
        } catch (SQLException ex) {
            logger_.plugin.getLogger().log(Level.SEVERE, "Couldn't get first snitch in World " + world_ + "! " + ex.toString());
            rs_ = null;
            next_ = null;
        }
    }

    @Override
    public boolean hasMoreElements() {
        return next_ != null;
    }

    // Advances the cursor and builds a Snitch from the current row, or
    // returns null when the result set is exhausted or a SQL error occurs.
    private Snitch getNextSnitch() {
        try {
            if (rs_ == null || !rs_.next()) {
                rs_ = null;
                return null;
            }

            double x = rs_.getInt("snitch_x");
            double y = rs_.getInt("snitch_y");
            double z = rs_.getInt("snitch_z");
            String groupName = rs_.getString("snitch_group");
            Faction group = groupMediator.getGroupByName(groupName);
            Location location = new Location(world_, x, y, z);
            Snitch snitch = new Snitch(location, group);
            snitch.setId(rs_.getInt("snitch_id"));
            snitch.setName(rs_.getString("snitch_name"));
            return snitch;
        } catch (SQLException ex) {
            logger_.plugin.getLogger().log(Level.SEVERE, "Could not get all Snitches from World " + world_ + "! " + ex.toString());
            rs_ = null;
        }
        return null;
    }

    @Override
    public Snitch nextElement() {
        if (next_ == null) {
            throw new NoSuchElementException();
        }

        Snitch retval = next_;
        next_ = getNextSnitch();
        // keep the cached max snitch id up to date as rows stream past
        if (next_ != null && lastSnitchID < next_.getId()) {
            lastSnitchID = next_.getId();
        }
        return retval;
    }
}
/**
 * Enumerates every snitch stored for the given world.
 *
 * @param world the world whose snitches are wanted
 * @return an enumeration backed by a streaming database query
 */
public Enumeration<Snitch> getAllSnitches(World world) {
    SnitchEnumerator walker = new SnitchEnumerator(this, world);
    return walker;
}
/**
 * Flushes the in-memory batch of snitch log entries to the database.
 * Called every 2 seconds by the repeating task scheduled in the constructor.
 */
public void saveAllSnitches() {
    //TODO: Save snitches.
    jukeinfobatch.flush();
}
/**
 * Gets one page of log entries for the snitch at the given location.
 *
 * @param loc - the location of the snitch
 * @param offset - the number of entries to skip (10 means the listing starts
 * at the 10th entry)
 * @return a list of formatted snitch log lines; empty when no snitch exists
 * at the location or a database error occurs
 */
public List<String> getSnitchInfo(Location loc, int offset) {
    List<String> info = new ArrayList<String>();
    // get the snitch's ID based on the location, then use that to get the snitch details from the snitchesDetail table
    int interestedSnitchId = -1;
    try {
        // params are x(int), y(int), z(int), world(tinyint), column returned: snitch_id (int)
        getSnitchIdFromLocationStmt.setInt(1, loc.getBlockX());
        getSnitchIdFromLocationStmt.setInt(2, loc.getBlockY());
        getSnitchIdFromLocationStmt.setInt(3, loc.getBlockZ());
        getSnitchIdFromLocationStmt.setString(4, loc.getWorld().getName());
        ResultSet snitchIdSet = getSnitchIdFromLocationStmt.executeQuery();
        // make sure we got a result
        boolean didFind = false;
        while (snitchIdSet.next()) {
            didFind = true;
            interestedSnitchId = snitchIdSet.getInt("snitch_id");
        }
        // only continue if we actually got a result from the first query
        if (!didFind) {
            this.plugin.getLogger().log(Level.SEVERE, "Didn't get any results trying to find a snitch in the snitches table at location " + loc);
        } else {
            // NOTE(review): the task is scheduled asynchronously but getInfo()
            // is read immediately afterwards -- the result may not be populated
            // yet. Confirm GetSnitchInfoTask's synchronization semantics.
            GetSnitchInfoTask task = new GetSnitchInfoTask(plugin, interestedSnitchId, offset);
            Bukkit.getScheduler().runTaskAsynchronously(plugin, task);
            return task.getInfo();
        }
    } catch (SQLException ex1) {
        this.plugin.getLogger().log(Level.SEVERE, "Could not get Snitch Details! loc: " + loc, ex1);
    }
    return info;
}
/**
 * Fetches one page of formatted log lines for a snitch id directly from the
 * snitch-details table, newest entries first.
 *
 * @param snitchId the snitch whose log is wanted
 * @param offset number of entries to skip (paging)
 * @return formatted log lines; empty on error or when there is no data
 */
public List<String> getSnitchInfo(int snitchId, int offset) {
    List<String> info = new ArrayList<String>();
    try {
        getSnitchLogStmt.setInt(1, snitchId);
        getSnitchLogStmt.setInt(2, offset);
        getSnitchLogStmt.setInt(3, logsPerPage);
        ResultSet set = getSnitchLogStmt.executeQuery();
        if (!set.isBeforeFirst()) {
            // NOTE(review): debug output to stdout; consider the plugin logger
            System.out.println("No data");
        } else {
            while (set.next()) {
                // TODO: need a function to create a string based upon what things we have / don't have in this result set
                // so like if we have a block place action, then we include the x,y,z, but if its a KILL action, then we just say
                // x killed y, etc
                info.add(createInfoString(set));
            }
        }
    } catch (SQLException ex) {
        this.plugin.getLogger().log(Level.SEVERE, "Could not get Snitch Details from the snitchesDetail table using the snitch id " + snitchId, ex);
    }
    return info;
}
/**
 * Deletes all logged events for the snitch at the given location.
 *
 * <p>Flushes the pending log batch first so entries still queued in memory
 * are written (and thus deletable) before the delete runs.</p>
 *
 * @param loc the snitch location
 * @return true when the log entries were deleted, false otherwise
 */
public Boolean deleteSnitchInfo(Location loc) {
    Boolean completed = false;
    // get the snitch's ID based on the location, then use that to get the snitch details from the snitchesDetail table
    int interestedSnitchId = -1;
    jukeinfobatch.flush();
    try {
        // params are x(int), y(int), z(int), world(tinyint), column returned: snitch_id (int)
        getSnitchIdFromLocationStmt.setInt(1, loc.getBlockX());
        getSnitchIdFromLocationStmt.setInt(2, loc.getBlockY());
        getSnitchIdFromLocationStmt.setInt(3, loc.getBlockZ());
        getSnitchIdFromLocationStmt.setString(4, loc.getWorld().getName());
        ResultSet snitchIdSet = getSnitchIdFromLocationStmt.executeQuery();
        // make sure we got a result
        boolean didFind = false;
        while (snitchIdSet.next()) {
            didFind = true;
            interestedSnitchId = snitchIdSet.getInt("snitch_id");
        }
        // only continue if we actually got a result from the first query
        if (!didFind) {
            this.plugin.getLogger().log(Level.SEVERE, "Didn't get any results trying to find a snitch in the snitches table at location " + loc);
        } else {
            // BUG FIX: the delete's result was previously discarded, so this
            // method always returned false; propagate it to the caller.
            completed = deleteSnitchInfo(interestedSnitchId);
        }
    } catch (SQLException ex1) {
        completed = false;
        this.plugin.getLogger().log(Level.SEVERE, "Could not get Snitch Details! loc: " + loc, ex1);
    }
    return completed;
}
/**
 * Deletes every logged event belonging to the given snitch id.
 *
 * @param snitchId the snitch whose log entries should be removed
 * @return true on success, false when the database delete failed
 */
public Boolean deleteSnitchInfo(int snitchId) {
    boolean deleted;
    try {
        deleteSnitchLogStmt.setInt(1, snitchId);
        deleteSnitchLogStmt.execute();
        deleted = true;
    } catch (SQLException ex) {
        this.plugin.getLogger().log(Level.SEVERE, "Could not delete Snitch Details from the snitchesDetail table using the snitch id " + snitchId, ex);
        deleted = false;
    }
    return deleted;
}
// Batches snitch log inserts so they can be flushed to the database together
// (flushed periodically by the task scheduled in the constructor).
public JukeInfoBatch jukeinfobatch = new JukeInfoBatch(this);

/**
 * Queues a snitch event for insertion into the snitch-details table.
 *
 * <p>The write is deferred: the entry is handed to {@link #jukeinfobatch}
 * and persisted on the next flush.</p>
 *
 * @param snitch the snitch that observed the event (provides the snitch id)
 * @param material material involved, or null when the event has none
 * @param loc event location, or null when the event has none
 * @param date time of the event
 * @param action which kind of event occurred
 * @param initiatedUser player name that initiated the action
 * @param victimUser victim player/entity name, or null when there is none
 */
public void logSnitchInfo(Snitch snitch, Material material, Location loc, Date date, LoggedAction action, String initiatedUser, String victimUser) {
    // Removed: a ~45 line commented-out legacy implementation that performed
    // the INSERT directly on a shared PreparedStatement; the batch writer
    // replaced it.
    jukeinfobatch.addSet(snitch, material, loc, date, action, initiatedUser, victimUser);
}
/**
 * Records that a player killed a non-player entity inside the snitch field.
 *
 * @param snitch the snitch that recorded this event
 * @param player the player that did the killing
 * @param entity the entity that died
 */
public void logSnitchEntityKill(Snitch snitch, Player player, Entity entity) {
    // Kill events carry no material and no location; the victim column
    // stores the entity's type name.
    String victimName = entity.getType().toString();
    logSnitchInfo(snitch, null, null, new Date(), LoggedAction.KILL, player.getPlayerListName(), victimName);
}
/**
 * Records that a player killed another player inside the snitch field.
 *
 * @param snitch the snitch that recorded this event
 * @param player the player that did the killing
 * @param victim the player that died
 */
public void logSnitchPlayerKill(Snitch snitch, Player player, Player victim) {
    // Kill events carry no material and no location.
    String victimName = victim.getPlayerListName();
    logSnitchInfo(snitch, null, null, new Date(), LoggedAction.KILL, player.getPlayerListName(), victimName);
}
/**
 * Logs a message that someone ignited a block within the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param player - the player that did the ignition
 * @param block - the block that was ignited
 */
public void logSnitchIgnite(Snitch snitch, Player player, Block block) {
    // Unlike the kill events, this one does record the ignited block's type
    // and location; there is no victim user for this event. (The previous
    // comment claiming "no material or location" was incorrect.)
    this.logSnitchInfo(snitch, block.getType(), block.getLocation(), new Date(), LoggedAction.IGNITED, player.getPlayerListName(), null);
}
/**
 * Logs a message that someone entered the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param loc - the location of where the player entered
 * @param player - the player that entered the snitch's field
 */
public void logSnitchEntry(Snitch snitch, Location loc, Player player) {
    // no material or victimUser for this event
    this.logSnitchInfo(snitch, null, loc, new Date(), LoggedAction.ENTRY, player.getPlayerListName(), null);
}
/**
 * Records that a player logged in inside the snitch's field.
 *
 * @param snitch the snitch that recorded this event
 * @param loc where the player logged in
 * @param player the player that logged in
 */
public void logSnitchLogin(Snitch snitch, Location loc, Player player) {
    // login events have a location but no material and no victim user
    logSnitchInfo(snitch, null, loc, new Date(), LoggedAction.LOGIN, player.getPlayerListName(), null);
}
/**
 * Logs a message that someone logged out in the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param player - the player that logged out in the snitch's field
 * @param loc - the location of where the player logged out at
 */
public void logSnitchLogout(Snitch snitch, Location loc, Player player) {
// no material or victimUser for this event; only the logout location is stored
this.logSnitchInfo(snitch, null, loc, new Date(), LoggedAction.LOGOUT, player.getPlayerListName(), null);
}
/**
 * Logs a message that someone broke a block within the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param player - the player that broke the block
 * @param block - the block that was broken; its material and location are stored
 */
public void logSnitchBlockBreak(Snitch snitch, Player player, Block block) {
// no victim user in this event
this.logSnitchInfo(snitch, block.getType(), block.getLocation(), new Date(), LoggedAction.BLOCK_BREAK, player.getPlayerListName(), null);
}
/**
 * Logs a message that someone placed a block within the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param player - the player that placed the block
 * @param block - the block that was placed; its material and location are stored
 */
public void logSnitchBlockPlace(Snitch snitch, Player player, Block block) {
// no victim user in this event
this.logSnitchInfo(snitch, block.getType(), block.getLocation(), new Date(), LoggedAction.BLOCK_PLACE, player.getPlayerListName(), null);
}
/**
 * Logs a message that someone emptied a bucket within the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param player - the player that emptied the bucket
 * @param loc - the location of where the bucket empty occurred
 * @param item - the ItemStack representing the bucket that the player
 * emptied; its material (the bucket type) is stored
 */
public void logSnitchBucketEmpty(Snitch snitch, Player player, Location loc, ItemStack item) {
// no victim user in this event
this.logSnitchInfo(snitch, item.getType(), loc, new Date(), LoggedAction.BUCKET_EMPTY, player.getPlayerListName(), null);
}
/**
 * Logs a message that someone filled a bucket within the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param player - the player that filled the bucket
 * @param block - the block that was 'put into' the bucket; its material and
 * location are stored
 */
public void logSnitchBucketFill(Snitch snitch, Player player, Block block) {
// TODO: should we take a block or a ItemStack as a parameter here?
// JM: I think it'll be fine either way, most griefing is done with with block placement and this could be updated fairly easily
// no victim user in this event
this.logSnitchInfo(snitch, block.getType(), block.getLocation(), new Date(), LoggedAction.BUCKET_FILL, player.getPlayerListName(), null);
}
/**
 * Logs a message that someone used a block within the snitch's field
 *
 * @param snitch - the snitch that recorded this event
 * @param player - the player that used something
 * @param block - the block that was used; its material and location are stored
 */
public void logUsed(Snitch snitch, Player player, Block block) {
// TODO: what should we use to identify what was used? Block? Material?
//JM: Let's keep this consistent with block plament
this.logSnitchInfo(snitch, block.getType(), block.getLocation(), new Date(), LoggedAction.BLOCK_USED, player.getPlayerListName(), null);
}
//Logs the snitch being placed at World, x, y, z in the database.
// Runs the INSERT asynchronously off the main server thread; failures are
// logged at SEVERE and otherwise swallowed (best-effort logging).
public void logSnitchPlace(final String world, final String group, final String name, final int x, final int y, final int z) {
// Capture the config manager into a final local so the async task reads a stable reference.
final ConfigManager lockedConfigManager = this.configManager;
Bukkit.getScheduler().runTaskAsynchronously(plugin, new Runnable() {
@Override
public void run() {
try {
// Flush any batched detail writes before inserting the new snitch row.
jukeinfobatch.flush();
// The prepared statement is shared, so serialize bind+execute on it.
synchronized(insertNewSnitchStmt) {
insertNewSnitchStmt.setString(1, world);
insertNewSnitchStmt.setString(2, name);
insertNewSnitchStmt.setInt(3, x);
insertNewSnitchStmt.setInt(4, y);
insertNewSnitchStmt.setInt(5, z);
insertNewSnitchStmt.setString(6, group);
// Parameters 7-9 are the cuboid x/y/z extents, all set to the configured default.
insertNewSnitchStmt.setInt(7, lockedConfigManager.getDefaultCuboidSize());
insertNewSnitchStmt.setInt(8, lockedConfigManager.getDefaultCuboidSize());
insertNewSnitchStmt.setInt(9, lockedConfigManager.getDefaultCuboidSize());
insertNewSnitchStmt.execute();
}
} catch (SQLException ex) {
Logger.getLogger(JukeAlertLogger.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
}
//Removes the snitch at the location of World, X, Y, Z from the database.
// Runs the DELETE asynchronously off the main server thread; failures are
// logged at SEVERE and otherwise swallowed (best-effort logging).
public void logSnitchBreak(final String world, final int x, final int y, final int z) {
Bukkit.getScheduler().runTaskAsynchronously(plugin, new Runnable() {
@Override
public void run() {
try {
// Flush any batched detail writes before deleting the snitch row.
jukeinfobatch.flush();
// The prepared statement is shared, so serialize bind+execute on it.
synchronized(deleteSnitchStmt) {
deleteSnitchStmt.setString(1, world);
// x/y/z are already ints; the old "(int) Math.floor(x)" round-trip
// through double was a no-op and has been removed.
deleteSnitchStmt.setInt(2, x);
deleteSnitchStmt.setInt(3, y);
deleteSnitchStmt.setInt(4, z);
deleteSnitchStmt.execute();
}
} catch (SQLException ex) {
Logger.getLogger(JukeAlertLogger.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
}
//Changes the group of which the snitch is registered to at the location of loc in the database.
// Identifies the row by world + block coordinates rather than by snitch id.
public void updateGroupSnitch(final Location loc, final String group) {
Bukkit.getScheduler().runTaskAsynchronously(plugin, new Runnable() {
@Override
public void run() {
try {
// The prepared statement is shared, so serialize bind+execute on it.
synchronized(updateGroupStmt) {
updateGroupStmt.setString(1, group);
updateGroupStmt.setString(2, loc.getWorld().getName());
updateGroupStmt.setInt(3, loc.getBlockX());
updateGroupStmt.setInt(4, loc.getBlockY());
updateGroupStmt.setInt(5, loc.getBlockZ());
updateGroupStmt.execute();
}
} catch (SQLException ex) {
Logger.getLogger(JukeAlertLogger.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
}
//Updates the cuboid size of the snitch in the database.
// NOTE(review): method name has a typo ("Cubiod"); kept as-is because callers
// elsewhere depend on it.
public void updateCubiodSize(final Location loc, final int x, final int y, final int z) {
Bukkit.getScheduler().runTaskAsynchronously(plugin, new Runnable() {
@Override
public void run() {
try {
// The prepared statement is shared, so serialize bind+execute on it.
synchronized(updateCuboidVolumeStmt) {
// Parameters 1-3 are the new cuboid x/y/z extents.
updateCuboidVolumeStmt.setInt(1, x);
updateCuboidVolumeStmt.setInt(2, y);
updateCuboidVolumeStmt.setInt(3, z);
// Parameters 4-7 identify the snitch row by world + block coordinates.
updateCuboidVolumeStmt.setString(4, loc.getWorld().getName());
updateCuboidVolumeStmt.setInt(5, loc.getBlockX());
updateCuboidVolumeStmt.setInt(6, loc.getBlockY());
updateCuboidVolumeStmt.setInt(7, loc.getBlockZ());
updateCuboidVolumeStmt.execute();
}
} catch (SQLException ex) {
Logger.getLogger(JukeAlertLogger.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
}
//Updates the name of the snitch in the database.
// Identifies the row by snitch id; runs asynchronously off the main server thread.
public void updateSnitchName(final Snitch snitch, final String name) {
Bukkit.getScheduler().runTaskAsynchronously(plugin, new Runnable() {
@Override
public void run() {
try {
// The prepared statement is shared, so serialize bind+execute on it.
synchronized(updateSnitchNameStmt) {
updateSnitchNameStmt.setString(1, name);
updateSnitchNameStmt.setInt(2, snitch.getId());
updateSnitchNameStmt.execute();
}
} catch (SQLException ex) {
Logger.getLogger(JukeAlertLogger.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
}
//Updates the group of the snitch in the database.
// Identifies the row by snitch id (contrast updateGroupSnitch, which matches
// by location); runs asynchronously off the main server thread.
public void updateSnitchGroup(final Snitch snitch, final String group) {
Bukkit.getScheduler().runTaskAsynchronously(plugin, new Runnable() {
@Override
public void run() {
try {
// The prepared statement is shared, so serialize bind+execute on it.
synchronized(updateSnitchGroupStmt) {
updateSnitchGroupStmt.setString(1, group);
updateSnitchGroupStmt.setInt(2, snitch.getId());
updateSnitchGroupStmt.execute();
}
} catch (SQLException ex) {
Logger.getLogger(JukeAlertLogger.class.getName()).log(Level.SEVERE, null, ex);
}
}
});
}
/**
 * @return the cached id of the most recently allocated snitch row.
 */
public Integer getLastSnitchID() {
return lastSnitchID;
}
/**
 * Bumps the cached last snitch id by one.
 * NOTE(review): lastSnitchID appears to be a boxed Integer (see getLastSnitchID),
 * so this throws NullPointerException if it was never initialised, and the
 * read-modify-write is not atomic — confirm callers use it from one thread only.
 */
public void increaseLastSnitchID() {
lastSnitchID++;
}
/**
 * Logs a message that a block burned within the snitch's field.
 * Fire has no initiating player, so the initiator column is null.
 *
 * @param snitch - the snitch that recorded this event
 * @param block - the block that burned; its material and location are stored
 */
public void logSnitchBlockBurn(Snitch snitch, Block block) {
// Bug fix: the victim-user argument was previously passed snitchDetailsTbl
// (the details table name). Every other event passes null for "no victim",
// so do the same here.
this.logSnitchInfo(snitch, block.getType(), block.getLocation(), new Date(), LoggedAction.BLOCK_BURN, null, null);
}
/**
 * Builds a one-line, color-formatted chat summary for a single snitch-log row.
 *
 * @param set a ResultSet positioned on the row to render (this method does not advance it)
 * @return the formatted line; a red error line if the action id is unknown or the row
 *         could not be read
 */
public String createInfoString(ResultSet set) {
String resultString = ChatColor.RED + "Error!";
try {
int id = set.getInt("snitch_details_id");
String initiator = set.getString("snitch_logged_initiated_user");
String victim = set.getString("snitch_logged_victim_user");
int action = (int) set.getByte("snitch_logged_action");
int material = set.getInt("snitch_logged_materialid");
int x = set.getInt("snitch_logged_X");
int y = set.getInt("snitch_logged_Y");
int z = set.getInt("snitch_logged_Z");
// SimpleDateFormat is not thread-safe, so a fresh instance per call is deliberate.
String timestamp = new SimpleDateFormat("MM-dd HH:mm").format(set.getTimestamp("snitch_log_time"));
// Third column differs by event family: movement rows show the timestamp,
// block rows show "materialId [x y z]", kill rows show the victim's name.
String blockInfo = String.format("%d [%d %d %d]", material, x, y, z);
if (action == LoggedAction.ENTRY.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.BLUE, "Entry", ChatColor.WHITE + ChatFiller.fillString(timestamp, (double) 15));
} else if (action == LoggedAction.LOGIN.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.GREEN, "Login", ChatColor.WHITE + ChatFiller.fillString(timestamp, (double) 15));
} else if (action == LoggedAction.LOGOUT.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.GREEN, "Logout", ChatColor.WHITE + ChatFiller.fillString(timestamp, (double) 15));
} else if (action == LoggedAction.BLOCK_BREAK.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.DARK_RED, "Block Break", ChatColor.WHITE + ChatFiller.fillString(blockInfo, (double) 30));
} else if (action == LoggedAction.BLOCK_PLACE.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.DARK_RED, "Block Place", ChatColor.WHITE + ChatFiller.fillString(blockInfo, (double) 30));
} else if (action == LoggedAction.BLOCK_BURN.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.DARK_RED, "Block Burn", ChatColor.WHITE + ChatFiller.fillString(blockInfo, (double) 30));
} else if (action == LoggedAction.IGNITED.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.GOLD, "Ignited", ChatColor.WHITE + ChatFiller.fillString(blockInfo, (double) 30));
} else if (action == LoggedAction.USED.getLoggedActionId() || action == LoggedAction.BLOCK_USED.getLoggedActionId()) {
// USED and BLOCK_USED were rendered identically; the duplicate branches are merged.
resultString = formatLogRow(initiator, ChatColor.GREEN, "Used", ChatColor.WHITE + ChatFiller.fillString(blockInfo, (double) 30));
} else if (action == LoggedAction.BUCKET_EMPTY.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.DARK_RED, "Bucket Empty", ChatColor.WHITE + ChatFiller.fillString(blockInfo, (double) 30));
} else if (action == LoggedAction.BUCKET_FILL.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.GREEN, "Bucket Fill", ChatColor.WHITE + ChatFiller.fillString(blockInfo, (double) 30));
} else if (action == LoggedAction.KILL.getLoggedActionId()) {
resultString = formatLogRow(initiator, ChatColor.DARK_RED, "Killed", ChatColor.WHITE + ChatFiller.fillString(victim, (double) 30));
} else {
resultString = ChatColor.RED + "Action not found. Please contact your administrator with log ID " + id;
}
} catch (SQLException ex) {
// Log the exception itself so the failure cause is not lost.
this.plugin.getLogger().log(Level.SEVERE, "Could not get Snitch Details!", ex);
}
return resultString;
}
/**
 * Formats one log row: gold initiator (width 25), colored action label (width 15),
 * followed by the pre-formatted detail column.
 */
private String formatLogRow(String initiator, ChatColor actionColor, String actionLabel, String detail) {
return String.format(" %s %s %s", ChatColor.GOLD + ChatFiller.fillString(initiator, (double) 25), actionColor + ChatFiller.fillString(actionLabel, (double) 15), detail);
}
} |
package me.nallar.patched.entity;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.Item;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraft.item.ItemSword;
import net.minecraft.profiler.Profiler;
import net.minecraft.util.MathHelper;
import net.minecraft.world.World;
/**
 * Patched replacement for vanilla EntityLiving movement and per-tick update
 * logic. NOTE(review): the visible deviations from vanilla are the flying-player
 * special cases, the ladder clamping, and the collisionSkipCounter early-out in
 * onLivingUpdate; everything else mirrors the vanilla statement order, which
 * must be preserved exactly.
 */
public abstract class PatchEntityLiving extends EntityLiving {
public PatchEntityLiving(World par1World) {
super(par1World);
}
/**
 * Moves this entity according to strafe (par1) and forward (par2) input,
 * handling water, lava, flying players, ladders, ground friction and the
 * limb-swing animation update.
 */
@Override
public void moveEntityWithHeading(float par1, float par2) {
double var9;
// Cast through Object because the patcher splices this class over EntityLiving;
// without the cast the instanceof would be statically impossible.
@SuppressWarnings ("RedundantCast")
boolean isPlayer = (Object) this instanceof EntityPlayer;
boolean canFly = isPlayer && ((EntityPlayer) (Object) this).capabilities.isFlying;
if (!canFly && this.isInWater()) {
// Water movement: small acceleration, 0.8 drag per axis, slow sinking.
var9 = this.posY;
this.moveFlying(par1, par2, this.isAIEnabled() ? 0.04F : 0.02F);
this.moveEntity(this.motionX, this.motionY, this.motionZ);
this.motionX *= 0.800000011920929D;
this.motionY *= 0.800000011920929D;
this.motionZ *= 0.800000011920929D;
this.motionY -= 0.02D;
// Hop upward when pushing against a wall while the offset position is in liquid
// (lets entities climb out of water).
if (this.isCollidedHorizontally && this.isOffsetPositionInLiquid(this.motionX, this.motionY + 0.6000000238418579D - this.posY + var9, this.motionZ)) {
this.motionY = 0.30000001192092896D;
}
} else if (!canFly && this.handleLavaMovement()) {
// Lava movement: same shape as water but with heavier (0.5) drag.
var9 = this.posY;
this.moveFlying(par1, par2, 0.02F);
this.moveEntity(this.motionX, this.motionY, this.motionZ);
this.motionX *= 0.5D;
this.motionY *= 0.5D;
this.motionZ *= 0.5D;
this.motionY -= 0.02D;
if (this.isCollidedHorizontally && this.isOffsetPositionInLiquid(this.motionX, this.motionY + 0.6000000238418579D - this.posY + var9, this.motionZ)) {
this.motionY = 0.30000001192092896D;
}
} else {
// Air/ground movement. 0.91F is the base horizontal drag; on the ground it is
// scaled by the slipperiness of the block directly under the entity.
float var3 = 0.91F;
if (this.onGround) {
var3 = 0.54600006F;
int var4 = this.worldObj.getBlockId(MathHelper.floor_double(this.posX), MathHelper.floor_double(this.boundingBox.minY) - 1, MathHelper.floor_double(this.posZ));
if (var4 > 0) {
var3 = Block.blocksList[var4].slipperiness * 0.91F;
}
}
float var8 = 0.16277136F / (var3 * var3 * var3);
float var5;
if (this.onGround) {
if (this.isAIEnabled()) {
var5 = this.getAIMoveSpeed();
} else {
var5 = this.landMovementFactor;
}
// Scale ground acceleration by var8 so net speed compensates for slipperiness.
var5 *= var8;
} else {
var5 = this.jumpMovementFactor;
}
this.moveFlying(par1, par2, var5);
// Recompute drag after moveFlying, re-sampling the block below, exactly as vanilla does.
var3 = 0.91F;
if (this.onGround) {
var3 = 0.54600006F;
int var6 = this.worldObj.getBlockId(MathHelper.floor_double(this.posX), MathHelper.floor_double(this.boundingBox.minY) - 1, MathHelper.floor_double(this.posZ));
if (var6 > 0) {
var3 = Block.blocksList[var6].slipperiness * 0.91F;
}
}
boolean onLadder = this.isOnLadder();
if (onLadder) {
// Clamp horizontal speed and descent rate while on a ladder; reset fall damage.
double maxLadderSpeed = 0.15F;
if (this.motionX < -maxLadderSpeed) {
this.motionX = -maxLadderSpeed;
}
if (this.motionX > maxLadderSpeed) {
this.motionX = maxLadderSpeed;
}
if (this.motionZ < -maxLadderSpeed) {
this.motionZ = -maxLadderSpeed;
}
if (this.motionZ > maxLadderSpeed) {
this.motionZ = maxLadderSpeed;
}
this.fallDistance = 0.0F;
if (this.motionY < -0.15D) {
this.motionY = -0.15D;
}
// Sneaking players hold their position on the ladder.
if (isPlayer && this.motionY < 0.0D && this.isSneaking()) {
this.motionY = 0.0D;
}
}
this.moveEntity(this.motionX, this.motionY, this.motionZ);
// Pushing into a wall while on a ladder climbs upwards.
if (this.isCollidedHorizontally && onLadder) {
this.motionY = 0.2D;
}
// Client side: if the chunk under the entity is missing/unloaded, drift down
// slowly (or hold still at y<=0) instead of applying normal gravity.
if (this.worldObj.isRemote && (!this.worldObj.blockExists((int) this.posX, 0, (int) this.posZ) || !this.worldObj.getChunkFromBlockCoords((int) this.posX, (int) this.posZ).isChunkLoaded)) {
if (this.posY > 0.0D) {
this.motionY = -0.1D;
} else {
this.motionY = 0.0D;
}
} else {
this.motionY -= 0.08D;
}
this.motionY *= 0.9800000190734863D;
this.motionX *= (double) var3;
this.motionZ *= (double) var3;
}
// Update the limb-swing animation from horizontal distance moved this tick.
this.prevLegYaw = this.legYaw;
var9 = this.posX - this.prevPosX;
double var12 = this.posZ - this.prevPosZ;
float var11 = MathHelper.sqrt_double(var9 * var9 + var12 * var12) * 4.0F;
if (var11 > 1.0F) {
var11 = 1.0F;
}
this.legYaw += (var11 - this.legYaw) * 0.4F;
this.legSwing += this.legYaw;
}
/**
 * Per-tick living-entity update: server-directed position interpolation, AI,
 * jumping, travel, entity pushing and (server side) weapon/armour looting.
 */
@Override
public void onLivingUpdate() {
final World worldObj = this.worldObj;
final Profiler theProfiler = worldObj.theProfiler;
final boolean isServer = !this.isClientWorld();
if (this.jumpTicks > 0) {
--this.jumpTicks;
}
// Smoothly interpolate towards a position/rotation received from the server.
if (this.newPosRotationIncrements > 0) {
double var1 = this.posX + (this.newPosX - this.posX) / (double) this.newPosRotationIncrements;
double var3 = this.posY + (this.newPosY - this.posY) / (double) this.newPosRotationIncrements;
double var5 = this.posZ + (this.newPosZ - this.posZ) / (double) this.newPosRotationIncrements;
double var7 = MathHelper.wrapAngleTo180_double(this.newRotationYaw - (double) this.rotationYaw);
this.rotationYaw = (float) ((double) this.rotationYaw + var7 / (double) this.newPosRotationIncrements);
this.rotationPitch = (float) ((double) this.rotationPitch + (this.newRotationPitch - (double) this.rotationPitch) / (double) this.newPosRotationIncrements);
--this.newPosRotationIncrements;
this.setPosition(var1, var3, var5);
this.setRotation(this.rotationYaw, this.rotationPitch);
} else if (isServer) {
this.motionX *= 0.98D;
this.motionY *= 0.98D;
this.motionZ *= 0.98D;
}
// Snap negligible motion to zero so tiny residual velocities die out.
if (Math.abs(this.motionX) < 0.005D) {
this.motionX = 0.0D;
}
if (Math.abs(this.motionY) < 0.005D) {
this.motionY = 0.0D;
}
if (Math.abs(this.motionZ) < 0.005D) {
this.motionZ = 0.0D;
}
// Patch-specific early-out: while collisionSkipCounter is positive the
// AI/jump/travel/looting phases below are skipped entirely.
// NOTE(review): the counter is assumed to be set elsewhere in the patched
// hierarchy — confirm against the rest of the patch set.
if (--collisionSkipCounter >= 0) {
return;
}
theProfiler.startSection("ai");
if (this.isMovementBlocked()) {
this.isJumping = false;
this.moveStrafing = 0.0F;
this.moveForward = 0.0F;
this.randomYawVelocity = 0.0F;
} else if (this.isClientWorld()) {
if (this.isAIEnabled()) {
theProfiler.startSection("newAi");
this.updateAITasks();
theProfiler.endSection();
} else {
theProfiler.startSection("oldAi");
this.updateEntityActionState();
theProfiler.endSection();
this.rotationYawHead = this.rotationYaw;
}
}
theProfiler.endSection();
theProfiler.startSection("jump");
if (this.isJumping) {
// In liquid: swim upward. On ground: jump, then apply a 10-tick cooldown.
if (!this.isInWater() && !this.handleLavaMovement()) {
if (this.onGround && this.jumpTicks == 0) {
this.jump();
this.jumpTicks = 10;
}
} else {
this.motionY += 0.03999999910593033D;
}
} else {
this.jumpTicks = 0;
}
theProfiler.endSection();
theProfiler.startSection("travel");
this.moveStrafing *= 0.98F;
this.moveForward *= 0.98F;
this.randomYawVelocity *= 0.9F;
// Apply the speed modifier only for this movement step, then restore it.
float oldLandMovementFactor = this.landMovementFactor;
this.landMovementFactor *= this.getSpeedModifier();
this.moveEntityWithHeading(this.moveStrafing, this.moveForward);
this.landMovementFactor = oldLandMovementFactor;
theProfiler.endSection();
theProfiler.startSection("push");
if (!worldObj.isRemote) {
this.func_85033_bc();
}
theProfiler.endSection();
theProfiler.startSection("looting");
// Server-side looting: living entities that can pick up loot grab nearby
// dropped items, gated by the "mobGriefing" game rule.
if (!worldObj.isRemote && this.canPickUpLoot && !this.dead && !this.isDead && this.getHealth() > 0 && worldObj.getGameRules().getGameRuleBooleanValue("mobGriefing")) {
List<EntityItem> entityItemList = worldObj.getEntitiesWithinAABB(EntityItem.class, this.boundingBox.expand(1.0D, 0.0D, 1.0D));
for (EntityItem entityItem : entityItemList) {
if (!entityItem.isDead && entityItem.getEntityItem() != null) {
ItemStack itemStack = entityItem.getEntityItem();
// This isn't actually redundant, because patcher.
//noinspection RedundantCast
boolean isPlayer = (Object) this instanceof EntityPlayerMP;
Item item = itemStack.getItem();
// Skip null items; for players only armour past its pickup delay qualifies.
if (item == null || (!(item instanceof ItemArmor) && (!isPlayer || entityItem.delayBeforeCanPickup > 8))) {
continue;
}
int targetSlot = func_82159_b(itemStack);
if (targetSlot > -1) {
boolean shouldEquip = true;
ItemStack oldItem = this.getCurrentItemOrArmor(targetSlot);
if (oldItem != null) {
// Players never replace an occupied slot here.
if (isPlayer) {
continue;
}
if (targetSlot == 0) {
// Held-item slot: swords beat non-swords; between swords compare
// func_82803_g() values, tie-broken by item damage and NBT presence.
if (item instanceof ItemSword && !(oldItem.getItem() instanceof ItemSword)) {
shouldEquip = true;
} else if (item instanceof ItemSword && oldItem.getItem() instanceof ItemSword) {
ItemSword newSword = (ItemSword) item;
ItemSword oldSword = (ItemSword) oldItem.getItem();
if (newSword.func_82803_g() == oldSword.func_82803_g()) {
shouldEquip = itemStack.getItemDamage() > oldItem.getItemDamage() || itemStack.hasTagCompound() && !oldItem.hasTagCompound();
} else {
shouldEquip = newSword.func_82803_g() > oldSword.func_82803_g();
}
} else {
shouldEquip = false;
}
} else if (item instanceof ItemArmor && !(oldItem.getItem() instanceof ItemArmor)) {
shouldEquip = true;
} else if (item instanceof ItemArmor && oldItem.getItem() instanceof ItemArmor) {
// Armour slots: prefer the higher damageReduceAmount, tie-broken the same way.
ItemArmor newArmor = (ItemArmor) item;
ItemArmor oldArmor = (ItemArmor) oldItem.getItem();
if (newArmor.damageReduceAmount == oldArmor.damageReduceAmount) {
shouldEquip = itemStack.getItemDamage() > oldItem.getItemDamage() || itemStack.hasTagCompound() && !oldItem.hasTagCompound();
} else {
shouldEquip = newArmor.damageReduceAmount > oldArmor.damageReduceAmount;
}
} else {
shouldEquip = false;
}
}
if (shouldEquip) {
// Occasionally drop the replaced gear, equip the new item, and mark
// the entity persistent so it is not despawned with its loot.
if (oldItem != null && this.rand.nextFloat() - 0.1F < this.equipmentDropChances[targetSlot]) {
this.entityDropItem(oldItem, 0.0F);
}
this.setCurrentItemOrArmor(targetSlot, itemStack);
this.equipmentDropChances[targetSlot] = 2.0F;
this.persistenceRequired = true;
this.onItemPickup(entityItem, 1);
entityItem.setDead();
}
}
}
}
}
theProfiler.endSection();
}
}
package fi.aalto.tripchain.route;
import java.io.IOException;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONObject;
import fi.aalto.tripchain.Client;
import fi.aalto.tripchain.Configuration;
import android.app.Service;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.os.AsyncTask;
import android.util.Log;
/**
 * Records a single trip: wires up activity/location listeners around a Trip,
 * and on stop uploads the trip as JSON and removes the service's foreground state.
 */
public class TripRecorder {
private final static String TAG = TripRecorder.class.getSimpleName();
private ActivityListener activityListener;
private LocationListener locationListener;
private Trip trip;
// Hosting service; used for preferences, package info and stopForeground().
private Service context;
private SharedPreferences preferences;
/**
 * Stops the listeners and the trip, then uploads the report and drops the
 * foreground notification on a background thread (network is not allowed on
 * the Android main thread).
 */
public void stop() {
this.activityListener.stop();
this.locationListener.stop();
this.trip.stop();
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
report();
context.stopForeground(true);
return null;
}
}.execute();
}
/** Starts the activity listener, location listener and the trip itself. */
public void start() {
this.activityListener.start();
this.locationListener.start();
this.trip.start();
}
/**
 * @param context hosting service
 * @param clients callback clients the Trip reports to
 */
public TripRecorder(Service context, List<Client> clients) {
this.context = context;
this.trip = new Trip(clients);
this.activityListener = new ActivityListener(context, trip);
this.locationListener = new LocationListener(context, trip);
preferences = context.getSharedPreferences(Configuration.SHARED_PREFERENCES, Context.MODE_MULTI_PROCESS);
}
/**
 * Serialises the trip plus user id and client version to JSON and posts it.
 * Any failure is logged and swallowed (best-effort upload).
 */
private void report() {
try {
JSONObject j = trip.toJson();
j.put("userId", preferences.getString(Configuration.KEY_LOGIN_ID, null));
PackageInfo pInfo = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
j.put("clientVersion", pInfo.versionName);
Log.d(TAG, j.toString(2));
postTrip(j);
} catch (Exception e) {
Log.d(TAG, "Failed to post trip", e);
}
}
/** POSTs the trip JSON (UTF-8) to the collection endpoint and logs the status line. */
private void postTrip(JSONObject trip) throws ClientProtocolException, IOException {
HttpClient client = new DefaultHttpClient();
client.getParams().setParameter("http.protocol.content-charset", "UTF-8");
// NOTE(review): the next line contains an unterminated string literal — the
// URL after "http:" was evidently lost (the "//host/path" part looks like it
// was stripped as a line comment). This will not compile as-is; restore the
// full endpoint URL from version control.
HttpPost httpPost = new HttpPost("http:
httpPost.addHeader("Content-Type", "application/json");
httpPost.setEntity(new StringEntity(trip.toString(), "UTF-8"));
HttpResponse response = client.execute(httpPost);
Log.d(TAG, "post status: " + response.getStatusLine());
}
}
package docs.home.persistence;
import java.util.Optional;
import akka.Done;
//#post1
import com.lightbend.lagom.javadsl.persistence.PersistentEntity;
/**
 * Documentation sample of a blog-post {@link PersistentEntity} demonstrating
 * command handlers, command validation, event handlers and behavior changes.
 * The {@code //#...} markers delimit snippets included in the published docs
 * and must be kept exactly as-is.
 */
public class Post2 extends PersistentEntity<BlogCommand, BlogEvent, BlogState> {
@Override
public Behavior initialBehavior(Optional<BlogState> snapshotState) {
BehaviorBuilder b = newBehaviorBuilder(
snapshotState.orElse(BlogState.EMPTY));
//#command-handler
// Command handlers are invoked for incoming messages (commands).
// A command handler must "return" the events to be persisted (if any).
b.setCommandHandler(AddPost.class, (AddPost cmd, CommandContext<AddPostDone> ctx) -> {
final PostAdded postAdded =
PostAdded.builder().content(cmd.getContent()).postId(entityId()).build();
return ctx.thenPersist(postAdded, (PostAdded evt) ->
// After persist is done additional side effects can be performed
ctx.reply(AddPostDone.of(entityId())));
});
//#command-handler
// NOTE(review): AddPost is registered again below; the second
// setCommandHandler call replaces the one above. The two registrations are
// alternative doc-snippet variants, not cumulative handlers.
//#validate-command
b.setCommandHandler(AddPost.class, (AddPost cmd, CommandContext<AddPostDone> ctx) -> {
if (cmd.getContent().getTitle() == null || cmd.getContent().getTitle().equals("")) {
ctx.invalidCommand("Title must be defined");
return ctx.done();
}
//#validate-command
final PostAdded postAdded =
PostAdded.builder().content(cmd.getContent()).postId(entityId()).build();
return ctx.thenPersist(postAdded, (PostAdded evt) ->
// After persist is done additional side effects can be performed
ctx.reply(AddPostDone.of(entityId())));
});
//#event-handler
// Event handlers are used both when persisting new events
// and when replaying events.
b.setEventHandler(PostAdded.class, evt ->
state().withContent(Optional.of(evt.getContent())));
//#event-handler
//#change-behavior
b.setEventHandlerChangingBehavior(PostAdded.class, evt ->
becomePostAdded(state().withContent(Optional.of(evt.getContent()))));
//#change-behavior
return b.build();
}
//#change-behavior-become
private Behavior becomePostAdded(BlogState newState) {
BehaviorBuilder b = newBehaviorBuilder(newState);
b.setReadOnlyCommandHandler(GetPost.class, (cmd, ctx) ->
ctx.reply(state().getContent().get()));
b.setCommandHandler(ChangeBody.class,
(cmd, ctx) -> ctx.thenPersist(BodyChanged.of(cmd.getBody()), evt ->
ctx.reply(Done.getInstance())));
b.setEventHandler(BodyChanged.class, evt -> state().withBody(evt.getBody()));
return b.build();
}
//#change-behavior-become
}
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
// all copies or substantial portions of the Software.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.microsoft.aad.adal;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.security.DigestException;
import java.security.GeneralSecurityException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.spec.AlgorithmParameterSpec;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.Mac;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import javax.security.auth.x500.X500Principal;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.security.KeyPairGeneratorSpec;
import android.util.Base64;
/**
 * Shared preferences store clear text by default. This class encrypts/decrypts
 * text before it is stored. On API SDK >= 18 it gains additional protection
 * from the AndroidKeyStore.
 */
public class StorageHelper {
private static final String MAC_KEY_HASH_ALGORITHM = "SHA256";
private static final String KEY_STORE_CERT_ALIAS = "AdalKey";
private static final String ADALKS = "adalks";
private static final String KEYSPEC_ALGORITHM = "AES";
private static final String WRAP_ALGORITHM = "RSA/ECB/PKCS1Padding";
private static final String TAG = "StorageHelper";
/**
 * AES has a 16-byte (128-bit) block size, so strictly speaking PKCS#5 padding
 * (defined for 8-byte blocks) should not apply. However, Java's default
 * transformation name is "AES/CBC/PKCS5Padding", and its "PKCS5" padding
 * behaves as PKCS#7 for 16-byte blocks, so we keep the Java default string.
 */
private static final String CIPHER_ALGORITHM = "AES/CBC/PKCS5Padding";
private static final String MAC_ALGORITHM = "HmacSHA256";
private final SecureRandom mRandom;
private static final int KEY_SIZE = 256;
/**
* IV Key length for AES-128.
*/
public static final int DATA_KEY_LENGTH = 16;
/**
* 256 bits output for signing message.
*/
public static final int MAC_LENGTH = 32;
/**
* it is needed for AndroidKeyStore.
*/
private KeyPair mKeyPair;
private Context mContext;
public static final String VERSION_ANDROID_KEY_STORE = "A001";
public static final String VERSION_USER_DEFINED = "U001";
private static final int KEY_VERSION_BLOB_LENGTH = 4;
/**
* To keep track of encoding version and related flags.
*/
private static final String ENCODE_VERSION = "E1";
private static final Object LOCK_OBJ = new Object();
private String mBlobVersion;
private SecretKey mKey = null, mMacKey = null;
private static SecretKey sSecretKeyFromAndroidKeyStore = null;
/**
 * @param ctx Android context retained for later key-store operations
 *            (exact usage is outside this view — confirm against the full class).
 */
public StorageHelper(Context ctx) {
mContext = ctx;
mRandom = new SecureRandom();
}
/**
 * Get Secret Key based on API level to use in encryption. Decryption key
 * depends on version# since user can migrate to new Android.OS
 *
 * @return the encryption key; the derived HMAC key is cached in {@code mMacKey}
 *         as a side effect, and {@code mBlobVersion} records which key source was used
 * @throws IOException if the AndroidKeyStore-backed key cannot be loaded
 * @throws GeneralSecurityException if key derivation or keystore access fails
 * @throws IllegalStateException if no user key is set and the device is below API 18
 */
public synchronized SecretKey loadSecretKeyForAPI() throws IOException, GeneralSecurityException {
// Loading key only once for performance. If API is upgraded, it will
// restart the device anyway. It will load the correct key for new API.
if (mKey != null && mMacKey != null) {
return mKey;
}
final byte[] secretKeyData = AuthenticationSettings.INSTANCE.getSecretKeyData();
if (secretKeyData == null && Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
throw new IllegalStateException("Secret key must be provided for API < 18. " +
"Use AuthenticationSettings.INSTANCE.setSecretKey()");
}
if (secretKeyData != null) {
// A user-supplied key always takes precedence over the keystore-backed one.
Logger.v(TAG, "Encryption will use secret key from Settings");
mKey = getSecretKey(secretKeyData);
mMacKey = getMacKey(mKey);
mBlobVersion = VERSION_USER_DEFINED;
} else {
try {
// androidKeyStore can store app specific self signed cert.
// Asymmetric cryptography is used to protect the session
// key for Encryption and HMac.
// If user specifies secret key, it will use the provided
// key.
mKey = getSecretKeyFromAndroidKeyStore();
mMacKey = getMacKey(mKey);
mBlobVersion = VERSION_ANDROID_KEY_STORE;
} catch (IOException | GeneralSecurityException e) {
Logger.e(TAG, "Failed to get private key from AndroidKeyStore", "",
ADALError.ANDROIDKEYSTORE_FAILED, e);
throw e;
}
}
return mKey;
}
/**
 * Resolves the decryption key matching the version marker stamped on a blob,
 * so data written before a key/OS migration can still be read.
 *
 * @param keyVersion version marker read from the encrypted blob
 * @return the secret key associated with that version
 * @throws GeneralSecurityException
 * @throws IOException
 */
private SecretKey getKeyForVersion(String keyVersion) throws GeneralSecurityException, IOException {
    switch (keyVersion) {
        case VERSION_USER_DEFINED:
            // Caller supplied the raw key bytes through AuthenticationSettings.
            return getSecretKey(AuthenticationSettings.INSTANCE.getSecretKeyData());
        case VERSION_ANDROID_KEY_STORE:
            if (Build.VERSION.SDK_INT < 18) {
                throw new IllegalArgumentException(
                        String.format(
                                "keyVersion '%s' is not supported in this SDK. AndroidKeyStore is supported API18 and above.",
                                keyVersion));
            }
            try {
                // The session key used for encryption and HMac is protected by
                // an app-specific self-signed cert held in AndroidKeyStore.
                return getSecretKeyFromAndroidKeyStore();
            } catch (IOException | GeneralSecurityException e) {
                Logger.e(TAG, "Failed to get private key from AndroidKeyStore", "",
                        ADALError.ANDROIDKEYSTORE_FAILED, e);
                throw e;
            }
        default:
            throw new IllegalArgumentException("keyVersion = " + keyVersion);
    }
}
/**
 * Wraps raw key bytes in a {@link SecretKeySpec} for the configured algorithm.
 *
 * @param rawBytes raw key material; must not be null
 * @return the key spec
 * @throws IllegalArgumentException if {@code rawBytes} is null
 */
private SecretKey getSecretKey(byte[] rawBytes) {
    if (rawBytes == null) {
        throw new IllegalArgumentException("rawBytes");
    }
    return new SecretKeySpec(rawBytes, KEYSPEC_ALGORITHM);
}
/**
 * Derive mac key from given key by hashing its encoded bytes.
 *
 * @param key source key
 * @return a key built from the digest of {@code key}'s encoding, or
 *         {@code key} itself when no encoding is available
 * @throws NoSuchAlgorithmException
 */
private SecretKey getMacKey(SecretKey key) throws NoSuchAlgorithmException {
    // Some keys may not produce byte[] with getEncoded; fall back to the key itself.
    final byte[] rawKey = key.getEncoded();
    if (rawKey == null) {
        return key;
    }
    final MessageDigest hasher = MessageDigest.getInstance(MAC_KEY_HASH_ALGORITHM);
    return new SecretKeySpec(hasher.digest(rawKey), KEYSPEC_ALGORITHM);
}
/**
 * Decrypts a blob produced by {@code encrypt(String)}.
 * Decoded layout: keyVersion(4) + AES-CBC ciphertext + IV(16) + MAC(32).
 * The MAC is verified (constant time, via assertMac) before plaintext is returned.
 *
 * @param value prefixChar + ENCODE_VERSION + base64(blob)
 * @return the decrypted plain text
 * @throws GeneralSecurityException if MAC verification or decryption fails
 * @throws IOException
 */
public String decrypt(String value) throws GeneralSecurityException, IOException {
    Logger.v(TAG, "Starting decryption");
    if (StringExtensions.IsNullOrBlank(value)) {
        throw new IllegalArgumentException("Input is empty or null");
    }
    // First character encodes the encode-version marker length, offset from 'a'.
    int encodeVersionLength = value.charAt(0) - 'a';
    if (encodeVersionLength <= 0) {
        // Fix: message previously said "greater of equal to 0", contradicting
        // the <= 0 check above — the length must be strictly positive.
        throw new IllegalArgumentException(String.format(
                "Encode version length: '%s' is not valid, it must be greater than 0",
                encodeVersionLength));
    }
    if (!value.substring(1, 1 + encodeVersionLength).equals(ENCODE_VERSION)) {
        throw new IllegalArgumentException(String.format(
                "Encode version received was: '%s', Encode version supported is: '%s'", value,
                ENCODE_VERSION));
    }
    final byte[] bytes = Base64
            .decode(value.substring(1 + encodeVersionLength), Base64.DEFAULT);
    // get key version used for this data. If user upgraded to different
    // API level, data needs to be updated
    String keyVersionCheck = new String(bytes, 0, KEY_VERSION_BLOB_LENGTH,
            AuthenticationConstants.ENCODING_UTF8);
    Logger.v(TAG, "Encrypt version:" + keyVersionCheck);
    SecretKey versionKey = getKeyForVersion(keyVersionCheck);
    SecretKey versionMacKey = getMacKey(versionKey);
    // byte input array: keyVersion-encryptedData-iv-macDigest
    int ivIndex = bytes.length - DATA_KEY_LENGTH - MAC_LENGTH;
    int macIndex = bytes.length - MAC_LENGTH;
    int encryptedLength = ivIndex - KEY_VERSION_BLOB_LENGTH;
    if (ivIndex < 0 || macIndex < 0 || encryptedLength < 0) {
        throw new IllegalArgumentException(
                "Given value is smaller than the IV vector and MAC length");
    }
    // Recompute the digest over bytes[0..macIndex) — i.e. keyVersion +
    // encryptedData + IV, matching what encrypt() signed — and compare it
    // to the digest appended at the end of the message.
    Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
    Mac mac = Mac.getInstance(MAC_ALGORITHM);
    mac.init(versionMacKey);
    mac.update(bytes, 0, macIndex);
    byte[] macDigest = mac.doFinal();
    // Compare digest of input message and calculated digest
    assertMac(bytes, macIndex, bytes.length, macDigest);
    // Get IV related bytes from the end and set to decrypt mode with
    // that IV.
    // It is using same cipher for different version since version# change
    // will mean upgrade to AndroidKeyStore and new Key.
    cipher.init(Cipher.DECRYPT_MODE, versionKey, new IvParameterSpec(bytes, ivIndex,
            DATA_KEY_LENGTH));
    // Decrypt data bytes from KEY_VERSION_BLOB_LENGTH to ivIndex.
    String decrypted = new String(cipher.doFinal(bytes, KEY_VERSION_BLOB_LENGTH,
            encryptedLength), AuthenticationConstants.ENCODING_UTF8);
    Logger.v(TAG, "Finished decryption");
    return decrypted;
}
/**
 * Encodes the length of {@code ENCODE_VERSION} as one character offset from
 * 'a' (length 2 -> 'c'); used as the first character of every encrypted blob.
 */
private char getEncodeVersionLengthPrefix() {
    final int versionLength = ENCODE_VERSION.length();
    return (char) ('a' + versionLength);
}
/**
 * encrypt text with current key based on API level.
 * Output layout before base64: keyVersion(4) + AES-CBC ciphertext + IV(16) +
 * MAC digest(32); the base64 string is prefixed with a one-char
 * encode-version-length marker and the encode version string.
 *
 * @param clearText text to protect; must be non-blank
 * @return prefixed, base64-encoded encrypted blob
 * @throws GeneralSecurityException
 * @throws UnsupportedEncodingException
 */
public String encrypt(String clearText)
        throws GeneralSecurityException, IOException {
    Logger.v(TAG, "Starting encryption");
    if (StringExtensions.IsNullOrBlank(clearText)) {
        throw new IllegalArgumentException("Input is empty or null");
    }
    // load key for encryption if not loaded
    loadSecretKeyForAPI();
    Logger.v(TAG, "Encrypt version:" + mBlobVersion);
    final byte[] blobVersion = mBlobVersion.getBytes(AuthenticationConstants.ENCODING_UTF8);
    final byte[] bytes = clearText.getBytes(AuthenticationConstants.ENCODING_UTF8);
    // IV: random initialization vector that is needed to start CBC
    byte[] iv = new byte[DATA_KEY_LENGTH];
    mRandom.nextBytes(iv);
    IvParameterSpec ivSpec = new IvParameterSpec(iv);
    // Set to encrypt mode
    Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
    Mac mac = Mac.getInstance(MAC_ALGORITHM);
    cipher.init(Cipher.ENCRYPT_MODE, mKey, ivSpec);
    byte[] encrypted = cipher.doFinal(bytes);
    // MAC signs blobVersion + encryptedData + IV. Note decrypt() verifies the
    // same span (bytes[0..macIndex)), which likewise starts at the version bytes.
    mac.init(mMacKey);
    mac.update(blobVersion);
    mac.update(encrypted);
    mac.update(iv);
    byte[] macDigest = mac.doFinal();
    // Init array to store blobVersion, encrypted data, iv, macdigest
    byte[] blobVerAndEncryptedDataAndIVAndMacDigest = new byte[blobVersion.length
            + encrypted.length + iv.length + macDigest.length];
    System.arraycopy(blobVersion, 0, blobVerAndEncryptedDataAndIVAndMacDigest, 0,
            blobVersion.length);
    System.arraycopy(encrypted, 0, blobVerAndEncryptedDataAndIVAndMacDigest,
            blobVersion.length, encrypted.length);
    System.arraycopy(iv, 0, blobVerAndEncryptedDataAndIVAndMacDigest, blobVersion.length
            + encrypted.length, iv.length);
    System.arraycopy(macDigest, 0, blobVerAndEncryptedDataAndIVAndMacDigest, blobVersion.length
            + encrypted.length + iv.length, macDigest.length);
    String encryptedText = new String(Base64.encode(blobVerAndEncryptedDataAndIVAndMacDigest,
            Base64.NO_WRAP), AuthenticationConstants.ENCODING_UTF8);
    Logger.v(TAG, "Finished encryption");
    return getEncodeVersionLengthPrefix() + ENCODE_VERSION + encryptedText;
}
/**
 * Verifies that {@code calculated} equals the expected MAC stored at
 * {@code digest[start..end)}.
 *
 * @param digest buffer whose [start, end) range holds the expected MAC
 * @param start inclusive start index of the expected MAC
 * @param end exclusive end index of the expected MAC
 * @param calculated freshly computed MAC to compare against
 * @throws DigestException if the MACs differ
 * @throws IllegalArgumentException if the lengths do not match
 */
private void assertMac(byte[] digest, int start, int end, byte[] calculated)
        throws DigestException {
    if (calculated.length != (end - start)) {
        throw new IllegalArgumentException("Unexpected MAC length");
    }
    byte result = 0;
    // It does not fail fast on the first not equal byte to protect against
    // timing attack.
    for (int i = start; i < end; i++) {
        result |= calculated[i - start] ^ digest[i];
    }
    if (result != 0) {
        throw new DigestException();
    }
}
/**
 * generate secretKey to store after wrapping with KeyStore.
 *
 * @return a freshly generated key of {@code KEY_SIZE} bits
 * @throws NoSuchAlgorithmException
 */
private final SecretKey generateSecretKey() throws NoSuchAlgorithmException {
    final KeyGenerator generator = KeyGenerator.getInstance(KEYSPEC_ALGORITHM);
    generator.init(KEY_SIZE, mRandom);
    return generator.generateKey();
}
/**
 * Supported API >= 18. PrivateKey is stored in AndroidKeyStore. Loads the
 * wrapped session key from the file if it exists; if not, it generates,
 * wraps and persists one. On any unwrap failure, all key state is reset so
 * the next request regenerates a consistent pair (previously encrypted
 * tokens become undecryptable in that case).
 *
 * @return the unwrapped session key (cached process-wide)
 * @throws IOException
 * @throws GeneralSecurityException
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
final synchronized private SecretKey getSecretKeyFromAndroidKeyStore() throws IOException,
        GeneralSecurityException {
    // Loading file and unwrapping this key is causing performance issue.
    if (sSecretKeyFromAndroidKeyStore != null) {
        return sSecretKeyFromAndroidKeyStore;
    }
    // Store secret key in a file after wrapping
    File keyFile = new File(mContext.getDir(mContext.getPackageName(), Context.MODE_PRIVATE),
            ADALKS);
    if (mKeyPair == null) {
        mKeyPair = getKeyPairFromAndroidKeyStore();
        Logger.v(TAG, "Retrived keypair from androidKeyStore");
    }
    Cipher wrapCipher = Cipher.getInstance(WRAP_ALGORITHM);
    // If keyfile does not exist, it needs to generate one
    if (!keyFile.exists()) {
        Logger.v(TAG, "Key file does not exists");
        final SecretKey key = generateSecretKey();
        Logger.v(TAG, "Wrapping SecretKey");
        final byte[] keyWrapped = wrap(wrapCipher, key);
        Logger.v(TAG, "Writing SecretKey");
        writeKeyData(keyFile, keyWrapped);
        Logger.v(TAG, "Finished writing SecretKey");
    }
    // Read from file again
    Logger.v(TAG, "Reading SecretKey");
    try {
        final byte[] encryptedKey = readKeyData(keyFile);
        if (encryptedKey == null || encryptedKey.length == 0) {
            throw new UnrecoverableKeyException("Couldn't find encrypted key in file");
        }
        // Check if the retrieved keypair is empty. With the current limitation of
        // AndroidKeyStore, there is possibility that the alias is not wiped but
        // the key data is wiped, if this is the case, the retrieved keypair will
        // be empty, and when we use the private key to do unwrap, we'll encounter
        final PrivateKey privateKey = mKeyPair.getPrivate();
        if (privateKey == null || privateKey.getEncoded() == null || privateKey.getEncoded().length == 0) {
            throw new UnrecoverableKeyException("Retrieved private key is empty.");
        }
        sSecretKeyFromAndroidKeyStore = unwrap(wrapCipher, encryptedKey);
        Logger.v(TAG, "Finished reading SecretKey");
    } catch (GeneralSecurityException | IOException ex) {
        // Reset KeyPair info so that new request will generate correct KeyPairs.
        // All tokens with previous SecretKey are not possible to decrypt.
        Logger.e(TAG, "Unwrap failed for AndroidKeyStore", "", ADALError.ANDROIDKEYSTORE_FAILED, ex);
        mKeyPair = null;
        sSecretKeyFromAndroidKeyStore = null;
        deleteKeyFile();
        resetKeyPairFromAndroidKeyStore();
        Logger.v(TAG, "Removed previous key pair info.");
        throw ex;
    }
    return sSecretKeyFromAndroidKeyStore;
}
/**
 * Deletes the file that holds the wrapped secret key, if present.
 * Called when unwrapping fails so a fresh key gets generated next time.
 */
private void deleteKeyFile() {
    // Same location used by getSecretKeyFromAndroidKeyStore() to persist the key.
    File keyFile = new File(mContext.getDir(mContext.getPackageName(), Context.MODE_PRIVATE),
            ADALKS);
    if (keyFile.exists()) {
        Logger.v(TAG, "Delete KeyFile");
        // Fix: File.delete() result was previously ignored; a stale key file
        // left behind would keep failing unwrap on subsequent attempts.
        if (!keyFile.delete()) {
            Logger.v(TAG, "Delete KeyFile failed");
        }
    }
}
/**
 * Get key pair from AndroidKeyStore. Generates a 100-year self-signed RSA
 * entry under {@code KEY_STORE_CERT_ALIAS} if none exists yet.
 *
 * @return the RSA key pair used to wrap/unwrap the session key
 * @throws GeneralSecurityException
 * @throws IOException
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
private synchronized KeyPair getKeyPairFromAndroidKeyStore() throws GeneralSecurityException, IOException {
    KeyStore keyStore = KeyStore.getInstance("AndroidKeyStore");
    keyStore.load(null);
    if (!keyStore.containsAlias(KEY_STORE_CERT_ALIAS)) {
        Logger.v(TAG, "Key entry is not available");
        Calendar start = Calendar.getInstance();
        Calendar end = Calendar.getInstance();
        end.add(Calendar.YEAR, 100);
        // self signed cert stored in AndroidKeyStore to asym. encrypt key
        // to a file
        final KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA",
                "AndroidKeyStore");
        generator.initialize(getKeyPairGeneratorSpec(mContext, start.getTime(), end.getTime()));
        generator.generateKeyPair();
        Logger.v(TAG, "Key entry is generated");
    } else {
        Logger.v(TAG, "Key entry is available");
    }
    // Read key pair again
    Logger.v(TAG, "Reading Key entry");
    try {
        final KeyStore.PrivateKeyEntry entry = (KeyStore.PrivateKeyEntry)keyStore.getEntry(
                KEY_STORE_CERT_ALIAS, null);
        return new KeyPair(entry.getCertificate().getPublicKey(), entry.getPrivateKey());
    } catch (RuntimeException e) {
        // There is an issue in android keystore that resets keystore
        // Issue 61989: AndroidKeyStore deleted after changing screen lock type
        // in this case getEntry throws
        // java.lang.RuntimeException: error:0D07207B:asn1 encoding routines:ASN1_get_object:header too long
        // handle it as regular KeyStoreException
        throw new KeyStoreException(e);
    }
}
/**
 * Builds the spec for the self-signed AndroidKeyStore certificate used to
 * wrap the session key.
 *
 * @param context app context supplying the package name for the subject
 * @param start certificate validity start
 * @param end certificate validity end
 * @return spec to initialize the RSA {@code KeyPairGenerator}
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
private AlgorithmParameterSpec getKeyPairGeneratorSpec(Context context, Date start, Date end) {
    final String subject = String.format(Locale.ROOT, "CN=%s, OU=%s", KEY_STORE_CERT_ALIAS,
            context.getPackageName());
    final KeyPairGeneratorSpec.Builder builder = new KeyPairGeneratorSpec.Builder(context);
    builder.setAlias(KEY_STORE_CERT_ALIAS);
    builder.setSubject(new X500Principal(subject));
    builder.setSerialNumber(BigInteger.ONE);
    builder.setStartDate(start);
    builder.setEndDate(end);
    return builder.build();
}
/**
 * Removes the wrapping key pair entry from AndroidKeyStore so the next
 * access regenerates a fresh one.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
private synchronized void resetKeyPairFromAndroidKeyStore() throws KeyStoreException,
        NoSuchAlgorithmException, CertificateException, IOException {
    final KeyStore androidKeyStore = KeyStore.getInstance("AndroidKeyStore");
    androidKeyStore.load(null);
    androidKeyStore.deleteEntry(KEY_STORE_CERT_ALIAS);
}
/**
 * Wraps (encrypts) the session key with the KeyStore-held public key so it
 * can be persisted to a file.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
private byte[] wrap(Cipher wrapCipher, SecretKey key) throws GeneralSecurityException {
    wrapCipher.init(Cipher.WRAP_MODE, mKeyPair.getPublic());
    return wrapCipher.wrap(key);
}
/**
 * Unwraps (decrypts) a persisted session-key blob with the KeyStore-held
 * private key.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
private SecretKey unwrap(Cipher wrapCipher, byte[] keyBlob) throws GeneralSecurityException {
    wrapCipher.init(Cipher.UNWRAP_MODE, mKeyPair.getPrivate());
    return (SecretKey)wrapCipher.unwrap(keyBlob, KEYSPEC_ALGORITHM, Cipher.SECRET_KEY);
}
/**
 * Writes the wrapped key bytes to {@code file}, truncating any existing content.
 *
 * @param file destination file
 * @param data wrapped key bytes
 * @throws IOException
 */
private static void writeKeyData(File file, byte[] data) throws IOException {
    Logger.v(TAG, "Writing key data to a file");
    // try-with-resources closes the stream even when write() throws
    // (the file already uses Java 7 features such as multi-catch).
    try (OutputStream out = new FileOutputStream(file)) {
        out.write(data);
    }
}
/**
 * Reads the whole content of {@code file} into a byte array.
 *
 * @param file source file holding the wrapped key
 * @return the file content (empty array for an empty file)
 * @throws IOException
 */
private static byte[] readKeyData(File file) throws IOException {
    Logger.v(TAG, "Reading key data from a file");
    // try-with-resources replaces the manual try/finally close.
    try (InputStream in = new FileInputStream(file)) {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int count;
        while ((count = in.read(buffer)) != -1) {
            bytes.write(buffer, 0, count);
        }
        return bytes.toByteArray();
    }
}
} |
package com.jme3.bullet.objects;
import com.jme3.math.Vector3f;
import com.jme3.scene.Spatial;
import com.jme3.bullet.PhysicsSpace;
import com.jme3.bullet.collision.PhysicsCollisionObject;
import com.jme3.bullet.collision.shapes.CollisionShape;
import com.jme3.bullet.collision.shapes.MeshCollisionShape;
import com.jme3.bullet.joints.PhysicsJoint;
import com.jme3.bullet.objects.infos.RigidBodyMotionState;
import com.jme3.export.InputCapsule;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.OutputCapsule;
import com.jme3.math.Matrix3f;
import com.jme3.math.Quaternion;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.debug.Arrow;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* <p>PhysicsRigidBody - Basic physics object</p>
* @author normenhansen
*/
public class PhysicsRigidBody extends PhysicsCollisionObject {
protected RigidBodyMotionState motionState = new RigidBodyMotionState();
protected float mass = 1.0f;
protected boolean kinematic = false;
protected ArrayList<PhysicsJoint> joints = new ArrayList<PhysicsJoint>();
public PhysicsRigidBody() {
}
/**
* Creates a new PhysicsRigidBody with the supplied collision shape
* @param child
* @param shape
*/
public PhysicsRigidBody(CollisionShape shape) {
collisionShape = shape;
rebuildRigidBody();
}
public PhysicsRigidBody(CollisionShape shape, float mass) {
collisionShape = shape;
this.mass = mass;
rebuildRigidBody();
}
/**
* Builds/rebuilds the phyiscs body when parameters have changed
*/
protected void rebuildRigidBody() {
boolean removed = false;
if (collisionShape instanceof MeshCollisionShape && mass != 0) {
throw new IllegalStateException("Dynamic rigidbody can not have mesh collision shape!");
}
if (objectId != 0) {
if (isInWorld(objectId)) {
PhysicsSpace.getPhysicsSpace().remove(this);
removed = true;
}
Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Clearing RigidBody {0}", Long.toHexString(objectId));
finalizeNative(objectId);
}
preRebuild();
objectId = createRigidBody(mass, motionState.getObjectId(), collisionShape.getObjectId());
Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Created RigidBody {0}", Long.toHexString(objectId));
postRebuild();
if (removed) {
PhysicsSpace.getPhysicsSpace().add(this);
}
}
protected void preRebuild() {
}
private native long createRigidBody(float mass, long motionStateId, long collisionShapeId);
protected void postRebuild() {
if (mass == 0.0f) {
setStatic(objectId, true);
} else {
setStatic(objectId, false);
}
}
/**
* @return the motionState
*/
public RigidBodyMotionState getMotionState() {
return motionState;
}
public boolean isInWorld() {
return isInWorld(objectId);
}
private native boolean isInWorld(long objectId);
/**
* Sets the physics object location
* @param location the location of the actual physics object
*/
public void setPhysicsLocation(Vector3f location) {
setPhysicsLocation(objectId, location);
}
private native void setPhysicsLocation(long objectId, Vector3f location);
/**
* Sets the physics object rotation
* @param rotation the rotation of the actual physics object
*/
public void setPhysicsRotation(Matrix3f rotation) {
setPhysicsRotation(objectId, rotation);
}
private native void setPhysicsRotation(long objectId, Matrix3f rotation);
/**
* Sets the physics object rotation
* @param rotation the rotation of the actual physics object
*/
public void setPhysicsRotation(Quaternion rotation) {
setPhysicsRotation(objectId, rotation);
}
private native void setPhysicsRotation(long objectId, Quaternion rotation);
/**
* @return the physicsLocation
*/
public Vector3f getPhysicsLocation(Vector3f trans) {
if (trans == null) {
trans = new Vector3f();
}
getPhysicsLocation(objectId, trans);
return trans;
}
private native void getPhysicsLocation(long objectId, Vector3f vector);
/**
* @return the physicsLocation
*/
public Quaternion getPhysicsRotation(Quaternion rot) {
if (rot == null) {
rot = new Quaternion();
}
getPhysicsRotation(objectId, rot);
return rot;
}
private native void getPhysicsRotation(long objectId, Quaternion rot);
/**
* @return the physicsLocation
*/
public Matrix3f getPhysicsRotationMatrix(Matrix3f rot) {
if (rot == null) {
rot = new Matrix3f();
}
getPhysicsRotationMatrix(objectId, rot);
return rot;
}
private native void getPhysicsRotationMatrix(long objectId, Matrix3f rot);
/**
* @return the physicsLocation
*/
public Vector3f getPhysicsLocation() {
Vector3f vec = new Vector3f();
getPhysicsLocation(objectId, vec);
return vec;
}
/**
* @return the physicsLocation
*/
public Quaternion getPhysicsRotation() {
Quaternion quat = new Quaternion();
getPhysicsRotation(objectId, quat);
return quat;
}
public Matrix3f getPhysicsRotationMatrix() {
Matrix3f mtx = new Matrix3f();
getPhysicsRotationMatrix(objectId, mtx);
return mtx;
}
// /**
// * Gets the physics object location
// * @param location the location of the actual physics object is stored in this Vector3f
// */
// public Vector3f getInterpolatedPhysicsLocation(Vector3f location) {
// if (location == null) {
// location = new Vector3f();
// rBody.getInterpolationWorldTransform(tempTrans);
// return Converter.convert(tempTrans.origin, location);
// /**
// * Gets the physics object rotation
// * @param rotation the rotation of the actual physics object is stored in this Matrix3f
// */
// public Matrix3f getInterpolatedPhysicsRotation(Matrix3f rotation) {
// if (rotation == null) {
// rotation = new Matrix3f();
// rBody.getInterpolationWorldTransform(tempTrans);
// return Converter.convert(tempTrans.basis, rotation);
/**
* Sets the node to kinematic mode. in this mode the node is not affected by physics
* but affects other physics objects. Iits kinetic force is calculated by the amount
* of movement it is exposed to and its weight.
* @param kinematic
*/
public void setKinematic(boolean kinematic) {
this.kinematic = kinematic;
setKinematic(objectId, kinematic);
}
private native void setKinematic(long objectId, boolean kinematic);
public boolean isKinematic() {
return kinematic;
}
public void setCcdSweptSphereRadius(float radius) {
setCcdSweptSphereRadius(objectId, radius);
}
private native void setCcdSweptSphereRadius(long objectId, float radius);
/**
* Sets the amount of motion that has to happen in one physics tick to trigger the continuous motion detection<br/>
* This avoids the problem of fast objects moving through other objects, set to zero to disable (default)
* @param threshold
*/
public void setCcdMotionThreshold(float threshold) {
setCcdMotionThreshold(objectId, threshold);
}
private native void setCcdMotionThreshold(long objectId, float threshold);
public float getCcdSweptSphereRadius() {
return getCcdSweptSphereRadius(objectId);
}
private native float getCcdSweptSphereRadius(long objectId);
public float getCcdMotionThreshold() {
return getCcdMotionThreshold(objectId);
}
private native float getCcdMotionThreshold(long objectId);
public float getCcdSquareMotionThreshold() {
return getCcdSquareMotionThreshold(objectId);
}
private native float getCcdSquareMotionThreshold(long objectId);
public float getMass() {
return mass;
}
/**
* Sets the mass of this PhysicsRigidBody, objects with mass=0 are static.
* @param mass
*/
public void setMass(float mass) {
this.mass = mass;
if (collisionShape instanceof MeshCollisionShape && mass != 0) {
throw new IllegalStateException("Dynamic rigidbody can not have mesh collision shape!");
}
if (objectId != 0) {
if (collisionShape != null) {
updateMassProps(objectId, collisionShape.getObjectId(), mass);
}
if (mass == 0.0f) {
setStatic(objectId, true);
} else {
setStatic(objectId, false);
}
}
}
private native void setStatic(long objectId, boolean state);
private native long updateMassProps(long objectId, long collisionShapeId, float mass);
public Vector3f getGravity() {
return getGravity(null);
}
public Vector3f getGravity(Vector3f gravity) {
if (gravity == null) {
gravity = new Vector3f();
}
getGravity(objectId, gravity);
return gravity;
}
private native void getGravity(long objectId, Vector3f gravity);
/**
* Set the local gravity of this PhysicsRigidBody<br/>
* Set this after adding the node to the PhysicsSpace,
* the PhysicsSpace assigns its current gravity to the physics node when its added.
* @param gravity the gravity vector to set
*/
public void setGravity(Vector3f gravity) {
setGravity(objectId, gravity);
}
private native void setGravity(long objectId, Vector3f gravity);
public float getFriction() {
return getFriction(objectId);
}
private native float getFriction(long objectId);
/**
* Sets the friction of this physics object
* @param friction the friction of this physics object
*/
public void setFriction(float friction) {
setFriction(objectId, friction);
}
private native void setFriction(long objectId, float friction);
public void setDamping(float linearDamping, float angularDamping) {
setDamping(objectId, linearDamping, angularDamping);
}
private native void setDamping(long objectId, float linearDamping, float angularDamping);
// private native void setRestitution(long objectId, float factor);
// public void setLinearDamping(float linearDamping) {
// constructionInfo.linearDamping = linearDamping;
// rBody.setDamping(linearDamping, constructionInfo.angularDamping);
// private native void setRestitution(long objectId, float factor);
// public void setAngularDamping(float angularDamping) {
//// constructionInfo.angularDamping = angularDamping;
//// rBody.setDamping(constructionInfo.linearDamping, angularDamping);
private native void setAngularDamping(long objectId, float factor);
public float getLinearDamping() {
return getLinearDamping(objectId);
}
private native float getLinearDamping(long objectId);
public float getAngularDamping() {
return getAngularDamping(objectId);
}
private native float getAngularDamping(long objectId);
public float getRestitution() {
return getRestitution(objectId);
}
private native float getRestitution(long objectId);
/**
* The "bouncyness" of the PhysicsRigidBody, best performance if restitution=0
* @param restitution
*/
public void setRestitution(float restitution) {
setRestitution(objectId, mass);
}
private native void setRestitution(long objectId, float factor);
/**
* Get the current angular velocity of this PhysicsRigidBody
* @return the current linear velocity
*/
public Vector3f getAngularVelocity() {
Vector3f vec = new Vector3f();
getAngularVelocity(objectId, vec);
return vec;
}
private native void getAngularVelocity(long objectId, Vector3f vec);
/**
* Get the current angular velocity of this PhysicsRigidBody
* @param vec the vector to store the velocity in
*/
public void getAngularVelocity(Vector3f vec) {
getAngularVelocity(objectId, vec);
}
/**
* Sets the angular velocity of this PhysicsRigidBody
* @param vec the angular velocity of this PhysicsRigidBody
*/
public void setAngularVelocity(Vector3f vec) {
setAngularVelocity(objectId, vec);
activate();
}
private native void setAngularVelocity(long objectId, Vector3f vec);
/**
* Get the current linear velocity of this PhysicsRigidBody
* @return the current linear velocity
*/
public Vector3f getLinearVelocity() {
Vector3f vec = new Vector3f();
getLinearVelocity(objectId, vec);
return vec;
}
private native void getLinearVelocity(long objectId, Vector3f vec);
/**
* Get the current linear velocity of this PhysicsRigidBody
* @param vec the vector to store the velocity in
*/
public void getLinearVelocity(Vector3f vec) {
getLinearVelocity(objectId, vec);
}
/**
* Sets the linear velocity of this PhysicsRigidBody
* @param vec the linear velocity of this PhysicsRigidBody
*/
public void setLinearVelocity(Vector3f vec) {
setLinearVelocity(objectId, vec);
activate();
}
private native void setLinearVelocity(long objectId, Vector3f vec);
/**
* Apply a force to the PhysicsRigidBody, only applies force if the next physics update call
* updates the physics space.<br>
* To apply an impulse, use applyImpulse, use applyContinuousForce to apply continuous force.
* @param force the force
* @param location the location of the force
*/
public void applyForce(Vector3f force, Vector3f location) {
applyForce(objectId, force, location);
activate();
}
private native void applyForce(long objectId, Vector3f force, Vector3f location);
/**
* Apply a force to the PhysicsRigidBody, only applies force if the next physics update call
* updates the physics space.<br>
* To apply an impulse, use applyImpulse.
*
* @param force the force
*/
public void applyCentralForce(Vector3f force) {
applyCentralForce(objectId, force);
activate();
}
private native void applyCentralForce(long objectId, Vector3f force);
/**
* Apply a force to the PhysicsRigidBody, only applies force if the next physics update call
* updates the physics space.<br>
* To apply an impulse, use applyImpulse.
*
* @param torque the torque
*/
public void applyTorque(Vector3f torque) {
applyTorque(objectId, torque);
activate();
}
private native void applyTorque(long objectId, Vector3f vec);
/**
* Apply an impulse to the PhysicsRigidBody in the next physics update.
* @param impulse applied impulse
* @param rel_pos location relative to object
*/
public void applyImpulse(Vector3f impulse, Vector3f rel_pos) {
applyImpulse(objectId, impulse, rel_pos);
activate();
}
private native void applyImpulse(long objectId, Vector3f impulse, Vector3f rel_pos);
/**
* Apply a torque impulse to the PhysicsRigidBody in the next physics update.
* @param vec
*/
public void applyTorqueImpulse(Vector3f vec) {
applyTorqueImpulse(objectId, vec);
activate();
}
private native void applyTorqueImpulse(long objectId, Vector3f vec);
/**
* Clear all forces from the PhysicsRigidBody
*
*/
public void clearForces() {
clearForces(objectId);
}
private native void clearForces(long objectId);
public void setCollisionShape(CollisionShape collisionShape) {
super.setCollisionShape(collisionShape);
if (collisionShape instanceof MeshCollisionShape && mass != 0) {
throw new IllegalStateException("Dynamic rigidbody can not have mesh collision shape!");
}
if (objectId == 0) {
rebuildRigidBody();
} else {
setCollisionShape(objectId, collisionShape.getObjectId());
updateMassProps(objectId, collisionShape.getObjectId(), mass);
}
}
private native void setCollisionShape(long objectId, long collisionShapeId);
/**
* reactivates this PhysicsRigidBody when it has been deactivated because it was not moving
*/
public void activate() {
activate(objectId);
}
private native void activate(long objectId);
public boolean isActive() {
return isActive(objectId);
}
private native boolean isActive(long objectId);
/**
* sets the sleeping thresholds, these define when the object gets deactivated
* to save ressources. Low values keep the object active when it barely moves
* @param linear the linear sleeping threshold
* @param angular the angular sleeping threshold
*/
public void setSleepingThresholds(float linear, float angular) {
setSleepingThresholds(objectId, linear, angular);
}
private native void setSleepingThresholds(long objectId, float linear, float angular);
public void setLinearSleepingThreshold(float linearSleepingThreshold) {
setLinearSleepingThreshold(objectId, linearSleepingThreshold);
}
private native void setLinearSleepingThreshold(long objectId, float linearSleepingThreshold);
public void setAngularSleepingThreshold(float angularSleepingThreshold) {
setAngularSleepingThreshold(objectId, angularSleepingThreshold);
}
private native void setAngularSleepingThreshold(long objectId, float angularSleepingThreshold);
public float getLinearSleepingThreshold() {
return getLinearSleepingThreshold(objectId);
}
private native float getLinearSleepingThreshold(long objectId);
public float getAngularSleepingThreshold() {
return getAngularSleepingThreshold(objectId);
}
private native float getAngularSleepingThreshold(long objectId);
public float getAngularFactor() {
return getAngularFactor(objectId);
}
private native float getAngularFactor(long objectId);
public void setAngularFactor(float factor) {
setAngularFactor(objectId, factor);
}
private native void setAngularFactor(long objectId, float factor);
/**
* do not use manually, joints are added automatically
*/
public void addJoint(PhysicsJoint joint) {
if (!joints.contains(joint)) {
joints.add(joint);
}
updateDebugShape();
}
public void removeJoint(PhysicsJoint joint) {
joints.remove(joint);
}
/**
* Returns a list of connected joints. This list is only filled when
* the PhysicsRigidBody is actually added to the physics space or loaded from disk.
* @return list of active joints connected to this PhysicsRigidBody
*/
public List<PhysicsJoint> getJoints() {
return joints;
}
@Override
protected Spatial getDebugShape() {
//add joints
Spatial shape = super.getDebugShape();
Node node = null;
if (shape instanceof Node) {
node = (Node) shape;
} else {
node = new Node("DebugShapeNode");
node.attachChild(shape);
}
int i = 0;
for (Iterator<PhysicsJoint> it = joints.iterator(); it.hasNext();) {
PhysicsJoint physicsJoint = it.next();
Vector3f pivot = null;
if (physicsJoint.getBodyA() == this) {
pivot = physicsJoint.getPivotA();
} else {
pivot = physicsJoint.getPivotB();
}
Arrow arrow = new Arrow(pivot);
Geometry geom = new Geometry("DebugBone" + i, arrow);
geom.setMaterial(debugMaterialGreen);
node.attachChild(geom);
i++;
}
return node;
}
@Override
public void write(JmeExporter e) throws IOException {
super.write(e);
OutputCapsule capsule = e.getCapsule(this);
capsule.write(getMass(), "mass", 1.0f);
capsule.write(getGravity(), "gravity", Vector3f.ZERO);
capsule.write(getFriction(), "friction", 0.5f);
capsule.write(getRestitution(), "restitution", 0);
capsule.write(getAngularFactor(), "angularFactor", 1);
capsule.write(kinematic, "kinematic", false);
capsule.write(getLinearDamping(), "linearDamping", 0);
capsule.write(getAngularDamping(), "angularDamping", 0);
capsule.write(getLinearSleepingThreshold(), "linearSleepingThreshold", 0.8f);
capsule.write(getAngularSleepingThreshold(), "angularSleepingThreshold", 1.0f);
capsule.write(getCcdMotionThreshold(), "ccdMotionThreshold", 0);
capsule.write(getCcdSweptSphereRadius(), "ccdSweptSphereRadius", 0);
capsule.write(getPhysicsLocation(new Vector3f()), "physicsLocation", new Vector3f());
capsule.write(getPhysicsRotationMatrix(new Matrix3f()), "physicsRotation", new Matrix3f());
capsule.writeSavableArrayList(joints, "joints", null);
}
// De-serializes the physics state; names and defaults mirror write() above.
@Override
public void read(JmeImporter e) throws IOException {
    super.read(e);
    InputCapsule capsule = e.getCapsule(this);
    float mass = capsule.readFloat("mass", 1.0f);
    // The mass field is assigned directly before rebuildRigidBody(); presumably the
    // rebuild needs it and the setters below need the rebuilt body — TODO confirm ordering.
    this.mass = mass;
    rebuildRigidBody();
    // clone() avoids handing the shared Vector3f.ZERO constant to the setter.
    setGravity((Vector3f) capsule.readSavable("gravity", Vector3f.ZERO.clone()));
    setFriction(capsule.readFloat("friction", 0.5f));
    setKinematic(capsule.readBoolean("kinematic", false));
    setRestitution(capsule.readFloat("restitution", 0));
    setAngularFactor(capsule.readFloat("angularFactor", 1));
    setDamping(capsule.readFloat("linearDamping", 0), capsule.readFloat("angularDamping", 0));
    setSleepingThresholds(capsule.readFloat("linearSleepingThreshold", 0.8f), capsule.readFloat("angularSleepingThreshold", 1.0f));
    setCcdMotionThreshold(capsule.readFloat("ccdMotionThreshold", 0));
    setCcdSweptSphereRadius(capsule.readFloat("ccdSweptSphereRadius", 0));
    setPhysicsLocation((Vector3f) capsule.readSavable("physicsLocation", new Vector3f()));
    setPhysicsRotation((Matrix3f) capsule.readSavable("physicsRotation", new Matrix3f()));
    // May be null when nothing was saved under "joints" (null default above).
    joints = capsule.readSavableArrayList("joints", null);
}
} |
package com.sailthru.client.params;
import com.google.gson.Gson;
import com.sailthru.client.SailthruUtil;
import java.util.HashMap;
import java.util.Map;
import junit.framework.TestCase;
/**
*
* @author ianwhite
*/
public class UserTest extends TestCase {
public Gson gson = SailthruUtil.createGson();
public User user = new User();
public void testSerializationNull() {
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("baz", null);
user.setVars(vars);
String expected = "{\"vars\":{\"baz\":null}}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetId() {
User user = new User("foo@bar.com");
String expected = "{\"id\":\"foo@bar.com\"}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetKey() {
user.setKey("email");
String expected = "{\"key\":\"email\"}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetFields() {
Map<String, Object> fields = new HashMap<String, Object>();
fields.put("keys", 1);
user.setFields(fields);
String expected = "{\"fields\":{\"keys\":1}}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetKeys() {
Map<String, String> keys = new HashMap<String, String>();
keys.put("email", "foo@bar.com");
user.setKeys(keys);
String expected = "{\"keys\":{\"email\":\"foo@bar.com\"}}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetKeysConflict() {
user.setKeysConflict("error");
String expected = "{\"keysconflict\":\"error\"}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetLists() {
Map<String, Integer> lists = new HashMap<String, Integer>();
lists.put("test list", 1);
user.setLists(lists);
String expected = "{\"lists\":{\"test list\":1}}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetOptoutEmail() {
user.setOptoutEmail("none");
String expected = "{\"optout_email\":\"none\"}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
public void testSetLogin() {
Map<String, Object> login = new HashMap<String, Object>();
login.put("ip", "123.456.789.0");
user.setLogin(login);
String expected = "{\"login\":{\"ip\":\"123.456.789.0\"}}";
String result = gson.toJson(user);
assertEquals(expected, result);
}
} |
package com.danpeter.postson;
import org.junit.*;
import java.sql.Connection;
import java.sql.DriverManager;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;
public class QueryTest {
public static final SystemUser DAN_P = new SystemUser(UUID.randomUUID(), "Dan", "Peterstrm", new Address("Vintervgen", "17777"));
private JdbcDatastore datastore;
private Connection connection;
@Before
public void setUp() throws Exception {
datastore = new JdbcDatastore("localhost", "5432", "test", "test", "test");
connection = DriverManager.getConnection(
"jdbc:postgresql://localhost:5432/test", "test", "test");
}
@After
public void tearDown() throws Exception {
connection.createStatement().execute("DELETE FROM system_user");
connection.close();
}
@Test
public void findByRootField() throws Exception {
datastore.save(DAN_P);
List<SystemUser> systemUsers = datastore.createQuery(SystemUser.class)
.field("firstName")
.equal("Dan")
.asList();
assertThat(systemUsers.size(), is(1));
}
@Test
public void findByNestedField() throws Exception {
datastore.save(DAN_P);
List<SystemUser> systemUsers = datastore.createQuery(SystemUser.class)
.field("address.street")
.equal("Vintervgen")
.asList();
assertThat(systemUsers.size(), is(1));
}
@Test(expected = DatastoreException.class)
public void idIsUnique() throws Exception {
datastore.save(DAN_P);
datastore.save(DAN_P);
}
@Test
public void getSingleByField() throws Exception {
datastore.save(DAN_P);
Optional<SystemUser> systemUserOptional = datastore.createQuery(SystemUser.class)
.field("id")
.equal(DAN_P.id())
.singleResult();
assertThat(systemUserOptional.isPresent(), is(true));
}
@Test
public void getCountByField() throws Exception {
datastore.save(DAN_P);
int count = datastore.createQuery(SystemUser.class)
.field("id")
.equal(DAN_P.id())
.count();
assertThat(count, is(1));
}
@Test
public void getById() throws Exception {
datastore.save(DAN_P);
Optional<SystemUser> user = datastore.get(SystemUser.class, DAN_P.id());
assertThat(user.isPresent(), is(true));
}
@Test
public void getByCompundId() throws Exception {
Trip trip = new Trip(new Trip.TripId("Sweden", "Bahamas", LocalDateTime.now()), "Super vacation!");
datastore.save(trip);
assertThat(datastore.get(Trip.class, trip.id()).isPresent(), is(true));
}
@Test
public void deleteEntityReturnsTrue() throws Exception {
datastore.save(DAN_P);
assertThat(datastore.delete(SystemUser.class, DAN_P.id()), is(true));
assertThat(datastore.get(SystemUser.class, DAN_P.id()).isPresent(), is(false));
}
@Test
public void deleteEntityThatDoesNotExistReturnsFalse() throws Exception {
assertThat(datastore.delete(SystemUser.class, "does_not_exist"), is(false));
}
} |
package com.pollistics.models;
//import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
import java.util.HashMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertNotEquals;
//import static org.junit.Assert.fail;
public class PollTest {
@Test
public void membersTest() {
HashMap<String, Integer> options = new HashMap<>();
options.put("Blauw", 1);
Poll p = new Poll("Kleuren", options);
assertThat(p.getName().equals("Kleuren"));
p.setName("andere naam");
assertThat(p.getName().equals("andere naam"));
assertThat(p.getOptions().keySet().contains("Blauw"));
assertThat(p.getOptions().entrySet().contains(1));
HashMap<String, Integer> newOptions = new HashMap<>();
newOptions.put("Rood", 5);
p.setOptions(newOptions);
assertThat(p.getOptions().keySet().contains("Rood"));
assertThat(p.getOptions().entrySet().contains(4));
assertThat(p.getOptions().equals(newOptions));
}
@Test
public void userTest() {
HashMap<String, Integer> options = new HashMap<>();
options.put("Blauw", 1);
User user = new User("someone", "somepass");
Poll p = new Poll("Kleuren", options, user);
assertThat(p.getUser().equals(user));
User newUser = new User("other person", "pass");
p.setUser(newUser);
assertThat(p.getUser().equals(newUser));
}
@Test
public void voteTest() {
HashMap<String, Integer> options = new HashMap<>();
options.put("Blauw", 1);
Poll p = new Poll("Kleuren", options);
boolean worked = p.vote("Blauw");
assert worked;
assertThat(p.getOptions().get("Blauw").equals(2));
}
@Test
public void equalsTest() {
// not equal to meme
Object o = new Object();
HashMap<String, Integer> options = new HashMap<>();
options.put("Blauw", 1);
Poll p = new Poll("Kleuren", options);
assertNotEquals(p, o);
}
/*
@Test
public void equalsContract() {
try {
EqualsVerifier.forClass(Poll.class)
.withPrefabValues(User.class, new User("user", "pass"), new User("bla", "bla"))
.usingGetClass()
.verify();
} catch (Exception e) {
fail(e.getMessage());
}
}*/
} |
package me.jezza.oc.client.renderer;
import net.minecraft.block.Block;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.EntityRenderer;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.util.IIcon;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.IBlockAccess;
import org.lwjgl.opengl.GL11;
/**
 * Static helper methods shared by the mod's block renderers: texture binding,
 * per-face tessellation, and brightness/colour setup for {@link RenderBlocks}.
 */
public class BlockRenderer {
    // Sixteenths of a block: OFFSET_n == n / 16F, matching the 16-texel block texture grid.
    public static float OFFSET_1 = 0.0625F;
    public static float OFFSET_2 = 0.125F;
    public static float OFFSET_3 = 0.1875F;
    public static float OFFSET_4 = 0.25F;
    public static float OFFSET_5 = 0.3125F;
    public static float OFFSET_6 = 0.375F;
    public static float OFFSET_7 = 0.4375F;
    public static float OFFSET_8 = 0.5F;
    public static float OFFSET_9 = 0.5625F;
    public static float OFFSET_10 = 0.625F;
    public static float OFFSET_11 = 0.6875F;
    public static float OFFSET_12 = 0.75F;
    public static float OFFSET_13 = 0.8125F;
    public static float OFFSET_14 = 0.875F;
    public static float OFFSET_15 = 0.9375F;

    public BlockRenderer() {
    }

    /** Binds the given texture on the Minecraft render engine. */
    public static void bindTexture(ResourceLocation texture) {
        Minecraft.getMinecraft().renderEngine.bindTexture(texture);
    }

    /** Draws all six faces of the block using the same icon on every face. */
    public static void drawFaces(RenderBlocks renderblocks, Block block, IIcon icon, boolean st) {
        drawFaces(renderblocks, block, icon, icon, icon, icon, icon, icon, st);
    }

    /**
     * Draws all six faces of the block, one icon per face, centred on the origin.
     * When solidTop is set, alpha testing is disabled while the top face is drawn.
     *
     * NOTE(review): several setNormal axes do not match the face rendered right after
     * (e.g. normal (0,0,1) before renderFaceXNeg) — confirm this is intentional.
     */
    public static void drawFaces(RenderBlocks renderblocks, Block block, IIcon i1, IIcon i2, IIcon i3, IIcon i4, IIcon i5, IIcon i6, boolean solidTop) {
        Tessellator tessellator = Tessellator.instance;
        // Shift by half a block so the unit-cube faces end up centred on the origin.
        GL11.glTranslatef(-0.5F, -0.5F, -0.5F);
        tessellator.startDrawingQuads();
        tessellator.setNormal(0.0F, -1.0F, 0.0F);
        renderblocks.renderFaceYNeg(block, 0.0D, 0.0D, 0.0D, i1);
        tessellator.draw();
        if (solidTop) {
            GL11.glDisable(3008); // 3008 == GL11.GL_ALPHA_TEST (0x0BC0)
        }
        tessellator.startDrawingQuads();
        tessellator.setNormal(0.0F, 1.0F, 0.0F);
        renderblocks.renderFaceYPos(block, 0.0D, 0.0D, 0.0D, i2);
        tessellator.draw();
        if (solidTop) {
            GL11.glEnable(3008); // re-enable alpha testing after the top face
        }
        tessellator.startDrawingQuads();
        tessellator.setNormal(0.0F, 0.0F, 1.0F);
        renderblocks.renderFaceXNeg(block, 0.0D, 0.0D, 0.0D, i3);
        tessellator.draw();
        tessellator.startDrawingQuads();
        tessellator.setNormal(0.0F, 0.0F, -1.0F);
        renderblocks.renderFaceXPos(block, 0.0D, 0.0D, 0.0D, i4);
        tessellator.draw();
        tessellator.startDrawingQuads();
        tessellator.setNormal(1.0F, 0.0F, 0.0F);
        renderblocks.renderFaceZNeg(block, 0.0D, 0.0D, 0.0D, i5);
        tessellator.draw();
        tessellator.startDrawingQuads();
        tessellator.setNormal(-1.0F, 0.0F, 0.0F);
        renderblocks.renderFaceZPos(block, 0.0D, 0.0D, 0.0D, i6);
        tessellator.draw();
        // Undo the initial centring translation.
        GL11.glTranslatef(0.5F, 0.5F, 0.5F);
    }

    /** Sets tessellator brightness/colour for the block at (i, j, k) at full brightness scale. */
    public static int setBrightness(IBlockAccess blockAccess, int i, int j, int k, Block block) {
        return setBrightness(blockAccess, i, j, k, block, 1.0F);
    }

    /**
     * Sets tessellator brightness and colour for the block at (i, j, k), scaled by
     * {@code brightness}, and returns the brightness value applied.
     */
    public static int setBrightness(IBlockAccess blockAccess, int i, int j, int k, Block block, float brightness) {
        Tessellator tessellator = Tessellator.instance;
        int mb = block.getMixedBrightnessForBlock(blockAccess, i, j, k);
        // NOTE(review): the world brightness computed above is immediately discarded and
        // replaced with a hard-coded value — confirm this override is intentional.
        mb = 14680112;
        tessellator.setBrightness(mb);
        int l = block.colorMultiplier(blockAccess, i, j, k);
        // Unpack the 0xRRGGBB colour multiplier into per-channel floats in [0, 1].
        float f1 = (float) (l >> 16 & 255) / 255.0F;
        float f2 = (float) (l >> 8 & 255) / 255.0F;
        float f3 = (float) (l & 255) / 255.0F;
        if (EntityRenderer.anaglyphEnable) {
            // Remap the channels for anaglyph (red/cyan 3D) rendering.
            float f6 = (f1 * 30.0F + f2 * 59.0F + f3 * 11.0F) / 100.0F;
            float f4 = (f1 * 30.0F + f2 * 70.0F) / 100.0F;
            float f7 = (f1 * 30.0F + f3 * 70.0F) / 100.0F;
            f1 = f6;
            f2 = f4;
            f3 = f7;
        }
        tessellator.setColorOpaque_F(brightness * f1, brightness * f2, brightness * f3);
        return mb;
    }

    /** Renders all six faces of the block at (x, y, z) with one icon, without face culling. */
    protected static void renderAllSides(IBlockAccess world, int x, int y, int z, Block block, RenderBlocks renderer, IIcon tex) {
        renderAllSides(world, x, y, z, block, renderer, tex, true);
    }

    /**
     * Renders the faces of the block at (x, y, z) with a single icon; when allSides is
     * false, each face is culled via shouldSideBeRendered.
     * NOTE(review): the side argument passed is 6, outside the usual 0-5 face range —
     * confirm how the block's shouldSideBeRendered implementation treats it.
     */
    protected static void renderAllSides(IBlockAccess world, int x, int y, int z, Block block, RenderBlocks renderer, IIcon tex, boolean allSides) {
        if (allSides || block.shouldSideBeRendered(world, x + 1, y, z, 6)) {
            renderer.renderFaceXPos(block, (double) x, (double) y, (double) z, tex);
        }
        if (allSides || block.shouldSideBeRendered(world, x - 1, y, z, 6)) {
            renderer.renderFaceXNeg(block, (double) x, (double) y, (double) z, tex);
        }
        if (allSides || block.shouldSideBeRendered(world, x, y, z + 1, 6)) {
            renderer.renderFaceZPos(block, (double) x, (double) y, (double) z, tex);
        }
        if (allSides || block.shouldSideBeRendered(world, x, y, z - 1, 6)) {
            renderer.renderFaceZNeg(block, (double) x, (double) y, (double) z, tex);
        }
        if (allSides || block.shouldSideBeRendered(world, x, y + 1, z, 6)) {
            renderer.renderFaceYPos(block, (double) x, (double) y, (double) z, tex);
        }
        if (allSides || block.shouldSideBeRendered(world, x, y - 1, z, 6)) {
            renderer.renderFaceYNeg(block, (double) x, (double) y, (double) z, tex);
        }
    }

    /**
     * Renders the faces of the block at (x, y, z) using the block's own per-side icons
     * (icon indices 0-5); when allSides is false each face is culled via shouldSideBeRendered.
     */
    protected static void renderAllSides(IBlockAccess world, int x, int y, int z, Block block, RenderBlocks renderer, boolean allSides) {
        if (allSides || block.shouldSideBeRendered(world, x + 1, y, z, 6)) {
            renderer.renderFaceXPos(block, (double) x, (double) y, (double) z, block.getIcon(world, x, y, z, 5));
        }
        if (allSides || block.shouldSideBeRendered(world, x - 1, y, z, 6)) {
            renderer.renderFaceXNeg(block, (double) x, (double) y, (double) z, block.getIcon(world, x, y, z, 4));
        }
        if (allSides || block.shouldSideBeRendered(world, x, y, z + 1, 6)) {
            renderer.renderFaceZPos(block, (double) x, (double) y, (double) z, block.getIcon(world, x, y, z, 3));
        }
        if (allSides || block.shouldSideBeRendered(world, x, y, z - 1, 6)) {
            renderer.renderFaceZNeg(block, (double) x, (double) y, (double) z, block.getIcon(world, x, y, z, 2));
        }
        if (allSides || block.shouldSideBeRendered(world, x, y + 1, z, 6)) {
            renderer.renderFaceYPos(block, (double) x, (double) y, (double) z, block.getIcon(world, x, y, z, 1));
        }
        if (allSides || block.shouldSideBeRendered(world, x, y - 1, z, 6)) {
            renderer.renderFaceYNeg(block, (double) x, (double) y, (double) z, block.getIcon(world, x, y, z, 0));
        }
    }

    /**
     * Renders all six faces with each face drawn one block further along its axis,
     * shifted toward (x, y, z).
     * NOTE(review): the per-face +/-1 coordinate offsets look deliberate but are
     * undocumented — confirm the intended visual effect.
     */
    protected static void renderAllSides(int x, int y, int z, Block block, RenderBlocks renderer, IIcon tex) {
        renderer.renderFaceXPos(block, (double) (x - 1), (double) y, (double) z, tex);
        renderer.renderFaceXNeg(block, (double) (x + 1), (double) y, (double) z, tex);
        renderer.renderFaceZPos(block, (double) x, (double) y, (double) (z - 1), tex);
        renderer.renderFaceZNeg(block, (double) x, (double) y, (double) (z + 1), tex);
        renderer.renderFaceYPos(block, (double) x, (double) (y - 1), (double) z, tex);
        renderer.renderFaceYNeg(block, (double) x, (double) (y + 1), (double) z, tex);
    }
}
package com.test.generics;
import java.math.*;
import java.util.Optional;
import org.junit.jupiter.api.*;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.*;
// Note: ComplexNumber, RealNumber, RationalNumber and Integer are classes from this
// package — Integer here is constructed from a BigInteger, so it is NOT java.lang.Integer.
@DisplayName("Test cases to demonstrate how generics work")
public class GenericsTest {
    /**
     * This test illustrates that with ordinary classes, we may simply assign an
     * object of type <code>Integer</code> to a variable of type <code>RealNumber</code>.
     * This is an illustration of polymorphism: because any Integer "is-a" ComplexNumber, this assignment works
     */
    @DisplayName("Verify that is-a relationship works as expected for number types")
    @Test
    public void testIsARelationshipWithSimpleObjects() {
        ComplexNumber complexNumber = createComplexNumber();
        ComplexNumber realNumber = createRealNumber();
        ComplexNumber rationalNumber = createRationalNumber();
        ComplexNumber integer = createInteger();
        // Every value in the hierarchy is-a ComplexNumber...
        assertThat(complexNumber, instanceOf(ComplexNumber.class));
        assertThat(realNumber, instanceOf(ComplexNumber.class));
        assertThat(rationalNumber, instanceOf(ComplexNumber.class));
        assertThat(integer, instanceOf(ComplexNumber.class));
        // ...and each value also is-a instance of every supertype in its chain.
        assertThat(realNumber, instanceOf(RealNumber.class));
        assertThat(rationalNumber, instanceOf(RealNumber.class));
        assertThat(integer, instanceOf(RealNumber.class));
        assertThat(rationalNumber, instanceOf(RationalNumber.class));
        assertThat(integer, instanceOf(RationalNumber.class));
        assertThat(integer, instanceOf(Integer.class));
    }

    // Because of type erasure, getClass() on any Optional returns the same runtime
    // class (Optional), so these instanceOf checks pass regardless of the element type.
    @DisplayName("Show that is-a relationship is non-trivial when using generics")
    @Test public void testIsARelationshipWithOptionalObjects() {
        Optional<? extends ComplexNumber> complexNumber = createOptionalComplexNumber();
        Optional<? extends ComplexNumber> realNumber = createOptionalRealNumber();
        Optional<? extends ComplexNumber> rationalNumber = createOptionalRationalNumber();
        Optional<? extends ComplexNumber> integer = createOptionalInteger();
        assertThat(realNumber, instanceOf(complexNumber.getClass()));
        assertThat(rationalNumber, instanceOf(complexNumber.getClass()));
        assertThat(integer, instanceOf(complexNumber.getClass()));
    }

    // Factory helpers: one value per level of the number hierarchy.
    private ComplexNumber createComplexNumber() {
        return new ComplexNumber(BigDecimal.TEN, BigDecimal.ONE);
    }

    private RealNumber createRealNumber() {
        return new RealNumber(new BigDecimal("42.123456789"));
    }

    private RationalNumber createRationalNumber() {
        return new RationalNumber(new BigInteger("420"), BigInteger.TEN);
    }

    private Integer createInteger() {
        return new Integer(new BigInteger("4200"));
    }

    // Factory helpers wrapping each value in an Optional.
    private Optional<ComplexNumber> createOptionalComplexNumber() {
        return Optional.of(createComplexNumber());
    }

    private Optional<RealNumber> createOptionalRealNumber() {
        return Optional.of(createRealNumber());
    }

    private Optional<RationalNumber> createOptionalRationalNumber() {
        return Optional.of(createRationalNumber());
    }

    private Optional<Integer> createOptionalInteger() {
        return Optional.of(createInteger());
    }
}
package net.anyflow.menton;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
import io.netty.handler.codec.http.QueryStringDecoder;
import java.util.List;
import java.util.Map;
import net.anyflow.menton.http.HttpClient;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* @author anyflow
*/
public class UtilityTest {
@SuppressWarnings("unused")
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UtilityTest.class);
/**
* @throws java.lang.Exception
*/
@Before
public void setUp() throws Exception {
}
/**
* @throws java.lang.Exception
*/
@After
public void tearDown() throws Exception {
}
@Test
public void testQueryStringDecoderParameter() throws Exception {
String queryString = "/asdfasd/qwerew";
Map<String, List<String>> parameters = (new QueryStringDecoder(queryString)).parameters();
assertThat(parameters, is(not(nullValue())));
}
@Test
public void testURIport() throws Exception {
String uriString = "http://10.0.0.1/getporttest";
HttpClient client = new HttpClient(uriString);
assertThat(client.httpRequest().uri().getPort(), is(80));
}
} |
package org.apache.ibatis;
import org.apache.ibatis.datasource.pooled.PooledDataSource;
import org.apache.ibatis.datasource.unpooled.UnpooledDataSource;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.junit.Test;
import javax.sql.DataSource;
import java.io.IOException;
import java.io.Reader;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
public abstract class BaseDataTest {
public static final String BLOG_PROPERTIES = "databases/blog/blog-derby.properties";
public static final String BLOG_DDL = "databases/blog/blog-derby-schema.sql";
public static final String BLOG_DATA = "databases/blog/blog-derby-dataload.sql";
public static final String JPETSTORE_PROPERTIES = "databases/jpetstore/jpetstore-hsqldb.properties";
public static final String JPETSTORE_DDL = "databases/jpetstore/jpetstore-hsqldb-schema.sql";
public static final String JPETSTORE_DATA = "databases/jpetstore/jpetstore-hsqldb-dataload.sql";
public static UnpooledDataSource createUnpooledDataSource(String resource) throws IOException {
Properties props = Resources.getResourceAsProperties(resource);
UnpooledDataSource ds = new UnpooledDataSource();
ds.setDriver(props.getProperty("driver"));
ds.setUrl(props.getProperty("url"));
ds.setUsername(props.getProperty("username"));
ds.setPassword(props.getProperty("password"));
return ds;
}
public static PooledDataSource createPooledDataSource(String resource) throws IOException {
Properties props = Resources.getResourceAsProperties(resource);
PooledDataSource ds = new PooledDataSource();
ds.setDriver(props.getProperty("driver"));
ds.setUrl(props.getProperty("url"));
ds.setUsername(props.getProperty("username"));
ds.setPassword(props.getProperty("password"));
return ds;
}
public static void runScript(DataSource ds, String resource) throws IOException, SQLException {
Connection connection = ds.getConnection();
try {
ScriptRunner runner = new ScriptRunner(connection);
runner.setAutoCommit(true);
runner.setStopOnError(false);
runner.setLogWriter(null);
runner.setErrorLogWriter(null);
runScript(runner, resource);
} finally {
connection.close();
}
}
public static void runScript(ScriptRunner runner, String resource) throws IOException, SQLException {
Reader reader = Resources.getResourceAsReader(resource);
try {
runner.runScript(reader);
} finally {
reader.close();
}
}
public static DataSource createBlogDataSource() throws IOException, SQLException {
DataSource ds = createUnpooledDataSource(BLOG_PROPERTIES);
runScript(ds, BLOG_DDL);
runScript(ds, BLOG_DATA);
return ds;
}
public static DataSource createJPetstoreDataSource() throws IOException, SQLException {
DataSource ds = createUnpooledDataSource(JPETSTORE_PROPERTIES);
runScript(ds, JPETSTORE_DDL);
runScript(ds, JPETSTORE_DATA);
return ds;
}
} |
package web;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.MarionetteDriver;
import org.openqa.selenium.phantomjs.PhantomJSDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
public class AbstractWebIntegrationTest {
protected WebDriver webDriver = null;
protected String firstHandle = null;
protected String baseUrl = "http://localhost:58080";
@Before
public void init() {
webDriver = createWebDriver();
}
@After
public void cleanUp() {
webDriver.quit();
}
private static WebDriver createWebDriver() {
final Map<String, CreateWebDriverStrategy> map = new LinkedHashMap<>();
map.put("firefox", new CreateFirefoxDriverStrategy());
map.put("chrome", new CreateChromeDriverStrategy());
map.put("phantomjs", new CreatePhantomJSDriverStrategy());
final String key = readKey();
final CreateWebDriverStrategy createWebDriverStrategy = map.get(key);
if (createWebDriverStrategy != null) {
return createWebDriverStrategy.create();
} else {
throw new RuntimeException("no browser setting detected in browser.txt");
}
}
private static String readKey() {
try {
final File file = new File("browser.txt");
final byte[] content = FileUtils.readFileToByteArray(file);
return new String(content);
} catch (final IOException e) {
return "phantomjs";
}
}
public static interface CreateWebDriverStrategy {
WebDriver create();
}
public static class CreatePhantomJSDriverStrategy implements CreateWebDriverStrategy {
@Override
public WebDriver create() {
final WebDriver webDriver = new PhantomJSDriver();
return webDriver;
}
}
public static class CreateFirefoxDriverStrategy implements CreateWebDriverStrategy {
@Override
public WebDriver create() {
return createRemote();
}
protected WebDriver createMarionette() {
final DesiredCapabilities capabilities = DesiredCapabilities.firefox();
capabilities.setCapability("marionette", "true");
final WebDriver webDriver = new MarionetteDriver(null, capabilities, 25_600);
return webDriver;
}
protected WebDriver createRemote() {
try {
final URL url = new URL("http://localhost:25600");
final DesiredCapabilities capabilities = DesiredCapabilities.firefox();
capabilities.setCapability("marionette", true);
final WebDriver webDriver = new RemoteWebDriver(url, capabilities);
return webDriver;
} catch (final MalformedURLException e) {
throw new RuntimeException(e);
}
}
}
public static class CreateChromeDriverStrategy implements CreateWebDriverStrategy {
@Override
public WebDriver create() {
final ChromeDriver webDriver = new ChromeDriver();
return webDriver;
}
}
} |
package web;
import java.io.File;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriver.Options;
import org.openqa.selenium.WebDriver.Window;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.MarionetteDriver;
import org.openqa.selenium.phantomjs.PhantomJSDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
public class AbstractWebIntegrationTest {
protected WebDriver webDriver;
protected String baseUrl = "http://localhost:58080";
@Before
public void init() {
webDriver = createWebDriver();
final Options options = webDriver.manage();
final Window window = options.window();
window.maximize();
}
@After
public void cleanUp() {
webDriver.quit();
}
private WebDriver createWebDriver() {
final Map<String, CreateWebDriverStrategy> map = new LinkedHashMap<>();
map.put("firefox", new CreateFirefoxDriverStrategy());
map.put("chrome", new CreateChromeDriverStrategy());
map.put("phantomjs", new CreatePhantomJSDriverStrategy());
final String key = readKey();
final CreateWebDriverStrategy createWebDriverStrategy = map.get(key);
if (createWebDriverStrategy != null) {
createWebDriverStrategy.sleepIfNeeded();
return createWebDriverStrategy.create();
} else {
throw new RuntimeException("no browser setting detected in browser.txt");
}
}
private String readKey() {
try {
final File file = new File("browser.txt");
final byte[] content = FileUtils.readFileToByteArray(file);
return new String(content);
} catch (final IOException e) {
return "phantomjs";
}
}
public static interface CreateWebDriverStrategy {
WebDriver create();
void sleepIfNeeded();
}
public static class CreatePhantomJSDriverStrategy implements CreateWebDriverStrategy {
@Override
public WebDriver create() {
final WebDriver webDriver = new PhantomJSDriver();
return webDriver;
}
@Override
public void sleepIfNeeded() {
}
}
public static class CreateFirefoxDriverStrategy implements CreateWebDriverStrategy {
@Override
public WebDriver create() {
return createRemote();
}
protected WebDriver createRemote() {
final DesiredCapabilities capabilities = DesiredCapabilities.firefox();
capabilities.setCapability("marionette", "true");
final WebDriver webDriver = new MarionetteDriver();
return webDriver;
}
protected WebDriver createFirefox() {
try {
final DesiredCapabilities capabilities = DesiredCapabilities.firefox();
capabilities.setCapability("marionette", "true");
final WebDriver webDriver = new FirefoxDriver(capabilities);
return webDriver;
} catch (final RuntimeException e) {
throw e;
} catch (final Exception e) {
throw new RuntimeException(e);
}
}
@Override
public void sleepIfNeeded() {
try {
TimeUnit.SECONDS.sleep(10);
} catch (final InterruptedException e) {
throw new RuntimeException(e);
}
}
}
public static class CreateChromeDriverStrategy implements CreateWebDriverStrategy {
@Override
public WebDriver create() {
final ChromeDriver webDriver = new ChromeDriver();
return webDriver;
}
@Override
public void sleepIfNeeded() {
}
}
} |
package random;
import org.junit.Test;
import java.util.Arrays;
import static org.junit.Assert.assertEquals;
public abstract class AbstractSelectionTest {
private static final int LARGE_EXPERIMENTS = 10_000_000;
private static final int SMALL_EXPERIMENTS = 100;
@Test
public void testNonZeroWeights() {
test(new double[]{10.0, 20.0, 1.0});
}
@Test
public void testNonZeroSmallWeights() {
test(new double[]{0.1, 0.3, 0.6});
}
protected abstract int randomIndex(double[] weights);
private void test(final double[] weights) {
final int[] counts = new int[weights.length];
for (int i = 0; i < LARGE_EXPERIMENTS; i++) {
counts[randomIndex(weights)]++;
}
System.out.println("Counts: " + Arrays.toString(counts));
final int totalCount = Arrays.stream(counts).sum();
assertEquals(LARGE_EXPERIMENTS, totalCount);
for (int i = 0; i < weights.length; i++) {
for (int j = 0; j < weights.length; j++) {
final double ratioC = (double) counts[i] / (double) counts[j];
final double ratioW = weights[i] / weights[j];
// ratios should be the same (do not try this at home)
assertEquals(ratioW, ratioC, 0.2);
}
}
}
@Test
public void testEmptyElements() {
for (int i = 0; i < SMALL_EXPERIMENTS; i++) {
assertEquals(-1, randomIndex(new double[0]));
}
}
@Test
public void testSingleElementWithNonZeroWeight() {
for (int i = 0; i < SMALL_EXPERIMENTS; i++) {
assertEquals(0, randomIndex(new double[]{42.0}));
}
}
@Test
public void testSingleElementWithZeroWeight() {
for (int i = 0; i < SMALL_EXPERIMENTS; i++) {
assertEquals(0, randomIndex(new double[]{0.0}));
}
}
} |
package org.exist.storage;
import org.exist.EXistException;
import org.exist.storage.journal.Journal;
import org.exist.util.Configuration;
import org.exist.util.DatabaseConfigurationException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.IOException;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.*;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.*;
/**
 * Concurrency test: configures several independent BrokerPool instances, then
 * releases the same number of tasks simultaneously so that concurrent calls to
 * BrokerPools.stopAll(true) are exercised.
 *
 * @author <a href="mailto:patrick@reini.net">Patrick Reinhart</a>
 */
public class BrokerPoolsTest {
    @Rule
    public final TemporaryFolder temporaryFolder = new TemporaryFolder();

    @Test
    public void shutdownConcurrent() throws InterruptedException, ExecutionException, EXistException, DatabaseConfigurationException, IOException {
        final int testThreads = 5;
        // Released once (below) so every waiting task starts its shutdown at the same moment.
        final CountDownLatch shutdownLatch = new CountDownLatch(1);
        // Counted down by each task once it is running and waiting on shutdownLatch.
        final CountDownLatch acquiredLatch = new CountDownLatch(testThreads);
        final List<Future<Exception>> shutdownTasks = new ArrayList<>();
        final ExecutorService executorService = Executors.newFixedThreadPool(testThreads);
        for (int i = 0; i < testThreads; i ++) {
            // Each instance gets its own data directory under the temporary folder.
            final Path dataDir = temporaryFolder.newFolder("exist" + i).toPath().normalize().toAbsolutePath();
            // load config from classpath and override data and journal dir
            final Configuration configuration = new Configuration("conf.xml");
            configuration.setProperty(BrokerPool.PROPERTY_DATA_DIR, dataDir);
            configuration.setProperty(Journal.PROPERTY_RECOVERY_JOURNAL_DIR, dataDir);
            BrokerPool.configure("instance" + i, 0, 1, configuration);
            shutdownTasks.add(executorService.submit(new BrokerPoolShutdownTask(acquiredLatch, shutdownLatch)));
        }
        // wait for all shutdown threads to be acquired
        acquiredLatch.await();
        shutdownLatch.countDown();
        executorService.shutdown();
        assertTrue(executorService.awaitTermination(4, TimeUnit.SECONDS));
        // Each task returns the exception it caught, or null on a clean shutdown.
        for (final Future<Exception> shutdownTask: shutdownTasks) {
            assertNull(shutdownTask.get());
        }
    }

    /** Waits on the shared latch, then triggers a global stopAll; returns any caught exception. */
    public static class BrokerPoolShutdownTask implements Callable<Exception> {
        private final CountDownLatch acquiredLatch;
        private final CountDownLatch shutdownLatch;

        public BrokerPoolShutdownTask(final CountDownLatch acquiredLatch, final CountDownLatch shutdownLatch) {
            this.acquiredLatch = acquiredLatch;
            this.shutdownLatch = shutdownLatch;
        }

        @Override
        public Exception call() throws Exception {
            try {
                acquiredLatch.countDown();
                // wait for signal to release the broker
                shutdownLatch.await();
                // shutdown
                BrokerPools.stopAll(true);
                return null;
            } catch (final Exception e) {
                return e;
            }
        }
    }
}
package edu.umd.cs.findbugs;
/**
 * Visitor over the concrete annotation kinds that can be attached to a bug report:
 * class, field, method, integer and source-line annotations.
 */
public interface BugAnnotationVisitor {
    // Interface members are implicitly public; the redundant modifier is omitted.
    void visitClassAnnotation(ClassAnnotation classAnnotation);

    void visitFieldAnnotation(FieldAnnotation fieldAnnotation);

    void visitMethodAnnotation(MethodAnnotation methodAnnotation);

    void visitIntAnnotation(IntAnnotation intAnnotation);

    void visitSourceLineAnnotation(SourceLineAnnotation sourceLineAnnotation);
}
// vim:ts=4 |
package edu.umd.cs.findbugs.ba.constant;
import javax.annotation.Nullable;
/**
* Abstract dataflow value representing a value which may or may not be a
* constant.
*
* @see edu.umd.cs.findbugs.ba.constant.ConstantAnalysis
* @author David Hovemeyer
*/
public class Constant {
private final Object value;
/**
* Single instance representing all non-constant values.
*/
public static final Constant NOT_CONSTANT = new Constant(null);
/**
* Constructor for a constant value.
*
* @param value
* the constant value; must be a String, Integer, etc.
*/
public Constant(@Nullable Object value) {
this.value = value;
}
public Object getConstantValue() {
return value;
}
/**
* Return whether or not this value is a constant.
*
* @return true if the value is a constant, false if not
*/
public boolean isConstant() {
return value != null;
}
/**
* Return whether or not this value is a constant String.
*
* @return true if the value is a constant String, false if not
*/
public boolean isConstantString() {
return isConstant() && (value instanceof String);
}
/**
* Get the constant String value of this value.
*
* @return the constant String value
*/
public String getConstantString() {
return (String) value;
}
/**
* Return whether or not this value is a constant int/Integer.
*
* @return true if the value is a constant int/Integer, false if not
*/
public boolean isConstantInteger() {
return isConstant() && (value instanceof Integer);
}
/**
* Get the constant int value of this value.
*
* @return the constant int value
*/
public int getConstantInt() {
return ((Integer) value).intValue();
}
/**
* Merge two Constants.
*
* @param a
* a StaticConstant
* @param b
* another StaticConstant
* @return the merge (dataflow meet) of the two Constants
*/
public static Constant merge(Constant a, Constant b) {
if (!a.isConstant() || !b.isConstant())
return NOT_CONSTANT;
if (a.value.getClass() != b.value.getClass() || !a.value.equals(b.value))
return NOT_CONSTANT;
return a;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != this.getClass())
return false;
Constant other = (Constant) obj;
if (other.value == this.value)
return true;
else if (other.value == null || this.value == null)
return false;
else
return this.value.equals(other.value);
}
@Override
public int hashCode() {
return (value == null) ? 123 : value.hashCode();
}
@Override
public String toString() {
if (!isConstant()) {
return "-";
} else {
return "<" + value.toString() + ">";
}
}
} |
package edu.umd.cs.findbugs.detect;
import edu.umd.cs.findbugs.*;
import org.apache.bcel.classfile.*;
import java.util.zip.*;
import java.io.*;
import edu.umd.cs.findbugs.visitclass.DismantleBytecode;
import edu.umd.cs.findbugs.visitclass.Constants2;
public class TestingGround extends BytecodeScanningDetector implements Constants2 {
private BugReporter bugReporter;
private final boolean active = false;
private int state = 0;
public TestingGround(BugReporter bugReporter) {
this.bugReporter = bugReporter;
}
public void visit(JavaClass obj) {
}
public void visit(Method obj) {
}
public void visit(Code obj) {
// unless active, don't bother dismantling bytecode
if (active) {
System.out.println("TestingGround: " + getFullyQualifiedMethodName());
super.visit(obj);
}
}
public void sawOpcode(int seen) {
printOpCode(seen);
}
private void printOpCode(int seen) {
System.out.print(" TestingGround: " + OPCODE_NAMES[seen]);
if ((seen == INVOKEVIRTUAL) || (seen == INVOKESPECIAL) || (seen == INVOKEINTERFACE))
System.out.print(" " + getClassConstantOperand() + "." + getNameConstantOperand() + " " + getSigConstantOperand());
else if (seen == LDC || seen == LDC_W || seen == LDC2_W) {
Constant c = getConstantRefOperand();
if (c instanceof ConstantString)
System.out.print(" \"" + getStringConstantOperand() + "\"");
else if (c instanceof ConstantClass)
System.out.print(" " + getClassConstantOperand());
else
System.out.print(" " + c);
}
else if ((seen == ALOAD) || (seen == ASTORE))
System.out.print(" " + getRegisterOperand());
System.out.println();
}
} |
package com.foc.vaadin.gui.components;
import org.xml.sax.Attributes;
import com.foc.Globals;
import com.foc.property.FDouble;
import com.foc.property.FProperty;
import com.foc.property.FTime;
@SuppressWarnings({ "serial"})
public class FVTimeField extends FVTextField {
public FVTimeField(FProperty property, Attributes attributes) {
super(property, attributes);
}
@Override
public boolean copyGuiToMemory() {
String value = getValue();
if(getFocData() instanceof FTime){
if(!value.contains(":")){
if(value.length() == 4){
value = value.substring(0,2) + ":" + value.substring(2,4);
}else if(value.length() == 3){
value = "0" + value.substring(0,1) + ":" + value.substring(2,4);
}
}
}
((FProperty)getFocData()).setString(value);
return false;
}
} |
package io.tetrapod.core.tasks;
import io.tetrapod.core.ServiceException;
import io.tetrapod.core.StructureFactory;
import io.tetrapod.core.rpc.ErrorResponseException;
import io.tetrapod.core.rpc.RequestContext;
import io.tetrapod.core.rpc.Response;
import io.tetrapod.core.utils.Util;
import io.tetrapod.protocol.core.CoreContract;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * @author paulm
 * Created: 6/20/16
 */
/**
 * Tasks are CompletableFutures with a few changes.
 * <p>
 * <ul>
 * <li>complete and completeExceptionally are not publicly accessible.</li>
 * <li>few utility methods (thenRun, thenReturn)</li>
 * <li>TODO: all callbacks are async by default using the current executor</li>
 * </ul>
 * </p>
 *
 * @param <T> the type of the returned object.
 * @see java.util.concurrent.CompletableFuture
 */
public class Task<T> extends CompletableFuture<T> {
   private static final Logger logger = LoggerFactory.getLogger(Task.class);

   // Typed null sentinel used by done(); only ever read.
   // NOTE(review): should probably be declared final.
   private static Void NIL = null;

   // Pool used by fromFuture() to poll plain (non-CompletionStage) Futures.
   private static Executor commonPool = newScalingThreadPool(100);

   /**
    * Creates a ForkJoinPool with the given parallelism whose worker threads
    * log uncaught exceptions instead of failing silently.
    *
    * @param maxThreads the pool parallelism level
    * @return a new ExecutorService
    */
   public static ExecutorService newScalingThreadPool(final int maxThreads) {
      return new ForkJoinPool(maxThreads, ForkJoinPool.defaultForkJoinWorkerThreadFactory,
      (t, e) -> logger.error("Uncaught Exception", e), false);
   }

   // Daemon-thread scheduler backing the failAfter() and sleep() timers.
   private static ScheduledExecutorService schedulerExecutor = new ScheduledThreadPoolExecutor(10, runnable -> {
      Thread thread = Executors.defaultThreadFactory().newThread(runnable);
      thread.setDaemon(true);
      return thread;
   });

   // TODO: make all callbacks async by default and using the current executor
   // what "current executor' means will have to be defined.
   // the idea is to use a framework supplied executor to serve
   // single point to capture all activity derived from the execution
   // of one application request.
   // Including logs, stats, exception and timing information.
   // TODO: consider creating a public class CTask = "Completable Task"

   // Executor captured from the TaskContext that was current when this Task
   // was constructed; null when no context was active. The non-*Async
   // callback overrides below run on this executor when it is present.
   final Executor defaultExecutor;

   public Task() {
      TaskContext context = TaskContext.current();
      if (context != null) {
         this.defaultExecutor = context.getDefaultExecutor();
      } else {
         this.defaultExecutor = null;
      }
   }

   /**
    * Creates an already completed task from the given value.
    *
    * @param value the value to be wrapped with a task
    */
   public static <T> Task<T> from(T value) {
      final Task<T> t = new Task<>();
      t.internalComplete(value);
      return t;
   }

   /**
    * Creates a completed null task.
    * @param <T>
    * @return
    */
   public static <T> Task<T> fromNull() {
      final Task<T> t = new Task<>();
      t.internalComplete(null);
      return t;
   }

   /**
    * Creates an already exceptionally-completed task from the given throwable.
    *
    * @param ex the exception the returned task completes with
    */
   public static <T> Task<T> from(Throwable ex) {
      final Task<T> t = new Task<>();
      t.internalCompleteExceptionally(ex);
      return t;
   }

   // Completion is intentionally not public: only the framework (and
   // subclasses) may complete a Task; callers cannot.
   protected boolean internalComplete(T value) {
      return super.complete(value);
   }

   protected boolean internalCompleteExceptionally(Throwable ex) {
      return super.completeExceptionally(ex);
   }

   /**
    * Wraps a CompletionStage as a Task or just casts it if it is already a Task.
    *
    * @param stage the stage to be wrapped or casted to Task
    * @return stage cast as Task of a new Task that is dependent on the completion of that stage.
    */
   public static <T> Task<T> from(CompletionStage<T> stage) {
      if (stage instanceof Task) {
         return (Task<T>) stage;
      }
      // propagate the stage's outcome (value or failure) into a fresh Task
      final Task<T> t = new Task<>();
      stage.handle((T v, Throwable ex) -> {
         if (ex != null) {
            t.internalCompleteExceptionally(ex);
         } else {
            t.internalComplete(v);
         }
         return null;
      });
      return t;
   }

   /**
    * Wraps a Future as a Task or just casts it if it is already a Task.
    * <p>If future implements CompletionStage CompletionStage.handle will be used.</p>
    * <p>
    * If future is a plain old future, a runnable will executed using a default task pool.
    * This runnable will wait on the future using Future.get(timeout,timeUnit), with a timeout of 5ms.
    * Every time this task times out it will be rescheduled.
    * Starvation of the pool is prevented by the rescheduling behaviour.
    * </p>
    *
    * @param future the future to be wrapped or casted to Task
    * @return future cast as Task of a new Task that is dependent on the completion of that future.
    */
   @SuppressWarnings("unchecked")
   public static <T> Task<T> fromFuture(Future<T> future) {
      if (future instanceof Task) {
         return (Task<T>) future;
      }
      if (future instanceof CompletionStage) {
         return from((CompletionStage<T>) future);
      }
      final Task<T> t = new Task<>();
      if (future.isDone()) {
         try {
            t.internalComplete(future.get());
         } catch (Throwable ex) {
            // NOTE(review): this completes the task with the raw
            // ExecutionException wrapper (not its cause) and does not
            // re-interrupt on InterruptedException — consider fixing.
            t.internalCompleteExceptionally(ex);
         }
         return t;
      }
      // potentially very expensive
      commonPool.execute(new TaskFutureAdapter<>(t, future, commonPool, 5, TimeUnit.MILLISECONDS));
      return t;
   }

   /**
    * Returns a new task that will fail if the original is not completed within the given timeout.
    * This doesn't modify the original task in any way.
    * <p/>
    * Example:
    * <pre><code>Task<String> result = aTask.failAfter(60, TimeUnit.SECONDS);</code></pre>
    *
    * @param timeout the time from now
    * @param timeUnit the time unit of the timeout parameter
    * @return a new task
    */
   public Task<T> failAfter(final long timeout, final TimeUnit timeUnit) {
      final Task<T> t = new Task<>();
      // TODO: find a way to inject time for testing
      // also consider letting the application override this with the TaskContext
      final ScheduledFuture<?> rr = schedulerExecutor.schedule(
      () -> {
         // using t.isDone instead of this.isDone
         // because the propagation of this.isDone can be delayed by slow listeners.
         if (!t.isDone()) {
            t.internalCompleteExceptionally(new TimeoutException());
         }
      }, timeout, timeUnit);
      this.handle((T v, Throwable ex) -> {
         // removing the scheduled timeout to clear some memory
         rr.cancel(false);
         if (!t.isDone()) {
            if (ex != null) {
               t.internalCompleteExceptionally(ex);
            } else {
               t.internalComplete(v);
            }
         }
         return null;
      });
      return t;
   }

   /**
    * Returns a new task that completes (with null) after the given delay.
    *
    * @param time the time from now
    * @param timeUnit the time unit of the delay parameter
    * @return a new task
    */
   public static Task<Void> sleep(final long time, final TimeUnit timeUnit) {
      final Task<Void> t = new Task<>();
      // TODO: find a way to inject time for testing
      // also consider letting the application override this with the TaskContext
      @SuppressWarnings("unused")
      final ScheduledFuture<?> rr = schedulerExecutor.schedule(
      () -> {
         if (!t.isDone()) {
            t.internalComplete(null);
         }
      }, time, timeUnit);
      return t;
   }

   /**
    * Bridges this task to an RPC response: when the task yields a Response it
    * is sent via the context; any failure is translated to an error response.
    * Returns Response.ASYNC immediately so the framework knows the reply is
    * deferred.
    */
   public Response toResponse(RequestContext ctx) {
      thenApply(res -> {
         if (res instanceof Response) {
            ctx.respondWith((Response)res);
         } else {
            throw new ServiceException("Tried to convert a Task to a response that isn't a Response. " + res.getClass());
         }
         return null;
      }).exceptionally(parentEx -> {
         // a known error code in the exception chain becomes that error;
         // anything else is logged and reported as ERROR_UNKNOWN
         ErrorResponseException e = Util.getThrowableInChain(parentEx, ErrorResponseException.class);
         if (e != null && e.errorCode != CoreContract.ERROR_UNKNOWN) {
            ctx.respondWith(e.errorCode);
         } else {
            logger.error("**TASK ERROR** Chain failed while dispatching {} Error: {} {} ",
            makeRequestName(StructureFactory.getName(ctx.header.contractId, ctx.header.structId)),
            parentEx.getMessage(),
            ctx.header.dump(), parentEx);
            ctx.respondWith(CoreContract.ERROR_UNKNOWN);
         }
         return null;
      });
      return Response.ASYNC;
   }

   // Builds a log-friendly "requestFoo()" name by stripping the last 7
   // characters of the structure name (presumably the "Request" suffix —
   // TODO confirm against StructureFactory naming).
   private String makeRequestName(String name) {
      if (name == null || name.length() < 7) {
         return "";
      }
      return "request" + name.substring(0, name.length() - 7) + "()";
   }

   /**
    * Polls a plain Future with a short timeout, rescheduling itself on the
    * executor between attempts so one stuck future cannot pin a pool thread.
    */
   static class TaskFutureAdapter<T> implements Runnable {
      Task<T> task;
      Future<T> future;
      Executor executor;
      long waitTimeout;
      TimeUnit waitTimeoutUnit;

      public TaskFutureAdapter(
      final Task<T> task,
      final Future<T> future,
      final Executor executor,
      long waitTimeout, TimeUnit waitTimeoutUnit) {
         this.task = task;
         this.future = future;
         this.executor = executor;
         this.waitTimeout = waitTimeout;
         this.waitTimeoutUnit = waitTimeoutUnit;
      }

      @Override
      public void run() {
         try {
            while (!task.isDone()) {
               try {
                  task.internalComplete(future.get(waitTimeout, waitTimeoutUnit));
                  return;
               } catch (TimeoutException ex) {
                  if (future.isDone()) {
                     // in this case something completed the future with a timeout exception.
                     try {
                        task.internalComplete(future.get(waitTimeout, waitTimeoutUnit));
                        return;
                     } catch (Throwable tex0) {
                        task.internalCompleteExceptionally(tex0);
                     }
                     return;
                  }
                  try {
                     // reschedule
                     // potentially very expensive, might limit request throughput
                     executor.execute(this);
                     return;
                  } catch (RejectedExecutionException rex) {
                     // ignoring and continuing.
                     // might potentially worsen an already starving system.
                     // adding the redundant continue here to highlight the code path
                     continue;
                  } catch (Throwable tex) {
                     task.internalCompleteExceptionally(tex);
                     return;
                  }
               } catch (Throwable ex) {
                  task.internalCompleteExceptionally(ex);
                  return;
               }
            }
         } catch (Throwable ex) {
            task.internalCompleteExceptionally(ex);
         }
      }
   }

   /** Returns an already-completed Task&lt;Void&gt;. */
   public static Task<Void> done() {
      final Task<Void> task = new Task<>();
      task.internalComplete(NIL);
      return task;
   }

   /**
    * Like thenApply, but accepts a function that may throw a checked
    * exception; any throwable is wrapped in a ServiceException.
    */
   public <U> Task<U> thenApplyT(final ThrowableFunction<? super T, ? extends U> fn) {
      return thenApply(r -> {
         try {
            return fn.apply(r);
         } catch (Throwable throwable) {
            throw ServiceException.wrap(throwable);
         }
      });
   }

   /*
    * The overrides below all follow the same pattern: the user callback is
    * wrapped with TaskContext.wrap(...) so the submitting TaskContext is
    * restored inside the callback; the non-*Async variants prefer the
    * captured defaultExecutor when one exists; and the resulting stage is
    * re-wrapped as a Task via from(...).
    */

   @Override
   public <U> Task<U> thenApply(final Function<? super T, ? extends U> fn) {
      final Function<? super T, ? extends U> wrap = TaskContext.wrap(fn);
      if (defaultExecutor != null) {
         return from(super.thenApplyAsync(wrap, defaultExecutor));
      }
      return from(super.thenApply(wrap));
   }

   @Override
   public Task<Void> thenAccept(final Consumer<? super T> action) {
      final Consumer<? super T> wrap = TaskContext.wrap(action);
      if (defaultExecutor != null) {
         return from(super.thenAcceptAsync(wrap, defaultExecutor));
      }
      return from(super.thenAccept(wrap));
   }

   @Override
   public Task<T> whenComplete(final BiConsumer<? super T, ? super Throwable> action) {
      final BiConsumer<? super T, ? super Throwable> wrap = TaskContext.wrap(action);
      if (defaultExecutor != null) {
         return from(super.whenCompleteAsync(wrap, defaultExecutor));
      }
      return from(super.whenComplete(wrap));
   }

   /**
    * Returns a new Task that is executed when this task completes normally.
    * The result of the new Task will be the result of the Supplier passed as parameter.
    * <p/>
    * See the {@link CompletionStage} documentation for rules
    * covering exceptional completion.
    *
    * @param supplier the Supplier that will provider the value
    * the returned Task
    * @param <U> the supplier's return type
    * @return the new Task
    */
   public <U> Task<U> thenReturn(final Supplier<U> supplier) {
      // must be separated otherwise the execution of the wrap could happen in another thread
      final Supplier<U> wrap = TaskContext.wrap(supplier);
      if (defaultExecutor != null) {
         return from(super.thenApplyAsync(x -> wrap.get(), defaultExecutor));
      }
      return from(super.thenApply(x -> wrap.get()));
   }

   /**
    * Like thenCompose, but accepts a function that may throw a checked
    * exception; any throwable is wrapped in a ServiceException.
    */
   public <U> Task<U> thenComposeT(ThrowableFunction<? super T, ? extends CompletionStage<U>> fn) {
      return thenCompose(r-> {
         try {
            return fn.apply(r);
         } catch (Throwable t) {
            throw ServiceException.wrap(t);
         }
      });
   }

   public <U> Task<U> thenCompose(Function<? super T, ? extends CompletionStage<U>> fn) {
      final Function<? super T, ? extends CompletionStage<U>> wrap = TaskContext.wrap(fn);
      if (defaultExecutor != null) {
         return Task.from(super.thenComposeAsync(wrap, defaultExecutor));
      }
      return Task.from(super.thenCompose(wrap));
   }

   // Convenience overload: compose with a supplier that ignores this task's value.
   public <U> Task<U> thenCompose(Supplier<? extends CompletionStage<U>> fn) {
      // must be separated otherwise the execution of the wrap could happen in another thread
      final Supplier<? extends CompletionStage<U>> wrap = TaskContext.wrap(fn);
      if (defaultExecutor != null) {
         return Task.from(super.thenComposeAsync((T x) -> wrap.get(), defaultExecutor));
      }
      return Task.from(super.thenCompose((T x) -> wrap.get()));
   }

   @Override
   public <U> Task<U> handle(final BiFunction<? super T, Throwable, ? extends U> fn) {
      final BiFunction<? super T, Throwable, ? extends U> wrap = TaskContext.wrap(fn);
      if (defaultExecutor != null) {
         return from(super.handleAsync(wrap, defaultExecutor));
      }
      return from(super.handle(wrap));
   }

   /**
    * In the event of an error code exception, execute the following function, otherwise rethrows the exception
    * @param code The error code for the exception
    * @param fn The function to execute
    * @return The task that was returned from the function
    */
   public Task<T> exceptionallyOnErrorCode(int code, final Func0<? extends T> fn) {
      return exceptionally(t -> {
         ErrorResponseException ere = Util.getThrowableInChain(t, ErrorResponseException.class);
         if (ere != null && ere.errorCode == code) {
            return fn.apply();
         } else {
            throw ServiceException.wrapIfChecked(t);
         }
      });
   }

   @Override
   public Task<T> exceptionally(final Function<Throwable, ? extends T> fn) {
      return from(super.exceptionally(TaskContext.wrap(fn)));
   }

   @Override
   public Task<Void> thenRun(final Runnable action) {
      final Runnable wrap = TaskContext.wrap(action);
      if (defaultExecutor != null) {
         return from(super.thenRunAsync(wrap, defaultExecutor));
      }
      return from(super.thenRun(wrap));
   }

   @Override
   public Task<java.lang.Void> acceptEither(CompletionStage<? extends T> completionStage, Consumer<? super T> consumer) {
      final Consumer<? super T> wrap = TaskContext.wrap(consumer);
      if (defaultExecutor != null) {
         return Task.from(super.acceptEitherAsync(completionStage, wrap, defaultExecutor));
      }
      return Task.from(super.acceptEither(completionStage, wrap));
   }

   @Override
   public Task<java.lang.Void> acceptEitherAsync(CompletionStage<? extends T> completionStage, Consumer<? super T> consumer) {
      final Consumer<? super T> wrap = TaskContext.wrap(consumer);
      return Task.from(super.acceptEitherAsync(completionStage, wrap));
   }

   @Override
   public Task<java.lang.Void> acceptEitherAsync(CompletionStage<? extends T> completionStage, Consumer<? super T> consumer, Executor executor) {
      return Task.from(super.acceptEitherAsync(completionStage, TaskContext.wrap(consumer), executor));
   }

   @Override
   public <U> Task<U> applyToEither(CompletionStage<? extends T> completionStage, Function<? super T, U> function) {
      final Function<? super T, U> wrap = TaskContext.wrap(function);
      if (defaultExecutor != null) {
         return Task.from(super.applyToEitherAsync(completionStage, wrap, defaultExecutor));
      }
      return Task.from(super.applyToEither(completionStage, wrap));
   }

   @Override
   public <U> Task<U> applyToEitherAsync(CompletionStage<? extends T> completionStage, Function<? super T, U> function, Executor executor) {
      return Task.from(super.applyToEitherAsync(completionStage, TaskContext.wrap(function), executor));
   }

   @Override
   public <U> Task<U> applyToEitherAsync(CompletionStage<? extends T> completionStage, Function<? super T, U> function) {
      return Task.from(super.applyToEitherAsync(completionStage, TaskContext.wrap(function)));
   }

   @Override
   public <U> Task<U> handleAsync(BiFunction<? super T, java.lang.Throwable, ? extends U> biFunction, Executor executor) {
      final BiFunction<? super T, Throwable, ? extends U> wrap = TaskContext.wrap(biFunction);
      return Task.from(super.handleAsync(wrap, executor));
   }

   @Override
   public <U> Task<U> handleAsync(BiFunction<? super T, java.lang.Throwable, ? extends U> biFunction) {
      final BiFunction<? super T, Throwable, ? extends U> wrap = TaskContext.wrap(biFunction);
      return Task.from(super.handleAsync(wrap));
   }

   @Override
   public Task<java.lang.Void> runAfterBoth(CompletionStage<?> completionStage, Runnable runnable) {
      final Runnable wrap = TaskContext.wrap(runnable);
      if (defaultExecutor != null) {
         return Task.from(super.runAfterBothAsync(completionStage, wrap, defaultExecutor));
      }
      return Task.from(super.runAfterBoth(completionStage, wrap));
   }

   @Override
   public Task<java.lang.Void> runAfterBothAsync(CompletionStage<?> completionStage, Runnable runnable) {
      return Task.from(super.runAfterBothAsync(completionStage, TaskContext.wrap(runnable)));
   }

   @Override
   public Task<java.lang.Void> runAfterBothAsync(CompletionStage<?> completionStage, Runnable runnable, Executor executor) {
      return Task.from(super.runAfterBothAsync(completionStage, TaskContext.wrap(runnable), executor));
   }

   @Override
   public Task<java.lang.Void> runAfterEither(CompletionStage<?> completionStage, Runnable runnable) {
      final Runnable wrap = TaskContext.wrap(runnable);
      if (defaultExecutor != null) {
         return Task.from(super.runAfterEitherAsync(completionStage, wrap, defaultExecutor));
      }
      return Task.from(super.runAfterEither(completionStage, wrap));
   }

   @Override
   public Task<java.lang.Void> runAfterEitherAsync(CompletionStage<?> completionStage, Runnable runnable) {
      return Task.from(super.runAfterEitherAsync(completionStage, TaskContext.wrap(runnable)));
   }

   @Override
   public Task<java.lang.Void> runAfterEitherAsync(CompletionStage<?> completionStage, Runnable runnable, Executor executor) {
      return Task.from(super.runAfterEitherAsync(completionStage, TaskContext.wrap(runnable), executor));
   }

   // Static factories mirroring CompletableFuture.runAsync/supplyAsync,
   // with TaskContext propagation.
   public static Task<java.lang.Void> runAsync(Runnable runnable) {
      return Task.from(CompletableFuture.runAsync(TaskContext.wrap(runnable)));
   }

   public static Task<java.lang.Void> runAsync(Runnable runnable, Executor executor) {
      return Task.from(CompletableFuture.runAsync(TaskContext.wrap(runnable), executor));
   }

   // TaskSupplier variants return a Task<Task<U>> internally, flattened via thenCompose.
   public static <U> Task<U> supplyAsync(TaskSupplier<U> supplier) {
      return Task.from(CompletableFuture.supplyAsync(TaskContext.wrap(supplier))).thenCompose(t -> t);
   }

   public static <U> Task<U> supplyAsync(Supplier<U> supplier) {
      return Task.from(CompletableFuture.supplyAsync(TaskContext.wrap(supplier)));
   }

   public static <U> Task<U> supplyAsync(Supplier<U> supplier, Executor executor) {
      return Task.from(CompletableFuture.supplyAsync(TaskContext.wrap(supplier), executor));
   }

   public static <U> Task<U> supplyAsync(TaskSupplier<U> supplier, Executor executor) {
      return Task.from(CompletableFuture.supplyAsync(TaskContext.wrap(supplier), executor).thenCompose(t -> t));
   }

   @Override
   public Task<java.lang.Void> thenAcceptAsync(Consumer<? super T> consumer, Executor executor) {
      return Task.from(super.thenAcceptAsync(TaskContext.wrap(consumer), executor));
   }

   @Override
   public Task<java.lang.Void> thenAcceptAsync(Consumer<? super T> consumer) {
      return Task.from(super.thenAcceptAsync(TaskContext.wrap(consumer)));
   }

   @Override
   public <U> Task<java.lang.Void> thenAcceptBoth(CompletionStage<? extends U> completionStage, BiConsumer<? super T, ? super U> biConsumer) {
      final BiConsumer<? super T, ? super U> wrap = TaskContext.wrap(biConsumer);
      if (defaultExecutor != null) {
         return Task.from(super.thenAcceptBothAsync(completionStage, wrap, defaultExecutor));
      }
      return Task.from(super.thenAcceptBoth(completionStage, wrap));
   }

   @Override
   public <U> Task<java.lang.Void> thenAcceptBothAsync(CompletionStage<? extends U> completionStage, BiConsumer<? super T, ? super U> biConsumer, Executor executor) {
      return Task.from(super.thenAcceptBothAsync(completionStage, TaskContext.wrap(biConsumer), executor));
   }

   @Override
   public <U> Task<java.lang.Void> thenAcceptBothAsync(CompletionStage<? extends U> completionStage, BiConsumer<? super T, ? super U> biConsumer) {
      return Task.from(super.thenAcceptBothAsync(completionStage, TaskContext.wrap(biConsumer)));
   }

   @Override
   public <U> Task<U> thenApplyAsync(Function<? super T, ? extends U> function, Executor executor) {
      final Function<? super T, ? extends U> wrap = TaskContext.wrap(function);
      return Task.from(super.thenApplyAsync(wrap, executor));
   }

   @Override
   public <U> Task<U> thenApplyAsync(Function<? super T, ? extends U> function) {
      final Function<? super T, ? extends U> wrap = TaskContext.wrap(function);
      return Task.from(super.thenApplyAsync(wrap));
   }

   @Override
   public <U, V> Task<V> thenCombine(CompletionStage<? extends U> completionStage, BiFunction<? super T, ? super U, ? extends V> biFunction) {
      final BiFunction<? super T, ? super U, ? extends V> wrap = TaskContext.wrap(biFunction);
      if (defaultExecutor != null) {
         return Task.from(super.thenCombineAsync(completionStage, wrap, defaultExecutor));
      }
      return Task.from(super.thenCombine(completionStage, wrap));
   }

   @Override
   public <U, V> Task<V> thenCombineAsync(CompletionStage<? extends U> completionStage, BiFunction<? super T, ? super U, ? extends V> biFunction, Executor executor) {
      final BiFunction<? super T, ? super U, ? extends V> wrap = TaskContext.wrap(biFunction);
      return Task.from(super.thenCombineAsync(completionStage, wrap, executor));
   }

   @Override
   public <U, V> Task<V> thenCombineAsync(CompletionStage<? extends U> completionStage, BiFunction<? super T, ? super U, ? extends V> biFunction) {
      final BiFunction<? super T, ? super U, ? extends V> wrap = TaskContext.wrap(biFunction);
      return Task.from(super.thenCombineAsync(completionStage, wrap));
   }

   @Override
   public <U> Task<U> thenComposeAsync(Function<? super T, ? extends java.util.concurrent.CompletionStage<U>> function, Executor executor) {
      final Function<? super T, ? extends CompletionStage<U>> wrap = TaskContext.wrap(function);
      return Task.from(super.thenComposeAsync(wrap, executor));
   }

   @Override
   public <U> Task<U> thenComposeAsync(Function<? super T, ? extends CompletionStage<U>> function) {
      final Function<? super T, ? extends CompletionStage<U>> wrap = TaskContext.wrap(function);
      return Task.from(super.thenComposeAsync(wrap));
   }

   @Override
   public Task<java.lang.Void> thenRunAsync(Runnable runnable) {
      return Task.from(super.thenRunAsync(TaskContext.wrap(runnable)));
   }

   @Override
   public Task<java.lang.Void> thenRunAsync(Runnable runnable, Executor executor) {
      return Task.from(super.thenRunAsync(TaskContext.wrap(runnable), executor));
   }

   @Override
   public Task<T> whenCompleteAsync(BiConsumer<? super T, ? super java.lang.Throwable> biConsumer) {
      return Task.from(super.whenCompleteAsync(TaskContext.wrap(biConsumer)));
   }

   @Override
   public Task<T> whenCompleteAsync(BiConsumer<? super T, ? super Throwable> biConsumer, Executor executor) {
      return Task.from(super.whenCompleteAsync(TaskContext.wrap(biConsumer), executor));
   }

   /**
    * @throws NullPointerException if the array or any of its elements are
    * {@code null}
    */
   public static Task<Void> allOf(CompletableFuture<?>... cfs) {
      return from(CompletableFuture.allOf(cfs));
   }

   /**
    * @throws NullPointerException if the collection or any of its elements are
    * {@code null}
    */
   public static <F extends CompletableFuture<?>, C extends Collection<F>> Task<Void> allOf(C cfs) {
      return from(CompletableFuture.allOf(cfs.toArray(new CompletableFuture[cfs.size()])));
   }

   /**
    * @throws NullPointerException if the stream or any of its elements are
    * {@code null}
    */
   public static <F extends CompletableFuture<?>> Task<Void> allOf(Stream<F> cfs) {
      final List<F> futureList = cfs.collect(Collectors.toList());
      @SuppressWarnings("rawtypes")
      final CompletableFuture[] futureArray = futureList.toArray(new CompletableFuture[futureList.size()]);
      return from(CompletableFuture.allOf(futureArray));
   }

   /**
    * @throws NullPointerException if the array or any of its elements are
    * {@code null}
    */
   public static Task<Object> anyOf(CompletableFuture<?>... cfs) {
      return from(CompletableFuture.anyOf(cfs));
   }

   /**
    * @throws NullPointerException if the collection or any of its elements are
    * {@code null}
    */
   public static <F extends CompletableFuture<?>> Task<Object> anyOf(Collection<F> cfs) {
      return from(CompletableFuture.anyOf(cfs.toArray(new CompletableFuture[cfs.size()])));
   }

   /**
    * @throws NullPointerException if the stream or any of its elements are
    * {@code null}
    */
   public static <F extends CompletableFuture<?>> Task<Object> anyOf(Stream<F> cfs) {
      return from(CompletableFuture.anyOf((CompletableFuture[]) cfs.toArray(size -> new CompletableFuture[size])));
   }

   // NOTE(review): declared but never referenced within this class —
   // candidate for removal if no external code uses it (it is private, so
   // nothing outside this file can).
   private static class TaskChainException extends RuntimeException {
      public TaskChainException(Throwable cause) {
         super(cause);
      }
   }
}
package org.lockss.test;
import java.io.*;
import java.net.*;
import java.util.*;
import junit.framework.TestCase;
public class TestMockDatagramSocket extends TestCase{
// Socket under test; (re)created in setUp() before every test method.
private MockDatagramSocket ds = null;

/**
 * JUnit 3 constructor.
 *
 * @param msg the name of the test method to run
 */
public TestMockDatagramSocket(String msg){
    super(msg);
}
/**
 * Creates a fresh MockDatagramSocket before each test.
 *
 * @throws SocketException if the mock socket cannot be created; previously
 *         this was swallowed with printStackTrace(), which left {@code ds}
 *         null and masked the root cause behind a later NullPointerException
 *         in the test body.
 */
@Override
public void setUp() throws SocketException {
    ds = new MockDatagramSocket();
}
/** A freshly created socket must not report itself as closed. */
public void testIsClosedWhenNotClosed(){
    // assertFalse is clearer than assertTrue(!...)
    assertFalse(ds.isClosed());
}
/** close() must flip the socket's closed flag. */
public void testIsClosedWhenIsClosed(){
    ds.close();
    assertTrue(ds.isClosed());
}
/**
 * With no send() calls, the sent-packet list is empty.
 * NOTE(review): the method name says "ReturnsNull" but the verified (and
 * implemented) behavior is an empty Vector — consider renaming.
 */
public void testNoPacketSentReturnsNull(){
    Vector packets = ds.getSentPackets();
    assertEquals(0, packets.size());
}
/** A single send() must be recorded verbatim in the sent-packet list. */
public void testOnePacketSentReturnsOne() throws UnknownHostException{
    final byte[] payload = "This is test data".getBytes();
    final DatagramPacket outbound =
        new DatagramPacket(payload, payload.length,
                           InetAddress.getByName("127.0.0.1"), 1234);
    ds.send(outbound);
    final Vector sent = ds.getSentPackets();
    assertEquals(1, sent.size());
    verifyEqual(outbound, (DatagramPacket) sent.elementAt(0));
}
/** Multiple send() calls must be recorded in order, each verbatim. */
public void testMultiPacketSentReturnsMulti() throws UnknownHostException{
    final int numPackets = 5;
    final Vector expected = new Vector();
    for (int i = 0; i < numPackets; i++) {
        final byte[] payload = (i + "This is test data" + i).getBytes();
        final DatagramPacket packet =
            new DatagramPacket(payload, payload.length,
                               InetAddress.getByName("127.0.0.1"), 1234);
        expected.add(packet);
        ds.send(packet);
    }
    final Vector sent = ds.getSentPackets();
    assertEquals(numPackets, sent.size());
    for (int i = 0; i < numPackets; i++) {
        verifyEqual((DatagramPacket) expected.elementAt(i),
                    (DatagramPacket) sent.elementAt(i));
    }
}
/**
 * receive() on an empty queue must block; the scheduled interrupt aborts it
 * with an IOException, which is the expected outcome.
 */
public void testReceiveWithOutSetPacketsWaits() {
    DatagramPacket packet = createEmptyPacket(10);
    DoLater.Interrupter intr = null;
    try {
        intr = DoLater.interruptMeIn(1000);
        ds.receive(packet);
        fail("receive() returned when no packets");
    } catch (IOException e) {
        // this is what we're expecting
    } finally {
        // guard: if interruptMeIn() itself threw, intr is still null and the
        // original unguarded cancel() raised an NPE that masked the real failure
        if (intr != null) {
            intr.cancel();
            if (!intr.did()) {
                fail("get() of empty returned");
            }
        }
    }
}
/** receive() into a buffer of exactly the queued size yields an identical packet. */
public void testReceivePacketGetsOnePacketSameSize() throws Exception{
    final byte[] payload = "Test data".getBytes();
    final DatagramPacket queued =
        new DatagramPacket(payload, payload.length,
                           InetAddress.getByName("127.0.0.1"), 1234);
    ds.addToReceiveQueue(queued);
    final DatagramPacket received = createEmptyPacket(payload.length);
    ds.receive(received);
    verifyEqual(queued, received);
}
/** Receiving into a too-small buffer must truncate the payload, as real sockets do. */
public void testReceivePacketHandlesTruncation() throws Exception{
    byte[] payload = "Test data".getBytes();
    DatagramPacket queued =
        new DatagramPacket(payload, payload.length,
                           InetAddress.getByName("127.0.0.1"), 1234);
    ds.addToReceiveQueue(queued);
    DatagramPacket received = createEmptyPacket(payload.length - 1);
    ds.receive(received);
    assertEquals(payload.length - 1, received.getLength());
    verifyEqualUptoTruncation(queued, received);
}
/**
 * Several queued packets must be delivered in FIFO order.
 *
 * Fixed off-by-one: both loops used {@code <=}, so 6 packets were queued and
 * received while {@code numPackets} claims 5 — inconsistent with the sibling
 * send test, which uses {@code <}.
 */
public void testReceivePacketGetsMultiPackets() throws Exception{
    int numPackets = 5;
    Vector queuedPackets = new Vector();
    for (int ix = 0; ix < numPackets; ix++){
        String dataStr = ix + "Test data" + ix;
        byte data[] = dataStr.getBytes();
        DatagramPacket packet =
            new DatagramPacket(data, data.length,
                               InetAddress.getByName("127.0.0.1"), 1234);
        ds.addToReceiveQueue(packet);
        queuedPackets.add(packet);
    }
    for (int ix = 0; ix < numPackets; ix++){
        DatagramPacket expectedPacket =
            (DatagramPacket) queuedPackets.elementAt(ix);
        DatagramPacket receivedPacket =
            createEmptyPacket(expectedPacket.getLength());
        ds.receive(receivedPacket);
        verifyEqual(receivedPacket, expectedPacket);
    }
}
/**
 * Asserts that two packets share destination port, address, length, and payload.
 *
 * Fixed: the payload comparison iterated over the whole backing array
 * ({@code data1.length}); if the two packets' buffers differ in size this
 * raises ArrayIndexOutOfBoundsException instead of a clean assertion failure,
 * and it compares bytes beyond the packet's actual data. Now compares exactly
 * the payload span using {@code getOffset()}/{@code getLength()}.
 *
 * @param pac1 expected packet
 * @param pac2 actual packet
 */
private void verifyEqual(DatagramPacket pac1, DatagramPacket pac2){
    assertEquals(pac1.getPort(), pac2.getPort());
    assertEquals(pac1.getAddress(), pac2.getAddress());
    assertEquals(pac1.getLength(), pac2.getLength());
    byte[] data1 = pac1.getData();
    byte[] data2 = pac2.getData();
    for (int ix = 0; ix < pac1.getLength(); ix++){
        assertEquals(data1[pac1.getOffset() + ix], data2[pac2.getOffset() + ix]);
    }
}
/**
 * Asserts that a truncated packet matches the leading bytes of the full one.
 *
 * Fixed: the comparison iterated over the truncated packet's whole backing
 * array instead of its payload span; now uses {@code getOffset()}/
 * {@code getLength()} so the check stays correct even when the buffer is
 * larger than the received data.
 *
 * @param truncatedPac the packet received into a smaller buffer
 * @param fullPac      the original, untruncated packet
 */
private void verifyEqualUptoTruncation(DatagramPacket truncatedPac,
                                       DatagramPacket fullPac){
    assertEquals(truncatedPac.getPort(), fullPac.getPort());
    assertEquals(truncatedPac.getAddress(), fullPac.getAddress());
    byte[] data1 = truncatedPac.getData();
    byte[] data2 = fullPac.getData();
    for (int ix = 0; ix < truncatedPac.getLength(); ix++){
        assertEquals(data1[truncatedPac.getOffset() + ix],
                     data2[fullPac.getOffset() + ix]);
    }
}
/** Builds a receive packet backed by a freshly zeroed buffer of the given length. */
private DatagramPacket createEmptyPacket(int length){
    return new DatagramPacket(new byte[length], length);
}
} |
package game;
import java.util.Scanner;
import exception.InputInvalidException;
public class CustomScanner {
private final Scanner console;
public CustomScanner() {
console = new Scanner(System.in);
}
/**
* This method takes the minimum and maximum integer values in addition to
* allowing the user to use an escape phrase to exit the program.
*
* @param low
* the lowest value the user wants
* @param high
* the highest value the user wants. This value must be greater
* than the lowest value.
* @param escape
* determines if method checks for an escape phrase or not
* @return an integer greater than or equal to the low parameter and less
* than or equal to the high parameter. Otherwise, if the escape is
* true and the program finds an escape phrase, the program exits.
*/
public int getInt(int low, int high, boolean escape) {
String temp;
int x;
try {
temp = console.nextLine().trim();
if (escape == true)
checkEscape(temp);
x = Integer.parseInt(temp);
if (x < low || x > high)
return 0;
return x;
} catch (NumberFormatException e) {
errorMessage(e.toString());
}
return 0;
}
/**
* This method is an overloaded method that automatically sets the value of
* low and high to the integer limits.
*
* @param escape
* @return
*/
public int getInt(boolean escape) {
return getInt(Integer.MIN_VALUE, Integer.MAX_VALUE, escape);
}
/**
* This method is an overloaded method that automatically sets the value of
* escape to false.
*
* @param low
* @param high
* @return
*/
public int getInt(int low, int high) {
return getInt(low, high, false);
}
/**
* This method is an overloaded method that automatically sets the value of
* low and high to the integer limits and escape to false.
*
* @return
*/
public int getInt() {
return getInt(Integer.MIN_VALUE, Integer.MIN_VALUE, false);
}
/*
* 2016-02-18 19:21 Moved to CustomScanner in order to keep the TicTacToe
* class cleaner. Found the String trim() method
*
* 2016-02-18 17:25 I initially only used split() and did not use
* replaceAll(). I changed to the using replaceAll() because it allows to
* disregard any blank space that the user might input and only focus on the
* length of the user's input (should be 2, [character, number]) and the
* values of the input, although that is not validated here. I no longer
* check the length of each split input and the length of the input array. I
* found this optimization when I put a space in front of input before my a2
* (" a2") and it returned an unexpected error.
*/
/**
* This method gives a game a valid coordinate that corresponds to a square
* on the board. The user inputs an alphanumeric character string and this
* method turns it into a coordinate.
*
* @param game
* @return
*/
public Coordinate getSquareCoordinates(AbstractGame game) {
String temp;
// val[0] is the first input (the character / y value)
// val[1] is the second input (the number / x value)
int[] val = new int[2];
try {
temp = console.nextLine();
checkEscape(temp, game);
// remove all blank spaces
temp = temp.replaceAll("\\s", "");
// accept letter number form: a2
if (temp.length() == 2) {
val[0] = temp.toUpperCase().charAt(0) - 65;
val[1] = Character.getNumericValue(temp.charAt(1)) - 1;
} else {
throw new InputInvalidException("Number of valid characters not 2: " + temp.length());
}
} catch (Exception e) {
System.out.println("Unexpected Error");
e.printStackTrace(System.out);
throw new InputInvalidException();
}
Coordinate out = new Coordinate(val[0], val[1]);
// reject all numbers out of bounds
if (game.getBoard().squareExists(out)) {
throw new InputInvalidException("No square on those coordinates exist: " + temp);
}
return out;
}
public void pressToContinue() {
System.out.println("Press enter to continue");
checkEscape(console.nextLine());
}
public static void checkEscape(String temp) {
// looks for "EXIT" and "RESET" before the squares
for (String str : temp.split("\\s")) {
str = str.toUpperCase();
if (str.equals("EXIT")) {
System.out.println("Thank you for playing");
System.exit(0);
}
}
}
/**
*
* @param temp
* a String that will be checked for a key phrase.
* @param game
*/
public static void checkEscape(String temp, AbstractGame game) {
// looks for "EXIT" and "RESET" before the squares
for (String str : temp.split("\\s")) {
str = str.toUpperCase();
if (str.equals("EXIT")) {
System.out.println("Thank you for playing");
System.exit(0);
}
if (str.equals("RESTART") || str.equals("RESET")) {
System.out.println("Messy restart, please wait");
game.endGame();
return;
}
}
}
private void errorMessage(String error) {
System.out.println("Invalid: " + error);
}
} |
package nars.inference;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import nars.core.EventEmitter.Observer;
import nars.core.Events.ConceptBeliefRemove;
import nars.core.Events.TaskDerive;
import nars.core.Memory;
import nars.core.Parameters;
import nars.entity.Concept;
import nars.entity.Sentence;
import nars.entity.Stamp;
import nars.entity.Task;
import nars.entity.TruthValue;
import nars.inference.GraphExecutive.ParticlePlan;
import nars.io.Symbols;
import nars.io.Texts;
import nars.io.buffer.PriorityBuffer;
import nars.language.Conjunction;
import nars.language.Implication;
import nars.language.Interval;
import nars.language.Term;
import static nars.language.Terms.equalSubTermsInRespectToImageAndProduct;
import nars.language.Variables;
import nars.operator.Operation;
import nars.operator.Operator;
/**
* Operation execution and planning support.
* Strengthens and accelerates goal-reaching activity
*/
public class Executive implements Observer {
public final GraphExecutive graph;
public final Memory memory;
///** memory for faster execution of &/ statements (experiment) */
//public final Deque<TaskConceptContent> next = new ArrayDeque<>();
// Sorted buffer of candidate executions; entries pushed out of the buffer are
// routed into tasksToRemove via the reject() override in the constructor.
PriorityBuffer<TaskExecution> tasks;
private Set<TaskExecution> tasksToRemove = new HashSet();
public int shortTermMemorySize=10; //how many events its able to track for the temporal feedback system
//100 should be enough for all practical examples for now, we may make it adaptive later,
//which means adjusting according to the longest (&/,a1...an) =/> .. statement
public ArrayList<Task> lastEvents=new ArrayList<>();
/** number of tasks that are active in the sorted priority buffer for execution */
int numActiveTasks = 1;
/** max number of tasks that a plan can generate. chooses the N best */
int maxPlannedTasks = 4;
/** global plan search parameters */
float searchDepth = 48;
int particles = 32;
/** inline search parameters */
float inlineSearchDepth = 16;
int inlineParticles = 24;
// throttle for cycle(): at most this many executions per memory duration
float maxExecutionsPerDuration = 1f;
/** how much to multiply all cause relevancies per cycle */
double causeRelevancyFactor = 0.999;
/** how much to add value to each cause involved in a successful plan */
//TODO move this to a parameter class visible to both Executive and GraphExecutive
public static double relevancyOfSuccessfulPlan = 0.10;
/** time of last execution */
long lastExecution = -1;
/** motivation set on an executing task to prevent other tasks from interrupting it, unless they are relatively urgent.
* a larger value means it is more difficult for a new task to interrupt one which has
* already begun executing.
*/
float motivationToFinishCurrentExecution = 1.5f;
/**
 * Wires this executive to a memory: creates the plan graph, builds the task
 * buffer ordered by desire, then priority, then durability (see comparator),
 * and subscribes to TaskDerive / ConceptBeliefRemove events.
 */
public Executive(Memory mem) {
this.memory = mem;
this.graph = new GraphExecutive(mem,this);
this.tasks = new PriorityBuffer<TaskExecution>(new Comparator<TaskExecution>() {
@Override
public final int compare(final TaskExecution a, final TaskExecution b) {
float ap = a.getDesire();
float bp = b.getDesire();
if (bp != ap) {
return Float.compare(ap, bp);
} else {
float ad = a.getPriority();
float bd = b.getPriority();
if (ad!=bd)
return Float.compare(ad, bd);
else {
float add = a.getDurability();
float bdd = b.getDurability();
return Float.compare(add, bdd);
}
}
}
}, numActiveTasks) {
@Override protected void reject(final TaskExecution t) {
removeTask(t);
}
};
memory.event.set(this, true, TaskDerive.class, ConceptBeliefRemove.class);
}
// Predictive implication tasks currently tracked for temporal feedback
// (consumed by temporalPredictionsAdapt()).
HashSet<Task> current_tasks=new HashSet<>();
/**
 * Event hook: remembers derived forward/concurrent implications (without
 * independent variables) for prediction feedback, and forgets a task when
 * its belief is removed from its concept.
 */
@Override
public void event(Class event, Object[] args) {
if (event == TaskDerive.class) {
Task derivedTask=(Task) args[0];
if(derivedTask.sentence.content instanceof Implication &&
(((Implication) derivedTask.sentence.content).getTemporalOrder()==TemporalRules.ORDER_FORWARD ||
((Implication) derivedTask.sentence.content).getTemporalOrder()==TemporalRules.ORDER_CONCURRENT)) {
if(!current_tasks.contains(derivedTask) && !Variables.containVarIndep(derivedTask.toString())) {
current_tasks.add(derivedTask);
}
}
}
else if (event == ConceptBeliefRemove.class) {
Task removedTask=(Task) args[2]; //task is 3nd
if(current_tasks.contains(removedTask)) {
current_tasks.remove(removedTask);
}
}
}
/**
 * A task queued for execution, tracking its position within a (&/ ...)
 * sequence, an execution delay, and a motivation multiplier used to protect
 * an in-progress execution from interruption.
 */
public class TaskExecution {
/** may be null for input tasks */
public final Concept c;
public final Task t;
// index of the next term to execute within a sequence conjunction
public int sequence;
// absolute memory time before which execution must not resume (-1 = no delay)
public long delayUntil = -1;
private float motivationFactor = 1;
/**
 * Wraps a task; when the particle planner is enabled, sequence
 * conjunctions (bare, or as the subject of a forward/concurrent
 * implication) are pre-processed by inlineConjunction().
 */
public TaskExecution(final Concept concept, Task t) {
this.c = concept;
//Check if task is
if(Parameters.TEMPORAL_PARTICLE_PLANNER) {
Term term = t.getContent();
if (term instanceof Implication) {
Implication it = (Implication)term;
if ((it.getTemporalOrder() == TemporalRules.ORDER_FORWARD) || (it.getTemporalOrder() == TemporalRules.ORDER_CONCURRENT)) {
if (it.getSubject() instanceof Conjunction) {
t = inlineConjunction(t, (Conjunction)it.getSubject());
}
}
}
else if (term instanceof Conjunction) {
t = inlineConjunction(t, (Conjunction)term);
}
}
this.t = t;
}
//TODO support multiple inline replacements
/**
 * Replaces non-executable terms in a sequence conjunction with planned
 * sub-sequences from the graph planner; ends the task if any term cannot
 * be planned. Trailing Intervals are stripped. Returns the (possibly
 * cloned) task.
 */
protected Task inlineConjunction(Task t, final Conjunction c) {
ArrayDeque<Term> inlined = new ArrayDeque();
boolean modified = false;
if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
Term prev = null;
for (Term e : c.term) {
if (!isPlanTerm(e)) {
if (graph.isPlannable(e)) {
TreeSet<ParticlePlan> plans = graph.particlePlan(e, inlineSearchDepth, inlineParticles);
if (plans.size() > 0) {
//use the first
ParticlePlan pp = plans.first();
//if terms precede this one, remove a common prefix
//scan from the end of the sequence backward until a term matches the previous, and splice it there
//TODO more rigorous prefix compraison. compare sublist prefix
List<Term> seq = pp.sequence;
// if (prev!=null) {
// int previousTermIndex = pp.sequence.lastIndexOf(prev);
// if (previousTermIndex!=-1) {
// if (previousTermIndex == seq.size()-1)
// seq = Collections.EMPTY_LIST;
// else {
// seq = seq.subList(previousTermIndex+1, seq.size());
//System.out.println("inline: " + seq + " -> " + e + " in " + c);
inlined.addAll(seq);
//System.err.println("Inline " + e + " in " + t.getContent() + " = " + pp.sequence);
modified = true;
}
else {
//no plan available, this wont be able to execute
end();
}
}
else {
//this won't be able to execute here
end();
}
}
else {
//executable term, add
inlined.add(e);
}
prev = e;
}
}
//remove suffix intervals
if (inlined.size() > 0) {
while (inlined.peekLast() instanceof Interval) {
inlined.removeLast();
modified = true;
}
}
if (inlined.isEmpty())
end();
if (modified) {
Conjunction nc = c.cloneReplacingTerms(inlined.toArray(new Term[inlined.size()]));
t = t.clone(t.sentence.clone(nc) );
}
return t;
}
// Equality/hashing delegate to the wrapped task so the buffer deduplicates
// executions of the same task.
@Override public boolean equals(final Object obj) {
if (obj instanceof TaskExecution) {
return ((TaskExecution)obj).t.equals(t);
}
return false;
}
public final float getDesire() {
return t.getDesire().getExpectation() * motivationFactor;
}
public final float getPriority() { return t.getPriority(); }
public final float getDurability() { return t.getDurability(); }
//public final float getMotivation() { return getDesire() * getPriority() * motivationFactor; }
public final void setMotivationFactor(final float f) { this.motivationFactor = f; }
@Override public int hashCode() { return t.hashCode(); }
@Override
public String toString() {
return "!" + Texts.n2Slow(getDesire()) + "|" + sequence + "! " + t.toString();
}
// Zeroes motivation (so getDesire() becomes 0) and ends the wrapped task.
public void end() {
setMotivationFactor(0);
if (t!=null)
t.end();
}
}
/**
 * Finds the buffered execution whose task shares the given parent task, or
 * null if none exists.
 */
protected TaskExecution getExecution(final Task parent) {
for (final TaskExecution t : tasks) {
if (t.t.parentTask!=null)
if (t.t.parentTask.equals(parent))
return t;
}
return null;
}
/**
 * Offers a task for execution. Rejected when a sibling execution for the
 * same parent already has higher desire or has already begun its sequence.
 * Returns true if the task was accepted into the buffer.
 */
public boolean addTask(final Concept c, final Task t) {
TaskExecution existingExecutable = getExecution(t.parentTask);
boolean valid = true;
if (existingExecutable!=null) {
//TODO compare motivation (desire * priority) instead?
//if the new task for the existin goal has a lower priority, ignore it
if (existingExecutable.getDesire() > t.getDesire().getExpectation()) {
//System.out.println("ignored lower priority task: " + t + " for parent " + t.parentTask);
valid = false;
}
//do not allow interrupting a lower priority, but already executing task
//TODO allow interruption if priority difference is above some threshold
if (existingExecutable.sequence > 0) {
//System.out.println("ignored late task: " + t + " for parent " + t.parentTask);
valid = false;
}
}
if (valid) {
if(!occured && this.expected_task!=null && ended) {
expected_task.expect(false); //ok this one didnt get his expectation
}
occured=false; //only bad to not happened not interrupted ones
ended=false;
final TaskExecution te = new TaskExecution(c, t);
if (tasks.add(te)) {
//added successfully
memory.emit(TaskExecution.class, te);
return true;
}
}
//t.end();
return false;
}
// Marks an execution for removal on the next updateTasks() pass and ends it.
protected void removeTask(final TaskExecution t) {
if (tasksToRemove.add(t)) {
// if (memory.getRecorder().isActive())
// memory.getRecorder().output("Executive", "Task Remove: " + t.toString());
t.end();
}
}
// Rebuilds the buffer: drops removed executions and any whose desire fell to 0.
protected void updateTasks() {
List<TaskExecution> t = new ArrayList(tasks);
t.removeAll(tasksToRemove);
tasks.clear();
for (TaskExecution x : t) {
if (x.getDesire() > 0) { // && (x.getPriority() > 0)) {
tasks.add(x);
//this is incompatible with the other usages of motivationFactor, so do not use this:
// if ((x.delayUntil!=-1) && (x.delayUntil <= memory.getTime())) {
// //restore motivation so task can resume processing
// x.motivationFactor = 1.0f;
}
}
tasksToRemove.clear();
}
// public void manageExecution() {
// if (next.isEmpty()) {
// return;
// TaskConceptContent n = next.pollFirst();
// if (n.task==null) {
// //we have to wait
// return;
// if (!(n.content instanceof Operation)) {
// throw new RuntimeException("manageExecution: Term content is not Operation: " + n.content);
// System.out.println("manageExecution: " + n.task);
// //ok it is time for action:
// execute((Operation)n.content, n.concept, n.task, true);
/**
 * Invokes the operation's operator with the given task attached.
 */
protected void execute(final Operation op, final Task task) {
Operator oper = op.getOperator();
//if (NAR.DEBUG)
//System.out.println("exe: " + task.getExplanation().trim());
op.setTask(task);
oper.call(op, memory);
//task.end(true);
}
/**
 * When the particle planner is enabled and the task is desired, asks the
 * graph to plan toward the task's content.
 */
public void decisionPlanning(final NAL nal, final Task t, final Concept concept) {
if (Parameters.TEMPORAL_PARTICLE_PLANNER) {
if (!isDesired(t, concept)) return;
boolean plannable = graph.isPlannable(t.getContent());
if (plannable) {
graph.plan(nal, concept, t, t.getContent(), particles, searchDepth, '!', maxPlannedTasks);
}
}
}
/** Entry point for all potentially executable tasks */
public void decisionMaking(final Task t, final Concept concept) {
if (isDesired(t, concept)) {
Term content = concept.term;
if (content instanceof Operation) {
addTask(concept, t);
}
else if (isSequenceConjunction(content)) {
addTask(concept, t);
}
}
else {
//t.end();
}
}
/** whether a concept's desire exceeds decision threshold */
public boolean isDesired(final Task t, final Concept c) {
float desire = c.getDesire().getExpectation();
float priority = t.budget.summary();
return (desire * priority) >= memory.param.decisionThreshold.get();
//return (c.getDesire().getExpectation() >= memory.param.decisionThreshold.get());
}
/** called during each memory cycle */
public void cycle() {
long now = memory.time();
//only execute something no less than every duration time
if (now - lastExecution < (memory.param.duration.get()/maxExecutionsPerDuration) )
return;
lastExecution = now;
graph.implication.multiplyRelevancy(causeRelevancyFactor);
updateTasks();
updateSensors();
if (tasks.isEmpty())
return;
/*if (NAR.DEBUG)*/ {
//TODO make a print function
if (tasks.size() > 1) {
System.out.println("Tasks @ " + memory.time());
for (TaskExecution tcc : tasks)
System.out.println("  " + tcc.toString());
}
else {
System.out.println("Task @ " + memory.time() + ": " + tasks.get(0));
}
}
// Dispatch the top-ranked execution by the form of its content term:
// bare operation, sequence conjunction, or implication wrapping either.
TaskExecution topExecution = tasks.getFirst();
Task top = topExecution.t;
Term term = top.getContent();
if (term instanceof Operation) {
execute((Operation)term, top); //directly execute
removeTask(topExecution);
return;
}
else if (Parameters.TEMPORAL_PARTICLE_PLANNER && term instanceof Conjunction) {
Conjunction c = (Conjunction)term;
if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
executeConjunctionSequence(topExecution, c);
return;
}
}
else if (Parameters.TEMPORAL_PARTICLE_PLANNER && term instanceof Implication) {
Implication it = (Implication)term;
if ((it.getTemporalOrder() == TemporalRules.ORDER_FORWARD) || (it.getTemporalOrder() == TemporalRules.ORDER_CONCURRENT)) {
if (it.getSubject() instanceof Conjunction) {
Conjunction c = (Conjunction)it.getSubject();
if (c.operator() == Symbols.NativeOperator.SEQUENCE) {
executeConjunctionSequence(topExecution, c);
return;
}
}
else if (it.getSubject() instanceof Operation) {
execute((Operation)it.getSubject(), top); //directly execute
removeTask(topExecution);
return;
}
}
throw new RuntimeException("Unrecognized executable term: " + it.getSubject() + "[" + it.getSubject().getClass() + "] from " + top);
}
else {
//throw new RuntimeException("Unknown Task type: "+ top);
}
//        //Example prediction
//        if (memory.getCurrentBelief()!=null) {
//            Term currentTerm = memory.getCurrentBelief().content;
//            if (implication.containsVertex(currentTerm)) {
//                particlePredict(currentTerm, 12, particles);
}
// True for terms that may appear inside an executable plan sequence.
public static boolean isPlanTerm(final Term t) {
return ((t instanceof Interval) || (t instanceof Operation));
}
public static boolean isExecutableTerm(final Term t) {
return (t instanceof Operation) || isSequenceConjunction(t);
//task.sentence.content instanceof Operation || (task.sentence.content instanceof Conjunction && task.sentence.content.getTemporalOrder()==TemporalRules.ORDER_FORWARD)))
}
public static boolean isSequenceConjunction(final Term c) {
if (c instanceof Conjunction) {
Conjunction cc = ((Conjunction)c);
return ( cc.operator() == Symbols.NativeOperator.SEQUENCE );
//return (cc.getTemporalOrder()==TemporalRules.ORDER_FORWARD) || (cc.getTemporalOrder()==TemporalRules.ORDER_CONCURRENT);
}
return false;
}
// State shared with inductionOnSucceedingEvents(): the task/event expected
// to occur once the current sequence execution completes.
public Task expected_task=null;
public Term expected_event=null;
boolean ended=false;
/**
 * Advances one step of a sequence execution: runs the current Operation or
 * waits out an Interval, then either completes the task (recording the
 * expected consequence for feedback) or boosts its motivation so it keeps
 * running next cycle.
 */
private void executeConjunctionSequence(final TaskExecution task, final Conjunction c) {
int s = task.sequence;
Term currentTerm = c.term[s];
long now = memory.time();
if (task.delayUntil > now) {
//not ready to execute next term
return;
}
if (currentTerm instanceof Operation) {
Concept conc=memory.concept(currentTerm); // NOTE(review): computed but unused
execute((Operation)currentTerm, task.t);
task.delayUntil = now + memory.param.duration.get();
s++;
}
else if (currentTerm instanceof Interval) {
Interval ui = (Interval)currentTerm;
task.delayUntil = memory.time() + Interval.magnitudeToTime(ui.magnitude, memory.param.duration);
s++;
}
else {
System.err.println("Non-executable term in sequence: " + currentTerm + " in " + c + " from task " + task.t);
//removeTask(task); //was never executed, dont remove
}
if (s == c.term.length) {
ended=true;
//completed task
if(task.t.sentence.content instanceof Implication) {
expected_task=task.t;
expected_event=((Implication)task.t.sentence.content).getPredicate();
}
removeTask(task);
task.sequence=0;
}
else {
ended=false;
//still incomplete
task.sequence = s;
task.setMotivationFactor(motivationToFinishCurrentExecution);
}
}
//check all predictive statements, match them with last events
/**
 * For each tracked predictive implication, replays lastEvents against its
 * antecedent sequence (accounting for Intervals and concurrency) and
 * revises the implication's truth with positive or negative evidence
 * depending on whether the predicted consequence occurred on time.
 * NOTE(review): assumes lastEvents is non-empty — only called after an
 * event has been appended in inductionOnSucceedingEvents().
 */
public void temporalPredictionsAdapt() {
for(Task c : current_tasks) { //a =/> b or (&/ a1...an) =/> b
boolean concurrent_conjunction=false;
Term[] args=new Term[1];
Implication imp=(Implication) c.getContent();
boolean concurrent_implication=imp.getTemporalOrder()==TemporalRules.ORDER_CONCURRENT;
args[0]=imp.getSubject();
if(imp.getSubject() instanceof Conjunction) {
Conjunction conj=(Conjunction) imp.getSubject();
if(conj.temporalOrder==TemporalRules.ORDER_FORWARD || conj.temporalOrder==TemporalRules.ORDER_CONCURRENT) {
concurrent_conjunction=conj.temporalOrder==TemporalRules.ORDER_CONCURRENT;
args=conj.term; //in case of &/ this are the terms
}
}
int i=0;
boolean matched=true;
int off=0;
long expected_time=lastEvents.get(0).sentence.getOccurenceTime();
for(i=0;i<args.length;i++) {
//handling of intervals:
if(args[i] instanceof Interval) {
if(!concurrent_conjunction) {
expected_time+=((Interval)args[i]).getTime(memory);
}
off++;
continue;
}
//handling of other events, seeing if they match and are right in time
if(!args[i].equals(lastEvents.get(i-off).sentence.content)) { //it didnt match, instead sth different unexpected happened
matched=false; //whether intermediate events should be tolerated or not was a important question when considering this,
break; //if it should be allowed, the sequential match does not matter only if the events come like predicted.
} else { //however I decided that sequence matters also for now, because then the more accurate hypothesis wins.
if(lastEvents.get(i-off).sentence.truth.getExpectation()<=0.5) { //it matched according to sequence, but is its expectation bigger than 0.5? todo: decide how truth values of the expected events
//it didn't happen
matched=false;
break;
}
long occurence=lastEvents.get(i-off).sentence.getOccurenceTime();
boolean right_in_time=Math.abs(occurence-expected_time) < ((double)memory.param.duration.get())/Parameters.TEMPORAL_PREDICTION_FEEDBACK_ACCURACY_DIV;
if(!right_in_time) { //it matched so far, but is the timing right or did it happen when not relevant anymore?
matched=false;
break;
}
}
if(!concurrent_conjunction) {
expected_time+=memory.param.duration.get();
}
}
if(concurrent_conjunction && !concurrent_implication) { //implication is not concurrent
expected_time+=memory.param.duration.get(); //so here we have to add duration
}
else
if(!concurrent_conjunction && concurrent_implication) {
expected_time-=memory.param.duration.get();
} //else if both are concurrent, time has never been added so correct
//else if both are not concurrent, time was always added so also correct
//ok it matched, is the consequence also right?
if(matched) {
long occurence=lastEvents.get(args.length-off).sentence.getOccurenceTime();
boolean right_in_time=Math.abs(occurence-expected_time)<((double)memory.param.duration.get())/Parameters.TEMPORAL_PREDICTION_FEEDBACK_ACCURACY_DIV;
if(right_in_time && imp.getPredicate().equals(lastEvents.get(args.length-off).sentence.content)) { //it matched and same consequence, so positive evidence
c.sentence.truth=TruthFunctions.revision(c.sentence.truth, new TruthValue(1.0f,Parameters.DEFAULT_JUDGMENT_CONFIDENCE));
} else { //it matched and other consequence, so negative evidence
c.sentence.truth=TruthFunctions.revision(c.sentence.truth, new TruthValue(0.0f,Parameters.DEFAULT_JUDGMENT_CONFIDENCE));
} //todo use derived task with revision instead
}
}
}
// most recent event used as the induction partner for the next event
public Task stmLast=null;
// whether the expected event actually occurred (feedback for expect())
boolean occured=false;
/**
 * Temporal induction between the previous event and a new one; also feeds
 * the new event into the short-term event memory and the prediction
 * feedback loop. Returns false for eternal or non-input/non-triggered events.
 */
public boolean inductionOnSucceedingEvents(final Task newEvent, NAL nal) {
if (newEvent == null || newEvent.sentence.stamp.getOccurrenceTime()==Stamp.ETERNAL || !isInputOrTriggeredOperation(newEvent,nal.mem))
return false;
if (stmLast!=null) {
if(equalSubTermsInRespectToImageAndProduct(newEvent.sentence.content,stmLast.sentence.content)) {
return false;
}
nal.setTheNewStamp(newEvent.sentence.stamp, stmLast.sentence.stamp, memory.time());
nal.setCurrentTask(newEvent);
Sentence currentBelief = stmLast.sentence;
nal.setCurrentBelief(currentBelief);
if(newEvent.getPriority()>Parameters.TEMPORAL_INDUCTION_MIN_PRIORITY) {
TemporalRules.temporalInduction(newEvent.sentence, currentBelief, nal);
}
}
//for this heuristic, only use input events & task effects of operations
if(newEvent.getPriority()>Parameters.TEMPORAL_INDUCTION_MIN_PRIORITY) {
if(Parameters.TEMPORAL_PARTICLE_PLANNER && this.expected_event!=null && this.expected_task!=null) {
if(newEvent.sentence.content.equals(this.expected_event)) {
this.expected_task.expect(true);
occured=true;
} //else {
//  this.expected_task.expect(false);
//  this.expected_event=null;
//  this.expected_task=null; //done i think//todo, refine, it could come in a specific time, also +4 on end of a (&/ plan has to be used
}
stmLast=newEvent;
lastEvents.add(newEvent);
temporalPredictionsAdapt();
while(lastEvents.size()>shortTermMemorySize) {
lastEvents.remove(0);
}
}
return true;
}
//is input or by the system triggered operation
public boolean isInputOrTriggeredOperation(final Task newEvent, Memory mem) {
if(!((newEvent.isInput() || Parameters.INTERNAL_EXPERIENCE_FULL) || (newEvent.getCause()!=null))) {
return false;
}
/*Term newcontent=newEvent.sentence.content;
if(newcontent instanceof Operation) {
Term pred=((Operation)newcontent).getPredicate();
if(pred.equals(mem.getOperator("^want")) || pred.equals(mem.getOperator("^believe"))) {
return false;
}
}*/
return true;
}
/*
public boolean isActionable(final Task newEvent, Memory mem) {
if(!((newEvent.isInput()))) {
return false;
}
Term newcontent=newEvent.sentence.content;
if(newcontent instanceof Operation) {
Term pred=((Operation)newcontent).getPredicate();
if(pred.equals(mem.getOperator("^want")) || pred.equals(mem.getOperator("^believe"))) {
return false;
}
}
return true;
}*/
//    public static class TaskConceptContent {
//        public final Task task;
//        public final Concept concept;
//        public final Term content;
//        public static TaskConceptContent NULL = new TaskConceptContent();
//        /** null placeholder */
//        protected TaskConceptContent() {
//            this.task = null;
//            this.concept = null;
//            this.content = null;
//        public TaskConceptContent(Task task, Concept concept, Term content) {
//            this.task = task;
//            this.concept = concept;
//            this.content = content;
// Publishes plan-graph size and active-task count to the logic sensors.
protected void updateSensors() {
memory.logic.PLAN_GRAPH_EDGE.commit(graph.implication.edgeSet().size());
memory.logic.PLAN_GRAPH_VERTEX.commit(graph.implication.vertexSet().size());
memory.logic.PLAN_TASK_EXECUTABLE.commit(tasks.size());
}
}
package com.perm.kate.api;
import org.json.JSONException;
import org.json.JSONObject;
public class Album {
public long aid;
public long thumb_id;
public long owner_id;
public String title;
public String description;
public long created;
public long updated;
public long size;
public long privacy;
public long comment_privacy;
public String thumb_src;
public static Album parse(JSONObject o) throws JSONException {
Album a = new Album();
a.title = Api.unescape(o.optString("title"));
a.aid = Long.parseLong(o.getString("id"));
a.owner_id = Long.parseLong(o.getString("owner_id"));
String description = o.optString("description");
if (description != null && !description.equals("") && !description.equals("null"))
a.description = Api.unescape(description);
String thumb_id = o.optString("thumb_id");
if (thumb_id != null && !thumb_id.equals("") && !thumb_id.equals("null"))
a.thumb_id = Long.parseLong(thumb_id);
String created = o.optString("created");
if (created != null && !created.equals("") && !created.equals("null"))
a.created = Long.parseLong(created);
JSONObject privacy=o.optJSONObject("privacy_view");
if(privacy!=null){
String type = privacy.optString("type");
if("all".equals(type))
a.privacy=0;
else if("friends".equals(type))
a.privacy=1;
else if("friends_of_friends".equals(type))
a.privacy=2;
else if("nobody".equals(type))
a.privacy=3;
else if("users".equals(type))
a.privacy=4;
}
JSONObject privacy_comment=o.optJSONObject("privacy_comment");
if(privacy_comment!=null){
String type = privacy_comment.optString("type");
if("all".equals(type))
a.comment_privacy=0;
else if("friends".equals(type))
a.comment_privacy=1;
else if("friends_of_friends".equals(type))
a.comment_privacy=2;
else if("nobody".equals(type))
a.comment_privacy=3;
else if("users".equals(type))
a.comment_privacy=4;
}
a.size = o.optLong("size");
String updated = o.optString("updated");
if (updated != null && !updated.equals("") && !updated.equals("null"))
a.updated = Long.parseLong(updated);
a.thumb_src = o.optString("thumb_src");
return a;
}
} |
package pact;
import java.util.logging.Level;
import java.util.logging.Logger;
import au.com.dius.pact.consumer.dsl.DslPart;
import au.com.dius.pact.consumer.dsl.PactDslJsonBody;
import au.com.dius.pact.consumer.dsl.PactDslWithProvider;
import au.com.dius.pact.model.PactFragment;
import au.com.dius.pact.consumer.ConsumerPactTest;
import java.util.*;
import au.com.dius.pact.consumer.PactProviderRule;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import org.junit.Rule;
import org.junit.Assert;
import au.com.dius.pact.consumer.dsl.PactDslJsonArray;
/**
 * Pact consumer contract test exercising PactDslJsonBody's eachLike() arrays
 * for a nested API response: a "meta" object with base/stats arrays and a
 * "result" object containing package entries with nested arrays.
 */
public class PactDslArrayLikeTest extends ConsumerPactTest {
Logger logger = Logger.getLogger(PactDslArrayLikeTest.class.getName());
@Rule
public PactProviderRule mockProvider = new PactProviderRule("test_provider", "localhost", 1234, this);
String v3Path = "/v3"; // NOTE(review): declared but unused — the interaction below hits "/"
// Expected response body. Each eachLike(...) declares an array whose elements
// match the object built up to the paired closeObject()/closeArray().
DslPart body = new PactDslJsonBody()
.object("meta")
.integerType("Count")
.eachLike("base")
.id()
.stringType("url")
.closeObject()
.closeArray()
.eachLike("stats")
.integerType("avg")
.integerType("Comments")
.integerType("daily")
.integerType("views")
.integerType("Dislikes")
.integerType("Likes")
.integerType("Views")
.integerType("totalC")
.integerType("totalV")
.closeObject()
.closeArray()
.closeObject()
.object("result")
.eachLike("packages")
.booleanType("instore")
.booleanType("custom")
.stringType("description")
.id()
.stringType("name")
.stringType("shortName")
.stringType("uri")
.eachLike("parents")
.id()
.stringType("uri")
.closeObject()
.closeArray()
.eachLike("stats")
.integerType("avg")
.integerType("comments")
.integerType("daily")
.integerType("views")
.integerType("Dislikes")
.integerType("Likes")
.integerType("Views")
.integerType("totalC")
.integerType("totalV")
.closeObject()
.closeArray()
.eachLike("omissions")
.closeObject()
.closeArray()
.eachLike("children")
.closeObject()
.closeArray()
.closeObject()
.closeArray()
.closeObject();
// Declares a single GET / interaction that responds 200 with the JSON body above.
protected PactFragment createFragment(PactDslWithProvider builder) {
Map<String, String> headers = new HashMap<String, String>();
headers.put("Content-Type", "application/json");
PactFragment fragment = builder
.uponReceiving("API v3 endpoint response")
.path("/")
.method("GET")
.willRespondWith()
.status(200)
.headers(headers)
.body(body)
.toFragment();
return fragment;
}
@Override
protected String providerName() {
return "test_provider";
}
@Override
protected String consumerName() {
return "test_consumer";
}
@Override
protected void runTest(String url) {
Map actualResponse;
try {
// Performing the request against the mock provider is what exercises the
// contract; the Pact framework fails the test if the declared interaction
// is not carried out.
actualResponse = new ConsumerClient(url).getAsMap("/", "");
}
catch (Exception e) {
throw new RuntimeException(e);
}
// NOTE(review): actualResponse is never asserted against — consider adding
// assertions (e.g. hasKey("meta")) so client regressions surface here.
}
}
// interfascia (at) thbbpt (dot) net
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
// USA
package interfascia;
import processing.core.*;
import java.awt.event.*;
/** The IFTextField class is used for a simple one-line text field */
public class IFTextField extends GUIComponent {
private int currentColor;
private String contents = "";
private int cursorPos = 0, visiblePortionStart = 0, visiblePortionEnd = 0;
private int startSelect = -1, endSelect = -1;
private float contentWidth = 0, visiblePortionWidth = 0;
private float cursorXPos = 0, startSelectXPos = 0, endSelectXPos = 0;
/**
* creates an empty IFTextField with the specified label, with specified position, and a default width of 100 pixels.
* @param argLabel the text field's label
* @param argX the text field's X location on the screen, relative to the PApplet.
* @param argY the text filed's Y location on the screen, relative
* to the PApplet.
*/
public IFTextField (String newLabel, int newX, int newY) {
this (newLabel, newX, newY, 100, "");
}
/**
* creates an empty IFTextField with the specified label and with specified position and width.
* @param argLabel the text field's label
* @param argX the text field's X location on the screen, relative to the PApplet.
* @param argY the text filed's Y location on the screen, relative to the PApplet.
* @param argWidth the text field's width
*/
public IFTextField (String argLabel, int argX, int argY, int argWidth) {
this (argLabel, argX, argY, argWidth, "");
}
/**
* creates an IFTextField with the specified label, with specified position and width, and with specified contents.
* @param argLabel the text field's label
* @param argX the text field's X location on the screen, relative to the PApplet.
* @param argY the text filed's Y location on the screen, relative to the PApplet.
* @param argWidth the text field's width
* @param argContents the default contents of the text field
*/
public IFTextField (String argLabel, int argX, int argY, int argWidth, String newValue) {
setLabel(argLabel);
setPosition(argX, argY);
setSize(argWidth, 21);
setValue(newValue);
}
public static boolean validUnicode(char b)
{
int c = (int)b;
return (
(c >= 0x0020 && c <= 0x007E) ||
(c >= 0x00A1 && c <= 0x017F) ||
(c == 0x018F) ||
(c == 0x0192) ||
(c >= 0x01A0 && c <= 0x01A1) ||
(c >= 0x01AF && c <= 0x01B0) ||
(c >= 0x01D0 && c <= 0x01DC) ||
(c >= 0x01FA && c <= 0x01FF) ||
(c >= 0x0218 && c <= 0x021B) ||
(c >= 0x0250 && c <= 0x02A8) ||
(c >= 0x02B0 && c <= 0x02E9) ||
(c >= 0x0300 && c <= 0x0345) ||
(c >= 0x0374 && c <= 0x0375) ||
(c == 0x037A) ||
(c == 0x037E) ||
(c >= 0x0384 && c <= 0x038A) ||
(c >= 0x038E && c <= 0x03A1) ||
(c >= 0x03A3 && c <= 0x03CE) ||
(c >= 0x03D0 && c <= 0x03D6) ||
(c >= 0x03DA) ||
(c >= 0x03DC) ||
(c >= 0x03DE) ||
(c >= 0x03E0) ||
(c >= 0x03E2 && c <= 0x03F3) ||
(c >= 0x0401 && c <= 0x044F) ||
(c >= 0x0451 && c <= 0x045C) ||
(c >= 0x045E && c <= 0x0486) ||
(c >= 0x0490 && c <= 0x04C4) ||
(c >= 0x04C7 && c <= 0x04C9) ||
(c >= 0x04CB && c <= 0x04CC) ||
(c >= 0x04D0 && c <= 0x04EB) ||
(c >= 0x04EE && c <= 0x04F5) ||
(c >= 0x04F8 && c <= 0x04F9) ||
(c >= 0x0591 && c <= 0x05A1) ||
(c >= 0x05A3 && c <= 0x05C4) ||
(c >= 0x05D0 && c <= 0x05EA) ||
(c >= 0x05F0 && c <= 0x05F4) ||
(c >= 0x060C) ||
(c >= 0x061B) ||
(c >= 0x061F) ||
(c >= 0x0621 && c <= 0x063A) ||
(c >= 0x0640 && c <= 0x0655) ||
(c >= 0x0660 && c <= 0x06EE) ||
(c >= 0x06F0 && c <= 0x06FE) ||
(c >= 0x0901 && c <= 0x0939) ||
(c >= 0x093C && c <= 0x094D) ||
(c >= 0x0950 && c <= 0x0954) ||
(c >= 0x0958 && c <= 0x0970) ||
(c >= 0x0E01 && c <= 0x0E3A) ||
(c >= 0x1E80 && c <= 0x1E85) ||
(c >= 0x1EA0 && c <= 0x1EF9) ||
(c >= 0x2000 && c <= 0x202E) ||
(c >= 0x2030 && c <= 0x2046) ||
(c == 0x2070) ||
(c >= 0x2074 && c <= 0x208E) ||
(c == 0x2091) ||
(c >= 0x20A0 && c <= 0x20AC) ||
(c >= 0x2100 && c <= 0x2138) ||
(c >= 0x2153 && c <= 0x2182) ||
(c >= 0x2190 && c <= 0x21EA) ||
(c >= 0x2190 && c <= 0x21EA) ||
(c >= 0x2000 && c <= 0x22F1) ||
(c == 0x2302) ||
(c >= 0x2320 && c <= 0x2321) ||
(c >= 0x2460 && c <= 0x2469) ||
(c == 0x2500) ||
(c == 0x2502) ||
(c == 0x250C) ||
(c == 0x2510) ||
(c == 0x2514) ||
(c == 0x2518) ||
(c == 0x251C) ||
(c == 0x2524) ||
(c == 0x252C) ||
(c == 0x2534) ||
(c == 0x253C) ||
(c >= 0x2550 && c <= 0x256C) ||
(c == 0x2580) ||
(c == 0x2584) ||
(c == 0x2588) ||
(c == 0x258C) ||
(c >= 0x2590 && c <= 0x2593) ||
(c == 0x25A0) ||
(c >= 0x25AA && c <= 0x25AC) ||
(c == 0x25B2) ||
(c == 0x25BA) ||
(c == 0x25BC) ||
(c == 0x25C4) ||
(c == 0x25C6) ||
(c >= 0x25CA && c <= 0x25CC) ||
(c == 0x25CF) ||
(c >= 0x25D7 && c <= 0x25D9) ||
(c == 0x25E6) ||
(c == 0x2605) ||
(c == 0x260E) ||
(c == 0x261B) ||
(c == 0x261E) ||
(c >= 0x263A && c <= 0x263C) ||
(c == 0x2640) ||
(c == 0x2642) ||
(c == 0x2660) ||
(c == 0x2663) ||
(c == 0x2665) ||
(c == 0x2666) ||
(c == 0x266A) ||
(c == 0x266B) ||
(c >= 0x2701 && c <= 0x2709) ||
(c >= 0x270C && c <= 0x2727) ||
(c >= 0x2729 && c <= 0x274B) ||
(c == 0x274D) ||
(c >= 0x274F && c <= 0x2752) ||
(c == 0x2756) ||
(c >= 0x2758 && c <= 0x275E) ||
(c >= 0x2761 && c <= 0x2767) ||
(c >= 0x2776 && c <= 0x2794) ||
(c >= 0x2798 && c <= 0x27BE) ||
(c >= 0xF001 && c <= 0xF002) ||
(c >= 0xF021 && c <= 0xF0FF) ||
(c >= 0xF601 && c <= 0xF605) ||
(c >= 0xF610 && c <= 0xF616) ||
(c >= 0xF800 && c <= 0xF807) ||
(c >= 0xF80A && c <= 0xF80B) ||
(c >= 0xF80E && c <= 0xF811) ||
(c >= 0xF814 && c <= 0xF815) ||
(c >= 0xF81F && c <= 0xF820) ||
(c >= 0xF81F && c <= 0xF820) ||
(c == 0xF833));
}
public void initWithParent () {
controller.parent.registerMouseEvent(this);
}
/**
* adds a character to the immediate right of the insertion point or replaces the selected group of characters. This method is called by <pre>public void MouseEvent</pre> if a unicode character is entered via the keyboard.
* @param c the character to be added
*/
private void addChar(char c) {
String t1, t2;
if (startSelect != -1 && endSelect != -1) {
if (startSelect > endSelect) {
int temp = startSelect;
startSelect = endSelect;
endSelect = temp;
}
if (endSelect > contents.length())
endSelect = contents.length();
t1 = contents.substring(0, startSelect);
t2 = contents.substring(endSelect);
cursorPos = startSelect;
startSelect = endSelect = -1;
} else {
t1 = contents.substring(0, cursorPos);
t2 = contents.substring(cursorPos);
}
contents = t1 + c + t2;
cursorPos++;
// Adjust the start and end positions of the visible portion of the string
if (controller.parent.textWidth(contents) < getWidth() - 12) {
visiblePortionStart = 0;
visiblePortionEnd = contents.length();
} else {
if (cursorPos == contents.length()) {
visiblePortionEnd = cursorPos;
shrinkLeft();
} else {
if (cursorPos >= visiblePortionEnd)
centerCursor();
else {
visiblePortionEnd = visiblePortionStart;
growRight();
}
//while (controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd)) < getWidth() - 12)
// visiblePortionEnd++;
}
}
fireEventNotification(this, "Modified");
}
/**
* deletes either the character directly to the left of the insertion point or the selected group of characters. It automatically handles cases where there is no character to the left of the insertion point (when the insertion point is at the beginning of the string). It is called by <pre>public void keyEvent</pre> when the delete key is pressed.
*/
private void backspaceChar() {
String t1 = "", t2 = "";
if (startSelect != -1 && endSelect != -1) {
if (startSelect > endSelect) {
int temp = startSelect;
startSelect = endSelect;
endSelect = temp;
}
if (endSelect > contents.length())
endSelect = contents.length();
t1 = contents.substring(0, startSelect);
t2 = contents.substring(endSelect);
cursorPos = startSelect;
startSelect = endSelect = -1;
contents = t1 + t2;
} else if (cursorPos > 0) {
if (cursorPos > contents.length())
cursorPos = contents.length();
t1 = contents.substring(0, cursorPos - 1);
t2 = contents.substring(cursorPos);
cursorPos
contents = t1 + t2;
}
// Adjust the start and end positions of the visible portion of the string
if (controller.parent.textWidth(contents) < getWidth() - 12) {
visiblePortionStart = 0;
visiblePortionEnd = contents.length();
} else {
if (cursorPos == contents.length()) {
visiblePortionEnd = cursorPos;
growLeft();
} else {
if (cursorPos <= visiblePortionStart) {
centerCursor();
} else {
visiblePortionEnd = visiblePortionStart;
growRight();
//while (controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd)) < getWidth() - 12) {
// if (visiblePortionEnd == contents.length())
// break;
// visiblePortionEnd++;
}
}
}
fireEventNotification(this, "Modified");
//controller.userState.restoreSettingsToApplet(controller.parent);
}
private void deleteChar() {
if(cursorPos >= contents.length()) return;
cursorPos++;
backspaceChar();
}
private void updateXPos() {
cursorXPos = controller.parent.textWidth(contents.substring(visiblePortionStart, cursorPos));
if (startSelect != -1 && endSelect != -1) {
int tempStart, tempEnd;
if (endSelect < startSelect) {
tempStart = endSelect;
tempEnd = startSelect;
} else {
tempStart = startSelect;
tempEnd = endSelect;
}
if (tempStart < visiblePortionStart)
startSelectXPos = 0;
else
startSelectXPos = controller.parent.textWidth(contents.substring(visiblePortionStart, tempStart));
if (tempEnd > visiblePortionEnd)
endSelectXPos = getWidth() - 4;
else
endSelectXPos = controller.parent.textWidth(contents.substring(visiblePortionStart, tempEnd));
}
}
private void growRight() {
while (controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd)) < getWidth() - 12) {
if (visiblePortionEnd == contents.length())
if (visiblePortionStart == 0)
break;
else
visiblePortionStart
visiblePortionEnd++;
}
}
private void growLeft() {
while (controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd)) < getWidth() - 12) {
if (visiblePortionStart == 0)
if (visiblePortionEnd == contents.length())
break;
else
visiblePortionEnd++;
visiblePortionStart
}
}
private void shrinkRight() {
while (controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd)) > getWidth() - 12) {
visiblePortionEnd
}
}
private void shrinkLeft() {
while (controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd)) > getWidth() - 12) {
visiblePortionStart++;
}
}
private void centerCursor() {
visiblePortionStart = visiblePortionEnd = cursorPos;
while (controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd)) < getWidth() - 12) {
if (visiblePortionStart != 0)
visiblePortionStart
if (visiblePortionEnd != contents.length())
visiblePortionEnd++;
if (visiblePortionEnd == contents.length() && visiblePortionStart == 0)
break;
}
}
/**
* given the X position of the mouse in relation to the X
* position of the text field, findClosestGap(int x) will
* return the index of the closest letter boundary in the
* letterWidths array.
*/
private int findClosestGap(int x) {
float prev = 0, cur;
if (x < 0) {
return visiblePortionStart;
} else if (x > getWidth()) {
return visiblePortionEnd;
}
for (int i = visiblePortionStart; i < visiblePortionEnd; i++) {
cur = controller.parent.textWidth(contents.substring(visiblePortionStart, i));
if (cur > x) {
if (cur - x < x - prev)
return i;
else
return i - 1;
}
prev = cur;
}
// Don't know what else to return
return contents.length();
}
/**
* sets the contents of the text box and displays the
* specified string in the text box widget.
* @param val the string to become the text field's contents
*/
public void setValue(String newValue) {
contents = newValue;
cursorPos = contents.length();
startSelect = endSelect = -1;
visiblePortionStart = 0;
visiblePortionEnd = contents.length();
// Adjust the start and end positions of the visible portion of the string
if (controller != null) {
if (controller.parent.textWidth(contents) > getWidth() - 12) {
shrinkRight();
}
}
fireEventNotification(this, "Set");
}
/**
* returns the string that is displayed in the text area.
* If the contents have not been initialized, getValue()
* returns NULL, if the contents have been initialized but
* not set, it returns an empty string.
* @return contents the contents of the text field
*/
public String getValue() {
return contents;
}
/**
* implemented to conform to Processing's mouse event handler
* requirements. You shouldn't call this method directly, as
* Processing will forward mouse events to this object directly.
* mouseEvent() handles mouse clicks, drags, and releases sent
* from the parent PApplet.
* @param e the MouseEvent to handle
*/
public void mouseEvent(MouseEvent e) {
controller.userState.saveSettingsForApplet(controller.parent);
lookAndFeel.defaultGraphicsState.restoreSettingsToApplet(controller.parent);
if (e.getID() == MouseEvent.MOUSE_PRESSED) {
if (isMouseOver(e.getX(), e.getY())) {
controller.requestFocus(this);
wasClicked = true;
endSelect = -1;
startSelect = cursorPos = findClosestGap(e.getX() - getX());
} else {
if (controller.getFocusStatusForComponent(this)) {
wasClicked = false;
controller.yieldFocus(this);
startSelect = endSelect = -1;
}
}
} else if (e.getID() == MouseEvent.MOUSE_DRAGGED) {
/*if (controller.parent.millis() % 500 == 0) {
System.out.println("MOVE");
if (e.getX() < getX() && endSelect > 0) {
// move left
endSelect = visiblePortionStart = endSelect - 1;
shrinkRight();
} else if (e.getX() > getX() + getWidth() && endSelect < contents.length() - 1) {
// move right
endSelect = visiblePortionEnd = endSelect + 1;
shrinkLeft();
}
}*/
endSelect = cursorPos = findClosestGap(e.getX() - getX());
} else if (e.getID() == MouseEvent.MOUSE_RELEASED) {
if (endSelect == startSelect) {
startSelect = -1;
endSelect = -1;
}
}
updateXPos();
controller.userState.restoreSettingsToApplet(controller.parent);
}
/**
* receives KeyEvents forwarded to it by the GUIController
* if the current instance is currently in focus.
* @param e the KeyEvent to be handled
*/
public void keyEvent(KeyEvent e) {
controller.userState.saveSettingsForApplet(controller.parent);
lookAndFeel.defaultGraphicsState.restoreSettingsToApplet(controller.parent);
int shortcutMask = java.awt.Toolkit.getDefaultToolkit().getMenuShortcutKeyMask();
boolean shiftDown = ((e.getModifiersEx() & KeyEvent.SHIFT_DOWN_MASK) == KeyEvent.SHIFT_DOWN_MASK);
if (e.getID() == KeyEvent.KEY_PRESSED) {
if (e.getKeyCode() == KeyEvent.VK_DOWN) {
if (shiftDown) {
if (startSelect == -1)
startSelect = cursorPos;
endSelect = cursorPos = visiblePortionEnd = contents.length();
} else {
// Shift isn't down
startSelect = endSelect = -1;
cursorPos = visiblePortionEnd = contents.length();
}
visiblePortionStart = visiblePortionEnd;
growLeft();
}
else if (e.getKeyCode() == KeyEvent.VK_UP) {
if (shiftDown) {
if (endSelect == -1)
endSelect = cursorPos;
startSelect = cursorPos = visiblePortionStart = 0;
} else {
// Shift isn't down
startSelect = endSelect = -1;
cursorPos = visiblePortionStart = 0;
}
visiblePortionEnd = visiblePortionStart;
growRight();
}
else if (e.getKeyCode() == KeyEvent.VK_LEFT) {
if (shiftDown) {
if (cursorPos > 0) {
if (startSelect != -1 && endSelect != -1) {
startSelect
cursorPos
} else {
endSelect = cursorPos;
cursorPos
startSelect = cursorPos;
}
}
} else {
if (startSelect != -1 && endSelect != -1) {
cursorPos = Math.min(startSelect, endSelect);
startSelect = endSelect = -1;
} else if (cursorPos > 0) {
cursorPos
}
}
centerCursor();
}
else if (e.getKeyCode() == KeyEvent.VK_RIGHT) {
if (shiftDown) {
if (cursorPos < contents.length()) {
if (startSelect != -1 && endSelect != -1) {
endSelect++;
cursorPos++;
} else {
startSelect = cursorPos;
cursorPos++;
endSelect = cursorPos;
}
}
} else {
if (startSelect != -1 && endSelect != -1) {
cursorPos = Math.max(startSelect, endSelect);
startSelect = endSelect = -1;
} else if (cursorPos < contents.length()) {
cursorPos++;
}
}
centerCursor();
}
else if (e.getKeyCode() == KeyEvent.VK_DELETE) {
deleteChar();
}
else if (e.getKeyCode() == KeyEvent.VK_ENTER) {
fireEventNotification(this, "Completed");
}
else{
if ((e.getModifiers() & shortcutMask) == shortcutMask) {
switch (e.getKeyCode()) {
case KeyEvent.VK_C:
System.out.println("Copy");
break;
case KeyEvent.VK_V:
System.out.println("Paste");
break;
case KeyEvent.VK_X:
System.out.println("Cut");
break;
case KeyEvent.VK_A:
startSelect = 0;
endSelect = contents.length();
break;
}
}
}
}
else if (e.getID() == KeyEvent.KEY_TYPED) {
if ((e.getModifiers() & shortcutMask) == shortcutMask) {
}
else if (e.getKeyChar() == '\b') {
backspaceChar();
}
else if (e.getKeyChar() != KeyEvent.CHAR_UNDEFINED) {
if(validUnicode(e.getKeyChar()))
addChar(e.getKeyChar());
}
}
updateXPos();
controller.userState.restoreSettingsToApplet(controller.parent);
}
/**
* draws the text field, contents, selection, and cursor
* to the screen.
*/
public void draw () {
boolean hasFocus = controller.getFocusStatusForComponent(this);
if (wasClicked) {
currentColor = lookAndFeel.activeColor;
} else if (isMouseOver (controller.parent.mouseX, controller.parent.mouseY) || hasFocus) {
currentColor = lookAndFeel.highlightColor;
} else {
currentColor = lookAndFeel.baseColor;
}
// Draw the surrounding box
controller.parent.stroke(lookAndFeel.highlightColor);
controller.parent.fill(lookAndFeel.borderColor);
controller.parent.rect(getX(), getY(), getWidth(), getHeight());
controller.parent.noStroke();
// Compute the left offset for the start of text
float offset;
if (cursorPos == contents.length() && controller.parent.textWidth(contents) > getWidth() - 8)
offset = (getWidth() - 4) - controller.parent.textWidth(contents.substring(visiblePortionStart, visiblePortionEnd));
else
offset = 4;
// Draw the selection rectangle
if (hasFocus && startSelect != -1 && endSelect != -1) {
controller.parent.fill(lookAndFeel.selectionColor);
controller.parent.rect(getX() + startSelectXPos + offset, getY() + 3, endSelectXPos - startSelectXPos + 1, 15);
}
// Draw the string
controller.parent.fill (lookAndFeel.textColor);
controller.parent.text (contents.substring(visiblePortionStart, visiblePortionEnd), getX() + offset, getY() + 5, getWidth() - 8, getHeight() - 6);
// Draw the insertion point (it blinks!)
if (hasFocus && (startSelect == -1 || endSelect == -1) && ((controller.parent.millis() % 1000) > 500)) {
controller.parent.stroke(lookAndFeel.darkGrayColor);
controller.parent.line(getX() + (int) cursorXPos + offset, getY() + 3, getX() + (int) cursorXPos + offset, getY() + 18);
}
}
public void actionPerformed(GUIEvent e) {
super.actionPerformed(e);
if (e.getSource() == this) {
if (e.getMessage().equals("Received Focus")) {
if (contents != "") {
startSelect = 0;
endSelect = contents.length();
}
} else if (e.getMessage().equals("Lost Focus")) {
if (contents != "") {
startSelect = endSelect = -1;
}
}
}
}
} |
package com.haulmont.cuba.web;
import com.haulmont.cuba.core.global.ClientType;
import com.haulmont.cuba.core.global.ConfigProvider;
import com.haulmont.cuba.core.global.GlobalConfig;
import com.haulmont.cuba.core.global.MessageProvider;
import com.haulmont.cuba.core.sys.AppContext;
import com.haulmont.cuba.core.sys.SecurityContext;
import com.haulmont.cuba.gui.AppConfig;
import com.haulmont.cuba.gui.ServiceLocator;
import com.haulmont.cuba.security.app.UserSessionService;
import com.haulmont.cuba.security.global.UserSession;
import com.haulmont.cuba.web.exception.ExceptionHandlers;
import com.haulmont.cuba.web.gui.WebTimer;
import com.haulmont.cuba.web.log.AppLog;
import com.haulmont.cuba.web.sys.ActiveDirectoryHelper;
import com.haulmont.cuba.web.sys.LinkHandler;
import com.haulmont.cuba.web.toolkit.Timer;
import com.vaadin.Application;
import com.vaadin.service.ApplicationContext;
import com.vaadin.terminal.Terminal;
import com.vaadin.terminal.gwt.server.AbstractApplicationServlet;
import com.vaadin.terminal.gwt.server.HttpServletRequestListener;
import com.vaadin.ui.Window;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Main class of the web application. Each client connection has its own App.
* Use {@link #getInstance()} static method to obtain the reference to the current App instance
* throughout the application code.
* <p/>
* Specific application should inherit from this class and set derived class name
* in <code>application</code> servlet parameter of <code>web.xml</code>
*/
public abstract class App extends Application
implements ApplicationContext.TransactionListener, HttpServletRequestListener {
private static final long serialVersionUID = -3435976475534930050L;
// Matches generated browser window names of the form "winNNNN" (see generateWebWindowName).
public static final Pattern WIN_PATTERN = Pattern.compile("win([0-9]{1,4})");
private static Log log = LogFactory.getLog(App.class);
public static final String THEME_NAME = "havana";
// HTTP-session attribute keys used for processing external links.
public static final String LAST_REQUEST_PARAMS_ATTR = "lastRequestParams";
public static final String LAST_REQUEST_ACTION_ATTR = "lastRequestAction";
public static final List<String> ACTION_NAMES = Arrays.asList("open", "login");
public static final String USER_SESSION_ATTR = "userSessionId";
// Cookie-name prefix for the per-web-context user theme preference.
public static final String APP_THEME_COOKIE_PREFIX = "APP_THEME_NAME_";
// Middleware connection for this client; created by the subclass via createConnection().
protected Connection connection;
private WebWindowManager windowManager;
private AppLog appLog;
protected ExceptionHandlers exceptionHandlers;
// The App bound to the thread handling the current request (set in transactionStart).
private static ThreadLocal<App> currentApp = new ThreadLocal<App>();
// Name of the browser window targeted by the current request; transient/per-thread.
protected transient ThreadLocal<String> currentWindowName = new ThreadLocal<String>();
// External link handler deferred until the connection is established.
protected LinkHandler linkHandler;
protected AppTimers timers;
// Request start timestamps keyed by transaction data; weak so entries can be reclaimed.
protected transient Map<Object, Long> requestStartTimes = new WeakHashMap<Object, Long>();
// Process-wide flag: views are deployed at most once per VM (see checkDeployedViews).
private static volatile boolean viewsDeployed;
private volatile String contextName;
// Current servlet response; needed by the AppCookies callback to write cookies.
private transient HttpServletResponse response;
private transient HttpSession httpSession;
private AppCookies cookies;
private BackgroundTaskManager backgroundTaskManager;
// True while serving a request marked as a test-mode request (see onRequestStart).
protected boolean testModeRequest = false;
// Guards the one-time theme-cookie initialization in onRequestStart.
protected boolean themeInitialized = false;
protected String clientAddress;
protected WebConfig webConfig;
protected WebTimer workerTimer;
static {
// Mark this VM as serving the WEB client type before any UI code runs.
AppContext.setProperty(AppConfig.CLIENT_TYPE_PROP, ClientType.WEB.toString());
}
// Builds the per-client application state. Note the initialization order:
// createConnection()/createWindowManager() are overridable factory methods,
// so subclasses must not rely on their own fields being initialized when
// these are called from this constructor.
protected App() {
webConfig = ConfigProvider.getConfig(WebConfig.class);
appLog = new AppLog();
connection = createConnection();
windowManager = createWindowManager();
exceptionHandlers = new ExceptionHandlers(this);
// Cookies are written through the response of the request currently being
// served (the 'response' field is refreshed in onRequestStart).
cookies = new AppCookies() {
@Override
protected void addCookie(Cookie cookie) {
response.addCookie(cookie);
}
};
cookies.setCookiesEnabled(true);
timers = new AppTimers(this);
backgroundTaskManager = new BackgroundTaskManager();
}
// Restores the transient per-thread fields after deserialization
// (e.g. when the servlet container migrates or restores the session).
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
currentWindowName = new ThreadLocal<String>();
requestStartTimes = new WeakHashMap<Object, Long>();
}
/**
 * Called by Vaadin at the beginning of every HTTP request.
 * Captures the response (needed for cookie writes), applies the user's
 * preferred theme once per App instance, and evaluates the test-mode flag.
 */
@Override
public void onRequestStart(HttpServletRequest request, HttpServletResponse response) {
    // Keep the response so AppCookies.addCookie can reach it.
    this.response = response;
    cookies.updateCookies(request);
    // Lazily apply the theme stored in the per-context cookie on the first
    // request handled by this application instance.
    if (!themeInitialized) {
        GlobalConfig globalConfig = ConfigProvider.getConfig(GlobalConfig.class);
        String userAppTheme = cookies.getCookieValue(APP_THEME_COOKIE_PREFIX + globalConfig.getWebContextName());
        if (userAppTheme != null && !StringUtils.equals(userAppTheme, getTheme())) {
            // Switch only to themes the application actually supports.
            // Uses the webConfig field initialized in the constructor instead
            // of re-fetching the config into a shadowing local as before.
            List<String> supportedThemes = webConfig.getAvailableAppThemes();
            if (supportedThemes.contains(userAppTheme)) {
                setTheme(userAppTheme);
            }
        }
        themeInitialized = true;
    }
    // In test mode a request counts as a "test request" either when no marker
    // parameter is configured or when the marker parameter is present.
    if (ConfigProvider.getConfig(GlobalConfig.class).getTestMode()) {
        String paramName = webConfig.getTestModeParamName();
        testModeRequest = (paramName == null || request.getParameter(paramName) != null);
    }
}
// Called by Vaadin at the end of every HTTP request; the test-mode flag is
// request-scoped and therefore reset here.
@Override
public void onRequestEnd(HttpServletRequest request, HttpServletResponse response) {
testModeRequest = false;
}
/**
 * Returns the Vaadin system messages compiled for the default locale:
 * the first configured application locale once the container has started,
 * or the JVM default locale during early startup.
 */
public static Application.SystemMessages getSystemMessages() {
    Locale locale;
    if (AppContext.isStarted()) {
        GlobalConfig globalConfig = ConfigProvider.getConfig(GlobalConfig.class);
        // The first entry of the configured locales acts as the default.
        Iterator<Map.Entry<String, Locale>> it =
                globalConfig.getAvailableLocales().entrySet().iterator();
        locale = it.next().getValue();
    } else {
        locale = Locale.getDefault();
    }
    return compileSystemMessages(locale);
}
/**
 * Builds the localized Vaadin system messages for the given locale.
 * Message texts are resolved from the application's message pack once the
 * container has started; the internal-error URL restarts the application.
 *
 * @param locale locale to resolve the messages for
 * @return populated system messages object
 */
public static CubaSystemMessages compileSystemMessages(Locale locale) {
    CubaSystemMessages messages = new CubaSystemMessages();
    String webContext = AppContext.getProperty("cuba.webContextName");
    if (AppContext.isStarted()) {
        String pack = AppConfig.getMessagesPack();
        messages.setSessionExpiredCaption(MessageProvider.getMessage(pack, "sessionExpiredCaption", locale));
        messages.setSessionExpiredMessage(MessageProvider.getMessage(pack, "sessionExpiredMessage", locale));
        messages.setCommunicationErrorCaption(MessageProvider.getMessage(pack, "communicationErrorCaption", locale));
        messages.setCommunicationErrorMessage(MessageProvider.getMessage(pack, "communicationErrorMessage", locale));
        messages.setInternalErrorCaption(MessageProvider.getMessage(pack, "internalErrorCaption", locale));
        messages.setInternalErrorMessage(MessageProvider.getMessage(pack, "internalErrorMessage", locale));
        messages.setUiBlockingMessage(MessageProvider.getMessage(pack, "uiBlockingMessage", locale));
    }
    // On an internal error, send the browser to a URL that restarts the app.
    messages.setInternalErrorURL("/" + webContext + "?restartApp");
    messages.setOutOfSyncNotificationEnabled(false);
    return messages;
}
// Extends Vaadin's customizable system messages with a CUBA-specific
// message shown while the UI is blocked by a long-running operation.
public static class CubaSystemMessages extends Application.CustomizedSystemMessages {
// Message displayed during UI-blocking operations; empty by default.
private String uiBlockingMessage = "";
public String getUiBlockingMessage() {
return uiBlockingMessage;
}
public void setUiBlockingMessage(String uiBlockingMessage) {
this.uiBlockingMessage = uiBlockingMessage;
}
}
// Hook invoked for a not-yet-connected session (see transactionStart);
// implementations return true when a login has been performed.
protected abstract boolean loginOnStart(HttpServletRequest request);
// Factory for the application-specific middleware Connection.
protected abstract Connection createConnection();
/**
 * Can be overridden in descendant to create an application-specific {@link WebWindowManager}.
 * Called once from the constructor.
 */
protected WebWindowManager createWindowManager() {
return new WebWindowManager(this);
}
/**
 * @return the App instance bound to the current request-handling thread.
 *         Can be invoked anywhere in application code.
 * @throws IllegalStateException when no App is bound to this thread
 *         (may happen after hot-deployment)
 */
public static App getInstance() {
    final App bound = currentApp.get();
    if (bound == null) {
        throw new IllegalStateException("No App bound to the current thread. This may be the result of hot-deployment.");
    }
    return bound;
}
/** @return whether an App instance is bound to the current thread. */
public static boolean isBound() {
    App bound = currentApp.get();
    return bound != null;
}
/**
 * Generates a pseudo-random browser window name of the form "winNNNN"
 * (recognized by {@link #WIN_PATTERN}).
 *
 * @return a new window name
 */
public static String generateWebWindowName() {
    // Cast directly instead of allocating a boxed Double just for intValue().
    int suffix = (int) (Math.random() * 10000);
    return "win" + suffix;
}
/**
 * Initializes exception handlers immediately after login and logout.
 * Can be overridden in descendants to manipulate exception handlers
 * programmatically.
 *
 * @param isConnected true after login, false after logout
 */
protected void initExceptionHandlers(boolean isConnected) {
    if (!isConnected) {
        // Logged out: drop all registered handlers.
        exceptionHandlers.removeAll();
        return;
    }
    // Logged in: (re)build the handler chain from configuration.
    exceptionHandlers.createByConfiguration();
}
// Deploys views at most once per VM.
// NOTE(review): the check-then-set on the static volatile flag is not
// synchronized, so two concurrent first requests could both call
// deployViews(); presumably deployViews() is idempotent -- confirm.
protected void checkDeployedViews() {
if (!viewsDeployed) {
deployViews();
viewsDeployed = true;
}
}
/**
 * DEPRECATED: use cuba.viewsConfig application property.
 * Intentionally empty; kept as an overridable hook for legacy descendants.
 */
@Deprecated
protected void deployViews() {
}
/**
 * Should be overridden in descendant to create an application-specific
 * main window. The default implementation also attaches a session-ping
 * timer to the window when one is configured.
 */
protected AppWindow createAppWindow() {
    final AppWindow appWindow = new AppWindow(connection);
    final Timer pingTimer = createSessionPingTimer(true);
    if (pingTimer != null) {
        timers.add(pingTimer, appWindow);
    }
    return appWindow;
}
/**
 * Returns the AppWindow targeted by the current request, falling back to
 * the main window when no window name is bound to this thread; returns
 * null when the resolved window is not an AppWindow.
 */
public AppWindow getAppWindow() {
    final String name = currentWindowName.get();
    //noinspection deprecation
    final Window resolved = (name == null) ? getMainWindow() : getWindow(name);
    return (resolved instanceof AppWindow) ? (AppWindow) resolved : null;
}
/**
 * Don't use this method in application code.<br>
 * Use {@link #getAppWindow} instead, which resolves the window bound to
 * the current request thread.
 */
@Deprecated
@Override
public Window getMainWindow() {
return super.getMainWindow();
}
// Removes the window from the Vaadin application and, for AppWindows,
// also unsubscribes it from connection state notifications.
@Override
public void removeWindow(Window window) {
super.removeWindow(window);
if (window instanceof AppWindow) {
connection.removeListener((AppWindow) window);
}
}
/**
 * @return Current connection object (created once in the constructor).
 */
public Connection getConnection() {
return connection;
}
// Accessor for the per-client window manager created in the constructor.
public WebWindowManager getWindowManager() {
return windowManager;
}
// Accessor for the per-client application log.
public AppLog getAppLog() {
return appLog;
}
/**
 * Resolves the name for a new browser window: the configured main/login
 * window name when present, otherwise a generated "winNNNN" name.
 *
 * @param main true for the main window, false for the login window
 */
protected String createWindowName(boolean main) {
    final String property = main ? "cuba.web.mainWindowName" : "cuba.web.loginWindowName";
    String name = AppContext.getProperty(property);
    if (StringUtils.isBlank(name)) {
        name = generateWebWindowName();
    }
    return name;
}
// Hook invoked after user substitution; the default implementation is a no-op.
public void userSubstituted(Connection connection) {
}
/**
 * Central Vaadin terminal error callback. In test mode the stack trace is
 * additionally dumped to the file configured by cuba.testModeExceptionLog.
 * Request-level errors are only logged; all other errors are routed
 * through the exception handler chain and the application log.
 */
@Override
public void terminalError(Terminal.ErrorEvent event) {
    GlobalConfig globalConfig = ConfigProvider.getConfig(GlobalConfig.class);
    if (globalConfig.getTestMode()) {
        String dumpFile = AppContext.getProperty("cuba.testModeExceptionLog");
        if (!StringUtils.isBlank(dumpFile)) {
            try {
                FileOutputStream out = new FileOutputStream(dumpFile);
                try {
                    out.write(ExceptionUtils.getStackTrace(event.getThrowable()).getBytes());
                } finally {
                    out.close();
                }
            } catch (Exception e) {
                // Best-effort diagnostics only; never let the dump itself fail the request.
                log.debug(e);
            }
        }
    }
    if (event instanceof AbstractApplicationServlet.RequestError) {
        log.error("RequestError:", event.getThrowable());
        return;
    }
    exceptionHandlers.handle(event);
    getAppLog().log(event);
}
// Per-request setup, invoked by Vaadin at the start of every transaction.
// The statement order matters: session/locale/thread bindings must be in
// place before login and link handling run.
@Override
public void transactionStart(Application application, Object transactionData) {
HttpServletRequest request = (HttpServletRequest) transactionData;
this.httpSession = request.getSession();
httpSession.setMaxInactiveInterval(webConfig.getHttpSessionExpirationTimeoutSec());
setClientAddress(request);
if (log.isTraceEnabled()) {
log.trace("requestStart: [@" + Integer.toHexString(System.identityHashCode(request)) + "] " +
request.getRequestURI() +
(request.getUserPrincipal() != null ? " [" + request.getUserPrincipal() + "]" : "") +
" from " + clientAddress);
}
// Bind this App to the current thread so getInstance() works downstream.
if (application == App.this) {
currentApp.set((App) application);
}
application.setLocale(request.getLocale());
// With Active Directory integration the container-authenticated principal is used.
if (ActiveDirectoryHelper.useActiveDirectory())
setUser(request.getUserPrincipal());
// Remember the web context name from the first request ("/context/...").
if (contextName == null) {
contextName = request.getContextPath().substring(1);
}
String requestURI = request.getRequestURI();
String windowName = request.getParameter("windowName");
setupCurrentWindowName(requestURI, windowName);
String action = (String) httpSession.getAttribute(LAST_REQUEST_ACTION_ATTR);
// Attempt auto-login unless this is an explicit login action or a
// Vaadin-internal auxiliary request.
if (!connection.isConnected() &&
!(("login".equals(action)) || auxillaryUrl(requestURI))) {
if (loginOnStart(request))
setupCurrentWindowName(requestURI, windowName);
}
if (connection.isConnected()) {
UserSession userSession = connection.getSession();
if (userSession != null) {
// Propagate the security context and the user's locale to this thread.
AppContext.setSecurityContext(new SecurityContext(userSession));
application.setLocale(userSession.getLocale());
}
requestStartTimes.put(transactionData, System.currentTimeMillis());
}
processExternalLink(request, requestURI);
}
/**
 * Resolves the originating client IP for the current request and stores it in
 * {@code clientAddress}. When the request passed through proxies, the last
 * (nearest) entry of the forwarding header is used; otherwise the socket
 * address reported by the container.
 *
 * @param request current HTTP request
 */
protected void setClientAddress(HttpServletRequest request) {
    // NOTE(review): the conventional header name is "X-Forwarded-For";
    // "X_FORWARDED_FOR" is kept verbatim here — confirm the fronting proxy
    // really sends the underscore variant before changing it.
    String forwardedChain = request.getHeader("X_FORWARDED_FOR");
    if (StringUtils.isBlank(forwardedChain)) {
        clientAddress = request.getRemoteAddr();
        return;
    }
    String[] hops = forwardedChain.split(",");
    clientAddress = hops[hops.length - 1].trim();
}
/**
 * Returns {@code true} when the URI is a Vaadin-internal ("auxiliary") request
 * — UIDL updates, application resources or static VAADIN assets — which must
 * not trigger login or external-link processing.
 *
 * @param uri request URI to test
 * @return whether the URI contains one of the Vaadin-internal path markers
 */
public static boolean auxillaryUrl(String uri) {
    for (String marker : new String[]{"/UIDL/", "/APP/", "/VAADIN/"}) {
        if (uri.contains(marker)) {
            return true;
        }
    }
    return false;
}
/**
 * Publishes the current window name into the thread-local
 * {@code currentWindowName}: either the explicit {@code windowName} request
 * parameter, or a window-looking segment extracted from the request URI, or
 * the main window's name as a fallback.
 *
 * @param requestURI request URI whose path segments are scanned
 * @param windowName explicit window name from the request, may be empty
 */
private void setupCurrentWindowName(String requestURI, String windowName) {
    //noinspection deprecation
    if (StringUtils.isEmpty(windowName)) {
        Window main = getMainWindow();
        currentWindowName.set(main == null ? null : main.getName());
    } else {
        currentWindowName.set(windowName);
    }
    boolean contextSeen = false;
    for (String segment : requestURI.split("/")) {
        if (StringUtils.isEmpty(segment)) {
            continue;
        }
        // The first segment equal to the web context name is not a window name.
        if (!contextSeen && segment.equals(contextName)) {
            contextSeen = true;
            continue;
        }
        // Vaadin's "UIDL" path element right after the context is skipped too.
        if (contextSeen && segment.equals("UIDL")) {
            continue;
        }
        // First segment matching the window-name pattern wins.
        if (WIN_PATTERN.matcher(segment).matches()) {
            currentWindowName.set(segment);
            break;
        }
    }
}
/**
 * Handles an external deep link ("open" action stored in the session by the
 * entry servlet). If the user is already connected the link is handled
 * immediately; otherwise the handler is parked in {@code this.linkHandler}
 * to be executed after login.
 *
 * @param request    current HTTP request
 * @param requestURI its request URI (used to filter out Vaadin-internal calls)
 */
private void processExternalLink(HttpServletRequest request, String requestURI) {
    HttpSession session = request.getSession();
    String action = (String) session.getAttribute(LAST_REQUEST_ACTION_ATTR);
    if (!"open".equals(action) || auxillaryUrl(requestURI)) {
        return;
    }
    Map<String, String> params =
            (Map<String, String>) session.getAttribute(LAST_REQUEST_PARAMS_ATTR);
    if (params == null) {
        log.warn("Unable to process the external link: lastRequestParams not found in session");
        return;
    }
    LinkHandler handler = new LinkHandler(this, params);
    if (connection.isConnected()) {
        handler.handle();
    } else {
        // Defer until after login; loginOnStart/connect path picks this up.
        this.linkHandler = handler;
    }
}
/**
 * Called by the framework at the end of every HTTP request. Mirrors the user
 * session into the HTTP session, logs requests that exceeded the configured
 * duration threshold, and clears all per-request thread-local and session
 * state set up in {@link #transactionStart}.
 *
 * @param application     the application the container dispatched to
 * @param transactionData the underlying {@link HttpServletRequest}
 */
@Override
public void transactionEnd(Application application, Object transactionData) {
    HttpServletRequest request = (HttpServletRequest) transactionData;
    // Store the user session (or null when disconnected / absent) for other
    // servlet-layer consumers.
    UserSession userSession = connection.isConnected() ? connection.getSession() : null;
    request.getSession().setAttribute(USER_SESSION_ATTR, userSession);
    // Warn about requests that ran longer than the configured threshold.
    Long startedAt = requestStartTimes.remove(transactionData);
    if (startedAt != null) {
        long elapsed = System.currentTimeMillis() - startedAt;
        if (elapsed > (webConfig.getLogLongRequestsThresholdSec() * 1000)) {
            log.warn(String.format("Too long request processing [%d ms]: ip=%s, url=%s",
                    elapsed, request.getRemoteAddr(), request.getRequestURI()));
        }
    }
    if (application == App.this) {
        currentApp.set(null);
        currentApp.remove();
    }
    AppContext.setSecurityContext(null);
    HttpSession session = request.getSession();
    session.setAttribute(LAST_REQUEST_ACTION_ATTR, null);
    session.setAttribute(LAST_REQUEST_PARAMS_ATTR, null);
    if (log.isTraceEnabled()) {
        log.trace("requestEnd: [@" + Integer.toHexString(System.identityHashCode(transactionData)) + "]");
    }
}
/** Returns the manager tracking this application's background task threads. */
public BackgroundTaskManager getTaskManager() {
    return backgroundTaskManager;
}

/** Registers a background task thread so it can be cleaned up with the app. */
public void addBackgroundTask(Thread task) {
    backgroundTaskManager.addTask(task);
}

/** Unregisters a previously added background task thread. */
public void removeBackgroundTask(Thread task) {
    backgroundTaskManager.removeTask(task);
}

/** Stops/cleans up all registered background task threads. */
public void cleanupBackgroundTasks() {
    backgroundTaskManager.cleanupTasks();
}

/**
 * Returns the window bound to the current request thread, falling back to the
 * main window when no per-request window name has been published.
 */
Window getCurrentWindow() {
    String name = currentWindowName.get();
    return (name == null ? getMainWindow() : getWindow(name));
}

/** Returns the application-level timers registry. */
public AppTimers getTimers() {
    return timers;
}
/**
 * Adds a timer on the application level; it is not bound to any particular
 * window and lives as long as the application.
 *
 * @param timer new timer
 */
public void addTimer(Timer timer) {
    timers.add(timer);
}

/**
 * Adds a timer owned by the given window; the timer's lifecycle follows the
 * owning window.
 *
 * @param timer new timer
 * @param owner component that owns the timer
 */
public void addTimer(final Timer timer, com.haulmont.cuba.gui.components.Window owner) {
    timers.add(timer, owner);
}
/**
 * Lazily creates and returns the shared UI-check worker timer. The timer is
 * created in a stopped state; callers start it when needed.
 * <p>
 * NOTE(review): the lazy initialization is not synchronized — presumably only
 * ever called from the request thread of this application instance; confirm.
 *
 * @return the (possibly newly created) worker timer
 */
public WebTimer getWorkerTimer() {
    if (workerTimer == null) {
        int checkInterval = ConfigProvider.getConfig(WebConfig.class).getUiCheckInterval();
        WebTimer created = new WebTimer(checkInterval, true);
        created.stopTimer();
        workerTimer = created;
    }
    return workerTimer;
}
/** Forces theme/appearance properties to be re-read on the next initialization. */
public void reinitializeAppearanceProperties() {
    themeInitialized = false;
}

/**
 * Applies the given theme to the application and remembers the choice in a
 * per-web-context cookie so it survives across sessions.
 *
 * @param themeName name of the theme to apply
 */
public void setUserAppTheme(String themeName) {
    GlobalConfig globalConfig = ConfigProvider.getConfig(GlobalConfig.class);
    addCookie(APP_THEME_COOKIE_PREFIX + globalConfig.getWebContextName(), themeName);
    super.setTheme(themeName);
}
/**
 * Creates a repeating timer that pings the middleware user session at one
 * third of the HTTP session expiration timeout, keeping the session alive
 * while the UI is open. Returns {@code null} when the computed period is not
 * positive (i.e. the expiration timeout is configured below 3 seconds).
 *
 * @param connected whether a user session exists; the ping service is only
 *                  invoked when {@code true} (the debug log fires either way)
 * @return the configured timer, or {@code null} if pinging is disabled
 */
protected Timer createSessionPingTimer(final boolean connected) {
    int expirationSec = webConfig.getHttpSessionExpirationTimeoutSec();
    int pingPeriodSec = expirationSec / 3;
    if (pingPeriodSec <= 0) {
        return null;
    }
    Timer pingTimer = new Timer(pingPeriodSec * 1000, true);
    pingTimer.addListener(new Timer.Listener() {
        public void onTimer(Timer timer) {
            if (connected) {
                UserSessionService service = ServiceLocator.lookup(UserSessionService.NAME);
                service.pingSession();
            }
            log.debug("Ping session");
        }

        public void onStopTimer(Timer timer) {
            // Nothing to do when the timer stops.
        }
    });
    return pingTimer;
}
/** Returns the cookie helper for this application. */
public AppCookies getCookies() {
    return cookies;
}

/** Returns the HTTP session bound by the last {@code transactionStart}. */
public HttpSession getHttpSession() {
    return httpSession;
}

/** Delegates to {@link AppCookies}: value of the named cookie, if present. */
public String getCookieValue(String name) {
    return cookies.getCookieValue(name);
}

/** Delegates to {@link AppCookies}: max-age of the named cookie. */
public int getCookieMaxAge(String name) {
    return cookies.getCookieMaxAge(name);
}

/** Delegates to {@link AppCookies}: adds a cookie with an explicit max-age. */
public void addCookie(String name, String value, int maxAge) {
    cookies.addCookie(name, value, maxAge);
}

/** Delegates to {@link AppCookies}: adds a cookie with the default max-age. */
public void addCookie(String name, String value) {
    cookies.addCookie(name, value);
}

/** Delegates to {@link AppCookies}: removes the named cookie. */
public void removeCookie(String name) {
    cookies.removeCookie(name);
}

/** Delegates to {@link AppCookies}: whether cookie handling is enabled. */
public boolean isCookiesEnabled() {
    return cookies.isCookiesEnabled();
}

/** Delegates to {@link AppCookies}: enables/disables cookie handling. */
public void setCookiesEnabled(boolean cookiesEnabled) {
    cookies.setCookiesEnabled(cookiesEnabled);
}

/** Whether the current request was flagged as a UI-test-mode request. */
public boolean isTestModeRequest() {
    return testModeRequest;
}

/** Client IP resolved by {@code setClientAddress} for the current request. */
public String getClientAddress() {
    return clientAddress;
}
} |
package imagej.script;
import imagej.command.Command;
import imagej.module.AbstractModuleInfo;
import imagej.module.DefaultMutableModuleItem;
import imagej.module.Module;
import imagej.module.ModuleException;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.script.ScriptException;
import org.scijava.Context;
import org.scijava.Contextual;
import org.scijava.log.LogService;
import org.scijava.plugin.Parameter;
import org.scijava.service.Service;
/**
* Metadata about a script.
*
* @author Curtis Rueden
* @author Johannes Schindelin
*/
public class ScriptInfo extends AbstractModuleInfo implements Contextual {
private final String path;
@Parameter
private Context context;
@Parameter
private LogService log;
private Map<String, Class<?>> typeMap;
public ScriptInfo(final String path, final Context context) {
this.path = path;
setContext(context);
try {
parseInputs();
}
catch (final ScriptException exc) {
log.error(exc);
}
catch (final IOException exc) {
log.error(exc);
}
}
// -- ScriptInfo methods --
public String getPath() {
return path;
}
// -- ModuleInfo methods --
@Override
public String getDelegateClassName() {
return ScriptModule.class.getName();
}
@Override
public Module createModule() throws ModuleException {
return new ScriptModule(this);
}
// -- Contextual methods --
@Override
public Context getContext() {
return context;
}
@Override
public void setContext(final Context context) {
context.inject(this);
}
// -- Helper methods --
/**
* Parses the script's input parameters.
* <p>
* ImageJ's scripting framework supports specifying @{@link Parameter}-style
* parameters in a preamble. The idea is to specify the input parameters in
* this way:
*
* <pre>
* // @UIService ui
* // @double degrees
* </pre>
*
* i.e. in the form <code>@<type> <name></code>. These input
* parameters will be parsed and filled just like @{@link Parameter}
* -annotated fields in {@link Command}s.
* </p>
*
* @throws ScriptException If a parameter annotation is malformed.
* @throws IOException If there is a problem reading the script file.
*/
private void parseInputs() throws ScriptException, IOException {
final FileReader fileReader = new FileReader(getPath());
final BufferedReader in = new BufferedReader(fileReader, 16384);
while (true) {
final String line = in.readLine();
if (line == null) break;
// scan for lines containing an '@' stopping at the first line
// containing at least one alpha-numerical character but no '@'.
final int at = line.indexOf('@');
if (at < 0) {
if (line.matches(".*[A-Za-z0-9].*")) break;
continue;
}
parseInput(line.substring(at + 1));
}
in.close();
}
private <T> void parseInput(final String line) throws ScriptException {
final String[] parts = line.trim().split("[ \t\n]+");
if (parts.length != 2) {
throw new ScriptException("Expected 'type name': " + line);
}
addInput(parts[1], parseType(parts[0]));
}
private <T> void addInput(final String name, final Class<T> type) {
final DefaultMutableModuleItem<T> item =
new DefaultMutableModuleItem<T>(this, name, type);
inputMap.put(name, item);
inputList.add(item);
}
private synchronized Class<?> parseType(final String string)
throws ScriptException
{
if (typeMap == null) {
typeMap = new HashMap<String, Class<?>>();
typeMap.put("byte", Byte.TYPE);
typeMap.put("short", Short.TYPE);
typeMap.put("int", Integer.TYPE);
typeMap.put("long", Long.TYPE);
typeMap.put("float", Float.TYPE);
typeMap.put("double", Double.TYPE);
typeMap.put("String", String.class);
typeMap.put("File", File.class);
for (final Service service : context.getServiceIndex()) {
final Class<?> clazz = service.getClass();
final String className = clazz.getName();
typeMap.put(className, clazz);
final int dot = className.lastIndexOf('.');
if (dot > 0) typeMap.put(className.substring(dot + 1), clazz);
}
}
final Class<?> type = typeMap.get(string);
if (type == null) {
try {
final Class<?> clazz =
Thread.currentThread().getContextClassLoader().loadClass(string);
typeMap.put(string, clazz);
return clazz;
}
catch (final ClassNotFoundException e) {
throw new ScriptException("Unknown type: " + string);
}
}
return type;
}
} |
package fi.kask.flyingstickman.States;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.utils.Array;
import fi.kask.flyingstickman.FlyingStickman;
import fi.kask.flyingstickman.sprites.SpikePole;
import fi.kask.flyingstickman.sprites.Stickman;
public class PlayState extends State {
private static final int SPIKE_SPACING = 125;
private static final int SPIKE_COUNT = 4;
private static final int STICKMAN_OFFSET_X = 100;
private Stickman _stickman;
private Texture _backgroundTexture;
private Array<SpikePole> _spikePoles;
public PlayState(GameStateManager gsm) {
super(gsm);
_stickman = new Stickman(-STICKMAN_OFFSET_X, 100);
_backgroundTexture = new Texture("background.png");
_camera.setToOrtho(false, FlyingStickman.WIDTH, FlyingStickman.HEIGHT);
_spikePoles = new Array<SpikePole>();
for(int i = 2; i <= SPIKE_COUNT + 1; i++) {
_spikePoles.add(new SpikePole(i * (SPIKE_SPACING + SpikePole.SPIKE_WIDTH)));
}
}
@Override
protected void handleInput() {
if(Gdx.input.justTouched()) {
_stickman.hopUp();
}
}
@Override
public void update(float dt) {
handleInput();
_stickman.update(dt);
_camera.position.x = _stickman.getPosition().x + STICKMAN_OFFSET_X;
// Repos spike poles which are left side of the screen
for(int i = 0; i < _spikePoles.size; i++) {
SpikePole spikePole = _spikePoles.get(i);
if(_camera.position.x - (_camera.viewportWidth / 2) > spikePole.getTopSpikePolePosition().x + spikePole.getTopSpikePoleTexture().getWidth()) {
spikePole.reposition(spikePole.getTopSpikePolePosition().x + ((SpikePole.SPIKE_WIDTH + SPIKE_SPACING) * SPIKE_COUNT));
}
// Check for collision
if(spikePole.collides(_stickman.getBounds())) {
_gameStateManager.set(new MenuState(_gameStateManager));
}
}
_camera.update();
}
@Override
public void render(SpriteBatch sb) {
sb.setProjectionMatrix(_camera.combined);
sb.begin();
sb.draw(_backgroundTexture, _camera.position.x - _camera.viewportWidth / 2, 0);
sb.draw(_stickman.getStickman(), _stickman.getPosition().x, _stickman.getPosition().y);
for(SpikePole spikePole : _spikePoles) {
sb.draw(spikePole.getTopSpikePoleTexture(), spikePole.getTopSpikePolePosition().x, spikePole.getTopSpikePolePosition().y);
sb.draw(spikePole.getBottomSpikePoleTexture(), spikePole.getBottomSpikePolePosition().x, spikePole.getBottomSpikePolePosition().y);
}
sb.end();
}
@Override
public void dispose() {
_stickman.dispose();
_backgroundTexture.dispose();
for(SpikePole spikePole : _spikePoles) {
spikePole.dispose();
}
}
} |
package co.aikar.commands;
import co.aikar.commands.apachecommonslang.ApacheCommonsLangUtil;
import com.google.common.collect.SetMultimap;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static co.aikar.commands.BaseCommand.CATCHUNKNOWN;
import static co.aikar.commands.BaseCommand.DEFAULT;
public interface RootCommand {
void addChild(BaseCommand command);
CommandManager getManager();
SetMultimap<String, RegisteredCommand> getSubCommands();
List<BaseCommand> getChildren();
String getCommandName();
default void addChildShared(List<BaseCommand> children, SetMultimap<String, RegisteredCommand> subCommands, BaseCommand command) {
command.subCommands.entries().forEach(e -> {
String key = e.getKey();
RegisteredCommand registeredCommand = e.getValue();
if (key.equals(DEFAULT) || key.equals(BaseCommand.CATCHUNKNOWN)) {
return;
}
Set<RegisteredCommand> registered = subCommands.get(key);
if (!registered.isEmpty()) {
BaseCommand prevBase = registered.iterator().next().scope;
if (prevBase != registeredCommand.scope) {
this.getManager().log(LogLevel.ERROR, "ACF Error: " + command.getName() + " registered subcommand " + key + " for root command " + getCommandName() + " - but it is already defined in " + prevBase.getName());
this.getManager().log(LogLevel.ERROR, "2 subcommands of the same prefix may not be spread over 2 different classes. Ignoring this.");
return;
}
}
subCommands.put(key, registeredCommand);
});
children.add(command);
}
default String getUniquePermission() {
Set<String> permissions = new HashSet<>();
for (BaseCommand child : getChildren()) {
for (RegisteredCommand<?> value : child.subCommands.values()) {
Set<String> requiredPermissions = value.getRequiredPermissions();
if (requiredPermissions.isEmpty()) {
return null;
} else {
permissions.addAll(requiredPermissions);
}
}
}
return permissions.size() == 1 ? permissions.iterator().next() : null;
}
default boolean hasAnyPermission(CommandIssuer issuer) {
List<BaseCommand> children = getChildren();
if (children.isEmpty()) {
return true;
}
for (BaseCommand child : children) {
if (!child.hasPermission(issuer)) {
continue;
}
for (RegisteredCommand value : child.getRegisteredCommands()) {
if (value.hasPermission(issuer)) {
return true;
}
}
}
return false;
}
default BaseCommand execute(CommandIssuer sender, String commandLabel, String[] args) {
BaseCommand command = getBaseCommand(args);
command.execute(sender, commandLabel, args);
return command;
}
default BaseCommand getBaseCommand(String[] args) {
SetMultimap<String, RegisteredCommand> subCommands = getSubCommands();
for (int i = args.length; i >= 0; i
String checkSub = ApacheCommonsLangUtil.join(args, " ", 0, i).toLowerCase();
Set<RegisteredCommand> registeredCommands = subCommands.get(checkSub);
if (!registeredCommands.isEmpty()) {
return ACFUtil.getFirstElement(registeredCommands).scope;
}
}
if (args.length == 0) {
Set<RegisteredCommand> registeredCommands = subCommands.get(DEFAULT);
if (!registeredCommands.isEmpty()) {
return ACFUtil.getFirstElement(registeredCommands).scope;
}
}
Set<RegisteredCommand> registeredCommands = subCommands.get(CATCHUNKNOWN);
if (!registeredCommands.isEmpty()) {
return ACFUtil.getFirstElement(registeredCommands).scope;
}
return getDefCommand();
}
default List<String> getTabCompletions(CommandIssuer sender, String alias, String[] args) {
return getTabCompletions(sender, alias, args, false);
}
default List<String> getTabCompletions(CommandIssuer sender, String alias, String[] args, boolean commandsOnly) {
return getTabCompletions(sender, alias, args, commandsOnly, false);
}
default List<String> getTabCompletions(CommandIssuer sender, String alias, String[] args, boolean commandsOnly, boolean isAsync) {
Set<String> completions = new HashSet<>();
getChildren().forEach(child -> {
if (!commandsOnly) {
completions.addAll(child.tabComplete(sender, alias, args, isAsync));
}
completions.addAll(child.getCommandsForCompletion(sender, args));
});
return new ArrayList<>(completions);
}
default RegisteredCommand getDefaultRegisteredCommand() {
BaseCommand defCommand = this.getDefCommand();
if (defCommand != null) {
return defCommand.getDefaultRegisteredCommand();
}
return null;
}
default BaseCommand getDefCommand() {
return null;
}
default String getDescription() {
final RegisteredCommand cmd = this.getDefaultRegisteredCommand();
if (cmd != null) {
return cmd.getHelpText();
}
BaseCommand defCommand = getDefCommand();
if (defCommand != null && defCommand.description != null) {
return defCommand.description;
}
return "";
}
default String getUsage() {
final RegisteredCommand cmd = this.getDefaultRegisteredCommand();
if (cmd != null) {
return cmd.syntaxText != null ? cmd.syntaxText : "";
}
return "";
}
} |
package jlibs.core.nio;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
/**
* @author Santhosh Kumar T
*/
public class PlainTransport extends Debuggable implements Transport{
private final ClientChannel client;
public PlainTransport(ClientChannel client){
this.client = client;
}
@Override
public long id(){
return client.id;
}
@Override
public void id(long newID){
client.id = newID;
}
@Override
public ClientChannel client(){
return client;
}
@Override
public int interests(){
return client.key.isValid() ? client.key.interestOps() : 0;
}
@Override
public void addInterest(int operation) throws IOException{
client.key.interestOps(interests() | operation);
client.nioSelector.timeoutTracker.track(client);
if(DEBUG){
switch(operation){
case SelectionKey.OP_CONNECT:
println("channel@"+id()+".connectWait");
return;
case SelectionKey.OP_READ:
println("channel@"+id()+".readWait");
return;
case SelectionKey.OP_WRITE:
println("channel@"+id()+".writeWait");
}
}
}
@Override
public void removeInterest(int operation){
client.key.interestOps(interests()&~operation);
if(interests()==0)
client.nioSelector.timeoutTracker.untrack(client);
}
@Override
public int ready(){
return client.key.isValid() ? client.key.readyOps() : 0;
}
@Override
public boolean process(){
int ops = client.key.interestOps();
if(client.isConnectable())
ops &= ~SelectionKey.OP_CONNECT;
if(client.isReadable())
ops &= ~SelectionKey.OP_READ;
if(client.isWritable())
ops &= ~SelectionKey.OP_WRITE;
client.key.interestOps(ops);
return true;
}
@Override
public int read(ByteBuffer dst) throws IOException{
int read = client.realChannel().read(dst);
if(DEBUG)
println("channel@"+id()+".read: "+read);
return read;
}
@Override
public int write(ByteBuffer src) throws IOException{
int wrote = client.realChannel().write(src);
if(DEBUG)
println("channel@"+id()+".write: "+wrote);
return wrote;
}
@Override
public void shutdownOutput() throws IOException{
if(DEBUG)
println("channel@"+id()+".shutdownOutput");
client.realChannel().socket().shutdownOutput();
}
@Override
public boolean isOutputShutdown(){
return client.realChannel().socket().isOutputShutdown();
}
@Override
public boolean isOpen(){
return client.realChannel().isOpen();
}
@Override
public void close() throws IOException{
if(DEBUG)
println("channel@"+id()+".close");
boolean wasOpen = isOpen();
boolean wasConnected = client.isConnected();
client.realChannel().close();
if(wasOpen){
if(wasConnected)
client.nioSelector.connectedClients
else
client.nioSelector.connectionPendingClients
client.nioSelector.timeoutTracker.untrack(client);
if(client.pool!=null)
client.pool.remove(client);
else if(client.futurePool!=null)
client.futurePool.remove(client);
}
}
} |
package org.kohsuke.stapler;
import net.sf.json.JSONArray;
import org.apache.commons.io.IOUtils;
import org.kohsuke.stapler.bind.JavaScriptMethod;
import org.kohsuke.stapler.lang.Klass;
import org.kohsuke.stapler.lang.MethodRef;
import javax.annotation.PostConstruct;
import javax.servlet.ServletException;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static javax.servlet.http.HttpServletResponse.*;
public class MetaClass extends TearOffSupport {
/**
 * This meta class wraps this class
 *
 * @deprecated as of 1.177
 *      Use {@link #klass}. If you really want the Java class representation, use {@code klass.toJavaClass()}.
 */
public final Class clazz;

/** Language-neutral representation of the class this meta class wraps. */
public final Klass<?> klass;

/**
 * {@link MetaClassLoader} that wraps {@code clazz.getClassLoader()}.
 * Null if the class is loaded by the bootstrap classloader.
 */
public final MetaClassLoader classLoader;

/** URL dispatchers for this class, in priority order; built by {@code buildDispatchers()}. */
public final List<Dispatcher> dispatchers = new ArrayList<Dispatcher>();

/**
 * Base metaclass.
 * Note that <tt>baseClass.clazz==clazz.getSuperClass()</tt>
 */
public final MetaClass baseClass;

/**
 * {@link WebApp} that owns this meta class.
 */
public final WebApp webApp;

/**
 * If there's a method annotated with @PostConstruct, that {@link MethodRef} object, linked
 * to the list of the base class.
 */
// volatile: lazily computed by getPostConstructMethods() without locking.
private volatile SingleLinkedList<MethodRef> postConstructMethods;
/**
 * Builds the meta class for the given class: resolves the base metaclass
 * (recursively, up the superclass chain via the owning {@link WebApp}),
 * the wrapping classloader, and the URL dispatcher table.
 */
/*package*/ MetaClass(WebApp webApp, Klass<?> klass) {
    this.clazz = klass.toJavaClass();
    this.klass = klass;
    this.webApp = webApp;
    this.baseClass = webApp.getMetaClass(klass.getSuperClass());
    this.classLoader = MetaClassLoader.get(clazz.getClassLoader());
    buildDispatchers();
}
/**
 * Build {@link #dispatchers}.
 *
 * <p>
 * This is the meat of URL dispatching. It looks at the class
 * via reflection and figures out what URLs are handled by who.
 *
 * <p>
 * NOTE: dispatchers appear to be consulted in registration order, so the
 * sequence of the sections below is significant — action methods first,
 * then getters, then container look-ups, then dynamic fallbacks.
 */
/*package*/ void buildDispatchers() {
    this.dispatchers.clear();
    ClassDescriptor node = new ClassDescriptor(clazz,null/*TODO:support wrappers*/);

    // check action <obj>.do<token>(...)
    for( final Function f : node.methods.prefix("do") ) {
        WebMethod a = f.getAnnotation(WebMethod.class);
        String[] names;
        // @WebMethod may override the URL name(s); otherwise derive from the method name.
        if(a!=null && a.name().length>0) names=a.name();
        else names=new String[]{camelize(f.getName().substring(2))}; // 'doFoo' -> 'foo'
        for (String name : names) {
            dispatchers.add(new NameBasedDispatcher(name,0) {
                public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IllegalAccessException, InvocationTargetException, ServletException, IOException {
                    if(traceable())
                        trace(req,rsp,"-> <%s>.%s(...)",node,f.getName());
                    return f.bindAndInvokeAndServeResponse(node, req, rsp);
                }
                public String toString() {
                    return f.getQualifiedName()+"(...) for url=/"+name+"/...";
                }
            });
        }
    }

    // JavaScript proxy method invocations for <obj>js<token>
    // reacts only to a specific content type
    for( final Function f : node.methods.prefix("js") ) {
        String name = camelize(f.getName().substring(2)); // jsXyz -> xyz
        dispatchers.add(new JavaScriptProxyMethodDispatcher(name, f));
    }

    // JavaScript proxy method with @JavaScriptMethod
    // reacts only to a specific content type
    for( final Function f : node.methods.annotated(JavaScriptMethod.class) ) {
        JavaScriptMethod a = f.getAnnotation(JavaScriptMethod.class);
        String[] names;
        if(a!=null && a.name().length>0) names=a.name();
        else names=new String[]{f.getName()};
        for (String name : names)
            dispatchers.add(new JavaScriptProxyMethodDispatcher(name,f));
    }

    // Facets (view technologies) contribute their own dispatchers here.
    for (Facet f : webApp.facets)
        f.buildViewDispatchers(this, dispatchers);

    // check action <obj>.doIndex(...)
    for( final Function f : node.methods.name("doIndex") ) {
        dispatchers.add(new Dispatcher() {
            public boolean dispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IllegalAccessException, InvocationTargetException, ServletException, IOException {
                if(req.tokens.hasMore())
                    return false; // applicable only when there's no more token
                if(traceable())
                    trace(req,rsp,"-> <%s>.doIndex(...)",node);
                return f.bindAndInvokeAndServeResponse(node,req,rsp);
            }
            public String toString() {
                return f.getQualifiedName()+"(StaplerRequest,StaplerResponse) for url=/";
            }
        });
    }

    // check public properties of the form NODE.TOKEN
    for (final Field f : node.fields) {
        dispatchers.add(new NameBasedDispatcher(f.getName()) {
            // @LimitedTo role restriction, resolved once at registration time.
            final String role = getProtectedRole(f);
            public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException, IllegalAccessException {
                if(role!=null && !req.isUserInRole(role))
                    throw new IllegalAccessException("Needs to be in role "+role);
                if(traceable())
                    traceEval(req,rsp,node,f.getName());
                req.getStapler().invoke(req, rsp, f.get(node));
                return true;
            }
            public String toString() {
                return String.format("%1$s.%2$s for url=/%2$s/...",f.getDeclaringClass().getName(),f.getName());
            }
        });
    }

    FunctionList getMethods = node.methods.prefix("get");

    // check public selector methods of the form NODE.getTOKEN()
    for( final Function f : getMethods.signature() ) {
        if(f.getName().length()<=3)
            continue; // a bare "get" method has no token to derive a name from
        WebMethod a = f.getAnnotation(WebMethod.class);
        String[] names;
        if(a!=null && a.name().length>0) names=a.name();
        else names=new String[]{camelize(f.getName().substring(3))}; // 'getFoo' -> 'foo'
        for (String name : names) {
            dispatchers.add(new NameBasedDispatcher(name) {
                public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException, IllegalAccessException, InvocationTargetException {
                    if(traceable())
                        traceEval(req,rsp,node,f.getName()+"()");
                    req.getStapler().invoke(req,rsp, f.invoke(req, rsp, node));
                    return true;
                }
                public String toString() {
                    return String.format("%1$s() for url=/%2$s/...",f.getQualifiedName(),name);
                }
            });
        }
    }

    // check public selector methods of the form static NODE.getTOKEN(StaplerRequest)
    for( final Function f : getMethods.signature(StaplerRequest.class) ) {
        if(f.getName().length()<=3)
            continue;
        String name = camelize(f.getName().substring(3)); // 'getFoo' -> 'foo'
        dispatchers.add(new NameBasedDispatcher(name) {
            public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException, IllegalAccessException, InvocationTargetException {
                if(traceable())
                    traceEval(req,rsp,node,f.getName()+"(...)");
                req.getStapler().invoke(req,rsp, f.invoke(req, rsp, node, req));
                return true;
            }
            public String toString() {
                return String.format("%1$s(StaplerRequest) for url=/%2$s/...",f.getQualifiedName(),name);
            }
        });
    }

    // check public selector methods <obj>.get<Token>(String)
    // consumes one extra URL token as the String argument.
    for( final Function f : getMethods.signature(String.class) ) {
        if(f.getName().length()<=3)
            continue;
        String name = camelize(f.getName().substring(3)); // 'getFoo' -> 'foo'
        dispatchers.add(new NameBasedDispatcher(name,1) {
            public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException, IllegalAccessException, InvocationTargetException {
                String token = req.tokens.next();
                if(traceable())
                    traceEval(req,rsp,node,f.getName()+"(\""+token+"\")");
                req.getStapler().invoke(req,rsp, f.invoke(req, rsp, node,token));
                return true;
            }
            public String toString() {
                return String.format("%1$s(String) for url=/%2$s/TOKEN/...",f.getQualifiedName(),name);
            }
        });
    }

    // check public selector methods <obj>.get<Token>(int)
    for( final Function f : getMethods.signature(int.class) ) {
        if(f.getName().length()<=3)
            continue;
        String name = camelize(f.getName().substring(3)); // 'getFoo' -> 'foo'
        dispatchers.add(new NameBasedDispatcher(name,1) {
            public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException, IllegalAccessException, InvocationTargetException {
                int idx = req.tokens.nextAsInt();
                if(traceable())
                    traceEval(req,rsp,node,f.getName()+"("+idx+")");
                req.getStapler().invoke(req,rsp, f.invoke(req, rsp, node,idx));
                return true;
            }
            public String toString() {
                return String.format("%1$s(int) for url=/%2$s/N/...",f.getQualifiedName(),name);
            }
        });
    }

    // check public selector methods <obj>.get<Token>(long)
    // TF: I'm sure these for loop blocks could be dried out in some way.
    for( final Function f : getMethods.signature(long.class) ) {
        if(f.getName().length()<=3)
            continue;
        String name = camelize(f.getName().substring(3)); // 'getFoo' -> 'foo'
        dispatchers.add(new NameBasedDispatcher(name,1) {
            public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException, IllegalAccessException, InvocationTargetException {
                long idx = req.tokens.nextAsLong();
                if(traceable())
                    traceEval(req,rsp,node,f.getName()+"("+idx+")");
                req.getStapler().invoke(req,rsp, f.invoke(req, rsp, node,idx));
                return true;
            }
            public String toString() {
                return String.format("%1$s(long) for url=/%2$s/N/...",f.getQualifiedName(),name);
            }
        });
    }

    // Array index look-up: /N/... dispatches to element N.
    if(node.clazz.isArray()) {
        dispatchers.add(new Dispatcher() {
            public boolean dispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException {
                if(!req.tokens.hasMore())
                    return false;
                try {
                    int index = req.tokens.nextAsInt();
                    if(traceable())
                        traceEval(req,rsp,node,"((Object[])",")["+index+"]");
                    req.getStapler().invoke(req,rsp, ((Object[]) node)[index]);
                    return true;
                } catch (NumberFormatException e) {
                    return false; // try next
                }
            }
            public String toString() {
                return "Array look-up for url=/N/...";
            }
        });
    }

    // List index look-up: /N/... dispatches to list.get(N), 404 when out of range.
    if(List.class.isAssignableFrom(node.clazz)) {
        dispatchers.add(new Dispatcher() {
            public boolean dispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException {
                if(!req.tokens.hasMore())
                    return false;
                try {
                    int index = req.tokens.nextAsInt();
                    if(traceable())
                        traceEval(req,rsp,node,"((List)",").get("+index+")");
                    List list = (List) node;
                    if (0<=index && index<list.size())
                        req.getStapler().invoke(req,rsp, list.get(index));
                    else {
                        if(traceable())
                            trace(req,rsp,"-> IndexOutOfRange [0,%d)",list.size());
                        rsp.sendError(SC_NOT_FOUND);
                    }
                    return true;
                } catch (NumberFormatException e) {
                    return false; // try next
                }
            }
            public String toString() {
                return "List.get(int) look-up for url=/N/...";
            }
        });
    }

    // Map key look-up: /TOKEN/... dispatches to map.get("TOKEN") when non-null.
    if(Map.class.isAssignableFrom(node.clazz)) {
        dispatchers.add(new Dispatcher() {
            public boolean dispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IOException, ServletException {
                if(!req.tokens.hasMore())
                    return false;
                try {
                    // peek first so the token is only consumed when the key exists.
                    String key = req.tokens.peek();
                    if(traceable())
                        // NOTE(review): unlike the List dispatcher above, this
                        // traceEval call omits the 'node' argument — presumably
                        // an overload; confirm against the Dispatcher base class.
                        traceEval(req,rsp,"((Map)",").get(\""+key+"\")");
                    Object item = ((Map)node).get(key);
                    if(item!=null) {
                        req.tokens.next();
                        req.getStapler().invoke(req,rsp,item);
                        return true;
                    } else {
                        // otherwise just fall through
                        if(traceable())
                            trace(req,rsp,"Map.get(\""+key+"\")==null. Back tracking.");
                        return false;
                    }
                } catch (NumberFormatException e) {
                    return false; // try next
                }
            }
            public String toString() {
                return "Map.get(String) look-up for url=/TOKEN/...";
            }
        });
    }

    // TODO: check if we can route to static resources
    // which directory shall we look up a resource from?

    for (Facet f : webApp.facets)
        f.buildFallbackDispatchers(this, dispatchers);

    // check public selector methods <obj>.getDynamic(<token>,...)
    for( final Function f : getMethods.signatureStartsWith(String.class).name("getDynamic")) {
        dispatchers.add(new Dispatcher() {
            public boolean dispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IllegalAccessException, InvocationTargetException, IOException, ServletException {
                if(!req.tokens.hasMore())
                    return false;
                String token = req.tokens.next();
                if(traceable())
                    traceEval(req,rsp,node,"getDynamic(\""+token+"\",...)");
                Object target = f.bindAndInvoke(node, req,rsp, token);
                if(target!=null) {
                    req.getStapler().invoke(req,rsp, target);
                    return true;
                } else {
                    if(traceable())
                        // indent:    "-> evaluate(
                        trace(req,rsp,"  %s.getDynamic(\"%s\",...)==null. Back tracking.",node,token);
                    req.tokens.prev(); // cancel the next effect
                    return false;
                }
            }
            public String toString() {
                return String.format("%s(String,StaplerRequest,StaplerResponse) for url=/TOKEN/...",f.getQualifiedName());
            }
        });
    }

    // check action <obj>.doDynamic(...) — last resort, claims any remaining URL.
    for( final Function f : node.methods.name("doDynamic") ) {
        dispatchers.add(new Dispatcher() {
            public boolean dispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IllegalAccessException, InvocationTargetException, ServletException, IOException {
                if(traceable())
                    trace(req,rsp,"-> <%s>.doDynamic(...)",node);
                return f.bindAndInvokeAndServeResponse(node,req,rsp);
            }
            public String toString() {
                return String.format("%s(StaplerRequest,StaplerResponse) for any URL",f.getQualifiedName());
            }
        });
    }
}
/**
 * Returns all the methods in the ancestory chain annotated with {@link PostConstruct}
 * from those defined in the derived type toward those defined in the base type.
 *
 * Normally invocation requires visiting the list in the reverse order.
 * @since 1.220
 */
public SingleLinkedList<MethodRef> getPostConstructMethods() {
    if (postConstructMethods == null) {
        // Start from the base class's chain (empty when we are the root),
        // then prepend this class's own @PostConstruct methods.
        SingleLinkedList<MethodRef> chain =
                (baseClass == null) ? SingleLinkedList.<MethodRef>empty()
                                    : baseClass.getPostConstructMethods();
        for (MethodRef method : klass.getDeclaredMethods()) {
            if (method.hasAnnotation(PostConstruct.class)) {
                chain = chain.grow(method);
            }
        }
        postConstructMethods = chain;   // cache lazily computed result
    }
    return postConstructMethods;
}
/**
 * Returns the role name declared by a {@link LimitedTo} annotation on the
 * given field, or {@code null} when the field is unrestricted.
 */
private String getProtectedRole(Field f) {
    try {
        LimitedTo limit = f.getAnnotation(LimitedTo.class);
        return limit == null ? null : limit.value();
    } catch (LinkageError e) {
        // Annotation support unavailable (running in JDK 1.4) — treat as unrestricted.
        return null;
    }
}
/**
 * Lower-cases the first character of the given name ("FooBar" -> "fooBar").
 *
 * @param name property or class name to convert; may be empty
 * @return the camelized name; an empty input is returned unchanged
 */
private static String camelize(String name) {
    // FIX: charAt(0) on an empty string threw StringIndexOutOfBoundsException.
    if (name.isEmpty()) {
        return name;
    }
    return Character.toLowerCase(name.charAt(0)) + name.substring(1);
}
/**
 * Dispatcher that invokes a server-side method in response to a JavaScript
 * proxy call: the request body is a JSON array of arguments, which is bound
 * to the target method's parameter types before invocation.
 */
private static class JavaScriptProxyMethodDispatcher extends NameBasedDispatcher {
    private final Function f;

    public JavaScriptProxyMethodDispatcher(String name, Function f) {
        super(name, 0);
        this.f = f;
    }

    public boolean doDispatch(RequestImpl req, ResponseImpl rsp, Object node) throws IllegalAccessException, InvocationTargetException, ServletException, IOException {
        // Only applicable to JavaScript proxy calls.
        if (!req.isJavaScriptProxyCall()) {
            return false;
        }

        // CSRF protection: the proxy call must carry a valid crumb header.
        req.stapler.getWebApp().getCrumbIssuer().validateCrumb(req, req.getHeader("Crumb"));

        if (traceable()) {
            trace(req, rsp, "-> <%s>.%s(...)", node, f.getName());
        }

        // The request body is a JSON array of positional arguments.
        JSONArray jsonArgs = JSONArray.fromObject(IOUtils.toString(req.getReader()));
        Class[] types = f.getParameterTypes();
        Type[] genericTypes = f.getGenericParameterTypes();
        if (jsonArgs.size() != types.length) {
            throw new IllegalArgumentException("argument count mismatch between " + jsonArgs + " and " + Arrays.toString(genericTypes));
        }

        // Bind each JSON element to the corresponding parameter type.
        Object[] args = new Object[jsonArgs.size()];
        for (int i = 0; i < args.length; i++) {
            args[i] = req.bindJSON(genericTypes[i], types[i], jsonArgs.get(i));
        }

        return f.bindAndInvokeAndServeResponse(node, req, rsp, args);
    }

    public String toString() {
        return f.getQualifiedName()+"(...) for url=/"+name+"/...";
    }
}
/**
 * Don't cache anything in memory, so that any change
 * will take effect instantly.
 * Controlled by the {@code stapler.jelly.noCache} system property.
 */
public static boolean NO_CACHE = false;

static {
    try {
        // System-property lookup may be forbidden in a sandboxed container.
        NO_CACHE = Boolean.getBoolean("stapler.jelly.noCache");
    } catch (SecurityException e) {
        // ignore: keep the default (caching enabled).
    }
}

// Sentinel object compared by identity; presumably marks "no value" in a cache —
// its usage is not visible in this portion of the file, confirm before relying on it.
private static final Object NONE = "none";
} |
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsync;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBAsyncClientBuilder;
import com.amazonaws.services.dynamodbv2.document.internal.InternalUtils;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.GetItemRequest;
import com.amazonaws.services.dynamodbv2.model.GetItemResult;
import com.amazonaws.services.dynamodbv2.model.ListTablesResult;
import com.google.common.collect.ImmutableMap;
import rx.Observable;
import java.util.List;
import java.util.Map;
import static com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
class ReactiveDynamo {
private final AmazonDynamoDBAsync db;
ReactiveDynamo(EndpointConfiguration endpointConfiguration) {
this.db = AmazonDynamoDBAsyncClientBuilder.standard()
.withEndpointConfiguration(endpointConfiguration)
.build();
}
Observable<List<String>> tables() {
return Observable.from(db.listTablesAsync())
.map(ListTablesResult::getTableNames);
}
Observable<Map<String, Object>> itemByHashKey(String tableName, String hashKeyName, String hashKeyValue) {
ImmutableMap<String, AttributeValue> map = ImmutableMap.of(hashKeyName, new AttributeValue(hashKeyValue));
GetItemRequest getItemRequest = new GetItemRequest().withTableName(tableName).withKey(map);
return Observable.from(db.getItemAsync(getItemRequest))
.map(GetItemResult::getItem)
.map(InternalUtils::toSimpleMapValue);
}
} |
package abra;
import static abra.Logger.log;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import abra.ReadEvaluator.Alignment;
import abra.SSWAligner.SSWAlignerResult;
import abra.SimpleMapper.Orientation;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMFileReader;
import htsjdk.samtools.SAMFileWriter;
import htsjdk.samtools.SAMFileWriterFactory;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.ValidationStringency;
/**
* ABRA's main entry point
*
* @author Lisle E. Mose (lmose at unc dot edu)
*/
public class ReAligner {
// Cap on unaligned reads pulled into reprocessing.
private static final int DEFAULT_MAX_UNALIGNED_READS = 1000000;
// Target-region sizing: regions longer than MAX_REGION_LENGTH + MIN_REGION_REMAINDER
// are split into overlapping windows (see splitWithOverlap).
public static final int MAX_REGION_LENGTH = 400;
private static final int MIN_REGION_REMAINDER = 200;
public static final int REGION_OVERLAP = 200;
// Minimum sequence length recommended for use with bwa mem
private static final int MIN_CONTIG_LENGTH = 70;
// Cannot be larger than buffer in assembler.c
private static final int MAX_KMER_SIZE = 199;

// One SAM header per input BAM, in inputSams order.
private SAMFileHeader[] samHeaders;

// Target regions loaded from the BED file (post collapse/split).
private List<Feature> regions;

private String regionsBed;
private String tempDir;
private String unalignedRegionSam;
private String reference;
private String bwaIndex;
private AssemblerSettings assemblerSettings;
private int numThreads;
private int maxUnalignedReads = DEFAULT_MAX_UNALIGNED_READS;
private boolean shouldReprocessUnaligned = true;
private String localRepeatFile;

// Parallel arrays: one output writer per input BAM.
private String[] inputSams;
private SAMFileWriter[] writers;

// Observed maxima/minima sampled from input headers/reads (see getSamHeaderAndReadLength).
private int readLength = -1;
private int maxMapq = -1;
private int minInsertLength = Integer.MAX_VALUE;
private int maxInsertLength = -1;

private boolean isPairedEnd = false;

// Shared output for assembled contigs; writes are synchronized via appendContigs.
private BufferedWriter contigWriter;
private CompareToReference2 c2r;

private ThreadManager threadManager;

private int minMappingQuality;

private boolean isDebug;

private boolean isSkipAssembly;
private boolean isSkipNonAssembly;

// If true, the input target file specifies kmer values
private boolean hasPresetKmers = false;

// RNA specific
private String junctionFile;
private List<Feature> junctions = new ArrayList<Feature>();

private ReverseComplementor rc = new ReverseComplementor();

public static final int COMPRESSION_LEVEL = 1;
/**
 * Main realignment driver: loads reference/regions/junctions, opens one BAM
 * writer per input, spawns one worker per chromosome, and waits for completion.
 *
 * @param inputFiles  input BAM paths (one per sample)
 * @param outputFiles output BAM paths, parallel to {@code inputFiles}
 * @throws Exception on any I/O or worker failure
 */
public void reAlign(String[] inputFiles, String[] outputFiles) throws Exception {
    this.inputSams = inputFiles;

    logStartupInfo(outputFiles);
    init();

    c2r = new CompareToReference2();
    c2r.init(this.reference);

    Logger.info("Reading Input SAM Header and identifying read length");
    getSamHeaderAndReadLength();
    Logger.info("Read length: " + readLength);

    Logger.info("Loading target regions");
    loadRegions();
    loadJunctions();

    Clock clock = new Clock("Assembly");
    clock.start();

    String contigFasta = tempDir + "/" + "all_contigs.fasta";
    contigWriter = new BufferedWriter(new FileWriter(contigFasta, false));

    SAMFileWriterFactory writerFactory = new SAMFileWriterFactory();
    writerFactory.setUseAsyncIo(false);

    writers = new SAMFileWriter[inputSams.length];

    for (int i = 0; i < inputSams.length; i++) {
        // init BAM writer
        writers[i] = writerFactory.makeBAMWriter(
                samHeaders[i], false, new File(outputFiles[i]), COMPRESSION_LEVEL);
    }

    try {
        // Spawn thread for each chromosome
        // TODO: Validate identical sequence dictionary for each input file
        for (SAMSequenceRecord seqRecord : this.samHeaders[0].getSequenceDictionary().getSequences()) {
            String chromosome = seqRecord.getSequenceName();
            this.spawnChromosomeThread(chromosome);
        }

        Logger.info("Waiting for all threads to complete");
        threadManager.waitForAllThreadsToComplete();
    } finally {
        // FIX: previously the contig writer leaked when a worker thread threw;
        // always release it once no more contigs can be appended.
        contigWriter.close();
    }

    clock.stopAndPrint();

    for (SAMFileWriter writer : this.writers) {
        writer.close();
    }

    System.err.println("Done.");
}
/**
 * Developer trace hook: prints {@code msg} to stderr only for one
 * hard-coded read of interest (used while debugging specific alignments).
 */
void debug(SAMRecord read, String msg) {
    final String trackedRead = "UNC9-SN296:440:C5F7CACXX:5:2108:19995:43952";
    if (trackedRead.equals(read.getReadName())) {
        System.err.println(trackedRead + " - " + msg);
    }
}
/**
 * Streams all reads for one chromosome through region assembly and remapping.
 *
 * Reads are consumed in coordinate order from a multi-sample reader. Reads that
 * overlap a target region are buffered per sample; once the stream moves far
 * enough past a region, that region is assembled and its contigs cached. Every
 * 2500 reads, buffered reads that have fallen out of scope are remapped against
 * the cached contigs and written out. Reads outside all regions are flushed in
 * batches untouched.
 *
 * @param chromosome sequence name to process (one worker thread per chromosome)
 */
void processChromosome(String chromosome) throws Exception {

    System.err.println("Processing chromosome: " + chromosome);

    MultiSamReader reader = new MultiSamReader(this.inputSams, this.minMappingQuality, this.isPairedEnd, chromosome);

    // Per-sample buffers of reads currently in (or near) target regions.
    List<List<SAMRecordWrapper>> currReads = new ArrayList<List<SAMRecordWrapper>>();
    for (int i=0; i<this.inputSams.length; i++) {
        currReads.add(new ArrayList<SAMRecordWrapper>());
    }

    // Per-sample buffers of reads outside all target regions (flushed in batches).
    List<List<SAMRecordWrapper>> outOfRegionReads = new ArrayList<List<SAMRecordWrapper>>();
    for (int i=0; i<this.inputSams.length; i++) {
        outOfRegionReads.add(new ArrayList<SAMRecordWrapper>());
    }

    // Cache of assembled contigs per region, dropped once reads move out of range.
    Map<Feature, Map<SimpleMapper, SSWAlignerResult>> regionContigs = new HashMap<Feature, Map<SimpleMapper, SSWAlignerResult>>();

    int readCount = 0;

    // Restrict regions and junctions to this chromosome.
    List<Feature> chromosomeRegions = new ArrayList<Feature>();
    for (Feature region : regions) {
        if (region.getSeqname().equals(chromosome)) {
            chromosomeRegions.add(region);
        }
    }

    List<Feature> chromosomeJunctions = new ArrayList<Feature>();
    for (Feature junction : junctions) {
        if (junction.getSeqname().equals(chromosome)) {
            chromosomeJunctions.add(junction);
        }
    }

    // Junctions applicable to each region (replaces the older inline pairing loop).
    Map<Feature, List<Feature>> regionJunctions = JunctionUtils.getRegionJunctions(chromosomeRegions, chromosomeJunctions, readLength, MAX_REGION_LENGTH);

    Set<Integer> regionsToProcess = new TreeSet<Integer>();

    int currRegionIdx = -1;

    for (SAMRecordWrapper record : reader) {

        // If this is an unmapped read anchored by its mate, check rc flag
        SAMRecord read1 = record.getSamRecord();
        if (read1.getReadUnmappedFlag() && !read1.getMateUnmappedFlag()) {
            if (!read1.getMateNegativeStrandFlag()) {
                // Mate is not reverse complemented, so RC this read
                read1.setReadString(rc.reverseComplement(read1.getReadString()));
                read1.setBaseQualityString(rc.reverse(read1.getBaseQualityString()));
                read1.setReadNegativeStrandFlag(true);
                // TODO: Revert read information if not remapped...
            }
        }

        int regionIdx = Feature.findFirstOverlappingRegion(reader.getSAMFileHeader(), record, chromosomeRegions, currRegionIdx);

        // Identify next region that is a candidate for processing
        // Note: Splicing can cause reads to go in and out of a region
        if (regionIdx >= 0) {
            regionsToProcess.add(regionIdx);

            // Cache read for processing at end of region
            debug(record.getSamRecord(), "Adding read.");
            currReads.get(record.getSampleIdx()).add(record);
        }

        Iterator<Integer> regionIter = regionsToProcess.iterator();
        if (regionIter.hasNext()) {
            currRegionIdx = regionIter.next();

            // If start position for current read is beyond current region, trigger assembly
            Feature currRegion = chromosomeRegions.get(currRegionIdx);

            if (record.getAdjustedAlignmentStart() > currRegion.getEnd() + this.readLength*2) {
                System.err.println("Processing region: " + currRegion);
                Map<SimpleMapper, SSWAlignerResult> mappedContigs = processRegion(currRegion, currReads, regionJunctions.get(currRegion));
                System.err.println("Region: " + currRegion + " assembled: " + mappedContigs.keySet().size() + " contigs");
                regionContigs.put(currRegion, mappedContigs);
                // Remove curr region from list of regions to process
                regionIter.remove();
            }
        }

        // TODO: Consider dropping this... Reads are out of scope when we've moved beyond them via standard processing?
        if (regionIdx < 0) {
            // Process out of region read and output if ready.
            List<SAMRecordWrapper> outOfRegionReadsForSample = outOfRegionReads.get(record.getSampleIdx());
            debug(record.getSamRecord(), "Out of Region!!!");
            outOfRegionReadsForSample.add(record);

            // Flush this sample's out-of-region buffer once it grows past 2500.
            if (outOfRegionReads.get(record.getSampleIdx()).size() > 2500) {
                synchronized(this.writers[record.getSampleIdx()]) {
                    for (SAMRecordWrapper outOfRegionRead : outOfRegionReadsForSample) {
                        this.writers[record.getSampleIdx()].addAlignment(outOfRegionRead.getSamRecord());
                    }
                }

                outOfRegionReadsForSample.clear();
            }
        }

        // Todo - make constant or parameterize
        int MAX_READ_RANGE = 1000 + this.readLength;

        // Check for out of scope reads every 2500 reads (TODO: is 2500 the best number?)
        if (readCount % 2500 == 0) {

            // Remap / output / clear out of scope reads
            List<List<SAMRecordWrapper>> readsToRemap = new ArrayList<List<SAMRecordWrapper>>();

            // Initialize per sample lists
            for (List<SAMRecordWrapper> origSample : currReads) {
                List<SAMRecordWrapper> sampleReadsToRemap = new ArrayList<SAMRecordWrapper>();
                readsToRemap.add(sampleReadsToRemap);

                // A buffered read is "out of scope" once the stream has advanced
                // more than MAX_READ_RANGE beyond its alignment start.
                Iterator<SAMRecordWrapper> iter = origSample.iterator();
                while (iter.hasNext()) {
                    SAMRecordWrapper read = iter.next();
                    if (record.getSamRecord().getAlignmentStart() - read.getSamRecord().getAlignmentStart() > MAX_READ_RANGE) {
                        debug(record.getSamRecord(), "Ready to remap");
                        sampleReadsToRemap.add(read);
                        iter.remove();
                    } else {
                        debug(record.getSamRecord(), "Not ready to remap");
                    }
                }
            }

            // Remap out of scope reads
            long start = System.currentTimeMillis();
            remapReads(regionContigs, readsToRemap);
            long stop = System.currentTimeMillis();
            System.err.println("REMAP_READS_SECS:\t" + (stop-start)/1000 + "\t" + record.getSamRecord().getReferenceName() + ":" + record.getSamRecord().getAlignmentStart());

            // Remove out of scope region assemblies
            List<Feature> regionsToRemove = new ArrayList<Feature>();
            for (Feature region : regionContigs.keySet()) {
                if (getFirstStartPos(currReads)-region.getStart() > MAX_READ_RANGE) {
                    regionsToRemove.add(region);
                }
            }

            for (Feature region : regionsToRemove) {
                System.err.println("Removing contigs for region: " + region);
                regionContigs.remove(region);
            }

            // Diagnostics: warn when internal buffers grow unexpectedly large.
            String logPrefix = record.getSamRecord().getReferenceName() + ":" + record.getSamRecord().getAlignmentStart() + " : ";

            if (regionContigs.size() > 10) {
                System.err.println(logPrefix + "regionContigs size: " + regionContigs.size());
            }

            int currReadsCount = 0;
            for (List<SAMRecordWrapper> reads : currReads) {
                currReadsCount += reads.size();
            }

            if (currReadsCount > 10000) {
                System.err.println(logPrefix + "Curr reads size: " + currReadsCount);
            }

            int outOfRegionCount = 0;
            for (List<SAMRecordWrapper> reads : outOfRegionReads) {
                outOfRegionCount += reads.size();
            }

            if (outOfRegionCount > 10000) {
                System.err.println(logPrefix + "Out of region reads size: " + outOfRegionCount);
            }
        }

        readCount += 1;
    }

    // Attempt to process last region if applicable
    Iterator<Integer> regionIter = regionsToProcess.iterator();
    if (regionIter.hasNext()) {
        currRegionIdx = regionIter.next();

        // We've moved beyond the current region
        // Assemble reads
        Feature region = chromosomeRegions.get(currRegionIdx);
        System.err.println("Processing region: " + region);
        Map<SimpleMapper, SSWAlignerResult> mappedContigs = processRegion(region, currReads, regionJunctions.get(region));
        System.err.println("Region: " + region + " assembled: " + mappedContigs.keySet().size() + " contigs");
        regionContigs.put(region, mappedContigs);
    }

    // Remap remaining reads
    remapReads(regionContigs, currReads);
    currReads.clear();
    regionContigs.clear();

    // Output remaining out of region reads
    for (int i=0; i<outOfRegionReads.size(); i++) {
        List<SAMRecordWrapper> outOfRegionReadsForSample = outOfRegionReads.get(i);
        synchronized(this.writers[i]) {
            for (SAMRecordWrapper outOfRegionRead : outOfRegionReadsForSample) {
                this.writers[i].addAlignment(outOfRegionRead.getSamRecord());
            }
        }
        outOfRegionReadsForSample.clear();
    }

    reader.close();

    System.err.println("Chromosome: " + chromosome + " done.");
}
/**
 * Returns the smallest alignment start among the first read of each sample's
 * buffer, or {@code Integer.MAX_VALUE} when every buffer is empty.
 */
private int getFirstStartPos(List<List<SAMRecordWrapper>> readsList) {
    int minPos = Integer.MAX_VALUE;
    for (List<SAMRecordWrapper> sampleReads : readsList) {
        if (!sampleReads.isEmpty()) {
            int firstStart = sampleReads.get(0).getSamRecord().getAlignmentStart();
            minPos = Math.min(minPos, firstStart);
        }
    }
    return minPos;
}
/**
 * Prints configuration to stderr and fails fast on unsupported Java versions.
 *
 * @param outputFiles output BAM paths, parallel to {@link #inputSams}
 * @throws RuntimeException if running on Java 6 or earlier
 */
private void logStartupInfo(String[] outputFiles) {

    // FIX: ctr was never incremented, so every input/output was labeled 0.
    int ctr = 0;
    for (String input : inputSams) {
        System.err.println("input" + ctr + ": " + input);
        ctr += 1;
    }

    ctr = 0;
    for (String output : outputFiles) {
        System.err.println("output" + ctr + ": " + output);
        ctr += 1;
    }

    System.err.println("regions: " + regionsBed);
    System.err.println("reference: " + reference);
    System.err.println("bwa index: " + bwaIndex);
    System.err.println("working dir: " + tempDir);
    System.err.println("num threads: " + numThreads);
    System.err.println("max unaligned reads: " + maxUnalignedReads);
    System.err.println(assemblerSettings.getDescription());
    System.err.println("paired end: " + isPairedEnd);
    System.err.println("isSkipAssembly: " + isSkipAssembly);
    System.err.println("isSkipNonAssembly: " + isSkipNonAssembly);

    String javaVersion = System.getProperty("java.version");
    System.err.println("Java version: " + javaVersion);
    if (javaVersion.startsWith("1.6") || javaVersion.startsWith("1.5") || javaVersion.startsWith("1.4")) {
        throw new RuntimeException("Please upgrade to Java 7 or later to run ABRA.");
    }

    try {
        InetAddress localhost = java.net.InetAddress.getLocalHost();
        String hostname = localhost.getHostName();
        System.err.println("hostname: " + hostname);
    } catch (Throwable t) {
        // Best effort only; hostname is purely informational.
        System.err.println("Error getting hostname: " + t.getMessage());
    }
}
/**
 * Submits a worker that realigns one chromosome to the thread manager.
 *
 * @param chromosome sequence name handled by the new worker
 * @throws InterruptedException if interrupted while waiting for a thread slot
 */
private void spawnChromosomeThread(String chromosome) throws InterruptedException {
    ReAlignerRunnable worker = new ReAlignerRunnable(threadManager, this, chromosome);
    System.err.println("Spawning thread for chromosome: " + chromosome);
    threadManager.spawnThread(worker);
}
/**
 * Appends assembled contigs to the shared contig FASTA writer.
 * Synchronized because per-chromosome worker threads all write to the same file.
 */
private synchronized void appendContigs(String contigs) throws IOException {
    contigWriter.write(contigs);
}
/**
 * Attempts to move a single read to an improved contig-guided alignment.
 *
 * When an improved alignment exists, the read's position/cigar/strand are
 * updated in place, and diagnostic tags are recorded:
 *   YA - contig alignment info (chrom:pos:cigar), set whenever the read maps to a contig
 *   YO - original alignment (chrom:pos:strand:cigar), or "N/A" if previously unmapped
 *   YM - mismatches vs. the contig
 *   YX - original edit distance
 *   NM - recomputed edit distance after the move
 *
 * @param readEvaluator contig-based alignment evaluator for the current regions
 * @param read          read to (possibly) update in place
 * @param origEditDist  edit distance of the read's original alignment
 */
private void remapRead(ReadEvaluator readEvaluator, SAMRecord read, int origEditDist) {
    Alignment alignment = readEvaluator.getImprovedAlignment(origEditDist, read.getReadString(), read);
    if (alignment != null) {

        int readPos = alignment.pos;

        // Set contig alignment info for all reads that map to contigs (even if read is unchanged)
        String ya = alignment.chromosome + ":" + alignment.contigPos + ":" + alignment.contigCigar;

        // If no change to alignment, just record the YA tag
        if (!read.getReadUnmappedFlag() && read.getAlignmentStart() == readPos && read.getCigarString().equals(alignment.cigar)) {
            read.setAttribute("YA", ya);
        }

        // If the read has actually moved to an improved alignment, update
        if (origEditDist > alignment.numMismatches && (read.getReadUnmappedFlag() || read.getAlignmentStart() != readPos || !read.getCigarString().equals(alignment.cigar))) {

            read.setAttribute("YA", ya);

            // Original alignment info
            String yo = "N/A";
            if (!read.getReadUnmappedFlag()) {
                String origOrientation = read.getReadNegativeStrandFlag() ? "-" : "+";
                yo = read.getReferenceName() + ":" + read.getAlignmentStart() + ":" + origOrientation + ":" + read.getCigarString();
            } else {
                // Previously unmapped read now has a placement.
                read.setReadUnmappedFlag(false);
            }
            read.setAttribute("YO", yo);

            // Update alignment position and cigar and orientation
            read.setAlignmentStart(alignment.pos);
            read.setCigarString(alignment.cigar);

            // If this is true, the read was already reverse complemented in the original alignment
            if (read.getReadNegativeStrandFlag()) {
                read.setReadNegativeStrandFlag(alignment.orientation == Orientation.FORWARD ? true : false);
            } else {
                read.setReadNegativeStrandFlag(alignment.orientation == Orientation.FORWARD ? false : true);
            }

            // Number of mismatches to contig
            read.setAttribute("YM", alignment.numMismatches);

            // Original edit distance
            read.setAttribute("YX", origEditDist);

            // Updated edit distance
            read.setAttribute("NM", SAMRecordUtils.getEditDistance(read, c2r));

            //TODO: Compute mapq intelligently???
            read.setMappingQuality(Math.min(read.getMappingQuality()+10, 60));
        }
    }
}
/**
 * Remaps buffered reads against the currently cached region contigs and then
 * writes every read (updated or not) to its sample's output BAM.
 *
 * @param mappedContigs per-region map of contig mappers to their alignments
 * @param readsList     per-sample read buffers, parallel to {@link #writers}
 */
private void remapReads(Map<Feature, Map<SimpleMapper, SSWAlignerResult>> mappedContigs, List<List<SAMRecordWrapper>> readsList) throws Exception {

    int numContigs = 0;
    for (Feature region : mappedContigs.keySet()) {
        numContigs += mappedContigs.get(region).size();
    }

    // FIX: guard against an empty outer list before indexing readsList.get(0)
    // (previously threw IndexOutOfBoundsException). Log line uses sample 0's
    // count only, matching the original behavior.
    if (!readsList.isEmpty() && readsList.get(0).size() > 0) {
        System.err.println("** REMAPPING [" + readsList.get(0).size() + "] reads to [" + numContigs + "] contigs");
    }

    ReadEvaluator readEvaluator = new ReadEvaluator(mappedContigs);

    int sampleIdx = 0;

    // For each sample.
    for (List<SAMRecordWrapper> reads : readsList) {
        // For each read.
        for (SAMRecordWrapper readWrapper : reads) {
            SAMRecord read = readWrapper.getSamRecord();

            // Only attempt remapping for confidently mapped or unmapped reads.
            if (read.getMappingQuality() > this.minMappingQuality || read.getReadUnmappedFlag()) {
                // TODO: Use NM tag if available (need to handle soft clipping though!)
                int origEditDist = SAMRecordUtils.getEditDistance(read, c2r);
                System.err.println("Read edit dist: " + read.getReadName() + " : " + origEditDist);

                // Perfect reference matches with no splices cannot be improved.
                if (origEditDist > 0 || SAMRecordUtils.getNumSplices(read) > 0) {
                    remapRead(readEvaluator, read, origEditDist);
                }
            }
        }

        // Output all reads for this sample - synchronize on the current BAM
        synchronized(this.writers[sampleIdx]) {
            for (SAMRecordWrapper read : reads) {
                this.writers[sampleIdx].addAlignment(read.getSamRecord());
            }
        }

        sampleIdx += 1;
    }
}
/**
 * Returns a per-sample subset of the input buffers containing only reads
 * that overlap the given region. Input buffers are not modified.
 */
private List<List<SAMRecordWrapper>> subsetReads(Feature region, List<List<SAMRecordWrapper>> readsList) {
    List<List<SAMRecordWrapper>> subset = new ArrayList<List<SAMRecordWrapper>>();

    for (List<SAMRecordWrapper> sampleReads : readsList) {
        List<SAMRecordWrapper> overlapping = new ArrayList<SAMRecordWrapper>();
        for (SAMRecordWrapper wrapper : sampleReads) {
            if (region.overlapsRead(wrapper.getSamRecord())) {
                overlapping.add(wrapper);
            }
        }
        subset.add(overlapping);
    }

    return subset;
}
/**
 * Aligns a contig against every junction-spanning reference and the plain
 * regional reference, returning the highest-scoring successful alignment
 * (or {@code null} when nothing aligns).
 */
private SSWAlignerResult alignContig(String contig, SSWAligner ssw, List<SSWAligner> sswJunctions) {
    SSWAlignerResult best = null;
    int bestScore = -1;

    // Evaluate junction references first, then the regional reference, so
    // tie scores resolve exactly as before (strict improvement required).
    List<SSWAligner> candidates = new ArrayList<SSWAligner>(sswJunctions);
    candidates.add(ssw);

    for (SSWAligner aligner : candidates) {
        SSWAlignerResult result = aligner.align(contig);
        if (result != null && result.getScore() > bestScore) {
            bestScore = result.getScore();
            best = result;
        }
    }

    if (best != null) {
        System.err.println("BEST_SSW: " + best.getGenomicPos() + " : " + best.getCigar() + " : " + best.getRefPos() + " : " + best.getScore() + " : " + best.getSequence());
    } else {
        System.err.println("NO_SSW: " + contig);
    }

    //TODO: Check for tie scores with different final alignment
    return best;
}
/**
 * Assembles and aligns contigs for one target region.
 *
 * Builds a padded local reference, one SSW aligner per junction permutation
 * (RNA), assembles contigs from the region's reads (unless skipped), and also
 * generates "alt contigs" from indels observed directly in the reads. Each
 * contig that does not match the reference is aligned and cached.
 *
 * @param region    target region (must be <= 10000 bp)
 * @param reads     per-sample read buffers; only reads overlapping the region are used
 * @param junctions splice junctions applicable to this region (may be empty)
 * @return map of read-mapper (keyed by contig sequence) to its reference alignment
 * @throws IllegalArgumentException if the region exceeds 10000 bp
 */
public Map<SimpleMapper, SSWAlignerResult> processRegion(Feature region, List<List<SAMRecordWrapper>> reads, List<Feature> junctions) throws Exception {

    long start = System.currentTimeMillis();

    if (isDebug) {
        Logger.info("Processing region: " + region.getDescriptor());
    }

    if (region.getLength() > 10000) {
        throw new IllegalArgumentException("Region too big: [" + region + "]");
    }

    Map<SimpleMapper, SSWAlignerResult> mappedContigs = new HashMap<SimpleMapper, SSWAlignerResult>();

    List<List<SAMRecordWrapper>> readsList = subsetReads(region, reads);

    try {
        String contigsFasta = tempDir + "/" + region.getDescriptor() + "_contigs.fasta";

        List<String> bams = new ArrayList<String>(Arrays.asList(this.inputSams));

        // Get reference sequence matching current region (pad by 2 read lengths on each side)
        int chromosomeLength = c2r.getReferenceLength(region.getSeqname());
        int refSeqStart = Math.max((int) region.getStart() - this.readLength*2, 1);
        int refSeqLength = Math.min((int) region.getLength() + this.readLength*4, chromosomeLength-1);

        String refSeq = c2r.getSequence(region.getSeqname(), refSeqStart, refSeqLength);

        // Aligner over the plain (junction-free) regional reference.
        SSWAligner ssw = new SSWAligner(refSeq, region.getSeqname(), refSeqStart, this.readLength);

        // One additional aligner per junction permutation (spliced references).
        List<SSWAligner> sswJunctions = new ArrayList<SSWAligner>();

        List<List<Feature>> junctionPermutations = JunctionUtils.combineJunctions(junctions, (int) region.getLength());
        System.err.println("NUM_JUNCTION_PERMUTATIONS:\t" + junctionPermutations.size() + "\t" + region);

        // Safety valve: skip junction handling when combinatorics explode.
        int maxJunctionPermutations = 2056;
        if (junctionPermutations.size() > maxJunctionPermutations) {
            System.err.println("TOO_MANY_JUNCTION_PERMUTATIONS: " + region.getDescriptor());
        } else {

            // Build a spliced local reference for each junction permutation.
            for (List<Feature> junctionPerm : junctionPermutations) {
                System.err.println("NUM_JUNCTIONS:\t" + junctionPerm.size() + "\t" + region);
                System.err.println("CURR_JUNCTIONS:\t" + junctionPerm);
                // List of junction positions within localized reference
                List<Integer> junctionPos = new ArrayList<Integer>();
                // List of junction lengths within localized reference
                List<Integer> junctionLengths = new ArrayList<Integer>();

                StringBuffer juncSeq = new StringBuffer();

                // Sequence to the left of the first junction.
                int refStart = Math.max((int) junctionPerm.get(0).getStart() - (int) region.getLength() - this.readLength*2, 1);
                String leftSeq = c2r.getSequence(region.getSeqname(), refStart, (int) junctionPerm.get(0).getStart() - refStart);
                juncSeq.append(leftSeq);

                junctionPos.add(leftSeq.length());
                junctionLengths.add((int) junctionPerm.get(0).getLength()+1);

                boolean isJunctionGapTooBig = false;

                // Sequence between consecutive junctions.
                for (int i=1; i<junctionPerm.size(); i++) {
                    int midStart = (int) junctionPerm.get(i-1).getEnd()+1;
                    String middleSeq = c2r.getSequence(region.getSeqname(), midStart, (int) junctionPerm.get(i).getStart() - midStart);

                    if (middleSeq.length() > region.getLength()*2) {
                        isJunctionGapTooBig = true;
                        break;
                    }

                    juncSeq.append(middleSeq);

                    junctionPos.add(juncSeq.length());
                    junctionLengths.add((int) junctionPerm.get(i).getLength()+1);
                }

                // TODO: Tighten this up...
                if (!isJunctionGapTooBig && juncSeq.length() < region.getLength()*10) {

                    // Sequence on right of last junction
                    // Junction stop is exclusive, so add 1 to starting position (junction end + 1)
                    Feature lastJunction = junctionPerm.get(junctionPerm.size()-1);
                    int rightStart = (int) lastJunction.getEnd()+1;
                    int rightStop = Math.min((int) lastJunction.getEnd() + (int) region.getLength() + this.readLength*2, chromosomeLength-1);
                    String rightSeq = c2r.getSequence(region.getSeqname(), rightStart, rightStop-rightStart);
                    juncSeq.append(rightSeq);

                    // Junction pos and length should already be added
                    SSWAligner sswJunc = new SSWAligner(juncSeq.toString(), region.getSeqname(), refStart, this.readLength, junctionPos, junctionLengths);
                    sswJunctions.add(sswJunc);
                }
            }

            // Assemble contigs
            if (this.isSkipAssembly || region.getKmer() > this.readLength-15) {
                System.err.println("Skipping assembly of region: " + region.getDescriptor() + " - " + region.getKmer());
            } else {
                NativeAssembler assem = (NativeAssembler) newAssembler(region);
                List<Feature> regions = new ArrayList<Feature>();
                regions.add(region);
                String contigs = assem.assembleContigs(bams, contigsFasta, tempDir, regions, region.getDescriptor(), true, this, c2r, readsList);

                // Assembler signals failure modes via sentinel strings.
                if (!contigs.equals("<ERROR>") && !contigs.equals("<REPEAT>") && !contigs.isEmpty()) {

                    // TODO: Turn this off by default
                    appendContigs(contigs);

                    List<ScoredContig> scoredContigs = ScoredContig.convertAndFilter(contigs);

                    System.err.println("# SCORED CONTIGS: " + scoredContigs.size());

                    // Map contigs to reference
                    for (ScoredContig contig : scoredContigs) {

                        // Filter contigs that match the reference
                        if (!refSeq.contains(contig.getContig())) {
                            SSWAlignerResult sswResult = alignContig(contig.getContig(), ssw, sswJunctions);
                            if (sswResult != null) {
                                // TODO: In multi-region processing, check to ensure identical contigs have identical mappings
                                mappedContigs.put(new SimpleMapper(sswResult.getSequence()), sswResult);
                            }
                        }
                    }
                }
            }

            if (!this.isSkipNonAssembly) {
                System.err.println("Processing non-assembled contigs for region: [" + region + "]");

                // Go through artificial contig generation using indels observed in the original reads
                AltContigGenerator altContigGenerator = new AltContigGenerator();
                Collection<String> altContigs = altContigGenerator.getAltContigs(readsList, c2r, readLength);

                for (String contig : altContigs) {
                    // TODO: Check to see if this contig is already in the map before aligning
                    SSWAlignerResult sswResult = ssw.align(contig);
                    if (sswResult != null) {
                        //TODO: Introduce penalty for non-assembled contigs?
                        mappedContigs.put(new SimpleMapper(sswResult.getSequence()), sswResult);
                    }
                }
            }
        }
    }
    catch (Exception e) {
        // Log locally (worker thread context), then propagate to fail the run.
        e.printStackTrace();
        throw e;
    }

    long stop = System.currentTimeMillis();

    System.err.println("PROCESS_REGION_SECS:\t" + (stop-start)/1000 + "\t" + region.getDescriptor());

    return mappedContigs;
}
/**
 * Pairs up junctions that could be spanned by a single read: emits (j1, j2)
 * whenever j2 starts after j1 ends and within {@code maxDist} of j1's end.
 */
protected List<Pair<Feature, Feature>> pairJunctions(List<Feature> junctions, int maxDist) {
    List<Pair<Feature, Feature>> pairs = new ArrayList<Pair<Feature, Feature>>();

    for (Feature first : junctions) {
        for (Feature second : junctions) {
            boolean isDownstream = first.getEnd() < second.getStart();
            boolean isWithinRange = first.getEnd() + maxDist >= second.getStart();
            if (isDownstream && isWithinRange) {
                pairs.add(new Pair<Feature, Feature>(first, second));
            }
        }
    }

    return pairs;
}
/**
 * Loads target regions from a BED file. When the regions carry no preset
 * kmer sizes (first region's kmer == 0), adjacent regions are collapsed and
 * oversized ones split into overlapping windows.
 */
static List<Feature> getRegions(String regionsBed, int readLength, boolean hasPresetKmers) throws IOException {
    List<Feature> regions = new RegionLoader().load(regionsBed, hasPresetKmers);

    boolean needsNormalization = !regions.isEmpty() && regions.get(0).getKmer() == 0;
    if (needsNormalization) {
        regions = splitRegions(RegionLoader.collapseRegions(regions, readLength));
    }

    return regions;
}
/**
 * Populates {@link #regions} from the configured BED file and, in debug
 * mode, dumps every region to stderr.
 */
private void loadRegions() throws IOException {
    regions = getRegions(regionsBed, readLength, hasPresetKmers);

    System.err.println("Num regions: " + regions.size());

    if (isDebug) {
        for (Feature r : regions) {
            System.err.println(r.getSeqname() + "\t" + r.getStart() + "\t" + r.getEnd() + "\t" + r.getKmer());
        }
    }
}
/**
 * Populates {@link #junctions} from the configured junction file, if any.
 * When no file was supplied the default empty list is kept.
 */
private void loadJunctions() throws IOException {
    if (junctionFile == null) {
        return;
    }
    junctions = new RegionLoader().load(junctionFile, false);
}
/** Sets the path of the BED file describing target regions. */
public void setRegionsBed(String bedFile) {
    this.regionsBed = bedFile;
}
/**
 * Reads each input BAM's header and samples up to 1,000,000 reads per file
 * to determine max read length, max mapping quality and (for paired-end)
 * the observed insert length range. Also derives the minimum contig length
 * when not explicitly configured.
 */
private void getSamHeaderAndReadLength() {

    Logger.info("Identifying header and determining read length");

    this.samHeaders = new SAMFileHeader[this.inputSams.length];

    for (int i = 0; i < this.inputSams.length; i++) {
        SAMFileReader reader = new SAMFileReader(new File(inputSams[i]));
        try {
            reader.setValidationStringency(ValidationStringency.SILENT);

            samHeaders[i] = reader.getFileHeader();
            samHeaders[i].setSortOrder(SAMFileHeader.SortOrder.unsorted);

            Iterator<SAMRecord> iter = reader.iterator();
            int cnt = 0;
            while ((iter.hasNext()) && (cnt < 1000000)) {
                SAMRecord read = iter.next();
                this.readLength = Math.max(this.readLength, read.getReadLength());
                this.maxMapq = Math.max(this.maxMapq, read.getMappingQuality());

                // Assumes aligner sets proper pair flag correctly
                if ((isPairedEnd) && (read.getReadPairedFlag()) && (read.getProperPairFlag())) {
                    this.minInsertLength = Math.min(this.minInsertLength, Math.abs(read.getInferredInsertSize()));
                    this.maxInsertLength = Math.max(this.maxInsertLength, Math.abs(read.getInferredInsertSize()));
                }

                cnt += 1;
            }
        } finally {
            reader.close();
        }
    }

    // Allow some fudge in insert length.
    // FIX: this adjustment previously ran once per input file, compounding the
    // fudge for multi-sample runs; apply it exactly once after sampling all inputs.
    minInsertLength = Math.max(minInsertLength - 2*readLength, 0);
    maxInsertLength = maxInsertLength + 2*readLength;

    System.err.println("Min insert length: " + minInsertLength);
    System.err.println("Max insert length: " + maxInsertLength);

    Logger.info("Max read length is: " + readLength);
    if (assemblerSettings.getMinContigLength() < 1) {
        assemblerSettings.setMinContigLength(Math.max(readLength+1, MIN_CONTIG_LENGTH));
    }
    Logger.info("Min contig length: " + assemblerSettings.getMinContigLength());
}
/**
 * Simple immutable 2-tuple.
 *
 * @param <T> type of the first element
 * @param <Y> type of the second element
 */
static class Pair<T, Y> {
    // final: instances are value holders and never mutated after construction
    private final T t;
    private final Y y;

    public Pair(T t, Y y) {
        this.t = t;
        this.y = y;
    }

    /** Returns the first element. */
    public T getFirst() {
        return t;
    }

    /** Returns the second element. */
    public Y getSecond() {
        return y;
    }
}
/**
 * Returns the given regions with any region longer than
 * maxRegionLength + minRegionRemainder replaced by overlapping sub-regions;
 * shorter regions are passed through unchanged.
 */
static List<Feature> splitRegions(List<Feature> regions,
        int maxRegionLength, int minRegionRemainder, int regionOverlap) {
    List<Feature> result = new ArrayList<Feature>();
    int splitThreshold = maxRegionLength + minRegionRemainder;
    for (Feature candidate : regions) {
        if (candidate.getLength() <= splitThreshold) {
            result.add(candidate);
        } else {
            result.addAll(splitWithOverlap(candidate, maxRegionLength, minRegionRemainder, regionOverlap));
        }
    }
    return result;
}
/**
 * If any of the input list of features is greater than maxSize, split them into multiple features.
 * Delegates to the 4-arg overload using the class-level default sizing constants.
 */
public static List<Feature> splitRegions(List<Feature> regions) {
    return splitRegions(regions, MAX_REGION_LENGTH, MIN_REGION_REMAINDER, REGION_OVERLAP);
}
/** Splits a single region using the class-level default sizing constants. */
public static List<Feature> splitWithOverlap(Feature region) {
    return splitWithOverlap(region, MAX_REGION_LENGTH, MIN_REGION_REMAINDER, REGION_OVERLAP);
}
/**
 * Splits a single region into windows of roughly maxRegionLength bases, each
 * overlapping the next by regionOverlap bases. When the remaining tail would be
 * shorter than minRegionRemainder, the final window is extended to the region end
 * instead of producing a tiny trailing window.
 */
static List<Feature> splitWithOverlap(Feature region, int maxRegionLength,
        int minRegionRemainder, int regionOverlap) {
    List<Feature> regions = new ArrayList<Feature>();
    long pos = region.getStart();
    // Initialized to pos-1 so the loop body always runs at least once.
    long end = pos-1;
    while (end < region.getEnd()) {
        long start = pos;
        end = pos + maxRegionLength;
        // Remember the un-clamped endpoint; the next window's start is computed
        // from it even when this window is clamped to the region end below.
        long marker = end;
        // If we're at or near the end of the region, stop at region end.
        if (end > (region.getEnd() - minRegionRemainder)) {
            end = region.getEnd();
        }
        pos = marker - regionOverlap;
        regions.add(new Feature(region.getSeqname(), start, end));
    }
    return regions;
}
/**
 * Returns the kmer sizes to use when assembling the given region: the region's
 * preset kmer (expanded into a ladder of sizes) when one is specified,
 * otherwise the globally configured assembler kmer sizes.
 */
int[] getKmers(Feature region) {
    int presetKmer = region.getKmer();
    return presetKmer > 0
            ? toKmerArray(presetKmer, readLength)
            : assemblerSettings.getKmerSize();
}
/**
 * Expands a starting kmer size into a ladder of sizes (step 2), capped just
 * below the read length and by the assembler's hard MAX_KMER_SIZE limit.
 *
 * Bug fix: the {@code readLength} parameter was previously ignored in favor of
 * the {@code this.readLength} field. The only caller passes the field, so using
 * the parameter is behavior-compatible while making the method honest about its
 * inputs.
 *
 * @param kmerSize   smallest kmer size to include
 * @param readLength read length used to cap the maximum kmer size
 * @return ascending kmer sizes in [kmerSize, min(readLength-5, MAX_KMER_SIZE));
 *         empty if kmerSize is already at or above the cap
 */
int[] toKmerArray(int kmerSize, int readLength) {
    int maxKmerSize = Math.min(readLength - 5, MAX_KMER_SIZE);
    List<Integer> kmers = new ArrayList<Integer>();
    while (kmerSize < maxKmerSize) {
        kmers.add(kmerSize);
        kmerSize += 2;
    }
    int[] kmerSizes = new int[kmers.size()];
    for (int i = 0; i < kmerSizes.length; i++) {
        kmerSizes[i] = kmers.get(i);
    }
    return kmerSizes;
}
/**
 * Builds a NativeAssembler for the given region, configured from the current
 * assembler settings and the region-appropriate kmer sizes.
 */
private NativeAssembler newAssembler(Feature region) {
    NativeAssembler assembler = new NativeAssembler();
    assembler.setTruncateOutputOnRepeat(true);
    assembler.setMaxContigs(assemblerSettings.getMaxPotentialContigs());
    assembler.setMaxPathsFromRoot(100000);
    assembler.setReadLength(readLength);
    // Kmer sizes may come from a per-region preset or the global settings.
    assembler.setKmer(getKmers(region));
    assembler.setMinKmerFrequency(assemblerSettings.getMinNodeFrequncy());
    assembler.setMinEdgeRatio(assemblerSettings.getMinEdgeRatio());
    assembler.setMinBaseQuality(assemblerSettings.getMinBaseQuality());
    assembler.setMaxNodes(assemblerSettings.getMaxNodes());
    assembler.setMinReadCandidateFraction(assemblerSettings.getMinReadCandidateFraction());
    assembler.setMaxAverageDepth(assemblerSettings.getMaxAverageDepth());
    assembler.setAverageDepthCeiling(assemblerSettings.getAverageDepthCeiling());
    assembler.setDebug(assemblerSettings.isDebug());
    return assembler;
}
/**
 * Prepares the working directory (tempDir plus an "unaligned" subdirectory),
 * loads the native library into it, and initializes the thread manager.
 *
 * @throws IllegalStateException if the working directory cannot be reset/created
 */
private void init() throws IOException {
    File workingDir = new File(tempDir);
    if (workingDir.exists()) {
        // NOTE(review): File.delete() returns false for a non-empty directory,
        // so a pre-existing working dir with contents aborts here — confirm
        // that callers are expected to supply a fresh/empty path.
        if (!workingDir.delete()) {
            throw new IllegalStateException("Unable to delete: " + tempDir);
        }
    }
    if (!workingDir.mkdir()) {
        throw new IllegalStateException("Unable to create: " + tempDir);
    }
    File unalignedTempDir = new File(tempDir + "/unaligned");
    if (!unalignedTempDir.mkdir()) {
        throw new IllegalStateException("Unable to create: " + tempDir + "/unaligned");
    }
    // Native assembler/aligner library is extracted into the working dir.
    new NativeLibraryLoader().load(tempDir);
    threadManager = new ThreadManager(numThreads);
}
/** Sets the reference FASTA path. */
public void setReference(String reference) {
    this.reference = reference;
}

/** Sets the bwa index prefix. */
public void setBwaIndex(String bwaIndex) {
    this.bwaIndex = bwaIndex;
}

/** Sets the temp/working directory path. */
public void setTempDir(String temp) {
    this.tempDir = temp;
}

/** Sets the assembler configuration. */
public void setAssemblerSettings(AssemblerSettings settings) {
    this.assemblerSettings = settings;
}

/** Sets the number of worker threads. */
public void setNumThreads(int numThreads) {
    this.numThreads = numThreads;
}

/** Enables/disables reprocessing of unaligned reads. */
public void setShouldReprocessUnaligned(boolean shouldReprocessUnaligned) {
    this.shouldReprocessUnaligned = shouldReprocessUnaligned;
}

/** Sets the cap on unaligned reads to reprocess. */
public void setMaxUnalignedReads(int maxUnalignedReads) {
    this.maxUnalignedReads = maxUnalignedReads;
}

/** Returns the reference comparison helper. */
public CompareToReference2 getC2r() {
    return this.c2r;
}

/** Returns the minimum mapping quality threshold. */
public int getMinMappingQuality() {
    return this.minMappingQuality;
}

/** Returns the (padded) maximum observed insert length. */
public int getMaxInsertLength() {
    return this.maxInsertLength;
}

/** Returns the (padded) minimum observed insert length. */
public int getMinInsertLength() {
    return this.minInsertLength;
}

/** Overrides the maximum insert length. */
public void setMaxInsertLength(int maxInsertLen) {
    this.maxInsertLength = maxInsertLen;
}

/** Overrides the minimum insert length. */
public void setMinInsertLength(int minInsertLen) {
    this.minInsertLength = minInsertLen;
}

/** True if the read should be excluded from processing (delegates to SAMRecordUtils). */
boolean isFiltered(SAMRecord read) {
    return SAMRecordUtils.isFiltered(isPairedEnd, read);
}
/**
 * Command line entry point: parses options, configures a ReAligner and runs
 * realignment, reporting elapsed wall-clock time. Exits with status -1 on
 * invalid options.
 */
public static void run(String[] args) throws Exception {
    System.err.println("Starting 0.97 ...");
    ReAlignerOptions options = new ReAlignerOptions();
    options.parseOptions(args);
    if (!options.isValid()) {
        System.exit(-1);
    }

    // Assembly configuration from the command line.
    AssemblerSettings settings = new AssemblerSettings();
    settings.setKmerSize(options.getKmerSizes());
    settings.setMinContigLength(options.getMinContigLength());
    settings.setMinNodeFrequncy(options.getMinNodeFrequency());
    settings.setMaxPotentialContigs(options.getMaxPotentialContigs());
    settings.setMinUnalignedNodeFrequency(options.getMinUnalignedNodeFrequency());
    settings.setMinBaseQuality(options.getMinBaseQuality());
    settings.setMinReadCandidateFraction(options.getMinReadCandidateFraction());
    settings.setMaxAverageDepth(options.getMaxAverageRegionDepth());
    settings.setAverageDepthCeiling(options.getAverageDepthCeiling());
    settings.setMinEdgeRatio(options.getMinEdgeRatio());
    settings.setDebug(options.isDebug());
    settings.setMaxNodes(options.getMaxNodes());

    // Realigner configuration.
    ReAligner realigner = new ReAligner();
    realigner.setReference(options.getReference());
    realigner.setBwaIndex(options.getBwaIndex());
    realigner.setRegionsBed(options.getTargetRegionFile());
    realigner.setTempDir(options.getWorkingDir());
    realigner.setAssemblerSettings(settings);
    realigner.setNumThreads(options.getNumThreads());
    realigner.isPairedEnd = options.isPairedEnd();
    realigner.minMappingQuality = options.getMinimumMappingQuality();
    realigner.hasPresetKmers = options.hasPresetKmers();
    realigner.isDebug = options.isDebug();
    realigner.isSkipAssembly = options.isSkipAssembly();
    realigner.isSkipNonAssembly = options.isSkipNonAssembly();
    realigner.junctionFile = options.getJunctionFile();

    long startMsec = System.currentTimeMillis();
    realigner.reAlign(options.getInputFiles(), options.getOutputFiles());
    long stopMsec = System.currentTimeMillis();
    System.err.println("Elapsed seconds: " + (stopMsec - startMsec) / 1000);
}
/**
 * Developer test harness: runs the realigner with a hard-coded local command
 * line. Not intended for production use — use run(String[]) with real args.
 */
public static void main(String[] args) throws Exception {
    String inp = "--in /home/lmose/dev/ayc/opt/mem/test_tumor.bam --kmer 43 --out /home/lmose/dev/ayc/opt/mem/test_tumor.abra3.bam --ref /home/lmose/reference/test/test.fa --targets /home/lmose/dev/ayc/opt/mem/test2.bed --threads 2 --working /home/lmose/dev/ayc/opt/mem/work3";
    run(inp.split("\\s+"));
}
}
package abra;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.security.CodeSource;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import abra.JunctionUtils.JunctionComparator;
import abra.JunctionUtils.TooManyJunctionPermutationsException;
import abra.ReadEvaluator.Alignment;
import abra.ContigAligner.ContigAlignerResult;
import abra.SimpleMapper.Orientation;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMProgramRecord;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.TextCigarCodec;
/**
* ABRA's main entry point
*
* @author Lisle E. Mose (lmose at unc dot edu)
*/
public class ReAligner {
// --- Region sizing constants ---
public static int MAX_REGION_LENGTH = 400;
private static int MIN_REGION_REMAINDER = 200;
public static int REGION_OVERLAP = 200;
// Minimum sequence length recommended for use with bwa mem
private static final int MIN_CONTIG_LENGTH = 70;
// Cannot be larger than buffer in assembler.c
private static final int MAX_KMER_SIZE = 199;
// These must match constants in C code (Aligner matrix dimensions)
private static final int MAX_CONTIG_LEN = 2000-1;
private static final int MAX_REF_REGION_LEN = 5000-1;

// --- Input/configuration state ---
private SAMFileHeader[] samHeaders;
private List<Feature> regions;
private String regionsBed;
private String reference;
private AssemblerSettings assemblerSettings;
private int numThreads;
private String[] inputSams;

// --- Values derived from scanning the input BAMs ---
private int readLength = -1;
private int maxMapq = -1;
private int minInsertLength = Integer.MAX_VALUE;
private int maxInsertLength = -1;
private boolean isPairedEnd = false;

private BufferedWriter contigWriter = null;
public static CompareToReference2 c2r;
private ThreadManager threadManager;
private int minMappingQuality;
private double maxMismatchRate;
private boolean isDebug;
private boolean isSkipAssembly;
private boolean isSkipUnmappedTrigger;
private boolean useSoftClippedReads;
private boolean useObservedIndels;
private boolean useConsensusSeq;
private boolean isKeepTmp;
private boolean shouldSort;
private String tmpDir;
private int finalCompressionLevel;
private int maxRealignDist;
private int maxAssembledContigs;
// If true, the input target file specifies kmer values
private boolean hasPresetKmers = false;
private String contigFile = null;

// RNA specific
private String junctionFile;
private String gtfJunctionFile;
private Set<Feature> junctions = new HashSet<Feature>();

private ReverseComplementor rc = new ReverseComplementor();
private String version = "unknown";
private String cl = "unknown";
private int[] swScoring;
private int[] softClipParams;
private int maxCachedReads = 0;
private int maxReadsInRegion;
private int minAnchorLen;
private int maxAnchorMismatches;
private SortedSAMWriter writer;
private ChromosomeChunker chromosomeChunker;
private String chromosomesToSkipRegex;
private ChromosomeRegex chromosomeSkipRegex;
private boolean shouldUnsetDuplicates;
private String inputVcf;
// Known variants grouped by chromosome. (Fixed stray double semicolon.)
private Map<String, List<Variant>> knownVariants;
private boolean shouldCreateIndex;
private boolean shouldUseGkl;
private int ambiguousMapq;
private double maxReadNoise;
private int maxReadsInRamForSort;
/**
 * Top-level realignment driver: initializes the reference/chunker, scans input
 * headers, loads target regions and junctions, spawns one processing thread per
 * chromosome chunk, then sorts and writes the final output files.
 *
 * @param inputFiles  input BAM paths, one per sample
 * @param outputFiles output BAM paths, parallel to inputFiles
 */
public void reAlign(String[] inputFiles, String[] outputFiles) throws Exception {
    this.inputSams = inputFiles;
    logStartupInfo(outputFiles);
    String tempDir = init();
    c2r = new CompareToReference2();
    c2r.init(this.reference);
    chromosomeChunker = new ChromosomeChunker(c2r);
    chromosomeChunker.init();
    Logger.info("Reading Input SAM Header and identifying read length");
    getSamHeaderAndReadLength();
    Logger.info("Read length: " + readLength);
    Logger.info("Loading target regions");
    loadRegions();
    loadJunctions();
    Clock clock = new Clock("Realignment");
    clock.start();
    if (contigFile != null) {
        contigWriter = new BufferedWriter(new FileWriter(contigFile, false));
    }
    // Record this program's invocation in each output header.
    for (int i=0; i<inputSams.length; i++) {
        SAMProgramRecord pg = new SAMProgramRecord("ABRA2");
        pg.setProgramVersion(this.version);
        pg.setCommandLine(cl);
        samHeaders[i].addProgramRecord(pg);
    }
    writer = new SortedSAMWriter(outputFiles, tempDir.toString(), samHeaders, isKeepTmp, chromosomeChunker,
            finalCompressionLevel, shouldSort, maxRealignDist, shouldUnsetDuplicates, shouldCreateIndex, shouldUseGkl, maxReadsInRamForSort);
    // Spawn thread for each chromosome
    // TODO: Validate identical sequence dictionary for each input file
    for (int i=0; i<this.chromosomeChunker.getChunks().size(); i++) {
        spawnChromosomeThread(i);
    }
    Logger.info("Waiting for processing threads to complete");
    threadManager.waitForAllThreadsToComplete();
    if (contigWriter != null) {
        contigWriter.close();
    }
    clock.stopAndPrint();
    clock = new Clock("Sort and cleanup");
    clock.start();
    // Cut num threads in half to allow for async writer thread
    threadManager = new ThreadManager(Math.max(numThreads / 2, 1));
    for (int i=0; i<outputFiles.length; i++) {
        SortedSAMWriterRunnable thread = new SortedSAMWriterRunnable(threadManager, writer, i, inputSams[i]);
        threadManager.spawnThread(thread);
    }
    Logger.info("Waiting for writer threads to complete");
    threadManager.waitForAllThreadsToComplete();
    clock.stopAndPrint();
    Logger.info("Done.");
}
/**
 * Streams all reads for one chromosome chunk, assembling each target region once
 * the read stream has moved safely past it and remapping cached reads against
 * the assembled contigs before flushing them to the writer.
 *
 * High-level flow:
 *  - normalize orientation of mate-anchored unmapped reads,
 *  - cache reads per sample and track which regions they overlap,
 *  - trigger region assembly (processRegion) once reads are 2*readLength past a region,
 *  - every 2500 reads, remap and flush reads that are out of scope and drop
 *    contig sets for regions no cached read can still reach,
 *  - at end of stream, process remaining regions and flush everything.
 */
void processChromosomeChunk(int chromosomeChunkIdx) throws Exception {
    Feature chromosomeChunk = chromosomeChunker.getChunks().get(chromosomeChunkIdx);
    String chromosome = chromosomeChunk.getSeqname();
    Logger.info("Processing chromosome chunk: " + chromosomeChunk);
    Clock clock = new Clock("Chromosome: " + chromosomeChunk);
    clock.start();
    writer.initChromosomeChunk(chromosomeChunkIdx);
    MultiSamReader reader = new MultiSamReader(this.inputSams, this.minMappingQuality, this.isPairedEnd, chromosomeChunk);
    // Per-sample caches of in-flight reads.
    List<List<SAMRecordWrapper>> currReads = new ArrayList<List<SAMRecordWrapper>>();
    for (int i=0; i<this.inputSams.length; i++) {
        currReads.add(new ArrayList<SAMRecordWrapper>());
    }
    List<List<SAMRecordWrapper>> outOfRegionReads = new ArrayList<List<SAMRecordWrapper>>();
    for (int i=0; i<this.inputSams.length; i++) {
        outOfRegionReads.add(new ArrayList<SAMRecordWrapper>());
    }
    // Assembled contigs per region, kept only while cached reads may still map to them.
    Map<Feature, Map<SimpleMapper, ContigAlignerResult>> regionContigs = new HashMap<Feature, Map<SimpleMapper, ContigAlignerResult>>();
    int readCount = 0;
    // Identify regions overlapping the current chromosome chunk
    List<Feature> chromosomeRegions = new ArrayList<Feature>();
    for (Feature region : regions) {
        if (region.getSeqname().equals(chromosome)) {
            if (region.getStart() > chromosomeChunk.getStart()-MAX_REGION_LENGTH && region.getEnd() < chromosomeChunk.getEnd()+MAX_REGION_LENGTH) {
                chromosomeRegions.add(region);
            }
        }
    }
    List<Feature> chromosomeJunctions = new ArrayList<Feature>();
    for (Feature junction : junctions) {
        if (junction.getSeqname().equals(chromosome)) {
            chromosomeJunctions.add(junction);
        }
    }
    List<Variant> knownVariants = getKnownVariants(chromosome);
    Map<Feature, List<Feature>> regionJunctions = JunctionUtils.getRegionJunctions(chromosomeRegions, chromosomeJunctions, readLength, MAX_REGION_LENGTH);
    Map<Feature, List<Variant>> regionVariants = Variant.groupByRegion(chromosomeRegions, knownVariants);
    // Indices (into chromosomeRegions) of regions awaiting assembly; TreeSet keeps them ordered.
    Set<Integer> regionsToProcess = new TreeSet<Integer>();
    int searchStartRegionIdx = 0;
    for (SAMRecordWrapper record : reader) {
        // If this is an unmapped read anchored by its mate, check rc flag
        SAMRecord read1 = record.getSamRecord();
        if (read1.getReadUnmappedFlag() && !read1.getMateUnmappedFlag()) {
            if (!read1.getReadNegativeStrandFlag() && !read1.getMateNegativeStrandFlag()) {
                // Both ends in forward orientation. Reverse the unmapped read
                read1.setReadString(rc.reverseComplement(read1.getReadString()));
                read1.setBaseQualityString(rc.reverse(read1.getBaseQualityString()));
                read1.setReadNegativeStrandFlag(true);
                record.setUnalignedRc(true);
            } else if (read1.getReadNegativeStrandFlag() && read1.getMateNegativeStrandFlag()) {
                // Both ends in reverse orientation. Reverse the unmapped read
                read1.setReadString(rc.reverseComplement(read1.getReadString()));
                read1.setBaseQualityString(rc.reverse(read1.getBaseQualityString()));
                read1.setReadNegativeStrandFlag(false);
                record.setUnalignedRc(true);
            }
        }
        List<Integer> overlappingRegions = new ArrayList<Integer>();
        if (chromosomeRegions.size() > 0) {
            // Advance currRegion to current locus @ record start - 1000000
            // TODO: Use move dist instead of 1000000 ?
            Feature searchStartRegion = chromosomeRegions.get(searchStartRegionIdx);
            while (searchStartRegion != null && searchStartRegionIdx < chromosomeRegions.size()-1 && searchStartRegion.getEnd() < record.getAdjustedAlignmentStart() - 1000000) {
                searchStartRegionIdx += 1;
                searchStartRegion = chromosomeRegions.get(searchStartRegionIdx);
            }
            overlappingRegions = Feature.findAllOverlappingRegions(reader.getSAMFileHeader(), record, chromosomeRegions, searchStartRegionIdx);
        }
        // int regionIdx = Feature.findFirstOverlappingRegion(reader.getSAMFileHeader(), record, chromosomeRegions, currRegionIdx);
        // Identify next region that is a candidate for processing
        // Note: Splicing can cause reads to go in and out of a region
        // if (regionIdx >= 0) {
        if (!overlappingRegions.isEmpty()) {
            regionsToProcess.addAll(overlappingRegions);
        }
        // Cache read for processing at end of region
        currReads.get(record.getSampleIdx()).add(record);
        Iterator<Integer> regionIter = regionsToProcess.iterator();
        while (regionIter.hasNext()) {
            int regionToProcessIdx = regionIter.next();
            // If start position for current read is beyond current region, trigger assembly
            Feature currRegion = chromosomeRegions.get(regionToProcessIdx);
            if (record.getAdjustedAlignmentStart() > currRegion.getEnd() + this.readLength*2) {
                Logger.debug("Processing region: %s", currRegion);
                Map<SimpleMapper, ContigAlignerResult> mappedContigs = processRegion(currRegion, currReads, regionJunctions.get(currRegion), regionVariants.get(currRegion));
                Logger.debug("Region: %s assembled: %d contigs", currRegion, mappedContigs.keySet().size());
                regionContigs.put(currRegion, mappedContigs);
                // Remove curr region from list of regions to process
                regionIter.remove();
            }
        }
        /*
        // TODO: Consider dropping this... Reads are out of scope when we've moved beyond them via standard processing?
        if (overlappingRegions.isEmpty()) {
            // Process out of region read and output if ready.
            List<SAMRecordWrapper> outOfRegionReadsForSample = outOfRegionReads.get(record.getSampleIdx());
            outOfRegionReadsForSample.add(record);
            if (outOfRegionReads.get(record.getSampleIdx()).size() > 2500) {
                for (SAMRecordWrapper outOfRegionRead : outOfRegionReadsForSample) {
                    this.writer.addAlignment(record.getSampleIdx(), outOfRegionRead.getSamRecord());
                }
                outOfRegionReadsForSample.clear();
            }
        }
        */
        int MAX_READ_RANGE = 1000 + this.readLength;
        // Check for out of scope reads every 2500 reads (TODO: is 2500 the best number?)
        if (readCount % 2500 == 0) {
            // Remap / output / clear out of scope reads
            List<List<SAMRecordWrapper>> readsToRemap = new ArrayList<List<SAMRecordWrapper>>();
            // Initialize per sample lists
            for (List<SAMRecordWrapper> origSample : currReads) {
                List<SAMRecordWrapper> sampleReadsToRemap = new ArrayList<SAMRecordWrapper>();
                readsToRemap.add(sampleReadsToRemap);
                Iterator<SAMRecordWrapper> iter = origSample.iterator();
                while (iter.hasNext()) {
                    SAMRecordWrapper read = iter.next();
                    // record == most recent read. read = cached read
                    if (record.getSamRecord().getAlignmentStart() - read.getSamRecord().getAlignmentStart() > MAX_READ_RANGE) {
                        // Only output reads with start pos within current chromosomeChunk
                        if (read.getSamRecord().getAlignmentStart() >= chromosomeChunk.getStart() &&
                                read.getSamRecord().getAlignmentStart() <= chromosomeChunk.getEnd()) {
                            sampleReadsToRemap.add(read);
                        }
                        iter.remove();
                    }
                }
            }
            // Remap out of scope reads
            long start = System.currentTimeMillis();
            int totalReads = remapReads(regionContigs, readsToRemap, chromosomeChunkIdx);
            long stop = System.currentTimeMillis();
            // Logger.debug("REMAP_READS_MSECS:\t%d\t%d\t%s:%d", (stop-start), totalReads, record.getSamRecord().getReferenceName(), record.getSamRecord().getAlignmentStart());
            // Remove out of scope region assemblies
            List<Feature> regionsToRemove = new ArrayList<Feature>();
            for (Feature region : regionContigs.keySet()) {
                if (getFirstStartPos(currReads)-region.getStart() > MAX_READ_RANGE) {
                    regionsToRemove.add(region);
                }
            }
            for (Feature region : regionsToRemove) {
                Logger.debug("Removing contigs for region: %s", region.toString());
                regionContigs.remove(region);
            }
            String logPrefix = record.getSamRecord().getReferenceName() + ":" + record.getSamRecord().getAlignmentStart() + " : ";
            if (regionContigs.size() > 10) {
                Logger.debug("%s\tregionContigs size: %d", logPrefix, regionContigs.size());
            }
            //TODO: Revisit this. Is it still necessary?
            // Safety valve: if any sample's cache grows past maxCachedReads,
            // flush all cached reads unmodified to bound memory use.
            int currReadsCount = 0;
            int idx = 0;
            boolean shouldClear = false;
            for (List<SAMRecordWrapper> reads : currReads) {
                currReadsCount += reads.size();
                if (reads.size() >= this.maxCachedReads) {
                    shouldClear = true;
                    Logger.warn(logPrefix + " Too many reads for sample: " + idx + " num_reads: " + reads.size() + ", clearing.");
                }
                idx += 1;
            }
            if (shouldClear) {
                for (int i=0; i<currReads.size(); i++) {
                    List<SAMRecordWrapper> reads = currReads.get(i);
                    for (SAMRecordWrapper read : reads) {
                        this.writer.addAlignment(i, read, chromosomeChunkIdx);
                    }
                    reads.clear();
                }
            }
            if (currReadsCount > 250000) {
                Logger.info(logPrefix + "\tCurr reads size: " + currReadsCount);
            }
            int outOfRegionCount = 0;
            for (List<SAMRecordWrapper> reads : outOfRegionReads) {
                outOfRegionCount += reads.size();
            }
            if (outOfRegionCount > 10000) {
                Logger.info(logPrefix + "\tOut of region reads size: " + outOfRegionCount);
            }
        }
        readCount += 1;
    }
    // Attempt to process last region if applicable
    Iterator<Integer> regionIter = regionsToProcess.iterator();
    while (regionIter.hasNext()) {
        int regionToProcessIdx = regionIter.next();
        // We've moved beyond the current region
        // Assemble reads
        Feature region = chromosomeRegions.get(regionToProcessIdx);
        Logger.debug("Processing region: %s", region);
        Map<SimpleMapper, ContigAlignerResult> mappedContigs = processRegion(region, currReads, regionJunctions.get(region), regionVariants.get(region));
        Logger.debug("Region: %s assembled: %d contigs", region, mappedContigs.keySet().size());
        regionContigs.put(region, mappedContigs);
    }
    // Remap remaining reads
    remapReads(regionContigs, currReads, chromosomeChunkIdx);
    currReads.clear();
    regionContigs.clear();
    // Output remaining out of region reads
    for (int i=0; i<outOfRegionReads.size(); i++) {
        List<SAMRecordWrapper> outOfRegionReadsForSample = outOfRegionReads.get(i);
        for (SAMRecordWrapper outOfRegionRead : outOfRegionReadsForSample) {
            this.writer.addAlignment(i, outOfRegionRead, chromosomeChunkIdx);
        }
        outOfRegionReadsForSample.clear();
    }
    reader.close();
    writer.finishChromosomeChunk(chromosomeChunkIdx);
    clock.stopAndPrint();
}
/**
 * Returns the smallest alignment start among the first cached read of each
 * sample (Integer.MAX_VALUE if every sample's cache is empty). Relies on each
 * per-sample list being in input order, so element 0 is its earliest read.
 */
private int getFirstStartPos(List<List<SAMRecordWrapper>> readsList) {
    int earliest = Integer.MAX_VALUE;
    for (List<SAMRecordWrapper> sampleReads : readsList) {
        if (!sampleReads.isEmpty()) {
            earliest = Math.min(earliest, sampleReads.get(0).getSamRecord().getAlignmentStart());
        }
    }
    return earliest;
}
/**
 * Logs the startup configuration (inputs, outputs, settings, environment) and
 * rejects unsupported Java versions.
 *
 * Bug fix: {@code ctr} was never incremented in either loop, so every input and
 * output file was logged as "input0"/"output0".
 *
 * @param outputFiles output BAM paths, parallel to {@code inputSams}
 * @throws RuntimeException if running on Java 6 or earlier
 */
private void logStartupInfo(String[] outputFiles) throws IOException {
    Logger.info("ABRA version: " + this.version);
    int ctr = 0;
    for (String input : inputSams) {
        Logger.info("input" + ctr + ": " + input);
        ctr += 1;
    }
    ctr = 0;
    for (String output : outputFiles) {
        Logger.info("output" + ctr + ": " + output);
        ctr += 1;
    }
    Logger.info("regions: " + regionsBed);
    Logger.info("reference: " + reference);
    Logger.info("num threads: " + numThreads);
    Logger.info(assemblerSettings.getDescription());
    Logger.info("paired end: " + isPairedEnd);
    Logger.info("isSkipAssembly: " + isSkipAssembly);
    Logger.info("useSoftClippedReads: " + useSoftClippedReads);
    Logger.info("SW scoring: " + Arrays.toString(swScoring));
    Logger.info("Soft clip params: " + Arrays.toString(softClipParams));
    String javaVersion = System.getProperty("java.version");
    Logger.info("Java version: " + javaVersion);
    if (javaVersion.startsWith("1.6") || javaVersion.startsWith("1.5") || javaVersion.startsWith("1.4")) {
        throw new RuntimeException("Please upgrade to Java 7 or later to run ABRA.");
    }
    try {
        InetAddress localhost = java.net.InetAddress.getLocalHost();
        String hostname = localhost.getHostName();
        Logger.info("hostname: " + hostname);
    } catch (Throwable t) {
        // Hostname lookup is informational only; never fail startup over it.
        Logger.error("Error getting hostname: " + t.getMessage());
    }
}
/** Queues a worker to process the given chromosome chunk on the thread manager. */
private void spawnChromosomeThread(int chromosomeChunkIdx) throws InterruptedException {
    ReAlignerRunnable chunkWorker = new ReAlignerRunnable(threadManager, this, chromosomeChunkIdx);
    Logger.debug("Queuing thread for chromosome: " + chromosomeChunkIdx);
    threadManager.spawnThread(chunkWorker);
}
/**
 * Appends assembled contig text to the contig output file.
 * Synchronized: multiple chromosome threads share the single writer.
 * No-op when contig output was not requested.
 */
private synchronized void appendContigs(String contigs) throws IOException {
    if (contigWriter == null) {
        return;
    }
    contigWriter.write(contigs);
}
/**
 * Attempts to improve a single read's alignment using assembled contigs.
 *
 * Outcomes, in order of evaluation:
 *  - ambiguous contig mapping, or an equal-edit-distance alternative alignment:
 *    downgrade mapping quality to {@code ambiguousMapq},
 *  - proposed position too far from the original ({@code maxRealignDist}): skip,
 *  - strictly fewer mismatches: move the read, annotate it with YA/YO/YM/YX tags,
 *    update NM/nM and mapq — then revert everything if the realigned read is too
 *    noisy or would run off the end of the chromosome.
 *
 * @param readEvaluator evaluates the read against region contigs
 * @param read          read to (possibly) update in place
 * @param origEditDist  edit distance of the read's original alignment
 */
private void remapRead(ReadEvaluator readEvaluator, SAMRecord read, int origEditDist) {
    Alignment alignment = readEvaluator.getImprovedAlignment(origEditDist, read, c2r);
    if (alignment != null) {
        if (alignment == Alignment.AMBIGUOUS) {
            // Read maps equally well to reference and multiple differing contigs. Flag with mapq of 1.
            if (ambiguousMapq >= 0 && read.getMappingQuality() > ambiguousMapq) {
                read.setMappingQuality(ambiguousMapq);
            }
        } else if (origEditDist == alignment.numMismatches && (read.getAlignmentStart() != alignment.pos || !read.getCigarString().equals(alignment.cigar))) {
            // Read maps ambiguously. Downgrade mapping quality
            if (ambiguousMapq >= 0 && read.getMappingQuality() > ambiguousMapq) {
                read.setMappingQuality(ambiguousMapq);
            }
        } else if (Math.abs(read.getAlignmentStart() - alignment.pos) > maxRealignDist) {
            Logger.trace("Not moving read: " + read.getReadName() + " from: " + read.getAlignmentStart() + " to: " + alignment.pos);
        } else if (origEditDist > alignment.numMismatches) {
            // Keep a copy so the update can be fully reverted below if needed.
            SAMRecord orig = read.deepCopy();
            int readPos = alignment.pos;
            // Set contig alignment info for all reads that map to contigs (even if read is unchanged)
            String ya = alignment.chromosome + ":" + alignment.contigPos + ":" + alignment.contigCigar;
            // If no change to alignment, just record the YA tag
            if (!read.getReadUnmappedFlag() && read.getAlignmentStart() == readPos && read.getCigarString().equals(alignment.cigar)) {
                read.setAttribute("YA", ya);
            }
            // If the read has actually moved to an improved alignment, update
            if (read.getReadUnmappedFlag() || read.getAlignmentStart() != readPos || !read.getCigarString().equals(alignment.cigar)) {
                read.setAttribute("YA", ya);
                // Original alignment info
                String yo = "N/A";
                if (!read.getReadUnmappedFlag()) {
                    String origOrientation = read.getReadNegativeStrandFlag() ? "-" : "+";
                    yo = read.getReferenceName() + ":" + read.getAlignmentStart() + ":" + origOrientation + ":" + read.getCigarString();
                } else {
                    // Previously unmapped read is now placed; give it the max observed mapq.
                    read.setReadUnmappedFlag(false);
                    read.setMappingQuality(this.maxMapq);
                }
                read.setAttribute("YO", yo);
                // Update alignment position and cigar and orientation
                read.setAlignmentStart(alignment.pos);
                read.setCigarString(alignment.cigar);
                // If this is true, the read was already reverse complemented in the original alignment
                if (read.getReadNegativeStrandFlag()) {
                    read.setReadNegativeStrandFlag(alignment.orientation == Orientation.FORWARD ? true : false);
                } else {
                    read.setReadNegativeStrandFlag(alignment.orientation == Orientation.FORWARD ? false : true);
                }
                // Number of mismatches to contig
                read.setAttribute("YM", alignment.numMismatches);
                // Original edit distance
                read.setAttribute("YX", origEditDist);
                // Some aligners (e.g. STAR) use "nM" instead of "NM" — preserve whichever was present.
                String distTag = "NM";
                if (orig.getAttribute("NM") == null && orig.getAttribute("nM") != null) {
                    distTag = "nM";
                }
                // Updated edit distance
                read.setAttribute(distTag, SAMRecordUtils.getEditDistance(read, c2r, false));
                //TODO: Compute mapq intelligently???
                read.setMappingQuality(Math.min(read.getMappingQuality()+10, this.maxMapq));
                // Check for realignments that are too noisy
                int numMismatches = c2r.numHighQualityMismatches(read, 20, false);
                int numIndels = SAMRecordUtils.getNumIndels(read);
                // Indels are weighted double relative to mismatches.
                int noise = numMismatches + (numIndels * 2);
                int maxNoise = (int) (SAMRecordUtils.getUnclippedLength(read) * maxReadNoise);
                if (noise > maxNoise ||
                        read.getAlignmentEnd() >= c2r.getChromosomeLength(read.getReferenceName())-1) {
                    // Read is too noisy or read maps off end of chromosome, revert
                    read.setAlignmentStart(orig.getAlignmentStart());
                    read.setCigar(orig.getCigar());
                    read.setReadNegativeStrandFlag(orig.getReadNegativeStrandFlag());
                    read.setAttribute("YA", null);
                    read.setAttribute("YO", null);
                    read.setAttribute("YM", null);
                    read.setAttribute("YX", null);
                    read.setAttribute(distTag, orig.getAttribute(distTag));
                    read.setMappingQuality(orig.getMappingQuality());
                    read.setReadUnmappedFlag(orig.getReadUnmappedFlag());
                }
            }
        }
    }
}
/**
 * Remaps eligible cached reads against the assembled contigs and then writes
 * every read (remapped or not) to the output writer.
 *
 * A read is considered for remapping only if it meets the mapping quality
 * threshold (or is unmapped) and its mate is on the same reference within
 * {@code maxRealignDist}.
 *
 * @return total number of reads examined across all samples
 */
private int remapReads(Map<Feature, Map<SimpleMapper, ContigAlignerResult>> mappedContigs,
        List<List<SAMRecordWrapper>> readsList, int chromosomeChunkIdx) throws Exception {
    ReadEvaluator readEvaluator = new ReadEvaluator(mappedContigs);
    int sampleIdx = 0;
    int totalReads = 0;
    // For each sample.
    for (List<SAMRecordWrapper> reads : readsList) {
        // For each read.
        for (SAMRecordWrapper readWrapper : reads) {
            totalReads += 1;
            SAMRecord read = readWrapper.getSamRecord();
            if (read.getMappingQuality() >= this.minMappingQuality || read.getReadUnmappedFlag()) {
                if (Math.abs(read.getAlignmentStart() - read.getMateAlignmentStart()) < maxRealignDist &&
                        read.getReferenceName().equals(read.getMateReferenceName())) {
                    // TODO: Use NM tag if available (need to handle soft clipping though!)
                    int origEditDist = SAMRecordUtils.getEditDistance(read, c2r, true);
                    // int origEditDist = c2r.numMismatches(read);
                    remapRead(readEvaluator, read, origEditDist);
                }
            }
        }
        // Output all reads for this sample
        for (SAMRecordWrapper read : reads) {
            this.writer.addAlignment(sampleIdx, read, chromosomeChunkIdx);
        }
        sampleIdx += 1;
    }
    return totalReads;
}
/**
 * Returns, per sample, the cached reads that overlap the given region.
 * While subsetting, pairs up primary first/second-of-pair reads (keyed by
 * read name + alignment start) so that:
 *  - reads with possible adapter read-through are excluded from assembly,
 *  - overlapping mates get a merged consensus sequence where possible.
 */
private List<List<SAMRecordWrapper>> subsetReads(Feature region, List<List<SAMRecordWrapper>> readsList) {
    List<List<SAMRecordWrapper>> subset = new ArrayList<List<SAMRecordWrapper>>();
    // Initialize per sample lists
    for (List<SAMRecordWrapper> origSample : readsList) {
        // Track read pair ends
        Map<String, SAMRecordWrapper> firstReads = new HashMap<String, SAMRecordWrapper>();
        Map<String, SAMRecordWrapper> secondReads = new HashMap<String, SAMRecordWrapper>();
        List<SAMRecordWrapper> subsetSample = new ArrayList<SAMRecordWrapper>();
        subset.add(subsetSample);
        for (SAMRecordWrapper read : origSample) {
            if (region.overlapsRead(read.getSamRecord())) {
                subsetSample.add(read);
                if (read.getSamRecord().getFirstOfPairFlag() && SAMRecordUtils.isPrimary(read.getSamRecord())) {
                    firstReads.put(read.getSamRecord().getReadName() + "_" + read.getSamRecord().getAlignmentStart(), read);
                } else if (read.getSamRecord().getSecondOfPairFlag() && SAMRecordUtils.isPrimary(read.getSamRecord())) {
                    secondReads.put(read.getSamRecord().getReadName() + "_" + read.getSamRecord().getAlignmentStart(), read);
                }
            }
        }
        for (SAMRecordWrapper read : subsetSample) {
            if (SAMRecordUtils.hasPossibleAdapterReadThrough(read.getSamRecord(), firstReads, secondReads)) {
                read.setShouldAssemble(false);
            }
            // If reads overlap, attempt to generate merged sequence
            if (!read.hasMergedSeq()) {
                SAMRecordUtils.mergeReadPair(read, firstReads, secondReads);
            }
        }
    }
    return subset;
}
/**
 * Scans a contig alignment's CIGAR for runs of two or more adjacent gap
 * elements (D/N). Each such combined gap that does not match an already known
 * junction is reported as a putative novel junction.
 *
 * Fix: corrected "idenfified" typo in the log message.
 *
 * @param result     contig alignment whose CIGAR is inspected
 * @param junctions  known junctions (first set)
 * @param junctions2 known junctions (second set)
 * @return junctions implied by adjacent-deletion gaps not already known
 */
private List<Feature> getExtraJunctions(ContigAlignerResult result, List<Feature> junctions, List<Feature> junctions2) {
    // Look for junctions with adjacent deletions.
    // Treat these as putative novel junctions.
    Set<Feature> junctionSet = new HashSet<Feature>(junctions);
    junctionSet.addAll(junctions2);
    List<Feature> extraJunctions = new ArrayList<Feature>();
    Cigar cigar = TextCigarCodec.decode(result.getCigar());
    boolean isInGap = false;
    int gapStart = -1;     // reference offset where the current gap run began
    int gapLength = -1;    // combined length of the current gap run
    int numElems = -1;     // number of D/N elements in the current gap run
    int refOffset = 0;     // reference bases consumed so far
    for (CigarElement elem : cigar.getCigarElements()) {
        if (elem.getOperator() == CigarOperator.D || elem.getOperator() == CigarOperator.N) {
            if (!isInGap) {
                isInGap = true;
                gapStart = refOffset;
                gapLength = elem.getLength();
                numElems = 1;
            } else {
                gapLength += elem.getLength();
                numElems += 1;
            }
        } else {
            if (isInGap) {
                // Only multi-element gap runs are treated as putative junctions.
                if (numElems > 1) {
                    long start = result.getGenomicPos() + gapStart;
                    long end = start + gapLength;
                    Feature junc = new Feature(result.getChromosome(), start, end-1);
                    if (!junctionSet.contains(junc)) {
                        Logger.info("Extra junction identified: %s", junc);
                        extraJunctions.add(junc);
                    }
                }
                isInGap = false;
                gapStart = -1;
                gapLength = -1;
                numElems = -1;
            }
        }
        // Advance the reference offset for reference-consuming operators.
        if (elem.getOperator() == CigarOperator.M ||
                elem.getOperator() == CigarOperator.D ||
                elem.getOperator() == CigarOperator.N ||
                elem.getOperator() == CigarOperator.X ||
                elem.getOperator() == CigarOperator.EQ) {
            refOffset += elem.getLength();
        }
    }
    return extraJunctions;
}
/**
 * Handles the special case where an exon-skipping junction shows up as large
 * D/N gaps interrupted by a tiny (~1-5 base) mapped stretch. When such a
 * pattern is found, the gap is snapped to the closest known junction start
 * and end points, and reported as a potential exon-skipping junction if not
 * already known.
 *
 * Fixes: corrected "idenfified" typo in the log message; guard against an
 * empty junction list (junctions.get(0) would otherwise throw).
 *
 * @param result    contig alignment whose CIGAR is inspected
 * @param junctions known junctions used for snapping and de-duplication
 * @return potential exon-skipping junctions not already in the input list
 */
private List<Feature> getExonSkippingJunctions(ContigAlignerResult result, List<Feature> junctions) {
    List<Feature> extraJunctions = new ArrayList<Feature>();
    // Defensive: snapping below requires at least one known junction.
    if (junctions.isEmpty()) {
        return extraJunctions;
    }
    Set<Feature> junctionSet = new HashSet<Feature>(junctions);
    Cigar cigar = TextCigarCodec.decode(result.getCigar());
    boolean isInGap = false;        // inside a gap run (possibly interrupted)
    int gapStart = -1;              // ref offset where the run began
    int gapLength = -1;             // total ref bases covered by the run
    int numElems = -1;              // number of D/N elements in the run
    int refOffset = 0;              // current offset into the reference
    int maxBasesInMiddle = 5;       // max mapped bases tolerated inside the gap
    int middleBases = -1;           // mapped bases seen inside the current gap
    CigarOperator prev = CigarOperator.M;
    for (CigarElement elem : cigar.getCigarElements()) {
        if (elem.getOperator() == CigarOperator.D || elem.getOperator() == CigarOperator.N) {
            if (!isInGap) {
                isInGap = true;
                gapStart = refOffset;
                gapLength = elem.getLength();
                numElems = 1;
                middleBases = 0;
            } else {
                gapLength += elem.getLength();
                numElems += 1;
            }
        } else {
            if (isInGap) {
                if (elem.getLength() + middleBases < maxBasesInMiddle) {
                    // Small mapped stretch inside the gap; keep the run open
                    middleBases += elem.getLength();
                } else {
                    // Run is over; only report if it was interrupted (middleBases > 0)
                    // and the last element was part of the gap
                    if (numElems > 1 && middleBases > 0 && (prev == CigarOperator.D || prev == CigarOperator.N)) {
                        long start = result.getGenomicPos() + gapStart;
                        long end = start + gapLength;
                        // Find junction start / end points closest to gap start / end point
                        long closestStart = junctions.get(0).getStart();
                        long closestEnd = junctions.get(0).getEnd();
                        for (Feature junction : junctions) {
                            if (Math.abs(junction.getStart()-start) < Math.abs(closestStart-start)) {
                                closestStart = junction.getStart();
                            }
                            if (Math.abs(junction.getEnd()-end) < Math.abs(closestEnd-end)) {
                                closestEnd = junction.getEnd();
                            }
                        }
                        if (closestEnd > closestStart+1) {
                            Feature junc = new Feature(result.getChromosome(), closestStart, closestEnd);
                            if (!junctionSet.contains(junc)) {
                                Logger.info("Potential exon skipping junction identified: %s", junc);
                                extraJunctions.add(junc);
                            }
                        }
                    }
                    isInGap = false;
                    gapStart = -1;
                    gapLength = -1;
                    numElems = -1;
                    middleBases = -1;
                }
            }
        }
        // Operators that consume reference bases advance the ref offset
        if (elem.getOperator() == CigarOperator.M ||
            elem.getOperator() == CigarOperator.D ||
            elem.getOperator() == CigarOperator.N ||
            elem.getOperator() == CigarOperator.X ||
            elem.getOperator() == CigarOperator.EQ) {
            refOffset += elem.getLength();
        }
        prev = elem.getOperator();
    }
    return extraJunctions;
}
/**
 * Aligns a contig against the plain reference aligner plus every junction-aware
 * aligner, keeping the highest-scoring result. If a best result is found and
 * junctions exist, additionally searches for extra junctions (exon skipping and
 * deletion-adjacent-to-intron patterns) implied by the best alignment, builds
 * aligners for junction permutations containing those extras, and lets them
 * compete for the best score as well.
 *
 * @param region           region being processed (used for logging/aligner construction)
 * @param contig           contig sequence to align
 * @param ssw              aligner over the plain (non-junction) reference
 * @param sswJunctions     aligners built for known junction permutations
 * @param allJunctions     all known junctions in this region
 * @param chromosomeLength length of the chromosome (bounds for new aligners)
 * @return the best-scoring alignment, or null if nothing aligned acceptably
 */
private ContigAlignerResult alignContig(Feature region, String contig, ContigAligner ssw, List<ContigAligner> sswJunctions, List<Feature> allJunctions,
int chromosomeLength) {
ContigAlignerResult bestResult = null;
// Overlong contigs are rejected outright (hard cap on aligner input size)
if (contig.length() > MAX_CONTIG_LEN) {
Logger.warn(String.format("In Region: %s, contig too long: [%s]", region, contig));
} else {
int bestScore = -1;
ContigAlignerResult sswResult;
// Round 1: junction-aware aligners compete first
for (ContigAligner sswJunc : sswJunctions) {
sswResult = sswJunc.align(contig);
if (sswResult != null && sswResult.getScore() > bestScore) {
bestScore = sswResult.getScore();
bestResult = sswResult;
}
}
// Round 2: the plain reference aligner
sswResult = ssw.align(contig);
if (sswResult != null && sswResult.getScore() > bestScore) {
bestScore = sswResult.getScore();
bestResult = sswResult;
}
if (bestResult != null && bestResult != ContigAlignerResult.INDEL_NEAR_END) {
// NOTE(review): both extra-junction passes below are nested inside this
// guard, so neither runs when no junctions are known for the region --
// confirm that is intended for the getExtraJunctions pass, whose comment
// says it does not rely on annotated exons.
if (!allJunctions.isEmpty()) {
// Check for additional potential exon skipping junctions masked by a base or 2 interrupting the gap
// Using annotated exons here
List<Feature> extraJunctions = getExonSkippingJunctions(bestResult, allJunctions);
if (!extraJunctions.isEmpty()) {
List<Feature> combinedJunctions = new ArrayList<Feature>(allJunctions);
combinedJunctions.addAll(extraJunctions);
Collections.sort(combinedJunctions, new JunctionComparator());
List<List<Feature>> junctionPermutations = new ArrayList<List<Feature>>();
try {
junctionPermutations = JunctionUtils.combineJunctions(region, combinedJunctions, new HashSet<Feature>(extraJunctions), MAX_REGION_LENGTH, this.readLength);
} catch (TooManyJunctionPermutationsException e) {
// Leave permutations empty on explosion; best result so far stands
Logger.warn("TOO_MANY_POTENTIAL_JUNCTION_PERMUTATIONS: " + region.getDescriptor());
}
// Only permutations that actually include a newly found junction are retried
for (List<Feature> permutation : junctionPermutations) {
boolean hasExtra = false;
for (Feature junc : permutation) {
if (extraJunctions.contains(junc)) {
hasExtra = true;
break;
}
}
if (hasExtra) {
ContigAligner aligner = getContigAlignerForJunctionPermutation(permutation, region, chromosomeLength);
if (aligner != null) {
sswResult = aligner.align(contig);
if (sswResult != null && sswResult.getScore() > bestScore) {
bestScore = sswResult.getScore();
bestResult = sswResult;
}
}
}
}
}
// Check for deletion adjacent to intron (i.e. skipped exon or unannotated splice)
// Not relying on annotated exons here.
extraJunctions = getExtraJunctions(bestResult, allJunctions, extraJunctions);
if (!extraJunctions.isEmpty()) {
// Same retry pattern as above, now seeded with deletion-adjacent junctions
List<Feature> combinedJunctions = new ArrayList<Feature>(allJunctions);
combinedJunctions.addAll(extraJunctions);
Collections.sort(combinedJunctions, new JunctionComparator());
List<List<Feature>> junctionPermutations = new ArrayList<List<Feature>>();
try {
junctionPermutations = JunctionUtils.combineJunctions(region, combinedJunctions, new HashSet<Feature>(extraJunctions), MAX_REGION_LENGTH, this.readLength);
} catch (TooManyJunctionPermutationsException e) {
Logger.warn("TOO_MANY_POTENTIAL_JUNCTION_PERMUTATIONS: " + region.getDescriptor());
}
for (List<Feature> permutation : junctionPermutations) {
boolean hasExtra = false;
for (Feature junc : permutation) {
if (extraJunctions.contains(junc)) {
hasExtra = true;
break;
}
}
if (hasExtra) {
ContigAligner aligner = getContigAlignerForJunctionPermutation(permutation, region, chromosomeLength);
if (aligner != null) {
sswResult = aligner.align(contig);
if (sswResult != null && sswResult.getScore() > bestScore) {
bestScore = sswResult.getScore();
bestResult = sswResult;
}
}
}
}
}
}
Logger.debug("BEST_SSW: %d : %s : %d: %d : %s",
bestResult.getGenomicPos(), bestResult.getCigar(), bestResult.getRefPos(), bestResult.getScore(), bestResult.getSequence());
} else {
Logger.debug("NO_SSW: %s", contig);
}
//TODO: Check for tie scores with different final alignment
}
return bestResult;
}
/**
 * Runs local assembly over the region, aligns each assembled contig that does
 * not match the reference, and collects acceptable alignments into results.
 *
 * @param results  output list; successful contig alignments are appended here
 * @return true if an indel was detected near a contig end, signalling the
 *         caller to retry assembly with relaxed pruning
 * @throws IOException on assembly I/O failure
 */
private boolean assemble(List<ContigAlignerResult> results, Feature region,
        String refSeq, List<String> bams, List<List<SAMRecordWrapper>> readsList, ContigAligner contigAligner,
        List<ContigAligner> junctionAligners, int mnf, int mbq, double mer, List<Feature> junctions,
        int chromosomeLength, int maxNumContigs) throws IOException {
    boolean retryNeeded = false;
    NativeAssembler assembler = (NativeAssembler) newAssembler(region);
    List<Feature> regionList = new ArrayList<Feature>();
    regionList.add(region);
    StringBuffer readBuffer = new StringBuffer();
    String assembled = assembler.assembleContigs(bams, regionList, region.getDescriptor(), true, this, c2r, readsList, mnf, mbq, mer, readBuffer);
    // Assembler signals failure modes via sentinel strings
    boolean usable = !assembled.equals("<ERROR>") && !assembled.equals("<REPEAT>") && !assembled.isEmpty();
    if (usable) {
        if (contigWriter != null) {
            appendContigs(assembled);
        }
        for (ScoredContig candidate : ScoredContig.convertAndFilter(assembled, maxNumContigs, readBuffer)) {
            // Contigs identical to a reference substring carry no realignment signal
            if (refSeq.contains(candidate.getContig())) {
                continue;
            }
            ContigAlignerResult aligned = alignContig(region, candidate.getContig(), contigAligner, junctionAligners, junctions, chromosomeLength);
            if (aligned == ContigAlignerResult.INDEL_NEAR_END) {
                // Possible low coverage indel; caller may reassemble less stringently
                retryNeeded = true;
            } else if (aligned != null) {
                // TODO: In multi-region processing, check to ensure identical contigs have identical mappings
                results.add(aligned);
            }
        }
    }
    return retryNeeded;
}
/**
 * Builds a ContigAligner over a "spliced" reference constructed by
 * concatenating the sequence left of the first junction, the inter-junction
 * sequences, and the sequence right of the last junction, recording junction
 * positions/lengths within that localized reference. Returns null when any
 * inter-junction gap is too large, the spliced sequence grows too long, or
 * there is no sequence right of the last junction.
 *
 * Fix: corrected "to long" -> "too long" in the warning message.
 *
 * @param junctionPerm     ordered junction permutation to splice around
 * @param region           region being processed (sizing/padding source)
 * @param chromosomeLength chromosome length bounding the right flank
 * @return an aligner over the spliced reference, or null if not constructible
 */
private ContigAligner getContigAlignerForJunctionPermutation(List<Feature> junctionPerm, Feature region, int chromosomeLength) {
    ContigAligner contigAligner = null;
    // List of junction positions within localized reference
    List<Integer> junctionPos = new ArrayList<Integer>();
    // List of junction lengths within localized reference
    List<Integer> junctionLengths = new ArrayList<Integer>();
    StringBuffer juncSeq = new StringBuffer();
    // Left flank: region length plus 2 read lengths of padding, clamped to position 1
    int refStart = Math.max((int) junctionPerm.get(0).getStart() - (int) region.getLength() - this.readLength*2, 1);
    String leftSeq = c2r.getSequence(region.getSeqname(), refStart, (int) junctionPerm.get(0).getStart() - refStart);
    juncSeq.append(leftSeq);
    junctionPos.add(leftSeq.length());
    junctionLengths.add((int) junctionPerm.get(0).getLength()+1);
    boolean isJunctionGapTooBig = false;
    // Append the sequence between each consecutive junction pair
    for (int i=1; i<junctionPerm.size(); i++) {
        int midStart = (int) junctionPerm.get(i-1).getEnd()+1;
        String middleSeq = c2r.getSequence(region.getSeqname(), midStart, (int) junctionPerm.get(i).getStart() - midStart);
        if (middleSeq.length() > region.getLength()*2) {
            // Gap between junctions is too large to be useful
            isJunctionGapTooBig = true;
            break;
        }
        juncSeq.append(middleSeq);
        junctionPos.add(juncSeq.length());
        junctionLengths.add((int) junctionPerm.get(i).getLength()+1);
    }
    // TODO: Tighten this up...
    if (!isJunctionGapTooBig && juncSeq.length() < region.getLength()*10) {
        // Sequence on right of last junction
        // Junction stop is exclusive, so add 1 to starting position (junction end + 1)
        Feature lastJunction = junctionPerm.get(junctionPerm.size()-1);
        int rightStart = (int) lastJunction.getEnd()+1;
        int rightStop = Math.min((int) lastJunction.getEnd() + (int) region.getLength() + this.readLength*2, chromosomeLength-1);
        if (rightStop-rightStart > 0) {
            String rightSeq = c2r.getSequence(region.getSeqname(), rightStart, rightStop-rightStart);
            juncSeq.append(rightSeq);
            // Junction pos and length should already be added
            if (juncSeq.length() > MAX_REF_REGION_LEN) {
                // Make sure we don't blow up the hardcoded size C matrix
                Logger.warn("Junction Ref Seq too long: " + juncSeq.toString());
            } else {
                contigAligner = new ContigAligner(juncSeq.toString(), region.getSeqname(), refStart, this.readLength, minAnchorLen, maxAnchorMismatches, junctionPos, junctionLengths);
            }
        }
    }
    return contigAligner;
}
/**
 * Core per-region pipeline: subsets reads to the region, builds junction
 * permutation aligners, assembles contigs (with one relaxed retry when an
 * indel is found near a contig end), generates artificial contigs from
 * observed indels / soft clips / known variants, and returns all acceptably
 * aligned contigs keyed by a SimpleMapper over each contig sequence.
 *
 * @param region        region to process; must be at most 10000 bases long
 * @param reads         per-sample lists of reads covering the region's window
 * @param junctions     known junctions for this region
 * @param knownVariants externally supplied variants, may be null
 * @return map from contig mapper to its alignment result
 * @throws IllegalArgumentException if the region exceeds 10000 bases
 */
public Map<SimpleMapper, ContigAlignerResult> processRegion(Feature region, List<List<SAMRecordWrapper>> reads, List<Feature> junctions, List<Variant> knownVariants) throws Exception {
long start = System.currentTimeMillis();
if (isDebug) {
Logger.info("Processing region: " + region.getDescriptor());
}
if (region.getLength() > 10000) {
throw new IllegalArgumentException("Region too big: [" + region + "]");
}
Map<SimpleMapper, ContigAlignerResult> mappedContigs = new HashMap<SimpleMapper, ContigAlignerResult>();
List<List<SAMRecordWrapper>> readsList = subsetReads(region, reads);
// Skip regions where any sample has excessive depth
boolean isRegionOk = true;
for (List<SAMRecordWrapper> sampleReads : readsList) {
//TODO: Don't allow these reads to remap to neighboring regions.
// NOTE(review): a negative maxReadsInRegion makes this condition true for
// every region (all regions skipped). Confirm negative is meant to mean
// "disable processing" rather than "no limit".
if (maxReadsInRegion < 0 || sampleReads.size() > this.maxReadsInRegion) {
Logger.info("Too many reads in %s: %d", region, sampleReads.size());
isRegionOk = false;
break;
}
}
int assembledContigCount = 0;
int nonAssembledContigCount = 0;
int juncPermCount = 0;
if (isRegionOk) {
List<String> bams = new ArrayList<String>(Arrays.asList(this.inputSams));
// Get reference sequence matching current region (pad by 2 read lengths on each side)
int chromosomeLength = c2r.getReferenceLength(region.getSeqname());
int refSeqStart = Math.max((int) region.getStart() - this.readLength*2, 1);
int refSeqLength = Math.min((int) region.getLength() + this.readLength*4, chromosomeLength-1);
String refSeq = c2r.getSequence(region.getSeqname(), refSeqStart, refSeqLength);
ContigAligner ssw = new ContigAligner(refSeq, region.getSeqname(), refSeqStart, this.readLength, minAnchorLen, maxAnchorMismatches);
List<ContigAligner> junctionAligners = new ArrayList<ContigAligner>();
// List<List<Feature>> junctionPermutations = JunctionUtils.combineJunctions(junctions, this.readLength);
List<List<Feature>> junctionPermutations = new ArrayList<List<Feature>>();
try {
junctionPermutations = JunctionUtils.combineJunctions(region, junctions, new HashSet<Feature>(), MAX_REGION_LENGTH, this.readLength);
} catch (TooManyJunctionPermutationsException e) {
// Fall back to no junction permutations when combination explodes
Logger.warn("TOO_MANY_POTENTIAL_JUNCTION_PERMUTATIONS: " + region.getDescriptor());
}
Logger.debug("NUM_JUNCTION_PERMUTATIONS:\t%d\t%s", junctionPermutations.size(), region);
if (junctionPermutations.size() > JunctionUtils.MAX_JUNCTION_PERMUTATIONS) {
Logger.warn("TOO_MANY_JUNCTION_PERMUTATIONS: " + region.getDescriptor() + "\t" + junctionPermutations.size());
} else {
juncPermCount = junctionPermutations.size();
// Build one junction-aware aligner per usable permutation
for (List<Feature> junctionPerm : junctionPermutations) {
ContigAligner aligner = getContigAlignerForJunctionPermutation(junctionPerm, region, chromosomeLength);
if (aligner != null) {
Logger.debug("JUNC_REF_SEQ:\t%s\t%d", region.getDescriptor(), aligner.ref.length());
junctionAligners.add(aligner);
}
}
// Assemble contigs
if (this.isSkipAssembly || region.getKmer() > this.readLength-15) {
Logger.debug("Skipping assembly of region: " + region.getDescriptor() + " - " + region.getKmer());
} else {
// Restrict # of contigs if junction count grows high.
// TODO: Paramaterize
int maxCombos = 1024;
int maxNumContigs = junctionPermutations.size() == 0 ? maxAssembledContigs : Math.min(maxAssembledContigs, maxCombos/junctionPermutations.size());
if (maxNumContigs != maxAssembledContigs) {
Logger.info("MAX_ASSEM_CONTIG\t%s\t%d", region, maxNumContigs);
}
List<ContigAlignerResult> results = new ArrayList<ContigAlignerResult>();
boolean shouldRetry = assemble(results, region, refSeq, bams, readsList, ssw, junctionAligners,
assemblerSettings.getMinNodeFrequncy(), assemblerSettings.getMinBaseQuality(),
assemblerSettings.getMinEdgeRatio(), junctions, chromosomeLength, maxNumContigs);
if (shouldRetry) {
Logger.debug("RETRY_ASSEMBLY: %s", region);
// Indel near edge of contig indicates that we may have a low coverage indel encountered.
// Try to reassemble using less stringent pruning to see if we can get greater coverage.
results.clear();
assemble(results, region, refSeq, bams, readsList, ssw, junctionAligners,
assemblerSettings.getMinNodeFrequncy()/2, assemblerSettings.getMinBaseQuality()/2,
assemblerSettings.getMinEdgeRatio()/2.0, junctions, chromosomeLength, maxNumContigs);
}
for (ContigAlignerResult sswResult : results) {
mappedContigs.put(new SimpleMapper(sswResult.getSequence(), maxMismatchRate), sswResult);
}
assembledContigCount = mappedContigs.size();
}
if (useSoftClippedReads || useObservedIndels || (knownVariants != null && knownVariants.size() > 0)) {
Logger.debug("Processing non-assembled contigs for region: [" + region + "]");
// Go through artificial contig generation using indels observed in the original reads
AltContigGenerator altContigGenerator = new AltContigGenerator(softClipParams[0], softClipParams[1], softClipParams[2], softClipParams[3],
useObservedIndels, useSoftClippedReads, useConsensusSeq, minMappingQuality);
Collection<String> altContigs = altContigGenerator.getAltContigs(readsList, c2r, readLength, junctionPermutations.size(), region, knownVariants);
nonAssembledContigCount = altContigs.size();
for (String contig : altContigs) {
// TODO: Check to see if this contig is already in the map before aligning
ContigAlignerResult sswResult = alignContig(region, contig, ssw, junctionAligners, junctions, chromosomeLength);
if (sswResult != null && sswResult != ContigAlignerResult.INDEL_NEAR_END) {
// Set as secondary for remap prioritization
sswResult.setSecondary(true);
// Store for read mapping
mappedContigs.put(new SimpleMapper(sswResult.getSequence(), maxMismatchRate), sswResult);
}
}
}
}
}
long stop = System.currentTimeMillis();
// Synchronized to keep the multi-field log line intact across worker threads
synchronized(this.getClass()) {
Logger.info("PROCESS_REGION_MSECS:\t%s\t%d\t%d\t%d\t%d", region.getDescriptor(), (stop-start),
assembledContigCount, nonAssembledContigCount, juncPermCount);
}
return mappedContigs;
}
/**
 * Pairs up junctions that could be spanned by a single read: for every ordered
 * pair where the first junction ends before the second starts and the distance
 * between them is at most maxDist.
 *
 * @param junctions candidate junctions
 * @param maxDist   maximum allowed distance between junction end and next start
 * @return all qualifying ordered junction pairs
 */
protected List<Pair<Feature, Feature>> pairJunctions(List<Feature> junctions, int maxDist) {
    List<Pair<Feature, Feature>> pairs = new ArrayList<Pair<Feature, Feature>>();
    for (Feature left : junctions) {
        for (Feature right : junctions) {
            boolean leftEndsFirst = left.getEnd() < right.getStart();
            boolean closeEnough = left.getEnd() + maxDist >= right.getStart();
            if (leftEndsFirst && closeEnough) {
                pairs.add(new Pair<Feature, Feature>(left, right));
            }
        }
    }
    return pairs;
}
/**
 * Loads target regions from a BED file. When the loaded regions carry no
 * preset kmer size (first region's kmer is 0), regions are collapsed and then
 * split into overlapping windows.
 *
 * @param regionsBed     path to the BED file
 * @param readLength     max observed read length (used for collapsing)
 * @param hasPresetKmers whether the BED file includes preset kmer sizes
 * @return the processed region list
 * @throws IOException on read failure
 */
static List<Feature> getRegions(String regionsBed, int readLength, boolean hasPresetKmers) throws IOException {
    List<Feature> regions = new RegionLoader().load(regionsBed, hasPresetKmers);
    boolean needsWindowing = !regions.isEmpty() && regions.get(0).getKmer() == 0;
    if (needsWindowing) {
        regions = RegionLoader.collapseRegions(regions, readLength);
        regions = splitRegions(regions);
    }
    return regions;
}
/**
 * Builds the region list from the SAM header's sequence dictionary when no
 * BED file was provided: one region per chromosome (minus those matching the
 * skip regex), collapsed and split into overlapping windows.
 *
 * @param readLength max observed read length (used for collapsing)
 * @param header     SAM header supplying the sequence dictionary
 * @return the processed region list
 * @throws IOException declared for parity with the BED-based path
 */
private List<Feature> getRegionsNoBed(int readLength, SAMFileHeader header) throws IOException {
    List<Feature> regions = new ArrayList<Feature>();
    for (SAMSequenceRecord seq : header.getSequenceDictionary().getSequences()) {
        if (chromosomeSkipRegex.matches(seq.getSequenceName())) {
            continue; // chromosome excluded by the configured skip regex
        }
        regions.add(new Feature(seq.getSequenceName(), 1, seq.getSequenceLength()));
    }
    List<Feature> collapsed = RegionLoader.collapseRegions(regions, readLength);
    return splitRegions(collapsed);
}
/**
 * Populates this.regions, either from the configured BED file or, when none
 * was given, from the first input SAM header. Logs the region count and, at
 * TRACE level, every region.
 *
 * @throws IOException on BED read failure
 */
private void loadRegions() throws IOException {
    if (regionsBed == null) {
        Logger.info("No target bed file specified. Gathering regions using SAM header");
        this.regions = getRegionsNoBed(readLength, this.samHeaders[0]);
    } else {
        Logger.info("Loading target regions from : " + regionsBed);
        this.regions = getRegions(regionsBed, readLength, hasPresetKmers);
    }
    Logger.info("Num regions: " + regions.size());
    if (Logger.LEVEL == Logger.Level.TRACE) {
        for (Feature r : regions) {
            Logger.trace("%s\t%d\t%d\t%d", r.getSeqname(), r.getStart(), r.getEnd(), r.getKmer());
        }
    }
}
/**
 * Loads splice junctions: first from the GTF junction file (replacing the
 * junction list) if configured, then appending observed junctions from the
 * junction file if configured. Logs counts along the way.
 *
 * @throws IOException on read failure
 */
private void loadJunctions() throws IOException {
    if (this.gtfJunctionFile != null) {
        this.junctions = JunctionUtils.loadJunctionsFromGtf(gtfJunctionFile);
    }
    if (this.junctionFile != null) {
        List<Feature> observed = new RegionLoader().load(junctionFile, false);
        Logger.info("Loaded " + observed.size() + " observed junctions");
        junctions.addAll(observed);
    }
    Logger.info("Total junctions input: " + junctions.size());
}
/** Sets the target regions BED file path used by loadRegions(). */
public void setRegionsBed(String bedFile) {
this.regionsBed = bedFile;
}
/**
 * Reads each input SAM/BAM header and samples up to 1,000,000 reads per file
 * to determine max read length, max mapping quality and (for paired-end
 * input) the observed insert length range. Also defaults the minimum contig
 * length from the read length when unset.
 *
 * @throws IOException on SAM read failure
 */
private void getSamHeaderAndReadLength() throws IOException {
Logger.info("Identifying header and determining read length");
this.samHeaders = new SAMFileHeader[this.inputSams.length];
for (int i=0; i<this.inputSams.length; i++) {
SamReader reader = SAMRecordUtils.getSamReader(inputSams[i]);
try {
samHeaders[i] = reader.getFileHeader();
samHeaders[i].setSortOrder(SAMFileHeader.SortOrder.unsorted);
Iterator<SAMRecord> iter = reader.iterator();
int cnt = 0;
// Sample at most the first million reads of each input
while ((iter.hasNext()) && (cnt < 1000000)) {
SAMRecord read = iter.next();
this.readLength = Math.max(this.readLength, read.getReadLength());
this.maxMapq = Math.max(this.maxMapq, read.getMappingQuality());
// Assumes aligner sets proper pair flag correctly
if ((isPairedEnd) && (read.getReadPairedFlag()) && (read.getProperPairFlag())) {
this.minInsertLength = Math.min(this.minInsertLength, Math.abs(read.getInferredInsertSize()));
this.maxInsertLength = Math.max(this.maxInsertLength, Math.abs(read.getInferredInsertSize()));
}
cnt += 1;
}
// Allow some fudge in insert length
// NOTE(review): this fudge is applied once per input BAM inside the loop,
// so with multiple inputs maxInsertLength grows by an extra 2*readLength
// per file -- confirm whether it was meant to run once after the loop.
minInsertLength = Math.max(minInsertLength - 2*readLength, 0);
maxInsertLength = maxInsertLength + 2*readLength;
} finally {
reader.close();
}
}
Logger.info("Min insert length: " + minInsertLength);
Logger.info("Max insert length: " + maxInsertLength);
Logger.info("Max read length is: " + readLength);
// Default the min contig length from the observed read length when unset
if (assemblerSettings.getMinContigLength() < 1) {
assemblerSettings.setMinContigLength(Math.max(readLength+1, MIN_CONTIG_LENGTH));
}
Logger.info("Min contig length: " + assemblerSettings.getMinContigLength());
}
/**
 * Splits any region longer than maxRegionLength + minRegionRemainder into
 * overlapping sub-regions; shorter regions pass through unchanged.
 *
 * @param regions            input regions
 * @param maxRegionLength    target maximum sub-region length
 * @param minRegionRemainder slack tolerated before splitting kicks in
 * @param regionOverlap      overlap between consecutive sub-regions
 * @return the (possibly expanded) region list
 */
static List<Feature> splitRegions(List<Feature> regions,
        int maxRegionLength, int minRegionRemainder, int regionOverlap) {
    List<Feature> result = new ArrayList<Feature>();
    for (Feature region : regions) {
        boolean smallEnough = region.getLength() <= maxRegionLength + minRegionRemainder;
        if (smallEnough) {
            result.add(region);
        } else {
            result.addAll(splitWithOverlap(region, maxRegionLength, minRegionRemainder, regionOverlap));
        }
    }
    return result;
}
/**
 * If any of the input list of features is greater than MAX_REGION_LENGTH
 * (plus MIN_REGION_REMAINDER slack), split them into multiple overlapping
 * features using the configured window size/overlap.
 *
 * @param regions input regions
 * @return regions with oversized entries replaced by overlapping splits
 */
public static List<Feature> splitRegions(List<Feature> regions) {
return splitRegions(regions, MAX_REGION_LENGTH, MIN_REGION_REMAINDER, REGION_OVERLAP);
}
/**
 * Splits a single region into overlapping sub-regions using the configured
 * window size/overlap constants.
 *
 * @param region region to split
 * @return overlapping sub-regions covering the input region
 */
public static List<Feature> splitWithOverlap(Feature region) {
return splitWithOverlap(region, MAX_REGION_LENGTH, MIN_REGION_REMAINDER, REGION_OVERLAP);
}
/**
 * Splits a region into windows of maxRegionLength, each overlapping the next
 * by regionOverlap bases. When a window would leave a remainder smaller than
 * minRegionRemainder, it is extended to the region end instead of producing a
 * tiny final window.
 *
 * @param region             region to split
 * @param maxRegionLength    window length
 * @param minRegionRemainder minimum acceptable final-window remainder
 * @param regionOverlap      overlap between consecutive windows
 * @return windows covering the region (same seqname as the input)
 */
static List<Feature> splitWithOverlap(Feature region, int maxRegionLength,
int minRegionRemainder, int regionOverlap) {
List<Feature> regions = new ArrayList<Feature>();
long pos = region.getStart();
// Seed end below region end so the loop executes at least once
long end = pos-1;
while (end < region.getEnd()) {
long start = pos;
end = pos + maxRegionLength;
// marker keeps the un-clamped end so the next window's start is
// computed from the nominal window, not the clamped one
long marker = end;
// If we're at or near the end of the region, stop at region end.
if (end > (region.getEnd() - minRegionRemainder)) {
end = region.getEnd();
}
pos = marker - regionOverlap;
regions.add(new Feature(region.getSeqname(), start, end));
}
return regions;
}
/**
 * Returns the kmer size ladder for a region: derived from the region's preset
 * kmer when one is set (> 0), otherwise the assembler settings' default sizes.
 *
 * @param region region whose kmer hint is consulted
 * @return kmer sizes to attempt during assembly
 */
int[] getKmers(Feature region) {
    int presetKmer = region.getKmer();
    if (presetKmer > 0) {
        return toKmerArray(presetKmer, readLength);
    }
    return assemblerSettings.getKmerSize();
}
/**
 * Expands a starting kmer size into a ladder of sizes stepping by 2, bounded
 * above (exclusive) by min(readLength - 5, MAX_KMER_SIZE). Returns an empty
 * array when the starting size is already at or past the bound.
 *
 * @param kmerSize   starting kmer size
 * @param readLength max observed read length
 * @return ascending kmer sizes: kmerSize, kmerSize+2, ... below the bound
 */
int[] toKmerArray(int kmerSize, int readLength) {
    int maxKmerSize = Math.min(readLength - 5, MAX_KMER_SIZE);
    List<Integer> ladder = new ArrayList<Integer>();
    for (int k = kmerSize; k < maxKmerSize; k += 2) {
        ladder.add(k);
    }
    int[] kmerSizes = new int[ladder.size()];
    for (int i = 0; i < kmerSizes.length; i++) {
        kmerSizes[i] = ladder.get(i);
    }
    return kmerSizes;
}
/**
 * Creates and configures a NativeAssembler for the given region, wiring in
 * the region-specific kmer ladder and the global assembler settings.
 *
 * @param region region whose kmer hint selects the kmer sizes
 * @return a fully configured assembler instance
 */
private NativeAssembler newAssembler(Feature region) {
NativeAssembler assem = new NativeAssembler();
assem.setTruncateOutputOnRepeat(true);
assem.setMaxPathsFromRoot(100000);
assem.setReadLength(readLength);
//assem.setKmer(assemblerSettings.getKmerSize());
// Region-specific kmer ladder (falls back to settings when no preset kmer)
assem.setKmer(getKmers(region));
assem.setMinKmerFrequency(assemblerSettings.getMinNodeFrequncy());
assem.setMinEdgeRatio(assemblerSettings.getMinEdgeRatio());
assem.setMinBaseQuality(assemblerSettings.getMinBaseQuality());
assem.setMaxNodes(assemblerSettings.getMaxNodes());
assem.setMinReadCandidateFraction(assemblerSettings.getMinReadCandidateFraction());
assem.setMaxAverageDepth(assemblerSettings.getMaxAverageDepth());
assem.setSkipUnmappedTrigger(this.isSkipUnmappedTrigger);
return assem;
}
/**
 * Marks a file for deletion at JVM exit, unless temp-file retention
 * (--keep-tmp style option) is enabled.
 *
 * @param file temp file or directory to clean up on exit
 */
private void deleteOnExit(File file) {
if (!isKeepTmp) {
file.deleteOnExit();
}
}
/**
 * One-time initialization: resolves the temp directory, initializes the
 * aligner scoring and chromosome-skip regex, creates a private temp
 * directory, loads the native libraries into it, starts the thread manager,
 * and loads known variants when an input VCF was provided.
 *
 * @return path of the created temp directory
 * @throws IOException on temp directory creation failure
 */
private String init() throws IOException {
// Use the configured tmp dir if provided, otherwise the JVM default
if (tmpDir == null) {
tmpDir = System.getProperty("java.io.tmpdir");
} else {
System.setProperty("java.io.tmpdir", tmpDir);
}
this.chromosomeSkipRegex = new ChromosomeRegex(chromosomesToSkipRegex);
ContigAligner.init(swScoring);
// Temp dir perms: owner rwx, group rx (group needs traversal for shared systems)
Set<PosixFilePermission> perms = new HashSet<PosixFilePermission>();
perms.add(PosixFilePermission.OWNER_READ);
perms.add(PosixFilePermission.OWNER_WRITE);
perms.add(PosixFilePermission.OWNER_EXECUTE);
perms.add(PosixFilePermission.GROUP_READ);
perms.add(PosixFilePermission.GROUP_EXECUTE);
Path tempDir = Files.createTempDirectory("abra2_" + UUID.randomUUID(), PosixFilePermissions.asFileAttribute(perms));
deleteOnExit(tempDir.toFile());
Logger.info("Using temp directory: " + tempDir.toString());
// Extract and load native libraries into the temp directory
new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.ABRA, false);
// new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.SSW, false);
// new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.SSW_JNI, false);
new NativeLibraryLoader().load(tempDir.toString(), NativeLibraryLoader.DEFLATOR, true);
threadManager = new ThreadManager(numThreads);
if (inputVcf != null) {
this.knownVariants = Variant.loadFromFile(inputVcf);
}
return tempDir.toString();
}
/**
 * Returns the known variants for a chromosome, or an empty list when no
 * variants were loaded or none exist for that chromosome. Never returns null.
 *
 * @param chromosome chromosome name to look up
 * @return known variants for the chromosome (possibly empty, never null)
 */
public List<Variant> getKnownVariants(String chromosome) {
    if (knownVariants == null) {
        return Collections.emptyList();
    }
    List<Variant> variants = knownVariants.get(chromosome);
    if (variants == null) {
        return Collections.emptyList();
    }
    return variants;
}
/** Sets the path to the reference FASTA. */
public void setReference(String reference) {
this.reference = reference;
}
/** Sets the assembler configuration used when building assemblers. */
public void setAssemblerSettings(AssemblerSettings settings) {
this.assemblerSettings = settings;
}
/** Sets the worker thread count used by the thread manager. */
public void setNumThreads(int numThreads) {
this.numThreads = numThreads;
}
/** Returns the reference-comparison helper. */
public CompareToReference2 getC2r() {
return this.c2r;
}
/** Returns the configured minimum mapping quality threshold. */
public int getMinMappingQuality() {
return this.minMappingQuality;
}
/** Returns the max observed insert length (after the fudge adjustment). */
public int getMaxInsertLength() {
return this.maxInsertLength;
}
/** Returns the min observed insert length (after the fudge adjustment). */
public int getMinInsertLength() {
return this.minInsertLength;
}
/** Overrides the max insert length (primarily for testing). */
public void setMaxInsertLength(int maxInsertLen) {
this.maxInsertLength = maxInsertLen;
}
/** Overrides the min insert length (primarily for testing). */
public void setMinInsertLength(int minInsertLen) {
this.minInsertLength = minInsertLen;
}
/** Delegates read filtering to SAMRecordUtils, honoring paired-end mode. */
boolean isFiltered(SAMRecord read) {
return SAMRecordUtils.isFiltered(isPairedEnd, read);
}
/**
 * Reads the application version from the Maven pom.properties packaged in
 * the jar. Returns "unknown" when the resource is missing or unreadable.
 *
 * Fixes: stream was never closed when Properties.load threw (resource leak);
 * null resource URL (e.g. running from an IDE classpath) previously surfaced
 * as a NullPointerException — both handled via a guard + try-with-resources.
 *
 * @return version string, or "unknown" on any failure
 */
private static String getVersion() {
    String version = "unknown";
    String metaFile = "/META-INF/maven/abra2/abra2/pom.properties";
    Properties prop = new Properties();
    try {
        URL url = NativeLibraryLoader.class.getResource(metaFile);
        if (url == null) {
            // Not running from the packaged jar; keep the "unknown" default
            Logger.error("Error reading version from pom.properties");
        } else {
            // try-with-resources closes the stream even if load() throws
            try (InputStream input = url.openStream()) {
                prop.load(input);
            }
            version = prop.getProperty("version");
        }
    } catch (Exception e) {
        e.printStackTrace();
        Logger.error("Error reading version from pom.properties");
    }
    return version;
}
/**
 * Reconstructs the invoking command line for logging: the jar path (with the
 * "file:" URL prefix stripped) followed by each argument, space separated.
 *
 * Improvement: StringBuilder instead of StringBuffer — no synchronization is
 * needed in this single-threaded local use.
 *
 * @param args the program arguments
 * @return "jarPath arg1 arg2 ..." (jar path empty if indeterminable)
 */
private static String getCommandLine(String[] args) {
    String jar = "";
    CodeSource cs = Abra.class.getProtectionDomain().getCodeSource();
    if (cs != null) {
        jar = cs.getLocation().toString();
        if (jar.startsWith("file:")) {
            jar = jar.replaceFirst("file:", "");
        }
    }
    StringBuilder cl = new StringBuilder(jar);
    for (String arg : args) {
        cl.append(' ').append(arg);
    }
    return cl.toString();
}
/**
 * Program entry logic: parses options, configures a ReAligner from them,
 * runs realignment, and logs elapsed time. Exits with status -1 on invalid
 * options.
 *
 * @param args command-line arguments
 * @throws Exception propagated from realignment
 */
public static void run(String[] args) throws Exception {
String version = getVersion();
Logger.info("Abra version: " + version);
String cl = getCommandLine(args);
Logger.info("Abra params: [" + cl + "]");
ReAlignerOptions options = new ReAlignerOptions();
options.parseOptions(args);
if (options.isValid()) {
Logger.setLevel(options.getLoggerLevel());
// Assembler configuration from command-line options
AssemblerSettings assemblerSettings = new AssemblerSettings();
assemblerSettings.setKmerSize(options.getKmerSizes());
assemblerSettings.setMinContigLength(options.getMinContigLength());
assemblerSettings.setMinNodeFrequncy(options.getMinNodeFrequency());
assemblerSettings.setMinBaseQuality(options.getMinBaseQuality());
assemblerSettings.setMinReadCandidateFraction(options.getMinReadCandidateFraction());
assemblerSettings.setMaxAverageDepth(options.getMaxAverageRegionDepth());
assemblerSettings.setMinEdgeRatio(options.getMinEdgeRatio());
assemblerSettings.setMaxNodes(options.getMaxNodes());
// Realigner configuration (direct field wiring from options)
ReAligner realigner = new ReAligner();
realigner.setReference(options.getReference());
realigner.setRegionsBed(options.getTargetRegionFile());
realigner.setAssemblerSettings(assemblerSettings);
realigner.setNumThreads(options.getNumThreads());
realigner.isPairedEnd = options.isPairedEnd();
realigner.minMappingQuality = options.getMinimumMappingQuality();
realigner.maxMismatchRate = options.getMaxMismatchRate();
realigner.maxReadsInRegion = options.getMaxReadsInRegion();
realigner.hasPresetKmers = options.hasPresetKmers();
realigner.isSkipAssembly = options.isSkipAssembly();
realigner.isSkipUnmappedTrigger = options.isSkipUnmappedAssemblyTrigger();
realigner.useObservedIndels = options.useObservedIndels();
realigner.shouldSort = options.shouldSort();
realigner.maxRealignDist = options.getMaxRealignDist();
realigner.maxAssembledContigs = options.getMaxAssembledContigs();
realigner.useConsensusSeq = options.useConsensusSequence();
realigner.isKeepTmp = options.isKeepTmp();
realigner.tmpDir = options.getTmpDir();
realigner.useSoftClippedReads = options.useSoftClippedReads();
realigner.junctionFile = options.getJunctionFile();
realigner.gtfJunctionFile = options.getGtfJunctionFile();
realigner.contigFile = options.getContigFile();
realigner.swScoring = options.getSmithWatermanScoring();
realigner.softClipParams = options.getSoftClipParams();
realigner.maxCachedReads = options.getMaxCachedReads();
realigner.finalCompressionLevel = options.getCompressionLevel();
realigner.minAnchorLen = options.getContigAnchor()[0];
realigner.maxAnchorMismatches = options.getContigAnchor()[1];
realigner.chromosomesToSkipRegex = options.getChromosomesToSkipRegex();
realigner.shouldUnsetDuplicates = options.shouldUnsetDuplicates();
realigner.inputVcf = options.getInputVcf();
realigner.shouldCreateIndex = options.shouldCreateIndex();
realigner.shouldUseGkl = options.shouldUseGkl();
realigner.ambiguousMapq = options.getAmbiguousMapq();
realigner.maxReadNoise = options.getMaxReadNoise();
realigner.maxReadsInRamForSort = options.getMaxReadsInRamForSort();
// Global windowing parameters
// NOTE(review): both MIN_REGION_REMAINDER and REGION_OVERLAP are assigned
// from getWindowOverlap() -- confirm MIN_REGION_REMAINDER is not meant to
// come from a separate option.
MAX_REGION_LENGTH = options.getWindowSize();
MIN_REGION_REMAINDER = options.getWindowOverlap();
REGION_OVERLAP = options.getWindowOverlap();
realigner.cl = cl.toString();
realigner.version = version;
long s = System.currentTimeMillis();
realigner.reAlign(options.getInputFiles(), options.getOutputFiles());
long e = System.currentTimeMillis();
Logger.info("Elapsed seconds: " + (e - s) / 1000);
} else {
System.exit(-1);
}
}
/**
 * Developer entry point.
 * NOTE(review): command-line args are ignored; a hard-coded developer
 * command line (local paths under /home/lmose) is used instead -- presumably
 * a debugging harness, not the production entry point.
 */
public static void main(String[] args) throws Exception {
// String inp = "--in /home/lmose/dev/ayc/opt/mem/test_tumor.bam --kmer 43 --mc-mapq 25 --mcl 101 --mcr -1.0 --mnf 2 --umnf 2 --mpc 50000 --out /home/lmose/dev/ayc/opt/mem/test_tumor.abra.bam --ref /home/lmose/reference/test/test.fa --targets /home/lmose/dev/ayc/opt/mem/test.gtf --threads 2 --working /home/lmose/dev/ayc/opt/mem/work1 --mur 50000000 --no-unalign --mbq 20 --rcf .02";
String inp = "--in /home/lmose/dev/ayc/opt/mem/test_tumor.bam --kmer 43 --out /home/lmose/dev/ayc/opt/mem/test_tumor.abra3.bam --ref /home/lmose/reference/test/test.fa --targets /home/lmose/dev/ayc/opt/mem/test2.bed --threads 2 --working /home/lmose/dev/ayc/opt/mem/work3";
run(inp.split("\\s+"));
}
} |
package cineCheck;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.core.MediaType;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
/**
 * Handles operations on cineplex.de booking pages: probing performance IDs
 * and listing all shows for a movie.
 * @author vincentvonhof
 */
public class ChkR {
// Vocabulary used while probing.
public char[] vocab = {'A','B','C','D','E','F','1','2','3','4','5','6','7','8','9','0'};
private static final String httpPrefix = "https:
private static final String httpCheck = "cineplex.de/booking-init";
private static final String httpCheckAlt = "booking.cineplex.de";
private static final String jsonPrefix = "https://booking.cineplex.de/TicketBoxXNG/booking/init.json?performanceId=";
private static final String securityChallengeAndResponse = "&c=110dd978-454d-4fc1-9fd9-d88a4d04d06d&r=00340d28223828499d252e984a2be75e8093e13077cb0e60934e22d131d2768d";
// ms to wait between GETs
private static final int backoff = 20;
private static int varyingPortionLength = 2;
public ChkR() {}
public ChkR(char[] vocabulary) {
vocab = vocabulary;
}
/**
* List all Shows
* @param url HTTP site of booking for one of the shows for that movie
*/
public List<BookingResponse> getAllListingsForMovie(String url) {
return getAllListingsForMovie(url, varyingPortionLength);
}
/**
* List all Shows
* @param url HTTP site of booking for one of the shows for that movie
* @param prefixLength amount of varying portion of the shows ID
* @throws IOException thrown upon network error
*/
public List<BookingResponse> getAllListingsForMovie(String url, int prefixLength) {
// Check URL validity
if (!(url.toLowerCase().contains(httpCheck.toLowerCase()) || url.toLowerCase().contains(httpCheckAlt.toLowerCase()))) {
System.out.println("URL should entail " + httpPrefix);
return null;
}
// Prepare JSON deserializer
Gson gson = prepareGson();
// Derive site and performance IDs from URL
int beginPos = url.indexOf("/site/", 0) + "/site/".length();
int endPos = beginPos + 2;
String site = url.substring(beginPos, endPos);
beginPos = url.indexOf("/performance/", 0) + "/performance/".length();
endPos = beginPos + 18;
String performance = url.substring(beginPos, endPos);
if (site.length() == 0 || performance.length() == 0) {
System.out.println("Could not derive City or Performance IDs");
return null;
}
// Derive postfix
String postfixPerfID = performance.substring(varyingPortionLength);
String andSiteID = "&siteId=" + site;
// Get ID from film that interests us from server
Client client = ClientBuilder.newClient();
Invocation.Builder invocationBuilder = client
.target(jsonPrefix + performance + "&siteId=" + site + securityChallengeAndResponse)
.request(MediaType.APPLICATION_JSON);
BookingResponse bookingResponse;
try {
bookingResponse = gson.fromJson(
invocationBuilder.get(String.class), BookingResponse.class);
} catch (javax.ws.rs.InternalServerErrorException ex500) {
// This URL that we were supplied is pointing a show not recognized by the server. Possibly it's an old listing.
System.out.println("The server has no entry for the supplied URL. Is the link you provided currently live?");
return null;
}
int kkFilmId = bookingResponse.kkFilmId;
// Calculate all possible combinations
Variants variants = new Variants(vocab);
List<char[]> combinations = variants.generateCombinations(prefixLength);
System.out.println("Trying " + combinations.size() +" ID combinations");
List<BookingResponse> bookingsInSystem = new ArrayList<BookingResponse>(combinations.size());
int countHit = 0, countMiss = 0, countWrongMovie = 0;
for (char[] combination : combinations) {
invocationBuilder = client
.target(jsonPrefix + new String(combination) + postfixPerfID + andSiteID + securityChallengeAndResponse)
.request(MediaType.APPLICATION_JSON);
// Try our request, if there is no hit we get a HTTP 500 Internal Server Error which we can ignore
try {
bookingResponse = gson.fromJson(
invocationBuilder.get(String.class), BookingResponse.class);
if (bookingResponse.kkFilmId == kkFilmId) {
bookingResponse.mURL = httpPrefix + "/site/" + site + "/performance/" + new String(combination) + postfixPerfID;
bookingsInSystem.add(bookingResponse);
countHit++;
} else {
countWrongMovie++;
}
} catch (InternalServerErrorException ex) {
countMiss++;
}
// Print a refreshing short status report to the console
int total = countHit + countMiss + countWrongMovie;
String statusText =
"Hit: " + countHit +
" Miss: " + countMiss +
" WrongMovie: " + countWrongMovie +
". Total:" + total + "/" +combinations.size();
System.out.print(statusText + "\r");
// Let's not be too aggressive
try {Thread.sleep(backoff);} catch (InterruptedException e) {e.printStackTrace();}
}
if (bookingsInSystem.size() == 0) {
System.out.println("No bookings found for URL");
return null;
}
// Sort results by date
Collections.sort(bookingsInSystem, new Comparator<BookingResponse>() {
public int compare(BookingResponse o1, BookingResponse o2) {
return o1.date.compareTo(o2.date);
}
});
// Print a summary to the console
for(BookingResponse booking : bookingsInSystem) {
System.out.println(booking);
}
System.out.println("Tried " + countMiss +" not exisiting booking IDs, and " + countWrongMovie + " entries for a different movie");
return bookingsInSystem;
}
/**
* Initialize gson
* @return Gson instance for this case
*/
private Gson prepareGson() {
GsonBuilder gsonBuilder = new GsonBuilder();
gsonBuilder.setDateFormat("d.M.yyyy hh:mm");
return gsonBuilder.serializeNulls().create();
}
/**
*
* @param args <URL> (optional <Output Destination>)
*/
public static void main(String[] args) {
List<BookingResponse> validBookings;
if (args == null || args.length == 0 || args[0] == null || args[0].length() == 0) {
System.out.println("Please supply a URL of a Cineplex.de screening listing");
} else {
// Get all valid listings
validBookings = new ChkR().getAllListingsForMovie(args[0]);
if (validBookings != null && validBookings.size() > 0) {
PrintWriter writer = null;
String URI = "BookingsFor" + validBookings.get(0).filmTitle.replace(" ", "").replace(".", "").replace("\\","").replace("/","") + ".txt";
// If we were supplied a destination, we'll write the results to a file there
if (args.length >= 2 && args[1] != null && args[1].length() != 0 ) {
URI = args[1].concat(URI);
}
try {
writer = new PrintWriter(URI, "UTF-8");
for (BookingResponse booking : validBookings) {
writer.println(booking);
}
System.out.println("Results written to " + URI);
}
catch (IOException ex) {
System.out.println("Results could not be written to the specified destination");
} finally {
try {if (writer != null) writer.close();} catch (Exception ex) {}
}
}
}
}
} |
package space.pxls;
import com.google.gson.Gson;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.xnio.XnioWorker;
import space.pxls.data.DBChatMessage;
import space.pxls.data.DBPixelPlacement;
import space.pxls.data.DBRollbackPixel;
import space.pxls.data.Database;
import space.pxls.server.*;
import space.pxls.user.Chatban;
import space.pxls.user.Role;
import space.pxls.user.User;
import space.pxls.user.UserManager;
import space.pxls.util.*;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.Error;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.TimeUnit;
public class App {
private static Gson gson;
private static Config config;
private static Database database;
private static UserManager userManager;
private static Logger pixelLogger;
private static Logger shadowbannedPixelLogger;
private static Logger appLogger;
private static String canvasCode;
private static int width;
private static int height;
private static byte[] board;
private static byte[] heatmap;
private static byte[] placemap;
private static byte[] virginmap;
private static boolean havePlacemap;
private static PxlsTimer mapSaveTimer;
private static PxlsTimer mapBackupTimer;
private static UndertowServer server;
private static String cachedWhoamiOrigin = null;
public static void main(String[] args) {
gson = new Gson();
loadConfig();
pixelLogger = LogManager.getLogger("Pixels");
shadowbannedPixelLogger = LogManager.getLogger("ShadowbannedPixels");
appLogger = LogManager.getLogger("App");
canvasCode = config.getString("canvascode");
width = config.getInt("board.width");
height = config.getInt("board.height");
board = new byte[width * height];
heatmap = new byte[width * height];
placemap = new byte[width * height];
virginmap = new byte[width * height];
if (!loadMap()) {
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
board[x + width * y] = getDefaultColor(x, y);
}
}
}
loadHeatmap();
loadPlacemap();
loadVirginmap();
database = new Database();
userManager = new UserManager();
new Thread(() -> {
Scanner s = new Scanner(System.in);
while (true) {
handleCommand(s.nextLine());
}
}).start();
new Timer().schedule(new SessionTimer(), 0, 1000 * 3600); // execute once every hour
new Timer().schedule(new DatabaseTimer(), 0, 1000 * 60 * 2);
int heatmap_timer_cd = (int) App.getConfig().getDuration("board.heatmapCooldown", TimeUnit.SECONDS);
new Timer().schedule(new HeatmapTimer(), 0, heatmap_timer_cd * 1000 / 256);
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
saveMapBackup();
saveMapForce();
}));
server = new UndertowServer(config.getInt("server.port"));
server.start();
new Timer().schedule(new TimerTask(){
@Override
public void run() {
tickStackedPixels();
}
}, 0, 5000);
try {
Path backupsDir = getStorageDir().resolve("backups/");
if (!Files.exists(backupsDir)) {
if (!backupsDir.toFile().mkdirs()) {
getLogger().error("Failed to make backup dirs");
} else {
getLogger().info(String.format("Created missing backups dir at %s%n", backupsDir.toAbsolutePath().normalize()));
}
}
} catch (Exception e) {
getLogger().error(new Error("Failed to create backup directories", e));
}
saveMap();
}
private static void handleCommand(String line) {
try {
String[] token = line.split(" ");
if (token[0].equalsIgnoreCase("reload")) {
cachedWhoamiOrigin = null;
loadConfig();
} else if (token[0].equalsIgnoreCase("save")) {
saveMapForce();
saveMapBackup();
} else if (token[0].equalsIgnoreCase("role")) {
User user = userManager.getByName(token[1]);
if (user != null) {
Role role = Role.valueOf(token[2].toUpperCase());
user.setRole(role);
database.setUserRole(user, role);
database.adminLogServer("Set "+user.getName()+"'s role to "+role.name());
System.out.println("Set " + user.getName() + "'s role to " + role.name());
} else {
System.out.println("Cannot find user " + token[1]);
}
} else if (token[0].equalsIgnoreCase("alert")) {
String rest = line.substring(token[0].length() + 1).trim();
server.broadcast(new ServerAlert(rest));
} else if (token[0].equalsIgnoreCase("ban")) {
if (token.length < 3) {
System.out.println("Missing reason");
return;
}
User user = userManager.getByName(token[1]);
if (user != null) {
String reason = line.substring(token[0].length() + token[1].length() + 2).trim();
user.ban(24 * 60 * 60, reason, null);
database.adminLogServer(String.format("ban %s with reason: %s", user.getName(), reason));
System.out.println("Banned " + user.getName() + " for 24 hours.");
} else {
System.out.println("Cannot find user " + token[1]);
}
} else if (token[0].equalsIgnoreCase("permaban")) {
if (token.length < 3) {
System.out.println("Missing reason");
return;
}
User user = userManager.getByName(token[1]);
if (user != null) {
String reason = line.substring(token[0].length() + token[1].length() + 2).trim();
user.permaban(reason, null);
database.adminLogServer(String.format("permaban %s with reason: %s", user.getName(), reason));
System.out.println("Permabanned " + user.getName());
} else {
System.out.println("Cannot find user " + token[1]);
}
} else if (token[0].equalsIgnoreCase("shadowban")) {
if (token.length < 3) {
System.out.println("Missing reason");
return;
}
User user = userManager.getByName(token[1]);
if (user != null) {
String reason = line.substring(token[0].length() + token[1].length() + 2).trim();
user.shadowban(reason, null);
database.adminLogServer(String.format("shadowban %s with reason: %s", user.getName(), reason));
System.out.println("Shadowbanned " + user.getName());
} else {
System.out.println("Cannot find user " + token[1]);
}
} else if (token[0].equalsIgnoreCase("unban")) {
if (token.length < 3) {
System.out.println("Missing reason");
return;
}
User user = userManager.getByName(token[1]);
if (user != null) {
user.unban(null, line.substring(token[0].length() + token[1].length() + 2).trim());
database.adminLogServer("unban "+user.getName());
System.out.println("Unbanned " + user.getName() + ".");
} else {
System.out.println("Cannot find user " + token[1]);
}
} else if (token[0].equalsIgnoreCase("nuke")) {
int fromX = Integer.parseInt(token[1]);
int fromY = Integer.parseInt(token[2]);
int toX = Integer.parseInt(token[3]);
int toY = Integer.parseInt(token[4]);
byte toColor = (byte)(token.length >= 6 ? Integer.parseInt(token[5]) : 0xFF);
nuke(fromX, fromY, toX, toY, (byte) 0xFF, toColor);
} else if (token[0].equalsIgnoreCase("replace")) {
int fromX = Integer.parseInt(token[1]);
int fromY = Integer.parseInt(token[2]);
int toX = Integer.parseInt(token[3]);
int toY = Integer.parseInt(token[4]);
byte fromColor = (byte) Integer.parseInt(token[5]);
byte toColor = (byte) (token.length >= 7 ? Integer.parseInt(token[6]) : 0xFF);
nuke(fromX, fromY, toX, toY, fromColor, toColor);
} else if (token[0].equalsIgnoreCase("cons")) {
if (token.length > 1) {
if (token[1].equalsIgnoreCase("authed") || token[1].equalsIgnoreCase("authd")) {
System.out.println("Authenticated connections count: " + server.getAuthedUsers().size());
} else {
System.out.println("All connections count: " + server.getPacketHandler().getNumAllCons());
System.out.println("Authenticated connections count: " + server.getAuthedUsers().size());
}
} else {
System.out.println("All connections count: " + server.getPacketHandler().getNumAllCons());
System.out.println("Authenticated connections count: " + server.getAuthedUsers().size());
}
} else if (token[0].equalsIgnoreCase("users")) {
System.out.println("Number of authenticated users: " + server.getAuthedUsers().size());
for (User user : server.getAuthedUsers().values()) {
System.out.println(String.format("[%d] %s (%s) (num connections: %d)", user.getId(), user.getName(), user.getRole().name(), user.getConnections().size()));
}
} else if (token[0].equalsIgnoreCase("stack")) {
//stack USERNAME[ set AMOUNT]
if (token.length > 1) {
User user = userManager.getByName(token[1]);
if (user != null) {
if (token.length == 2) {
System.out.printf("User %s has %d stacked%n", user.getName(), user.getStacked());
} else {
if (token[2].equalsIgnoreCase("set")) {
try {
Integer toSet = Integer.valueOf(token[3]);
user.setStacked(toSet);
server.getPacketHandler().sendAvailablePixels(user, "override");
} catch (NumberFormatException ignored) {
System.out.printf("Invalid value: %s%n", token[3]);
}
}
}
} else {
System.out.printf("Unknown user: %s%n", token[1]);
}
}
} else if (token[0].equalsIgnoreCase("cd-override")) {
//cd-override list|USERNAME[ STATE]
//STATE=on|off
if (token.length > 1) {
if (token[1].equalsIgnoreCase("list")) {
StringBuilder sb = new StringBuilder();
userManager.getAllUsersByToken().forEach((s, user) -> {
if (user.isOverridingCooldown()) sb.append(" ").append(user.getName()).append('\n');
});
System.out.println(sb);
} else if (token[1].equalsIgnoreCase("help")) {
System.out.println("cd-override list|USERNAME[ STATE]");
System.out.println("STATE=on|off");
} else {
User user = getUserManager().getByName(token[1]);
if (user == null) {
System.out.printf("Unknown user: %s%n", token[1]);
} else {
if (token.length >= 3) {
if (token[2].equalsIgnoreCase("on") || token[2].equalsIgnoreCase("off")) {
user.setOverrideCooldown(token[2].equalsIgnoreCase("on"));
System.out.printf("Updated %s's cd-override state to %s%n", user.getName(), token[2].toLowerCase());
} else {
System.out.printf("Invalid state: %s%n", token[2]);
}
} else {
System.out.printf("User's CD Override state is: %s%n", user.isOverridingCooldown() ? "on" : "off");
}
}
}
} else {
System.out.println("cd-override list|USERNAME[ STATE]");
System.out.println("STATE=on|off");
}
} else if (token[0].equalsIgnoreCase("broadcast")) {
//broadcast MESSAGE
if (token.length > 1) {
App.getServer().getPacketHandler().handleChatMessage(null, null, new ClientChatMessage(line.substring(token[0].length() + 1)));
}
} else if (token[0].equalsIgnoreCase("ChatBan")) {
if (token.length > 4) {
User user = getUserManager().getByName(token[1]);
if (user == null) System.out.printf("Unknown user: %s%n", token[1]);
else {
Integer banLength = 600;
try {
banLength = Integer.valueOf(token[2]);
} catch (Exception e) {
System.out.printf("Failed to parse BAN_LENGTH '%s'. Defaulting to 600", token[2]);
}
Boolean messageRemoval = token[3].equals("1") || token[3].equalsIgnoreCase("yes") || token[3].equalsIgnoreCase("true");
String reason = line.substring(token[0].length() + token[1].length() + token[2].length() + token[3].length() + 4);
Chatban.TEMP(user, null, System.currentTimeMillis() + banLength * 1000L, reason, messageRemoval, Integer.MAX_VALUE).commit();
}
} else {
System.out.println("chatban USER BAN_LENGTH MESSAGE_REMOVAL REASON\n USER: The name of the user\n BAN_LENGTH: The length in seconds of the chatban. For permas, see 'PermaChatBan' command.\n MESSAGE_REMOVAL: Boolean (1|0) of whether or not to purge the user from chat.\n REASON: The reason for the chatban. Will be displayed to the user");
}
} else if (token[0].equalsIgnoreCase("PermaChatBan")) {
if (token.length > 3) {
User user = userManager.getByName(token[1]);
if (user == null) System.out.printf("Unknown user: %s%n", token[1]);
else {
Boolean messageRemoval = token[2].equals("1") || token[2].equalsIgnoreCase("yes") || token[2].equalsIgnoreCase("true");
String reason = line.substring(token[0].length() + token[1].length() + token[2].length() + 3);
Chatban.PERMA(user, null, reason, messageRemoval, Integer.MAX_VALUE).commit();
}
} else {
System.out.println("PermaChatBan USER MESSAGE_REMOVAL REASON\n USER: The name of the user\n MESSAGE_REMOVAL: Boolean (1|0) of whether or not to purge the user from chat.\n REASON: The reason for the chatban. Will be displayed to the user");
}
} else if (token[0].equalsIgnoreCase("UnChatBan")) {
if (token.length > 2) {
User user = userManager.getByName(token[1]);
if (user == null) System.out.printf("Unknown user: %s%n", token[1]);
else {
Chatban.UNBAN(user, null, line.substring(token[0].length() + token[1].length() + 2)).commit();
}
} else {
System.out.println("UnChatBan USER REASON");
}
} else if (token[0].equalsIgnoreCase("ChatPurge")) {
if (token.length > 2) {
User user = userManager.getByName(token[1]);
if (user == null) System.out.printf("Unknown user: %s%n", token[1]);
else {
Integer toPurge = Integer.MAX_VALUE;
String reason = "";
try {
toPurge = Integer.valueOf(token[2]);
} catch (Exception e) {
System.out.printf("Failed to parse '%s' as a number, defaulting to %s%n", token[2], toPurge);
}
if (token.length >= 4) {
reason = line.substring(token[0].length() + token[1].length() + token[2].length() + 3);
} else {
reason = "";
}
if (toPurge > 0) {
App.getDatabase().handlePurge(user, null, toPurge, reason, true);
} else {
System.out.printf("Invalid toPurge. Should be >0, got %s%n", toPurge);
}
}
} else {
System.out.println("ChatPurge USER [AMOUNT ]REASON");
}
} else if (token[0].equalsIgnoreCase("cf")) {
String z = line.substring(token[0].length() + 1);
System.out.printf("running chat filter against '%s'%nResult: %s%n", z, ChatFilter.getInstance().filter(z, true));
} else if (token[0].equalsIgnoreCase("reloadUsers")) {
System.out.println("Working... (may cause some lag)");
userManager.reload();
System.out.println("Done.");
} else if (token[0].equalsIgnoreCase("flagRename")) {
//flagRename USERNAME [1|0]
if (token.length >= 2) {
boolean flagState = token.length < 3 || (token[2].equalsIgnoreCase("1") || token[2].equalsIgnoreCase("true") || token[2].equalsIgnoreCase("yes") || token[2].equalsIgnoreCase("y"));
User toFlag = userManager.getByName(token[1]);
if (toFlag != null) {
System.out.printf("Flagging %s as %s%n", toFlag.getName(), flagState);
toFlag.setRenameRequested(flagState);
} else {
System.out.println("User doesn't exist");
}
} else {
System.out.println("flagRename USERNAME [1|0]");
}
} else if (token[0].equalsIgnoreCase("setName") || token[0].equalsIgnoreCase("updateUsername")) {
//setName USERNAME NEW_USERNAME
if (token.length >= 3) {
User toRename = userManager.getByName(token[1]);
if (toRename != null) {
toRename.setRenameRequested(false);
if (toRename.updateUsername(token[2], true)) {
App.getServer().send(toRename, new ServerRenameSuccess(toRename.getName()));
System.out.println("Name udpated");
} else {
System.out.println("Failed to update name (function returned false. name taken or an error occurred)");
}
} else {
System.out.println("User doesn't exist");
}
} else {
System.out.printf("%s USERNAME NEW_USERNAME%n", token[0]);
}
}
} catch (RuntimeException e) {
e.printStackTrace();
}
}
private static int stackMultiplier;
private static int stackMaxStacked;
private static void loadConfig() {
config = ConfigFactory.parseFile(new File("pxls.conf")).withFallback(ConfigFactory.load());
config.checkValid(ConfigFactory.load());
RateLimitFactory.registerBucketHolder(ClientUndo.class, new RateLimitFactory.BucketConfig(((int) App.getConfig().getDuration("server.limits.undo.time", TimeUnit.SECONDS)), App.getConfig().getInt("server.limits.undo.count")));
RateLimitFactory.registerBucketHolder(DBChatMessage.class, new RateLimitFactory.BucketConfig(((int) App.getConfig().getDuration("server.limits.chat.time", TimeUnit.SECONDS)), App.getConfig().getInt("server.limits.chat.count")));
RateLimitFactory.registerBucketHolder("http:discordName", new RateLimitFactory.BucketConfig((int) App.getConfig().getDuration("server.limits.discordNameChange.time", TimeUnit.SECONDS), App.getConfig().getInt("server.limits.discordNameChange.count")));
mapSaveTimer = new PxlsTimer(config.getDuration("board.saveInterval", TimeUnit.SECONDS));
mapBackupTimer = new PxlsTimer(config.getDuration("board.backupInterval", TimeUnit.SECONDS));
stackMultiplier = App.getConfig().getInt("stacking.cooldownMultiplier");
stackMaxStacked = App.getConfig().getInt("stacking.maxStacked");
ChatFilter.getInstance().reload();
try {
Files.deleteIfExists(getStorageDir().resolve("index_cache.html"));
} catch (IOException e) {
// do nothing
}
}
public static int getStackMultiplier() {
return stackMultiplier;
}
public static int getStackMaxStacked() {
return stackMaxStacked;
}
public static Gson getGson() {
return gson;
}
public static Config getConfig() {
return config;
}
public static String getCanvasCode() {
return canvasCode;
}
public static int getWidth() {
return width;
}
public static int getHeight() {
return height;
}
public static byte[] getHeatmapData() {
return heatmap;
}
public static byte[] getVirginmapData() {
return virginmap;
}
public static byte[] getPlacemapData() {
return placemap;
}
public static byte[] getBoardData() {
return board;
}
public static boolean getHavePlacemap() {
return havePlacemap;
}
public static Path getStorageDir() {
return Paths.get(config.getString("server.storage"));
}
public static List<String> getPalette() {
return config.getStringList("board.palette");
}
public static boolean isCaptchaEnabled() {
return !config.getString("captcha.key").isEmpty() && !config.getString("captcha.secret").isEmpty();
}
public static String getWhoamiAllowedOrigin() {
if (cachedWhoamiOrigin == null) cachedWhoamiOrigin = config.getString("whoamiAllowedOrigin");
return cachedWhoamiOrigin;
}
public static int getPixel(int x, int y) {
return board[x + y * width];
}
public static int getPlacemap(int x, int y) {
return placemap[x + y * width];
}
public static int getVirginmap(int x, int y) {
return virginmap[x + y * width];
}
public static boolean getRegistrationEnabled() { return getConfig().getBoolean("oauth.enableRegistration"); }
public static void putPixel(int x, int y, int color, User user, boolean mod_action, String ip, boolean updateDatabase, String action) {
if (x < 0 || x >= width || y < 0 || y >= height || (color >= getPalette().size() && !(color == 0xFF || color == -1))) return;
String userName = user != null ? user.getName() : "<server>";
if (action.trim().isEmpty()) {
action = mod_action ? "mod overwrite" : "user place";
}
board[x + y * width] = (byte) color;
heatmap[x + y * width] = (byte) 0xFF;
virginmap[x + y * width] = (byte) 0x00;
pixelLogger.log(Level.INFO, String.format("%s\t%d\t%d\t%d\t%s\t%s", userName, x, y, color, ip, action));
if (updateDatabase) {
database.placePixel(x, y, color, user, mod_action);
}
}
public static void logShadowbannedPixel(int x, int y, int color, String userName, String ip) {
shadowbannedPixelLogger.info(String.format("%s\t%d\t%d\t%d\t%s", userName, x, y, color, ip));
}
public static void rollbackAfterBan(User who, int seconds) {
if (seconds <= 0) {
return;
}
XnioWorker worker = server.getServer().getWorker();
worker.execute(() -> rollbackAfterBan_(who, seconds));
}
private static void rollbackAfterBan_(User who, int seconds) {
List<DBRollbackPixel> pixels = database.getRollbackPixels(who, seconds); //get all pixels that can and need to be rolled back
List<ServerPlace.Pixel> forBroadcast = new ArrayList<>();
for (DBRollbackPixel rbPixel : pixels) {
//This is same for both instances
// putPixel() logs and updates the board[]
// forBroadcast.add() adds the pixel and later broadcasts it via websocket
// putRollbackPixel() adds rollback pixel to database (TABLE pixels) for undo and timelapse purposes
if (rbPixel.toPixel != null) { //if previous pixel (the one we are rolling back to) exists
putPixel(rbPixel.toPixel.x, rbPixel.toPixel.y, rbPixel.toPixel.color, who, false, "", false, "rollback");
forBroadcast.add(new ServerPlace.Pixel(rbPixel.toPixel.x, rbPixel.toPixel.y, rbPixel.toPixel.color));
database.putRollbackPixel(who, rbPixel.fromId, rbPixel.toPixel.id);
} else { //else rollback to blank canvas
DBPixelPlacement fromPixel = database.getPixelByID(rbPixel.fromId);
byte rollbackDefault = getDefaultColor(fromPixel.x, fromPixel.y);
putPixel(fromPixel.x, fromPixel.y, rollbackDefault, who, false, "", false, "rollback");
forBroadcast.add(new ServerPlace.Pixel(fromPixel.x, fromPixel.y, rollbackDefault));
database.putRollbackPixelNoPrevious(fromPixel.x, fromPixel.y, who, fromPixel.id);
}
}
server.broadcastNoShadow(new ServerPlace(forBroadcast));
}
public static void undoRollback(User who) {
XnioWorker worker = server.getServer().getWorker();
worker.execute(() -> undoRollback_(who));
}
private static void undoRollback_(User who) {
List<DBPixelPlacement> pixels = database.getUndoPixels(who); //get all pixels that can and need to be undone
List<ServerPlace.Pixel> forBroadcast = new ArrayList<>();
for (DBPixelPlacement fromPixel : pixels) {
//restores original pixel
putPixel(fromPixel.x, fromPixel.y, fromPixel.color, who, false, "", false, "rollback undo"); //in board[]
forBroadcast.add(new ServerPlace.Pixel(fromPixel.x, fromPixel.y, fromPixel.color)); //in websocket
database.putUndoPixel(fromPixel.x, fromPixel.y, fromPixel.color, who, fromPixel.id); //in database
}
server.broadcastNoShadow(new ServerPlace(forBroadcast));
}
private static void nuke(int fromX, int fromY, int toX, int toY, byte fromColor, byte toColor) {
XnioWorker worker = server.getServer().getWorker();
worker.execute(() -> nuke_(fromX, fromY, toX, toY, fromColor, toColor));
}
private static void nuke_(int fromX, int fromY, int toX, int toY, byte fromColor, byte toColor) {
List<ServerPlace.Pixel> forBroadcast = new ArrayList<>();
for (int x = Math.min(fromX, toX); x <= Math.max(fromX, toX); x++) {
for (int y = Math.min(fromY, toY); y <= Math.max(fromY, toY); y++) {
byte c = toColor;
if (toColor == 0xFF || toColor == -1) {
c = getDefaultColor(x, y);
}
int pixelColor = getPixel(x, y);
// fromColor is 0xFF or -1 if we're nuking
if (pixelColor != toColor) {
putPixel(x, y, c, null, true, "", false, "console nuke");
forBroadcast.add(new ServerPlace.Pixel(x, y, c));
if (fromColor == 0xFF || fromColor == -1) {
database.putNukePixel(x, y, c);
} else if (pixelColor == fromColor) {
database.putNukePixel(x, y, fromColor, c);
}
}
}
}
server.broadcastNoShadow(new ServerPlace(forBroadcast));
}
private static boolean loadMap() {
try {
byte[] bytes = Files.readAllBytes(getStorageDir().resolve("board.dat"));
System.arraycopy(bytes, 0, board, 0, width * height);
} catch (NoSuchFileException e) {
getLogger().warn("Cannot find board.dat in working directory, using blank board");
return false;
} catch (IOException e) {
e.printStackTrace();
}
return true;
}
private static void loadHeatmap() {
try {
byte[] bytes = Files.readAllBytes(getStorageDir().resolve("heatmap.dat"));
System.arraycopy(bytes, 0, heatmap, 0, width * height);
} catch (NoSuchFileException e) {
getLogger().warn("Cannot find heatmap.dat in working directory, using blank heatmap");
} catch (IOException e) {
e.printStackTrace();
}
}
private static void loadPlacemap() {
try {
byte[] bytes = Files.readAllBytes(getStorageDir().resolve("placemap.dat"));
System.arraycopy(bytes, 0, placemap, 0, width * height);
havePlacemap = true;
} catch (NoSuchFileException e) {
getLogger().warn("Cannot find placemap.dat in working directory, using blank placemap");
havePlacemap = false;
} catch (IOException e) {
e.printStackTrace();
}
}
private static void loadVirginmap() {
try {
byte[] bytes = Files.readAllBytes(getStorageDir().resolve("virginmap.dat"));
System.arraycopy(bytes, 0, virginmap, 0, width * height);
} catch (NoSuchFileException e) {
getLogger().warn("Cannot find virginmap.dat in working directory, using blank virginmap");
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
virginmap[x + width * y] = 0xFF;
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
public static void updateHeatmap() {
for (int i = 0; i < width * height; i++) {
if (heatmap[i] != 0) {
heatmap[i]
}
}
}
public static void tickStackedPixels() {
for (User user : server.getAuthedUsers().values()) {
user.tickStack();
}
}
public static void saveMap() {
mapSaveTimer.run(App::saveMapForce);
mapBackupTimer.run(App::saveMapBackup);
}
private static void saveMapForce() {
saveMapToDir(getStorageDir().resolve("board.dat"));
saveHeatmapToDir(getStorageDir().resolve("heatmap.dat"));
saveVirginmapToDir(getStorageDir().resolve("virginmap.dat"));
}
private static void saveMapBackup() {
saveMapToDir(getStorageDir().resolve("backups/board." + System.currentTimeMillis() + ".dat"));
}
private static void saveMapToDir(Path path) {
try {
Files.write(path, board);
} catch (IOException e) {
e.printStackTrace();
}
}
private static void saveHeatmapToDir(Path path) {
try {
Files.write(path, heatmap);
} catch (IOException e) {
e.printStackTrace();
}
}
private static void saveVirginmapToDir(Path path) {
try {
Files.write(path, virginmap);
} catch (IOException e) {
e.printStackTrace();
}
}
public static boolean shouldIncreaseSomePixelCount() {
return App.getConfig().getBoolean("pixelCounts.countTowardsAlltime") || App.getConfig().getBoolean("pixelCounts.countTowardsCurrent");
}
public static Logger getLogger() {
return appLogger;
}
public static UserManager getUserManager() {
return userManager;
}
public static byte getDefaultColor(int x, int y) {
try {
RandomAccessFile raf = new RandomAccessFile(getStorageDir().resolve("default_board.dat").toAbsolutePath().toString(), "r");
raf.seek(x + y*width);
byte b = raf.readByte();
raf.close();
return b;
} catch (NoSuchFileException e) {
} catch (IOException e) {
}
return (byte) config.getInt("board.defaultColor");
}
public static Database getDatabase() {
return database;
}
public static UndertowServer getServer() {
return server;
}
} |
package ui;
import javafx.application.Platform;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.geometry.Orientation;
import javafx.scene.Node;
import javafx.scene.control.*;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.ButtonBar.ButtonData;
import javafx.scene.input.KeyCodeCombination;
import javafx.stage.Modality;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.ocpsoft.prettytime.PrettyTime;
import prefs.Preferences;
import ui.components.KeyboardShortcuts;
import ui.issuepanel.PanelControl;
import ui.issuepanel.FilterPanel;
import util.DialogMessage;
import util.Utility;
import util.events.*;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class MenuControl extends MenuBar {
private static final Logger logger = LogManager.getLogger(MenuControl.class.getName());
private final PanelControl panels;
private final ScrollPane panelsScrollPane;
private final UI ui;
private final Preferences prefs;
public MenuControl(UI ui, PanelControl panels, ScrollPane panelsScrollPane, Preferences prefs) {
this.panels = panels;
this.prefs = prefs;
this.panelsScrollPane = panelsScrollPane;
this.ui = ui;
createMenuItems();
}
private void createMenuItems() {
Menu file = createFileMenu();
Menu newMenu = new Menu("New");
newMenu.getItems().addAll(createNewMenuItems());
Menu panels = createPanelsMenu();
Menu boards = new Menu("Boards");
boards.getItems().addAll(createBoardsMenu());
Menu view = new Menu("View");
view.getItems().addAll(
createRefreshMenuItem(),
createDocumentationMenuItem(),
createRateLimitsMenuItem());
getMenus().addAll(file, newMenu, panels, boards, view);
}
private Menu createFileMenu() {
Menu file = new Menu("File");
MenuItem logout = new MenuItem("Logout");
logout.setOnAction(e -> {
logger.info("Logging out of HT");
prefs.setLastLoginCredentials("", "");
ui.quit();
});
MenuItem quit = new MenuItem("Quit");
quit.setOnAction(e -> {
logger.info("Quitting HT");
ui.quit();
});
file.getItems().addAll(logout, quit);
return file;
}
private Menu createPanelsMenu() {
Menu cols = new Menu("Panels");
MenuItem createLeft = new MenuItem("Create (Left)");
createLeft.setOnAction(e -> {
logger.info("Menu: Panels > Create (Left)");
panels.createNewPanelAtStart();
setHvalue(panelsScrollPane.getHmin());
});
createLeft.setAccelerator(KeyboardShortcuts.CREATE_LEFT_PANEL);
MenuItem createRight = new MenuItem("Create");
createRight.setOnAction(e -> {
logger.info("Menu: Panels > Create");
panels.createNewPanelAtEnd();
// listener is used as panelsScroll's Hmax property doesn't update
// synchronously
ChangeListener<Number> listener = new ChangeListener<Number>() {
@Override
public void changed(ObservableValue<? extends Number> arg0, Number arg1, Number arg2) {
for (Node child : panelsScrollPane.getChildrenUnmodifiable()) {
if (child instanceof ScrollBar) {
ScrollBar scrollBar = (ScrollBar) child;
if (scrollBar.getOrientation() == Orientation.HORIZONTAL
&& scrollBar.visibleProperty().get()) {
setHvalue(panelsScrollPane.getHmax());
break;
}
}
}
panels.widthProperty().removeListener(this);
}
};
panels.widthProperty().addListener(listener);
});
createRight.setAccelerator(KeyboardShortcuts.CREATE_RIGHT_PANEL);
MenuItem closePanel = new MenuItem("Close");
closePanel.setOnAction(e -> {
logger.info("Menu: Panels > Close");
panels.closeCurrentPanel();
});
closePanel.setAccelerator(KeyboardShortcuts.CLOSE_PANEL);
cols.getItems().addAll(createRight, createLeft, closePanel);
return cols;
}
/**
* Called upon the Boards > Save being clicked
*/
private void onBoardSave() {
logger.info("Menu: Boards > Save");
List<String> filterStrings = getCurrentFilterExprs();
if (filterStrings.isEmpty()) {
logger.info("Did not save new board");
return;
}
TextInputDialog dlg = new TextInputDialog("");
dlg.getEditor().setId("boardnameinput");
dlg.setTitle("Board Name");
dlg.getDialogPane().setContentText("What should this board be called?");
dlg.getDialogPane().setHeaderText("Please name this board");
Optional<String> response = dlg.showAndWait();
if (response.isPresent()) {
prefs.addBoard(response.get(), filterStrings);
ui.triggerEvent(new BoardSavedEvent());
logger.info("New board" + response.get() + " saved, containing " + filterStrings);
}
}
/**
* Called upon the Boards > Open being clicked
*/
private void onBoardOpen(String boardName, List<String> filters) {
logger.info("Menu: Boards > Open > " + boardName);
panels.closeAllPanels();
panels.openPanelsWithFilters(filters);
}
/**
* Called upon the Boards > Delete being clicked
*/
private void onBoardDelete(String boardName) {
logger.info("Menu: Boards > Delete > " + boardName);
Alert dlg = new Alert(AlertType.CONFIRMATION, "");
dlg.initModality(Modality.APPLICATION_MODAL);
dlg.setTitle("Confirmation");
dlg.getDialogPane().setHeaderText("Delete board '" + boardName + "'?");
dlg.getDialogPane().setContentText("Are you sure you want to delete this board?");
Optional<ButtonType> response = dlg.showAndWait();
if (response.isPresent() && response.get().getButtonData() == ButtonData.OK_DONE) {
prefs.removeBoard(boardName);
ui.triggerEvent(new BoardSavedEvent());
logger.info(boardName + " was deleted");
} else {
logger.info(boardName + " was not deleted");
}
}
private MenuItem[] createBoardsMenu() {
MenuItem save = new MenuItem("Save");
save.setOnAction(e -> onBoardSave());
Menu open = new Menu("Open");
Menu delete = new Menu("Delete");
ui.registerEvent((BoardSavedEventHandler) e -> {
open.getItems().clear();
delete.getItems().clear();
Map<String, List<String>> boards = prefs.getAllBoards();
for (final String boardName : boards.keySet()) {
final List<String> filterSet = boards.get(boardName);
MenuItem openItem = new MenuItem(boardName);
openItem.setOnAction(e1 -> onBoardOpen(boardName, filterSet));
open.getItems().add(openItem);
MenuItem deleteItem = new MenuItem(boardName);
deleteItem.setOnAction(e1 -> onBoardDelete(boardName));
delete.getItems().add(deleteItem);
}
});
return new MenuItem[] {save, open, delete};
}
/**
* Returns the list of filter strings currently showing the user interface
* @return
*/
private List<String> getCurrentFilterExprs() {
return panels.getChildren().stream().flatMap(c -> {
if (c instanceof FilterPanel) {
return Stream.of(((FilterPanel) c).getCurrentFilterString());
} else {
return Stream.of();
}
}).collect(Collectors.toList());
}
private MenuItem createRateLimitsMenuItem() {
MenuItem rateLimitsMenuItem = new MenuItem("Rate Limits");
rateLimitsMenuItem.setOnAction((e) -> {
logger.info("Menu: View > Rate Limits");
ui.logic.getRateLimitResetTime().whenComplete((rateLimits, ex) ->
showDialogOnAPICheck(rateLimits.left, rateLimits.right, ex)
);
});
return rateLimitsMenuItem;
}
private MenuItem createDocumentationMenuItem() {
MenuItem documentationMenuItem = new MenuItem("Documentation");
documentationMenuItem.setOnAction((e) -> {
logger.info("Menu: View > Documentation");
ui.getBrowserComponent().showDocs();
});
documentationMenuItem.setAccelerator(new KeyCodeCombination(KeyboardShortcuts.SHOW_DOCS));
return documentationMenuItem;
}
private MenuItem createRefreshMenuItem() {
MenuItem refreshMenuItem = new MenuItem("Refresh");
refreshMenuItem.setOnAction((e) -> {
logger.info("Menu: View > Refresh");
ui.logic.refresh();
});
refreshMenuItem.setAccelerator(new KeyCodeCombination(KeyboardShortcuts.REFRESH));
return refreshMenuItem;
}
private MenuItem[] createNewMenuItems() {
MenuItem newIssueMenuItem = new MenuItem("Issue");
newIssueMenuItem.setOnAction(e -> {
logger.info("Menu: New > Issue");
ui.triggerEvent(new IssueCreatedEvent());
});
newIssueMenuItem.setAccelerator(KeyboardShortcuts.NEW_ISSUE);
MenuItem newLabelMenuItem = new MenuItem("Label");
newLabelMenuItem.setOnAction(e -> {
logger.info("Menu: New > Label");
ui.triggerEvent(new LabelCreatedEvent());
});
newLabelMenuItem.setAccelerator(KeyboardShortcuts.NEW_LABEL);
MenuItem newMilestoneMenuItem = new MenuItem("Milestone");
newMilestoneMenuItem.setOnAction(e -> {
logger.info("Menu: New > Milestone");
ui.triggerEvent(new MilestoneCreatedEvent());
});
newMilestoneMenuItem.setAccelerator(KeyboardShortcuts.NEW_MILESTONE);
return new MenuItem[] { newIssueMenuItem, newLabelMenuItem, newMilestoneMenuItem };
}
public void scrollTo(int panelIndex, int numOfPanels){
setHvalue(panelIndex * (panelsScrollPane.getHmax()) / (numOfPanels - 1));
}
private void setHvalue(double val) {
panelsScrollPane.setHvalue(val);
}
private void showDialogOnAPICheck(int remaining, LocalDateTime reset, Throwable ex) {
if (ex == null) {
Platform.runLater(() -> DialogMessage.showInfoDialog(
"GitHub API Status",
String.format(
"You are currently logged in as %s.\n\n" +
"Remaining API calls for the hour: %s\n" +
"Next reset at: %s (%s)",
prefs.getLastLoginUsername(),
remaining,
reset,
new PrettyTime().format(Utility.localDateTimeToDate(reset))
)
));
} else {
Platform.runLater(() -> DialogMessage.showErrorDialog(
"Could not connect to GitHub",
"An error occurred while attempting to query the GitHub API."
));
}
}
} |
package dr.evomodelxml.continuous;
import dr.evolution.tree.MultivariateTraitTree;
import dr.evomodel.continuous.LatentFactorModel;
import dr.evomodelxml.treelikelihood.TreeTraitParserUtilities;
import dr.inference.model.CompoundParameter;
import dr.inference.model.DiagonalMatrix;
import dr.inference.model.MatrixParameter;
import dr.inference.model.Parameter;
import dr.xml.*;
import java.util.List;
/**
* @author Max Tolkoff
* @author Marc Suchard
*/
public class LatentFactorModelParser extends AbstractXMLObjectParser {
    // XML element / attribute names recognized by this parser.
    public final static String LATENT_FACTOR_MODEL = "latentFactorModel";
    public final static String NUMBER_OF_FACTORS = "factorNumber";
    public final static String FACTORS = "factors";
    // NOTE(review): DATA is declared but never referenced below -- the data
    // matrix is read from taxon attributes instead; confirm before removing.
    public final static String DATA = "data";
    public final static String LOADINGS = "loadings";
    public static final String ROW_PRECISION = "rowPrecision";
    public static final String COLUMN_PRECISION = "columnPrecision";
    public static final String SCALE_DATA="scaleData";

    /** Returns the XML element name this parser handles. */
    public String getParserName() {
        return LATENT_FACTOR_MODEL;
    }

    /**
     * Builds a {@link LatentFactorModel} from XML: the factor matrix comes
     * from the {@code <factors>} child, the data matrix is extracted from
     * taxon attributes on the tree, and the loadings plus row/column
     * precision matrices come from their own child elements.
     */
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
//        Parameter latent = null;
        // Re-wrap the compound parameter inside <factors> as a matrix parameter.
        MatrixParameter factors = MatrixParameter.recast("name",
                (CompoundParameter) xo.getChild(FACTORS).getChild(CompoundParameter.class));
//        MatrixParameter.DefaultBounds FactorBounds= new MatrixParameter.DefaultBounds(Double.MAX_VALUE,Double.MIN_VALUE, factors.getColumnDimension());
//        factors.addBounds(null);
        TreeTraitParserUtilities utilities = new TreeTraitParserUtilities();
//        String traitName = TreeTraitParserUtilities.DEFAULT_TRAIT_NAME;
        String traitName = (String) xo.getAttribute(TreeTraitParserUtilities.TRAIT_NAME);
        MultivariateTraitTree treeModel = (MultivariateTraitTree) xo.getChild(MultivariateTraitTree.class);
//        System.err.println("TN: " + traitName);
        // Pull the trait values (and the indices of missing entries) off the
        // tree's taxa, then recast the result as a matrix parameter.
        TreeTraitParserUtilities.TraitsAndMissingIndices returnValue =
                utilities.parseTraitsFromTaxonAttributes(xo, traitName, treeModel, true);
        MatrixParameter dataParameter = MatrixParameter.recast(returnValue.traitParameter.getId(),
                returnValue.traitParameter);
//        MatrixParameter.DefaultBounds DataBounds=new MatrixParameter.DefaultBounds(Double.MAX_VALUE, Double.MIN_VALUE, dataParameter.getColumnDimension());
//        dataParameter.addBounds(null);
        // NOTE(review): missingIndices and the reassigned traitName are not
        // used below -- presumably leftovers from an earlier version; confirm
        // before removing.
        List<Integer> missingIndices = returnValue.missingIndices;
        traitName = returnValue.traitName;
//        MatrixParameter data = (MatrixParameter) xo.getChild(DATA).getChild(MatrixParameter.class);
//        int colDim=treeModel.getTaxonCount();
//        int rowDim=dataParameter.getDimension()/treeModel.getTaxonCount();
//        Parameter[] dataTemp=new Parameter[colDim];
//        for(int i=0; i<colDim; i++)
//            dataTemp[i] = new Parameter.Default(rowDim);
//        for(int j=0; j<rowDim; j++)
//            dataTemp[i].setParameterValue(j, dataParameter.getParameterValue(i*rowDim+j));
//        MatrixParameter dataMatrix=new MatrixParameter(null, dataTemp);
//        System.err.print(new Matrix(dataMatrix.getParameterAsMatrix()));
//        System.err.print(dataMatrix.getRowDimension());
        MatrixParameter loadings = (MatrixParameter) xo.getChild(LOADINGS).getChild(MatrixParameter.class);
        DiagonalMatrix rowPrecision = (DiagonalMatrix) xo.getChild(ROW_PRECISION).getChild(MatrixParameter.class);
        DiagonalMatrix colPrecision = (DiagonalMatrix) xo.getChild(COLUMN_PRECISION).getChild(MatrixParameter.class);
        boolean scaleData=xo.getAttribute(SCALE_DATA, false);
//        int numFactors = xo.getAttribute(NUMBER_OF_FACTORS, 4);
        // NOTE(review): when a diagonal loading is negative this assigns the
        // element to its own value, which looks like a no-op; a sign flip
        // (-temp.getParameterValue(i)) to force a positive diagonal may have
        // been intended. setParameterValue may also fire change listeners, so
        // it is not necessarily side-effect free -- confirm before changing.
        Parameter temp=null;
        for(int i=0; i<loadings.getRowDimension(); i++)
        {
            temp=loadings.getParameter(i);
            if(temp.getParameterValue(i)<0)
            {
                temp.setParameterValue(i, temp.getParameterValue(i));
            }
        }
        return new LatentFactorModel(dataParameter, factors, loadings, rowPrecision, colPrecision, scaleData);
    }

    // Declarative schema for the element. NOTE(review): NUMBER_OF_FACTORS is
    // required here but its value is never read in parseXMLObject (the read
    // is commented out above) -- confirm whether the rule should be optional.
    private static final XMLSyntaxRule[] rules = {
            AttributeRule.newIntegerRule(NUMBER_OF_FACTORS),
            new ElementRule(MultivariateTraitTree.class),
            AttributeRule.newStringRule(TreeTraitParserUtilities.TRAIT_NAME),
            AttributeRule.newBooleanRule(SCALE_DATA, true),
            new ElementRule(TreeTraitParserUtilities.TRAIT_PARAMETER, new XMLSyntaxRule[]{
                    new ElementRule(Parameter.class)
            }),
            new ElementRule(FACTORS, new XMLSyntaxRule[]{
                    new ElementRule(CompoundParameter.class),
            }),
            new ElementRule(LOADINGS, new XMLSyntaxRule[]{
                    new ElementRule(MatrixParameter.class)
            }),
            new ElementRule(ROW_PRECISION, new XMLSyntaxRule[]{
                    new ElementRule(DiagonalMatrix.class)
            }),
            new ElementRule(COLUMN_PRECISION, new XMLSyntaxRule[]{
                    new ElementRule(DiagonalMatrix.class)
            }),
    };

//    <latentFactorModel>
//      <factors>
//         <parameter idref="factors"/>
//      </factors>
//    </latentFactorModel>

    /** Returns the syntax rules used to validate the XML element. */
    public XMLSyntaxRule[] getSyntaxRules() {
        return rules;
    }

    @Override
    public String getParserDescription() {
        return "Sets up a latent factor model, with starting guesses for the loadings and factor matrices as well as the data for the factor analysis";
    }

    @Override
    public Class getReturnType() {
        return LatentFactorModel.class;
    }
}
package edu.mit.kacquah.deckviewer.game;
import java.awt.Dimension;
import java.awt.Point;
import java.util.LinkedList;
import java.util.logging.Logger;
import javax.swing.JFrame;
import javax.swing.SwingUtilities;
import org.OpenNI.GeneralException;
import edu.mit.kacquah.deckviewer.action.SelectionManager;
import edu.mit.kacquah.deckviewer.deckobjects.*;
import edu.mit.kacquah.deckviewer.environment.Deck;
import edu.mit.kacquah.deckviewer.game.GlobalSettings.BackgroundRatio;
import edu.mit.kacquah.deckviewer.gesture.HandTracker;
import edu.mit.kacquah.deckviewer.gui.DeckViewerSwingFrame;
import edu.mit.kacquah.deckviewer.gui.StatusBar;
import edu.mit.kacquah.deckviewer.speech.Commands;
import edu.mit.kacquah.deckviewer.speech.SpeechEngine;
import edu.mit.kacquah.deckviewer.speech.SpeechParser;
import edu.mit.kacquah.deckviewer.utils.*;
import edu.mit.yingyin.tabletop.controllers.ProcessPacketController;
import edu.mit.yingyin.tabletop.models.HandTrackingEngine;
import edu.mit.yingyin.tabletop.models.ProcessPacket;
import edu.mit.yingyin.util.SystemUtil;
import processing.core.*;
/**
* Main PApplet for running the DeckViewerApp. Extended from processing core
* Papplet for main run loop and drawing functions.
*
* @author kojo
*
*/
public class DeckViewerPApplet extends PApplet implements PAppletRenderObject {
  // App utils
  private static Logger LOGGER = Logger.getLogger(DeckViewerPApplet.class
      .getName());

  // Directory constants
  public final String WORKING_DIR = System.getProperty("user.dir");
  public static final String MAIN_DIR = ".";
  public static final String RESOURCE_DIR = FileUtil
      .join(MAIN_DIR, "resources");
  public static final String CALIB_DIR = FileUtil.join("data", "calibration");
  public static final String OPENNI_CONFIG_FILE = FileUtil.join(MAIN_DIR,
      "config", "config.xml");
  public static final String CALIB_FILE = FileUtil.join(MAIN_DIR, CALIB_DIR,
      "calibration.txt");

  /**
   * App dimensions used to size the application window.
   */
  public int appWidth, appHeight;
  // Ratio of the actual window width to the original background width; set in
  // fitWindowToScreen() and used to resize image sprites accordingly.
  private float scaleRatio;

  // Deck Objects and Managers
  private Deck deck;
  private FlyingObjectManager flyingObjectManager;

  // Hand Tracking
  private HandTracker handTracker;

  // Speech
  private SpeechEngine speechEngine;
  private SpeechParser speechParser;

  // Actions
  private SelectionManager selectionManager;

  // Static Views
  private LinkedList<StaticTextView> staticViews;

  /**
   * JFrame that contains this app.
   * NOTE(review): must be a DeckViewerSwingFrame set via
   * setParentFrameContainer() before setup() runs, because initStatusBar()
   * downcasts it -- confirm against the launcher.
   */
  private JFrame parentFrame;
  private StatusBar statusbar;

  /**
   * Processing entry point: sizes the window, then initializes the deck
   * objects, hand tracking, speech recognition, the selection manager and the
   * status bar -- in that order (later steps depend on earlier ones).
   */
  public void setup() {
    // Init app state
    initScreenSize();
    // size(GameConstants.BACKGROUND_WIDTH, GameConstants.BACKGROUND_HEIGHT);
    size(appWidth, appHeight);
    frameRate(30);
    // Rendering modes
    imageMode(CENTER);
    ellipseMode(CENTER);
    // Init app utils
    PImagePool.setParent(this);
    // Static views
    staticViews = new LinkedList<StaticTextView>();
    // Setup the deck environment and variables
    initDeckObjects();
    // Setup tracking
    initHandTracking();
    // Debug strings
    LOGGER.info(WORKING_DIR);
    // Setup speech
    initSpeech();
    // Setup Actions
    selectionManager = new SelectionManager(this, flyingObjectManager, deck,
        handTracker);
    speechParser.setSelectionManager(selectionManager);
    // Setup status bar.
    initStatusBar();
  }

  /**
   * Processing draw loop: updates app state, then renders a frame.
   * NOTE(review): elapsedTime is an absolute wall-clock timestamp, not a
   * frame delta -- callees appear to treat it as a clock value; confirm.
   */
  public void draw() {
    // Update the app.
    long elapsedTime = System.currentTimeMillis();
    update(elapsedTime);
    // render the app.
    render(this);
  }

  /** Updates the deck, flying objects, hand tracker and static views. */
  public void update(long elapsedTime) {
    Point point = new Point(mouseX, mouseY);
    SwingUtilities.convertPointToScreen(point, this);
    // LOGGER.info(""+point);
    // Update all deck objects
    deck.update(elapsedTime);
    flyingObjectManager.update(elapsedTime);
    // Update hand tracking
    handTracker.update(elapsedTime);
    // Update static views
    for (StaticTextView view : staticViews) {
      view.update(elapsedTime);
    }
  }

  /** Renders the deck, flying objects, hand tracking overlay and static views. */
  public void render(PApplet p) {
    // Render all deck objects
    deck.render(this);
    flyingObjectManager.render(this);
    // Render handtracking
    handTracker.render(p);
    // Render static views
    for (StaticTextView view : staticViews) {
      view.render(this);
    }
  }

  /**
   * Keyboard shortcuts: 's' selects a flying object, 'e' executes the pending
   * action, 'r' recalibrates the hand-tracking background.
   */
  public void keyPressed() {
    switch (key) {
    case 'S':
    case 's':
      // Select a flying object.
      selectionManager.selectWithAction(Commands.MOVE);
      break;
    case 'E':
    case 'e':
      // Execute an action.
      selectionManager.executeActionWithTarget(Commands.TO);
      break;
    case 'R':
    case 'r':
      // Reset the handtracking background calibration.
      handTracker.recalibrateBackground();
      break;
    }
  }

  /**
   * Chooses the window size: either fit to the screen (keeping aspect ratio)
   * or use the fixed dimensions from GlobalSettings.
   */
  public void initScreenSize() {
    if (GlobalSettings.fitToWindowScreen) {
      fitWindowToScreen();
    } else {
      appWidth = GlobalSettings.desiredWidth;
      appHeight = GlobalSettings.desiredHeight;
    }
  }

  /**
   * Maximizes the app window to the screen boundary while maintaining aspect
   * ratio.
   */
  private void fitWindowToScreen() {
    // Dimensions for current virtual screen (all monitors combined).
    Dimension screen = SystemUtil.getVirtualScreenBounds().getSize();
    // Account for menu bar at bottom of screen.
    screen.height -= GameConstants.STATUS_BAR_HEIGHT * 4;
    float screenRatio = (float) screen.height / (float) screen.width;
    float desiredRatio;
    float origWidth;
    // Pick the background aspect ratio (normal vs wide) from settings.
    if (GlobalSettings.backgroundRatio == BackgroundRatio.NORMAL) {
      desiredRatio = (float) GameConstants.BACKGROUND_HEIGHT
          / (float) GameConstants.BACKGROUND_WIDTH;
      origWidth = (float) GameConstants.BACKGROUND_WIDTH;
    } else {
      desiredRatio = (float) GameConstants.BACKGROUND_WIDE_HEIGHT
          / (float) GameConstants.BACKGROUND_WIDE_WIDTH;
      origWidth = (float) GameConstants.BACKGROUND_WIDE_WIDTH;
    }
    if (screenRatio > desiredRatio) {
      // Size based on maximizing width.
      appWidth = screen.width;
      appHeight = (int) ((float) appWidth * desiredRatio);
    } else {
      // Size based on maximizing height.
      appHeight = screen.height;
      appWidth = (int) ((float) appHeight / desiredRatio);
    }
    // If we're creating a window thats too big, scale it down.
    if (GlobalSettings.limitMaxRes) {
      int numPixels = appWidth * appHeight;
      float pixelRatio = ((float)numPixels) / GlobalSettings.maxNumPixels;
      if (pixelRatio > 1.0) {
        // Scale both dimensions by sqrt(ratio) to preserve aspect ratio.
        float pixelRatioRoot = (float) Math.sqrt(pixelRatio);
        appWidth = (int)((float)appWidth/ pixelRatioRoot);
        appHeight = (int)((float)appHeight / pixelRatioRoot);
      }
    }
    // The scaling ratio is used to resize all image sprites accordingly.
    scaleRatio = appWidth / origWidth;
    LOGGER.info("Final screen resolution: " + appWidth + "x" + appHeight);
  }

  /**
   * Initialize the deck parameters and flight objects.
   */
  private void initDeckObjects() {
    deck = new Deck(this);
    flyingObjectManager = new FlyingObjectManager();
    // For now, we'll just place some random objects on the deck.
    PVector pos = new PVector(width / 2, height / 2);
    FlyingObject flyingObject = new FlyingObject("fmac", pos, 0);
    flyingObjectManager.addFlyingObject(flyingObject);
    pos = new PVector(width / 3, height / 2);
    flyingObject = new FlyingObject("fmac", pos, 0);
    flyingObjectManager.addFlyingObject(flyingObject);
    pos = new PVector(width / 3 * 2, height / 2);
    flyingObject = new FlyingObject("fmac", pos, 0);
    flyingObjectManager.addFlyingObject(flyingObject);
  }

  /**
   * Starts the hand tracker and debug display.
   */
  private void initHandTracking() {
    // HandTracker
    Dimension tabletopRes = new Dimension(GameConstants.TABLETOP_WIDTH,
        GameConstants.TABLETOP_HEIGHT);
    handTracker = new HandTracker(this, tabletopRes);
    handTracker.initHandTracking(OPENNI_CONFIG_FILE, CALIB_FILE);
  }

  /**
   * Setup speech recognition for app.
   */
  private void initSpeech() {
    speechEngine = new SpeechEngine();
    speechEngine.setGrammarPath(GlobalSettings.grammarPath);
    speechEngine.setGrammarName(GlobalSettings.grammarName);
    speechEngine.initRecognition();
    speechParser = new SpeechParser();
    speechEngine.setSpeechListener(speechParser);
    // Recognition can be disabled via settings (e.g. for keyboard-only runs).
    if (GlobalSettings.useSpeechRecognition) {
      speechEngine.startRecognition();
    }
  }

  /** Fetches the status bar from the enclosing frame and shows a ready message. */
  private void initStatusBar() {
    this.statusbar = ((DeckViewerSwingFrame)parentFrame).getStatusBar();
    this.statusbar.setMessage("Ready for command...");
  }

  /** Returns the sprite scaling ratio computed in fitWindowToScreen(). */
  public float scaleRatio() {
    return this.scaleRatio;
  }

  /** Registers a static text view to be updated and rendered each frame. */
  public void addStaticView(StaticTextView view) {
    this.staticViews.add(view);
  }

  /** Deregisters a static text view; returns true if it was present. */
  public boolean removeStaticView(StaticTextView view) {
    return this.staticViews.remove(view);
  }

  /** Records the containing JFrame (also assigned to Processing's frame field). */
  public void setParentFrameContainer(JFrame parentFrameContainer) {
    this.parentFrame = parentFrameContainer;
    this.frame = parentFrameContainer;
  }

  /** Overrides the status bar reference. */
  public void setStatusBar(StatusBar sb) {
    this.statusbar = sb;
  }
}
package io.agrest;
import io.agrest.meta.AgAttribute;
import io.agrest.meta.AgEntity;
import io.agrest.meta.AgRelationship;
import org.apache.cayenne.exp.Expression;
import org.apache.cayenne.query.Ordering;
import org.apache.cayenne.query.SelectQuery;
import org.apache.cayenne.util.ToStringBuilder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A metadata object that describes a data structure of a given REST resource.
* Connected ResourceEntities form a tree-like structure that usually overlays a
* certain Cayenne mapping subgraph (unless this is a non-persistent entity),
* filtering and extending its properties to describe the data structure to be
* returned to the client.
* <p>
* ResourceEntity scope is usually a single request. It is built on the fly by
* the framework or by the application code.
*/
public class ResourceEntity<T> {
private boolean idIncluded;
private AgEntity<T> agEntity;
private Map<String, AgAttribute> attributes;
private Collection<String> defaultProperties;
private String applicationBase;
private String mapByPath;
private ResourceEntity<?> mapBy;
private Map<String, ResourceEntity<?>> children;
private AgRelationship incoming;
private List<Ordering> orderings;
private Expression qualifier;
private Map<String, EntityProperty> includedExtraProperties;
private Map<String, EntityProperty> extraProperties;
private int fetchOffset;
private int fetchLimit;
private boolean filtered;
private SelectQuery<T> select;
private List<T> result;
private Map<AgObjectId, Object> parentToChildResult;
public ResourceEntity(AgEntity<T> agEntity) {
this.idIncluded = false;
this.attributes = new HashMap<>();
this.defaultProperties = new HashSet<>();
this.children = new HashMap<>();
this.orderings = new ArrayList<>(2);
this.extraProperties = new HashMap<>();
this.includedExtraProperties = new HashMap<>();
this.agEntity = agEntity;
this.result = new ArrayList<>();
this.parentToChildResult = new LinkedHashMap<>();
}
public ResourceEntity(AgEntity<T> agEntity, AgRelationship incoming) {
this(agEntity);
this.incoming = incoming;
}
/**
* @since 1.12
*/
public AgEntity<T> getAgEntity() {
return agEntity;
}
public AgRelationship getIncoming() {
return incoming;
}
public Expression getQualifier() {
return qualifier;
}
/**
* Resets the qualifier for the entity to a new one.
*
* @param qualifier a new qualifier expression. Can be null.
* @since 2.7
*/
public void setQualifier(Expression qualifier) {
this.qualifier = qualifier;
}
public void andQualifier(Expression qualifier) {
if (this.qualifier == null) {
this.qualifier = qualifier;
} else {
this.qualifier = this.qualifier.andExp(qualifier);
}
}
public List<Ordering> getOrderings() {
return orderings;
}
public SelectQuery<T> getSelect() {
return select;
}
public void setSelect(SelectQuery<T> select) {
this.select = select;
}
/**
* @since 3.1
*/
public List<T> getResult() {
return result;
}
/**
* @since 3.1
*
* @param result objects
*/
public void setResult(List<T> result) {
this.result = result;
}
/**
* @since 3.1
*
* @param parentId
* @return
*/
public Object getResult(AgObjectId parentId) {
return parentToChildResult.get(parentId);
}
/**
* @since 3.1
* Stores object related to particular parent object.
* It is used for one-to-one relation between a parent and a child.
*
* @param parentId
* @param object
*/
public void setToOneResult(AgObjectId parentId, T object) {
parentToChildResult.put(parentId, object);
}
/**
* Stores result object as a List of objects. It is used for one-to-many relation between a parent and children.
*
* @param parentId
* @param object
* @since 3.1
*/
public void addToManyResult(AgObjectId parentId, T object) {
((List<T> )parentToChildResult.computeIfAbsent(parentId, k -> new ArrayList<>())).add(object);
}
/**
* @param parentId
* @param objects
* @since 3.1
*/
public void setToManyResult(AgObjectId parentId, List<T> objects) {
parentToChildResult.put(parentId, objects);
}
/**
* @since 1.12
*/
public Map<String, AgAttribute> getAttributes() {
return attributes;
}
/**
* @since 1.5
*/
public Collection<String> getDefaultProperties() {
return defaultProperties;
}
/**
* @since 1.5
*/
public boolean isDefault(String propertyName) {
return defaultProperties.contains(propertyName);
}
public Map<String, ResourceEntity<?>> getChildren() {
return children;
}
/**
* @since 1.1
*/
public ResourceEntity<?> getChild(String name) {
return children.get(name);
}
public Map<String, EntityProperty> getExtraProperties() {
return extraProperties;
}
public Map<String, EntityProperty> getIncludedExtraProperties() {
return includedExtraProperties;
}
public boolean isIdIncluded() {
return idIncluded;
}
public ResourceEntity<T> includeId(boolean include) {
this.idIncluded = include;
return this;
}
public ResourceEntity<T> includeId() {
this.idIncluded = true;
return this;
}
public ResourceEntity<T> excludeId() {
this.idIncluded = false;
return this;
}
public ResourceEntity<?> getMapBy() {
return mapBy;
}
/**
* @since 1.1
*/
public ResourceEntity<T> mapBy(ResourceEntity<?> mapBy, String mapByPath) {
this.mapByPath = mapByPath;
this.mapBy = mapBy;
return this;
}
public String getMapByPath() {
return mapByPath;
}
@Override
public String toString() {
ToStringBuilder tsb = new ToStringBuilder(this);
if (agEntity != null) {
tsb.append("name", agEntity.getName());
}
return tsb.toString();
}
public Class<T> getType() {
return agEntity.getType();
}
/**
* @since 1.20
*/
public int getFetchOffset() {
return fetchOffset;
}
/**
* @since 1.20
*/
public void setFetchOffset(int fetchOffset) {
this.fetchOffset = fetchOffset;
}
/**
* @since 1.20
*/
public int getFetchLimit() {
return fetchLimit;
}
/**
* @since 1.20
*/
public void setFetchLimit(int fetchLimit) {
this.fetchLimit = fetchLimit;
}
/**
* @since 1.20
*/
public String getApplicationBase() {
return applicationBase;
}
/**
* @since 1.20
*/
public void setApplicationBase(String applicationBase) {
this.applicationBase = applicationBase;
}
/**
* @since 1.23
*/
public boolean isFiltered() {
return filtered;
}
/**
* @since 1.23
*/
public void setFiltered(boolean filtered) {
this.filtered = filtered;
}
} |
package mondrian.olap;
import mondrian.calc.Calc;
import mondrian.calc.ExpCompiler;
import mondrian.calc.ExpCompiler.ResultStyle;
import mondrian.calc.impl.BetterExpCompiler;
import mondrian.mdx.*;
import mondrian.olap.fun.FunUtil;
import mondrian.olap.fun.ParameterFunDef;
import mondrian.olap.type.*;
import mondrian.resource.MondrianResource;
import mondrian.rolap.*;
import java.io.*;
import java.util.*;
public class Query extends QueryPart {
/**
* public-private: This must be public because it is still accessed in rolap.RolapCube
*/
public Formula[] formulas;
/**
* public-private: This must be public because it is still accessed in rolap.RolapConnection
*/
public QueryAxis[] axes;
/**
* public-private: This must be public because it is still accessed in rolap.RolapResult
*/
public QueryAxis slicerAxis;
/**
* Definitions of all parameters used in this query.
*/
private final List<Parameter> parameters = new ArrayList<Parameter>();
private final Map<String, Parameter> parametersByName =
new HashMap<String, Parameter>();
/**
* Cell properties. Not currently used.
*/
private final QueryPart[] cellProps;
/**
* Cube this query belongs to.
*/
private final Cube cube;
private final Connection connection;
public Calc[] axisCalcs;
public Calc slicerCalc;
/**
* Start time of query execution
*/
private long startTime;
/**
* Query timeout, in milliseconds
*/
private final int queryTimeout;
/**
* If true, cancel this query
*/
private boolean isCanceled;
/**
* If not <code>null</code>, this query was notified that it
* might cause an OutOfMemoryError.
*/
private String outOfMemoryMsg;
/**
* If true, query is in the middle of execution
*/
private boolean isExecuting;
/**
* Unique list of members referenced from the measures dimension.
* Will be used to determine if cross joins can be processed natively
* for virtual cubes.
*/
private Set<Member> measuresMembers;
/**
* If true, virtual cubes can be processed using native cross joins.
* It defaults to true, unless functions are applied on measures.
*/
private boolean nativeCrossJoinVirtualCube;
/**
* Used for virtual cubes. Contains the set of maps used to map the
* levels referenced in a virtual cube to the columns in the underlying
* base cubes.
*/
private Set<Map<RolapLevel, RolapStar.Column>> virtualCubeBaseCubeMaps;
/**
* Maps one of the level-to-column maps stored in
* virtualCubeBaseCubeMaps to a measure corresponding to the underlying
* cube that the level-to-column map corresponds to
*/
private Map<Map<RolapLevel, RolapStar.Column>, RolapMember> levelMapToMeasureMap;
/**
* If true, loading schema
*/
private boolean load;
/**
* How should the query be returned? Valid values are:
* ResultStyle.ITERABLE
* ResultStyle.LIST
* ResultStyle.MUTABLE_LIST
*/
private ResultStyle resultStyle = ResultStyle.ITERABLE;
/**
 * Creates a Query against a cube identified by name; the cube is resolved
 * through the connection's schema (failing if it does not exist).
 *
 * @param connection connection to execute against
 * @param formulas WITH-clause formula definitions
 * @param axes query axes
 * @param cube name of the cube to query
 * @param slicerAxis WHERE-clause axis; may be null
 * @param cellProps cell properties (currently unused)
 * @param load whether the schema is being loaded
 */
public Query(
    Connection connection,
    Formula[] formulas,
    QueryAxis[] axes,
    String cube,
    QueryAxis slicerAxis,
    QueryPart[] cellProps,
    boolean load) {
    this(connection,
        connection.getSchema().lookupCube(cube, true),
        formulas,
        axes,
        slicerAxis,
        cellProps,
        new Parameter[0],
        load);
}
/**
 * Creates a Query against a resolved Cube. Axes are normalized before the
 * slicer is attached, and the whole query is resolved at the end of
 * construction.
 *
 * @param connection connection to execute against
 * @param mdxCube cube this query belongs to
 * @param formulas WITH-clause formula definitions
 * @param axes query axes
 * @param slicerAxis WHERE-clause axis; may be null
 * @param cellProps cell properties (currently unused)
 * @param parameters parameter definitions used in this query
 * @param load whether the schema is being loaded
 */
public Query(
    Connection connection,
    Cube mdxCube,
    Formula[] formulas,
    QueryAxis[] axes,
    QueryAxis slicerAxis,
    QueryPart[] cellProps,
    Parameter[] parameters,
    boolean load) {
    this.connection = connection;
    this.cube = mdxCube;
    this.formulas = formulas;
    this.axes = axes;
    normalizeAxes();
    this.slicerAxis = slicerAxis;
    this.cellProps = cellProps;
    this.parameters.addAll(Arrays.asList(parameters));
    this.isExecuting = false;
    // The QueryTimeout property is in seconds; the field stores milliseconds.
    this.queryTimeout =
        MondrianProperties.instance().QueryTimeout.get() * 1000;
    this.measuresMembers = new HashSet<Member>();
    // assume, for now, that cross joins on virtual cubes can be
    // processed natively; as we parse the query, we'll know otherwise
    this.nativeCrossJoinVirtualCube = true;
    this.load = load;
    resolve();
}
/**
 * Adds a new formula specifying a set
 * to an existing query, then re-resolves the query so the new formula is
 * validated.
 *
 * @param names multi-part name of the set
 * @param exp expression defining the set
 */
public void addFormula(String[] names, Exp exp) {
    Formula newFormula = new Formula(names, exp);
    int oldLength = formulas.length;
    // Arrays.copyOf replaces the previous zero-length special case plus
    // manual System.arraycopy; copying an empty array is already a no-op.
    Formula[] newFormulas = Arrays.copyOf(formulas, oldLength + 1);
    newFormulas[oldLength] = newFormula;
    formulas = newFormulas;
    resolve();
}
/**
 * Adds a new formula specifying a member
 * to an existing query, then re-resolves the query so the new formula is
 * validated.
 *
 * @param names multi-part name of the member
 * @param exp expression defining the member
 * @param memberProperties properties attached to the calculated member
 */
public void addFormula(
    String[] names,
    Exp exp,
    MemberProperty[] memberProperties) {
    Formula newFormula = new Formula(names, exp, memberProperties);
    int oldLength = formulas.length;
    // Arrays.copyOf replaces the previous zero-length special case plus
    // manual System.arraycopy; copying an empty array is already a no-op.
    Formula[] newFormulas = Arrays.copyOf(formulas, oldLength + 1);
    newFormulas[oldLength] = newFormula;
    formulas = newFormulas;
    resolve();
}
public Validator createValidator() {
return new StackValidator(connection.getSchema().getFunTable());
}
public Object clone() {
return new Query(
connection,
cube,
Formula.cloneArray(formulas),
QueryAxis.cloneArray(axes),
(slicerAxis == null) ? null : (QueryAxis) slicerAxis.clone(),
cellProps,
parameters.toArray(new Parameter[parameters.size()]),
load);
}
public Query safeClone() {
return (Query) clone();
}
public Connection getConnection() {
return connection;
}
public String getQueryString() {
return toMdx();
}
/**
* Issues a cancel request on this Query object. Once the thread
* running the query detects the cancel request, the query execution will
* throw an exception. See <code>BasicQueryTest.testCancel</code> for an
* example of usage of this method.
*/
public void cancel() {
isCanceled = true;
}
void setOutOfMemory(String msg) {
outOfMemoryMsg = msg;
}
/**
* Checks if either a cancel request has been issued on the query or
* the execution time has exceeded the timeout value (if one has been
* set). Exceptions are raised if either of these two conditions are
* met. This method should be called periodically during query execution
* to ensure timely detection of these events, particularly before/after
* any potentially long running operations.
*/
public void checkCancelOrTimeout() {
if (!isExecuting) {
return;
}
if (isCanceled) {
throw MondrianResource.instance().QueryCanceled.ex();
}
if (queryTimeout > 0) {
long currTime = System.currentTimeMillis();
if ((currTime - startTime) >= queryTimeout) {
throw MondrianResource.instance().QueryTimeout.ex(
(long) queryTimeout / 1000);
}
}
if (outOfMemoryMsg != null) {
throw new MemoryLimitExceededException(outOfMemoryMsg);
}
}
/**
* Sets the start time of query execution. Used to detect timeout for
* queries.
*/
public void setQueryStartTime() {
startTime = System.currentTimeMillis();
isExecuting = true;
}
/**
* Called when query execution has completed. Once query execution has
* ended, it is not possible to cancel or timeout the query until it
* starts executing again.
*/
public void setQueryEndExecution() {
isExecuting = false;
}
private void normalizeAxes() {
for (int i = 0; i < axes.length; i++) {
AxisOrdinal correctOrdinal = AxisOrdinal.forLogicalOrdinal(i);
if (axes[i].getAxisOrdinal() != correctOrdinal) {
for (int j = i + 1; j < axes.length; j++) {
if (axes[j].getAxisOrdinal() == correctOrdinal) {
// swap axes
QueryAxis temp = axes[i];
axes[i] = axes[j];
axes[j] = temp;
break;
}
}
}
}
}
/**
* Performs type-checking and validates internal consistency of a query,
* using the default resolver.
*
* <p>This method is called automatically when a query is created; you need
* to call this method manually if you have modified the query's expression
* tree in any way.
*/
public void resolve() {
final Validator validator = createValidator();
resolve(validator); // resolve self and children
// Create a dummy result so we can use its evaluator
final Evaluator evaluator = RolapUtil.createEvaluator(this);
ExpCompiler compiler = createCompiler(evaluator, validator);
compile(compiler);
}
/**
* @return true if Query object is being accessed during schema load
* and the property to ignore invalid members is set to true
*/
public boolean ignoreInvalidMembers()
{
return load &&
MondrianProperties.instance().IgnoreInvalidMembers.get();
}
/**
* A Query's ResultStyle can only be one of the following:
* ResultStyle.ITERABLE
* ResultStyle.LIST
* ResultStyle.MUTABLE_LIST
*
* @param resultStyle
*/
public void setResultStyle(ResultStyle resultStyle) {
switch (resultStyle) {
case ITERABLE :
case LIST :
case MUTABLE_LIST:
this.resultStyle = resultStyle;
break;
default :
throw ResultStyleException.generateBadType(
new ResultStyle[] {
ResultStyle.ITERABLE,
ResultStyle.LIST,
ResultStyle.MUTABLE_LIST
},
resultStyle
);
}
}
public ResultStyle getResultStyle() {
return resultStyle;
}
/**
* Generates compiled forms of all expressions.
*
* @param compiler Compiler
*/
private void compile(ExpCompiler compiler) {
if (formulas != null) {
for (Formula formula : formulas) {
formula.compile();
}
}
if (axes != null) {
axisCalcs = new Calc[axes.length];
for (int i = 0; i < axes.length; i++) {
axisCalcs[i] = axes[i].compile(compiler,
new ResultStyle[] { resultStyle });
}
}
if (slicerAxis != null) {
slicerCalc = slicerAxis.compile(compiler,
new ResultStyle[] { resultStyle });
}
}
/**
* Performs type-checking and validates internal consistency of a query.
*
* @param validator Validator
*/
void resolve(Validator validator) {
// Before commencing validation, create all calculated members,
// calculated sets, and parameters.
if (formulas != null) {
// Resolving of formulas should be done in two parts
// because formulas might depend on each other, so all calculated
// mdx elements have to be defined during resolve.
for (Formula formula : formulas) {
formula.createElement(validator.getQuery());
}
}
// Register all parameters.
parameters.clear();
parametersByName.clear();
accept(
new MdxVisitorImpl() {
public Object visit(ParameterExpr parameterExpr) {
Parameter parameter = parameterExpr.getParameter();
if (!parameters.contains(parameter)) {
parameters.add(parameter);
parametersByName.put(parameter.getName(), parameter);
}
return null;
}
public Object visit(UnresolvedFunCall call) {
if (call.getFunName().equals("Parameter")) {
// Is there already a parameter with this name?
String parameterName =
ParameterFunDef.getParameterName(call.getArgs());
if (parametersByName.get(parameterName) != null) {
throw MondrianResource.instance().
ParameterDefinedMoreThanOnce.ex(parameterName);
}
Type type =
ParameterFunDef.getParameterType(call.getArgs());
// Create a temporary parameter. We don't know its
// type yet. The default of NULL is temporary.
Parameter parameter = new ParameterImpl(
parameterName, Literal.nullValue, null, type);
parameters.add(parameter);
parametersByName.put(parameterName, parameter);
}
return null;
}
}
);
// Validate formulas.
if (formulas != null) {
for (Formula formula : formulas) {
validator.validate(formula);
}
}
// Validate axes.
if (axes != null) {
for (QueryAxis axis : axes) {
validator.validate(axis);
}
}
if (slicerAxis != null) {
slicerAxis.validate(validator);
}
// Make sure that no dimension is used on more than one axis.
final Dimension[] dimensions = getCube().getDimensions();
for (Dimension dimension : dimensions) {
int useCount = 0;
for (int j = -1; j < axes.length; j++) {
final QueryAxis axisExp;
if (j < 0) {
if (slicerAxis == null) {
continue;
}
axisExp = slicerAxis;
} else {
axisExp = axes[j];
}
if (axisExp.getSet().getType().usesDimension(dimension,
false)) {
++useCount;
}
}
if (useCount > 1) {
throw MondrianResource.instance().DimensionInIndependentAxes.ex(
dimension.getUniqueName());
}
}
}
public void unparse(PrintWriter pw) {
if (formulas != null) {
for (int i = 0; i < formulas.length; i++) {
if (i == 0) {
pw.print("with ");
} else {
pw.print(" ");
}
formulas[i].unparse(pw);
pw.println();
}
}
pw.print("select ");
if (axes != null) {
for (int i = 0; i < axes.length; i++) {
axes[i].unparse(pw);
if (i < axes.length - 1) {
pw.println(",");
pw.print(" ");
} else {
pw.println();
}
}
}
if (cube != null) {
pw.println("from [" + cube.getName() + "]");
}
if (slicerAxis != null) {
pw.print("where ");
slicerAxis.unparse(pw);
pw.println();
}
}
public String toMdx() {
StringWriter sw = new StringWriter();
PrintWriter pw = new QueryPrintWriter(sw);
unparse(pw);
return sw.toString();
}
/** Returns the MDX query string. */
public String toString() {
resolve();
return Util.unparse(this);
}
public Object[] getChildren() {
// Chidren are axes, slicer, and formulas (in that order, to be
// consistent with replaceChild).
List<QueryPart> list = new ArrayList<QueryPart>();
for (QueryAxis axis : axes) {
list.add(axis);
}
if (slicerAxis != null) {
list.add(slicerAxis);
}
for (Formula formula : formulas) {
list.add(formula);
}
return list.toArray();
}
public QueryAxis getSlicerAxis() {
return slicerAxis;
}
public void setSlicerAxis(QueryAxis axis) {
this.slicerAxis = axis;
}
/**
* Adds a level to an axis expression.
*/
public void addLevelToAxis(AxisOrdinal axis, Level level) {
assert axis != null;
axes[axis.logicalOrdinal()].addLevel(level);
}
/**
* Returns the hierarchies in an expression.
*
* <p>If the expression's type is a dimension with several hierarchies,
* assumes that the expression yields a member of the first (default)
* hierarchy of the dimension.
*
* <p>For example, the expression
* <blockquote><code>Crossjoin(
* Hierarchize(
* Union(
* {[Time].LastSibling}, [Time].LastSibling.Children)),
* {[Measures].[Unit Sales], [Measures].[Store Cost]})</code>
* </blockquote>
*
* has type <code>{[Time.Monthly], [Measures]}</code> even though
* <code>[Time].LastSibling</code> might return a member of either
* [Time.Monthly] or [Time.Weekly].
*/
private Hierarchy[] collectHierarchies(Exp queryPart) {
Type exprType = queryPart.getType();
if (exprType instanceof SetType) {
exprType = ((SetType) exprType).getElementType();
}
if (exprType instanceof TupleType) {
final Type[] types = ((TupleType) exprType).elementTypes;
ArrayList<Hierarchy> hierarchyList = new ArrayList<Hierarchy>();
for (Type type : types) {
hierarchyList.add(getTypeHierarchy(type));
}
return hierarchyList.toArray(new Hierarchy[hierarchyList.size()]);
}
return new Hierarchy[] {getTypeHierarchy(exprType)};
}
private Hierarchy getTypeHierarchy(final Type type) {
Hierarchy hierarchy = type.getHierarchy();
if (hierarchy != null) {
return hierarchy;
}
final Dimension dimension = type.getDimension();
if (dimension != null) {
return dimension.getHierarchy();
}
return null;
}
/**
* Assigns a value to the parameter with a given name.
*
* @throws RuntimeException if there is not parameter with the given name
*/
public void setParameter(String parameterName, String value) {
// Need to resolve query before we set parameters, in order to create
// slots to store them in. (This code will go away when parameters
// belong to prepared statements.)
if (parameters.isEmpty()) {
resolve();
}
Parameter param = getSchemaReader(false).getParameter(parameterName);
if (param == null) {
throw MondrianResource.instance().UnknownParameter.ex(parameterName);
}
if (!param.isModifiable()) {
throw MondrianResource.instance().ParameterIsNotModifiable.ex(
parameterName, param.getScope().name());
}
final Exp exp = quickParse(
TypeUtil.typeToCategory(param.getType()), value, this);
param.setValue(exp);
}
private static Exp quickParse(int category, String value, Query query) {
switch (category) {
case Category.Numeric:
return Literal.create(new Double(value));
case Category.String:
return Literal.createString(value);
case Category.Member:
Member member = (Member) Util.lookup(query, Util.explode(value));
return new MemberExpr(member);
default:
throw Category.instance.badValue(category);
}
}
/**
* Swaps the x- and y- axes.
* Does nothing if the number of axes != 2.
*/
public void swapAxes() {
if (axes.length == 2) {
Exp e0 = axes[0].getSet();
boolean nonEmpty0 = axes[0].isNonEmpty();
Exp e1 = axes[1].getSet();
boolean nonEmpty1 = axes[1].isNonEmpty();
axes[1].setSet(e0);
axes[1].setNonEmpty(nonEmpty0);
axes[0].setSet(e1);
axes[0].setNonEmpty(nonEmpty1);
// showSubtotals ???
}
}
/**
* Returns the parameters defined in this query.
*/
public Parameter[] getParameters() {
return parameters.toArray(new Parameter[parameters.size()]);
}
public Cube getCube() {
return cube;
}
public SchemaReader getSchemaReader(boolean accessControlled) {
final Role role = accessControlled
? getConnection().getRole()
: null;
final SchemaReader cubeSchemaReader = cube.getSchemaReader(role);
return new QuerySchemaReader(cubeSchemaReader);
}
/**
* Looks up a member whose unique name is <code>s</code> from cache.
* If the member is not in cache, returns null.
*/
public Member lookupMemberFromCache(String s) {
// first look in defined members
for (Member member : getDefinedMembers()) {
if (Util.equalName(member.getUniqueName(), s)) {
return member;
}
}
return null;
}
/**
* Looks up a named set.
*/
private NamedSet lookupNamedSet(String name) {
for (Formula formula : formulas) {
if (!formula.isMember() &&
formula.getElement() != null &&
formula.getName().equals(name)) {
return (NamedSet) formula.getElement();
}
}
return null;
}
/**
* Returns an array of the formulas used in this query.
*/
public Formula[] getFormulas() {
return formulas;
}
/**
* Returns an array of this query's axes.
*/
public QueryAxis[] getAxes() {
return axes;
}
/**
* Remove a formula from the query. If <code>failIfUsedInQuery</code> is
* true, checks and throws an error if formula is used somewhere in the
* query.
*/
public void removeFormula(String uniqueName, boolean failIfUsedInQuery) {
Formula formula = findFormula(uniqueName);
if (failIfUsedInQuery && formula != null) {
OlapElement mdxElement = formula.getElement();
//search the query tree to see if this formula expression is used
//anywhere (on the axes or in another formula)
Walker walker = new Walker(this);
while (walker.hasMoreElements()) {
Object queryElement = walker.nextElement();
if (!queryElement.equals(mdxElement)) {
continue;
}
// mdxElement is used in the query. lets find on on which axis
// or formula
String formulaType = formula.isMember()
? MondrianResource.instance().CalculatedMember.str()
: MondrianResource.instance().CalculatedSet.str();
int i = 0;
Object parent = walker.getAncestor(i);
Object grandParent = walker.getAncestor(i+1);
while ((parent != null) && (grandParent != null)) {
if (grandParent instanceof Query) {
if (parent instanceof Axis) {
throw MondrianResource.instance().
MdxCalculatedFormulaUsedOnAxis.ex(
formulaType,
uniqueName,
((QueryAxis) parent).getAxisName());
} else if (parent instanceof Formula) {
String parentFormulaType =
((Formula) parent).isMember()
? MondrianResource.instance().CalculatedMember.str()
: MondrianResource.instance().CalculatedSet.str();
throw MondrianResource.instance().
MdxCalculatedFormulaUsedInFormula.ex(
formulaType, uniqueName, parentFormulaType,
((Formula) parent).getUniqueName());
} else {
throw MondrianResource.instance().
MdxCalculatedFormulaUsedOnSlicer.ex(
formulaType, uniqueName);
}
}
++i;
parent = walker.getAncestor(i);
grandParent = walker.getAncestor(i+1);
}
throw MondrianResource.instance().
MdxCalculatedFormulaUsedInQuery.ex(
formulaType, uniqueName, Util.unparse(this));
}
}
// remove formula from query
List<Formula> formulaList = new ArrayList<Formula>();
for (Formula formula1 : formulas) {
if (!formula1.getUniqueName().equalsIgnoreCase(uniqueName)) {
formulaList.add(formula1);
}
}
// it has been found and removed
this.formulas = formulaList.toArray(new Formula[0]);
}
/**
* Check, whether a formula can be removed from the query.
*/
public boolean canRemoveFormula(String uniqueName) {
Formula formula = findFormula(uniqueName);
if (formula == null) {
return false;
}
OlapElement mdxElement = formula.getElement();
//search the query tree to see if this formula expression is used
//anywhere (on the axes or in another formula)
Walker walker = new Walker(this);
while (walker.hasMoreElements()) {
Object queryElement = walker.nextElement();
if (!queryElement.equals(mdxElement)) {
continue;
}
return false;
}
return true;
}
/** finds calculated member or set in array of formulas */
public Formula findFormula(String uniqueName) {
for (Formula formula : formulas) {
if (formula.getUniqueName().equalsIgnoreCase(uniqueName)) {
return formula;
}
}
return null;
}
/**
* Finds formula by name and renames it to new name.
*/
public void renameFormula(String uniqueName, String newName) {
Formula formula = findFormula(uniqueName);
if (formula == null) {
throw MondrianResource.instance().MdxFormulaNotFound.ex(
"formula", uniqueName, Util.unparse(this));
}
formula.rename(newName);
}
List<Member> getDefinedMembers() {
List<Member> definedMembers = new ArrayList<Member>();
for (final Formula formula : formulas) {
if (formula.isMember() &&
formula.getElement() != null &&
getConnection().getRole().canAccess(formula.getElement())) {
definedMembers.add((Member) formula.getElement());
}
}
return definedMembers;
}
/**
* Finds axis by index and sets flag to show empty cells on that axis.
*/
public void setAxisShowEmptyCells(int axis, boolean showEmpty) {
if (axis >= axes.length) {
throw MondrianResource.instance().MdxAxisShowSubtotalsNotSupported.
ex(axis);
}
axes[axis].setNonEmpty(!showEmpty);
}
/**
* Returns <code>Hierarchy[]</code> used on <code>axis</code>. It calls
* {@link #collectHierarchies}.
*/
public Hierarchy[] getMdxHierarchiesOnAxis(AxisOrdinal axis) {
if (axis.logicalOrdinal() >= axes.length) {
throw MondrianResource.instance().MdxAxisShowSubtotalsNotSupported.
ex(axis.logicalOrdinal());
}
QueryAxis queryAxis = (axis == AxisOrdinal.SLICER) ?
slicerAxis :
axes[axis.logicalOrdinal()];
return collectHierarchies(queryAxis.getSet());
}
public Calc compileExpression(Exp exp, boolean scalar) {
Evaluator evaluator = RolapEvaluator.create(this);
final Validator validator = createValidator();
final ExpCompiler compiler = createCompiler(evaluator, validator);
Calc calc = (scalar)
? compiler.compileScalar(exp, false)
: compiler.compile(exp);
return calc;
}
private ExpCompiler createCompiler(
Evaluator evaluator, final Validator validator) {
ExpCompiler compiler = new BetterExpCompiler(evaluator, validator);
((BetterExpCompiler) compiler).setAcceptableResultStyles(
new ResultStyle[] { resultStyle });
final int expDeps = MondrianProperties.instance().TestExpDependencies.get();
if (expDeps > 0) {
compiler = RolapUtil.createDependencyTestingCompiler(compiler);
}
return compiler;
}
/**
* Keeps track of references to members of the measures dimension
*
* @param olapElement potential measure member
*/
public void addMeasuresMembers(OlapElement olapElement)
{
if (olapElement instanceof Member) {
Member member = (Member) olapElement;
if (member.getDimension().getOrdinal(getCube()) == 0) {
measuresMembers.add(member);
}
}
}
/**
* @return set of members from the measures dimension referenced within
* this query
*/
public Set<Member> getMeasuresMembers()
{
return measuresMembers;
}
/**
* Indicates that the query cannot use native cross joins to process
* this virtual cube
*/
public void setVirtualCubeNonNativeCrossJoin()
{
nativeCrossJoinVirtualCube = false;
}
/**
* @return true if the query can use native cross joins on a virtual
* cube
*/
public boolean nativeCrossJoinVirtualCube()
{
return nativeCrossJoinVirtualCube;
}
/**
* Saves away the level to column maps for the underlying cubes that make
* up the virtual cube referenced in this query
*
* @param maps the set of maps to be saved
*/
public void setVirtualCubeBaseCubeMaps(Set<Map<RolapLevel, RolapStar.Column>> maps)
{
virtualCubeBaseCubeMaps = maps;
}
/**
* @return the set of level to column maps associated with the virtual
* cube this query references
*/
public Set<Map<RolapLevel, RolapStar.Column>> getVirtualCubeBaseCubeMaps()
{
return virtualCubeBaseCubeMaps;
}
/**
* Saves away the map that maps a level-to-column map to a measure
*
* @param map map to be saved
*/
public void setLevelMapToMeasureMap(
Map<Map<RolapLevel, RolapStar.Column>, RolapMember> map)
{
levelMapToMeasureMap = map;
}
/**
* @return the level-to-column-to-measure map
*/
public Map<Map<RolapLevel, RolapStar.Column>, RolapMember> getLevelMapToMeasureMap()
{
return levelMapToMeasureMap;
}
public Object accept(MdxVisitor visitor) {
Object o = visitor.visit(this);
// visit formulas
for (Formula formula : formulas) {
formula.accept(visitor);
}
// visit axes
for (QueryAxis axis : axes) {
axis.accept(visitor);
}
if (slicerAxis != null) {
slicerAxis.accept(visitor);
}
return o;
}
/**
* Default implementation of {@link Validator}.
*
* <p>Uses a stack to help us guess the type of our parent expression
* before we've completely resolved our children -- necessary,
* unfortunately, when figuring out whether the "*" operator denotes
* multiplication or crossjoin.
*
* <p>Keeps track of which nodes have already been resolved, so we don't
* try to resolve nodes which have already been resolved. (That would not
* be wrong, but can cause resolution to be an <code>O(2^N)</code>
* operation.)
*/
private class StackValidator implements Validator {
private final Stack<QueryPart> stack = new Stack<QueryPart>();
private final FunTable funTable;
private final Map<QueryPart, QueryPart> resolvedNodes =
new HashMap<QueryPart, QueryPart>();
private final QueryPart placeHolder = Literal.zero;
/**
* Creates a StackValidator.
*
* @pre funTable != null
*/
public StackValidator(FunTable funTable) {
Util.assertPrecondition(funTable != null, "funTable != null");
this.funTable = funTable;
}
public Query getQuery() {
return Query.this;
}
public Exp validate(Exp exp, boolean scalar) {
Exp resolved;
try {
resolved = (Exp) resolvedNodes.get(exp);
} catch (ClassCastException e) {
// A classcast exception will occur if there is a String
// placeholder in the map. This is an internal error -- should
// not occur for any query, valid or invalid.
throw Util.newInternal(
e,
"Infinite recursion encountered while validating '" +
Util.unparse(exp) + "'");
}
if (resolved == null) {
try {
stack.push((QueryPart) exp);
// To prevent recursion, put in a placeholder while we're
// resolving.
resolvedNodes.put((QueryPart) exp, placeHolder);
resolved = exp.accept(this);
Util.assertTrue(resolved != null);
resolvedNodes.put((QueryPart) exp, (QueryPart) resolved);
} finally {
stack.pop();
}
}
if (scalar) {
final Type type = resolved.getType();
if (!TypeUtil.canEvaluate(type)) {
String exprString = Util.unparse(resolved);
throw MondrianResource.instance().MdxMemberExpIsSet.ex(exprString);
}
}
return resolved;
}
public void validate(ParameterExpr parameterExpr) {
ParameterExpr resolved =
(ParameterExpr) resolvedNodes.get(parameterExpr);
if (resolved != null) {
return; // already resolved
}
try {
stack.push(parameterExpr);
resolvedNodes.put(parameterExpr, placeHolder);
resolved = (ParameterExpr) parameterExpr.accept(this);
assert resolved != null;
resolvedNodes.put(parameterExpr, resolved);
} finally {
stack.pop();
}
}
public void validate(MemberProperty memberProperty) {
MemberProperty resolved =
(MemberProperty) resolvedNodes.get(memberProperty);
if (resolved != null) {
return; // already resolved
}
try {
stack.push(memberProperty);
resolvedNodes.put(memberProperty, placeHolder);
memberProperty.resolve(this);
resolvedNodes.put(memberProperty, memberProperty);
} finally {
stack.pop();
}
}
public void validate(QueryAxis axis) {
final QueryAxis resolved = (QueryAxis) resolvedNodes.get(axis);
if (resolved != null) {
return; // already resolved
}
try {
stack.push(axis);
resolvedNodes.put(axis, placeHolder);
axis.resolve(this);
resolvedNodes.put(axis, axis);
} finally {
stack.pop();
}
}
public void validate(Formula formula) {
final Formula resolved = (Formula) resolvedNodes.get(formula);
if (resolved != null) {
return; // already resolved
}
try {
stack.push(formula);
resolvedNodes.put(formula, placeHolder);
formula.accept(this);
resolvedNodes.put(formula, formula);
} finally {
stack.pop();
}
}
public boolean canConvert(Exp fromExp, int to, int[] conversionCount) {
return FunUtil.canConvert(fromExp, to, conversionCount);
}
public boolean requiresExpression() {
return requiresExpression(stack.size() - 1);
}
private boolean requiresExpression(int n) {
if (n < 1) {
return false;
}
final Object parent = stack.get(n - 1);
if (parent instanceof Formula) {
return ((Formula) parent).isMember();
} else if (parent instanceof ResolvedFunCall) {
final ResolvedFunCall funCall = (ResolvedFunCall) parent;
if (funCall.getFunDef().getSyntax() == Syntax.Parentheses) {
return requiresExpression(n - 1);
} else {
int k = whichArg(funCall, (Exp) stack.get(n));
if (k < 0) {
// Arguments of call have mutated since call was placed
// on stack. Presumably the call has already been
// resolved correctly, so the answer we give here is
// irrelevant.
return false;
}
final FunDef funDef = funCall.getFunDef();
final int[] parameterTypes = funDef.getParameterCategories();
return parameterTypes[k] != Category.Set;
}
} else if (parent instanceof UnresolvedFunCall) {
final UnresolvedFunCall funCall = (UnresolvedFunCall) parent;
if (funCall.getSyntax() == Syntax.Parentheses) {
return requiresExpression(n - 1);
} else {
int k = whichArg(funCall, (Exp) stack.get(n));
if (k < 0) {
// Arguments of call have mutated since call was placed
// on stack. Presumably the call has already been
// resolved correctly, so the answer we give here is
// irrelevant.
return false;
}
return funTable.requiresExpression(funCall, k, this);
}
} else {
return false;
}
}
public FunTable getFunTable() {
return funTable;
}
public Parameter createOrLookupParam(
boolean definition,
String name,
Type type,
Exp defaultExp,
String description)
{
final SchemaReader schemaReader = getSchemaReader(false);
Parameter param = schemaReader.getParameter(name);
if (definition) {
if (param != null) {
if (param.getScope() == Parameter.Scope.Statement) {
ParameterImpl paramImpl = (ParameterImpl) param;
paramImpl.setDescription(description);
paramImpl.setDefaultExp(defaultExp);
paramImpl.setType(type);
}
return param;
}
param = new ParameterImpl(
name,
defaultExp, description, type);
// Append it to the list of known parameters.
parameters.add(param);
parametersByName.put(name, param);
return param;
} else {
if (param != null) {
return param;
}
throw MondrianResource.instance().UnknownParameter.ex(name);
}
}
private int whichArg(final FunCall node, final Exp arg) {
final Exp[] children = node.getArgs();
for (int i = 0; i < children.length; i++) {
if (children[i] == arg) {
return i;
}
}
return -1;
}
}
/**
* Source of metadata within the scope of a query.
*
* <p>Note especially that {@link #getCalculatedMember(String[])}
* returns the calculated members defined in this query.
*/
private class QuerySchemaReader extends DelegatingSchemaReader {
public QuerySchemaReader(SchemaReader cubeSchemaReader) {
super(cubeSchemaReader);
}
public Member getMemberByUniqueName(
String[] uniqueNameParts,
boolean failIfNotFound)
{
return getMemberByUniqueName(
uniqueNameParts, failIfNotFound, MatchType.EXACT);
}
public Member getMemberByUniqueName(
String[] uniqueNameParts,
boolean failIfNotFound,
MatchType matchType)
{
final String uniqueName = Util.implode(uniqueNameParts);
Member member = lookupMemberFromCache(uniqueName);
if (member == null) {
// Not a calculated member in the query, so go to the cube.
member = schemaReader.getMemberByUniqueName(
uniqueNameParts, failIfNotFound, matchType);
}
if (!failIfNotFound && member == null) {
return null;
}
if (getRole().canAccess(member)) {
return member;
} else {
return null;
}
}
public Member[] getLevelMembers(
Level level, boolean includeCalculated) {
Member[] members = super.getLevelMembers(level, false);
if (includeCalculated) {
members = Util.addLevelCalculatedMembers(this, level, members);
}
return members;
}
public Member getCalculatedMember(String[] nameParts) {
final String uniqueName = Util.implode(nameParts);
return lookupMemberFromCache(uniqueName);
}
public List<Member> getCalculatedMembers(Hierarchy hierarchy) {
List<Member> result = new ArrayList<Member>();
// Add calculated members in the cube.
final List<Member> calculatedMembers =
super.getCalculatedMembers(hierarchy);
result.addAll(calculatedMembers);
// Add calculated members defined in the query.
for (Member member : getDefinedMembers()) {
if (member.getHierarchy().equals(hierarchy)) {
result.add(member);
}
}
return result;
}
public List<Member> getCalculatedMembers(Level level) {
List<Member> hierarchyMembers =
getCalculatedMembers(level.getHierarchy());
List<Member> result = new ArrayList<Member>();
for (Member member : hierarchyMembers) {
if (member.getLevel().equals(level)) {
result.add(member);
}
}
return result;
}
public List<Member> getCalculatedMembers() {
return getDefinedMembers();
}
public OlapElement getElementChild(OlapElement parent, String s)
{
return getElementChild(parent, s, MatchType.EXACT);
}
public OlapElement getElementChild(
OlapElement parent, String s, MatchType matchType)
{
// first look in cube
OlapElement mdxElement =
schemaReader.getElementChild(parent, s, matchType);
if (mdxElement != null) {
return mdxElement;
}
// then look in defined members (removed sf#1084651)
// then in defined sets
for (Formula formula : formulas) {
if (formula.isMember()) {
continue; // have already done these
}
if (Util.equalName(formula.getNames()[0], s)) {
return formula.getNamedSet();
}
}
return mdxElement;
}
public OlapElement lookupCompound(
OlapElement parent,
String[] names,
boolean failIfNotFound,
int category)
{
return lookupCompound(
parent, names, failIfNotFound, category, MatchType.EXACT);
}
public OlapElement lookupCompound(
OlapElement parent,
String[] names,
boolean failIfNotFound,
int category,
MatchType matchType)
{
// First look to ourselves.
switch (category) {
case Category.Unknown:
case Category.Member:
if (parent == cube) {
final Member calculatedMember = getCalculatedMember(names);
if (calculatedMember != null) {
return calculatedMember;
}
}
}
switch (category) {
case Category.Unknown:
case Category.Set:
if (parent == cube) {
final NamedSet namedSet = getNamedSet(names);
if (namedSet != null) {
return namedSet;
}
}
}
// Then delegate to the next reader.
OlapElement olapElement = super.lookupCompound(
parent, names, failIfNotFound, category, matchType);
if (olapElement instanceof Member) {
Member member = (Member) olapElement;
final Formula formula = (Formula)
member.getPropertyValue(Property.FORMULA.name);
if (formula != null) {
// This is a calculated member defined against the cube.
// Create a free-standing formula using the same
// expression, then use the member defined in that formula.
final Formula formulaClone = (Formula) formula.clone();
formulaClone.createElement(Query.this);
formulaClone.accept(createValidator());
olapElement = formulaClone.getMdxMember();
}
}
return olapElement;
}
public NamedSet getNamedSet(String[] nameParts) {
if (nameParts.length != 1) {
return null;
}
return lookupNamedSet(nameParts[0]);
}
public Parameter getParameter(String name) {
// Look for a parameter defined in the query.
for (Parameter parameter : parameters) {
if (parameter.getName().equals(name)) {
return parameter;
}
}
// Look for a parameter defined in this connection.
if (Util.lookup(RolapConnectionProperties.class, name) != null) {
Object value = connection.getProperty(name);
// TODO: Don't assume it's a string.
// TODO: Create expression which will get the value from the
// connection at the time the query is executed.
Literal defaultValue =
Literal.createString(String.valueOf(value));
return new ConnectionParameterImpl(name, defaultValue);
}
return super.getParameter(name);
}
}
private static class ConnectionParameterImpl
extends ParameterImpl
{
public ConnectionParameterImpl(String name, Literal defaultValue) {
super(name, defaultValue, "Connection property", new StringType());
}
public Scope getScope() {
return Scope.Connection;
}
public void setValue(Object value) {
throw MondrianResource.instance().ParameterIsNotModifiable.ex(
getName(), getScope().name());
}
}
}
// End Query.java |
package com.sometrik.framework;
import java.util.HashMap;
import com.android.trivialdrivesample.util.IabException;
import com.android.trivialdrivesample.util.IabHelper;
import com.android.trivialdrivesample.util.IabHelper.IabAsyncInProgressException;
import com.android.trivialdrivesample.util.IabResult;
import com.android.trivialdrivesample.util.Inventory;
import android.R.bool;
import android.app.Activity;
import android.app.ActivityManager;
import android.app.AlertDialog;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ConfigurationInfo;
import android.content.res.AssetManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.util.DisplayMetrics;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.Window;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
public class FrameWork extends Activity implements NativeCommandHandler {
private RelativeLayout mainView;
private SharedPreferences prefs;
private SharedPreferences.Editor editor;
private FrameWork frameWork;
private double updateTimer = 0;
private IabHelper purchaseHelper;
private static final int RESULT_SETTINGS = 1;
private Inventory inventory;
private DisplayMetrics displayMetrics;
private boolean drawMode = false;
private Settings settings;
private float screenHeight;
private float screenWidth;
public Handler mainHandler;
private Intent dialogIntent;
private Bitmap picture;
private AlertDialog.Builder builder;
private AlertDialog alert;
private float windowYcoords;
public static HashMap<Integer, NativeCommandHandler> views = new HashMap<Integer, NativeCommandHandler>();
private int appId = 0;
private int currentView = 0;
public native void endModal(double timestamp, int value, byte[] textValue);
public native void textChangedEvent(double timestamp, int id, String text);
public native void intChangedEvent(double timestamp, int id, int changedInt);
public native void menuPressed(double timestamp, int viewId);
public native void keyPressed(double timestamp, int keyId, int viewId);
public native void touchEvent(int viewId, int mode, int fingerIndex, long time, float x, float y);
public native void onInit(AssetManager assetManager, float xSize, float ySize, float displayScale);
public native void nativeSetSurface(Surface surface, int surfaceId, int gl_version);
public native void nativeSurfaceDestroyed(double timestamp, int surfaceId, int gl_version);
public native void nativeOnResume(double timestamp, int appId);
public native void nativeOnPause(double timestamp, int appId);
public native void nativeOnStop(double timestamp, int appId);
public native void nativeOnStart(double timestamp, int appId);
public native void nativeOnDestroy(double timestamp, int appId);
public static native void onPurchaseEvent(double purchaseTime, String orderId, Boolean newPurchase);
public static native void onResize(double timestamp, float width, float height, int viewId);
public static native void onUpdate(double timestamp, int viewId);
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// You can disable status bar with this
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
LinearLayout linear = new LinearLayout(this);
linear.setId(-1);
// Init for screen settings
setupDisplayMetrics();
// Set up classes
settings = new Settings(this);
mainHandler = new Handler() {
public void handleMessage(Message msg) {
NativeCommand command = (NativeCommand) msg.obj;
command.apply(FrameWork.views.get(command.getInternalId()));
}
};
initNative();
}
public Boolean initializePurchaseHelper(String key, IabHelper.OnIabSetupFinishedListener listener) {
// Get PurchaseHelper. Requires App public key
purchaseHelper = new IabHelper(this, key);
purchaseHelper.startSetup(listener);
return true;
}
public Inventory getPurchaseHelperInventory() {
System.out.println("about to query purchaseHelper inventory");
try {
inventory = purchaseHelper.queryInventory();
return inventory;
} catch (IabException e) {
System.out.println("Exception getting inventory with message: " + e.getMessage());
e.printStackTrace();
}
return null;
}
private void initNative() {
System.out.println("Display scale: " + displayMetrics.scaledDensity);
float xSize = displayMetrics.widthPixels / displayMetrics.scaledDensity;
float ySize = displayMetrics.heightPixels / displayMetrics.scaledDensity;
onInit(getAssets(), xSize, ySize, displayMetrics.scaledDensity);
}
// Get screen settings
public DisplayMetrics setupDisplayMetrics() {
displayMetrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
screenHeight = displayMetrics.heightPixels;
screenWidth = displayMetrics.widthPixels;
return displayMetrics;
}
public void setSharedPreferences(String textValue){
prefs = getSharedPreferences(textValue, Context.MODE_PRIVATE);
editor = prefs.edit();
}
public SharedPreferences.Editor getPreferencesEditor(){ return editor; }
public static void addToViewList(NativeCommandHandler view){
System.out.println(view.getElementId() + " added to view list");
views.put(view.getElementId(), view);
}
public void launchBrowser(String url) {
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
startActivity(browserIntent);
}
public void setCurrentView(View view) {
currentView = view.getId();
setContentView(view);
}
public int getCurrentViewId() {
return currentView;
}
private void createFormView(int id){
FWLayout layout = new FWLayout(this);
layout.setId(id);
views.put(id, layout);
}
public NativeSurface createNativeOpenGLView(final int id) {
final ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
final int gl_version = configurationInfo.reqGlEsVersion;
System.out.println("about to create native surface. gl_version: " + gl_version);
NativeSurface surfaceView = new NativeSurface(this);
surfaceView.setId(id);
surfaceView.setLayoutParams(new FrameLayout.LayoutParams((int)screenWidth, (int)screenHeight));
surfaceView.setOnTouchListener(new MyOnTouchListener(this, id));
SurfaceHolder holder = surfaceView.getHolder();
holder.setFixedSize((int)screenWidth, (int)screenHeight);
holder.addCallback(new Callback() {
public void surfaceDestroyed(SurfaceHolder holder) {
System.out.println("surfaceDestroyed");
nativeSurfaceDestroyed(System.currentTimeMillis() / 1000.0, id, gl_version);
}
public void surfaceCreated(SurfaceHolder holder) { }
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
System.out.println("setting native surface");
nativeSetSurface(holder.getSurface(), id, gl_version);
System.out.println("native surface has been set");
}
});
System.out.println("...");
views.put(id, surfaceView);
if (currentView == 0){
System.out.println("no current view set. showing created surfaceView");
//Set value shows view
surfaceView.setValue(1);
}
System.out.println("native surface created");
return surfaceView;
}
// TODO: Add icon and sound
public void createNotification(String title, String text) {
System.out.println("Creating notification");
Intent intent = new Intent(this, FrameWork.class);
PendingIntent pIntent = PendingIntent.getActivity(this, (int) System.currentTimeMillis(), intent, 0);
Notification.Builder builder = new Notification.Builder(this);
builder.setContentTitle(title);
builder.setContentText(text);
// builder.setSmallIcon(R.drawable.picture);
builder.setContentIntent(pIntent);
builder.setAutoCancel(true);
Notification notif = builder.build();
NotificationManager notificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
notificationManager.notify(0, notif);
}
//Code to show user preferences on screen. Might be useful later
private void showUserSettings() {
// setContentView(R.layout.activity_main);
System.out.println("showSettings called");
SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(this);
StringBuilder builder = new StringBuilder();
builder.append("\n Username: " + sharedPrefs.getString("prefUsername", "NULL"));
builder.append("\n Send report:" + sharedPrefs.getBoolean("prefSendReport", false));
builder.append("\n Sync Frequency: " + sharedPrefs.getString("prefSyncFrequency", "NULL"));
// TextView settingsTextView = (TextView) findViewById(R.id.textUserSettings);
// settingsTextView.setText(builder.toString());
}
private static PointF touchScreenStartPtArr[] = new PointF[10];
//Screen touchevent listener. Will send information to MyGLSurfaceView messagehandler
private class MyOnTouchListener implements OnTouchListener {
FrameWork frameWork;
int viewId;
public MyOnTouchListener(FrameWork frameWork, int viewId) {
this.frameWork = frameWork;
this.viewId = viewId;
}
public void onClick(View v) {
System.out.println("Click happened");
}
@Override
public boolean onTouch(View v, MotionEvent event) {
// On touchesBegin(), touchesEnded(), touchesMoved(), Different
// fingers (pointerId)
Message msg;
int[] intArray;
int action = event.getAction() & MotionEvent.ACTION_MASK;
int pointerIndex = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
int fingerId = event.getPointerId(pointerIndex);
switch (action) {
//Touch event of screen touch-down for the first finger
case MotionEvent.ACTION_DOWN:
// System.out.println("Liike alkoi: " + event.getX() + " " + event.getY() + " - id: " + event.getActionIndex() + " time: " + System.currentTimeMillis());
touchEvent(viewId, 1, event.getActionIndex(), System.currentTimeMillis(), (int) event.getX(), (int) (event.getRawY() + windowYcoords));
break;
//Touch event of screen touch-down after the first touch
case MotionEvent.ACTION_POINTER_DOWN:
// System.out.println("Liike alkoi: " + event.getX() + " " + event.getY() + " - id: " + event.getActionIndex());
touchEvent(viewId, 1, event.getActionIndex(), System.currentTimeMillis(), (int) event.getX(), (int) (event.getRawY() + windowYcoords));
break;
//Touch event of finger moving
case MotionEvent.ACTION_MOVE:
int pointerCount = event.getPointerCount();
for (int i = 0; i < pointerCount; i++) {
pointerIndex = i;
int pointerId = event.getPointerId(pointerIndex);
System.out.println("finger move. FingerId: " + pointerId);
touchEvent(viewId, 2, pointerId, System.currentTimeMillis(), (int) event.getX(), (int) (event.getRawY() + windowYcoords));
}
break;
//touch event of first finger being removed from the screen
case MotionEvent.ACTION_UP:
//touch event of fingers other than the first leaving the screen
case MotionEvent.ACTION_POINTER_UP:
touchEvent(viewId, 3, event.getActionIndex(), System.currentTimeMillis(), (int) event.getX(), (int) (event.getRawY() + windowYcoords));
break;
}
return true;
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
System.out.println("onCreateOptionsMenu");
// menuPressed(System.currentTimeMillis() / 1000.0);
// getMenuInflater().inflate(R.menu.settings, menu);
return true;
}
@Override
public boolean onKeyDown(int keycode, KeyEvent e) {
switch (keycode) {
case KeyEvent.KEYCODE_MENU:
System.out.println("KeyEvent. KeyCode: " + keycode + " ViewId: " + findViewById(android.R.id.content).getRootView().getId());
menuPressed(System.currentTimeMillis() / 1000.0, currentView);
return true;
default:
System.out.println("KeyEvent. KeyCode: " + keycode + " ViewId: " + findViewById(android.R.id.content).getRootView().getId());
keyPressed(System.currentTimeMillis() / 1000.0, e.getKeyCode(), currentView);
break;
}
return super.onKeyDown(keycode, e);
}
private void createOptionsDialog(final int[] idArray, String[] names) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Options Menu");
builder.setItems(names, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
System.out.println("item selected: " + item);
System.out.println("item id: " + idArray[item]);
optionSelected(idArray[item]);
}
});
AlertDialog alert = builder.create();
alert.show();
}
//Called after option was selected from ActionSheet. Currently creates settings view
private void optionSelected(int id) {
}
//Listener for built in menu options. Propably removable
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
// case R.id.menu_settings:
// getFragmentManager().beginTransaction().replace(android.R.id.content, new MyPreferenceFragment()).commit();
// break;
// case 1:
// startActivity(new Intent(this, Settings.class));
// break;
}
return true;
}
public float getScreenWidth(){
return screenWidth;
}
public void setAppId(int id){
this.appId = id;
}
public boolean getDrawMode(){ return drawMode; }
public void disableDraw(){ drawMode = false; }
// returns database path
public String getDBPath(String dbName) {
System.out.println("getting DBPath _ db: " + dbName + " Path: " + String.valueOf(getDatabasePath(dbName)));
return String.valueOf(getDatabasePath(dbName));
}
public static void sendMessage(FrameWork frameWork, NativeCommand command) {
Message msg = Message.obtain(null, 1, command);
frameWork.mainHandler.sendMessage(msg);
}
public void addToPrefs(String key, String value){
editor.putString(key, value);
editor.apply();
}
public String getFromPrefs(String key){
return prefs.getString(key, "");
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case RESULT_SETTINGS:
// showUserSettings();
break;
}
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
System.out.println("Orientation conf portrait");
} else if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
System.out.println("Orientation conf landscape");
}
}
@Override
public void onSaveInstanceState(Bundle savedInstanceState) {
// Save the user's current game state
// savedInstanceState.putInt(STATE_SCORE, mCurrentScore);
// savedInstanceState.putInt(STATE_LEVEL, mCurrentLevel);
// Always call the superclass so it can save the view hierarchy state
super.onSaveInstanceState(savedInstanceState);
}
public void onRestoreInstanceState(Bundle savedInstanceState) {
// Always call the superclass so it can restore the view hierarchy
super.onRestoreInstanceState(savedInstanceState);
// Restore state members from saved instance
// mCurrentScore = savedInstanceState.getInt(STATE_SCORE);
// mCurrentLevel = savedInstanceState.getInt(STATE_LEVEL);
}
@Override
public void onResume(){
super.onResume();
nativeOnResume(System.currentTimeMillis() / 1000.0, appId);
}
@Override
public void onPause(){
super.onPause();
nativeOnPause(System.currentTimeMillis() / 1000.0, appId);
}
@Override
public void onStop(){
super.onStop();
nativeOnStop(System.currentTimeMillis() / 1000.0, appId);
}
@Override
public void onStart(){
super.onStart();
nativeOnStart(System.currentTimeMillis() / 1000.0, appId);
}
@Override
public void onDestroy(){
super.onDestroy();
if (purchaseHelper != null){
try {
purchaseHelper.dispose();
} catch (IabAsyncInProgressException e) {
e.printStackTrace();
System.out.println("Error in disposing purchaseHelper with message: " + e.getMessage());
}
}
purchaseHelper = null;
nativeOnDestroy(System.currentTimeMillis() / 1000.0, appId);
}
public IabHelper getPurchaseHelper(){
return purchaseHelper;
}
//Load JNI. Framework references to make file.
static {
System.loadLibrary("framework");
}
@Override
public int getElementId() {
return appId;
}
@Override
public void addChild(View view) {
System.out.println("FrameWork couldn't handle addChild");
}
@Override
public void addOption(int optionId, String text) {
System.out.println("FrameWork couldn't handle addOption");
}
@Override
public void removeChild(int id) {
System.out.println("FrameWork couldn't handle addOption");
}
@Override
public void setValue(String v) {
System.out.println("FrameWork couldn't handle addOption");
}
@Override
public void setValue(int v) {
System.out.println("FrameWork couldn't handle addOption");
}
@Override
public void setEnabled(Boolean enabled) {
System.out.println("FrameWork couldn't handle addOption");
}
} |
package dynamake.models;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import dynamake.commands.CommandState;
import dynamake.commands.MappableForwardable;
import dynamake.commands.PendingCommandState;
import dynamake.commands.SetPropertyCommand;
import dynamake.transcription.Collector;
import dynamake.transcription.ExPendingCommandFactory2;
import dynamake.transcription.Execution;
import dynamake.transcription.SimpleExPendingCommandFactory;
import dynamake.transcription.Trigger;
/**
 * A serializable snapshot of a {@link Model}: its serialized base state,
 * its origin commands, its creation commands, and (optionally) its local
 * history. Supports mapping to a new reference location and re-applying
 * ("unwrapping") onto a freshly deserialized base.
 */
public class RestorableModel implements Serializable {
    private static final long serialVersionUID = 1L;
    // Property keys under which origin/creation command lists are stored on a model.
    public static String PROPERTY_ORIGINS = "Origins";
    public static String PROPERTY_CREATION = "Creation";
    // Java serialization of the model's base (see wrap/unwrapBase).
    protected byte[] modelBaseSerialization;
    // Origins must guarantee to not require mapping to new references
    protected List<CommandState<Model>> modelOrigins;
    // Commands that created/changed the model after its origins; may be null.
    protected List<CommandState<Model>> modelCreation;
    // Optional clone of the model's local history (null unless requested).
    protected MappableForwardable modelHistory;
    /**
     * Captures the given model into a new RestorableModel.
     *
     * @param includeLocalHistory whether to also clone the model's history
     */
    public static RestorableModel wrap(Model model, boolean includeLocalHistory) {
        RestorableModel wrapper = new RestorableModel();
        wrap(wrapper, model, includeLocalHistory);
        return wrapper;
    }
    /** Fills {@code wrapper} with the serialized base, origins, creation and history of {@code model}. */
    protected static void wrap(RestorableModel wrapper, Model model, boolean includeLocalHistory) {
        MappableForwardable modelHistory = null;
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try {
            ObjectOutputStream out = new ObjectOutputStream(bos);
            Model modelBase = model.cloneBase();
            if(includeLocalHistory)
                modelHistory = model.cloneHistory(includeLocalHistory);
            out.writeObject(modelBase);
            out.close();
        } catch (IOException e) {
            // NOTE(review): serialization failure leaves an empty/partial
            // byte array behind; only the stack trace records the problem.
            e.printStackTrace();
        }
        byte[] modelBaseSerialization = bos.toByteArray();
        @SuppressWarnings("unchecked")
        List<CommandState<Model>> modelOrigins = new ArrayList<CommandState<Model>>((List<CommandState<Model>>)model.getProperty(RestorableModel.PROPERTY_ORIGINS));
        @SuppressWarnings("unchecked")
        List<CommandState<Model>> modelCreation1 = (List<CommandState<Model>>)model.getProperty(RestorableModel.PROPERTY_CREATION);
        // Creation is optional: a model with no recorded creation keeps null.
        List<CommandState<Model>> modelCreation = modelCreation1 != null ? new ArrayList<CommandState<Model>>(modelCreation1) : null;
        wrapper.modelBaseSerialization = modelBaseSerialization;
        wrapper.modelOrigins = modelOrigins;
        wrapper.modelCreation = modelCreation;
        wrapper.modelHistory = modelHistory;
    }
    protected RestorableModel(byte[] modelBaseSerialization, List<CommandState<Model>> modelOrigins, List<CommandState<Model>> modelCreation, MappableForwardable modelHistory) {
        this.modelBaseSerialization = modelBaseSerialization;
        this.modelOrigins = modelOrigins;
        this.modelCreation = modelCreation;
        this.modelHistory = modelHistory;
    }
    protected RestorableModel(byte[] modelBaseSerialization, List<CommandState<Model>> modelOrigins) {
        this.modelBaseSerialization = modelBaseSerialization;
        this.modelOrigins = modelOrigins;
    }
    protected RestorableModel() { }
    /**
     * Returns a copy whose creation commands and history are re-targeted from
     * {@code sourceReference} to {@code targetReference}. Origins are shared
     * as-is (they are guaranteed not to need mapping; see field comment).
     */
    public RestorableModel mapToReferenceLocation(Model sourceReference, Model targetReference) {
        RestorableModel mapped = createRestorableModel(modelBaseSerialization, modelOrigins);
        mapToReferenceLocation(mapped, sourceReference, targetReference);
        return mapped;
    }
    protected void mapToReferenceLocation(RestorableModel mapped, Model sourceReference, Model targetReference) {
        if(modelCreation != null) {
            mapped.modelCreation = new ArrayList<CommandState<Model>>();
            for(CommandState<Model> modelCreationPart: modelCreation) {
                CommandState<Model> newModelCreationPart = modelCreationPart.mapToReferenceLocation(sourceReference, targetReference);
                mapped.modelCreation.add(newModelCreationPart);
            }
        }
        if(modelHistory != null)
            mapped.modelHistory = modelHistory.mapToReferenceLocation(sourceReference, targetReference);
        // Subclass hook.
        afterMapToReferenceLocation(mapped, sourceReference, targetReference);
    }
    /** Returns a copy prepared for forwarding (creation and history forwarded). */
    public RestorableModel forForwarding() {
        RestorableModel mapped = createRestorableModel(modelBaseSerialization, modelOrigins);
        forForwarding(mapped);
        return mapped;
    }
    protected void forForwarding(RestorableModel mapped) {
        if(modelCreation != null) {
            mapped.modelCreation = new ArrayList<CommandState<Model>>();
            for(CommandState<Model> modelCreationPart: modelCreation) {
                CommandState<Model> newModelCreationPart = modelCreationPart.forForwarding();
                mapped.modelCreation.add(newModelCreationPart);
            }
        }
        if(modelHistory != null)
            mapped.modelHistory = modelHistory.forForwarding();
        // Subclass hook.
        afterForForwarding(mapped);
    }
    // Factory method so subclasses can substitute their own type in copies.
    protected RestorableModel createRestorableModel(byte[] modelBaseSerialization, List<CommandState<Model>> modelOrigins) {
        return new RestorableModel(modelBaseSerialization, modelOrigins);
    }
    /**
     * Deserializes and returns the base model.
     * NOTE(review): propCtx/propDistance/collector are accepted but unused here.
     */
    public Model unwrapBase(PropogationContext propCtx, int propDistance, Collector<Model> collector) {
        Model modelBase = null;
        ByteArrayInputStream bis = new ByteArrayInputStream(modelBaseSerialization);
        ObjectInputStream in;
        try {
            in = new ObjectInputStream(bis);
            modelBase = (Model) in.readObject();
            in.close();
        } catch (IOException | ClassNotFoundException e) {
            // NOTE(review): on failure this returns null; callers such as
            // unwrap() do not null-check the result.
            e.printStackTrace();
        }
        return modelBase;
    }
    /** Replays the origins onto the base (then reverses) and records them as a property. */
    public void restoreOriginsOnBase(Model modelBase, PropogationContext propCtx, int propDistance, Collector<Model> collector) {
        modelBase.playThenReverse(modelOrigins, propCtx, propDistance, collector);
        modelBase.setProperty(RestorableModel.PROPERTY_ORIGINS, modelOrigins, propCtx, propDistance, collector);
    }
    /**
     * Re-applies creation commands (plus any appended via appendCreation),
     * restores history, then fires the afterRestoreChangesOnBase hook via the
     * collector.
     */
    public void restoreChangesOnBase(final Model modelBase, final PropogationContext propCtx, final int propDistance, Collector<Model> collector) {
        ArrayList<CommandState<Model>> modelCreationAsPendingCommands = new ArrayList<CommandState<Model>>();
        if(modelCreation != null) {
            for(CommandState<Model> modelCreationPart: modelCreation) {
                // Each stored creation part is assumed to be an Execution whose
                // pending command is replayed.
                modelCreationAsPendingCommands.add(((Execution<Model>)modelCreationPart).pending);
            }
        }
        modelCreationAsPendingCommands.addAll(appendedCreation);
        // if(modelCreationAsPendingCommands.size() > 0) {
        // collector.execute(new SimpleExPendingCommandFactory2<Model>(modelBase, modelCreationAsPendingCommands) {
        // @Override
        // public void afterPropogationFinished(List<PendingUndoablePair> pendingUndoablePairs, PropogationContext propCtx, int propDistance, Collector<Model> collector) {
        // collector.execute(new SimpleExPendingCommandFactory2<Model>(modelBase, new PendingCommandState<Model>(
        // new SetPropertyCommand(RestorableModel.PROPERTY_CREATION, pendingUndoablePairs),
        // new SetPropertyCommand.AfterSetProperty()
        // NOTE(review): the call below AND the Util.sequence call that follows
        // both appear to execute the same pending creation commands (and both
        // set PROPERTY_CREATION) - this looks like a leftover from replacing
        // restoreChanges() with ExPendingCommandFactory2.Util.sequence().
        // Confirm whether this line should be commented out/removed.
        restoreChanges(modelBase, collector, modelCreationAsPendingCommands, 0, new ArrayList<Execution<Model>>());
        ExPendingCommandFactory2.Util.sequence(collector, modelBase, modelCreationAsPendingCommands, new ExPendingCommandFactory2.Util.ExecutionsHandler<Model>() {
            @Override
            public void handleExecutions(List<Execution<Model>> allPendingUndoablePairs, Collector<Model> collector) {
                collector.execute(new SimpleExPendingCommandFactory<Model>(modelBase, new PendingCommandState<Model>(
                    new SetPropertyCommand(RestorableModel.PROPERTY_CREATION, allPendingUndoablePairs),
                    new SetPropertyCommand.AfterSetProperty()
                )));
            }
        });
        if(modelHistory != null)
            modelBase.restoreHistory(modelHistory, propCtx, propDistance, collector);
        collector.execute(new Trigger<Model>() {
            @Override
            public void run(Collector<Model> collector) {
                afterRestoreChangesOnBase(modelBase, propCtx, propDistance, collector);
            }
        });
    }
    private void restoreChanges(final Model modelBase, Collector<Model> collector, final List<CommandState<Model>> modelCreation, final int i, final List<Execution<Model>> allPendingUndoablePairs) {
        // Execute one command at a time to leave space for side effect in between
        if(i < modelCreation.size()) {
            collector.execute(new SimpleExPendingCommandFactory<Model>(modelBase, modelCreation.subList(i, i + 1)) {
                @Override
                public void afterPropogationFinished(List<Execution<Model>> pendingUndoablePairs, PropogationContext propCtx, int propDistance, Collector<Model> collector) {
                    allPendingUndoablePairs.addAll(pendingUndoablePairs);
                    // Recurse to the next command only after this one finished.
                    restoreChanges(modelBase, collector, modelCreation, i + 1, allPendingUndoablePairs);
                }
            });
        } else {
            // All commands executed: record the accumulated executions as the
            // model's creation property.
            collector.execute(new SimpleExPendingCommandFactory<Model>(modelBase, new PendingCommandState<Model>(
                new SetPropertyCommand(RestorableModel.PROPERTY_CREATION, allPendingUndoablePairs),
                new SetPropertyCommand.AfterSetProperty()
            )));
        }
    }
    // Subclass hooks; intentionally empty here.
    protected void afterMapToReferenceLocation(RestorableModel mapped, Model sourceReference, Model targetReference) { }
    protected void afterForForwarding(RestorableModel forForwarded) { }
    protected void afterRestoreChangesOnBase(final Model modelBase, PropogationContext propCtx, int propDistance, Collector<Model> collector) { }
    /** Deserializes the base and restores origins, creation changes and history onto it. */
    public Model unwrap(PropogationContext propCtx, int propDistance, Collector<Model> collector) {
        Model modelBase = unwrapBase(propCtx, propDistance, collector);
        restoreOriginsOnBase(modelBase, propCtx, propDistance, collector);
        restoreChangesOnBase(modelBase, propCtx, propDistance, collector);
        return modelBase;
    }
    // Extra creation commands appended after wrapping; replayed by restoreChangesOnBase.
    private ArrayList<CommandState<Model>> appendedCreation = new ArrayList<CommandState<Model>>();
    public void appendCreation(CommandState<Model> creationPartToAppend) {
        appendedCreation.add(creationPartToAppend);
    }
    public void clearCreation() {
        if(modelCreation != null)
            modelCreation.clear();
    }
}
package fi.iki.elonen;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Date;
import java.util.Map;
public class CrawlerTest1 extends NanoHTTPD {
static ArrayList<String> alreadyAsked = new ArrayList<String>();
public CrawlerTest1() {
super(8181);
}
@Override
public Response serve(IHTTPSession session) {
int generation = 0;
Method method = session.getMethod();
String uri = session.getUri();
System.out.println(method + " '" + uri + "' ");
SecureRandom random = new SecureRandom();
String randomizer = new BigInteger(130, random).toString(32);
String msg = "<!DOCTYPE html>\n" +
"<html lang=\"en\">\n" +
" <head>\n" +
" <meta charset=\"utf-8\">\n" +
" <meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\">\n" +
" <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n" +
" <meta name=\"description\" content=\"\">\n" +
" <meta name=\"author\" content=\"\">\n" +
" <title>Сайт о веломаршрутах Татарстана и окрестностей Казани</title><!-- Bootstrap Core CSS -->\n" +
" <link class=\"\" href=\"/favicon.ico\" rel=\"shortcut icon\" type=\"image/x-icon\">\n" +
" <link href=\"/css/bootstrap.min.css?1\" rel=\"stylesheet\"><!-- Custom CSS -->\n" +
" <link href=\"/css/blog-post.css?1\" rel=\"stylesheet\"><!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --><!-- WARNING: Respond.js doesn't work if you view the page via file:// --><!--[if lt IE 9]>\n" +
" <script src=\"https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js\"></script>\n" +
" <script src=\"https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js\"></script>\n" +
" <![endif]
" </head><body><h1>Crawler Test1 - </h1>\n";
Map<String, String> parms = session.getParms();
if (parms.get("generation") == null) {
msg +=
"<a href=\"/?generation=1\">" + "Generation " + generation + "</a>";
} else {
generation = Integer.parseInt(parms.get("generation"));
System.out.println(generation);
if (generation < 5) {
msg += (new Date()).toString() + ": <a href='/" + randomizer + "?generation=" + (generation + 1) + "&rand=" + randomizer + "'>" + "Generation " + (generation) + "</a>";
msg += "<br>";
msg += (new Date()).toString() + ": <a href='/" + randomizer + "?generation=" + (generation + 1) + "&rand=" + randomizer + "'>" + "Generation " + (generation) + "</a>";
} else {
msg += (new Date()).toString() + ": <a href='/a?generation=" + (generation) + "'>" + "Generation " + (generation) + "</a>";
}
}
msg += "</body></html>\n";
try {
Thread.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
Response resp = new NanoHTTPD.Response(msg);
//Last-Modified:Sun, 22 Feb 2015 13:34:37 GMT
//Server:Jetty(9.1.4.v20140401)
if (parms.get("generation") == null) {
//resp.addHeader("Set-Cookie", "JSESSIONID=" + randomizer);
session.getCookies().set("JSESSIONID", randomizer, 10000);
}
resp.addHeader("Last-Modified", "Sun, 22 Feb 2015 13:34:37 GMT");
resp.addHeader("Server", "Jetty(9.1.4.v20140401)");
resp.setChunkedTransfer(false);
if (alreadyAsked.contains(session.getUri().toString())) {
resp.setStatus(Response.Status.NOT_MODIFIED);
System.out.println("Already asked URI " + session.getUri().toString());
} else {
if (!session.getMethod().equals(Method.HEAD)) {
alreadyAsked.add(session.getUri().toString());
System.out.println("First time asked URI " + session.getUri().toString());
}
}
return resp;
}
public static void main(String[] args) {
ServerRunner.run(CrawlerTest1.class);
}
} |
/*
* Generate HTML from a Mardown source file.
*/
package flow.netbeans.markdown;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import org.openide.awt.ActionID;
import org.openide.awt.ActionReference;
import org.openide.awt.ActionReferences;
import org.openide.awt.ActionRegistration;
import org.openide.filesystems.FileObject;
import org.openide.util.NbBundle.Messages;
import org.pegdown.PegDownProcessor;
@ActionID(category = "File",
id = "flow.netbeans.markdown.GenerateHtmlAction")
@ActionRegistration(displayName = "#CTL_GenerateHtmlAction")
@ActionReferences({
@ActionReference(path = "Loaders/text/x-markdown/Actions", position = 250)
})
@Messages("CTL_GenerateHtmlAction=Generate HTML")
public final class MarkdownGenerateHtmlAction implements ActionListener {
private final MarkdownDataObject context;
private final PegDownProcessor markdownProcessor = new PegDownProcessor();
public MarkdownGenerateHtmlAction(MarkdownDataObject context) throws IOException {
this.context = context;
FileObject f = context.getPrimaryFile();
String markdownSource = f.asText();
String html = markdownProcessor.markdownToHtml(markdownSource);
String full = "<html><head><meta http-equiv=\"content-type\" content=\"text/html; charset=UTF-8\"></head><body>" + html.toString() + "</body></html>";
JFileChooser fileChooser = new JFileChooser("user.home");
int option = fileChooser.showSaveDialog(fileChooser);
int result = 0;
String fileName = "";
if (option == JFileChooser.APPROVE_OPTION) {
fileName = fileChooser.getSelectedFile().toString();
if (fileChooser.getSelectedFile().exists()) {
result = JOptionPane.showConfirmDialog(fileChooser,
"The file exists, overwrite?", "Existing file",
JOptionPane.YES_NO_CANCEL_OPTION);
}
switch (result) {
case JOptionPane.NO_OPTION:
return;
case JOptionPane.CANCEL_OPTION:
return;
}
try {
PrintStream out = new PrintStream(new FileOutputStream(fileName));
out.print(full);
out.close();
} catch (IOException e) {
}
}
}
public void actionPerformed(ActionEvent ev) {
}
} |
package org.rstudio.core.client.widget;
import com.google.gwt.dom.client.Element;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.Frame;
/**
 * An iframe-based image host: the frame's document contains a single
 * hidden {@code <img id="img">} whose src is swapped via JSNI. Content
 * setup is retried on a timer because the iframe's contentWindow is not
 * reliably available immediately after load.
 */
public class ImageFrame extends Frame
{
   public ImageFrame()
   {
      // Blank-but-scriptable initial document for the iframe.
      setUrl("javascript:false");
   }
   @Override
   protected void onLoad()
   {
      super.onLoad();
      new Timer() {
         @Override
         public void run()
         {
            // No way to tell when iframe is actually ready to be
            // manipulated (sometimes contentWindow is null). Need
            // to probe and retry.
            if (!isReadyForContent(getElement()))
            {
               // Not ready yet; retry this same Timer a bit later.
               this.schedule(200);
            }
            else
            {
               setupContent(getElement());
               replaceLocation(getElement(), url_);
            }
         }
      }.schedule(100);
   }
   // NOTE(review): DOM.setElementAttribute is deprecated in newer GWT;
   // kept as-is to match the file's GWT version.
   public void setMarginWidth(int width)
   {
      DOM.setElementAttribute(getElement(),
                              "marginwidth",
                              Integer.toString(width));
   }
   public void setMarginHeight(int height)
   {
      DOM.setElementAttribute(getElement(),
                              "marginheight",
                              Integer.toString(height));
   }
   /** Remembers the URL and, if the widget is attached, shows it immediately. */
   public void setImageUrl(String url)
   {
      url_ = url;
      if (isAttached())
         replaceLocation(getElement(), url);
   }
   // Swaps the img's src (or hides it for the sentinel URL). Returns false
   // if the iframe document or img element is not available yet.
   private native final boolean replaceLocation(Element el, String url) /*-{
      if (!el.contentWindow.document)
         return false;
      var img = el.contentWindow.document.getElementById('img');
      if (!img)
         return false;
      if (url && url != 'javascript:false') {
         img.style.display = 'inline';
         img.src = url;
      }
      else {
         img.style.display = 'none';
      }
      return true;
   }-*/;
   // True once the iframe's contentWindow and document exist.
   private native boolean isReadyForContent(Element el) /*-{
      return el != null
         && el.contentWindow != null
         && el.contentWindow.document != null;
   }-*/;
   // Writes the single-img document into the iframe (idempotent).
   private native void setupContent(Element el) /*-{
      var doc = el.contentWindow.document;
      // setupContent can get called multiple times, as progress causes the
      // widget to be loaded/unloaded. This condition checks if we're already
      // set up.
      if (doc.getElementById('img'))
         return;
      doc.open();
      doc.write(
            '<html><head></head>' +
            '<body style="margin: 0; padding: 0; overflow: hidden; border: none">' +
            '<img id="img" width="100%" height="100%" style="display: none" src="javascript:false">' +
            '</body></html>');
      doc.close();
   }-*/;
   // Last requested image URL; sentinel keeps the img hidden.
   private String url_ = "javascript:false";
}
package it.mobimentum.phonegapspinnerplugin;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.content.Intent;
import android.os.Handler;
public class SpinnerPlugin extends CordovaPlugin {
private static final String PARAM_SHOW_OVERLAY = "overlay";
private static final String PARAM_SHOW_TIMEOUT = "timeout";
@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
final Activity context = this.cordova.getActivity();
if (action.equals("show")) {
// Args
JSONObject argsObj = args.getJSONObject(0);
Boolean showOverlay = argsObj.has(PARAM_SHOW_OVERLAY) ? argsObj.getBoolean(PARAM_SHOW_OVERLAY) : null;
Integer hideTimeout = argsObj.has(PARAM_SHOW_TIMEOUT) ? argsObj.getInt(PARAM_SHOW_TIMEOUT) : null;
// Show
show(context, showOverlay, hideTimeout);
callbackContext.success();
}
else if (action.equals("hide")) {
// Hide
hide(context);
callbackContext.success();
}
callbackContext.error("Invalid action");
return false;
}
private boolean show(final Activity context, Boolean showOverlay, Integer hideTimeout) {
// Loading spinner
Intent intent = new Intent(context, ProgressActivity.class);
if (showOverlay != null) intent.putExtra(ProgressActivity.EXTRA_SHOW_OVERLAY, showOverlay);
context.startActivity(intent);
if (hideTimeout != null) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() { hide(context); }
}, hideTimeout*1000);
}
return true;
}
private boolean hide(Activity context) {
// Loading spinner
Intent intent = new Intent(context, ProgressActivity.class);
intent.putExtra(ProgressActivity.ACTION_HIDE_PROGRESS, true);
context.startActivity(intent);
return true;
}
} |
package dbcache.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks an entity field as indexed, analogous to Hibernate's
 * {@code @Index} annotation.
 *
 * <p>NOTE(review): the original javadoc was written in a non-ASCII encoding
 * and has been garbled beyond recovery; it appeared to discuss entity
 * equality conventions ({@code getClass()} vs {@code instanceof},
 * {@code this.id == that.id}) — recover the intent from version control
 * if it matters.
 *
 * @see org.hibernate.annotations.Index
 * @author Jake
 * @date 20149710:50:17
 */
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Index {
    /**
     * The index name.
     *
     * @return the name of the index
     */
    public String name();
}
package org.apache.commons.collections;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
/**
* This class provides a skeletal implementation of the {@link Bag}
* interface to minimize the effort required for target implementations.
* Subclasses need only to call {@link #setMap(Map)} in their constructor
* specifying a map instance that will be used to store the contents of
* the bag.
*
* @since 2.0
* @author Chuck Burdick
* @author <a href="mailto:mas@apache.org">Michael A. Smith</a>
**/
public abstract class DefaultMapBag implements Bag {
private Map _map = null;
private int _total = 0;
private int _mods = 0;
public boolean add(Object o) {
return add(o, 1);
}
public boolean add(Object o, int i) {
_mods++;
if (i > 0) {
int count = (i + getCount(o));
_map.put(o, new Integer(count));
_total += i;
return (count == i);
} else {
return false;
}
}
public boolean addAll(Collection c) {
boolean changed = false;
Iterator i = c.iterator();
while (i.hasNext()) {
boolean added = add(i.next());
changed = changed || added;
}
return changed;
}
public void clear() {
_mods++;
_map.clear();
_total = 0;
}
public boolean contains(Object o) {
return _map.containsKey(o);
}
public boolean containsAll(Collection c) {
return containsAll(new HashBag(c));
}
/**
* Returns <code>true</code> if the bag contains all elements in
* the given collection, respecting cardinality.
* @see #containsAll(Collection)
**/
public boolean containsAll(Bag other) {
boolean result = true;
Iterator i = other.uniqueSet().iterator();
while (i.hasNext()) {
Object current = i.next();
boolean contains =
getCount(current) >= ((Bag)other).getCount(current);
result = result && contains;
}
return result;
}
public boolean equals(Object o) {
return (o == this ||
(o != null && o.getClass().equals(this.getClass()) &&
((DefaultMapBag)o)._map.equals(this._map)));
}
public int hashCode() {
return _map.hashCode();
}
public boolean isEmpty() {
return _map.isEmpty();
}
public Iterator iterator() {
return new BagIterator(this, extractList().iterator());
}
private class BagIterator implements Iterator {
private DefaultMapBag _parent = null;
private Iterator _support = null;
private Object _current = null;
private int _mods = 0;
public BagIterator(DefaultMapBag parent, Iterator support) {
_parent = parent;
_support = support;
_current = null;
_mods = parent.modCount();
}
public boolean hasNext() {
return _support.hasNext();
}
public Object next() {
if (_parent.modCount() != _mods) {
throw new ConcurrentModificationException();
}
_current = _support.next();
return _current;
}
public void remove() {
if (_parent.modCount() != _mods) {
throw new ConcurrentModificationException();
}
_support.remove();
_parent.remove(_current, 1);
_mods++;
}
}
public boolean remove (Object o) {
return remove(o, getCount(o));
}
public boolean remove (Object o, int i) {
_mods++;
boolean result = false;
int count = getCount(o);
if (i <= 0) {
result = false;
} else if (count > i) {
_map.put(o, new Integer(count - i));
result = true;
_total -= i;
} else { // count > 0 && count <= i
// need to remove all
result = (_map.remove(o) != null);
_total -= count;
}
return result;
}
public boolean removeAll(Collection c) {
boolean result = false;
if (c != null) {
Iterator i = c.iterator();
while (i.hasNext()) {
boolean changed = remove(i.next(), 1);
result = result || changed;
}
}
return result;
}
public boolean retainAll(Collection c) {
return retainAll(new HashBag(c));
}
/**
* Remove any members of the bag that are not in the given
* bag, respecting cardinality.
* @see #retainAll(Collection)
* @return <code>true</code> if this call changed the collection
**/
public boolean retainAll(Bag other) {
boolean result = false;
Bag excess = new HashBag();
Iterator i = uniqueSet().iterator();
while (i.hasNext()) {
Object current = i.next();
int myCount = getCount(current);
int otherCount = other.getCount(current);
if (1 <= otherCount && otherCount <= myCount) {
excess.add(current, myCount - otherCount);
} else {
excess.add(current, myCount);
}
}
if (!excess.isEmpty()) {
result = removeAll(excess);
}
return result;
}
public Object[] toArray() {
return extractList().toArray();
}
public Object[] toArray(Object[] a) {
return extractList().toArray(a);
}
public int getCount(Object o) {
int result = 0;
Integer count = MapUtils.getInteger(_map, o);
if (count != null) {
result = count.intValue();
}
return result;
}
public Set uniqueSet() {
return Collections.unmodifiableSet(_map.keySet());
}
public int size() {
return _total;
}
/**
* Actually walks the bag to make sure the count is correct and
* resets the running total
**/
protected int calcTotalSize() {
_total = extractList().size();
return _total;
}
/**
* Utility method for implementations to set the map that backs
* this bag. Not intended for interactive use outside of
* subclasses.
**/
protected void setMap(Map m) {
_map = m;
}
/**
* Utility method for implementations to access the map that backs
* this bag. Not intended for interactive use outside of
* subclasses.
**/
protected Map getMap() {
return _map;
}
/**
* Create a list for use in iteration, etc.
**/
private List extractList() {
List result = new ArrayList();
Iterator i = uniqueSet().iterator();
while (i.hasNext()) {
Object current = i.next();
for (int index = getCount(current); index > 0; index
result.add(current);
}
}
return result;
}
/**
* Return number of modifications for iterator
**/
private int modCount() {
return _mods;
}
/**
* Implement a toString() method suitable for debugging
**/
public String toString() {
StringBuffer buf = new StringBuffer();
buf.append("[");
Iterator i = uniqueSet().iterator();
while(i.hasNext()) {
Object current = i.next();
int count = getCount(current);
buf.append(count);
buf.append(":");
buf.append(current);
if(i.hasNext()) {
buf.append(",");
}
}
buf.append("]");
return buf.toString();
}
} |
package io.spine.server.tuple;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.Empty;
import com.google.protobuf.GeneratedMessageV3;
import com.google.protobuf.Message;
import io.spine.validate.Validate;
import javax.annotation.Nonnull;
import java.io.Serializable;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Abstract base for tuple classes.
*
* @author Alexander Yevsyukov
*/
public abstract class Tuple implements Iterable<Message>, Serializable {
private static final long serialVersionUID = 0L;
/**
* Immutable list of tuple values.
*/
@SuppressWarnings("NonSerializableFieldInSerializableClass") // ensured in constructor
private final List<Message> values;
/**
* Creates a new instance with the passed values.
*
* <p>Values must extend {@link GeneratedMessageV3}.
*/
protected Tuple(Message... values) {
super();
final ImmutableList.Builder<Message> builder = ImmutableList.builder();
boolean nonEmptyFound = false;
for (Message value : values) {
checkNotNull(value);
checkArgument(
value instanceof GeneratedMessageV3,
"Unsupported Message class encountered: %s. " +
"Please create tuples with classes extending `GeneratedMessageV3`",
value.getClass()
.getName());
final boolean isEmpty = checkNotDefaultOrEmpty(value);
if (!isEmpty) {
nonEmptyFound = true;
}
builder.add(value);
}
checkArgument(nonEmptyFound, "Tuple cannot be all Empty");
this.values = builder.build();
}
/**
* Ensures that the passed message is not in default or is an instance of {@link Empty}.
*
* @return {@code true} if {@link Empty} is passed
*/
private static boolean checkNotDefaultOrEmpty(Message value) {
final boolean isEmpty = value instanceof Empty;
if (!isEmpty) {
final String valueClass = value.getClass()
.getName();
checkArgument(
Validate.isNotDefault(value),
"Tuples cannot contain default values. Default value of %s encountered.",
valueClass);
}
return isEmpty;
}
@Nonnull
@Override
public final Iterator<Message> iterator() {
final Iterator<Message> result = values.iterator();
return result;
}
/**
* Obtains a value at the specified index.
*
* @param index a zero-based index value
* @return the value at the index
* @throws IndexOutOfBoundsException if the index is out of range
*/
protected Message get(int index) {
final Message result = values.get(index);
return result;
}
@Override
public int hashCode() {
return Objects.hash(values);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {return true;}
if (obj == null || getClass() != obj.getClass()) {return false;}
final Tuple other = (Tuple) obj;
return Objects.equals(this.values, other.values);
}
interface AValue<T extends Message> {
T getA();
}
interface BValue<T extends Message> {
T getB();
}
} |
package org.apache.lenya.cms.ant;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Path;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.StringTokenizer;
/**
* @author <a href="mailto:michael.wechner@lenya.org">Michael Wechner</a>
*/
public class CopyJavaSourcesTask extends Task {
private Path pubsRootDirs;
private String javaDir;
private String buildDir;
public void execute() throws BuildException {
int numberOfDirectoriesCreated = 0;
int numberOfFilesCopied = 0;
TwoTuple twoTuple = new TwoTuple(numberOfDirectoriesCreated, numberOfFilesCopied);
File absoluteBuildDir = new File(getProject().getBaseDir(), Project.translatePath(buildDir));
//System.out.println("CopyJavaSourcesTask.execute(): " + absoluteBuildDir);
//System.out.println("CopyJavaSourcesTask.execute(): " + pubsRootDirs);
StringTokenizer st = new StringTokenizer(pubsRootDirs.toString(),File.pathSeparator);
while (st.hasMoreTokens()) {
String pubsRootDir = st.nextToken();
//System.out.println("CopyJavaSourcesTask.execute(): " + pubsRootDir);
File path = new File(pubsRootDir);
if (path.isDirectory()) {
if (new File(path, "publication.xml").isFile()) {
copyContentOfDir(new File(path, javaDir), absoluteBuildDir, twoTuple, new JavaFilenameFilter());
} else {
// FIXME: Look for publications defined by the file "publication.xml"
String[] pubs = path.list();
for (int i = 0; i < pubs.length; i++) {
//System.out.println("CopyJavaSourcesTask.execute(): " + pubs[i]);
File pubJavaDir = new File(path, new File(pubs[i], javaDir).toString());
//System.out.println("CopyJavaSourcesTask.execute(): " + pubJavaDir);
//System.out.println("CopyJavaSourcesTask.execute(): " + absoluteBuildDir);
copyContentOfDir(pubJavaDir, absoluteBuildDir, twoTuple, new JavaFilenameFilter());
}
}
} else {
throw new BuildException("No such directory: " + path);
}
}
numberOfDirectoriesCreated = twoTuple.x;
numberOfFilesCopied = twoTuple.y;
System.out.println("Copying " + numberOfDirectoriesCreated + " directories to " + absoluteBuildDir);
System.out.println("Copying " + numberOfFilesCopied + " files to "+absoluteBuildDir);
}
/**
* Copies the directory "source" into the directory "destination"
*/
static public void copyDir(File source, File destination, TwoTuple twoTuple, FilenameFilter filenameFilter) {
File actualDestination = new File(destination, source.getName());
actualDestination.mkdirs();
copyContentOfDir(source, actualDestination, twoTuple, filenameFilter);
}
/**
* Copies the content of a directory into another directory
*/
static public void copyContentOfDir(File source, File destination, TwoTuple twoTuple, FilenameFilter filenameFilter) {
if (source.isDirectory()) {
String[] files;
if (filenameFilter != null) {
files = source.list(filenameFilter);
} else {
files = source.list();
}
for (int i = 0; i < files.length; i++) {
File file = new File(source, files[i]);
if (file.isFile()) {
copyFile(file, new File(destination, files[i]), twoTuple);
} else if (file.isDirectory()) {
//System.out.println("CopyJavaSourcesTask.copyDir(): " + source + " " + destination);
copyContentOfDir(file, new File(destination, files[i]), twoTuple, filenameFilter);
} else {
System.err.println("CopyJavaSourcesTask.copyDir(): Neither file nor directory: " + file);
}
}
} else {
//System.err.println("CopyJavaSourcesTask.copyContentOfDir(): No such directory: " + source);
}
}
/**
* Copies the content of a file into another file
* @param destination File (not a directory!)
*/
static public void copyFile(File source, File destination, TwoTuple twoTuple) {
if (source.isFile()) {
File parentDest = new File(destination.getParent());
if (!parentDest.exists()) {
parentDest.mkdirs();
//System.out.println("CopyJavaSourcesTask.copyFile(): Directory created: " + parentDest);
int numberOfDirectoriesCreated = twoTuple.x;
numberOfDirectoriesCreated++;
twoTuple.x = numberOfDirectoriesCreated;
}
if (destination.isFile()) {
if (destination.lastModified() > source.lastModified()) {
return;
}
}
try {
byte[] buffer = new byte[1024];
int bytesRead = -1;
InputStream in = new FileInputStream(source);
OutputStream out = new FileOutputStream(destination);
while ((bytesRead = in.read(buffer)) >= 0) {
out.write(buffer, 0, bytesRead);
}
out.close();
in.close();
int numberOfFilesCopied = twoTuple.y;
numberOfFilesCopied++;
twoTuple.y = numberOfFilesCopied;
//System.out.println("CopyJavaSourcesTask.copyFile(): File copied (" + numberOfFilesCopied + "): " + source + " " + destination);
} catch (Exception e) {
System.err.println("CopyJavaSourcesTask.copyFile(): " + e);
}
} else {
System.err.println("CopyJavaSourcesTask.copyFile(): No such file: " + source);
}
}
public void setPubsRootDirs(Path pubsRootDirs) {
this.pubsRootDirs = pubsRootDirs;
}
public void setJavaDir(String javaDir) {
this.javaDir = javaDir;
}
public void setBuildDir(String buildDir) {
this.buildDir = buildDir;
}
} |
package org.modeldriven.fuml.environment;
import org.modeldriven.fuml.common.uuid.UUIDGenerator;
import org.modeldriven.fuml.repository.Repository;
import UMLPrimitiveTypes.UnlimitedNatural;
import fUML.Debug;
import fUML.Semantics.Classes.Kernel.BooleanValue;
import fUML.Semantics.Classes.Kernel.DataValue;
import fUML.Semantics.Classes.Kernel.EnumerationValue;
import fUML.Semantics.Classes.Kernel.FeatureValueList;
import fUML.Semantics.Classes.Kernel.IntegerValue;
import fUML.Semantics.Classes.Kernel.PrimitiveValue;
import fUML.Semantics.Classes.Kernel.Reference;
import fUML.Semantics.Classes.Kernel.StringValue;
import fUML.Semantics.Classes.Kernel.StructuredValue;
import fUML.Semantics.Classes.Kernel.UnlimitedNaturalValue;
import fUML.Semantics.Classes.Kernel.Value;
import fUML.Semantics.Classes.Kernel.ValueList;
import fUML.Semantics.CommonBehaviors.Communications.SignalInstance;
import fUML.Semantics.Loci.LociL1.Executor;
import fUML.Semantics.Loci.LociL1.Locus;
import fUML.Syntax.Classes.Kernel.Class_;
import fUML.Syntax.Classes.Kernel.Classifier;
import fUML.Syntax.Classes.Kernel.DataType;
import fUML.Syntax.Classes.Kernel.Element;
import fUML.Syntax.Classes.Kernel.Enumeration;
import fUML.Syntax.Classes.Kernel.PrimitiveType;
import fUML.Syntax.Classes.Kernel.StructuralFeature;
import fUML.Syntax.CommonBehaviors.BasicBehaviors.Behavior;
import fUML.Syntax.CommonBehaviors.Communications.Signal;
/**
 * Lazily-initialized singleton that wires up a fUML execution {@link Locus}
 * (factory, executor, dispatch/event/choice strategies) and registers the
 * four built-in primitive types used by the execution environment.
 */
public class Environment {
    // Singleton instance, created on first getInstance() call.
    private static Environment instance = null;

    // Execution locus carrying the factory and executor for this environment.
    public Locus locus = null;

    // Single shared instances of the fUML built-in primitive types.
    private PrimitiveType Boolean = null;
    private PrimitiveType String = null;
    private PrimitiveType Integer = null;
    private PrimitiveType UnlimitedNatural = null;

    private Environment() {
        this.locus = new Locus();
        this.locus.setFactory(new ExecutionFactory()); // Uses local subclass for ExecutionFactory
        this.locus.setExecutor(new Executor());
        this.locus.factory
                .setStrategy(new fUML.Semantics.Classes.Kernel.RedefinitionBasedDispatchStrategy());
        this.locus.factory
                .setStrategy(new fUML.Semantics.CommonBehaviors.Communications.FIFOGetNextEventStrategy());
        this.locus.factory
                .setStrategy(new fUML.Semantics.Loci.LociL1.FirstChoiceStrategy());
        this.Boolean = this.createBuiltInType("Boolean");
        this.String = this.createBuiltInType("String");
        this.Integer = this.createBuiltInType("Integer");
        this.UnlimitedNatural = this.createBuiltInType("UnlimitedNatural");
        // The fUML execution environment requires a single instance
        // of these primitive types to be used for execution purposes.
        // Give these types a "synthetic" XMI id such that they CAN be mapped
        // by XMI id by various repository implementations.
        this.Boolean.setXmiId(UUIDGenerator.instance().getIdString36());
        this.String.setXmiId(UUIDGenerator.instance().getIdString36());
        this.Integer.setXmiId(UUIDGenerator.instance().getIdString36());
        this.UnlimitedNatural.setXmiId(UUIDGenerator.instance().getIdString36());
    }

    // Creates a named primitive type and registers it with the locus factory.
    private PrimitiveType createBuiltInType(String name) {
        PrimitiveType type = new PrimitiveType();
        type.name = name;
        this.locus.factory.addBuiltInType(type);
        return type;
    }

    /** Returns the singleton, creating it on first use. */
    public static Environment getInstance()
    {
        // Unsynchronized fast path; initializeInstance() does the
        // synchronized double-check.
        if (instance == null)
            initializeInstance();
        return instance;
    }

    private static synchronized void initializeInstance()
    {
        if (instance == null)
            instance = new Environment();
    }

    /**
     * Looks up a Behavior by name in the repository.
     *
     * @return the Behavior, or null when no element with that name exists
     * @throws EnvironmentException if the named element is not a Behavior
     */
    public Behavior findBehavior(String name)
    {
        org.modeldriven.fuml.repository.Element elem = Repository.INSTANCE.findElementByName(name);
        if (elem != null) {
            if (elem.getDelegate() instanceof Behavior)
                return (Behavior)elem.getDelegate();
            else
                throw new EnvironmentException("Element '" + name + "' is not a Behavior, it is a '"
                        + elem.getDelegate().getClass().getSimpleName() + "'");
        }
        else
            return null;
    }

    /** Returns the repository element's delegate for the given id, or null. */
    public Element findElementById(String id)
    {
        org.modeldriven.fuml.repository.Element elem = Repository.INSTANCE.findElementById(id);
        if (elem != null)
            return elem.getDelegate();
        else
            return null;
    }

    public int getBehaviorCount()
    {
        return Repository.INSTANCE.getElementCount(Behavior.class);
    }

    public String[] getBehaviorNames()
    {
        return Repository.INSTANCE.getElementNames(Behavior.class);
    }

    public PrimitiveType getBoolean() {
        return this.Boolean;
    }

    public PrimitiveType getString() {
        return this.String;
    }

    public PrimitiveType getInteger() {
        return this.Integer;
    }

    public PrimitiveType getUnlimitedNatural() {
        return this.UnlimitedNatural;
    }

    /**
     * Creates an (untyped-value) PrimitiveValue for one of the four built-in
     * types. Returns null (after logging) for any other PrimitiveType
     * instance, including types merely named like the built-ins.
     */
    public PrimitiveValue makePrimitiveValue(Classifier classifier) {
        PrimitiveType type = (PrimitiveType) classifier;
        PrimitiveValue primitiveValue = null;
        // Identity comparison: only the singleton built-in instances match.
        if (type == this.Boolean) {
            primitiveValue = new BooleanValue();
        } else if (type == this.Integer) {
            primitiveValue = new IntegerValue();
        } else if (type == this.String) {
            primitiveValue = new StringValue();
        } else if (type == this.UnlimitedNatural) {
            primitiveValue = new UnlimitedNaturalValue();
            ((UnlimitedNaturalValue) primitiveValue).value = new UnlimitedNatural();
        }
        if (primitiveValue != null) {
            primitiveValue.type = type;
        } else {
            Debug.println("[makePrimitiveValue] " + type.name
                    + " not understood.");
        }
        return primitiveValue;
    }

    /** Creates an EnumerationValue preset to the enumeration's first literal. */
    public EnumerationValue makeEnumerationValue(Classifier classifier) {
        Enumeration type = (Enumeration) classifier;
        EnumerationValue enumerationValue = new EnumerationValue();
        enumerationValue.type = type;
        enumerationValue.literal = type.ownedLiteral.getValue(0);
        return enumerationValue;
    }

    /**
     * Creates a DataValue, Reference, or SignalInstance depending on the
     * classifier kind, then recursively populates each structural feature
     * with a default value made by makeValue().
     *
     * <p>NOTE(review): if the classifier is none of DataType/Class_/Signal,
     * structuredValue stays null and the getFeatureValues() call below
     * throws NPE — confirm callers only pass these three kinds.
     */
    public fUML.Semantics.Classes.Kernel.StructuredValue makeStructuredValue(
            fUML.Syntax.Classes.Kernel.Classifier classifier) {
        StructuredValue structuredValue = null;
        if (classifier instanceof DataType) {
            structuredValue = new DataValue();
            ((DataValue) structuredValue).type = (DataType) classifier;
            structuredValue.createFeatureValues();
        } else if (classifier instanceof Class_) {
            structuredValue = new Reference();
            ((Reference) structuredValue).referent = this.locus
                    .instantiate((Class_) classifier);
        } else if (classifier instanceof Signal) {
            structuredValue = new SignalInstance();
            ((SignalInstance) structuredValue).type = (Signal) classifier;
            structuredValue.createFeatureValues();
        }
        FeatureValueList featureValues = structuredValue.getFeatureValues();
        for (int i = 0; i < featureValues.size(); i++) {
            StructuralFeature feature = featureValues.getValue(i).feature;
            ValueList valueList = new ValueList();
            valueList.addValue(this
                    .makeValue((Classifier) (feature.typedElement.type)));
            structuredValue.setFeatureValue(feature, valueList, 0);
        }
        return structuredValue;
    }

    /**
     * Creates a default Value for the given classifier: a String value when
     * the type is null, otherwise dispatching on the classifier kind.
     */
    public Value makeValue(Classifier type) {
        if (type == null) {
            return this.makePrimitiveValue(this.String);
        } else if (type instanceof PrimitiveType) {
            return this.makePrimitiveValue(type);
        } else if (type instanceof Enumeration) {
            return this.makeEnumerationValue(type);
        } else {
            return this.makeStructuredValue(type);
        }
    } // makeValue
}
package org.rlcommunity.critter.svg;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.geom.Rectangle2D;
import java.net.URI;
import org.rlcommunity.critter.Vector2D;
import com.kitfox.svg.SVGDiagram;
import com.kitfox.svg.SVGElement;
import com.kitfox.svg.SVGElementException;
import com.kitfox.svg.SVGException;
import com.kitfox.svg.animation.AnimationElement;
public class ShapeDrawing {
private final URI rootUrl;
private SVGDiagram diagram = null;
private SVGElement element = null;
private String nativeTransformation = null;
private final Vector2D nativeTranslation = new Vector2D();
public ShapeDrawing(String pictureName) {
rootUrl = Loader.load(pictureName);
if (rootUrl == null) {
return;
}
diagram = Loader.universe.getDiagram(rootUrl);
diagram.setIgnoringClipHeuristic(true);
element = Loader.universe.getElement(rootUrl);
nativeTransformation = Loader.getNativeTransformation(element);
setNativeTranslation();
}
private void setNativeTranslation() {
Rectangle2D boundingBox = diagram.getViewRect();
nativeTranslation.x = boundingBox.getWidth() * -0.5;
nativeTranslation.y = boundingBox.getHeight() * -0.5;
}
public void resetNativeTranslation() {
nativeTranslation.x = 0;
nativeTranslation.y = 0;
}
public void draw(Graphics g, Vector2D position, double direction) {
if (diagram == null)
return;
try {
setTransform(position, direction);
diagram.render((Graphics2D) g);
} catch (SVGException e) {
e.printStackTrace();
return;
}
}
private void setTransform(Vector2D position, double direction) {
// Transformations are applied from right to left, cool isn't it ?
String transformation = String.format("translate(%s,%s) rotate(%s) translate(%s,%s) %s",
position.x,
position.y,
Math.toDegrees(direction),
nativeTranslation.x,
nativeTranslation.y,
nativeTransformation);
try {
element.setAttribute("transform",
AnimationElement.AT_XML,
transformation);
Loader.universe.updateTime();
} catch (SVGElementException e) {
e.printStackTrace();
return;
} catch (SVGException e) {
e.printStackTrace();
return;
}
}
public URI root() {
return rootUrl;
}
} |
package algorithms.search;
import algorithms.YFastTrieLong;
import algorithms.util.ObjectSpaceEstimator;
import algorithms.util.PairInt;
import gnu.trove.iterator.TLongIterator;
import gnu.trove.map.TLongLongMap;
import gnu.trove.map.hash.TLongLongHashMap;
import gnu.trove.set.TLongSet;
import gnu.trove.set.hash.TLongHashSet;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Logger;
public class NearestNeighbor2DLong {
private final YFastTrieLong xbt;
private final int width;
private final int height;
private final long maxIndex;
private boolean useCache = true;
//TODO: consider replacing these with weak reference key hash maps.
// even though the later take more space, they can evict keys no longer
// used elsewhere. the google guava library has several structures
// could use for a cache and there are a couple libraries such as cache 2k.
private TLongLongMap pCache = new TLongLongHashMap();
private TLongLongMap sCache = new TLongLongHashMap();
private Logger log = Logger.getLogger(this.getClass().getName());
/**
 * Builds the nearest-neighbor index from a set of 2-D points. Each point is
 * encoded as a single long key (row * width + col) stored in a y-fast trie.
 *
 * @param points non-negative coordinates
 * @param imgWidth maximum x value of any data point + 1 including
 * those to be queries
 * @param imgHeight maximum y value of any data point + 1 including
 * those to be queries
 */
public NearestNeighbor2DLong(Set<PairInt> points,
    int imgWidth, int imgHeight) {
    this.width = imgWidth;
    this.height = imgHeight ;
    maxIndex = (long)width * height;
    // number of bits needed to represent maxIndex keys in the trie
    int maxW = 1 + (int)Math.ceil(Math.log(maxIndex)/Math.log(2));
    xbt = new YFastTrieLong(maxW);
    for (PairInt p : points) {
        int x = p.getX();
        int y = p.getY();
        // NOTE(review): these guards accept x == width and y == height even
        // though the javadoc describes width/height as max+1 — confirm intent.
        if (x > width || x < 0) {
            throw new IllegalArgumentException(
                "x cannot be larger than "
                + " maxX given in constructor " + width
                + ". x=" + x);
        }
        if (y > height || y < 0) {
            throw new IllegalArgumentException(
                "y cannot be larger than "
                + " maxY given in constructor " + height + ". y=" + y);
        }
        long index = getInternalIndex(x, y);
        xbt.add(index);
    }
}
/**
 * Builds the nearest-neighbor index from already-encoded pixel indexes.
 *
 * @param pointIdxs pixel indexes formed from relationship
 * pixIdx = (row * width) + col
 * @param imgWidth maximum x value of any data point + 1 including
 * those to be queries
 * @param imgHeight maximum y value of any data point + 1 including
 * those to be queries
 */
public NearestNeighbor2DLong(TLongSet pointIdxs,
    int imgWidth, int imgHeight) {
    this.width = imgWidth;
    this.height = imgHeight;
    maxIndex = (long)width * height;
    // number of bits needed to represent maxIndex keys in the trie
    int maxW = 1 + (int)Math.ceil(Math.log(maxIndex)/Math.log(2));
    xbt = new YFastTrieLong(maxW);
    TLongIterator iter = pointIdxs.iterator();
    while (iter.hasNext()) {
        long pixIdx = iter.next();
        // Decode to validate the coordinates before storing the raw index.
        int x = getCol(pixIdx);
        int y = getRow(pixIdx);
        if (x > width || x < 0) {
            throw new IllegalArgumentException(
                "x cannot be larger than "
                + " maxX given in constructor " + width
                + ". x=" + x);
        }
        if (y > height || y < 0) {
            throw new IllegalArgumentException(
                "y cannot be larger than "
                + " maxY given in constructor " + height + ". y=" + y);
        }
        xbt.add(pixIdx);
    }
}
/** Disables the predecessor/successor result caches used by queries. */
public void doNotUseCache() {
    useCache = false;
}
/**
 * Encodes (col, row) as a single long key: row * width + col.
 * The multiplication is widened to long before it happens, so the
 * result cannot overflow int range.
 */
protected long getInternalIndex(int col, int row) {
    final long rowBase = (long) width * row;
    return rowBase + col;
}
/** Decodes the row component of an internal index (inverse of getInternalIndex). */
protected int getRow(long internalIndex) {
    return (int) (internalIndex / width);
}
/** Decodes the column component of an internal index (inverse of getInternalIndex). */
protected int getCol(long internalIndex) {
    final int row = (int) (internalIndex / width);
    final long rowStart = (long) row * width;
    return (int) (internalIndex - rowStart);
}
/**
 Finds the point(s) in the index closest to (x, y), with zero tolerance
 (delegates to findClosestWithinTolerance).
 <pre>
 runtime complexity is
 best case:
 Note that caching leads to an O(1) term
 over time.
 worst case:
 Note, worst case is: first column
 filled with points and all else is empty and
 the number of rows is same or larger than
 number of columns and the
 query is for the point in the last column and
 last row... a predecessor call is necessary for
 each row in the worst case.
 Note: maxW = 1 + Math.ceil(Math.log(maxX * maxY)/Math.log(2));
 </pre>
 * @param x non-negative x coord to query for
 * @param y non-negative y coord to query for
 * @return the closest points; an empty set when the index is empty, or
 *         null when x or y is out of range (see findClosestWithinTolerance)
 */
public Set<PairInt> findClosest(final int x, final int y) {
    return findClosestWithinTolerance(x, y, 0);
}
/**
 * NOTE: NOT READY FOR USE
 * method to return only the nearest point and any
 * that are at the same distance within a tolerance.
 * This is meant to be a nearest neighbor method
 * with ability to return more than one at same distance within a tolerance
 * of that distance.
 * TODO: calculate the runtime complexity bounds....
 * @param x non-negative x coord to query for
 * @param y non-negative y coord to query for
 * @param tolerance additional distance beyond the nearest distance within
 *        which points are also accepted into the result
 * @return set of nearest points (within tolerance of the nearest
 *         distance); an empty set when the structure is empty; null when
 *         x or y is outside the constructed bounds
 */
public Set<PairInt> findClosestWithinTolerance(int x, int y,
    double tolerance) {

    if (xbt.size() == 0) {
        log.warning("xbt is empty");
        return new HashSet<PairInt>();
    }
    if (x >= width || x < 0) {
        log.fine(
            "x cannot be larger than "
            + " maxX given in constructor " + width
            + ". x=" + x);
        return null;
    }
    if (y >= height || y < 0) {
        log.fine(
            "y cannot be larger than "
            + " maxY given in constructor " + height + ". y=" + y);
        return null;
    }

    // running best distance and the inflated acceptance radius
    double closestDist = Double.MAX_VALUE;
    double closestDistPlusTol = Double.MAX_VALUE;
    // candidate internal indexes; filtered against the final radius at the end
    TLongSet closestIndexes = new TLongHashSet();

    // internal index encodes (x, y) as y * width + x
    long idx = getInternalIndex(x, y);

    //System.out.format("find %d (=%d, %d)\n", idx, x, y);

    long q = xbt.find(idx);
    if (q != -1) {
        // have found nearest, but still need to search
        // within tolerance distance for others.
        closestDist = 0;
        closestDistPlusTol = tolerance;
        closestIndexes.add(idx);
    }

    long predecessor = -1;
    long successor = -1;

    // consult the memoization caches before the trie, when enabled
    if (useCache && pCache.containsKey(idx)) {
        predecessor = pCache.get(idx);
    } else {
        //O(log_2(maxW))
        predecessor = xbt.predecessor(idx);
        if (useCache && predecessor != -1) {
            pCache.put(idx, predecessor);
        }
    }
    if (useCache && sCache.containsKey(idx)) {
        successor = sCache.get(idx);
    } else {
        //O(log_2(maxW))
        successor = xbt.successor(idx);
        if (useCache && successor != -1) {
            sCache.put(idx, successor);
        }
    }

    // dist() returns Double.MAX_VALUE for a -1 (absent) index
    double dp2 = dist(x, y, predecessor);
    double ds2 = dist(x, y, successor);
    double dMin = Math.min(dp2, ds2);

    //System.out.println("p=" + predecessor + " s=" + successor);

    /*
    if smallest is smaller than closest
        if the new closest diff with current is greater
        than tol, clear the indexes and reset closest
        vars and add smallest to indexes
        also add the other if within tolerance
        else if closer is within tolerance,
        update closest vars and add whichever or both
        s2 and p2 to indexes (delaying detailed checks
        of indexes until end of method)
    else if smallest is <= closestPlusTol
        add s2 and/or p2 to indexes
    */
    if (dMin <= closestDist) {
        if (Math.abs(closestDist - dMin) > tolerance) {
            // new best is better by more than tolerance: restart candidates
            closestIndexes.clear();
            closestDist = dMin;
            closestDistPlusTol = closestDist + tolerance;
        }
        if ((predecessor != -1) &&
            (dp2 <= closestDistPlusTol)) {
            closestIndexes.add(predecessor);
        }
        if ((successor != -1) &&
            (ds2 <= closestDistPlusTol)) {
            closestIndexes.add(successor);
        }
    } else if (dMin <= closestDistPlusTol) {
        if ((predecessor != -1) &&
            (dp2 <= closestDistPlusTol)) {
            closestIndexes.add(predecessor);
        }
        if ((successor != -1) &&
            (ds2 <= closestDistPlusTol)) {
            closestIndexes.add(successor);
        }
    }

    //add tolerance to goal
    int goal = (closestDist != Double.MAX_VALUE) ?
        (int)Math.ceil(closestDistPlusTol) : 0;

    // rows closer to y than the current radius still need inspection
    int yLow = estimateLowBound(y, goal);

    int yCurrent;
    if (predecessor == -1) {
        // no smaller key exists anywhere; skip the downward scan
        yCurrent = Integer.MIN_VALUE;
    } else {
        int pRow = getRow(predecessor);
        if (pRow < y) {
            yCurrent = pRow;
        } else {
            yCurrent = pRow - 1;
        }
    }

    //System.out.println("yCurrent=" + yCurrent + " yLow=" + yLow);

    // predecessor searches until reach yLow, adjusting goal by
    // min distances
    long p2 = -1;
    long s2 = -1;
    while (yCurrent >= yLow) {
        long cIdx = getInternalIndex(x, yCurrent);
        q = xbt.find(cIdx);
        if (q != -1) {
            // exact hit at (x, yCurrent); no successor probe needed
            p2 = q;
            dp2 = dist(x, y, p2);
            ds2 = Double.MAX_VALUE;
        } else {
            if (useCache && pCache.containsKey(cIdx)) {
                p2 = pCache.get(cIdx);
            } else {
                p2 = xbt.predecessor(cIdx);
                if (useCache && p2 != -1) {
                    pCache.put(cIdx, p2);
                }
            }
            if (useCache && sCache.containsKey(cIdx)) {
                s2 = sCache.get(cIdx);
            } else {
                //O(log_2(maxW))
                s2 = xbt.successor(cIdx);
                if (useCache && s2 != -1) {
                    sCache.put(cIdx, s2);
                }
            }
            dp2 = dist(x, y, p2);
            ds2 = dist(x, y, s2);
        }
        dMin = Math.min(dp2, ds2);
        if (dMin <= closestDist) {
            if (Math.abs(closestDist - dMin) > tolerance) {
                // radius shrank: restart candidates and tighten the scan bound
                closestIndexes.clear();
                closestDist = dMin;
                closestDistPlusTol = closestDist + tolerance;
                goal = (int)Math.ceil(closestDistPlusTol);
                yLow = estimateLowBound(y, goal);
            }
            if ((p2 != -1) && dp2 <= closestDistPlusTol) {
                closestIndexes.add(p2);
            }
            if ((s2 != -1) && ds2 <= closestDistPlusTol) {
                closestIndexes.add(s2);
            }
        } else if (dMin <= closestDistPlusTol) {
            if ((p2 != -1) && dp2 <= closestDistPlusTol) {
                closestIndexes.add(p2);
            }
            if ((s2 != -1) && ds2 <= closestDistPlusTol) {
                closestIndexes.add(s2);
            }
        }
        if (p2 != -1) {
            long expectedNext = getInternalIndex(x, yCurrent - 1);
            if (p2 > expectedNext) {
                // the predecessor lies within the next row; step one row
                yCurrent -= 1;
            } else {
                // skip the empty rows directly to the predecessor's row
                yCurrent = getRow(p2) - 1;
            }
        } else {
            // nothing below remains
            yCurrent = Integer.MIN_VALUE;
        }
    }

    //System.out.println("yCurrent=" + yCurrent + " yLow=" + yLow);
    //System.out.println("p=" + p2 + " s=" + s2);

    // successor searches to higher bounds
    if (successor == -1) {
        yCurrent = Integer.MAX_VALUE;
    } else {
        int sr = getRow(successor);
        if (sr > y) {
            yCurrent = sr;
        } else {
            yCurrent = sr + 1;
        }
    }

    int yHigh = estimateHighBound(y, goal);

    //System.out.println("yCurrent=" + yCurrent + " yHigh=" + yHigh);

    while (yCurrent <= yHigh) {
        long cIdx = getInternalIndex(x, yCurrent);
        q = xbt.find(cIdx);
        if (q != -1) {
            // exact hit at (x, yCurrent)
            p2 = q;
            dp2 = dist(x, y, p2);
            ds2 = Double.MAX_VALUE;
        } else {
            if (useCache && pCache.containsKey(cIdx)) {
                p2 = pCache.get(cIdx);
            } else {
                //O(log_2(maxW))
                p2 = xbt.predecessor(cIdx);
                if (useCache && p2 != -1) {
                    pCache.put(cIdx, p2);
                }
            }
            if (useCache && sCache.containsKey(cIdx)) {
                s2 = sCache.get(cIdx);
            } else {
                //O(log_2(maxW))
                s2 = xbt.successor(cIdx);
                if (useCache && s2 != -1) {
                    sCache.put(cIdx, s2);
                }
            }
            dp2 = dist(x, y, p2);
            ds2 = dist(x, y, s2);
        }
        dMin = Math.min(dp2, ds2);
        if (dMin <= closestDist) {
            if (Math.abs(closestDist - dMin) > tolerance) {
                // radius shrank: restart candidates and tighten the scan bound
                closestIndexes.clear();
                closestDist = dMin;
                closestDistPlusTol = closestDist + tolerance;
                goal = (int)Math.ceil(closestDistPlusTol);
                yHigh = estimateHighBound(y, goal);
            }
            if ((p2 != -1) && dp2 <= closestDistPlusTol) {
                closestIndexes.add(p2);
            }
            if ((s2 != -1) && ds2 <= closestDistPlusTol) {
                closestIndexes.add(s2);
            }
        } else if (dMin <= closestDistPlusTol) {
            if ((p2 != -1) && dp2 <= closestDistPlusTol) {
                closestIndexes.add(p2);
            }
            if ((s2 != -1) && ds2 <= closestDistPlusTol) {
                closestIndexes.add(s2);
            }
        }
        if (s2 != -1) {
            long expectedNext = getInternalIndex(x, yCurrent + 1);
            if (s2 < expectedNext) {
                // the successor lies within the next row; step one row
                yCurrent += 1;
            } else {
                // skip the empty rows directly to the successor's row
                yCurrent = getRow(s2) + 1;
            }
        } else {
            // nothing above remains
            yCurrent = Integer.MAX_VALUE;
        }
        //System.out.println("yCurrent=" + yCurrent + " yHigh=" + yHigh);
    }

    //filter results for closest and tolerance
    // candidates may have been added under a larger radius earlier;
    // re-check each against the final accepted radius
    Set<PairInt> results = new HashSet<PairInt>();
    TLongIterator iter = closestIndexes.iterator();
    while (iter.hasNext()) {
        long index2 = iter.next();
        if (dist(x, y, index2) <= closestDistPlusTol) {
            int x2 = getCol(index2);
            int y2 = getRow(index2);
            PairInt p3 = new PairInt(x2, y2);
            results.add(p3);
        }
    }

    return results;
}
/**
 Finds the stored points nearest to (x, y), excluding a point stored at
 exactly (x, y) itself.
 <pre>
 runtime complexity is
 best case: 2 * O(log_2(maxW)).
 Note that caching leads to an O(1) term
 over time instead of the logarithmic term.
 worst case: nRows * 2 * O(log_2(maxW))
 Note, worst case is: first column
 filled with points and all else is empty and
 the number of rows is same or larger than
 number of columns and the
 query is for the point in the last column and
 last row... a predecessor call is necessary for
 each row in the worst case.
 Note: maxW = 1 + Math.ceil(Math.log(maxX * maxY)/Math.log(2));
 </pre>
 * @param x non-negative x coord to query for
 * @param y non-negative y coord to query for
 * @return set of nearest points other than (x, y) itself, or null when
 *         x or y is outside the constructed bounds
 */
public Set<PairInt> findClosestNotEqual(final int x, final int y) {
    // unlimited search distance; includeEquals=false excludes (x, y) itself
    return findClosest(x, y, Integer.MAX_VALUE, false);
}
/**
 Finds the stored points nearest to (x, y) that lie within distance dMax.
 <pre>
 runtime complexity is
 best case:
 Note that caching leads to an O(1) term
 over time.
 worst case:
 Note: maxW = 1 + Math.ceil(Math.log(maxX * maxY)/Math.log(2));
 </pre>
 * @param x non-negative x coord to query for
 * @param y non-negative y coord to query for
 * @param dMax maximum search distance
 * @return a set of points within dMax that are the
 * closest points, else returns an empty set
 */
public Set<PairInt> findClosest(int x, int y, int dMax) {
    // includeEquals=true: a point stored exactly at (x, y) is returned
    return findClosest(x, y, dMax, true);
}
/**
 Core nearest-neighbor search: returns the stored points nearest to
 (x, y) within distance dMax, optionally excluding a point stored at
 exactly (x, y).
 <pre>
 runtime complexity is
 best case:
 Note that caching leads to an O(1) term
 over time.
 worst case:
 Note: maxW = 1 + Math.ceil(Math.log(maxX * maxY)/Math.log(2));
 </pre>
 * @param x non-negative x coord to query for
 * @param y non-negative y coord to query for
 * @param dMax maximum search distance
 * @param includeEquals when false, a point stored at exactly (x, y) is
 *        excluded from candidates and from the result
 * @return a set of points within dMax that are the
 * closest points, else returns an empty set (null when x or y is outside
 * the constructed bounds)
 */
private Set<PairInt> findClosest(int x, int y, int dMax, boolean includeEquals) {

    if (x >= width || x < 0) {
        log.fine(
            "x cannot be larger than "
            + " maxX given in constructor " + width
            + ". x=" + x);
        return null;
    }
    if (y >= height || y < 0) {
        log.fine(
            "y cannot be larger than "
            + " maxY given in constructor " + height + ". y=" + y);
        return null;
    }

    long idx = getInternalIndex(x, y);

    if (includeEquals) {
        // an exact match is the unique nearest point: done
        long q = xbt.find(idx);
        if (q != -1) {
            Set<PairInt> results = new HashSet<PairInt>();
            results.add(new PairInt(x, y));
            return results;
        }
    }

    TLongSet closestIndexes = new TLongHashSet();
    double closestDist = Double.MAX_VALUE;

    long predecessor = -1;
    long successor = -1;

    // consult the memoization caches before the trie, when enabled
    if (useCache && pCache.containsKey(idx)) {
        predecessor = pCache.get(idx);
    } else {
        //O(log_2(maxW))
        predecessor = xbt.predecessor(idx);
        if (useCache && predecessor != -1) {
            pCache.put(idx, predecessor);
        }
    }
    if (useCache && sCache.containsKey(idx)) {
        successor = sCache.get(idx);
    } else {
        //O(log_2(maxW))
        successor = xbt.successor(idx);
        if (useCache && successor != -1) {
            sCache.put(idx, successor);
        }
    }

    // dist() returns Double.MAX_VALUE for a -1 (absent) index
    double dp2 = dist(x, y, predecessor);
    double ds2 = dist(x, y, successor);

    if (!includeEquals) {
        // a distance of 0 means the neighbor IS the query point; exclude it.
        // (bug fix: the first branch previously reset ds2 instead of dp2,
        // so an equal predecessor was never excluded)
        if (dp2 == 0) {
            dp2 = Double.MAX_VALUE;
        } else if (ds2 == 0) {
            ds2 = Double.MAX_VALUE;
        }
    }

    if (dp2 <= ds2 && (dp2 <= dMax)) {
        closestDist = dp2;
        closestIndexes.add(predecessor);
        if (dp2 == ds2) {
            // tie: keep both neighbors
            closestIndexes.add(successor);
        }
    } else if (ds2 < dp2 && (ds2 <= dMax)) {
        closestDist = ds2;
        closestIndexes.add(successor);
    }

    // remaining search radius in rows, capped by the caller's dMax
    int goal = (closestDist != Double.MAX_VALUE) ?
        (int)Math.ceil(closestDist) : dMax;
    if (goal > dMax) {
        goal = dMax;
    }

    int yLow = estimateLowBound(y, goal);

    int yCurrent;
    if (predecessor == -1) {
        // no smaller key exists anywhere; skip the downward scan
        yCurrent = Integer.MIN_VALUE;
    } else {
        int pRow = getRow(predecessor);
        if (pRow < y) {
            yCurrent = pRow;
        } else {
            yCurrent = pRow - 1;
        }
    }

    // predecessor searches until reach yLow, adjusting goal by
    // min distances
    long p2 = -1;
    long s2 = -1;
    while (yCurrent >= yLow) {
        long cIdx = getInternalIndex(x, yCurrent);
        long q = xbt.find(cIdx);
        if (q != -1) {
            // exact hit at (x, yCurrent); no successor probe needed
            p2 = q;
            dp2 = dist(x, y, p2);
            ds2 = Double.MAX_VALUE;
        } else {
            if (useCache && pCache.containsKey(cIdx)) {
                p2 = pCache.get(cIdx);
            } else {
                //O(log_2(maxW))
                p2 = xbt.predecessor(cIdx);
                if (useCache && p2 != -1) {
                    pCache.put(cIdx, p2);
                }
            }
            if (useCache && sCache.containsKey(cIdx)) {
                s2 = sCache.get(cIdx);
            } else {
                //O(log_2(maxW))
                s2 = xbt.successor(cIdx);
                if (useCache && s2 != -1) {
                    sCache.put(cIdx, s2);
                }
            }
            dp2 = dist(x, y, p2);
            ds2 = dist(x, y, s2);
        }
        if (!includeEquals) {
            // in the downward scan, only the successor of cIdx can equal idx
            if (s2 != -1 && s2 == idx) {
                ds2 = Double.MAX_VALUE;
            }
        }
        if ((dp2 < ds2) && (dp2 < closestDist) && (dp2 <= dMax)) {
            // strictly better: restart candidates and tighten the scan bound
            closestIndexes.clear();
            closestDist = dp2;
            closestIndexes.add(p2);
            goal = (int)Math.ceil(closestDist);
            if (goal > dMax) {
                goal = dMax;
            }
            yLow = estimateLowBound(y, goal);
        } else if ((ds2 < dp2) && (ds2 < closestDist) && (ds2 <= dMax)) {
            closestIndexes.clear();
            closestDist = ds2;
            closestIndexes.add(s2);
            goal = (int)Math.ceil(closestDist);
            if (goal > dMax) {
                goal = dMax;
            }
            yLow = estimateLowBound(y, goal);
        } else if (dp2 == closestDist && (dp2 != Double.MAX_VALUE)
            && (dp2 <= dMax)) {
            // equal distance: accumulate tie(s)
            closestIndexes.add(p2);
            if (dp2 == ds2) {
                closestIndexes.add(s2);
            }
        } else if (ds2 == closestDist && (ds2 != Double.MAX_VALUE)
            && (ds2 <= dMax)) {
            closestIndexes.add(s2);
        }
        if (p2 != -1) {
            long expectedNext = getInternalIndex(x, yCurrent - 1);
            if (p2 > expectedNext) {
                // the predecessor lies within the next row; step one row
                yCurrent -= 1;
            } else {
                // skip the empty rows directly to the predecessor's row
                yCurrent = getRow(p2) - 1;
            }
        } else {
            yCurrent = Integer.MIN_VALUE;
        }
    }

    // successor searches to higher bounds
    if (successor == -1) {
        yCurrent = Integer.MAX_VALUE;
    } else {
        int sr = getRow(successor);
        if (sr > y) {
            yCurrent = sr;
        } else {
            yCurrent = sr + 1;
        }
    }

    int yHigh = estimateHighBound(y, goal);

    while (yCurrent <= yHigh) {
        long cIdx = getInternalIndex(x, yCurrent);
        long q = xbt.find(cIdx);
        if (q != -1) {
            // exact hit at (x, yCurrent)
            p2 = q;
            dp2 = dist(x, y, p2);
            ds2 = Double.MAX_VALUE;
        } else {
            if (useCache && pCache.containsKey(cIdx)) {
                p2 = pCache.get(cIdx);
            } else {
                //O(log_2(maxW))
                p2 = xbt.predecessor(cIdx);
                if (useCache && p2 != -1) {
                    pCache.put(cIdx, p2);
                }
            }
            if (useCache && sCache.containsKey(cIdx)) {
                s2 = sCache.get(cIdx);
            } else {
                //O(log_2(maxW))
                s2 = xbt.successor(cIdx);
                if (useCache && s2 != -1) {
                    sCache.put(cIdx, s2);
                }
            }
            dp2 = dist(x, y, p2);
            ds2 = dist(x, y, s2);
        }
        if (!includeEquals) {
            // in the upward scan, only the predecessor of cIdx can equal idx
            if (p2 != -1 && p2 == idx) {
                dp2 = Double.MAX_VALUE;
            } else if (s2 != -1 && s2 == idx) {
                ds2 = Double.MAX_VALUE;
            }
        }
        if ((dp2 < ds2) && (dp2 < closestDist) && (dp2 <= dMax)) {
            // strictly better: restart candidates and tighten the scan bound
            closestIndexes.clear();
            closestDist = dp2;
            closestIndexes.add(p2);
            goal = (int)Math.ceil(closestDist);
            if (goal > dMax) {
                goal = dMax;
            }
            yHigh = estimateHighBound(y, goal);
        } else if ((ds2 < dp2) && (ds2 < closestDist) && (ds2 <= dMax)) {
            closestIndexes.clear();
            closestDist = ds2;
            closestIndexes.add(s2);
            goal = (int)Math.ceil(closestDist);
            if (goal > dMax) {
                goal = dMax;
            }
            yHigh = estimateHighBound(y, goal);
        } else if (dp2 == closestDist && (dp2 != Double.MAX_VALUE)
            && (dp2 <= dMax)) {
            // equal distance: accumulate tie(s)
            closestIndexes.add(p2);
            if (dp2 == ds2) {
                closestIndexes.add(s2);
            }
        } else if (ds2 == closestDist && (ds2 != Double.MAX_VALUE)
            && (ds2 <= dMax)) {
            closestIndexes.add(s2);
        }
        if (s2 != -1) {
            long expectedNext = getInternalIndex(x, yCurrent + 1);
            if (s2 < expectedNext) {
                // the successor lies within the next row; step one row
                yCurrent += 1;
            } else {
                // skip the empty rows directly to the successor's row
                yCurrent = getRow(s2) + 1;
            }
        } else {
            yCurrent = Integer.MAX_VALUE;
        }
    }

    // decode the surviving internal indexes into coordinates
    Set<PairInt> results = new HashSet<PairInt>();
    TLongIterator iter = closestIndexes.iterator();
    while (iter.hasNext()) {
        long index2 = iter.next();
        int x2 = getCol(index2);
        int y2 = getRow(index2);
        results.add(new PairInt(x2, y2));
    }

    return results;
}
/**
 * Euclidean distance between (x, y) and the point encoded by internal
 * index p2, or Double.MAX_VALUE when p2 is the "not found" sentinel (-1).
 *
 * @param x query x coordinate
 * @param y query y coordinate
 * @param p2 internal index of the other point, or -1
 * @return the distance, or Double.MAX_VALUE for p2 == -1
 */
private double dist(int x, int y, long p2) {
    if (p2 == -1) {
        return Double.MAX_VALUE;
    }
    int x2 = getCol(p2);
    int y2 = getRow(p2);
    // compute in long to avoid int overflow of diff*diff for
    // coordinate differences larger than 46340
    long diffX = x2 - x;
    long diffY = y2 - y;
    double dist = Math.sqrt(diffX * diffX + diffY * diffY);
    return dist;
}
/**
 * Lowest row that still needs inspection: y minus the current search
 * radius, clamped to the grid minimum of 0.
 */
private int estimateLowBound(int y, int goal) {
    return Math.max(0, y - goal);
}
/**
 * Highest row that still needs inspection: y plus the current search
 * radius, clamped to the top of the grid.
 *
 * Bug fix: valid rows are [0, height-1] (queries with y &gt;= height are
 * rejected), so the clamp is height - 1 rather than height; row
 * "height" would produce an internal index at or beyond the trie
 * universe. This also makes the clamp symmetric with estimateLowBound.
 */
private int estimateHighBound(int y, int goal) {
    int high = y + goal;
    if (high > height - 1) {
        high = height - 1;
    }
    return high;
}
/**
 * Estimates the heap footprint of an instance holding the given number
 * of points, combining the backing y-fast trie estimate with this
 * wrapper object's own field overhead.
 *
 * @param numberOfPoints number of points expected to be stored
 * @param maxBitLength maximum bit length of the internal indexes
 * @return estimated size in bytes
 */
public static long estimateSizeOnHeap(int numberOfPoints,
    int maxBitLength) {

    long[] trieEstimates = YFastTrieLong.estimateSizeOnHeap(numberOfPoints,
        maxBitLength);

    //System.out.println("yft estimates=" + Arrays.toString(trieEstimates));

    // overhead of this object's own fields:
    // 4 object references, 2 ints, 1 long, 1 boolean
    ObjectSpaceEstimator estimator = new ObjectSpaceEstimator();
    estimator.setNObjRefsFields(4);
    estimator.setNIntFields(2);
    estimator.setNLongFields(1);
    estimator.setNBooleanFields(1);

    return estimator.estimateSizeOnHeap() + trieEstimates[1];
}
} |
package at.ac.tuwien.kr.alpha.common.atoms;
import java.util.List;
import java.util.Set;
import at.ac.tuwien.kr.alpha.common.Predicate;
import at.ac.tuwien.kr.alpha.common.terms.Term;
import at.ac.tuwien.kr.alpha.common.terms.VariableTerm;
import at.ac.tuwien.kr.alpha.grounder.Substitution;
import at.ac.tuwien.kr.alpha.grounder.Unifier;
public abstract class Atom implements Comparable<Atom> {
public abstract Predicate getPredicate();
public abstract List<Term> getTerms();
/**
* Creates a new Atom that represents this Atom, but has the given term list instead.
*
* @param terms the terms to set
* @return a new Atom with the given terms set
*/
public abstract Atom withTerms(List<Term> terms);
public abstract boolean isGround();
/**
* Set of all variables occurring in the Atom
*/
public Set<VariableTerm> getOccurringVariables() {
return toLiteral().getOccurringVariables();
}
/**
* This method applies a substitution to a potentially non-substitute atom. The resulting atom may be non-substitute.
*
* @param substitution the variable substitution to apply.
* @return the atom resulting from the applying the substitution.
*/
public abstract Atom substitute(Substitution substitution);
/**
* Creates a non-negated literal containing this atom
*/
public Literal toLiteral() {
return toLiteral(true);
}
/**
* Creates a literal containing this atom which will be negated if {@code positive} is {@code false}
*
* @param positive
* @return
*/
public abstract Literal toLiteral(boolean positive);
public Atom renameVariables(String newVariablePrefix) {
Unifier renamingSubstitution = new Unifier();
int counter = 0;
for (VariableTerm variable : getOccurringVariables()) {
renamingSubstitution.put(variable, VariableTerm.getInstance(newVariablePrefix + counter++));
}
return this.substitute(renamingSubstitution);
}
@Override
public int compareTo(Atom o) {
if (o == null) {
return 1;
}
final List<Term> aTerms = this.getTerms();
final List<Term> bTerms = o.getTerms();
if (aTerms.size() != bTerms.size()) {
return Integer.compare(aTerms.size(), bTerms.size());
}
int result = this.getPredicate().compareTo(o.getPredicate());
if (result != 0) {
return result;
}
for (int i = 0; i < aTerms.size(); i++) {
result = aTerms.get(i).compareTo(o.getTerms().get(i));
if (result != 0) {
return result;
}
}
return 0;
}
/**
* Returns whether this Atom is a builtin atom according to the ASP standard. This covers the atoms currently defined by {@link ComparisonAtom}.
*
* @return true if this Atom is a comparison atom, false otherwise.
*/
public boolean isBuiltin() {
return false;
}
@Override
public abstract boolean equals(Object o);
@Override
public abstract int hashCode();
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.