answer
stringlengths 17
10.2M
|
|---|
package org.epics.pvmanager;
/**
 * A PV that can be both read and written. In general, the read payload will be
 * different from the write payload.
 *
 * @param <R> type of the read payload
 * @param <W> type of the write payload
 * @author carcassi
 */
public class PV<R, W> implements PVReader<R>, PVWriter<W> {

    // Thin composite over a reader and a writer: every call below is forwarded
    // verbatim to the appropriate delegate; this class holds no logic of its own.
    private final PVReader<R> reader;
    private final PVWriter<W> writer;

    PV(PVReader<R> reader, PVWriter<W> writer) {
        this.reader = reader;
        this.writer = writer;
    }

    // --- read side: delegated to the wrapped PVReader ---

    @Override
    public void addPVValueChangeListener(PVValueChangeListener listener) {
        reader.addPVValueChangeListener(listener);
    }

    @Override
    public void addPVValueChangeListener(Class<?> clazz, PVValueChangeListener listener) {
        reader.addPVValueChangeListener(clazz, listener);
    }

    @Override
    public void removePVValueChangeListener(PVValueChangeListener listener) {
        reader.removePVValueChangeListener(listener);
    }

    @Override
    public String getName() {
        return reader.getName();
    }

    @Override
    public R getValue() {
        return reader.getValue();
    }

    // NOTE(review): isClosed() reports the reader's state only, while close()
    // tears down both halves — confirm the writer cannot outlive the reader.
    @Override
    public boolean isClosed() {
        return reader.isClosed();
    }

    @Override
    public Exception lastException() {
        return reader.lastException();
    }

    // --- write side: delegated to the wrapped PVWriter ---

    @Override
    public void addPVValueWriteListener(PVValueWriteListener listener) {
        writer.addPVValueWriteListener(listener);
    }

    // NOTE(review): named removePVValueChangeListener yet takes a
    // PVValueWriteListener — the asymmetry mirrors the PVWriter interface;
    // confirm against PVWriter before any rename.
    @Override
    public void removePVValueChangeListener(PVValueWriteListener listener) {
        writer.removePVValueChangeListener(listener);
    }

    @Override
    public void write(W newValue) {
        writer.write(newValue);
    }

    @Override
    public Exception lastWriteException() {
        return writer.lastWriteException();
    }

    // Closes both underlying resources.
    @Override
    public void close() {
        reader.close();
        writer.close();
    }
}
|
package org.batfish.coordinator;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Strings;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.security.AccessControlException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedSet;
import java.util.UUID;
import java.util.zip.ZipException;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.io.FileExistsException;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.batfish.common.BatfishException;
import org.batfish.common.BatfishLogger;
import org.batfish.common.Container;
import org.batfish.common.CoordConsts;
import org.batfish.common.Version;
import org.batfish.common.WorkItem;
import org.batfish.common.util.BatfishObjectMapper;
import org.batfish.coordinator.WorkQueueMgr.QueueType;
import org.batfish.coordinator.config.Settings;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.glassfish.jersey.media.multipart.FormDataParam;
@Path(CoordConsts.SVC_CFG_WORK_MGR)
public class WorkMgrService {
// Shared logger and settings, resolved from the Main singletons at service
// instantiation. NOTE(review): _settings is not referenced in this chunk —
// confirm it is used elsewhere in the class before removing.
BatfishLogger _logger = Main.getLogger();
Settings _settings = Main.getSettings();
/** Wraps {@code entity} in the standard two-element success envelope. */
private static JSONArray successResponse(Object entity) {
  List<Object> envelope = Arrays.asList(CoordConsts.SVC_KEY_SUCCESS, entity);
  return new JSONArray(envelope);
}
/** Wraps {@code entity} in the standard two-element failure envelope. */
private static JSONArray failureResponse(Object entity) {
  List<Object> envelope = Arrays.asList(CoordConsts.SVC_KEY_FAILURE, entity);
  return new JSONArray(envelope);
}
/**
 * Check if an API key is valid.
 *
 * @param apiKey The API key to check
 * @param clientVersion The version of the client, checked for compatibility with this service
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {SVC_KEY_API_KEY: validity}]} on success, or
 *     {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_CHECK_API_KEY)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray checkApiKey(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion) {
  try {
    // NOTE(review): logs the raw API key — consider redacting secrets from logs.
    _logger.info("WMS:checkApiKey " + apiKey + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkClientVersion(clientVersion);
    boolean valid = Main.getAuthorizer().isValidWorkApiKey(apiKey);
    // NOTE(review): the validity boolean is returned under SVC_KEY_API_KEY —
    // confirm clients expect it under that key.
    return successResponse(new JSONObject().put(CoordConsts.SVC_KEY_API_KEY, valid));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:checkApiKey exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:checkApiKey exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Ensures {@code apiKey} is authorized for work requests.
 *
 * @throws AccessControlException if the key is not valid
 */
private void checkApiKeyValidity(String apiKey) throws Exception {
  boolean valid = Main.getAuthorizer().isValidWorkApiKey(apiKey);
  if (valid) {
    return;
  }
  throw new AccessControlException("Invalid API key: " + apiKey);
}
// Throws if the client's version is incompatible with this service's version.
private void checkClientVersion(String clientVersion) throws Exception {
  Version.checkCompatibleVersion("Service", "Client", clientVersion);
}
/**
 * Ensures the given API key may access the named container.
 *
 * <p>The third argument ({@code true}) is forwarded to the Authorizer as-is; its
 * semantics are defined there.
 *
 * @throws AccessControlException if access is denied
 */
private void checkContainerAccessibility(String apiKey, String containerName) throws Exception {
  boolean accessible = Main.getAuthorizer().isAccessibleContainer(apiKey, containerName, true);
  if (!accessible) {
    throw new AccessControlException("container is not accessible by the api key");
  }
}
/**
 * Rejects a missing required string parameter.
 *
 * @throws IllegalArgumentException if {@code paramStr} is null or empty
 */
private void checkStringParam(String paramStr, String parameterName) {
  // Equivalent to Guava's Strings.isNullOrEmpty, spelled out with the JDK.
  if (paramStr == null || paramStr.isEmpty()) {
    throw new IllegalArgumentException(parameterName + " is missing or empty");
  }
}
/**
 * Configures an analysis for the container.
 *
 * @param apiKey The API key of the requester
 * @param clientVersion The version of the client
 * @param containerName The name of the container to configure
 * @param newAnalysisStr The string representation of a new analysis to configure; any non-empty
 *     value flags creation of a new analysis
 * @param analysisName The name of the analysis to configure
 * @param addQuestionsStream A stream carrying a JSON map of question name to question body; may
 *     be null when no questions are added
 * @param delQuestions A JSON array (as a string) of question names to delete from the analysis;
 *     may be null or empty
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {result: ...}]} on success, or
 *     {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_CONFIGURE_ANALYSIS)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray configureAnalysis(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_NEW_ANALYSIS) String newAnalysisStr,
    @FormDataParam(CoordConsts.SVC_KEY_ANALYSIS_NAME) String analysisName,
    @FormDataParam(CoordConsts.SVC_KEY_FILE) InputStream addQuestionsStream,
    @FormDataParam(CoordConsts.SVC_KEY_DEL_ANALYSIS_QUESTIONS) String delQuestions) {
  try {
    // NOTE(review): logs the raw API key — consider redacting secrets from logs.
    _logger.info(
        "WMS:configureAnalysis "
            + apiKey
            + " "
            + containerName
            + " "
            + newAnalysisStr
            + " "
            + analysisName
            + " "
            + delQuestions
            + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(analysisName, "Analysis name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    // Parse the uploaded stream, a JSON map of {questionName: questionBody}.
    Map<String, String> questionsToAdd = new HashMap<>();
    if (addQuestionsStream != null) {
      BatfishObjectMapper mapper = new BatfishObjectMapper();
      Map<String, Object> streamValue;
      try {
        streamValue =
            mapper.readValue(addQuestionsStream, new TypeReference<Map<String, Object>>() {});
        for (Entry<String, Object> entry : streamValue.entrySet()) {
          // Re-serialize each question body so downstream code receives JSON text.
          String textValue = mapper.writeValueAsString(entry.getValue());
          questionsToAdd.put(entry.getKey(), textValue);
        }
      } catch (IOException e) {
        throw new BatfishException("Failed to read question JSON from input stream", e);
      }
    }
    boolean newAnalysis = !Strings.isNullOrEmpty(newAnalysisStr);
    List<String> questionsToDelete = new ArrayList<>();
    if (!Strings.isNullOrEmpty(delQuestions)) {
      JSONArray delQuestionsArray = new JSONArray(delQuestions);
      for (int i = 0; i < delQuestionsArray.length(); i++) {
        questionsToDelete.add(delQuestionsArray.getString(i));
      }
    }
    Main.getWorkMgr()
        .configureAnalysis(
            containerName, newAnalysis, analysisName, questionsToAdd, questionsToDelete);
    return successResponse(new JSONObject().put("result", "successfully configured analysis"));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException
      | ZipException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:configureAnalysis exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:configureAnalysis exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Delete an analysis from the container.
 *
 * @param apiKey The API key of the requester
 * @param clientVersion The version of the client
 * @param containerName The name of the container in which the analysis resides
 * @param analysisName The name of the analysis to delete
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {result: ...}]} on success, or
 *     {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_DEL_ANALYSIS)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray delAnalysis(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_ANALYSIS_NAME) String analysisName) {
  try {
    // NOTE(review): logs the raw API key — consider redacting secrets from logs.
    _logger.info("WMS:delAnalysis " + apiKey + " " + containerName + " " + analysisName + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(analysisName, "Analysis name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    Main.getWorkMgr().delAnalysis(containerName, analysisName);
    // Bug fix: this message previously read "successfully configured analysis",
    // a copy-paste from configureAnalysis, though this endpoint deletes.
    return successResponse(new JSONObject().put("result", "successfully deleted analysis"));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException
      | ZipException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:delAnalysis exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:delAnalysis exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Delete the specified container.
 *
 * @param apiKey The API key of the requester
 * @param clientVersion The version of the client
 * @param containerName The name of the container to delete
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {result: status}]} on success (status is the
 *     boolean returned by the WorkMgr), or {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_DEL_CONTAINER)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray delContainer(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName) {
  try {
    _logger.info("WMS:delContainer " + containerName + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    // status semantics are defined by WorkMgr.delContainer.
    boolean status = Main.getWorkMgr().delContainer(containerName);
    return successResponse(new JSONObject().put("result", status));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:delContainer exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:delContainer exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Deletes the specified environment under the specified container and testrig.
 *
 * @param apiKey The API key of the requester
 * @param clientVersion The version of the client
 * @param containerName The name of the container in which the environment and testrig reside
 * @param envName The name of the environment to delete
 * @param testrigName The name of the testrig in which the environment resides
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {result: "true"}]} on success (note: the string
 *     "true", not a boolean), or {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_DEL_ENVIRONMENT)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray delEnvironment(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_ENV_NAME) String envName,
    @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName) {
  try {
    _logger.info("WMS:delEnvironment " + containerName + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(testrigName, "Testrig name");
    checkStringParam(envName, "Environment name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    Main.getWorkMgr().delEnvironment(containerName, testrigName, envName);
    return successResponse(new JSONObject().put("result", "true"));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:delEnvironment exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:delEnvironment exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Delete the specified question under the specified container.
 *
 * @param apiKey The API key of the requester
 * @param clientVersion The version of the client
 * @param containerName The name of the container in which the question resides
 * @param questionName The name of the question to delete
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {result: "true"}]} on success (note: the string
 *     "true", not a boolean), or {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_DEL_QUESTION)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray delQuestion(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_QUESTION_NAME) String questionName) {
  try {
    _logger.info("WMS:delQuestion " + containerName + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(questionName, "Question name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    Main.getWorkMgr().delQuestion(containerName, questionName);
    return successResponse(new JSONObject().put("result", "true"));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:delQuestion exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:delQuestion exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Deletes the specified testrig under the specified container.
 *
 * @param apiKey The API key of the requester
 * @param clientVersion The version of the client
 * @param containerName The name of the container in which the testrig resides
 * @param testrigName The name of the testrig to delete
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {result: "true"}]} on success (note: the string
 *     "true", not a boolean), or {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_DEL_TESTRIG)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray delTestrig(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName) {
  try {
    _logger.info("WMS:delTestrig " + containerName + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(testrigName, "Testrig name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    Main.getWorkMgr().delTestrig(containerName, testrigName);
    return successResponse(new JSONObject().put("result", "true"));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:delTestrig exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:delTestrig exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Get answers for a previously run analysis.
 *
 * @param apiKey The API key of the client
 * @param clientVersion The version of the client
 * @param containerName The name of the container in which the analysis resides
 * @param testrigName The name of the testrig on which the analysis was run
 * @param baseEnv The name of the base environment on which the analysis was run
 * @param deltaTestrig The name of the delta testrig on which the analysis was run
 * @param deltaEnv The name of the delta environment on which the analysis was run
 * @param analysisName The name of the analysis
 * @param prettyAnswer Whether or not to pretty-print the result
 * @param workItemStr Optional JSON-encoded WorkItem; when supplied and the matching work is
 *     still incomplete, its status is merged into the response
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {answers, optional work status}]} on success,
 *     or {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_GET_ANALYSIS_ANSWERS)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray getAnalysisAnswers(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
    @FormDataParam(CoordConsts.SVC_KEY_ENV_NAME) String baseEnv,
    @FormDataParam(CoordConsts.SVC_KEY_DELTA_TESTRIG_NAME) String deltaTestrig,
    @FormDataParam(CoordConsts.SVC_KEY_DELTA_ENV_NAME) String deltaEnv,
    @FormDataParam(CoordConsts.SVC_KEY_ANALYSIS_NAME) String analysisName,
    @FormDataParam(CoordConsts.SVC_KEY_PRETTY_ANSWER) String prettyAnswer,
    @FormDataParam(CoordConsts.SVC_KEY_WORKITEM) String workItemStr /* optional */) {
  try {
    // NOTE(review): logs the raw API key — consider redacting secrets from logs.
    _logger.info(
        "WMS:getAnalysisAnswers "
            + apiKey
            + " "
            + containerName
            + " "
            + testrigName
            + " "
            + analysisName
            + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(testrigName, "Base testrig name");
    checkStringParam(baseEnv, "Base environment name");
    checkStringParam(analysisName, "Analysis name");
    checkStringParam(prettyAnswer, "Retrieve pretty-printed answers");
    boolean pretty = Boolean.parseBoolean(prettyAnswer);
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    BatfishObjectMapper mapper = new BatfishObjectMapper();
    JSONObject response = new JSONObject();
    // If the caller supplied a WorkItem, fold its queue status into the response.
    if (!Strings.isNullOrEmpty(workItemStr)) {
      WorkItem workItem = WorkItem.fromJsonString(workItemStr);
      // The WorkItem must refer to the same container/testrig as the request.
      if (!workItem.getContainerName().equals(containerName)
          || !workItem.getTestrigName().equals(testrigName)) {
        return failureResponse(
            "Mismatch in parameters: WorkItem is not for the supplied container or testrig");
      }
      QueuedWork work = Main.getWorkMgr().getMatchingWork(workItem, QueueType.INCOMPLETE);
      if (work != null) {
        String taskStr = mapper.writeValueAsString(work.getLastTaskCheckResult());
        response
            .put(CoordConsts.SVC_KEY_WORKID, work.getWorkItem().getId())
            .put(CoordConsts.SVC_KEY_WORKSTATUS, work.getStatus().toString())
            .put(CoordConsts.SVC_KEY_TASKSTATUS, taskStr);
      }
    }
    Map<String, String> answers =
        Main.getWorkMgr()
            .getAnalysisAnswers(
                containerName,
                testrigName,
                baseEnv,
                deltaTestrig,
                deltaEnv,
                analysisName,
                pretty);
    String answersStr = mapper.writeValueAsString(answers);
    return successResponse(response.put(CoordConsts.SVC_KEY_ANSWERS, answersStr));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:getAnalysisAnswers exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    // Bug fix: log tag previously misspelled "WMS:getAnalsysisAnswers".
    _logger.error("WMS:getAnalysisAnswers exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Get answer to a previously asked question.
 *
 * @param apiKey The API key of the client
 * @param clientVersion The version of the client
 * @param containerName The name of the container in which the question was asked
 * @param testrigName The name of the testrig on which the question was asked
 * @param baseEnv The name of the base environment on which the question was asked
 * @param deltaTestrig The name of the delta testrig on which the question was asked
 * @param deltaEnv The name of the delta environment on which the question was asked
 * @param questionName The name of the question
 * @param prettyAnswer Whether or not to pretty-print the result
 * @param workItemStr Optional JSON-encoded WorkItem; when supplied and the matching work is
 *     still incomplete, the work's status is returned instead of an answer
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {answer or work status}]} on success, or
 *     {@code [SVC_KEY_FAILURE, message]} on error
 */
@POST
@Path(CoordConsts.SVC_RSC_GET_ANSWER)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray getAnswer(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
    @FormDataParam(CoordConsts.SVC_KEY_ENV_NAME) String baseEnv,
    @FormDataParam(CoordConsts.SVC_KEY_DELTA_TESTRIG_NAME) String deltaTestrig,
    @FormDataParam(CoordConsts.SVC_KEY_DELTA_ENV_NAME) String deltaEnv,
    @FormDataParam(CoordConsts.SVC_KEY_QUESTION_NAME) String questionName,
    @FormDataParam(CoordConsts.SVC_KEY_PRETTY_ANSWER) String prettyAnswer,
    @FormDataParam(CoordConsts.SVC_KEY_WORKITEM) String workItemStr /* optional */) {
  try {
    // NOTE(review): logs the raw API key — consider redacting secrets from logs.
    _logger.info(
        "WMS:getAnswer "
            + apiKey
            + " "
            + containerName
            + " "
            + testrigName
            + " "
            + questionName
            + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(testrigName, "Base testrig name");
    checkStringParam(baseEnv, "Base environment name");
    checkStringParam(questionName, "Question name");
    checkStringParam(prettyAnswer, "Retrieve pretty-printed answer");
    boolean pretty = Boolean.parseBoolean(prettyAnswer);
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    // If a WorkItem is supplied and its work is still queued, return the work
    // status immediately — no answer is fetched in that case.
    if (!Strings.isNullOrEmpty(workItemStr)) {
      WorkItem workItem = WorkItem.fromJsonString(workItemStr);
      // The WorkItem must refer to the same container/testrig as the request.
      if (!workItem.getContainerName().equals(containerName)
          || !workItem.getTestrigName().equals(testrigName)) {
        return failureResponse(
            "Mismatch in parameters: WorkItem is not for the supplied container or testrig");
      }
      QueuedWork work = Main.getWorkMgr().getMatchingWork(workItem, QueueType.INCOMPLETE);
      if (work != null) {
        BatfishObjectMapper mapper = new BatfishObjectMapper();
        String taskStr = mapper.writeValueAsString(work.getLastTaskCheckResult());
        return successResponse(
            new JSONObject()
                .put(CoordConsts.SVC_KEY_WORKID, work.getWorkItem().getId())
                .put(CoordConsts.SVC_KEY_WORKSTATUS, work.getStatus().toString())
                .put(CoordConsts.SVC_KEY_TASKSTATUS, taskStr));
      }
    }
    String answer =
        Main.getWorkMgr()
            .getAnswer(
                containerName,
                testrigName,
                baseEnv,
                deltaTestrig,
                deltaEnv,
                questionName,
                pretty);
    return successResponse(new JSONObject().put(CoordConsts.SVC_KEY_ANSWER, answer));
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors: log the message only, no stack trace.
    _logger.error("WMS:getAnswer exception: " + e.getMessage() + "\n");
    return failureResponse(e.getMessage());
  } catch (Exception e) {
    // Unexpected errors: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:getAnswer exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Get content of the configuration file.
 *
 * @param apiKey The API key of the client
 * @param clientVersion The version of the client
 * @param containerName The name of the container in which the configuration resides
 * @param testrigName The name of the testrig in which the configuration resides
 * @param configName The name of the configuration file to fetch
 * @return A {@link Response Response} whose entity is either the file content of the
 *     configuration file {@code configName} or an error message if: the configuration file
 *     {@code configName} does not exist or the {@code apiKey} has no access to the container
 *     {@code containerName}
 */
@POST
@Path(CoordConsts.SVC_RSC_GET_CONFIGURATION)
@Produces(MediaType.APPLICATION_JSON)
public Response getConfiguration(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
    @FormDataParam(CoordConsts.SVC_KEY_CONFIGURATION_NAME) String configName) {
  try {
    _logger.info("WMS:getConfiguration " + containerName + "\n");
    // NOTE(review): testrigName and configName are not validated here, unlike the
    // sibling endpoints — confirm whether empty values should be rejected up front.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    // NOTE(review): the existence check precedes the access check, so an
    // unauthorized key can learn whether a container exists — confirm intended.
    java.nio.file.Path containerDir =
        Main.getSettings().getContainersLocation().resolve(containerName).toAbsolutePath();
    if (containerDir == null || !Files.exists(containerDir)) {
      return Response.status(Response.Status.NOT_FOUND)
          .entity("Container '" + containerName + "' not found")
          .type(MediaType.TEXT_PLAIN)
          .build();
    }
    checkContainerAccessibility(apiKey, containerName);
    String configContent =
        Main.getWorkMgr().getConfiguration(containerName, testrigName, configName);
    return Response.ok(configContent).build();
  } catch (AccessControlException e) {
    // Access denied -> 403 with the exception message as plain text.
    return Response.status(Status.FORBIDDEN)
        .entity(e.getMessage())
        .type(MediaType.TEXT_PLAIN)
        .build();
  } catch (BatfishException e) {
    // Known domain failure (e.g. missing file) -> 400.
    return Response.status(Status.BAD_REQUEST)
        .entity(e.getMessage())
        .type(MediaType.TEXT_PLAIN)
        .build();
  } catch (Exception e) {
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:getConfiguration exception: " + stackTrace);
    // NOTE(review): entity is e.getCause(), a Throwable that may be null —
    // likely intended to be e.getMessage(); confirm before changing.
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(e.getCause())
        .type(MediaType.TEXT_PLAIN)
        .build();
  }
}
/**
 * Get information of the container.
 *
 * @param apiKey The API key of the client
 * @param clientVersion The version of the client
 * @param containerName The name of the container to fetch
 * @return A {@link Response Response} whose entity is either a JSON representation of the
 *     container {@code containerName} or an error message if: the container
 *     {@code containerName} does not exist or the {@code apiKey} has no access to the container
 *     {@code containerName}
 */
@POST
@Path(CoordConsts.SVC_RSC_GET_CONTAINER)
@Produces(MediaType.APPLICATION_JSON)
public Response getContainer(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName) {
  try {
    _logger.info("WMS:getContainer " + containerName + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    // NOTE(review): the existence check precedes the access check, so an
    // unauthorized key can learn whether a container exists — confirm intended.
    java.nio.file.Path containerDir =
        Main.getSettings().getContainersLocation().resolve(containerName).toAbsolutePath();
    if (containerDir == null || !Files.exists(containerDir)) {
      return Response.status(Response.Status.NOT_FOUND)
          .entity("Container '" + containerName + "' not found")
          .type(MediaType.TEXT_PLAIN)
          .build();
    }
    checkContainerAccessibility(apiKey, containerName);
    Container container = Main.getWorkMgr().getContainer(containerDir);
    BatfishObjectMapper mapper = new BatfishObjectMapper();
    String containerString = mapper.writeValueAsString(container);
    return Response.ok(containerString).build();
  } catch (AccessControlException e) {
    // Access denied -> 403 with the exception message as plain text.
    return Response.status(Status.FORBIDDEN)
        .entity(e.getMessage())
        .type(MediaType.TEXT_PLAIN)
        .build();
  } catch (BatfishException e) {
    // Known domain failure -> 400.
    return Response.status(Status.BAD_REQUEST)
        .entity(e.getMessage())
        .type(MediaType.TEXT_PLAIN)
        .build();
  } catch (Exception e) {
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:getContainer exception: " + stackTrace);
    // NOTE(review): entity is e.getCause(), a Throwable that may be null —
    // likely intended to be e.getMessage(); confirm before changing.
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(e.getCause())
        .type(MediaType.TEXT_PLAIN)
        .build();
  }
}
/** Service root: reports the service name, version, and where to find the API listing. */
@GET
@Produces(MediaType.APPLICATION_JSON)
public JSONArray getInfo() {
  _logger.info("WMS:getInfo\n");
  try {
    // Chained puts, matching the builder style used elsewhere in this class.
    JSONObject map =
        new JSONObject()
            .put("Service name", "Batfish coordinator")
            .put(CoordConsts.SVC_KEY_VERSION, Version.getVersion())
            .put(
                "APIs",
                "Enter ../application.wadl (relative to your URL) to see supported methods");
    return successResponse(map);
  } catch (Exception e) {
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:getInfo exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
/**
 * Fetches the specified object from the specified container, testrig.
 *
 * @param apiKey The API key of the client
 * @param clientVersion The version of the client
 * @param containerName The container in which the object resides
 * @param testrigName The testrig in which the object resides
 * @param objectName The name of the object
 * @return a 200 response streaming the file as an attachment, 404 if the object does not
 *     exist, 400 on a validation/access error, 500 on an unexpected error
 */
@POST
@Path(CoordConsts.SVC_RSC_GET_OBJECT)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response getObject(
    @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
    @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
    @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
    @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
    @FormDataParam(CoordConsts.SVC_KEY_OBJECT_NAME) String objectName) {
  try {
    _logger.info("WMS:getObject " + testrigName + " --> " + objectName + "\n");
    // Validation order determines which error message a caller sees first.
    checkStringParam(apiKey, "API key");
    checkStringParam(clientVersion, "Client version");
    checkStringParam(containerName, "Container name");
    checkStringParam(testrigName, "Testrig name");
    checkStringParam(objectName, "Object name");
    checkApiKeyValidity(apiKey);
    checkClientVersion(clientVersion);
    checkContainerAccessibility(apiKey, containerName);
    // NOTE(review): objectName is resolved to a path by WorkMgr — confirm it
    // guards against path traversal (e.g. "../") before trusting client input.
    java.nio.file.Path file =
        Main.getWorkMgr().getTestrigObject(containerName, testrigName, objectName);
    if (file == null || !Files.exists(file)) {
      return Response.status(Response.Status.NOT_FOUND)
          .entity("File not found")
          .type(MediaType.TEXT_PLAIN)
          .build();
    }
    // Serve the file as a download, exposing the name both in the standard
    // Content-Disposition header and in the service-specific filename header.
    String filename = file.getFileName().toString();
    return Response.ok(file.toFile(), MediaType.APPLICATION_OCTET_STREAM)
        .header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
        .header(CoordConsts.SVC_FILENAME_HDR, filename)
        .build();
  } catch (FileExistsException
      | FileNotFoundException
      | IllegalArgumentException
      | AccessControlException e) {
    // Expected client-side errors -> 400 with the message as plain text.
    return Response.status(Response.Status.BAD_REQUEST)
        .entity(e.getMessage())
        .type(MediaType.TEXT_PLAIN)
        .build();
  } catch (Exception e) {
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:getObject exception: " + stackTrace);
    // NOTE(review): entity is e.getCause(), a Throwable that may be null —
    // likely intended to be e.getMessage(); confirm before changing.
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
        .entity(e.getCause())
        .type(MediaType.TEXT_PLAIN)
        .build();
  }
}
/**
 * Returns the globally configured question templates.
 *
 * @param apiKey The API key of the client
 * @return a JSON array: {@code [SVC_KEY_SUCCESS, {SVC_KEY_QUESTION_LIST: templates}]} on
 *     success, or {@code [SVC_KEY_FAILURE, message]} when the templates directory is not
 *     configured or on error
 */
@POST
@Path(CoordConsts.SVC_RSC_GET_QUESTION_TEMPLATES)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray getQuestionTemplates(@FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey) {
  try {
    // NOTE(review): logs the raw API key — consider redacting secrets from logs.
    _logger.info("WMS:getQuestionTemplates " + apiKey + "\n");
    checkStringParam(apiKey, "API key");
    checkApiKeyValidity(apiKey);
    Map<String, String> questionTemplates = Main.getQuestionTemplates();
    if (questionTemplates == null) {
      return failureResponse("Question templates dir is not configured");
    } else {
      return successResponse(
          new JSONObject().put(CoordConsts.SVC_KEY_QUESTION_LIST, questionTemplates));
    }
  } catch (Exception e) {
    // All errors funnel here: log the full stack trace for diagnosis.
    String stackTrace = ExceptionUtils.getFullStackTrace(e);
    _logger.error("WMS:getQuestionTemplates exception: " + stackTrace);
    return failureResponse(e.getMessage());
  }
}
@GET
@Path(CoordConsts.SVC_RSC_GETSTATUS)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray getStatus() {
try {
_logger.info("WMS:getStatus\n");
JSONObject retObject = Main.getWorkMgr().getStatusJson();
retObject.put("service-version", Version.getVersion());
return successResponse(retObject);
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:getStatus exception: " + stackTrace);
return failureResponse(e.getMessage());
}
}
/**
* Obtain the counts of completed and incomplete work items
*
* @param apiKey The API key of the client
* @param clientVersion The version of the client
* @param workId The work ID to check
* @return TODO: document JSON response
*/
@POST
@Path(CoordConsts.SVC_RSC_GET_WORKSTATUS)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray getWorkStatus(
@FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
@FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
@FormDataParam(CoordConsts.SVC_KEY_WORKID) String workId) {
try {
_logger.info("WMS:getWorkStatus " + workId + "\n");
checkStringParam(apiKey, "API key");
checkStringParam(clientVersion, "Client version");
checkStringParam(workId, "work id");
checkApiKeyValidity(apiKey);
checkClientVersion(clientVersion);
QueuedWork work = Main.getWorkMgr().getWork(UUID.fromString(workId));
if (work == null) {
return failureResponse("work with the specified id does not exist or is not inaccessible");
}
checkContainerAccessibility(apiKey, work.getWorkItem().getContainerName());
BatfishObjectMapper mapper = new BatfishObjectMapper();
String taskStr = mapper.writeValueAsString(work.getLastTaskCheckResult());
return successResponse(
new JSONObject()
.put(CoordConsts.SVC_KEY_WORKSTATUS, work.getStatus().toString())
.put(CoordConsts.SVC_KEY_TASKSTATUS, taskStr));
} catch (FileExistsException
| FileNotFoundException
| IllegalArgumentException
| AccessControlException e) {
_logger.error("WMS:getWorkStatus exception: " + e.getMessage() + "\n");
return failureResponse(e.getMessage());
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:getWorkStatus exception: " + stackTrace);
return failureResponse(e.getMessage());
}
}
/**
* Initialize a new container
*
* @param apiKey The API key of the client
* @param clientVersion The version of the client
* @param containerName The name of the container to initialize (overrides containerPrefix)
* @param containerPrefix The prefix used to generate the container name (ignored if containerName
* is not empty)
* @return TODO: document JSON response
*/
@POST
@Path(CoordConsts.SVC_RSC_INIT_CONTAINER)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray initContainer(
@FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
@FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
@FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
@FormDataParam(CoordConsts.SVC_KEY_CONTAINER_PREFIX) String containerPrefix) {
try {
_logger.info("WMS:initContainer " + containerPrefix + "\n");
checkStringParam(apiKey, "API key");
checkStringParam(clientVersion, "Client version");
if (containerName == null || containerName.equals("")) {
checkStringParam(containerPrefix, "Container prefix");
}
checkApiKeyValidity(apiKey);
checkClientVersion(clientVersion);
String outputContainerName = Main.getWorkMgr().initContainer(containerName, containerPrefix);
Main.getAuthorizer().authorizeContainer(apiKey, outputContainerName);
return successResponse(
new JSONObject().put(CoordConsts.SVC_KEY_CONTAINER_NAME, outputContainerName));
} catch (FileExistsException
| FileNotFoundException
| IllegalArgumentException
| AccessControlException e) {
_logger.error("WMS:initContainer exception: " + e.getMessage() + "\n");
return failureResponse(e.getMessage());
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:initContainer exception: " + stackTrace);
return failureResponse(e.getMessage());
}
}
  /**
   * List the analyses under the specified container
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The name of the container whose analyses are to be listed
   * @return On success, a JSON response whose {@code SVC_KEY_ANALYSIS_LIST} entry maps each
   *     analysis name to an object mapping its question names to question JSON
   */
  @POST
  @Path(CoordConsts.SVC_RSC_LIST_ANALYSES)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray listAnalyses(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName) {
    try {
      _logger.info("WMS:listAnalyses " + apiKey + " " + containerName + "\n");
      // Validate inputs, then authenticate and authorize before touching storage.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      JSONObject retObject = new JSONObject();
      for (String analysisName : Main.getWorkMgr().listAnalyses(containerName)) {
        JSONObject analysisJson = new JSONObject();
        // Inline each question's JSON text under its name within the analysis entry.
        for (String questionName :
            Main.getWorkMgr().listAnalysisQuestions(containerName, analysisName)) {
          String questionText =
              Main.getWorkMgr().getAnalysisQuestion(containerName, analysisName, questionName);
          analysisJson.put(questionName, new JSONObject(questionText));
        }
        retObject.put(analysisName, analysisJson);
      }
      return successResponse(new JSONObject().put(CoordConsts.SVC_KEY_ANALYSIS_LIST, retObject));
    } catch (FileExistsException
        | FileNotFoundException
        | IllegalArgumentException
        | AccessControlException e) {
      // Client-side errors: log the message only.
      _logger.error("WMS:listAnalyses exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:listAnalyses exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
/**
* List the containers that the specified API key can access
*
* @param apiKey The API key of the client
* @param clientVersion The version of the client
* @return TODO: document JSON response
*/
@POST
@Path(CoordConsts.SVC_RSC_LIST_CONTAINERS)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray listContainers(
@FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
@FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion) {
try {
_logger.info("WMS:listContainers " + apiKey + "\n");
checkStringParam(apiKey, "API key");
checkStringParam(clientVersion, "Client version");
checkApiKeyValidity(apiKey);
checkClientVersion(clientVersion);
if (!_settings.getDefaultKeyListings() && apiKey.equals(CoordConsts.DEFAULT_API_KEY)) {
throw new AccessControlException("Listing containers is not allowed with Default API key");
}
SortedSet<String> containerList = Main.getWorkMgr().listContainers(apiKey);
return successResponse(
new JSONObject().put(CoordConsts.SVC_KEY_CONTAINER_LIST, new JSONArray(containerList)));
} catch (FileExistsException
| FileNotFoundException
| IllegalArgumentException
| AccessControlException e) {
_logger.error("WMS:listContainers exception: " + e.getMessage() + "\n");
return failureResponse(e.getMessage());
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:listContainers exception: " + stackTrace);
return failureResponse(e.getMessage());
}
}
  /**
   * Lists the environments under the specified container, testrig
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The container in which the testrig and environments reside
   * @param testrigName The name of the testrig whose environments are to be listed
   * @return On success, a JSON response whose {@code SVC_KEY_ENVIRONMENT_LIST} entry is the sorted
   *     array of environment names
   */
  @POST
  @Path(CoordConsts.SVC_RSC_LIST_ENVIRONMENTS)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray listEnvironments(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
      @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName) {
    try {
      _logger.info("WMS:listEnvironments " + apiKey + " " + containerName + "\n");
      // Validate inputs, then authenticate and authorize before listing.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkStringParam(testrigName, "Testrig name");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      SortedSet<String> environmentList =
          Main.getWorkMgr().listEnvironments(containerName, testrigName);
      return successResponse(
          new JSONObject()
              .put(CoordConsts.SVC_KEY_ENVIRONMENT_LIST, new JSONArray(environmentList)));
    } catch (FileExistsException
        | FileNotFoundException
        | IllegalArgumentException
        | AccessControlException e) {
      // Client-side errors: log the message only.
      _logger.error("WMS:listEnvironments exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:listEnvironments exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
/**
* List the questions under the specified container, testrig
*
* @param apiKey The API key of the client
* @param clientVersion The version of the client
* @param containerName The name of the container in which the testrig and questions reside
* @return TODO: document JSON response
*/
@POST
@Path(CoordConsts.SVC_RSC_LIST_QUESTIONS)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray listQuestions(
@FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
@FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
@FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName) {
try {
_logger.info("WMS:listQuestions " + apiKey + " " + containerName + "\n");
checkStringParam(apiKey, "API key");
checkStringParam(clientVersion, "Client version");
checkStringParam(containerName, "Container name");
checkApiKeyValidity(apiKey);
checkClientVersion(clientVersion);
checkContainerAccessibility(apiKey, containerName);
JSONObject retObject = new JSONObject();
for (String questionName : Main.getWorkMgr().listQuestions(containerName)) {
String questionText = Main.getWorkMgr().getQuestion(containerName, questionName);
retObject.put(questionName, new JSONObject(questionText));
}
return successResponse(new JSONObject().put(CoordConsts.SVC_KEY_QUESTION_LIST, retObject));
} catch (FileExistsException
| FileNotFoundException
| IllegalArgumentException
| AccessControlException e) {
_logger.error("WMS:listQuestions exception: " + e.getMessage() + "\n");
return failureResponse(e.getMessage());
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:listQuestions exception: " + stackTrace);
return failureResponse(e.getMessage());
}
}
  /**
   * List the testrigs under the specified container
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The name of the container whose testrigs are to be listed
   * @return On success, a JSON response whose {@code SVC_KEY_TESTRIG_LIST} entry is an array of
   *     objects each carrying a testrig's name and info string
   */
  @POST
  @Path(CoordConsts.SVC_RSC_LIST_TESTRIGS)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray listTestrigs(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName) {
    try {
      _logger.info("WMS:listTestrigs " + apiKey + " " + containerName + "\n");
      // Validate inputs, then authenticate and authorize before listing.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      JSONArray retArray = new JSONArray();
      SortedSet<String> testrigList = Main.getWorkMgr().listTestrigs(containerName);
      for (String testrig : testrigList) {
        // Pair each testrig name with its info string in a single array element.
        String testrigInfo = Main.getWorkMgr().getTestrigInfo(containerName, testrig);
        JSONObject jObject =
            new JSONObject()
                .put(CoordConsts.SVC_KEY_TESTRIG_NAME, testrig)
                .put(CoordConsts.SVC_KEY_TESTRIG_INFO, testrigInfo);
        retArray.put(jObject);
      }
      return successResponse(new JSONObject().put(CoordConsts.SVC_KEY_TESTRIG_LIST, retArray));
    } catch (FileExistsException
        | FileNotFoundException
        | IllegalArgumentException
        | AccessControlException e) {
      // Client-side errors: log the message only.
      _logger.error("WMS:listTestrigs exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:listTestrigs exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
  /**
   * Upload a custom object under the specified container, testrig.
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The name of the container in which the testrig resides
   * @param testrigName The name of the testrig under which to upload the object
   * @param objectName The name of the object to upload
   * @param fileStream The stream from which the object is read
   * @return On success, a JSON response whose "result" entry confirms the upload
   */
  @POST
  @Path(CoordConsts.SVC_RSC_PUT_OBJECT)
  @Consumes(MediaType.MULTIPART_FORM_DATA)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray putObject(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
      @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
      @FormDataParam(CoordConsts.SVC_KEY_OBJECT_NAME) String objectName,
      @FormDataParam(CoordConsts.SVC_KEY_FILE) InputStream fileStream) {
    try {
      // NOTE: uses _logger.infof here, unlike the string-concatenation style elsewhere.
      _logger.infof(
          "WMS:putObject %s %s %s / %s\n", apiKey, containerName, testrigName, objectName);
      // Validate inputs, then authenticate and authorize before writing.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkStringParam(testrigName, "Testrig name");
      checkStringParam(objectName, "Object name");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      Main.getWorkMgr().putObject(containerName, testrigName, objectName, fileStream);
      return successResponse(new JSONObject().put("result", "successfully uploaded custom object"));
    } catch (FileExistsException
        | FileNotFoundException
        | IllegalArgumentException
        | AccessControlException e) {
      // Client-side errors: log the message only.
      _logger.error("WMS:putObject exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:putObject exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
/**
* Queue a new work item
*
* @param apiKey The API key of the client
* @param clientVersion The version of the client
* @param workItemStr The work item to queue
* @return TODO: document JSON response
*/
@POST
@Path(CoordConsts.SVC_RSC_QUEUE_WORK)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray queueWork(
@FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
@FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
@FormDataParam(CoordConsts.SVC_KEY_WORKITEM) String workItemStr) {
try {
_logger.info("WMS:queueWork " + apiKey + " " + workItemStr + "\n");
checkStringParam(apiKey, "API key");
checkStringParam(clientVersion, "Client version");
checkStringParam(workItemStr, "Workitem string");
checkApiKeyValidity(apiKey);
checkClientVersion(clientVersion);
WorkItem workItem = WorkItem.fromJsonString(workItemStr);
checkContainerAccessibility(apiKey, workItem.getContainerName());
boolean result = Main.getWorkMgr().queueWork(workItem);
return successResponse(new JSONObject().put("result", result));
} catch (FileExistsException
| FileNotFoundException
| IllegalArgumentException
| AccessControlException e) {
_logger.error("WMS:queueWork exception: " + e.getMessage() + "\n");
return failureResponse(e.getMessage());
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:queueWork exception: " + stackTrace);
return failureResponse(e.getMessage());
}
}
  /**
   * Sync testrigs
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The container to sync testrigs for
   * @param pluginId The plugin id to use for syncing
   * @param forceStr Whether to force the sync ("true"/"false"); null or empty means false
   * @return On success, a JSON response whose "numCommits" entry reports the number of commits
   */
  @POST
  @Path(CoordConsts.SVC_RSC_SYNC_TESTRIGS_SYNC_NOW)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray syncTestrigsSyncNow(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
      @FormDataParam(CoordConsts.SVC_KEY_PLUGIN_ID) String pluginId,
      @FormDataParam(CoordConsts.SVC_KEY_FORCE) String forceStr) {
    try {
      _logger.info(
          "WMS:syncTestrigsSyncNow " + apiKey + " " + containerName + " " + pluginId + "\n");
      // Validate inputs, then authenticate and authorize. forceStr is optional.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkStringParam(pluginId, "Plugin Id");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      // Missing or empty forceStr defaults to a non-forced sync.
      boolean force = !Strings.isNullOrEmpty(forceStr) && Boolean.parseBoolean(forceStr);
      int numCommits = Main.getWorkMgr().syncTestrigsSyncNow(containerName, pluginId, force);
      return successResponse(new JSONObject().put("numCommits", numCommits));
    } catch (FileExistsException
        | FileNotFoundException
        | IllegalArgumentException
        | AccessControlException e) {
      // Client-side errors: log the message only.
      _logger.error("WMS:syncTestrigsSyncNow exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:syncTestrigsSyncNow exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
  /**
   * Update settings for syncing testrigs
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The container to sync testrigs for
   * @param pluginId The plugin id to use for syncing
   * @param settingsStr The stringified version of settings (a JSON map of string to string)
   * @return On success, a JSON response whose "result" entry reports whether the update succeeded
   */
  @POST
  @Path(CoordConsts.SVC_RSC_SYNC_TESTRIGS_UPDATE_SETTINGS)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray syncTestrigsUpdateSettings(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
      @FormDataParam(CoordConsts.SVC_KEY_PLUGIN_ID) String pluginId,
      @FormDataParam(CoordConsts.SVC_KEY_SETTINGS) String settingsStr) {
    try {
      _logger.info(
          "WMS:syncTestrigsUpdateSettings "
              + apiKey
              + " "
              + containerName
              + " "
              + pluginId
              + " "
              + settingsStr
              + "\n");
      // Validate inputs, then authenticate and authorize before applying settings.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkStringParam(pluginId, "Plugin Id");
      checkStringParam(settingsStr, "Settings");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      // Deserialize the settings payload into a string-to-string map.
      BatfishObjectMapper mapper = new BatfishObjectMapper();
      Map<String, String> settings =
          mapper.readValue(settingsStr, new TypeReference<Map<String, String>>() {});
      boolean result =
          Main.getWorkMgr().syncTestrigsUpdateSettings(containerName, pluginId, settings);
      return successResponse(new JSONObject().put("result", result));
    } catch (FileExistsException
        | FileNotFoundException
        | IllegalArgumentException
        | AccessControlException e) {
      // Client-side errors: log the message only.
      _logger.error("WMS:syncTestrigsUpdateSettings exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:syncTestrigsUpdateSettings exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
@GET
@Path("test")
@Produces(MediaType.TEXT_PLAIN)
public String test() {
try {
_logger.info("WMS:test\n");
JSONArray id = successResponse(Main.getWorkMgr().getStatusJson());
return id.toString();
// return Response.ok()
// .entity(id)
// // .header("Access-Control-Allow-Origin","*")
// .header("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT")
// .allow("OPTIONS")
// .build();
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:test exception: " + stackTrace);
// return Response.serverError().build();
return "got error";
}
}
  /**
   * Uploads a new environment under the container, testrig
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The name of the container under which the testrig resides
   * @param testrigName The name of the testrig under which to upload the new environment
   * @param baseEnvName The base environment name from which the new environment initially inherits
   * @param envName The name of the new environment to create
   * @param fileStream The stream from which the contents of the new environment are read. These
   *     contents overwrite those inherited from any base environment.
   * @return On success, a JSON response whose "result" entry confirms the upload
   */
  @POST
  @Path(CoordConsts.SVC_RSC_UPLOAD_ENV)
  @Consumes(MediaType.MULTIPART_FORM_DATA)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray uploadEnvironment(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
      @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
      @FormDataParam(CoordConsts.SVC_KEY_BASE_ENV_NAME) String baseEnvName,
      @FormDataParam(CoordConsts.SVC_KEY_ENV_NAME) String envName,
      @FormDataParam(CoordConsts.SVC_KEY_ZIPFILE) InputStream fileStream) {
    try {
      _logger.info(
          "WMS:uploadEnvironment "
              + apiKey
              + " "
              + containerName
              + " "
              + testrigName
              + " / "
              + envName
              + "\n");
      // Validate inputs (baseEnvName is optional), then authenticate and authorize.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkStringParam(testrigName, "Testrig name");
      checkStringParam(envName, "Environment name");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      Main.getWorkMgr()
          .uploadEnvironment(containerName, testrigName, baseEnvName, envName, fileStream);
      return successResponse(new JSONObject().put("result", "successfully uploaded environment"));
    } catch (BatfishException e) {
      // NOTE(review): unlike sibling endpoints, this one treats BatfishException as the
      // client-error case rather than the FileExists/FileNotFound/etc. multi-catch.
      _logger.error("WMS:uploadEnvironment exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:uploadEnvironment exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
  /**
   * Upload a new question under the specified container, testrig. A file containing the question
   * and a file containing the parameters must be provided.
   *
   * @param apiKey The API key of the client
   * @param clientVersion The version of the client
   * @param containerName The name of the container under which the testrig resides
   * @param testrigName The name of the testrig about which to ask the question
   * @param qName The name of the question
   * @param fileStream The stream from which the question is read
   * @param paramFileStream The stream from which the parameters are read
   * @return On success, a JSON response whose "result" entry confirms the upload
   */
  @POST
  @Path(CoordConsts.SVC_RSC_UPLOAD_QUESTION)
  @Consumes(MediaType.MULTIPART_FORM_DATA)
  @Produces(MediaType.APPLICATION_JSON)
  public JSONArray uploadQuestion(
      @FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
      @FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
      @FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
      @FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
      @FormDataParam(CoordConsts.SVC_KEY_QUESTION_NAME) String qName,
      @FormDataParam(CoordConsts.SVC_KEY_FILE) InputStream fileStream,
      @FormDataParam(CoordConsts.SVC_KEY_FILE2) InputStream paramFileStream) {
    try {
      _logger.info(
          "WMS:uploadQuestion "
              + apiKey
              + " "
              + containerName
              + " "
              + testrigName
              + " / "
              + qName
              + "\n");
      // NOTE(review): testrigName is logged but neither validated with checkStringParam nor
      // forwarded to uploadQuestion below — confirm whether that is intentional.
      checkStringParam(apiKey, "API key");
      checkStringParam(clientVersion, "Client version");
      checkStringParam(containerName, "Container name");
      checkStringParam(qName, "Question name");
      checkApiKeyValidity(apiKey);
      checkClientVersion(clientVersion);
      checkContainerAccessibility(apiKey, containerName);
      Main.getWorkMgr().uploadQuestion(containerName, qName, fileStream, paramFileStream);
      return successResponse(new JSONObject().put("result", "successfully uploaded question"));
    } catch (FileExistsException
        | FileNotFoundException
        | IllegalArgumentException
        | AccessControlException e) {
      // Client-side errors: log the message only.
      _logger.error("WMS:uploadQuestion exception: " + e.getMessage() + "\n");
      return failureResponse(e.getMessage());
    } catch (Exception e) {
      // Unexpected errors: log the full stack trace for diagnosis.
      String stackTrace = ExceptionUtils.getFullStackTrace(e);
      _logger.error("WMS:uploadQuestion exception: " + stackTrace);
      return failureResponse(e.getMessage());
    }
  }
/**
* Uploads a new testrig under the specified container
*
* @param apiKey The API key of the client
* @param clientVersion The version of the client
* @param containerName The name of the container under which to upload the new testrig
* @param testrigName The name of the new testrig to create
* @param fileStream The stream from which the new testrig is read
* @return TODO: document JSON response
*/
@POST
@Path(CoordConsts.SVC_RSC_UPLOAD_TESTRIG)
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
public JSONArray uploadTestrig(
@FormDataParam(CoordConsts.SVC_KEY_API_KEY) String apiKey,
@FormDataParam(CoordConsts.SVC_KEY_VERSION) String clientVersion,
@FormDataParam(CoordConsts.SVC_KEY_CONTAINER_NAME) String containerName,
@FormDataParam(CoordConsts.SVC_KEY_TESTRIG_NAME) String testrigName,
@FormDataParam(CoordConsts.SVC_KEY_ZIPFILE) InputStream fileStream,
@FormDataParam(CoordConsts.SVC_KEY_AUTO_ANALYZE_TESTRIG) String autoAnalyzeStr) {
try {
_logger.info("WMS:uploadTestrig " + apiKey + " " + containerName + " " + testrigName + "\n");
checkStringParam(apiKey, "API key");
checkStringParam(clientVersion, "Client version");
checkStringParam(containerName, "Container name");
checkStringParam(testrigName, "Testrig name");
checkApiKeyValidity(apiKey);
checkClientVersion(clientVersion);
checkContainerAccessibility(apiKey, containerName);
boolean autoAnalyze = false;
if (!Strings.isNullOrEmpty(autoAnalyzeStr)) {
autoAnalyze = Boolean.parseBoolean(autoAnalyzeStr);
}
Main.getWorkMgr().uploadTestrig(containerName, testrigName, fileStream, autoAnalyze);
return successResponse(new JSONObject().put("result", "successfully uploaded testrig"));
} catch (FileExistsException
| FileNotFoundException
| IllegalArgumentException
| AccessControlException
| ZipException e) {
_logger.error("WMS:uploadTestrig exception: " + e.getMessage() + "\n");
return failureResponse(e.getMessage());
} catch (Exception e) {
String stackTrace = ExceptionUtils.getFullStackTrace(e);
_logger.error("WMS:uploadTestrig exception: " + stackTrace);
return failureResponse(e.getMessage());
}
}
}
|
package org.jtrfp.trcl.flow;
import java.awt.Point;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.Camera;
import org.jtrfp.trcl.DisplayModeHandler;
import org.jtrfp.trcl.NAVSystem;
import org.jtrfp.trcl.OverworldSystem;
import org.jtrfp.trcl.SkySystem;
import org.jtrfp.trcl.Tunnel;
import org.jtrfp.trcl.World;
import org.jtrfp.trcl.beh.CollidesWithTerrain;
import org.jtrfp.trcl.beh.CollidesWithTunnelWalls;
import org.jtrfp.trcl.beh.HeadingXAlwaysPositiveBehavior;
import org.jtrfp.trcl.beh.LoopingPositionBehavior;
import org.jtrfp.trcl.beh.MatchDirection;
import org.jtrfp.trcl.beh.MatchPosition;
import org.jtrfp.trcl.beh.SkyCubeCloudModeUpdateBehavior;
import org.jtrfp.trcl.beh.phy.MovesByVelocity;
import org.jtrfp.trcl.core.Features;
import org.jtrfp.trcl.core.Renderer;
import org.jtrfp.trcl.core.ResourceManager;
import org.jtrfp.trcl.core.TR;
import org.jtrfp.trcl.file.AbstractTriplet;
import org.jtrfp.trcl.file.DirectionVector;
import org.jtrfp.trcl.file.LVLFile;
import org.jtrfp.trcl.file.Location3D;
import org.jtrfp.trcl.file.NAVFile.NAVSubObject;
import org.jtrfp.trcl.file.NAVFile.START;
import org.jtrfp.trcl.file.TDFFile;
import org.jtrfp.trcl.flow.LoadingProgressReporter.UpdateHandler;
import org.jtrfp.trcl.flow.NAVObjective.Factory;
import org.jtrfp.trcl.obj.ObjectDirection;
import org.jtrfp.trcl.obj.Player;
import org.jtrfp.trcl.obj.PortalExit;
import org.jtrfp.trcl.obj.Projectile;
import org.jtrfp.trcl.obj.ProjectileFactory;
import org.jtrfp.trcl.obj.Propelled;
import org.jtrfp.trcl.obj.TunnelEntranceObject;
import org.jtrfp.trcl.obj.WorldObject;
import org.jtrfp.trcl.snd.GPUResidentMOD;
import org.jtrfp.trcl.snd.MusicPlaybackEvent;
import org.jtrfp.trcl.snd.SoundSystem;
public class Mission {
    // PROPERTIES
    // Bound-property names fired through the PropertyChangeSupport below.
    public static final String MISSION_MODE = "missionMode";
    public static final String SATELLITE_VIEW = "satelliteView";
    private final TR tr;
    // NAV objectives of this mission. (order semantics: presumably play order — confirm)
    private final List<NAVObjective>
    navs = new LinkedList<NAVObjective>();
    private final LVLFile lvl;
    // Tunnels keyed by String, portal exits keyed by Integer.
    // (presumably tunnel name / tunnel index — TODO confirm against populating code)
    private final HashMap<String, Tunnel>
    tunnels = new HashMap<String, Tunnel>();
    private final HashMap<Integer, PortalExit>
    tunnelPortals = new HashMap<Integer, PortalExit>();
    // x,y,z start coordinates for the player.
    private double[] playerStartPosition
    = new double[3];
    private List<NAVSubObject> navSubObjects;
    private ObjectDirection playerStartDirection;
    private final Game game;
    private final String levelName;
    private OverworldSystem overworldSystem;
    // Single-element holder; go() synchronizes on it and returns element 0 when non-null.
    private final Result[] missionEnd = new Result[]{null};
    // Running kill/destruction tallies for the mission summary.
    private int groundTargetsDestroyed=0,
    airTargetsDestroyed=0,
    foliageDestroyed=0;
    private int totalNumTunnels;
    private final LinkedList<Tunnel>
    tunnelsRemaining = new LinkedList<Tunnel>();
    private final boolean showIntro;
    // Background music event; volatile because it is touched from multiple threads.
    private volatile MusicPlaybackEvent
    bgMusic;
    // Guards the body of go(); see the synchronized block there.
    private final Object missionLock = new Object();
    private final Map<Integer,TunnelEntranceObject>
    tunnelMap = new HashMap<Integer,TunnelEntranceObject>();
    private boolean bossFight = false, satelliteView = false;
    private MissionMode missionMode = new Mission.LoadingMode();
    private final PropertyChangeSupport pcs = new PropertyChangeSupport(this);
    private Tunnel currentTunnel;
    private final DisplayModeHandler displayHandler;
    // Display-mode object sets; levelLoadingMode is populated in the constructor, the
    // others are assigned elsewhere. emptyMode is intentionally an empty set.
    private Object [] levelLoadingMode, gameplayMode, briefingMode, summaryMode, emptyMode= new Object[]{};
    /** Loading phases; go() generates one progress sub-reporter per constant. */
    private enum LoadingStages {
    navs, tunnels, overworld
    }// end LoadingStages
    //ROOT STATES
    // Marker-interface hierarchy describing the mission's run state; instances are created
    // as anonymous classes and passed to tr.setRunState() so code elsewhere can test the
    // current phase with instanceof.
    public interface MissionState extends Game.GameRunningMode{}
    public interface ConstructingState extends MissionState{}
    public interface ConstructedState extends MissionState{}
    public interface ActiveMissionState extends ConstructedState{}
    public interface LoadingState extends ActiveMissionState{}
    public interface GameplayState extends ActiveMissionState{}
    // Briefing sub-states (planet brief, enemy brief, end-of-mission summary).
    public interface Briefing extends GameplayState{}
    public interface PlanetBrief extends Briefing{}
    public interface EnemyBrief extends Briefing{}
    public interface MissionSummary extends Briefing{}
    // Player-activity sub-states (overworld flight, chambers, tunnels).
    public interface PlayerActivity extends GameplayState{}
    public interface OverworldState extends PlayerActivity{}
    public interface ChamberState extends OverworldState{}
    public interface TunnelState extends PlayerActivity{}
/**
 * Creates a mission for the given level but does not start it; call go().
 * @param tr        engine facade supplying renderer, sound and resources
 * @param game      owning game (expected to be a TVF3Game)
 * @param lvl       parsed LVL level definition to load
 * @param levelName human-readable name shown during loading
 * @param showIntro whether to play the briefing sequence before gameplay
 */
public Mission(TR tr, Game game, LVLFile lvl, String levelName, boolean showIntro) {
this.tr = tr;
this.lvl = lvl;
this.game = game;
this.levelName = levelName;
this.showIntro = showIntro;
this.displayHandler = new DisplayModeHandler(tr.getDefaultGrid());
Features.init(this);
tr.setRunState(new ConstructingState(){});
// Loading-screen display set; the other mode sets are built in go() once
// the overworld exists.
levelLoadingMode = new Object[]{
((TVF3Game)game).levelLoadingScreen,
((TVF3Game)game).upfrontDisplay
};
tr.setRunState(new ConstructedState(){});
}// end Mission
/**
 * Runs the mission from loading through gameplay until an end condition is
 * posted via notifyMissionEnd(...): loads level, tunnels and NAV objectives,
 * starts background music, optionally plays the briefing, then blocks until
 * the mission ends and finally shows the completion summary.
 * Holds missionLock for its entire duration (see abort()).
 * @return the mission Result; null indicates the player was destroyed.
 */
public Result go() {
tr.setRunState(new LoadingState(){});
setMissionMode(new Mission.LoadingMode());
synchronized(missionLock){
// Abort check: mission may have been ended before it even started.
synchronized(missionEnd){
if(missionEnd[0]!=null)
return missionEnd[0];
}
tr.getThreadManager().setPaused(true);
// Destroy any projectiles left over from a previous mission.
for(ProjectileFactory pf:tr.getResourceManager().getProjectileFactories())
for(Projectile proj:pf.getProjectiles())
proj.destroy();
System.out.println("Starting GampeplayLevel loading sequence...");
final LoadingProgressReporter rootProgress = LoadingProgressReporter.Impl
.createRoot(new UpdateHandler() {
@Override
public void update(double unitProgress) {
((TVF3Game)game).getLevelLoadingScreen().setLoadingProgress(unitProgress);
}
});
final LoadingProgressReporter[] progressStages = rootProgress
.generateSubReporters(LoadingStages.values().length);
final Renderer renderer = tr.mainRenderer.get();
renderer.getCamera().probeForBehavior(SkyCubeCloudModeUpdateBehavior.class).setEnable(false);
renderer.getSkyCube().setSkyCubeGen(GameShell.DEFAULT_GRADIENT);
final Camera camera = renderer.getCamera();
camera.setHeading(Vector3D.PLUS_I);
camera.setTop(Vector3D.PLUS_J);
((TVF3Game)game).levelLoadingMode();
displayHandler.setDisplayMode(levelLoadingMode);
//((TVF3Game)game).setDisplayMode(((TVF3Game)game).levelLoadingMode);
((TVF3Game)game).getUpfrontDisplay().submitPersistentMessage(levelName);
try {
final ResourceManager rm = tr.getResourceManager();
final Player player = ((TVF3Game)tr.getGame()).getPlayer();
final TDFFile tdf = rm.getTDFData(lvl.getTunnelDefinitionFile());
player.setActive(false);
// Abort check
synchronized(missionEnd){
if(missionEnd[0]!=null)
return missionEnd[0];
}
overworldSystem = new OverworldSystem(tr,
progressStages[LoadingStages.overworld.ordinal()]);
// Display-mode object sets for the later mission phases.
briefingMode = new Object[]{
((TVF3Game)game).briefingScreen,
overworldSystem
};
gameplayMode = new Object[]{
((TVF3Game)game).navSystem,
((TVF3Game)game).hudSystem,
((TVF3Game)game).upfrontDisplay,
overworldSystem,
rm.getDebrisSystem(),
rm.getPowerupSystem(),
rm.getProjectileFactories(),
rm.getExplosionFactory(),
rm.getSmokeSystem()
};
summaryMode = new Object[]{
((TVF3Game)game).getBriefingScreen(),
overworldSystem
};
getOverworldSystem().loadLevel(lvl, tdf);
System.out.println("\t...Done.");
// Install NAVs
final NAVSystem navSystem = ((TVF3Game)tr.getGame()).getNavSystem();
navSubObjects = rm.getNAVData(lvl.getNavigationFile())
.getNavObjects();
// First NAV object is the START marker defining the spawn point.
START s = (START) navSubObjects.get(0);
Location3D l3d = s.getLocationOnMap();
// Note the Z/X axis swap between legacy and world coordinates.
playerStartPosition[0] = TR.legacy2Modern(l3d.getZ());
playerStartPosition[2] = TR.legacy2Modern(l3d.getX());
final double HEIGHT_PADDING = 10000;
// Start at least HEIGHT_PADDING above the terrain at the spawn square.
playerStartPosition[1] = Math.max(HEIGHT_PADDING + getOverworldSystem().getAltitudeMap().heightAt(
TR.legacy2Modern(l3d.getZ()),
TR.legacy2Modern(l3d.getX())),TR.legacy2Modern(l3d.getY()));
playerStartDirection = new ObjectDirection(s.getRoll(),
s.getPitch(), s.getYaw());
// ////// INITIAL HEADING
player.setPosition(getPlayerStartPosition());
player.setDirection(getPlayerStartDirection());
player.setHeading(player.getHeading().negate());// Kludge to fix
// incorrect heading
///////// STATE
final Propelled propelled = player.probeForBehavior(Propelled.class);
propelled.setPropulsion(propelled.getMinPropulsion());
installTunnels(tdf,progressStages[LoadingStages.tunnels.ordinal()]);
Factory f = new NAVObjective.Factory(tr);
final LoadingProgressReporter[] navProgress = progressStages[LoadingStages.navs
.ordinal()].generateSubReporters(navSubObjects.size());
for (int i = 0; i < navSubObjects.size(); i++) {
final NAVSubObject obj = navSubObjects.get(i);
f.create(tr, obj, navs);
navProgress[i].complete();
}// end for(navSubObjects)
navSystem.updateNAVState();
player.resetVelocityRotMomentum();
// Optional developer override of the spawn point via system properties.
final String startX = System.getProperty("org.jtrfp.trcl.startX");
final String startY = System.getProperty("org.jtrfp.trcl.startY");
final String startZ = System.getProperty("org.jtrfp.trcl.startZ");
final double[] playerPos = player.getPosition();
if (startX != null && startY != null && startZ != null) {
System.out.println("Using user-specified start point");
final int sX = Integer.parseInt(startX);
final int sY = Integer.parseInt(startY);
final int sZ = Integer.parseInt(startZ);
playerPos[0] = sX;
playerPos[1] = sY;
playerPos[2] = sZ;
player.notifyPositionChange();
}// end if(user start point)
System.out.println("Start position set to " + player.getPosition()[0]+" "+player.getPosition()[1]+" "+player.getPosition()[2]);
System.out.println("Setting sun vector");
final AbstractTriplet sunVector = lvl.getSunlightDirectionVector();
// Sun vector must be set on the GL thread; block until done.
tr.getThreadManager().submitToGL(new Callable<Void>() {
@Override
public Void call() throws Exception {
tr.mainRenderer.get().setSunVector(
new Vector3D(sunVector.getX(), sunVector.getY(),
sunVector.getZ()).normalize());
return null;
}
}).get();
System.out.println("\t...Done.");
} catch (Exception e) {
e.printStackTrace();
}
// Optional developer shortcut: skip the first N NAV objectives.
if (System.getProperties().containsKey(
"org.jtrfp.trcl.flow.Mission.skipNavs")) {
try {
final int skips = Integer.parseInt(System
.getProperty("org.jtrfp.trcl.flow.Mission.skipNavs"));
System.out.println("Skipping " + skips + " navs.");
for (int i = 0; i < skips; i++) {
removeNAVObjective(currentNAVObjective());
}// end for(skips)
} catch (NumberFormatException e) {
System.err
.println("Invalid format for property \"org.jtrfp.trcl.flow.Mission.skipNavs\". Must be integer.");
}
}// end if(containsKey)
// Transition to gameplay mode.
// Abort check
synchronized (missionEnd) {
if (missionEnd[0] != null)
return missionEnd[0];
}//end sync(missionEnd)
// Start background music unless something (e.g. a boss fight) already did.
final SoundSystem ss = Mission.this.tr.soundSystem.get();
MusicPlaybackEvent evt;
Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
evt =ss
.getMusicFactory()
.create(new GPUResidentMOD(tr, tr
.getResourceManager().getMOD(
lvl.getBackgroundMusicFile())),
true));
synchronized(Mission.this){
if(bgMusic==null){
bgMusic=evt;
bgMusic.play();
}
}//end sync(Mission.this)
((TVF3Game)game).getUpfrontDisplay().removePersistentMessage();
tr.getThreadManager().setPaused(false);
if(showIntro){
tr.setRunState(new Briefing(){});
setMissionMode(new Mission.IntroMode());
displayHandler.setDisplayMode(briefingMode);
((TVF3Game)game).getBriefingScreen().briefingSequence(lvl);//TODO: Convert to feature
}
setMissionMode(new Mission.AboveGroundMode());
// Restore the level's sky, lighting and HUD for gameplay.
final SkySystem skySystem = getOverworldSystem().getSkySystem();
tr.mainRenderer.get().getCamera().probeForBehavior(SkyCubeCloudModeUpdateBehavior.class).setEnable(true);
renderer.getSkyCube().setSkyCubeGen(skySystem.getBelowCloudsSkyCubeGen());
renderer.setAmbientLight(skySystem.getSuggestedAmbientLight());
renderer.setSunColor(skySystem.getSuggestedSunColor());
((TVF3Game)game).getNavSystem() .activate();
displayHandler.setDisplayMode(gameplayMode);
((TVF3Game)game).getPlayer() .setActive(true);
((TVF3Game)tr.getGame()).setPaused(false);
tr.setRunState(new PlayerActivity(){});
//Wait for mission end
synchronized(missionEnd){
while(missionEnd[0]==null){try{missionEnd.wait();}
catch(InterruptedException e){break;}}}
//Completion summary
tr.setRunState(new Briefing(){});
if(missionEnd[0]!=null)
if(!missionEnd[0].isAbort()){
displayHandler.setDisplayMode(summaryMode);
setMissionMode(new Mission.MissionSummaryMode());
((TVF3Game)game).getBriefingScreen().missionCompleteSummary(lvl,missionEnd[0]);
}//end if(proper ending)
bgMusic.stop();
cleanup();
return missionEnd[0];
}//end sync
}// end go()
/**
 * The NAV objective the player should currently pursue.
 * @return head of the remaining-objectives list, or null if none remain.
 */
public NAVObjective currentNAVObjective() {
return navs.isEmpty() ? null : navs.get(0);
}//end currentNAVObjective()
/**
 * Removes a completed NAV objective. When the last objective is removed,
 * the mission-complete sequence is triggered; otherwise the NAV display is
 * refreshed to show the next objective.
 * @param o the objective to remove
 */
public void removeNAVObjective(NAVObjective o) {
navs.remove(o);
if (navs.isEmpty()) {// idiomatic replacement for size() == 0
missionCompleteSequence();
} else
((TVF3Game)tr.getGame()).getNavSystem().updateNAVState();
}// end removeNAVObjective(...)
/**
 * Summary of a finished mission: destruction tallies plus the normalized
 * fraction of tunnels found. The abort flag marks a mission that was
 * cancelled rather than completed.
 */
public static class Result {
private final int airTargetsDestroyed;
private final int groundTargetsDestroyed;
private final int foliageDestroyed;
private final double tunnelsFoundPctNorm;
private boolean abort = false;
/**
 * @param airTargetsDestroyed    airborne targets destroyed
 * @param groundTargetsDestroyed ground targets destroyed
 * @param foliageDestroyed       foliage objects destroyed
 * @param tunnelsFoundPctNorm    tunnels found, normalized to [0,1]
 */
public Result(int airTargetsDestroyed, int groundTargetsDestroyed, int foliageDestroyed, double tunnelsFoundPctNorm) {
this.airTargetsDestroyed = airTargetsDestroyed;
this.groundTargetsDestroyed = groundTargetsDestroyed;
this.foliageDestroyed = foliageDestroyed;
this.tunnelsFoundPctNorm = tunnelsFoundPctNorm;
}//end constructor
/** @return airborne targets destroyed */
public int getAirTargetsDestroyed() {
return airTargetsDestroyed;
}
/** @return ground targets destroyed */
public int getGroundTargetsDestroyed() {
return groundTargetsDestroyed;
}
/** @return foliage objects destroyed */
public int getFoliageDestroyed() {
return foliageDestroyed;
}
/** @return fraction of tunnels found, in [0,1] */
public double getTunnelsFoundPctNorm() {
return tunnelsFoundPctNorm;
}
/** @return true if the mission was aborted rather than completed */
public boolean isAbort() {
return abort;
}
/** @param abort whether this result represents an aborted mission */
public void setAbort(boolean abort) {
this.abort = abort;
}
}// end Result
/**
 * @return the player's world-space start position (x,y,z). Note: this is
 *         the internal array, not a copy; callers should not mutate it.
 */
public double[] getPlayerStartPosition() {
return playerStartPosition;
}
/**
 * @return the player's start orientation, taken from the NAV START object.
 */
public ObjectDirection getPlayerStartDirection() {
return playerStartDirection;
}
/**
 * Creates all tunnels declared in the TDF file, reports their entrance/exit
 * positions to the debug reporter, and records the total tunnel count used
 * for the tunnels-found percentage in the mission Result.
 * @param tdf      tunnel definition file for this level
 * @param reporter progress reporter; one sub-reporter is consumed per tunnel
 */
private void installTunnels(TDFFile tdf, LoadingProgressReporter reporter){
TDFFile.Tunnel[] tuns = tdf.getTunnels();
tuns = tuns == null ? new TDFFile.Tunnel[0] : tuns;//Null means no tunnels.
final LoadingProgressReporter[] reporters = reporter
.generateSubReporters(tuns.length);
// The original re-checked tuns != null here; that check was dead code
// after the null-coalescing above, so it has been removed.
int tIndex = 0;
// Build tunnels
for (TDFFile.Tunnel tun : tuns) {
tr
.getReporter()
.report("org.jtrfp.trcl.TunnelInstaller.tunnel."
+ tIndex + ".entrance", tun.getEntrance().toString());
tr
.getReporter()
.report("org.jtrfp.trcl.TunnelInstaller.tunnel."
+ tIndex + ".exit", tun.getExit().toString());
newTunnel(tun,reporters[tIndex]);
tIndex++;
}// end for(tuns)
totalNumTunnels = tunnelsRemaining.size();
}//end installTunnels()
/**
 * Builds a Tunnel from its TDF definition, wires its entrance into the
 * overworld (entrance object + portal exit), and registers it for lookup
 * by file name and map square.
 * @return the newly created tunnel
 * @throws NullPointerException if no portal exit was registered at the
 *         tunnel's entrance map square
 */
private Tunnel newTunnel(org.jtrfp.trcl.file.TDFFile.Tunnel tdfTun,
LoadingProgressReporter reporter) {
final Tunnel tunnel = new Tunnel(tr, tdfTun, reporter);
tunnelsRemaining.add(tunnel);
DirectionVector tunnelEntranceLegacyPos = tdfTun.getEntrance();
// Point x comes from legacy Z and Point y from legacy X — intentional
// axis swap, matching getNearestTunnelEntrance.
final Point tunnelEntranceMapSquarePos = new Point(
(int)(TR.legacy2MapSquare(tunnelEntranceLegacyPos.getZ())),
(int)(TR.legacy2MapSquare(tunnelEntranceLegacyPos.getX())));
addTunnelEntrance(tunnelEntranceMapSquarePos,tunnel);
PortalExit portalExit = getTunnelEntrancePortal(tunnelEntranceMapSquarePos);
if(portalExit!=null){
// Aim the portal at the canonical tunnel start pose.
portalExit.setHeading(Tunnel.TUNNEL_START_DIRECTION.getHeading());
portalExit.setTop(Tunnel.TUNNEL_START_DIRECTION.getTop());
portalExit.setPosition(Tunnel.TUNNEL_START_POS.toArray());
portalExit.notifyPositionChange();
portalExit.setRootGrid(tunnel);
}else throw new NullPointerException("Null portal exit! "+tunnelEntranceMapSquarePos);
DirectionVector tunnelExitLegacyPos = tdfTun.getExit();
final Point tunnelExitMapSquarePos = new Point(
(int)(TR.legacy2MapSquare(tunnelExitLegacyPos.getZ())),
(int)(TR.legacy2MapSquare(tunnelExitLegacyPos.getX())));
System.out.println("Tunnel exit at sector "+tunnelExitMapSquarePos);
portalExit = getTunnelEntrancePortal(tunnelExitMapSquarePos);
// Exit-side portal wiring is intentionally disabled; kept for reference.
/*if(portalExit!=null){
portalExit.setHeading(tunnel.getExitObject().getHeading().negate());
portalExit.setTop(tunnel.getExitObject().getTop());
portalExit.setPosition(tunnel.getExitObject().getPosition());
portalExit.notifyPositionChange();
portalExit.setRootGrid(tunnel);
}else System.err.println("Null exit.");*/
tunnels.put(tdfTun.getTunnelLVLFile().toUpperCase(), tunnel);
return tunnel;
}
/**
 * Looks up an installed tunnel by its LVL file name, case-insensitively.
 * @param tunnelFileName tunnel LVL file name, any case
 * @return the matching Tunnel, or null if none was installed under that name
 */
public Tunnel getTunnelByFileName(String tunnelFileName) {
final String key = tunnelFileName.toUpperCase();
return tunnels.get(key);
}
/**
 * Finds the tunnel entrance nearest to the given point, supplied in legacy
 * units. Note the intentional x/z swap when converting to world space.
 * @return the nearest entrance object, or null if no tunnels exist
 */
public TunnelEntranceObject getNearestTunnelEntrance(double xInLegacyUnits,
double yInLegacyUnits, double zInLegacyUnits) {
final Vector3D queryPos = new Vector3D(
TR.legacy2Modern(zInLegacyUnits),//Intentionally backwards
TR.legacy2Modern(yInLegacyUnits),
TR.legacy2Modern(xInLegacyUnits)
);
System.out.println("Requested entry pos="+queryPos);
TunnelEntranceObject nearest = null;
double nearestDistance = Double.POSITIVE_INFINITY;
for (TunnelEntranceObject candidate : tunnelMap.values()) {
final Vector3D candidatePos = new Vector3D(candidate.getPosition());
System.out.println("Found tunnel at "+candidatePos);
final double distance = candidatePos.distance(queryPos);
if (distance < nearestDistance) {
nearestDistance = distance;
nearest = candidate;
}
}// end for(tunnels)
return nearest;
}// end getNearestTunnelEntrance(...)
/**
 * Posts a successful mission Result (waking the thread blocked in go())
 * from a fresh thread so the caller is not blocked. The TODOs below
 * describe the intended, not-yet-implemented outro cinematics.
 */
private void missionCompleteSequence() {
new Thread() {
@Override
public void run() {
// TODO: Behavior change: Camera XZ static, lag Y by ~16
// squares, heading/top affix toward player
// TODO: Turn off all player control behavior
// TODO: Behavior change: Player turns upward, top rolls on
// heading, speed at full throttle
// TODO: Wait 3 seconds
// TODO: Lightning shell on
// TODO: Wait 1 second
// TODO: Turbo forward
// TODO: Wait 500ms
// TODO: Jet thrust noise
// TODO: Player invisible.
System.out.println("MISSION COMPLETE.");
notifyMissionEnd(
new Result(
airTargetsDestroyed,
groundTargetsDestroyed,
foliageDestroyed,
1.-(double)tunnelsRemaining.size()/(double)totalNumTunnels));
}// end run()
}.start();
}//end missionCompleteSequence()
/**
 * Ends the mission in failure: posts a null Result (waking go()) from a
 * fresh thread. The TODOs describe the intended death cinematics.
 */
public void playerDestroyed() {
new Thread() {
@Override
public void run() {
// TODO Behavior change: Camera XYZ static, heading/top affix
// toward player
// TODO: Turn off all player control behavior
// TODO Player behavior change: Slow spin along heading axis,
// slow downward drift of heading
// TODO: Add behavior: explode and destroy on impact with ground
System.out.println("MISSION FAILED.");
notifyMissionEnd(null);
}// end run()
}.start();
}// end playerDestroyed()
/**
 * Publishes the mission's end result and wakes any thread blocked in go().
 * @param r the result to publish; null indicates player destruction
 */
private void notifyMissionEnd(Result r){
synchronized(missionEnd){
missionEnd[0] = r;
missionEnd.notifyAll();
}//end sync(missionEnd)
}//end notifyMissionEnd()
/**
 * @return the live list of remaining NAV objectives (not a copy).
 */
public List<NAVObjective> getRemainingNAVObjectives() {
return navs;
}
/**
 * @return the raw NAV sub-objects parsed from the level's NAV file.
 */
public List<NAVSubObject> getNavSubObjects() {
return navSubObjects;
}
/**
 * @param navSubObjects
 *            the navSubObjects to set
 */
public void setNavSubObjects(List<NAVSubObject> navSubObjects) {
this.navSubObjects = navSubObjects;
}
// Public hook to trigger the end-of-mission sequence.
public void missionComplete() {
missionCompleteSequence();
}
/**
 * @return the overworld scene built by go(); null before loading.
 */
public OverworldSystem getOverworldSystem() {
return overworldSystem;
}
// The notify*() tally methods below are invoked by gameplay code as things
// are destroyed or found; the totals feed the end-of-mission Result.
public Mission notifyAirTargetDestroyed(){
airTargetsDestroyed++;
return this;
}
public Mission notifyGroundTargetDestroyed(){
groundTargetsDestroyed++;
return this;
}
// Marks a tunnel as found, improving the tunnels-found percentage.
public Mission notifyTunnelFound(Tunnel tun){
tunnelsRemaining.remove(tun);
return this;
}
public Mission notifyFoliageDestroyed(){
foliageDestroyed++;
return this;
}
/**
 * Enters boss-fight mode: sets the bossFight property and asynchronously
 * stops the current background music, replacing it with the boss track.
 * @param bossMusicFile resource key of the boss music MOD
 */
public void enterBossMode(final String bossMusicFile){
setBossFight(true);
tr.getThreadManager().submitToThreadPool(new Callable<Void>() {
@Override
public Void call() throws Exception {
MusicPlaybackEvent evt;
final SoundSystem ss = Mission.this.tr.soundSystem.get();
Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
evt =ss
.getMusicFactory()
.create(tr.getResourceManager().gpuResidentMODs.get(bossMusicFile),
true));
// Swap tracks atomically with respect to other music changes.
synchronized(Mission.this){
evt.play();
if(bgMusic!=null)
bgMusic.stop();
bgMusic=evt;
}
return null;
}// end call()
});
}//end enterBossMode()
/**
 * Leaves boss-fight mode: clears the bossFight property and asynchronously
 * swaps the boss music back to the level's normal background music.
 */
public void exitBossMode(){
setBossFight(false);
tr.getThreadManager().submitToThreadPool(new Callable<Void>() {
@Override
public Void call() throws Exception {
MusicPlaybackEvent evt;
final SoundSystem ss = Mission.this.tr.soundSystem.get();
Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
evt =ss
.getMusicFactory()
.create(tr.getResourceManager().gpuResidentMODs.get(lvl.getBackgroundMusicFile()),
true));
synchronized(Mission.this){
evt.play();
// Null-guard added for consistency with enterBossMode(); bgMusic
// may be null if the mission was aborted before music started.
if(bgMusic!=null)
bgMusic.stop();
bgMusic=evt;
}//end sync(Mission.this)
return null;
}// end call()
});
}//end exitBossMode()
/**
 * Aborts this mission: publishes an abort-flagged Result (waking go() if it
 * is waiting), then blocks until go() releases missionLock before cleaning
 * up scenery and projectiles.
 */
public void abort() {
final Result result = new Result(
airTargetsDestroyed,
groundTargetsDestroyed,
foliageDestroyed,
1.-(double)tunnelsRemaining.size()/(double)totalNumTunnels);
result.setAbort(true);
notifyMissionEnd(result);
//Wait for mission to end
synchronized(missionLock){//Don't execute while mission is in progress.
cleanup();
}//end sync{}
}//end abort()
/**
 * Tears the mission's presentation down: clears the display mode, detaches
 * the secondary camera, and destroys all live projectiles.
 */
private void cleanup() {
displayHandler.setDisplayMode(emptyMode);
tr.secondaryRenderer.get().getCamera().setRootGrid(null);
// Remove projectile factories
for(ProjectileFactory pf:tr.getResourceManager().getProjectileFactories())
for(Projectile projectile:pf.getProjectiles())
projectile.destroy();
}
/**
 * Find a tunnel at the given map square, if any.
 * @param mapSquareXZ Position in cells, not world coords.
 * @return The Tunnel at this map square, or null if none here.
 * @since Jan 13, 2015
 */
public TunnelEntranceObject getTunnelEntranceObject(Point mapSquareXZ){
final int key = pointToHash(mapSquareXZ);
// Debug output: dump all known entrance positions in map-square units.
// NOTE(review): if TR.mapSquareSize is an integer type, 1/TR.mapSquareSize
// truncates to 0 here — confirm it is floating-point.
System.out.println("getTunnelEntranceObject "+mapSquareXZ);
for(TunnelEntranceObject teo:tunnelMap.values())
System.out.print(" "+new Vector3D(teo.getPosition()).scalarMultiply(1/TR.mapSquareSize));
System.out.println();
return tunnelMap.get(key);
}
/**
 * Registers the overworld-side portal exit for the tunnel entrance at the
 * given map square.
 */
public void registerTunnelEntrancePortal(Point mapSquareXZ, PortalExit exit){
final int key = pointToHash(mapSquareXZ);
synchronized(tunnelPortals){
tunnelPortals.put(key, exit);
}//end sync(tunnelPortals)
}
/**
 * @return the portal exit registered at the given map square, or null.
 */
PortalExit getTunnelEntrancePortal(Point mapSquareXZ){
final int key = pointToHash(mapSquareXZ);
synchronized(tunnelPortals){
return tunnelPortals.get(key);
}//end sync(tunnelPortals)
}
/**
 * Creates a TunnelEntranceObject for the given tunnel, adds it to the
 * overworld, and indexes it by its map-square position.
 */
public void addTunnelEntrance(Point mapSquareXZ, Tunnel tunnel){
final TunnelEntranceObject entrance = new TunnelEntranceObject(tr,tunnel);
overworldSystem.add(entrance);
tunnelMap.put(pointToHash(mapSquareXZ), entrance);
}
/**
 * Packs a map-square (x,y) coordinate into a single int key used by
 * tunnelMap and tunnelPortals: y is scaled by 65536 and added to x.
 */
private int pointToHash(Point point){
final int x = (int)point.getX();
final int y = (int)point.getY();
return x + 65536 * y;
}
/**
 * Moves the player from the overworld into the given tunnel: swaps scene
 * branches, reconfigures player behaviors for tunnel flight, and points the
 * secondary renderer back at the overworld. Synchronized to prevent
 * concurrent re-entry.
 * @param tunnel the tunnel being entered
 */
public synchronized void enterTunnel(final Tunnel tunnel) {
System.out.println("Entering tunnel "+tunnel);
final Game game = ((TVF3Game)tr.getGame());
final OverworldSystem overworldSystem = ((TVF3Game)game).getCurrentMission().getOverworldSystem();
currentTunnel = tunnel;
((TVF3Game)game).getCurrentMission().notifyTunnelFound(tunnel);
setMissionMode(new TunnelMode());
// Add the tunnel branch before removing the overworld branch.
tr.getDefaultGrid().nonBlockingAddBranch(tunnel);
tr.getDefaultGrid().blockingRemoveBranch(overworldSystem);
//Move player to tunnel
tr.mainRenderer.get().getSkyCube().setSkyCubeGen(Tunnel.TUNNEL_SKYCUBE_GEN);
//Ensure chamber mode is off
overworldSystem.setChamberMode(false);
overworldSystem.setTunnelMode(true);
//Update debug data
tr.getReporter().report("org.jtrfp.Tunnel.isInTunnel?", "true");
// Disable world-wrap behavior on all live projectiles while in the tunnel.
final ProjectileFactory [] pfs = tr.getResourceManager().getProjectileFactories();
for(ProjectileFactory pf:pfs){
Projectile [] projectiles = pf.getProjectiles();
for(Projectile proj:projectiles){
((WorldObject)proj).
probeForBehavior(LoopingPositionBehavior.class).
setEnable(false);
}//end for(projectiles)
}//end for(projectileFactories)
final Player player = ((TVF3Game)tr.getGame()).getPlayer();
player.setActive(false);
player.resetVelocityRotMomentum();
// Tunnel behavior set: wall collisions on, terrain collisions off,
// velocity zeroed, world-wrap off, heading forced forward.
player.probeForBehavior(CollidesWithTunnelWalls.class).setEnable(true);
player.probeForBehavior(MovesByVelocity.class) .setVelocity(Vector3D.ZERO);
player.probeForBehavior(LoopingPositionBehavior.class).setEnable(false);
player.probeForBehavior(HeadingXAlwaysPositiveBehavior.class).setEnable(true);
player.probeForBehavior(CollidesWithTerrain.class) .setEnable(false);
tunnel.dispatchTunnelEntryNotifications();
// Player takes over the secondary camera's pose — presumably set by the
// entrance portal; confirm against PortalExit wiring in newTunnel().
final Camera secondaryCam = tr.secondaryRenderer.get().getCamera();
player.setPosition(secondaryCam.getPosition());
player.setHeading (secondaryCam.getHeading());
player.setTop (secondaryCam.getTop());
player.notifyPositionChange();
//Move the secondary cam to the overworld.
overworldSystem.setChamberMode(tunnel.getExitObject().isMirrorTerrain());
secondaryCam.setRootGrid(overworldSystem);
//Set the skycube appropriately
tr.secondaryRenderer.get().getSkyCube().setSkyCubeGen(((TVF3Game)tr.getGame()).
getCurrentMission().
getOverworldSystem().
getSkySystem().
getBelowCloudsSkyCubeGen());
player.setActive(true);
}//end enterTunnel()
/**
 * Registers a listener for all bound properties of this mission.
 * @param listener the listener to add
 * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.beans.PropertyChangeListener)
 */
public void addPropertyChangeListener(PropertyChangeListener listener) {
pcs.addPropertyChangeListener(listener);
}
/**
 * Registers a listener for one named property (e.g. SATELLITE_VIEW).
 * @param propertyName the property to listen to
 * @param listener the listener to add
 * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener)
 */
public void addPropertyChangeListener(String propertyName,
PropertyChangeListener listener) {
pcs.addPropertyChangeListener(propertyName, listener);
}
/**
 * @return all listeners registered on this mission
 * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners()
 */
public PropertyChangeListener[] getPropertyChangeListeners() {
return pcs.getPropertyChangeListeners();
}
/**
 * @param propertyName the property whose listeners are requested
 * @return the listeners registered for the named property
 * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners(java.lang.String)
 */
public PropertyChangeListener[] getPropertyChangeListeners(
String propertyName) {
return pcs.getPropertyChangeListeners(propertyName);
}
/**
 * @param propertyName the property to query
 * @return true if any listeners are registered for the named property
 * @see java.beans.PropertyChangeSupport#hasListeners(java.lang.String)
 */
public boolean hasListeners(String propertyName) {
return pcs.hasListeners(propertyName);
}
/**
 * @param listener the listener to remove
 * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.beans.PropertyChangeListener)
 */
public void removePropertyChangeListener(PropertyChangeListener listener) {
pcs.removePropertyChangeListener(listener);
}
/**
 * @param propertyName the property the listener was registered under
 * @param listener the listener to remove
 * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener)
 */
public void removePropertyChangeListener(String propertyName,
PropertyChangeListener listener) {
pcs.removePropertyChangeListener(propertyName, listener);
}
// Marker classes describing the mission's current phase; instances are fired
// through the MISSION_MODE bound property and matched via instanceof.
public static interface MissionMode{}
public static class LoadingMode implements MissionMode{}
public static class BriefingMode implements MissionMode{}
public static class IntroMode extends BriefingMode{}
public static class EnemyIntroMode extends IntroMode{}
public static class PlanetIntroMode extends IntroMode{}
public static class MissionSummaryMode extends BriefingMode{}
public static class GameplayMode implements MissionMode{}
public static class TunnelMode extends GameplayMode{}
public static class ChamberMode extends GameplayMode{}
public static class AboveGroundMode extends GameplayMode{}
/**
 * @return the current mission-mode marker object (see MissionMode subclasses).
 */
public MissionMode getMissionMode() {
return missionMode;
}
/**
 * Sets the mission mode and fires the MISSION_MODE property change. The
 * field is now updated before firing so listeners that call
 * getMissionMode() during the event observe the new value (previously the
 * event fired first and listeners saw stale state).
 * @param missionMode the missionMode to set
 */
public void setMissionMode(MissionMode missionMode) {
final MissionMode oldMode = this.missionMode;
this.missionMode = missionMode;
pcs.firePropertyChange(MISSION_MODE, oldMode, missionMode);
}
/**
 * @return whether a boss fight is currently active.
 */
public boolean isBossFight() {
return bossFight;
}
/**
 * Sets the boss-fight flag and fires the "bossFight" property change.
 * NOTE(review): the event fires before the field is assigned, so listeners
 * calling isBossFight() during the event see the old value — confirm
 * whether that is intended.
 * @param bossFight the bossFight to set
 */
public void setBossFight(boolean bossFight) {
pcs.firePropertyChange("bossFight", this.bossFight, bossFight);
this.bossFight = bossFight;
}
/**
 * Turns the overhead satellite map view on or off. Only legal while in
 * AboveGroundMode and while the game is unpaused; activating it pauses the
 * simulation and hides the NAV/HUD overlays.
 * @param satelliteView the desired state
 * @throws IllegalArgumentException if activated outside AboveGroundMode or
 *         while the game is paused
 */
public void setSatelliteView(boolean satelliteView) {
if(!(getMissionMode() instanceof AboveGroundMode)&&satelliteView)
throw new IllegalArgumentException("Cannot activate satellite view while mission mode is "+getMissionMode().getClass().getSimpleName());
if(satelliteView && ((TVF3Game)tr.getGame()).isPaused())
throw new IllegalArgumentException("Cannot activate satellite view while paused.");
// NOTE(review): fired before this.satelliteView is updated; listeners that
// call isSatelliteView() during the event see the old value.
pcs.firePropertyChange(SATELLITE_VIEW, this.satelliteView, satelliteView);
if(satelliteView!=this.satelliteView){
final Game game = ((TVF3Game)tr.getGame());
final Camera cam = tr.mainRenderer.get().getCamera();
if(satelliteView){//Switched on
// Freeze the simulation and remove NAV/HUD overlays.
tr.getThreadManager().setPaused(true);
World.relevanceExecutor.submit(new Runnable(){
@Override
public void run() {
tr.getDefaultGrid().removeBranch(((TVF3Game)game).getNavSystem());
tr.getDefaultGrid().removeBranch(((TVF3Game)game).getHUDSystem());
}});
cam.setFogEnabled(false);
cam.probeForBehavior(MatchPosition.class).setEnable(false);
cam.probeForBehavior(MatchDirection.class).setEnable(false);
final Vector3D pPos = new Vector3D(((TVF3Game)game).getPlayer().getPosition());
final Vector3D pHeading = ((TVF3Game)tr.getGame()).getPlayer().getHeading();
// Hover above the player looking straight down; the tiny Y component
// keeps the 'top' vector from being degenerate when normalized.
cam.setPosition(new Vector3D(pPos.getX(),TR.visibilityDiameterInMapSquares*TR.mapSquareSize*.65,pPos.getZ()));
cam.setHeading(Vector3D.MINUS_J);
cam.setTop(new Vector3D(pHeading.getX(),.0000000001,pHeading.getZ()).normalize());
((TVF3Game)tr.getGame()).getSatDashboard().setVisible(true);
}else{//Switched off
// Resume the simulation and restore NAV/HUD overlays.
tr.getThreadManager().setPaused(false);
World.relevanceExecutor.submit(new Runnable(){
@Override
public void run() {
((TVF3Game)tr.getGame()).getNavSystem().activate();
tr.getDefaultGrid().addBranch(((TVF3Game)game).getNavSystem());
tr.getDefaultGrid().addBranch(((TVF3Game)game).getHUDSystem());
}});
cam.setFogEnabled(true);
cam.probeForBehavior(MatchPosition.class).setEnable(true);
cam.probeForBehavior(MatchDirection.class).setEnable(true);
((TVF3Game)tr.getGame()).getSatDashboard().setVisible(false);
}//end !satelliteView
}//end if(change)
this.satelliteView=satelliteView;
}
/**
 * @return whether the satellite (overhead map) view is currently active.
 */
public boolean isSatelliteView() {
// Removed a leftover System.out debug print; getters should be
// side-effect free.
return satelliteView;
}
/**
 * @return the tunnel the player is currently inside, or null when the
 *         mission is not in tunnel mode.
 */
public Tunnel getCurrentTunnel() {
if(getMissionMode() instanceof TunnelMode)
return currentTunnel;
return null;
}
/**
 * @return the Game this mission belongs to.
 */
public Game getGame() {
return game;
}
}// end Mission
|
package plugin.google.maps;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.cordova.CordovaWebView;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.os.Build;
import android.os.Build.VERSION;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsoluteLayout;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.ScrollView;
public class MyPluginLayout extends FrameLayout {
private CordovaWebView webView;
// Original parent of the WebView; this layout is swapped in/out of it.
private ViewGroup root;
// Screen-space rectangle where the attached plugin view is drawn.
private RectF drawRect = new RectF();
private Context context;
// Transparent layer holding the WebView; intercepts touches over the map.
private FrontLayerLayout frontLayer;
private ScrollView scrollView = null;
private FrameLayout scrollFrameLayout = null;
// White backing view sized to the HTML page (see setPageSize).
private View backgroundView = null;
private TouchableWrapper touchableWrapper;
// The attached plugin view (e.g. the map), or null when detached.
private ViewGroup myView = null;
private boolean isScrolling = false;
// Layout params of myView before attachment; restored on detach.
private ViewGroup.LayoutParams orgLayoutParams = null;
private boolean isDebug = false;
private boolean isClickable = true;
// DOM-element bounds keyed by element id; used for touch hit-testing.
private Map<String, RectF> HTMLNodes = new HashMap<String, RectF>();
private Activity mActivity = null;
/**
 * Builds the layered plugin container: a scrollable background layer (kept
 * in sync with the WebView's scroll), the plugin view behind, and a
 * transparent front layer hosting the WebView for touch routing.
 * @param webView  the Cordova WebView to wrap
 * @param activity owning activity, used to restore focus on attach/detach
 */
@SuppressLint("NewApi")
public MyPluginLayout(CordovaWebView webView, Activity activity) {
super(webView.getContext());
mActivity = activity;
this.webView = webView;
this.root = (ViewGroup) webView.getParent();
this.context = webView.getContext();
// Transparent WebView lets the plugin view behind it show through.
webView.setBackgroundColor(Color.TRANSPARENT);
if (VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
// NOTE(review): software layer presumably works around transparent-
// WebView rendering issues on Honeycomb+ — confirm still required.
webView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
}
frontLayer = new FrontLayerLayout(this.context);
scrollView = new ScrollView(this.context);
scrollView.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
backgroundView = new View(this.context);
backgroundView.setBackgroundColor(Color.WHITE);
backgroundView.setVerticalScrollBarEnabled(false);
backgroundView.setHorizontalScrollBarEnabled(false);
// Placeholder height; replaced by setPageSize() once the page is measured.
backgroundView.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, 9999));
scrollFrameLayout = new FrameLayout(this.context);
scrollFrameLayout.addView(backgroundView);
scrollFrameLayout.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));
this.touchableWrapper = new TouchableWrapper(this.context);
}
/**
 * Updates the rectangle in which the attached plugin view is drawn.
 * Coordinates are in the same pixel space as the WebView.
 */
public void setDrawingRect(float left, float top, float right, float bottom) {
this.drawRect.left = left;
this.drawRect.top = top;
this.drawRect.right = right;
this.drawRect.bottom = bottom;
if (this.isDebug) {// was `== true`; direct boolean test is idiomatic
this.inValidate();
}
}
/**
 * Registers or updates the bounds of an HTML element overlapping the map;
 * touches inside these rectangles are forwarded to the WebView rather than
 * the plugin view.
 * @param domId DOM element id used as the key
 */
public void putHTMLElement(String domId, float left, float top, float right, float bottom) {
// Single lookup instead of containsKey()+get(); values are never null.
RectF rect = this.HTMLNodes.get(domId);
if (rect == null) {
rect = new RectF();
}
rect.left = left;
rect.top = top;
rect.right = right;
rect.bottom = bottom;
this.HTMLNodes.put(domId, rect);
if (this.isDebug) {
this.inValidate();
}
}
/**
 * Unregisters the bounds of an HTML element (no-op if unknown).
 * @param domId DOM element id
 */
public void removeHTMLElement(String domId) {
this.HTMLNodes.remove(domId);
if (this.isDebug) {// was `== true`; direct boolean test is idiomatic
this.inValidate();
}
}
/**
 * Drops all registered HTML-element bounds.
 */
public void clearHTMLElement() {
this.HTMLNodes.clear();
if (this.isDebug) {// was `== true`; direct boolean test is idiomatic
this.inValidate();
}
}
/**
 * Controls whether touches over the map rectangle are delivered to the
 * plugin view. NOTE(review): this hides View.setClickable(boolean) without
 * calling super — confirm that is intentional.
 * @param clickable true to let the plugin view receive touches
 */
public void setClickable(boolean clickable) {
this.isClickable = clickable;
if (this.isDebug) {// was `== true`; direct boolean test is idiomatic
this.inValidate();
}
}
/**
 * Re-applies drawRect (offset by the WebView's current vertical scroll) to
 * the attached plugin view's layout params. Supports Absolute-, Linear- and
 * FrameLayout parents; no-op when nothing is attached.
 */
@SuppressWarnings("deprecation")
public void updateViewPosition() {
if (myView == null) {
return;
}
ViewGroup.LayoutParams lParams = this.myView.getLayoutParams();
int scrollY = webView.getScrollY();
if (lParams instanceof AbsoluteLayout.LayoutParams) {
AbsoluteLayout.LayoutParams params = (AbsoluteLayout.LayoutParams) lParams;
params.width = (int) this.drawRect.width();
params.height = (int) this.drawRect.height();
params.x = (int) this.drawRect.left;
params.y = (int) this.drawRect.top + scrollY;
myView.setLayoutParams(params);
} else if (lParams instanceof LinearLayout.LayoutParams) {
LinearLayout.LayoutParams params = (LinearLayout.LayoutParams) lParams;
params.width = (int) this.drawRect.width();
params.height = (int) this.drawRect.height();
params.topMargin = (int) this.drawRect.top + scrollY;
params.leftMargin = (int) this.drawRect.left;
myView.setLayoutParams(params);
} else if (lParams instanceof FrameLayout.LayoutParams) {
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) lParams;
params.width = (int) this.drawRect.width();
params.height = (int) this.drawRect.height();
params.topMargin = (int) this.drawRect.top + scrollY;
params.leftMargin = (int) this.drawRect.left;
params.gravity = Gravity.TOP;
myView.setLayoutParams(params);
}
if (android.os.Build.VERSION.SDK_INT < 11) {
// Force redraw
myView.requestLayout();
}
this.frontLayer.invalidate();
}
/**
 * @return the currently attached plugin view, or null when detached.
 */
public View getMyView() {
return myView;
}
/**
 * Enables or disables debug drawing of drawRect and the registered
 * HTML-node rectangles on the front layer.
 * @param debug true to draw debug overlays
 */
public void setDebug(boolean debug) {
this.isDebug = debug;
if (this.isDebug) {// was `== true`; direct boolean test is idiomatic
this.inValidate();
}
}
/**
 * Reverses attachMyView(): tears the layered structure down, restores the
 * plugin view's original layout params, and returns the WebView to the
 * root container. No-op when nothing is attached.
 */
public void detachMyView() {
if (myView == null) {
return;
}
// Unwind the view hierarchy in the reverse order of attachMyView().
root.removeView(this);
this.removeView(frontLayer);
frontLayer.removeView(webView);
scrollFrameLayout.removeView(myView);
myView.removeView(this.touchableWrapper);
this.removeView(this.scrollView);
this.scrollView.removeView(scrollFrameLayout);
if (orgLayoutParams != null) {
myView.setLayoutParams(orgLayoutParams);
}
root.addView(webView);
myView = null;
mActivity.getWindow().getDecorView().requestFocus();
}
/**
 * Inserts the given plugin view beneath the WebView: the WebView moves into
 * the transparent front layer and the plugin view goes into the scrollable
 * layer behind it. Any previously attached view is detached first; no-op
 * if the same view is already attached.
 * @param pluginView the view (e.g. the map) to show behind the WebView
 */
public void attachMyView(ViewGroup pluginView) {
// Sync the background layer's scroll with the WebView before swapping.
scrollView.scrollTo(webView.getScrollX(), webView.getScrollY());
if (myView == pluginView) {
return;
} else {
this.detachMyView();
}
//backgroundView.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, (int) (webView.getContentHeight() * webView.getScale() + webView.getHeight())));
myView = pluginView;
ViewGroup.LayoutParams lParams = myView.getLayoutParams();
// Remember original params so detachMyView() can restore them.
orgLayoutParams = null;
if (lParams != null) {
orgLayoutParams = new ViewGroup.LayoutParams(lParams);
}
root.removeView(webView);
scrollView.addView(scrollFrameLayout);
this.addView(scrollView);
pluginView.addView(this.touchableWrapper);
scrollFrameLayout.addView(pluginView);
frontLayer.addView(webView);
this.addView(frontLayer);
root.addView(this);
mActivity.getWindow().getDecorView().requestFocus();
}
public void setPageSize(int width, int height) {
android.view.ViewGroup.LayoutParams lParams = backgroundView.getLayoutParams();
lParams.width = width;
lParams.height = height;
backgroundView.setLayoutParams(lParams);
}
public void scrollTo(int x, int y) {
this.scrollView.scrollTo(x, y);
}
public void setBackgroundColor(int color) {
this.backgroundView.setBackgroundColor(color);
}
public void inValidate() {
this.frontLayer.invalidate();
}
  /**
   * Transparent layer placed over the WebView. It decides whether a touch
   * belongs to the native plugin view underneath or to the HTML content, and
   * can draw a debug overlay visualizing the touchable regions.
   */
  private class FrontLayerLayout extends FrameLayout {
    public FrontLayerLayout(Context context) {
      super(context);
      // Needed so onDraw() is called for the debug overlay.
      this.setWillNotDraw(false);
    }
    @Override
    public boolean onInterceptTouchEvent(MotionEvent event) {
      // Pass everything to the WebView while the plugin view is hidden or
      // touch handling is disabled.
      if (isClickable == false || myView == null || myView.getVisibility() != View.VISIBLE) {
        webView.requestFocus(View.FOCUS_DOWN);
        return false;
      }
      int x = (int)event.getX();
      int y = (int)event.getY();
      int scrollY = webView.getScrollY();
      boolean contains = drawRect.contains(x, y);
      int action = event.getAction();
      // A gesture that starts outside the plugin rectangle is treated as a
      // page scroll until the finger is lifted again.
      isScrolling = (contains == false && action == MotionEvent.ACTION_DOWN) ? true : isScrolling;
      isScrolling = (action == MotionEvent.ACTION_UP) ? false : isScrolling;
      contains = isScrolling == true ? false : contains;
      if (contains) {
        // Is the touch point on any HTML elements?
        Set<Entry<String, RectF>> elements = MyPluginLayout.this.HTMLNodes.entrySet();
        Iterator<Entry<String, RectF>> iterator = elements.iterator();
        Entry <String, RectF> entry;
        RectF rect;
        while(iterator.hasNext() && contains == true) {
          entry = iterator.next();
          rect = entry.getValue();
          // Temporarily shift the shared RectF into view coordinates for the
          // hit test, then shift it back (the instances are cached/shared).
          rect.top -= scrollY;
          rect.bottom -= scrollY;
          if (entry.getValue().contains(x, y)) {
            contains = false;
          }
          rect.top += scrollY;
          rect.bottom += scrollY;
        }
      }
      if (!contains) {
        webView.requestFocus(View.FOCUS_DOWN);
      }
      // true = intercept the event so it reaches the plugin view underneath.
      return contains;
    }
    @Override
    protected void onDraw(Canvas canvas) {
      // Debug overlay only; normal operation draws nothing.
      if (drawRect == null || isDebug == false) {
        return;
      }
      int width = canvas.getWidth();
      int height = canvas.getHeight();
      int scrollY = webView.getScrollY();
      Paint paint = new Paint();
      // Green = regions where touches go to the WebView (page scroll area).
      paint.setColor(Color.argb(100, 0, 255, 0));
      if (isClickable == false) {
        canvas.drawRect(0f, 0f, width, height, paint);
        return;
      }
      // Paint the four bands surrounding the plugin rectangle.
      canvas.drawRect(0f, 0f, width, drawRect.top, paint);
      canvas.drawRect(0, drawRect.top, drawRect.left, drawRect.bottom, paint);
      canvas.drawRect(drawRect.right, drawRect.top, width, drawRect.bottom, paint);
      canvas.drawRect(0, drawRect.bottom, width, height, paint);
      // Red = HTML elements overlapping the plugin view (touches go to HTML).
      paint.setColor(Color.argb(100, 255, 0, 0));
      Set<Entry<String, RectF>> elements = MyPluginLayout.this.HTMLNodes.entrySet();
      Iterator<Entry<String, RectF>> iterator = elements.iterator();
      Entry <String, RectF> entry;
      RectF rect;
      while(iterator.hasNext()) {
        entry = iterator.next();
        rect = entry.getValue();
        // Same temporary scroll-offset trick as in onInterceptTouchEvent.
        rect.top -= scrollY;
        rect.bottom -= scrollY;
        canvas.drawRect(rect, paint);
        rect.top += scrollY;
        rect.bottom += scrollY;
      }
    }
  }
private class TouchableWrapper extends FrameLayout {
public TouchableWrapper(Context context) {
super(context);
}
@Override
public boolean dispatchTouchEvent(MotionEvent event) {
int action = event.getAction();
if (action == MotionEvent.ACTION_DOWN || action == MotionEvent.ACTION_UP) {
scrollView.requestDisallowInterceptTouchEvent(true);
}
return super.dispatchTouchEvent(event);
}
}
}
|
package org.jtrfp.trcl.gpu;
import java.awt.Color;
import java.util.Collection;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;
import javax.media.opengl.GL3;
import org.apache.commons.collections4.collection.PredicatedCollection;
import org.apache.commons.collections4.functors.InstanceofPredicate;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.Camera;
import org.jtrfp.trcl.GridCubeProximitySorter;
import org.jtrfp.trcl.ObjectListWindow;
import org.jtrfp.trcl.SpacePartitioningGrid;
import org.jtrfp.trcl.World;
import org.jtrfp.trcl.coll.CollectionActionDispatcher;
import org.jtrfp.trcl.core.NotReadyException;
import org.jtrfp.trcl.core.TRFutureTask;
import org.jtrfp.trcl.core.ThreadManager;
import org.jtrfp.trcl.gui.ReporterFactory.Reporter;
import org.jtrfp.trcl.obj.Positionable;
import org.jtrfp.trcl.obj.PositionedRenderable;
import org.jtrfp.trcl.prop.SkyCube;
import org.jtrfp.trcl.tools.Util;
import com.ochafik.util.Adapter;
import com.ochafik.util.listenable.AdaptedCollection;
/**
 * Renders a {@link World} from the point of view of a {@link Camera}, using a
 * {@link RenderList} that is constructed asynchronously on the thread pool.
 * Initialization is lazy: {@link #ensureInit()} runs on the first render pass.
 * The {@link #render} callable is registered/unregistered as a repeating GL
 * task via {@link #setEnabled(boolean)}.
 */
public final class Renderer {
    private RendererFactory rendererFactory;
    //private RenderableSpacePartitioningGrid rootGrid;
    private final GridCubeProximitySorter proximitySorter = new GridCubeProximitySorter();
    private GLFrameBuffer renderingTarget;
    private boolean initialized = false;
    private GPU gpu;
    public TRFutureTask<RenderList> renderList;
    private int frameNumber;
    private long lastTimeMillis;
    private double meanFPS;
    float[] cameraMatrixAsFlatArray = new float[16];
    float [] camRotationProjectionMatrix= new float[16];
    private TRFutureTask<Void> relevanceUpdateFuture,relevanceCalcTask;
    private SkyCube skyCube;
    final AtomicLong nextRelevanceCalcTime = new AtomicLong(0L);
    //private final CollisionManager collisionManager;
    private Camera camera = null;
    private PredicatedCollection<Positionable> relevantPositioned;
    private Reporter reporter;
    private ThreadManager threadManager;
    private String debugName;
    private boolean enabled = false;
    private World world;
    private ObjectListWindow objectListWindow;
    // Bidirectional adapter that casts between Positionable and
    // PositionedRenderable so the camera's relevance collection can feed the
    // render list's visible-object list.
    private static final Adapter<Positionable,PositionedRenderable> castingAdapter = new Adapter<Positionable,PositionedRenderable>(){
	@Override
	public PositionedRenderable adapt(Positionable value)
		throws UnsupportedOperationException {
	    return (PositionedRenderable)value;
	}
	@Override
	public Positionable reAdapt(PositionedRenderable value)
		throws UnsupportedOperationException {
	    return (Positionable)value;
	}
    };//end castingAdapter
    /**
     * Lazily performs one-time setup: creates the camera, submits asynchronous
     * construction of the RenderList, wires the relevance collection, and maps
     * GPU memory. Safe to call repeatedly; only the first call does work.
     * Requires gpu, world and threadManager to be set beforehand.
     */
    public void ensureInit() {
	if (initialized)
	    return;
	Util.assertPropertiesNotNull(this, "gpu", "world", "threadManager");
	final World world = getWorld();
	final GPU gpu = getGpu();
	final ThreadManager threadManager = getThreadManager();
	Camera camera = world.newCamera();//TODO: Remove after redesign.
	camera.setDebugName(getDebugName());
	//setCamera(tr.getWorld().newCamera());//TODO: Use after redesign
	System.out.println("...Done.");
	System.out.println("Initializing RenderList...");
	renderList = new TRFutureTask<RenderList>(new Callable<RenderList>(){
	    @Override
	    public RenderList call() throws Exception {
		final RenderList rl = new RenderList(gpu, Renderer.this, getObjectListWindow(), getThreadManager());
		rl.setReporter(getReporter());
		return rl;
	    }});
	threadManager.threadPool.submit(renderList);
	if(getSkyCube() == null)
	    setSkyCube(new SkyCube(gpu));
	// Only PositionedRenderable instances may enter the visible-object
	// list; the adapted collection performs the casts both ways.
	relevantPositioned =
		PredicatedCollection.predicatedCollection(
			new AdaptedCollection<PositionedRenderable,Positionable>(renderList.get().getVisibleWorldObjectList(),Util.bidi2Backward(castingAdapter),Util.bidi2Forward(castingAdapter)),
			new InstanceofPredicate(PositionedRenderable.class));
	setCamera(camera);
	assert camera!=null;
	gpu.memoryManager.get().map();
	initialized = true;
    }// end ensureInit()
    /**
     * Reports FPS and visible-object statistics to the Reporter every 20
     * frames. No-op when no reporter is configured.
     */
    private void fpsTracking() {
	final Reporter reporter = getReporter();
	if(reporter == null)
	    return;
	frameNumber++;
	final boolean isKeyFrame = (frameNumber % 20) == 0;
	if (isKeyFrame) {
	    final long dT = System.currentTimeMillis() - lastTimeMillis;
	    if(dT<=0)return; // avoid divide-by-zero / negative clock skew
	    final int fps = (int)(20.*(1000. / (double)dT));
	    reporter.report("org.jtrfp.trcl.core.Renderer."+debugName+" FPS", "" + fps);
	    final Collection<PositionedRenderable> coll = renderList.get().getVisibleWorldObjectList();
	    // Lock the shared visible-object list while reading its size.
	    synchronized(coll){
		reporter.report("org.jtrfp.trcl.core.Renderer."+debugName+" numVisibleObjects", coll.size()+"");
		SpacePartitioningGrid spg = getCamera().getRootGrid();
		if(spg!=null)
		    reporter.report("org.jtrfp.trcl.core.Renderer."+debugName+" rootGrid", spg.toString());
	    }
	    lastTimeMillis = System.currentTimeMillis();
	}//end if(key frame)
    }//end fpsTracking()
    /**
     * Swaps the active camera, re-targeting the flat relevance collection from
     * the old camera (if any) to the new one.
     */
    public void setCamera(Camera toUse){
	final PredicatedCollection<Positionable> relevantPositioned = getRelevantPositioned();
	if(this.camera!=null)
	    this.camera.getFlatRelevanceCollection().removeTarget(relevantPositioned, true);
	this.camera=toUse;
	toUse.getFlatRelevanceCollection().addTarget(relevantPositioned, true);
    }
    /**
     * One render pass, executed on the GL thread as a repeating task. Sends
     * the render list to the GPU, flushes stale memory pages, draws, refreshes
     * texture codepages, then updates FPS stats. NotReadyException aborts the
     * pass quietly (assets still loading).
     */
    public final Callable<?> render = new Callable<Void>(){
	@Override
	public Void call() throws Exception {
	    final GL3 gl = gpu.getGl();
	    try{ensureInit();
		final RenderList rl = renderList.getRealtime();
		rl.sendToGPU(gl);
		//Make sure memory on the GPU is up-to-date by flushing stale pages to GPU mem.
		gpu.memoryManager.getRealtime().flushStalePages();
		rl.render(gl);
		// Update texture codepages
		gpu.textureManager.getRealtime().vqCodebookManager.getRealtime().refreshStaleCodePages();
		fpsTracking();
	    }catch(NotReadyException e){}
	    return null;
	}};
    /**
     * Sets the sun direction uniform on the deferred shading program.
     */
    public void setSunVector(Vector3D sv){
	rendererFactory.getDeferredProgram().use();
	rendererFactory.getSunVectorUniform().set((float)sv.getX(),(float)sv.getY(),(float)sv.getZ());
	gpu.defaultProgram();
    }
    /**
     * @return the rootGrid
     */
    /*
    public RenderableSpacePartitioningGrid getRootGrid() {
	return rootGrid;
    }
    */
    /**
     * @param rootGrid
     *            the rootGrid to set
     */
    /*
    public void setRootGrid(RenderableSpacePartitioningGrid rootGrid) {
	this.rootGrid = rootGrid;
	if(getCamera().getContainingGrid()!=null)
	    getCamera().getContainingGrid().remove(getCamera());
	rootGrid.add(getCamera());//TODO: Remove later
    }
    */
    /**
     * @return the cameraMatrixAsFlatArray
     */
    public float[] getCameraMatrixAsFlatArray() {
	return cameraMatrixAsFlatArray;
    }
    /**
     * @return the camRotationProjectionMatrix
     */
    public float[] getCamRotationProjectionMatrix() {
	return camRotationProjectionMatrix;
    }
    /**
     * @return the skyCube
     */
    public SkyCube getSkyCube() {
	return skyCube;
    }
    /**
     * @param skyCube the skyCube to set
     */
    public void setSkyCube(SkyCube skyCube) {
	this.skyCube = skyCube;
    }
    /**
     * Sets the sun color uniform on the deferred program. Blocks until the GL
     * task completes. Note: color channels are scaled by 1/128, so values
     * above 128 over-brighten deliberately.
     */
    public Renderer setSunColor(final Color color) {
	gpu.submitToGL(new Callable<Void>(){
	    @Override
	    public Void call() throws Exception {
		rendererFactory.getDeferredProgram().use();
		rendererFactory.getDeferredProgram().getUniform("sunColor").set(color.getRed()/128f, color.getGreen()/128f, color.getBlue()/128f);
		gpu.defaultProgram();
		return null;
	    }
	}).get();
	return this;
    }
    /**
     * Sets the ambient light uniform on the deferred program. Blocks until the
     * GL task completes. Same 1/128 channel scaling as setSunColor.
     */
    public Renderer setAmbientLight(final Color color) {
	gpu.submitToGL(new Callable<Void>(){
	    @Override
	    public Void call() throws Exception {
		rendererFactory.getDeferredProgram().use();
		rendererFactory.getDeferredProgram().getUniform("ambientLight").set(color.getRed()/128f, color.getGreen()/128f, color.getBlue()/128f);
		gpu.defaultProgram();
		return null;
	    }
	}).get();
	return this;
    }//end setAmbientLight
    /**
     * @return the renderingTarget
     */
    public GLFrameBuffer getRenderingTarget() {
	return renderingTarget;
    }
    /**
     * @param renderingTarget the renderingTarget to set
     */
    public Renderer setRenderingTarget(GLFrameBuffer renderingTarget) {
	this.renderingTarget = renderingTarget;
	return this;
    }
    private final Object relevanceUpdateLock = new Object();
    public RendererFactory getRendererFactory() {
	return rendererFactory;
    }
    public TRFutureTask<RenderList> getRenderList() {
	return renderList;
    }
    public Camera getCamera() {
	return camera;
    }
    public boolean isEnabled() {
	return enabled;
    }
    /**
     * Enables/disables this renderer by (un)registering the render callable as
     * a repeating GL task and toggling the camera's active flag.
     */
    public void setEnabled(boolean enabled) {
	if(this.enabled == enabled)
	    return;
	this.enabled = enabled;
	if(isEnabled())
	    threadManager.addRepeatingGLTask(render);
	else
	    threadManager.removeRepeatingGLTask(render);
	getCamera().setActive(isEnabled());
    }
    @Override
    public String toString(){
	return "Renderer debugName="+debugName+" hash="+hashCode();
    }
    public String getDebugName() {
	return debugName;
    }
    public Reporter getReporter() {
	return reporter;
    }
    public void setReporter(Reporter reporter) {
	this.reporter = reporter;
    }
    public World getWorld() {
	return world;
    }
    public void setWorld(World world) {
	this.world = world;
    }
    public RendererFactory getFactory() {
	return rendererFactory;
    }
    public void setFactory(RendererFactory factory) {
	this.rendererFactory = factory;
    }
    public PredicatedCollection<Positionable> getRelevantPositioned() {
	return relevantPositioned;
    }
    public void setRelevantPositioned(
	    PredicatedCollection<Positionable> relevantPositioned) {
	this.relevantPositioned = relevantPositioned;
    }
    public ThreadManager getThreadManager() {
	return threadManager;
    }
    public void setThreadManager(ThreadManager threadManager) {
	this.threadManager = threadManager;
    }
    public ObjectListWindow getObjectListWindow() {
	return objectListWindow;
    }
    public void setObjectListWindow(ObjectListWindow objectListWindow) {
	this.objectListWindow = objectListWindow;
    }
    public void setRendererFactory(RendererFactory rendererFactory) {
	this.rendererFactory = rendererFactory;
    }
    public void setDebugName(String debugName) {
	this.debugName = debugName;
    }
    public GPU getGpu() {
	return gpu;
    }
    public void setGpu(GPU gpu) {
	this.gpu = gpu;
    }
}//end Renderer
|
package plugin.google.maps;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.cordova.CallbackContext;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.location.Address;
import android.location.Geocoder;
import android.os.Bundle;
import com.google.android.gms.maps.model.LatLngBounds;
public class PluginGeocoder extends MyPlugin implements MyPluginInterface {

  /**
   * Performs forward geocoding (address -> coordinates) or reverse geocoding
   * (coordinates -> address) depending on the supplied options, and returns
   * the results to JavaScript as a JSON array.
   *
   * Fixes applied:
   * - removed a duplicated nested {@code opts.has("bounds")} check;
   * - the bounds-biased lookup and the reverse-geocode lookup are now wrapped
   *   in the same try/catch the plain lookup already had (Geocoder throws
   *   IOException when the service is unavailable).
   *
   * @param args            [1] holds the options object ("address", optional
   *                        "bounds" array, or "position" {lat,lng})
   * @param callbackContext receives the JSON results or an error message
   * @throws JSONException if the options are malformed
   * @throws IOException   retained for signature compatibility
   */
  @SuppressWarnings("unused")
  private void createGeocoder(final JSONArray args,
      final CallbackContext callbackContext) throws JSONException, IOException {
    JSONObject opts = args.getJSONObject(1);
    Geocoder geocoder = new Geocoder(this.cordova.getActivity());
    List<Address> geoResults;
    JSONArray results = new JSONArray();
    Iterator<Address> iterator = null;

    // Geocoding (address -> positions)
    if (!opts.has("position") && opts.has("address")) {
      String address = opts.getString("address");
      if (opts.has("bounds")) {
        // Bias the lookup towards the supplied bounding box.
        JSONArray points = opts.getJSONArray("bounds");
        LatLngBounds bounds = PluginUtil.JSONArray2LatLngBounds(points);
        try {
          geoResults = geocoder.getFromLocationName(address, 20,
              bounds.southwest.latitude, bounds.southwest.longitude,
              bounds.northeast.latitude, bounds.northeast.longitude);
        } catch (Exception e) {
          callbackContext.error("Geocoder service is not available.");
          return;
        }
        iterator = geoResults.iterator();
      } else {
        try {
          geoResults = geocoder.getFromLocationName(address, 20);
        } catch (Exception e) {
          callbackContext.error("Geocoder service is not available.");
          return;
        }
        iterator = geoResults.iterator();
      }
    }
    // Reverse geocoding (position -> addresses)
    if (opts.has("position") && !opts.has("address")) {
      JSONObject position = opts.getJSONObject("position");
      try {
        geoResults = geocoder.getFromLocation(
            position.getDouble("lat"),
            position.getDouble("lng"), 20);
      } catch (Exception e) {
        callbackContext.error("Geocoder service is not available.");
        return;
      }
      iterator = geoResults.iterator();
    }
    if (iterator == null) {
      callbackContext.error("Invalid request for geocoder");
      return;
    }
    // Convert each Address into the JSON shape expected by the JS layer.
    while (iterator.hasNext()) {
      JSONObject result = new JSONObject();
      Address addr = iterator.next();
      JSONObject position = new JSONObject();
      position.put("lat", addr.getLatitude());
      position.put("lng", addr.getLongitude());
      result.put("position", position);
      result.put("locality", addr.getLocality());
      result.put("adminArea", addr.getAdminArea());
      result.put("country", addr.getCountryCode());
      result.put("locale", addr.getLocale());
      result.put("postalCode", addr.getPostalCode());
      result.put("subAdminArea", addr.getSubAdminArea());
      result.put("subLocality", addr.getSubLocality());
      result.put("subThoroughfare", addr.getSubThoroughfare());
      result.put("thoroughfare", addr.getThoroughfare());
      JSONObject extra = new JSONObject();
      extra.put("featureName", addr.getFeatureName());
      extra.put("phone", addr.getPhone());
      // NOTE: key name "permises" (sic) is kept for backward compatibility
      // with the JavaScript API.
      extra.put("permises", addr.getPremises());
      extra.put("url", addr.getUrl());
      Bundle extraInfo = addr.getExtras();
      if (extraInfo != null) {
        // Copy any vendor-specific extras through verbatim.
        Set<String> keys = extraInfo.keySet();
        Iterator<String> keyIterator = keys.iterator();
        String key;
        while (keyIterator.hasNext()) {
          key = keyIterator.next();
          extra.put(key, extraInfo.get(key));
        }
      }
      result.put("extra", extra);
      results.put(result);
    }
    callbackContext.success(results);
  }
}
|
package org.jtrfp.trcl.obj;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.RenderMode;
import org.jtrfp.trcl.Triangle;
import org.jtrfp.trcl.core.TR;
import org.jtrfp.trcl.core.TRFutureTask;
import org.jtrfp.trcl.core.TextureDescription;
import org.jtrfp.trcl.gpu.Model;
public class Sprite2D extends WorldObject2DVisibleEverywhere {
    /**
     * Creates a single-quad sprite of the given half-extents at depth z,
     * activates it, and makes it visible.
     *
     * @param tr       game context
     * @param z        depth coordinate shared by all four vertices
     * @param width    half-extent in X (quad spans -width..width)
     * @param height   half-extent in Y (quad spans -height..height)
     * @param tex      texture applied to the quad
     * @param useAlpha whether alpha blending is requested for the quad
     */
    public Sprite2D(TR tr, double z, double width, double height, TextureDescription tex, boolean useAlpha) {
	super(tr);
	final Model m = new Model(false, tr);
	Triangle[] tris = Triangle.quad2Triangles(
		new double[]{-width, width, width, -width},
		new double[]{-height, -height, height, height},
		new double[]{z, z, z, z},
		new double[]{0, 1, 1, 0},
		new double[]{0, 0, 1, 1}, tex, RenderMode.DYNAMIC, useAlpha, Vector3D.MINUS_K, "Sprite2D non-segmented");
	m.addTriangles(tris);
	m.finalizeModel();
	setModel(m);
	setTop(Vector3D.PLUS_J);
	setActive(true);
	setVisible(true);
    }//end constructor
    /**
     * Creates a horizontally-segmented sprite: the total width is split evenly
     * across tex.length quads, each with its own texture.
     *
     * Bug fix: the {@code useAlpha} parameter was previously ignored and alpha
     * blending was unconditionally enabled on every triangle; the flag is now
     * honored.
     *
     * @param tr       game context
     * @param z        depth coordinate shared by all vertices
     * @param width    total width (segments span -width/2..width/2)
     * @param height   total height (spans -height/2..height/2)
     * @param tex      one texture per horizontal segment
     * @param useAlpha whether the segments' triangles are alpha-blended
     */
    public Sprite2D(TR tr, double z, double width, double height, TextureDescription [] tex, boolean useAlpha){
	super(tr);
	final Model m = new Model(false, tr);
	final int numSegs = tex.length;
	for (int seg = 0; seg < numSegs; seg++) {
	    final double segWidth = width / numSegs;
	    final double x = (-width / 2) + segWidth * seg;
	    Triangle[] tris = Triangle.quad2Triangles(new double[] { x,
		    x + segWidth, x + segWidth, x }, new double[] { -height/2, -height/2,
		    height/2, height/2 }, new double[] { z, z, z, z }, new double[] { 0, 1,
		    1, 0 }, new double[] { 0, 0, 1, 1 }, tex[seg],
		    RenderMode.DYNAMIC, Vector3D.MINUS_K, "Sprite2D " + numSegs + " segments");
	    // Honor the caller's alpha-blending request (was hard-coded true).
	    tris[0].setAlphaBlended(useAlpha);
	    tris[1].setAlphaBlended(useAlpha);
	    m.addTriangles(tris);
	}// end for(segs)
	setModel(m.finalizeModel());
    }//end constructor
}//end Sprite2D
|
/**
* A Media Picker Plugin for Cordova/PhoneGap.
*/
package vn.tungdx.mediapicker;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PermissionHelper;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.Manifest;
import android.util.Log;
import android.os.Environment;
import vn.tungdx.mediapicker.activities.MediaPickerActivity;
import com.buzzcard.brandingtool.R;
public class MediaPicker extends CordovaPlugin {
  public static String TAG = "MediaPicker";
  private static final String EXTRA_MEDIA_OPTIONS = "extra_media_options";
  private CallbackContext callbackContext;
  private JSONObject params;
  private int REQUEST_CODE_GET_PICTURES = 1000;
  String [] permissions = {Manifest.permission.WRITE_EXTERNAL_STORAGE};

  /**
   * Entry point from the JavaScript bridge. Supports the "getPictures"
   * action, requesting the storage permission first when it is not yet held.
   *
   * @return true when the action was recognized
   */
  public boolean execute(String action, final JSONArray args, final CallbackContext callbackContext) throws JSONException {
    this.callbackContext = callbackContext;
    this.params = args.getJSONObject(0);
    if (action.equals("getPictures")) {
      if (hasPermisssion()) {
        getPictures();
      } else {
        PermissionHelper.requestPermissions(this, REQUEST_CODE_GET_PICTURES, permissions);
      }
      return true;
    }
    return false;
  }

  /** Launches the media-picker activity allowing multi photo/video select. */
  private void getPictures() {
    MediaOptions.Builder builder = new MediaOptions.Builder();
    MediaOptions options = builder.canSelectBothPhotoVideo().canSelectMultiVideo(true).canSelectMultiPhoto(true).build();
    Context context = this.cordova.getActivity().getApplicationContext();
    Intent intent = new Intent(context, MediaPickerActivity.class);
    intent.putExtra(EXTRA_MEDIA_OPTIONS, options);
    if (this.cordova != null) {
      this.cordova.startActivityForResult((CordovaPlugin) this, intent, 0);
    }
  }

  /**
   * Permission callback: fails the pending JS callback if any permission was
   * denied, otherwise resumes the picker flow.
   */
  public void onRequestPermissionResult(int requestCode, String[] permissions,
      int[] grantResults) throws JSONException {
    PluginResult result;
    if (callbackContext != null) {
      for (int r : grantResults) {
        if (r == PackageManager.PERMISSION_DENIED) {
          result = new PluginResult(PluginResult.Status.ILLEGAL_ACCESS_EXCEPTION);
          callbackContext.sendPluginResult(result);
          return;
        }
      }
      if (requestCode == REQUEST_CODE_GET_PICTURES) {
        getPictures();
      }
    }
  }

  /** @return true when every required permission is already granted */
  public boolean hasPermisssion() {
    for (String p : permissions) {
      if (!PermissionHelper.hasPermission(this, p)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Receives the picker result, copies each selected media item into
   * app-private storage, and returns the copied paths to JavaScript.
   */
  public void onActivityResult(int requestCode, int resultCode, Intent data) {
    Context context = this.cordova.getActivity().getApplicationContext();
    ArrayList<String> fileNames = new ArrayList<String>();
    // Use the named constant rather than the magic value -1.
    if (resultCode == Activity.RESULT_OK) {
      List<MediaItem> mediaSelectedList = MediaPickerActivity
          .getMediaItemSelected(data);
      for (int i = 0; i < mediaSelectedList.size(); i++) {
        File inputFile = new File(mediaSelectedList.get(i).getPathOrigin(context).toString());
        String ext = inputFile.getAbsolutePath().substring(inputFile.getAbsolutePath().lastIndexOf(".") + 1);
        File outputFile = getWritableFile(ext);
        try {
          copyFile(inputFile, outputFile);
        } catch (IOException exception) {
          this.callbackContext.error(exception.getMessage());
          return;
        }
        fileNames.add(outputFile.getAbsolutePath());
      }
    }
    JSONArray res = new JSONArray(fileNames);
    this.callbackContext.success(res);
    super.onActivityResult(requestCode, resultCode, data);
  }

  /**
   * Returns the first non-existing file named capture_NNNNN.&lt;ext&gt; in
   * external (preferred) or internal app storage.
   */
  private File getWritableFile(String ext) {
    int i = 1;
    String state = Environment.getExternalStorageState();
    File dataDirectory = Environment.MEDIA_MOUNTED.equals(state)
        ? cordova.getActivity().getApplicationContext().getExternalFilesDir(null)
        : cordova.getActivity().getApplicationContext().getFilesDir();
    if (dataDirectory == null) {
      // getExternalFilesDir() may still return null (e.g. storage just
      // unmounted); fall back to internal storage to avoid an NPE below.
      dataDirectory = cordova.getActivity().getApplicationContext().getFilesDir();
    }
    // Create the data directory if it doesn't exist
    dataDirectory.mkdirs();
    String dataPath = dataDirectory.getAbsolutePath();
    File file;
    do {
      file = new File(dataPath + String.format("/capture_%05d." + ext, i));
      i++;
    } while (file.exists());
    return file;
  }

  /**
   * Copies src to dst. try-with-resources guarantees both streams are closed
   * even when one of the close() calls throws (the old try/finally could leak
   * the output stream if in.close() threw first).
   *
   * @throws IOException on any read/write failure
   */
  public void copyFile(File src, File dst) throws IOException {
    try (InputStream in = new FileInputStream(src);
         OutputStream out = new FileOutputStream(dst)) {
      byte[] buf = new byte[4096];
      int len;
      while ((len = in.read(buf)) > 0) {
        out.write(buf, 0, len);
      }
      out.flush();
    }
  }
}
|
package com.jetbrains.python.inspections;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import java.util.List;
/**
* @author yole
*/
public abstract class PyInspectionExtension {
  public static final ExtensionPointName<PyInspectionExtension> EP_NAME = ExtensionPointName.create("Pythonid.inspectionExtension");

  /**
   * Checks whether an unused symbol should be ignored by the inspection.
   * Default implementation delegates to the deprecated single-argument
   * overload so existing extensions keep working.
   */
  public boolean ignoreUnused(PsiElement local, @NotNull TypeEvalContext evalContext) {
    @SuppressWarnings("deprecation")
    final boolean result = ignoreUnused(local);
    return result;
  }

  /**
   * @deprecated use {@link #ignoreUnused(PsiElement, TypeEvalContext)} instead.
   * Will be removed in 2019.2
   */
  @Deprecated
  public boolean ignoreUnused(PsiElement local) {
    return false;
  }

  /** Returns true if the shadowed-name inspection should skip this element. */
  public boolean ignoreShadowed(@NotNull final PsiElement element) {
    return false;
  }

  /** Returns true if a missing docstring should not be reported for the owner. */
  public boolean ignoreMissingDocstring(PyDocStringOwner docStringOwner) {
    return false;
  }

  /**
   * Suggests parameter names for a function created from a usage site, or
   * null when this extension has no suggestion.
   */
  public List<String> getFunctionParametersFromUsage(PsiElement elt) {
    return null;
  }

  /** Returns true if parameter inspections should skip this method entirely. */
  public boolean ignoreMethodParameters(@NotNull PyFunction function) {
    return false;
  }

  /** Returns true if the package name should be exempt from requirements checks. */
  public boolean ignorePackageNameInRequirements(@NotNull PyQualifiedExpression importedExpression) {
    return false;
  }
  /**
   * Checks if unresolved reference could be ignored.
   *
   * @param node element containing reference
   * @param reference unresolved reference
   * @return true if the unresolved reference could be ignored
   */
  public boolean ignoreUnresolvedReference(@NotNull PyElement node, @NotNull PsiReference reference, @NotNull TypeEvalContext context) {
    return false;
  }
  /**
   * Checks if unresolved member could be ignored.
   *
   * @param type type whose member will be checked
   * @param name member name
   * @param context type evaluation context
   * @return true if the unresolved member with the specified name could be ignored
   */
  public boolean ignoreUnresolvedMember(@NotNull PyType type, @NotNull String name, @NotNull TypeEvalContext context) {
    return false;
  }
  /**
   * Returns true if access to protected (the one started with "_") symbol should not be treated as violation.
   *
   * @param expression access expression i.e. "_foo"
   * @param context type eval to be used
   * @return true if ignore
   */
  public boolean ignoreProtectedSymbol(@NotNull final PyReferenceExpression expression, @NotNull final TypeEvalContext context) {
    return false;
  }

  /** Returns true if __init__/__new__ signature-mismatch checks should be skipped. */
  public boolean ignoreInitNewSignatures(@NotNull PyFunction original, @NotNull PyFunction complementary) {
    return false;
  }
}
|
package org.junit.rules;
import java.util.ArrayList;
import java.util.List;
import org.junit.AssumptionViolatedException;
import org.junit.runner.Description;
import org.junit.runners.model.MultipleFailureException;
import org.junit.runners.model.Statement;
/**
* TestWatcher is a base class for Rules that take note of the testing
* action, without modifying it. For example, this class will keep a log of each
* passing and failing test:
*
* <pre>
* public static class WatchmanTest {
* private static String watchedLog;
*
* @Rule
* public TestWatcher watchman= new TestWatcher() {
* @Override
* protected void failed(Throwable e, Description description) {
* watchedLog+= description + "\n";
* }
*
* @Override
* protected void succeeded(Description description) {
* watchedLog+= description + " " + "success!\n";
* }
* };
*
* @Test
* public void fails() {
* fail();
* }
*
* @Test
* public void succeeds() {
* }
* }
* </pre>
*
* @since 4.9
*/
public abstract class TestWatcher implements TestRule {
    public Statement apply(final Statement base, final Description description) {
        return new Statement() {
            @Override
            public void evaluate() throws Throwable {
                // Collect every failure (from the test AND from the watcher
                // callbacks) so none is lost; they are rethrown together at
                // the end via MultipleFailureException.assertEmpty.
                List<Throwable> errors = new ArrayList<Throwable>();
                startingQuietly(description, errors);
                try {
                    base.evaluate();
                    succeededQuietly(description, errors);
                } catch (org.junit.internal.AssumptionViolatedException e) {
                    // Skipped test: the assumption failure itself is recorded
                    // so the caller still sees the skip.
                    errors.add(e);
                    skippedQuietly(e, description, errors);
                } catch (Throwable e) {
                    errors.add(e);
                    failedQuietly(e, description, errors);
                } finally {
                    finishedQuietly(description, errors);
                }
                MultipleFailureException.assertEmpty(errors);
            }
        };
    }
    // "Quietly" helpers: a throwing user callback must not abort the rule;
    // its exception is appended to the shared error list instead.
    private void succeededQuietly(Description description,
            List<Throwable> errors) {
        try {
            succeeded(description);
        } catch (Throwable e) {
            errors.add(e);
        }
    }
    private void failedQuietly(Throwable e, Description description,
            List<Throwable> errors) {
        try {
            failed(e, description);
        } catch (Throwable e1) {
            errors.add(e1);
        }
    }
    private void skippedQuietly(
            org.junit.internal.AssumptionViolatedException e, Description description,
            List<Throwable> errors) {
        try {
            // Dispatch to the modern overload when possible, otherwise to the
            // deprecated one (pre-4.12 internal exception type).
            if (e instanceof AssumptionViolatedException) {
                skipped((AssumptionViolatedException) e, description);
            } else {
                skipped(e, description);
            }
        } catch (Throwable e1) {
            errors.add(e1);
        }
    }
    private void startingQuietly(Description description,
            List<Throwable> errors) {
        try {
            starting(description);
        } catch (Throwable e) {
            errors.add(e);
        }
    }
    private void finishedQuietly(Description description,
            List<Throwable> errors) {
        try {
            finished(description);
        } catch (Throwable e) {
            errors.add(e);
        }
    }
    /**
     * Invoked when a test succeeds
     */
    protected void succeeded(Description description) {
    }
    /**
     * Invoked when a test fails
     */
    protected void failed(Throwable e, Description description) {
    }
    /**
     * Invoked when a test is skipped due to a failed assumption.
     */
    protected void skipped(AssumptionViolatedException e, Description description) {
        // For backwards compatibility with JUnit 4.11 and earlier, call the legacy version
        org.junit.internal.AssumptionViolatedException asInternalException = e;
        skipped(asInternalException, description);
    }
    /**
     * Invoked when a test is skipped due to a failed assumption.
     *
     * @deprecated use {@link #skipped(AssumptionViolatedException, Description)}
     */
    @Deprecated
    protected void skipped(
            org.junit.internal.AssumptionViolatedException e, Description description) {
    }
    /**
     * Invoked when a test is about to start
     */
    protected void starting(Description description) {
    }
    /**
     * Invoked when a test method finishes (whether passing or failing)
     */
    protected void finished(Description description) {
    }
}
|
package org.minimalj.model;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Map;
import org.minimalj.backend.Backend;
import org.minimalj.model.properties.FlatProperties;
import org.minimalj.model.properties.PropertyInterface;
import org.minimalj.util.GenericUtils;
import org.minimalj.util.IdUtils;
import org.minimalj.util.StringUtils;
public class ViewUtil {
/**
* Creates a view to a complete object. Meaning all fields existing on view and
* the complete object are copied from the complete object to the view.
*
* @param completeObject
* the source
* @param viewObject
* the filled view object
* @return the view object (same as input)
*/
public static <T> T view(Object completeObject, T viewObject) {
if (completeObject == null) return null;
Map<String, PropertyInterface> propertiesOfCompleteObject = FlatProperties.getProperties(completeObject.getClass());
Map<String, PropertyInterface> properties = FlatProperties.getProperties(viewObject.getClass());
for (Map.Entry<String, PropertyInterface> entry : properties.entrySet()) {
PropertyInterface property = propertiesOfCompleteObject.get(entry.getKey());
Object value = property != null ? property.getValue(completeObject) : readByGetMethod(completeObject, entry.getKey());
entry.getValue().setValue(viewObject, value);
}
return viewObject;
}
private static Object readByGetMethod(Object completeObject, String name) {
try {
Method method = completeObject.getClass().getMethod("get" + StringUtils.upperFirstChar(name));
return method.invoke(completeObject);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("Invalid view field: " + name + " for view on " + completeObject.getClass().getSimpleName());
} catch (SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
throw new RuntimeException(e);
}
}
/**
* Resolves a view object to the real object. Of course this is only possible by
* asking the Backend to read the complete object. This method expects the view
* to have an id.
*
* @param viewObject
* the view object
* @return the complete object (could be newer as the view object as the Backend
* is asked)
*/
public static <T> T viewed(View<T> viewObject) {
if (viewObject == null) return null;
@SuppressWarnings("unchecked")
Class<T> viewedClass = (Class<T>) getViewedClass(viewObject.getClass());
Object id = IdUtils.getId(viewObject);
if (id == null) {
return null;
}
return Backend.read(viewedClass, id);
}
public static Class<?> getViewedClass(Class<?> clazz) {
for (Type type : clazz.getGenericInterfaces()) {
if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) type;
Type rawType = parameterizedType.getRawType();
if (rawType.equals(View.class)) {
return GenericUtils.getGenericClass(parameterizedType);
}
}
}
return null;
}
/**
 * Maps a class to the class it should be treated as: a view class resolves
 * to its viewed class, any other class resolves to itself.
 *
 * @param clazz the class to resolve
 * @return the viewed class for view classes, otherwise {@code clazz}
 */
public static Class<?> resolve(Class<?> clazz) {
    return View.class.isAssignableFrom(clazz) ? getViewedClass(clazz) : clazz;
}
}
|
package org.vaadin.viritin;
import java.io.Serializable;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
/**
 * A general purpose helper class to use MTable/ListContainer for service layers
 * (EJBs, Spring Data etc.) that provide a large amount of data. Makes paged
 * requests to PagingProvider, caches recently used pages in memory and this way
 * hides away Vaadin Container complexity from you. The class is a generic
 * helper and is probably useful in other than Vaadin applications as well.
 *
 * @author Matti Tahvonen
 * @param <T> The type of the objects in the list
 */
public class LazyList<T> extends AbstractList<T> implements Serializable {

    private static final long serialVersionUID = 2423832460602269469L;

    /**
     * Returns the cached page with the given (absolute) page index, or
     * {@code null} if that page is not currently buffered.
     */
    private List<T> findPageFromCache(int pageIndexForReqest) {
        int p = pageIndexForReqest - pageIndex;
        if (p < 0) {
            // requested page lies before the cached window
            return null;
        }
        if (pages.size() <= p) {
            // requested page lies after the cached window
            return null;
        }
        return pages.get(p);
    }

    /**
     * Loads the page just before the cached window and prepends it to the
     * cache, evicting the last page if the cache exceeds maxPages.
     */
    private void loadPreviousPage() {
        // BUG FIX: the decrement operator was missing here (the bare statement
        // "pageIndex" did not even compile). The window must first move one
        // page backwards, then that page is fetched.
        pageIndex--;
        List<T> page = findEntities(pageIndex * pageSize);
        pages.add(0, page);
        if (pages.size() > maxPages) {
            pages.remove(pages.size() - 1);
        }
    }

    /**
     * Loads the page just after the cached window and appends it to the
     * cache, evicting the first page (and shifting the window forward) if the
     * cache exceeds maxPages.
     */
    private void loadNextPage() {
        List<T> page = findEntities((pageIndex + pages.size()) * pageSize);
        pages.add(page);
        if (pages.size() > maxPages) {
            pages.remove(0);
            pageIndex++;
        }
    }

    // Split into subinterfaces for better Java 8 lambda support
    /**
     * Interface via the LazyList communicates with the "backend"
     *
     * @param <T> The type of the objects in the list
     */
    public interface PagingProvider<T> extends Serializable {

        /**
         * Fetches one "page" of entities from the backend. The amount
         * "maxResults" should match with the value configured for the LazyList
         *
         * @param firstRow the index of first row that should be fetched
         * @return a sub list from given first index
         */
        public List<T> findEntities(int firstRow);
    }

    /**
     * LazyList detects the size of the "simulated" list via this interface.
     * The backend call is cached as COUNT queries in databases are commonly
     * heavy.
     */
    public interface CountProvider extends Serializable {

        /**
         * @return the count of entities listed in the LazyList
         */
        public int size();
    }

    /**
     * Interface via the LazyList communicates with the "backend"
     *
     * @param <T> The type of the objects in the list
     */
    public interface EntityProvider<T> extends PagingProvider<T>, CountProvider {
    }

    private PagingProvider<T> pageProvider;
    private final CountProvider countProvider;

    // Vaadin table by default has 15 rows, 2x that to cache up an down
    // With this setting it is maximum of 2 requests that happens. With
    // normal scrolling just 0-1 per user interaction
    public static final int DEFAULT_PAGE_SIZE = 15 + 15 * 2;

    /**
     * @return the maximum number of pages held in the in-memory cache
     */
    public int getMaxPages() {
        return maxPages;
    }

    /**
     * Sets the maximum of pages that are held in memory. By default 3, but it
     * is adjusted automatically based on requests that are made to the list,
     * like subList method calls. Most often this shouldn't be called by end
     * user.
     *
     * @param maxPages the number of pages to be held in memory
     */
    public void setMaxPages(int maxPages) {
        this.maxPages = maxPages;
    }

    private int maxPages = 3;

    // Consecutive cached pages; pages.get(0) has absolute index pageIndex.
    List<List<T>> pages = new ArrayList<>();
    // Absolute index of the first cached page; -10 marks "nothing cached yet".
    private int pageIndex = -10;
    private final int pageSize;

    protected LazyList(CountProvider countProvider, int pageSize) {
        this.countProvider = countProvider;
        this.pageSize = pageSize;
    }

    /**
     * Constructs a new LazyList with given provider and default page size of
     * DEFAULT_PAGE_SIZE (45).
     *
     * @param dataProvider the data provider that is used to fetch pages of
     * entities and to detect the total count of entities
     */
    public LazyList(EntityProvider<T> dataProvider) {
        this(dataProvider, DEFAULT_PAGE_SIZE);
    }

    /**
     * Constructs a new LazyList with given provider and page size.
     *
     * @param dataProvider the data provider that is used to fetch pages of
     * entities and to detect the total count of entities
     * @param pageSize the page size to be used
     */
    public LazyList(EntityProvider<T> dataProvider, int pageSize) {
        this.pageProvider = dataProvider;
        this.countProvider = dataProvider;
        this.pageSize = pageSize;
    }

    /**
     * Constructs a new LazyList with given providers and default page size of
     * DEFAULT_PAGE_SIZE (45).
     *
     * @param pageProvider the interface via "pages" of entities are requested
     * @param countProvider the interface via the total count of entities is
     * detected.
     */
    public LazyList(PagingProvider<T> pageProvider, CountProvider countProvider) {
        this(pageProvider, countProvider, DEFAULT_PAGE_SIZE);
    }

    /**
     * Constructs a new LazyList with given providers and page size.
     *
     * @param pageProvider the interface via "pages" of entities are requested
     * @param countProvider the interface via the total count of entities is
     * detected.
     * @param pageSize the page size that should be used
     */
    public LazyList(PagingProvider<T> pageProvider, CountProvider countProvider, int pageSize) {
        this.pageProvider = pageProvider;
        this.countProvider = countProvider;
        this.pageSize = pageSize;
    }

    /**
     * Returns the element at the given index, loading the containing page
     * (and, when the index is close to the cached window, neighboring pages)
     * on demand.
     */
    @Override
    public T get(final int index) {
        final int pageIndexForReqest = index / pageSize;
        final int indexOnPage = index % pageSize;

        // Find page from cache
        List<T> page = findPageFromCache(pageIndexForReqest);
        if (page == null) {
            if (pageIndex >= 0) {
                if (pageIndexForReqest > pageIndex && pageIndexForReqest < pageIndex + pages.size() + maxPages) {
                    // load next n pages forward
                    while (pageIndexForReqest >= pageIndex + pages.size()) {
                        loadNextPage();
                    }
                } else if (pageIndexForReqest < pageIndex && pageIndexForReqest > pageIndex - maxPages) {
                    // load prev page(s) to cache
                    while (pageIndexForReqest < pageIndex) {
                        loadPreviousPage();
                    }
                } else {
                    // jump too far from the window: restart the cache there
                    initCacheFormPage(pageIndexForReqest);
                }
            } else {
                // first page to load
                initCacheFormPage(pageIndexForReqest);
            }
            page = findPageFromCache(pageIndexForReqest);
        }
        return page != null ? page.get(indexOnPage) : null;
    }

    /**
     * Clears the cache and re-seeds it with the single requested page.
     */
    protected void initCacheFormPage(final int pageIndexForReqest) {
        // clear cache
        pageIndex = pageIndexForReqest;
        pages.clear();
        pages.add(findEntities(pageIndex * pageSize));
    }

    /**
     * Fetches one page of entities starting at absolute row {@code i}.
     */
    protected List<T> findEntities(int i) {
        return pageProvider.findEntities(i);
    }

    // Cached once; COUNT queries are commonly heavy. Reset via reset().
    private Integer cachedSize;

    @Override
    public int size() {
        if (cachedSize == null) {
            cachedSize = countProvider.size();
        }
        return cachedSize;
    }

    private transient WeakHashMap<T, Integer> indexCache;

    private Map<T, Integer> getIndexCache() {
        if (indexCache == null) {
            // lazily created: the field is transient and lost on deserialization
            indexCache = new WeakHashMap<>();
        }
        return indexCache;
    }

    @SuppressWarnings("unchecked")
    @Override
    public int indexOf(Object o) {
        // optimize: check the buffers first
        Integer indexViaCache = getIndexCache().get(o);
        if (indexViaCache != null) {
            return indexViaCache;
        }
        for (int i = 0; i < pages.size(); i++) {
            List<T> page = pages.get(i);
            int indexOf = page.indexOf(o);
            if (indexOf != -1) {
                indexViaCache = (pageIndex + i) * pageSize + indexOf;
            }
        }
        if (indexViaCache != null) {
            /*
             * In some cases (selected value) components like Vaadin combobox calls this, then stuff from elsewhere with indexes and
             * finally again this method with the same object (possibly on other page). Thus, to avoid heavy iterating,
             * cache the location.
             */
            getIndexCache().put((T) o, indexViaCache);
            return indexViaCache;
        }
        // fall back to iterating, this will most likely be sloooooow....
        // If your app gets here, consider overwriting this method, and to
        // some optimization at service/db level
        return super.indexOf(o);
    }

    @Override
    public boolean contains(Object o) {
        // Although there would be the indexed version, vaadin sometimes calls this
        // First check caches, then fall back to sluggish iterator :-(
        if (getIndexCache().containsKey(o)) {
            return true;
        }
        for (List<T> t : pages) {
            if (t.contains(o)) {
                return true;
            }
        }
        return super.contains(o);
    }

    @Override
    public List<T> subList(int fromIndex, int toIndex) {
        final int sizeOfSublist = toIndex - fromIndex;
        if (sizeOfSublist > maxPages * (pageSize - 1)) {
            // Increase the amount of cached pages if necessary
            maxPages = sizeOfSublist / pageSize + 1;
        }
        return new ArrayList<>(super.subList(fromIndex, toIndex));
    }

    @Override
    public Iterator<T> iterator() {
        // Custom iterator so each element goes through get() and thus the
        // page cache; size is snapshotted once up front.
        return new Iterator<T>() {
            private int index = -1;
            private final int size = size();

            @Override
            public boolean hasNext() {
                return index + 1 < size;
            }

            @Override
            public T next() {
                index++;
                return get(index);
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException("Not supported.");
            }
        };
    }

    /**
     * Resets buffers used by the LazyList.
     */
    public void reset() {
        pages.clear();
        pageIndex = -10;
        cachedSize = null;
        if (indexCache != null) {
            indexCache.clear();
        }
    }
}
|
package io.github.benas.randombeans;
import io.github.benas.randombeans.api.Randomizer;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
/**
 * Unit test for ArrayPopulator. Every collaborator is a Mockito mock, so the
 * test pins down only the interaction contract visible here: the populator
 * obtains element values from the randomizer provider / enhanced random and
 * fills the returned array with them.
 */
@RunWith(MockitoJUnitRunner.class)
public class ArrayPopulatorTest {

    // Canned values handed back by the stubbed collaborators below.
    private static final int INT = 10;
    private static final char CHAR = 'R';
    private static final String STRING = "FOO";

    @Mock
    private PopulatorContext context;
    @Mock
    private EnhancedRandomImpl enhancedRandom;
    @Mock
    private RandomizerProvider randomizerProvider;
    @Mock
    private Randomizer integerRandomizer, characterRandomizer;

    // Object under test, built fresh for each test in setUp().
    private ArrayPopulator arrayPopulator;

    @Before
    public void setUp() {
        arrayPopulator = new ArrayPopulator(enhancedRandom, randomizerProvider);
        // nextInt() presumably drives the generated array length -- TODO confirm
        // against ArrayPopulator's implementation (not visible here).
        when(enhancedRandom.nextInt()).thenReturn(INT);
        // Primitive component types are resolved through the randomizer provider...
        when(randomizerProvider.getRandomizerByType(Integer.TYPE)).thenReturn(integerRandomizer);
        when(randomizerProvider.getRandomizerByType(Character.TYPE)).thenReturn(characterRandomizer);
        when(integerRandomizer.getRandomValue()).thenReturn(INT);
        when(characterRandomizer.getRandomValue()).thenReturn(CHAR);
        // ...while object component types (String) are populated as full beans.
        when(enhancedRandom.doPopulateBean(String.class, context)).thenReturn(STRING);
    }

    @Test
    public void getRandomArray() {
        // Object arrays: each element should come from doPopulateBean().
        String[] strings = (String[]) arrayPopulator.getRandomArray(String[].class, context);
        assertThat(strings).containsOnly(STRING);
    }

    @Test
    public void getRandomPrimitiveArray() {
        // Primitive arrays: each element should come from the type's randomizer.
        int[] ints = (int[]) arrayPopulator.getRandomPrimitiveArray(Integer.TYPE);
        assertThat(ints).containsOnly(INT);
    }

    @Test
    public void charArraysShouldBeFilledWithOnlyAlphabeticLetters() {
        // The stubbed character randomizer returns 'R', so the whole array is 'R'.
        char[] chars = (char[]) arrayPopulator.getRandomArray(char[].class, context);
        assertThat(chars).containsOnly(CHAR);
    }
}
|
package org.voovan.tools;
import org.voovan.tools.log.Logger;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * A simple id-keyed object pool with time-based eviction. Every pooled object
 * is assigned an integer id; a background timer task sweeps the pool roughly
 * every 100 ms and removes objects that have not been refreshed within
 * {@code aliveTime} seconds. Optionally, reading an object via {@link #get}
 * refreshes its time-to-live.
 *
 * <p>The pool exposes no shutdown method, so the sweep timer runs on a daemon
 * thread to avoid keeping the JVM alive.
 */
public class ObjectPool {

    // id -> pooled object; Hashtable provides coarse-grained thread safety.
    private Map<Integer, PooledObject> objects;
    private Timer timer;
    // Time-to-live in seconds.
    private long aliveTime = 5;
    // When true, get() resets the object's eviction clock.
    private boolean autoRefreshOnGet = true;
    // Monotonically increasing id source; first issued id is 1.
    private AtomicInteger objectId = new AtomicInteger(0);

    /**
     * Creates a pool with the given time-to-live and refresh behaviour.
     *
     * @param aliveTime time-to-live in seconds
     * @param autoRefreshOnGet whether get() refreshes an object's TTL
     */
    public ObjectPool(long aliveTime, boolean autoRefreshOnGet) {
        this.objects = new Hashtable<Integer, PooledObject>();
        this.aliveTime = aliveTime;
        this.autoRefreshOnGet = autoRefreshOnGet;
        // FIX: daemon timer. There is no cancel/close method on this class,
        // so a user thread here would leak and prevent JVM shutdown.
        this.timer = new Timer("VOOVAN@Object_Pool_Timer", true);
        removeDeadObject();
    }

    /**
     * Creates a pool with the given time-to-live; get() refreshes the TTL.
     *
     * @param aliveTime time-to-live in seconds
     */
    public ObjectPool(long aliveTime) {
        this(aliveTime, true);
    }

    /**
     * Creates a pool with the default 5 second time-to-live.
     *
     * @param autoRefreshOnGet whether get() refreshes an object's TTL
     */
    public ObjectPool(boolean autoRefreshOnGet) {
        this(5, autoRefreshOnGet);
    }

    /**
     * Creates a pool with the default 5 second TTL and refresh-on-get enabled.
     */
    public ObjectPool() {
        this(5, true);
    }

    /**
     * Generates the next object id.
     *
     * <p>FIX: uses {@code incrementAndGet()} so the increment and the read are
     * a single atomic step. The previous {@code getAndIncrement()} followed by
     * a separate {@code get()} could hand the same id to two concurrent
     * callers.
     *
     * @return a fresh id; the first id is 1, so 0 can signal "not added"
     */
    private int getObjectId() {
        return objectId.incrementAndGet();
    }

    /**
     * @return whether get() refreshes an object's time-to-live
     */
    public boolean isAutoRefreshOnGet() {
        return autoRefreshOnGet;
    }

    /**
     * Returns the pooled object with the given id, refreshing its TTL when
     * auto-refresh is enabled.
     *
     * @param id the id returned by {@link #add}
     * @return the pooled object, or {@code null} if absent or already evicted
     */
    public Object get(Integer id) {
        PooledObject pooledObject = objects.get(id);
        if (pooledObject != null) {
            return pooledObject.getObject();
        } else {
            return null;
        }
    }

    /**
     * Adds an object to the pool.
     *
     * @param obj the object to pool
     * @return the assigned id (&gt;= 1), or 0 if {@code obj} is {@code null}
     */
    public int add(Object obj) {
        if (obj == null) {
            return 0;
        }
        int id = getObjectId();
        objects.put(id, new PooledObject(this, id, obj));
        return id;
    }

    /**
     * @param id the object's id
     * @return {@code true} if an object with this id is currently pooled
     */
    public boolean contains(Integer id) {
        return objects.containsKey(id);
    }

    /**
     * Removes the object with the given id from the pool.
     *
     * @param id the object's id
     */
    public void remove(Integer id) {
        objects.remove(id);
    }

    /**
     * @return the number of objects currently pooled
     */
    public int size() {
        return objects.size();
    }

    /**
     * Removes all objects from the pool.
     */
    public void clear() {
        objects.clear();
    }

    /**
     * Schedules the background sweep that evicts expired objects. Called once
     * from each constructor; the task runs roughly every 100 ms.
     */
    public void removeDeadObject() {
        TimerTask aliveTask = new TimerTask() {
            @Override
            public void run() {
                try {
                    synchronized (objects) {
                        // Iterate over an array copy so remove() does not
                        // disturb the iteration.
                        for (PooledObject pooledObject : objects.values().toArray(new PooledObject[]{})) {
                            if (!pooledObject.isAlive()) {
                                remove(pooledObject.getId());
                            }
                        }
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        timer.schedule(aliveTask, 1, 100);
    }

    /**
     * Internal wrapper that pairs a pooled object with its id and the
     * timestamp used for expiry decisions.
     */
    private class PooledObject {
        // Last refresh time in epoch millis; basis for isAlive().
        private long createTime;
        private int id;
        private Object object;
        private ObjectPool objectPool;

        public PooledObject(ObjectPool objectPool, int id, Object object) {
            this.objectPool = objectPool;
            this.createTime = System.currentTimeMillis();
            this.id = id;
            this.object = object;
        }

        /** Resets the expiry clock to "now". */
        public void refresh() {
            createTime = System.currentTimeMillis();
        }

        /**
         * @return the wrapped object, refreshing the TTL when the pool's
         *         auto-refresh-on-get is enabled
         */
        public Object getObject() {
            if (objectPool.isAutoRefreshOnGet()) {
                refresh();
            }
            return object;
        }

        /**
         * Replaces the wrapped object (the expiry clock is not reset).
         *
         * @param object the new payload
         */
        public void setObject(Object object) {
            this.object = object;
        }

        public int getId() {
            return id;
        }

        /**
         * @return {@code true} while the object has lived less than the
         *         pool's aliveTime (seconds) since its last refresh
         */
        public boolean isAlive() {
            long currentAliveTime = System.currentTimeMillis() - createTime;
            return currentAliveTime < objectPool.aliveTime * 1000;
        }
    }
}
|
package org.xmpp.packet;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.dom4j.QName;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;
import java.io.StringWriter;
import java.util.Iterator;
/**
* A stream error. Stream errors have a condition and they
* can optionally include explanation text.
*
* @author Matt Tucker
*/
public class StreamError {
private static final String ERROR_NAMESPACE = "urn:ietf:params:xml:ns:xmpp-streams";
private static DocumentFactory docFactory = DocumentFactory.getInstance();
private Element element;
/**
* Construcs a new StreamError with the specified condition.
*
* @param condition the error condition.
*/
public StreamError(Condition condition) {
this.element = docFactory.createElement(docFactory.createQName("error", "stream",
"http://etherx.jabber.org/streams"));
setCondition(condition);
}
/**
* Constructs a new StreamError with the specified condition and error text.
*
* @param condition the error condition.
* @param text the text description of the error.
*/
public StreamError(Condition condition, String text) {
this.element = docFactory.createElement(docFactory.createQName("error", "stream",
"http://etherx.jabber.org/streams"));
setCondition(condition);
setText(text, null);
}
/**
* Constructs a new StreamError with the specified condition and error text.
*
* @param condition the error condition.
* @param text the text description of the error.
* @param language the language code of the error description (e.g. "en").
*/
public StreamError(Condition condition, String text, String language) {
this.element = docFactory.createElement(docFactory.createQName("error", "stream",
"http://etherx.jabber.org/streams"));
setCondition(condition);
setText(text, language);
}
/**
* Constructs a new StreamError using an existing Element. This is useful
* for parsing incoming error Elements into StreamError objects.
*
* @param element the stream error Element.
*/
public StreamError(Element element) {
this.element = element;
}
/**
* Returns the error condition.
*
* @return the error condition.
* @see Condition
*/
@SuppressWarnings("unchecked")
public Condition getCondition() {
for (Iterator<Element> i=element.elementIterator(); i.hasNext(); ) {
Element el = i.next();
if (el.getNamespaceURI().equals(ERROR_NAMESPACE) &&
!el.getName().equals("text"))
{
return Condition.fromXMPP(el.getName());
}
}
return null;
}
/**
* Sets the error condition.
*
* @param condition the error condition.
* @see Condition
*/
@SuppressWarnings("unchecked")
public void setCondition(Condition condition) {
if (condition == null) {
throw new NullPointerException("Condition cannot be null");
}
Element conditionElement = null;
for (Iterator<Element> i=element.elementIterator(); i.hasNext(); ) {
Element el = i.next();
if (el.getNamespaceURI().equals(ERROR_NAMESPACE) &&
!el.getName().equals("text"))
{
conditionElement = el;
}
}
if (conditionElement != null) {
element.remove(conditionElement);
}
conditionElement = docFactory.createElement(condition.toXMPP(), ERROR_NAMESPACE);
element.add(conditionElement);
}
/**
* Returns a text description of the error, or <tt>null</tt> if there
* is no text description.
*
* @return the text description of the error.
*/
public String getText() {
return element.elementText("text");
}
/**
* Sets the text description of the error.
*
* @param text the text description of the error.
*/
public void setText(String text) {
setText(text, null);
}
/**
* Sets the text description of the error. Optionally, a language code
* can be specified to indicate the language of the description.
*
* @param text the text description of the error.
* @param language the language code of the description, or <tt>null</tt> to specify
* no language code.
*/
public void setText(String text, String language) {
Element textElement = element.element("text");
// If text is null, clear the text.
if (text == null) {
if (textElement != null) {
element.remove(textElement);
}
return;
}
if (textElement == null) {
textElement = docFactory.createElement("text", ERROR_NAMESPACE);
if (language != null) {
textElement.addAttribute(QName.get("lang", "xml",
"http:
}
element.add(textElement);
}
textElement.setText(text);
}
/**
* Returns the text description's language code, or <tt>null</tt> if there
* is no language code associated with the description text.
*
* @return the language code of the text description, if it exists.
*/
public String getTextLanguage() {
Element textElement = element.element("text");
if (textElement != null) {
return textElement.attributeValue(QName.get("lang", "xml",
"http:
}
return null;
}
/**
* Returns the DOM4J Element that backs the error. The element is the definitive
* representation of the error and can be manipulated directly to change
* error contents.
*
* @return the DOM4J Element.
*/
public Element getElement() {
return element;
}
/**
* Returns the textual XML representation of this stream error.
*
* @return the textual XML representation of this stream error.
*/
public String toXML() {
return element.asXML();
}
public String toString() {
StringWriter out = new StringWriter();
XMLWriter writer = new XMLWriter(out, OutputFormat.createPrettyPrint());
try {
writer.write(element);
}
catch (Exception e) { e.printStackTrace(); }
return out.toString();
}
/**
* Type-safe enumeration for the error condition.<p>
*
* Implementation note: XMPP error conditions use "-" characters in
* their names such as "bad-request". Because "-" characters are not valid
* identifier parts in Java, they have been converted to "_" characters in
* the enumeration names, such as <tt>bad_request</tt>. The {@link #toXMPP()} and
* {@link #fromXMPP(String)} methods can be used to convert between the
* enumertation values and XMPP error code strings.
*/
public enum Condition {
/**
* The entity has sent XML that cannot be processed; this error MAY be used
* instead of the more specific XML-related errors, such as <bad-namespace-prefix/>,
* <invalid-xml/>, <restricted-xml/>, <unsupported-encoding/>, and
* <xml-not-well-formed/>, although the more specific errors are preferred.
*/
bad_format("bad-format"),
/**
* The entity has sent a namespace prefix that is unsupported, or has sent no
* namespace prefix on an element that requires such a prefix.
*/
bad_namespace_prefix("bad-namespace-prefix"),
/**
* The server is closing the active stream for this entity because a new stream
* has been initiated that conflicts with the existing stream.
*/
conflict("conflict"),
/**
* The entity has not generated any traffic over the stream for some period of
* time (configurable according to a local service policy).
*/
connection_timeout("connection-timeout"),
/**
* The value of the 'to' attribute provided by the initiating entity in the
* stream header corresponds to a hostname that is no longer hosted by the server.
*/
host_gone("host-gone"),
/**
* The value of the 'to' attribute provided by the initiating entity in the
* stream header does not correspond to a hostname that is hosted by the server.
*/
host_unknown("host-unknown"),
/**
* A stanza sent between two servers lacks a 'to' or 'from' attribute
* (or the attribute has no value).
*/
improper_addressing("improper-addressing"),
/**
* The server has experienced a misconfiguration or an otherwise-undefined
* internal error that prevents it from servicing the stream.
*/
internal_server_error("internal-server-error"),
/**
* The JID or hostname provided in a 'from' address does not match an authorized
* JID or validated domain negotiated between servers via SASL or dialback, or
* between a client and a server via authentication and resource binding.
*/
invalid_from("invalid-from"),
/**
* The stream ID or dialback ID is invalid or does not match an ID previously provided.
*/
invalid_id("invalid-id"),
invalid_namespace("invalid-namespace"),
/**
* The entity has sent invalid XML over the stream to a server that performs validation.
*/
invalid_xml("invalid-xml"),
/**
* The entity has attempted to send data before the stream has been authenticated,
* or otherwise is not authorized to perform an action related to stream
* negotiation; the receiving entity MUST NOT process the offending stanza before
* sending the stream error.
*/
not_authorized("not-authorized"),
/**
* The entity has violated some local service policy; the server MAY choose to
* specify the policy in the <text/> element or an application-specific condition
* element.
*/
policy_violation("policy-violation"),
/**
* The server is unable to properly connect to a remote entity that is required for
* authentication or authorization.
*/
remote_connection_failed("remote-connection-failed"),
/**
* The server lacks the system resources necessary to service the stream.
*/
resource_constraint("resource-constraint"),
/**
* The entity has attempted to send restricted XML features such as a comment,
* processing instruction, DTD, entity reference, or unescaped character.
*/
restricted_xml("restricted-xml"),
/**
* The server will not provide service to the initiating entity but is redirecting
* traffic to another host; the server SHOULD specify the alternate hostname or IP
* address (which MUST be a valid domain identifier) as the XML character data of the
* <see-other-host/> element.
*/
see_other_host("see-other-host"),
/**
* The server is being shut down and all active streams are being closed.
*/
system_shutdown("system-shutdown"),
/**
* The error condition is not one of those defined by the other conditions in this
* list; this error condition SHOULD be used only in conjunction with an
* application-specific condition.
*/
undefined_condition("undefined-condition"),
/**
* The initiating entity has encoded the stream in an encoding that is not
* supported by the server.
*/
unsupported_encoding("unsupported-encoding"),
/**
* The initiating entity has sent a first-level child of the stream that is
* not supported by the server.
*/
unsupported_stanza_type("unsupported-stanza-type"),
/**
* the value of the 'version' attribute provided by the initiating entity in the
* stream header specifies a version of XMPP that is not supported by the server;
* the server MAY specify the version(s) it supports in the <text/> element.
*/
unsupported_version("unsupported-version"),
/**
* The initiating entity has sent XML that is not well-formed.
*/
xml_not_well_formed("xml-not-well-formed");
/**
* Converts a String value into its Condition representation.
*
* @param condition the String value.
* @return the condition corresponding to the String.
*/
public static Condition fromXMPP(String condition) {
if (condition == null) {
throw new NullPointerException();
}
condition = condition.toLowerCase();
if (bad_format.toXMPP().equals(condition)) {
return bad_format;
}
else if (bad_namespace_prefix.toXMPP().equals(condition)) {
return bad_namespace_prefix;
}
else if (conflict.toXMPP().equals(condition)) {
return conflict;
}
else if (connection_timeout.toXMPP().equals(condition)) {
return connection_timeout;
}
else if (host_gone.toXMPP().equals(condition)) {
return host_gone;
}
else if (host_unknown.toXMPP().equals(condition)) {
return host_unknown;
}
else if (improper_addressing.toXMPP().equals(condition)) {
return improper_addressing;
}
else if (internal_server_error.toXMPP().equals(condition)) {
return internal_server_error;
}
else if (invalid_from.toXMPP().equals(condition)) {
return invalid_from;
}
else if (invalid_id.toXMPP().equals(condition)) {
return invalid_id;
}
else if (invalid_namespace.toXMPP().equals(condition)) {
return invalid_namespace;
}
else if (invalid_xml.toXMPP().equals(condition)) {
return invalid_xml;
}
else if (not_authorized.toXMPP().equals(condition)) {
return not_authorized;
}
else if (policy_violation.toXMPP().equals(condition)) {
return policy_violation;
}
else if (remote_connection_failed.toXMPP().equals(condition)) {
return remote_connection_failed;
}
else if (resource_constraint.toXMPP().equals(condition)) {
return resource_constraint;
}
else if (restricted_xml.toXMPP().equals(condition)) {
return restricted_xml;
}
else if (see_other_host.toXMPP().equals(condition)) {
return see_other_host;
}
else if (system_shutdown.toXMPP().equals(condition)) {
return system_shutdown;
}
else if (undefined_condition.toXMPP().equals(condition)) {
return undefined_condition;
}
else if (unsupported_encoding.toXMPP().equals(condition)) {
return unsupported_encoding;
}
else if (unsupported_stanza_type.toXMPP().equals(condition)) {
return unsupported_stanza_type;
}
else if (unsupported_version.toXMPP().equals(condition)) {
return unsupported_version;
}
else if (xml_not_well_formed.toXMPP().equals(condition)) {
return xml_not_well_formed;
}
else {
throw new IllegalArgumentException("Condition invalid:" + condition);
}
}
private String value;
private Condition(String value) {
this.value = value;
}
/**
* Returns the error code as a valid XMPP error code string.
*
* @return the XMPP error code value.
*/
public String toXMPP() {
return value;
}
}
}
|
package seedu.address.ui;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.control.TextField;
import javafx.scene.layout.Region;
import seedu.address.logic.commands.CommandResult;
import seedu.address.logic.commands.exceptions.CommandException;
import seedu.address.logic.parser.exceptions.ParseException;
/**
 * The UI component that is responsible for receiving user command inputs.
 */
public class CommandBox extends UiPart<Region> {

    public static final String ERROR_STYLE_CLASS = "error";
    private static final String FXML = "CommandBox.fxml";

    private final CommandExecutor commandExecutor;

    @FXML
    private TextField commandTextField;

    /**
     * Creates a {@code CommandBox} with the given {@code CommandExecutor}.
     */
    public CommandBox(CommandExecutor commandExecutor) {
        super(FXML);
        this.commandExecutor = commandExecutor;
        // Any edit to the text resets the styling back to default, clearing a
        // possible error indication from a previous failed command.
        commandTextField.textProperty().addListener((observable, oldValue, newValue) -> setStyleToDefault());
    }

    /**
     * Handles the Enter button pressed event: runs the typed command and
     * clears the box on success, or flags the box on failure.
     */
    @FXML
    private void handleCommandEntered() {
        final String input = commandTextField.getText();
        if (input.equals("")) {
            // Nothing typed; nothing to execute.
            return;
        }
        try {
            commandExecutor.execute(input);
            commandTextField.setText("");
        } catch (CommandException | ParseException e) {
            // Keep the failed text so the user can correct it; flag it visually.
            setStyleToIndicateCommandFailure();
        }
    }

    /**
     * Sets the command box style to use the default style.
     */
    private void setStyleToDefault() {
        commandTextField.getStyleClass().remove(ERROR_STYLE_CLASS);
    }

    /**
     * Sets the command box style to indicate a failed command.
     */
    private void setStyleToIndicateCommandFailure() {
        ObservableList<String> styles = commandTextField.getStyleClass();
        // Avoid stacking duplicate "error" entries on repeated failures.
        if (!styles.contains(ERROR_STYLE_CLASS)) {
            styles.add(ERROR_STYLE_CLASS);
        }
    }

    /**
     * Represents a function that can execute commands.
     */
    @FunctionalInterface
    public interface CommandExecutor {
        /**
         * Executes the command and returns the result.
         *
         * @see seedu.address.logic.Logic#execute(String)
         */
        CommandResult execute(String commandText) throws CommandException, ParseException;
    }
}
|
package com.tinkerpop.rexster.client;
import com.tinkerpop.rexster.protocol.msg.ErrorResponseMessage;
import com.tinkerpop.rexster.protocol.msg.RexProMessage;
import com.tinkerpop.rexster.protocol.msg.ScriptRequestMessage;
import com.tinkerpop.rexster.protocol.msg.ScriptResponseMessage;
import com.tinkerpop.rexster.protocol.serializer.msgpack.MsgPackSerializer;
import org.apache.commons.configuration.Configuration;
import org.apache.log4j.Logger;
import org.glassfish.grizzly.Connection;
import org.glassfish.grizzly.GrizzlyFuture;
import org.glassfish.grizzly.nio.NIOConnection;
import org.glassfish.grizzly.nio.transport.TCPNIOTransport;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
public class RexsterClient {
private static final Logger logger = Logger.getLogger(RexsterClient.class);
private final NIOConnection[] connections;
private int currentConnection = 0;
private final int timeoutConnection;
private final int timeoutWrite;
private final int timeoutRead;
private final int retries;
private final int waitBetweenRetries;
private final int asyncWriteQueueMaxBytes;
private final String language;
private final String graphName;
private final String graphObjName;
private final boolean transaction;
private final TCPNIOTransport transport;
private final String[] hosts;
private final int port;
private byte serializer;
protected static ConcurrentHashMap<UUID, ArrayBlockingQueue<Object>> responses = new ConcurrentHashMap<UUID, ArrayBlockingQueue<Object>>();
/**
 * Immutable pairing of a RexPro message with the serializer id that should
 * be used to encode it when the container is handed to the transport filter.
 */
static class MessageContainer {
    private final byte serializer;
    private final RexProMessage message;

    MessageContainer(byte serializer, RexProMessage message) {
        this.serializer = serializer;
        this.message = message;
    }

    /** @return the serializer id to encode the wrapped message with */
    byte getSerializer() {
        return serializer;
    }

    /** @return the wrapped RexPro message */
    RexProMessage getMessage() {
        return message;
    }
}
protected RexsterClient(final Configuration configuration, final TCPNIOTransport transport) {
this.timeoutConnection = configuration.getInt(RexsterClientTokens.CONFIG_TIMEOUT_CONNECTION_MS);
this.timeoutRead = configuration.getInt(RexsterClientTokens.CONFIG_TIMEOUT_READ_MS);
this.timeoutWrite = configuration.getInt(RexsterClientTokens.CONFIG_TIMEOUT_WRITE_MS);
this.retries = configuration.getInt(RexsterClientTokens.CONFIG_MESSAGE_RETRY_COUNT);
this.waitBetweenRetries = configuration.getInt(RexsterClientTokens.CONFIG_MESSAGE_RETRY_WAIT_MS);
this.asyncWriteQueueMaxBytes = configuration.getInt(RexsterClientTokens.CONFIG_MAX_ASYNC_WRITE_QUEUE_BYTES);
this.language = configuration.getString(RexsterClientTokens.CONFIG_LANGUAGE);
this.graphName = configuration.getString(RexsterClientTokens.CONFIG_GRAPH_NAME);
this.graphObjName = configuration.getString(RexsterClientTokens.CONFIG_GRAPH_OBJECT_NAME);
this.transaction= configuration.getBoolean(RexsterClientTokens.CONFIG_TRANSACTION);
this.transport = transport;
this.port = configuration.getInt(RexsterClientTokens.CONFIG_PORT);
this.hosts = configuration.getStringArray(RexsterClientTokens.CONFIG_HOSTNAME);
this.serializer = configuration.getByte(RexsterClientTokens.CONFIG_SERIALIZER, MsgPackSerializer.SERIALIZER_ID);
this.connections = new NIOConnection[this.hosts.length];
}
/**
* Sends a RexProMessage, and returns the received RexProMessage response.
*
* This method is for low-level operations with RexPro only.
*
* @param rawMessage message to send.
*/
public RexProMessage execute(final RexProMessage rawMessage) throws RexProException, IOException {
final ArrayBlockingQueue<Object> responseQueue = new ArrayBlockingQueue<Object>(1);
final UUID requestId = rawMessage.requestAsUUID();
responses.put(requestId, responseQueue);
try {
this.sendRequest(rawMessage);
} catch (Throwable t) {
throw new IOException(t);
}
Object resultMessage;
try {
final long beginTime = System.currentTimeMillis();
resultMessage = responseQueue.poll(this.timeoutRead - (System.currentTimeMillis() - beginTime), TimeUnit.MILLISECONDS);
} catch (Exception ex) {
responses.remove(requestId);
throw new IOException(ex);
}
responses.remove(requestId);
if (resultMessage == null) {
throw new IOException(String.format("Message received response timeoutConnection (%s s)", this.timeoutConnection));
} else if (!(resultMessage instanceof RexProMessage)) {
logger.error(String.format("Rexster returned a message of type [%s]", resultMessage.getClass().getName()));
throw new RexProException("RexsterClient doesn't support the message type returned.");
}
return (RexProMessage) resultMessage;
}
/**
* Send a script to a RexPro Server for execution and return the result. No bindings are specified.
*
* @param script the script to execute
*/
public <T> List<T> execute(final String script) throws RexProException, IOException {
return execute(script, null);
}
/**
* Send a script to a RexPro Server for execution and return the result.
*
* Be sure that arguments sent are serializable by MsgPack or the object will not be bound properly on the
* server. For example a complex object like java.util.Date will simply be serialized via toString and
* therefore will be referenced as such when accessed via the Gremlin script.
*
* @param script the script to execute
* @param scriptArgs the map becomes bindings.
*/
public <T> List<T> execute(final String script, final Map<String, Object> scriptArgs) throws RexProException, IOException {
final ArrayBlockingQueue<Object> responseQueue = new ArrayBlockingQueue<Object>(1);
final RexProMessage msgToSend = createNoSessionScriptRequest(script, scriptArgs);
final UUID requestId = msgToSend.requestAsUUID();
responses.put(requestId, responseQueue);
try {
this.sendRequest(msgToSend);
} catch (Throwable t) {
throw new IOException(t);
}
Object resultMessage;
try {
final long beginTime = System.currentTimeMillis();
resultMessage = responseQueue.poll(this.timeoutRead - (System.currentTimeMillis() - beginTime), TimeUnit.MILLISECONDS);
} catch (Exception ex) {
responses.remove(requestId);
throw new IOException(ex);
}
responses.remove(requestId);
if (resultMessage == null) {
throw new IOException(String.format("Message received response timeoutConnection (%s s)", this.timeoutConnection));
}
if (resultMessage instanceof ScriptResponseMessage) {
final ScriptResponseMessage msg = (ScriptResponseMessage) resultMessage;
// when rexster returns an iterable it's read out of the unpacker as a single object much like a single
// vertex coming back from rexster. basically, this is the difference between g.v(1) and g.v(1).map.
// the latter returns an iterable essentially putting a list inside of the results list here on the
// client side. the idea here is to normalize all results to a list on the client side, and therefore,
// iterables like those from g.v(1).map need to be unrolled into the results list. Prefer this to
// doing it on the server, because the server should return what is asked of it, in case other clients
// want to process this differently.
final List<T> results = new ArrayList<T>();
if (msg.Results.get() instanceof Iterable) {
final Iterator<T> itty = ((Iterable) msg.Results.get()).iterator();
while(itty.hasNext()) {
results.add(itty.next());
}
} else {
results.add((T)msg.Results.get());
}
return results;
} else if (resultMessage instanceof ScriptResponseMessage) {
final ScriptResponseMessage msg = (ScriptResponseMessage) resultMessage;
final List<T> results = new ArrayList<T>();
for (String line : (String[]) msg.Results.get()) {
results.add((T) line);
}
return results;
}else if (resultMessage instanceof ErrorResponseMessage) {
logger.warn(String.format("Rexster returned an error response for [%s] with params [%s]",
script, scriptArgs));
throw new RexProException(((ErrorResponseMessage) resultMessage).ErrorMessage);
} else {
logger.error(String.format("Rexster returned a message of type [%s]", resultMessage.getClass().getName()));
throw new RexProException("RexsterClient doesn't support the message type returned.");
}
}
static void putResponse(final RexProMessage response) throws Exception {
final UUID requestId = response.requestAsUUID();
if (!responses.containsKey(requestId)) {
// probably a timeout if we get here... ???
logger.warn(String.format("No queue found in the response map: %s", requestId));
return;
}
try {
final ArrayBlockingQueue<Object> queue = responses.get(requestId);
if (queue != null) {
queue.put(response);
}
else {
// no queue for some reason....why ???
logger.error(String.format("No queue found in the response map: %s", requestId));
}
}
catch (InterruptedException e) {
// just trap this one ???
logger.error("Error reading the queue in the response map.", e);
}
}
private NIOConnection nextConnection() {
synchronized(connections) {
if (currentConnection == Integer.MAX_VALUE) { currentConnection = 0; }
currentConnection = (currentConnection + 1) % hosts.length;
final NIOConnection connection = connections[currentConnection];
if (connection == null || !connection.isOpen()) {
connections[currentConnection] = openConnection(this.hosts[currentConnection]);
}
return connections[currentConnection];
}
}
private NIOConnection openConnection(final String host) {
try {
final Future<Connection> future = this.transport.connect(host, port);
final NIOConnection connection = (NIOConnection) future.get(this.timeoutConnection, TimeUnit.MILLISECONDS);
connection.setMaxAsyncWriteQueueSize(asyncWriteQueueMaxBytes);
return connection;
} catch (Exception e) {
return null;
}
}
private void sendRequest(final RexProMessage toSend) throws Exception {
boolean sent = false;
int tries = this.retries;
while (tries > 0 && !sent) {
try {
final NIOConnection connection = nextConnection();
if (connection != null && connection.isOpen()) {
final GrizzlyFuture future = connection.write(new MessageContainer(serializer, toSend));
future.get(this.timeoutWrite, TimeUnit.MILLISECONDS);
sent = true;
} else {
throw new Exception("Connection was not open. Ensure that Rexster Server is running/reachable.");
}
} catch (Exception ex) {
logger.error(String.format("Request failed. Retry attempt [%s]", (this.retries - tries) + 1), ex);
tries
final UUID requestId = toSend.requestAsUUID();
if (tries == 0) {
responses.remove(requestId);
} else {
Thread.sleep(this.waitBetweenRetries);
}
}
}
if (!sent) {
throw new Exception("Could not send message.");
}
}
public void close() throws IOException {
RexsterClientFactory.removeClient(this);
}
public void closeClientAndConnections() throws IOException {
close();
for ( NIOConnection c : this.connections ) {
c.closeSilently();
}
}
private ScriptRequestMessage createNoSessionScriptRequest(final String script,
final Map<String, Object> scriptArguments) throws IOException, RexProException {
final ScriptRequestMessage scriptMessage = new ScriptRequestMessage();
scriptMessage.Script = script;
scriptMessage.LanguageName = this.language;
scriptMessage.metaSetGraphName(this.graphName);
scriptMessage.metaSetGraphObjName(this.graphObjName);
scriptMessage.metaSetInSession(false);
scriptMessage.metaSetTransaction(this.transaction);
scriptMessage.setRequestAsUUID(UUID.randomUUID());
scriptMessage.validateMetaData();
//attach bindings
if (scriptArguments != null) {
scriptMessage.Bindings.putAll(scriptArguments);
}
return scriptMessage;
}
public byte getSerializer() {
return serializer;
}
public void setSerializer(byte serializer) {
this.serializer = serializer;
}
}
|
package seedu.geekeep.ui;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.logging.Logger;
import javafx.fxml.FXML;
import javafx.scene.control.SplitPane;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Region;
import seedu.geekeep.commons.core.LogsCenter;
import seedu.geekeep.commons.events.ui.NewResultAvailableEvent;
import seedu.geekeep.commons.util.FxViewUtil;
import seedu.geekeep.logic.Logic;
import seedu.geekeep.logic.commands.CommandResult;
import seedu.geekeep.logic.commands.exceptions.CommandException;
public class CommandBox extends UiPart<Region> {
private final Logger logger = LogsCenter.getLogger(CommandBox.class);
private static final String FXML = "CommandBox.fxml";
public static final String ERROR_STYLE_CLASS = "error";
private final Logic logic;
@FXML
private TextField commandTextField;
private Optional<String> currentPrefix = Optional.empty();
private int commandHistoryIndex;
private List<String> matchingCommands = new ArrayList<>();
private int matchingCommandIndex;
public CommandBox(AnchorPane commandBoxPlaceholder, Logic logic) {
super(FXML);
this.logic = logic;
addToPlaceholder(commandBoxPlaceholder);
addHistoryEventHandler();
}
private void addHistoryEventHandler() {
commandTextField.addEventFilter(KeyEvent.KEY_PRESSED, event -> {
if (!(event.getCode().equals(KeyCode.UP) || event.getCode().equals(KeyCode.DOWN))) {
currentPrefix = Optional.empty();
return;
}
String commandText = commandTextField.getText();
int caretPosition = commandTextField.getCaretPosition();
String prefix = commandText.substring(0, caretPosition);
if (!currentPrefix.equals(Optional.of(prefix))) {
currentPrefix = Optional.of(prefix);
commandHistoryIndex = getCommandHistory().size() - 1;
matchingCommands.clear();
matchingCommands.add(commandText);
matchingCommandIndex = 0;
}
Optional<String> matchingCommand;
if (event.getCode().equals(KeyCode.UP)) {
matchingCommand = findPreviousMatchingCommand(prefix, commandText);
} else {
matchingCommand = findNextMatchingCommand(prefix);
}
if (matchingCommand.isPresent()) {
commandTextField.setText(matchingCommand.get());
commandTextField.positionCaret(caretPosition);
}
event.consume();
});
}
private Optional<String> findPreviousMatchingCommand(String prefix, String commandText) {
logger.fine("Finding previous command that starts with \"" + prefix + "\"");
if (matchingCommandIndex + 1 < matchingCommands.size()) {
matchingCommandIndex++;
return Optional.of(matchingCommands.get(matchingCommandIndex));
}
List<String> commandHistory = getCommandHistory();
while (commandHistoryIndex >= 0) {
String command = commandHistory.get(commandHistoryIndex);
commandHistoryIndex
if (command.startsWith(prefix) && !command.equals(commandText)) {
matchingCommands.add(command);
matchingCommandIndex++;
return Optional.of(command);
}
}
return Optional.empty();
}
private Optional<String> findNextMatchingCommand(String prefix) {
logger.fine("Finding next command that starts with \"" + prefix + "\"");
if (matchingCommandIndex == 0) {
return Optional.empty();
}
matchingCommandIndex
return Optional.of(matchingCommands.get(matchingCommandIndex));
}
private List<String> getCommandHistory() {
return logic.getCommandHistory();
}
private void addToPlaceholder(AnchorPane placeHolderPane) {
SplitPane.setResizableWithParent(placeHolderPane, false);
placeHolderPane.getChildren().add(commandTextField);
FxViewUtil.applyAnchorBoundaryParameters(getRoot(), 0.0, 0.0, 0.0, 0.0);
FxViewUtil.applyAnchorBoundaryParameters(commandTextField, 0.0, 0.0, 0.0, 0.0);
}
@FXML
private void handleCommandInputChanged() {
try {
CommandResult commandResult = logic.execute(commandTextField.getText());
// process result of the command
setStyleToIndicateCommandSuccess();
commandTextField.setText("");
logger.info("Result: " + commandResult.feedbackToUser);
raise(new NewResultAvailableEvent(commandResult.feedbackToUser));
} catch (CommandException e) {
// handle command failure
setStyleToIndicateCommandFailure();
logger.info("Invalid command: " + commandTextField.getText());
raise(new NewResultAvailableEvent(e.getMessage()));
}
}
/**
* Sets the command box style to indicate a successful command.
*/
private void setStyleToIndicateCommandSuccess() {
commandTextField.getStyleClass().remove(ERROR_STYLE_CLASS);
}
/**
* Sets the command box style to indicate a failed command.
*/
private void setStyleToIndicateCommandFailure() {
commandTextField.getStyleClass().add(ERROR_STYLE_CLASS);
}
}
|
package com.github.database.rider.core;
import com.github.database.rider.core.DataSetProviderIt.TweetDataSetProvider;
import com.github.database.rider.core.api.dataset.DataSet;
import com.github.database.rider.core.api.dataset.DataSetExecutor;
import com.github.database.rider.core.api.dataset.DataSetProvider;
import com.github.database.rider.core.api.dataset.ExpectedDataSet;
import com.github.database.rider.core.api.exporter.ExportDataSet;
import com.github.database.rider.core.configuration.DataSetConfig;
import com.github.database.rider.core.connection.ConnectionHolderImpl;
import com.github.database.rider.core.dataset.DataSetExecutorImpl;
import com.github.database.rider.core.dataset.builder.ColumnSpec;
import com.github.database.rider.core.dataset.builder.RowBuilder;
import com.github.database.rider.core.dataset.builder.DataSetBuilder;
import com.github.database.rider.core.model.Tweet;
import com.github.database.rider.core.model.User;
import com.github.database.rider.core.model.User_;
import com.github.database.rider.core.util.EntityManagerProvider;
import org.dbunit.dataset.CompositeDataSet;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.IDataSet;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import static com.github.database.rider.core.util.EntityManagerProvider.em;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration tests for programmatic dataset seeding via {@link DataSetProvider}
 * implementations and the {@link DataSetBuilder} fluent API.
 */
@RunWith(JUnit4.class)
@DataSet(provider = TweetDataSetProvider.class, cleanBefore = true)
public class DataSetProviderIt {

    @Rule
    public EntityManagerProvider emProvider = EntityManagerProvider.instance("rules-it");

    @Rule
    public DBUnitRule dbUnitRule = DBUnitRule.instance(emProvider.connection());

    @Test
    @DataSet(provider = UserDataSetProvider.class, cleanBefore = true)
    @ExportDataSet(outputName = "target/out.yml")
    public void shouldSeedDatabaseProgrammatically() {
        List<User> users = EntityManagerProvider.em().createQuery("select u from User u ").getResultList();
        assertThat(users).
                isNotNull().
                isNotEmpty().hasSize(2).
                extracting("name").
                contains("@dbunit", "@dbrider");
    }

    @Test
    @ExportDataSet(outputName = "target/out2.yml")
    @DataSet(provider = UserDataSetWithMetaModelProvider.class, cleanBefore = true)
    public void shouldSeedDatabaseProgrammaticallyUsingMetaModel() {
        List<User> users = em().createQuery("select u from User u ").getResultList();
        assertThat(users).
                isNotNull().
                isNotEmpty().hasSize(2).
                extracting("name").
                contains("@dbunit", "@dbrider");
    }

    @Test
    @DataSet(provider = UserDataSetProviderWithColumnsSyntax.class)
    @ExportDataSet(outputName = "target/out3.yml")
    public void shouldSeedDatabaseUsingDataSetProviderWithColumnsSyntax() {
        List<User> users = EntityManagerProvider.em().createQuery("select u from User u ").getResultList();
        assertThat(users).
                isNotNull().
                isNotEmpty().hasSize(2).
                extracting("name").
                contains("@dbunit", "@dbrider");
    }

    @Test
    @DataSet(provider = BrokenReferentialConstraintProvider.class, disableConstraints = true)
    public void shouldSeedDataSetDisablingContraints() {
        List<User> users = em().createQuery("select u from User u ").getResultList();
        assertThat(users).
                isNotNull().
                isNotEmpty().hasSize(3).
                extracting("name").
                contains("@dbunit", "@dbrider", "@new row");
    }

    @Test
    @DataSet(provider = UserDataSetProvider.class, cleanBefore = true, transactional = true)
    @ExpectedDataSet(provider = ExpectedUserProvider.class, ignoreCols = "id")
    public void shouldMatchExpectedDataSetUsingDataSetProvider() {
        Long count = (Long) EntityManagerProvider.em().createQuery("select count(u) from User u ").getSingleResult();
        assertThat(count).isEqualTo(2);
        em().remove(EntityManagerProvider.em().find(User.class, 1L));
        //assertThat(count).isEqualTo(1); //assertion is in expectedDataSet
    }

    @Test
    public void shouldSeedDataSetUsingClassLevelDataSetProvider() {
        // Seeded by the class-level @DataSet(provider = TweetDataSetProvider.class).
        Tweet tweet = (Tweet) EntityManagerProvider.em().createQuery("select t from Tweet t where t.id = 'abcdef12345'").getSingleResult();
        assertThat(tweet).isNotNull()
                .extracting("content")
                .contains("dbrider rules!");
    }

    @Test
    @DataSet(provider = CompositeDataSetProvider.class)
    public void shouldSeedDataSetUsingCompositeDataSetProvider() {
        List<User> users = EntityManagerProvider.em().createQuery("select u from User u ").getResultList();
        assertThat(users).
                isNotNull().
                isNotEmpty().hasSize(2).
                extracting("name").
                contains("@dbunit", "@dbrider");
        Tweet tweet = (Tweet) EntityManagerProvider.em().createQuery("select t from Tweet t where t.id = 'abcdef12345'").getSingleResult();
        assertThat(tweet).isNotNull()
                .extracting("content")
                .contains("dbrider rules!");
    }

    @Test
    public void shouldSeedDatabaseUsingDataSetProviderWithoutAnnotatation() throws SQLException {
        try (Connection conn = EntityManagerProvider.instance("executor-it").connection()) {
            DataSetExecutor executor = DataSetExecutorImpl.instance("executor-name", new ConnectionHolderImpl(conn));
            // Fix: local variable was named "DataSetConfig", identically to its
            // own type, shadowing the type name; renamed to lowerCamelCase.
            DataSetConfig dataSetConfig = new DataSetConfig()
                    .datasetProvider(UserDataSetProvider.class)
                    .disableConstraints(true);
            executor.clearDatabase(dataSetConfig);
            executor.createDataSet(dataSetConfig);
            List<User> users = EntityManagerProvider.em("executor-it").createQuery("select u from User u ").getResultList();
            assertThat(users).
                    isNotNull().
                    isNotEmpty().hasSize(2).
                    extracting("name").
                    contains("@dbunit", "@dbrider");
        }
    }

    @Test
    @DataSet(provider = ReuseRowsAndDataSetsProvider.class, cleanBefore = true)
    public void shouldReuseRowsAndDataSets() {
        List<User> users = EntityManagerProvider.em().createQuery("select u from User u ").getResultList();
        assertThat(users).
                isNotNull().
                isNotEmpty().hasSize(2).
                extracting("name").
                contains("user1", "user2");
        Tweet tweet = (Tweet) EntityManagerProvider.em().createQuery("select t from Tweet t where t.id = 'abcdef12345'").getSingleResult();
        assertThat(tweet).isNotNull()
                .extracting("content")
                .contains("dbrider rules!");
    }

    @Test
    @DataSet(provider = DefaultValueDataSetProvider.class)
    public void shouldSeedDatabaseWithDefaultValues() {
        List<User> users = EntityManagerProvider.em().createQuery("select u from User u ").getResultList();
        assertThat(users).
                isNotNull().
                isNotEmpty().hasSize(2).
                extracting("name").
                contains("@realpestano", "DEFAULT");
    }

    /** Seeds two users using the row/column builder syntax. */
    public static class UserDataSetProvider implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder();
            builder.table("user")
                    .row()
                        .column("id", 1)
                        .column("name", "@dbunit")
                    .row()
                        .column("id", 2)
                        .column("name", "@dbrider").build();
            return builder.build();
        }
    }

    /** Seeds two users using the columns/values builder syntax. */
    public static class UserDataSetProviderWithColumnsSyntax implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder();
            IDataSet iDataSet = builder.table("user")
                    .columns("id", "name")
                    .values(1, "@dbunit")
                    .values(2, "@dbrider").build();
            return iDataSet;
        }
    }

    /** Seeds two users using JPA metamodel attributes as column references. */
    public static class UserDataSetWithMetaModelProvider implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder();
            return builder.table("user")
                    .row()
                        .column(User_.id, 1)
                        .column(User_.name, "@dbunit")
                    .row()
                        .column(User_.id, 2)
                        .column(User_.name, "@dbrider").build();
        }
    }

    /** Expected state after deleting user 1: only "@dbrider" remains. */
    public static class ExpectedUserProvider implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder();
            builder.table("user")
                    .row()
                        .column("id", 2)
                        .column("name", "@dbrider");
            return builder.build();
        }
    }

    /** Seeds one tweet; "[DAY,NOW]" is a DBUnit Rider date placeholder. */
    public static class TweetDataSetProvider implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder();
            builder.table("TWEET")
                    .row()
                        .column("ID", "abcdef12345").column("CONTENT", "dbrider rules!")
                        .column("DATE", "[DAY,NOW]");
            return builder.build();
        }
    }

    /**
     * Deliberately seeds a FOLLOWER row referencing a nonexistent user, so the
     * test can verify disableConstraints = true.
     */
    public static class BrokenReferentialConstraintProvider implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder();
            ColumnSpec id = ColumnSpec.of("ID");
            ColumnSpec name = ColumnSpec.of("NAME");
            IDataSet dataSet = builder
                    .table("USER") //start adding rows to 'USER' table
                        .row()
                            .column("ID", 1)
                            .column(name, "@dbunit")
                        .row() //keeps adding rows to the current table
                            .column(id, 2)
                            .column("NAME", "@dbrider")
                    .table("TWEET") //starts adding rows to 'TWEET' table
                        .row()
                            .column("ID", "abcdef12345")
                            .column("CONTENT", "dbunit rules!")
                            .column("DATE", "[DAY,NOW]")
                    .table("FOLLOWER")
                        .row()
                            .column(id, 1)
                            .column("USER_ID", 9999)
                            .column("FOLLOWER_ID", 9999)
                    .table("USER")// we still can add rows to table already added to the dataset
                        .row()
                            .column("ID", 3)
                            .column(name, "@new row")
                    .build();
            return dataSet;
        }
    }

    /** Combines the user and tweet datasets into one composite dataset. */
    public static class CompositeDataSetProvider implements DataSetProvider {

        @Override
        public IDataSet provide() throws DataSetException {
            IDataSet userDataSet = new UserDataSetProvider().provide();
            IDataSet tweetDataSet = new TweetDataSetProvider().provide();
            return new CompositeDataSet(userDataSet, tweetDataSet);
        }
    }

    /** Demonstrates reusing pre-built RowBuilders and whole datasets. */
    public static class ReuseRowsAndDataSetsProvider implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder();
            RowBuilder user1Row = new DataSetBuilder().table("USER")
                    .row()
                        .column("id", "1")
                        .column("name", "user1");
            RowBuilder user2Row = new DataSetBuilder().table("USER")
                    .row()
                        .column("id", "2")
                        .column("name", "user2");
            IDataSet iDataSet = builder.add(user1Row).add(user2Row)
                    .addDataSet(new TweetDataSetProvider().provide())
                    .build();
            return iDataSet;
        }
    }

    /** Second row omits NAME, so the builder's default value "DEFAULT" applies. */
    public static class DefaultValueDataSetProvider implements DataSetProvider {

        @Override
        public IDataSet provide() {
            DataSetBuilder builder = new DataSetBuilder()
                    .defaultValue("NAME", "DEFAULT");
            ColumnSpec id = ColumnSpec.of("ID");
            return builder.table("USER")
                    .row()
                        .column(id, 1)
                        .column("NAME", "@realpestano")
                    .row()
                        .column(id, 2)
                    .build();
        }
    }
}
|
package sparqles.utils.cli;
import java.io.File;
import java.util.Collection;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sparqles.analytics.AAnalyserInit;
import sparqles.analytics.IndexViewAnalytics;
import sparqles.core.CONSTANTS;
import sparqles.core.Endpoint;
import sparqles.core.SPARQLESProperties;
import sparqles.schedule.Schedule;
import sparqles.schedule.Scheduler;
import sparqles.utils.DatahubAccess;
import sparqles.utils.DateFormater;
import sparqles.utils.MongoDBManager;
/**
* Main CLI class for the SPARQL Endpoint status program
* @author UmbrichJ
*
*/
public class SPARQLES extends CLIObject{
private static final Logger log = LoggerFactory.getLogger(SPARQLES.class);
private Scheduler scheduler;
private MongoDBManager dbm;
@Override
public String getDescription() {
return "Start and control SPARQLES";
}
@Override
protected void addOptions(Options opts) {
opts.addOption(ARGUMENTS.OPTION_PROP_FILE);
opts.addOption(ARGUMENTS.OPTION_INIT);
opts.addOption(ARGUMENTS.OPTION_START);
opts.addOption(ARGUMENTS.OPTION_RECOMPUTE);
opts.addOption(ARGUMENTS.OPTION_RESCHEDULE);
opts.addOption(ARGUMENTS.OPTION_RUN);
}
@Override
protected void execute(CommandLine cmd) {
parseCMD(cmd);
//reinitialise datahub
if( CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_INIT)){
//check the endpoint list
Collection<Endpoint> eps = DatahubAccess.checkEndpointList();
dbm.initEndpointCollection();
dbm.insert(eps);
}
if( CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RESCHEDULE)){
Collection<Schedule> epss = Scheduler.createDefaultSchedule(dbm);
dbm.initScheduleCollection();
dbm.insert(epss);
}
if( CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_RECOMPUTE)){
recomputeAnalytics();
}
if( CLIObject.hasOption(cmd, ARGUMENTS.PARAM_RUN)){
String task = CLIObject.getOptionValue(cmd, ARGUMENTS.PARAM_RUN).trim();
if(task.equalsIgnoreCase(CONSTANTS.ITASK)){
IndexViewAnalytics a = new IndexViewAnalytics();
a.setDBManager(dbm);
a.execute();
}
}
if( CLIObject.hasOption(cmd, ARGUMENTS.PARAM_FLAG_START)){
start();
}
Runtime.getRuntime().addShutdownHook (new ShutdownThread(this));
}
private void recomputeAnalytics() {
dbm.initAggregateCollections();
AAnalyserInit a = new AAnalyserInit(dbm);
a.run();
}
private void start() {
// epm.init(dbm);
scheduler.init(dbm);
try {
long start = System.currentTimeMillis();
while (true) {
log.info("Running since {}", DateFormater.formatInterval(System.currentTimeMillis()-start));
Thread.sleep (1800000);
}
}catch (Throwable t) {
t.printStackTrace();
}
}
private void parseCMD(CommandLine cmd) {
//load the Properties
if( CLIObject.hasOption(cmd, ARGUMENTS.PARAM_PROP_FILE)){
File propFile = new File(CLIObject.getOptionValue(cmd, ARGUMENTS.PARAM_PROP_FILE));
if(propFile.exists()){
log.info("Reading properties from {}",propFile);
SPARQLESProperties.init(propFile);
}else{
log.warn("Specified property file ({}) does not exist", propFile);
}
}
setup(true,true);
}
public void init(String[] arguments) {
CommandLine cmd = verifyArgs(arguments);
parseCMD(cmd);
}
private void setup(boolean useDB, boolean useFM) {
//Init the scheduler
scheduler = new Scheduler();
if(useDB){
dbm = new MongoDBManager();
scheduler.useDB(dbm);
}
scheduler.useFileManager(useFM);
}
public void stop() {
log.info("[START] [SHUTDOWN] Shutting down the system");
scheduler.close();
log.info("[SUCCESS] [SHUTDOWN] Everything closed normally");
}
class ShutdownThread extends Thread{
private SPARQLES _s;
public ShutdownThread(SPARQLES s) {
_s=s;
}
@Override
public void run() {
_s.stop();
}
}
}
|
package uk.bl.wa.blindex;
import java.io.IOException;
import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.hadoop.Solate;
import org.apache.solr.hadoop.SolrInputDocumentWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import au.com.bytecode.opencsv.CSVParser;
/**
*
* @author Andrew Jackson <Andrew.Jackson@bl.uk>
*
*/
public class IndexerJob {
private static final Logger LOG = LoggerFactory.getLogger(IndexerJob.class);
protected static String solrHomeZipName = "solr_home.zip";
/**
 * This mapper parses the input table, downloads the relevant XML, parses
 * the content into Solr documents, computes the target SolrCloud slice and
 * passes them down to the reducer.
 *
 * Input records are CSV lines; output key is the target shard number and
 * the value is the Solr document destined for that shard.
 *
 * @author Andrew Jackson <Andrew.Jackson@bl.uk>
 */
public static class Map extends MapReduceBase implements
        Mapper<LongWritable, Text, IntWritable, SolrInputDocumentWritable> {

    private CSVParser p = new CSVParser();
    // Computes the SolrCloud shard for a document id (partitioner helper).
    private Solate sp;
    // Base URL the DOM id is appended to in order to fetch the source XML.
    private String domidUrlPrefix;

    /*
     * (non-Javadoc)
     *
     * @see
     * org.apache.hadoop.mapred.MapReduceBase#configure(org.apache.hadoop
     * .mapred.JobConf)
     */
    @Override
    public void configure(JobConf job) {
        super.configure(job);
        // NOTE(review): ZooKeeper quorum, collection name, shard count and
        // the XML fetch URL are hard-coded here rather than read from the
        // JobConf — consider making them job parameters.
        String zkHost = "openstack2.ad.bl.uk:2181,openstack4.ad.bl.uk:2181,openstack5.ad.bl.uk:2181/solr";
        String collection = "jisc2";
        int numShards = 4;
        sp = new Solate(zkHost, collection, numShards);
        domidUrlPrefix = "http://194.66.239.142/did/";
    }

    /**
     * Parses one CSV line, fetches and text-extracts the referenced XML,
     * and emits one Solr document per extracted page keyed by target shard.
     * Lines whose first field is "entityid" are treated as the header and
     * skipped; extraction failures are logged to stderr and the record is
     * dropped.
     */
    public void map(LongWritable key, Text value,
            OutputCollector<IntWritable, SolrInputDocumentWritable> output,
            Reporter reporter)
            throws IOException {
        // String[] parts = value.toString().split("\\x01");
        String[] parts = p.parseLine(value.toString());

        // If this is the header line, return now:
        if ("entityid".equals(parts[0]))
            return;

        // Otherwise, grab the content info:
        // NOTE(review): column positions 1/3/5/8 are assumed to be
        // entityuid/simpletitle/originalname/domid — confirm against the
        // producing table's schema.
        String entityuid = parts[1];
        String simpletitle = parts[3];
        String originalname = parts[5];
        String domid = parts[8];

        // Construct URL:
        URL xmlUrl = new URL(domidUrlPrefix + domid);

        // Pass to the SAX-based parser to collect the outputs:
        List<String> docs = null;
        try {
            docs = JISC2TextExtractor.extract(xmlUrl.openStream());
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }

        for (int i = 0; i < docs.size(); i++) {
            // Skip empty records:
            if (docs.get(i).length() == 0)
                continue;

            // Build up a Solr document:
            // One document per page; id is "<entityuid>/p<page index>".
            String doc_id = entityuid + "/p" + i;
            SolrInputDocument doc = new SolrInputDocument();
            doc.setField("id", doc_id);
            doc.setField("simpletitle_s", simpletitle);
            doc.setField("originalname_s", originalname);
            doc.setField("domid_l", domid);
            doc.setField("page_i", i);
            doc.setField("content", docs.get(i));

            // Key by the shard this doc id hashes to, so the reducer for
            // each shard receives all of its documents.
            output.collect(new IntWritable(sp.getPartition(doc_id, doc)),
                    new SolrInputDocumentWritable(doc));
        }
    }
}
/**
 * This reducer collects the documents for each slice together and commits
 * them to an embedded instance of the Solr server stored on HDFS.
 *
 * @author Andrew Jackson <Andrew.Jackson@bl.uk>
 */
public static class Reduce extends MapReduceBase implements
        Reducer<IntWritable, SolrInputDocumentWritable, Text, IntWritable> {

    private FileSystem fs;
    private Path solrHomeDir = null;
    private Path outputDir;
    private String shardPrefix = "shard";

    /*
     * (non-Javadoc)
     *
     * @see
     * org.apache.hadoop.mapred.MapReduceBase#configure(org.apache.hadoop
     * .mapred.JobConf)
     */
    @Override
    public void configure(JobConf job) {
        LOG.info("Calling configure()...");
        super.configure(job);
        try {
            // Filesystem:
            fs = FileSystem.get(job);
            // Input: locate the unpacked Solr home from the distributed cache.
            solrHomeDir = findSolrConfig(job, solrHomeZipName);
            LOG.info("Found solrHomeDir " + solrHomeDir);
        } catch (IOException e) {
            // NOTE(review): solrHomeDir stays null here and reduce() will
            // fail later; consider rethrowing to fail fast.
            e.printStackTrace();
            LOG.error("FAILED in reducer configuration: " + e);
        }
        // Output: NOTE(review) hard-coded; consider making this a job property.
        outputDir = new Path("/user/admin/jisc2/solr/");
    }

    /**
     * Adds every document for this slice to an embedded Solr server for the
     * corresponding shard, then commits. Emits one (slice, 1) record per
     * document as a progress/count signal.
     */
    public void reduce(IntWritable key,
            Iterator<SolrInputDocumentWritable> values,
            OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        int slice = key.get();
        Path outputShardDir = new Path(outputDir, this.shardPrefix + slice);
        LOG.info("Running reducer for " + slice + " > " + outputShardDir);
        EmbeddedSolrServer solrServer = JISC2TextExtractor
                .createEmbeddedSolrServer(solrHomeDir, fs, outputDir,
                        outputShardDir);
        try {
            while (values.hasNext()) {
                SolrInputDocument doc = values.next().getSolrInputDocument();
                try {
                    solrServer.add(doc);
                } catch (SolrServerException e) {
                    // Best-effort: log the failed ADD and continue.
                    e.printStackTrace();
                    LOG.error("ADD " + e);
                }
                output.collect(new Text("" + key), new IntWritable(1));
            }
            solrServer.commit();
        } catch (SolrServerException e) {
            e.printStackTrace();
            LOG.error("COMMIT " + e);
        } finally {
            // Always release the embedded server; the original skipped
            // shutdown() when commit() threw, leaking the core.
            solrServer.shutdown();
        }
    }
}
/**
 * Locates the unpacked Solr home directory among the job's local cache
 * archives by matching its directory name against {@code zipName}.
 *
 * @param conf the job configuration holding the distributed-cache entries
 * @param zipName expected name of the unpacked archive directory
 * @return the matching path, or null when no archive matches
 * @throws IOException when the distributed cache holds no archives at all
 */
public static Path findSolrConfig(JobConf conf, String zipName)
        throws IOException {
    Path[] localArchives = DistributedCache.getLocalCacheArchives(conf);
    if (localArchives.length == 0) {
        LOG.error("No local cache archives.");
        throw new IOException(String.format("No local cache archives."));
    }
    Path solrHome = null;
    for (Path candidate : localArchives) {
        LOG.info("Looking at: " + candidate + " for " + zipName);
        if (zipName.equals(candidate.getName())) {
            LOG.info("Using this unpacked directory as solr home: {}",
                    candidate);
            solrHome = candidate;
            break;
        }
    }
    return solrHome;
}
/**
 * Job driver: configures and runs the JISC2 indexing job.
 *
 * c.f. SolrRecordWriter, SolrOutputFormat
 *
 * Cloudera Search defaults to: /solr/jisc2/core_node1 ...but note no
 * replicas, which is why the shard-to-core mapping looks easy.
 *
 * Take /user/admin/jisc2-xmls/000000_0 Read line-by-line Split on 0x01.
 *
 * NOTE(review): input/output paths come from args[1] and args[2] (args[0]
 * is unused here — confirm intended CLI shape), and the Solr home zip
 * location on HDFS is hard-coded.
 *
 * @param args CLI arguments; [1] = input path, [2] = output path
 * @throws Exception on any job setup or execution failure
 */
public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(IndexerJob.Map.class);
    conf.setJobName("JISC2_Indexer");
    // Map emits (shard-partition, document) pairs:
    conf.setMapOutputKeyClass(IntWritable.class);
    conf.setMapOutputValueClass(SolrInputDocumentWritable.class);
    conf.setMapperClass(Map.class);
    conf.setNumMapTasks(4);
    conf.setReducerClass(Reduce.class);
    // One reducer per Solr shard:
    conf.setNumReduceTasks(4);
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    // Get input and output folder from CLARGS:
    FileInputFormat.setInputPaths(conf, new Path(args[1]));
    FileOutputFormat.setOutputPath(conf, new Path(args[2]));
    // Speculative execution would double-write to the embedded Solr cores:
    conf.setSpeculativeExecution(false);
    // File solrHomeZip = new
    // File("src/main/resources/jisc2/solr_home.zip");
    Path zipPath = new Path("/user/admin/jisc2-xmls/solr_home.zip");
    FileSystem fs = FileSystem.get(conf);
    // fs.copyFromLocalFile(new Path(solrHomeZip.toString()), zipPath);
    // The '#<name>' fragment controls the unpacked directory name in the
    // distributed cache, matched later by findSolrConfig():
    final URI baseZipUrl = fs.getUri().resolve(
            zipPath.toString() + '#' + solrHomeZipName);
    DistributedCache.addCacheArchive(baseZipUrl, conf);
    LOG.debug("Set Solr distributed cache: {}",
            Arrays.asList(DistributedCache.getCacheArchives(conf)));
    LOG.debug("Set zipPath: {}", zipPath);
    JobClient.runJob(conf);
}
}
|
package org.duracloud.s3storage;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.Headers;
import com.amazonaws.services.s3.model.AccessControlList;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.BucketTaggingConfiguration;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.CopyObjectResult;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.model.TagSet;
import org.apache.commons.lang.StringUtils;
import org.duracloud.common.stream.ChecksumInputStream;
import org.duracloud.common.util.DateUtil;
import org.duracloud.storage.domain.ContentIterator;
import org.duracloud.storage.domain.StorageAccount;
import org.duracloud.storage.error.NotFoundException;
import org.duracloud.storage.error.StorageException;
import org.duracloud.storage.provider.StorageProvider;
import org.duracloud.storage.provider.StorageProviderBase;
import org.duracloud.storage.util.StorageProviderUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.duracloud.storage.error.StorageException.NO_RETRY;
import static org.duracloud.storage.error.StorageException.RETRY;
/**
* Provides content storage backed by Amazon's Simple Storage Service.
*
* @author Bill Branan
*/
public class S3StorageProvider extends StorageProviderBase {
private final Logger log = LoggerFactory.getLogger(S3StorageProvider.class);
protected static final int MAX_ITEM_COUNT = 1000;
private static final StorageClass DEFAULT_STORAGE_CLASS =
StorageClass.Standard;
private String accessKeyId = null;
protected AmazonS3Client s3Client = null;
private StorageClass storageClass = null;
/**
 * Creates a provider with a new S3 client built from the given credentials
 * and no extra options (default storage class).
 *
 * @param accessKey AWS access key ID; also used as the bucket-name prefix
 * @param secretKey AWS secret access key
 */
public S3StorageProvider(String accessKey, String secretKey) {
    this(S3ProviderUtil.getAmazonS3Client(accessKey, secretKey),
         accessKey,
         null);
}
/**
 * Creates a provider with a new S3 client built from the given credentials.
 *
 * @param accessKey AWS access key ID; also used as the bucket-name prefix
 * @param secretKey AWS secret access key
 * @param options provider options, e.g. STORAGE_CLASS (may be null)
 */
public S3StorageProvider(String accessKey,
                         String secretKey,
                         Map<String, String> options) {
    this(S3ProviderUtil.getAmazonS3Client(accessKey, secretKey),
         accessKey,
         options);
}
/**
 * Primary constructor: uses the supplied S3 client directly (also used by
 * tests to inject a mock client).
 *
 * @param s3Client the S3 client to use for all calls
 * @param accessKey AWS access key ID; used as the bucket-name prefix
 * @param options provider options, e.g. STORAGE_CLASS (may be null)
 */
public S3StorageProvider(AmazonS3Client s3Client,
                         String accessKey,
                         Map<String, String> options) {
    this.accessKeyId = accessKey;
    this.s3Client = s3Client;
    this.storageClass = getStorageClass(options);
}
/**
 * {@inheritDoc}
 *
 * Returns the IDs of all buckets belonging to this account that follow the
 * DuraCloud space naming convention.
 */
public Iterator<String> getSpaces() {
    log.debug("getSpaces()");
    List<String> spaceIds = new ArrayList<>();
    for (Bucket bucket : listAllBuckets()) {
        String name = bucket.getName();
        if (isSpace(name)) {
            spaceIds.add(getSpaceId(name));
        }
    }
    return spaceIds.iterator();
}
/*
 * Lists every bucket visible to this account, wrapping any client error in
 * a retryable StorageException.
 */
private List<Bucket> listAllBuckets() {
    try {
        return s3Client.listBuckets();
    }
    catch (AmazonClientException e) {
        String err = "Could not retrieve list of S3 buckets due to error: "
                + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}
/**
 * {@inheritDoc}
 *
 * Returns a lazy iterator that pages through the bucket contents via
 * getSpaceContentsChunked.
 *
 * @throws NotFoundException when the space does not exist
 */
public Iterator<String> getSpaceContents(String spaceId,
                                         String prefix) {
    // Fixed: original log call concatenated an unbalanced "(" with no
    // closing paren; parameterized form also avoids the concatenation cost
    // when debug logging is disabled.
    log.debug("getSpaceContents({}, {})", spaceId, prefix);
    throwIfSpaceNotExist(spaceId);
    return new ContentIterator(this, spaceId, prefix);
}
/**
 * {@inheritDoc}
 *
 * Returns up to maxResults content IDs from the space, starting after the
 * given marker; non-positive maxResults falls back to the default page size.
 */
public List<String> getSpaceContentsChunked(String spaceId,
                                            String prefix,
                                            long maxResults,
                                            String marker) {
    log.debug("getSpaceContentsChunked(" + spaceId + ", " + prefix + ", " +
        maxResults + ", " + marker + ")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    long limit =
        maxResults > 0 ? maxResults : StorageProvider.DEFAULT_MAX_RESULTS;
    return getCompleteBucketContents(bucketName, prefix, limit, marker);
}
/*
 * Collects the object keys from a single LIST call into a plain list.
 */
private List<String> getCompleteBucketContents(String bucketName,
                                               String prefix,
                                               long maxResults,
                                               String marker) {
    List<String> keys = new ArrayList<>();
    for (S3ObjectSummary summary :
            listObjects(bucketName, prefix, maxResults, marker)) {
        keys.add(summary.getKey());
    }
    return keys;
}
/*
 * Performs a single LIST call against the bucket and returns the object
 * summaries. Client errors are wrapped in a retryable StorageException.
 */
private List<S3ObjectSummary> listObjects(String bucketName,
                                          String prefix,
                                          long maxResults,
                                          String marker) {
    // Plain cast replaces the deprecated `new Long(maxResults).intValue()`;
    // truncation semantics are identical and callers pass int-sized pages.
    int numResults = (int) maxResults;
    ListObjectsRequest request =
        new ListObjectsRequest(bucketName, prefix, marker, null, numResults);
    try {
        ObjectListing objectListing = s3Client.listObjects(request);
        return objectListing.getObjectSummaries();
    } catch (AmazonClientException e) {
        String err = "Could not get contents of S3 bucket " + bucketName
            + " due to error: " + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}
/*
 * Reports whether a bucket exists for the given space ID; relies on
 * getBucketName throwing NotFoundException when no bucket matches.
 */
protected boolean spaceExists(String spaceId) {
    try {
        getBucketName(spaceId);
        return true;
    } catch(NotFoundException e) {
        return false;
    }
}
/**
 * {@inheritDoc}
 *
 * Creates the backing bucket, then records the creation date as a space
 * property. If storing the properties fails, the new bucket is rolled back
 * (removed) before the error is rethrown.
 */
public void createSpace(String spaceId) {
    log.debug("createSpace(" + spaceId + ")");
    throwIfSpaceExists(spaceId);
    Bucket bucket = createBucket(spaceId);
    Date created = bucket.getCreationDate();
    // S3 may not return a creation date immediately; fall back to "now".
    if(created == null) {
        created = new Date();
    }
    // Add space properties
    Map<String, String> spaceProperties = new HashMap<>();
    spaceProperties.put(PROPERTIES_SPACE_CREATED, formattedDate(created));
    try {
        setNewSpaceProperties(spaceId, spaceProperties);
    } catch(StorageException e) {
        // Roll back the half-created space so retries start clean.
        removeSpace(spaceId);
        String err = "Unable to create space due to: " + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}
/*
 * Creates the S3 bucket backing a new space; the bucket name is derived
 * from the access key and space ID for global uniqueness.
 */
private Bucket createBucket(String spaceId) {
    String bucketName = getNewBucketName(spaceId);
    try {
        return s3Client.createBucket(bucketName);
    } catch (AmazonClientException e) {
        String err = "Could not create S3 bucket with name " + bucketName
            + " due to error: " + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}
/*
 * Builds a globally-unique bucket name for a new space by prefixing the
 * space ID with this account's access key ID.
 */
protected String getNewBucketName(String spaceId) {
    return S3ProviderUtil.createNewBucketName(accessKeyId, spaceId);
}
// Formats a date using the DuraCloud-standard string representation.
private String formattedDate(Date date) {
    return DateUtil.convertToString(date.getTime());
}
/**
 * {@inheritDoc}
 *
 * Deletes the bucket backing the given space.
 * NOTE(review): S3 only deletes empty buckets — presumably the base class
 * removes all content first; verify before relying on this directly.
 */
public void removeSpace(String spaceId) {
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    try {
        s3Client.deleteBucket(bucketName);
    } catch (AmazonClientException e) {
        String err = "Could not delete S3 bucket with name " + bucketName
            + " due to error: " + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}
/**
 * {@inheritDoc}
 *
 * Reads space properties from the bucket's tagging configuration, restores
 * '@' characters in values, and appends the (possibly capped) item count.
 */
protected Map<String, String> getAllSpaceProperties(String spaceId) {
    log.debug("getAllSpaceProperties(" + spaceId + ")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    // Space properties are persisted as bucket tags.
    Map<String, String> props = new HashMap<>();
    BucketTaggingConfiguration tagConfig =
        s3Client.getBucketTaggingConfiguration(bucketName);
    if (tagConfig != null) {
        for (TagSet tagSet : tagConfig.getAllTagSets()) {
            props.putAll(tagSet.getAllTags());
        }
    }
    // Handle @ symbol (change from +), to allow for email usernames in ACLs
    props = replaceInMapValues(props, "+", "@");
    // Add space count
    props.put(PROPERTIES_SPACE_COUNT, getSpaceCount(spaceId, MAX_ITEM_COUNT));
    return props;
}
/*
 * Counts the number of items in a space up to the maxCount. If maxCount
 * is reached or exceeded, the returned string will indicate this with a
 * trailing '+' character (e.g. 1000+).
 *
 * Note that anecdotal evidence shows that this method of counting
 * (using size of chunked calls) is faster in most cases than enumerating
 * the Iteration: StorageProviderUtil.count(getSpaceContents(spaceId, null))
 */
protected String getSpaceCount(String spaceId, int maxCount) {
    long count = 0;
    String marker = null;
    while (true) {
        List<String> chunk = getSpaceContentsChunked(spaceId,
                                                     null,
                                                     DEFAULT_MAX_RESULTS,
                                                     marker);
        count += chunk.size();
        // Stop when a page comes back empty or the cap has been reached.
        if (chunk.isEmpty() || count >= maxCount) {
            break;
        }
        // Continue listing after the last key of this page.
        marker = chunk.get(chunk.size() - 1);
    }
    return count >= maxCount ? count + "+" : String.valueOf(count);
}
/*
 * Looks up the creation date of the named bucket via the account's bucket
 * listing; returns "unknown" when the bucket is not found.
 */
private String getBucketCreationDate(String bucketName) {
    Date created = null;
    try {
        for (Bucket bucket : s3Client.listBuckets()) {
            if (bucketName.equals(bucket.getName())) {
                created = bucket.getCreationDate();
            }
        }
    } catch (AmazonClientException e) {
        String err = "Could not retrieve S3 bucket listing due to error: " +
            e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
    return created == null ? "unknown" : formattedDate(created);
}
/**
 * {@inheritDoc}
 *
 * Persists space properties as bucket tags, preserving the space creation
 * date from (in priority order) the existing properties, the incoming
 * properties, or the bucket's own creation date.
 */
protected void doSetSpaceProperties(String spaceId,
                                    Map<String, String> spaceProperties) {
    log.debug("setSpaceProperties(" + spaceId + ")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    Map<String, String> originalProperties;
    try {
        originalProperties = getAllSpaceProperties(spaceId);
    } catch(NotFoundException e) {
        // Likely adding a new space, so no existing properties yet.
        originalProperties = new HashMap<>();
    }
    // Set creation date
    String creationDate = originalProperties.get(PROPERTIES_SPACE_CREATED);
    if(creationDate == null) {
        creationDate = spaceProperties.get(PROPERTIES_SPACE_CREATED);
        if(creationDate == null) {
            // Last resort: read it from the bucket listing.
            creationDate = getBucketCreationDate(bucketName);
        }
    }
    spaceProperties.put(PROPERTIES_SPACE_CREATED, creationDate);
    // Handle @ symbol (change to +), to allow for email usernames in ACLs
    // (S3 tag values cannot contain '@').
    spaceProperties = replaceInMapValues(spaceProperties, "@", "+");
    // Store properties
    BucketTaggingConfiguration tagConfig = new BucketTaggingConfiguration()
        .withTagSets(new TagSet(spaceProperties));
    s3Client.setBucketTaggingConfiguration(bucketName, tagConfig);
}
/*
 * Performs a replaceAll of one string value for another in all the values
 * of a map. Mutates and returns the given map.
 */
private Map<String, String> replaceInMapValues(Map<String, String> map,
                                               String oldVal,
                                               String newVal) {
    // Iterate entries and update in place: avoids the get-then-put double
    // lookup of the original keySet-based loop.
    for (Map.Entry<String, String> entry : map.entrySet()) {
        String value = entry.getValue();
        if (value.contains(oldVal)) {
            entry.setValue(StringUtils.replace(value, oldVal, newVal));
        }
    }
    return map;
}
/**
 * {@inheritDoc}
 *
 * Stores a content item in the space's bucket, attaching user properties
 * as S3 user metadata, then verifies the stored item by comparing the S3
 * ETag against the MD5 computed while streaming the content.
 */
public String addContent(String spaceId,
                         String contentId,
                         String contentMimeType,
                         Map<String, String> userProperties,
                         long contentSize,
                         String contentChecksum,
                         InputStream content) {
    log.debug("addContent("+ spaceId +", "+ contentId +", "+
        contentMimeType +", "+ contentSize +", "+ contentChecksum +")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    // Wrap the content in order to be able to retrieve a checksum
    ChecksumInputStream wrappedContent =
        new ChecksumInputStream(content, contentChecksum);
    userProperties = removeCalculatedProperties(userProperties);
    if(contentMimeType == null || contentMimeType.equals("")) {
        contentMimeType = DEFAULT_MIMETYPE;
    }
    ObjectMetadata objMetadata = new ObjectMetadata();
    objMetadata.setContentType(contentMimeType);
    // Only set the length when known; otherwise the client buffers/streams.
    if (contentSize > 0) {
        objMetadata.setContentLength(contentSize);
    }
    if(userProperties != null) {
        for (String key : userProperties.keySet()) {
            if (log.isDebugEnabled()) {
                log.debug("[" + key + "|" + userProperties.get(key) + "]");
            }
            // S3 metadata keys cannot contain spaces; encode them.
            objMetadata.addUserMetadata(getSpaceFree(key), userProperties.get(key));
        }
    }
    PutObjectRequest putRequest = new PutObjectRequest(bucketName,
                                                       contentId,
                                                       wrappedContent,
                                                       objMetadata);
    putRequest.setStorageClass(this.storageClass);
    putRequest.setCannedAcl(CannedAccessControlList.Private);
    // Add the object
    String etag;
    try {
        PutObjectResult putResult = s3Client.putObject(putRequest);
        etag = putResult.getETag();
    } catch (AmazonClientException e) {
        // The PUT may have succeeded despite the client error; check
        // whether the item landed before giving up.
        etag = doesContentExist(bucketName, contentId);
        if(null == etag) {
            String err = "Could not add content " + contentId +
                " with type " + contentMimeType +
                " and size " + contentSize +
                " to S3 bucket " + bucketName + " due to error: " +
                e.getMessage();
            throw new StorageException(err, e, NO_RETRY);
        }
    }
    // Compare checksum
    String providerChecksum = getETagValue(etag);
    String checksum = wrappedContent.getMD5();
    return StorageProviderUtil.compareChecksum(providerChecksum,
                                               spaceId,
                                               contentId,
                                               checksum);
}
/*
 * Determines if a content item exists. If so, returns its ETag (MD5 for
 * non-multipart uploads). If not (after up to 5 attempts), returns null.
 */
protected String doesContentExist(String bucketName, String contentId) {
    int maxAttempts = 5;
    for (int i = 0; i < maxAttempts; i++) {
        try {
            ObjectMetadata metadata =
                s3Client.getObjectMetadata(bucketName, contentId);
            if (null != metadata) {
                return metadata.getETag();
            }
        } catch (AmazonClientException e) {
            // Only pause when another attempt remains; the original also
            // slept (pointlessly) after the final failed attempt.
            if (i < maxAttempts - 1) {
                wait(2);
            }
        }
    }
    return null;
}
/*
 * Sleeps for the given number of seconds.
 * NOTE: this overloads Object.wait(long) with different semantics —
 * consider renaming (e.g. sleep) in a future API revision.
 */
protected void wait(int seconds) {
    try {
        Thread.sleep(1000L * seconds);
    } catch(InterruptedException e) {
        // Restore the interrupt status instead of silently swallowing it,
        // so callers/pools can observe the interruption.
        Thread.currentThread().interrupt();
    }
}
/**
 * {@inheritDoc}
 *
 * Server-side copies a content item between spaces, preserving the
 * configured storage class and private ACL, then verifies the result by
 * comparing the destination ETag with the source checksum.
 */
@Override
public String copyContent(String sourceSpaceId,
                          String sourceContentId,
                          String destSpaceId,
                          String destContentId) {
    log.debug("copyContent({}, {}, {}, {})",
              new Object[]{sourceSpaceId,
                           sourceContentId,
                           destSpaceId,
                           destContentId});
    // Will throw if source bucket does not exist
    String sourceBucketName = getBucketName(sourceSpaceId);
    // Will throw if destination bucket does not exist
    String destBucketName = getBucketName(destSpaceId);
    throwIfContentNotExist(sourceBucketName, sourceContentId);
    CopyObjectRequest request = new CopyObjectRequest(sourceBucketName,
                                                      sourceContentId,
                                                      destBucketName,
                                                      destContentId);
    request.setStorageClass(this.storageClass);
    request.setCannedAccessControlList(CannedAccessControlList.Private);
    CopyObjectResult result = doCopyObject(request);
    return StorageProviderUtil.compareChecksum(this,
                                               sourceSpaceId,
                                               sourceContentId,
                                               result.getETag());
}
/*
 * Executes an S3 copy request, wrapping any failure in a retryable
 * StorageException describing source and destination.
 */
private CopyObjectResult doCopyObject(CopyObjectRequest request) {
    try {
        return s3Client.copyObject(request);
    } catch (Exception e) {
        String err = "Error copying from: "
            + request.getSourceBucketName()
            + " / "
            + request.getSourceKey()
            + ", to: "
            + request.getDestinationBucketName()
            + " / "
            + request.getDestinationKey();
        log.error(err + "msg: {}", e.getMessage());
        throw new StorageException(err, e, RETRY);
    }
}
/**
 * {@inheritDoc}
 *
 * Returns a stream over the item's content; the caller is responsible for
 * closing it. Distinguishes "item missing" (NotFoundException) from other
 * retryable failures.
 */
public InputStream getContent(String spaceId, String contentId) {
    log.debug("getContent(" + spaceId + ", " + contentId + ")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    try {
        S3Object contentItem = s3Client.getObject(bucketName, contentId);
        return contentItem.getObjectContent();
    } catch (AmazonClientException e) {
        // Throws NotFoundException instead when the item simply isn't there.
        throwIfContentNotExist(bucketName, contentId);
        String err = "Could not retrieve content " + contentId
            + " in S3 bucket " + bucketName + " due to error: "
            + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}
/**
 * {@inheritDoc}
 *
 * Deletes a content item, first verifying it exists so that a missing item
 * surfaces as NotFoundException per the DuraCloud contract.
 */
public void deleteContent(String spaceId, String contentId) {
    log.debug("deleteContent(" + spaceId + ", " + contentId + ")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    // Note that the s3Client does not throw an exception or indicate if
    // the object to be deleted does not exist. This check is being run
    // up front to fulfill the DuraCloud contract for this method.
    throwIfContentNotExist(bucketName, contentId);
    try {
        s3Client.deleteObject(bucketName, contentId);
    } catch (AmazonClientException e) {
        String err = "Could not delete content " + contentId
            + " from S3 bucket " + bucketName
            + " due to error: " + e.getMessage();
        throw new StorageException(err, e, RETRY);
    }
}
/**
 * {@inheritDoc}
 *
 * Replaces the user metadata of a content item. Since S3 metadata is
 * immutable, this is implemented downstream as a copy-onto-itself (see
 * updateObjectProperties). The item's MIME type is preserved when not
 * supplied in the new property set.
 */
public void setContentProperties(String spaceId,
                                 String contentId,
                                 Map<String, String> contentProperties) {
    log.debug("setContentProperties(" + spaceId + ", " + contentId + ")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    contentProperties = removeCalculatedProperties(contentProperties);
    // Determine mimetype, from properties list or existing value
    String mimeType = contentProperties.remove(PROPERTIES_CONTENT_MIMETYPE);
    if (mimeType == null || mimeType.equals("")) {
        Map<String, String> existingMeta =
            getContentProperties(spaceId, contentId);
        String existingMime =
            existingMeta.get(StorageProvider.PROPERTIES_CONTENT_MIMETYPE);
        if (existingMime != null) {
            mimeType = existingMime;
        }
    }
    // Collect all object properties
    ObjectMetadata objMetadata = new ObjectMetadata();
    for (String key : contentProperties.keySet()) {
        if (log.isDebugEnabled()) {
            log.debug("[" + key + "|" + contentProperties.get(key) + "]");
        }
        // S3 metadata keys cannot contain spaces; encode them.
        objMetadata.addUserMetadata(getSpaceFree(key), contentProperties.get(key));
    }
    // Set Content-Type
    if (mimeType != null && !mimeType.equals("")) {
        objMetadata.setContentType(mimeType);
    }
    updateObjectProperties(bucketName, contentId, objMetadata);
}
/**
 * Extends the base-class filtering by also stripping the HTTP headers that
 * S3 derives itself (in both canonical and lower-case forms), so they are
 * never stored as user metadata. Content-Type is handled separately via
 * ObjectMetadata.setContentType.
 */
@Override
protected Map<String, String> removeCalculatedProperties(Map<String, String> contentProperties) {
    contentProperties = super.removeCalculatedProperties(contentProperties);
    if (contentProperties != null) {
        String[] headers = {Headers.CONTENT_LENGTH,
                            Headers.CONTENT_TYPE,
                            Headers.LAST_MODIFIED,
                            Headers.DATE,
                            Headers.ETAG};
        for (String header : headers) {
            contentProperties.remove(header);
            contentProperties.remove(header.toLowerCase());
        }
    }
    return contentProperties;
}
/*
 * Throws NotFoundException when the item cannot be HEADed. Note: any
 * client error (not only 404) is treated as "not found" here.
 */
private void throwIfContentNotExist(String bucketName, String contentId) {
    try {
        s3Client.getObjectMetadata(bucketName, contentId);
    } catch(AmazonClientException e) {
        String err = "Could not find content item with ID " + contentId +
            " in S3 bucket " + bucketName + ". S3 error: " + e.getMessage();
        throw new NotFoundException(err);
    }
}
/*
 * HEADs an object and returns its metadata; converts a missing item into
 * NotFoundException and any other failure into a StorageException with the
 * given retry policy.
 */
private ObjectMetadata getObjectDetails(String bucketName,
                                        String contentId,
                                        boolean retry) {
    try {
        return s3Client.getObjectMetadata(bucketName, contentId);
    } catch (AmazonClientException e) {
        throwIfContentNotExist(bucketName, contentId);
        String err = "Could not get details for content " + contentId
            + " in S3 bucket " + bucketName + " due to error: "
            + e.getMessage();
        throw new StorageException(err, e, retry);
    }
}
/*
 * Replaces an object's metadata by copying the object onto itself with the
 * new metadata attached (S3 object metadata is otherwise immutable). The
 * original ACL is captured first and re-applied, since the copy would
 * otherwise reset it.
 */
private void updateObjectProperties(String bucketName,
                                    String contentId,
                                    ObjectMetadata objMetadata) {
    try {
        AccessControlList originalACL =
            s3Client.getObjectAcl(bucketName, contentId);
        CopyObjectRequest copyRequest = new CopyObjectRequest(bucketName,
                                                              contentId,
                                                              bucketName,
                                                              contentId);
        copyRequest.setStorageClass(this.storageClass);
        copyRequest.setNewObjectMetadata(objMetadata);
        s3Client.copyObject(copyRequest);
        s3Client.setObjectAcl(bucketName, contentId, originalACL);
    } catch (AmazonClientException e) {
        throwIfContentNotExist(bucketName, contentId);
        String err = "Could not update metadata for content "
            + contentId + " in S3 bucket " + bucketName
            + " due to error: " + e.getMessage();
        throw new StorageException(err, e, NO_RETRY);
    }
}
/**
 * {@inheritDoc}
 *
 * Builds the property map for a content item from its S3 metadata:
 * decoded user metadata plus the derived MIME type, size, checksum (ETag)
 * and last-modified values, each stored under both the DuraCloud property
 * name and the equivalent HTTP header name.
 */
public Map<String, String> getContentProperties(String spaceId,
                                                String contentId) {
    log.debug("getContentProperties(" + spaceId + ", " + contentId + ")");
    // Will throw if bucket does not exist
    String bucketName = getBucketName(spaceId);
    // Get the content item from S3
    ObjectMetadata objMetadata =
        getObjectDetails(bucketName, contentId, RETRY);
    if (objMetadata == null) {
        String err = "No metadata is available for item " + contentId
            + " in S3 bucket " + bucketName;
        throw new StorageException(err, NO_RETRY);
    }
    Map<String, String> contentProperties = new HashMap<>();
    // Set the user properties (keys were space-encoded on write)
    Map<String, String> userProperties = objMetadata.getUserMetadata();
    for(String metaName : userProperties.keySet()) {
        String metaValue = userProperties.get(metaName);
        contentProperties.put(getWithSpace(metaName), metaValue);
    }
    // Set MIMETYPE
    String contentType = objMetadata.getContentType();
    if (contentType != null) {
        contentProperties.put(PROPERTIES_CONTENT_MIMETYPE, contentType);
        contentProperties.put(Headers.CONTENT_TYPE, contentType);
    }
    // Set SIZE
    long contentLength = objMetadata.getContentLength();
    if (contentLength >= 0) {
        String size = String.valueOf(contentLength);
        contentProperties.put(PROPERTIES_CONTENT_SIZE, size);
        contentProperties.put(Headers.CONTENT_LENGTH, size);
    }
    // Set CHECKSUM (the ETag, unquoted; equals the MD5 for simple uploads)
    String checksum = objMetadata.getETag();
    if (checksum != null) {
        String eTagValue = getETagValue(checksum);
        contentProperties.put(PROPERTIES_CONTENT_CHECKSUM, eTagValue);
        contentProperties.put(PROPERTIES_CONTENT_MD5, eTagValue);
        contentProperties.put(Headers.ETAG, eTagValue);
    }
    // Set MODIFIED
    Date modified = objMetadata.getLastModified();
    if (modified != null) {
        String modDate = formattedDate(modified);
        contentProperties.put(PROPERTIES_CONTENT_MODIFIED, modDate);
        contentProperties.put(Headers.LAST_MODIFIED, modDate);
    }
    return contentProperties;
}
/*
 * Strips the wrapping double quotes that S3 places around ETag values;
 * any other input (including null) is returned unchanged.
 */
protected String getETagValue(String etag) {
    if (etag != null && etag.startsWith("\"") && etag.endsWith("\"")) {
        return etag.substring(1, etag.length() - 1);
    }
    return etag;
}
/**
 * Gets the name of an existing bucket based on a space ID. If no bucket
 * with this spaceId exists, throws a NotFoundException
 *
 * @param spaceId the space Id to convert into an S3 bucket name
 * @return S3 bucket name of a given DuraCloud space
 * @throws NotFoundException if no bucket matches this spaceID
 */
public String getBucketName(String spaceId) {
    // Determine if there is an existing bucket that matches this space ID.
    // The bucket name may use any access key ID as the prefix, so there is
    // no way to know the exact bucket name up front.
    List<Bucket> buckets = listAllBuckets();
    for(Bucket bucket : buckets){
        String bucketName = bucket.getName();
        // NOTE(review): spaceId is interpolated into the regex unescaped;
        // presumably space IDs never contain regex metacharacters — verify,
        // or consider Pattern.quote(spaceId).
        if(bucketName.matches("[\\w]{20}[.]"+spaceId)) {
            return bucketName;
        }
    }
    throw new NotFoundException("No S3 bucket found matching spaceID: " +
                                spaceId);
}
/**
 * Converts a bucket name into what could be passed in as a space ID by
 * stripping the 20-character access-key prefix and the '.' separator.
 * Bucket names that do not follow the space convention are returned as-is.
 *
 * @param bucketName name of the S3 bucket
 * @return the DuraCloud space name equivalent to a given S3 bucket Id
 */
protected String getSpaceId(String bucketName) {
    String spaceId = bucketName;
    if (isSpace(bucketName)) {
        spaceId = spaceId.substring(accessKeyId.length() + 1);
    }
    return spaceId;
}
/**
 * Determines if an S3 bucket is a DuraCloud space.
 *
 * According to AWS docs, the access key (used in DuraCloud as a prefix
 * for uniqueness) is a 20 character alphanumeric sequence, so spaces are
 * buckets named "<20-word-chars>.<spaceId>".
 *
 * @param bucketName name of the S3 bucket
 * @return true if the given S3 bucket name is named according to the
 *         DuraCloud space naming conventions, false otherwise
 */
protected boolean isSpace(String bucketName) {
    return bucketName.matches("[\\w]{20}[.].*");
}
/**
 * Replaces all spaces with "%20" (S3 user-metadata keys may not contain
 * spaces; getWithSpace reverses this on read).
 *
 * @param name string with possible space
 * @return converted to string without spaces
 */
protected String getSpaceFree(String name) {
    return name.replaceAll(" ", "%20");
}
/**
 * Converts "%20" back to spaces (inverse of getSpaceFree).
 *
 * @param name string
 * @return converted to spaces
 */
protected String getWithSpace(String name) {
    return name.replaceAll("%20", " ");
}
/*
 * Resolves the storage class from the provider options; several aliases
 * select ReducedRedundancy, anything else (or no option) keeps the default.
 */
private StorageClass getStorageClass(Map<String, String> options) {
    StorageClass sc = DEFAULT_STORAGE_CLASS;
    String scOpt = null;
    if (null != options) {
        scOpt = options.get(StorageAccount.OPTS.STORAGE_CLASS.name());
    }
    if (null != scOpt) {
        switch (scOpt.toLowerCase()) {
            case "reduced_redundancy":
            case "reducedredundancy":
            case "reduced":
            case "rrs":
                sc = StorageClass.ReducedRedundancy;
                break;
            default:
                // Unrecognized values fall back to the default class.
                break;
        }
    }
    log.debug("StorageClass set to: {}, from {}", sc, scOpt);
    return sc;
}
}
|
package vontus.magicbottle;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import net.md_5.bungee.api.ChatColor;
import vontus.magicbottle.MagicBottle;
import vontus.magicbottle.Plugin;
import vontus.magicbottle.config.Config;
import vontus.magicbottle.config.Messages;
public class Commands implements CommandExecutor {
private Plugin plugin;
public Commands(Plugin plugin) {
this.plugin = plugin;
}
@Override
public boolean onCommand(CommandSender sender, Command command, String alias, String[] argument) {
if (command.getName().equalsIgnoreCase("magicbottle")) {
if (argument.length > 0) {
switch (argument[0]) {
case "about":
about(sender);
break;
case "reload":
reload(sender);
break;
case "give":
give(sender, argument);
break;
default:
sendMenu(sender);
}
} else {
sendMenu(sender);
}
}
return true;
}
private void about(CommandSender sender) {
sender.sendMessage(plugin.getDescription().getFullName() + " by Vontus");
sender.sendMessage("https:
}
private void reload(CommandSender sender) {
if (sender.hasPermission(Config.authorizationReload)) {
plugin.loadConfig();
sender.sendMessage(Messages.cmdMsgReloadCompleted);
} else {
sender.sendMessage(Messages.msgUnauthorizedToReload);
}
}
private void give(CommandSender sender, String[] args) {
if (sender.hasPermission(Config.authorizationGive)) {
Player player = null;
Integer level = 0;
Integer amount = 1;
if (sender instanceof Player) {
player = (Player) sender;
}
try {
switch (args.length) {
case 4:
Player p = plugin.getServer().getPlayer(args[3]);
if (p != null)
player = p;
case 3:
amount = Integer.parseInt(args[2]);
case 2:
level = Integer.parseInt(args[1]);
case 1:
if (level < 0 || level > Config.maxLevel) {
sender.sendMessage(Messages.cmdMsgLevelNotValid);
} else if (player == null) {
sender.sendMessage("You must specify a connected player");
} else {
giveBottle(level, amount, player);
String m = Messages.cmdMsgGivenMagicBottle;
m = m.replace("[amount]", amount.toString())
.replace("[player]", player.getName())
.replace("[level]", level.toString());
sender.sendMessage(m);
}
break;
default:
sender.sendMessage(correctUse("/magicbottle give [level] [amount] [player]"));
}
} catch (NumberFormatException e) {
sender.sendMessage(correctUse("/magicbottle give [level] [amount] [player]"));
}
} else {
sender.sendMessage(Messages.msgUnauthorizedToUseCommand);
}
}
private String correctUse(String s) {
String msg = Messages.cmdMsgCorrectUse;
return msg.replace("%", s);
}
private void sendMenu(CommandSender sender) {
sender.sendMessage(ChatColor.GOLD + "- MagicBottle Commands -");
sender.sendMessage(ChatColor.YELLOW + " /magicbottle about");
if (sender.hasPermission(Config.authorizationGive)) {
sender.sendMessage(ChatColor.YELLOW + " /magicbottle give [level] [amount] [player]");
}
if (sender.hasPermission(Config.authorizationReload)) {
sender.sendMessage(ChatColor.YELLOW + " /magicbottle reload");
}
}
private void giveBottle(int level, int amount, Player player) {
MagicBottle bottle = new MagicBottle(Exp.getExpAtLevel(level));
ItemStack item = bottle.getItem();
item.setAmount(amount);
player.getInventory().addItem(new ItemStack[] { item });
}
}
|
package com.franmontiel.fullscreendialog.sample;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import com.franmontiel.fullscreendialog.FullScreenDialogFragment;
/**
 * Sample activity demonstrating FullScreenDialogFragment: opens a dialog to
 * collect a surname and displays the confirmed full name.
 */
public class MainActivity extends AppCompatActivity
        implements FullScreenDialogFragment.OnConfirmListener, FullScreenDialogFragment.OnDiscardListener {

    private TextView fullName;
    private FullScreenDialogFragment dialogFragment;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        fullName = (TextView) findViewById(R.id.fullName);
        final EditText name = (EditText) findViewById(R.id.nameField);
        Button openDialog = (Button) findViewById(R.id.openDialog);
        final String dialogTag = "dialog";
        // After a configuration change the dialog may still be attached;
        // re-register the listeners on the restored instance.
        if (savedInstanceState != null) {
            dialogFragment =
                    (FullScreenDialogFragment) getSupportFragmentManager().findFragmentByTag(dialogTag);
            if (dialogFragment != null) {
                dialogFragment.setOnConfirmListener(this);
                dialogFragment.setOnDiscardListener(this);
            }
        }
        openDialog.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Pass the typed first name into the dialog's content fragment.
                final Bundle args = new Bundle();
                args.putString(SurnameFragment.EXTRA_NAME, name.getText().toString());
                name.setText("");
                dialogFragment = new FullScreenDialogFragment.Builder(MainActivity.this)
                        .setTitle(R.string.insert_surname)
                        .setConfirmButton(R.string.dialog_positive_button)
                        .setOnConfirmListener(MainActivity.this)
                        .setOnDiscardListener(MainActivity.this)
                        .setContent(SurnameFragment.class, args)
                        .build();
                dialogFragment.show(getSupportFragmentManager(), dialogTag);
            }
        });
    }

    @Override
    public void onConfirm(@Nullable Bundle result) {
        // result is declared @Nullable; guard against an NPE when the dialog
        // confirms without a result bundle (the original dereferenced it
        // unconditionally).
        if (result != null) {
            fullName.setText(result.getString(SurnameFragment.RESULT_FULL_NAME));
        }
    }

    @Override
    public void onDiscard() {
        Toast.makeText(MainActivity.this, R.string.dialog_discarded, Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onBackPressed() {
        // Let an open dialog consume back presses (discard confirmation).
        if (dialogFragment != null && dialogFragment.isAdded()) {
            dialogFragment.onBackPressed();
        } else {
            super.onBackPressed();
        }
    }
}
|
package mondrian.rolap;
import mondrian.olap.*;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
/**
 * A <code>RolapSchemaReader</code> allows you to read schema objects while
 * observing the access-control profile specified by a given role.
 *
 * @author jhyde
 * @since Feb 24, 2003
 * @version $Id$
 **/
abstract class RolapSchemaReader implements SchemaReader {
// Role whose access rules (top/bottom level, per-member access) filter every read below.
private Role role;
// Lazily-built cache of per-hierarchy MemberReaders; accessed only from the
// synchronized getMemberReader, which guards this map.
private HashMap hierarchyReaders = new HashMap();
RolapSchemaReader(Role role) {
this.role = role;
}
public Role getRole() {
return role;
}
/**
 * Returns the members of the hierarchy's topmost visible level: the role's
 * top level when access is restricted, otherwise level 0.
 */
public Member[] getHierarchyRootMembers(Hierarchy hierarchy) {
final Role.HierarchyAccess hierarchyAccess = role.getAccessDetails(hierarchy);
Level firstLevel;
if (hierarchyAccess == null) {
firstLevel = hierarchy.getLevels()[0];
} else {
firstLevel = hierarchyAccess.getTopLevel();
if (firstLevel == null) {
// No explicit top level restriction: fall back to the real root level.
firstLevel = hierarchy.getLevels()[0];
}
}
return getLevelMembers(firstLevel);
}
// Returns (creating and caching on first use) the role-aware MemberReader for
// the given hierarchy. Synchronized to protect the hierarchyReaders cache.
private synchronized MemberReader getMemberReader(Hierarchy hierarchy) {
MemberReader memberReader = (MemberReader) hierarchyReaders.get(hierarchy);
if (memberReader == null) {
memberReader = ((RolapHierarchy) hierarchy).getMemberReader(role);
hierarchyReaders.put(hierarchy, memberReader);
}
return memberReader;
}
/** Appends to {@code list} the members of {@code level} between startMember and endMember. */
public void getMemberRange(Level level, Member startMember, Member endMember, List list) {
getMemberReader(level.getHierarchy()).getMemberRange(
(RolapLevel) level, (RolapMember) startMember, (RolapMember) endMember, list);
}
/**
 * Compares two members in hierarchical order. Precondition: both members
 * belong to the same hierarchy.
 */
public int compareMembersHierarchically(Member m1, Member m2) {
final RolapHierarchy hierarchy = (RolapHierarchy) m1.getHierarchy();
Util.assertPrecondition(hierarchy == m2.getHierarchy());
return getMemberReader(hierarchy).compare((RolapMember) m1, (RolapMember) m2, true);
}
/**
 * Returns the member's parent, or null when there is no parent or the role
 * has no access to it.
 */
public Member getMemberParent(Member member) {
final Member parentMember = member.getParentMember();
if (parentMember != null) {
final Role.HierarchyAccess hierarchyAccess = role.getAccessDetails(member.getHierarchy());
if (hierarchyAccess != null &&
hierarchyAccess.getAccess(parentMember) == Access.NONE) {
return null;
}
}
return parentMember;
}
/**
 * Returns the member's depth. When the role restricts the hierarchy, depth
 * is measured relative to the role's top level; for parent-child hierarchies
 * it is computed by walking the parent chain.
 */
public int getMemberDepth(Member member) {
final Role.HierarchyAccess hierarchyAccess = role.getAccessDetails(member.getHierarchy());
if (hierarchyAccess != null) {
int memberDepth = member.getLevel().getDepth();
final Level topLevel = hierarchyAccess.getTopLevel();
if (topLevel != null) {
memberDepth -= topLevel.getDepth();
}
return memberDepth;
} else if (((RolapLevel) member.getLevel()).parentExp != null) {
// For members of parent-child hierarchy, members in the same level may have
// different depths.
int depth = 0;
for (Member m = member.getParentMember(); m != null; m = m.getParentMember()) {
depth++;
}
return depth;
} else {
return member.getLevel().getDepth();
}
}
/** Returns the children of the given member, as seen by this reader's role. */
public Member[] getMemberChildren(Member member) {
ArrayList children = new ArrayList();
getMemberReader(member.getHierarchy()).getMemberChildren(
(RolapMember) member, children);
return RolapUtil.toArray(children);
}
/**
 * Returns the union of the children of the given members. A single reader,
 * taken from the first member's hierarchy, serves the whole array.
 */
public Member[] getMemberChildren(Member[] members) {
if (members.length == 0) {
return RolapUtil.emptyMemberArray;
} else {
final MemberReader memberReader = getMemberReader(members[0].getHierarchy());
ArrayList children = new ArrayList();
for (int i = 0; i < members.length; i++) {
memberReader.getMemberChildren((RolapMember) members[i], children);
}
return RolapUtil.toArray(children);
}
}
public abstract Cube getCube();
/** Looks up a child element by name, delegating to the parent element. */
public OlapElement getElementChild(OlapElement parent, String name) {
return parent.lookupChild(this, name);
}
public Member getMemberByUniqueName(String[] uniqueNameParts, boolean failIfNotFound) {
return Util.lookupMemberCompound(this, getCube(), uniqueNameParts, failIfNotFound);
}
/** Returns the member {@code n} positions after {@code member} within its level. */
public Member getLeadMember(Member member, int n) {
return getMemberReader(member.getHierarchy()).getLeadMember((RolapMember) member, n);
}
/** Returns all members of the given level visible to this reader's role. */
public Member[] getLevelMembers(Level level) {
final List membersInLevel = getMemberReader(level.getHierarchy()).getMembersInLevel(
(RolapLevel) level, 0, Integer.MAX_VALUE);
return RolapUtil.toArray(membersInLevel);
}
/**
 * Returns the hierarchy's levels trimmed to the [topLevel, bottomLevel]
 * window permitted by the role; the full array when access is unrestricted
 * or no window is configured.
 */
public Level[] getHierarchyLevels(Hierarchy hierarchy) {
Util.assertPrecondition(hierarchy != null, "hierarchy != null");
final Role.HierarchyAccess hierarchyAccess = role.getAccessDetails(hierarchy);
final Level[] levels = hierarchy.getLevels();
if (hierarchyAccess == null) {
return levels;
}
Level topLevel = hierarchyAccess.getTopLevel();
Level bottomLevel = hierarchyAccess.getBottomLevel();
if (topLevel == null &&
bottomLevel == null) {
return levels;
}
if (topLevel == null) {
topLevel = levels[0];
}
if (bottomLevel == null) {
bottomLevel = levels[levels.length - 1];
}
final int levelCount = bottomLevel.getDepth() - topLevel.getDepth() + 1;
Level[] restrictedLevels = new Level[levelCount];
System.arraycopy(levels, topLevel.getDepth(), restrictedLevels, 0, levelCount);
Util.assertPostcondition(restrictedLevels.length >= 1, "return.length >= 1");
return restrictedLevels;
}
/**
 * Returns the hierarchy's default member adjusted for the role: if the
 * default lies above the role's top level, descends first children until a
 * visible level is reached; if it lies below the bottom level, ascends
 * parents. The walk uses the hierarchy's unrestricted member reader.
 */
public Member getHierarchyDefaultMember(Hierarchy hierarchy) {
RolapMember member = (RolapMember) hierarchy.getDefaultMember();
final Role.HierarchyAccess hierarchyAccess = role.getAccessDetails(hierarchy);
if (hierarchyAccess != null) {
final Level level = member.getLevel();
final int levelDepth = level.getDepth();
final Level topLevel = hierarchyAccess.getTopLevel();
final MemberReader unrestrictedMemberReader = ((RolapHierarchy) hierarchy).memberReader;
if (topLevel != null &&
topLevel.getDepth() > levelDepth) {
// Find the first child of the first child... until we get to
// a level we can see.
ArrayList children = new ArrayList();
do {
unrestrictedMemberReader.getMemberChildren(member, children);
Util.assertTrue(children.size() > 0);
member = (RolapMember) children.get(0);
children.clear();
} while (member.getLevel() != topLevel);
return member;
}
final Level bottomLevel = hierarchyAccess.getBottomLevel();
if (bottomLevel != null &&
bottomLevel.getDepth() < levelDepth) {
// Default member is below the visible window: climb to the bottom level.
do {
member = (RolapMember) member.getParentMember();
Util.assertTrue(member != null);
} while (member.getLevel() != bottomLevel);
return member;
}
}
return member;
}
/**
 * Returns whether the member can be drilled into: for parent-child levels,
 * whether it actually has children; otherwise whether an accessible child
 * level exists.
 */
public boolean isDrillable(Member member) {
final RolapLevel level = (RolapLevel) member.getLevel();
if (level.parentExp != null) {
// This is a parent-child level, so its children, if any, come from
// the same level.
// todo: More efficient implementation
return getMemberChildren(member).length > 0;
} else {
// This is a regular level. It has children iff there is a lower
// level.
final Level childLevel = level.getChildLevel();
return childLevel != null &&
role.getAccess(childLevel) != Access.NONE;
}
}
}
// End RolapSchemaReader.java
|
package com.punchthrough.bean.sdk;
import android.test.suitebuilder.annotation.Suppress;
import android.util.Log;
import com.punchthrough.bean.sdk.internal.exception.ImageParsingException;
import com.punchthrough.bean.sdk.internal.upload.firmware.OADProfile;
import com.punchthrough.bean.sdk.internal.upload.firmware.OADState;
import com.punchthrough.bean.sdk.message.BeanError;
import com.punchthrough.bean.sdk.message.Callback;
import com.punchthrough.bean.sdk.message.UploadProgress;
import com.punchthrough.bean.sdk.upload.FirmwareBundle;
import com.punchthrough.bean.sdk.upload.FirmwareImage;
import com.punchthrough.bean.sdk.util.BeanTestCase;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
// Integration test that runs a full over-the-air (OAD) firmware update against a
// physical Bean, selecting the firmware bundle from the device's hardware revision.
public class TestBeanFirmwareUpdate extends BeanTestCase {
// Logcat tag for this test.
private final String TAG = "TestBeanFirmwareUpdate";
private final int FW_TEST_MAX_DURATION = 5; // Minutes
// Bean under test, discovered and connected in setUp().
private Bean bean;
// Handle used to approve or reject the OAD process from the updateRequired callback.
private OADProfile.OADApproval oadApproval;
// NOTE(review): static while every other field is per-instance; it is written from a
// callback and read by the test thread -- confirm this is intentional.
private static String hwVersion;
public void setUp() {
super.setUp();
try {
bean = discoverBean();
synchronousConnect(bean);
} catch (Exception e) {
e.printStackTrace();
fail("Could not connect to close Bean");
}
}
public void tearDown() {
super.tearDown();
try {
synchronousDisconnect(bean);
} catch (InterruptedException e) {
e.printStackTrace();
fail("Could not disconnect to Bean");
}
}
// Maps a hardware revision string to the asset directory containing its firmware bundle.
// Throws when the revision matches no known device family.
private String bundlePathForHardwareRevision(String hardwareRevision) throws Exception {
// Match on Bean Versions
if (hardwareRevision.startsWith("1") || hardwareRevision.startsWith("E")) {
return "firmware_bundles/asymmetrical/bean";
}
// Match on Bean+ Versions
if (hardwareRevision.startsWith("2")) {
return "firmware_bundles/asymmetrical/beanplus";
}
throw new Exception("Invalid hardware version: " + hardwareRevision);
}
/**
 * Loads every firmware image asset for the given hardware revision and wraps
 * them in a {@code FirmwareBundle}.
 *
 * @throws Exception if the revision is unknown or an image cannot be read or parsed
 */
public FirmwareBundle getFirmwareBundle(String hardwareRevision) throws Exception {
Log.i(TAG, "Finding firmware bundle for hardware version: " + hardwareRevision);
String bundlePath = bundlePathForHardwareRevision(hardwareRevision);
List<FirmwareImage> fwImages = new ArrayList<>();
for (String imageFileName : filesInAssetDir(getContext(), bundlePath)) {
String imagePath = FilenameUtils.concat(bundlePath, imageFileName);
try {
InputStream imageStream = getContext().getAssets().open(imagePath);
FirmwareImage image = new FirmwareImage(IOUtils.toByteArray(imageStream), imageFileName);
fwImages.add(image);
} catch (IOException | ImageParsingException e) {
throw new Exception(e.getMessage());
}
}
FirmwareBundle bundle = new FirmwareBundle(fwImages);
Log.i(TAG, "Found firmware bundle: " + bundle.version());
return bundle;
}
// Suppressed from normal runs; drives a real firmware update end-to-end and waits
// on latches for the hardware-version read and the OAD completion.
@Suppress
public void testFirmwareUpdate() throws Exception {
final CountDownLatch fwLatch = new CountDownLatch(1);
final CountDownLatch hwVersionLatch = new CountDownLatch(1);
bean.readHardwareVersion(new Callback<String>() {
@Override
public void onResult(String hardwareVersion) {
hwVersion = hardwareVersion;
hwVersionLatch.countDown();
}
});
hwVersionLatch.await(10, TimeUnit.SECONDS);
if (hwVersion == null) {
fail("Couldn't get HW version");
}
oadApproval = bean.programWithFirmware(getFirmwareBundle(hwVersion), new OADProfile.OADListener() {
@Override
public void complete() {
Log.i(TAG, "OAD Process Complete!");
fwLatch.countDown();
}
@Override
public void error(BeanError error) {
Log.e(TAG, "OAD Error: " + error.toString());
// NOTE(review): fail() here runs on a callback thread; it may not be
// recorded by the test runner -- confirm the error path fails the test.
fail();
}
@Override
public void progress(UploadProgress uploadProgress) {
// Log only every 50th block to keep logcat readable.
if (uploadProgress.blocksSent() % 50 == 0) {
Log.i(TAG, "OAD Progress: " + uploadProgress.completionBlocks());
}
}
@Override
public void updateRequired(boolean required) {
// Approve the update when needed; otherwise nothing to do, release the latch.
if (required) {
oadApproval.allow();
} else {
fwLatch.countDown();
}
}
@Override
public void stateChange(OADState state) {}
});
// Wait 5 minutes for it to complete or fail
fwLatch.await(FW_TEST_MAX_DURATION * 60, TimeUnit.SECONDS);
if (fwLatch.getCount() > 0) {
fail("Firmware Update Test took too long!");
} else {
Log.i(TAG, "Firmware Update Test completed successfully!");
}
}
}
|
package org.sejda.impl.sambox.component;
import static org.sejda.core.notification.dsl.ApplicationEventsNotifier.notifyEvent;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import org.sejda.common.ComponentsUtility;
import org.sejda.model.exception.TaskException;
import org.sejda.model.pdf.PdfVersion;
import org.sejda.model.task.NotifiableTaskMetadata;
import org.sejda.sambox.pdmodel.PDDocument;
import org.sejda.sambox.pdmodel.PDPage;
import org.sejda.sambox.pdmodel.interactive.action.PDAction;
import org.sejda.sambox.pdmodel.interactive.action.PDActionGoTo;
import org.sejda.sambox.pdmodel.interactive.annotation.PDAnnotation;
import org.sejda.sambox.pdmodel.interactive.annotation.PDAnnotationLink;
import org.sejda.sambox.pdmodel.interactive.documentnavigation.destination.PDDestination;
import org.sejda.sambox.pdmodel.interactive.documentnavigation.destination.PDPageDestination;
import org.sejda.sambox.pdmodel.interactive.documentnavigation.outline.PDDocumentOutline;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Component that retains pages from a given existing {@link PDDocument} and saves a new document containing retained pages and an outline that patches the new document.
*
* @author Andrea Vacondio
*
*/
public class PagesExtractor implements Closeable {
private static final Logger LOG = LoggerFactory.getLogger(PagesExtractor.class);
private OutlineMerger outlineMerger;
private PDDocumentOutline outline;
private PDDocument originalDocument;
private PDDocumentHandler destinationDocument;
public PagesExtractor(PDDocument origin) {
this.originalDocument = origin;
init();
}
private void init() {
this.outlineMerger = new OutlineMerger(originalDocument);
this.outline = new PDDocumentOutline();
this.destinationDocument = new PDDocumentHandler();
this.destinationDocument.initialiseBasedOn(originalDocument);
}
public void retain(Set<Integer> pages, NotifiableTaskMetadata taskMetadata) {
int currentStep = 0;
for (Integer page : pages) {
retain(page);
notifyEvent(taskMetadata).stepsCompleted(++currentStep).outOf(pages.size());
}
}
public void retain(int page) {
PDPage existingPage = originalDocument.getPage(page - 1);
destinationDocument.addPage(existingPage);
outlineMerger.addRelevantPage(existingPage);
LOG.trace("Imported page number {}", page);
processAnnotations(existingPage);
}
private void processAnnotations(PDPage imported) {
try {
List<PDAnnotation> annotations = imported.getAnnotations();
for (PDAnnotation annotation : annotations) {
if (annotation instanceof PDAnnotationLink) {
PDAnnotationLink link = (PDAnnotationLink) annotation;
PDDestination destination = link.getDestination();
if (destination == null && link.getAction() != null) {
PDAction action = link.getAction();
if (action instanceof PDActionGoTo) {
destination = ((PDActionGoTo) action).getDestination();
}
}
if (destination instanceof PDPageDestination) {
// TODO preserve links to pages within the splitted result
((PDPageDestination) destination).setPage(null);
}
}
// TODO preserve links to pages within the splitted result
annotation.setPage(null);
}
} catch (IOException e) {
LOG.warn("Failed to process annotations for page");
}
}
public void setVersion(PdfVersion version) {
destinationDocument.setVersionOnPDDocument(version);
}
public void setCompress(boolean compress) {
destinationDocument.setCompress(compress);
}
public void save(File file) throws TaskException {
outlineMerger.mergeRelevantOutlineTo(outline);
if (outline.hasChildren()) {
destinationDocument.setDocumentOutline(outline);
}
destinationDocument.saveDecryptedPDDocument(file);
}
public void close() {
ComponentsUtility.nullSafeCloseQuietly(destinationDocument);
outlineMerger = null;
}
/**
* Resets the component making it ready to start a new extractions from the original document
*/
public void reset() {
close();
init();
}
}
|
package edu.umn.bulletinboard.server.coordinator;
import java.net.MalformedURLException;
import java.rmi.Naming;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import edu.umn.bulletinboard.common.constants.RMIConstants;
import edu.umn.bulletinboard.common.content.Article;
import edu.umn.bulletinboard.common.exception.IllegalIPException;
import edu.umn.bulletinboard.common.locks.ServerLock;
import edu.umn.bulletinboard.common.rmi.BulletinBoardService;
import edu.umn.bulletinboard.common.rmi.RegisterRet;
import edu.umn.bulletinboard.common.server.ServerInfo;
import edu.umn.bulletinboard.common.util.ConsistencyType;
import edu.umn.bulletinboard.common.util.LogUtil;
import edu.umn.bulletinboard.common.util.TimeUtil;
import edu.umn.bulletinboard.server.BulletinBoardServiceImpl;
import edu.umn.bulletinboard.server.Server;
import edu.umn.bulletinboard.server.ServerConfig;
import edu.umn.bulletinboard.server.exceptions.InvalidArticleException;
/**
* @author abhijeet
*
* This is a RMI service implementation for the coordinator and caters to requests from
* servers. Actual service acts as proxy and forwards the calls here.
*
* This is a bit tricky, the implementation takes care of all the inwards as well as
* outwards call to Coordinator. So, we cannot call this just coordinator service impl.
*
*/
public class Coordinator {
private static final String CLASS_NAME = Coordinator.class.getSimpleName();
//This is an assumption that number of articles cannot be more than
//Max value of integer in Java
int counter, serverIdCounter;
Map<Integer, ServerInfo> servers = new HashMap<Integer, ServerInfo>();
private static Coordinator instance = null;
private Coordinator() {}
public synchronized static Coordinator getInstance() {
if (null == instance) {
instance = new Coordinator();
}
return instance;
}
/**
* Read call from server. Should be mostly used in Quorum consistency.
*
* @param type
* @return
* @throws RemoteException
*/
public List<Article> readFromCoordinatingServer(ConsistencyType type)
throws RemoteException, MalformedURLException, NotBoundException {
final String method = CLASS_NAME + ".readFromCoordinatingServer()";
LogUtil.log(method,"Server:"+ Server.getServerId() + " "+ "Reading from coordinating server");
//for quorum consistency only
//TODO: pick up consistency level from properties file
if (type != ConsistencyType.QUORUM) {
throw new RemoteException("Not a quorum consistency");
}
//see which is the max value, get all the values from that server and return them
int latestUpdatedServerId = getLatestUpdatedServerId();
ServerInfo sInfo = null;
synchronized (ServerLock.register) {
sInfo = servers.get(latestUpdatedServerId);
}
if(sInfo == null) {
return new ArrayList<Article>();
}
BulletinBoardService client = getClient(sInfo, latestUpdatedServerId);
TimeUtil.delay();
return client.readFromServer();
}
private int getLatestUpdatedServerId() throws RemoteException, NotBoundException
, MalformedURLException {
Random random = new Random();
List<Integer> alreadyRead = new ArrayList<Integer>();
int max = 0, maxServer = 0;
for (int i = 0; i < getNR(); ++i) {
int servId = random.nextInt(serverIdCounter + 2); //exclusive
if (servId == serverIdCounter + 1) {
servId = 99;
}
ServerInfo serverInfo = null;
synchronized (ServerLock.register) {
serverInfo = servers.get(servId);
}
if (0 == servId || null == serverInfo
|| alreadyRead.contains(servId)) {
--i;
continue;
}
alreadyRead.add(servId);
BulletinBoardService client = getClient(serverInfo,servId);
TimeUtil.delay();
int latestArticleId = client.getLatestArticleId();
if (latestArticleId > max) {
max = latestArticleId;
maxServer = servId;
}
}
return maxServer;
}
private BulletinBoardService getClient(ServerInfo sInfo, int serverId) throws RemoteException
, NotBoundException, MalformedURLException {
final String method = CLASS_NAME + ".getClient()";
BulletinBoardService client = null;
if (99 == serverId) {
client = BulletinBoardServiceImpl.getInstance();
} else {
LogUtil.log(method, "Server:"+ Server.getServerId() + " "+ "Getting client id " + serverId + " address:"+sInfo.getIp()+":"
+ sInfo.getPort());
client = (BulletinBoardService) Naming.lookup("rmi:
+ sInfo.getIp() + ":" + sInfo.getPort()
+ "/" + RMIConstants.BB_SERVICE);
}
return client;
}
/**
* Choose call from server.
*
* @param id
* @param type
* @return
* @throws RemoteException
*/
public Article chooseFromCoordinatingServer(int id, ConsistencyType type)
throws RemoteException, MalformedURLException, NotBoundException {
final String method = CLASS_NAME + ".chooseFromCoordinatingServer()";
LogUtil.log(method, "Server:"+ Server.getServerId() + " "+ "Choose from coordinating server for id : " + id);
//for quorum consistency only
//TODO: pick up consistency level from properties file
if (type != ConsistencyType.QUORUM) {
throw new RemoteException("Not a quorum consistency");
}
int latestUpdatedServerId = getLatestUpdatedServerId();
//see which is the max value, get all the values from that server and return them
ServerInfo sInfo = null;
synchronized (ServerLock.register) {
sInfo = servers.get(latestUpdatedServerId);
}
BulletinBoardService client = getClient(sInfo,latestUpdatedServerId);
TimeUtil.delay();
return client.readFromServer(id);
}
private void syncAll(int id, Article article) throws RemoteException, NotBoundException
, MalformedURLException {
final String method = CLASS_NAME + ".syncAll()";
LogUtil.log(method, "Server:"+ Server.getServerId() + " "+ "Syncing ALL : " + id + ":" + article);
Set<Integer> keySet = null;
synchronized (ServerLock.register) {
keySet = servers.keySet();
}
for (int i : keySet) {
LogUtil.log(method, "Server:"+ Server.getServerId() + " "+ "Syncing to server: " + i);
ServerInfo sInfo = servers.get(i);
synchronized (ServerLock.register) {
sInfo = servers.get(i);
}
BulletinBoardService client = getClient(sInfo,i);
if (-1 == id) {
TimeUtil.delay();
client.writeToServer(article);
} else {
TimeUtil.delay();
client.replyToServer(id, article);
}
}
}
/**
* Write call from Server.
*
* @param articleText
* @param type
* @return
* @throws RemoteException
*/
public int writeToCoordinatingServer(Article articleText, ConsistencyType type)
throws RemoteException, MalformedURLException, NotBoundException, InvalidArticleException {
final String method = CLASS_NAME + ".writeToCoordinatingServer()";
LogUtil.log(method,"Server:"+ Server.getServerId() + " "+ "Writing " + articleText + " to coordinating server");
return writeReply(-1, articleText, type);
}
private int writeReply(int id, Article articleText, ConsistencyType type)
throws RemoteException, InvalidArticleException, MalformedURLException
, NotBoundException {
final String method = CLASS_NAME + ".writeReply()";
LogUtil.log(method, "Server:"+ Server.getServerId() + " "+ "Writing " + id + ":" + articleText);
if (! (type == ConsistencyType.QUORUM || type == ConsistencyType.SEQUENTIAL)) {
throw new RemoteException("Not a quorum/sequential consistency");
}
articleText.setId(++counter);
if (type == ConsistencyType.SEQUENTIAL) {
syncAll(id, articleText);
return counter;
}
//quorum consistency
//add the article
List<Integer> alreadySent = new ArrayList<Integer>();
Random random = new Random();
for (int i = 0; i < getNW(); ++i) {
int servId = random.nextInt(serverIdCounter + 2); //exclusive
if (servId == serverIdCounter + 1) {
servId = 99;
}
ServerInfo serverInfo = null;
synchronized (ServerLock.register) {
serverInfo = servers.get(servId);
}
if (0 == servId || null == serverInfo
|| alreadySent.contains(servId)) {
--i;
continue;
}
alreadySent.add(servId);
BulletinBoardService client = getClient(serverInfo, servId);
if (-1 == id) {
TimeUtil.delay();
client.writeToServer(articleText);
} else {
TimeUtil.delay();
client.replyToServer(id, articleText);
}
}
return counter;
}
/**
* Reply call from server.
*
* @param articleId
* @param article
* @param type
* @return
* @throws RemoteException
*/
public int replyToCoordinatingServer(int articleId, Article article
, ConsistencyType type) throws RemoteException, InvalidArticleException
, NotBoundException, MalformedURLException {
final String method = CLASS_NAME + ".replyToCoordinatingServer()";
LogUtil.log(method,"Server:"+ Server.getServerId() + " "+ "Replying " + article + " to article id: " + articleId + " in coordinating server");
return writeReply(articleId, article, type);
}
/**
* Generate unique Article ID and send it over to server.
*
* @return article id
* @throws RemoteException
*/
public int getNextArticleID() throws RemoteException {
// this should be as a lot of Servers will simultaneously
// call this method.
synchronized (ServerLock.getID) {
return ++counter;
}
}
/**
* Register a new server. When a server starts up, it should register with
* coordinator.
*
* @return server id
* @throws RemoteException
*/
public RegisterRet register(String ip, int port) throws RemoteException {
RegisterRet ret = null;
synchronized (ServerLock.register) {
++serverIdCounter;
try {
servers.put(serverIdCounter, new ServerInfo(ip, port));
ret = new RegisterRet(serverIdCounter, new ArrayList<ServerInfo>(
servers.values()));
} catch (IllegalIPException e) {
throw new RemoteException(e.getMessage());
}
}
return ret;
}
private int getNR() {
return ServerConfig.getNR();
}
private int getNW() {
if (-1 == ServerConfig.getNW()) {
int size = 0;
synchronized (ServerLock.register) {
size = servers.size();
}
return size;
}
return ServerConfig.getNW();
}
public Set<ServerInfo> getServers() {
HashSet<ServerInfo> hashSet = null;
synchronized (ServerLock.register) {
hashSet = new HashSet<ServerInfo>(servers.values());
}
return hashSet;
}
public Map<Integer, ServerInfo> getServerMap() {
return servers;
}
}
|
package com.epam.rft.atsy.service.impl;
import com.google.common.base.MoreObjects;
import com.epam.rft.atsy.persistence.entities.ApplicationEntity;
import com.epam.rft.atsy.persistence.entities.CandidateEntity;
import com.epam.rft.atsy.persistence.repositories.ApplicationsRepository;
import com.epam.rft.atsy.persistence.repositories.CandidateRepository;
import com.epam.rft.atsy.service.ApplicationsService;
import com.epam.rft.atsy.service.CandidateService;
import com.epam.rft.atsy.service.ConverterService;
import com.epam.rft.atsy.service.domain.CandidateDTO;
import com.epam.rft.atsy.service.exception.DuplicateCandidateException;
import com.epam.rft.atsy.service.request.CandidateFilterRequest;
import com.epam.rft.atsy.service.response.PagingResponse;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.exception.ConstraintViolationException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
/**
 * Default {@link CandidateService} implementation backed by Spring Data
 * repositories; entities are mapped to DTOs through {@link ConverterService}.
 */
@Service
@Slf4j
public class CandidateServiceImpl implements CandidateService {

    @Autowired
    private CandidateRepository candidateRepository;
    @Autowired
    private ApplicationsService applicationsService;
    @Autowired
    private ApplicationsRepository applicationsRepository;
    @Autowired
    private ConverterService converterService;

    /** Loads the candidate with the given id, converted to a DTO. */
    @Transactional(readOnly = true)
    @Override
    public CandidateDTO getCandidate(Long id) {
        Assert.notNull(id);
        return converterService.convert(candidateRepository.findOne(id), CandidateDTO.class);
    }

    /** Loads the candidate owning the given application, or null when the application is unknown. */
    @Transactional(readOnly = true)
    @Override
    public CandidateDTO getCandidateByApplicationID(Long applicationID) {
        Assert.notNull(applicationID);
        ApplicationEntity application = applicationsRepository.findOne(applicationID);
        if (application == null) {
            return null;
        }
        return converterService.convert(application.getCandidateEntity(), CandidateDTO.class);
    }

    /**
     * Returns one page of candidates matching the filter. Missing filter fields
     * are treated as empty strings; sorting is applied only when both sort name
     * and sort order are supplied.
     */
    @Transactional(readOnly = true)
    @Override
    public PagingResponse<CandidateDTO> getCandidatesByFilterRequest(
        CandidateFilterRequest candidateFilterRequest) {
        validateCandidateFilterRequest(candidateFilterRequest);
        PageRequest pageRequest = toPageRequest(candidateFilterRequest);
        String name =
            MoreObjects.firstNonNull(candidateFilterRequest.getCandidateName(), StringUtils.EMPTY);
        String email =
            MoreObjects.firstNonNull(candidateFilterRequest.getCandidateEmail(), StringUtils.EMPTY);
        String phone =
            MoreObjects.firstNonNull(candidateFilterRequest.getCandidatePhone(), StringUtils.EMPTY);
        String positions =
            MoreObjects.firstNonNull(candidateFilterRequest.getCandiadtePositions(), StringUtils.EMPTY);
        Page<CandidateEntity> entityPage = candidateRepository
            .findByCandidateFilterRequest(name, email, phone, positions, pageRequest);
        List<CandidateDTO> candidateDTOs =
            converterService.convert(entityPage.getContent(), CandidateDTO.class);
        return new PagingResponse<>(entityPage.getTotalElements(), candidateDTOs);
    }

    // Builds the page request, honoring sort settings when both are supplied.
    private PageRequest toPageRequest(CandidateFilterRequest request) {
        if (request.getSortName() != null && request.getSortOrder() != null) {
            Sort.Direction direction = Sort.Direction.fromString(request.getSortOrder());
            return new PageRequest(request.getPageNumber(), request.getPageSize(),
                direction, request.getSortName());
        }
        return new PageRequest(request.getPageNumber(), request.getPageSize());
    }

    /** Clears the candidate's positions and removes all of its applications. */
    @Transactional
    @Override
    public void deletePositionsByCandidate(CandidateDTO candidateDTO) {
        candidateDTO.setPositions(null);
        applicationsService.deleteApplicationsByCandidateDTO(candidateDTO);
    }

    /**
     * Persists the candidate (insert or update) and returns its id.
     *
     * @throws DuplicateCandidateException when a uniqueness constraint is violated
     */
    @Transactional
    @Override
    public Long saveOrUpdate(CandidateDTO candidate) {
        Assert.notNull(candidate);
        CandidateEntity entity = converterService.convert(candidate, CandidateEntity.class);
        try {
            return candidateRepository.saveAndFlush(entity).getId();
        } catch (ConstraintViolationException | DataIntegrityViolationException ex) {
            log.error("Save to repository failed.", ex);
            throw new DuplicateCandidateException(candidate.getName(), ex);
        }
    }

    // Rejects requests with missing paging data or unusable sort settings.
    private void validateCandidateFilterRequest(CandidateFilterRequest candidateFilterRequest) {
        Assert.notNull(candidateFilterRequest);
        Assert.notNull(candidateFilterRequest.getPageSize());
        Assert.notNull(candidateFilterRequest.getPageNumber());
        if (candidateFilterRequest.getSortName() != null
            || candidateFilterRequest.getSortOrder() != null) {
            // Sorting requires both fields; reject a half-specified pair.
            Assert.notNull(candidateFilterRequest.getSortName());
            Assert.notNull(candidateFilterRequest.getSortOrder());
            try {
                Sort.Direction.fromString(candidateFilterRequest.getSortOrder());
            } catch (Exception e) {
                throw new IllegalArgumentException(
                    "Invalid sort order: " + candidateFilterRequest.getSortOrder(), e);
            }
            try {
                // Probe construction to surface an invalid sort property early.
                new PageRequest(0, 10, Sort.Direction.fromString(candidateFilterRequest.getSortOrder()),
                    candidateFilterRequest.getSortName());
            } catch (Exception e) {
                throw new IllegalArgumentException(
                    "Invalid sort name: " + candidateFilterRequest.getSortName(), e);
            }
        }
    }
}
|
package gov.nih.nci.evs.browser.utils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.Vector;
import org.LexGrid.LexBIG.DataModel.Collections.AssociatedConceptList;
import org.LexGrid.LexBIG.DataModel.Collections.AssociationList;
import org.LexGrid.LexBIG.DataModel.Collections.LocalNameList;
import org.LexGrid.LexBIG.DataModel.Collections.ResolvedConceptReferenceList;
import org.LexGrid.LexBIG.DataModel.Collections.SortOptionList;
import org.LexGrid.LexBIG.DataModel.Core.AssociatedConcept;
import org.LexGrid.LexBIG.DataModel.Core.Association;
import org.LexGrid.LexBIG.DataModel.Core.CodingSchemeSummary;
import org.LexGrid.LexBIG.DataModel.Core.CodingSchemeVersionOrTag;
import org.LexGrid.LexBIG.DataModel.Core.ConceptReference;
import org.LexGrid.LexBIG.DataModel.Core.NameAndValue;
import org.LexGrid.LexBIG.DataModel.Core.ResolvedConceptReference;
import org.LexGrid.LexBIG.Exceptions.LBException;
import org.LexGrid.LexBIG.Extensions.Generic.LexBIGServiceConvenienceMethods;
import org.LexGrid.LexBIG.Impl.LexBIGServiceImpl;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeGraph;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet;
import org.LexGrid.LexBIG.LexBIGService.LexBIGService;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.PropertyType;
import org.LexGrid.LexBIG.Utility.Constructors;
import org.LexGrid.LexBIG.Utility.ConvenienceMethods;
import org.LexGrid.commonTypes.Property;
import org.LexGrid.commonTypes.PropertyQualifier;
import org.LexGrid.commonTypes.Source;
import org.LexGrid.concepts.Presentation;
import org.apache.commons.lang.StringUtils;
import org.LexGrid.LexBIG.Utility.ConvenienceMethods;
import org.LexGrid.LexBIG.DataModel.Collections.AssociationList;
import org.LexGrid.LexBIG.DataModel.Core.AssociatedConcept;
import org.LexGrid.LexBIG.DataModel.Core.Association;
import org.LexGrid.LexBIG.DataModel.Core.NameAndValue;
import org.LexGrid.LexBIG.DataModel.Core.ResolvedConceptReference;
import org.LexGrid.LexBIG.Exceptions.LBException;
import org.LexGrid.LexBIG.Impl.LexBIGServiceImpl;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeGraph;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet;
import org.LexGrid.LexBIG.LexBIGService.LexBIGService;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.PropertyType;
import org.LexGrid.LexBIG.Utility.Constructors;
import org.LexGrid.LexBIG.DataModel.Collections.ResolvedConceptReferenceList;
import org.LexGrid.LexBIG.DataModel.Core.ResolvedConceptReference;
import org.LexGrid.commonTypes.EntityDescription;
import org.LexGrid.LexBIG.DataModel.Collections.NameAndValueList;
import org.LexGrid.concepts.Concept;
public class MetaTreeUtils {
// Association names used to navigate from a node toward its parents in the Metathesaurus hierarchy.
static String[] hierAssocToParentNodes_ = new String[] { "PAR", "isa", "branch_of", "part_of", "tributary_of" };
// Association names used to navigate from a node toward its children.
static String[] hierAssocToChildNodes_ = new String[] { "CHD", "hasSubtype" };
// Sort option applied during graph resolution so siblings come back in code order.
static SortOptionList sortByCode_ = Constructors.createSortOptionList(new String[] {"code"});
// Placeholder property list used when only entity descriptions are needed (no real properties resolved).
LocalNameList noopList_ = Constructors.createLocalNameList("_noop_");
// Lazily created, cached service handles (see getLexBIGService()/getConvenienceMethods()).
LexBIGServiceConvenienceMethods lbscm_ = null;
LexBIGService lbsvc_ = null;
// Service created eagerly in init(); used by the root-finding methods.
private LexBIGService lbs;
// Coding scheme name of the NCI MetaThesaurus.
private static String NCI_META_THESAURUS = "NCI MetaThesaurus";
// Default source abbreviation (SAB) substituted when callers pass null.
private static String NCI_SOURCE = "NCI";
/** Creates a MetaTreeUtils backed by a remote LexBIG service (see init()). */
public MetaTreeUtils(){
init();
}
// Obtains the LexBIG service from the remote server utility.
// (A local default instance was used previously; kept commented for reference.)
private void init(){
//lbs = LexBIGServiceImpl.defaultInstance();
lbs = RemoteServerUtil.createLexBIGService();
}
// Source Roots //
/**
 * Prints the root concepts of the hierarchy for the given source
 * abbreviation (SAB) to standard output via {@link #displayRoot}.
 *
 * @param sab source abbreviation, e.g. "NCI"
 * @throws Exception if the SAB root cannot be located or resolved
 */
public void getRoots(String sab) throws Exception {
    ResolvedConceptReference sabRoot = resolveReferenceGraphForward(getCodingSchemeRoot(sab));
    AssociationList sourceAssociations = sabRoot.getSourceOf();
    for (Association association : sourceAssociations.getAssociation()) {
        for (AssociatedConcept candidate : association.getAssociatedConcepts().getAssociatedConcept()) {
            // Only display concepts whose association is qualified by this SAB.
            if (!this.isSabQualifiedAssociation(candidate, sab)) {
                continue;
            }
            displayRoot(candidate);
        }
    }
}
/**
 * Collects the root concepts of the hierarchy for the given source
 * abbreviation (SAB) as lightweight references (code + description only).
 *
 * @param sab source abbreviation, e.g. "NCI"
 * @return list of SAB-qualified root concept references
 * @throws Exception if the SAB root cannot be located or resolved
 */
public ResolvedConceptReferenceList getSourceRoots(String sab) throws Exception {
    ResolvedConceptReferenceList roots = new ResolvedConceptReferenceList();
    ResolvedConceptReference sabRoot = resolveReferenceGraphForward(getCodingSchemeRoot(sab));
    AssociationList sourceAssociations = sabRoot.getSourceOf();
    for (Association association : sourceAssociations.getAssociation()) {
        for (AssociatedConcept candidate : association.getAssociatedConcepts().getAssociatedConcept()) {
            if (!this.isSabQualifiedAssociation(candidate, sab)) {
                continue;
            }
            // Copy only the code and description into a fresh reference.
            ResolvedConceptReference rootRef = new ResolvedConceptReference();
            EntityDescription description = new EntityDescription();
            description.setContent(candidate.getEntityDescription().getContent());
            rootRef.setEntityDescription(description);
            rootRef.setCode(candidate.getCode());
            roots.addResolvedConceptReference(rootRef);
        }
    }
    return roots;
}
/**
 * Writes a single root node to standard output as "code - description".
 *
 * @param ac the root concept to display
 */
protected void displayRoot(AssociatedConcept ac){
    StringBuilder line = new StringBuilder();
    line.append(ac.getCode()).append(" - ").append(ac.getEntityDescription().getContent());
    System.out.println(line.toString());
}
/**
 * Gets the UMLS root node of a given SAB.
 *
 * The root is located by restricting the MetaThesaurus concept set to SRC
 * presentations carrying a "source-code" qualifier equal to "V-" + sab.
 * Exactly one match is expected.
 *
 * @param sab source abbreviation, e.g. "NCI"
 * @return the single root concept reference for the SAB
 * @throws LBException if zero or more than one root is found, or the query fails
 */
private ResolvedConceptReference getCodingSchemeRoot(String sab) throws LBException {
CodedNodeSet cns = lbs.getCodingSchemeConcepts(NCI_META_THESAURUS, null);
// Restrict to SRC presentations qualified by source-code = "V-<sab>".
cns.restrictToProperties(null, new PropertyType[] {PropertyType.PRESENTATION}, Constructors.createLocalNameList("SRC"), null, Constructors.createNameAndValueList("source-code", "V-"+sab));
ResolvedConceptReference[] refs = cns.resolveToList(null, null, new PropertyType[] {PropertyType.PRESENTATION}, -1).getResolvedConceptReference();
if(refs.length > 1){
throw new LBException("Found more than one Root for SAB: " + sab);
}
if(refs.length == 0){
throw new LBException("Didn't find a Root for SAB: " + sab);
}
return refs[0];
}
/**
 * Resolve the relationships of a ResolvedConceptReference forward one level.
 *
 * Navigation is restricted to the child-direction associations
 * ("CHD", "hasSubtype") and resolved one level deep, forward only.
 *
 * @param ref focus reference to expand
 * @return the resolved focus node (first element of the result list)
 * @throws Exception if graph resolution fails
 */
private ResolvedConceptReference resolveReferenceGraphForward(ResolvedConceptReference ref) throws Exception {
CodedNodeGraph cng = lbs.getNodeGraph(NCI_META_THESAURUS, null, null);
cng.restrictToAssociations(Constructors.createNameAndValueList(new String[]{"CHD", "hasSubtype"}), null);
// NOTE(review): presumably resolveAsList(focus, fwd=true, bwd=false, depth args...) returns
// the focus node first — confirm against the LexBIG CodedNodeGraph API.
ResolvedConceptReference[] refs = cng.resolveAsList(ref, true, false, 1, 1, null, null, null, -1).getResolvedConceptReference();
return refs[0];
}
/**
 * Determines whether the given associated concept carries an association
 * qualifier naming the given source abbreviation (SAB).
 *
 * @param ac associated concept whose qualifiers are inspected
 * @param sab source abbreviation to look for
 * @return true if a qualifier with name == sab and content == "Source" exists
 */
private boolean isSabQualifiedAssociation(AssociatedConcept ac, String sab){
    NameAndValue[] qualifiers = ac.getAssociationQualifiers().getNameAndValue();
    for (int i = 0; i < qualifiers.length; i++) {
        NameAndValue qualifier = qualifiers[i];
        if (qualifier.getName().equals(sab) && qualifier.getContent().equals("Source")) {
            return true;
        }
    }
    return false;
}
// Tree
// Console output helper; stands in for a UI/logging facility.
private static void Util_displayMessage(String s) {
System.out.println(s);
}
/**
 * Displays an error message and logs the associated exception.
 *
 * Fix: the exception parameter was previously ignored entirely, silently
 * discarding the failure cause; its stack trace is now printed.
 *
 * @param s message to display
 * @param e the exception to log (may be null)
 */
private static void Util_displayAndLogError(String s, Exception e) {
    System.out.println(s);
    if (e != null) {
        e.printStackTrace();
    }
}
/**
 * Diagnostic entry point: resolves the given CUI in the NCI MetaThesaurus,
 * prints concept header information, the hierarchy paths from root for the
 * given source abbreviation, and the neighboring CUIs/AUIs.
 *
 * @param cui concept unique identifier to process
 * @param sab source abbreviation constraining relationships
 * @throws LBException if a LexBIG service call fails
 */
public void run(String cui, String sab) throws LBException {
// Resolve the coding scheme.
/*
CodingSchemeSummary css = Util.promptForCodeSystem();
if (css == null)
return;
String scheme = css.getCodingSchemeURI();
*/
String scheme = "NCI MetaThesaurus";
CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
//csvt.setVersion(css.getRepresentsVersion());
// Resolve the requested concept.
ResolvedConceptReference rcr = resolveConcept(scheme, csvt, cui);
if (rcr == null) {
Util_displayMessage("Unable to resolve a concept for CUI = '" + cui + "'");
return;
}
// Print a header for the item being processed.
Util_displayMessage("============================================================");
Util_displayMessage("Concept Information");;
Util_displayMessage("============================================================");
printHeader(rcr, sab);
// Print the hierarchies for the requested SAB.
Util_displayMessage("");
Util_displayMessage("============================================================");
Util_displayMessage("Hierarchies applicable for CUI " + cui + " for SAB " + sab);
Util_displayMessage("============================================================");
TreeItem ti = new TreeItem("<Start>", "Start of Tree", null);
long ms = System.currentTimeMillis();
int pathsResolved = 0;
// maxLevel of -1 means unlimited depth (mirrors getTreePathData overloads).
int maxLevel = -1;
try {
// Identify the set of all codes on path from root
// to the focus code ...
TreeItem[] pathsFromRoot = buildPathsToRoot(rcr, scheme, csvt, sab, maxLevel);
pathsResolved = pathsFromRoot.length;
for (TreeItem rootItem : pathsFromRoot)
ti.addChild("CHD", rootItem);
} finally {
// Timing is reported even if path construction throws.
System.out.println("Run time (milliseconds): " + (System.currentTimeMillis() - ms) + " to resolve "
+ pathsResolved + " paths from root.");
}
printTree(ti, cui, 0);
// Print the neighboring CUIs/AUIs for this SAB.
Util_displayMessage("");
Util_displayMessage("============================================================");
Util_displayMessage("Neighboring CUIs and AUIs for CUI " + cui + " for SAB " + sab);;
Util_displayMessage("============================================================");
printNeighborhood(scheme, csvt, rcr, sab);
}
/**
 * Convenience overload: builds tree path data with no depth limit (-1).
 * A null {@code sab} defaults to {@code NCI_SOURCE}.
 *
 * @return map of code to its path-from-root TreeItem
 * @throws LBException if a LexBIG service call fails
 */
public HashMap getTreePathData(String scheme, String version, String sab, String code) throws LBException {
    String effectiveSab = (sab == null) ? NCI_SOURCE : sab;
    return getTreePathData(scheme, version, effectiveSab, code, -1);
}
/**
 * Builds tree path data for the given code, creating a fresh remote LexBIG
 * service and convenience-methods extension for the call.
 *
 * @param maxLevel maximum depth to walk, or -1 for unlimited
 * @return map of code to its path-from-root TreeItem
 * @throws LBException if a LexBIG service call fails
 */
public HashMap getTreePathData(String scheme, String version, String sab, String code, int maxLevel) throws LBException {
if (sab == null) sab = NCI_SOURCE;
LexBIGService lbsvc = RemoteServerUtil.createLexBIGService();
LexBIGServiceConvenienceMethods lbscm = (LexBIGServiceConvenienceMethods) lbsvc
.getGenericExtension("LexBIGServiceConvenienceMethods");
lbscm.setLexBIGService(lbsvc);
CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
// Version is optional; an empty csvt resolves the default/tagged version.
if (version != null) csvt.setVersion(version);
return getTreePathData(lbsvc, lbscm, scheme, csvt, sab, code, maxLevel);
}
/**
 * Convenience overload taking pre-built service handles; walks with no
 * depth limit (-1). A null {@code sab} defaults to {@code NCI_SOURCE}.
 *
 * @return map of code to its path-from-root TreeItem
 * @throws LBException if a LexBIG service call fails
 */
public HashMap getTreePathData(LexBIGService lbsvc, LexBIGServiceConvenienceMethods lbscm, String scheme,
CodingSchemeVersionOrTag csvt, String sab, String focusCode) throws LBException {
    String effectiveSab = (sab == null) ? NCI_SOURCE : sab;
    return getTreePathData(lbsvc, lbscm, scheme, csvt, effectiveSab, focusCode, -1);
}
/**
 * Resolves the given CUI and assembles all paths from the hierarchy root to
 * it (constrained to the given SAB) under a synthetic "&lt;Root&gt;" TreeItem.
 *
 * @param maxLevel maximum depth to walk, or -1 for unlimited
 * @return map with a single entry: cui -> dummy root TreeItem, or null if the
 *         CUI cannot be resolved
 * @throws LBException if a LexBIG service call fails
 */
public HashMap getTreePathData(LexBIGService lbsvc, LexBIGServiceConvenienceMethods lbscm, String scheme,
CodingSchemeVersionOrTag csvt, String sab, String cui, int maxLevel) throws LBException {
if (sab == null) sab = NCI_SOURCE;
HashMap hmap = new HashMap();
long ms = System.currentTimeMillis();
ResolvedConceptReference rcr = resolveConcept(scheme, csvt, cui);
if (rcr == null) {
Util_displayMessage("Unable to resolve a concept for CUI = '" + cui + "'");
return null;
}
// Dummy root (place holder)
TreeItem ti = new TreeItem("<Root>", "Root node", null);
int pathsResolved = 0;
try {
// Identify the set of all codes on path from root
// to the focus code ...
TreeItem[] pathsFromRoot = buildPathsToRoot(rcr, scheme, csvt, sab, maxLevel);
pathsResolved = pathsFromRoot.length;
for (TreeItem rootItem : pathsFromRoot) {
ti.addChild("CHD", rootItem);
}
} finally {
// Timing is reported even if path construction throws.
System.out.println("MetaTreeUtils Run time (milliseconds): " + (System.currentTimeMillis() - ms) + " to resolve "
+ pathsResolved + " paths from root.");
}
hmap.put(cui, ti);
return hmap;
}
/**
 * Prints formatted text providing context for the given item:
 * CUI, description, SAB, and the AUI text lines for that SAB.
 *
 * @param rcr resolved concept to describe
 * @param sab source abbreviation used to select atoms
 * @throws LBException if a LexBIG service call fails
 */
protected void printHeader(ResolvedConceptReference rcr, String sab)
throws LBException {
Util_displayMessage("CUI ....... : " + rcr.getConceptCode());
Util_displayMessage("Description : " + StringUtils.abbreviate(rcr.getEntityDescription().getContent(), 60));
Util_displayMessage("SAB ....... : " + sab);
Util_displayMessage("");
Util_displayMessage("AUIs with this CUI associated for this SAB :");
// getAtomText joins entries with '|'; split to print one per line.
for (String line : getAtomText(rcr, sab).split("\\|"))
Util_displayMessage(" {" + line + '}');
}
/**
 * Prints the given tree item and, recursively, every branch beneath it.
 * The focus code is flagged with '>'; expandable nodes are suffixed "[+]";
 * AUI lines (pipe-delimited in {@code ti.auis}) print one per line.
 *
 * @param ti tree item to print
 * @param focusCode code highlighted with the '>' marker
 * @param depth current recursion depth (controls indentation)
 */
public void printTree(TreeItem ti, String focusCode, int depth) {
    StringBuilder indent = new StringBuilder();
    int bars = depth * 2;
    for (int i = 0; i < bars; i++) {
        indent.append("| ");
    }
    String marker = focusCode.equals(ti.code) ? ">" : " ";
    StringBuilder line = new StringBuilder(indent);
    line.append(marker)
        .append(ti.code).append(':')
        .append(StringUtils.abbreviate(ti.text, 60))
        .append(ti.expandable ? " [+]" : "");
    if (ti.auis != null) {
        for (String aui : ti.auis.split("\\|")) {
            line.append('\n').append(indent)
                .append(" {")
                .append(StringUtils.abbreviate(aui, 60))
                .append('}');
        }
    }
    Util_displayMessage(line.toString());
    // Children print one indentation level deeper.
    indent.append("| ");
    for (String association : ti.assocToChildMap.keySet()) {
        Util_displayMessage(indent.toString() + association);
        List<TreeItem> children = ti.assocToChildMap.get(association);
        Collections.sort(children);
        for (TreeItem child : children) {
            printTree(child, focusCode, depth + 1);
        }
    }
}
/**
 * Prints the CUIs and AUIs of concepts neighboring {@code rcr} for the
 * requested source abbreviation (SAB).
 *
 * Fix: the print loop was nested inside the node loop while {@code neighbors}
 * kept accumulating, so neighbors collected for earlier nodes were re-printed
 * once per subsequent node. Printing now happens once, after collection.
 *
 * @throws LBException if a LexBIG service call fails
 */
protected void printNeighborhood(String scheme, CodingSchemeVersionOrTag csvt,
ResolvedConceptReference rcr, String sab)
throws LBException {
    // Resolve neighboring concepts with associations
    // qualified by the SAB.
    CodedNodeGraph neighborsBySource = getLexBIGService().getNodeGraph(scheme, csvt, null);
    neighborsBySource.restrictToAssociations(null, Constructors.createNameAndValueList(sab, "Source"));
    ResolvedConceptReferenceList nodes = neighborsBySource.resolveAsList(
        rcr, true, true, Integer.MAX_VALUE, 1,
        null, new PropertyType[] { PropertyType.PRESENTATION },
        sortByCode_, null, -1);
    List<AssociatedConcept> neighbors = new ArrayList<AssociatedConcept>();
    for (ResolvedConceptReference node : nodes.getResolvedConceptReference()) {
        // Collect SAB-qualified sources and targets ...
        if (node.getSourceOf() != null)
            for (Association assoc : node.getSourceOf().getAssociation())
                for (AssociatedConcept ac : assoc.getAssociatedConcepts().getAssociatedConcept())
                    if (isValidForSAB(ac, sab))
                        neighbors.add(ac);
        if (node.getTargetOf() != null)
            for (Association assoc : node.getTargetOf().getAssociation())
                for (AssociatedConcept ac : assoc.getAssociatedConcepts().getAssociatedConcept())
                    if (isValidForSAB(ac, sab))
                        neighbors.add(ac);
    }
    // Print each collected neighbor exactly once.
    for (ResolvedConceptReference neighbor : neighbors) {
        Util_displayMessage(neighbor.getCode() + ':' +
            StringUtils.abbreviate(neighbor.getEntityDescription().getContent(), 60));
        for (String line : getAtomText(neighbor, sab).split("\\|"))
            Util_displayMessage(" {" + StringUtils.abbreviate(line, 60) + '}');
    }
}
/**
 * Populates child nodes for a single branch of the tree, and indicates
 * whether further expansion (to grandchildren) is possible by setting
 * each child's {@code expandable} flag.
 *
 * @param ti tree item to receive the children
 * @param scheme coding scheme name
 * @param csvt coding scheme version or tag
 * @param sab source abbreviation qualifying associations
 * @param branchRootCode code whose children are being resolved
 * @param codesToExclude codes that must not be added as children
 * @param associationsToNavigate association names to follow
 * @param associationsNavigatedFwd true to navigate source->target, false for reverse
 * @throws LBException if a LexBIG service call fails
 */
protected void addChildren(TreeItem ti, String scheme, CodingSchemeVersionOrTag csvt,
String sab, String branchRootCode, Set<String> codesToExclude,
String[] associationsToNavigate, boolean associationsNavigatedFwd) throws LBException {
LexBIGService lbsvc = getLexBIGService();
// Resolve the next branch, representing children of the given
// code, navigated according to the provided relationship and
// direction. Resolve the children as a code graph, looking 2
// levels deep but leaving the final level unresolved.
CodedNodeGraph cng = lbsvc.getNodeGraph(scheme, csvt, null);
ConceptReference focus = Constructors.createConceptReference(branchRootCode, scheme);
cng = cng.restrictToAssociations(
Constructors.createNameAndValueList(associationsToNavigate),
ConvenienceMethods.createNameAndValueList(sab, "Source"));
ResolvedConceptReferenceList branch = cng.resolveAsList(
focus, associationsNavigatedFwd, !associationsNavigatedFwd,
Integer.MAX_VALUE, 2,
null, new PropertyType[] { PropertyType.PRESENTATION },
sortByCode_, null, -1, true);
// The resolved branch will be represented by the first node in
// the resolved list. The node will be subdivided by source or
// target associations (depending on direction). The associated
// nodes define the children.
for (ResolvedConceptReference node : branch.getResolvedConceptReference()) {
AssociationList childAssociationList = associationsNavigatedFwd ? node.getSourceOf() : node.getTargetOf();
// Process each association defining children ...
for (Association child : childAssociationList.getAssociation()) {
String childNavText = getDirectionalLabel(scheme, csvt, child, associationsNavigatedFwd);
// Each association may have multiple children ...
AssociatedConceptList branchItemList = child.getAssociatedConcepts();
for (AssociatedConcept branchItemNode : branchItemList.getAssociatedConcept())
if (isValidForSAB(branchItemNode, sab)) {
String branchItemCode = branchItemNode.getCode();
// Add here if not in the list of excluded codes.
// This is also where we look to see if another level
// was indicated to be available. If so, mark the
// entry with a '+' to indicate it can be expanded.
if (!codesToExclude.contains(branchItemCode)) {
/*
TreeItem childItem =
new TreeItem(branchItemCode, branchItemNode.getEntityDescription().getContent(), getAtomText(branchItemNode, sab));
*/
TreeItem childItem =
new TreeItem(branchItemCode, branchItemNode.getEntityDescription().getContent());
childItem.expandable = false;
AssociationList grandchildBranch =
associationsNavigatedFwd ? branchItemNode.getSourceOf()
: branchItemNode.getTargetOf();
/*
if (grandchildBranch != null) {
childItem.expandable = true;
}
*/
// A child is expandable only if at least one grandchild
// association is qualified for this SAB (a non-null branch
// alone is not sufficient — see the commented-out variant above).
if (grandchildBranch != null) {
for (Association grandchild : grandchildBranch.getAssociation()) {
java.lang.String association_name = grandchild.getAssociationName();
//System.out.println("association_name: " + association_name);
//String grandchildNavText = getDirectionalLabel(lbscm, scheme, csvt, child, associationsNavigatedFwd);
// Each association may have multiple children ...
AssociatedConceptList grandchildbranchItemList = grandchild.getAssociatedConcepts();
for (AssociatedConcept grandchildbranchItemNode : grandchildbranchItemList.getAssociatedConcept()) {
//System.out.println("\tgrandchildbranchItemNode AssociatedConcept: " + grandchildbranchItemNode.getConceptCode());
if (isValidForSAB(grandchildbranchItemNode, sab)) {
childItem.expandable = true;
break;
}
}
}
}
ti.addChild(childNavText, childItem);
}
}
}
}
}
/**
 * Resolves a single concept for the specified code within the given scheme,
 * matching on the "conceptCode" property with an exact match.
 *
 * @return the resolved reference, or null when the code does not match
 * @throws LBException if a LexBIG service call fails
 */
protected ResolvedConceptReference resolveConcept(String scheme,
CodingSchemeVersionOrTag csvt, String code)
throws LBException {
    CodedNodeSet nodeSet = getLexBIGService().getCodingSchemeConcepts(scheme, csvt);
    nodeSet.restrictToMatchingProperties(ConvenienceMethods.createLocalNameList("conceptCode"),
        null, code, "exactMatch", null);
    // Only one result is needed; resolve at most a single reference.
    ResolvedConceptReferenceList matches = nodeSet.resolveToList(
        null, null, new PropertyType[] { PropertyType.PRESENTATION },
        1);
    if (matches.getResolvedConceptReferenceCount() == 0) {
        return null;
    }
    return matches.getResolvedConceptReference(0);
}
/**
 * Returns a cached instance of a LexBIG service, creating it on first use.
 *
 * @throws LBException declared for interface consistency with callers
 */
protected LexBIGService getLexBIGService() throws LBException {
    if (lbsvc_ == null) {
        // Lazily create the remote service and cache it for reuse.
        // (A local default instance was used previously.)
        lbsvc_ = RemoteServerUtil.createLexBIGService();
    }
    return lbsvc_;
}
/**
 * Returns a cached instance of the LexBIG convenience-methods extension,
 * creating and wiring it on first use.
 *
 * Fix: {@code setLexBIGService} was previously re-invoked on every call even
 * when the cached instance already existed; it is now set once at creation.
 *
 * @throws LBException if the extension cannot be obtained
 */
protected LexBIGServiceConvenienceMethods getConvenienceMethods() throws LBException {
    if (lbscm_ == null) {
        lbscm_ = (LexBIGServiceConvenienceMethods)
            getLexBIGService().getGenericExtension("LexBIGServiceConvenienceMethods");
        // getLexBIGService() has just populated lbsvc_.
        lbscm_.setLexBIGService(lbsvc_);
    }
    return lbscm_;
}
/**
 * Returns the display label for the given association in the navigated
 * direction, falling back to the raw association name (prefixed with
 * "[Inverse]" for reverse navigation) when no registered name is blank-free.
 *
 * @throws LBException if a LexBIG service call fails
 */
protected String getDirectionalLabel(LexBIGServiceConvenienceMethods lbscm, String scheme, CodingSchemeVersionOrTag csvt,
Association assoc, boolean navigatedFwd) throws LBException {
    String associationName = assoc.getAssociationName();
    String label;
    if (navigatedFwd) {
        label = lbscm.getAssociationForwardName(associationName, scheme, csvt);
    } else {
        label = lbscm.getAssociationReverseName(associationName, scheme, csvt);
    }
    if (StringUtils.isBlank(label)) {
        label = (navigatedFwd ? "" : "[Inverse]") + associationName;
    }
    return label;
}
/**
 * Returns the display label for the given association in the navigated
 * direction, creating a remote service/extension for the lookup.
 *
 * Fix: the label-derivation logic duplicated the sibling overload verbatim;
 * it now delegates to {@link #getDirectionalLabel(LexBIGServiceConvenienceMethods,
 * String, CodingSchemeVersionOrTag, Association, boolean)} so the two stay
 * consistent. Behavior is unchanged.
 *
 * @throws LBException if a LexBIG service call fails
 */
protected String getDirectionalLabel(String scheme, CodingSchemeVersionOrTag csvt,
Association assoc, boolean navigatedFwd) throws LBException {
    //LexBIGServiceConvenienceMethods lbscm = getConvenienceMethods();
    LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
    LexBIGServiceConvenienceMethods lbscm = (LexBIGServiceConvenienceMethods) lbSvc
        .getGenericExtension("LexBIGServiceConvenienceMethods");
    lbscm.setLexBIGService(lbSvc);
    return getDirectionalLabel(lbscm, scheme, csvt, assoc, navigatedFwd);
}
/**
 * Returns a string representing the AUIs and text presentations applicable
 * only for the given source abbreviation (SAB). Each entry has the form
 * sab:auiText:'presentation' and entries are delimited by '|'.
 *
 * @return the joined atom text, or "&lt;No Match for SAB&gt;" when none match
 */
protected String getAtomText(ResolvedConceptReference rcr, String sab) {
    StringBuilder atoms = new StringBuilder();
    for (Presentation presentation : getSourcePresentations(rcr, sab)) {
        if (atoms.length() > 0) {
            atoms.append('|');
        }
        atoms.append(sab).append(':')
             .append(getAtomText(presentation)).append(':')
             .append('\'')
             .append(presentation.getValue().getContent())
             .append('\'');
    }
    if (atoms.length() == 0) {
        return "<No Match for SAB>";
    }
    return atoms.toString();
}
/**
 * Returns text for AUI qualifiers of the given property. Iterates the
 * property qualifiers; typically only one AUI is expected, but multiple
 * discovered values are delimited by '|'.
 *
 * @return the joined AUI values, or "&lt;No AUI&gt;" when none are present
 */
protected String getAtomText(Property prop) {
    StringBuilder auis = new StringBuilder();
    for (PropertyQualifier qualifier : prop.getPropertyQualifier()) {
        if (!"AUI".equalsIgnoreCase(qualifier.getPropertyQualifierName())) {
            continue;
        }
        if (auis.length() > 0) {
            auis.append('|');
        }
        auis.append(qualifier.getValue().getContent());
    }
    if (auis.length() == 0) {
        return "<No AUI>";
    }
    return auis.toString();
}
/**
 * Returns all assigned presentations qualified by the given source
 * abbreviation (SAB). If the associated entity was not resolved, an
 * empty array is returned.
 */
protected Presentation[] getSourcePresentations(ResolvedConceptReference rcr, String sab) {
    List<Presentation> matched = new ArrayList<Presentation>();
    // Only inspect presentations when the entity itself was resolved.
    if (rcr.getEntity() != null) {
        for (Presentation presentation : rcr.getEntity().getPresentation()) {
            for (Source source : presentation.getSource()) {
                if (sab.equalsIgnoreCase(source.getContent())) {
                    matched.add(presentation);
                }
            }
        }
    }
    return matched.toArray(new Presentation[matched.size()]);
}
/**
 * Indicates whether the given associated concept carries a "Source"
 * association qualifier naming the given source abbreviation (SAB).
 *
 * @return true if such a qualifier exists; false otherwise
 */
protected boolean isValidForSAB(AssociatedConcept ac, String sab) {
    for (NameAndValue qualifier : ac.getAssociationQualifiers().getNameAndValue()) {
        boolean isSourceQualifier = "Source".equalsIgnoreCase(qualifier.getContent());
        if (isSourceQualifier && sab.equalsIgnoreCase(qualifier.getName())) {
            return true;
        }
    }
    return false;
}
/**
 * Convenience overload: resolves subconcepts navigating associations forward.
 *
 * @return map of code to its TreeItem of children
 */
public HashMap getSubconcepts(String scheme, String version, String code, String sab)
{
return getSubconcepts(scheme, version, code, sab, true);
}
/**
 * Resolves the immediate sub/super-concepts of {@code code} for the given SAB,
 * marking each child expandable when a grandchild level exists.
 *
 * @param associationsNavigatedFwd true to navigate source->target (children),
 *        false for the reverse direction
 * @return map with a single entry: code -> TreeItem (empty map on failure)
 */
public HashMap getSubconcepts(String scheme, String version, String code, String sab, boolean associationsNavigatedFwd)
{
HashMap hmap = new HashMap();
TreeItem ti = null;
long ms = System.currentTimeMillis();
Set<String> codesToExclude = Collections.EMPTY_SET;
// NOTE(review): 'fwd' is hard-coded true, so child-direction associations
// (CHD/hasSubtype) are always used regardless of associationsNavigatedFwd;
// direction is instead controlled by the resolveAsList flags below — confirm intent.
boolean fwd = true;
String[] associationsToNavigate = fwd ? hierAssocToChildNodes_ : hierAssocToParentNodes_;
//boolean associationsNavigatedFwd = true;
CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
if (version != null) csvt.setVersion(version);
ResolvedConceptReferenceList matches = null;
//Vector v = new Vector();
try {
LexBIGService lbsvc = RemoteServerUtil.createLexBIGService();
LexBIGServiceConvenienceMethods lbscm = (LexBIGServiceConvenienceMethods) lbsvc
.getGenericExtension("LexBIGServiceConvenienceMethods");
lbscm.setLexBIGService(lbsvc);
String name = getCodeDescription(lbsvc, scheme, csvt, code);
ti = new TreeItem(code, name);
ti.expandable = false;
// Resolve the next branch, representing children of the given
// code, navigated according to the provided relationship and
// direction. Resolve the children as a code graph, looking 2
// levels deep but leaving the final level unresolved.
CodedNodeGraph cng = lbsvc.getNodeGraph(scheme, csvt, null);
ConceptReference focus = Constructors.createConceptReference(code, scheme);
cng = cng.restrictToAssociations(
Constructors.createNameAndValueList(associationsToNavigate),
ConvenienceMethods.createNameAndValueList(sab, "Source"));
ResolvedConceptReferenceList branch = cng.resolveAsList(
focus, associationsNavigatedFwd, !associationsNavigatedFwd,
Integer.MAX_VALUE, 2,
null, new PropertyType[] { PropertyType.PRESENTATION },
sortByCode_, null, -1, true);
// The resolved branch will be represented by the first node in
// the resolved list. The node will be subdivided by source or
// target associations (depending on direction). The associated
// nodes define the children.
for (ResolvedConceptReference node : branch.getResolvedConceptReference()) {
AssociationList childAssociationList = associationsNavigatedFwd ? node.getSourceOf() : node.getTargetOf();
// Process each association defining children ...
for (Association child : childAssociationList.getAssociation()) {
String childNavText = getDirectionalLabel(lbscm, scheme, csvt, child, associationsNavigatedFwd);
// Each association may have multiple children ...
AssociatedConceptList branchItemList = child.getAssociatedConcepts();
for (AssociatedConcept branchItemNode : branchItemList.getAssociatedConcept()) {
if (isValidForSAB(branchItemNode, sab)) {
String branchItemCode = branchItemNode.getCode();
// Add here if not in the list of excluded codes.
// This is also where we look to see if another level
// was indicated to be available. If so, mark the
// entry with a '+' to indicate it can be expanded.
if (!codesToExclude.contains(branchItemCode)) {
ti.expandable = true;
TreeItem childItem =
new TreeItem(branchItemCode,
branchItemNode.getEntityDescription().getContent());
// Any non-null grandchild branch marks the child expandable
// (unlike addChildren, no SAB check is applied at this level).
AssociationList grandchildBranch =
associationsNavigatedFwd ? branchItemNode.getSourceOf()
: branchItemNode.getTargetOf();
if (grandchildBranch != null)
childItem.expandable = true;
ti.addChild(childNavText, childItem);
}
}
}
}
}
hmap.put(code, ti);
} catch (Exception ex) {
ex.printStackTrace();
}
System.out.println("Run time (milliseconds) getSubconcepts: " + (System.currentTimeMillis() - ms) + " to resolve " );
return hmap;
}
// Helper Methods
/**
 * Returns the entity description for the given code within the scheme.
 *
 * @return the description content, or "&lt;Not assigned&gt;" when the code
 *         does not resolve or carries no description
 * @throws LBException if a LexBIG service call fails
 */
protected String getCodeDescription(LexBIGService lbsvc, String scheme, CodingSchemeVersionOrTag csvt, String code)
throws LBException {
    CodedNodeSet nodeSet = lbsvc.getCodingSchemeConcepts(scheme, csvt);
    nodeSet = nodeSet.restrictToCodes(Constructors.createConceptReferenceList(code, scheme));
    // noopList_ requests no real properties; only the description is needed.
    ResolvedConceptReferenceList resolved = nodeSet.resolveToList(null, noopList_, null, 1);
    if (resolved.getResolvedConceptReferenceCount() > 0) {
        EntityDescription description = resolved.getResolvedConceptReference(0).getEntityDescription();
        if (description != null) {
            return description.getContent();
        }
    }
    return "<Not assigned>";
}
/**
 * Returns the entity description for the given resolved concept reference.
 *
 * @return the description content, or "&lt;Not assigned&gt;" when absent
 * @throws LBException declared for interface consistency with callers
 */
protected String getCodeDescription(ResolvedConceptReference ref) throws LBException {
    EntityDescription description = ref.getEntityDescription();
    return (description == null) ? "<Not assigned>" : description.getContent();
}
/**
 * Collects the top-level nodes (depth 0..1) beneath the given tree item.
 *
 * @return list of ResolvedConceptReference entries for the top nodes
 */
public List getTopNodes(TreeItem ti) {
    List collected = new ArrayList();
    getTopNodes(ti, collected, 0, 1);
    return collected;
}
/**
 * Recursively collects nodes with children between the current and maximum
 * depth, converting each (except the synthetic "Root node") into a
 * ResolvedConceptReference appended to {@code list}.
 *
 * @param ti current tree item
 * @param list accumulator for results (mutated in place)
 * @param currLevel current recursion depth
 * @param maxLevel maximum depth to descend
 */
public void getTopNodes(TreeItem ti, List list, int currLevel, int maxLevel) {
// NOTE(review): reassigning a null 'list' parameter only affects this frame —
// results collected into the fresh list are lost to the caller; confirm intent.
if (list == null) list = new ArrayList();
if (currLevel > maxLevel) return;
if (ti.assocToChildMap.keySet().size() > 0) {
// Skip the synthetic placeholder root inserted by getTreePathData.
if (ti.text.compareTo("Root node") != 0)
{
ResolvedConceptReference rcr = new ResolvedConceptReference();
rcr.setConceptCode(ti.code);
EntityDescription entityDescription = new EntityDescription();
entityDescription.setContent(ti.text);
rcr.setEntityDescription(entityDescription);
list.add(rcr);
}
}
for (String association : ti.assocToChildMap.keySet()) {
List<TreeItem> children = ti.assocToChildMap.get(association);
Collections.sort(children);
for (TreeItem childItem : children) {
getTopNodes(childItem, list, currLevel+1, maxLevel);
}
}
}
/**
 * Debug helper: prints the first tree in the map — each association name,
 * its children, and whether each child is expandable.
 *
 * Fixes: the catch block previously swallowed every exception silently
 * (it now prints the stack trace), and an unused counter was removed.
 *
 * @param hmap map of code -> TreeItem, as produced by getSubconcepts/getTreePathData
 */
public static void dumpTreeItems(HashMap hmap) {
    try {
        Set keyset = hmap.keySet();
        Object[] objs = keyset.toArray();
        // Only the first entry is dumped (maps produced here hold one entry).
        String code = (String) objs[0];
        TreeItem ti = (TreeItem) hmap.get(code);
        for (String association : ti.assocToChildMap.keySet()) {
            System.out.println("\nassociation: " + association);
            List<TreeItem> children = ti.assocToChildMap.get(association);
            for (TreeItem childItem : children) {
                System.out.println(childItem.text + "(" + childItem.code + ")");
                if (childItem.expandable)
                {
                    System.out.println("\tnode.expandable");
                } else {
                    System.out.println("\tnode.NOT expandable");
                }
            }
        }
    } catch (Exception e) {
        // Previously swallowed silently; surface the failure for diagnosis.
        e.printStackTrace();
    }
}
/**
 * Diagnostic entry point: resolves the given code, then dumps its
 * subconcepts navigated forward and backward, with timings.
 *
 * @param scheme coding scheme name
 * @param version scheme version (may be null)
 * @param code concept code to inspect
 */
public void run(String scheme, String version, String code) {
CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
ResolvedConceptReference rcr = null;
// NOTE(review): resolution failures are swallowed here and reported
// below only as an unresolvable code — confirm this is intended.
try {
rcr = resolveConcept(scheme, csvt, code);
} catch (Exception ex) {
}
if (rcr == null) {
Util_displayMessage("Unable to resolve a concept for CUI = '" + code + "'");
// NOTE(review): System.exit in a utility method terminates the host JVM;
// acceptable only if this is used strictly as a command-line tool.
System.exit(1);
}
String name = null;
try {
name = getCodeDescription(rcr);
} catch (Exception ex) {
name = "Unknown";
}
System.out.println("Coding scheme: " + scheme);
System.out.println("code: " + code);
System.out.println("name: " + name);
String sab = "NCI";
//boolean associationsNavigatedFwd = true;
Long startTime = System.currentTimeMillis();
HashMap hmap1 = getSubconcepts(scheme, version, code, sab, true);
System.out.println("Call getSubconcepts true took: " + (System.currentTimeMillis() - startTime) + "ms");
dumpTreeItems(hmap1);
startTime = System.currentTimeMillis();
HashMap hmap2 = getSubconcepts(scheme, version, code, sab, false);
System.out.println("Call getSubconcepts false took: " + (System.currentTimeMillis() - startTime) + "ms");
dumpTreeItems(hmap2);
}
/**
 * Formats a reference for display as "[description(code)]".
 */
protected String getDisplayRef(ResolvedConceptReference ref){
    StringBuilder display = new StringBuilder("[");
    display.append(ref.getEntityDescription().getContent())
           .append("(")
           .append(ref.getConceptCode())
           .append(")]");
    return display.toString();
}
/**
 * Resolves the sub/super-concepts of {@code code} reachable via a single
 * named association, de-duplicating children and marking each expandable
 * when it has at least one SAB-qualified grandchild.
 *
 * @param asso_name the single association name to navigate
 * @param associationsNavigatedFwd true to navigate source->target, false for reverse
 * @return map with a single entry: code -> TreeItem (empty map on failure)
 */
public HashMap getSubconcepts(String scheme, String version, String code, String sab, String asso_name, boolean associationsNavigatedFwd) {
// Tracks child codes already added so duplicates are skipped.
HashSet hset = new HashSet();
HashMap hmap = new HashMap();
TreeItem ti = null;
Vector w = new Vector();
long ms = System.currentTimeMillis();
Set<String> codesToExclude = Collections.EMPTY_SET;
boolean fwd = true;
CodingSchemeVersionOrTag csvt = new CodingSchemeVersionOrTag();
try {
LexBIGService lbSvc = RemoteServerUtil.createLexBIGService();
LexBIGServiceConvenienceMethods lbscm = (LexBIGServiceConvenienceMethods) lbSvc
.getGenericExtension("LexBIGServiceConvenienceMethods");
lbscm.setLexBIGService(lbSvc);
String name = getCodeDescription(lbSvc, scheme, csvt, code);
ti = new TreeItem(code, name);
ti.expandable = false;
CodedNodeGraph cng = null;
ResolvedConceptReferenceList branch = null;
// NOTE(review): 'version' is not applied to the graph here (null is passed) —
// confirm whether the csvt/version should be forwarded.
cng = lbSvc.getNodeGraph(scheme, null, null);
NameAndValueList nvl = null;
if (sab != null) nvl = ConvenienceMethods.createNameAndValueList(sab, "Source");
cng = cng.restrictToAssociations(Constructors.createNameAndValueList(new String[]{asso_name}), nvl);
branch = cng.resolveAsList(Constructors.createConceptReference(code, scheme),
associationsNavigatedFwd, !associationsNavigatedFwd,
Integer.MAX_VALUE, 2,
null, new PropertyType[] { PropertyType.PRESENTATION },
null, null, -1);
for (ResolvedConceptReference node : branch.getResolvedConceptReference()) {
AssociationList childAssociationList =
associationsNavigatedFwd ? node.getSourceOf()
: node.getTargetOf();
// Process each association defining children ...
for (Association child : childAssociationList.getAssociation()) {
String childNavText = getDirectionalLabel(lbscm, scheme, csvt, child, associationsNavigatedFwd);
// Each association may have multiple children ...
AssociatedConceptList branchItemList = child.getAssociatedConcepts();
for (AssociatedConcept branchItemNode : branchItemList.getAssociatedConcept()) {
//System.out.println("AssociatedConcept: " + branchItemNode.getConceptCode());
if (isValidForSAB(branchItemNode, sab)) {
String branchItemCode = branchItemNode.getCode();
// Add here if not in the list of excluded codes.
// This is also where we look to see if another level
// was indicated to be available. If so, mark the
// entry with a '+' to indicate it can be expanded.
if (!codesToExclude.contains(branchItemCode)) {
if (!hset.contains(branchItemCode)) {
hset.add(branchItemCode);
TreeItem childItem =
new TreeItem(branchItemCode, branchItemNode.getEntityDescription().getContent());
childItem.expandable = false;
AssociationList grandchildBranch =
associationsNavigatedFwd ? branchItemNode.getSourceOf()
: branchItemNode.getTargetOf();
// A child is expandable only when at least one grandchild
// association is qualified for this SAB.
if (grandchildBranch != null) {
for (Association grandchild : grandchildBranch.getAssociation()) {
java.lang.String association_name = grandchild.getAssociationName();
//System.out.println("association_name: " + association_name);
//String grandchildNavText = getDirectionalLabel(lbscm, scheme, csvt, child, associationsNavigatedFwd);
// Each association may have multiple children ...
AssociatedConceptList grandchildbranchItemList = grandchild.getAssociatedConcepts();
for (AssociatedConcept grandchildbranchItemNode : grandchildbranchItemList.getAssociatedConcept()) {
//System.out.println("\tgrandchildbranchItemNode AssociatedConcept: " + grandchildbranchItemNode.getConceptCode());
if (isValidForSAB(grandchildbranchItemNode, sab)) {
childItem.expandable = true;
break;
}
}
}
}
ti.addChild(childNavText, childItem);
ti.expandable = true;
}
}
}
}
}
}
hmap.put(code, ti);
} catch (Exception ex) {
ex.printStackTrace();
}
System.out.println("Run time (milliseconds) getSubconcepts: " + (System.currentTimeMillis() - ms) + " to resolve " );
return hmap;
}
/**
 * Concatenates the names of all "Source" qualifiers attached to the
 * given associated concept, separated by the legacy " ;" delimiter.
 *
 * @param ac associated concept whose qualifiers are scanned
 * @return the joined source names, or an empty string when none match
 */
protected String getAssociationSourceString(AssociatedConcept ac){
    StringBuilder sources = new StringBuilder();
    int matches = 0;
    for (NameAndValue qualifier : ac.getAssociationQualifiers().getNameAndValue()) {
        // Only qualifiers whose content is "Source" (case-insensitive) count.
        if (qualifier.getContent().compareToIgnoreCase("Source") != 0) {
            continue;
        }
        matches++;
        if (matches > 1) {
            sources.append(" ;");
        }
        sources.append(qualifier.getName());
    }
    return sources.toString();
}
/**
 * Collects the name of every "Source" qualifier on the given associated
 * concept, preserving qualifier order.
 *
 * @param ac associated concept whose qualifiers are scanned
 * @return a Vector of source names (possibly empty)
 */
protected Vector getAssociationSources(AssociatedConcept ac){
    Vector sources = new Vector();
    for (NameAndValue qualifier : ac.getAssociationQualifiers().getNameAndValue()) {
        // Case-insensitive match on the qualifier content.
        if ("Source".compareToIgnoreCase(qualifier.getContent()) == 0) {
            sources.add(qualifier.getName());
        }
    }
    return sources;
}
/**
 * Builds tree items that represent the root and core concepts of all
 * resolved paths from the referenced concept up to the hierarchy root.
 *
 * @param rcr      starting concept; becomes the leaf of every path
 * @param scheme   coding scheme name
 * @param csvt     coding scheme version or tag
 * @param sab      source abbreviation used to restrict relationships
 * @param maxLevel maximum number of levels to climb, or -1 for no limit
 * @return root tree items discovered during child-to-parent processing
 * @throws LBException if graph resolution fails
 */
protected TreeItem[] buildPathsToRoot(ResolvedConceptReference rcr,
    String scheme, CodingSchemeVersionOrTag csvt,
    String sab, int maxLevel) throws LBException {
    // Starting point for tree building: the focus concept itself.
    TreeItem ti =
        new TreeItem(rcr.getCode(), rcr.getEntityDescription().getContent(),
            getAtomText(rcr, sab));
    // Accumulator for hierarchy roots found while walking upstream.
    Set<TreeItem> rootItems = new HashSet<TreeItem>();
    // child|parent links already traversed (cycle guard).
    Set<String> visited_links = new HashSet<String>();
    // Natural flow of hierarchy relations moves forward from tree root to
    // leaves. Build the paths to root here by processing upstream
    // (child to parent) relationships.
    buildPathsToUpperNodes(
        ti, rcr, scheme, csvt, sab,
        new HashMap<String, TreeItem>(),
        rootItems, visited_links, maxLevel, 0);
    return rootItems.toArray(new TreeItem[rootItems.size()]);
}
/**
 * Reports whether the given parent tree item already has a child with the
 * specified concept code under any association.
 *
 * @param tiParent parent item to inspect (may be null)
 * @param code     concept code to look for
 * @return true when a child with the code exists, false otherwise
 */
protected boolean hasChildren(TreeItem tiParent, String code) {
    // A null parent or an uninitialized child map cannot contain the code.
    if (tiParent == null || tiParent.assocToChildMap == null) {
        return false;
    }
    // Scan every association bucket for a matching child code.
    for (List<TreeItem> children : tiParent.assocToChildMap.values()) {
        for (TreeItem child : children) {
            if (child.code.compareTo(code) == 0) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Add all hierarchical paths to root that start from the
 * referenced concept and move backward in the tree. If
 * the natural flow of relations is thought of moving from tree
 * root to leaves, this method processes nodes in the
 * reverse direction (from child to parent).
 *
 * @param ti            tree item representing {@code rcr}; added as a child
 *                      of each discovered parent item
 * @param rcr           concept whose upstream (parent) nodes are resolved
 * @param scheme        coding scheme name
 * @param csvt          coding scheme version or tag
 * @param sab           source abbreviation restricting associations
 * @param code2Tree     cache of already-processed codes to their tree items
 * @param roots         accumulator for items found to be hierarchy roots
 * @param visited_links child|parent links already traversed (cycle guard)
 * @param maxLevel      maximum recursion depth, or -1 for unlimited
 * @param currLevel     current recursion depth (starts at 0)
 * @throws LBException if graph resolution fails
 */
protected void buildPathsToUpperNodes(TreeItem ti, ResolvedConceptReference rcr,
    String scheme, CodingSchemeVersionOrTag csvt,
    String sab, Map<String, TreeItem> code2Tree,
    Set<TreeItem> roots, Set<String> visited_links, int maxLevel, int currLevel)
    throws LBException {
    // Stop when the depth limit is exceeded (-1 disables the limit).
    //if (maxLevel != -1 && currLevel >= maxLevel)
    if (maxLevel != -1 && currLevel > maxLevel)
    {
        return;
    }
    // Only need to process a code once ...
    if (code2Tree.containsKey(rcr.getCode()))
        return;
    // Cache for future reference.
    code2Tree.put(rcr.getCode(), ti);
    // UMLS relations can be defined with forward direction
    // being parent to child or child to parent on a source
    // by source basis. Iterate twice to ensure completeness;
    // once navigating child to parent relations forward
    // and once navigating parent to child relations
    // backward. Both have the net effect of navigating
    // from the bottom of the hierarchy to the top.
    boolean isRoot = true;
    for (int i = 0; i <= 1; i++) {
        boolean fwd = i < 1;
        String[] upstreamAssoc = fwd ? hierAssocToParentNodes_ : hierAssocToChildNodes_;
        // Define a code graph for all relationships tagged with
        // the specified sab.
        CodedNodeGraph graph = getLexBIGService().getNodeGraph(scheme, csvt, null);
        graph.restrictToAssociations(
            ConvenienceMethods.createNameAndValueList(upstreamAssoc),
            ConvenienceMethods.createNameAndValueList(sab, "Source"));
        // Resolve one hop, retrieving presentations for
        // comparison of source assignments.
        ResolvedConceptReference[] refs = graph.resolveAsList(
            rcr, fwd, !fwd, Integer.MAX_VALUE, 1,
            null, new PropertyType[] { PropertyType.PRESENTATION },
            sortByCode_, null, -1).getResolvedConceptReference();
        // Create a new tree item for each upstream node, add the current
        // tree item as a child, and recurse to go higher (if available).
        if (refs.length > 0) {
            // Each associated concept represents an upstream branch.
            AssociationList aList = fwd ? refs[0].getSourceOf() : refs[0].getTargetOf();
            for (Association assoc : aList.getAssociation()) {
                // Go through the concepts one by one, adding the
                // current tree item as a child of a new tree item
                // representing the upstream node. If a tree item
                // already exists for the parent, we reuse it to
                // keep a single branch per parent.
                for (AssociatedConcept refParent : assoc.getAssociatedConcepts().getAssociatedConcept())
                    if (isValidForSAB(refParent, sab)) {
                        // Fetch the term for this context ...
                        Presentation[] sabMatch = getSourcePresentations(refParent, sab);
                        if (sabMatch.length > 0) {
                            // We need to take into account direction of
                            // navigation on each pass to get the right label.
                            String directionalName = getDirectionalLabel(scheme, csvt, assoc, !fwd);
                            // Check for a previously registered item for the
                            // parent. If found, re-use it. Otherwise, create
                            // a new parent tree item.
                            String parentCode = refParent.getCode();
                            // Skip links (child|parent pairs) already visited.
                            String link = rcr.getConceptCode() + "|" + parentCode;
                            if (!visited_links.contains(link)) {
                                visited_links.add(link);
                                TreeItem tiParent = code2Tree.get(parentCode);
                                if (tiParent == null) {
                                    // Create a new tree item.
                                    tiParent =
                                        new TreeItem(parentCode, refParent.getEntityDescription().getContent(),
                                            getAtomText(refParent, sab));
                                    // Add immediate children of the parent code with an
                                    // indication of sub-nodes (+). Codes already
                                    // processed as part of the path are ignored since
                                    // they are handled through recursion.
                                    String[] downstreamAssoc = fwd ? hierAssocToChildNodes_ : hierAssocToParentNodes_;
                                    addChildren(tiParent, scheme, csvt, sab, parentCode, code2Tree.keySet(),
                                        downstreamAssoc, fwd);
                                    // Try to go higher through recursion.
                                    buildPathsToUpperNodes(tiParent, refParent,
                                        scheme, csvt, sab, code2Tree, roots, visited_links, maxLevel, currLevel+1);
                                }
                                // Add the child (eliminate redundancy -- e.g., hasSubtype and CHD)
                                if (!hasChildren(tiParent, ti.code)) {
                                    tiParent.addChild(directionalName, ti);
                                    //KLO
                                    tiParent.expandable = true;
                                }
                            }
                            // At least one valid parent found, so this is not a root.
                            isRoot = false;
                        }
                    }
            }
        }
    }
    // Nodes cut off at the depth limit are treated as roots of the result.
    if (maxLevel != -1 && currLevel == maxLevel) isRoot = true;
    if (isRoot) {
        System.out.println("================ Adding " + ti.code + " " + ti.text + " to roots.");
        roots.add(ti);
    }
}
/**
 * Variant of {@code buildPathsToUpperNodes} that navigates a single,
 * caller-supplied set of upstream associations in a fixed direction
 * instead of making the two-pass (forward/backward) traversal.
 *
 * @param ti            tree item representing {@code rcr}; added as a child
 *                      of each discovered parent item
 * @param rcr           concept whose upstream (parent) nodes are resolved
 * @param scheme        coding scheme name
 * @param csvt          coding scheme version or tag
 * @param sab           source abbreviation restricting associations
 * @param code2Tree     cache of already-processed codes to their tree items
 * @param roots         accumulator for items found to be hierarchy roots
 * @param visited_links child|parent links already traversed (cycle guard)
 * @param maxLevel      maximum recursion depth, or -1 for unlimited
 * @param currLevel     current recursion depth (starts at 0)
 * @param upstreamAssoc association names to follow toward the root
 * @param fwd           true to resolve source-of relations, false target-of
 * @throws LBException if graph resolution fails
 */
protected void buildPathsToUpperNodes(TreeItem ti, ResolvedConceptReference rcr,
    String scheme, CodingSchemeVersionOrTag csvt,
    String sab, Map<String, TreeItem> code2Tree,
    Set<TreeItem> roots, Set<String> visited_links, int maxLevel, int currLevel, String[] upstreamAssoc, boolean fwd)
    throws LBException {
    // Stop when the depth limit is exceeded (-1 disables the limit).
    //if (maxLevel != -1 && currLevel >= maxLevel)
    if (maxLevel != -1 && currLevel > maxLevel)
    {
        return;
    }
    // Only need to process a code once ...
    if (code2Tree.containsKey(rcr.getCode()))
        return;
    // Cache for future reference.
    code2Tree.put(rcr.getCode(), ti);
    // Unlike the two-argument-shorter overload, this method performs a
    // single pass using the supplied upstreamAssoc/fwd parameters.
    boolean isRoot = true;
    // Define a code graph for all relationships tagged with
    // the specified sab.
    CodedNodeGraph graph = getLexBIGService().getNodeGraph(scheme, csvt, null);
    graph.restrictToAssociations(
        ConvenienceMethods.createNameAndValueList(upstreamAssoc),
        ConvenienceMethods.createNameAndValueList(sab, "Source"));
    // Resolve one hop, retrieving presentations for
    // comparison of source assignments.
    ResolvedConceptReference[] refs = graph.resolveAsList(
        rcr, fwd, !fwd, Integer.MAX_VALUE, 1,
        null, new PropertyType[] { PropertyType.PRESENTATION },
        sortByCode_, null, -1).getResolvedConceptReference();
    // Create a new tree item for each upstream node, add the current
    // tree item as a child, and recurse to go higher (if available).
    if (refs.length > 0) {
        // Each associated concept represents an upstream branch.
        AssociationList aList = fwd ? refs[0].getSourceOf() : refs[0].getTargetOf();
        for (Association assoc : aList.getAssociation()) {
            // Go through the concepts one by one, adding the
            // current tree item as a child of a new tree item
            // representing the upstream node. If a tree item
            // already exists for the parent, we reuse it to
            // keep a single branch per parent.
            for (AssociatedConcept refParent : assoc.getAssociatedConcepts().getAssociatedConcept())
                if (isValidForSAB(refParent, sab)) {
                    // Fetch the term for this context ...
                    Presentation[] sabMatch = getSourcePresentations(refParent, sab);
                    if (sabMatch.length > 0) {
                        // We need to take into account direction of
                        // navigation on each pass to get the right label.
                        String directionalName = getDirectionalLabel(scheme, csvt, assoc, !fwd);
                        // Check for a previously registered item for the
                        // parent. If found, re-use it. Otherwise, create
                        // a new parent tree item.
                        String parentCode = refParent.getCode();
                        // Skip links (child|parent pairs) already visited.
                        String link = rcr.getConceptCode() + "|" + parentCode;
                        if (!visited_links.contains(link)) {
                            visited_links.add(link);
                            TreeItem tiParent = code2Tree.get(parentCode);
                            if (tiParent == null) {
                                // Create a new tree item.
                                tiParent =
                                    new TreeItem(parentCode, refParent.getEntityDescription().getContent(),
                                        getAtomText(refParent, sab));
                                // Add immediate children of the parent code with an
                                // indication of sub-nodes (+). Codes already
                                // processed as part of the path are ignored since
                                // they are handled through recursion.
                                String[] downstreamAssoc = fwd ? hierAssocToChildNodes_ : hierAssocToParentNodes_;
                                addChildren(tiParent, scheme, csvt, sab, parentCode, code2Tree.keySet(),
                                    downstreamAssoc, fwd);
                                // Try to go higher through recursion.
                                buildPathsToUpperNodes(tiParent, refParent,
                                    scheme, csvt, sab, code2Tree, roots, visited_links, maxLevel, currLevel+1, upstreamAssoc, fwd);
                            }
                            // Add the child (eliminate redundancy -- e.g., hasSubtype and CHD)
                            if (!hasChildren(tiParent, ti.code)) {
                                tiParent.addChild(directionalName, ti);
                                //KLO
                                tiParent.expandable = true;
                            }
                        }
                        // At least one valid parent found, so this is not a root.
                        isRoot = false;
                    }
                }
        }
    }
    // Nodes cut off at the depth limit are treated as roots of the result.
    if (maxLevel != -1 && currLevel == maxLevel) isRoot = true;
    if (isRoot) {
        System.out.println("================ Adding " + ti.code + " " + ti.text + " to roots.");
        roots.add(ti);
    }
}
/**
 * Dumps the tree held in the first entry of {@code hmap} to stdout:
 * each association, its child concepts, and -- for expandable children --
 * the subtree (via {@link #printTree}) plus the reachable top nodes.
 *
 * @param hmap      map of concept code to {@link TreeItem}; only the first
 *                  entry is dumped
 * @param focusCode concept code passed through to {@code printTree}
 * @param level     depth limit passed through to {@code printTree}
 */
public void dumpTree(HashMap hmap, String focusCode, int level) {
    // Guard: the original code indexed objs[0] on an empty map and the
    // resulting exception was silently swallowed.
    if (hmap == null || hmap.isEmpty()) {
        return;
    }
    try {
        String code = (String) hmap.keySet().toArray()[0];
        TreeItem ti = (TreeItem) hmap.get(code);
        for (String association : ti.assocToChildMap.keySet()) {
            System.out.println("\nassociation: " + association);
            List<TreeItem> children = ti.assocToChildMap.get(association);
            for (TreeItem childItem : children) {
                System.out.println(childItem.text + "(" + childItem.code + ")");
                if (childItem.expandable) {
                    System.out.println("\tnode.expandable");
                    printTree(childItem, focusCode, level);
                    // Report the top (root) nodes reachable from this child.
                    List list = getTopNodes(childItem);
                    for (int i = 0; i < list.size(); i++) {
                        Object obj = list.get(i);
                        String nd_code = "";
                        String nd_name = "";
                        // Top nodes may be returned as either concept
                        // references or Concept entities.
                        if (obj instanceof ResolvedConceptReference) {
                            ResolvedConceptReference node = (ResolvedConceptReference) obj;
                            nd_code = node.getConceptCode();
                            nd_name = node.getEntityDescription().getContent();
                        } else if (obj instanceof Concept) {
                            Concept node = (Concept) obj;
                            nd_code = node.getEntityCode();
                            nd_name = node.getEntityDescription().getContent();
                        }
                        System.out.println("TOP NODE: " + nd_name + " (" + nd_code + ")" );
                    }
                } else {
                    System.out.println("\tnode.NOT expandable");
                }
            }
        }
    } catch (Exception e) {
        // Previously swallowed silently; at least record the failure.
        e.printStackTrace();
    }
}
/**
 * Command-line smoke test: resolves the path-to-root tree for a sample
 * CUI in the NCI MetaThesaurus and dumps it to stdout.
 *
 * <p>Earlier experimental calls (getSubconcepts on various CUIs) and the
 * dead reassignments of {@code code} were removed; only the statements
 * that affected the final output remain.
 *
 * @param args unused
 * @throws Exception on any resolution failure
 */
public static void main(String[] args) throws Exception {
    MetaTreeUtils test = new MetaTreeUtils();
    String scheme = "NCI MetaThesaurus";
    String version = null;
    String sab = "NCI";
    // Cell Aging (CUI C0007581)
    String code = "C0007581";
    HashMap new_map = test.getTreePathData(scheme, version, sab, code, -1);
    test.dumpTree(new_map, code, 5);
}
}
|
package gov.nih.nci.evs.browser.servlet;
import org.json.*;
import gov.nih.nci.evs.browser.utils.*;
import gov.nih.nci.evs.browser.common.*;
import java.io.*;
import java.util.*;
import java.net.URI;
import javax.servlet.*;
import javax.servlet.http.*;
import org.apache.log4j.*;
import gov.nih.nci.evs.browser.properties.*;
import static gov.nih.nci.evs.browser.common.Constants.*;
import org.LexGrid.LexBIG.DataModel.Core.CodingSchemeVersionOrTag;
import org.LexGrid.valueSets.ValueSetDefinition;
import org.LexGrid.LexBIG.DataModel.Collections.*;
import org.LexGrid.LexBIG.DataModel.Core.*;
import org.LexGrid.LexBIG.LexBIGService.*;
import org.LexGrid.LexBIG.Utility.*;
import org.LexGrid.codingSchemes.*;
import org.LexGrid.naming.*;
import org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.*;
import org.apache.log4j.*;
import javax.faces.event.ValueChangeEvent;
import org.LexGrid.LexBIG.caCore.interfaces.LexEVSDistributed;
import org.lexgrid.valuesets.LexEVSValueSetDefinitionServices;
import org.LexGrid.valueSets.ValueSetDefinition;
import org.LexGrid.commonTypes.Source;
import org.LexGrid.LexBIG.DataModel.Core.ResolvedConceptReference;
import org.lexgrid.valuesets.dto.ResolvedValueSetDefinition;
import org.LexGrid.LexBIG.Utility.Iterators.ResolvedConceptReferencesIterator;
import javax.servlet.ServletOutputStream;
import org.LexGrid.concepts.*;
import org.lexgrid.valuesets.dto.ResolvedValueSetCodedNodeSet;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.PropertyType;
import org.LexGrid.concepts.Definition;
import org.LexGrid.commonTypes.PropertyQualifier;
import org.LexGrid.commonTypes.Property;
/**
* @author EVS Team
* @version 1.0
*
* Modification history
* Initial implementation kim.ong@ngc.com
*
*/
public final class AjaxServlet extends HttpServlet {
private static Logger _logger = Logger.getLogger(AjaxServlet.class);
/**
* local constants
*/
private static final long serialVersionUID = 1L;
//private static final int STANDARD_VIEW = 1;
//private static final int TERMINOLOGY_VIEW = 2;
/**
* Validates the Init and Context parameters, configures authentication URL
*
* @throws ServletException if the init parameters are invalid or any other
* problems occur during initialisation
*/
public void init() throws ServletException {
    // No servlet-specific initialization is required.
}
/**
* Route the user to the execute method
*
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
*
* @exception IOException if an input/output error occurs
* @exception ServletException if a servlet exception occurs
*/
public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws IOException, ServletException {
    // GET and POST are handled identically by the shared dispatcher.
    execute(request, response);
}
/**
* Route the user to the execute method
*
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
*
* @exception IOException if an input/output error occurs
* @exception ServletException if a Servlet exception occurs
*/
public void doPost(HttpServletRequest request, HttpServletResponse response)
    throws IOException, ServletException {
    // GET and POST are handled identically by the shared dispatcher.
    execute(request, response);
}
/**
 * Logs a JSON payload (with an optional label) for debugging. Disabled by
 * default via the hard-wired {@code debug} flag; flip it locally when
 * JSON tracing is needed.
 *
 * @param msg        optional label logged before the payload
 * @param jsonString JSON text to trace
 */
private static void debugJSONString(String msg, String jsonString) {
    boolean debug = false; //DYEE_DEBUG (default: false)
    if (!debug) {
        return;
    }
    _logger.debug(Utils.SEPARATOR);
    if (msg != null && msg.length() > 0) {
        _logger.debug(msg);
    }
    _logger.debug("jsonString: " + jsonString);
    _logger.debug("jsonString length: " + jsonString.length());
    Utils.debugJSONString(jsonString);
}
/**
 * Resolves the tree for the given node and writes it to the HTTP response
 * as a JSON object with a "root_nodes" array. Errors are logged to stderr
 * and produce no response body.
 *
 * @param response              response to write the JSON payload to
 * @param node_id               focus concept code
 * @param ontology_display_name coding scheme display name
 * @param ontology_version      coding scheme version (may be null)
 */
public static void search_tree(HttpServletResponse response, String node_id,
    String ontology_display_name, String ontology_version) {
    try {
        String jsonString = search_tree(node_id, ontology_display_name, ontology_version);
        if (jsonString == null) {
            return;
        }
        JSONArray rootsArray = new JSONArray(jsonString);
        JSONObject json = new JSONObject();
        json.put("root_nodes", rootsArray);
        response.setContentType("text/html");
        response.setHeader("Cache-Control", "no-cache");
        PrintWriter writer = response.getWriter();
        writer.write(json.toString());
        writer.flush();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Resolves the tree for the given node via the cache and returns it as a
 * JSON string. Also logs the elapsed time.
 *
 * @param node_id               focus concept code (required)
 * @param ontology_display_name coding scheme display name (required)
 * @param ontology_version      coding scheme version (may be null)
 * @return the JSON tree, or null when a required argument is missing
 * @throws Exception on cache/resolution failure
 */
public static String search_tree(String node_id,
    String ontology_display_name, String ontology_version) throws Exception {
    // Both the node and the ontology must be known to build a tree.
    if (node_id == null || ontology_display_name == null) {
        return null;
    }
    Utils.StopWatch stopWatch = new Utils.StopWatch();
    CodingSchemeVersionOrTag versionOrTag = new CodingSchemeVersionOrTag();
    if (ontology_version != null) {
        versionOrTag.setVersion(ontology_version);
    }
    String jsonString = CacheController.getTree(ontology_display_name, versionOrTag, node_id);
    debugJSONString("Section: search_tree", jsonString);
    _logger.debug("search_tree: " + stopWatch.getResult());
    return jsonString;
}
/**
 * Process the specified HTTP request, and create the corresponding HTTP
 * response (or forward to another web component that will create it).
 * Dispatches on the {@code action} request parameter to the various
 * tree and value-set builders.
 *
 * @param request The HTTP request we are processing
 * @param response The HTTP response we are creating
 *
 * @exception IOException if an input/output error occurs
 * @exception ServletException if a servlet exception occurs
 */
public void execute(HttpServletRequest request, HttpServletResponse response)
    throws IOException, ServletException {
    // Determine request by attributes.
    String action = request.getParameter("action");
    String node_id = request.getParameter("ontology_node_id");
    String ontology_display_name = request.getParameter("ontology_display_name");
    String ontology_version = request.getParameter("version");
    // getParameter returns null when the parameter is absent; previously
    // this fell through to action.equals(...) and threw a
    // NullPointerException. Ignore requests without an action instead.
    if (action == null) {
        _logger.warn("Missing 'action' request parameter; request ignored.");
        return;
    }
    if (ontology_version == null) {
        // Fall back to the PRODUCTION-tagged version of the vocabulary.
        ontology_version = DataUtils.getVocabularyVersionByTag(ontology_display_name, "PRODUCTION");
    }
    long ms = System.currentTimeMillis();
    // expand_tree is handled by its own if-statement (not part of the
    // chain below) to preserve the original dispatch behavior.
    if (action.equals("expand_tree")) {
        if (node_id != null && ontology_display_name != null) {
            System.out.println("(*) EXPAND TREE NODE: " + node_id);
            response.setContentType("text/html");
            response.setHeader("Cache-Control", "no-cache");
            JSONObject json = new JSONObject();
            JSONArray nodesArray = null;
            try {
                nodesArray =
                    CacheController.getInstance().getSubconcepts(
                        ontology_display_name, ontology_version, node_id);
                if (nodesArray != null) {
                    json.put("nodes", nodesArray);
                }
            } catch (Exception e) {
                // Best effort: an empty JSON object is returned on failure.
            }
            debugJSONString("Section: expand_tree", json.toString());
            response.getWriter().write(json.toString());
        }
    }
    if (action.equals("search_value_set")) {
        search_value_set(request, response);
    } else if (action.equals("create_src_vs_tree")) {
        create_src_vs_tree(request, response);
    } else if (action.equals("create_cs_vs_tree")) {
        create_cs_vs_tree(request, response);
    } else if (action.equals("search_hierarchy")) {
        search_hierarchy(request, response, node_id, ontology_display_name, ontology_version);
    } else if (action.equals("search_tree")) {
        search_tree(response, node_id, ontology_display_name, ontology_version);
    } else if (action.equals("build_tree")) {
        // Root concepts of the (possibly defaulted) coding scheme.
        if (ontology_display_name == null)
            ontology_display_name = CODING_SCHEME_NAME;
        response.setContentType("text/html");
        response.setHeader("Cache-Control", "no-cache");
        JSONObject json = new JSONObject();
        JSONArray nodesArray = null;
        try {
            nodesArray =
                CacheController.getInstance().getRootConcepts(
                    ontology_display_name, ontology_version);
            if (nodesArray != null) {
                json.put("root_nodes", nodesArray);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        debugJSONString("Section: build_tree", json.toString());
        response.getWriter().write(json.toString());
        _logger.debug("Run time (milliseconds): "
            + (System.currentTimeMillis() - ms));
        return;
    } else if (action.equals("build_vs_tree")) {
        // Root value sets for the (possibly defaulted) coding scheme.
        if (ontology_display_name == null)
            ontology_display_name = CODING_SCHEME_NAME;
        response.setContentType("text/html");
        response.setHeader("Cache-Control", "no-cache");
        JSONObject json = new JSONObject();
        JSONArray nodesArray = null;
        try {
            String codingSchemeVersion = null;
            nodesArray =
                CacheController.getInstance().getRootValueSets(
                    ontology_display_name, codingSchemeVersion);
            if (nodesArray != null) {
                json.put("root_nodes", nodesArray);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        response.getWriter().write(json.toString());
        _logger.debug("Run time (milliseconds): "
            + (System.currentTimeMillis() - ms));
        return;
    } else if (action.equals("expand_vs_tree")) {
        // Child value sets of the given value-set node.
        if (node_id != null && ontology_display_name != null) {
            response.setContentType("text/html");
            response.setHeader("Cache-Control", "no-cache");
            JSONObject json = new JSONObject();
            JSONArray nodesArray = null;
            try {
                nodesArray =
                    CacheController.getInstance().getSubValueSets(
                        ontology_display_name, ontology_version, node_id);
                if (nodesArray != null) {
                    System.out.println("expand_vs_tree nodesArray != null");
                    json.put("nodes", nodesArray);
                } else {
                    System.out.println("expand_vs_tree nodesArray == null???");
                }
            } catch (Exception e) {
                // Best effort: an empty JSON object is returned on failure.
            }
            response.getWriter().write(json.toString());
            _logger.debug("Run time (milliseconds): "
                + (System.currentTimeMillis() - ms));
        }
    } else if (action.equals("expand_entire_vs_tree")) {
        // Fully-expanded value-set tree rooted at the given source.
        if (node_id != null && ontology_display_name != null) {
            response.setContentType("text/html");
            response.setHeader("Cache-Control", "no-cache");
            JSONObject json = new JSONObject();
            JSONArray nodesArray = null;
            try {
                nodesArray =
                    CacheController.getInstance().getSourceValueSetTree(
                        ontology_display_name, ontology_version, true);
                if (nodesArray != null) {
                    System.out.println("expand_entire_vs_tree nodesArray != null");
                    json.put("root_nodes", nodesArray);
                } else {
                    System.out.println("expand_entire_vs_tree nodesArray == null???");
                }
            } catch (Exception e) {
                // Best effort: an empty JSON object is returned on failure.
            }
            response.getWriter().write(json.toString());
            _logger.debug("Run time (milliseconds): "
                + (System.currentTimeMillis() - ms));
        }
    } else if (action.equals("expand_entire_cs_vs_tree")) {
        // Fully-expanded value-set tree grouped by coding scheme.
        response.setContentType("text/html");
        response.setHeader("Cache-Control", "no-cache");
        JSONObject json = new JSONObject();
        JSONArray nodesArray = null;
        try {
            nodesArray =
                CacheController.getInstance().getCodingSchemeValueSetTree(
                    ontology_display_name, ontology_version, true);
            if (nodesArray != null) {
                System.out.println("expand_entire_vs_tree nodesArray != null");
                json.put("root_nodes", nodesArray);
            } else {
                System.out.println("expand_entire_vs_tree nodesArray == null???");
            }
        } catch (Exception e) {
            // Best effort: an empty JSON object is returned on failure.
        }
        response.getWriter().write(json.toString());
        _logger.debug("Run time (milliseconds): "
            + (System.currentTimeMillis() - ms));
    } else if (action.equals("build_cs_vs_tree")) {
        // Root value sets across all coding schemes.
        response.setContentType("text/html");
        response.setHeader("Cache-Control", "no-cache");
        JSONObject json = new JSONObject();
        JSONArray nodesArray = null;
        try {
            nodesArray =
                CacheController.getInstance().getRootValueSets(true);
            if (nodesArray != null) {
                json.put("root_nodes", nodesArray);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        response.getWriter().write(json.toString());
        _logger.debug("Run time (milliseconds): "
            + (System.currentTimeMillis() - ms));
        return;
    } else if (action.equals("expand_cs_vs_tree")) {
        response.setContentType("text/html");
        response.setHeader("Cache-Control", "no-cache");
        JSONObject json = new JSONObject();
        JSONArray nodesArray = null;
        // node_id encodes both a value-set URI and a coding scheme name.
        String vsd_uri = ValueSetHierarchy.getValueSetURI(node_id);
        node_id = ValueSetHierarchy.getCodingSchemeName(node_id);
        if (node_id != null) {
            ValueSetDefinition vsd = ValueSetHierarchy.findValueSetDefinitionByURI(vsd_uri);
            if (vsd == null) {
                // No definition: the node is a coding scheme; list its roots.
                System.out.println("(****) coding scheme name: " + node_id);
                try {
                    nodesArray = CacheController.getInstance().getRootValueSets(node_id, null);
                    if (nodesArray != null) {
                        json.put("nodes", nodesArray);
                    } else {
                        System.out.println("expand_vs_tree nodesArray == null???");
                    }
                } catch (Exception e) {
                    // Best effort: an empty JSON object is returned on failure.
                }
            } else {
                // Definition found: expand its sub value sets.
                try {
                    nodesArray =
                        CacheController.getInstance().getSubValueSets(
                            node_id, null, vsd_uri);
                    if (nodesArray != null) {
                        json.put("nodes", nodesArray);
                    }
                } catch (Exception e) {
                    // Best effort: an empty JSON object is returned on failure.
                }
            }
            response.getWriter().write(json.toString());
            _logger.debug("Run time (milliseconds): "
                + (System.currentTimeMillis() - ms));
        }
    } else if (action.equals("build_src_vs_tree")) {
        // Value-set tree organized by source.
        response.setContentType("text/html");
        response.setHeader("Cache-Control", "no-cache");
        JSONObject json = new JSONObject();
        JSONArray nodesArray = null;
        try {
            nodesArray =
                CacheController.getInstance().build_src_vs_tree();
            if (nodesArray != null) {
                json.put("root_nodes", nodesArray);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        response.getWriter().write(json.toString());
        _logger.debug("Run time (milliseconds): "
            + (System.currentTimeMillis() - ms));
        return;
    } else if (action.equals("expand_src_vs_tree")) {
        if (node_id != null && ontology_display_name != null) {
            response.setContentType("text/html");
            response.setHeader("Cache-Control", "no-cache");
            JSONObject json = new JSONObject();
            JSONArray nodesArray = CacheController.getInstance().expand_src_vs_tree(node_id);
            if (nodesArray == null) {
                System.out.println("(*) CacheController returns nodesArray == null");
            }
            try {
                if (nodesArray != null) {
                    System.out.println("expand_src_vs_tree nodesArray != null");
                    json.put("nodes", nodesArray);
                } else {
                    System.out.println("expand_src_vs_tree nodesArray == null???");
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            response.getWriter().write(json.toString());
            _logger.debug("Run time (milliseconds): "
                + (System.currentTimeMillis() - ms));
        }
    }
}
/**
 * Reports whether the array of root nodes contains a node whose
 * ontology-node-id equals the given concept code.
 *
 * @param rootsArray JSON array of root node objects
 * @param code       concept code to look for
 * @return true when a node with the code is present, false otherwise
 */
private boolean isRoot(JSONArray rootsArray, String code) {
    for (int i = 0; i < rootsArray.length(); i++) {
        try {
            JSONObject node = rootsArray.getJSONObject(i);
            String node_id = (String) node.get(CacheController.ONTOLOGY_NODE_ID);
            if (node_id.compareTo(code) == 0) {
                return true;
            }
        } catch (Exception e) {
            // A malformed entry is logged and skipped.
            e.printStackTrace();
        }
    }
    return false;
}
private static boolean _debug = false; // DYEE_DEBUG (default: false)
private static StringBuffer _debugBuffer = null;
/**
 * Writes {@code text} to the response writer; when debug tracing is on,
 * the line is also logged and appended to the shared debug buffer.
 *
 * @param out  destination writer (not closed here)
 * @param text line to emit
 */
public static void println(PrintWriter out, String text) {
    if (_debug) {
        _logger.debug("DBG: " + text);
        // Guard against println() running before search_hierarchy()
        // allocates the buffer (previously a latent NullPointerException).
        if (_debugBuffer != null) {
            _debugBuffer.append(text + "\n");
        }
    }
    out.println(text);
}
    /**
     * Renders the "View In Hierarchy" popup page: an HTML document embedding
     * YUI TreeView JavaScript that lazily loads and displays the concept
     * hierarchy for {@code node_id} in the given vocabulary/version.
     * All markup and script is written line-by-line to the response writer.
     *
     * @param request               incoming request (parameters are scanned but unused; see below)
     * @param response              response to receive the generated HTML page
     * @param node_id               concept code to focus the tree on (may be null/"null" for full tree)
     * @param ontology_display_name display name of the vocabulary
     * @param ontology_version      version string of the vocabulary
     */
    public static void search_hierarchy(HttpServletRequest request, HttpServletResponse response, String node_id,
        String ontology_display_name, String ontology_version) {
        // NOTE(review): this loop reads every request parameter but discards
        // the values — dead code, presumably a leftover from debugging.
        Enumeration parameters = request.getParameterNames();
        String param = null;
        while (parameters.hasMoreElements())
        {
            param = (String) parameters.nextElement();
            String paramValue = request.getParameter(param);
        }
        response.setContentType("text/html");
        PrintWriter out = null;
        try {
            out = response.getWriter();
        } catch (Exception ex) {
            ex.printStackTrace();
            return;
        }
        // Allocate the capture buffer used by println() when debugging is on;
        // it is dumped to the logger and released at the end of this method.
        if (_debug) {
            _debugBuffer = new StringBuffer();
        }
        // Resolve vocabulary metadata used in the page banner/title.
        // NOTE(review): term_browser_version is fetched but never used here.
        String localName = DataUtils.getLocalName(ontology_display_name);
        String formalName = DataUtils.getFormalName(localName);
        String term_browser_version = DataUtils.getMetadataValue(formalName, ontology_version, "term_browser_version");
        String display_name = DataUtils.getMetadataValue(formalName, ontology_version, "display_name");
        // ---- HTML head: YUI library includes and stylesheets ----
        println(out, "");
        println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/yahoo-min.js\" ></script>");
        println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/event-min.js\" ></script>");
        println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/dom-min.js\" ></script>");
        println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/animation-min.js\" ></script>");
        println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/container-min.js\" ></script>");
        println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/connection-min.js\" ></script>");
        //println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/autocomplete-min.js\" ></script>");
        println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/treeview-min.js\" ></script>");
        println(out, "");
        println(out, "");
        println(out, "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">");
        // NOTE(review): the literal below is truncated (ends mid-URL) — it
        // looks like a comment-stripping tool removed everything after "//".
        // Likely intended: <html xmlns="http://www.w3.org/1999/xhtml">. Left
        // byte-identical here; confirm against the original source.
        println(out, "<html xmlns=\"http:
        println(out, " <head>");
        println(out, " <title>Vocabulary Hierarchy</title>");
        println(out, " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">");
        println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/styleSheet.css\" />");
        println(out, " <link rel=\"shortcut icon\" href=\"/ncitbrowser/favicon.ico\" type=\"image/x-icon\" />");
        println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/fonts.css\" />");
        println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/grids.css\" />");
        println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/code.css\" />");
        println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/tree.css\" />");
        println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/script.js\"></script>");
        println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/search.js\"></script>");
        println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/dropdown.js\"></script>");
        println(out, "");
        // ---- Inline client-side script: tree construction and AJAX glue ----
        println(out, " <script language=\"JavaScript\">");
        println(out, "");
        println(out, " var tree;");
        println(out, " var nodeIndex;");
        println(out, " var rootDescDiv;");
        println(out, " var emptyRootDiv;");
        println(out, " var treeStatusDiv;");
        println(out, " var nodes = [];");
        println(out, " var currOpener;");
        println(out, "");
        println(out, " function load(url,target) {");
        println(out, " if (target != '')");
        println(out, " target.window.location.href = url;");
        println(out, " else");
        println(out, " window.location.href = url;");
        println(out, " }");
        println(out, "");
        // init(): page-load hook — sets up the three status panels and the tree.
        println(out, " function init() {");
        println(out, "");
        println(out, " rootDescDiv = new YAHOO.widget.Module(\"rootDesc\", {visible:false} );");
        println(out, " resetRootDesc();");
        println(out, "");
        println(out, " emptyRootDiv = new YAHOO.widget.Module(\"emptyRoot\", {visible:true} );");
        println(out, " resetEmptyRoot();");
        println(out, "");
        println(out, " treeStatusDiv = new YAHOO.widget.Module(\"treeStatus\", {visible:true} );");
        println(out, " resetTreeStatus();");
        println(out, "");
        println(out, " currOpener = opener;");
        println(out, " initTree();");
        println(out, " }");
        println(out, "");
        // addTreeNode(): appends one child TextNode; dynamic-load when it has children.
        println(out, " function addTreeNode(rootNode, nodeInfo) {");
        println(out, " var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
        println(out, " var newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
        println(out, " var newNode = new YAHOO.widget.TextNode(newNodeData, rootNode, false);");
        println(out, " if (nodeInfo.ontology_node_child_count > 0) {");
        println(out, " newNode.setDynamicLoad(loadNodeData);");
        println(out, " }");
        println(out, " }");
        println(out, "");
        // buildTree(): fetches the vocabulary's root nodes via the ajax servlet
        // (action=build_tree) and populates the YUI tree.
        println(out, " function buildTree(ontology_node_id, ontology_display_name) {");
        println(out, " var handleBuildTreeSuccess = function(o) {");
        println(out, " var respTxt = o.responseText;");
        println(out, " var respObj = eval('(' + respTxt + ')');");
        println(out, " if ( typeof(respObj) != \"undefined\") {");
        println(out, " if ( typeof(respObj.root_nodes) != \"undefined\") {");
        println(out, " var root = tree.getRoot();");
        println(out, " if (respObj.root_nodes.length == 0) {");
        println(out, " showEmptyRoot();");
        println(out, " }");
        println(out, " else {");
        println(out, " for (var i=0; i < respObj.root_nodes.length; i++) {");
        println(out, " var nodeInfo = respObj.root_nodes[i];");
        println(out, " var expand = false;");
        println(out, " addTreeNode(root, nodeInfo, expand);");
        println(out, " }");
        println(out, " }");
        println(out, "");
        println(out, " tree.draw();");
        println(out, " }");
        println(out, " }");
        println(out, " resetTreeStatus();");
        println(out, " }");
        println(out, "");
        println(out, " var handleBuildTreeFailure = function(o) {");
        println(out, " resetTreeStatus();");
        println(out, " resetEmptyRoot();");
        println(out, " alert('responseFailure: ' + o.statusText);");
        println(out, " }");
        println(out, "");
        println(out, " var buildTreeCallback =");
        println(out, " {");
        println(out, " success:handleBuildTreeSuccess,");
        println(out, " failure:handleBuildTreeFailure");
        println(out, " };");
        println(out, "");
        println(out, " if (ontology_display_name!='') {");
        println(out, " resetEmptyRoot();");
        println(out, "");
        println(out, " showTreeLoadingStatus();");
        println(out, " var ontology_source = null;");
        println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
        println(out, " var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=build_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
        println(out, " }");
        println(out, " }");
        println(out, "");
        // resetTree(): re-roots the tree at a specific node (action=reset_tree)
        // and shows its immediate children.
        println(out, " function resetTree(ontology_node_id, ontology_display_name) {");
        println(out, "");
        println(out, " var handleResetTreeSuccess = function(o) {");
        println(out, " var respTxt = o.responseText;");
        println(out, " var respObj = eval('(' + respTxt + ')');");
        println(out, " if ( typeof(respObj) != \"undefined\") {");
        println(out, " if ( typeof(respObj.root_node) != \"undefined\") {");
        println(out, " var root = tree.getRoot();");
        println(out, " var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.root_node.ontology_node_id + \"');\";");
        println(out, " var rootNodeData = { label:respObj.root_node.ontology_node_name, id:respObj.root_node.ontology_node_id, href:nodeDetails };");
        println(out, " var expand = false;");
        println(out, " if (respObj.root_node.ontology_node_child_count > 0) {");
        println(out, " expand = true;");
        println(out, " }");
        println(out, " var ontRoot = new YAHOO.widget.TextNode(rootNodeData, root, expand);");
        println(out, "");
        println(out, " if ( typeof(respObj.child_nodes) != \"undefined\") {");
        println(out, " for (var i=0; i < respObj.child_nodes.length; i++) {");
        println(out, " var nodeInfo = respObj.child_nodes[i];");
        println(out, " addTreeNode(ontRoot, nodeInfo);");
        println(out, " }");
        println(out, " }");
        println(out, " tree.draw();");
        println(out, " setRootDesc(respObj.root_node.ontology_node_name, ontology_display_name);");
        println(out, " }");
        println(out, " }");
        println(out, " resetTreeStatus();");
        println(out, " }");
        println(out, "");
        println(out, " var handleResetTreeFailure = function(o) {");
        println(out, " resetTreeStatus();");
        println(out, " alert('responseFailure: ' + o.statusText);");
        println(out, " }");
        println(out, "");
        println(out, " var resetTreeCallback =");
        println(out, " {");
        println(out, " success:handleResetTreeSuccess,");
        println(out, " failure:handleResetTreeFailure");
        println(out, " };");
        println(out, " if (ontology_node_id!= '') {");
        println(out, " showTreeLoadingStatus();");
        println(out, " var ontology_source = null;");
        println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
        println(out, " var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=reset_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name + '&version='+ ontology_version +'&ontology_source='+ontology_source,resetTreeCallback);");
        println(out, " }");
        println(out, " }");
        println(out, "");
        // onClickTreeNode(): navigates the opener window to the concept report;
        // ids containing "_dot_" are ellipsis placeholders, not real concepts.
        println(out, " function onClickTreeNode(ontology_node_id) {");
        out.println(" if (ontology_node_id.indexOf(\"_dot_\") != -1) return;");
        println(out, " var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
        println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
        println(out, " load('/ncitbrowser/ConceptReport.jsp?dictionary='+ ontology_display_name + '&version='+ ontology_version + '&code=' + ontology_node_id, currOpener);");
        println(out, " }");
        println(out, "");
        println(out, " function onClickViewEntireOntology(ontology_display_name) {");
        println(out, " var ontology_display_name = document.pg_form.ontology_display_name.value;");
        println(out, " tree = new YAHOO.widget.TreeView(\"treecontainer\");");
        println(out, " tree.draw();");
        println(out, " resetRootDesc();");
        println(out, " buildTree('', ontology_display_name);");
        println(out, " }");
        println(out, "");
        // initTree(): no focus node -> build full root list; otherwise search
        // for the node and render the partial hierarchy around it.
        println(out, " function initTree() {");
        println(out, "");
        println(out, " tree = new YAHOO.widget.TreeView(\"treecontainer\");");
        println(out, " var ontology_node_id = document.forms[\"pg_form\"].ontology_node_id.value;");
        println(out, " var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
        println(out, "");
        println(out, " if (ontology_node_id == null || ontology_node_id == \"null\")");
        println(out, " {");
        println(out, " buildTree(ontology_node_id, ontology_display_name);");
        println(out, " }");
        println(out, " else");
        println(out, " {");
        println(out, " searchTree(ontology_node_id, ontology_display_name);");
        println(out, " }");
        println(out, " }");
        println(out, "");
        println(out, " function initRootDesc() {");
        println(out, " rootDescDiv.setBody('');");
        println(out, " initRootDesc.show();");
        println(out, " rootDescDiv.render();");
        println(out, " }");
        println(out, "");
        // Hide/clear helpers for the three status panels.
        println(out, " function resetRootDesc() {");
        println(out, " rootDescDiv.hide();");
        println(out, " rootDescDiv.setBody('');");
        println(out, " rootDescDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, " function resetEmptyRoot() {");
        println(out, " emptyRootDiv.hide();");
        println(out, " emptyRootDiv.setBody('');");
        println(out, " emptyRootDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, " function resetTreeStatus() {");
        println(out, " treeStatusDiv.hide();");
        println(out, " treeStatusDiv.setBody('');");
        println(out, " treeStatusDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, " function showEmptyRoot() {");
        println(out, " emptyRootDiv.setBody(\"<span class='instruction_text'>No root nodes available.</span>\");");
        println(out, " emptyRootDiv.show();");
        println(out, " emptyRootDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, " function showNodeNotFound(node_id) {");
        println(out, " //emptyRootDiv.setBody(\"<span class='instruction_text'>Concept with code \" + node_id + \" not found in the hierarchy.</span>\");");
        println(out, " emptyRootDiv.setBody(\"<span class='instruction_text'>Concept not part of the parent-child hierarchy in this source; check other relationships.</span>\");");
        println(out, " emptyRootDiv.show();");
        println(out, " emptyRootDiv.render();");
        println(out, " }");
        println(out, " ");
        println(out, " function showPartialHierarchy() {");
        println(out, " rootDescDiv.setBody(\"<span class='instruction_text'>(Note: This tree only shows partial hierarchy.)</span>\");");
        println(out, " rootDescDiv.show();");
        println(out, " rootDescDiv.render();");
        println(out, " }");
        println(out, "");
        // Progress-indicator helpers (spinner + message variants).
        println(out, " function showTreeLoadingStatus() {");
        println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Building tree ...</span>\");");
        println(out, " treeStatusDiv.show();");
        println(out, " treeStatusDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, " function showTreeDrawingStatus() {");
        println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Drawing tree ...</span>\");");
        println(out, " treeStatusDiv.show();");
        println(out, " treeStatusDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, " function showSearchingTreeStatus() {");
        println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Searching tree... Please wait.</span>\");");
        println(out, " treeStatusDiv.show();");
        println(out, " treeStatusDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, " function showConstructingTreeStatus() {");
        println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Constructing tree... Please wait.</span>\");");
        println(out, " treeStatusDiv.show();");
        println(out, " treeStatusDiv.render();");
        println(out, " }");
        println(out, "");
        // Superseded loadNodeData() kept commented out; the active version below
        // additionally handles "_dot_" placeholder nodes.
        /*
        println(out, " function loadNodeData(node, fnLoadComplete) {");
        println(out, " var id = node.data.id;");
        println(out, "");
        println(out, " var responseSuccess = function(o)");
        println(out, " {");
        println(out, " var path;");
        println(out, " var dirs;");
        println(out, " var files;");
        println(out, " var respTxt = o.responseText;");
        println(out, " var respObj = eval('(' + respTxt + ')');");
        println(out, " var fileNum = 0;");
        println(out, " var categoryNum = 0;");
        println(out, " if ( typeof(respObj.nodes) != \"undefined\") {");
        println(out, " for (var i=0; i < respObj.nodes.length; i++) {");
        println(out, " var name = respObj.nodes[i].ontology_node_name;");
        println(out, " var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.nodes[i].ontology_node_id + \"');\";");
        println(out, " var newNodeData = { label:name, id:respObj.nodes[i].ontology_node_id, href:nodeDetails };");
        println(out, " var newNode = new YAHOO.widget.TextNode(newNodeData, node, false);");
        println(out, " if (respObj.nodes[i].ontology_node_child_count > 0) {");
        println(out, " newNode.setDynamicLoad(loadNodeData);");
        println(out, " }");
        println(out, " }");
        println(out, " }");
        println(out, " tree.draw();");
        println(out, " fnLoadComplete();");
        println(out, " }");
        */
        // loadNodeData(): YUI dynamic-load callback (action=expand_tree).
        // For a "_dot_" placeholder node the fetched siblings are attached to
        // the parent and the placeholder itself is removed.
        out.println(" function loadNodeData(node, fnLoadComplete) {");
        out.println(" var id = node.data.id;");
        out.println("");
        out.println(" var responseSuccess = function(o)");
        out.println(" {");
        out.println(" var path;");
        out.println(" var dirs;");
        out.println(" var files;");
        out.println(" var respTxt = o.responseText;");
        out.println(" var respObj = eval('(' + respTxt + ')');");
        out.println(" var fileNum = 0;");
        out.println(" var categoryNum = 0;");
        out.println(" var pos = id.indexOf(\"_dot_\");");
        out.println(" if ( typeof(respObj.nodes) != \"undefined\") {");
        out.println(" if (pos == -1) {");
        out.println(" for (var i=0; i < respObj.nodes.length; i++) {");
        out.println(" var name = respObj.nodes[i].ontology_node_name;");
        out.println(" var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.nodes[i].ontology_node_id + \"');\";");
        out.println(" var newNodeData = { label:name, id:respObj.nodes[i].ontology_node_id, href:nodeDetails };");
        out.println(" var newNode = new YAHOO.widget.TextNode(newNodeData, node, false);");
        out.println(" if (respObj.nodes[i].ontology_node_child_count > 0) {");
        out.println(" newNode.setDynamicLoad(loadNodeData);");
        out.println(" }");
        out.println(" }");
        out.println("");
        out.println(" } else {");
        out.println("");
        out.println(" var parent = node.parent;");
        out.println(" for (var i=0; i < respObj.nodes.length; i++) {");
        out.println(" var name = respObj.nodes[i].ontology_node_name;");
        out.println(" var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.nodes[i].ontology_node_id + \"');\";");
        out.println(" var newNodeData = { label:name, id:respObj.nodes[i].ontology_node_id, href:nodeDetails };");
        out.println("");
        out.println(" var newNode = new YAHOO.widget.TextNode(newNodeData, parent, true);");
        out.println(" if (respObj.nodes[i].ontology_node_child_count > 0) {");
        out.println(" newNode.setDynamicLoad(loadNodeData);");
        out.println(" }");
        out.println(" }");
        out.println(" tree.removeNode(node,true);");
        out.println(" }");
        out.println(" }");
        out.println(" fnLoadComplete();");
        out.println(" }");
        println(out, "");
        println(out, " var responseFailure = function(o){");
        println(out, " alert('responseFailure: ' + o.statusText);");
        println(out, " }");
        println(out, "");
        println(out, " var callback =");
        println(out, " {");
        println(out, " success:responseSuccess,");
        println(out, " failure:responseFailure");
        println(out, " };");
        println(out, "");
        println(out, " var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
        println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
        //println(out, " var ontology_display_name = " + "\"" + ontology_display_name + "\";");
        //println(out, " var ontology_version = " + "\"" + ontology_version + "\";");
        println(out, " var cObj = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=expand_tree&ontology_node_id=' +id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version,callback);");
        println(out, " }");
        println(out, "");
        println(out, " function setRootDesc(rootNodeName, ontology_display_name) {");
        println(out, " var newDesc = \"<span class='instruction_text'>Root set to <b>\" + rootNodeName + \"</b></span>\";");
        println(out, " rootDescDiv.setBody(newDesc);");
        println(out, " var footer = \"<a onClick='javascript:onClickViewEntireOntology();' href='#' class='link_text'>view full ontology}</a>\";");
        println(out, " rootDescDiv.setFooter(footer);");
        println(out, " rootDescDiv.show();");
        println(out, " rootDescDiv.render();");
        println(out, " }");
        println(out, "");
        println(out, "");
        // searchTree(): body is generated server-side — printTree() emits
        // addTreeBranch(...) calls for the partial hierarchy around node_id.
        println(out, " function searchTree(ontology_node_id, ontology_display_name) {");
        println(out, "");
        println(out, " var root = tree.getRoot();");
        //new ViewInHierarchyUtil().printTree(out, ontology_display_name, ontology_version, node_id);
        new ViewInHierarchyUtils().printTree(out, ontology_display_name, ontology_version, node_id);
        println(out, " showPartialHierarchy();");
        println(out, " tree.draw();");
        println(out, " }");
        println(out, "");
        println(out, "");
        // addTreeBranch(): recursively attaches a pre-fetched subtree,
        // highlighting the focus node and enabling dynamic load where needed.
        println(out, " function addTreeBranch(ontology_node_id, rootNode, nodeInfo) {");
        println(out, " var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
        println(out, " var newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
        println(out, "");
        println(out, " var expand = false;");
        println(out, " var childNodes = nodeInfo.children_nodes;");
        println(out, "");
        println(out, " if (childNodes.length > 0) {");
        println(out, " expand = true;");
        println(out, " }");
        println(out, " var newNode = new YAHOO.widget.TextNode(newNodeData, rootNode, expand);");
        println(out, " if (nodeInfo.ontology_node_id == ontology_node_id) {");
        println(out, " newNode.labelStyle = \"ygtvlabel_highlight\";");
        println(out, " }");
        println(out, "");
        println(out, " if (nodeInfo.ontology_node_id == ontology_node_id) {");
        println(out, " newNode.isLeaf = true;");
        println(out, " if (nodeInfo.ontology_node_child_count > 0) {");
        println(out, " newNode.isLeaf = false;");
        println(out, " newNode.setDynamicLoad(loadNodeData);");
        println(out, " } else {");
        println(out, " tree.draw();");
        println(out, " }");
        println(out, "");
        println(out, " } else {");
        println(out, " if (nodeInfo.ontology_node_id != ontology_node_id) {");
        println(out, " if (nodeInfo.ontology_node_child_count == 0 && nodeInfo.ontology_node_id != ontology_node_id) {");
        println(out, " newNode.isLeaf = true;");
        println(out, " } else if (childNodes.length == 0) {");
        println(out, " newNode.setDynamicLoad(loadNodeData);");
        println(out, " }");
        println(out, " }");
        println(out, " }");
        println(out, "");
        println(out, " tree.draw();");
        println(out, " for (var i=0; i < childNodes.length; i++) {");
        println(out, " var childnodeInfo = childNodes[i];");
        println(out, " addTreeBranch(ontology_node_id, newNode, childnodeInfo);");
        println(out, " }");
        println(out, " }");
        println(out, " YAHOO.util.Event.addListener(window, \"load\", init);");
        println(out, "");
        println(out, " </script>");
        println(out, "</head>");
        // ---- Page body: banner, tree container, hidden state form ----
        println(out, "<body>");
        println(out, " ");
        println(out, " <!-- Begin Skip Top Navigation -->");
        println(out, " <a href=\"#evs-content\" class=\"hideLink\" accesskey=\"1\" title=\"Skip repetitive navigation links\">skip navigation links</A>");
        println(out, " <!-- End Skip Top Navigation --> ");
        println(out, " <div id=\"popupContainer\">");
        println(out, " <!-- nci popup banner -->");
        println(out, " <div class=\"ncipopupbanner\">");
        // NOTE(review): the two literals below are truncated mid-URL (same
        // comment-stripper damage as the <html> line above) — the original
        // banner anchors/images are lost. Left byte-identical; restore from
        // version control.
        println(out, " <a href=\"http:
        println(out, " <a href=\"http:
        println(out, " </div>");
        println(out, " <!-- end nci popup banner -->");
        println(out, " <div id=\"popupMainArea\">");
        println(out, " <a name=\"evs-content\" id=\"evs-content\"></a>");
        println(out, " <table class=\"evsLogoBg\" cellspacing=\"0\" cellpadding=\"0\" border=\"0\">");
        println(out, " <tr>");
        println(out, " <td valign=\"top\">");
        println(out, " <a href=\"http://evs.nci.nih.gov/\" target=\"_blank\" alt=\"Enterprise Vocabulary Services\">");
        println(out, " <img src=\"/ncitbrowser/images/evs-popup-logo.gif\" width=\"213\" height=\"26\" alt=\"EVS: Enterprise Vocabulary Services\" title=\"EVS: Enterprise Vocabulary Services\" border=\"0\" />");
        println(out, " </a>");
        println(out, " </td>");
        println(out, " <td valign=\"top\"><div id=\"closeWindow\"><a href=\"javascript:window.close();\"><img src=\"/ncitbrowser/images/thesaurus_close_icon.gif\" width=\"10\" height=\"10\" border=\"0\" alt=\"Close Window\" /> CLOSE WINDOW</a></div></td>");
        println(out, " </tr>");
        println(out, " </table>");
        println(out, "");
        println(out, "");
        // Vocabulary banner: NCI Thesaurus gets its own artwork; all other
        // vocabularies share a generic banner with the display name overlaid.
        String release_date = DataUtils.getVersionReleaseDate(ontology_display_name, ontology_version);
        if (ontology_display_name.compareTo("NCI Thesaurus") == 0 || ontology_display_name.compareTo("NCI_Thesaurus") == 0) {
            println(out, " <div>");
            println(out, " <img src=\"/ncitbrowser/images/thesaurus_popup_banner.gif\" width=\"612\" height=\"56\" alt=\"NCI Thesaurus\" title=\"\" border=\"0\" />");
            println(out, " ");
            println(out, " ");
            println(out, " <span class=\"texttitle-blue-rightjust-2\">" + ontology_version + " (Release date: " + release_date + ")</span>");
            println(out, " ");
            println(out, "");
            println(out, " </div>");
        } else {
            println(out, " <div>");
            println(out, " <img src=\"/ncitbrowser/images/other_popup_banner.gif\" width=\"612\" height=\"56\" alt=\"" + display_name + "\" title=\"\" border=\"0\" />");
            println(out, " <div class=\"vocabularynamepopupshort\">" + display_name );
            println(out, " ");
            println(out, " ");
            println(out, " <span class=\"texttitle-blue-rightjust\">" + ontology_version + " (Release date: " + release_date + ")</span>");
            println(out, " ");
            println(out, " ");
            println(out, " </div>");
            println(out, " </div>");
        }
        println(out, "");
        println(out, " <div id=\"popupContentArea\">");
        println(out, " <table width=\"580px\" cellpadding=\"3\" cellspacing=\"0\" border=\"0\">");
        println(out, " <tr class=\"textbody\">");
        println(out, " <td class=\"pageTitle\" align=\"left\">");
        println(out, " " + display_name + " Hierarchy");
        println(out, " </td>");
        println(out, " <td class=\"pageTitle\" align=\"right\">");
        println(out, " <font size=\"1\" color=\"red\" align=\"right\">");
        println(out, " <a href=\"javascript:printPage()\"><img src=\"/ncitbrowser/images/printer.bmp\" border=\"0\" alt=\"Send to Printer\"><i>Send to Printer</i></a>");
        println(out, " </font>");
        println(out, " </td>");
        println(out, " </tr>");
        println(out, " </table>");
        // If the backing LexEVS service is down, show its message instead of
        // the tree containers.
        if (! ServerMonitorThread.getInstance().isLexEVSRunning()) {
            println(out, " <div class=\"textbodyredsmall\">" + ServerMonitorThread.getInstance().getMessage() + "</div>");
        } else {
            println(out, " <!-- Tree content -->");
            println(out, " <div id=\"rootDesc\">");
            println(out, " <div id=\"bd\"></div>");
            println(out, " <div id=\"ft\"></div>");
            println(out, " </div>");
            println(out, " <div id=\"treeStatus\">");
            println(out, " <div id=\"bd\"></div>");
            println(out, " </div>");
            println(out, " <div id=\"emptyRoot\">");
            println(out, " <div id=\"bd\"></div>");
            println(out, " </div>");
            println(out, " <div id=\"treecontainer\"></div>");
        }
        println(out, "");
        // Hidden form carrying page state read back by the inline JavaScript;
        // values are sanitized against XSS before being echoed into the HTML.
        println(out, " <form id=\"pg_form\">");
        println(out, " ");
        String ontology_node_id_value = HTTPUtils.cleanXSS(node_id);
        String ontology_display_name_value = HTTPUtils.cleanXSS(ontology_display_name);
        String ontology_version_value = HTTPUtils.cleanXSS(ontology_version);
        println(out, " <input type=\"hidden\" id=\"ontology_node_id\" name=\"ontology_node_id\" value=\"" + ontology_node_id_value + "\" />");
        println(out, " <input type=\"hidden\" id=\"ontology_display_name\" name=\"ontology_display_name\" value=\"" + ontology_display_name_value + "\" />");
        //println(out, " <input type=\"hidden\" id=\"schema\" name=\"schema\" value=\"" + scheme_value + "\" />");
        println(out, " <input type=\"hidden\" id=\"ontology_version\" name=\"ontology_version\" value=\"" + ontology_version_value + "\" />");
        println(out, "");
        println(out, " </form>");
        println(out, " <!-- End of Tree control content -->");
        println(out, " </div>");
        println(out, " </div>");
        println(out, " </div>");
        println(out, " ");
        println(out, "</body>");
        println(out, "</html>");
        // Dump the captured page to the logger and release the buffer.
        if (_debug) {
            _logger.debug(Utils.SEPARATOR);
            _logger.debug("VIH HTML:\n" + _debugBuffer);
            _debugBuffer = null;
            _logger.debug(Utils.SEPARATOR);
        }
    }
    /**
     * Renders the value-set tree in "source" (standard) view; delegates to
     * {@link #create_vs_tree(HttpServletRequest, HttpServletResponse, int)}
     * with {@code Constants.STANDARD_VIEW}.
     *
     * @param request  incoming request
     * @param response response to receive the generated page
     */
    public static void create_src_vs_tree(HttpServletRequest request, HttpServletResponse response) {
        create_vs_tree(request, response, Constants.STANDARD_VIEW);
    }
public static void create_cs_vs_tree(HttpServletRequest request, HttpServletResponse response) {
String dictionary = (String) request.getParameter("dictionary");
if (!DataUtils.isNull(dictionary)) {
String version = (String) request.getParameter("version");
create_vs_tree(request, response, Constants.TERMINOLOGY_VIEW, dictionary, version);
} else {
create_vs_tree(request, response, Constants.TERMINOLOGY_VIEW);
}
}
public static void create_vs_tree(HttpServletRequest request, HttpServletResponse response, int view) {
response.setContentType("text/html");
PrintWriter out = null;
try {
out = response.getWriter();
} catch (Exception ex) {
ex.printStackTrace();
return;
}
String message = (String) request.getSession().getAttribute("message");
out.println("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">");
out.println("<html xmlns:c=\"http://java.sun.com/jsp/jstl/core\">");
out.println("<head>");
if (view == Constants.STANDARD_VIEW) {
out.println(" <title>NCI Term Browser - Value Set Source View</title>");
} else {
out.println(" <title>NCI Term Browser - Value Set Terminology View</title>");
}
//out.println(" <title>NCI Thesaurus</title>");
out.println(" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">");
out.println("");
out.println("<style type=\"text/css\">");
out.println("/*margin and padding on body element");
out.println(" can introduce errors in determining");
out.println(" element position and are not recommended;");
out.println(" we turn them off as a foundation for YUI");
out.println(" CSS treatments. */");
out.println("body {");
out.println(" margin:0;");
out.println(" padding:0;");
out.println("}");
out.println("</style>");
out.println("");
out.println("<link rel=\"stylesheet\" type=\"text/css\" href=\"http://yui.yahooapis.com/2.9.0/build/fonts/fonts-min.css\" />");
out.println("<link rel=\"stylesheet\" type=\"text/css\" href=\"http://yui.yahooapis.com/2.9.0/build/treeview/assets/skins/sam/treeview.css\" />");
out.println("");
out.println("<script type=\"text/javascript\" src=\"http://yui.yahooapis.com/2.9.0/build/yahoo-dom-event/yahoo-dom-event.js\"></script>");
out.println("<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/treeview-min.js\" ></script>"); //GF31982
out.println("");
out.println("");
out.println("<!-- Dependency -->");
out.println("<script src=\"http://yui.yahooapis.com/2.9.0/build/yahoo/yahoo-min.js\"></script>");
out.println("");
out.println("<!-- Source file -->");
out.println("<!
out.println(" If you require only basic HTTP transaction support, use the");
out.println(" connection_core.js file.");
out.println("
out.println("<script src=\"http://yui.yahooapis.com/2.9.0/build/connection/connection_core-min.js\"></script>");
out.println("");
out.println("<!
out.println(" Use the full connection.js if you require the following features:");
out.println(" - Form serialization.");
out.println(" - File Upload using the iframe transport.");
out.println(" - Cross-domain(XDR) transactions.");
out.println("
out.println("<script src=\"http://yui.yahooapis.com/2.9.0/build/connection/connection-min.js\"></script>");
out.println("");
out.println("");
out.println("");
out.println("<!--begin custom header content for this example
out.println("<!--Additional custom style rules for this example:
out.println("<style type=\"text/css\">");
out.println("");
out.println("");
out.println(".ygtvcheck0 { background: url(/ncitbrowser/images/yui/treeview/check0.gif) 0 0 no-repeat; width:16px; height:20px; float:left; cursor:pointer; }");
out.println(".ygtvcheck1 { background: url(/ncitbrowser/images/yui/treeview/check1.gif) 0 0 no-repeat; width:16px; height:20px; float:left; cursor:pointer; }");
out.println(".ygtvcheck2 { background: url(/ncitbrowser/images/yui/treeview/check2.gif) 0 0 no-repeat; width:16px; height:20px; float:left; cursor:pointer; }");
out.println("");
out.println("");
out.println(".ygtv-edit-TaskNode { width: 190px;}");
out.println(".ygtv-edit-TaskNode .ygtvcancel, .ygtv-edit-TextNode .ygtvok { border:none;}");
out.println(".ygtv-edit-TaskNode .ygtv-button-container { float: right;}");
out.println(".ygtv-edit-TaskNode .ygtv-input input{ width: 140px;}");
out.println(".whitebg {");
out.println(" background-color:white;");
out.println("}");
out.println("</style>");
out.println("");
out.println(" <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/styleSheet.css\" />");
out.println(" <link rel=\"shortcut icon\" href=\"/ncitbrowser/favicon.ico\" type=\"image/x-icon\" />");
out.println("");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/script.js\"></script>");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/tasknode.js\"></script>");
println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/search.js\"></script>");
println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/dropdown.js\"></script>");
out.println("");
out.println(" <script type=\"text/javascript\">");
out.println("");
out.println(" function refresh() {");
out.println("");
out.println(" var selectValueSetSearchOptionObj = document.forms[\"valueSetSearchForm\"].selectValueSetSearchOption;");
out.println("");
out.println(" for (var i=0; i<selectValueSetSearchOptionObj.length; i++) {");
out.println(" if (selectValueSetSearchOptionObj[i].checked) {");
out.println(" selectValueSetSearchOption = selectValueSetSearchOptionObj[i].value;");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println(" window.location.href=\"/ncitbrowser/pages/value_set_source_view.jsf?refresh=1\""); //Before(GF31982)
//GF31982(Not Sure): out.println(" window.location.href=\"/ncitbrowser/ajax?action=create_src_vs_tree?refresh=1\"");
out.println(" + \"&nav_type=valuesets\" + \"&opt=\"+ selectValueSetSearchOption;");
out.println("");
out.println(" }");
out.println(" </script>");
out.println("");
out.println(" <script language=\"JavaScript\">");
out.println("");
out.println(" var tree;");
out.println(" var nodeIndex;");
out.println(" var nodes = [];");
out.println("");
out.println(" function load(url,target) {");
out.println(" if (target != '')");
out.println(" target.window.location.href = url;");
out.println(" else");
out.println(" window.location.href = url;");
out.println(" }");
out.println("");
out.println(" function init() {");
out.println(" //initTree();");
out.println(" }");
out.println("");
out.println(" //handler for expanding all nodes");
out.println(" YAHOO.util.Event.on(\"expand_all\", \"click\", function(e) {");
out.println(" //expandEntireTree();");
out.println("");
out.println(" tree.expandAll();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println(" //handler for collapsing all nodes");
out.println(" YAHOO.util.Event.on(\"collapse_all\", \"click\", function(e) {");
out.println(" tree.collapseAll();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println(" //handler for checking all nodes");
out.println(" YAHOO.util.Event.on(\"check_all\", \"click\", function(e) {");
out.println(" check_all();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println(" //handler for unchecking all nodes");
out.println(" YAHOO.util.Event.on(\"uncheck_all\", \"click\", function(e) {");
out.println(" uncheck_all();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println("");
out.println("");
out.println(" YAHOO.util.Event.on(\"getchecked\", \"click\", function(e) {");
out.println(" //alert(\"Checked nodes: \" + YAHOO.lang.dump(getCheckedNodes()), \"info\", \"example\");");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println("");
out.println(" });");
out.println("");
out.println("");
out.println(" function addTreeNode(rootNode, nodeInfo) {");
out.println(" var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
out.println("");
out.println(" if (nodeInfo.ontology_node_id.indexOf(\"TVS_\") >= 0) {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id };");
out.println(" } else {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
out.println(" }");
out.println("");
out.println(" var newNode = new YAHOO.widget.TaskNode(newNodeData, rootNode, false);");
out.println(" if (nodeInfo.ontology_node_child_count > 0) {");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function buildTree(ontology_node_id, ontology_display_name) {");
out.println("      var handleBuildTreeSuccess = function(o) {");
out.println("        var respTxt = o.responseText;");
// NOTE(review): the generated JavaScript parses the AJAX response with
// eval(), which executes arbitrary script if the endpoint is ever
// compromised or the response is attacker-influenced. Prefer emitting
// JSON.parse(respTxt) here (same pattern recurs in the other callbacks below).
out.println("        var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println(" if ( typeof(respObj.root_nodes) != \"undefined\") {");
out.println(" var root = tree.getRoot();");
out.println(" if (respObj.root_nodes.length == 0) {");
out.println(" //showEmptyRoot();");
out.println(" }");
out.println(" else {");
out.println(" for (var i=0; i < respObj.root_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.root_nodes[i];");
out.println(" var expand = false;");
out.println(" //addTreeNode(root, nodeInfo, expand);");
out.println("");
out.println(" addTreeNode(root, nodeInfo);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" tree.draw();");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleBuildTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var buildTreeCallback =");
out.println(" {");
out.println(" success:handleBuildTreeSuccess,");
out.println(" failure:handleBuildTreeFailure");
out.println(" };");
out.println("");
out.println(" if (ontology_display_name!='') {");
out.println(" var ontology_source = null;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=build_src_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function resetTree(ontology_node_id, ontology_display_name) {");
out.println("");
out.println(" var handleResetTreeSuccess = function(o) {");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println(" if ( typeof(respObj.root_node) != \"undefined\") {");
out.println(" var root = tree.getRoot();");
out.println(" var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.root_node.ontology_node_id + \"');\";");
out.println(" var rootNodeData = { label:respObj.root_node.ontology_node_name, id:respObj.root_node.ontology_node_id, href:nodeDetails };");
out.println(" var expand = false;");
out.println(" if (respObj.root_node.ontology_node_child_count > 0) {");
out.println(" expand = true;");
out.println(" }");
out.println(" var ontRoot = new YAHOO.widget.TaskNode(rootNodeData, root, expand);");
out.println("");
out.println(" if ( typeof(respObj.child_nodes) != \"undefined\") {");
out.println(" for (var i=0; i < respObj.child_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.child_nodes[i];");
out.println(" addTreeNode(ontRoot, nodeInfo);");
out.println(" }");
out.println(" }");
out.println(" tree.draw();");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleResetTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var resetTreeCallback =");
out.println(" {");
out.println(" success:handleResetTreeSuccess,");
out.println(" failure:handleResetTreeFailure");
out.println(" };");
out.println(" if (ontology_node_id!= '') {");
out.println(" var ontology_source = null;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=reset_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name + '&version='+ ontology_version +'&ontology_source='+ontology_source,resetTreeCallback);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function onClickTreeNode(ontology_node_id) {");
out.println(" //alert(\"onClickTreeNode \" + ontology_node_id);");
out.println(" window.location = '/ncitbrowser/pages/value_set_treenode_redirect.jsf?ontology_node_id=' + ontology_node_id;");
out.println(" }");
out.println("");
out.println("");
out.println("    function onClickViewEntireOntology(ontology_display_name) {");
// NOTE(review): the emitted JS immediately redeclares ontology_display_name,
// shadowing the function parameter — whatever the caller passes in is ignored
// and the value is always read from the pg_form field instead. Confirm that is
// intended before simplifying the signature.
out.println("      var ontology_display_name = document.pg_form.ontology_display_name.value;");
out.println("      tree = new YAHOO.widget.TreeView(\"treecontainer\");");
out.println("      tree.draw();");
// out.println("      buildTree('', ontology_display_name);");
out.println("    }");
out.println("");
out.println(" function initTree() {");
out.println("");
out.println(" tree = new YAHOO.widget.TreeView(\"treecontainer\");");
out.println(" tree.setNodesProperty('propagateHighlightUp',true);");
out.println(" tree.setNodesProperty('propagateHighlightDown',true);");
out.println(" tree.subscribe('keydown',tree._onKeyDownEvent);");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" tree.subscribe(\"expand\", function(node) {");
out.println("");
out.println(" YAHOO.util.UserAction.keydown(document.body, { keyCode: 39 });");
out.println("");
out.println(" });");
out.println("");
out.println("");
out.println("");
out.println(" tree.subscribe(\"collapse\", function(node) {");
out.println(" //alert(\"Collapsing \" + node.label );");
out.println("");
out.println(" YAHOO.util.UserAction.keydown(document.body, { keyCode: 109 });");
out.println(" });");
out.println("");
out.println(" // By default, trees with TextNodes will fire an event for when the label is clicked:");
out.println(" tree.subscribe(\"checkClick\", function(node) {");
out.println(" //alert(node.data.myNodeId + \" label was checked\");");
out.println(" });");
out.println("");
out.println("");
println(out, " var root = tree.getRoot();");
// Select the value-set tree for the current view mode (standard view vs.
// coding-scheme view) and render it into the page output stream.
// NOTE(review): raw HashMap — element types are not visible from here;
// parameterize (likely HashMap<String, TreeItem>) once the DataUtils
// signatures are confirmed.
HashMap value_set_tree_hmap = null;
if (view == Constants.STANDARD_VIEW) {
    value_set_tree_hmap = DataUtils.getSourceValueSetTree();
} else {
    value_set_tree_hmap = DataUtils.getCodingSchemeValueSetTree();
}
TreeItem root = (TreeItem) value_set_tree_hmap.get("<Root>");
new ValueSetUtils().printTree(out, root, view);
String contextPath = request.getContextPath();
// Integer.toString(int) replaces the deprecated Integer(int) boxing
// constructor; identical output, no needless allocation.
String view_str = Integer.toString(view);
//[#31914] Search option and algorithm in value set search box are not preserved in session.
// Read the search criteria from the request. ServletRequest.getParameter
// already returns String, so the previous (String) casts were redundant.
String option = request.getParameter("selectValueSetSearchOption");
String algorithm = request.getParameter("valueset_search_algorithm");
String matchText = request.getParameter("matchText");
// Fall back to the session copy, then to "", so the markup below never
// concatenates a null into the page.
if (DataUtils.isNull(matchText)) {
    matchText = (String) request.getSession().getAttribute("matchText");
}
if (DataUtils.isNull(matchText)) {
    matchText = "";
} else {
    matchText = matchText.trim();
}
// Remember the (trimmed) search text for subsequent requests in this session.
request.getSession().setAttribute("matchText", matchText);
// Translate the submitted search option/algorithm into the "checked"
// attribute strings used when re-rendering the radio buttons further down.
// An absent value selects the default radio (Code / exactMatch).
String option_code = "";
String option_name = "";
if (DataUtils.isNull(option)) {
    option_code = "checked";
} else if ("Code".equalsIgnoreCase(option)) {
    option_code = "checked";
} else if ("Name".equalsIgnoreCase(option)) {
    option_name = "checked";
}
String algorithm_exactMatch = "";
String algorithm_startsWith = "";
String algorithm_contains = "";
if (DataUtils.isNull(algorithm)) {
    algorithm_exactMatch = "checked";
} else if ("exactMatch".equalsIgnoreCase(algorithm)) {
    algorithm_exactMatch = "checked";
} else if ("startsWith".equalsIgnoreCase(algorithm)) {
    algorithm_startsWith = "checked";
} else if ("contains".equalsIgnoreCase(algorithm)) {
    algorithm_contains = "checked";
}
// NOTE(review): debug output on stdout, executed on every page render.
// Route through the application's logger at debug level (or delete) before
// shipping; left byte-identical here because removing it changes observable
// console output.
System.out.println("*** OPTION: " + option);
System.out.println("*** ALGORITHM: " + algorithm);
System.out.println("*** matchText: " + matchText);
System.out.println("AjaxServlet option_code: " + option_code);
System.out.println("AjaxServlet option_name: " + option_name);
System.out.println("AjaxServlet algorithm_exactMatch: " + algorithm_exactMatch);
System.out.println("AjaxServlet algorithm_startsWith: " + algorithm_startsWith);
System.out.println("AjaxServlet algorithm_contains: " + algorithm_contains);
out.println("");
if (message == null) {
out.println(" tree.collapseAll();");
}
out.println(" tree.draw();");
out.println(" }");
out.println("");
out.println("");
out.println(" function onCheckClick(node) {");
out.println(" YAHOO.log(node.label + \" check was clicked, new state: \" + node.checkState, \"info\", \"example\");");
out.println(" }");
out.println("");
out.println(" function check_all() {");
out.println(" var topNodes = tree.getRoot().children;");
out.println(" for(var i=0; i<topNodes.length; ++i) {");
out.println(" topNodes[i].check();");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function uncheck_all() {");
out.println(" var topNodes = tree.getRoot().children;");
out.println(" for(var i=0; i<topNodes.length; ++i) {");
out.println(" topNodes[i].uncheck();");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println(" function expand_all() {");
out.println(" //alert(\"expand_all\");");
out.println(" var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
out.println(" onClickViewEntireOntology(ontology_display_name);");
out.println(" }");
out.println("");
out.println("");
// 0=unchecked, 1=some children checked, 2=all children checked
out.println(" // Gets the labels of all of the fully checked nodes");
out.println(" // Could be updated to only return checked leaf nodes by evaluating");
out.println(" // the children collection first.");
out.println(" function getCheckedNodes(nodes) {");
out.println(" nodes = nodes || tree.getRoot().children;");
out.println(" checkedNodes = [];");
out.println(" for(var i=0, l=nodes.length; i<l; i=i+1) {");
out.println(" var n = nodes[i];");
out.println(" if (n.checkState > 0) { // if we were interested in the nodes that have some but not all children checked");
out.println(" //if (n.checkState == 2) {");
out.println(" checkedNodes.push(n.label); // just using label for simplicity");
out.println("");
out.println(" if (n.hasChildren()) {");
out.println(" checkedNodes = checkedNodes.concat(getCheckedNodes(n.children));");
out.println(" }");
out.println("");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var checked_vocabularies = document.forms[\"valueSetSearchForm\"].checked_vocabularies;");
out.println(" checked_vocabularies.value = checkedNodes;");
out.println("");
out.println(" return checkedNodes;");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" function loadNodeData(node, fnLoadComplete) {");
out.println(" var id = node.data.id;");
out.println("");
out.println(" var responseSuccess = function(o)");
out.println(" {");
out.println(" var path;");
out.println(" var dirs;");
out.println(" var files;");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" var fileNum = 0;");
out.println(" var categoryNum = 0;");
out.println(" if ( typeof(respObj.nodes) != \"undefined\") {");
out.println(" for (var i=0; i < respObj.nodes.length; i++) {");
out.println(" var name = respObj.nodes[i].ontology_node_name;");
out.println(" var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.nodes[i].ontology_node_id + \"');\";");
out.println(" var newNodeData = { label:name, id:respObj.nodes[i].ontology_node_id, href:nodeDetails };");
out.println(" var newNode = new YAHOO.widget.TaskNode(newNodeData, node, false);");
out.println(" if (respObj.nodes[i].ontology_node_child_count > 0) {");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" tree.draw();");
out.println(" fnLoadComplete();");
out.println(" }");
out.println("");
out.println(" var responseFailure = function(o){");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var callback =");
out.println(" {");
out.println(" success:responseSuccess,");
out.println(" failure:responseFailure");
out.println(" };");
out.println("");
out.println(" var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var cObj = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=expand_src_vs_tree&ontology_node_id=' +id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version,callback);");
out.println(" }");
out.println("");
out.println("");
out.println(" function searchTree(ontology_node_id, ontology_display_name) {");
out.println("");
out.println(" var handleBuildTreeSuccess = function(o) {");
out.println("");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println("");
out.println(" if ( typeof(respObj.dummy_root_nodes) != \"undefined\") {");
out.println(" showNodeNotFound(ontology_node_id);");
out.println(" }");
out.println("");
out.println(" else if ( typeof(respObj.root_nodes) != \"undefined\") {");
out.println(" var root = tree.getRoot();");
out.println(" if (respObj.root_nodes.length == 0) {");
out.println(" //showEmptyRoot();");
out.println(" }");
out.println(" else {");
out.println(" showPartialHierarchy();");
out.println(" showConstructingTreeStatus();");
out.println("");
out.println(" for (var i=0; i < respObj.root_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.root_nodes[i];");
out.println(" //var expand = false;");
out.println(" addTreeBranch(ontology_node_id, root, nodeInfo);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleBuildTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var buildTreeCallback =");
out.println(" {");
out.println(" success:handleBuildTreeSuccess,");
out.println(" failure:handleBuildTreeFailure");
out.println(" };");
out.println("");
out.println(" if (ontology_display_name!='') {");
out.println(" var ontology_source = null;//document.pg_form.ontology_source.value;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=search_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
out.println("");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println(" function expandEntireTree() {");
out.println(" tree = new YAHOO.widget.TreeView(\"treecontainer\");");
out.println(" //tree.draw();");
out.println("");
out.println(" var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
out.println(" var ontology_node_id = document.forms[\"pg_form\"].ontology_node_id.value;");
out.println("");
out.println(" var handleBuildTreeSuccess = function(o) {");
out.println("");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println("");
out.println(" if ( typeof(respObj.root_nodes) != \"undefined\") {");
out.println("");
out.println(" //alert(respObj.root_nodes.length);");
out.println("");
out.println(" var root = tree.getRoot();");
out.println(" if (respObj.root_nodes.length == 0) {");
out.println(" //showEmptyRoot();");
out.println(" } else {");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" for (var i=0; i < respObj.root_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.root_nodes[i];");
out.println(" //alert(\"calling addTreeBranch \");");
out.println("");
out.println(" addTreeBranch(ontology_node_id, root, nodeInfo);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleBuildTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var buildTreeCallback =");
out.println(" {");
out.println(" success:handleBuildTreeSuccess,");
out.println(" failure:handleBuildTreeFailure");
out.println(" };");
out.println("");
out.println(" if (ontology_display_name!='') {");
out.println(" var ontology_source = null;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=expand_entire_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
out.println("");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" function addTreeBranch(ontology_node_id, rootNode, nodeInfo) {");
out.println(" var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
out.println("");
out.println(" var newNodeData;");
out.println(" if (ontology_node_id.indexOf(\"TVS_\") >= 0) {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id };");
out.println(" } else {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
out.println(" }");
out.println("");
out.println(" var expand = false;");
out.println(" var childNodes = nodeInfo.children_nodes;");
out.println("");
out.println(" if (childNodes.length > 0) {");
out.println(" expand = true;");
out.println(" }");
out.println(" var newNode = new YAHOO.widget.TaskNode(newNodeData, rootNode, expand);");
out.println(" if (nodeInfo.ontology_node_id == ontology_node_id) {");
out.println(" newNode.labelStyle = \"ygtvlabel_highlight\";");
out.println(" }");
out.println("");
out.println(" if (nodeInfo.ontology_node_id == ontology_node_id) {");
out.println(" newNode.isLeaf = true;");
out.println(" if (nodeInfo.ontology_node_child_count > 0) {");
out.println(" newNode.isLeaf = false;");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" } else {");
out.println(" tree.draw();");
out.println(" }");
out.println("");
out.println(" } else {");
out.println(" if (nodeInfo.ontology_node_id != ontology_node_id) {");
out.println(" if (nodeInfo.ontology_node_child_count == 0 && nodeInfo.ontology_node_id != ontology_node_id) {");
out.println(" newNode.isLeaf = true;");
out.println(" } else if (childNodes.length == 0) {");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" tree.draw();");
out.println(" for (var i=0; i < childNodes.length; i++) {");
out.println(" var childnodeInfo = childNodes[i];");
out.println(" addTreeBranch(ontology_node_id, newNode, childnodeInfo);");
out.println(" }");
out.println(" }");
out.println(" YAHOO.util.Event.addListener(window, \"load\", init);");
out.println("");
out.println(" YAHOO.util.Event.onDOMReady(initTree);");
out.println("");
out.println("");
out.println(" </script>");
out.println("");
out.println("</head>");
out.println("");
out.println("");
out.println("");
out.println("");
out.println("");
out.println("<body onLoad=\"document.forms.valueSetSearchForm.matchText.focus();\">");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/wz_tooltip.js\"></script>");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/tip_centerwindow.js\"></script>");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/tip_followscroll.js\"></script>");
out.println("");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" <!-- Begin Skip Top Navigation -->");
out.println(" <a href=\"#evs-content\" class=\"hideLink\" accesskey=\"1\" title=\"Skip repetitive navigation links\">skip navigation links</A>");
out.println(" <!-- End Skip Top Navigation -->");
out.println("");
out.println("<!-- nci banner -->");
out.println("<div class=\"ncibanner\">");
// NOTE(review): the four anchor lines in this banner were truncated after
// "http:" in the source as received, leaving unterminated string literals
// (the file could not have compiled as-is). The URLs below are reconstructed
// from the image alt text (cancer.gov / nih.gov) — confirm against the
// original page source before release.
out.println("  <a href=\"http://www.cancer.gov\">");
out.println("    <img src=\"/ncitbrowser/images/logotype.gif\"");
out.println("         width=\"440\" height=\"39\" border=\"0\"");
out.println("         alt=\"National Cancer Institute\"/>");
out.println("  </a>");
out.println("  <a href=\"http://www.cancer.gov\">");
out.println("    <img src=\"/ncitbrowser/images/spacer.gif\"");
out.println("         width=\"48\" height=\"39\" border=\"0\"");
out.println("         alt=\"National Cancer Institute\" class=\"print-header\"/>");
out.println("  </a>");
out.println("  <a href=\"http://www.nih.gov\">");
out.println("    <img src=\"/ncitbrowser/images/tagline_nologo.gif\"");
out.println("         width=\"173\" height=\"39\" border=\"0\"");
out.println("         alt=\"U.S. National Institutes of Health\"/>");
out.println("  </a>");
out.println("  <a href=\"http://www.cancer.gov\">");
out.println("    <img src=\"/ncitbrowser/images/cancer-gov.gif\"");
out.println("         width=\"99\" height=\"39\" border=\"0\"");
out.println("         alt=\"www.cancer.gov\"/>");
out.println("  </a>");
out.println("</div>");
out.println("<!-- end nci banner -->");
out.println("");
out.println(" <div class=\"center-page\">");
out.println(" <!-- EVS Logo -->");
out.println("<div>");
out.println(" <img src=\"/ncitbrowser/images/evs-logo-swapped.gif\" alt=\"EVS Logo\"");
out.println(" width=\"745\" height=\"26\" border=\"0\"");
out.println(" usemap=\"#external-evs\" />");
out.println(" <map id=\"external-evs\" name=\"external-evs\">");
out.println(" <area shape=\"rect\" coords=\"0,0,140,26\"");
out.println(" href=\"/ncitbrowser/start.jsf\" target=\"_self\"");
out.println(" alt=\"NCI Term Browser\" />");
out.println(" <area shape=\"rect\" coords=\"520,0,745,26\"");
out.println(" href=\"http://evs.nci.nih.gov/\" target=\"_blank\"");
out.println(" alt=\"Enterprise Vocabulary Services\" />");
out.println(" </map>");
out.println("</div>");
out.println("");
out.println("");
out.println("<table cellspacing=\"0\" cellpadding=\"0\" border=\"0\">");
out.println(" <tr>");
out.println(" <td width=\"5\"></td>");
out.println(" <td><a href=\"/ncitbrowser/pages/multiple_search.jsf?nav_type=terminologies\">");
out.println(" <img name=\"tab_terms\" src=\"/ncitbrowser/images/tab_terms.gif\"");
out.println(" border=\"0\" alt=\"Terminologies\" title=\"Terminologies\" /></a></td>");
//Before(GF31982): out.println(" <td><a href=\"/ncitbrowser/pages/value_set_source_view.jsf?nav_type=valuesets\">");
out.println(" <td><a href=\"/ncitbrowser/ajax?action=create_src_vs_tree\">"); //GF31982
out.println(" <img name=\"tab_valuesets\" src=\"/ncitbrowser/images/tab_valuesets_clicked.gif\"");
out.println(" border=\"0\" alt=\"Value Sets\" title=\"ValueSets\" /></a></td>");
out.println(" <td><a href=\"/ncitbrowser/pages/mapping_search.jsf?nav_type=mappings\">");
out.println(" <img name=\"tab_map\" src=\"/ncitbrowser/images/tab_map.gif\"");
out.println(" border=\"0\" alt=\"Mappings\" title=\"Mappings\" /></a></td>");
out.println(" </tr>");
out.println("</table>");
out.println("");
out.println("<div class=\"mainbox-top\"><img src=\"/ncitbrowser/images/mainbox-top.gif\" width=\"745\" height=\"5\" alt=\"\"/></div>");
out.println("<!-- end EVS Logo -->");
out.println(" <!-- Main box -->");
out.println(" <div id=\"main-area\">");
out.println("");
out.println(" <!-- Thesaurus, banner search area -->");
out.println(" <div class=\"bannerarea\">");
out.println(" <a href=\"/ncitbrowser/start.jsf\" style=\"text-decoration: none;\">");
out.println(" <div class=\"vocabularynamebanner_tb\">");
out.println(" <span class=\"vocabularynamelong_tb\">" + JSPUtils.getApplicationVersionDisplay() + "</span>");
out.println(" </div>");
out.println(" </a>");
out.println(" <div class=\"search-globalnav\">");
out.println(" <!-- Search box -->");
out.println(" <div class=\"searchbox-top\"><img src=\"/ncitbrowser/images/searchbox-top.gif\" width=\"352\" height=\"2\" alt=\"SearchBox Top\" /></div>");
out.println(" <div class=\"searchbox\">");
out.println("");
out.println("");
//out.println("<form id=\"valueSetSearchForm\" name=\"valueSetSearchForm\" method=\"post\" action=\"" + contextPath + + "/ajax?action=saerch_value_set_tree\"> "/pages/value_set_source_view.jsf\" class=\"search-form-main-area\" enctype=\"application/x-www-form-urlencoded\">");
out.println("<form id=\"valueSetSearchForm\" name=\"valueSetSearchForm\" method=\"post\" action=\"" + contextPath + "/ajax?action=search_value_set\" class=\"search-form-main-area\" enctype=\"application/x-www-form-urlencoded\">");
out.println("<input type=\"hidden\" name=\"valueSetSearchForm\" value=\"valueSetSearchForm\" />");
out.println("<input type=\"hidden\" name=\"view\" value=\"" + view_str + "\" />");
out.println("");
out.println("");
out.println("");
out.println(" <input type=\"hidden\" id=\"checked_vocabularies\" name=\"checked_vocabularies\" value=\"\" />");
out.println("");
out.println("");
out.println("");
out.println("<table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" style=\"margin: 2px\" >");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td align=\"left\" class=\"textbody\">");
out.println("");
out.println(" <input CLASS=\"searchbox-input-2\"");
out.println(" name=\"matchText\"");
out.println(" value=\"" + matchText + "\"");
out.println(" onFocus=\"active = true\"");
out.println(" onBlur=\"active = false\"");
out.println(" onkeypress=\"return submitEnter('valueSetSearchForm:valueset_search',event)\"");
out.println(" tabindex=\"1\"/>");
out.println("");
out.println("");
out.println(" <input id=\"valueSetSearchForm:valueset_search\" type=\"image\" src=\"/ncitbrowser/images/search.gif\" name=\"valueSetSearchForm:valueset_search\" alt=\"Search Value Sets\" onclick=\"javascript:getCheckedNodes();\" tabindex=\"2\" class=\"searchbox-btn\" /><a href=\"/ncitbrowser/pages/help.jsf#searchhelp\" tabindex=\"3\"><img src=\"/ncitbrowser/images/search-help.gif\" alt=\"Search Help\" style=\"border-width:0;\" class=\"searchbox-btn\" /></a>");
out.println("");
out.println("");
out.println(" </td>");
out.println(" </tr>");
out.println("");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td>");
out.println(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" style=\"margin: 0px\">");
out.println("");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td align=\"left\" class=\"textbody\">");
out.println(" <input type=\"radio\" name=\"valueset_search_algorithm\" value=\"exactMatch\" alt=\"Exact Match\" " + algorithm_exactMatch + " tabindex=\"3\">Exact Match ");
out.println(" <input type=\"radio\" name=\"valueset_search_algorithm\" value=\"startsWith\" alt=\"Begins With\" " + algorithm_startsWith + " tabindex=\"3\">Begins With ");
out.println(" <input type=\"radio\" name=\"valueset_search_algorithm\" value=\"contains\" alt=\"Contains\" " + algorithm_contains + " tabindex=\"3\">Contains");
out.println(" </td>");
out.println(" </tr>");
out.println("");
out.println(" <tr align=\"left\">");
out.println(" <td height=\"1px\" bgcolor=\"#2F2F5F\" align=\"left\"></td>");
out.println(" </tr>");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td align=\"left\" class=\"textbody\">");
out.println(" <input type=\"radio\" id=\"selectValueSetSearchOption\" name=\"selectValueSetSearchOption\" value=\"Code\" " + option_code + " alt=\"Code\" tabindex=\"1\" >Code ");
out.println(" <input type=\"radio\" id=\"selectValueSetSearchOption\" name=\"selectValueSetSearchOption\" value=\"Name\" " + option_name + " alt=\"Name\" tabindex=\"1\" >Name");
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println(" </td>");
out.println(" </tr>");
out.println("</table>");
// NOTE(review): the referer value is a hardcoded, URL-encoded
// http://localhost:8080/... address — wrong in any deployed environment.
// Build it from the current request (scheme/host/port + contextPath)
// instead; left byte-identical here pending that change.
out.println("    <input type=\"hidden\" name=\"referer\" id=\"referer\" value=\"http%3A%2F%2Flocalhost%3A8080%2Fncitbrowser%2Fpages%2Fresolved_value_set_search_results.jsf\">");
out.println("    <input type=\"hidden\" id=\"nav_type\" name=\"nav_type\" value=\"valuesets\" />");
out.println("    <input type=\"hidden\" id=\"view\" name=\"view\" value=\"source\" />");
out.println("");
out.println("<input type=\"hidden\" name=\"javax.faces.ViewState\" id=\"javax.faces.ViewState\" value=\"j_id22:j_id23\" />");
out.println("</form>");
out.println(" </div> <!-- searchbox -->");
out.println("");
out.println(" <div class=\"searchbox-bottom\"><img src=\"/ncitbrowser/images/searchbox-bottom.gif\" width=\"352\" height=\"2\" alt=\"SearchBox Bottom\" /></div>");
out.println(" <!-- end Search box -->");
out.println(" <!-- Global Navigation -->");
out.println("");
out.println("<table class=\"global-nav\" border=\"0\" width=\"100%\" height=\"37px\" cellpadding=\"0\" cellspacing=\"0\">");
out.println(" <tr>");
out.println(" <td align=\"left\" valign=\"bottom\">");
out.println(" <a href=\"#\" onclick=\"javascript:window.open('/ncitbrowser/pages/source_help_info-termbrowser.jsf',");
out.println(" '_blank','top=100, left=100, height=740, width=780, status=no, menubar=no, resizable=yes, scrollbars=yes, toolbar=no, location=no, directories=no');\" tabindex=\"13\">");
out.println(" Sources</a>");
out.println("");
//KLO, 022612
out.println(" \r\n");
out.println(" ");
out.print( VisitedConceptUtils.getDisplayLink(request, true) );
out.println(" \r\n");
// Visited concepts -- to be implemented.
// out.println(" | <A href=\"#\" onmouseover=\"Tip('<ul><li><a href=\'/ncitbrowser/ConceptReport.jsp?dictionary=NCI Thesaurus&version=11.09d&code=C44256\'>Ratio (NCI Thesaurus 11.09d)</a><br></li></ul>',WIDTH, 300, TITLE, 'Visited Concepts', SHADOW, true, FADEIN, 300, FADEOUT, 300, STICKY, 1, CLOSEBTN, true, CLICKCLOSE, true)\" onmouseout=UnTip() >Visited Concepts</A>");
out.println(" </td>");
out.println(" <td align=\"right\" valign=\"bottom\">");
out.println(" <a href=\"");
out.print( request.getContextPath() );
out.println("/pages/help.jsf\" tabindex=\"16\">Help</a>\r\n");
out.println(" </td>\r\n");
out.println(" <td width=\"7\"></td>\r\n");
out.println(" </tr>\r\n");
out.println("</table>");
/*
out.println(" <a href=\"/ncitbrowser/pages/help.jsf\" tabindex=\"16\">Help</a>");
out.println(" </td>");
out.println(" <td width=\"7\"></td>");
out.println(" </tr>");
out.println("</table>");
*/
out.println(" <!-- end Global Navigation -->");
out.println("");
out.println(" </div> <!-- search-globalnav -->");
out.println(" </div> <!-- bannerarea -->");
out.println("");
out.println(" <!-- end Thesaurus, banner search area -->");
out.println(" <!-- Quick links bar -->");
out.println("");
out.println("<div class=\"bluebar\">");
out.println(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"0\">");
out.println(" <tr>");
out.println(" <td><div class=\"quicklink-status\"> </div></td>");
out.println(" <td>");
out.println("");
addQuickLink(request, out);
out.println("");
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println("");
out.println("</div>");
if (! ServerMonitorThread.getInstance().isLexEVSRunning()) {
out.println(" <div class=\"redbar\">");
out.println(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"0\">");
out.println(" <tr>");
out.println(" <td class=\"lexevs-status\">");
out.println(" " + ServerMonitorThread.getInstance().getMessage());
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println(" </div>");
}
out.println(" <!-- end Quick links bar -->");
out.println("");
out.println(" <!-- Page content -->");
out.println(" <div class=\"pagecontent\">");
out.println("");
if (message != null) {
out.println("\r\n");
out.println(" <p class=\"textbodyred\">");
out.print(message);
out.println("</p>\r\n");
out.println(" ");
request.getSession().removeAttribute("message");
}
out.println("<p class=\"textbody\">");
out.println("View value sets organized by standards category or source terminology.");
out.println("Standards categories group the value sets supporting them; all other labels lead to the home pages of actual value sets or source terminologies.");
out.println("Search or browse a value set from its home page, or search all value sets at once from this page (very slow) to find which ones contain a particular code or term.");
out.println("</p>");
out.println("");
out.println(" <div id=\"popupContentArea\">");
out.println(" <a name=\"evs-content\" id=\"evs-content\"></a>");
out.println("");
out.println(" <table width=\"580px\" cellpadding=\"3\" cellspacing=\"0\" border=\"0\">");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" <tr class=\"textbody\">");
out.println(" <td class=\"textbody\" align=\"left\">");
out.println("");
if (view == Constants.STANDARD_VIEW) {
out.println(" Standards View");
out.println(" |");
out.println(" <a href=\"" + contextPath + "/ajax?action=create_cs_vs_tree\">Terminology View</a>");
} else {
out.println(" <a href=\"" + contextPath + "/ajax?action=create_src_vs_tree\">Standards View</a>");
out.println(" |");
out.println(" Terminology View");
}
out.println(" </td>");
out.println("");
out.println(" <td align=\"right\">");
out.println(" <font size=\"1\" color=\"red\" align=\"right\">");
out.println(" <a href=\"javascript:printPage()\"><img src=\"/ncitbrowser/images/printer.bmp\" border=\"0\" alt=\"Send to Printer\"><i>Send to Printer</i></a>");
out.println(" </font>");
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println("");
out.println(" <hr/>");
out.println("");
out.println("");
out.println("");
out.println("<style>");
out.println("#expandcontractdiv {border:1px solid #336600; background-color:#FFFFCC; margin:0 0 .5em 0; padding:0.2em;}");
out.println("#treecontainer { background: #fff }");
out.println("</style>");
out.println("");
out.println("");
out.println("<div id=\"expandcontractdiv\">");
out.println(" <a id=\"expand_all\" href=\"#\">Expand all</a>");
out.println(" <a id=\"collapse_all\" href=\"#\">Collapse all</a>");
out.println(" <a id=\"check_all\" href=\"#\">Check all</a>");
out.println(" <a id=\"uncheck_all\" href=\"#\">Uncheck all</a>");
out.println("</div>");
out.println("");
out.println("");
out.println("");
out.println(" <!-- Tree content -->");
out.println("");
out.println(" <div id=\"treecontainer\" class=\"ygtv-checkbox\"></div>");
out.println("");
out.println(" <form id=\"pg_form\">");
out.println("");
out.println(" <input type=\"hidden\" id=\"ontology_node_id\" name=\"ontology_node_id\" value=\"null\" />");
//out.println(" <input type=\"hidden\" id=\"ontology_display_name\" name=\"ontology_display_name\" value=\"null\" />");
out.println(" <input type=\"hidden\" id=\"schema\" name=\"schema\" value=\"null\" />");
//out.println(" <input type=\"hidden\" id=\"ontology_version\" name=\"ontology_version\" value=\"null\" />");
out.println(" <input type=\"hidden\" id=\"view\" name=\"view\" value=\"source\" />");
out.println(" </form>");
out.println("");
out.println("");
out.println(" </div> <!-- popupContentArea -->");
out.println("");
out.println("");
out.println("<div class=\"textbody\">");
out.println("<!-- footer -->");
out.println("<div class=\"footer\" style=\"width:720px\">");
out.println(" <ul>");
out.println(" <li><a href=\"http:
out.println(" <li><a href=\"/ncitbrowser/pages/contact_us.jsf\">Contact Us</a> |</li>");
out.println(" <li><a href=\"http:
out.println(" <li><a href=\"http:
out.println(" <li><a href=\"http:
out.println(" </ul>");
out.println(" <p>");
out.println(" A Service of the National Cancer Institute<br />");
out.println(" <img src=\"/ncitbrowser/images/external-footer-logos.gif\"");
out.println(" alt=\"External Footer Logos\" width=\"238\" height=\"34\" border=\"0\"");
out.println(" usemap=\"#external-footer\" />");
out.println(" </p>");
out.println(" <map id=\"external-footer\" name=\"external-footer\">");
out.println(" <area shape=\"rect\" coords=\"0,0,46,34\"");
out.println(" href=\"http:
out.println(" alt=\"National Cancer Institute\" />");
out.println(" <area shape=\"rect\" coords=\"55,1,99,32\"");
out.println(" href=\"http:
out.println(" alt=\"U.S. Health & Human Services\" />");
out.println(" <area shape=\"rect\" coords=\"103,1,147,31\"");
out.println(" href=\"http:
out.println(" alt=\"National Institutes of Health\" />");
out.println(" <area shape=\"rect\" coords=\"148,1,235,33\"");
out.println(" href=\"http:
out.println(" alt=\"USA.gov\" />");
out.println(" </map>");
out.println("</div>");
out.println("<!-- end footer -->");
out.println("</div>");
out.println("");
out.println("");
out.println(" </div> <!-- pagecontent -->");
out.println(" </div> <!-- main-area -->");
out.println(" <div class=\"mainbox-bottom\"><img src=\"/ncitbrowser/images/mainbox-bottom.gif\" width=\"745\" height=\"5\" alt=\"Mainbox Bottom\" /></div>");
out.println("");
out.println(" </div> <!-- center-page -->");
out.println("");
out.println("</body>");
out.println("</html>");
out.println("");
}
/**
 * Handles the AJAX "search value set" request. Validates the search text and
 * the set of checked value set definitions, delegates the actual search to
 * {@link #valueSetSearchAction(HttpServletRequest)}, and then either redirects
 * to the search-results page or redraws the value set tree with a user-facing
 * message stored in the session under "message".
 *
 * @param request  HTTP request carrying selectValueSetSearchOption,
 *                 valueset_search_algorithm, matchText, checked_vocabularies,
 *                 view, and optionally ontology_display_name / ontology_version
 * @param response HTTP response used for the redirect or the tree output
 */
public static void search_value_set(HttpServletRequest request, HttpServletResponse response) {
    System.out.println("(*** AjaxServlet ***) search_value_set ...");
    // Echo the chosen search option and algorithm back into the session so the
    // search form re-renders with the same selections.
    String selectValueSetSearchOption = request.getParameter("selectValueSetSearchOption");
    request.getSession().setAttribute("selectValueSetSearchOption", selectValueSetSearchOption);
    String algorithm = request.getParameter("valueset_search_algorithm");
    request.getSession().setAttribute("valueset_search_algorithm", algorithm);
    System.out.println("(*** AjaxServlet ***) selectValueSetSearchOption ..." + selectValueSetSearchOption);
    System.out.println("(*** AjaxServlet ***) search_value_set ...algorithm " + algorithm);
    // check if any checkbox is checked.
    String contextPath = request.getContextPath();
    String view_str = request.getParameter("view");
    int view = Integer.parseInt(view_str);
    String msg = null;
    request.getSession().removeAttribute("checked_vocabularies");
    String checked_vocabularies = request.getParameter("checked_vocabularies");
    System.out.println("checked_vocabularies: " + checked_vocabularies);
    String matchText = request.getParameter("matchText");
    if (DataUtils.isNull(matchText)) {
        matchText = "";
    } else {
        matchText = matchText.trim();
    }
    request.getSession().setAttribute("matchText", matchText);
    System.out.println("(*** AjaxServlet ***) search_value_set ...matchText " + matchText);
    String ontology_display_name = request.getParameter("ontology_display_name");
    String ontology_version = request.getParameter("ontology_version");
    System.out.println("search_value_set ontology_display_name: " + ontology_display_name);
    System.out.println("search_value_set ontology_version: " + ontology_version);
    // Empty search string: report and redraw the tree instead of searching.
    if (matchText.length() == 0) {
        msg = "Please enter a search string.";
        System.out.println(msg);
        request.getSession().setAttribute("message", msg);
        redrawValueSetTree(request, response, view, ontology_display_name, ontology_version);
        return;
    }
    // No checkbox checked (parameter present but empty, or absent): report and redraw.
    if (checked_vocabularies == null || checked_vocabularies.length() == 0) { //DYEE
        msg = "No value set definition is selected.";
        System.out.println(msg);
        request.getSession().setAttribute("message", msg);
        redrawValueSetTree(request, response, view, ontology_display_name, ontology_version);
    } else {
        String destination = contextPath + "/pages/value_set_search_results.jsf";
        try {
            String retstr = valueSetSearchAction(request);
            //KLO, 041312
            if (retstr.compareTo("message") == 0) {
                // The search produced a message (error or "no match"); show the tree again.
                redrawValueSetTree(request, response, view, ontology_display_name, ontology_version);
                return;
            }
            // Store the selection before redirecting so it survives even if the
            // redirect itself fails (previously this ran after sendRedirect and
            // was silently lost when the redirect threw).
            request.getSession().setAttribute("checked_vocabularies", checked_vocabularies);
            System.out.println("(*) redirecting to: " + destination);
            response.sendRedirect(response.encodeRedirectURL(destination));
        } catch (Exception ex) {
            System.out.println("response.sendRedirect failed???");
            // Log the full failure instead of discarding the exception.
            ex.printStackTrace();
        }
    }
}

/**
 * Redraws the value set tree, using the dictionary-specific variant when both
 * an ontology display name and version are available.
 */
private static void redrawValueSetTree(HttpServletRequest request, HttpServletResponse response,
        int view, String ontology_display_name, String ontology_version) {
    if (!DataUtils.isNull(ontology_display_name) && !DataUtils.isNull(ontology_version)) {
        create_vs_tree(request, response, view, ontology_display_name, ontology_version);
    } else {
        create_vs_tree(request, response, view);
    }
}
/**
 * Searches the checked value set definitions for a code ("Code" option) or a
 * term ("Name" option) and stores the results in the HTTP session:
 * "matched_vsds" receives a Vector of metadata strings for every matching
 * value set definition, "vsd_uri" receives the URI of the single match when
 * exactly one is found, and "message" receives any user-facing error text.
 *
 * @param request HTTP request carrying selectValueSetSearchOption,
 *                valueset_search_algorithm, checked_vocabularies, view,
 *                and matchText
 * @return "value_set" when the search completed with matches,
 *         "message" when a message was stored for the user instead
 */
public static String valueSetSearchAction(HttpServletRequest request) {
    String msg = null;
    // Default to a "Name" search with the "exactMatch" algorithm when the
    // request does not specify them explicitly.
    String selectValueSetSearchOption = request.getParameter("selectValueSetSearchOption");
    if (DataUtils.isNull(selectValueSetSearchOption)) {
        selectValueSetSearchOption = "Name";
    }
    request.getSession().setAttribute("selectValueSetSearchOption", selectValueSetSearchOption);
    String algorithm = request.getParameter("valueset_search_algorithm");
    if (DataUtils.isNull(algorithm)) {
        algorithm = "exactMatch";
    }
    request.getSession().setAttribute("valueset_search_algorithm", algorithm);
    String checked_vocabularies = request.getParameter("checked_vocabularies");
    System.out.println("checked_vocabularies: " + checked_vocabularies);
    // Parameter present but empty means the user unchecked everything.
    if (checked_vocabularies != null && checked_vocabularies.compareTo("") == 0) {
        msg = "No value set definition is selected.";
        System.out.println(msg);
        request.getSession().setAttribute("message", msg);
        return "message";
    }
    Vector selected_vocabularies = DataUtils.parseData(checked_vocabularies, ",");
    System.out.println("selected_vocabularies count: " + selected_vocabularies.size());
    String VSD_view = request.getParameter("view");
    request.getSession().setAttribute("view", VSD_view);
    String matchText = request.getParameter("matchText");
    // Accumulates the metadata of every matching value set definition.
    Vector v = new Vector();
    LexEVSValueSetDefinitionServices vsd_service =
        RemoteServerUtil.getLexEVSValueSetDefinitionServices();
    if (matchText != null) matchText = matchText.trim();
    if (selectValueSetSearchOption.compareTo("Code") == 0) {
        // Tracks the URI of the most recent match so a single-hit search can
        // publish it as "vsd_uri" below.
        String uri = null;
        try {
            String versionTag = null; //"PRODUCTION";
            if (checked_vocabularies != null) {
                // Probe each checked value set definition for the entity code.
                for (int k = 0; k < selected_vocabularies.size(); k++) {
                    String vsd_name = (String) selected_vocabularies.elementAt(k);
                    String vsd_uri = DataUtils.getValueSetDefinitionURIByName(vsd_name);
                    System.out.println("vsd_name: " + vsd_name + " (vsd_uri: " + vsd_uri + ")");
                    try {
                        if (vsd_uri != null) {
                            ValueSetDefinition vsd = vsd_service.getValueSetDefinition(new URI(vsd_uri), null);
                            AbsoluteCodingSchemeVersionReference acsvr = vsd_service.isEntityInValueSet(matchText,
                                new URI(vsd_uri),
                                null,
                                versionTag);
                            if (acsvr != null) {
                                String metadata = DataUtils.getValueSetDefinitionMetadata(vsd);
                                if (metadata != null) {
                                    v.add(metadata);
                                    // Bug fix: remember the matching URI; previously
                                    // this branch left uri null, so a single match
                                    // stored a null "vsd_uri" in the session.
                                    uri = vsd_uri;
                                }
                            }
                        } else {
                            System.out.println("WARNING: Unable to find vsd_uri for " + vsd_name);
                        }
                    } catch (Exception ex) {
                        // Best-effort per-definition probe; keep scanning the rest.
                        System.out.println("WARNING: vsd_service.getValueSetDefinition threw exception: " + vsd_name);
                    }
                }
            } else {
                // No explicit selection: ask the service which value sets contain
                // the code, then keep those whose name is in the selection list.
                AbsoluteCodingSchemeVersionReferenceList csVersionList = null;
                List list = vsd_service.listValueSetsWithEntityCode(matchText, null, csVersionList, versionTag);
                if (list != null) {
                    for (int j = 0; j < list.size(); j++) {
                        uri = (String) list.get(j);
                        String vsd_name = DataUtils.valueSetDefiniionURI2Name(uri);
                        if (selected_vocabularies.contains(vsd_name)) {
                            try {
                                ValueSetDefinition vsd = vsd_service.getValueSetDefinition(new URI(uri), null);
                                if (vsd == null) {
                                    msg = "Unable to find any value set with URI " + uri + ".";
                                    request.getSession().setAttribute("message", msg);
                                    return "message";
                                }
                                String metadata = DataUtils.getValueSetDefinitionMetadata(vsd);
                                if (metadata != null) {
                                    v.add(metadata);
                                }
                            } catch (Exception ex) {
                                ex.printStackTrace();
                                msg = "Unable to find any value set with URI " + uri + ".";
                                request.getSession().setAttribute("message", msg);
                                return "message";
                            }
                        }
                    }
                }
            }
            request.getSession().setAttribute("matched_vsds", v);
            if (v.size() == 0) {
                msg = "No match found.";
                request.getSession().setAttribute("message", msg);
                return "message";
            } else if (v.size() == 1) {
                request.getSession().setAttribute("vsd_uri", uri);
            }
            return "value_set";
        } catch (Exception ex) {
            ex.printStackTrace();
            System.out.println("vsd_service.listValueSetsWithEntityCode throws exceptions???");
        }
        msg = "Unexpected errors encountered; search by code failed.";
        request.getSession().setAttribute("message", msg);
        return "message";
    } else if (selectValueSetSearchOption.compareTo("Name") == 0) {
        String uri = null;
        try {
            // Term search: resolve each candidate value set definition and keep
            // those containing at least one entity matching the term.
            Vector uri_vec = DataUtils.getValueSetURIs();
            for (int i = 0; i < uri_vec.size(); i++) {
                uri = (String) uri_vec.elementAt(i);
                String vsd_name = DataUtils.valueSetDefiniionURI2Name(uri);
                if (checked_vocabularies == null || selected_vocabularies.contains(vsd_name)) {
                    AbsoluteCodingSchemeVersionReferenceList csVersionList = null;
                    ResolvedValueSetCodedNodeSet rvs_cns = null;
                    SortOptionList sortOptions = null;
                    LocalNameList propertyNames = null;
                    CodedNodeSet.PropertyType[] propertyTypes = null;
                    try {
                        System.out.println("URI: " + uri);
                        rvs_cns = vsd_service.getValueSetDefinitionEntitiesForTerm(matchText, algorithm, new URI(uri), csVersionList, null);
                        if (rvs_cns != null) {
                            CodedNodeSet cns = rvs_cns.getCodedNodeSet();
                            ResolvedConceptReferencesIterator itr = cns.resolve(sortOptions, propertyNames, propertyTypes);
                            // Only count the value set when the term actually resolves
                            // to at least one concept in at least one coding scheme.
                            if (itr != null && itr.numberRemaining() > 0) {
                                AbsoluteCodingSchemeVersionReferenceList ref_list = rvs_cns.getCodingSchemeVersionRefList();
                                if (ref_list.getAbsoluteCodingSchemeVersionReferenceCount() > 0) {
                                    try {
                                        ValueSetDefinition vsd = vsd_service.getValueSetDefinition(new URI(uri), null);
                                        if (vsd == null) {
                                            msg = "Unable to find any value set with name " + matchText + ".";
                                            request.getSession().setAttribute("message", msg);
                                            return "message";
                                        }
                                        String metadata = DataUtils.getValueSetDefinitionMetadata(vsd);
                                        if (metadata != null) {
                                            v.add(metadata);
                                        }
                                    } catch (Exception ex) {
                                        ex.printStackTrace();
                                        msg = "Unable to find any value set with name " + matchText + ".";
                                        request.getSession().setAttribute("message", msg);
                                        return "message";
                                    }
                                }
                            }
                        }
                    } catch (Exception ex) {
                        msg = "getValueSetDefinitionEntitiesForTerm throws exception -- search by \"" + matchText + "\" failed. (VSD URI: " + uri + ")";
                        System.out.println(msg);
                        request.getSession().setAttribute("message", msg);
                        ex.printStackTrace();
                        return "message";
                    }
                }
            }
            request.getSession().setAttribute("matched_vsds", v);
            if (v.size() == 0) {
                msg = "No match found.";
                request.getSession().setAttribute("message", msg);
                return "message";
            } else if (v.size() == 1) {
                request.getSession().setAttribute("vsd_uri", uri);
            }
            return "value_set";
        } catch (Exception ex) {
            System.out.println("vsd_service.getValueSetDefinitionEntitiesForTerm throws exceptions???");
        }
        msg = "Unexpected errors encountered; search by name failed.";
        request.getSession().setAttribute("message", msg);
        return "message";
    }
    // Unrecognized search option: fall through to the results page unchanged.
    return "value_set";
}
public static void create_vs_tree(HttpServletRequest request, HttpServletResponse response, int view, String dictionary, String version) {
response.setContentType("text/html");
PrintWriter out = null;
try {
out = response.getWriter();
} catch (Exception ex) {
ex.printStackTrace();
return;
}
String message = (String) request.getSession().getAttribute("message");
out.println("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">");
out.println("<html xmlns:c=\"http://java.sun.com/jsp/jstl/core\">");
out.println("<head>");
out.println(" <title>" + dictionary + " value set</title>");
//out.println(" <title>NCI Thesaurus</title>");
out.println(" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">");
out.println("");
out.println("<style type=\"text/css\">");
out.println("/*margin and padding on body element");
out.println(" can introduce errors in determining");
out.println(" element position and are not recommended;");
out.println(" we turn them off as a foundation for YUI");
out.println(" CSS treatments. */");
out.println("body {");
out.println(" margin:0;");
out.println(" padding:0;");
out.println("}");
out.println("</style>");
out.println("");
out.println("<link rel=\"stylesheet\" type=\"text/css\" href=\"http://yui.yahooapis.com/2.9.0/build/fonts/fonts-min.css\" />");
out.println("<link rel=\"stylesheet\" type=\"text/css\" href=\"http://yui.yahooapis.com/2.9.0/build/treeview/assets/skins/sam/treeview.css\" />");
out.println("");
out.println("<script type=\"text/javascript\" src=\"http://yui.yahooapis.com/2.9.0/build/yahoo-dom-event/yahoo-dom-event.js\"></script>");
out.println("<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/treeview-min.js\" ></script>");
out.println("");
out.println("");
out.println("<!-- Dependency -->");
out.println("<script src=\"http://yui.yahooapis.com/2.9.0/build/yahoo/yahoo-min.js\"></script>");
out.println("");
out.println("<!-- Source file -->");
out.println("<!
out.println(" If you require only basic HTTP transaction support, use the");
out.println(" connection_core.js file.");
out.println("
out.println("<script src=\"http://yui.yahooapis.com/2.9.0/build/connection/connection_core-min.js\"></script>");
out.println("");
out.println("<!
out.println(" Use the full connection.js if you require the following features:");
out.println(" - Form serialization.");
out.println(" - File Upload using the iframe transport.");
out.println(" - Cross-domain(XDR) transactions.");
out.println("
out.println("<script src=\"http://yui.yahooapis.com/2.9.0/build/connection/connection-min.js\"></script>");
out.println("");
out.println("");
out.println("");
out.println("<!--begin custom header content for this example
out.println("<!--Additional custom style rules for this example:
out.println("<style type=\"text/css\">");
out.println("");
out.println("");
out.println(".ygtvcheck0 { background: url(/ncitbrowser/images/yui/treeview/check0.gif) 0 0 no-repeat; width:16px; height:20px; float:left; cursor:pointer; }");
out.println(".ygtvcheck1 { background: url(/ncitbrowser/images/yui/treeview/check1.gif) 0 0 no-repeat; width:16px; height:20px; float:left; cursor:pointer; }");
out.println(".ygtvcheck2 { background: url(/ncitbrowser/images/yui/treeview/check2.gif) 0 0 no-repeat; width:16px; height:20px; float:left; cursor:pointer; }");
out.println("");
out.println("");
out.println(".ygtv-edit-TaskNode { width: 190px;}");
out.println(".ygtv-edit-TaskNode .ygtvcancel, .ygtv-edit-TextNode .ygtvok { border:none;}");
out.println(".ygtv-edit-TaskNode .ygtv-button-container { float: right;}");
out.println(".ygtv-edit-TaskNode .ygtv-input input{ width: 140px;}");
out.println(".whitebg {");
out.println(" background-color:white;");
out.println("}");
out.println("</style>");
out.println("");
out.println(" <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/styleSheet.css\" />");
out.println(" <link rel=\"shortcut icon\" href=\"/ncitbrowser/favicon.ico\" type=\"image/x-icon\" />");
out.println("");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/script.js\"></script>");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/tasknode.js\"></script>");
println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/search.js\"></script>");
println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/dropdown.js\"></script>");
out.println("");
out.println(" <script type=\"text/javascript\">");
out.println("");
out.println(" function refresh() {");
out.println("");
out.println(" var selectValueSetSearchOptionObj = document.forms[\"valueSetSearchForm\"].selectValueSetSearchOption;");
out.println("");
out.println(" for (var i=0; i<selectValueSetSearchOptionObj.length; i++) {");
out.println(" if (selectValueSetSearchOptionObj[i].checked) {");
out.println(" selectValueSetSearchOption = selectValueSetSearchOptionObj[i].value;");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println(" window.location.href=\"/ncitbrowser/pages/value_set_source_view.jsf?refresh=1\"");
out.println(" + \"&nav_type=valuesets\" + \"&opt=\"+ selectValueSetSearchOption;");
out.println("");
out.println(" }");
out.println(" </script>");
out.println("");
out.println(" <script language=\"JavaScript\">");
out.println("");
out.println(" var tree;");
out.println(" var nodeIndex;");
out.println(" var nodes = [];");
out.println("");
out.println(" function load(url,target) {");
out.println(" if (target != '')");
out.println(" target.window.location.href = url;");
out.println(" else");
out.println(" window.location.href = url;");
out.println(" }");
out.println("");
out.println(" function init() {");
out.println(" //initTree();");
out.println(" }");
out.println("");
out.println(" //handler for expanding all nodes");
out.println(" YAHOO.util.Event.on(\"expand_all\", \"click\", function(e) {");
out.println(" //expandEntireTree();");
out.println("");
out.println(" tree.expandAll();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println(" //handler for collapsing all nodes");
out.println(" YAHOO.util.Event.on(\"collapse_all\", \"click\", function(e) {");
out.println(" tree.collapseAll();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println(" //handler for checking all nodes");
out.println(" YAHOO.util.Event.on(\"check_all\", \"click\", function(e) {");
out.println(" check_all();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println(" //handler for unchecking all nodes");
out.println(" YAHOO.util.Event.on(\"uncheck_all\", \"click\", function(e) {");
out.println(" uncheck_all();");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println(" });");
out.println("");
out.println("");
out.println("");
out.println(" YAHOO.util.Event.on(\"getchecked\", \"click\", function(e) {");
out.println(" //alert(\"Checked nodes: \" + YAHOO.lang.dump(getCheckedNodes()), \"info\", \"example\");");
out.println(" //YAHOO.util.Event.preventDefault(e);");
out.println("");
out.println(" });");
out.println("");
out.println("");
out.println(" function addTreeNode(rootNode, nodeInfo) {");
out.println(" var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
out.println("");
out.println(" if (nodeInfo.ontology_node_id.indexOf(\"TVS_\") >= 0) {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id };");
out.println(" } else {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
out.println(" }");
out.println("");
out.println(" var newNode = new YAHOO.widget.TaskNode(newNodeData, rootNode, false);");
out.println(" if (nodeInfo.ontology_node_child_count > 0) {");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function buildTree(ontology_node_id, ontology_display_name) {");
out.println(" var handleBuildTreeSuccess = function(o) {");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println(" if ( typeof(respObj.root_nodes) != \"undefined\") {");
out.println(" var root = tree.getRoot();");
out.println(" if (respObj.root_nodes.length == 0) {");
out.println(" //showEmptyRoot();");
out.println(" }");
out.println(" else {");
out.println(" for (var i=0; i < respObj.root_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.root_nodes[i];");
out.println(" var expand = false;");
out.println(" //addTreeNode(root, nodeInfo, expand);");
out.println("");
out.println(" addTreeNode(root, nodeInfo);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" tree.draw();");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleBuildTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var buildTreeCallback =");
out.println(" {");
out.println(" success:handleBuildTreeSuccess,");
out.println(" failure:handleBuildTreeFailure");
out.println(" };");
out.println("");
out.println(" if (ontology_display_name!='') {");
out.println(" var ontology_source = null;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=build_src_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function resetTree(ontology_node_id, ontology_display_name) {");
out.println("");
out.println(" var handleResetTreeSuccess = function(o) {");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println(" if ( typeof(respObj.root_node) != \"undefined\") {");
out.println(" var root = tree.getRoot();");
out.println(" var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.root_node.ontology_node_id + \"');\";");
out.println(" var rootNodeData = { label:respObj.root_node.ontology_node_name, id:respObj.root_node.ontology_node_id, href:nodeDetails };");
out.println(" var expand = false;");
out.println(" if (respObj.root_node.ontology_node_child_count > 0) {");
out.println(" expand = true;");
out.println(" }");
out.println(" var ontRoot = new YAHOO.widget.TaskNode(rootNodeData, root, expand);");
out.println("");
out.println(" if ( typeof(respObj.child_nodes) != \"undefined\") {");
out.println(" for (var i=0; i < respObj.child_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.child_nodes[i];");
out.println(" addTreeNode(ontRoot, nodeInfo);");
out.println(" }");
out.println(" }");
out.println(" tree.draw();");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleResetTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var resetTreeCallback =");
out.println(" {");
out.println(" success:handleResetTreeSuccess,");
out.println(" failure:handleResetTreeFailure");
out.println(" };");
out.println(" if (ontology_node_id!= '') {");
out.println(" var ontology_source = null;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=reset_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name + '&version='+ ontology_version +'&ontology_source='+ontology_source,resetTreeCallback);");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function onClickTreeNode(ontology_node_id) {");
out.println(" //alert(\"onClickTreeNode \" + ontology_node_id);");
out.println(" window.location = '/ncitbrowser/pages/value_set_treenode_redirect.jsf?ontology_node_id=' + ontology_node_id;");
out.println(" }");
out.println("");
out.println("");
out.println(" function onClickViewEntireOntology(ontology_display_name) {");
out.println(" var ontology_display_name = document.pg_form.ontology_display_name.value;");
out.println(" tree = new YAHOO.widget.TreeView(\"treecontainer\");");
out.println(" tree.draw();");
out.println(" }");
out.println("");
out.println(" function initTree() {");
out.println("");
out.println(" tree = new YAHOO.widget.TreeView(\"treecontainer\");");
//out.println(" pre_check();");
out.println(" tree.setNodesProperty('propagateHighlightUp',true);");
out.println(" tree.setNodesProperty('propagateHighlightDown',true);");
out.println(" tree.subscribe('keydown',tree._onKeyDownEvent);");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" tree.subscribe(\"expand\", function(node) {");
out.println("");
out.println(" YAHOO.util.UserAction.keydown(document.body, { keyCode: 39 });");
out.println("");
out.println(" });");
out.println("");
out.println("");
out.println("");
out.println(" tree.subscribe(\"collapse\", function(node) {");
out.println(" //alert(\"Collapsing \" + node.label );");
out.println("");
out.println(" YAHOO.util.UserAction.keydown(document.body, { keyCode: 109 });");
out.println(" });");
out.println("");
out.println(" // By default, trees with TextNodes will fire an event for when the label is clicked:");
out.println(" tree.subscribe(\"checkClick\", function(node) {");
out.println(" //alert(node.data.myNodeId + \" label was checked\");");
out.println(" });");
out.println("");
out.println("");
println(out, " var root = tree.getRoot();");
HashMap value_set_tree_hmap = DataUtils.getCodingSchemeValueSetTree();
TreeItem root = (TreeItem) value_set_tree_hmap.get("<Root>");
new ValueSetUtils().printTree(out, root, Constants.TERMINOLOGY_VIEW, dictionary);
//new ValueSetUtils().printTree(out, root, Constants.TERMINOLOGY_VIEW);
// Capture the servlet context path and echo back the user's previously
// selected search option/algorithm so the radio buttons stay sticky.
String contextPath = request.getContextPath();
// Integer.toString(int) avoids the deprecated Integer(int) boxing constructor.
String view_str = Integer.toString(view);
//String option = (String) request.getSession().getAttribute("selectValueSetSearchOption");
//String algorithm = (String) request.getSession().getAttribute("valueset_search_algorithm");
// getParameter already returns String; the previous (String) casts were redundant.
String option = request.getParameter("selectValueSetSearchOption");
String algorithm = request.getParameter("valueset_search_algorithm");
String option_code = "";
String option_name = "";
if (DataUtils.isNull(option)) {
option_code = "checked";
} else {
if (option.compareToIgnoreCase("Code") == 0) {
option_code = "checked";
}
if (option.compareToIgnoreCase("Name") == 0) {
option_name = "checked";
}
}
String algorithm_exactMatch = "";
String algorithm_startsWith = "";
String algorithm_contains = "";
if (DataUtils.isNull(algorithm)) {
algorithm_exactMatch = "checked";
} else {
if (algorithm.compareToIgnoreCase("exactMatch") == 0) {
algorithm_exactMatch = "checked";
}
if (algorithm.compareToIgnoreCase("startsWith") == 0) {
algorithm_startsWith = "checked";
}
if (algorithm.compareToIgnoreCase("contains") == 0) {
algorithm_contains = "checked";
}
}
out.println("");
if (message == null) {
out.println(" tree.collapseAll();");
}
out.println(" tree.draw();");
out.println(" }");
out.println("");
out.println("");
out.println(" function onCheckClick(node) {");
out.println(" YAHOO.log(node.label + \" check was clicked, new state: \" + node.checkState, \"info\", \"example\");");
out.println(" }");
out.println("");
out.println(" function check_all() {");
out.println(" var topNodes = tree.getRoot().children;");
out.println(" for(var i=0; i<topNodes.length; ++i) {");
out.println(" topNodes[i].check();");
out.println(" }");
out.println(" }");
out.println("");
out.println(" function uncheck_all() {");
out.println(" var topNodes = tree.getRoot().children;");
out.println(" for(var i=0; i<topNodes.length; ++i) {");
out.println(" topNodes[i].uncheck();");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println(" function expand_all() {");
out.println(" //alert(\"expand_all\");");
out.println(" var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
out.println(" onClickViewEntireOntology(ontology_display_name);");
out.println(" }");
out.println("");
out.println(" function pre_check() {");
out.println(" var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
//out.println(" alert(ontology_display_name);");
out.println(" var topNodes = tree.getRoot().children;");
out.println(" for(var i=0; i<topNodes.length; ++i) {");
out.println(" if (topNodes[i].label == ontology_display_name) {");
out.println(" topNodes[i].check();");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
// 0=unchecked, 1=some children checked, 2=all children checked
out.println(" // Gets the labels of all of the fully checked nodes");
out.println(" // Could be updated to only return checked leaf nodes by evaluating");
out.println(" // the children collection first.");
out.println(" function getCheckedNodes(nodes) {");
out.println(" nodes = nodes || tree.getRoot().children;");
out.println(" checkedNodes = [];");
out.println(" for(var i=0, l=nodes.length; i<l; i=i+1) {");
out.println(" var n = nodes[i];");
out.println(" if (n.checkState > 0) { // if we were interested in the nodes that have some but not all children checked");
out.println(" //if (n.checkState == 2) {");
out.println(" checkedNodes.push(n.label); // just using label for simplicity");
out.println("");
out.println(" if (n.hasChildren()) {");
out.println(" checkedNodes = checkedNodes.concat(getCheckedNodes(n.children));");
out.println(" }");
out.println("");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var checked_vocabularies = document.forms[\"valueSetSearchForm\"].checked_vocabularies;");
out.println(" checked_vocabularies.value = checkedNodes;");
out.println("");
out.println(" return checkedNodes;");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" function loadNodeData(node, fnLoadComplete) {");
out.println(" var id = node.data.id;");
out.println("");
out.println(" var responseSuccess = function(o)");
out.println(" {");
out.println(" var path;");
out.println(" var dirs;");
out.println(" var files;");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" var fileNum = 0;");
out.println(" var categoryNum = 0;");
out.println(" if ( typeof(respObj.nodes) != \"undefined\") {");
out.println(" for (var i=0; i < respObj.nodes.length; i++) {");
out.println(" var name = respObj.nodes[i].ontology_node_name;");
out.println(" var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.nodes[i].ontology_node_id + \"');\";");
out.println(" var newNodeData = { label:name, id:respObj.nodes[i].ontology_node_id, href:nodeDetails };");
out.println(" var newNode = new YAHOO.widget.TaskNode(newNodeData, node, false);");
out.println(" if (respObj.nodes[i].ontology_node_child_count > 0) {");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" tree.draw();");
out.println(" fnLoadComplete();");
out.println(" }");
out.println("");
out.println(" var responseFailure = function(o){");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var callback =");
out.println(" {");
out.println(" success:responseSuccess,");
out.println(" failure:responseFailure");
out.println(" };");
out.println("");
out.println(" var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var cObj = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=expand_src_vs_tree&ontology_node_id=' +id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version,callback);");
out.println(" }");
out.println("");
out.println("");
out.println(" function searchTree(ontology_node_id, ontology_display_name) {");
out.println("");
out.println(" var handleBuildTreeSuccess = function(o) {");
out.println("");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println("");
out.println(" if ( typeof(respObj.dummy_root_nodes) != \"undefined\") {");
out.println(" showNodeNotFound(ontology_node_id);");
out.println(" }");
out.println("");
out.println(" else if ( typeof(respObj.root_nodes) != \"undefined\") {");
out.println(" var root = tree.getRoot();");
out.println(" if (respObj.root_nodes.length == 0) {");
out.println(" //showEmptyRoot();");
out.println(" }");
out.println(" else {");
out.println(" showPartialHierarchy();");
out.println(" showConstructingTreeStatus();");
out.println("");
out.println(" for (var i=0; i < respObj.root_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.root_nodes[i];");
out.println(" //var expand = false;");
out.println(" addTreeBranch(ontology_node_id, root, nodeInfo);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleBuildTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var buildTreeCallback =");
out.println(" {");
out.println(" success:handleBuildTreeSuccess,");
out.println(" failure:handleBuildTreeFailure");
out.println(" };");
out.println("");
out.println(" if (ontology_display_name!='') {");
out.println(" var ontology_source = null;//document.pg_form.ontology_source.value;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=search_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
out.println("");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println(" function expandEntireTree() {");
out.println(" tree = new YAHOO.widget.TreeView(\"treecontainer\");");
out.println(" //tree.draw();");
out.println("");
out.println(" var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
out.println(" var ontology_node_id = document.forms[\"pg_form\"].ontology_node_id.value;");
out.println("");
out.println(" var handleBuildTreeSuccess = function(o) {");
out.println("");
out.println(" var respTxt = o.responseText;");
out.println(" var respObj = eval('(' + respTxt + ')');");
out.println(" if ( typeof(respObj) != \"undefined\") {");
out.println("");
out.println(" if ( typeof(respObj.root_nodes) != \"undefined\") {");
out.println("");
out.println(" //alert(respObj.root_nodes.length);");
out.println("");
out.println(" var root = tree.getRoot();");
out.println(" if (respObj.root_nodes.length == 0) {");
out.println(" //showEmptyRoot();");
out.println(" } else {");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" for (var i=0; i < respObj.root_nodes.length; i++) {");
out.println(" var nodeInfo = respObj.root_nodes[i];");
out.println(" //alert(\"calling addTreeBranch \");");
out.println("");
out.println(" addTreeBranch(ontology_node_id, root, nodeInfo);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" var handleBuildTreeFailure = function(o) {");
out.println(" alert('responseFailure: ' + o.statusText);");
out.println(" }");
out.println("");
out.println(" var buildTreeCallback =");
out.println(" {");
out.println(" success:handleBuildTreeSuccess,");
out.println(" failure:handleBuildTreeFailure");
out.println(" };");
out.println("");
out.println(" if (ontology_display_name!='') {");
out.println(" var ontology_source = null;");
out.println(" var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
out.println(" var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=expand_entire_vs_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
out.println("");
out.println(" }");
out.println(" }");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" function addTreeBranch(ontology_node_id, rootNode, nodeInfo) {");
out.println(" var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
out.println("");
out.println(" var newNodeData;");
out.println(" if (ontology_node_id.indexOf(\"TVS_\") >= 0) {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id };");
out.println(" } else {");
out.println(" newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
out.println(" }");
out.println("");
out.println(" var expand = false;");
out.println(" var childNodes = nodeInfo.children_nodes;");
out.println("");
out.println(" if (childNodes.length > 0) {");
out.println(" expand = true;");
out.println(" }");
out.println(" var newNode = new YAHOO.widget.TaskNode(newNodeData, rootNode, expand);");
out.println(" if (nodeInfo.ontology_node_id == ontology_node_id) {");
out.println(" newNode.labelStyle = \"ygtvlabel_highlight\";");
out.println(" }");
out.println("");
out.println(" if (nodeInfo.ontology_node_id == ontology_node_id) {");
out.println(" newNode.isLeaf = true;");
out.println(" if (nodeInfo.ontology_node_child_count > 0) {");
out.println(" newNode.isLeaf = false;");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" } else {");
out.println(" tree.draw();");
out.println(" }");
out.println("");
out.println(" } else {");
out.println(" if (nodeInfo.ontology_node_id != ontology_node_id) {");
out.println(" if (nodeInfo.ontology_node_child_count == 0 && nodeInfo.ontology_node_id != ontology_node_id) {");
out.println(" newNode.isLeaf = true;");
out.println(" } else if (childNodes.length == 0) {");
out.println(" newNode.setDynamicLoad(loadNodeData);");
out.println(" }");
out.println(" }");
out.println(" }");
out.println("");
out.println(" tree.draw();");
out.println(" for (var i=0; i < childNodes.length; i++) {");
out.println(" var childnodeInfo = childNodes[i];");
out.println(" addTreeBranch(ontology_node_id, newNode, childnodeInfo);");
out.println(" }");
out.println(" }");
out.println(" YAHOO.util.Event.addListener(window, \"load\", init);");
out.println("");
out.println(" YAHOO.util.Event.onDOMReady(initTree);");
out.println("");
out.println("");
out.println(" </script>");
out.println("");
out.println("</head>");
out.println("");
out.println("");
out.println("");
out.println("");
out.println("");
//out.println("<body>");
out.println("<body onLoad=\"document.forms.valueSetSearchForm.matchText.focus();\">");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/wz_tooltip.js\"></script>");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/tip_centerwindow.js\"></script>");
out.println(" <script type=\"text/javascript\" src=\"/ncitbrowser/js/tip_followscroll.js\"></script>");
out.println("");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" <!-- Begin Skip Top Navigation -->");
out.println(" <a href=\"#evs-content\" class=\"hideLink\" accesskey=\"1\" title=\"Skip repetitive navigation links\">skip navigation links</A>");
out.println(" <!-- End Skip Top Navigation -->");
out.println("");
out.println("<!-- nci banner -->");
out.println("<div class=\"ncibanner\">");
// NOTE(review): the four href literals below were truncated at "//" in the
// extracted source (everything from "//" onward was stripped), which left
// unterminated string literals. URLs reconstructed from each image's alt
// text — confirm against the upstream ncitbrowser JSP before release.
out.println(" <a href=\"http://www.cancer.gov\" target=\"_blank\">");
out.println(" <img src=\"/ncitbrowser/images/logotype.gif\"");
out.println(" width=\"440\" height=\"39\" border=\"0\"");
out.println(" alt=\"National Cancer Institute\"/>");
out.println(" </a>");
out.println(" <a href=\"http://www.cancer.gov\" target=\"_blank\">");
out.println(" <img src=\"/ncitbrowser/images/spacer.gif\"");
out.println(" width=\"48\" height=\"39\" border=\"0\"");
out.println(" alt=\"National Cancer Institute\" class=\"print-header\"/>");
out.println(" </a>");
out.println(" <a href=\"http://www.nih.gov\" target=\"_blank\">");
out.println(" <img src=\"/ncitbrowser/images/tagline_nologo.gif\"");
out.println(" width=\"173\" height=\"39\" border=\"0\"");
out.println(" alt=\"U.S. National Institutes of Health\"/>");
out.println(" </a>");
out.println(" <a href=\"http://www.cancer.gov\" target=\"_blank\">");
out.println(" <img src=\"/ncitbrowser/images/cancer-gov.gif\"");
out.println(" width=\"99\" height=\"39\" border=\"0\"");
out.println(" alt=\"www.cancer.gov\"/>");
out.println(" </a>");
out.println("</div>");
out.println("<!-- end nci banner -->");
out.println("");
out.println(" <div class=\"center-page\">");
out.println(" <!-- EVS Logo -->");
out.println("<div>");
// to be modified
out.println(" <img src=\"/ncitbrowser/images/evs-logo-swapped.gif\" alt=\"EVS Logo\"");
out.println(" width=\"745\" height=\"26\" border=\"0\"");
out.println(" usemap=\"#external-evs\" />");
out.println(" <map id=\"external-evs\" name=\"external-evs\">");
out.println(" <area shape=\"rect\" coords=\"0,0,140,26\"");
out.println(" href=\"/ncitbrowser/start.jsf\" target=\"_self\"");
out.println(" alt=\"NCI Term Browser\" />");
out.println(" <area shape=\"rect\" coords=\"520,0,745,26\"");
out.println(" href=\"http://evs.nci.nih.gov/\" target=\"_blank\"");
out.println(" alt=\"Enterprise Vocabulary Services\" />");
out.println(" </map>");
out.println("</div>");
out.println("");
out.println("");
out.println("<table cellspacing=\"0\" cellpadding=\"0\" border=\"0\">");
out.println(" <tr>");
out.println(" <td width=\"5\"></td>");
//to be modified
out.println(" <td><a href=\"/ncitbrowser/pages/multiple_search.jsf?nav_type=terminologies\">");
out.println(" <img name=\"tab_terms\" src=\"/ncitbrowser/images/tab_terms_clicked.gif\"");
out.println(" border=\"0\" alt=\"Terminologies\" title=\"Terminologies\" /></a></td>");
out.println(" <td><a href=\"/ncitbrowser/ajax?action=create_src_vs_tree\">");
out.println(" <img name=\"tab_valuesets\" src=\"/ncitbrowser/images/tab_valuesets.gif\"");
out.println(" border=\"0\" alt=\"Value Sets\" title=\"ValueSets\" /></a></td>");
out.println(" <td><a href=\"/ncitbrowser/pages/mapping_search.jsf?nav_type=mappings\">");
out.println(" <img name=\"tab_map\" src=\"/ncitbrowser/images/tab_map.gif\"");
out.println(" border=\"0\" alt=\"Mappings\" title=\"Mappings\" /></a></td>");
out.println(" </tr>");
out.println("</table>");
out.println("");
out.println("<div class=\"mainbox-top\"><img src=\"/ncitbrowser/images/mainbox-top.gif\" width=\"745\" height=\"5\" alt=\"\"/></div>");
out.println("<!-- end EVS Logo -->");
out.println(" <!-- Main box -->");
out.println(" <div id=\"main-area\">");
out.println("");
out.println(" <!-- Thesaurus, banner search area -->");
out.println(" <div class=\"bannerarea\">");
/*
out.println(" <a href=\"/ncitbrowser/start.jsf\" style=\"text-decoration: none;\">");
out.println(" <div class=\"vocabularynamebanner_tb\">");
out.println(" <span class=\"vocabularynamelong_tb\">" + JSPUtils.getApplicationVersionDisplay() + "</span>");
out.println(" </div>");
out.println(" </a>");
*/
JSPUtils.JSPHeaderInfoMore info = new JSPUtils.JSPHeaderInfoMore(request);
String scheme = info.dictionary;
String term_browser_version = info.term_browser_version;
String display_name = info.display_name;
String basePath = request.getContextPath();
String release_date = DataUtils.getVersionReleaseDate(scheme, version);
if (dictionary != null && dictionary.compareTo("NCI Thesaurus") == 0) {
out.println("<a href=\"/ncitbrowser/pages/home.jsf?version=" + version + "\" style=\"text-decoration: none;\">");
out.println(" <div class=\"vocabularynamebanner_ncit\">");
out.println(" <span class=\"vocabularynamelong_ncit\">Version: " + version + " (Release date: " + release_date + ")</span>");
out.println(" </div>");
out.println("</a>");
/*
out.write("\r\n");
out.write(" <div class=\"banner\"><a href=\"");
out.print(basePath);
out.write("\"><img src=\"");
out.print(basePath);
out.write("/images/thesaurus_browser_logo.jpg\" width=\"383\" height=\"117\" alt=\"Thesaurus Browser Logo\" border=\"0\"/></a></div>\r\n");
*/
} else {
out.write("\r\n");
out.write("\r\n");
out.write(" ");
if (version == null) {
out.write("\r\n");
out.write(" <a class=\"vocabularynamebanner\" href=\"");
out.print(request.getContextPath());
out.write("/pages/vocabulary.jsf?dictionary=");
out.print(HTTPUtils.cleanXSS(dictionary));
out.write("\">\r\n");
out.write(" ");
} else {
out.write("\r\n");
out.write(" <a class=\"vocabularynamebanner\" href=\"");
out.print(request.getContextPath());
out.write("/pages/vocabulary.jsf?dictionary=");
out.print(HTTPUtils.cleanXSS(dictionary));
out.write("&version=");
out.print(HTTPUtils.cleanXSS(version));
out.write("\">\r\n");
out.write(" ");
}
out.write("\r\n");
out.write(" <div class=\"vocabularynamebanner\">\r\n");
out.write(" <div class=\"vocabularynameshort\" STYLE=\"font-size: ");
out.print(HTTPUtils.maxFontSize(display_name));
out.write("px; font-family : Arial\">\r\n");
out.write(" ");
out.print(HTTPUtils.cleanXSS(display_name));
out.write("\r\n");
out.write(" </div>\r\n");
out.write(" \r\n");
boolean display_release_date = true;
if (release_date == null || release_date.compareTo("") == 0) {
display_release_date = false;
}
if (display_release_date) {
out.write("\r\n");
out.write(" <div class=\"vocabularynamelong\">Version: ");
out.print(HTTPUtils.cleanXSS(term_browser_version));
out.write(" (Release date: ");
out.print(release_date);
out.write(")</div>\r\n");
} else {
out.write("\r\n");
out.write(" <div class=\"vocabularynamelong\">Version: ");
out.print(HTTPUtils.cleanXSS(term_browser_version));
out.write("</div>\r\n");
}
out.write(" \r\n");
out.write(" \r\n");
out.write(" </div>\r\n");
out.write(" </a>\r\n");
}
out.println(" <div class=\"search-globalnav\">");
out.println(" <!-- Search box -->");
out.println(" <div class=\"searchbox-top\"><img src=\"/ncitbrowser/images/searchbox-top.gif\" width=\"352\" height=\"2\" alt=\"SearchBox Top\" /></div>");
out.println(" <div class=\"searchbox\">");
out.println("");
out.println("");
out.println("<form id=\"valueSetSearchForm\" name=\"valueSetSearchForm\" method=\"post\" action=\"" + contextPath + "/ajax?action=search_value_set\" class=\"search-form-main-area\" enctype=\"application/x-www-form-urlencoded\">");
out.println("<input type=\"hidden\" name=\"valueSetSearchForm\" value=\"valueSetSearchForm\" />");
out.println("<input type=\"hidden\" name=\"view\" value=\"" + view_str + "\" />");
String matchText = (String) request.getSession().getAttribute("matchText");
if (DataUtils.isNull(matchText)) {
matchText = "";
}
out.println("");
out.println("");
out.println("");
out.println(" <input type=\"hidden\" id=\"checked_vocabularies\" name=\"checked_vocabularies\" value=\"\" />");
out.println("");
out.println("");
out.println("");
out.println("<table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" style=\"margin: 2px\" >");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td align=\"left\" class=\"textbody\">");
out.println("");
out.println(" <input CLASS=\"searchbox-input-2\"");
out.println(" name=\"matchText\"");
out.println(" value=\"" + matchText + "\"");
out.println(" onFocus=\"active = true\"");
out.println(" onBlur=\"active = false\"");
out.println(" onkeypress=\"return submitEnter('valueSetSearchForm:valueset_search',event)\"");
out.println(" tabindex=\"1\"/>");
out.println("");
out.println("");
out.println(" <input id=\"valueSetSearchForm:valueset_search\" type=\"image\" src=\"/ncitbrowser/images/search.gif\" name=\"valueSetSearchForm:valueset_search\" alt=\"Search Value Sets\" onclick=\"javascript:getCheckedNodes();\" tabindex=\"2\" class=\"searchbox-btn\" /><a href=\"/ncitbrowser/pages/help.jsf#searchhelp\" tabindex=\"3\"><img src=\"/ncitbrowser/images/search-help.gif\" alt=\"Search Help\" style=\"border-width:0;\" class=\"searchbox-btn\" /></a>");
out.println("");
out.println("");
out.println(" </td>");
out.println(" </tr>");
out.println("");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td>");
out.println(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" style=\"margin: 0px\">");
out.println("");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td align=\"left\" class=\"textbody\">");
out.println(" <input type=\"radio\" name=\"valueset_search_algorithm\" value=\"exactMatch\" alt=\"Exact Match\" " + algorithm_exactMatch + " tabindex=\"3\">Exact Match ");
out.println(" <input type=\"radio\" name=\"valueset_search_algorithm\" value=\"startsWith\" alt=\"Begins With\" " + algorithm_startsWith + " tabindex=\"3\">Begins With ");
out.println(" <input type=\"radio\" name=\"valueset_search_algorithm\" value=\"contains\" alt=\"Contains\" " + algorithm_contains + " tabindex=\"3\">Contains");
out.println(" </td>");
out.println(" </tr>");
out.println("");
out.println(" <tr align=\"left\">");
out.println(" <td height=\"1px\" bgcolor=\"#2F2F5F\" align=\"left\"></td>");
out.println(" </tr>");
out.println(" <tr valign=\"top\" align=\"left\">");
out.println(" <td align=\"left\" class=\"textbody\">");
out.println(" <input type=\"radio\" id=\"selectValueSetSearchOption\" name=\"selectValueSetSearchOption\" value=\"Code\" " + option_code + " alt=\"Code\" tabindex=\"1\" >Code ");
out.println(" <input type=\"radio\" id=\"selectValueSetSearchOption\" name=\"selectValueSetSearchOption\" value=\"Name\" " + option_name + " alt=\"Name\" tabindex=\"1\" >Name");
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println(" </td>");
out.println(" </tr>");
out.println("</table>");
out.println(" <input type=\"hidden\" name=\"referer\" id=\"referer\" value=\"http%3A%2F%2Flocalhost%3A8080%2Fncitbrowser%2Fpages%2Fresolved_value_set_search_results.jsf\">");
out.println(" <input type=\"hidden\" id=\"nav_type\" name=\"nav_type\" value=\"valuesets\" />");
out.println(" <input type=\"hidden\" id=\"view\" name=\"view\" value=\"source\" />");
out.println(" <input type=\"hidden\" id=\"ontology_display_name\" name=\"ontology_display_name\" value=\"" + dictionary + "\" />");
out.println(" <input type=\"hidden\" id=\"schema\" name=\"schema\" value=\"" + dictionary + "\" />");
out.println(" <input type=\"hidden\" id=\"ontology_version\" name=\"ontology_version\" value=\"" + version + "\" />");
out.println("");
out.println("<input type=\"hidden\" name=\"javax.faces.ViewState\" id=\"javax.faces.ViewState\" value=\"j_id22:j_id23\" />");
out.println("</form>");
out.println(" </div> <!-- searchbox -->");
out.println("");
out.println(" <div class=\"searchbox-bottom\"><img src=\"/ncitbrowser/images/searchbox-bottom.gif\" width=\"352\" height=\"2\" alt=\"SearchBox Bottom\" /></div>");
out.println(" <!-- end Search box -->");
out.println(" <!-- Global Navigation -->");
out.println("");
/*
out.println("<table class=\"global-nav\" border=\"0\" width=\"100%\" height=\"37px\" cellpadding=\"0\" cellspacing=\"0\">");
out.println(" <tr>");
out.println(" <td align=\"left\" valign=\"bottom\">");
out.println(" <a href=\"#\" onclick=\"javascript:window.open('/ncitbrowser/pages/source_help_info-termbrowser.jsf',");
out.println(" '_blank','top=100, left=100, height=740, width=780, status=no, menubar=no, resizable=yes, scrollbars=yes, toolbar=no, location=no, directories=no');\" tabindex=\"13\">");
out.println(" Sources</a>");
out.println("");
out.println(" \r\n");
out.println(" ");
out.print( VisitedConceptUtils.getDisplayLink(request, true) );
out.println(" \r\n");
out.println(" </td>");
out.println(" <td align=\"right\" valign=\"bottom\">");
out.println(" <a href=\"");
out.print( request.getContextPath() );
out.println("/pages/help.jsf\" tabindex=\"16\">Help</a>\r\n");
out.println(" </td>\r\n");
out.println(" <td width=\"7\"></td>\r\n");
out.println(" </tr>\r\n");
out.println("</table>");
*/
boolean hasValueSet = ValueSetHierarchy.hasValueSet(scheme);
boolean hasMapping = DataUtils.hasMapping(scheme);
boolean tree_access_allowed = true;
if (DataUtils._vocabulariesWithoutTreeAccessHashSet.contains(scheme)) {
tree_access_allowed = false;
}
boolean vocabulary_isMapping = DataUtils.isMapping(scheme, null);
out.write(" <table class=\"global-nav\" border=\"0\" width=\"100%\" height=\"37px\" cellpadding=\"0\" cellspacing=\"0\">\r\n");
out.write(" <tr>\r\n");
out.write(" <td valign=\"bottom\">\r\n");
out.write(" ");
Boolean[] isPipeDisplayed = new Boolean[] { Boolean.FALSE };
out.write("\r\n");
out.write(" ");
if (vocabulary_isMapping) {
out.write("\r\n");
out.write(" ");
out.print( JSPUtils.getPipeSeparator(isPipeDisplayed) );
out.write("\r\n");
out.write(" <a href=\"");
out.print(request.getContextPath() );
out.write("/pages/mapping.jsf?dictionary=");
out.print(HTTPUtils.cleanXSS(scheme));
out.write("&version=");
out.print(version);
out.write("\">\r\n");
out.write(" Mapping\r\n");
out.write(" </a>\r\n");
out.write(" ");
} else if (tree_access_allowed) {
out.write("\r\n");
out.write(" ");
out.print( JSPUtils.getPipeSeparator(isPipeDisplayed) );
out.write("\r\n");
out.write(" <a href=\"#\" onclick=\"javascript:window.open('");
out.print(request.getContextPath());
out.write("/pages/hierarchy.jsf?dictionary=");
out.print(HTTPUtils.cleanXSS(scheme));
out.write("&version=");
out.print(HTTPUtils.cleanXSS(version));
out.write("', '_blank','top=100, left=100, height=740, width=680, status=no, menubar=no, resizable=yes, scrollbars=yes, toolbar=no, location=no, directories=no');\" tabindex=\"12\">\r\n");
out.write(" Hierarchy </a>\r\n");
out.write(" ");
}
out.write(" \r\n");
out.write(" \r\n");
out.write(" \r\n");
out.write(" ");
if (hasValueSet) {
out.write("\r\n");
out.write(" ");
out.print( JSPUtils.getPipeSeparator(isPipeDisplayed) );
out.write("\r\n");
// NOTE(review): this line was truncated at "--" in the extracted source,
// leaving an unterminated string literal. Reconstructed as the opening of
// an HTML comment that disables the value_set_hierarchy.jsf link in favor
// of the ajax create_cs_vs_tree link emitted below — confirm against the
// upstream ncitbrowser JSP.
out.write(" <!--\r\n");
out.write(" <a href=\"");
out.print( request.getContextPath() );
out.write("/pages/value_set_hierarchy.jsf?dictionary=");
out.print(HTTPUtils.cleanXSS(scheme));
out.write("&version=");
out.print(HTTPUtils.cleanXSS(version));
out.write("\" tabindex=\"15\">Value Sets</a>\r\n");
// NOTE(review): likewise truncated at "--"; reconstructed as the closing
// HTML comment delimiter for the block opened above.
out.write(" -->\r\n");
out.write(" <a href=\"");
out.print( request.getContextPath() );
out.write("/ajax?action=create_cs_vs_tree&dictionary=");
out.print(HTTPUtils.cleanXSS(scheme));
out.write("&version=");
out.print(HTTPUtils.cleanXSS(version));
out.write("\" tabindex=\"15\">Value Sets</a>\r\n");
out.write("\r\n");
out.write("\r\n");
out.write(" ");
}
out.write("\r\n");
out.write(" \r\n");
out.write(" ");
if (hasMapping) {
out.write("\r\n");
out.write(" ");
out.print( JSPUtils.getPipeSeparator(isPipeDisplayed) );
out.write("\r\n");
out.write(" <a href=\"");
out.print( request.getContextPath() );
out.write("/pages/cs_mappings.jsf?dictionary=");
out.print(HTTPUtils.cleanXSS(scheme));
out.write("&version=");
out.print(HTTPUtils.cleanXSS(version));
out.write("\" tabindex=\"15\">Maps</a> \r\n");
out.write(" ");
}
out.write(" ");
out.print( VisitedConceptUtils.getDisplayLink(request, isPipeDisplayed) );
out.write("\r\n");
out.write(" </td>\r\n");
out.write(" <td align=\"right\" valign=\"bottom\">\r\n");
out.write(" <a href=\"");
out.print(request.getContextPath());
out.write("/pages/help.jsf\" tabindex=\"16\">Help</a>\r\n");
out.write(" </td>\r\n");
out.write(" <td width=\"7\" valign=\"bottom\"></td>\r\n");
out.write(" </tr>\r\n");
out.write(" </table>\r\n");
out.println(" <!-- end Global Navigation -->");
out.println("");
out.println(" </div> <!-- search-globalnav -->");
out.println(" </div> <!-- bannerarea -->");
out.println("");
out.println(" <!-- end Thesaurus, banner search area -->");
out.println(" <!-- Quick links bar -->");
out.println("");
out.println("<div class=\"bluebar\">");
out.println(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"0\">");
out.println(" <tr>");
out.println(" <td><div class=\"quicklink-status\"> </div></td>");
out.println(" <td>");
out.println("");
addQuickLink(request, out);
out.println("");
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println("");
out.println("</div>");
if (! ServerMonitorThread.getInstance().isLexEVSRunning()) {
out.println(" <div class=\"redbar\">");
out.println(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"0\">");
out.println(" <tr>");
out.println(" <td class=\"lexevs-status\">");
out.println(" " + ServerMonitorThread.getInstance().getMessage());
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println(" </div>");
}
out.println(" <!-- end Quick links bar -->");
out.println("");
out.println(" <!-- Page content -->");
out.println(" <div class=\"pagecontent\">");
out.println("");
if (message != null) {
out.println("\r\n");
out.println(" <p class=\"textbodyred\">");
out.print(message);
out.println("</p>\r\n");
out.println(" ");
request.getSession().removeAttribute("message");
}
// to be modified
/*
out.println("<p class=\"textbody\">");
out.println("View value sets organized by standards category or source terminology.");
out.println("Standards categories group the value sets supporting them; all other labels lead to the home pages of actual value sets or source terminologies.");
out.println("Search or browse a value set from its home page, or search all value sets at once from this page (very slow) to find which ones contain a particular code or term.");
out.println("</p>");
*/
out.println("");
out.println(" <div id=\"popupContentArea\">");
out.println(" <a name=\"evs-content\" id=\"evs-content\"></a>");
out.println("");
out.println(" <table width=\"580px\" cellpadding=\"3\" cellspacing=\"0\" border=\"0\">");
out.println("");
out.println("");
out.println("");
out.println("");
out.println(" <tr class=\"textbody\">");
out.println(" <td class=\"textbody\" align=\"left\">");
out.println("");
/*
if (view == Constants.STANDARD_VIEW) {
out.println(" Standards View");
out.println(" |");
out.println(" <a href=\"" + contextPath + "/ajax?action=create_cs_vs_tree\">Terminology View</a>");
} else {
out.println(" <a href=\"" + contextPath + "/ajax?action=create_src_vs_tree\">Standards View</a>");
out.println(" |");
out.println(" Terminology View");
}
*/
out.println(" </td>");
out.println("");
out.println(" <td align=\"right\">");
out.println(" <font size=\"1\" color=\"red\" align=\"right\">");
out.println(" <a href=\"javascript:printPage()\"><img src=\"/ncitbrowser/images/printer.bmp\" border=\"0\" alt=\"Send to Printer\"><i>Send to Printer</i></a>");
out.println(" </font>");
out.println(" </td>");
out.println(" </tr>");
out.println(" </table>");
out.println("");
out.println(" <hr/>");
out.println("");
out.println("");
out.println("");
out.println("<style>");
out.println("#expandcontractdiv {border:1px solid #336600; background-color:#FFFFCC; margin:0 0 .5em 0; padding:0.2em;}");
out.println("#treecontainer { background: #fff }");
out.println("</style>");
out.println("");
out.println("");
out.println("<div id=\"expandcontractdiv\">");
out.println(" <a id=\"expand_all\" href=\"#\">Expand all</a>");
out.println(" <a id=\"collapse_all\" href=\"#\">Collapse all</a>");
out.println(" <a id=\"check_all\" href=\"#\">Check all</a>");
out.println(" <a id=\"uncheck_all\" href=\"#\">Uncheck all</a>");
out.println("</div>");
out.println("");
out.println("");
out.println("");
out.println(" <!-- Tree content -->");
out.println("");
out.println(" <div id=\"treecontainer\" class=\"ygtv-checkbox\"></div>");
out.println("");
out.println(" <form id=\"pg_form\">");
out.println("");
out.println(" <input type=\"hidden\" id=\"ontology_node_id\" name=\"ontology_node_id\" value=\"null\" />");
out.println(" <input type=\"hidden\" id=\"ontology_display_name\" name=\"ontology_display_name\" value=\"" + dictionary + "\" />");
out.println(" <input type=\"hidden\" id=\"schema\" name=\"schema\" value=\"" + dictionary + "\" />");
out.println(" <input type=\"hidden\" id=\"ontology_version\" name=\"ontology_version\" value=\"" + version + "\" />");
out.println(" <input type=\"hidden\" id=\"view\" name=\"view\" value=\"source\" />");
out.println(" </form>");
out.println("");
out.println("");
out.println(" </div> <!-- popupContentArea -->");
out.println("");
out.println("");
out.println("<div class=\"textbody\">");
out.println("<!-- footer -->");
out.println("<div class=\"footer\" style=\"width:720px\">");
out.println(" <ul>");
out.println(" <li><a href=\"http:
out.println(" <li><a href=\"/ncitbrowser/pages/contact_us.jsf\">Contact Us</a> |</li>");
out.println(" <li><a href=\"http:
out.println(" <li><a href=\"http:
out.println(" <li><a href=\"http:
out.println(" </ul>");
out.println(" <p>");
out.println(" A Service of the National Cancer Institute<br />");
out.println(" <img src=\"/ncitbrowser/images/external-footer-logos.gif\"");
out.println(" alt=\"External Footer Logos\" width=\"238\" height=\"34\" border=\"0\"");
out.println(" usemap=\"#external-footer\" />");
out.println(" </p>");
out.println(" <map id=\"external-footer\" name=\"external-footer\">");
out.println(" <area shape=\"rect\" coords=\"0,0,46,34\"");
out.println(" href=\"http:
out.println(" alt=\"National Cancer Institute\" />");
out.println(" <area shape=\"rect\" coords=\"55,1,99,32\"");
out.println(" href=\"http:
out.println(" alt=\"U.S. Health & Human Services\" />");
out.println(" <area shape=\"rect\" coords=\"103,1,147,31\"");
out.println(" href=\"http:
out.println(" alt=\"National Institutes of Health\" />");
out.println(" <area shape=\"rect\" coords=\"148,1,235,33\"");
out.println(" href=\"http:
out.println(" alt=\"USA.gov\" />");
out.println(" </map>");
out.println("</div>");
out.println("<!-- end footer -->");
out.println("</div>");
out.println("");
out.println("");
out.println(" </div> <!-- pagecontent -->");
out.println(" </div> <!-- main-area -->");
out.println(" <div class=\"mainbox-bottom\"><img src=\"/ncitbrowser/images/mainbox-bottom.gif\" width=\"745\" height=\"5\" alt=\"Mainbox Bottom\" /></div>");
out.println("");
out.println(" </div> <!-- center-page -->");
out.println("");
out.println("</body>");
out.println("</html>");
out.println("");
}
    /**
     * Renders the banner "Quick Links" drop-down menu as an HTML fragment on
     * {@code out}. Links are assembled from the servlet context path, the
     * configured NCI Metathesaurus URL, and per-dictionary metadata.
     *
     * @param request current HTTP request; supplies the context path and the
     *                session attribute "dictionary" (may be absent)
     * @param out     writer the HTML fragment is emitted to
     */
    public static void addQuickLink(HttpServletRequest request, PrintWriter out) {
        String basePath = request.getContextPath();
        String ncim_url = new DataUtils().getNCImURL();
        // Formal name of the session's current dictionary; null when no
        // dictionary has been selected yet.
        String quicklink_dictionary = (String) request.getSession().getAttribute("dictionary");
        quicklink_dictionary = DataUtils.getFormalName(quicklink_dictionary);
        String term_suggestion_application_url2 = "";
        String dictionary_encoded2 = "";
        if (quicklink_dictionary != null) {
            term_suggestion_application_url2 = DataUtils.getMetadataValue(quicklink_dictionary, "term_suggestion_application_url");
            // URL-escape spaces for use in the term-suggestion query string.
            dictionary_encoded2 = DataUtils.replaceAll(quicklink_dictionary, " ", "%20");
        }
        out.write("	  <div id=\"quicklinksholder\">\r\n");
        out.write("		<ul id=\"quicklinks\"\r\n");
        out.write("		  onmouseover=\"document.quicklinksimg.src='");
        out.print(basePath);
        out.write("/images/quicklinks-active.gif';\"\r\n");
        out.write("		  onmouseout=\"document.quicklinksimg.src='");
        out.print(basePath);
        out.write("/images/quicklinks-inactive.gif';\">\r\n");
        out.write("		  <li>\r\n");
        out.write("		    <a href=\"#\" tabindex=\"-1\"><img src=\"");
        out.print(basePath);
        out.write("/images/quicklinks-inactive.gif\" width=\"162\"\r\n");
        out.write("		      height=\"18\" border=\"0\" name=\"quicklinksimg\" alt=\"Quick Links\" />\r\n");
        out.write("		    </a>\r\n");
        out.write("		    <ul>\r\n");
        out.write("		      <li><a href=\"http://evs.nci.nih.gov/\" tabindex=\"-1\" target=\"_blank\"\r\n");
        out.write("		        alt=\"Enterprise Vocabulary Services\">EVS Home</a></li>\r\n");
        out.write("		      <li><a href=\"");
        out.print(ncim_url);
        out.write("\" tabindex=\"-1\" target=\"_blank\"\r\n");
        out.write("		        alt=\"NCI Metathesaurus\">NCI Metathesaurus Browser</a></li>\r\n");
        out.write("\r\n");
        out.write("	        ");
        // Omit the NCI Thesaurus link when it is already the active dictionary.
        if (quicklink_dictionary == null || quicklink_dictionary.compareTo("NCI Thesaurus") != 0) {
        out.write("\r\n");
        out.write("\r\n");
        out.write("	          <li><a href=\"");
        out.print( request.getContextPath() );
        out.write("/index.jsp\" tabindex=\"-1\"\r\n");
        out.write("	            alt=\"NCI Thesaurus Browser\">NCI Thesaurus Browser</a></li>\r\n");
        out.write("\r\n");
        out.write("	        ");
        }
        out.write("\r\n");
        out.write("\r\n");
        out.write("	    <li>\r\n");
        out.write("	      <a href=\"");
        out.print( request.getContextPath() );
        out.write("/termbrowser.jsf\" tabindex=\"-1\" alt=\"NCI Term Browser\">NCI Term Browser</a>\r\n");
        out.write("	    </li>\r\n");
        out.write("	        \r\n");
        // NOTE(review): this string literal appears truncated in this copy of
        // the file (URL cut off after "http:") — recover the full URL from
        // version control before shipping.
        out.write("		      <li><a href=\"http:
        out.write("		        alt=\"NCI Terminology Resources\">NCI Terminology Resources</a></li>\r\n");
        out.write("            ");
        // Only advertise "Term Suggestion" when the dictionary declares a
        // suggestion-application URL in its metadata.
        if (term_suggestion_application_url2 != null && term_suggestion_application_url2.length() > 0) {
        out.write("\r\n");
        out.write("              <li><a href=\"");
        out.print(term_suggestion_application_url2);
        out.write("?dictionary=");
        out.print(dictionary_encoded2);
        out.write("\" tabindex=\"-1\" target=\"_blank\" alt=\"Term Suggestion\">Term Suggestion</a></li>\r\n");
        out.write("            ");
        }
        out.write("\r\n");
        out.write("\r\n");
        out.write("		    </ul>\r\n");
        out.write("		  </li>\r\n");
        out.write("		</ul>\r\n");
        out.write("	  </div>\r\n");
    }
}
|
package com.twosheds.pi;
import android.os.Handler;
import android.os.Message;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.Random;
public class MainActivity extends ActionBarActivity {
    /** Stop once two successive pi estimates differ by less than this amount. */
    private static final double PRECISION = 0.00001;
    /** Handler message: a new pi estimate is available in {@code msg.obj}. */
    private static final int EVENT_NEW_VALUE = 1;

    private Random random;

    // Bug fix: these were mutable 'static' fields, so simulation state was
    // shared across activity instances and survived configuration changes.
    // They are instance state, and 'volatile' because the worker thread
    // writes them while the UI thread reads them (updateViews, the stop
    // branch of onStartCalculation).
    private volatile int countInside;
    private volatile int countTotal;
    private volatile boolean isRunning;

    private GraphView graphView;
    private TextView piView;
    private TextView stepView;
    private Button startButton;

    // NOTE(review): a non-static anonymous Handler holds an implicit reference
    // to the Activity; the worker stops via isRunning so the window is short,
    // but a static Handler + WeakReference would be leak-proof — consider.
    private Handler handler = new Handler() {
        @Override
        public void handleMessage(Message message) {
            int event = message.what;
            switch (event) {
                case EVENT_NEW_VALUE:
                    // Delivered on the UI thread; safe to touch the views.
                    updateViews((double) message.obj);
                    break;
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        graphView = (GraphView) findViewById(R.id.graph);
        piView = (TextView) findViewById(R.id.pi);
        stepView = (TextView) findViewById(R.id.steps);
        startButton = (Button) findViewById(R.id.button_start);

        countTotal = 0;
        countInside = 0;
        random = new Random();
        updateViews(0.0);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    /**
     * Toggles the Monte-Carlo simulation: starts a worker thread that samples
     * random points in [-1,1]^2 and estimates pi from the fraction landing
     * inside the unit circle, or stops it and resets the counters.
     *
     * @param view the Start/Stop button (unused; required by android:onClick)
     */
    public void onStartCalculation(View view) {
        if (isRunning) {
            // Stop request: signal the worker to exit and reset the run.
            isRunning = false;
            startButton.setText(R.string.action_start);
            countTotal = 0;
            countInside = 0;
            graphView.clearPoints();
        } else {
            isRunning = true;
            startButton.setText(R.string.action_stop);

            Thread drawThread = new Thread() {
                @Override
                public void run() {
                    // Seed the two estimates far apart so the loop starts.
                    double oldPi = 10.0;
                    double pi = 20.0;
                    while (isRunning && Math.abs(oldPi - pi) > PRECISION) {
                        // Uniform random point in the square [-1, 1] x [-1, 1].
                        double x = random.nextDouble() * 2 - 1;
                        double y = random.nextDouble() * 2 - 1;

                        double distance = Math.sqrt(x * x + y * y);
                        boolean isInside = distance < 1;

                        if (isInside) {
                            countInside++;
                        }
                        countTotal++;
                        // Only track convergence after at least one sample has
                        // fallen outside the circle; otherwise the estimate is
                        // pinned at 4.0 and the loop would stop prematurely.
                        if (countTotal != countInside) {
                            oldPi = pi;
                        }
                        pi = (double) countInside * 4.0d / (double) countTotal;

                        Message msg = handler.obtainMessage(EVENT_NEW_VALUE);
                        msg.obj = pi;
                        msg.sendToTarget();

                        // NOTE(review): invoked from the worker thread —
                        // assumes GraphView.drawPoint is thread-safe or posts
                        // internally; confirm against GraphView.
                        graphView.drawPoint(x, y, isInside);
                        try {
                            sleep(1);
                        } catch (InterruptedException e) {
                            // Bug fix: interruption was only printed and
                            // ignored; treat it as a stop request and restore
                            // the interrupt flag.
                            Thread.currentThread().interrupt();
                            return;
                        }
                    }
                }
            };
            drawThread.start();
        }
    }

    /** Refreshes the pi-estimate label and the step counter on screen. */
    private void updateViews(double pi) {
        piView.setText(String.format("\u03C0 = %1.5f", pi));
        stepView.setText(getString(R.string.steps, countTotal));
    }
}
|
package gov.nih.nci.evs.browser.servlet;
import org.json.*;
import gov.nih.nci.evs.browser.utils.*;
import java.io.*;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import org.apache.log4j.*;
import gov.nih.nci.evs.browser.properties.*;
import static gov.nih.nci.evs.browser.common.Constants.*;
import org.LexGrid.LexBIG.DataModel.Core.CodingSchemeVersionOrTag;
import org.LexGrid.valueSets.ValueSetDefinition;
/**
* @author EVS Team
* @version 1.0
*
* Modification history
* Initial implementation kim.ong@ngc.com
*
*/
public final class AjaxServlet extends HttpServlet {
    // Class-wide log4j logger used for request timing and error reporting.
    private static Logger _logger = Logger.getLogger(AjaxServlet.class);

    /**
     * Serialization identity for this servlet (HttpServlet is Serializable).
     */
    private static final long serialVersionUID = 1L;
/**
* Validates the Init and Context parameters, configures authentication URL
*
* @throws ServletException if the init parameters are invalid or any other
* problems occur during initialisation
*/
    public void init() throws ServletException {
        // No servlet-specific initialization required; all configuration is
        // resolved lazily (DataUtils / CacheController) at request time.
    }
/**
* Route the user to the execute method
*
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
*
* @exception IOException if an input/output error occurs
* @exception ServletException if a servlet exception occurs
*/
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {
        // GET and POST are handled identically; both delegate to execute().
        execute(request, response);
    }
/**
* Route the user to the execute method
*
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
*
* @exception IOException if an input/output error occurs
* @exception ServletException if a Servlet exception occurs
*/
    public void doPost(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {
        // GET and POST are handled identically; both delegate to execute().
        execute(request, response);
    }
private static void debugJSONString(String msg, String jsonString) {
boolean debug = false; //DYEE_DEBUG
if (! debug)
return;
_logger.debug(Utils.SEPARATOR);
if (msg != null && msg.length() > 0)
_logger.debug(msg);
_logger.debug("jsonString: " + jsonString);
_logger.debug("jsonString length: " + jsonString.length());
Utils.debugJSONString(jsonString);
}
public static void search_tree(HttpServletResponse response, String node_id,
String ontology_display_name, String ontology_version) {
try {
String jsonString = search_tree(node_id,
ontology_display_name, ontology_version);
if (jsonString == null)
return;
JSONObject json = new JSONObject();
JSONArray rootsArray = new JSONArray(jsonString);
json.put("root_nodes", rootsArray);
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
response.getWriter().write(json.toString());
response.getWriter().flush();
} catch (Exception e) {
e.printStackTrace();
}
}
public static String search_tree(String node_id,
String ontology_display_name, String ontology_version) throws Exception {
if (node_id == null || ontology_display_name == null)
return null;
Utils.StopWatch stopWatch = new Utils.StopWatch();
// String max_tree_level_str =
// NCItBrowserProperties.getProperty(
// NCItBrowserProperties.MAXIMUM_TREE_LEVEL);
// int maxLevel = Integer.parseInt(max_tree_level_str);
CodingSchemeVersionOrTag versionOrTag = new CodingSchemeVersionOrTag();
if (ontology_version != null) versionOrTag.setVersion(ontology_version);
String jsonString =
CacheController.getTree(
ontology_display_name, versionOrTag, node_id);
debugJSONString("Section: search_tree", jsonString);
_logger.debug("search_tree: " + stopWatch.getResult());
return jsonString;
}
/**
* Process the specified HTTP request, and create the corresponding HTTP
* response (or forward to another web component that will create it).
*
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
*
* @exception IOException if an input/output error occurs
* @exception ServletException if a servlet exception occurs
*/
public void execute(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
// Determine request by attributes
String action = request.getParameter("action");// DataConstants.ACTION);
String node_id = request.getParameter("ontology_node_id");// DataConstants.ONTOLOGY_NODE_ID);
String ontology_display_name =
request.getParameter("ontology_display_name");// DataConstants.ONTOLOGY_DISPLAY_NAME);
String ontology_version = request.getParameter("version");
if (ontology_version == null) {
ontology_version = DataUtils.getVocabularyVersionByTag(ontology_display_name, "PRODUCTION");
}
long ms = System.currentTimeMillis();
if (action.equals("expand_tree")) {
if (node_id != null && ontology_display_name != null) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
try {
// for HL7 (temporary fix)
ontology_display_name =
DataUtils.searchFormalName(ontology_display_name);
nodesArray =
CacheController.getInstance().getSubconcepts(
ontology_display_name, ontology_version, node_id);
if (nodesArray != null) {
json.put("nodes", nodesArray);
}
} catch (Exception e) {
}
debugJSONString("Section: expand_tree", json.toString());
response.getWriter().write(json.toString());
/*
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
*/
}
}
/*
* else if (action.equals("search_tree")) {
*
*
* if (node_id != null && ontology_display_name != null) {
* response.setContentType("text/html");
* response.setHeader("Cache-Control", "no-cache"); JSONObject json =
* new JSONObject(); try { // testing // JSONArray rootsArray = //
* CacheController.getInstance().getPathsToRoots(ontology_display_name,
* // null, node_id, true);
*
* String max_tree_level_str = null; int maxLevel = -1; try {
* max_tree_level_str = NCItBrowserProperties .getInstance()
* .getProperty( NCItBrowserProperties.MAXIMUM_TREE_LEVEL); maxLevel =
* Integer.parseInt(max_tree_level_str);
*
* } catch (Exception ex) {
*
* }
*
* JSONArray rootsArray = CacheController.getInstance()
* .getPathsToRoots(ontology_display_name, null, node_id, true,
* maxLevel);
*
* if (rootsArray.length() == 0) { rootsArray =
* CacheController.getInstance() .getRootConcepts(ontology_display_name,
* null);
*
* boolean is_root = isRoot(rootsArray, node_id); if (!is_root) {
* //rootsArray = null; json.put("dummy_root_nodes", rootsArray);
* response.getWriter().write(json.toString());
* response.getWriter().flush();
*
* _logger.debug("Run time (milliseconds): " +
* (System.currentTimeMillis() - ms)); return; } }
* json.put("root_nodes", rootsArray); } catch (Exception e) {
* e.printStackTrace(); }
*
* response.getWriter().write(json.toString());
* response.getWriter().flush();
*
* _logger.debug("Run time (milliseconds): " +
* (System.currentTimeMillis() - ms)); return; } }
*/
if (action.equals("search_hierarchy")) {
search_hierarchy(request, response, node_id, ontology_display_name, ontology_version);
} else if (action.equals("search_tree")) {
search_tree(response, node_id, ontology_display_name, ontology_version);
}
else if (action.equals("build_tree")) {
if (ontology_display_name == null)
ontology_display_name = CODING_SCHEME_NAME;
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;// new JSONArray();
try {
nodesArray =
CacheController.getInstance().getRootConcepts(
ontology_display_name, ontology_version);
if (nodesArray != null) {
json.put("root_nodes", nodesArray);
}
} catch (Exception e) {
e.printStackTrace();
}
debugJSONString("Section: build_tree", json.toString());
response.getWriter().write(json.toString());
// response.getWriter().flush();
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
} else if (action.equals("build_vs_tree")) {
if (ontology_display_name == null)
ontology_display_name = CODING_SCHEME_NAME;
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;// new JSONArray();
try {
//HashMap getRootValueSets(String codingSchemeURN)
String codingSchemeVersion = null;
nodesArray =
CacheController.getInstance().getRootValueSets(
ontology_display_name, codingSchemeVersion);
if (nodesArray != null) {
json.put("root_nodes", nodesArray);
}
} catch (Exception e) {
e.printStackTrace();
}
response.getWriter().write(json.toString());
//System.out.println(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
} else if (action.equals("expand_vs_tree")) {
if (node_id != null && ontology_display_name != null) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
try {
nodesArray =
CacheController.getInstance().getSubValueSets(
ontology_display_name, ontology_version, node_id);
if (nodesArray != null) {
System.out.println("expand_vs_tree nodesArray != null");
json.put("nodes", nodesArray);
} else {
System.out.println("expand_vs_tree nodesArray == null???");
}
} catch (Exception e) {
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
}
} else if (action.equals("expand_entire_vs_tree")) {
if (node_id != null && ontology_display_name != null) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
try {
nodesArray =
CacheController.getInstance().getSourceValueSetTree(
ontology_display_name, ontology_version, true);
if (nodesArray != null) {
System.out.println("expand_entire_vs_tree nodesArray != null");
json.put("root_nodes", nodesArray);
} else {
System.out.println("expand_entire_vs_tree nodesArray == null???");
}
} catch (Exception e) {
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
}
} else if (action.equals("expand_entire_cs_vs_tree")) {
//if (node_id != null && ontology_display_name != null) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
try {
nodesArray =
CacheController.getInstance().getCodingSchemeValueSetTree(
ontology_display_name, ontology_version, true);
if (nodesArray != null) {
System.out.println("expand_entire_vs_tree nodesArray != null");
json.put("root_nodes", nodesArray);
} else {
System.out.println("expand_entire_vs_tree nodesArray == null???");
}
} catch (Exception e) {
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
} else if (action.equals("build_cs_vs_tree")) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;// new JSONArray();
try {
//HashMap getRootValueSets(String codingSchemeURN)
String codingSchemeVersion = null;
nodesArray =
CacheController.getInstance().getRootValueSets(true);
if (nodesArray != null) {
json.put("root_nodes", nodesArray);
}
} catch (Exception e) {
e.printStackTrace();
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
} else if (action.equals("expand_cs_vs_tree")) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
String vsd_uri = ValueSetHierarchy.getValueSetURI(node_id);
node_id = ValueSetHierarchy.getCodingSchemeName(node_id);
//if (node_id != null && ontology_display_name != null) {
if (node_id != null) {
ValueSetDefinition vsd = ValueSetHierarchy.findValueSetDefinitionByURI(vsd_uri);
if (vsd == null) {
System.out.println("(****) coding scheme name: " + node_id);
try {
nodesArray = CacheController.getInstance().getRootValueSets(node_id, null);
//nodesArray = CacheController.getInstance().getRootValueSets(node_id, null); //find roots (by source)
if (nodesArray != null) {
json.put("nodes", nodesArray);
} else {
System.out.println("expand_vs_tree nodesArray == null???");
}
} catch (Exception e) {
}
} else {
try {
nodesArray =
CacheController.getInstance().getSubValueSets(
node_id, null, vsd_uri);
if (nodesArray != null) {
json.put("nodes", nodesArray);
}
} catch (Exception e) {
}
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
}
} else if (action.equals("build_src_vs_tree")) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;// new JSONArray();
try {
//HashMap getRootValueSets(String codingSchemeURN)
String codingSchemeVersion = null;
nodesArray =
//CacheController.getInstance().getRootValueSets(true, true);
CacheController.getInstance().build_src_vs_tree();
if (nodesArray != null) {
json.put("root_nodes", nodesArray);
}
} catch (Exception e) {
e.printStackTrace();
}
response.getWriter().write(json.toString());
//System.out.println(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
} else if (action.equals("expand_src_vs_tree")) {
if (node_id != null && ontology_display_name != null) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
nodesArray = CacheController.getInstance().expand_src_vs_tree(node_id);
if (nodesArray == null) {
System.out.println("(*) CacheController returns nodesArray == null");
}
try {
if (nodesArray != null) {
System.out.println("expand_src_vs_tree nodesArray != null");
json.put("nodes", nodesArray);
} else {
System.out.println("expand_src_vs_tree nodesArray == null???");
}
} catch (Exception e) {
e.printStackTrace();
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
}
}
}
private boolean isRoot(JSONArray rootsArray, String code) {
for (int i = 0; i < rootsArray.length(); i++) {
String node_id = null;
try {
JSONObject node = rootsArray.getJSONObject(i);
node_id = (String) node.get(CacheController.ONTOLOGY_NODE_ID);
if (node_id.compareTo(code) == 0)
return true;
} catch (Exception e) {
e.printStackTrace();
}
}
return false;
}
    // DYEE debug switch: when flipped to true, println() mirrors all generated
    // output to the log and into _debugBuffer (allocated by search_hierarchy).
    private static boolean _debug = false; // DYEE_DEBUG
    private static StringBuffer _debugBuffer = null;
public static void println(PrintWriter out, String text) {
if (_debug) {
_logger.debug("DBG: " + text);
_debugBuffer.append(text + "\n");
}
out.println(text);
}
/**
 * Renders a standalone "Vocabulary Hierarchy" popup page for the given
 * concept. The page embeds YUI TreeView plus inline JavaScript that lazily
 * loads tree nodes via the /ncitbrowser/ajax endpoint; the initial focus
 * node and vocabulary are passed to the script through hidden form fields.
 *
 * @param request               incoming servlet request (parameters unused)
 * @param response              servlet response the HTML page is written to
 * @param node_id               concept code to focus the hierarchy on
 * @param ontology_display_name display name of the vocabulary
 * @param ontology_version      version string of the vocabulary
 */
public static void search_hierarchy(HttpServletRequest request, HttpServletResponse response, String node_id,
String ontology_display_name, String ontology_version) {
// NOTE(review): this loop only drains the parameter enumeration; paramValue
// is never used, so the whole loop is effectively a no-op.
Enumeration parameters = request.getParameterNames();
String param = null;
while (parameters.hasMoreElements())
{
param = (String) parameters.nextElement();
String paramValue = request.getParameter(param);
}
response.setContentType("text/html");
PrintWriter out = null;
try {
out = response.getWriter();
} catch (Exception ex) {
ex.printStackTrace();
return;
}
// Allocate the capture buffer used by println() while this page renders.
if (_debug) {
_debugBuffer = new StringBuffer();
}
String localName = DataUtils.getLocalName(ontology_display_name);
String formalName = DataUtils.getFormalName(localName);
// NOTE(review): term_browser_version is fetched but never used below.
String term_browser_version = DataUtils.getMetadataValue(formalName, ontology_version, "term_browser_version");
String display_name = DataUtils.getMetadataValue(formalName, ontology_version, "display_name");
// ---- YUI library includes ----
println(out, "");
println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/yahoo-min.js\" ></script>");
println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/event-min.js\" ></script>");
println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/dom-min.js\" ></script>");
println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/animation-min.js\" ></script>");
println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/container-min.js\" ></script>");
println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/connection-min.js\" ></script>");
//println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/autocomplete-min.js\" ></script>");
println(out, "<script type=\"text/javascript\" src=\"/ncitbrowser/js/yui/treeview-min.js\" ></script>");
println(out, "");
println(out, "");
println(out, "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">");
// NOTE(review): the next literal appears truncated (unterminated string);
// it presumably once read "<html xmlns=\"http://www.w3.org/1999/xhtml\">".
// Left untouched here — confirm against the original source.
println(out, "<html xmlns=\"http:
println(out, " <head>");
println(out, " <title>Vocabulary Hierarchy</title>");
println(out, " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">");
println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/styleSheet.css\" />");
println(out, " <link rel=\"shortcut icon\" href=\"/ncitbrowser/favicon.ico\" type=\"image/x-icon\" />");
println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/fonts.css\" />");
println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/grids.css\" />");
println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/code.css\" />");
println(out, " <link rel=\"stylesheet\" type=\"text/css\" href=\"/ncitbrowser/css/yui/tree.css\" />");
println(out, " <script type=\"text/javascript\" src=\"/ncitbrowser/js/script.js\"></script>");
println(out, "");
// ---- Inline JavaScript: tree construction, AJAX callbacks, UI helpers ----
println(out, " <script language=\"JavaScript\">");
println(out, "");
println(out, " var tree;");
println(out, " var nodeIndex;");
println(out, " var rootDescDiv;");
println(out, " var emptyRootDiv;");
println(out, " var treeStatusDiv;");
println(out, " var nodes = [];");
println(out, " var currOpener;");
println(out, "");
println(out, " function load(url,target) {");
println(out, " if (target != '')");
println(out, " target.window.location.href = url;");
println(out, " else");
println(out, " window.location.href = url;");
println(out, " }");
println(out, "");
println(out, " function init() {");
println(out, "");
println(out, " rootDescDiv = new YAHOO.widget.Module(\"rootDesc\", {visible:false} );");
println(out, " resetRootDesc();");
println(out, "");
println(out, " emptyRootDiv = new YAHOO.widget.Module(\"emptyRoot\", {visible:true} );");
println(out, " resetEmptyRoot();");
println(out, "");
println(out, " treeStatusDiv = new YAHOO.widget.Module(\"treeStatus\", {visible:true} );");
println(out, " resetTreeStatus();");
println(out, "");
println(out, " currOpener = opener;");
println(out, " initTree();");
println(out, " }");
println(out, "");
println(out, " function addTreeNode(rootNode, nodeInfo) {");
println(out, " var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
println(out, " var newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
println(out, " var newNode = new YAHOO.widget.TextNode(newNodeData, rootNode, false);");
println(out, " if (nodeInfo.ontology_node_child_count > 0) {");
println(out, " newNode.setDynamicLoad(loadNodeData);");
println(out, " }");
println(out, " }");
println(out, "");
println(out, " function buildTree(ontology_node_id, ontology_display_name) {");
println(out, " var handleBuildTreeSuccess = function(o) {");
println(out, " var respTxt = o.responseText;");
println(out, " var respObj = eval('(' + respTxt + ')');");
println(out, " if ( typeof(respObj) != \"undefined\") {");
println(out, " if ( typeof(respObj.root_nodes) != \"undefined\") {");
println(out, " var root = tree.getRoot();");
println(out, " if (respObj.root_nodes.length == 0) {");
println(out, " showEmptyRoot();");
println(out, " }");
println(out, " else {");
println(out, " for (var i=0; i < respObj.root_nodes.length; i++) {");
println(out, " var nodeInfo = respObj.root_nodes[i];");
println(out, " var expand = false;");
println(out, " addTreeNode(root, nodeInfo, expand);");
println(out, " }");
println(out, " }");
println(out, "");
println(out, " tree.draw();");
println(out, " }");
println(out, " }");
println(out, " resetTreeStatus();");
println(out, " }");
println(out, "");
println(out, " var handleBuildTreeFailure = function(o) {");
println(out, " resetTreeStatus();");
println(out, " resetEmptyRoot();");
println(out, " alert('responseFailure: ' + o.statusText);");
println(out, " }");
println(out, "");
println(out, " var buildTreeCallback =");
println(out, " {");
println(out, " success:handleBuildTreeSuccess,");
println(out, " failure:handleBuildTreeFailure");
println(out, " };");
println(out, "");
println(out, " if (ontology_display_name!='') {");
println(out, " resetEmptyRoot();");
println(out, "");
println(out, " showTreeLoadingStatus();");
println(out, " var ontology_source = null;");
println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
println(out, " var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=build_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version+'&ontology_source='+ontology_source,buildTreeCallback);");
println(out, " }");
println(out, " }");
println(out, "");
println(out, " function resetTree(ontology_node_id, ontology_display_name) {");
println(out, "");
println(out, " var handleResetTreeSuccess = function(o) {");
println(out, " var respTxt = o.responseText;");
println(out, " var respObj = eval('(' + respTxt + ')');");
println(out, " if ( typeof(respObj) != \"undefined\") {");
println(out, " if ( typeof(respObj.root_node) != \"undefined\") {");
println(out, " var root = tree.getRoot();");
println(out, " var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.root_node.ontology_node_id + \"');\";");
println(out, " var rootNodeData = { label:respObj.root_node.ontology_node_name, id:respObj.root_node.ontology_node_id, href:nodeDetails };");
println(out, " var expand = false;");
println(out, " if (respObj.root_node.ontology_node_child_count > 0) {");
println(out, " expand = true;");
println(out, " }");
println(out, " var ontRoot = new YAHOO.widget.TextNode(rootNodeData, root, expand);");
println(out, "");
println(out, " if ( typeof(respObj.child_nodes) != \"undefined\") {");
println(out, " for (var i=0; i < respObj.child_nodes.length; i++) {");
println(out, " var nodeInfo = respObj.child_nodes[i];");
println(out, " addTreeNode(ontRoot, nodeInfo);");
println(out, " }");
println(out, " }");
println(out, " tree.draw();");
println(out, " setRootDesc(respObj.root_node.ontology_node_name, ontology_display_name);");
println(out, " }");
println(out, " }");
println(out, " resetTreeStatus();");
println(out, " }");
println(out, "");
println(out, " var handleResetTreeFailure = function(o) {");
println(out, " resetTreeStatus();");
println(out, " alert('responseFailure: ' + o.statusText);");
println(out, " }");
println(out, "");
println(out, " var resetTreeCallback =");
println(out, " {");
println(out, " success:handleResetTreeSuccess,");
println(out, " failure:handleResetTreeFailure");
println(out, " };");
println(out, " if (ontology_node_id!= '') {");
println(out, " showTreeLoadingStatus();");
println(out, " var ontology_source = null;");
println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
println(out, " var request = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=reset_tree&ontology_node_id=' +ontology_node_id+'&ontology_display_name='+ontology_display_name + '&version='+ ontology_version +'&ontology_source='+ontology_source,resetTreeCallback);");
println(out, " }");
println(out, " }");
println(out, "");
println(out, " function onClickTreeNode(ontology_node_id) {");
// NOTE(review): raw out.println bypasses the debug-capture println() wrapper,
// so this one line never appears in the _debugBuffer dump.
out.println(" if (ontology_node_id.indexOf(\"_dot_\") != -1) return;");
println(out, " var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
println(out, " load('/ncitbrowser/ConceptReport.jsp?dictionary='+ ontology_display_name + '&version='+ ontology_version + '&code=' + ontology_node_id, currOpener);");
println(out, " }");
println(out, "");
println(out, " function onClickViewEntireOntology(ontology_display_name) {");
println(out, " var ontology_display_name = document.pg_form.ontology_display_name.value;");
println(out, " tree = new YAHOO.widget.TreeView(\"treecontainer\");");
println(out, " tree.draw();");
println(out, " resetRootDesc();");
println(out, " buildTree('', ontology_display_name);");
println(out, " }");
println(out, "");
println(out, " function initTree() {");
println(out, "");
println(out, " tree = new YAHOO.widget.TreeView(\"treecontainer\");");
println(out, " var ontology_node_id = document.forms[\"pg_form\"].ontology_node_id.value;");
println(out, " var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
println(out, "");
println(out, " if (ontology_node_id == null || ontology_node_id == \"null\")");
println(out, " {");
println(out, " buildTree(ontology_node_id, ontology_display_name);");
println(out, " }");
println(out, " else");
println(out, " {");
println(out, " searchTree(ontology_node_id, ontology_display_name);");
println(out, " }");
println(out, " }");
println(out, "");
println(out, " function initRootDesc() {");
println(out, " rootDescDiv.setBody('');");
println(out, " initRootDesc.show();");
println(out, " rootDescDiv.render();");
println(out, " }");
println(out, "");
println(out, " function resetRootDesc() {");
println(out, " rootDescDiv.hide();");
println(out, " rootDescDiv.setBody('');");
println(out, " rootDescDiv.render();");
println(out, " }");
println(out, "");
println(out, " function resetEmptyRoot() {");
println(out, " emptyRootDiv.hide();");
println(out, " emptyRootDiv.setBody('');");
println(out, " emptyRootDiv.render();");
println(out, " }");
println(out, "");
println(out, " function resetTreeStatus() {");
println(out, " treeStatusDiv.hide();");
println(out, " treeStatusDiv.setBody('');");
println(out, " treeStatusDiv.render();");
println(out, " }");
println(out, "");
println(out, " function showEmptyRoot() {");
println(out, " emptyRootDiv.setBody(\"<span class='instruction_text'>No root nodes available.</span>\");");
println(out, " emptyRootDiv.show();");
println(out, " emptyRootDiv.render();");
println(out, " }");
println(out, "");
println(out, " function showNodeNotFound(node_id) {");
println(out, " //emptyRootDiv.setBody(\"<span class='instruction_text'>Concept with code \" + node_id + \" not found in the hierarchy.</span>\");");
println(out, " emptyRootDiv.setBody(\"<span class='instruction_text'>Concept not part of the parent-child hierarchy in this source; check other relationships.</span>\");");
println(out, " emptyRootDiv.show();");
println(out, " emptyRootDiv.render();");
println(out, " }");
println(out, " ");
println(out, " function showPartialHierarchy() {");
println(out, " rootDescDiv.setBody(\"<span class='instruction_text'>(Note: This tree only shows partial hierarchy.)</span>\");");
println(out, " rootDescDiv.show();");
println(out, " rootDescDiv.render();");
println(out, " }");
println(out, "");
println(out, " function showTreeLoadingStatus() {");
println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Building tree ...</span>\");");
println(out, " treeStatusDiv.show();");
println(out, " treeStatusDiv.render();");
println(out, " }");
println(out, "");
println(out, " function showTreeDrawingStatus() {");
println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Drawing tree ...</span>\");");
println(out, " treeStatusDiv.show();");
println(out, " treeStatusDiv.render();");
println(out, " }");
println(out, "");
println(out, " function showSearchingTreeStatus() {");
println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Searching tree... Please wait.</span>\");");
println(out, " treeStatusDiv.show();");
println(out, " treeStatusDiv.render();");
println(out, " }");
println(out, "");
println(out, " function showConstructingTreeStatus() {");
println(out, " treeStatusDiv.setBody(\"<img src='/ncitbrowser/images/loading.gif'/> <span class='instruction_text'>Constructing tree... Please wait.</span>\");");
println(out, " treeStatusDiv.show();");
println(out, " treeStatusDiv.render();");
println(out, " }");
println(out, "");
println(out, " function loadNodeData(node, fnLoadComplete) {");
println(out, " var id = node.data.id;");
println(out, "");
println(out, " var responseSuccess = function(o)");
println(out, " {");
println(out, " var path;");
println(out, " var dirs;");
println(out, " var files;");
println(out, " var respTxt = o.responseText;");
println(out, " var respObj = eval('(' + respTxt + ')');");
println(out, " var fileNum = 0;");
println(out, " var categoryNum = 0;");
println(out, " if ( typeof(respObj.nodes) != \"undefined\") {");
println(out, " for (var i=0; i < respObj.nodes.length; i++) {");
println(out, " var name = respObj.nodes[i].ontology_node_name;");
println(out, " var nodeDetails = \"javascript:onClickTreeNode('\" + respObj.nodes[i].ontology_node_id + \"');\";");
println(out, " var newNodeData = { label:name, id:respObj.nodes[i].ontology_node_id, href:nodeDetails };");
println(out, " var newNode = new YAHOO.widget.TextNode(newNodeData, node, false);");
println(out, " if (respObj.nodes[i].ontology_node_child_count > 0) {");
println(out, " newNode.setDynamicLoad(loadNodeData);");
println(out, " }");
println(out, " }");
println(out, " }");
println(out, " tree.draw();");
println(out, " fnLoadComplete();");
println(out, " }");
println(out, "");
println(out, " var responseFailure = function(o){");
println(out, " alert('responseFailure: ' + o.statusText);");
println(out, " }");
println(out, "");
println(out, " var callback =");
println(out, " {");
println(out, " success:responseSuccess,");
println(out, " failure:responseFailure");
println(out, " };");
println(out, "");
println(out, " var ontology_display_name = document.forms[\"pg_form\"].ontology_display_name.value;");
println(out, " var ontology_version = document.forms[\"pg_form\"].ontology_version.value;");
//println(out, " var ontology_display_name = " + "\"" + ontology_display_name + "\";");
//println(out, " var ontology_version = " + "\"" + ontology_version + "\";");
println(out, " var cObj = YAHOO.util.Connect.asyncRequest('GET','/ncitbrowser/ajax?action=expand_tree&ontology_node_id=' +id+'&ontology_display_name='+ontology_display_name+'&version='+ontology_version,callback);");
println(out, " }");
println(out, "");
println(out, " function setRootDesc(rootNodeName, ontology_display_name) {");
println(out, " var newDesc = \"<span class='instruction_text'>Root set to <b>\" + rootNodeName + \"</b></span>\";");
println(out, " rootDescDiv.setBody(newDesc);");
// NOTE(review): the emitted footer label "view full ontology}" contains a
// stray '}' — looks like a typo in the generated link text.
println(out, " var footer = \"<a onClick='javascript:onClickViewEntireOntology();' href='#' class='link_text'>view full ontology}</a>\";");
println(out, " rootDescDiv.setFooter(footer);");
println(out, " rootDescDiv.show();");
println(out, " rootDescDiv.render();");
println(out, " }");
println(out, "");
println(out, "");
println(out, " function searchTree(ontology_node_id, ontology_display_name) {");
println(out, "");
println(out, " var root = tree.getRoot();");
// Server-side call: injects the pre-computed partial-hierarchy JS for the
// focus node directly into the generated searchTree() function body.
//new ViewInHierarchyUtil().printTree(out, ontology_display_name, ontology_version, node_id);
new ViewInHierarchyUtils().printTree(out, ontology_display_name, ontology_version, node_id);
println(out, " showPartialHierarchy();");
println(out, " tree.draw();");
println(out, " }");
println(out, "");
println(out, "");
println(out, " function addTreeBranch(ontology_node_id, rootNode, nodeInfo) {");
println(out, " var newNodeDetails = \"javascript:onClickTreeNode('\" + nodeInfo.ontology_node_id + \"');\";");
println(out, " var newNodeData = { label:nodeInfo.ontology_node_name, id:nodeInfo.ontology_node_id, href:newNodeDetails };");
println(out, "");
println(out, " var expand = false;");
println(out, " var childNodes = nodeInfo.children_nodes;");
println(out, "");
println(out, " if (childNodes.length > 0) {");
println(out, " expand = true;");
println(out, " }");
println(out, " var newNode = new YAHOO.widget.TextNode(newNodeData, rootNode, expand);");
println(out, " if (nodeInfo.ontology_node_id == ontology_node_id) {");
println(out, " newNode.labelStyle = \"ygtvlabel_highlight\";");
println(out, " }");
println(out, "");
println(out, " if (nodeInfo.ontology_node_id == ontology_node_id) {");
println(out, " newNode.isLeaf = true;");
println(out, " if (nodeInfo.ontology_node_child_count > 0) {");
println(out, " newNode.isLeaf = false;");
println(out, " newNode.setDynamicLoad(loadNodeData);");
println(out, " } else {");
println(out, " tree.draw();");
println(out, " }");
println(out, "");
println(out, " } else {");
println(out, " if (nodeInfo.ontology_node_id != ontology_node_id) {");
println(out, " if (nodeInfo.ontology_node_child_count == 0 && nodeInfo.ontology_node_id != ontology_node_id) {");
println(out, " newNode.isLeaf = true;");
println(out, " } else if (childNodes.length == 0) {");
println(out, " newNode.setDynamicLoad(loadNodeData);");
println(out, " }");
println(out, " }");
println(out, " }");
println(out, "");
println(out, " tree.draw();");
println(out, " for (var i=0; i < childNodes.length; i++) {");
println(out, " var childnodeInfo = childNodes[i];");
println(out, " addTreeBranch(ontology_node_id, newNode, childnodeInfo);");
println(out, " }");
println(out, " }");
println(out, " YAHOO.util.Event.addListener(window, \"load\", init);");
println(out, "");
println(out, " </script>");
println(out, "</head>");
// ---- Page body: banner, hierarchy tree container, hidden form fields ----
println(out, "<body>");
println(out, " ");
println(out, " <!-- Begin Skip Top Navigation -->");
println(out, " <a href=\"#evs-content\" class=\"hideLink\" accesskey=\"1\" title=\"Skip repetitive navigation links\">skip navigation links</A>");
println(out, " <!-- End Skip Top Navigation --> ");
println(out, " <div id=\"popupContainer\">");
println(out, " <!-- nci popup banner -->");
println(out, " <div class=\"ncipopupbanner\">");
// NOTE(review): the next two literals appear truncated (unterminated
// strings starting with "http:") — likely lost in an extraction step.
// Left untouched here — confirm against the original source.
println(out, " <a href=\"http:
println(out, " <a href=\"http:
println(out, " </div>");
println(out, " <!-- end nci popup banner -->");
println(out, " <div id=\"popupMainArea\">");
println(out, " <a name=\"evs-content\" id=\"evs-content\"></a>");
println(out, " <table class=\"evsLogoBg\" cellspacing=\"0\" cellpadding=\"0\" border=\"0\">");
println(out, " <tr>");
println(out, " <td valign=\"top\">");
println(out, " <a href=\"http://evs.nci.nih.gov/\" target=\"_blank\" alt=\"Enterprise Vocabulary Services\">");
println(out, " <img src=\"/ncitbrowser/images/evs-popup-logo.gif\" width=\"213\" height=\"26\" alt=\"EVS: Enterprise Vocabulary Services\" title=\"EVS: Enterprise Vocabulary Services\" border=\"0\" />");
println(out, " </a>");
println(out, " </td>");
println(out, " <td valign=\"top\"><div id=\"closeWindow\"><a href=\"javascript:window.close();\"><img src=\"/ncitbrowser/images/thesaurus_close_icon.gif\" width=\"10\" height=\"10\" border=\"0\" alt=\"Close Window\" /> CLOSE WINDOW</a></div></td>");
println(out, " </tr>");
println(out, " </table>");
println(out, "");
println(out, "");
// NCI Thesaurus gets its dedicated banner; all other vocabularies share a
// generic banner with the display name overlaid.
String release_date = DataUtils.getVersionReleaseDate(ontology_display_name, ontology_version);
if (ontology_display_name.compareTo("NCI Thesaurus") == 0 || ontology_display_name.compareTo("NCI_Thesaurus") == 0) {
println(out, " <div>");
println(out, " <img src=\"/ncitbrowser/images/thesaurus_popup_banner.gif\" width=\"612\" height=\"56\" alt=\"NCI Thesaurus\" title=\"\" border=\"0\" />");
println(out, " ");
println(out, " ");
println(out, " <span class=\"texttitle-blue-rightjust-2\">" + ontology_version + " (Release date: " + release_date + ")</span>");
println(out, " ");
println(out, "");
println(out, " </div>");
} else {
println(out, " <div>");
println(out, " <img src=\"/ncitbrowser/images/other_popup_banner.gif\" width=\"612\" height=\"56\" alt=\"" + display_name + "\" title=\"\" border=\"0\" />");
println(out, " <div class=\"vocabularynamepopupshort\">" + display_name );
println(out, " ");
println(out, " ");
println(out, " <span class=\"texttitle-blue-rightjust\">" + ontology_version + " (Release date: " + release_date + ")</span>");
println(out, " ");
println(out, " ");
println(out, " </div>");
println(out, " </div>");
}
println(out, "");
println(out, " <div id=\"popupContentArea\">");
println(out, " <table width=\"580px\" cellpadding=\"3\" cellspacing=\"0\" border=\"0\">");
println(out, " <tr class=\"textbody\">");
println(out, " <td class=\"pageTitle\" align=\"left\">");
println(out, " " + display_name + " Hierarchy");
println(out, " </td>");
println(out, " <td class=\"pageTitle\" align=\"right\">");
println(out, " <font size=\"1\" color=\"red\" align=\"right\">");
println(out, " <a href=\"javascript:printPage()\"><img src=\"/ncitbrowser/images/printer.bmp\" border=\"0\" alt=\"Send to Printer\"><i>Send to Printer</i></a>");
println(out, " </font>");
println(out, " </td>");
println(out, " </tr>");
println(out, " </table>");
println(out, " <!-- Tree content -->");
println(out, " <div id=\"rootDesc\">");
println(out, " <div id=\"bd\"></div>");
println(out, " <div id=\"ft\"></div>");
println(out, " </div>");
println(out, " <div id=\"treeStatus\">");
println(out, " <div id=\"bd\"></div>");
println(out, " </div>");
println(out, " <div id=\"emptyRoot\">");
println(out, " <div id=\"bd\"></div>");
println(out, " </div>");
println(out, " <div id=\"treecontainer\"></div>");
println(out, "");
// Hidden form feeds the page's JavaScript (initTree etc.) with the focus
// node, vocabulary name and version. Values are sanitized against XSS
// before being interpolated into the HTML.
println(out, " <form id=\"pg_form\">");
println(out, " ");
// to be modified:
// println(out, " <input type=\"hidden\" id=\"ontology_node_id\" name=\"ontology_node_id\" value=\"C37927\" />");
// println(out, " <input type=\"hidden\" id=\"ontology_display_name\" name=\"ontology_display_name\" value=\"NCI Thesaurus\" />");
// println(out, " <input type=\"hidden\" id=\"schema\" name=\"schema\" value=\"null\" />");
// println(out, " <input type=\"hidden\" id=\"ontology_version\" name=\"ontology_version\" value=\"11.11d\" />");
String ontology_node_id_value = HTTPUtils.cleanXSS(node_id);
String ontology_display_name_value = HTTPUtils.cleanXSS(ontology_display_name);
String ontology_version_value = HTTPUtils.cleanXSS(ontology_version);
//String scheme_value = HTTPUtils.cleanXSS(schema);
// NOTE(review): leftover stdout debug prints — consider routing through
// _logger or removing.
System.out.println("ontology_node_id_value: " + ontology_node_id_value);
System.out.println("ontology_display_name_value: " + ontology_display_name_value);
System.out.println("ontology_version_value: " + ontology_version_value);
println(out, " <input type=\"hidden\" id=\"ontology_node_id\" name=\"ontology_node_id\" value=\"" + ontology_node_id_value + "\" />");
println(out, " <input type=\"hidden\" id=\"ontology_display_name\" name=\"ontology_display_name\" value=\"" + ontology_display_name_value + "\" />");
//println(out, " <input type=\"hidden\" id=\"schema\" name=\"schema\" value=\"" + scheme_value + "\" />");
println(out, " <input type=\"hidden\" id=\"ontology_version\" name=\"ontology_version\" value=\"" + ontology_version_value + "\" />");
println(out, "");
println(out, " </form>");
println(out, " <!-- End of Tree control content -->");
println(out, " </div>");
println(out, " </div>");
println(out, " </div>");
println(out, " ");
println(out, "</body>");
println(out, "</html>");
// Dump the captured page to the log, then release the buffer (println()
// must tolerate the null buffer afterwards).
if (_debug) {
_logger.debug(Utils.SEPARATOR);
_logger.debug("VIH HTML:\n" + _debugBuffer);
_debugBuffer = null;
_logger.debug(Utils.SEPARATOR);
}
}
}
|
package kaist.cs550_2016.poche;
import android.util.Log;
import android.widget.Toast;
/**
 * Debug-only helpers: log and toast wrappers plus a simple stopwatch.
 * Every method is a no-op when {@link #isDebug} is false.
 */
public class Debug {

    /**
     * Whether to use debug mode.<br>
     * In production, this variable must be set to false.
     */
    public static final boolean isDebug = true;

    /**
     * Wrapper for logging in debug mode.
     * To not be mixed with other logs, <br>
     * the log level is equivalent to {@link Log#w(String, String)}.
     * @param tag object/activity for identifying message
     * @param message message to be printed in log
     */
    public static void log(Object tag, String message) {
        if (!isDebug) return;
        Log.w(tag.getClass().getSimpleName(), message);
    }

    /**
     * Convenience overload using a fixed log tag.
     * @param message message to be printed in log
     */
    public static void log(String message) {
        if (!isDebug) return;
        Log.w("via Debug.java: ", message);
    }

    /**
     * Logs a packed ARGB color value in 0xAARRGGBB hex form.
     * @param color packed color int to print
     */
    public static void logColor(int color) {
        if (!isDebug) return;
        Log.w("Color via Debug.java: ", String.format("0x%08X", color));
    }

    /**
     * Wrapper for showing toast in debug mode.
     * @param message message to be printed in log
     * @param duration time length to be shown (See {@link Toast})
     */
    public static void toast(String message, int duration) {
        if (!isDebug) return;
        Toast.makeText(App.getAppContext(), message, duration).show();
    }

    /**
     * Wrapper for showing toast in debug mode in short duration.
     * @param message message to be printed in log
     */
    public static void toast(String message) {
        toast(message, Toast.LENGTH_SHORT);
    }

    /**
     * Start timestamp (nanoseconds) recorded for the stopwatch utility.
     */
    private static long start;

    /**
     * Start a new stopwatch.
     * Uses {@link System#nanoTime()} (a monotonic clock) instead of
     * {@code currentTimeMillis()}, so the measurement cannot be skewed by
     * wall-clock adjustments (NTP sync, user changing the time).
     */
    public static void stopwatchStart() {
        if (!isDebug) return;
        start = System.nanoTime();
    }

    /**
     * Returns the elapsed wall-clock time (in seconds) since the stopwatch
     * was started. (Previous doc claimed "CPU time" — this measures real
     * elapsed time, not CPU usage.)
     * @return elapsed time in seconds since {@link #stopwatchStart()},
     *         or 0 when debug mode is off
     */
    public static double stopwatchEnd() {
        if (!isDebug) return 0;
        long now = System.nanoTime();
        return (now - start) / 1e9;
    }

    /**
     * Utility function for showing stopwatch value in {@link Toast}.<br>
     * Format: [[task]] + finished in [[time]] s.
     * @param task name for task showing in message.
     */
    public static void toastStopwatch(String task) {
        if (!isDebug) return;
        Debug.toast(task + " finished in " + Debug.stopwatchEnd() + " s.");
    }
}
|
package com.android.email.activity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.StringTokenizer;
import com.android.email.K9Activity;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Parcelable;
import android.provider.OpenableColumns;
import android.text.TextWatcher;
import android.text.util.Rfc822Tokenizer;
import android.util.Config;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.view.View.OnFocusChangeListener;
import android.webkit.WebView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.MultiAutoCompleteTextView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.AutoCompleteTextView.Validator;
import com.android.email.Account;
import com.android.email.Email;
import com.android.email.EmailAddressAdapter;
import com.android.email.EmailAddressValidator;
import com.android.email.MessagingController;
import com.android.email.MessagingListener;
import com.android.email.Preferences;
import com.android.email.R;
import com.android.email.Utility;
import com.android.email.mail.Address;
import com.android.email.mail.Body;
import com.android.email.mail.Flag;
import com.android.email.mail.Message;
import com.android.email.mail.MessagingException;
import com.android.email.mail.Multipart;
import com.android.email.mail.Part;
import com.android.email.mail.Message.RecipientType;
import com.android.email.mail.internet.MimeBodyPart;
import com.android.email.mail.internet.MimeHeader;
import com.android.email.mail.internet.MimeMessage;
import com.android.email.mail.internet.MimeMultipart;
import com.android.email.mail.internet.MimeUtility;
import com.android.email.mail.internet.TextBody;
import com.android.email.mail.store.LocalStore;
import com.android.email.mail.store.LocalStore.LocalAttachmentBody;
public class MessageCompose extends K9Activity implements OnClickListener, OnFocusChangeListener {
// Intent actions that select the compose mode (reply / reply-all / forward /
// resume an existing draft).
private static final String ACTION_REPLY = "com.android.email.intent.action.REPLY";
private static final String ACTION_REPLY_ALL = "com.android.email.intent.action.REPLY_ALL";
private static final String ACTION_FORWARD = "com.android.email.intent.action.FORWARD";
private static final String ACTION_EDIT_DRAFT = "com.android.email.intent.action.EDIT_DRAFT";
// Intent extras carrying the account, source folder and source message UID.
private static final String EXTRA_ACCOUNT = "account";
private static final String EXTRA_FOLDER = "folder";
private static final String EXTRA_MESSAGE = "message";
// Keys used to persist view state across configuration changes.
private static final String STATE_KEY_ATTACHMENTS =
"com.android.email.activity.MessageCompose.attachments";
private static final String STATE_KEY_CC_SHOWN =
"com.android.email.activity.MessageCompose.ccShown";
private static final String STATE_KEY_BCC_SHOWN =
"com.android.email.activity.MessageCompose.bccShown";
private static final String STATE_KEY_QUOTED_TEXT_SHOWN =
"com.android.email.activity.MessageCompose.quotedTextShown";
private static final String STATE_KEY_SOURCE_MESSAGE_PROCED =
"com.android.email.activity.MessageCompose.stateKeySourceMessageProced";
private static final String STATE_KEY_DRAFT_UID =
"com.android.email.activity.MessageCompose.draftUid";
private static final String STATE_IDENTITY_CHANGED =
"com.android.email.activity.MessageCompose.identityChanged";
private static final String STATE_IDENTITY =
"com.android.email.activity.MessageCompose.identity";
// Message codes dispatched through mHandler to update the UI from
// background callbacks.
private static final int MSG_PROGRESS_ON = 1;
private static final int MSG_PROGRESS_OFF = 2;
private static final int MSG_UPDATE_TITLE = 3;
private static final int MSG_SKIPPED_ATTACHMENTS = 4;
private static final int MSG_SAVED_DRAFT = 5;
private static final int MSG_DISCARDED_DRAFT = 6;
// startActivityForResult request codes.
private static final int ACTIVITY_REQUEST_PICK_ATTACHMENT = 1;
private static final int ACTIVITY_CHOOSE_IDENTITY = 2;
// Account/identity state for the message being composed.
private Account mAccount;
private Account.Identity mIdentity;
private boolean mIdentityChanged = false;
private boolean mSignatureChanged = false;
// Folder and UID of the message being replied to / forwarded / edited.
private String mFolder;
private String mSourceMessageUid;
private Message mSourceMessage;
/**
 * Indicates that the source message has been processed at least once and should not
 * be processed on any subsequent loads. This protects us from adding attachments that
 * have already been added from the restore of the view state.
 */
private boolean mSourceMessageProcessed = false;
// Views bound from the compose layout.
private TextView mFromView;
private MultiAutoCompleteTextView mToView;
private MultiAutoCompleteTextView mCcView;
private MultiAutoCompleteTextView mBccView;
private EditText mSubjectView;
private EditText mSignatureView;
private EditText mMessageContentView;
private LinearLayout mAttachments;
private View mQuotedTextBar;
private ImageButton mQuotedTextDelete;
private EditText mQuotedText;
// Set whenever the user edits something that has not yet been saved.
private boolean mDraftNeedsSaving = false;
/**
 * The draft uid of this message. This is used when saving drafts so that the same draft is
 * overwritten instead of being created anew. This property is null until the first save.
 */
private String mDraftUid;
// UI-thread handler: background messaging callbacks post these MSG_* codes
// here so progress indicators, the title, and toasts are touched only on
// the main thread.
private Handler mHandler = new Handler() {
@Override
public void handleMessage(android.os.Message msg) {
switch (msg.what) {
case MSG_PROGRESS_ON:
setProgressBarIndeterminateVisibility(true);
break;
case MSG_PROGRESS_OFF:
setProgressBarIndeterminateVisibility(false);
break;
case MSG_UPDATE_TITLE:
updateTitle();
break;
// Some attachments could not be carried over (e.g. when forwarding);
// tell the user rather than failing silently.
case MSG_SKIPPED_ATTACHMENTS:
Toast.makeText(
MessageCompose.this,
getString(R.string.message_compose_attachments_skipped_toast),
Toast.LENGTH_LONG).show();
break;
case MSG_SAVED_DRAFT:
Toast.makeText(
MessageCompose.this,
getString(R.string.message_saved_toast),
Toast.LENGTH_LONG).show();
break;
case MSG_DISCARDED_DRAFT:
Toast.makeText(
MessageCompose.this,
getString(R.string.message_discarded_toast),
Toast.LENGTH_LONG).show();
break;
// Unknown codes are delegated to the base Handler.
default:
super.handleMessage(msg);
break;
}
}
};
// Messaging-controller listener plus the adapter/validator backing the
// recipient auto-complete fields.
private Listener mListener = new Listener();
private EmailAddressAdapter mAddressAdapter;
private Validator mAddressValidator;
class Attachment implements Serializable {
public String name;
public String contentType;
public long size;
public Uri uri;
}
/**
 * Launches the compose screen for a brand-new message on the given account.
 *
 * @param context caller's context, used to start the activity
 * @param account account to compose with; null means the default account
 */
public static void actionCompose(Context context, Account account) {
    Intent intent = new Intent(context, MessageCompose.class);
    intent.putExtra(EXTRA_ACCOUNT, account);
    context.startActivity(intent);
}
/**
 * Launches the compose screen as a reply to the given message.
 *
 * @param context  caller's context, used to start the activity
 * @param account  account the source message belongs to
 * @param message  message being replied to
 * @param replyAll true for reply-all, false for a plain reply
 */
public static void actionReply(
        Context context,
        Account account,
        Message message,
        boolean replyAll) {
    Intent intent = new Intent(context, MessageCompose.class);
    intent.setAction(replyAll ? ACTION_REPLY_ALL : ACTION_REPLY);
    intent.putExtra(EXTRA_ACCOUNT, account);
    intent.putExtra(EXTRA_FOLDER, message.getFolder().getName());
    intent.putExtra(EXTRA_MESSAGE, message.getUid());
    context.startActivity(intent);
}
/**
 * Launches the compose screen as a forward of the given message.
 *
 * @param context caller's context, used to start the activity
 * @param account account the source message belongs to
 * @param message message being forwarded
 */
public static void actionForward(Context context, Account account, Message message) {
    Intent intent = new Intent(context, MessageCompose.class);
    intent.setAction(ACTION_FORWARD);
    intent.putExtra(EXTRA_ACCOUNT, account);
    intent.putExtra(EXTRA_FOLDER, message.getFolder().getName());
    intent.putExtra(EXTRA_MESSAGE, message.getUid());
    context.startActivity(intent);
}
/**
 * Continues composition of an existing draft. In this mode, Save replaces the
 * draft in its folder with the updated version and Discard deletes it.
 *
 * @param context caller's context, used to start the activity
 * @param account account the draft belongs to
 * @param message the draft message to edit
 */
public static void actionEditDraft(Context context, Account account, Message message) {
    Intent intent = new Intent(context, MessageCompose.class);
    intent.setAction(ACTION_EDIT_DRAFT);
    intent.putExtra(EXTRA_ACCOUNT, account);
    intent.putExtra(EXTRA_FOLDER, message.getFolder().getName());
    intent.putExtra(EXTRA_MESSAGE, message.getUid());
    context.startActivity(intent);
}
/**
 * Builds the compose UI and seeds its fields from the launching Intent:
 * mailto: links (ACTION_VIEW / ACTION_SENDTO), share intents (ACTION_SEND),
 * and our own reply / reply-all / forward / edit-draft actions.
 *
 * Fixes over the previous revision:
 * - single-element EXTRA_EMAIL / EXTRA_CC / EXTRA_BCC arrays were silently
 *   dropped (the join only ran when the array had MORE than one entry);
 *   joinAddresses() now handles every non-empty array.
 * - duplicate handling of EXTRA_SUBJECT / EXTRA_TEXT removed (the second,
 *   unconditional setText always won anyway).
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
    setContentView(R.layout.message_compose);
    mAddressAdapter = new EmailAddressAdapter(this);
    mAddressValidator = new EmailAddressValidator();
    mFromView = (TextView)findViewById(R.id.from);
    mToView = (MultiAutoCompleteTextView)findViewById(R.id.to);
    mCcView = (MultiAutoCompleteTextView)findViewById(R.id.cc);
    mBccView = (MultiAutoCompleteTextView)findViewById(R.id.bcc);
    mSubjectView = (EditText)findViewById(R.id.subject);
    EditText upperSignature = (EditText)findViewById(R.id.upper_signature);
    EditText lowerSignature = (EditText)findViewById(R.id.lower_signature);
    mMessageContentView = (EditText)findViewById(R.id.message_content);
    mAttachments = (LinearLayout)findViewById(R.id.attachments);
    mQuotedTextBar = findViewById(R.id.quoted_text_bar);
    mQuotedTextDelete = (ImageButton)findViewById(R.id.quoted_text_delete);
    mQuotedText = (EditText)findViewById(R.id.quoted_text);
    // Any edit to a recipient/subject/body field marks the draft dirty.
    TextWatcher watcher = new TextWatcher() {
        public void beforeTextChanged(CharSequence s, int start,
                int before, int after) { }
        public void onTextChanged(CharSequence s, int start,
                int before, int count) {
            mDraftNeedsSaving = true;
        }
        public void afterTextChanged(android.text.Editable s) { }
    };
    // Signature edits additionally record that the signature diverged from
    // the identity's stored one.
    TextWatcher sigwatcher = new TextWatcher() {
        public void beforeTextChanged(CharSequence s, int start,
                int before, int after) { }
        public void onTextChanged(CharSequence s, int start,
                int before, int count) {
            mDraftNeedsSaving = true;
            mSignatureChanged = true;
        }
        public void afterTextChanged(android.text.Editable s) { }
    };
    mToView.addTextChangedListener(watcher);
    mCcView.addTextChangedListener(watcher);
    mBccView.addTextChangedListener(watcher);
    mSubjectView.addTextChangedListener(watcher);
    mMessageContentView.addTextChangedListener(watcher);
    /*
     * We set this to invisible by default. Other methods will turn it back on if it's
     * needed.
     */
    mQuotedTextBar.setVisibility(View.GONE);
    mQuotedText.setVisibility(View.GONE);
    mQuotedTextDelete.setOnClickListener(this);
    mFromView.setVisibility(View.GONE);
    mToView.setAdapter(mAddressAdapter);
    mToView.setTokenizer(new Rfc822Tokenizer());
    mToView.setValidator(mAddressValidator);
    mCcView.setAdapter(mAddressAdapter);
    mCcView.setTokenizer(new Rfc822Tokenizer());
    mCcView.setValidator(mAddressValidator);
    mBccView.setAdapter(mAddressAdapter);
    mBccView.setTokenizer(new Rfc822Tokenizer());
    mBccView.setValidator(mAddressValidator);
    mSubjectView.setOnFocusChangeListener(this);
    if (savedInstanceState != null) {
        /*
         * This data gets used in onCreate, so grab it here instead of onRestoreInstanceState
         */
        mSourceMessageProcessed = savedInstanceState.getBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, false);
    }
    Intent intent = getIntent();
    String action = intent.getAction();
    if (Intent.ACTION_VIEW.equals(action) || Intent.ACTION_SENDTO.equals(action)) {
        /*
         * Someone has clicked a mailto: link. The address is in the URI.
         */
        mAccount = Preferences.getPreferences(this).getDefaultAccount();
        if (mAccount == null) {
            /*
             * There are no accounts set up. This should not have happened. Prompt the
             * user to set up an account as an acceptable bailout.
             */
            startActivity(new Intent(this, Accounts.class));
            mDraftNeedsSaving = false;
            finish();
            return;
        }
        if (intent.getData() != null) {
            Uri uri = intent.getData();
            try {
                if (uri.getScheme().equalsIgnoreCase("mailto")) {
                    Address[] addresses = Address.parse(uri.getSchemeSpecificPart());
                    addAddresses(mToView, addresses);
                }
            }
            catch (Exception e) {
                /*
                 * If we can't extract any information from the URI it's okay. They can
                 * still compose a message.
                 */
            }
        }
    }
    else if (Intent.ACTION_SEND.equals(action)) {
        /*
         * Someone is trying to compose an email with an attachment, probably Pictures.
         * The Intent should contain an EXTRA_STREAM with the data to attach.
         */
        mAccount = Preferences.getPreferences(this).getDefaultAccount();
        if (mAccount == null) {
            startActivity(new Intent(this, Accounts.class));
            mDraftNeedsSaving = false;
            finish();
            return;
        }
        mSubjectView.setText(intent.getStringExtra(Intent.EXTRA_SUBJECT));
        mMessageContentView.setText(intent.getStringExtra(Intent.EXTRA_TEXT));
        String type = intent.getType();
        Uri stream = (Uri) intent.getParcelableExtra(Intent.EXTRA_STREAM);
        if (stream != null && type != null) {
            if (MimeUtility.mimeTypeMatches(type, Email.ACCEPTABLE_ATTACHMENT_SEND_TYPES)) {
                addAttachment(stream);
            }
        }
        // EXTRA_EMAIL / EXTRA_BCC / EXTRA_CC carry recipient arrays.
        mToView.setText(joinAddresses(intent.getStringArrayExtra(Intent.EXTRA_EMAIL)));
        mBccView.setText(joinAddresses(intent.getStringArrayExtra(Intent.EXTRA_BCC)));
        mCcView.setText(joinAddresses(intent.getStringArrayExtra(Intent.EXTRA_CC)));
    } else {
        // One of our own actions: reply / reply-all / forward / edit-draft.
        mAccount = (Account) intent.getSerializableExtra(EXTRA_ACCOUNT);
        mFolder = intent.getStringExtra(EXTRA_FOLDER);
        mSourceMessageUid = intent.getStringExtra(EXTRA_MESSAGE);
    }
    if (mIdentity == null) {
        mIdentity = mAccount.getIdentity(0);
    }
    // The signature field lives either above or below the quoted text,
    // depending on the account preference; hide the one we don't use.
    if (mAccount.isSignatureBeforeQuotedText()) {
        mSignatureView = upperSignature;
        lowerSignature.setVisibility(View.GONE);
    }
    else {
        mSignatureView = lowerSignature;
        upperSignature.setVisibility(View.GONE);
    }
    mSignatureView.addTextChangedListener(sigwatcher);
    updateFrom();
    updateSignature();
    Log.d(Email.LOG_TAG, "action = " + action + ", mAccount = " + mAccount + ", mFolder = " + mFolder + ", mSourceMessageUid = " + mSourceMessageUid);
    if ((ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) && mAccount != null && mFolder != null && mSourceMessageUid != null) {
        Log.d(Email.LOG_TAG, "Setting message ANSWERED flag to true");
        // TODO: Really, we should wait until we send the message, but that would require saving the original
        // message info along with a Draft copy, in case it is left in Drafts for a while before being sent
        MessagingController.getInstance(getApplication()).setMessageFlag(mAccount, mFolder, mSourceMessageUid, Flag.ANSWERED, true);
    }
    if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_FORWARD.equals(action) || ACTION_EDIT_DRAFT.equals(action)) {
        /*
         * If we need to load the message we add ourself as a message listener here
         * so we can kick it off. Normally we add in onResume but we don't
         * want to reload the message every time the activity is resumed.
         * There is no harm in adding twice.
         */
        MessagingController.getInstance(getApplication()).addListener(mListener);
        MessagingController.getInstance(getApplication()).loadMessageForView(mAccount, mFolder, mSourceMessageUid, null);
    }
    if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_EDIT_DRAFT.equals(action)) {
        // Recipients are pre-filled; put the cursor straight in the body.
        mMessageContentView.requestFocus();
    }
    if (!ACTION_EDIT_DRAFT.equals(action)) {
        addAddress(mBccView, new Address(mAccount.getAlwaysBcc(), ""));
    }
    updateTitle();
}

/**
 * Joins the given addresses with ", " for display in a recipient field.
 * Returns the empty string for a null or empty array.
 */
private static String joinAddresses(String[] addresses) {
    if (addresses == null || addresses.length == 0) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < addresses.length; i++) {
        if (i > 0) {
            joined.append(", ");
        }
        joined.append(addresses[i]);
    }
    return joined.toString();
}
/**
 * Re-registers the messaging listener whenever the activity returns to the
 * foreground. (Added the missing {@code @Override} annotation.)
 */
@Override
public void onResume() {
    super.onResume();
    MessagingController.getInstance(getApplication()).addListener(mListener);
}
/**
 * Saves any pending draft edits and detaches the messaging listener when the
 * activity leaves the foreground. (Added the missing {@code @Override}.)
 */
@Override
public void onPause() {
    super.onPause();
    saveIfNeeded();
    MessagingController.getInstance(getApplication()).removeListener(mListener);
}
/**
 * The framework handles most of the fields, but we need to handle stuff that we
 * dynamically show and hide:
 * Attachment list,
 * Cc field,
 * Bcc field,
 * Quoted text,
 */
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// Persist pending edits as a draft before the process may be killed.
saveIfNeeded();
// Attachments are saved as their content URIs and re-resolved in
// onRestoreInstanceState via addAttachment().
ArrayList<Uri> attachments = new ArrayList<Uri>();
for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) {
View view = mAttachments.getChildAt(i);
Attachment attachment = (Attachment) view.getTag();
attachments.add(attachment.uri);
}
outState.putParcelableArrayList(STATE_KEY_ATTACHMENTS, attachments);
outState.putBoolean(STATE_KEY_CC_SHOWN, mCcView.getVisibility() == View.VISIBLE);
outState.putBoolean(STATE_KEY_BCC_SHOWN, mBccView.getVisibility() == View.VISIBLE);
outState.putBoolean(STATE_KEY_QUOTED_TEXT_SHOWN, mQuotedTextBar.getVisibility() == View.VISIBLE);
outState.putBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, mSourceMessageProcessed);
outState.putString(STATE_KEY_DRAFT_UID, mDraftUid);
outState.putSerializable(STATE_IDENTITY, mIdentity);
outState.putBoolean(STATE_IDENTITY_CHANGED, mIdentityChanged);
}
/**
 * Restores the dynamic parts of the UI saved in onSaveInstanceState:
 * attachment rows, Cc/Bcc/quoted-text visibility, draft uid, and identity.
 */
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
// Rebuild the attachment rows from the saved content URIs.
ArrayList<Parcelable> attachments = (ArrayList<Parcelable>) savedInstanceState.getParcelableArrayList(STATE_KEY_ATTACHMENTS);
mAttachments.removeAllViews();
for (Parcelable p : attachments) {
Uri uri = (Uri) p;
addAttachment(uri);
}
// Restore visibility of the optional sections.
mCcView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_CC_SHOWN) ? View.VISIBLE : View.GONE);
mBccView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_BCC_SHOWN) ? View.VISIBLE : View.GONE);
mQuotedTextBar.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE);
mQuotedText.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE);
mDraftUid = savedInstanceState.getString(STATE_KEY_DRAFT_UID);
mIdentity = (Account.Identity)savedInstanceState.getSerializable(STATE_IDENTITY);
mIdentityChanged = savedInstanceState.getBoolean(STATE_IDENTITY_CHANGED);
updateFrom();
updateSignature();
// Restoring state should not count as a user edit.
mDraftNeedsSaving = false;
}
/**
 * Shows the subject in the title bar, or the generic compose title while the
 * subject is still empty.
 */
private void updateTitle() {
    CharSequence subject = mSubjectView.getText();
    if (subject.length() > 0) {
        setTitle(subject.toString());
    } else {
        setTitle(R.string.compose_title);
    }
}
/**
 * Refreshes the title when a watched field (the subject view, registered in
 * onCreate) loses focus.
 */
public void onFocusChange(View view, boolean focused) {
    if (focused) {
        return;
    }
    updateTitle();
}
/**
 * Appends every address in the array to the given recipient field.
 * A null array is a no-op.
 */
private void addAddresses(MultiAutoCompleteTextView view, Address[] addresses) {
    if (addresses == null) {
        return;
    }
    for (int i = 0; i < addresses.length; i++) {
        addAddress(view, addresses[i]);
    }
}
// Appends a single address (followed by ", ") to a recipient field, relying
// on Address's string conversion for the display form.
private void addAddress(MultiAutoCompleteTextView view, Address address) {
view.append(address + ", ");
}
/**
 * Parses the text currently in a recipient field into Address objects.
 */
private Address[] getAddresses(MultiAutoCompleteTextView view) {
    String text = view.getText().toString().trim();
    return Address.parseUnencoded(text);
}
/**
 * Builds a MimeMessage from everything the user has entered in the UI.
 *
 * @param appendSig whether to append the signature to the body text; the
 *        caller passes false when saving a draft (see sendOrSaveMessage).
 * @return the assembled message, with a multipart body when attachments exist.
 * @throws MessagingException if a header or body part cannot be set.
 */
private MimeMessage createMessage(boolean appendSig) throws MessagingException {
MimeMessage message = new MimeMessage();
message.setSentDate(new Date());
// The From address comes from the currently selected identity.
Address from = new Address(mIdentity.getEmail(), mIdentity.getName());
message.setFrom(from);
message.setRecipients(RecipientType.TO, getAddresses(mToView));
message.setRecipients(RecipientType.CC, getAddresses(mCcView));
message.setRecipients(RecipientType.BCC, getAddresses(mBccView));
message.setSubject(mSubjectView.getText().toString());
// XXX TODO - not sure why this won't add header
// message.setHeader("X-User-Agent", getString(R.string.message_header_mua));
/*
 * Build the Body that will contain the text of the message. We'll decide where to
 * include it later.
 */
String text = mMessageContentView.getText().toString();
// Signature placement mirrors the account preference: either between the
// body and the quoted text, or after the quoted text.
if (appendSig && mAccount.isSignatureBeforeQuotedText()) {
text = appendSignature(text);
}
// Quoted text is only included while its bar is visible (the user may have
// deleted it via the quoted-text delete button).
if (mQuotedTextBar.getVisibility() == View.VISIBLE) {
text += "\n" + mQuotedText.getText().toString();
}
if (appendSig && mAccount.isSignatureBeforeQuotedText() == false) {
text = appendSignature(text);
}
TextBody body = new TextBody(text);
if (mAttachments.getChildCount() > 0) {
/*
 * The message has attachments that need to be included. First we add the part
 * containing the text that will be sent and then we include each attachment.
 */
MimeMultipart mp;
mp = new MimeMultipart();
mp.addBodyPart(new MimeBodyPart(body, "text/plain"));
for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) {
// Each attachment row's tag holds its Attachment metadata (set in addAttachment).
Attachment attachment = (Attachment) mAttachments.getChildAt(i).getTag();
MimeBodyPart bp = new MimeBodyPart( new LocalStore.LocalAttachmentBody(attachment.uri, getApplication()));
bp.setHeader(MimeHeader.HEADER_CONTENT_TYPE, String.format("%s;\n name=\"%s\"", attachment.contentType, attachment.name));
bp.setHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING, "base64");
bp.setHeader(MimeHeader.HEADER_CONTENT_DISPOSITION, String.format("attachment;\n filename=\"%s\"", attachment.name));
mp.addBodyPart(bp);
}
message.setBody(mp);
}
else {
/*
 * No attachments to include, just stick the text body in the message and call
 * it good.
 */
message.setBody(body);
}
return message;
}
/**
 * Appends the signature field's contents (preceded by a newline) to the given
 * text; returns the text unchanged when the signature field is empty.
 */
private String appendSignature(String text) {
    String signature = mSignatureView.getText().toString();
    if (signature.length() == 0) {
        return text;
    }
    return text + "\n" + signature;
}
/**
 * Builds the message from the current UI state and either queues it for
 * sending or saves it to the drafts folder.
 *
 * @param save true to save a draft, false to send.
 */
private void sendOrSaveMessage(boolean save) {
/*
 * Create the message from all the data the user has entered.
 */
MimeMessage message;
try {
message = createMessage(!save); // append the signature only when sending, not when saving a draft
}
catch (MessagingException me) {
Log.e(Email.LOG_TAG, "Failed to create new message for send or save.", me);
throw new RuntimeException("Failed to create a new message for send or save.", me);
}
if (save) {
/*
 * Save a draft
 */
if (mDraftUid != null) {
// Re-saving a draft from this session: reuse the uid so the previous
// copy is overwritten rather than duplicated.
message.setUid(mDraftUid);
}
else if (ACTION_EDIT_DRAFT.equals(getIntent().getAction())) {
/*
 * We're saving a previously saved draft, so update the new message's uid
 * to the old message's uid.
 */
message.setUid(mSourceMessageUid);
}
// Record the body length (and, when they changed, signature / identity
// name and email) as ":"-separated base64 fields in a header, so that
// re-editing the draft can split body from quoted text and restore the
// identity (decoded in processSourceMessage).
String k9identity = Utility.base64Encode("" + mMessageContentView.getText().toString().length());
if (mIdentityChanged || mSignatureChanged)
{
String signature = mSignatureView.getText().toString();
k9identity += ":" + Utility.base64Encode(signature) ;
if (mIdentityChanged)
{
String name = mIdentity.getName();
String email = mIdentity.getEmail();
k9identity += ":" + Utility.base64Encode(name) + ":" + Utility.base64Encode(email);
}
}
Log.d(Email.LOG_TAG, "Saving identity: " + k9identity);
message.setHeader(Email.K9MAIL_IDENTITY, k9identity);
MessagingController.getInstance(getApplication()).saveDraft(mAccount, message);
mDraftUid = message.getUid();
// Don't display the toast if the user is just changing the orientation
if ((getChangingConfigurations() & ActivityInfo.CONFIG_ORIENTATION) == 0) {
mHandler.sendEmptyMessage(MSG_SAVED_DRAFT);
}
}
else {
/*
 * Send the message
 * TODO Is it possible for us to be editing a draft with a null source message? Don't
 * think so. Could probably remove below check.
 */
if (ACTION_EDIT_DRAFT.equals(getIntent().getAction()) && mSourceMessageUid != null) {
/*
 * We're sending a previously saved draft, so delete the old draft first.
 */
MessagingController.getInstance(getApplication()).deleteMessage( mAccount, mFolder, mSourceMessage, null);
}
MessagingController.getInstance(getApplication()).sendMessage(mAccount, message, null);
}
}
/**
 * Saves the message as a draft, but only when something changed since the
 * last save. Clears the dirty flag before delegating to sendOrSaveMessage.
 */
private void saveIfNeeded() {
    if (mDraftNeedsSaving) {
        mDraftNeedsSaving = false;
        sendOrSaveMessage(true);
    }
}
/**
 * Validates that at least one recipient is present in To/Cc/Bcc, then sends
 * the message and closes the activity.
 */
private void onSend() {
    boolean hasRecipient = getAddresses(mToView).length > 0
            || getAddresses(mCcView).length > 0
            || getAddresses(mBccView).length > 0;
    if (!hasRecipient) {
        mToView.setError(getString(R.string.message_compose_error_no_recipients));
        Toast.makeText(this, getString(R.string.message_compose_error_no_recipients), Toast.LENGTH_LONG).show();
        return;
    }
    sendOrSaveMessage(false);
    mDraftNeedsSaving = false;
    finish();
}
/**
 * Discards the message: when editing an existing draft, deletes that draft,
 * then notifies the user and closes without saving.
 * (Collapsed the previously duplicated {@code mSourceMessageUid != null}
 * checks into a single condition.)
 */
private void onDiscard() {
    if (mSourceMessageUid != null && ACTION_EDIT_DRAFT.equals(getIntent().getAction())) {
        MessagingController.getInstance(getApplication()).deleteMessage( mAccount, mFolder, mSourceMessage, null);
    }
    mHandler.sendEmptyMessage(MSG_DISCARDED_DRAFT);
    mDraftNeedsSaving = false;
    finish();
}
// Saves the draft (if dirty) and closes the compose screen.
private void onSave() {
saveIfNeeded();
finish();
}
// Reveals the Cc and Bcc fields, which are hidden until the user asks for them.
private void onAddCcBcc() {
mCcView.setVisibility(View.VISIBLE);
mBccView.setVisibility(View.VISIBLE);
}
/**
 * Kick off a picker for whatever kind of MIME types we'll accept and let
 * Android take over. The result arrives in onActivityResult with request
 * code ACTIVITY_REQUEST_PICK_ATTACHMENT.
 */
private void onAddAttachment() {
    Intent picker = new Intent(Intent.ACTION_GET_CONTENT);
    picker.addCategory(Intent.CATEGORY_OPENABLE);
    picker.setType(Email.ACCEPTABLE_ATTACHMENT_SEND_TYPES[0]);
    startActivityForResult(Intent.createChooser(picker, null), ACTIVITY_REQUEST_PICK_ATTACHMENT);
}
// Convenience overload: size and name are unknown here and will be resolved
// from the content provider's metadata.
private void addAttachment(Uri uri) {
addAttachment(uri, -1, null);
}
/**
 * Adds an attachment row to the attachments list for the given content URI.
 *
 * @param uri  content URI of the attachment.
 * @param size attachment size in bytes, or -1 to query the provider for it.
 * @param name display name, or null to query the provider for it.
 */
private void addAttachment(Uri uri, int size, String name) {
ContentResolver contentResolver = getContentResolver();
String contentType = contentResolver.getType(uri);
if (contentType == null) {
contentType = "";
}
Attachment attachment = new Attachment();
attachment.name = name;
attachment.contentType = contentType;
attachment.size = size;
attachment.uri = uri;
// Fill in missing metadata (display name / size) from the content provider.
if (attachment.size == -1 || attachment.name == null) {
Cursor metadataCursor = contentResolver.query( uri, new String[]{ OpenableColumns.DISPLAY_NAME, OpenableColumns.SIZE }, null, null, null);
if (metadataCursor != null) {
try {
if (metadataCursor.moveToFirst()) {
if (attachment.name == null) {
attachment.name = metadataCursor.getString(0);
}
if (attachment.size == -1) {
attachment.size = metadataCursor.getInt(1);
}
}
} finally {
// Always release the cursor, even if reading the row throws.
metadataCursor.close();
}
}
}
// Last resort: use the final path segment of the URI as the display name.
if (attachment.name == null) {
attachment.name = uri.getLastPathSegment();
}
// Inflate a row; the row's tag carries the Attachment, and the delete
// button's tag points back at the row so onClick can remove it.
View view = getLayoutInflater().inflate( R.layout.message_compose_attachment, mAttachments, false);
TextView nameView = (TextView)view.findViewById(R.id.attachment_name);
ImageButton delete = (ImageButton)view.findViewById(R.id.attachment_delete);
nameView.setText(attachment.name);
delete.setOnClickListener(this);
delete.setTag(view);
view.setTag(attachment);
mAttachments.addView(view);
}
/**
 * Handles results from the attachment picker and the identity chooser.
 * Anything other than RESULT_OK with data is ignored.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (resultCode != RESULT_OK || data == null) {
        return;
    }
    switch (requestCode) {
        case ACTIVITY_REQUEST_PICK_ATTACHMENT:
            addAttachment(data.getData());
            mDraftNeedsSaving = true;
            break;
        case ACTIVITY_CHOOSE_IDENTITY:
            onIdentityChosen(data);
            break;
    }
}
/**
 * Applies the identity returned by the ChooseIdentity activity.
 * (Removed a stray empty statement and added a guard against a missing
 * extras bundle, which previously would have thrown an NPE.)
 */
private void onIdentityChosen(Intent intent) {
    Bundle bundle = intent.getExtras();
    if (bundle != null) {
        switchToIdentity((Account.Identity) bundle.getSerializable(ChooseIdentity.EXTRA_IDENTITY));
    }
}
// Makes the given identity the active one, marks the draft dirty, and
// refreshes the From line and the signature field to match it.
private void switchToIdentity(Account.Identity identity)
{
mIdentity = identity;
mIdentityChanged = true;
mDraftNeedsSaving = true;
updateFrom();
updateSignature();
}
/**
 * Renders the active identity in the From line. The line only becomes
 * visible once the user has switched away from the default identity.
 */
private void updateFrom() {
    if (mIdentityChanged) {
        mFromView.setVisibility(View.VISIBLE);
    }
    String label = getString(R.string.message_view_from_format, mIdentity.getName(), mIdentity.getEmail());
    mFromView.setText(label);
}
// Loads the active identity's signature into the signature field.
private void updateSignature()
{
mSignatureView.setText(mIdentity.getSignature());
}
/**
 * Click handler for the per-attachment delete buttons and the quoted-text
 * delete button.
 */
public void onClick(View view) {
    int id = view.getId();
    if (id == R.id.attachment_delete) {
        /*
         * The delete button's tag was set to the attachment row it belongs
         * to; we use the tag rather than getParent() because the row layout
         * is complex and could change in the future.
         */
        mAttachments.removeView((View) view.getTag());
        mDraftNeedsSaving = true;
    } else if (id == R.id.quoted_text_delete) {
        mQuotedTextBar.setVisibility(View.GONE);
        mQuotedText.setVisibility(View.GONE);
        mDraftNeedsSaving = true;
    }
}
/**
 * Dispatches options-menu selections to the matching action; unknown items
 * fall through to the superclass.
 */
public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == R.id.send) {
        onSend();
    } else if (id == R.id.save) {
        onSave();
    } else if (id == R.id.discard) {
        onDiscard();
    } else if (id == R.id.add_cc_bcc) {
        onAddCcBcc();
    } else if (id == R.id.add_attachment) {
        onAddAttachment();
    } else if (id == R.id.choose_identity) {
        onChooseIdentity();
    } else {
        return super.onOptionsItemSelected(item);
    }
    return true;
}
/**
 * Opens the identity chooser when the account has more than one identity;
 * otherwise tells the user there is nothing to choose from.
 */
private void onChooseIdentity() {
    if (mAccount.getIdentities().size() <= 1) {
        Toast.makeText(this, getString(R.string.no_identities),
                Toast.LENGTH_LONG).show();
        return;
    }
    Intent intent = new Intent(this, ChooseIdentity.class);
    intent.putExtra(ChooseIdentity.EXTRA_ACCOUNT, mAccount);
    startActivityForResult(intent, ACTIVITY_CHOOSE_IDENTITY);
}
/**
 * Inflates the compose options menu. (Added the missing {@code @Override}.)
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    super.onCreateOptionsMenu(menu);
    getMenuInflater().inflate(R.menu.message_compose_option, menu);
    return true;
}
/**
 * Returns true if all attachments were able to be attached, otherwise returns false.
 */
// Recursively walks the MIME tree; any leaf part whose content-type carries a
// "name" parameter is treated as an attachment. Only locally-stored bodies
// (LocalAttachmentBody) can be re-attached; any other body makes this method
// report false so the caller can warn that attachments were skipped.
private boolean loadAttachments(Part part, int depth) throws MessagingException {
if (part.getBody() instanceof Multipart) {
Multipart mp = (Multipart) part.getBody();
boolean ret = true;
for (int i = 0, count = mp.getCount(); i < count; i++) {
if (!loadAttachments(mp.getBodyPart(i), depth + 1)) {
ret = false;
}
}
return ret;
} else {
String contentType = MimeUtility.unfoldAndDecode(part.getContentType());
String name = MimeUtility.getHeaderParameter(contentType, "name");
if (name != null) {
Body body = part.getBody();
if (body != null && body instanceof LocalAttachmentBody) {
final Uri uri = ((LocalAttachmentBody) body).getContentUri();
// Hop to the UI thread; addAttachment touches views.
mHandler.post(new Runnable() {
public void run() {
addAttachment(uri);
}
});
}
else {
return false;
}
}
return true;
}
}
/**
 * Pull out the parts of the now loaded source message and apply them to the new message
 * depending on the type of message being composed.
 * @param message
 */
private void processSourceMessage(Message message) {
String action = getIntent().getAction();
if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) {
try {
// Prefix "Re: " unless the subject already carries it (case-insensitive).
if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("re:")) {
mSubjectView.setText("Re: " + message.getSubject());
}
else {
mSubjectView.setText(message.getSubject());
}
/*
 * If a reply-to was included with the message use that, otherwise use the from
 * or sender address.
 */
Address[] replyToAddresses;
if (message.getReplyTo().length > 0) {
addAddresses(mToView, replyToAddresses = message.getReplyTo());
}
else {
addAddresses(mToView, replyToAddresses = message.getFrom());
}
// Quote the first text/plain part, prefixing every line with ">".
Part part = MimeUtility.findFirstPartByMimeType(mSourceMessage,
"text/plain");
if (part != null) {
String quotedText = String.format(
getString(R.string.message_compose_reply_header_fmt),
Address.toString(mSourceMessage.getFrom()));
quotedText += MimeUtility.getTextFromPart(part).replaceAll("(?m)^", ">");
mQuotedText.setText(quotedText);
mQuotedTextBar.setVisibility(View.VISIBLE);
mQuotedText.setVisibility(View.VISIBLE);
}
// If the message was addressed to one of this account's non-default
// identities (checked on To first, then Cc), reply from that identity.
if (ACTION_REPLY_ALL.equals(action) || ACTION_REPLY.equals(action))
{
Account.Identity useIdentity = null;
for (Address address : message.getRecipients(RecipientType.TO)) {
Account.Identity identity = mAccount.findIdentity(address);
if (identity != null) {
useIdentity = identity;
break;
}
}
if (useIdentity == null)
{
if (message.getRecipients(RecipientType.CC).length > 0) {
for (Address address : message.getRecipients(RecipientType.CC)) {
Account.Identity identity = mAccount.findIdentity(address);
if (identity != null) {
useIdentity = identity;
break;
}
}
}
}
if (useIdentity != null)
{
Account.Identity defaultIdentity = mAccount.getIdentity(0);
if (useIdentity != defaultIdentity)
{
switchToIdentity(useIdentity);
}
}
}
// Reply-all: copy the remaining recipients, skipping our own identities
// and (for Cc) addresses already present on the To line.
if (ACTION_REPLY_ALL.equals(action)) {
for (Address address : message.getRecipients(RecipientType.TO)) {
Account.Identity identity = mAccount.findIdentity(address); // NOTE(review): unused local — candidate for removal
if (!mAccount.isAnIdentity(address)) {
addAddress(mToView, address);
}
}
if (message.getRecipients(RecipientType.CC).length > 0) {
for (Address address : message.getRecipients(RecipientType.CC)) {
if (!mAccount.isAnIdentity(address) && !Utility.arrayContains(replyToAddresses, address)) {
addAddress(mCcView, address);
}
}
mCcView.setVisibility(View.VISIBLE);
}
}
}
catch (MessagingException me) {
/*
 * This really should not happen at this point but if it does it's okay.
 * The user can continue composing their message.
 */
}
}
else if (ACTION_FORWARD.equals(action)) {
try {
// Prefix "Fwd: " unless the subject already carries it.
if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("fwd:")) {
mSubjectView.setText("Fwd: " + message.getSubject());
}
else {
mSubjectView.setText(message.getSubject());
}
// Prefer the plain-text part; fall back to HTML.
Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain");
if (part == null) {
part = MimeUtility.findFirstPartByMimeType(message, "text/html");
}
if (part != null) {
String quotedText = MimeUtility.getTextFromPart(part);
if (quotedText != null) {
String text = String.format(
getString(R.string.message_compose_fwd_header_fmt),
mSourceMessage.getSubject(),
Address.toString(mSourceMessage.getFrom()),
Address.toString(
mSourceMessage.getRecipients(RecipientType.TO)),
Address.toString(
mSourceMessage.getRecipients(RecipientType.CC)));
text += quotedText;
mQuotedText.setText(text);
mQuotedTextBar.setVisibility(View.VISIBLE);
mQuotedText.setVisibility(View.VISIBLE);
}
}
// Re-attach the original attachments, but only on the first load
// (mSourceMessageProcessed guards against duplicates after a restore).
if (!mSourceMessageProcessed) {
if (!loadAttachments(message, 0)) {
mHandler.sendEmptyMessage(MSG_SKIPPED_ATTACHMENTS);
}
}
}
catch (MessagingException me) {
/*
 * This really should not happen at this point but if it does it's okay.
 * The user can continue composing their message.
 */
}
}
else if (ACTION_EDIT_DRAFT.equals(action)) {
try {
mSubjectView.setText(message.getSubject());
addAddresses(mToView, message.getRecipients(RecipientType.TO));
if (message.getRecipients(RecipientType.CC).length > 0) {
addAddresses(mCcView, message.getRecipients(RecipientType.CC));
mCcView.setVisibility(View.VISIBLE);
}
if (message.getRecipients(RecipientType.BCC).length > 0) {
addAddresses(mBccView, message.getRecipients(RecipientType.BCC));
mBccView.setVisibility(View.VISIBLE);
}
if (!mSourceMessageProcessed) {
loadAttachments(message, 0);
}
// Decode the identity header written by sendOrSaveMessage(): a ":"
// separated list of base64 fields — body length, then optionally
// signature, name and email.
Integer bodyLength = null;
String[] k9identities = message.getHeader(Email.K9MAIL_IDENTITY);
if (k9identities != null && k9identities.length > 0)
{
String k9identity = k9identities[0];
if (k9identity != null)
{
Log.d(Email.LOG_TAG, "Got a saved identity: " + k9identity);
StringTokenizer tokens = new StringTokenizer(k9identity, ":", false);
String bodyLengthS = null;
String name = null;
String email = null;
String signature = null;
if (tokens.hasMoreTokens())
{
bodyLengthS = Utility.base64Decode(tokens.nextToken());
try
{
bodyLength = Integer.parseInt(bodyLengthS);
}
catch (Exception e)
{
Log.e(Email.LOG_TAG, "Unable to parse bodyLength '" + bodyLengthS + "'");
}
}
if (tokens.hasMoreTokens())
{
signature = Utility.base64Decode(tokens.nextToken());
}
if (tokens.hasMoreTokens())
{
name = Utility.base64Decode(tokens.nextToken());
}
if (tokens.hasMoreTokens())
{
email = Utility.base64Decode(tokens.nextToken());
}
// Rebuild the identity the draft was written with, falling back to
// the current identity for any field the header did not carry.
Account.Identity newIdentity= mAccount.new Identity();
if (signature != null)
{
newIdentity.setSignature(signature);
mSignatureChanged = true;
}
else
{
newIdentity.setSignature(mIdentity.getSignature());
}
if (name != null)
{
newIdentity.setName(name);
mIdentityChanged = true;
}
else
{
newIdentity.setName(mIdentity.getName());
}
if (email != null)
{
newIdentity.setEmail(email);
mIdentityChanged = true;
}
else
{
newIdentity.setEmail(mIdentity.getEmail());
}
mIdentity = newIdentity;
updateSignature();
updateFrom();
}
}
// Split the stored text back into body and quoted text using the saved
// body length from the identity header.
Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain");
if (part != null) {
String text = MimeUtility.getTextFromPart(part);
if (bodyLength != null && bodyLength + 1 < text.length()) // + 1 to get rid of the newline we added when saving the draft
{
String bodyText = text.substring(0, bodyLength);
String quotedText = text.substring(bodyLength + 1, text.length());
mMessageContentView.setText(bodyText);
mQuotedText.setText(quotedText);
mQuotedTextBar.setVisibility(View.VISIBLE);
mQuotedText.setVisibility(View.VISIBLE);
}
else
{
mMessageContentView.setText(text);
}
}
}
catch (MessagingException me) {
// TODO
}
}
// Guard against re-processing (and re-adding attachments) on later loads.
mSourceMessageProcessed = true;
mDraftNeedsSaving = false;
}
/**
 * MessagingController callbacks for loading the source message. Callbacks may
 * arrive off the UI thread, so UI work is forwarded through mHandler or
 * runOnUiThread.
 */
class Listener extends MessagingListener {
@Override
public void loadMessageForViewStarted(Account account, String folder, String uid) {
mHandler.sendEmptyMessage(MSG_PROGRESS_ON);
}
@Override
public void loadMessageForViewFinished(Account account, String folder, String uid, Message message) {
mHandler.sendEmptyMessage(MSG_PROGRESS_OFF);
}
@Override
public void loadMessageForViewBodyAvailable(Account account, String folder, String uid, final Message message) {
// Remember the loaded message and apply it to the compose fields on the UI thread.
mSourceMessage = message;
runOnUiThread(new Runnable() {
public void run() {
processSourceMessage(message);
}
});
}
@Override
public void loadMessageForViewFailed(Account account, String folder, String uid, final String message) {
mHandler.sendEmptyMessage(MSG_PROGRESS_OFF);
// TODO show network error
}
@Override
public void messageUidChanged( Account account, String folder, String oldUid, String newUid) {
// Keep our cached uids (draft, source message) in sync when the store
// assigns a new uid. The folder match also accepts the drafts folder
// when mFolder is null.
if (account.equals(mAccount) && (folder.equals(mFolder) || (mFolder == null && folder.equals(mAccount.getDraftsFolderName())))) {
if (oldUid.equals(mDraftUid)) {
mDraftUid = newUid;
}
if (oldUid.equals(mSourceMessageUid)) {
mSourceMessageUid = newUid;
}
if (mSourceMessage != null && (oldUid.equals(mSourceMessage.getUid()))) {
mSourceMessage.setUid(newUid);
}
}
}
}
}
|
package com.android.email.activity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.StringTokenizer;
import com.android.email.K9Activity;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Parcelable;
import android.provider.OpenableColumns;
import android.text.TextWatcher;
import android.text.util.Rfc822Tokenizer;
import android.util.Config;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.view.View.OnFocusChangeListener;
import android.webkit.WebView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.MultiAutoCompleteTextView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.AutoCompleteTextView.Validator;
import com.android.email.Account;
import com.android.email.Email;
import com.android.email.EmailAddressAdapter;
import com.android.email.EmailAddressValidator;
import com.android.email.MessagingController;
import com.android.email.MessagingListener;
import com.android.email.Preferences;
import com.android.email.R;
import com.android.email.Utility;
import com.android.email.mail.Address;
import com.android.email.mail.Body;
import com.android.email.mail.Flag;
import com.android.email.mail.Message;
import com.android.email.mail.MessagingException;
import com.android.email.mail.Multipart;
import com.android.email.mail.Part;
import com.android.email.mail.Message.RecipientType;
import com.android.email.mail.internet.MimeBodyPart;
import com.android.email.mail.internet.MimeHeader;
import com.android.email.mail.internet.MimeMessage;
import com.android.email.mail.internet.MimeMultipart;
import com.android.email.mail.internet.MimeUtility;
import com.android.email.mail.internet.TextBody;
import com.android.email.mail.store.LocalStore;
import com.android.email.mail.store.LocalStore.LocalAttachmentBody;
public class MessageCompose extends K9Activity implements OnClickListener, OnFocusChangeListener {
// Intent actions this activity handles, in addition to the generic
// VIEW/SENDTO/SEND actions checked in onCreate().
private static final String ACTION_REPLY = "com.android.email.intent.action.REPLY";
private static final String ACTION_REPLY_ALL = "com.android.email.intent.action.REPLY_ALL";
private static final String ACTION_FORWARD = "com.android.email.intent.action.FORWARD";
private static final String ACTION_EDIT_DRAFT = "com.android.email.intent.action.EDIT_DRAFT";
// Intent extras used by the static action*() launchers below.
private static final String EXTRA_ACCOUNT = "account";
private static final String EXTRA_FOLDER = "folder";
private static final String EXTRA_MESSAGE = "message";
// Keys for saved instance state (see onSaveInstanceState / onRestoreInstanceState).
private static final String STATE_KEY_ATTACHMENTS =
    "com.android.email.activity.MessageCompose.attachments";
private static final String STATE_KEY_CC_SHOWN =
    "com.android.email.activity.MessageCompose.ccShown";
private static final String STATE_KEY_BCC_SHOWN =
    "com.android.email.activity.MessageCompose.bccShown";
private static final String STATE_KEY_QUOTED_TEXT_SHOWN =
    "com.android.email.activity.MessageCompose.quotedTextShown";
private static final String STATE_KEY_SOURCE_MESSAGE_PROCED =
    "com.android.email.activity.MessageCompose.stateKeySourceMessageProced";
private static final String STATE_KEY_DRAFT_UID =
    "com.android.email.activity.MessageCompose.draftUid";
private static final String STATE_IDENTITY_CHANGED =
    "com.android.email.activity.MessageCompose.identityChanged";
private static final String STATE_IDENTITY =
    "com.android.email.activity.MessageCompose.identity";
// Messages dispatched through mHandler to run UI work on the main thread.
private static final int MSG_PROGRESS_ON = 1;
private static final int MSG_PROGRESS_OFF = 2;
private static final int MSG_UPDATE_TITLE = 3;
private static final int MSG_SKIPPED_ATTACHMENTS = 4;
private static final int MSG_SAVED_DRAFT = 5;
private static final int MSG_DISCARDED_DRAFT = 6;
// Request codes for startActivityForResult().
private static final int ACTIVITY_REQUEST_PICK_ATTACHMENT = 1;
private static final int ACTIVITY_CHOOSE_IDENTITY = 2;
// Account and identity the message will be sent from.
private Account mAccount;
private Account.Identity mIdentity;
// True once the user has picked a non-default identity; persisted with the draft.
private boolean mIdentityChanged = false;
// True once the user has edited the signature text by hand.
private boolean mSignatureChanged = false;
// Folder and UID of the message being replied to / forwarded / edited; may be null.
private String mFolder;
private String mSourceMessageUid;
private Message mSourceMessage;
/**
 * Indicates that the source message has been processed at least once and should not
 * be processed on any subsequent loads. This protects us from adding attachments that
 * have already been added from the restore of the view state.
 */
private boolean mSourceMessageProcessed = false;
// Form widgets, bound in onCreate().
private TextView mFromView;
private MultiAutoCompleteTextView mToView;
private MultiAutoCompleteTextView mCcView;
private MultiAutoCompleteTextView mBccView;
private EditText mSubjectView;
// Points at either the upper or lower signature field, depending on
// Account.isSignatureBeforeQuotedText().
private EditText mSignatureView;
private EditText mMessageContentView;
private LinearLayout mAttachments;
private View mQuotedTextBar;
private ImageButton mQuotedTextDelete;
private EditText mQuotedText;
// Set whenever the user edits anything; checked by saveIfNeeded().
private boolean mDraftNeedsSaving = false;
/**
 * The draft uid of this message. This is used when saving drafts so that the same draft is
 * overwritten instead of being created anew. This property is null until the first save.
 */
private String mDraftUid;
/**
 * Marshals work onto the UI thread: progress spinner toggling, title
 * refreshes, and user-visible toasts for save/discard/skip events.
 */
private Handler mHandler = new Handler() {
    @Override
    public void handleMessage(android.os.Message msg) {
        switch (msg.what) {
            case MSG_PROGRESS_ON:
                setProgressBarIndeterminateVisibility(true);
                break;
            case MSG_PROGRESS_OFF:
                setProgressBarIndeterminateVisibility(false);
                break;
            case MSG_UPDATE_TITLE:
                updateTitle();
                break;
            case MSG_SKIPPED_ATTACHMENTS:
                // Some attachments of the source message could not be re-attached.
                Toast.makeText(
                    MessageCompose.this,
                    getString(R.string.message_compose_attachments_skipped_toast),
                    Toast.LENGTH_LONG).show();
                break;
            case MSG_SAVED_DRAFT:
                Toast.makeText(
                    MessageCompose.this,
                    getString(R.string.message_saved_toast),
                    Toast.LENGTH_LONG).show();
                break;
            case MSG_DISCARDED_DRAFT:
                Toast.makeText(
                    MessageCompose.this,
                    getString(R.string.message_discarded_toast),
                    Toast.LENGTH_LONG).show();
                break;
            default:
                super.handleMessage(msg);
                break;
        }
    }
};
// Messaging callbacks; registered in onResume(), removed in onPause().
private Listener mListener = new Listener();
// Contacts-backed completion and validation for the To/Cc/Bcc fields.
private EmailAddressAdapter mAddressAdapter;
private Validator mAddressValidator;
/**
 * Lightweight record of one pending attachment. Stored as the tag of the
 * corresponding row in the mAttachments layout.
 * NOTE(review): declared Serializable, but android.net.Uri does not appear to
 * be Serializable — verify that instances of this class are never actually
 * serialized (onSaveInstanceState only persists the Uri list).
 */
class Attachment implements Serializable {
    public String name;         // display name shown in the attachment row
    public String contentType;  // MIME type reported by the ContentResolver
    public long size;           // size in bytes, or -1 if unknown
    public Uri uri;             // content Uri the attachment is read from
}
/**
* Compose a new message using the given account. If account is null the default account
* will be used.
* @param context
* @param account
*/
public static void actionCompose(Context context, Account account) {
Intent i = new Intent(context, MessageCompose.class);
i.putExtra(EXTRA_ACCOUNT, account);
context.startActivity(i);
}
/**
 * Launch the composer as a reply to the given message.
 *
 * @param context caller's context used to start the activity
 * @param account account the source message belongs to
 * @param message message being replied to
 * @param replyAll when true, reply to all recipients instead of only the sender
 */
public static void actionReply(
    Context context,
    Account account,
    Message message,
    boolean replyAll) {
    Intent intent = new Intent(context, MessageCompose.class);
    intent.putExtra(EXTRA_ACCOUNT, account);
    intent.putExtra(EXTRA_FOLDER, message.getFolder().getName());
    intent.putExtra(EXTRA_MESSAGE, message.getUid());
    intent.setAction(replyAll ? ACTION_REPLY_ALL : ACTION_REPLY);
    context.startActivity(intent);
}
/**
 * Launch the composer as a forward of the given message.
 *
 * @param context caller's context used to start the activity
 * @param account account the source message belongs to
 * @param message message being forwarded
 */
public static void actionForward(Context context, Account account, Message message) {
    Intent intent = new Intent(context, MessageCompose.class);
    intent.setAction(ACTION_FORWARD);
    intent.putExtra(EXTRA_ACCOUNT, account);
    intent.putExtra(EXTRA_FOLDER, message.getFolder().getName());
    intent.putExtra(EXTRA_MESSAGE, message.getUid());
    context.startActivity(intent);
}
/**
 * Continue composition of the given draft message. This action modifies the way this
 * Activity handles certain actions:
 * Save will attempt to replace the message in its folder with the updated version.
 * Discard will delete the message from its folder.
 *
 * @param context caller's context used to start the activity
 * @param account account the draft belongs to
 * @param message draft message to continue editing; its folder and UID are
 *                passed along so save/discard can target the original
 */
public static void actionEditDraft(Context context, Account account, Message message) {
    Intent i = new Intent(context, MessageCompose.class);
    i.putExtra(EXTRA_ACCOUNT, account);
    i.putExtra(EXTRA_FOLDER, message.getFolder().getName());
    i.putExtra(EXTRA_MESSAGE, message.getUid());
    i.setAction(ACTION_EDIT_DRAFT);
    context.startActivity(i);
}
/**
 * Initializes the compose form, then dispatches on the incoming intent action:
 * mailto:/SENDTO links, ACTION_SEND share intents (text + optional attachment +
 * EXTRA_EMAIL/CC/BCC recipients), or the internal reply/forward/edit-draft actions
 * which kick off an asynchronous load of the source message.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
    setContentView(R.layout.message_compose);
    mAddressAdapter = new EmailAddressAdapter(this);
    mAddressValidator = new EmailAddressValidator();
    mFromView = (TextView)findViewById(R.id.from);
    mToView = (MultiAutoCompleteTextView)findViewById(R.id.to);
    mCcView = (MultiAutoCompleteTextView)findViewById(R.id.cc);
    mBccView = (MultiAutoCompleteTextView)findViewById(R.id.bcc);
    mSubjectView = (EditText)findViewById(R.id.subject);
    EditText upperSignature = (EditText)findViewById(R.id.upper_signature);
    EditText lowerSignature = (EditText)findViewById(R.id.lower_signature);
    mMessageContentView = (EditText)findViewById(R.id.message_content);
    mAttachments = (LinearLayout)findViewById(R.id.attachments);
    mQuotedTextBar = findViewById(R.id.quoted_text_bar);
    mQuotedTextDelete = (ImageButton)findViewById(R.id.quoted_text_delete);
    mQuotedText = (EditText)findViewById(R.id.quoted_text);
    // Any edit to a recipient, subject or body field marks the draft dirty.
    TextWatcher watcher = new TextWatcher() {
        public void beforeTextChanged(CharSequence s, int start,
                                      int before, int after) { }
        public void onTextChanged(CharSequence s, int start,
                                  int before, int count) {
            mDraftNeedsSaving = true;
        }
        public void afterTextChanged(android.text.Editable s) { }
    };
    // Signature edits additionally set mSignatureChanged so the custom
    // signature is persisted in the draft's identity header.
    TextWatcher sigwatcher = new TextWatcher() {
        public void beforeTextChanged(CharSequence s, int start,
                                      int before, int after) { }
        public void onTextChanged(CharSequence s, int start,
                                  int before, int count) {
            mDraftNeedsSaving = true;
            mSignatureChanged = true;
        }
        public void afterTextChanged(android.text.Editable s) { }
    };
    mToView.addTextChangedListener(watcher);
    mCcView.addTextChangedListener(watcher);
    mBccView.addTextChangedListener(watcher);
    mSubjectView.addTextChangedListener(watcher);
    mMessageContentView.addTextChangedListener(watcher);
    /*
     * We set this to invisible by default. Other methods will turn it back on if it's
     * needed.
     */
    mQuotedTextBar.setVisibility(View.GONE);
    mQuotedText.setVisibility(View.GONE);
    mQuotedTextDelete.setOnClickListener(this);
    mFromView.setVisibility(View.GONE);
    mToView.setAdapter(mAddressAdapter);
    mToView.setTokenizer(new Rfc822Tokenizer());
    mToView.setValidator(mAddressValidator);
    mCcView.setAdapter(mAddressAdapter);
    mCcView.setTokenizer(new Rfc822Tokenizer());
    mCcView.setValidator(mAddressValidator);
    mBccView.setAdapter(mAddressAdapter);
    mBccView.setTokenizer(new Rfc822Tokenizer());
    mBccView.setValidator(mAddressValidator);
    mSubjectView.setOnFocusChangeListener(this);
    if (savedInstanceState != null) {
        /*
         * This data gets used in onCreate, so grab it here instead of onRestoreIntstanceState
         */
        mSourceMessageProcessed = savedInstanceState.getBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, false);
    }
    Intent intent = getIntent();
    String action = intent.getAction();
    if (Intent.ACTION_VIEW.equals(action) || Intent.ACTION_SENDTO.equals(action)) {
        /*
         * Someone has clicked a mailto: link. The address is in the URI.
         */
        mAccount = Preferences.getPreferences(this).getDefaultAccount();
        if (mAccount == null) {
            /*
             * There are no accounts set up. This should not have happened. Prompt the
             * user to set up an account as an acceptable bailout.
             */
            startActivity(new Intent(this, Accounts.class));
            mDraftNeedsSaving = false;
            finish();
            return;
        }
        if (intent.getData() != null) {
            Uri uri = intent.getData();
            try {
                if (uri.getScheme().equalsIgnoreCase("mailto")) {
                    Address[] addresses = Address.parse(uri.getSchemeSpecificPart());
                    addAddresses(mToView, addresses);
                }
            }
            catch (Exception e) {
                /*
                 * If we can't extract any information from the URI it's okay. They can
                 * still compose a message.
                 */
            }
        }
    }
    else if (Intent.ACTION_SEND.equals(action)) {
        /*
         * Someone is trying to compose an email with an attachment, probably Pictures.
         * The Intent should contain an EXTRA_STREAM with the data to attach.
         */
        mAccount = Preferences.getPreferences(this).getDefaultAccount();
        if (mAccount == null) {
            /*
             * There are no accounts set up. This should not have happened. Prompt the
             * user to set up an account as an acceptable bailout.
             */
            startActivity(new Intent(this, Accounts.class));
            mDraftNeedsSaving = false;
            finish();
            return;
        }
        String text = intent.getStringExtra(Intent.EXTRA_TEXT);
        if (text != null) {
            mMessageContentView.setText(text);
        }
        String subject = intent.getStringExtra(Intent.EXTRA_SUBJECT);
        if (subject != null) {
            mSubjectView.setText(subject);
        }
        String type = intent.getType();
        Uri stream = (Uri) intent.getParcelableExtra(Intent.EXTRA_STREAM);
        if (stream != null && type != null) {
            if (MimeUtility.mimeTypeMatches(type, Email.ACCEPTABLE_ATTACHMENT_SEND_TYPES)) {
                addAttachment(stream);
            }
        }
        /*
         * There might be an EXTRA_SUBJECT, EXTRA_TEXT, EXTRA_EMAIL, EXTRA_BCC or EXTRA_CC
         */
        String extraSubject = intent.getStringExtra(Intent.EXTRA_SUBJECT);
        String extraText = intent.getStringExtra(Intent.EXTRA_TEXT);
        mSubjectView.setText(extraSubject);
        mMessageContentView.setText(extraText);
        // BUG FIX: the previous code only copied a recipient array when it held
        // MORE than one entry (if (arraySize > 1)), silently dropping a single
        // EXTRA_EMAIL/EXTRA_BCC/EXTRA_CC address. joinAddresses() handles
        // 0, 1 and n entries uniformly.
        mToView.setText(joinAddresses(intent.getStringArrayExtra(Intent.EXTRA_EMAIL)));
        mBccView.setText(joinAddresses(intent.getStringArrayExtra(Intent.EXTRA_BCC)));
        mCcView.setText(joinAddresses(intent.getStringArrayExtra(Intent.EXTRA_CC)));
    } else {
        // Internal actions (reply/forward/edit draft) carry the account,
        // source folder and source message UID as extras.
        mAccount = (Account) intent.getSerializableExtra(EXTRA_ACCOUNT);
        mFolder = (String) intent.getStringExtra(EXTRA_FOLDER);
        mSourceMessageUid = (String) intent.getStringExtra(EXTRA_MESSAGE);
    }
    if (mIdentity == null)
    {
        mIdentity = mAccount.getIdentity(0);
    }
    // The layout has a signature slot above and below the quoted text; show
    // only the one matching the account preference.
    if (mAccount.isSignatureBeforeQuotedText())
    {
        mSignatureView = upperSignature;
        lowerSignature.setVisibility(View.GONE);
    }
    else
    {
        mSignatureView = lowerSignature;
        upperSignature.setVisibility(View.GONE);
    }
    mSignatureView.addTextChangedListener(sigwatcher);
    if (!mSourceMessageProcessed) {
        updateFrom();
        updateSignature();
        if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_FORWARD.equals(action) || ACTION_EDIT_DRAFT.equals(action)) {
            /*
             * If we need to load the message we add ourself as a message listener here
             * so we can kick it off. Normally we add in onResume but we don't
             * want to reload the message every time the activity is resumed.
             * There is no harm in adding twice.
             */
            MessagingController.getInstance(getApplication()).addListener(mListener);
            MessagingController.getInstance(getApplication()).loadMessageForView( mAccount, mFolder, mSourceMessageUid, null);
        }
        if (!ACTION_EDIT_DRAFT.equals(action)) {
            String bccAddress = mAccount.getAlwaysBcc();
            if (bccAddress!=null
                && !"".equals(bccAddress)) {
                addAddress(mBccView, new Address(mAccount.getAlwaysBcc(), ""));
            }
        }
        Log.d(Email.LOG_TAG, "action = " + action + ", mAccount = " + mAccount + ", mFolder = " + mFolder + ", mSourceMessageUid = " + mSourceMessageUid);
        if ((ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) && mAccount != null && mFolder != null && mSourceMessageUid != null) {
            Log.d(Email.LOG_TAG, "Setting message ANSWERED flag to true");
            // TODO: Really, we should wait until we send the message, but that would require saving the original
            // message info along with a Draft copy, in case it is left in Drafts for a while before being sent
            MessagingController.getInstance(getApplication()).setMessageFlag(mAccount, mFolder, mSourceMessageUid, Flag.ANSWERED, true);
        }
        updateTitle();
    }
    if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_EDIT_DRAFT.equals(action)) {
        //change focus to message body.
        mMessageContentView.requestFocus();
    }
}
/**
 * Joins an array of address strings into a single comma-separated string.
 *
 * @param addresses array of addresses; may be null or empty
 * @return "a, b, c" style string, or the empty string for null/empty input
 */
private static String joinAddresses(String[] addresses) {
    if (addresses == null || addresses.length == 0) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < addresses.length; i++) {
        if (i > 0) {
            joined.append(", ");
        }
        joined.append(addresses[i]);
    }
    return joined.toString();
}
/** Re-registers the messaging listener whenever the activity returns to the foreground. */
@Override
public void onResume() {
    super.onResume();
    MessagingController.getInstance(getApplication()).addListener(mListener);
}
/**
 * Saves the draft (if dirty) and unregisters the messaging listener when the
 * activity leaves the foreground.
 */
@Override
public void onPause() {
    super.onPause();
    saveIfNeeded();
    MessagingController.getInstance(getApplication()).removeListener(mListener);
}
/**
 * The framework handles most of the fields, but we need to handle stuff that we
 * dynamically show and hide:
 * Attachment list,
 * Cc field,
 * Bcc field,
 * Quoted text,
 */
@Override
protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // Persist the draft first so no edits are lost if the process is killed.
    saveIfNeeded();
    // Only the attachment Uris are saved; metadata is re-queried on restore.
    ArrayList<Uri> attachments = new ArrayList<Uri>();
    for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) {
        View view = mAttachments.getChildAt(i);
        Attachment attachment = (Attachment) view.getTag();
        attachments.add(attachment.uri);
    }
    outState.putParcelableArrayList(STATE_KEY_ATTACHMENTS, attachments);
    outState.putBoolean(STATE_KEY_CC_SHOWN, mCcView.getVisibility() == View.VISIBLE);
    outState.putBoolean(STATE_KEY_BCC_SHOWN, mBccView.getVisibility() == View.VISIBLE);
    outState.putBoolean(STATE_KEY_QUOTED_TEXT_SHOWN, mQuotedTextBar.getVisibility() == View.VISIBLE);
    outState.putBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, mSourceMessageProcessed);
    outState.putString(STATE_KEY_DRAFT_UID, mDraftUid);
    outState.putSerializable(STATE_IDENTITY, mIdentity);
    outState.putBoolean(STATE_IDENTITY_CHANGED, mIdentityChanged);
}
/**
 * Restores the dynamically shown/hidden state saved by onSaveInstanceState:
 * attachments (re-added from their Uris), Cc/Bcc/quoted-text visibility,
 * draft UID, and the selected identity.
 */
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
    super.onRestoreInstanceState(savedInstanceState);
    ArrayList<Parcelable> attachments = (ArrayList<Parcelable>) savedInstanceState.getParcelableArrayList(STATE_KEY_ATTACHMENTS);
    // Rebuild the attachment list from scratch so nothing is duplicated.
    mAttachments.removeAllViews();
    for (Parcelable p : attachments) {
        Uri uri = (Uri) p;
        addAttachment(uri);
    }
    mCcView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_CC_SHOWN) ? View.VISIBLE : View.GONE);
    mBccView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_BCC_SHOWN) ? View.VISIBLE : View.GONE);
    mQuotedTextBar.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE);
    mQuotedText.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE);
    mDraftUid = savedInstanceState.getString(STATE_KEY_DRAFT_UID);
    mIdentity = (Account.Identity)savedInstanceState.getSerializable(STATE_IDENTITY);
    mIdentityChanged = savedInstanceState.getBoolean(STATE_IDENTITY_CHANGED);
    updateFrom();
    updateSignature();
    // Restoring the view state is not a user edit; clear the dirty flag.
    mDraftNeedsSaving = false;
}
/**
 * Shows the current subject in the title bar, falling back to the generic
 * compose title when the subject is empty.
 */
private void updateTitle() {
    CharSequence subject = mSubjectView.getText();
    if (subject.length() > 0) {
        setTitle(subject.toString());
    } else {
        setTitle(R.string.compose_title);
    }
}
/** Refreshes the title bar when the subject field loses focus. */
public void onFocusChange(View view, boolean focused) {
    if (focused) {
        return;
    }
    updateTitle();
}
/**
 * Appends each of the given addresses to the recipient field.
 * A null array is silently ignored.
 */
private void addAddresses(MultiAutoCompleteTextView view, Address[] addresses) {
    if (addresses != null) {
        for (Address address : addresses) {
            addAddress(view, address);
        }
    }
}
/** Appends a single address (followed by a comma separator) to a recipient field. */
private void addAddress(MultiAutoCompleteTextView view, Address address) {
    String token = address + ", ";
    view.append(token);
}
/** Parses the raw (unencoded) text of a recipient field into Address objects. */
private Address[] getAddresses(MultiAutoCompleteTextView view) {
    String raw = view.getText().toString().trim();
    return Address.parseUnencoded(raw);
}
/**
 * Builds a MimeMessage from the current form contents: headers, body text
 * (optionally with signature and quoted text), and any attachments as a
 * multipart structure.
 *
 * @param appendSig when true, the signature is appended to the body; its
 *                  position relative to the quoted text follows
 *                  Account.isSignatureBeforeQuotedText()
 * @return the assembled message
 * @throws MessagingException if the message cannot be constructed
 */
private MimeMessage createMessage(boolean appendSig) throws MessagingException {
    MimeMessage message = new MimeMessage();
    message.addSentDate(new Date());
    Address from = new Address(mIdentity.getEmail(), mIdentity.getName());
    message.setFrom(from);
    message.setRecipients(RecipientType.TO, getAddresses(mToView));
    message.setRecipients(RecipientType.CC, getAddresses(mCcView));
    message.setRecipients(RecipientType.BCC, getAddresses(mBccView));
    message.setSubject(mSubjectView.getText().toString());
    // XXX TODO - not sure why this won't add header
    // message.setHeader("X-User-Agent", getString(R.string.message_header_mua));
    /*
     * Build the Body that will contain the text of the message. We'll decide where to
     * include it later.
     */
    String text = mMessageContentView.getText().toString();
    // Assembly order matters: signature-above, then quoted text, then
    // signature-below — exactly one of the two signature appends runs.
    if (appendSig && mAccount.isSignatureBeforeQuotedText()) {
        text = appendSignature(text);
    }
    // Quoted text is only included while its bar is visible (the user can
    // delete it via the quoted-text delete button).
    if (mQuotedTextBar.getVisibility() == View.VISIBLE) {
        text += "\n" + mQuotedText.getText().toString();
    }
    if (appendSig && mAccount.isSignatureBeforeQuotedText() == false) {
        text = appendSignature(text);
    }
    TextBody body = new TextBody(text);
    if (mAttachments.getChildCount() > 0) {
        /*
         * The message has attachments that need to be included. First we add the part
         * containing the text that will be sent and then we include each attachment.
         */
        MimeMultipart mp;
        mp = new MimeMultipart();
        mp.addBodyPart(new MimeBodyPart(body, "text/plain"));
        for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) {
            Attachment attachment = (Attachment) mAttachments.getChildAt(i).getTag();
            MimeBodyPart bp = new MimeBodyPart( new LocalStore.LocalAttachmentBody(attachment.uri, getApplication()));
            bp.addHeader(MimeHeader.HEADER_CONTENT_TYPE, String.format("%s;\n name=\"%s\"", attachment.contentType, attachment.name));
            bp.addHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING, "base64");
            bp.addHeader(MimeHeader.HEADER_CONTENT_DISPOSITION, String.format("attachment;\n filename=\"%s\"", attachment.name));
            mp.addBodyPart(bp);
        }
        message.setBody(mp);
    }
    else {
        /*
         * No attachments to include, just stick the text body in the message and call
         * it good.
         */
        message.setBody(body);
    }
    return message;
}
/**
 * Returns the text with the current signature appended on a new line,
 * or the text unchanged when the signature field is empty.
 */
private String appendSignature (String text) {
    String signature = mSignatureView.getText().toString();
    if (signature == null || signature.contentEquals("")) {
        return text;
    }
    return text + "\n" + signature;
}
/**
 * Builds the message from the form and either saves it as a draft or sends it.
 *
 * @param save when true the message is saved to the drafts folder (reusing the
 *             existing draft UID if any); when false it is sent, deleting the
 *             original draft first if this composer is editing one
 */
private void sendOrSaveMessage(boolean save) {
    /*
     * Create the message from all the data the user has entered.
     */
    MimeMessage message;
    try {
        // NOTE: the signature is appended only when SENDING (!save), not when
        // saving a draft — the signature field is persisted separately via the
        // identity header below.
        message = createMessage(!save);
    }
    catch (MessagingException me) {
        Log.e(Email.LOG_TAG, "Failed to create new message for send or save.", me);
        throw new RuntimeException("Failed to create a new message for send or save.", me);
    }
    if (save) {
        /*
         * Save a draft
         */
        if (mDraftUid != null) {
            message.setUid(mDraftUid);
        }
        else if (ACTION_EDIT_DRAFT.equals(getIntent().getAction())) {
            /*
             * We're saving a previously saved draft, so update the new message's uid
             * to the old message's uid.
             */
            message.setUid(mSourceMessageUid);
        }
        // Encode composer state (body length, and — if changed — signature and
        // identity) into a colon-separated, base64-encoded header so it can be
        // recovered when the draft is reopened.
        String k9identity = Utility.base64Encode("" + mMessageContentView.getText().toString().length());
        if (mIdentityChanged || mSignatureChanged)
        {
            String signature = mSignatureView.getText().toString();
            k9identity += ":" + Utility.base64Encode(signature) ;
            if (mIdentityChanged)
            {
                String name = mIdentity.getName();
                String email = mIdentity.getEmail();
                k9identity += ":" + Utility.base64Encode(name) + ":" + Utility.base64Encode(email);
            }
        }
        Log.d(Email.LOG_TAG, "Saving identity: " + k9identity);
        message.addHeader(Email.K9MAIL_IDENTITY, k9identity);
        MessagingController.getInstance(getApplication()).saveDraft(mAccount, message);
        mDraftUid = message.getUid();
        // Don't display the toast if the user is just changing the orientation
        if ((getChangingConfigurations() & ActivityInfo.CONFIG_ORIENTATION) == 0) {
            mHandler.sendEmptyMessage(MSG_SAVED_DRAFT);
        }
    }
    else {
        /*
         * Send the message
         * TODO Is it possible for us to be editing a draft with a null source message? Don't
         * think so. Could probably remove below check.
         */
        if (ACTION_EDIT_DRAFT.equals(getIntent().getAction()) && mSourceMessageUid != null) {
            /*
             * We're sending a previously saved draft, so delete the old draft first.
             */
            MessagingController.getInstance(getApplication()).deleteMessage( mAccount, mFolder, mSourceMessage, null);
        }
        MessagingController.getInstance(getApplication()).sendMessage(mAccount, message, null);
    }
}
/** Saves the message as a draft, but only when the user has made edits. */
private void saveIfNeeded() {
    if (mDraftNeedsSaving) {
        mDraftNeedsSaving = false;
        sendOrSaveMessage(true);
    }
}
/**
 * Validates that at least one recipient is present, then sends the message
 * and finishes the activity.
 */
private void onSend() {
    boolean noRecipients = getAddresses(mToView).length == 0
        && getAddresses(mCcView).length == 0
        && getAddresses(mBccView).length == 0;
    if (noRecipients) {
        mToView.setError(getString(R.string.message_compose_error_no_recipients));
        Toast.makeText(this, getString(R.string.message_compose_error_no_recipients), Toast.LENGTH_LONG).show();
        return;
    }
    sendOrSaveMessage(false);
    mDraftNeedsSaving = false;
    finish();
}
/**
 * Discards the message: deletes the original draft (when editing one),
 * shows the discard toast, and finishes without saving.
 */
private void onDiscard() {
    // Collapsed the previous redundant nested null check: the inner branch
    // re-tested mSourceMessageUid != null immediately after the outer test.
    if (mSourceMessageUid != null && ACTION_EDIT_DRAFT.equals(getIntent().getAction())) {
        MessagingController.getInstance(getApplication()).deleteMessage( mAccount, mFolder, mSourceMessage, null);
    }
    mHandler.sendEmptyMessage(MSG_DISCARDED_DRAFT);
    mDraftNeedsSaving = false;
    finish();
}
/** Saves the draft (if dirty) and closes the composer. */
private void onSave() {
    saveIfNeeded();
    finish();
}
/** Reveals the Cc and Bcc fields, which are hidden by default. */
private void onAddCcBcc() {
    mCcView.setVisibility(View.VISIBLE);
    mBccView.setVisibility(View.VISIBLE);
}
/**
 * Kick off a picker for whatever kind of MIME types we'll accept and let Android take over.
 */
private void onAddAttachment() {
    Intent picker = new Intent(Intent.ACTION_GET_CONTENT);
    picker.addCategory(Intent.CATEGORY_OPENABLE);
    picker.setType(Email.ACCEPTABLE_ATTACHMENT_SEND_TYPES[0]);
    startActivityForResult(Intent.createChooser(picker, null), ACTIVITY_REQUEST_PICK_ATTACHMENT);
}
/** Adds an attachment whose size and name will be resolved from the ContentResolver. */
private void addAttachment(Uri uri) {
    addAttachment(uri, -1, null);
}
/**
 * Adds an attachment row for the given content Uri. Missing metadata is
 * resolved from the ContentResolver (display name, size) with the Uri's last
 * path segment as a final name fallback.
 *
 * @param uri  content Uri to attach
 * @param size size in bytes, or -1 to look it up
 * @param name display name, or null to look it up
 */
private void addAttachment(Uri uri, int size, String name) {
    ContentResolver contentResolver = getContentResolver();
    String contentType = contentResolver.getType(uri);
    if (contentType == null) {
        contentType = "";
    }
    Attachment attachment = new Attachment();
    attachment.name = name;
    attachment.contentType = contentType;
    attachment.size = size;
    attachment.uri = uri;
    // Query the provider only when the caller didn't supply the metadata.
    if (attachment.size == -1 || attachment.name == null) {
        Cursor metadataCursor = contentResolver.query( uri, new String[]{ OpenableColumns.DISPLAY_NAME, OpenableColumns.SIZE }, null, null, null);
        if (metadataCursor != null) {
            try {
                if (metadataCursor.moveToFirst()) {
                    if (attachment.name == null) {
                        attachment.name = metadataCursor.getString(0);
                    }
                    if (attachment.size == -1) {
                        attachment.size = metadataCursor.getInt(1);
                    }
                }
            } finally {
                // Always release the cursor, even if reading it throws.
                metadataCursor.close();
            }
        }
    }
    if (attachment.name == null) {
        attachment.name = uri.getLastPathSegment();
    }
    // Inflate the row; the Attachment record rides along as the view tag so
    // onSaveInstanceState/createMessage can recover it later.
    View view = getLayoutInflater().inflate( R.layout.message_compose_attachment, mAttachments, false);
    TextView nameView = (TextView)view.findViewById(R.id.attachment_name);
    ImageButton delete = (ImageButton)view.findViewById(R.id.attachment_delete);
    nameView.setText(attachment.name);
    delete.setOnClickListener(this);
    // The delete button's tag points back at its row so onClick can remove it.
    delete.setTag(view);
    view.setTag(attachment);
    mAttachments.addView(view);
}
/**
 * Handles results from the attachment picker and the identity chooser.
 * Cancelled or data-less results are ignored.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (resultCode != RESULT_OK || data == null) {
        return;
    }
    switch (requestCode) {
        case ACTIVITY_REQUEST_PICK_ATTACHMENT:
            addAttachment(data.getData());
            mDraftNeedsSaving = true;
            break;
        case ACTIVITY_CHOOSE_IDENTITY:
            onIdentityChosen(data);
            break;
    }
}
/**
 * Applies the identity the user picked in the ChooseIdentity activity.
 * (Also removes a stray double semicolon from the original.)
 */
private void onIdentityChosen(Intent intent)
{
    Bundle bundle = intent.getExtras();
    switchToIdentity( (Account.Identity)bundle.getSerializable(ChooseIdentity.EXTRA_IDENTITY));
}
/**
 * Switches the composer to the given identity, marks the draft dirty so the
 * change is persisted, and refreshes the From line and signature field.
 */
private void switchToIdentity(Account.Identity identity)
{
    mIdentity = identity;
    mIdentityChanged = true;
    mDraftNeedsSaving = true;
    updateFrom();
    updateSignature();
}
/**
 * Refreshes the From line from the current identity. The line is only made
 * visible once the user has actually switched identities.
 */
private void updateFrom()
{
    if (mIdentityChanged)
    {
        mFromView.setVisibility(View.VISIBLE);
    }
    String fromText = getString(R.string.message_view_from_format, mIdentity.getName(), mIdentity.getEmail());
    mFromView.setText(fromText);
}
/** Loads the current identity's signature into the signature field. */
private void updateSignature()
{
    mSignatureView.setText(mIdentity.getSignature());
}
/**
 * Click handler for per-attachment delete buttons and the quoted-text
 * delete button; both edits mark the draft dirty.
 */
public void onClick(View view) {
    switch (view.getId()) {
        case R.id.attachment_delete:
            /*
             * The view is the delete button, and we have previously set the tag of
             * the delete button to the view that owns it. We don't use parent because the
             * view is very complex and could change in the future.
             */
            mAttachments.removeView((View) view.getTag());
            mDraftNeedsSaving = true;
            break;
        case R.id.quoted_text_delete:
            // Hiding the quoted text also excludes it from createMessage().
            mQuotedTextBar.setVisibility(View.GONE);
            mQuotedText.setVisibility(View.GONE);
            mDraftNeedsSaving = true;
            break;
    }
}
/**
 * Dispatches options-menu selections to the corresponding composer action.
 *
 * @return true when the item was handled here, otherwise the superclass result
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        case R.id.send:
            onSend();
            break;
        case R.id.save:
            onSave();
            break;
        case R.id.discard:
            onDiscard();
            break;
        case R.id.add_cc_bcc:
            onAddCcBcc();
            break;
        case R.id.add_attachment:
            onAddAttachment();
            break;
        case R.id.choose_identity:
            onChooseIdentity();
            break;
        default:
            return super.onOptionsItemSelected(item);
    }
    return true;
}
/**
 * Opens the identity chooser when the account has more than one identity;
 * otherwise tells the user there is nothing to choose from.
 */
private void onChooseIdentity()
{
    if (mAccount.getIdentities().size() <= 1)
    {
        Toast.makeText(this, getString(R.string.no_identities),
                       Toast.LENGTH_LONG).show();
        return;
    }
    Intent intent = new Intent(this, ChooseIdentity.class);
    intent.putExtra(ChooseIdentity.EXTRA_ACCOUNT, mAccount);
    startActivityForResult(intent, ACTIVITY_CHOOSE_IDENTITY);
}
/** Inflates the composer's options menu. */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    super.onCreateOptionsMenu(menu);
    getMenuInflater().inflate(R.menu.message_compose_option, menu);
    return true;
}
/**
 * Recursively walks the MIME tree of the given part and re-adds every named
 * attachment that is locally available to the attachment list (posted to the
 * UI thread).
 *
 * @param part  MIME part (or multipart container) to scan
 * @param depth current recursion depth (informational)
 * @return true if all attachments were able to be attached, otherwise false
 *         (a named part whose body is missing or not stored locally)
 * @throws MessagingException on malformed MIME structure
 */
private boolean loadAttachments(Part part, int depth) throws MessagingException {
    if (part.getBody() instanceof Multipart) {
        Multipart mp = (Multipart) part.getBody();
        boolean ret = true;
        for (int i = 0, count = mp.getCount(); i < count; i++) {
            if (!loadAttachments(mp.getBodyPart(i), depth + 1)) {
                ret = false;
            }
        }
        return ret;
    } else {
        String contentType = MimeUtility.unfoldAndDecode(part.getContentType());
        String name = MimeUtility.getHeaderParameter(contentType, "name");
        // Only parts carrying a name parameter are treated as attachments.
        if (name != null) {
            Body body = part.getBody();
            // instanceof already rejects null, so the previous explicit
            // "body != null &&" check was redundant — a null body falls
            // through to the failure branch exactly as before.
            if (body instanceof LocalAttachmentBody) {
                final Uri uri = ((LocalAttachmentBody) body).getContentUri();
                mHandler.post(new Runnable() {
                    public void run() {
                        addAttachment(uri);
                    }
                });
            }
            else {
                return false;
            }
        }
        return true;
    }
}
/**
 * Pull out the parts of the now loaded source message and apply them to the new message
 * depending on the type of message being composed (reply, reply-all, forward, or
 * draft edit, selected by the launching intent's action).
 * Marks the source message as processed and the draft as clean when done.
 *
 * @param message the fully loaded source message
 */
private void processSourceMessage(Message message) {
    String action = getIntent().getAction();
    if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) {
        try {
            // Prefix the subject with "Re: " unless it already carries one
            // (checked case-insensitively).
            if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("re:")) {
                mSubjectView.setText("Re: " + message.getSubject());
            }
            else {
                mSubjectView.setText(message.getSubject());
            }
            /*
             * If a reply-to was included with the message use that, otherwise use the from
             * or sender address.
             */
            Address[] replyToAddresses;
            if (message.getReplyTo().length > 0) {
                addAddresses(mToView, replyToAddresses = message.getReplyTo());
            }
            else {
                addAddresses(mToView, replyToAddresses = message.getFrom());
            }
            // Quote the plain-text body, prefixing every line with ">".
            Part part = MimeUtility.findFirstPartByMimeType(mSourceMessage,
                            "text/plain");
            if (part != null) {
                String quotedText = String.format(
                                        getString(R.string.message_compose_reply_header_fmt),
                                        Address.toString(mSourceMessage.getFrom()));
                quotedText += MimeUtility.getTextFromPart(part).replaceAll("(?m)^", ">");
                mQuotedText.setText(quotedText);
                mQuotedTextBar.setVisibility(View.VISIBLE);
                mQuotedText.setVisibility(View.VISIBLE);
            }
            // Pick the sending identity that matches one of the original
            // recipients: TO recipients first, then CC.
            if (ACTION_REPLY_ALL.equals(action) || ACTION_REPLY.equals(action))
            {
                Account.Identity useIdentity = null;
                for (Address address : message.getRecipients(RecipientType.TO)) {
                    Account.Identity identity = mAccount.findIdentity(address);
                    if (identity != null) {
                        useIdentity = identity;
                        break;
                    }
                }
                if (useIdentity == null)
                {
                    if (message.getRecipients(RecipientType.CC).length > 0) {
                        for (Address address : message.getRecipients(RecipientType.CC)) {
                            Account.Identity identity = mAccount.findIdentity(address);
                            if (identity != null) {
                                useIdentity = identity;
                                break;
                            }
                        }
                    }
                }
                // Only switch identities when the match differs from the default.
                if (useIdentity != null)
                {
                    Account.Identity defaultIdentity = mAccount.getIdentity(0);
                    if (useIdentity != defaultIdentity)
                    {
                        switchToIdentity(useIdentity);
                    }
                }
            }
            if (ACTION_REPLY_ALL.equals(action)) {
                // Add the original TO recipients, excluding our own identities.
                for (Address address : message.getRecipients(RecipientType.TO)) {
                    // NOTE(review): `identity` below is computed but never used;
                    // isAnIdentity() performs the actual check.
                    Account.Identity identity = mAccount.findIdentity(address);
                    if (!mAccount.isAnIdentity(address)) {
                        addAddress(mToView, address);
                    }
                }
                // Add CC recipients that are neither us nor already in the
                // reply-to set populated above.
                if (message.getRecipients(RecipientType.CC).length > 0) {
                    for (Address address : message.getRecipients(RecipientType.CC)) {
                        if (!mAccount.isAnIdentity(address) && !Utility.arrayContains(replyToAddresses, address)) {
                            addAddress(mCcView, address);
                        }
                    }
                    mCcView.setVisibility(View.VISIBLE);
                }
            }
        }
        catch (MessagingException me) {
            /*
             * This really should not happen at this point but if it does it's okay.
             * The user can continue composing their message.
             */
        }
    }
    else if (ACTION_FORWARD.equals(action)) {
        try {
            // Prefix the subject with "Fwd: " unless already present.
            if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("fwd:")) {
                mSubjectView.setText("Fwd: " + message.getSubject());
            }
            else {
                mSubjectView.setText(message.getSubject());
            }
            // Prefer the plain-text part; fall back to HTML.
            Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain");
            if (part == null) {
                part = MimeUtility.findFirstPartByMimeType(message, "text/html");
            }
            if (part != null) {
                String quotedText = MimeUtility.getTextFromPart(part);
                if (quotedText != null) {
                    // Forward header: original subject, sender and recipients.
                    String text = String.format(
                                      getString(R.string.message_compose_fwd_header_fmt),
                                      mSourceMessage.getSubject(),
                                      Address.toString(mSourceMessage.getFrom()),
                                      Address.toString(
                                          mSourceMessage.getRecipients(RecipientType.TO)),
                                      Address.toString(
                                          mSourceMessage.getRecipients(RecipientType.CC)));
                    text += quotedText;
                    mQuotedText.setText(text);
                    mQuotedTextBar.setVisibility(View.VISIBLE);
                    mQuotedText.setVisibility(View.VISIBLE);
                }
            }
            // Carry the original attachments over; notify if some were skipped.
            if (!mSourceMessageProcessed) {
                if (!loadAttachments(message, 0)) {
                    mHandler.sendEmptyMessage(MSG_SKIPPED_ATTACHMENTS);
                }
            }
        }
        catch (MessagingException me) {
            /*
             * This really should not happen at this point but if it does it's okay.
             * The user can continue composing their message.
             */
        }
    }
    else if (ACTION_EDIT_DRAFT.equals(action)) {
        try {
            // Restore subject and all recipient fields from the saved draft.
            mSubjectView.setText(message.getSubject());
            addAddresses(mToView, message.getRecipients(RecipientType.TO));
            if (message.getRecipients(RecipientType.CC).length > 0) {
                addAddresses(mCcView, message.getRecipients(RecipientType.CC));
                mCcView.setVisibility(View.VISIBLE);
            }
            if (message.getRecipients(RecipientType.BCC).length > 0) {
                addAddresses(mBccView, message.getRecipients(RecipientType.BCC));
                mBccView.setVisibility(View.VISIBLE);
            }
            if (!mSourceMessageProcessed) {
                loadAttachments(message, 0);
            }
            // The draft's identity header encodes colon-separated base64 tokens:
            // bodyLength : signature : name : email (trailing tokens optional).
            Integer bodyLength = null;
            String[] k9identities = message.getHeader(Email.K9MAIL_IDENTITY);
            if (k9identities != null && k9identities.length > 0)
            {
                String k9identity = k9identities[0];
                if (k9identity != null)
                {
                    Log.d(Email.LOG_TAG, "Got a saved identity: " + k9identity);
                    StringTokenizer tokens = new StringTokenizer(k9identity, ":", false);
                    String bodyLengthS = null;
                    String name = null;
                    String email = null;
                    String signature = null;
                    if (tokens.hasMoreTokens())
                    {
                        bodyLengthS = Utility.base64Decode(tokens.nextToken());
                        try
                        {
                            bodyLength = Integer.parseInt(bodyLengthS);
                        }
                        catch (Exception e)
                        {
                            // Unparseable length: leave bodyLength null so the whole
                            // text is treated as the message body below.
                            Log.e(Email.LOG_TAG, "Unable to parse bodyLength '" + bodyLengthS + "'");
                        }
                    }
                    if (tokens.hasMoreTokens())
                    {
                        signature = Utility.base64Decode(tokens.nextToken());
                    }
                    if (tokens.hasMoreTokens())
                    {
                        name = Utility.base64Decode(tokens.nextToken());
                    }
                    if (tokens.hasMoreTokens())
                    {
                        email = Utility.base64Decode(tokens.nextToken());
                    }
                    // Build the identity to compose with, falling back to the
                    // current identity for any field the header did not carry.
                    Account.Identity newIdentity= mAccount.new Identity();
                    if (signature != null)
                    {
                        newIdentity.setSignature(signature);
                        mSignatureChanged = true;
                    }
                    else
                    {
                        newIdentity.setSignature(mIdentity.getSignature());
                    }
                    if (name != null)
                    {
                        newIdentity.setName(name);
                        mIdentityChanged = true;
                    }
                    else
                    {
                        newIdentity.setName(mIdentity.getName());
                    }
                    if (email != null)
                    {
                        newIdentity.setEmail(email);
                        mIdentityChanged = true;
                    }
                    else
                    {
                        newIdentity.setEmail(mIdentity.getEmail());
                    }
                    mIdentity = newIdentity;
                    updateSignature();
                    updateFrom();
                }
            }
            // Split the stored text into the editable body and the quoted part
            // using the recorded body length.
            Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain");
            if (part != null) {
                String text = MimeUtility.getTextFromPart(part);
                if (bodyLength != null && bodyLength + 1 < text.length()) // + 1 to get rid of the newline we added when saving the draft
                {
                    String bodyText = text.substring(0, bodyLength);
                    String quotedText = text.substring(bodyLength + 1, text.length());
                    mMessageContentView.setText(bodyText);
                    mQuotedText.setText(quotedText);
                    mQuotedTextBar.setVisibility(View.VISIBLE);
                    mQuotedText.setVisibility(View.VISIBLE);
                }
                else
                {
                    mMessageContentView.setText(text);
                }
            }
        }
        catch (MessagingException me) {
            // TODO
        }
    }
    mSourceMessageProcessed = true;
    mDraftNeedsSaving = false;
}
/**
 * Messaging callbacks for this compose screen: toggles the progress
 * indicator, hands the loaded source message to processSourceMessage on the
 * UI thread, and keeps the tracked message UIDs current when the backend
 * reassigns them.
 * NOTE(review): callbacks presumably arrive on a background thread — the
 * explicit runOnUiThread hop below supports that, but confirm against the
 * MessagingController contract.
 */
class Listener extends MessagingListener {
    @Override
    public void loadMessageForViewStarted(Account account, String folder, String uid) {
        mHandler.sendEmptyMessage(MSG_PROGRESS_ON);
    }
    @Override
    public void loadMessageForViewFinished(Account account, String folder, String uid, Message message) {
        mHandler.sendEmptyMessage(MSG_PROGRESS_OFF);
    }
    @Override
    public void loadMessageForViewBodyAvailable(Account account, String folder, String uid, final Message message) {
        mSourceMessage = message;
        // processSourceMessage touches views, so run it on the UI thread.
        runOnUiThread(new Runnable() {
            public void run() {
                processSourceMessage(message);
            }
        });
    }
    @Override
    public void loadMessageForViewFailed(Account account, String folder, String uid, final String message) {
        mHandler.sendEmptyMessage(MSG_PROGRESS_OFF);
        // TODO show network error
    }
    @Override
    public void messageUidChanged( Account account, String folder, String oldUid, String newUid) {
        // Only react to changes in our account and folder (a null mFolder
        // means the account's drafts folder).
        if (account.equals(mAccount) && (folder.equals(mFolder) || (mFolder == null && folder.equals(mAccount.getDraftsFolderName())))) {
            if (oldUid.equals(mDraftUid)) {
                mDraftUid = newUid;
            }
            if (oldUid.equals(mSourceMessageUid)) {
                mSourceMessageUid = newUid;
            }
            if (mSourceMessage != null && (oldUid.equals(mSourceMessage.getUid()))) {
                mSourceMessage.setUid(newUid);
            }
        }
    }
}
}
|
package editor;
import editor.search.MessageDisplay;
import editor.search.StandardLocalSearch;
import editor.undo.AtomicUndoManager;
import editor.util.BrowserUtil;
import editor.util.EditorUtilities;
import editor.util.ILabel;
import editor.util.LabelListPopup;
import editor.util.PlatformUtil;
import editor.util.SettleModalEventQueue;
import editor.util.TaskQueue;
import editor.util.TypeNameUtil;
import gw.config.CommonServices;
import gw.lang.Gosu;
import gw.lang.parser.IScriptPartId;
import gw.lang.parser.ScriptPartId;
import gw.lang.parser.ScriptabilityModifiers;
import gw.lang.parser.TypelessScriptPartId;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeRef;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.GosuClassPathThing;
import gw.lang.reflect.gs.IGosuProgram;
import gw.lang.reflect.java.JavaTypes;
import gw.util.StreamUtil;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import javax.swing.filechooser.FileFilter;
import javax.swing.text.AbstractDocument;
import java.awt.*;
import java.awt.datatransfer.Clipboard;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringReader;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
public class GosuPanel extends JPanel
{
private static final boolean DEBUG = true;   // NOTE(review): not referenced in the visible code — confirm before removing
private static final int MAX_TABS = 12;      // NOTE(review): not referenced in the visible code — confirm before removing
private SystemPanel _resultPanel;            // output/console area shown below the editor tabs
private SplitPane _outerSplitPane;           // project view (left) | editor split (right)
private SplitPane _splitPane;                // editor tabs (top) | result panel (bottom)
private JPanel _projectView;                 // left-hand project pane
private JFrame _parentFrame;                 // hosting frame; receives the menu bar
private boolean _bRunning;                   // NOTE(review): presumably set while a program runs — not visible here
private String _commandLine = "";            // NOTE(review): presumably program arguments — not visible here
private JTabbedPane _tabPane;                // one tab per open editor; tabs store their File under "_file"
private AtomicUndoManager _defaultUndoMgr;   // fallback undo manager for the panel
private TabSelectionHistory _history;        // tracks tab selection order for back/forward navigation
private JLabel _status;                      // status-bar message label
private JPanel _statPanel;                   // status bar (hidden until needed)
private boolean _initialFile;                // when true, storeProjectState() is suppressed
private String _project;                     // name/identifier of the currently open project
/**
 * Builds the editor panel inside the given frame and wires up the full UI.
 *
 * @param basicGosuEditor the hosting frame; its menu bar is set during configUI()
 */
public GosuPanel( JFrame basicGosuEditor )
{
  _parentFrame = basicGosuEditor;
  _defaultUndoMgr = new AtomicUndoManager( 10 );
  configUI();
}
/** @return the tab selection history used for editor back/forward navigation */
private TabSelectionHistory getTabSelectionHistory()
{
  return _history;
}
/**
 * Assembles the whole UI: tabbed editor pane with selection history,
 * result panel, project view, nested split panes, status bar, and the
 * frame's menu bar. Also installs global keystrokes once the event queue
 * settles. Construction order matters: the tab-change listener is added
 * before any tabs exist, so the getCurrentEditor() null check guards the
 * initial empty state.
 */
void configUI()
{
  setLayout( new BorderLayout() );
  _resultPanel = new SystemPanel();
  _tabPane = new JTabbedPane( JTabbedPane.TOP, JTabbedPane.SCROLL_TAB_LAYOUT );
  _history = new TabSelectionHistory( _tabPane );
  getTabSelectionHistory().setTabHistoryHandler( new EditorTabHistoryHandler() );
  // On every tab switch: flush the previous tab to disk, refresh title,
  // focus the editor, reparse, and persist the open-tab state.
  _tabPane.addChangeListener(
    e -> {
      savePreviousTab();
      if( getCurrentEditor() == null )
      {
        return;
      }
      updateTitle();
      getCurrentEditor().getEditor().requestFocus();
      parse();
      storeProjectState();
    } );
  // Middle-click closes a tab (but never the last remaining one).
  _tabPane.addMouseListener(
    new MouseAdapter()
    {
      @Override
      public void mouseReleased( MouseEvent e )
      {
        int iTab = _tabPane.getUI().tabForCoordinate( _tabPane, e.getX(), e.getY() );
        if( iTab >= 0 && _tabPane.getTabCount() > 1 )
        {
          if( SwingUtilities.isMiddleMouseButton( e ) )
          {
            _tabPane.removeTabAt( iTab );
          }
        }
      }
    } );
  _splitPane = new SplitPane( SwingConstants.VERTICAL, _tabPane, _resultPanel );
  _splitPane.setBorder( BorderFactory.createEmptyBorder( 0, 3, 3, 3 ) );
  _projectView = new JPanel();
  _projectView.setBackground( Color.white );
  _outerSplitPane = new SplitPane( SwingConstants.HORIZONTAL, _projectView, _splitPane );
  add( _outerSplitPane, BorderLayout.CENTER );
  JPanel statPanel = makeStatusBar();
  add( statPanel, BorderLayout.SOUTH );
  JMenuBar menuBar = makeMenuBar();
  _parentFrame.setJMenuBar( menuBar );
  handleMacStuff();
  setSplitPosition( 10 );
  // Defer keystroke mapping until the component hierarchy is realized.
  EventQueue.invokeLater( this::mapKeystrokes );
}
/**
 * Applies macOS-specific system properties (screen menu bar and the
 * application name in the apple menu). No-op on other platforms.
 */
private void handleMacStuff()
{
  if( !PlatformUtil.isMac() )
  {
    return;
  }
  System.setProperty( "apple.laf.useScreenMenuBar", "true" );
  System.setProperty( "com.apple.mrj.application.apple.menu.about.name", "Gosu Editor" );
}
/**
 * Removes all editor tabs, lets pending modal events settle, then
 * disposes of the tab selection history.
 */
public void clearTabs()
{
  _tabPane.removeAll();
  SettleModalEventQueue.instance().run();
  getTabSelectionHistory().dispose();
}
/**
 * Persists the project state to its properties file: the active tab, every
 * open tab's file path (keyed Tab.Open.A, Tab.Open.B, ...), and the local
 * classpath entries. Also saves the window layout. Skipped while the
 * initial file is being opened.
 *
 * @throws RuntimeException wrapping any IOException from writing the file
 */
private void storeProjectState()
{
  if( _initialFile )
  {
    return;
  }
  Properties props = new Properties();
  props.put( "Tab.Active", ((File)((JComponent)_tabPane.getSelectedComponent()).getClientProperty( "_file" )).getAbsolutePath() );
  for( int i = 0; i < _tabPane.getTabCount(); i++ )
  {
    File file = (File)((JComponent)_tabPane.getComponentAt( i )).getClientProperty( "_file" );
    // Letter suffix keeps the keys sortable in tab order when restored.
    props.put( "Tab.Open." + ((char)(i + 'A')), file.getAbsolutePath() );
  }
  List<String> localClasspath = getLocalClasspath();
  for( int i = 0; i < localClasspath.size(); i++ )
  {
    props.put( "Classpath.Entry" + i, localClasspath.get( i ) );
  }
  File userFile = EditorUtilities.getOrMakeProjectFile( getProject() );
  // try-with-resources: the original leaked the FileWriter on both the
  // success and the exception path.
  try( FileWriter fw = new FileWriter( userFile ) )
  {
    props.store( fw, "Gosu Project" );
  }
  catch( IOException e )
  {
    throw new RuntimeException( e );
  }
  EditorUtilities.saveLayoutState( _project );
}
/** @return the identifier of the currently open project */
private String getProject()
{
  return _project;
}
/**
 * Returns the classpath entries that belong to the project itself, i.e.
 * everything on Gosu's classpath except JRE and Gosu-tooling jars
 * (see isUpperLevelClasspath). Comparison is done on lower-cased paths.
 *
 * @return absolute paths of the project-local classpath entries
 */
static List<String> getLocalClasspath() {
  String javaHome = System.getProperty( "java.home" ).toLowerCase();
  List<String> localPath = new ArrayList<>();
  for( File entry : Gosu.getClasspath() )
  {
    String lowerPath = entry.getAbsolutePath().toLowerCase();
    if( !isUpperLevelClasspath( javaHome, lowerPath ) )
    {
      localPath.add( entry.getAbsolutePath() );
    }
  }
  return localPath;
}
/**
 * Decides whether a classpath entry is "upper level" (not part of the
 * project): anything under the JRE, inside a gosu-lang directory, or one
 * of the tooling jars (tools.jar, idea_rt.jar).
 *
 * @param javaHome lower-cased java.home path
 * @param filePath lower-cased absolute path of the classpath entry
 * @return true if the entry should be excluded from the local classpath
 */
private static boolean isUpperLevelClasspath( String javaHome, String filePath )
{
  if( filePath.startsWith( javaHome ) )
  {
    return true;
  }
  String sep = File.separator;
  return filePath.contains( sep + "gosu-lang" + sep )
         || filePath.endsWith( sep + "tools.jar" )
         || filePath.endsWith( sep + "idea_rt.jar" );
}
/**
 * Restores a previously stored project: loads its properties file, rebuilds
 * the Gosu classpath from Classpath.Entry* keys (skipping entries that no
 * longer exist), refreshes the type system, reopens every Tab.Open* file
 * that still exists, and finally activates the Tab.Active file (or an
 * untitled program if none was recorded).
 *
 * @param project the project identifier; its file must already exist
 * @throws IllegalStateException if the project file is missing
 * @throws RuntimeException wrapping any IOException while reading it
 */
public void restoreProjectState( String project )
{
  _project = project;
  File userFile = EditorUtilities.getOrMakeProjectFile( project );
  if( !userFile.isFile() )
  {
    throw new IllegalStateException();
  }
  Properties props = new Properties();
  // try-with-resources: the original leaked the FileReader.
  try( FileReader reader = new FileReader( userFile ) )
  {
    props.load( reader );
    Set<String> keys = props.stringPropertyNames();
    //noinspection SuspiciousToArrayCall
    String[] sortedKeys = keys.toArray( new String[keys.size()] );
    // Sorting restores tabs in their saved order (keys use letter suffixes).
    Arrays.sort( sortedKeys );
    ArrayList<File> classpath = new ArrayList<>();
    for( String cpEntry : sortedKeys )
    {
      if( cpEntry.startsWith( "Classpath.Entry" ) )
      {
        File file = new File( props.getProperty( cpEntry ) );
        if( file.exists() )
        {
          classpath.add( file );
        }
      }
    }
    if( classpath.size() > 0 )
    {
      Gosu.setClasspath( classpath );
    }
    TypeSystem.refresh( TypeSystem.getGlobalModule() );
    for( String strTab : sortedKeys )
    {
      if( strTab.startsWith( "Tab.Open" ) )
      {
        File file = new File( props.getProperty( strTab ) );
        if( file.isFile() )
        {
          openFile( file );
        }
      }
    }
    String strActiveFile = props.getProperty( "Tab.Active" );
    if( strActiveFile == null )
    {
      openFile( EditorUtilities.getOrMakeUntitledProgram( project ) );
    }
    else
    {
      openTab( new File( strActiveFile ) );
    }
  }
  catch( IOException e )
  {
    throw new RuntimeException( e );
  }
}
/**
 * Builds the (initially hidden) status bar: a Stop button on the left and
 * a status label filling the rest.
 *
 * @return the status bar panel, also kept in _statPanel
 */
private JPanel makeStatusBar()
{
  _statPanel = new JPanel( new BorderLayout() );
  _status = new JLabel();
  XPToolBarButton btnStop = new XPToolBarButton( "Stop" );
  btnStop.addActionListener( new StopActionHandler() );
  _statPanel.add( btnStop, BorderLayout.WEST );
  _statPanel.add( _status, BorderLayout.CENTER );
  _statPanel.setVisible( false );
  return _statPanel;
}
/** Schedules a reparse of the current editor on the EDT. */
private void parse()
{
  EventQueue.invokeLater( () -> getCurrentEditor().parse() );
}
/**
 * Saves the previously selected editor's file if that editor exists and
 * has unsaved changes. Called when the tab selection changes.
 */
private void savePreviousTab()
{
  GosuEditor previous = getTabSelectionHistory().getPreviousEditor();
  if( previous != null && isDirty( previous ) )
  {
    save( (File)previous.getClientProperty( "_file" ), previous );
  }
}
/**
 * Creates a fresh GosuEditor with its own undo manager, a dirty-state
 * listener, and (installed later on the EDT) a document filter.
 *
 * @return the configured editor, not yet added to the tab pane
 */
private GosuEditor createEditor()
{
  final GosuEditor editor = new GosuEditor( null,
                                            new AtomicUndoManager( 10000 ),
                                            ScriptabilityModifiers.SCRIPTABLE,
                                            new DefaultContextMenuHandler(),
                                            false, true );
  editor.setBorder( BorderFactory.createEmptyBorder() );
  addDirtyListener( editor );
  // Install the filter later so the document is fully constructed first.
  EventQueue.invokeLater( () -> ((AbstractDocument)editor.getEditor().getDocument()).setDocumentFilter( new GosuPanelDocumentFilter( editor ) ) );
  return editor;
}
/**
 * Marks the editor dirty whenever its undo manager reports a new change.
 * The listener remembers the last ChangeEvent so repeated notifications
 * for the same event do not re-mark the editor.
 *
 * @param editor the editor whose undo stream should drive the dirty flag
 */
private void addDirtyListener( final GosuEditor editor )
{
  editor.getUndoManager().addChangeListener(
    new ChangeListener()
    {
      // Last event seen; used to de-duplicate notifications.
      private ChangeEvent _lastChangeEvent;
      @Override
      public void stateChanged( ChangeEvent e )
      {
        if( e != _lastChangeEvent )
        {
          _lastChangeEvent = e;
          setDirty( editor, true );
        }
      }
    } );
}
/**
 * Configures the editor's mode flags from the file extension:
 * .gsx = enhancement, .gs = class, .gst = template, anything else = program.
 * Left untouched when the file or its name is null.
 *
 * @param file   the file being opened (may be null)
 * @param editor the editor to configure
 * @return the same editor, for chaining
 */
private GosuEditor initEditorMode( File file, GosuEditor editor )
{
  if( file != null && file.getName() != null )
  {
    String name = file.getName();
    // Exactly one of these is true; "program" is the catch-all default.
    boolean enhancement = name.endsWith( ".gsx" );
    boolean gosuClass = !enhancement && name.endsWith( ".gs" );
    boolean template = !enhancement && !gosuClass && name.endsWith( ".gst" );
    boolean program = !enhancement && !gosuClass && !template;
    editor.setProgram( program );
    editor.setTemplate( template );
    editor.setClass( gosuClass );
    editor.setEnhancement( enhancement );
  }
  return editor;
}
/**
 * Builds the full menu bar. Menus are added in display order:
 * File, Edit, Search, Code, Run, Window, Help.
 *
 * @return the populated menu bar (installed on the frame by configUI)
 */
private JMenuBar makeMenuBar()
{
  JMenuBar menuBar = new JMenuBar();
  makeFileMenu( menuBar );
  makeEditMenu( menuBar );
  makeSearchMenu( menuBar );
  makeCodeMenu( menuBar );
  makeRunMenu( menuBar );
  makeWindowMenu( menuBar );
  makeHelpMenu( menuBar );
  return menuBar;
}
/**
 * Builds the Help menu: mostly links opened in the system browser, plus
 * in-editor javadoc lookup at the caret (F1). Items appear in the order
 * they are added below.
 *
 * @param menuBar the menu bar to append the Help menu to
 */
private void makeHelpMenu( JMenuBar menuBar )
{
  JMenu helpMenu = new SmartMenu( "Help" );
  helpMenu.setMnemonic( 'H' );
  menuBar.add( helpMenu );
  JMenuItem gosuItem = new JMenuItem(
    new AbstractAction( "Gosu Online" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://gosu-lang.org" );
      }
    } );
  gosuItem.setMnemonic( 'G' );
  helpMenu.add( gosuItem );
  helpMenu.addSeparator();
  // The only non-browser item: context-sensitive doc lookup in the editor.
  JMenuItem contextItem = new JMenuItem(
    new AbstractAction( "Doc Lookup at Caret" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().displayJavadocHelp( getCurrentEditor().getDeepestLocationAtCaret() );
      }
    } );
  contextItem.setMnemonic( 'D' );
  contextItem.setAccelerator( KeyStroke.getKeyStroke( "F1" ) );
  helpMenu.add( contextItem );
  helpMenu.addSeparator();
  JMenuItem introItem = new JMenuItem(
    new AbstractAction( "Introduction" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://gosu-lang.org/intro.shtml" );
      }
    } );
  introItem.setMnemonic( 'I' );
  helpMenu.add( introItem );
  JMenuItem docsItem = new JMenuItem(
    new AbstractAction( "Documentation" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://gosu-lang.org/doc/index.html" );
      }
    } );
  docsItem.setMnemonic( 'D' );
  helpMenu.add( docsItem );
  JMenuItem historyItem = new JMenuItem(
    new AbstractAction( "History" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://gosu-lang.org/history.shtml" );
      }
    } );
  historyItem.setMnemonic( 'H' );
  helpMenu.add( historyItem );
  helpMenu.addSeparator();
  JMenuItem eclipseItem = new JMenuItem(
    new AbstractAction( "Eclipse" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://gosu-lang.org/eclipse.shtml" );
      }
    } );
  eclipseItem.setMnemonic( 'E' );
  helpMenu.add( eclipseItem );
  JMenuItem intellijItem = new JMenuItem(
    new AbstractAction( "IntelliJ" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://gosu-lang.org/editors.shtml" );
      }
    } );
  intellijItem.setMnemonic( 'I' );
  helpMenu.add( intellijItem );
  helpMenu.addSeparator();
  JMenuItem bugItem = new JMenuItem(
    new AbstractAction( "Report Bugs" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://code.google.com/p/gosu-lang/issues/entry" );
      }
    } );
  bugItem.setMnemonic( 'B' );
  helpMenu.add( bugItem );
  JMenuItem discussItem = new JMenuItem(
    new AbstractAction( "Discuss" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://groups.google.com/group/gosu-lang" );
      }
    } );
  discussItem.setMnemonic( 'D' );
  helpMenu.add( discussItem );
  JMenuItem otherItem = new JMenuItem(
    new AbstractAction( "Other Links" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        BrowserUtil.openURL( "http://gosu-lang.org/links.shtml" );
      }
    } );
  otherItem.setMnemonic( 'L' );
  helpMenu.add( otherItem );
}
/**
 * Builds the Window menu: editor navigation (previous/next/recent) and
 * tab closing (active / all others).
 *
 * @param menuBar the menu bar to append the Window menu to
 */
private void makeWindowMenu( JMenuBar menuBar )
{
  JMenu windowMenu = new SmartMenu( "Window" );
  windowMenu.setMnemonic( 'W' );
  menuBar.add( windowMenu );
  JMenuItem previousItem = new JMenuItem(
    new AbstractAction( "Previous Editor" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        goBackward();
      }
    } );
  previousItem.setMnemonic( 'P' );
  previousItem.setAccelerator( KeyStroke.getKeyStroke( "alt LEFT" ) );
  windowMenu.add( previousItem );
  JMenuItem nextItem = new JMenuItem(
    new AbstractAction( "Next Editor" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        goForward();
      }
    } );
  nextItem.setMnemonic( 'N' );
  nextItem.setAccelerator( KeyStroke.getKeyStroke( "alt RIGHT" ) );
  windowMenu.add( nextItem );
  windowMenu.addSeparator();
  JMenuItem recentItem = new JMenuItem(
    new AbstractAction( "Recent Editors" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        displayRecentViewsPopup();
      }
    } );
  recentItem.setMnemonic( 'R' );
  recentItem.setAccelerator( KeyStroke.getKeyStroke( "control E" ) );
  windowMenu.add( recentItem );
  windowMenu.addSeparator();
  // Saves before closing so no edits are lost.
  JMenuItem closeActiveItem = new JMenuItem(
    new AbstractAction( "Close Active Editor" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        saveIfDirty();
        closeActiveEditor();
      }
    } );
  closeActiveItem.setMnemonic( 'C' );
  closeActiveItem.setAccelerator( KeyStroke.getKeyStroke( "control F4" ) );
  windowMenu.add( closeActiveItem );
  JMenuItem closeOthersItem = new JMenuItem(
    new AbstractAction( "Close Others" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        closeOthers();
      }
    } );
  closeOthersItem.setMnemonic( 'O' );
  windowMenu.add( closeOthersItem );
}
/**
 * Builds the Code menu: code completion (Ctrl+Space), parameter info
 * (Ctrl+P), expression type (Ctrl+T), and Open Type (Ctrl+N).
 *
 * @param menuBar the menu bar to append the Code menu to
 */
private void makeCodeMenu( JMenuBar menuBar )
{
  JMenu codeMenu = new SmartMenu( "Code" );
  codeMenu.setMnemonic( 'd' );
  menuBar.add( codeMenu );
  JMenuItem completeItem = new JMenuItem(
    new AbstractAction( "Complete Code" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().handleCompleteCode();
      }
    } );
  completeItem.setMnemonic( 'C' );
  completeItem.setAccelerator( KeyStroke.getKeyStroke( "control SPACE" ) );
  codeMenu.add( completeItem );
  codeMenu.addSeparator();
  JMenuItem paraminfoItem = new JMenuItem(
    new AbstractAction( "Parameter Info" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        // Skip when a completion popup is already showing.
        if( !getCurrentEditor().isIntellisensePopupShowing() )
        {
          getCurrentEditor().displayParameterInfoPopup( getCurrentEditor().getEditor().getCaretPosition() );
        }
      }
    } );
  paraminfoItem.setMnemonic( 'P' );
  paraminfoItem.setAccelerator( KeyStroke.getKeyStroke( "control P" ) );
  codeMenu.add( paraminfoItem );
  JMenuItem typeItem = new JMenuItem(
    new AbstractAction( "Expression Type" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().displayTypeInfoAtCurrentLocation();
      }
    } );
  typeItem.setMnemonic( 'T' );
  typeItem.setAccelerator( KeyStroke.getKeyStroke( "control T" ) );
  codeMenu.add( typeItem );
  codeMenu.addSeparator();
  JMenuItem openTypeItem = new JMenuItem(
    new AbstractAction( "Open Type..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        GotoTypePopup.display();
      }
    } );
  openTypeItem.setMnemonic( 'O' );
  openTypeItem.setAccelerator( KeyStroke.getKeyStroke( "control N" ) );
  codeMenu.add( openTypeItem );
}
/** @return the editor in the selected tab, or null when no tab is selected */
public GosuEditor getCurrentEditor()
{
  return (GosuEditor)_tabPane.getSelectedComponent();
}
/**
 * Builds the Run menu: Run (F5), Stop (Ctrl+F2), and Clear output (Alt+C).
 * Run/Stop reuse the shared action-handler classes also used elsewhere
 * (e.g. the status bar's Stop button).
 *
 * @param menuBar the menu bar to append the Run menu to
 */
private void makeRunMenu( JMenuBar menuBar )
{
  JMenu runMenu = new SmartMenu( "Run" );
  runMenu.setMnemonic( 'R' );
  menuBar.add( runMenu );
  JMenuItem runItem = new JMenuItem( new RunActionHandler() );
  runItem.setMnemonic( 'R' );
  runItem.setAccelerator( KeyStroke.getKeyStroke( "F5" ) );
  runMenu.add( runItem );
  JMenuItem stopItem = new JMenuItem( new StopActionHandler() );
  stopItem.setMnemonic( 'S' );
  stopItem.setAccelerator( KeyStroke.getKeyStroke( "control F2" ) );
  runMenu.add( stopItem );
  runMenu.addSeparator();
  JMenuItem clearItem = new JMenuItem(
    new AbstractAction( "Clear" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        clearOutput();
      }
    } );
  clearItem.setMnemonic( 'C' );
  clearItem.setAccelerator( KeyStroke.getKeyStroke( "alt C" ) );
  runMenu.add( clearItem );
}
/**
 * Builds the Search menu: Find (Ctrl+F), Replace (Ctrl+R), repeat find
 * forward/backward (F3 / Shift+F3), and Go To Line (Ctrl+G).
 *
 * @param menuBar the menu bar to append the Search menu to
 */
private void makeSearchMenu( JMenuBar menuBar )
{
  JMenu searchMenu = new SmartMenu( "Search" );
  searchMenu.setMnemonic( 'S' );
  menuBar.add( searchMenu );
  JMenuItem findItem = new JMenuItem(
    new AbstractAction( "Find..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        // false = find only, no replace field.
        StandardLocalSearch.performLocalSearch( getCurrentEditor(), false );
      }
    } );
  findItem.setMnemonic( 'F' );
  findItem.setAccelerator( KeyStroke.getKeyStroke( "control F" ) );
  searchMenu.add( findItem );
  JMenuItem replaceItem = new JMenuItem(
    new AbstractAction( "Replace..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        StandardLocalSearch.performLocalSearch( getCurrentEditor(), true );
      }
    } );
  replaceItem.setMnemonic( 'R' );
  replaceItem.setAccelerator( KeyStroke.getKeyStroke( "control R" ) );
  searchMenu.add( replaceItem );
  JMenuItem nextItem = new JMenuItem(
    new AbstractAction( "Next" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        // Repeating requires a prior search in this editor.
        if( StandardLocalSearch.canRepeatFind( getCurrentEditor() ) )
        {
          StandardLocalSearch.repeatFind( getCurrentEditor() );
        }
      }
    } );
  nextItem.setMnemonic( 'N' );
  nextItem.setAccelerator( KeyStroke.getKeyStroke( "F3" ) );
  searchMenu.add( nextItem );
  JMenuItem previousItem = new JMenuItem(
    new AbstractAction( "Previous" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        if( StandardLocalSearch.canRepeatFind( getCurrentEditor() ) )
        {
          StandardLocalSearch.repeatFindBackwards( getCurrentEditor() );
        }
      }
    } );
  previousItem.setMnemonic( 'P' );
  previousItem.setAccelerator( KeyStroke.getKeyStroke( "shift F3" ) );
  searchMenu.add( previousItem );
  searchMenu.addSeparator();
  JMenuItem gotoLineItem = new JMenuItem(
    new AbstractAction( "Go To Line" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().displayGotoLinePopup();
      }
    } );
  gotoLineItem.setMnemonic( 'G' );
  gotoLineItem.setAccelerator( KeyStroke.getKeyStroke( "control G" ) );
  searchMenu.add( gotoLineItem );
}
/**
 * Builds the Edit menu: undo/redo, clipboard operations, deletion,
 * word/selection helpers, duplicate/join, and indent/outdent. All actions
 * operate on the current editor; undo/redo route through getUndoManager().
 *
 * @param menuBar the menu bar to append the Edit menu to
 */
private void makeEditMenu( JMenuBar menuBar )
{
  JMenu editMenu = new SmartMenu( "Edit" );
  editMenu.setMnemonic( 'E' );
  menuBar.add( editMenu );
  JMenuItem undoItem = new JMenuItem(
    new AbstractAction( "Undo" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        if( getUndoManager().canUndo() )
        {
          getUndoManager().undo();
        }
      }
      @Override
      public boolean isEnabled()
      {
        return getUndoManager().canUndo();
      }
    } );
  undoItem.setMnemonic( 'U' );
  undoItem.setAccelerator( KeyStroke.getKeyStroke( "control Z" ) );
  editMenu.add( undoItem );
  JMenuItem redoItem = new JMenuItem(
    new AbstractAction( "Redo" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        if( getUndoManager().canRedo() )
        {
          getUndoManager().redo();
        }
      }
      @Override
      public boolean isEnabled()
      {
        return getUndoManager().canRedo();
      }
    } );
  redoItem.setMnemonic( 'R' );
  redoItem.setAccelerator( KeyStroke.getKeyStroke( "control shift Z" ) );
  editMenu.add( redoItem );
  editMenu.addSeparator();
  JMenuItem cutItem = new JMenuItem(
    new AbstractAction( "Cut" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().clipCut( getClipboard() );
      }
    } );
  cutItem.setMnemonic( 't' );
  cutItem.setAccelerator( KeyStroke.getKeyStroke( "control X" ) );
  editMenu.add( cutItem );
  JMenuItem copyItem = new JMenuItem(
    new AbstractAction( "Copy" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().clipCopy( getClipboard() );
      }
    } );
  copyItem.setMnemonic( 'C' );
  copyItem.setAccelerator( KeyStroke.getKeyStroke( "control C" ) );
  editMenu.add( copyItem );
  JMenuItem pasteItem = new JMenuItem(
    new AbstractAction( "Paste" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().clipPaste( getClipboard() );
      }
    } );
  pasteItem.setMnemonic( 'P' );
  pasteItem.setAccelerator( KeyStroke.getKeyStroke( "control V" ) );
  editMenu.add( pasteItem );
  editMenu.addSeparator();
  JMenuItem deleteItem = new JMenuItem(
    new AbstractAction( "Delete" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().delete();
      }
    } );
  deleteItem.setMnemonic( 'D' );
  deleteItem.setAccelerator( KeyStroke.getKeyStroke( "DELETE" ) );
  editMenu.add( deleteItem );
  JMenuItem deletewordItem = new JMenuItem(
    new AbstractAction( "Delete Word" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().deleteWord();
      }
    } );
  deletewordItem.setMnemonic( 'e' );
  deletewordItem.setAccelerator( KeyStroke.getKeyStroke( "control BACKSPACE" ) );
  editMenu.add( deletewordItem );
  JMenuItem deleteWordForwardItem = new JMenuItem(
    new AbstractAction( "Delete Word Forward" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().deleteWordForwards();
      }
    } );
  deleteWordForwardItem.setMnemonic( 'F' );
  deleteWordForwardItem.setAccelerator( KeyStroke.getKeyStroke( "control DELETE" ) );
  editMenu.add( deleteWordForwardItem );
  editMenu.addSeparator();
  JMenuItem selectWord = new JMenuItem(
    new AbstractAction( "Select Word" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().selectWord();
      }
    } );
  selectWord.setMnemonic( 'W' );
  selectWord.setAccelerator( KeyStroke.getKeyStroke( "control W" ) );
  editMenu.add( selectWord );
  JMenuItem narraowSelection = new JMenuItem(
    new AbstractAction( "Narrow Selection" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().narrowSelectWord();
      }
    } );
  narraowSelection.setMnemonic( 'N' );
  narraowSelection.setAccelerator( KeyStroke.getKeyStroke( "control shift W" ) );
  editMenu.add( narraowSelection );
  editMenu.addSeparator();
  JMenuItem duplicateItem = new JMenuItem(
    new AbstractAction( "Duplicate" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().duplicate();
      }
    } );
  duplicateItem.setAccelerator( KeyStroke.getKeyStroke( "control D" ) );
  editMenu.add( duplicateItem );
  JMenuItem joinItem = new JMenuItem(
    new AbstractAction( "Join Lines" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        getCurrentEditor().joinLines();
      }
    } );
  joinItem.setAccelerator( KeyStroke.getKeyStroke( "control J" ) );
  editMenu.add( joinItem );
  JMenuItem indentItem = new JMenuItem(
    new AbstractAction( "Indent Selection" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        // TAB is also used by completion popups; only indent when none is showing.
        if( !getCurrentEditor().isIntellisensePopupShowing() )
        {
          getCurrentEditor().handleBulkIndent( false );
        }
      }
    } );
  indentItem.setMnemonic( 'I' );
  indentItem.setAccelerator( KeyStroke.getKeyStroke( "TAB" ) );
  editMenu.add( indentItem );
  JMenuItem outdentItem = new JMenuItem(
    new AbstractAction( "Outdent Selection" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        if( !getCurrentEditor().isIntellisensePopupShowing() )
        {
          getCurrentEditor().handleBulkIndent( true );
        }
      }
    } );
  outdentItem.setMnemonic( 'O' );
  outdentItem.setAccelerator( KeyStroke.getKeyStroke( "shift TAB" ) );
  editMenu.add( outdentItem );
}
/**
 * Builds the File menu: project operations (new/open), file operations
 * (new/open/save/save-as), classpath configuration, and exit.
 *
 * @param menuBar the menu bar to append the File menu to
 */
private void makeFileMenu( JMenuBar menuBar )
{
  JMenu fileMenu = new SmartMenu( "File" );
  fileMenu.setMnemonic( 'F' );
  menuBar.add( fileMenu );
  JMenuItem newProjectItem = new JMenuItem(
    new AbstractAction( "New Project..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        newProject();
      }
    } );
  newProjectItem.setMnemonic( 'P' );
  fileMenu.add( newProjectItem );
  JMenuItem openProjectItem = new JMenuItem(
    new AbstractAction( "Open Project..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        openProject();
      }
    } );
  openProjectItem.setMnemonic( 'J' );
  fileMenu.add( openProjectItem );
  fileMenu.addSeparator();
  JMenuItem newItem = new JMenuItem(
    new AbstractAction( "New..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        newSourceFile();
      }
    } );
  newItem.setMnemonic( 'N' );
  fileMenu.add( newItem );
  JMenuItem openItem = new JMenuItem(
    new AbstractAction( "Open..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        openFile();
      }
    } );
  openItem.setMnemonic( 'O' );
  fileMenu.add( openItem );
  JMenuItem saveItem = new JMenuItem(
    new AbstractAction( "Save" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        save();
      }
    } );
  saveItem.setMnemonic( 'S' );
  saveItem.setAccelerator( KeyStroke.getKeyStroke( "control S" ) );
  fileMenu.add( saveItem );
  fileMenu.addSeparator();
  JMenuItem saveAsItem = new JMenuItem(
    new AbstractAction( "Save As..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        saveAs();
      }
    } );
  saveAsItem.setMnemonic( 'A' );
  fileMenu.add( saveAsItem );
  fileMenu.addSeparator();
  JMenuItem classpathItem = new JMenuItem(
    new AbstractAction( "Classpath..." )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        displayClasspath();
      }
    } );
  classpathItem.setMnemonic( 'h' );
  fileMenu.add( classpathItem );
  fileMenu.addSeparator();
  JMenuItem exitItem = new JMenuItem(
    new AbstractAction( "Exit" )
    {
      @Override
      public void actionPerformed( ActionEvent e )
      {
        exit();
      }
    } );
  exitItem.setMnemonic( 'x' );
  fileMenu.add( exitItem );
}
// Closes the tab that currently has focus; closing the very last tab is
// treated as a request to exit the application.
private void closeActiveEditor()
{
  if( _tabPane.getTabCount() <= 1 )
  {
    exit();
    return;
  }
  _tabPane.removeTabAt( _tabPane.getSelectedIndex() );
}
/**
 * Closes every tab except the currently selected one.
 * <p>
 * Iterates backwards and compares components rather than indexes: removing a
 * tab shifts the indexes of all tabs after it (and can shift the selected
 * index), so the original forward index-based loop skipped tabs and could
 * close the selected tab.
 */
private void closeOthers()
{
  // Hide the pane while removing tabs to avoid repaint flicker.
  _tabPane.setVisible( false );
  try
  {
    java.awt.Component keep = _tabPane.getSelectedComponent();
    for( int i = _tabPane.getTabCount() - 1; i >= 0; i-- )
    {
      if( _tabPane.getComponentAt( i ) != keep )
      {
        _tabPane.removeTabAt( i );
      }
    }
  }
  finally
  {
    _tabPane.setVisible( true );
  }
}
// Pops up the classpath editing dialog, rooted at the working directory.
private void displayClasspath()
{
  new ClasspathDialog( new File( "." ) ).setVisible( true );
}
// Exits the application, first giving the user a chance to save unsaved
// changes; a cancelled save aborts the exit.
public void exit()
{
  if( !saveIfDirty() )
  {
    return;
  }
  System.exit( 0 );
}
// Moves the editor/output divider; silently ignored until the split pane exists.
public void setSplitPosition( int iPos )
{
  if( _splitPane == null )
  {
    return;
  }
  _splitPane.setPosition( iPos );
}
/** Returns the editor in the currently selected tab. */
public GosuEditor getGosuEditor()
{
  return getCurrentEditor();
}
/**
 * Installs the panel-wide keyboard shortcuts: undo/redo (modern and
 * alt-backspace style), run, and clear-and-run.
 */
private void mapKeystrokes()
{
  // Undo/Redo
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_Z, InputEvent.CTRL_MASK ),
                "Undo", new UndoActionHandler() );
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_Z, InputEvent.CTRL_MASK | InputEvent.SHIFT_MASK ),
                "Redo", new RedoActionHandler() );
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_Y, InputEvent.CTRL_MASK ),
                "Redo2", new RedoActionHandler() );

  // Old-style undo/redo
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_BACK_SPACE, InputEvent.ALT_MASK ),
                "UndoOldStyle", new UndoActionHandler() );
  // Key typo fixed ("RetoOldStyle" -> "RedoOldStyle"); the string is only an
  // internal input/action-map key set by mapKeystroke, so the rename is safe.
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_BACK_SPACE, InputEvent.ALT_MASK | InputEvent.SHIFT_MASK ),
                "RedoOldStyle", new RedoActionHandler() );

  // Run
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_F5, 0 ),
                "Run", new RunActionHandler() );
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_ENTER, InputEvent.CTRL_MASK ),
                "Run", new RunActionHandler() );

  // Clear and Run
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_X, InputEvent.ALT_MASK ),
                "ClearAndRun", new ClearAndRunActionHandler() );
  mapKeystroke( KeyStroke.getKeyStroke( KeyEvent.VK_F2, 0 ),
                "ClearAndRun", new ClearAndRunActionHandler() ); // dlank prefers a single keystroke for this action, please leave this unless you need F2 for something else
}
// Binds the keystroke to the given action on this panel (active while an
// ancestor of the focused component). If the keystroke already has an entry
// in the input map, its existing key is reused so this action replaces the
// old one; otherwise strCmd becomes the new action-map key.
private void mapKeystroke( KeyStroke ks, String strCmd, Action action )
{
  enableInputMethods( true );
  enableEvents( AWTEvent.KEY_EVENT_MASK );
  InputMap imap = getInputMap( JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT );
  Object key = imap.get( ks );
  if( key == null )
  {
    key = strCmd;
    imap.put( ks, key );
  }
  getActionMap().put( key, action );
}
// Reattaches the undo-tracking listener to the current editor's document so
// edits feed the active undo manager.
void resetChangeHandler()
{
  ScriptChangeHandler handler = new ScriptChangeHandler( getUndoManager() );
  handler.establishUndoableEditListener( getCurrentEditor() );
}
// Prompts the user to pick a Gosu source file, starting in the current
// file's directory, and opens the selection in an editor tab.
public void openFile()
{
  File startDir = getCurrentFile().getParentFile();
  JFileChooser chooser = new JFileChooser( startDir );
  chooser.setDialogTitle( "Open Gosu File" );
  chooser.setDialogType( JFileChooser.OPEN_DIALOG );
  chooser.setCurrentDirectory( startDir );
  chooser.setFileFilter(
    new FileFilter()
    {
      public boolean accept( File f )
      {
        return f.isDirectory() || isValidGosuSourceFile( f );
      }

      public String getDescription()
      {
        return "Gosu source file (*.gsp; *.gs; *.gsx; *.gst)";
      }
    } );
  if( chooser.showOpenDialog( editor.util.EditorUtilities.frameForComponent( this ) ) == JFileChooser.APPROVE_OPTION )
  {
    openFile( chooser.getSelectedFile() );
  }
}
/** Opens the given file in an editor tab, deriving its script part id first. */
public void openFile( final File file )
{
  openFile( makePartId( file ), file );
}
// Resolves a script-part id for the file. The type system's global module
// must be active while deriving the class name, hence the push/pop pair.
// A null file yields a "New Program" id; non-Gosu extensions yield an
// "Unknown Resource Type" id.
public static IScriptPartId makePartId( File file )
{
  TypeSystem.pushGlobalModule();
  try
  {
    if( file == null )
    {
      return new ScriptPartId( "New Program", null );
    }
    String name = file.getName();
    boolean isGosuSource =
      name.endsWith( ".gs" ) || name.endsWith( ".gsx" ) ||
      name.endsWith( ".gsp" ) || name.endsWith( ".gst" );
    if( !isGosuSource )
    {
      return new ScriptPartId( "Unknown Resource Type", null );
    }
    return new ScriptPartId( TypeNameUtil.getClassNameForFile( file ), null );
  }
  finally
  {
    TypeSystem.popGlobalModule();
  }
}
// Opens the startup file. The _initialFile flag is raised for the duration so
// downstream handlers can distinguish this from a user-initiated open. When
// no file is given, an untitled buffer is opened only if no tabs exist yet.
public void openInitialFile( IScriptPartId partId, File file )
{
  _initialFile = true;
  try
  {
    boolean nothingOpen = _tabPane.getTabCount() == 0;
    if( file != null || nothingOpen )
    {
      openFile( partId, file );
    }
  }
  finally
  {
    _initialFile = false;
  }
}
// Opens the given file (or an untitled buffer when file is null) in a new
// editor tab, reusing an existing tab if the file is already open.
// NOTE(review): the call order below matters -- the editor is added to the
// tab pane and selected before its content is read and parsed.
private void openFile( IScriptPartId partId, File file )
{
  // Already open? openTab selects the existing tab.
  if( openTab( file ) )
  {
    return;
  }
  final GosuEditor editor = createEditor();
  if( partId == null )
  {
    // No script part: treat as an untitled program buffer
    partId = new TypelessScriptPartId( "Untitled.gsp" );
  }
  initEditorMode( file, editor );
  file = file == null ? new File( "Untitled.gsp" ) : file;
  editor.putClientProperty( "_file", file );
  // Make room for the new tab when at the tab limit
  removeLruTab();
  _tabPane.addTab( file.getName(), editor );
  _tabPane.setSelectedComponent( editor );
  editor.getEditor().requestFocus();
  // Load the file contents; a not-yet-existing file starts empty
  String strSource;
  if( !file.exists() )
  {
    strSource = "";
  }
  else
  {
    try
    {
      strSource = StreamUtil.getContent( StreamUtil.getInputStreamReader( new FileInputStream( file ) ) );
    }
    catch( IOException e )
    {
      throw new RuntimeException( e );
    }
  }
  if( _parentFrame != null )
  {
    updateTitle();
  }
  try
  {
    editor.read( partId, strSource, "" );
    resetChangeHandler();
    // Parse after pending UI events so the editor component is fully realized
    EventQueue.invokeLater(
      () -> {
        editor.parse();
        editor.getEditor().requestFocus();
      } );
  }
  catch( Throwable t )
  {
    throw new RuntimeException( t );
  }
}
private void removeLruTab()
{
if( _tabPane.getTabCount() < MAX_TABS )
{
return;
}
List<ITabHistoryContext> mruList = getTabSelectionHistory().getMruList();
for( int i = mruList.size() - 1; i >= 0; i
{
ITabHistoryContext tabCtx = mruList.get( i );
File file = (File)tabCtx.getContentId();
GosuEditor editor = findTab( file );
if( editor != null )
{
closeTab( file );
}
}
}
// Reflects the current file's absolute path (or "Untitled") in the frame title.
private void updateTitle()
{
  File file = getCurrentFile();
  String displayName = file == null ? "Untitled" : file.getAbsolutePath();
  _parentFrame.setTitle( "[" + displayName + "] - " + "Gosu Editor" );
}
// Selects and focuses the existing tab for the file, if any.
// Returns false when the file isn't open in any tab.
private boolean openTab( File file )
{
  GosuEditor existing = findTab( file );
  if( existing == null )
  {
    return false;
  }
  _tabPane.setSelectedComponent( existing );
  existing.getEditor().requestFocus();
  return true;
}
// Locates the editor tab opened for the given file; null-safe, returns null
// when the file is null or not open in any tab.
private GosuEditor findTab( File file )
{
  if( file != null )
  {
    for( int i = 0, n = _tabPane.getTabCount(); i < n; i++ )
    {
      GosuEditor tabEditor = (GosuEditor)_tabPane.getComponentAt( i );
      if( tabEditor != null && file.equals( tabEditor.getClientProperty( "_file" ) ) )
      {
        return tabEditor;
      }
    }
  }
  return null;
}
// Associates the file with the current editor tab and updates the tab title.
private void setCurrentFile( File file )
{
  getCurrentEditor().putClientProperty( "_file", file );
  _tabPane.setTitleAt( _tabPane.getSelectedIndex(), file.getName() );
}
// Returns the file backing the current editor tab, or null for an untitled tab.
private File getCurrentFile()
{
  return (File)getCurrentEditor().getClientProperty( "_file" );
}
/**
 * Saves the current editor buffer, prompting for a destination when the
 * buffer is untitled. Returns false if the user cancels the chooser or the
 * file cannot be created on disk.
 */
public boolean save()
{
  if( getCurrentFile() == null )
  {
    JFileChooser fc = new JFileChooser();
    fc.setDialogTitle( "Save Gosu File" );
    fc.setDialogType( JFileChooser.SAVE_DIALOG );
    fc.setCurrentDirectory( new File( "." ) );
    fc.setFileFilter( new FileFilter()
    {
      public boolean accept( File f )
      {
        return f.isDirectory() || isValidGosuSourceFile( f );
      }

      public String getDescription()
      {
        return "Gosu source file (*.gsp; *.gs; *.gsx; *.gst)";
      }
    } );
    // This is a SAVE_DIALOG, so show the save dialog (the original called
    // showOpenDialog, which labels the approve button "Open").
    int returnVal = fc.showSaveDialog( editor.util.EditorUtilities.frameForComponent( this ) );
    if( returnVal != JFileChooser.APPROVE_OPTION )
    {
      return false;
    }
    setCurrentFile( fc.getSelectedFile() );
  }
  // Delegate to the file-based overload, which creates the file on disk if
  // needed and then writes and reloads the type. This removes the
  // create-file logic the original duplicated inline here.
  return save( getCurrentFile(), getCurrentEditor() );
}
// Prints the message to stdout only when the DEBUG flag is on.
private void debug( String s )
{
  if( DEBUG )
  {
    System.out.println( s );
  }
}
// Saves the editor's buffer to the given file, creating the file on disk
// first if necessary. Returns false (after notifying the user) when the file
// cannot be created.
public boolean save( File file, GosuEditor editor )
{
  if( !file.exists() )
  {
    String errorSuffix = "";
    boolean created;
    try
    {
      created = file.createNewFile();
    }
    catch( IOException e )
    {
      // Report the failure via the dialog below rather than aborting here.
      e.printStackTrace();
      errorSuffix = " : " + e.getMessage();
      created = false;
    }
    if( !created )
    {
      JOptionPane.showMessageDialog( this, "Could not create file " + file.getName() + errorSuffix );
      return false;
    }
  }
  saveAndReloadType( file, editor );
  return true;
}
/**
 * Persists the editor contents to the file, clears the dirty flag, then
 * refreshes the type system so the saved changes are picked up.
 * <p>
 * The output stream is opened with try-with-resources: the original leaked
 * the FileOutputStream whenever the copy threw.
 */
private void saveAndReloadType( File file, GosuEditor editor )
{
  try( FileOutputStream out = new FileOutputStream( file ) )
  {
    StreamUtil.copy( new StringReader( editor.getText() ), out );
  }
  catch( IOException ex )
  {
    throw new RuntimeException( ex );
  }
  setDirty( editor, false );
  reload( editor.getScriptPart().getContainingType() );
}
// Refreshes the given type in the type system so subsequent runs see the
// newly saved source. No-op for a null type (e.g. an unparsed buffer).
private void reload( IType type )
{
  if( type == null )
  {
    return;
  }
  TypeSystem.refresh( (ITypeRef)type );
  // Disabled code below previously reloaded disposed classes on the EDT;
  // kept for reference.
  // EventQueue.invokeLater(
  // new Runnable()
  // public void run()
  // TypeSystem.getGosuClassLoader().reloadDisposedClasses();
}
// Saves the current editor only when it has unsaved changes. Returns false
// only when a save was needed and did not complete.
public boolean saveIfDirty()
{
  return !isDirty( getCurrentEditor() ) || save();
}
/**
 * Prompts for the location and name of a new Gosu source file, validates the
 * choice, and creates/opens it. Rejects existing files and non-Gosu names.
 */
public void newSourceFile()
{
  JFileChooser fc = new JFileChooser( getCurrentFile() );
  fc.setDialogTitle( "New Gosu File" );
  fc.setDialogType( JFileChooser.SAVE_DIALOG );
  fc.setCurrentDirectory( getCurrentFile() != null ? getCurrentFile().getParentFile() : new File( "." ) );
  fc.setFileFilter(
    new FileFilter()
    {
      public boolean accept( File f )
      {
        return f.isDirectory() || isValidGosuSourceFile( f );
      }

      public String getDescription()
      {
        return "Gosu source file (*.gsp; *.gs; *.gsx; *.gst)";
      }
    } );
  // This is a SAVE_DIALOG, so show the save dialog (the original called
  // showOpenDialog, which labels the approve button "Open").
  int returnVal = fc.showSaveDialog( editor.util.EditorUtilities.frameForComponent( this ) );
  if( returnVal == JFileChooser.APPROVE_OPTION )
  {
    File selectedFile = fc.getSelectedFile();
    if( isValidGosuSourceFile( selectedFile ) )
    {
      if( selectedFile.exists() )
      {
        MessageDisplay.displayError( "File: " + selectedFile.getName() + " already exists. Please select a unique name." );
      }
      else
      {
        saveIfDirty();
        createSourceFile( selectedFile );
      }
    }
    else
    {
      MessageDisplay.displayError( "File: " + selectedFile.getName() + " is not a valid Gosu source file name." );
    }
  }
}
// Prompts for a project name, creates its directory under the projects root,
// and switches the workspace to the new project.
public void newProject()
{
  String project = JOptionPane.showInputDialog( "Project Name" );
  if( project == null )
  {
    // user cancelled
    return;
  }
  File dir = new File( EditorUtilities.getProjectsDir(), project );
  if( !dir.mkdirs() )
  {
    // could not create (or already exists); leave the current state alone
    return;
  }
  clearTabs();
  EventQueue.invokeLater( () -> restoreProjectState( project ) );
}
// Shows a popup listing the known projects; choosing one clears the open
// tabs and restores that project's saved state.
public void openProject()
{
  List<ILabel> projectLabels = EditorUtilities.getProjects().stream()
    .map( name -> (ILabel)new ILabel()
    {
      public String getDisplayName()
      {
        return name;
      }

      public Icon getIcon( int iTypeFlags )
      {
        return null;
      }
    } )
    .collect( Collectors.toList() );
  LabelListPopup popup = new LabelListPopup( "Projects", projectLabels, "No projects" );
  popup.addNodeChangeListener(
    e -> {
      clearTabs();
      String project = ((ILabel)e.getSource()).getDisplayName();
      File projectDir = EditorUtilities.getProjectDir( project );
      if( projectDir != null )
      {
        EventQueue.invokeLater( () -> restoreProjectState( project ) );
      }
    } );
  popup.show( this, getWidth() / 2 - 100, getHeight() / 2 - 200 );
}
// True when the file name (case-insensitive) carries one of the recognized
// Gosu source extensions: .gs, .gsx, .gst, .gsp.
private boolean isValidGosuSourceFile( File file )
{
  if( file == null )
  {
    return false;
  }
  String name = file.getName().toLowerCase();
  for( String ext : new String[] {".gs", ".gsx", ".gst", ".gsp"} )
  {
    if( name.endsWith( ext ) )
    {
      return true;
    }
  }
  return false;
}
// Creates the file on disk with a starter stub for its type, registers the
// new file with the type system, and opens it in a tab. If the stub is
// refused (user declined the classpath warning), the file is removed again.
private void createSourceFile( File selectedFile )
{
  try
  {
    boolean isNewFile = selectedFile.createNewFile();
    if( isNewFile && !writeStub( selectedFile ) )
    {
      //noinspection ResultOfMethodCallIgnored
      selectedFile.delete();
      return;
    }
  }
  catch( IOException e )
  {
    throw new RuntimeException( e );
  }
  TypeSystem.created( CommonServices.getFileSystem().getIFile( selectedFile ) );
  TypeSystem.refresh( TypeSystem.getGlobalModule() );
  openFile( selectedFile );
}
// Writes a starter stub matching the file's extension. Programs (.gsp) and
// unrecognized extensions get no stub and are treated as success.
private boolean writeStub( File file )
{
  String name = file.getName().toLowerCase();
  if( name.endsWith( ".gs" ) )
  {
    return writeClassStub( file );
  }
  if( name.endsWith( ".gsx" ) )
  {
    return writeEnhancementStub( file );
  }
  if( name.endsWith( ".gst" ) )
  {
    return writeTempateStub( file );
  }
  return true;
}
/**
 * Writes a starter stub for a new Gosu class (.gs) file. Returns false if the
 * user declines the classpath warning or the file has no parent directory.
 */
private boolean writeClassStub( File file )
{
  // Derive the qualified class name; when the file isn't on the classpath,
  // confirm with the user and fall back to parent-directory-as-package.
  String strName = TypeNameUtil.getClassNameForFile( file );
  if( strName == null )
  {
    int iOption = displayTypeWarning( file );
    if( iOption != JOptionPane.YES_OPTION )
    {
      return false;
    }
    if( file.getParentFile() == null )
    {
      MessageDisplay.displayError( "A class must have a parent directory" );
      return false;
    }
    strName = file.getParentFile().getName() + '.' + file.getName().substring( 0, file.getName().lastIndexOf( '.' ) );
  }
  int iLastDot = strName.lastIndexOf( '.' );
  String strRelativeName = strName.substring( iLastDot + 1 );
  String strPackage = iLastDot > 0 ? strName.substring( 0, iLastDot ) : "";
  // try-with-resources closes the writer even if write() throws; the
  // original leaked the FileWriter on failure.
  try( FileWriter writer = new FileWriter( file ) )
  {
    String eol = System.getProperty( "line.separator" );
    writer.write( "package " + strPackage + eol +
                  eol +
                  "class " + strRelativeName + " {" + eol +
                  eol +
                  "}" );
    writer.flush();
  }
  catch( IOException e )
  {
    throw new RuntimeException( e );
  }
  return true;
}
/**
 * Writes a starter stub for a new Gosu template (.gst) file. Returns false if
 * the user declines the classpath warning or the file has no parent directory.
 * (Method name typo "Tempate" is preserved; callers reference it.)
 */
private boolean writeTempateStub( File file )
{
  String strName = TypeNameUtil.getClassNameForFile( file );
  if( strName == null )
  {
    // Not on the classpath: confirm with the user and fall back to using the
    // parent directory name as the package.
    int iOption = displayTypeWarning( file );
    if( iOption != JOptionPane.YES_OPTION )
    {
      return false;
    }
    if( file.getParentFile() == null )
    {
      MessageDisplay.displayError( "A template must have a parent directory" );
      return false;
    }
    strName = file.getParentFile().getName() + '.' + file.getName().substring( 0, file.getName().lastIndexOf( '.' ) );
  }
  int iLastDot = strName.lastIndexOf( '.' );
  String strRelativeName = strName.substring( iLastDot + 1 );
  // try-with-resources closes the writer even if write() throws; the
  // original leaked the FileWriter on failure.
  try( FileWriter writer = new FileWriter( file ) )
  {
    String eol = System.getProperty( "line.separator" );
    writer.write( "<%@ params( myParam: String ) %>" + eol +
                  eol +
                  "The content of my param is: ${myParam}" + eol +
                  eol +
                  "Note you can render this template from a class or program" + eol +
                  "simply by calling one of its render methods:" + eol +
                  eol +
                  "    " + strRelativeName + ".renderToString( \"wow\" )" );
    writer.flush();
  }
  catch( IOException e )
  {
    throw new RuntimeException( e );
  }
  return true;
}
/**
 * Warns the user that the new file isn't on the current classpath and asks
 * whether to create it anyway. Returns a JOptionPane YES/NO option constant.
 * (User-facing grammar fixed: "it's parent directory" -> "its".)
 */
private int displayTypeWarning( File file )
{
  return MessageDisplay.displayConfirmation( "<html>The class " + file.getName() + " is not on the current classpath. " +
                                             "Create the class anyway and put its parent directory in the classpath? " +
                                             "<br><br>" +
                                             "WARNING!!! Ensure that the parent directory does not cover other files and directories you don't want in your class path." +
                                             "<br><br>" +
                                             "Consider creating a \"src\" directory and create package folders in there.", JOptionPane.YES_NO_OPTION );
}
/**
 * Writes a starter stub for a new Gosu enhancement (.gsx) file. Returns false
 * if the user declines the classpath warning or the file has no parent
 * directory.
 */
private boolean writeEnhancementStub( File file )
{
  String strName = TypeNameUtil.getClassNameForFile( file );
  if( strName == null )
  {
    // Not on the classpath: confirm with the user and fall back to using the
    // parent directory name as the package.
    int iOption = displayTypeWarning( file );
    if( iOption != JOptionPane.YES_OPTION )
    {
      return false;
    }
    if( file.getParentFile() == null )
    {
      MessageDisplay.displayError( "A class must have a parent directory" );
      return false;
    }
    strName = file.getParentFile().getName() + '.' + file.getName().substring( 0, file.getName().lastIndexOf( '.' ) );
  }
  int iLastDot = strName.lastIndexOf( '.' );
  String strRelativeName = strName.substring( iLastDot + 1 );
  String strPackage = iLastDot > 0 ? strName.substring( 0, iLastDot ) : "";
  // try-with-resources closes the writer even if write() throws; the
  // original leaked the FileWriter on failure.
  try( FileWriter writer = new FileWriter( file ) )
  {
    String eol = System.getProperty( "line.separator" );
    writer.write( "package " + strPackage + eol +
                  eol +
                  "enhancement " + strRelativeName + " : Object //## todo: change me " + eol +
                  "{" + eol +
                  eol +
                  "}" );
    writer.flush();
  }
  catch( IOException e )
  {
    throw new RuntimeException( e );
  }
  return true;
}
/**
 * Prompts for a destination file, then saves the current buffer there and
 * rebinds the tab to the new file.
 */
public void saveAs()
{
  JFileChooser fc = new JFileChooser( getCurrentFile() );
  fc.setDialogTitle( "Save Gosu File" );
  fc.setDialogType( JFileChooser.SAVE_DIALOG );
  fc.setCurrentDirectory( getCurrentFile() != null ? getCurrentFile().getParentFile() : new File( "." ) );
  fc.setFileFilter(
    new FileFilter()
    {
      public boolean accept( File f )
      {
        return f.isDirectory() || isValidGosuSourceFile( f );
      }

      public String getDescription()
      {
        return "Gosu source file (*.gsp; *.gs; *.gsx; *.gst)";
      }
    } );
  // This is a SAVE_DIALOG, so show the save dialog (the original called
  // showOpenDialog, which labels the approve button "Open").
  int returnVal = fc.showSaveDialog( editor.util.EditorUtilities.frameForComponent( this ) );
  if( returnVal == JFileChooser.APPROVE_OPTION )
  {
    setCurrentFile( fc.getSelectedFile() );
    save();
  }
}
// Saves the current program, then runs it on a background task queue using a
// classloader built from the classpath URLs above the gosu protocol layer.
// The Runner class is loaded reflectively through that loader so the program
// executes against freshly loaded types; its output is printed on the EDT
// when the run completes. No-op while a run is already in progress.
void execute()
{
  try
  {
    if( _bRunning )
    {
      return;
    }
    //## todo: Run in separate process (or classloader) so that changes can take effect
    saveAndReloadType( getCurrentFile(), getCurrentEditor() );
    ClassLoader loader = getClass().getClassLoader();
    URLClassLoader runLoader = new URLClassLoader( getAllUrlsAboveGosuclassProtocol( (URLClassLoader)loader ), loader.getParent() );
    TaskQueue queue = TaskQueue.getInstance( "_execute_gosu" );
    addBusySignal();
    queue.postTask(
      () -> {
        IGosuProgram program = (IGosuProgram)getCurrentEditor().getParsedClass();
        try
        {
          // Load Runner via the run loader so the program runs in isolation
          Class<?> runnerClass = Class.forName( "editor.GosuPanel$Runner", true, runLoader );
          try
          {
            String result = (String)runnerClass.getMethod( "run", String.class, List.class ).invoke( null, program.getName(), Gosu.getClasspath() );
            EventQueue.invokeLater(
              () -> {
                removeBusySignal();
                if( result != null )
                {
                  System.out.print( result );
                }
              } );
          }
          finally {
            // Re-register our protocol handler in case the run replaced it
            GosuClassPathThing.addOurProtocolHandler();
          }
        }
        catch( Exception e )
        {
          throw new RuntimeException( e );
        }
      } );
  }
  catch( Throwable t )
  {
    editor.util.EditorUtilities.handleUncaughtException( t );
  }
}
/**
 * Collects the loader's classpath URLs up to (but not including) the first
 * URL whose protocol contains "gosu"; everything from that point on belongs
 * to the Gosu class-protocol layer and is excluded.
 * <p>
 * The original scanned the entire array with a boolean flag that could never
 * reset; breaking at the first gosu URL is equivalent and clearer.
 */
private URL[] getAllUrlsAboveGosuclassProtocol( URLClassLoader loader )
{
  List<URL> urls = new ArrayList<>();
  for( URL url : loader.getURLs() )
  {
    if( url.getProtocol().contains( "gosu" ) )
    {
      break;
    }
    urls.add( url );
  }
  return urls.toArray( new URL[0] );
}
// Entry point invoked reflectively (from execute(), in a separate
// classloader) to initialize Gosu, evaluate the named program, and return
// its result coerced to a String.
public static class Runner
{
  public static String run( String programName, List<File> classpath )
  {
    Gosu.init( classpath );
    GosuClassPathThing.addOurProtocolHandler();
    GosuClassPathThing.init();
    IGosuProgram program = (IGosuProgram)TypeSystem.getByFullNameIfValid( programName );
    Object result = program.evaluate( null );
    return (String)CommonServices.getCoercionManager().convertValue( result, JavaTypes.STRING() );
  }
}
// Marks a run as in progress and arms a one-shot timer that shows the busy
// indicator only if the run is still going after two seconds, so quick runs
// don't flash the status panel.
private void addBusySignal()
{
  _bRunning = true;
  Timer delay =
    new Timer( 2000,
               e -> {
                 //noinspection ConstantConditions
                 if( _bRunning )
                 {
                   _status.setIcon( EditorUtilities.loadIcon( "images/status_anim.gif" ) );
                   _status.setText( "<html>Running <i>" + getCurrentFile().getName() + "</i></html>" );
                   _statPanel.setVisible( true );
                   _statPanel.revalidate();
                 }
               } );
  delay.setRepeats( false );
  delay.start();
}
// Clears the running flag and hides the busy indicator; no-op when no run is
// in progress.
private void removeBusySignal()
{
  if( !_bRunning )
  {
    return;
  }
  _bRunning = false;
  _statPanel.setVisible( false );
  _statPanel.revalidate();
}
// Placeholder: template execution isn't implemented yet; for now templates
// are expected to be rendered programmatically from a program.
void executeTemplate()
{
  try
  {
    System.out.println( "Will prompt for args soon, for now run the template programmatically from a program" );
  }
  catch( Throwable t )
  {
    t.printStackTrace();
  }
}
// Clears the run-output panel.
void clearOutput()
{
  _resultPanel.clear();
}
// Shows a modal dialog for editing the program's command-line arguments.
// Ok (button or Enter in the field) commits the text to _commandLine;
// Cancel or Escape dismisses without committing.
private void showOptions()
{
  final JDialog dialog = new JDialog( editor.util.EditorUtilities.frameForComponent( this ), "Options", true );

  JPanel centerPanel = new JPanel();
  JLabel commandLineLabel = new JLabel( "Program Arguments:" );
  final JTextField commandLineField = new JTextField( _commandLine, 30 );
  centerPanel.add( commandLineLabel );
  centerPanel.add( commandLineField );
  dialog.add( centerPanel, BorderLayout.CENTER );

  JPanel buttonPanel = new JPanel();
  JButton okButton = new JButton( "Ok" );
  JButton cancelButton = new JButton( "Cancel" );
  buttonPanel.add( okButton );
  buttonPanel.add( cancelButton );
  dialog.add( buttonPanel, BorderLayout.SOUTH );
  dialog.pack();

  ActionListener okAction = e -> {
    _commandLine = commandLineField.getText();
    dialog.dispose();
  };
  final ActionListener cancelAction = e -> dialog.dispose();

  commandLineField.addActionListener( okAction );
  okButton.addActionListener( okAction );
  cancelButton.addActionListener( cancelAction );
  // Let Escape dismiss the dialog while typing in the field.
  commandLineField.addKeyListener( new KeyAdapter()
  {
    @Override
    public void keyPressed( KeyEvent e )
    {
      super.keyPressed( e );
      if( e.getKeyCode() == KeyEvent.VK_ESCAPE )
      {
        cancelAction.actionPerformed( null );
      }
    }
  } );
  editor.util.EditorUtilities.centerWindowInFrame( dialog, editor.util.EditorUtilities.frameForComponent( this ) );
  dialog.setVisible( true );
}
// Returns the current editor's undo manager, falling back to the default
// manager when no editor tab is open.
public AtomicUndoManager getUndoManager()
{
  GosuEditor current = getCurrentEditor();
  if( current == null )
  {
    return _defaultUndoMgr;
  }
  return current.getUndoManager();
}
/**
 * Selects the tab editing the given file, opening it in a new tab when it
 * isn't already open.
 * <p>
 * Comparison is inverted to {@code file.equals(prop)} so a tab missing the
 * "_file" client property can't cause a NullPointerException (the original
 * dereferenced the property), matching {@code findTab}'s null-safe style.
 */
public void selectTab( File file )
{
  if( file != null )
  {
    for( int i = 0; i < _tabPane.getTabCount(); i++ )
    {
      GosuEditor editor = (GosuEditor)_tabPane.getComponentAt( i );
      if( editor != null && file.equals( editor.getClientProperty( "_file" ) ) )
      {
        _tabPane.setSelectedIndex( i );
        return;
      }
    }
  }
  openFile( file );
}
/**
 * Removes the tab editing the given file, if present.
 * <p>
 * Comparison is inverted to {@code file.equals(prop)} so a tab missing the
 * "_file" client property can't cause a NullPointerException (the original
 * dereferenced the property), matching {@code findTab}'s null-safe style.
 */
public void closeTab( File file )
{
  if( file == null )
  {
    return;
  }
  for( int i = 0; i < _tabPane.getTabCount(); i++ )
  {
    GosuEditor editor = (GosuEditor)_tabPane.getComponentAt( i );
    if( editor != null && file.equals( editor.getClientProperty( "_file" ) ) )
    {
      _tabPane.removeTabAt( i );
      return;
    }
  }
}
/** Navigates to the previously selected tab in the selection history. */
public void goBackward()
{
  getTabSelectionHistory().goBackward();
}

/** Returns true when there is an older tab selection to go back to. */
public boolean canGoBackward()
{
  return getTabSelectionHistory().canGoBackward();
}

/** Navigates to the next tab in the selection history. */
public void goForward()
{
  getTabSelectionHistory().goForward();
}

/** Returns true when there is a newer tab selection to go forward to. */
public boolean canGoForward()
{
  return getTabSelectionHistory().canGoForward();
}
/**
 * Shows a popup of recently viewed tabs (excluding the one currently
 * showing); selecting an entry switches to that tab.
 * <p>
 * Filtering uses {@code removeIf}: the original removed by index inside a
 * forward loop, which skips the element following every removal.
 */
public void displayRecentViewsPopup()
{
  List<ITabHistoryContext> mruViewsList = new ArrayList<ITabHistoryContext>( getTabSelectionHistory().getMruList() );
  mruViewsList.removeIf( ctx -> ctx != null && ctx.represents( getCurrentEditor() ) );
  LabelListPopup popup = new LabelListPopup( "Recent Views", mruViewsList, "No recent views" );
  popup.addNodeChangeListener(
    e -> {
      ITabHistoryContext context = (ITabHistoryContext)e.getSource();
      getTabSelectionHistory().getTabHistoryHandler().selectTab( context );
    } );
  popup.show( this, getWidth() / 2 - 100, getHeight() / 2 - 200 );
}
// True when the editor's buffer has unsaved changes; a missing "_bDirty"
// property means the editor was never marked dirty.
public boolean isDirty( GosuEditor editor )
{
  return Boolean.TRUE.equals( editor.getClientProperty( "_bDirty" ) );
}
// Marks the editor's buffer as having (or not having) unsaved changes.
public void setDirty( GosuEditor editor, boolean bDirty )
{
  editor.putClientProperty( "_bDirty", bDirty );
}
// Undoes the last edit via the active undo manager; enabled only while there
// is something to undo.
class UndoActionHandler extends AbstractAction
{
  public void actionPerformed( ActionEvent e )
  {
    if( isEnabled() )
    {
      getUndoManager().undo();
    }
  }

  public boolean isEnabled()
  {
    return getUndoManager().canUndo();
  }
}
// Redoes the last undone edit via the active undo manager; enabled only
// while there is something to redo.
class RedoActionHandler extends AbstractAction
{
  public void actionPerformed( ActionEvent e )
  {
    if( isEnabled() )
    {
      getUndoManager().redo();
    }
  }

  public boolean isEnabled()
  {
    return getUndoManager().canRedo();
  }
}
// Clears the output panel, then runs the current program.
class ClearAndRunActionHandler extends AbstractAction
{
  public void actionPerformed( ActionEvent e )
  {
    clearOutput();
    new RunActionHandler().actionPerformed( e );
  }
}
// Runs the current editor's program (or template). Disabled for classes and
// enhancements (which aren't directly runnable) and while a run is already
// in progress.
class RunActionHandler extends AbstractAction
{
  public RunActionHandler()
  {
    super( "Run" );
  }

  public void actionPerformed( ActionEvent e )
  {
    if( isEnabled() )
    {
      // Disabled: command-line args used to be forwarded to the program.
      // CommandLineAccess.setRawArgs( Arrays.asList( _commandLine.split( " +" ) ) );
      // CommandLineAccess.setExitEnabled( false );
      if( getCurrentEditor().isTemplate() )
      {
        executeTemplate();
      }
      else
      {
        execute();
      }
    }
  }

  public boolean isEnabled()
  {
    return getCurrentEditor() != null && !getCurrentEditor().isClass() && !getCurrentEditor().isEnhancement() && !_bRunning;
  }
}
// Stops the currently running program by draining and stopping the execute
// task queue, then clears the busy indicator.
class StopActionHandler extends AbstractAction
{
  public StopActionHandler()
  {
    super( "Stop" );
  }

  public void actionPerformed( ActionEvent e )
  {
    if( isEnabled() )
    {
      TaskQueue queue = TaskQueue.getInstance( "_execute_gosu" );
      TaskQueue.emptyAndRemoveQueue( "_execute_gosu" );
      // Forceful stop is deliberate here: arbitrary user programs offer no
      // cooperative cancellation point.
      //noinspection deprecation
      queue.stop();
      removeBusySignal();
    }
  }
}
// Returns the system clipboard used for cut/copy/paste operations.
public Clipboard getClipboard()
{
  return Toolkit.getDefaultToolkit().getSystemClipboard();
}
// A menu that refreshes the enabled state of its action-backed items each
// time it is opened, so items always reflect the current editor state.
private class SmartMenu extends JMenu implements MenuListener
{
  public SmartMenu( String strLabel )
  {
    super( strLabel );
    addMenuListener( this );
  }

  @Override
  public void menuSelected( MenuEvent e )
  {
    // Sync each item's enabled state with its underlying action.
    for( int i = 0; i < getItemCount(); i++ )
    {
      JMenuItem item = getItem( i );
      if( item != null && item.getAction() != null )
      {
        item.setEnabled( item.getAction().isEnabled() );
      }
    }
  }

  @Override
  public void menuDeselected( MenuEvent e )
  {
  }

  @Override
  public void menuCanceled( MenuEvent e )
  {
  }
}
}
|
package com.catherine.singleton;
/**
*
* <p>
*
*
* JVM
* BillPughSingletonSingletonHolder
*
*/
/**
 * Thread-safe lazy singleton using the initialization-on-demand holder idiom:
 * the JVM guarantees the holder's static initializer runs at most once, on
 * first access from {@link #getInstance()}, with no explicit synchronization.
 */
public class BillPughSingleton {

    private BillPughSingleton() {
        // prevent external instantiation
    }

    // The holder class is only loaded (and the instance created) when
    // getInstance() first references it. The field is now final: the
    // instance reference must never change for a singleton.
    private static class SingletonHolder {
        private static final BillPughSingleton instance = new BillPughSingleton();
    }

    /** Returns the single shared instance, creating it lazily on first call. */
    public static BillPughSingleton getInstance() {
        return SingletonHolder.instance;
    }

    public void print() {
        System.out.println("Singleton:BillPughSingleton");
    }
}
|
package com.dom_distiller.client;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Logger;
import com.google.gwt.dom.client.AnchorElement;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.ImageElement;
import com.google.gwt.dom.client.Node;
import com.google.gwt.dom.client.NodeList;
import de.l3s.boilerpipe.BoilerpipeProcessingException;
import de.l3s.boilerpipe.document.TextBlock;
import de.l3s.boilerpipe.document.TextDocument;
import de.l3s.boilerpipe.extractors.CommonExtractors;
import de.l3s.boilerpipe.labels.DefaultLabels;
import de.l3s.boilerpipe.sax.BoilerpipeHTMLContentHandler;
import org.timepedia.exporter.client.Export;
import org.timepedia.exporter.client.Exportable;
import org.xml.sax.AttributesImpl;
import org.xml.sax.SAXException;
@Export()
public class ContentExtractor implements Exportable {
    static Logger logger = Logger.getLogger("DomDistiller");

    /**
     * Extracts the main article content of the current document as an HTML
     * string, with relative links rewritten as absolute URIs. Returns "" when
     * parsing or boilerplate extraction fails, or when the expanded content
     * subtree is not an element.
     */
    public static String extractContent() {
        // Feed the live DOM through the boilerpipe SAX handler; the parse
        // protocol (startDocument / parse / endDocument) must run in order.
        BoilerpipeHTMLContentHandler htmlParser = new BoilerpipeHTMLContentHandler();
        List<Node> textNodes = null;
        try {
            htmlParser.startDocument();
            Element documentElement = Document.get().getDocumentElement();
            textNodes = DomToSaxParser.parse(documentElement, htmlParser);
            htmlParser.endDocument();
        } catch (SAXException e) {
            logger.warning("Parsing failed.");
            return "";
        }
        TextDocument document = htmlParser.toTextDocument();
        // Classify text blocks as content vs. boilerplate, in place.
        try {
            CommonExtractors.ARTICLE_EXTRACTOR.process(document);
        } catch (BoilerpipeProcessingException e) {
            logger.warning("Processing failed.");
            return "";
        }
        // Collect (sorted) indexes of text elements in non-title content blocks.
        List<Integer> contentTextIndexes = new ArrayList<Integer>();
        for (TextBlock tb : document.getTextBlocks()) {
            if (!tb.hasLabel(DefaultLabels.TITLE)) {
                contentTextIndexes.addAll(tb.getContainedTextElements());
            }
        }
        Collections.sort(contentTextIndexes);
        // Boilerpipe's text node indexes start at 1.
        List<Node> contentNodes = new ArrayList<Node>(contentTextIndexes.size());
        for (Integer i : contentTextIndexes) {
            contentNodes.add(textNodes.get(i - 1));
        }
        // Augment the content nodes with images deemed relevant to them.
        List<Node> contentAndImages = RelevantImageFinder.findAndAddImages(
                contentNodes, Document.get().getDocumentElement());
        Node clonedSubtree = NodeListExpander.expand(contentAndImages).cloneSubtree();
        if (clonedSubtree.getNodeType() != Node.ELEMENT_NODE) {
            return "";
        }
        // The base URL in the distilled page viewer is different from that in
        // the live page. This breaks all relative links (in anchors and
        // images), so make them absolute in the distilled content.
        makeAllLinksAbsolute(clonedSubtree);
        // TODO(cjhopman): this discards the top element and just returns its children. This might
        // break in some cases.
        return Element.as(clonedSubtree).getInnerHTML();
    }

    // Rewrites every anchor href and image src under rootNode to its
    // absolute form.
    private static void makeAllLinksAbsolute(Node rootNode) {
        Element root = Element.as(rootNode);
        // AnchorElement.getHref() and ImageElement.getSrc() both return the
        // absolute URI, so simply set them as the respective attributes.
        NodeList<Element> allLinks = root.getElementsByTagName("A");
        for (int i = 0; i < allLinks.getLength(); i++) {
            AnchorElement link = AnchorElement.as(allLinks.getItem(i));
            link.setHref(link.getHref());
        }
        NodeList<Element> allImages = root.getElementsByTagName("IMG");
        for (int i = 0; i < allImages.getLength(); i++) {
            ImageElement image = ImageElement.as(allImages.getItem(i));
            image.setSrc(image.getSrc());
        }
    }
}
|
package com.github.noxan.aves.client;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import com.github.noxan.aves.net.Connection;
import com.github.noxan.aves.protocol.InputProtocol;
import com.github.noxan.aves.protocol.OutputProtocol;
import com.github.noxan.aves.protocol.ProtocolFactory;
import com.github.noxan.aves.protocol.string.StringProtocolFactory;
import com.github.noxan.aves.util.Tuple;
/**
 * Socket-based {@link Client} that reads protocol messages on a background
 * thread and dispatches them to the {@link ClientHandler} via an event queue.
 * <p>
 * Fixes over the original: the event dispatcher now blocks with
 * {@code take()} instead of busy-spinning on {@code poll()} (which returns
 * null on an empty queue and caused a NullPointerException); the event
 * thread is actually started in {@link #connect()} (previously nothing
 * consumed the queue, so {@code readData} was never invoked); and
 * {@link #disconnect()} closes the socket so the reader thread can exit.
 */
public class SocketClient implements Client, Connection {
    private String host;
    private int port;

    private Socket socket;

    private ClientHandler handler;
    private ProtocolFactory factory;

    private InputProtocol in;
    private OutputProtocol out;

    private InputManager inputManager;
    private Thread inputThread;

    private EventManager eventManager;
    private Thread eventThread;

    // Decouples the socket-reading thread from handler callbacks.
    private BlockingQueue<Tuple<ClientEvent, Object>> clientEvents;

    /** Connects to localhost:1666 using the string protocol. */
    public SocketClient(ClientHandler handler) {
        this("localhost", 1666, handler, new StringProtocolFactory());
    }

    public SocketClient(String host, int port, ClientHandler handler, ProtocolFactory factory) {
        this.host = host;
        this.port = port;
        this.handler = handler;
        this.factory = factory;
        this.clientEvents = new LinkedBlockingQueue<Tuple<ClientEvent, Object>>();
    }

    @Override
    public void write(Object data) throws IOException {
        out.write(data);
    }

    @Override
    public String getHost() {
        return host;
    }

    @Override
    public int getPort() {
        return port;
    }

    @Override
    public void connect() throws IOException {
        handler.clientConnect();
        socket = new Socket();
        socket.connect(new InetSocketAddress(host, port));
        in = factory.createInputProtocol(socket.getInputStream());
        out = factory.createOutputProtocol(socket.getOutputStream());
        inputManager = new InputManager();
        inputThread = new Thread(inputManager);
        inputThread.start();
        // Start the event dispatcher; without it, queued events were never
        // delivered to the handler.
        eventManager = new EventManager();
        eventThread = new Thread(eventManager);
        eventThread.start();
    }

    @Override
    public void disconnect() {
        handler.clientDisconnect();
        // Close the socket so the input thread unblocks and can terminate.
        if (socket != null) {
            try {
                socket.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
        if (eventThread != null) {
            eventThread.interrupt();
        }
    }

    private void offerEvent(ClientEvent event, Object data) {
        clientEvents.offer(new Tuple<ClientEvent, Object>(event, data));
    }

    // Delivers queued events to the handler on a dedicated thread.
    private class EventManager implements Runnable {
        @Override
        public void run() {
            while(true) {
                try {
                    // take() blocks until an event arrives (poll() would
                    // return null immediately on an empty queue).
                    Tuple<ClientEvent, Object> event = clientEvents.take();
                    switch(event.getFirst()) {
                        case DATA_READ:
                            handler.readData(event.getSecond());
                            break;
                    }
                } catch(InterruptedException e) {
                    // disconnect() interrupts us; restore the flag and exit.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }

    // Reads protocol messages off the socket and queues DATA_READ events.
    private class InputManager implements Runnable {
        @Override
        public void run() {
            while(true) {
                try {
                    Object data = in.read();
                    offerEvent(ClientEvent.DATA_READ, data);
                } catch(IOException e) {
                    e.printStackTrace();
                    // A closed socket will fail every subsequent read; stop
                    // instead of spinning forever.
                    if (socket.isClosed()) {
                        return;
                    }
                }
            }
        }
    }
}
|
package org.bimserver.database;
import java.nio.BufferOverflowException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Scanner;
import java.util.Set;
import org.bimserver.ServerIfcModel;
import org.bimserver.database.actions.BimDatabaseAction;
import org.bimserver.database.berkeley.BimserverConcurrentModificationDatabaseException;
import org.bimserver.database.query.conditions.Condition;
import org.bimserver.database.query.conditions.IsOfTypeCondition;
import org.bimserver.emf.IdEObject;
import org.bimserver.emf.IdEObjectImpl;
import org.bimserver.emf.IdEObjectImpl.State;
import org.bimserver.emf.IfcModelInterface;
import org.bimserver.emf.IfcModelInterfaceException;
import org.bimserver.emf.LazyLoader;
import org.bimserver.emf.MetaDataException;
import org.bimserver.emf.MetaDataManager;
import org.bimserver.emf.OidProvider;
import org.bimserver.emf.PackageMetaData;
import org.bimserver.emf.QueryInterface;
import org.bimserver.ifc.BasicIfcModel;
import org.bimserver.models.store.Checkout;
import org.bimserver.models.store.ConcreteRevision;
import org.bimserver.models.store.DatabaseInformation;
import org.bimserver.models.store.DatabaseInformationCategory;
import org.bimserver.models.store.DatabaseInformationItem;
import org.bimserver.models.store.Project;
import org.bimserver.models.store.StoreFactory;
import org.bimserver.models.store.StorePackage;
import org.bimserver.models.store.User;
import org.bimserver.shared.exceptions.ServerException;
import org.bimserver.shared.exceptions.ServiceException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.utils.BinUtils;
import org.eclipse.emf.common.util.AbstractEList;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.Enumerator;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EDataType;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EEnumLiteral;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.impl.EEnumImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Charsets;
import com.sleepycat.je.LockConflictException;
import com.sleepycat.je.LockTimeoutException;
import com.sleepycat.je.TransactionTimeoutException;
public class DatabaseSession implements LazyLoader, OidProvider<Long> {
// Maximum retry attempts for executeAndCommitAction on lock conflicts.
public static final int DEFAULT_CONFLICT_RETRIES = 10;
private static final boolean DEVELOPER_DEBUG = false;
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseSession.class);
private final Database database;
private BimTransaction bimTransaction;
// Actions executed after a successful commit, in insertion order.
private final Set<PostCommitAction> postCommitActions = new LinkedHashSet<PostCommitAction>();
private final ObjectsToCommit objectsToCommit = new ObjectsToCommit();
private final ObjectsToDelete objectsToDelete = new ObjectsToDelete();
// Stack trace of the thread that opened this session; used to diagnose lock conflicts.
private StackTraceElement[] stackTrace;
private final ObjectCache objectCache = new ObjectCache();
// Number of record reads performed; reported to the database on close().
private int reads;
private enum SessionState {
	OPEN, CLOSED
}
private SessionState state = SessionState.OPEN;
private boolean overwriteEnabled;
/**
 * Creates a session bound to the given database and (possibly null) transaction.
 * The opening thread's stack trace is recorded for lock-conflict diagnostics.
 */
public DatabaseSession(Database database, BimTransaction bimTransaction) {
	this.database = database;
	this.bimTransaction = bimTransaction;
	this.stackTrace = Thread.currentThread().getStackTrace();
	if (DEVELOPER_DEBUG) {
		LOGGER.info("");
		LOGGER.info("NEW SESSION");
	}
}
/**
 * When enabled, commit() stores objects with overwrite semantics instead of
 * the default no-overwrite store.
 */
public void setOverwriteEnabled(boolean overwriteEnabled) {
	this.overwriteEnabled = overwriteEnabled;
}
/** Delegates EClass lookup by package/class name to the database. */
public EClass getEClassForName(String packageName, String className) {
	return database.getEClassForName(packageName, className);
}
/** Registers an action to run after a successful commit (set semantics: duplicates ignored). */
public void addPostCommitAction(PostCommitAction postCommitAction) {
	postCommitActions.add(postCommitAction);
}
/**
 * Queues an object for storage on the next commit.
 *
 * @throws BimserverDatabaseException if the object has no assigned oid (-1)
 */
public void addToObjectsToCommit(IdEObject idEObject) throws BimserverDatabaseException {
	if (idEObject.getOid() != -1) {
		objectsToCommit.put(idEObject);
		return;
	}
	throw new BimserverDatabaseException("Cannot store object with oid -1");
}
/**
 * Closes the session: unregisters it, reports the read count, and closes the
 * transaction (dumping open cursors when the key/value store reports a
 * cursor leak via IllegalStateException).
 */
public void close() {
	state = SessionState.CLOSED;
	database.unregisterSession(this);
	database.incrementReads(reads);
	// for (Long oid : oidsRead.keySet()) {
	// AtomicInteger atomicInteger = oidsRead.get(oid);
	// if (atomicInteger.get() > 1) {
	// System.out.println(oid + ": " + atomicInteger.get());
	if (bimTransaction != null) {
		try {
			bimTransaction.close();
		} catch (IllegalStateException e) {
			// Likely cause: cursors left open on the underlying store.
			database.getKeyValueStore().dumpOpenCursors();
		}
	}
	if (DEVELOPER_DEBUG) {
		LOGGER.info("END SESSION");
	}
}
/**
 * Flushes all pending deletes and changed objects to the key/value store and
 * commits the underlying transaction. On success the session is closed and
 * every registered {@link PostCommitAction} is executed.
 *
 * @param progressHandler optional; receives a progress callback per stored object
 * @throws BimserverDatabaseException on storage errors or an object with oid -1
 * @throws ServiceException propagated from the progress handler / post-commit actions
 */
public void commit(ProgressHandler progressHandler) throws BimserverDatabaseException, ServiceException {
	checkOpen();
	// NOTE: the original wrapped this body in catch-and-rethrow blocks for
	// exactly the two declared exception types, which is a no-op and was removed.
	if (progressHandler != null) {
		progressHandler.progress(0, objectsToCommit.size());
	}
	int current = 0;
	int writes = 0;
	ByteBuffer keyBuffer = ByteBuffer.wrap(new byte[16]);
	// Deletes are written as single-byte {-1} tombstone records.
	for (RecordIdentifierPlusType recordIdentifier : objectsToDelete) {
		fillKeyBuffer(keyBuffer, recordIdentifier);
		database.getKeyValueStore().storeNoOverwrite(recordIdentifier.getPackageName() + "_" + recordIdentifier.getClassName(), keyBuffer.array(), new byte[] { -1 }, this);
		writes++;
	}
	// This buffer is reused for the values; its position must be reset at the
	// end of the loop, and convertObjectToByteArray is responsible for leaving
	// the position at the end of the used part of the buffer.
	ByteBuffer reusableBuffer = ByteBuffer.allocate(32768);
	for (IdEObject object : objectsToCommit) {
		if (object.getOid() == -1) {
			throw new BimserverDatabaseException("Cannot store object with oid -1");
		}
		fillKeyBuffer(keyBuffer, object);
		if (DEVELOPER_DEBUG) {
			LOGGER.info("Write: " + object.eClass().getName() + " " + "pid=" + object.getPid() + " oid=" + object.getOid() + " rid=" + object.getRid());
		}
		ByteBuffer valueBuffer = convertObjectToByteArray(object, reusableBuffer, getMetaDataManager().getPackageMetaData(object.eClass().getEPackage().getName()));
		if (object.eClass().getEAnnotation("nolazyload") == null && !overwriteEnabled) {
			database.getKeyValueStore().storeNoOverwrite(object.eClass().getEPackage().getName() + "_" + object.eClass().getName(), keyBuffer.array(),
					valueBuffer.array(), 0, valueBuffer.position(), this);
		} else {
			database.getKeyValueStore().store(object.eClass().getEPackage().getName() + "_" + object.eClass().getName(), keyBuffer.array(),
					valueBuffer.array(), 0, valueBuffer.position(), this);
		}
		if (progressHandler != null) {
			progressHandler.progress(++current, objectsToCommit.size());
		}
		writes++;
		// convertObjectToByteArray may have allocated a larger buffer; keep
		// using it for subsequent objects.
		reusableBuffer = valueBuffer;
		reusableBuffer.position(0);
	}
	bimTransaction.commit();
	database.incrementCommittedWrites(writes);
	close();
	for (PostCommitAction postCommitAction : postCommitActions) {
		postCommitAction.execute();
	}
}
/** Fills the 16-byte key buffer (pid, oid, -rid) from a record identifier. */
private void fillKeyBuffer(ByteBuffer buffer, RecordIdentifierPlusType recordIdentifier) {
	fillKeyBuffer(buffer, recordIdentifier.getPid(), recordIdentifier.getOid(), recordIdentifier.getRid());
}
/**
 * Instantiates an empty object of the given EClass via its EMF factory and
 * binds it to the query interface (without assigning oid/pid/rid).
 */
private IdEObjectImpl createInternal(EClass eClass, QueryInterface queryInterface) {
	EObject created = eClass.getEPackage().getEFactoryInstance().create(eClass);
	IdEObjectImpl idEObject = (IdEObjectImpl) created;
	idEObject.setQueryInterface(queryInterface);
	return idEObject;
}
/**
 * Deserializes one database record (value buffer) into an IdEObject.
 *
 * <p>Record layout: one byte with the length of the "unset" bitmap, the
 * bitmap itself (one bit per structural feature, in eAllStructuralFeatures
 * order), then per set feature its serialized value. Lists are prefixed with
 * an int size; references are a short cid (-1 = null, negative = embedded
 * wrapped value, positive = reference to another record); enums are an int
 * ordinal (-1 = null).
 *
 * @param idEObject existing instance to fill, or null to create one
 * @param originalQueryClass the class the outermost query asked for (used to
 *        decide which references to follow)
 * @param rid revision id; Integer.MAX_VALUE is rejected as corrupt
 * @throws BimserverDatabaseException on buffer under/overflow or corrupt data
 */
@SuppressWarnings({ "unchecked", "unused" })
private IdEObject convertByteArrayToObject(IdEObject idEObject, EClass originalQueryClass, EClass eClass, long oid, ByteBuffer buffer, IfcModelInterface model, int rid, QueryInterface query, TodoList todoList) throws BimserverDatabaseException {
	try {
		if (idEObject == null) {
			idEObject = createInternal(eClass, query);
			((IdEObjectImpl) idEObject).setOid(oid);
			((IdEObjectImpl) idEObject).setPid(query.getPid());
			if (rid == Integer.MAX_VALUE) {
				throw new BimserverDatabaseException("Database corrupt, rid cannot be " + Integer.MAX_VALUE);
			}
		}
		// Wrapped (value-like) objects are not registered in the model.
		if (idEObject.eClass().getEAnnotation("wrapped") == null) {
			try {
				model.addAllowMultiModel(oid, idEObject);
			} catch (IfcModelInterfaceException e) {
				throw new BimserverDatabaseException(e);
			}
		}
		((IdEObjectImpl) idEObject).setRid(rid);
		// Disable inverse maintenance while populating fields.
		((IdEObjectImpl) idEObject).useInverses(false);
		if (DEVELOPER_DEBUG && StorePackage.eINSTANCE == idEObject.eClass().getEPackage()) {
			LOGGER.info("Read: " + idEObject.eClass().getName() + " pid=" + query.getPid() + " oid=" + oid + " rid=" + rid);
		}
		((IdEObjectImpl) idEObject).setLoadingState(State.LOADING);
		objectCache.put(new RecordIdentifier(query.getPid(), oid, rid), idEObject);
		// Read the unset bitmap.
		byte unsettedLength = buffer.get();
		byte[] unsetted = new byte[unsettedLength];
		buffer.get(unsetted);
		int fieldCounter = 0;
		for (EStructuralFeature feature : eClass.getEAllStructuralFeatures()) {
			boolean isUnsetted = (unsetted[fieldCounter / 8] & (1 << (fieldCounter % 8))) != 0;
			if (isUnsetted) {
				if (feature.isUnsettable()) {
					idEObject.eUnset(feature);
				} else if (feature.isMany()) {
					// do nothing
				} else if (feature.getDefaultValue() != null) {
					idEObject.eSet(feature, feature.getDefaultValue());
				}
			} else {
				if (!query.shouldFollowReference(originalQueryClass, eClass, feature)) {
					// we have to do some reading to maintain a correct
					// index
					fakeRead(buffer, feature);
				} else {
					Object newValue = null;
					if (feature.isMany()) {
						if (feature.getEType() instanceof EEnum) {
							// NOTE(review): empty branch — lists of enums are
							// apparently never stored; confirm.
						} else if (feature.getEType() instanceof EClass) {
							// A 1-byte buffer containing -1 marks a deleted record.
							if (buffer.capacity() == 1 && buffer.get(0) == -1) {
								buffer.position(buffer.position() + 1);
							} else {
								/*
								 * TODO There still is a problem with this,
								 * when readReference (and all calls beyond
								 * that call) alter (by opposites) this
								 * list, this list can potentially grow too
								 * large
								 *
								 * Only can happen with non-unique
								 * references
								 */
								int listSize = buffer.getInt();
								AbstractEList<Object> list = (AbstractEList<Object>) idEObject.eGet(feature);
								for (int i = 0; i < listSize; i++) {
									IdEObject referencedObject = null;
									short cid = buffer.getShort();
									if (cid == -1) {
										// null, do nothing
									} else if (cid < 0) {
										// negative cid means value is
										// embedded
										// in record
										EClass referenceClass = database.getEClassForCid((short) (-cid));
										referencedObject = readWrappedValue(feature, buffer, referenceClass, query);
									} else if (cid > 0) {
										// positive cid means value is a
										// reference
										// to another record
										EClass referenceClass = database.getEClassForCid(cid);
										referencedObject = readReference(originalQueryClass, buffer, model, idEObject, feature, referenceClass, query, todoList);
									}
									if (referencedObject != null) {
										if (!feature.getEType().isInstance(referencedObject)) {
											throw new BimserverDatabaseException(referencedObject.getClass().getSimpleName() + " cannot be stored in list of "
													+ feature.getName());
										}
										// NOTE(review): eReference is unused; also,
										// unique features use add() (checked) while
										// non-unique use addUnique() (unchecked) —
										// looks inverted at first glance; confirm
										// intent before changing.
										EReference eReference = (EReference)feature;
										if (feature.isUnique()) {
											list.add(referencedObject);
										} else {
											list.addUnique(referencedObject);
										}
									}
								}
							}
						} else if (feature.getEType() instanceof EDataType) {
							int listSize = buffer.getInt();
							BasicEList<Object> list = new BasicEList<Object>(listSize);
							for (int i = 0; i < listSize; i++) {
								Object reference = readPrimitiveValue(feature.getEType(), buffer, query);
								if (reference != null) {
									list.addUnique(reference);
								}
							}
							newValue = list;
						}
					} else {
						if (feature.getEType() instanceof EEnum) {
							int enumOrdinal = buffer.getInt();
							if (enumOrdinal == -1) {
								newValue = null;
							} else {
								EClassifier eType = feature.getEType();
								EEnumLiteral enumLiteral = ((EEnumImpl) eType).getEEnumLiteral(enumOrdinal);
								if (enumLiteral != null) {
									newValue = enumLiteral.getInstance();
								}
							}
						} else if (feature.getEType() instanceof EClass) {
							// EReference eReference = (EReference) feature;
							short cid = buffer.getShort();
							if (cid == -1) {
								// null, do nothing
							} else if (cid < 0) {
								// negative cid means value is embedded in
								// record
								EClass referenceClass = database.getEClassForCid((short) (-cid));
								newValue = readWrappedValue(feature, buffer, referenceClass, query);
							} else if (cid > 0) {
								// positive cid means value is reference to
								// other record
								EClass referenceClass = database.getEClassForCid(cid);
								newValue = readReference(originalQueryClass, buffer, model, idEObject, feature, referenceClass, query, todoList);
								// if (eReference.getEOpposite() != null &&
								// ((IdEObjectImpl)
								// newValue).isLoadedOrLoading()) {
								// newValue = null;
							}
						} else if (feature.getEType() instanceof EDataType) {
							newValue = readPrimitiveValue(feature.getEType(), buffer, query);
						}
					}
					if (newValue != null) {
						idEObject.eSet(feature, newValue);
					}
				}
			}
			fieldCounter++;
		}
		((IdEObjectImpl) idEObject).setLoaded();
		((IdEObjectImpl) idEObject).useInverses(true);
		// NOTE(review): due to precedence this is
		// "(DEVELOPER_DEBUG && a) || b", so the negative-rid arm runs even
		// with DEVELOPER_DEBUG off — probably wants parentheses around (a || b).
		if (DEVELOPER_DEBUG && idEObject.getRid() > 100000 || idEObject.getRid() < -100000) {
			LOGGER.debug("Improbable rid " + idEObject.getRid() + " - " + idEObject);
		}
		return idEObject;
	} catch (BufferUnderflowException e) {
		throw new BimserverDatabaseException("Reading " + eClass.getName(), e);
	} catch (BufferOverflowException e) {
		throw new BimserverDatabaseException("Reading " + eClass.getName(), e);
	}
}
/**
 * Decides whether a feature is recorded only in the "unset" bitmap (and thus
 * has no serialized payload): unsettable-and-unset features, empty lists, and
 * values equal to the feature's default.
 */
private boolean useUnsetBit(EStructuralFeature feature, IdEObject object) {
	// TODO non-unsettable boolean values can also be stored in these bits
	Object value = object.eGet(feature);
	if (feature.isUnsettable()) {
		return !object.eIsSet(feature);
	}
	if (feature.isMany() && ((List<?>) value).isEmpty()) {
		return true;
	}
	Object defaultValue = feature.getDefaultValue();
	return defaultValue == value || (defaultValue != null && defaultValue.equals(value));
}
/**
 * Serializes an object into the value-record format read back by
 * convertByteArrayToObject: unset-bitmap length byte, bitmap, then each set
 * feature's value. Must stay in exact sync with getExactSize().
 *
 * @param buffer reusable buffer; a larger one is allocated (and returned)
 *        when the object does not fit
 * @return the buffer actually written to, positioned at the end of the data
 * @throws BimserverDatabaseException if the oid's cid-part disagrees with the
 *         object's class, or the written size mismatches the computed size
 */
private ByteBuffer convertObjectToByteArray(IdEObject object, ByteBuffer buffer, PackageMetaData packageMetaData) throws BimserverDatabaseException {
	int bufferSize = getExactSize(object);
	if (bufferSize > buffer.capacity()) {
		LOGGER.debug("Buffer too small (" + bufferSize + ")");
		buffer = ByteBuffer.allocate(bufferSize);
	}
	// Build the unset bitmap: one bit per structural feature.
	byte[] unsetted = new byte[(int) Math.ceil(object.eClass().getEAllStructuralFeatures().size() / 8.0)];
	int fieldCounter = 0;
	for (EStructuralFeature feature : object.eClass().getEAllStructuralFeatures()) {
		if (useUnsetBit(feature, object)) {
			unsetted[fieldCounter / 8] |= (1 << (fieldCounter % 8));
		}
		fieldCounter++;
	}
	buffer.put((byte) unsetted.length);
	buffer.put(unsetted);
	// Sanity check: the cid embedded in the oid must match the object's class.
	EClass eClass = getEClassForOid(object.getOid());
	if (!eClass.isSuperTypeOf(object.eClass())) {
		throw new BimserverDatabaseException("Object with oid " + object.getOid() + " is a " + object.eClass().getName() + " but it's cid-part says it's a " + eClass.getName());
	}
	for (EStructuralFeature feature : object.eClass().getEAllStructuralFeatures()) {
		if (!useUnsetBit(feature, object)) {
			if (feature.isMany()) {
				if (feature.getEType() instanceof EEnum) {
					// Aggregate relations to enums never occur... at this
					// moment
				} else if (feature.getEType() instanceof EClass) {
					EList<?> list = (EList<?>) object.eGet(feature);
					buffer.putInt(list.size());
					for (Object o : list) {
						if (o == null) {
							buffer.putShort((short) -1);
						} else {
							IdEObject listObject = (IdEObject) o;
							if (listObject.eClass().getEAnnotation("wrapped") != null || listObject.eClass().getEStructuralFeature("wrappedValue") != null) {
								writeWrappedValue(object.getPid(), object.getRid(), listObject, buffer, packageMetaData);
							} else {
								writeReference(object, listObject, buffer, feature);
							}
						}
					}
				} else if (feature.getEType() instanceof EDataType) {
					EList<?> list = (EList<?>) object.eGet(feature);
					buffer.putInt(list.size());
					for (Object o : list) {
						writePrimitiveValue(feature, o, buffer);
					}
				}
			} else {
				Object value = object.eGet(feature);
				if (feature.getEType() instanceof EEnum) {
					if (value == null) {
						buffer.putInt(-1);
					} else {
						EEnum eEnum = (EEnum) feature.getEType();
						EEnumLiteral eEnumLiteral = eEnum.getEEnumLiteralByLiteral(((Enum<?>) value).toString());
						if (eEnumLiteral != null) {
							buffer.putInt(eEnumLiteral.getValue());
						} else {
							LOGGER.error(((Enum<?>) value).toString() + " not found");
							buffer.putInt(-1);
						}
					}
				} else if (feature.getEType() instanceof EClass) {
					if (value == null) {
						buffer.putShort((short) -1);
					} else {
						IdEObject referencedObject = (IdEObject) value;
						EClass referencedClass = referencedObject.eClass();
						if (referencedClass.getEAnnotation("wrapped") != null) {
							writeWrappedValue(object.getPid(), object.getRid(), value, buffer, packageMetaData);
						} else {
							writeReference(object, value, buffer, feature);
						}
					}
				} else if (feature.getEType() instanceof EDataType) {
					writePrimitiveValue(feature, value, buffer);
				}
			}
		}
	}
	// Guard against drift between getExactSize() and the writer above.
	if (buffer.position() != bufferSize) {
		throw new BimserverDatabaseException("Value buffer sizes do not match for " + object.eClass().getName() + " " + buffer.position() + "/" + bufferSize);
	}
	return buffer;
}
/**
 * Fills the 16-byte key buffer (pid, oid, -rid) from an object's identity.
 */
@SuppressWarnings("unused")
private ByteBuffer fillKeyBuffer(ByteBuffer buffer, IdEObject object) {
	// BUGFIX: the original condition "DEVELOPER_DEBUG && a || b" parsed as
	// "(DEVELOPER_DEBUG && a) || b", so the negative-rid check logged even
	// with the debug flag off. Parenthesized to make it debug-only, matching
	// the identical (intended) check elsewhere in this class.
	if (DEVELOPER_DEBUG && (object.getRid() > 100000 || object.getRid() < -100000)) {
		LOGGER.debug("Improbable rid: " + object.getRid() + " - " + object);
	}
	return fillKeyBuffer(buffer, object.getPid(), object.getOid(), object.getRid());
}
/**
 * Writes the record key: pid (int), oid (long), then the NEGATED rid (int),
 * so that higher revisions sort first in the key order.
 */
private ByteBuffer fillKeyBuffer(ByteBuffer buffer, int pid, long oid, int rid) {
	buffer.position(0);
	buffer.putInt(pid);
	buffer.putLong(oid);
	buffer.putInt(-rid);
	return buffer;
}
/** Allocates and fills a full 16-byte key (pid, oid, -rid). */
private ByteBuffer createKeyBuffer(int pid, long oid, int rid) {
	return fillKeyBuffer(ByteBuffer.allocate(16), pid, oid, rid);
}
/** Allocates and fills a 12-byte key prefix (pid, oid) without the rid. */
private ByteBuffer createKeyBuffer(int pid, long oid) {
	return fillKeyBuffer(ByteBuffer.allocate(12), pid, oid);
}
/** Writes the 12-byte key prefix (pid, oid) used for prefix scans. */
private ByteBuffer fillKeyBuffer(ByteBuffer buffer, int pid, long oid) {
	buffer.position(0);
	buffer.putInt(pid);
	buffer.putLong(oid);
	return buffer;
}
/**
 * Marks an object as deleted in revision {@code newRid} (a tombstone is
 * written on commit) and removes it from the pending-commit set if present.
 */
public void delete(IdEObject object, Integer newRid) throws BimserverDatabaseException {
	checkOpen();
	// if (perRecordVersioning(object)) {
	objectsToDelete.put(object.eClass(), object.getPid(), newRid, object.getOid());
	if (objectsToCommit.containsObject(object)) {
		objectsToCommit.remove(object);
	}
	// } else {
	// TODO implement
	// throw new BimserverDatabaseException("This is not supported");
}
/** Convenience overload using {@link #DEFAULT_CONFLICT_RETRIES}. */
public <T> T executeAndCommitAction(BimDatabaseAction<T> action, ProgressHandler progressHandler) throws BimserverDatabaseException, ServiceException {
	checkOpen();
	return executeAndCommitAction(action, DEFAULT_CONFLICT_RETRIES, progressHandler);
}
/** Convenience overload with default retries and no progress handler. */
public <T> T executeAndCommitAction(BimDatabaseAction<T> action) throws BimserverDatabaseException, UserException, ServerException {
	checkOpen();
	return executeAndCommitAction(action, DEFAULT_CONFLICT_RETRIES, null);
}
/**
 * Executes the action and commits pending changes, retrying up to
 * {@code retries} times on concurrent-modification / lock conflicts with a
 * randomized back-off between attempts.
 *
 * @throws BimserverDatabaseException after exhausting all retries, or on a
 *         non-retryable database error
 */
public <T> T executeAndCommitAction(BimDatabaseAction<T> action, int retries, ProgressHandler progressHandler) throws BimserverDatabaseException, UserException,
		ServerException {
	checkOpen();
	// Hoisted: the original allocated a new Random per back-off.
	Random random = new Random();
	for (int i = 0; i < retries; i++) {
		try {
			T result = action.execute();
			if (objectsToCommit.size() > 0 || objectsToDelete.size() > 0) {
				commit(progressHandler);
			}
			return result;
		} catch (BimserverConcurrentModificationDatabaseException e) {
			if (progressHandler != null) {
				progressHandler.retry(i + 1);
			}
			// NOTE(review): objectsToDelete is NOT cleared on rollback (here
			// and below), unlike objectsToCommit — confirm whether pending
			// deletes should survive a retry.
			bimTransaction.rollback();
			objectCache.clear();
			objectsToCommit.clear();
			bimTransaction = database.getKeyValueStore().startTransaction();
		} catch (BimserverLockConflictException e) {
			bimTransaction.rollback();
			objectCache.clear();
			objectsToCommit.clear();
			bimTransaction = database.getKeyValueStore().startTransaction();
			if (DEVELOPER_DEBUG) {
				LockConflictException lockException = e.getLockException();
				if (lockException instanceof TransactionTimeoutException) {
				} else if (lockException instanceof LockTimeoutException) {
				}
				LOGGER.info("Lock while executing " + action.getClass().getSimpleName() + " run (" + i + ")", lockException);
				long[] ownerTxnIds = e.getLockException().getOwnerTxnIds();
				for (long txnid : ownerTxnIds) {
					DatabaseSession databaseSession = database.getDatabaseSession(txnid);
					if (databaseSession != null) {
						LOGGER.info("Owner: " + databaseSession);
						StackTraceElement[] stackTraceElements = databaseSession.getStackTrace();
						for (StackTraceElement stackTraceElement : stackTraceElements) {
							LOGGER.info("\tat " + stackTraceElement);
						}
					}
				}
			}
		} catch (UncheckedBimserverLockConflictException e) {
			bimTransaction.rollback();
			objectCache.clear();
			objectsToCommit.clear();
			bimTransaction = database.getKeyValueStore().startTransaction();
		} catch (BimserverDatabaseException e) {
			throw e;
		} catch (ServiceException e) {
			if (e instanceof UserException) {
				throw ((UserException) e);
			} else if (e instanceof ServerException) {
				throw ((ServerException) e);
			} else {
				LOGGER.error("", e);
			}
		}
		if (i < retries - 1) {
			try {
				// Randomized back-off, growing with the attempt number.
				Thread.sleep(random.nextInt((i + 1) * 1000));
			} catch (InterruptedException e1) {
				// BUGFIX: restore the interrupt flag instead of swallowing it.
				Thread.currentThread().interrupt();
				LOGGER.error("", e1);
			}
		}
	}
	throw new BimserverDatabaseException("Too many conflicts, tried " + retries + " times");
}
/** Typed delegator; {@code eClass} is accepted for API symmetry but unused. */
@SuppressWarnings("unchecked")
public <T extends IdEObject> T get(EClass eClass, IfcModelInterface model, IdEObject idEObject, long oid, QueryInterface query, TodoList todoList)
		throws BimserverDatabaseException {
	checkOpen();
	return (T) get(idEObject, oid, model, query, todoList);
}
/**
 * Loads a single object by oid into a freshly created model, resolving any
 * deferred references before returning.
 */
@SuppressWarnings("unchecked")
public <T extends IdEObject> T get(long oid, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	IfcModelInterface model = createModel(query);
	TodoList pending = new TodoList();
	IdEObject result = get(null, oid, model, query, pending);
	processTodoList(model, pending, query);
	return (T) result;
}
/**
 * Loads a single object by oid into the supplied model, resolving any
 * deferred references before returning.
 */
@SuppressWarnings("unchecked")
public <T extends IdEObject> T get(IfcModelInterface model, long oid, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	TodoList pending = new TodoList();
	IdEObject result = get(null, oid, model, query, pending);
	processTodoList(model, pending, query);
	return (T) result;
}
/**
 * Loads a single object by oid into a new model; returns null for oid -1.
 * {@code eClass} is accepted for API symmetry but unused.
 */
public <T extends IdEObject> T get(EClass eClass, long oid, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	if (oid == -1) {
		return null;
	}
	IfcModelInterface model = createModel(query);
	TodoList pending = new TodoList();
	T result = get(null, oid, model, query, pending);
	processTodoList(model, pending, query);
	return result;
}
/**
 * Loads a single object by oid into the supplied model; returns null for
 * oid -1. {@code eClass} is accepted for API symmetry but unused.
 */
public <T extends IdEObject> T get(IfcModelInterface model, EClass eClass, long oid, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	if (oid == -1) {
		return null;
	}
	TodoList pending = new TodoList();
	T result = get(null, oid, model, query, pending);
	processTodoList(model, pending, query);
	return result;
}
/**
 * Guards every public operation: once {@link #close()} has been called this
 * session may no longer be used.
 */
private void checkOpen() throws BimserverDatabaseException {
	if (state != SessionState.OPEN) {
		throw new BimserverDatabaseException("Database session is closed");
	}
}
/**
 * Loads an object by oid into the supplied model (optionally filling an
 * existing instance), resolving any deferred references before returning.
 */
public <T extends IdEObject> T get(IdEObject idEObject, long oid, IfcModelInterface model, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	TodoList pending = new TodoList();
	T object = get(idEObject, oid, model, query, pending);
	processTodoList(model, pending, query);
	return object;
}
/**
 * Core single-object read: resolves the latest record for {@code oid} whose
 * rid is &lt;= the query rid, consulting (in order) the pending-commit set,
 * the object cache, and finally the key/value store.
 *
 * @return the object, or null if the record doesn't exist at this revision
 *         or is a deletion tombstone
 * @throws BimserverDatabaseException for oid -1 or a class/cid mismatch
 */
@SuppressWarnings("unchecked")
public <T extends IdEObject> T get(IdEObject idEObject, long oid, IfcModelInterface model, QueryInterface query, TodoList todoList) throws BimserverDatabaseException {
	checkOpen();
	if (oid == -1) {
		throw new BimserverDatabaseException("Cannot get object for oid " + oid);
	}
	// Uncommitted objects win over stored records.
	if (objectsToCommit.containsOid(oid)) {
		return (T) objectsToCommit.getByOid(oid);
	}
	// The oid encodes the class (cid); verify it against any supplied instance.
	EClass eClass = getEClassForOid(oid);
	if (idEObject != null) {
		if (!eClass.isSuperTypeOf(idEObject.eClass())) {
			throw new BimserverDatabaseException("Object with oid " + oid + " is a " + idEObject.eClass().getName() + " but it's cid-part says it's a " + eClass.getName());
		}
	}
	RecordIdentifier recordIdentifier = new RecordIdentifier(query.getPid(), oid, query.getRid());
	IdEObjectImpl cachedObject = (IdEObjectImpl) objectCache.get(recordIdentifier);
	if (cachedObject != null) {
		idEObject = cachedObject;
		if (cachedObject.getLoadingState() == State.LOADED && cachedObject.getRid() != Integer.MAX_VALUE) {
			cachedObject.load();
			return (T) cachedObject;
		}
	}
	// Seek from (pid, oid, -rid): keys store the negated rid, so the first
	// match is the newest revision at or below query.getRid().
	ByteBuffer mustStartWith = ByteBuffer.wrap(new byte[12]);
	mustStartWith.putInt(query.getPid());
	mustStartWith.putLong(oid);
	ByteBuffer startSearchWith = ByteBuffer.wrap(new byte[16]);
	startSearchWith.putInt(query.getPid());
	startSearchWith.putLong(oid);
	startSearchWith.putInt(-query.getRid());
	SearchingRecordIterator recordIterator = database.getKeyValueStore().getRecordIterator(eClass.getEPackage().getName() + "_" + eClass.getName(), mustStartWith.array(),
			startSearchWith.array(), this);
	try {
		Record record = recordIterator.next();
		if (record == null) {
			return null;
		}
		reads++;
		ByteBuffer keyBuffer = ByteBuffer.wrap(record.getKey());
		ByteBuffer valueBuffer = ByteBuffer.wrap(record.getValue());
		keyBuffer.getInt(); // pid
		long keyOid = keyBuffer.getLong();
		int keyRid = -keyBuffer.getInt();
		if (keyRid <= query.getRid()) {
			if (idEObject != null && idEObject.getRid() == Integer.MAX_VALUE) {
				((IdEObjectImpl) idEObject).setRid(keyRid);
			}
			if (model.contains(keyOid) && ((IdEObjectImpl) model.get(keyOid)).getLoadingState() == State.LOADED) {
				return (T) model.get(keyOid);
			} else {
				// A 1-byte value containing -1 is a deletion tombstone.
				if (valueBuffer.capacity() == 1 && valueBuffer.get(0) == -1) {
					valueBuffer.position(valueBuffer.position() + 1);
					return null;
					// deleted entity
				} else {
					T convertByteArrayToObject = (T) convertByteArrayToObject(idEObject, eClass, eClass, keyOid, valueBuffer, model, keyRid, query, todoList);
					if (convertByteArrayToObject.getRid() == Integer.MAX_VALUE) {
						((IdEObjectImpl) convertByteArrayToObject).setRid(keyRid);
					}
					objectCache.put(recordIdentifier, convertByteArrayToObject);
					return convertByteArrayToObject;
				}
			}
		} else {
			return null;
		}
	} finally {
		recordIterator.close();
	}
}
/** Loads all instances of {@code eClass} into a freshly created model. */
public IfcModelInterface getAllOfType(EClass eClass, QueryInterface query) throws BimserverDatabaseException {
	IfcModelInterface model = createModel(query);
	return getAllOfType(model, eClass, query);
}
/**
 * Loads all instances of {@code eClass} into the supplied model, then
 * resolves any deferred references.
 */
public IfcModelInterface getAllOfType(IfcModelInterface model, EClass eClass, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	TodoList pending = new TodoList();
	getMap(eClass, model, query, pending);
	processTodoList(model, pending, query);
	return model;
}
/** Loads all instances of every class in {@code eClasses} into a new model. */
public IfcModelInterface getAllOfTypes(Set<EClass> eClasses, QueryInterface query) throws BimserverDatabaseException {
	IfcModelInterface model = createModel(query);
	return getAllOfTypes(model, eClasses, query);
}
/**
 * Loads all instances of every class in {@code eClasses} into the supplied
 * model, then resolves any deferred references once for the whole batch.
 */
public IfcModelInterface getAllOfTypes(IfcModelInterface model, Set<EClass> eClasses, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	TodoList pending = new TodoList();
	for (EClass next : eClasses) {
		getMap(next, model, query, pending);
	}
	processTodoList(model, pending, query);
	return model;
}
/** Name-based variant: resolves the EClass, then loads all its instances. */
public IfcModelInterface getAllOfType(String packageName, String className, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	EClass eClass = getEClass(packageName, className);
	return getAllOfType(eClass, query);
}
/** Name-based variant loading into the supplied model. */
public IfcModelInterface getAllOfType(IfcModelInterface model, String packageName, String className, QueryInterface query) throws BimserverDatabaseException {
	checkOpen();
	EClass eClass = getEClass(packageName, className);
	return getAllOfType(model, eClass, query);
}
/** @return the currently active transaction (replaced on retry/rollback) */
public BimTransaction getBimTransaction() {
	return bimTransaction;
}
/**
 * Resolves the class id (cid) for an EClass.
 *
 * @throws BimserverDatabaseException if the class is not registered
 */
public short getCid(EClass eClass) throws BimserverDatabaseException {
	Short cid = database.getCidOfEClass(eClass);
	if (cid != null) {
		return cid;
	}
	throw new BimserverDatabaseException("EClass " + eClass.getName() + " not registered");
}
/**
 * Resolves the class id (cid) for a package/class name pair.
 *
 * <p>BUGFIX: the original unboxed {@code database.getCidOfEClass(...)}
 * directly, throwing a bare NullPointerException for an unregistered class.
 * Routing through {@link #getCid(EClass)} yields the same descriptive
 * BimserverDatabaseException as the EClass-based overload.
 *
 * @throws BimserverDatabaseException if the class is unknown or not registered
 */
public short getCidForClassName(String packageName, String className) throws BimserverDatabaseException {
	return getCid(getEClass(packageName, className));
}
// NOTE(review): unlike getCid(EClass), this unboxes the nullable Short from
// the database directly and will NPE for an unregistered class — confirm
// callers only pass registered classes.
public short getCidOfEClass(EClass eClass) {
	return database.getCidOfEClass(eClass);
}
/** @return the names of all classes known to the database */
public List<String> getClassList() {
	return database.getAvailableClasses();
}
/**
 * Counts live (non-tombstone) instances of {@code eClass} visible at
 * (pid, rid). Scans the class table; once the newest qualifying revision of
 * an oid is found, the iterator skips ahead to the next oid.
 *
 * <p>NOTE(review): the {@code model} parameter is passed through to the
 * private getCount() where it is unused — presumably legacy.
 */
public int getCount(EClass eClass, IfcModelInterface model, int pid, int rid) throws BimserverDatabaseException {
	checkOpen();
	int count = 0;
	SearchingRecordIterator recordIterator = database.getKeyValueStore().getRecordIterator(eClass.getEPackage().getName() + "_" + eClass.getName(),
			BinUtils.intToByteArray(pid), BinUtils.intToByteArray(pid), this);
	try {
		Record record = recordIterator.next();
		ByteBuffer nextKeyStart = ByteBuffer.allocate(12);
		// A single-byte {-1} value marks a deleted entity.
		byte[] nullReference = new byte[] { -1 };
		while (record != null) {
			reads++;
			ByteBuffer keyBuffer = ByteBuffer.wrap(record.getKey());
			int keyPid = keyBuffer.getInt();
			long oid = keyBuffer.getLong();
			int keyRid = -keyBuffer.getInt();
			int map = getCount(model, pid, rid, keyPid, keyRid);
			if (map == 1) {
				if (!Arrays.equals(record.getValue(), nullReference)) {
					count++;
				}
				// Found the newest visible revision; jump to the next oid.
				nextKeyStart.position(0);
				nextKeyStart.putInt(pid);
				nextKeyStart.putLong(oid + 1);
				record = recordIterator.next(nextKeyStart.array());
			} else {
				record = recordIterator.next();
			}
		}
	} finally {
		recordIterator.close();
	}
	return count;
}
/**
 * Classifies a scanned key against the requested (pid, rid): 1 = count it,
 * -1 = newer revision (keep scanning this oid), 0 = different project (stop).
 * The {@code model} parameter is unused but kept for signature compatibility.
 */
private int getCount(IfcModelInterface model, int pid, int rid, int keyPid, int keyRid) {
	if (keyPid != pid) {
		return 0;
	}
	return keyRid <= rid ? 1 : -1;
}
/** @return the creation date of the underlying database */
public Date getCreatedDate() {
	return database.getCreated();
}
/**
 * Assembles database statistics: object counts, store metadata, and the
 * key/value store's stats text parsed into categories (a non-"=" line starts
 * a new titled category; "key=value" lines become items of the current one).
 *
 * <p>NOTE(review): if the stats text starts with "key=value" lines before any
 * title line, those items land in the initial category which is never added
 * to the result — confirm the stats format always starts with a title.
 */
public DatabaseInformation getDatabaseInformation() throws BimserverDatabaseException {
	DatabaseInformation databaseInformation = StoreFactory.eINSTANCE.createDatabaseInformation();
	databaseInformation.setNumberOfProjects(getObjectCount(Project.class, Query.getDefault()));
	databaseInformation.setNumberOfUsers(getObjectCount(User.class, Query.getDefault()));
	databaseInformation.setNumberOfCheckouts(getObjectCount(Checkout.class, Query.getDefault()));
	databaseInformation.setNumberOfRevisions(getObjectCount(ConcreteRevision.class, Query.getDefault()));
	databaseInformation.setType(database.getKeyValueStore().getType());
	databaseInformation.setCreated(database.getCreated());
	databaseInformation.setDatabaseSizeInBytes(database.getKeyValueStore().getDatabaseSizeInBytes());
	databaseInformation.setSchemaVersion(database.getRegistry().readInt(Database.SCHEMA_VERSION, this));
	String stats = database.getKeyValueStore().getStats();
	Scanner scanner = new Scanner(stats);
	try {
		DatabaseInformationCategory category = StoreFactory.eINSTANCE.createDatabaseInformationCategory();
		while (scanner.hasNextLine()) {
			String line = scanner.nextLine();
			if (line.contains("=")) {
				DatabaseInformationItem item = StoreFactory.eINSTANCE.createDatabaseInformationItem();
				category.getItems().add(item);
				item.setKey(line.substring(0, line.indexOf("=")));
				item.setValue(line.substring(line.indexOf("=") + 1));
			} else {
				category = StoreFactory.eINSTANCE.createDatabaseInformationCategory();
				category.setTitle(line);
				databaseInformation.getCategories().add(category);
			}
		}
	} finally {
		scanner.close();
	}
	databaseInformation.setLocation(database.getKeyValueStore().getLocation());
	return databaseInformation;
}
/** Resolves the EClass registered for a class id (cid). */
public EClass getEClass(short cid) {
	return database.getEClassForCid(cid);
}
/** Resolves an EClass by package and class name (delegates to the database). */
public EClass getEClass(String packageName, String className) throws BimserverDatabaseException {
	return database.getEClass(packageName, className);
}
/**
 * Computes the exact serialized size of an object in the value-record
 * format. Must stay in exact sync with convertObjectToByteArray(), which
 * asserts the two agree after writing.
 */
private int getExactSize(IdEObject idEObject) {
	int size = 0;
	size += 1; // Length of unsetted bytes
	int bits = 0;
	for (EStructuralFeature eStructuralFeature : idEObject.eClass().getEAllStructuralFeatures()) {
		bits++;
		if (!useUnsetBit(eStructuralFeature, idEObject)) {
			Object val = idEObject.eGet(eStructuralFeature);
			if (eStructuralFeature instanceof EAttribute) {
				EAttribute eAttribute = (EAttribute) eStructuralFeature;
				if (eAttribute.isMany()) {
					size += 4; // list-size int
					for (Object v : ((List<?>) val)) {
						size += getPrimitiveSize(eAttribute.getEAttributeType(), v);
					}
				} else {
					size += getPrimitiveSize(eAttribute.getEAttributeType(), val);
				}
			} else if (eStructuralFeature instanceof EReference) {
				EReference eReference = (EReference) eStructuralFeature;
				if (eReference.isMany()) {
					size += 4; // list-size int
					for (Object v : ((List<?>) val)) {
						size += getWrappedValueSize(v);
					}
				} else {
					if (val == null) {
						size += 2; // the -1 null cid short
					} else {
						size += getWrappedValueSize(val);
					}
				}
			}
		}
	}
	// The unset bitmap: one bit per feature, rounded up to whole bytes.
	size += (int) Math.ceil(bits / 8.0);
	return size;
}
// Outcome of processing one scanned record in getMap(): stop the scan,
// advance to the next record of the same oid, or skip to the next oid.
private enum GetResult {
	STOP, CONTINUE_WITH_NEXT_RECORD, CONTINUE_WITH_NEXT_OID,
}
/**
 * Processes a single key/value record against the query's pid/rid window.
 * Returns STOP when the record's pid does not match the query's pid,
 * CONTINUE_WITH_NEXT_RECORD when only the rid falls outside
 * [stopRid, rid], and CONTINUE_WITH_NEXT_OID once an object for this oid
 * has been resolved (from the object cache, the model, or by deserializing
 * {@code buffer}). Also returns STOP when deserialization yields null.
 */
private GetResult getMap(EClass originalQueryClass, EClass eClass, IfcModelInterface model, ByteBuffer buffer, int keyPid, long keyOid, int keyRid, QueryInterface query, TodoList todoList) throws BimserverDatabaseException {
    checkOpen();
    if (keyPid == query.getPid()) {
        if (keyRid <= query.getRid() && keyRid >= query.getStopRid()) {
            RecordIdentifier recordIdentifier = new RecordIdentifier(query.getPid(), keyOid, keyRid);
            IdEObject cachedObject = objectCache.get(recordIdentifier);
            if (cachedObject != null && ((IdEObjectImpl)cachedObject).getLoadingState() == State.LOADED) {
                // Fully loaded cache hit: add to the model unless it is a wrapped value.
                if (!model.contains(keyOid) && cachedObject.eClass().getEAnnotation("wrapped") == null) {
                    try {
                        model.addAllowMultiModel(keyOid, cachedObject);
                    } catch (IfcModelInterfaceException e) {
                        throw new BimserverDatabaseException(e);
                    }
                }
                return GetResult.CONTINUE_WITH_NEXT_OID;
            } else {
                IdEObject object = null;
                if (model.contains(keyOid)) {
                    object = model.get(keyOid);
                } else {
                    // A single byte of -1 is the tombstone for a deleted entity.
                    if (buffer.capacity() == 1 && buffer.get(0) == -1) {
                        buffer.position(buffer.position() + 1);
                        return GetResult.CONTINUE_WITH_NEXT_OID;
                        // deleted entity
                    } else {
                        object = convertByteArrayToObject(cachedObject, originalQueryClass, eClass, keyOid, buffer, model, keyRid, query, todoList);
                    }
                }
                if (object != null) {
                    objectCache.put(recordIdentifier, object);
                    return GetResult.CONTINUE_WITH_NEXT_OID;
                }
            }
        } else {
            return GetResult.CONTINUE_WITH_NEXT_RECORD;
        }
    } else {
        return GetResult.STOP;
    }
    return GetResult.STOP;
}
/**
 * Loads all objects of {@code eClass} visible to {@code query} into
 * {@code ifcModel}. Record keys are laid out as pid(int) + oid(long) +
 * negated rid(int); when the query carries per-class oid counters the scan
 * starts just past the last known oid for this class, otherwise the whole
 * pid prefix is scanned.
 */
public void getMap(EClass eClass, IfcModelInterface ifcModel, QueryInterface query, TodoList todoList) throws BimserverDatabaseException {
    checkOpen();
    SearchingRecordIterator recordIterator = null;
    String tableName = eClass.getEPackage().getName() + "_" + eClass.getName();
    if (query.getOidCounters() != null) {
        if (!query.getOidCounters().containsKey(eClass)) {
            // No objects of this class in the revision; nothing to scan.
            return;
        }
        long startOid = query.getOidCounters().get(eClass);
        ByteBuffer tmp = ByteBuffer.allocate(12);
        tmp.putInt(query.getPid());
        tmp.putLong(startOid + 1);
        recordIterator = database.getKeyValueStore().getRecordIterator(tableName, BinUtils.intToByteArray(query.getPid()), tmp.array(), this);
    } else {
        // LOGGER.warn("Potential too-many-reads");
        recordIterator = database.getKeyValueStore().getRecordIterator(tableName, BinUtils.intToByteArray(query.getPid()), BinUtils.intToByteArray(query.getPid()), this);
    }
    try {
        Record record = recordIterator.next();
        ByteBuffer nextKeyStart = ByteBuffer.allocate(12);
        while (record != null) {
            if (Thread.currentThread().isInterrupted()) {
                throw new BimserverThreadInterruptedException("Thread interrupted");
            }
            reads++;
            ByteBuffer keyBuffer = ByteBuffer.wrap(record.getKey());
            int keyPid = keyBuffer.getInt();
            long keyOid = keyBuffer.getLong();
            // rid is stored negated in the key (cf. createKeyBuffer calls with -rid).
            int keyRid = -keyBuffer.getInt();
            ByteBuffer valueBuffer = ByteBuffer.wrap(record.getValue());
            GetResult map = getMap(eClass, eClass, ifcModel, valueBuffer, keyPid, keyOid, keyRid, query, todoList);
            if (map == GetResult.CONTINUE_WITH_NEXT_OID) {
                // Object resolved; seek directly to the first record of the next oid.
                nextKeyStart.position(0);
                nextKeyStart.putInt(query.getPid());
                nextKeyStart.putLong(keyOid + 1);
                record = recordIterator.next(nextKeyStart.array());
            } else {
                record = recordIterator.next();
            }
        }
    } finally {
        recordIterator.close();
    }
}
/**
 * Loads every class relevant to {@code query} into {@code ifcModel}, then
 * resolves all lazily-queued references. When the query carries per-class
 * oid counters only those classes are scanned; otherwise every known
 * database class is scanned (logged as inefficient).
 *
 * The two branches previously duplicated the same loop body verbatim; it is
 * now factored into {@link #getMapOfClasses}.
 */
public void getMap(IfcModelInterface ifcModel, QueryInterface query) throws BimserverDatabaseException {
    checkOpen();
    TodoList todoList = new TodoList();
    if (query.getOidCounters() != null) {
        getMapOfClasses(query.getOidCounters().keySet(), ifcModel, query, todoList);
    } else {
        LOGGER.info("Inefficient getMap");
        getMapOfClasses(database.getClasses(), ifcModel, query, todoList);
    }
    processTodoList(ifcModel, todoList, query);
}

/**
 * Scans each eligible class in {@code eClasses} into {@code ifcModel}.
 * Classes annotated "nolazyload" or "nodatabase", and classes excluded by
 * the query, are skipped. Responds to thread interruption between classes.
 */
private void getMapOfClasses(Iterable<EClass> eClasses, IfcModelInterface ifcModel, QueryInterface query, TodoList todoList) throws BimserverDatabaseException {
    for (EClass eClass : eClasses) {
        if (Thread.currentThread().isInterrupted()) {
            throw new BimserverDatabaseException("Thread interrupted");
        }
        if (eClass.getEAnnotation("nolazyload") == null && eClass.getEAnnotation("nodatabase") == null && query.shouldIncludeClass(eClass)) {
            getMap(eClass, ifcModel, query, todoList);
        }
    }
}
/**
 * Loads the single object identified by {@code cid}/{@code oid} (plus any
 * references it drags in via the todo list) into {@code model}. Returns the
 * model unchanged when the cid resolves to no known class.
 */
public IfcModelInterface getMapWithOid(QueryInterface query, short cid, long oid, IfcModelInterface model) throws BimserverDatabaseException {
    checkOpen();
    EClass eClass = database.getEClassForCid(cid);
    if (eClass == null) {
        return model;
    }
    TodoList todoList = new TodoList();
    // Iterate only records whose key starts with (pid, oid), beginning at the
    // revision window's upper bound (rid stored negated in keys).
    ByteBuffer mustStartWith = createKeyBuffer(query.getPid(), oid);
    ByteBuffer key = createKeyBuffer(query.getPid(), oid, -query.getStopRid());
    SearchingRecordIterator recordIterator = database.getKeyValueStore().getRecordIterator(eClass.getEPackage().getName() + "_" + eClass.getName(), mustStartWith.array(),
        key.array(), this);
    checkOpen();
    try {
        Record record = recordIterator.next();
        ByteBuffer nextKeyStart = ByteBuffer.allocate(12);
        while (record != null) {
            reads++;
            ByteBuffer keyBuffer = ByteBuffer.wrap(record.getKey());
            int keyPid = keyBuffer.getInt();
            long keyOid = keyBuffer.getLong();
            int keyRid = -keyBuffer.getInt();
            ByteBuffer valueBuffer = ByteBuffer.wrap(record.getValue());
            GetResult map = getMap(eClass, eClass, model, valueBuffer, keyPid, keyOid, keyRid, query, todoList);
            if (map == GetResult.CONTINUE_WITH_NEXT_OID) {
                // Object resolved; skip remaining revisions of this oid.
                nextKeyStart.position(0);
                nextKeyStart.putInt(query.getPid());
                nextKeyStart.putLong(keyOid + 1);
                record = recordIterator.next(nextKeyStart.array());
            } else {
                record = recordIterator.next();
            }
        }
    } finally {
        recordIterator.close();
    }
    processTodoList(model, todoList, query);
    return model;
}
/**
 * Drains the todo list of lazily-queued objects, fully loading each one and
 * registering it with the model. Loading an object may enqueue further
 * work, so polling continues until the queue is empty.
 *
 * @throws BimserverDatabaseException when a queued object cannot be found
 */
private void processTodoList(IfcModelInterface model, TodoList todoList, QueryInterface query) throws BimserverDatabaseException {
    for (IdEObject pending = todoList.poll(); pending != null; pending = todoList.poll()) {
        IdEObject loaded = get(pending, pending.getOid(), model, query, todoList);
        if (loaded == null) {
            throw new BimserverDatabaseException("Object not found: " + query.getPid() + " " + query.getRid() + " " + pending.getOid() + " " + pending.eClass().getName());
        }
        if (!model.contains(loaded.getOid())) {
            try {
                model.addAllowMultiModel(loaded.getOid(), loaded);
            } catch (IfcModelInterfaceException e) {
                throw new BimserverDatabaseException(e);
            }
        }
    }
}
/**
 * Loads each object in {@code oids} into {@code model}, resolving the class
 * of every oid first.
 *
 * @throws BimserverDatabaseException when an oid maps to no known class
 */
public void getMapWithOids(IfcModelInterface model, Set<Long> oids, QueryInterface query) throws BimserverDatabaseException {
    checkOpen();
    for (Long oid : oids) {
        EClass eClass = getEClassForOid(oid);
        if (eClass == null) {
            throw new BimserverDatabaseException("No class found for oid " + oid);
        }
        getMapWithOid(query, database.getCidOfEClass(eClass), oid, model);
    }
}
/**
 * @return the database's metadata manager (schema/package registry)
 */
public MetaDataManager getMetaDataManager() {
    return database.getMetaDataManager();
}
/**
 * Counts the objects of the given store type visible to {@code query} by
 * running a type-condition query and sizing the result.
 */
private int getObjectCount(Class<? extends IdEObject> clazz, QueryInterface query) throws BimserverDatabaseException {
    Condition condition = new IsOfTypeCondition((EClass) StorePackage.eINSTANCE.getEClassifier(clazz.getSimpleName()));
    return query(condition, clazz, query).size();
}
/**
 * Scans every IfcRoot subclass table for an object whose GUID (serialized
 * as the first attribute after the unset-bitmask) equals {@code guid}
 * within the given pid/rid, returning its oid/cid or null when not found.
 *
 * NOTE(review): when a matching-rid record stores a null GUID
 * (stringLength == -1) this returns null immediately, aborting the scan of
 * all remaining records and classes — confirm that is intended rather than
 * skipping just that record.
 */
public ObjectIdentifier getOidOfGuid(String schema, String guid, int pid, int rid) throws BimserverDatabaseException {
    for (EClass eClass : getMetaDataManager().getPackageMetaData(schema).getAllSubClasses(getMetaDataManager().getPackageMetaData(schema).getEClass("IfcRoot"))) {
        RecordIterator recordIterator = database.getKeyValueStore().getRecordIterator(eClass.getEPackage().getName() + "_" + eClass.getName(), BinUtils.intToByteArray(pid),
            BinUtils.intToByteArray(pid), this);
        try {
            Record record = recordIterator.next();
            while (record != null) {
                reads++;
                ByteBuffer buffer = ByteBuffer.wrap(record.getKey());
                int pidOfRecord = buffer.getInt();
                long oid = buffer.getLong();
                int ridOfRecord = -buffer.getInt();
                if (ridOfRecord == rid && pid == pidOfRecord) {
                    ByteBuffer value = ByteBuffer.wrap(record.getValue());
                    // Skip the unsettable part
                    byte unsettablesSize = value.get();
                    value.position(value.position() + unsettablesSize);
                    if (value.capacity() > 1) {
                        int stringLength = value.getInt();
                        if (stringLength == -1) {
                            return null;
                        } else {
                            String s = BinUtils.readString(value, stringLength);
                            if (s.equals(guid)) {
                                return new ObjectIdentifier(oid, getCid(eClass));
                            }
                        }
                    }
                }
                record = recordIterator.next();
            }
        } finally {
            recordIterator.close();
        }
    }
    return null;
}
/**
 * Collects the oid/cid of every IfcRoot-derived object whose Name attribute
 * equals {@code name} within the given pid/rid. The value layout per record
 * is: unset-bitmask length + bitmask, GUID string, optional OwnerHistory
 * reference (cid short, then oid long when cid != -1), then the Name string.
 *
 * Bug fix: the original returned {@code null} as soon as any record stored
 * a null GUID (stringLength == -1), aborting the whole scan, discarding
 * matches already collected, and violating the Set-returning contract
 * (callers would NPE on the result). Such records are now simply skipped.
 *
 * @return the (possibly empty) set of matching identifiers; never null
 */
public Set<ObjectIdentifier> getOidsOfName(String schema, String name, int pid, int rid) throws BimserverDatabaseException, MetaDataException {
    Set<ObjectIdentifier> result = new HashSet<ObjectIdentifier>();
    for (EClass eClass : getMetaDataManager().getPackageMetaData(schema).getAllSubClasses(getMetaDataManager().getPackageMetaData(schema).getEClass("IfcRoot"))) {
        RecordIterator recordIterator = database.getKeyValueStore().getRecordIterator(eClass.getEPackage().getName() + "_" + eClass.getName(), BinUtils.intToByteArray(pid),
            BinUtils.intToByteArray(pid), this);
        try {
            Record record = recordIterator.next();
            while (record != null) {
                reads++;
                ByteBuffer buffer = ByteBuffer.wrap(record.getKey());
                int pidOfRecord = buffer.getInt();
                long oid = buffer.getLong();
                int ridOfRecord = -buffer.getInt();
                if (ridOfRecord == rid && pid == pidOfRecord) {
                    ByteBuffer value = ByteBuffer.wrap(record.getValue());
                    // Skip the unsettable part
                    byte unsettablesSize = value.get();
                    value.position(value.position() + unsettablesSize);
                    if (value.capacity() > 1) {
                        int stringLength = value.getInt();
                        // stringLength == -1 means a null GUID: skip this record
                        // instead of aborting the scan.
                        if (stringLength != -1) {
                            BinUtils.readString(value, stringLength); // GUID
                            if (value.getShort() != -1) { // CID of OwnerHistory
                                value.getLong(); // OID of OwnerHistory
                            }
                            stringLength = value.getInt();
                            if (stringLength != -1) {
                                String foundName = BinUtils.readString(value, stringLength);
                                if (name.equals(foundName)) {
                                    result.add(new ObjectIdentifier(oid, getCid(eClass)));
                                }
                            }
                        }
                    }
                }
                record = recordIterator.next();
            }
        } finally {
            recordIterator.close();
        }
    }
    return result;
}
/**
 * Serialized size in bytes of one primitive value: 4 for int/float/enum,
 * 1 for boolean, 8 for long/double/date; strings and byte arrays take a
 * 4-byte length prefix plus their payload (UTF-8 for strings). Must stay in
 * sync with writePrimitiveValue/readPrimitiveValue.
 *
 * @throws RuntimeException for any unhandled data type
 */
private int getPrimitiveSize(EDataType eDataType, Object val) {
    if (eDataType == EcorePackage.eINSTANCE.getEInt() || eDataType == EcorePackage.eINSTANCE.getEIntegerObject()) {
        return 4;
    } else if (eDataType == EcorePackage.eINSTANCE.getEFloat() || eDataType == EcorePackage.eINSTANCE.getEFloatObject()) {
        return 4;
    } else if (eDataType == EcorePackage.eINSTANCE.getEBoolean() || eDataType == EcorePackage.eINSTANCE.getEBooleanObject()) {
        return 1;
    } else if (eDataType == EcorePackage.eINSTANCE.getEDate()) {
        return 8;
    } else if (eDataType == EcorePackage.eINSTANCE.getELong() || eDataType == EcorePackage.eINSTANCE.getELongObject()) {
        return 8;
    } else if (eDataType == EcorePackage.eINSTANCE.getEDouble() || eDataType == EcorePackage.eINSTANCE.getEDoubleObject()) {
        return 8;
    } else if (eDataType == EcorePackage.eINSTANCE.getEString()) {
        if (val != null) {
            return 4 + ((String) val).getBytes(Charsets.UTF_8).length;
        }
        return 4; // null string: length prefix only (-1 marker)
    } else if (eDataType == EcorePackage.eINSTANCE.getEByteArray()) {
        if (val != null) {
            return 4 + ((byte[]) val).length;
        }
        return 4;
    } else if (eDataType instanceof EEnum) {
        return 4; // enums are written as a 4-byte ordinal/value
    }
    throw new RuntimeException("Unimplemented: " + eDataType);
}
/**
 * Serialized size of one reference slot: 2 bytes for a null marker, 10
 * bytes (cid short + oid long) for a plain reference, or 2 bytes plus the
 * primitive payload for a "wrapped" value object, whose wrappedValue
 * attribute is inlined instead of stored by reference.
 */
private int getWrappedValueSize(Object val) {
    if (val == null) {
        return 2; // null marker (short)
    }
    if (val instanceof EObject) {
        EObject eObject = (EObject) val;
        int refSize = 10; // cid (short) + oid (long)
        if (eObject.eClass().getEAnnotation("wrapped") != null) {
            IdEObject wrappedValue = (IdEObject) val;
            EStructuralFeature wrappedValueFeature = wrappedValue.eClass().getEStructuralFeature("wrappedValue");
            Object wrappedVal = eObject.eGet(wrappedValueFeature);
            refSize = 2 + getPrimitiveSize((EDataType) wrappedValueFeature.getEType(), wrappedVal);
        }
        return refSize;
    }
    return 10;
}
/**
 * Convenience overload: per-record versioning decided by the object's class.
 */
public boolean perRecordVersioning(IdEObject idEObject) {
    return perRecordVersioning(idEObject.eClass());
}
/**
 * Returns true for classes from the IFC model packages ("Ifc2x3tc1",
 * "Ifc4"), which are versioned per record; all other packages are not.
 */
public boolean perRecordVersioning(EClass eClass) {
    String packageName = eClass.getEPackage().getName();
    return packageName.equals("Ifc2x3tc1") || packageName.equals("Ifc4");
}
/**
 * Creates a plain in-memory model for the given schema metadata and
 * pid-to-roid mapping (no database-backed lazy loading).
 */
public IfcModelInterface createModel(PackageMetaData packageMetaData, Map<Integer, Long> pidRoidMap) {
    return new BasicIfcModel(packageMetaData, pidRoidMap);
}
/**
 * Creates a server-backed model bound to this session, seeded with the
 * query's single pid-to-roid mapping.
 */
public IfcModelInterface createModel(QueryInterface queryInterface) {
    HashMap<Integer, Long> pidToRoid = new HashMap<Integer, Long>();
    pidToRoid.put(queryInterface.getPid(), queryInterface.getRoid());
    return new ServerIfcModel(queryInterface.getPackageMetaData(), pidToRoid, this);
}
/**
 * Fully loads a lazily-created stub object from the database, attaching it
 * to its model (creating a fresh model when the stub has none).
 *
 * @return the loaded object, or null when it could not be found
 */
@SuppressWarnings("unused")
public IdEObject lazyLoad(IdEObject idEObject) throws BimserverDatabaseException {
    if (DEVELOPER_DEBUG) {
        LOGGER.info("Lazy loading " + idEObject.eClass().getName() + " " + idEObject.getOid());
    }
    IfcModelInterface model = ((IdEObjectImpl) idEObject).getModel();
    if (model == null) {
        Map<Integer, Long> pidToRoid = new HashMap<Integer, Long>();
        model = createModel(getMetaDataManager().getPackageMetaData(idEObject.eClass().getEPackage().getName()), pidToRoid);
    }
    idEObject = get(idEObject, idEObject.getOid(), model, ((IdEObjectImpl) idEObject).getQueryInterface(), new TodoList());
    if (idEObject != null) {
        // Bug fix: '&&' binds tighter than '||', so the original condition
        // (DEVELOPER_DEBUG && a || b) logged whenever rid < -100000 even with
        // DEVELOPER_DEBUG disabled. The sanity check is debug-only.
        if (DEVELOPER_DEBUG && (idEObject.getRid() > 100000 || idEObject.getRid() < -100000)) {
            LOGGER.debug("Improbable rid " + idEObject.getRid() + " - " + idEObject);
        }
    }
    return idEObject;
}
/**
 * Callback-style load used where checked exceptions cannot propagate:
 * wraps the two checked database exceptions in their unchecked
 * counterparts. The lock-conflict catch must precede the general one
 * (it is the more specific type).
 */
@Override
public void load(IdEObject idEObject) {
    try {
        lazyLoad(idEObject);
    } catch (BimserverLockConflictException e) {
        throw new UncheckedBimserverLockConflictException(e);
    } catch (BimserverDatabaseException e) {
        throw new UncheckedBimserverDatabaseException(e);
    }
}
/**
 * Allocates a fresh oid for an instance of {@code eClass}.
 */
public Long newOid(EClass eClass) {
    return database.newOid(eClass);
}
/**
 * Allocates a fresh project id.
 */
public int newPid() {
    return database.newPid();
}
/**
 * Runs {@code condition} against a fresh model created for {@code query};
 * see the model-taking overload for details.
 */
public <T extends IdEObject> Map<Long, T> query(Condition condition, Class<T> clazz, QueryInterface query) throws BimserverDatabaseException {
    IfcModelInterface model = createModel(query);
    return query(model, condition, clazz, query);
}
/**
 * Loads every class the condition requires into {@code model}, then filters
 * the model's contents by {@code clazz} and {@code condition}, keyed by oid.
 */
public <T extends IdEObject> Map<Long, T> query(IfcModelInterface model, Condition condition, Class<T> clazz, QueryInterface query) throws BimserverDatabaseException {
    Map<Long, T> result = new HashMap<Long, T>();
    Set<EClass> requiredClasses = new HashSet<EClass>();
    condition.getEClassRequirements(requiredClasses);
    for (EClass eClass : requiredClasses) {
        TodoList todoList = new TodoList();
        getMap(eClass, model, query, todoList);
        processTodoList(model, todoList, query);
        // Snapshot the model's values before filtering, matching the
        // original behavior in case matches() causes further loading.
        List<IdEObject> snapshot = new ArrayList<IdEObject>(model.getValues());
        for (IdEObject candidate : snapshot) {
            if (clazz.isInstance(candidate) && condition.matches(candidate)) {
                result.put(candidate.getOid(), clazz.cast(candidate));
            }
        }
    }
    return result;
}
/**
 * Runs the condition query and returns an arbitrary single match, or null
 * when nothing matches.
 */
public <T extends IdEObject> T querySingle(Condition condition, Class<T> clazz, QueryInterface query) throws BimserverDatabaseException {
    checkOpen();
    Collection<T> matches = query(condition, clazz, query).values();
    return matches.isEmpty() ? null : matches.iterator().next();
}
/**
 * Reads one primitive/enum value from {@code buffer} according to
 * {@code classifier}; mirrors writePrimitiveValue. Null markers: a string
 * length of -1 and a date of -1L both decode to null.
 *
 * @throws RuntimeException for unsupported classifiers
 */
private Object readPrimitiveValue(EClassifier classifier, ByteBuffer buffer, QueryInterface query) {
    if (classifier == EcorePackage.eINSTANCE.getEString()) {
        int length = buffer.getInt();
        if (length != -1) {
            return BinUtils.readString(buffer, length);
        } else {
            return null; // -1 length marks a null string
        }
    } else if (classifier == EcorePackage.eINSTANCE.getEInt() || classifier == EcorePackage.eINSTANCE.getEIntegerObject()) {
        return buffer.getInt();
    } else if (classifier == EcorePackage.eINSTANCE.getELong() || classifier == EcorePackage.eINSTANCE.getELongObject()) {
        return buffer.getLong();
    } else if (classifier == EcorePackage.eINSTANCE.getEFloat() || classifier == EcorePackage.eINSTANCE.getEFloatObject()) {
        return buffer.getFloat();
    } else if (classifier == EcorePackage.eINSTANCE.getEDouble() || classifier == EcorePackage.eINSTANCE.getEDoubleObject()) {
        return buffer.getDouble();
    } else if (classifier == EcorePackage.eINSTANCE.getEBoolean() || classifier == EcorePackage.eINSTANCE.getEBooleanObject()) {
        return buffer.get() == 1; // booleans stored as a single byte, 1 = true
    } else if (classifier == EcorePackage.eINSTANCE.getEDate()) {
        long val = buffer.getLong();
        if (val == -1L) {
            return null; // -1L marks a null date
        }
        return new Date(val);
    } else if (classifier == EcorePackage.eINSTANCE.getEByteArray()) {
        int size = buffer.getInt();
        byte[] result = new byte[size];
        buffer.get(result);
        return result;
    } else if (classifier.getName().equals("Tristate")) {
        // Tristate is resolved against the query's schema package, not Ecore.
        int ordinal = buffer.getInt();
        EEnum tristateEnum = query.getPackageMetaData().getEEnum("Tristate");
        return tristateEnum.getEEnumLiteral(ordinal).getInstance();
    } else if (classifier instanceof EEnum) {
        int ordinal = buffer.getInt();
        EEnum eEnum = (EEnum) classifier;
        return eEnum.getEEnumLiteral(ordinal).getInstance();
    } else {
        throw new RuntimeException("Unsupported type " + classifier.getName());
    }
}
/**
 * Advances {@code buffer} past the serialized value of one feature without
 * materializing it; the skip logic must mirror the real read path.
 */
private void fakeRead(ByteBuffer buffer, EStructuralFeature feature) {
    boolean wrappedValue = feature.getEType().getEAnnotation("wrapped") != null;
    if (feature.isMany()) {
        if (feature.getEType() instanceof EEnum) {
            // NOTE(review): many-valued enums are skipped entirely here
            // (no bytes consumed) — confirm that matches the write path.
        } else if (feature.getEType() instanceof EClass) {
            if (buffer.capacity() == 1 && buffer.get(0) == -1) {
                buffer.position(buffer.position() + 1); // deleted-entity marker
            } else {
                int listSize = buffer.getInt();
                for (int i = 0; i < listSize; i++) {
                    short cid = buffer.getShort();
                    if (cid != -1) {
                        if (wrappedValue) {
                            // Wrapped values inline their primitive payload.
                            EClass eClass = (EClass) feature.getEType();
                            fakePrimitiveRead(eClass.getEStructuralFeature("wrappedValue").getEType(), buffer);
                        } else {
                            buffer.position(buffer.position() + 8); // skip oid
                        }
                    }
                }
            }
        } else if (feature.getEType() instanceof EDataType) {
            int listSize = buffer.getInt();
            for (int i = 0; i < listSize; i++) {
                fakePrimitiveRead(feature.getEType(), buffer);
            }
        }
    } else {
        if (feature.getEType() instanceof EEnum) {
            buffer.position(buffer.position() + 4); // enum ordinal (int)
        } else if (feature.getEType() instanceof EClass) {
            if (buffer.capacity() == 1 && buffer.get(0) == -1) {
                buffer.position(buffer.position() + 1);
            } else {
                short cid = buffer.getShort();
                if (wrappedValue) {
                    // NOTE(review): the many-branch above passes the
                    // wrappedValue feature's type here, but this passes the
                    // EClass itself, which matches no fakePrimitiveRead branch
                    // and skips nothing — confirm which is correct.
                    fakePrimitiveRead(feature.getEType(), buffer);
                } else {
                    if (cid != -1) {
                        buffer.position(buffer.position() + 8); // skip oid
                    }
                }
            }
        } else if (feature.getEType() instanceof EDataType) {
            fakePrimitiveRead(feature.getEType(), buffer);
        }
    }
}
/**
 * Advances {@code buffer} past one primitive value without materializing it;
 * must consume exactly the bytes that writePrimitiveValue produced.
 *
 * Bug fix: strings are serialized with a 4-byte int length prefix
 * (writePrimitiveValue uses putInt, readPrimitiveValue uses getInt), but
 * this method read a 2-byte short, desynchronizing every skip over a string.
 * A length of -1 (null string) now also skips no payload bytes, matching
 * readPrimitiveValue.
 */
private void fakePrimitiveRead(EClassifier classifier, ByteBuffer buffer) {
    if (classifier == EcorePackage.eINSTANCE.getEString()) {
        int length = buffer.getInt();
        if (length != -1) {
            buffer.position(buffer.position() + length);
        }
    } else if (classifier == EcorePackage.eINSTANCE.getEInt()) {
        buffer.position(buffer.position() + 4);
    } else if (classifier == EcorePackage.eINSTANCE.getELong()) {
        buffer.position(buffer.position() + 8);
    } else if (classifier == EcorePackage.eINSTANCE.getEFloat()) {
        buffer.position(buffer.position() + 4);
    } else if (classifier == EcorePackage.eINSTANCE.getEDouble()) {
        buffer.position(buffer.position() + 8);
    } else if (classifier == EcorePackage.eINSTANCE.getEBoolean()) {
        buffer.position(buffer.position() + 1);
    } else if (classifier == EcorePackage.eINSTANCE.getEDate()) {
        buffer.position(buffer.position() + 8);
    }
}
/**
 * Reads an object reference (oid) from {@code buffer} and returns either a
 * cached/model-resident object or a fresh lazy stub of {@code eClass}.
 * For deep queries the stub is queued on {@code todoList} for full loading;
 * otherwise it is registered with the model immediately (unless wrapped).
 */
private IdEObject readReference(EClass originalQueryClass, ByteBuffer buffer, IfcModelInterface model, IdEObject object, EStructuralFeature feature, EClass eClass,
    QueryInterface query, TodoList todoList) throws BimserverDatabaseException {
    // A single -1 byte is the deleted-entity marker; no reference follows.
    if (buffer.capacity() == 1 && buffer.get(0) == -1) {
        buffer.position(buffer.position() + 1);
        return null;
    }
    long oid = buffer.getLong();
    RecordIdentifier recordIdentifier = new RecordIdentifier(query.getPid(), oid, query.getRid());
    // NOTE(review): the cache is populated below with recordIdentifier as key,
    // but queried here with the raw oid — confirm objectCache has a long-keyed
    // lookup, otherwise this get can never hit.
    IdEObject foundInCache = objectCache.get(oid);
    if (foundInCache != null) {
        return foundInCache;
    }
    if (model.contains(oid)) {
        return model.get(oid);
    }
    IdEObjectImpl newObject = createInternal(eClass, query);
    newObject.setOid(oid);
    if (perRecordVersioning(newObject)) {
        newObject.setPid(Database.STORE_PROJECT_ID);
    } else {
        newObject.setPid(query.getPid());
    }
    newObject.setRid(query.getRid());
    try {
        newObject.setModel(model);
    } catch (IfcModelInterfaceException e) {
        LOGGER.error("", e);
    }
    objectCache.put(recordIdentifier, newObject);
    if (query.isDeep() && object.eClass().getEAnnotation("wrapped") == null) {
        if (feature.getEAnnotation("nolazyload") == null) {
            todoList.add(newObject);
        }
    } else {
        if (object.eClass().getEAnnotation("wrapped") == null) {
            try {
                model.addAllowMultiModel(oid, newObject);
            } catch (IfcModelInterfaceException e) {
                throw new BimserverDatabaseException(e);
            }
        }
    }
    return newObject;
}
/**
 * Reconstructs an inlined "wrapped" value object: reads its primitive
 * payload and sets it on a fresh instance's wrappedValue feature. The
 * instance is marked loaded so it is never lazily re-fetched.
 */
private IdEObject readWrappedValue(EStructuralFeature feature, ByteBuffer buffer, EClass eClass, QueryInterface query) {
    EStructuralFeature eStructuralFeature = eClass.getEStructuralFeature("wrappedValue");
    Object primitiveValue = readPrimitiveValue(eStructuralFeature.getEType(), buffer, query);
    IdEObject eObject = createInternal(eClass, query);
    ((IdEObjectImpl) eObject).setLoaded(); // We don't want to go lazy load
    // this
    eObject.eSet(eStructuralFeature, primitiveValue);
    return eObject;
}
/**
 * Stores all objects under the store project with an auto-incremented rid
 * (rid == Integer.MAX_VALUE sentinel in the pid/rid overload).
 */
public void store(Collection<? extends IdEObject> values) throws BimserverDatabaseException {
    checkOpen();
    store(values, Database.STORE_PROJECT_ID, Integer.MAX_VALUE);
}
/**
 * Stores each object in {@code values} under the given pid/rid.
 */
public void store(Collection<? extends IdEObject> values, int pid, int rid) throws BimserverDatabaseException {
    checkOpen();
    for (IdEObject object : values) {
        store(object, pid, rid);
    }
}
/**
 * Stores the object and, transitively, everything it references.
 *
 * NOTE(review): the {@code deep} parameter is never read — storeDeep is
 * called unconditionally, so {@code store(x, false)} still stores deeply.
 * Confirm whether a shallow path was intended.
 */
public void store(IdEObject object, boolean deep) throws BimserverDatabaseException {
    checkOpen();
    Set<IdEObject> done = new HashSet<IdEObject>();
    storeDeep(object, done);
}
/**
 * Recursively stores {@code object} and every object reachable through its
 * references. The {@code done} set both deduplicates work and breaks
 * reference cycles; null references are ignored.
 */
private void storeDeep(IdEObject object, Set<IdEObject> done) throws BimserverDatabaseException {
    if (object == null || done.contains(object)) {
        return; // nothing to do, or already visited (cycle guard)
    }
    done.add(object);
    store(object);
    for (EReference eReference : object.eClass().getEAllReferences()) {
        if (eReference.isMany()) {
            for (Object element : (List<?>) object.eGet(eReference)) {
                storeDeep((IdEObject) element, done);
            }
        } else {
            storeDeep((IdEObject) object.eGet(eReference), done);
        }
    }
}
/**
 * Stores a single object under the store project with an auto-incremented
 * rid; returns its oid.
 */
public long store(IdEObject object) throws BimserverDatabaseException {
    checkOpen();
    return store(object, Database.STORE_PROJECT_ID, Integer.MAX_VALUE);
}
/**
 * Queues {@code object} for commit under the given pid/rid, allocating an
 * oid when it has none. Objects already queued for commit or deletion are
 * skipped, as are "wrapped" value objects (they are inlined by their
 * owners, not stored standalone). rid == Integer.MAX_VALUE means
 * "increment the object's current rid".
 *
 * @return the object's (possibly freshly allocated) oid
 */
public long store(IdEObject object, int pid, int rid) throws BimserverDatabaseException {
    checkOpen();
    if (!objectsToCommit.containsObject(object) && !objectsToDelete.contains(object)) {
        objectCache.put(new RecordIdentifier(pid, object.getOid(), rid), object);
        boolean wrappedValue = object.eClass().getEAnnotation("wrapped") != null;
        if (!wrappedValue) {
            if (object.getOid() == -1) {
                // -1 marks an object that has never been assigned an oid.
                long newOid = newOid(object.eClass());
                ((IdEObjectImpl) object).setOid(newOid);
            }
            object.load();
            ((IdEObjectImpl) object).setPid(pid);
            if (rid == Integer.MAX_VALUE) {
                ((IdEObjectImpl) object).setRid(object.getRid() + 1);
            } else {
                ((IdEObjectImpl) object).setRid(rid);
            }
            addToObjectsToCommit(object);
        }
    }
    return object.getOid();
}
/**
 * Removes a previously queued object from the pending commit set.
 */
public void removeFromCommit(IdEObject idEObject) {
    objectsToCommit.remove(idEObject);
}
/**
 * Serializes one primitive/enum value into {@code buffer}; the byte layout
 * must mirror readPrimitiveValue and getPrimitiveSize. Null markers:
 * strings write a -1 int length, dates write -1L; numeric nulls are written
 * as zero.
 */
private void writePrimitiveValue(EStructuralFeature feature, Object value, ByteBuffer buffer) throws BimserverDatabaseException {
    EClassifier type = feature.getEType();
    if (type == EcorePackage.eINSTANCE.getEString()) {
        if (value == null) {
            buffer.putInt(-1); // null-string marker
        } else {
            String stringValue = (String) value;
            byte[] bytes = stringValue.getBytes(Charsets.UTF_8);
            // NOTE(review): this guard is always false (an int array length
            // cannot exceed Integer.MAX_VALUE) — a smaller intended limit
            // (e.g. Short.MAX_VALUE historically?) may have been lost.
            if (bytes.length > Integer.MAX_VALUE) {
                throw new BimserverDatabaseException("String value too long (max length is " + Integer.MAX_VALUE + ")");
            }
            buffer.putInt(bytes.length);
            buffer.put(bytes);
        }
    } else if (type == EcorePackage.eINSTANCE.getEInt() || type == EcorePackage.eINSTANCE.getEIntegerObject()) {
        if (value == null) {
            buffer.putInt(0);
        } else {
            buffer.putInt((Integer) value);
        }
    } else if (type == EcorePackage.eINSTANCE.getEDouble() || type == EcorePackage.eINSTANCE.getEDoubleObject()) {
        if (value == null) {
            buffer.putDouble(0D);
        } else {
            buffer.putDouble((Double) value);
        }
    } else if (type == EcorePackage.eINSTANCE.getEFloat() || type == EcorePackage.eINSTANCE.getEFloatObject()) {
        if (value == null) {
            buffer.putFloat(0F);
        } else {
            buffer.putFloat((Float) value);
        }
    } else if (type == EcorePackage.eINSTANCE.getELong() || type == EcorePackage.eINSTANCE.getELongObject()) {
        if (value == null) {
            buffer.putLong(0L);
        } else {
            buffer.putLong((Long) value);
        }
    } else if (type == EcorePackage.eINSTANCE.getEBoolean() || type == EcorePackage.eINSTANCE.getEBooleanObject()) {
        if (value == null) {
            buffer.put((byte) 0);
        } else {
            buffer.put(((Boolean) value) ? (byte) 1 : (byte) 0);
        }
    } else if (type == EcorePackage.eINSTANCE.getEDate()) {
        if (value == null) {
            buffer.putLong(-1L); // null-date marker
        } else {
            buffer.putLong(((Date) value).getTime());
        }
    } else if (type.getName().equals("Tristate")) {
        Enumerator eEnumLiteral = (Enumerator) value;
        buffer.putInt(eEnumLiteral.getValue());
    } else if (value instanceof Enumerator) {
        Enumerator eEnumLiteral = (Enumerator) value;
        buffer.putInt(eEnumLiteral.getValue());
    } else if (type == EcorePackage.eINSTANCE.getEByteArray()) {
        if (value == null) {
            buffer.putInt(0);
        } else {
            byte[] bytes = (byte[]) value;
            buffer.putInt(bytes.length);
            buffer.put(bytes);
        }
    } else {
        throw new RuntimeException("Unsupported type " + type.getName());
    }
}
/**
 * Serializes a reference as cid (short) + oid (long). The referenced
 * object must already have a valid (non-negative) oid.
 *
 * @throws BimserverDatabaseException when the referenced oid is negative
 */
private void writeReference(IdEObject object, Object value, ByteBuffer buffer, EStructuralFeature feature) throws BimserverDatabaseException {
    Short cid = database.getCidOfEClass(((EObject) value).eClass());
    buffer.putShort(cid);
    IdEObject idEObject = (IdEObject) value;
    if (idEObject.getOid() < 0) {
        throw new BimserverDatabaseException("Writing a reference with oid " + idEObject.getOid() + ", this is not supposed to happen, referenced: " + idEObject.getOid() + " " + value + " from " + object.getOid() + " " + object);
    }
    buffer.putLong(idEObject.getOid());
}
/**
 * Serializes a "wrapped" value inline: a negated cid (short) marking the
 * wrapped form, followed by the primitive payload. IfcGloballyUniqueId
 * wrappers are additionally persisted as standalone records (allocating an
 * oid when needed).
 *
 * NOTE(review): a BimserverLockConflictException during that standalone
 * write is only logged, not propagated — the in-buffer value still gets
 * written; confirm that is intended.
 */
private void writeWrappedValue(int pid, int rid, Object value, ByteBuffer buffer, PackageMetaData packageMetaData) throws BimserverDatabaseException {
    IdEObject wrappedValue = (IdEObject) value;
    EStructuralFeature eStructuralFeature = wrappedValue.eClass().getEStructuralFeature("wrappedValue");
    Short cid = database.getCidOfEClass(wrappedValue.eClass());
    buffer.putShort((short) -cid); // negative cid signals an inlined wrapped value
    writePrimitiveValue(eStructuralFeature, wrappedValue.eGet(eStructuralFeature), buffer);
    if (wrappedValue.eClass().getName().equals("IfcGloballyUniqueId")) {
        EClass eClass = packageMetaData.getEClass("IfcGloballyUniqueId");
        if (wrappedValue.getOid() == -1) {
            ((IdEObjectImpl) wrappedValue).setOid(newOid(eClass));
        }
        ByteBuffer valueBuffer = convertObjectToByteArray(wrappedValue, ByteBuffer.allocate(getExactSize(wrappedValue)), packageMetaData);
        ByteBuffer keyBuffer = createKeyBuffer(pid, wrappedValue.getOid(), rid);
        try {
            database.getKeyValueStore().storeNoOverwrite(eClass.getEPackage().getName() + "_" + eClass.getName(),
                keyBuffer.array(), valueBuffer.array(), this);
            database.incrementCommittedWrites(1);
        } catch (BimserverLockConflictException e) {
            LOGGER.error("", e);
        }
    }
}
/**
 * Convenience overload: loads the revision into a fresh model and collects
 * the class names present.
 */
public Set<String> getAvailableClassesInRevision(QueryInterface query) throws BimserverDatabaseException {
    IfcModelInterface model = createModel(query);
    return getAvailableClassesInRevision(model, query);
}
/**
 * Loads the revision into {@code ifcModel} and returns the set of EClass
 * names of all objects found.
 *
 * NOTE(review): a BimserverDatabaseException is logged and swallowed,
 * returning null instead of propagating — callers must null-check; confirm
 * this best-effort behavior is intended.
 */
public Set<String> getAvailableClassesInRevision(IfcModelInterface ifcModel, QueryInterface query) throws BimserverDatabaseException {
    checkOpen();
    try {
        getMap(ifcModel, query);
        Set<String> classes = new HashSet<String>();
        for (IdEObject idEObject : ifcModel.getValues()) {
            classes.add(idEObject.eClass().getName());
        }
        return classes;
    } catch (BimserverDatabaseException e) {
        LOGGER.error("", e);
    }
    return null;
}
/**
 * @return the id of this session's underlying transaction
 */
public long getTransactionId() {
    return bimTransaction.getId();
}
/**
 * @return the stack trace captured for this session (used for diagnostics)
 */
public StackTraceElement[] getStackTrace() {
    return stackTrace;
}
/**
 * Creates a new instance of {@code eClass} and queues it for commit under
 * the given pid/rid.
 */
@SuppressWarnings("unchecked")
public <T> T create(EClass eClass, int pid, int rid) throws BimserverDatabaseException {
    checkOpen();
    IdEObject idEObject = createInternal(eClass, null);
    store(idEObject, pid, rid);
    return (T) idEObject;
}
/**
 * Loads all objects of {@code eClass} for the query and returns the first
 * one, or null when none exist.
 */
@SuppressWarnings("unchecked")
public <T extends IdEObject> T getSingle(EClass eClass, QueryInterface query) throws BimserverDatabaseException {
    IfcModelInterface model = createModel(query);
    List<T> matches = getAllOfType(model, eClass, query).getAll((Class<T>) eClass.getInstanceClass());
    return matches.isEmpty() ? null : matches.get(0);
}
/**
 * Commits this session without a progress handler.
 *
 * <p>Only call this method when you are sure no other processes are
 * altering/using the same data. Basically only when the server is starting.
 *
 * @throws BimserverDatabaseException on database failures
 * @throws ServiceException on service-level failures during commit
 */
public void commit() throws BimserverDatabaseException, ServiceException {
    commit(null);
}
/**
 * Creates a new instance of the EClass matching {@code clazz} (resolved by
 * package and simple name) and queues it for commit.
 */
@SuppressWarnings("unchecked")
public <T extends IdEObject> T create(Class<T> clazz) throws BimserverDatabaseException {
    return (T) create(database.getEClass(clazz.getPackage().getName(), clazz.getSimpleName()));
}
/**
 * Creates a new instance of {@code eClass} and queues it for commit under
 * the store project.
 *
 * NOTE(review): a BimserverDatabaseException from store() is logged and
 * swallowed, so the instance is still returned unstored — confirm intended.
 */
public EObject create(EClass eClass) {
    // checkOpen();
    IdEObjectImpl idEObject = createInternal(eClass, null);
    try {
        store(idEObject, Database.STORE_PROJECT_ID, Integer.MAX_VALUE);
    } catch (BimserverDatabaseException e) {
        LOGGER.error("", e);
    }
    return idEObject;
}
/**
 * Loads all objects of {@code eClass} (by package/class name) and returns
 * them, including subtypes, as instances of {@code clazz}.
 */
public <T extends IdEObject> List<T> getAllOfType(EClass eClass, Class<T> clazz, QueryInterface query) throws BimserverDatabaseException {
    IfcModelInterface allOfType = getAllOfType(eClass.getEPackage().getName(), eClass.getName(), query);
    return allOfType.getAllWithSubTypes(clazz);
}
/**
 * Resolves the EClass encoded in the given oid.
 */
public EClass getEClassForOid(long oid) throws BimserverDatabaseException {
    return database.getEClassForOid(oid);
}
/**
 * @return the current oid counter value for {@code eClass}
 */
public long getCounter(EClass eClass) {
    return database.getCounter(eClass);
}
}
|
package com.jcwhatever.bukkit.generic;
import com.jcwhatever.bukkit.generic.internal.ScriptManager;
import com.jcwhatever.bukkit.generic.internal.commands.CommandHandler;
import com.jcwhatever.bukkit.generic.internal.listeners.JCGEventListener;
import com.jcwhatever.bukkit.generic.inventory.KitManager;
import com.jcwhatever.bukkit.generic.items.equipper.EntityEquipperManager;
import com.jcwhatever.bukkit.generic.items.equipper.IEntityEquipper;
import com.jcwhatever.bukkit.generic.jail.JailManager;
import com.jcwhatever.bukkit.generic.regions.RegionManager;
import com.jcwhatever.bukkit.generic.scheduler.BukkitTaskScheduler;
import com.jcwhatever.bukkit.generic.scheduler.ITaskScheduler;
import com.jcwhatever.bukkit.generic.scripting.GenericsScriptEngineManager;
import com.jcwhatever.bukkit.generic.utils.PreCon;
import com.jcwhatever.bukkit.generic.utils.ScriptUtils;
import org.bukkit.entity.EntityType;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.script.ScriptEngineManager;
/**
* GenericsLib Bukkit plugin.
*/
public class GenericsLib extends GenericsPlugin {
// Singleton instance; assigned in the constructor when Bukkit loads the plugin.
private static GenericsLib _instance;
// Registered GenericsPlugin instances, keyed by lower-cased plugin name.
private static Map<String, GenericsPlugin> _pluginNameMap = new HashMap<>(25);
// Registered GenericsPlugin instances, keyed by concrete plugin class.
private static Map<Class<? extends GenericsPlugin>, GenericsPlugin> _pluginClassMap = new HashMap<>(25);
// Per-plugin services; all initialized in onEnablePlugin().
private JailManager _jailManager;
private RegionManager _regionManager;
private EntityEquipperManager _equipperManager;
private ITaskScheduler _scheduler;
private ScriptEngineManager _scriptEngineManager;
private ScriptManager _scriptManager;
private KitManager _kitManager;
private CommandHandler _commandHandler;
/**
 * Get the {@code GenericsLib} plugin instance.
 *
 * @return the singleton set by the constructor; null until Bukkit has
 *         instantiated the plugin.
 */
public static GenericsLib getLib() {
    return _instance;
}
/**
 * Get a Bukkit plugin that implements {@code GenericsPlugin} by name.
 *
 * <p>Lookup is case-insensitive: the name is lower-cased before the map
 * lookup. (NOTE(review): uses the default-locale {@code toLowerCase()};
 * registration must do the same for matching to hold — verify.)</p>
 *
 * @param name The name of the plugin.
 *
 * @return Null if not found.
 */
@Nullable
public static GenericsPlugin getGenericsPlugin(String name) {
    PreCon.notNullOrEmpty(name);
    return _pluginNameMap.get(name.toLowerCase());
}
/**
 * Get a Bukkit plugin that implements {@code GenericsPlugin}, looked up by
 * its concrete class and cast to that type.
 *
 * @param pluginClass The plugin class.
 *
 * @param <T> The plugin type.
 *
 * @return Null if not found.
 */
@Nullable
public static <T extends GenericsPlugin> T getGenericsPlugin(Class<T> pluginClass) {
    PreCon.notNull(pluginClass);
    GenericsPlugin registered = _pluginClassMap.get(pluginClass);
    return registered == null ? null : pluginClass.cast(registered);
}
/**
 * Get all Bukkit plugins that implement {@code GenericsPlugin}.
 *
 * @return a new list; modifying it does not affect the registry.
 */
public static List<GenericsPlugin> getGenericsPlugins() {
    return new ArrayList<>(_pluginNameMap.values());
}
/**
 * Get the default task scheduler.
 *
 * <p>Set during {@code onEnablePlugin}; throws NPE if called before the
 * plugin is constructed.</p>
 */
public static ITaskScheduler getScheduler() {
    return _instance._scheduler;
}
/**
 * Get the global {@code RegionManager}.
 *
 * <p>Set during {@code onEnablePlugin}; throws NPE if called before the
 * plugin is constructed.</p>
 */
public static RegionManager getRegionManager() {
    return _instance._regionManager;
}
/**
* Get the default Jail Manager.
*/
public static JailManager getJailManager() {
return _instance._jailManager;
}
/**
* Get the default entity equipper manager.
*/
public static EntityEquipperManager getEquipperManager() {
return _instance._equipperManager;
}
/**
* Get the default script engine manager.
*
* <p>Returns an instance of {@code GenericsScriptEngineManager}.</p>
*
* <p>Engines returned from the script engine manager are singleton
* instances that are used globally.</p>
*/
public static ScriptEngineManager getScriptEngineManager() {
return _instance._scriptEngineManager;
}
/**
* Get the default script manager.
*/
public static ScriptManager getScriptManager() {
return _instance._scriptManager;
}
/**
* Get an entity equipper from the default entity equipper manager
* for the specified entity type.
*
* @param entityType The entity type
*/
public static IEntityEquipper getEquipper(EntityType entityType) {
return _instance._equipperManager.getEquipper(entityType);
}
/**
* Get the default kit manager.
*/
public static KitManager getKitManager() {
return _instance._kitManager;
}
/**
* Get GenericsLib's internal command handler.
*/
public CommandHandler getCommandHandler() {
return _commandHandler;
}
/**
* Constructor.
*/
public GenericsLib() {
super();
_instance = this;
}
/**
* Get the chat prefix.
*/
@Override
public String getChatPrefix() {
return "[GenericsLib] ";
}
/**
* Get the console prefix.
*/
@Override
public String getConsolePrefix() {
return getChatPrefix();
}
@Override
protected void onEnablePlugin() {
_commandHandler = new CommandHandler();
_scheduler = new BukkitTaskScheduler();
_scriptEngineManager = new GenericsScriptEngineManager();
_kitManager = new KitManager(this, getDataNode().getNode("kits"));
_regionManager = new RegionManager(this);
_jailManager = new JailManager(this, "default", getDataNode().getNode("jail"));
_equipperManager = new EntityEquipperManager();
registerEventListeners(new JCGEventListener());
registerCommands(_commandHandler);
loadScriptManager();
}
@Override
protected void onDisablePlugin() {
// do nothing
// Note: Disabling GenericsLib can break plugins.
}
/*
* Register GenericsPlugin instance.
*/
void registerPlugin(GenericsPlugin plugin) {
_pluginNameMap.put(plugin.getName().toLowerCase(), plugin);
_pluginClassMap.put(plugin.getClass(), plugin);
}
/*
* Unregister GenericsPlugin instance.
*/
void unregisterPlugin(GenericsPlugin plugin) {
_pluginNameMap.remove(plugin.getName().toLowerCase());
_pluginClassMap.remove(plugin.getClass());
}
private void loadScriptManager() {
File scriptFolder = new File(getDataFolder(), "scripts");
if (!scriptFolder.exists() && !scriptFolder.mkdirs()) {
throw new RuntimeException("Failed to create script folder.");
}
_scriptManager = new ScriptManager(this, scriptFolder);
_scriptManager.addScriptApi(ScriptUtils.getDefaultApi(this, _scriptManager));
_scriptManager.reload();
}
}
|
package com.namesny.binarysearchtree;
/**
 * A self-balancing binary search tree using red-black colouring.
 *
 * <p>External (leaf) positions are represented by sentinel nodes whose
 * {@code value} is {@code null} and whose colour is black, so the child
 * links of internal nodes are never {@code null}.</p>
 *
 * @param <T> element type, compared via {@link Comparable#compareTo}
 */
public class RedBlackTree<T extends Comparable<? super T>> implements BinarySearchTree<T> {

    /**
     * Tree root
     */
    protected RedBlackNode<T> root;

    public RedBlackTree() {
        root = new RedBlackNode<T>();
    }

    /**
     * This class represents a node of a Red-Black tree
     *
     * @param <T>
     */
    protected static class RedBlackNode<T extends Comparable<? super T>> {

        /**
         * Node value; {@code null} marks an external (sentinel) node
         */
        protected T value;

        /**
         * Left child
         */
        protected RedBlackNode<T> left;

        /**
         * Right child
         */
        protected RedBlackNode<T> right;

        /**
         * Parent of the node
         */
        protected RedBlackNode<T> parent;

        /**
         * Node color
         */
        protected Color color;

        /**
         * Creates one node
         *
         * @param value node value
         * @param left left child
         * @param right right child
         * @param color node color
         * @param parent parent node
         */
        public RedBlackNode(T value, RedBlackNode<T> left, RedBlackNode<T> right, Color color, RedBlackNode<T> parent) {
            this.value = value;
            this.left = left;
            this.right = right;
            this.color = color;
            this.parent = parent;
        }

        /**
         * Creates one red node with two fresh external children
         *
         * @param value node value
         * @param parent parent node
         */
        public RedBlackNode(T value, RedBlackNode<T> parent) {
            this(value, new RedBlackNode<T>(), new RedBlackNode<T>(), Color.RED, parent);
        }

        /**
         * Creates a black external (sentinel) node
         */
        public RedBlackNode() {
            this(null, null, null, Color.BLACK, null);
        }
    }

    /**
     * Represents the color of a node
     */
    protected static enum Color {
        RED, BLACK
    }

    /**
     * Inserts a value, rebalancing as needed.
     *
     * @param value the value to insert, must not be null
     * @throws DuplicateValueException if the value is already present
     * @throws IllegalArgumentException if value is null
     */
    @Override
    public void insert(T value) throws DuplicateValueException {
        if (value == null) {
            throw new IllegalArgumentException("Value cannot be null");
        }

        if (root.value == null) {
            // Tree was empty; the new node becomes the root.
            root = new RedBlackNode<>(value, null);
            root.left.parent = root;
            root.right.parent = root;
        } else {
            // Walk down to the external node where the value belongs.
            RedBlackNode<T> node = root;
            RedBlackNode<T> previous = null;

            while (node.value != null) {
                if (value.compareTo(node.value) < 0) {
                    previous = node;
                    node = node.left;
                } else if (value.compareTo(node.value) > 0) {
                    previous = node;
                    node = node.right;
                } else {
                    throw new DuplicateValueException("Duplicate value: " + value);
                }
            }

            // Replace the external node with a new red internal node.
            node = new RedBlackNode<>(value, previous);
            node.left.parent = node;
            node.right.parent = node;

            if (value.compareTo(previous.value) < 0) {
                previous.left = node;
            } else {
                previous.right = node;
            }

            rebalanceInsert(node);
        }

        // The root is always black.
        root.color = Color.BLACK;
    }

    @Override
    public void delete(T key) {
        if (key == null) {
            throw new IllegalArgumentException("Value cannot be null");
        }

        delete(key, root);
    }

    @Override
    public T find(T key) {
        if (key == null) {
            throw new IllegalArgumentException("Value cannot be null");
        }

        return find(key, root);
    }

    /**
     * Clears the tree
     */
    @Override
    public void clear() {
        this.root = new RedBlackNode<>();
    }

    /**
     *
     * @return True if the tree is empty
     */
    @Override
    public boolean isEmpty() {
        return root.value == null;
    }

    /**
     * Finds minimum in the tree
     *
     * @return Object with minimal value
     */
    @Override
    public T findMin() {
        return findMin(root).value;
    }

    /**
     * Finds maximum in the tree
     *
     * @return Object with maximal value
     */
    @Override
    public T findMax() {
        return findMax(root).value;
    }

    /**
     * Helper method for deleting a node from the tree.
     *
     * <p>Fixes over the previous revision: equality is decided with
     * {@code compareTo} instead of reference identity ({@code !=}), the
     * two-children case recurses into {@code node.right} (recursing into
     * {@code node} looped forever because the successor value had just been
     * copied into it), and deleting a root with at most one internal child
     * no longer dereferences a null parent.</p>
     *
     * @param key the key to delete
     * @param node root of a subtree from which to delete the object
     */
    private void delete(T key, RedBlackNode<T> node) {

        // First we need to find the node we want to delete.
        while ((node.value != null) && (key.compareTo(node.value) != 0)) {
            if (key.compareTo(node.value) < 0) {
                node = node.left;
            } else {
                node = node.right;
            }
        }

        // if the node doesn't exist
        if (node.value == null) {
            return;
        }

        // The node has two children
        if ((node.left.value != null) && (node.right.value != null)) {
            // Copy the in-order successor's value here, then delete the
            // successor from the right subtree (it has at most one child).
            RedBlackNode<T> successor = findMin(node.right);
            node.value = successor.value;
            delete(successor.value, node.right);
        } else {
            // The node has at most one internal child
            RedBlackNode<T> child = node.left.value != null ? node.left : node.right;

            // Deleting the root: its child (possibly a sentinel) becomes the
            // new root and is recoloured black.
            if (node.parent == null) {
                root = child;
                child.parent = null;
                child.color = Color.BLACK;
                return;
            }

            // Deleted node is Red => just replace it with its child
            if (isRed(node)) {
                if (node.parent.left == node) {
                    node.parent.left = child;
                } else {
                    node.parent.right = child;
                }
                child.parent = node.parent;
            } else {
                // Deleted node is black but has a red child => recolor the child
                if (isRed(child)) {
                    child.color = Color.BLACK;
                    if (node.parent.left == node) {
                        node.parent.left = child;
                    } else {
                        node.parent.right = child;
                    }
                    child.parent = node.parent;
                } else {
                    // Deleted node is black and has a black child =>
                    // the child becomes "double-black" and must be rebalanced.
                    if (node.parent.left == node) {
                        node.parent.left = child;
                    } else {
                        node.parent.right = child;
                    }
                    child.parent = node.parent;
                    rebalanceDelete(child);
                }
            }
        }
    }

    /**
     * Helper method for finding minimum
     *
     * @param node root of a subtree where to find minimum
     * @return the node with minimal value
     */
    private RedBlackNode<T> findMin(RedBlackNode<T> node) {
        // Minimum is in the leftmost node
        while (node.left.value != null) {
            node = node.left;
        }

        return node;
    }

    /**
     * Helper method for finding maximum
     *
     * @param node root of a subtree where to find maximum
     * @return the node with maximal value
     */
    private RedBlackNode<T> findMax(RedBlackNode<T> node) {
        // Maximum is in the rightmost node
        while (node.right.value != null) {
            node = node.right;
        }

        return node;
    }

    /**
     * Helper method for finding a node in the tree.
     *
     * <p>Equality is decided via {@code compareTo}; the previous reference
     * comparison ({@code node.value != key}) never matched an equal-but-
     * distinct key object and always returned null for it.</p>
     *
     * @param key the value of a node we are looking for
     * @param node root of a subtree where to look
     * @return the stored value equal to key, or null if absent
     */
    private T find(T key, RedBlackNode<T> node) {
        // Traverse the tree until we find the value or an external node
        while ((node.value != null) && (key.compareTo(node.value) != 0)) {
            if (key.compareTo(node.value) < 0) {
                node = node.left;
            } else {
                node = node.right;
            }
        }

        return node.value;
    }

    /**
     * Determines the color of a node
     *
     * @param node the node which color we want to know
     * @return True if the node is red (sentinels are always black)
     */
    protected boolean isRed(RedBlackNode<T> node) {
        return (node.value != null) && (node.color == Color.RED);
    }

    /**
     * Re-balances the tree after an insertion
     *
     * @param node root of a subtree that needs re-balancing
     */
    private void rebalanceInsert(RedBlackNode<T> node) {

        /**
         * First situation. The inserted node, its parent and its uncle are red.
         * This is corrected two levels up, so node variable represents the
         * grandparent of the inserted node. Split into two ifs for readability
         */
        if ((node.left.value != null)
                && (isRed(node.left.left) || isRed(node.left.right))
                && isRed(node.left)
                && isRed(node.right)) {

            node.left.color = Color.BLACK;
            node.right.color = Color.BLACK;
            node.color = Color.RED;

        } else if ((node.right.value != null)
                && (isRed(node.right.left) || isRed(node.right.right))
                && isRed(node.right)
                && isRed(node.left)) {

            node.right.color = Color.BLACK;
            node.left.color = Color.BLACK;
            node.color = Color.RED;
        }

        /**
         * Second situation. The inserted node and its parent are red. Parents
         * sibling is black and the inserted node is the opposite child than the
         * parent is the child of the grandparent. The node variable represents
         * grandfather of the inserted node. This does not fix the problem but
         * instead it transforms it into third situation
         */
        if ((node.left.value != null)
                && isRed(node.left)
                && isRed(node.left.right)
                && !isRed(node.right)) {

            rotateLeft(node.left);

        } else if ((node.right.value != null)
                && isRed(node.right)
                && isRed(node.right.left)
                && !isRed(node.left)) {

            rotateRight(node.right);
        }

        /**
         * Third situation. The inserted node and its parent are red. Parents
         * sibling is black and the inserted node is the same child as the
         * parent node is the child of the grandparent.
         */
        if ((node.left.value != null)
                && isRed(node.left)
                && isRed(node.left.left)
                && !isRed(node.right)) {

            rotateRight(node);
            node.parent.right.color = Color.RED;
            node.parent.color = Color.BLACK;

        } else if ((node.right.value != null)
                && isRed(node.right)
                && isRed(node.right.right)
                && !isRed(node.left)) {

            rotateLeft(node);
            node.parent.left.color = Color.RED;
            node.parent.color = Color.BLACK;
        }

        // Rebalance parent
        if (node.parent != null) {
            rebalanceInsert(node.parent);
        }
    }

    /**
     * Rotates tree to the left
     *
     * @param node the node where to rotate
     */
    private void rotateLeft(RedBlackNode<T> node) {

        RedBlackNode<T> newRoot = node.right;
        RedBlackNode<T> parent = node.parent;

        if (node == root) {
            root = newRoot;
        }

        newRoot.parent = parent;
        if (parent != null) {
            if (parent.value.compareTo(newRoot.value) < 0) {
                parent.right = newRoot;
            } else {
                parent.left = newRoot;
            }
        }

        node.right = newRoot.left;
        if (newRoot.left != null) {
            newRoot.left.parent = node;
        }

        newRoot.left = node;
        node.parent = newRoot;
    }

    /**
     * Rotates tree to the right
     *
     * @param node the node where to rotate
     */
    private void rotateRight(RedBlackNode<T> node) {

        RedBlackNode<T> newRoot = node.left;
        RedBlackNode<T> parent = node.parent;

        if (node == root) {
            root = newRoot;
        }

        newRoot.parent = parent;
        if (parent != null) {
            if (parent.value.compareTo(newRoot.value) < 0) {
                parent.right = newRoot;
            } else {
                parent.left = newRoot;
            }
        }

        node.left = newRoot.right;
        if (newRoot.right != null) {
            newRoot.right.parent = node;
        }

        newRoot.right = node;
        node.parent = newRoot;
    }

    /**
     * Re-balances the tree after deleting a node.
     *
     * @param node double-black node that needs re-balancing
     */
    private void rebalanceDelete(RedBlackNode<T> node) {

        if (node == root) {
            return;
        }

        /*
         * If the double-black node is left child
         * The cases for the node being right child are symetrical
         */
        if (node.parent.left == node) {
            RedBlackNode<T> sibling = node.parent.right;

            // Case 1: The sibling of the double-black node is red
            if (isRed(sibling)) {
                node.parent.color = Color.RED;
                sibling.color = Color.BLACK;
                rotateLeft(node.parent);
                // We have transformed it into case 2,3 or 4
            }

            sibling = node.parent.right; // The sibling could have changed after the rotation in case 1

            // Case 2: The sibling is black and has two black children
            if (!isRed(sibling) && !isRed(sibling.left) && !isRed(sibling.right)) {
                sibling.color = Color.RED;
                if (isRed(node.parent)) {
                    // If the parent is red we recolor it black and we are done
                    node.parent.color = Color.BLACK;
                    return;
                } else {
                    // If the parent is black we make it double-black and move up a level to rebalance it
                    rebalanceDelete(node.parent);
                    return;
                }
            }

            sibling = node.parent.right; // The sibling could have changed

            // Case 3: The sibling is black, its left child is red and its right child is black
            if (!isRed(sibling) && isRed(sibling.left) && !isRed(sibling.right)) {
                sibling.left.color = Color.BLACK;
                sibling.color = Color.RED;
                rotateRight(sibling);
                // This is now case 4
            }

            sibling = node.parent.right; // The sibling could have changed

            // Case 4: The sibling is black, its left child is black and its right child is red
            if (!isRed(sibling) && !isRed(sibling.left) && isRed(sibling.right)) {
                sibling.right.color = Color.BLACK;
                if (isRed(node.parent)) {
                    node.parent.color = Color.BLACK;
                    sibling.color = Color.RED;
                }
                rotateLeft(node.parent);
            }

        } else {
            RedBlackNode<T> sibling = node.parent.left;

            // Case 1: The sibling of the double-black node is red
            if (isRed(sibling)) {
                node.parent.color = Color.RED;
                sibling.color = Color.BLACK;
                rotateRight(node.parent);
                // We have transformed it into case 2,3 or 4
            }

            sibling = node.parent.left; // The sibling could have changed after the rotation in case 1

            // Case 2: The sibling is black and has two black children
            if (!isRed(sibling) && !isRed(sibling.left) && !isRed(sibling.right)) {
                sibling.color = Color.RED;
                if (isRed(node.parent)) {
                    // If the parent is red we recolor it black and we are done
                    node.parent.color = Color.BLACK;
                    return;
                } else {
                    // If the parent is black we make it double-black and move up a level to rebalance it
                    rebalanceDelete(node.parent);
                    return;
                }
            }

            sibling = node.parent.left; // The sibling could have changed

            // Case 3: The sibling is black, its left child is black and its right child is red
            if (!isRed(sibling) && !isRed(sibling.left) && isRed(sibling.right)) {
                sibling.right.color = Color.BLACK;
                sibling.color = Color.RED;
                rotateLeft(sibling);
                // This is now case 4
            }

            sibling = node.parent.left; // The sibling could have changed

            // Case 4: The sibling is black, its left child is red and its right child is black
            if (!isRed(sibling) && isRed(sibling.left) && !isRed(sibling.right)) {
                sibling.left.color = Color.BLACK;
                if (isRed(node.parent)) {
                    node.parent.color = Color.BLACK;
                    sibling.color = Color.RED;
                }
                rotateRight(node.parent);
            }
        }
    }
}
|
package com.opera.core.systems.util;
import java.io.IOException;
import java.nio.channels.*;
import java.nio.channels.spi.SelectorProvider;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * This will monitor any selectable channel, such as a SocketChannel or
 * ServerSocketChannel and fire canWrite() or canRead() events when such events
 * are detected by poll().
 *
 * <p>NOTE(review): the lazy singleton in {@link #instance()} is not
 * thread-safe; it assumes all callers run on a single thread — confirm.</p>
 *
 * @author Jan Vidar Krey
 */
public class SocketMonitor {

    private final Logger logger = Logger.getLogger(this.getClass().getName());

    private Selector selector;

    private static SocketMonitor monitor = null;

    public static SocketMonitor instance() {
        if (monitor == null)
            monitor = new SocketMonitor();
        return monitor;
    }

    private SocketMonitor() {
        try {
            selector = SelectorProvider.provider().openSelector();
            logger.fine("Starting up...");
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Unable to open selector", e);
        }
    }

    /**
     * Registers a channel with the monitor.
     *
     * @param channel    the channel to watch
     * @param listener   callback notified on readiness (stored as the key attachment)
     * @param selectMask initial SelectionKey interest-ops mask
     * @return true if registration succeeded
     */
    public boolean add(SelectableChannel channel, SocketListener listener, int selectMask) {
        try {
            logger.fine("Add channel: " + channel.toString());

            // A channel must be non-blocking BEFORE it is registered with a
            // selector; register() on a blocking channel throws
            // IllegalBlockingModeException. The original code registered first.
            if (channel.isBlocking())
                channel.configureBlocking(false);

            channel.register(selector, selectMask, listener);
            return true;
        } catch (IOException e) { // also covers ClosedChannelException
            logger.log(Level.WARNING, "Unable to register channel: " + channel, e);
        }
        return false;
    }

    /**
     * Updates the interest-ops mask of an already-registered channel.
     *
     * <p>The listener is passed through to register() so the key's
     * attachment is preserved; re-registering without it would reset the
     * attachment to null and break event dispatch.</p>
     */
    public boolean modify(SelectableChannel channel, SocketListener listener, int selectMask) {
        logger.fine("Modify channel: " + channel.toString());
        try {
            channel.register(selector, selectMask, listener);
            return true;
        } catch (ClosedChannelException e) {
            logger.log(Level.WARNING, "Unable to modify channel: " + channel, e);
            return false;
        }
    }

    /**
     * Stops monitoring a channel.
     *
     * <p>A selector's key set is unmodifiable and contains SelectionKeys,
     * not channels, so the original {@code selector.keys().remove(channel)}
     * could never work; the channel's key must be cancelled instead.</p>
     */
    public void remove(SelectableChannel channel) {
        logger.fine("Remove channel: " + channel.toString());
        SelectionKey key = channel.keyFor(selector);
        if (key != null)
            key.cancel();
    }

    public static boolean poll()
    {
        return instance().pollSockets(java.lang.Long.MAX_VALUE);
    }

    public static boolean poll(long ms)
    {
        return instance().pollSockets(ms);
    }

    /**
     * Blocks up to {@code ms} milliseconds waiting for readiness events and
     * dispatches them to the registered listeners.
     *
     * @return false if there is nothing to poll or the select failed,
     *         true otherwise
     */
    public boolean pollSockets(long ms) {
        logger.finest("Poll " + selector.keys().size() + " sockets");

        if (selector.keys().isEmpty())
            return false;

        try {
            selector.select(ms);
        } catch (IOException e) {
            logger.log(Level.WARNING, "select() failed", e);
            return false;
        }

        Iterator<SelectionKey> iterator = selector.selectedKeys().iterator();
        while (iterator.hasNext()) {
            SelectionKey key = iterator.next();
            iterator.remove();

            try {
                processSelectionKey(key);
            } catch (IOException e) {
                // Listener failed on this channel; stop monitoring it.
                key.cancel();
            }
        }
        return true;
    }

    /**
     * Dispatches one ready key to its listener and recomputes the interest
     * mask from the listener's return values.
     */
    protected void processSelectionKey(SelectionKey key) throws IOException
    {
        SelectableChannel channel = key.channel();
        SocketListener listener = (SocketListener) key.attachment();

        int currentMask = key.interestOps();
        int triggerMask = key.readyOps();
        int wantedMask = 0;

        // NOTE(review): ACCEPT and CONNECT readiness both call canRead();
        // CONNECT looks like it may have been meant to use canWrite() — confirm.
        if (key.isValid() && key.isAcceptable()) {
            if (listener.canRead(channel))
                wantedMask |= SelectionKey.OP_ACCEPT;
        }

        if (key.isValid() && key.isConnectable()) {
            if (listener.canRead(channel))
                wantedMask |= SelectionKey.OP_CONNECT;
        }

        if (key.isValid() && key.isReadable()) {
            if (listener.canRead(channel))
                wantedMask |= SelectionKey.OP_READ;
        }

        if (key.isValid() && key.isWritable()) {
            if (listener.canWrite(channel))
                wantedMask |= SelectionKey.OP_WRITE;
        }

        // In case we did not trigger something we want to poll for
        int not_triggered = (currentMask & ~triggerMask);
        wantedMask |= not_triggered;

        // Update the selection mask, if it now differs
        if (wantedMask != currentMask)
        {
            if (wantedMask != 0)
            {
                key.interestOps(wantedMask);
            }
            else
            {
                key.cancel();
            }
        }
    }
}
|
package com.alipics.testassets.testclient.service;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.alipics.testassets.testclient.enums.ActionType;
import com.alipics.testassets.testclient.enums.ApiKeyword;
import com.alipics.testassets.testclient.enums.CheckPointResult;
import com.alipics.testassets.testclient.enums.CheckPointType;
import com.alipics.testassets.testclient.enums.HistoryFolderName;
import com.alipics.testassets.testclient.enums.LoopParameterNameInForm;
import com.alipics.testassets.testclient.enums.PreConfigType;
import com.alipics.testassets.testclient.enums.SeperatorDefinition;
import com.alipics.testassets.testclient.enums.TestStatus;
import com.alipics.testassets.testclient.factory.JsonObjectMapperFactory;
import com.alipics.testassets.testclient.httpmodel.CheckPointItem;
import com.alipics.testassets.testclient.httpmodel.Json;
import com.alipics.testassets.testclient.httpmodel.MixActionSettingContainer;
import com.alipics.testassets.testclient.httpmodel.MixActionSettingInfo;
import com.alipics.testassets.testclient.httpmodel.PreConfigItem;
import com.alipics.testassets.testclient.httpmodel.ServiceBoundDataItem;
import com.alipics.testassets.testclient.httpmodel.SqlEntity;
import com.alipics.testassets.testclient.httpmodel.TestResultItem;
import com.alipics.testassets.testclient.model.CheckPointContianer;
import com.alipics.testassets.testclient.model.HttpTarget;
import com.alipics.testassets.testclient.model.KeyValue;
import com.alipics.testassets.testclient.model.Parameter;
import com.alipics.testassets.testclient.model.PreConfigContainer;
import com.alipics.testassets.testclient.model.SqlQueryReturn;
import com.alipics.testassets.testclient.utils.Auto;
import com.alipics.testassets.testclient.utils.FileNameUtils;
import com.alipics.testassets.testclient.utils.HTTPFacade;
import com.alipics.testassets.testclient.utils.HttpServletRequestUtils;
import com.alipics.testassets.testclient.utils.JdbcUtils;
import com.alipics.testassets.testclient.utils.TemplateUtils;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
@Service("testExecuteService")
public class TestExecuteService {
private static final String NODE_PATH="/opt/node";
private static final String NODE_MODULES="/opt/node_modules";
private final String SIGN="__DATASIGN__";
private final String FIELD2BEREPLACED="sign";
private final String SECRETKEY="21218CCA77804D2BA1922C33E0151105";
private static final Logger logger = Logger.getLogger(TestExecuteService.class);
@Autowired
BatchTestService batchTestService;
@Autowired
OutputParameterService outputParameterService;
/**
 * Executes one test (optionally looped) on behalf of the front end.
 *
 * <p>Reads "testPath" and an optional loop-count parameter from the request
 * body, then for each iteration: runs setup actions, builds the request
 * parameter map, fires the test, evaluates checkpoints, and always runs
 * teardown in the finally block. Per-iteration results are accumulated
 * into a list that replaces the single-result payload at the end.</p>
 *
 * @param request the servlet request carrying testPath and loop settings
 * @return a Json envelope: success flag, message, and the result item list
 */
public Json executeTestInFront(HttpServletRequest request) {
    Json j = new Json();
    // Shared reference maps let setup/teardown steps pass values between
    // iterations and into parameter substitution.
    Map<String,Map<String,String>> global_reference_in=new HashMap<String,Map<String,String>>(),global_reference_out=new HashMap<String,Map<String,String>>();
    List<TestResultItem> objlist=new ArrayList<TestResultItem>();
    Map requestmap =new HashMap();
    try{
        String reqbody=HttpServletRequestUtils.getHttpServletRequestBody(request);
        String path = HttpServletRequestUtils.getParameter(request, reqbody ,"testPath");
        if(path==null || path.isEmpty()){
            j.setSuccess(false);
            j.setMsg("path is null or empty");
            return j;
        }
        // Loop spec format: "<count>[<sep><sleep-ms>]"; defaults to one run.
        String looptimes=HttpServletRequestUtils.getParameter(request, reqbody ,LoopParameterNameInForm.name);
        looptimes=(looptimes!=null && !looptimes.isEmpty() )?looptimes:"1";
        String[] loopparas=looptimes.split(SeperatorDefinition.seperator);
        looptimes=loopparas[0];
        if(!looptimes.isEmpty() && StringUtils.isNumeric(looptimes)){
            for(int i=0;i<Integer.parseInt(looptimes);i++){
                try{
                    setupAction(path,requestmap,global_reference_in,global_reference_out);
                    requestmap = getRequestParameterMap(reqbody,path,global_reference_in,global_reference_out);
                    TestResultItem testresult = getTestResultItem(path,requestmap);
                    // Optional pause between loop iterations (second loop param).
                    if(loopparas.length>1)
                        Thread.sleep(Integer.parseInt(StringUtils.isNumeric(loopparas[1])?loopparas[1]:"1"));
                    if(!testresult.getResult().equals(TestStatus.exception)){
                        getCheckpointsAndResultFromFile(path, requestmap, testresult.getResponseInfo(),testresult);
                        j.setSuccess(true);
                    }else{
                        j.setMsg("\n" + testresult.getComment());
                        j.setSuccess(false);
                    }
                    j.setObj(testresult);
                }catch(Exception e){
                    j.setMsg(e.getClass()+e.getMessage());
                    j.setSuccess(false);
                }finally{
                    // Teardown always runs; the current result (stashed in j.obj)
                    // is collected even when the iteration threw.
                    try{
                        TestResultItem result=(TestResultItem)j.getObj();
                        objlist.add(result);
                        teardownAction(path,requestmap,result.getResponseInfo(),global_reference_in,global_reference_out);
                    }catch(Exception e){
                        j.setMsg(e.getClass()+e.getMessage());
                        j.setSuccess(false);
                    }
                }
            }
            // Replace the last single result with the full per-iteration list.
            j.setObj(objlist);
        }else{
            j.setMsg("");
            j.setSuccess(false);
        }
    }catch(Exception e){
        j.setMsg(e.getClass()+e.getMessage());
        logger.error(e.getClass()+e.getMessage());
        j.setSuccess(false);
    }
    return j;
}
/**
 * Persists test results posted by the front end as history files.
 *
 * <p>When the request state is "success" each entry of the posted result
 * collection is rebuilt into a {@code TestResultItem} (including its
 * checkpoint set) and written; otherwise a single exception-status item is
 * written with the comment extracted from the posted payload.</p>
 *
 * <p>NOTE(review): {@code ja.length()} — net.sf.json's JSONArray normally
 * exposes {@code size()}; confirm {@code length()} resolves against the
 * version in use.</p>
 *
 * @param request servlet request with foldername, reqstate and result JSON
 */
public void generateHistoryFile(HttpServletRequest request){
    String reqbody=HttpServletRequestUtils.getHttpServletRequestBody(request);
    JSONObject obj=JSONObject.fromObject(reqbody);
    String foldername =obj.getString("foldername");
    String reqstate=obj.getString("reqstate");
    if(reqstate.equalsIgnoreCase("success")){
        String json=obj.getString("testresultitemcollectionjson");//.replaceAll("__AND", "&");
        JSONArray ja= JSONArray.fromObject(json);
        for(int i=0;i<ja.length();i++){
            TestResultItem tri=new TestResultItem();
            try{
                JSONObject itemobj=ja.getJSONObject(i);
                String result=itemobj.getString("result");
                tri.setResult(result);
                if(!result.equals(TestStatus.exception)){
                    // Non-exception results carry full request/response data
                    // plus their checkpoint items.
                    Set<CheckPointItem> cps=new HashSet<CheckPointItem>();
                    tri.setTime(itemobj.getString("time"));
                    tri.setRequestInfo(itemobj.getString("requestInfo"));
                    tri.setResponseInfo(itemobj.getString("responseInfo"));
                    tri.setDuration(itemobj.getString("duration"));
                    JSONArray jsonarr=JSONArray.fromObject(itemobj.get("checkPoint"));
                    for(int j=0;j<jsonarr.length();j++){
                        CheckPointItem item=(CheckPointItem)JSONObject.toBean(jsonarr.getJSONObject(j), CheckPointItem.class);
                        cps.add(item);
                    }
                    tri.setCheckPoint(cps);
                }else
                    tri.setComment(itemobj.getString("comment"));
            }catch(Exception e){
                // A malformed entry is still recorded, as an exception item.
                tri.setDuration("");
                tri.setResult(TestStatus.exception);
                tri.setComment(e.getClass().toString()+": "+e.getMessage());
            }finally{
                // Every entry, valid or not, is written to the history folder.
                generateHistoryFile(foldername, tri);
            }
        }
    }else{
        // Failed request state: record one exception item; the comment may be
        // wrapped in either a JSON object or a one-element JSON array.
        TestResultItem tri=new TestResultItem();
        tri.setDuration("");
        tri.setResult(TestStatus.exception);
        String comment="";
        String json=obj.getString("obj");
        if(json.startsWith("{") && json.endsWith("}"))
            comment=JSONObject.fromObject(json).get("comment").toString();
        else if(json.startsWith("[") && json.endsWith("]"))
            comment=JSONArray.fromObject(json).getJSONObject(0).get("comment").toString();
        tri.setComment(comment);
        generateHistoryFile(foldername, tri);
    }
}
/**
 * Runs the "setup" mix actions configured for a test before it executes.
 *
 * @param testPath            path of the test whose setup actions to run
 * @param requestmap          request parameters available for substitution
 * @param global_reference_in shared input reference map passed to chained steps
 * @param global_reference_out shared output reference map passed to chained steps
 */
public void setupAction(String testPath,Map requestmap,Map<String,Map<String,String>> global_reference_in,Map<String,Map<String,String>> global_reference_out){
    // No response is available yet, hence the null response argument.
    executeMixAction(testPath, ActionType.setup, requestmap, null,global_reference_in,global_reference_out);
}
/**
 * Runs the "teardown" mix actions configured for a test after it executes.
 *
 * @param testPath            path of the test whose teardown actions to run
 * @param requestParas        request parameters available for substitution
 * @param response            the test's response, usable in teardown SQL substitution
 * @param global_reference_in shared input reference map passed to chained steps
 * @param global_reference_out shared output reference map passed to chained steps
 */
public void teardownAction(String testPath,Map requestParas,String response,Map<String,Map<String,String>> global_reference_in,Map<String,Map<String,String>> global_reference_out){
    executeMixAction(testPath, ActionType.teardown, requestParas, response,global_reference_in,global_reference_out);
}
/**
 * Executes one SQL setup/teardown action described by a JSON snippet.
 *
 * <p>The snippet is first run through placeholder substitution, then
 * deserialized into a {@code SqlEntity} carrying connection settings and the
 * SQL text. For teardown actions, output parameters extracted from the test
 * response are substituted into the SQL before execution.</p>
 *
 * @param testPath     path of the owning test (used for substitution context)
 * @param actionType   {@code ActionType.setup} or {@code ActionType.teardown}
 * @param sqlactionstr JSON describing the connection and SQL statement
 * @param reqParas     request parameters available for substitution
 * @param response     test response (only used for teardown substitution)
 * @return the JDBC update count, or 0 if the action failed
 */
private int executeSqlActionFromJson(String testPath, String actionType, String sqlactionstr, Map reqParas, String response){
    try{
        sqlactionstr = parseText(sqlactionstr,testPath,reqParas,null,null);
        ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
        SqlEntity e = mapper.readValue(sqlactionstr, SqlEntity.class);
        String source=e.getSource();
        String server=e.getServer();
        String port=e.getPort();
        String username=e.getUsername();
        String password=e.getPassword();
        String database=e.getDatabase();
        String sql=e.getSql();
        if(actionType.equalsIgnoreCase(ActionType.teardown)){
            // Teardown SQL may reference values captured from the response.
            sql=processOutputParameter(testPath, response, sql);
        }
        return new JdbcUtils(source, server, port, username, password, database).executeSqlAction(sql);
    }catch(Exception ex){
        // Best-effort: a failed setup/teardown SQL action must not abort the
        // test run, but the failure should be visible in the log instead of
        // being silently swallowed as before.
        logger.error("failed to execute " + actionType + " sql action for " + testPath, ex);
        return 0;
    }
}
/**
 * Executes the mix actions (chained service calls and SQL statements)
 * configured for a test's setup or teardown phase.
 *
 * <p>Loads the phase's action file, expands placeholders via
 * {@code parseText}, then runs each configured action: "service" entries
 * trigger another test by path without checkpoint evaluation; "sql" entries
 * are handed to {@link #executeSqlActionFromJson}.</p>
 *
 * @param testPath path of the owning test
 * @param action   {@code ActionType.setup} or {@code ActionType.teardown}
 * @param reqParas request parameters available for substitution
 * @param response test response (null during setup)
 * @param global_reference_in  shared input reference map for chained tests
 * @param global_reference_out shared output reference map for chained tests
 */
private void executeMixAction(String testPath, String action, Map reqParas,String response,
        Map<String,Map<String,String>> global_reference_in,Map<String,Map<String,String>> global_reference_out){
    String filename= action.equalsIgnoreCase(ActionType.setup) ? FileNameUtils.getSetupActionPath(testPath) :
            FileNameUtils.getTeardownActionPath(testPath);
    File f=new File(filename);
    if(f.exists()){
        try {
            String settings = FileUtils.readFileToString(f, "UTF-8");
            settings = parseText(settings,testPath,reqParas,global_reference_in,global_reference_out);
            ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
            // Deserialize the placeholder-expanded settings string. The
            // previous code re-read the raw file here (readValue(f, ...)),
            // silently discarding the parseText() substitution above.
            MixActionSettingContainer c = mapper.readValue(settings, MixActionSettingContainer.class);
            for(Entry<String,MixActionSettingInfo> entry : c.getMixActionSettings().entrySet()){
                MixActionSettingInfo info=entry.getValue();
                String setting=info.getSetting();
                String type=info.getType();
                if(type.equalsIgnoreCase("service")){
                    // A "service" action points at another test's folder.
                    if(new File(setting).exists()){
                        batchTestService.executeTestByPathWithoutCheckpoint(setting,global_reference_in,global_reference_out);
                    }
                }else if(type.equalsIgnoreCase("sql")){
                    executeSqlActionFromJson(testPath,action,setting,reqParas,response);
                }
            }
        } catch (IOException e) {
            // Best-effort phase: log instead of printing to stderr.
            logger.error("failed to execute " + action + " actions for " + testPath, e);
        }
    }
}
/**
 * Loads the checkpoint definitions for a test, evaluates each one against
 * the response, and records pass/fail on the supplied result item.
 *
 * <p>The overall result starts as {@code pass} and is downgraded to
 * {@code fail} on the first failing checkpoint, or set to {@code invalid}
 * when the checkpoint file is missing or empty.</p>
 *
 * @param foldername   test folder whose checkpoint file to load
 * @param parameters   request parameters available for substitution
 * @param responseinfo raw response text the checkpoints are checked against
 * @param testresult   result item updated in place with checkpoint outcomes
 * @return the (possibly updated) checkpoint set of {@code testresult}
 */
public Set<CheckPointItem> getCheckpointsAndResultFromFile(String foldername,Map parameters, String responseinfo, TestResultItem testresult){
    try{
        File checkpoint=new File(FileNameUtils.getCheckPointsFilePath(foldername));
        if(checkpoint.exists()){
            ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
            String ckstr = FileUtils.readFileToString(checkpoint, "UTF-8");
            ckstr=parseText(ckstr,foldername,parameters,null,null);
            CheckPointContianer c = mapper.readValue(ckstr, CheckPointContianer.class);
            testresult.setResult(TestStatus.pass);
            if(c.getCheckPoint().entrySet().size()==0){
                // A checkpoint file with no entries cannot validate anything.
                testresult.setResult(TestStatus.invalid);
                return testresult.getCheckPoint();
            }
            for(Entry<String,CheckPointItem> entry:c.getCheckPoint().entrySet()){
                // Previously a fresh CheckPointItem was allocated here and
                // immediately overwritten by entry.getValue(); the dead
                // allocation (and a no-op setCheckInfo call) are removed.
                CheckPointItem item = entry.getValue();
                String checktype=item.getType();
                String checkInfo=item.getCheckInfo();
                if(checktype.equals(CheckPointType.CheckSql)){
                    // SQL checkpoints may reference response-derived values.
                    checkInfo=processOutputParameter(foldername, responseinfo, checkInfo);
                    addCheckPointItemsForDBVerification(testresult,checkInfo,responseinfo);
                }else if(checktype.equals(CheckPointType.CheckJsExp)){
                    // JS-expression checkpoints: slice the response between the
                    // configured start/end markers, then evaluate expressions.
                    String[] arr=checkInfo.split(SeperatorDefinition.checkInfoSeperator);
                    String objtext=responseinfo.replaceAll("\\n","").replaceAll("\\r","");
                    objtext=StringUtils.substringAfter(objtext, arr[0]);
                    if(!arr[1].isEmpty()){
                        objtext=StringUtils.substringBefore(objtext, arr[1]);
                    }
                    addCheckPointItemsForJsExpVerification(testresult,arr[2].split("`"),objtext.trim());
                }else{
                    // Simple text checkpoints: substring containment or regex
                    // match over the whole (flattened) response.
                    boolean r=false;
                    if(checktype.equals(CheckPointType.CheckContain)){
                        r = responseinfo.contains(checkInfo);
                    }else if(checktype.equals(CheckPointType.CheckRegExp)){
                        try{
                            r = responseinfo.replaceAll("\\n","").replaceAll("\\r","").matches(checkInfo);
                        }catch(Exception e){
                            // Invalid regex counts as a failed checkpoint.
                            r=false;
                            item.setCheckInfo(checkInfo+"\n"+""+e.getMessage());
                        }
                    }
                    if(r){
                        item.setResult(CheckPointResult.pass);
                    }else{
                        item.setResult(CheckPointResult.fail);
                        if(testresult.getResult().equalsIgnoreCase(CheckPointResult.pass))
                            testresult.setResult(TestStatus.fail);
                    }
                    testresult.getCheckPoint().add(item);
                }
            }
        }
        else
            testresult.setResult(TestStatus.invalid);
    }catch(Exception e){
        logger.error("test execute error",e);
    }
    return testresult.getCheckPoint();
}
// Verifies DB-backed checkpoints: runs the configured SQL once, then compares
// selected column/row values against the expected values and appends one
// CheckPointItem per verified cell to the test result.
// Setting layout (8 fields): source`server`port`username`password`db`sql`data.
private void addCheckPointItemsForDBVerification(TestResultItem testresult, String setting, String response){
    String[] arr = setting.split(SeperatorDefinition.checkInfoSeperator);
    if(arr.length == 8){
        String source = arr[0];
        String server = arr[1];
        String port = arr[2];
        String username = arr[3];
        String password = arr[4];
        String db = arr[5];
        String sql = arr[6];
        String data = arr[7];
        // Create the JDBC helper once and reuse it for every verified cell;
        // the original re-created it per row, which is loop-invariant work.
        JdbcUtils jdbc = new JdbcUtils(source, server, port, username, password, db);
        SqlQueryReturn sqr = jdbc.getReturnedColumnsAndRows(sql);
        for(String item : data.split(SeperatorDefinition.verifiedDataRow)){
            String[] a = item.split(SeperatorDefinition.verifiedDataItem);
            String column = a[0];
            String rowIndex = a[1];
            String comparedType = a[2];
            String expectedValue = a[3].trim();
            String actualValue = jdbc.getValueByColumnAndRowIndex(sqr, column, rowIndex).trim();
            boolean res = false;
            if(comparedType.equalsIgnoreCase("equal")){
                res = expectedValue.equalsIgnoreCase(actualValue);
            }else if(comparedType.equalsIgnoreCase("contain")){
                // NOTE(review): this checks expected CONTAINS actual; if the intent
                // is "actual contains the expected fragment" the operands are
                // reversed -- confirm against existing test data before changing.
                res = expectedValue.contains(actualValue);
            }else if(comparedType.equalsIgnoreCase("regExp")){
                res = actualValue.matches(expectedValue);
            }else if(comparedType.equalsIgnoreCase("equalFromResponse")){
                // Expected value is extracted from the live response via lb/rb/times.
                String[] str = expectedValue.split(SeperatorDefinition.shrinkResponseSeperator);
                expectedValue = getParaValueFromResponse(response, str[0], str[1], Integer.parseInt(str[2]));
                res = expectedValue.equalsIgnoreCase(actualValue);
            }
            CheckPointItem cp = new CheckPointItem();
            cp.setName("Verify Column: "+column+" in DB: "+db);
            cp.setType("sql "+comparedType);
            cp.setCheckInfo("Expected: "+expectedValue+"; Actual: "+actualValue);
            cp.setResult(res ? CheckPointResult.pass : CheckPointResult.fail);
            testresult.getCheckPoint().add(cp);
            // Only a currently-passing overall result can be demoted to fail.
            if(testresult.getResult().equalsIgnoreCase(CheckPointResult.pass)){
                if(!res)
                    testresult.setResult(CheckPointResult.fail);
            }
        }
    }
}
// Verifies checkpoints of type CheckJsExp: wraps the response fragment in a
// generated Node.js script, evaluates each expression there, and records one
// pass/fail CheckPointItem per expression on the test result.
private void addCheckPointItemsForJsExpVerification(TestResultItem testresult, String[] exps, String objtext){
String objDef="";
String res="";
if(!objtext.isEmpty()){
// JSON payload: expose it to the expressions as `obj` (single quotes are
// normalised to double quotes so the generated literal stays valid).
if(objtext.indexOf("{")>-1 & objtext.indexOf("{")<objtext.indexOf("}")){
objDef="var obj=JSON.parse('"+objtext.replace(" ", "").replace("'", "\"")+"');";
}
//xmldom npm
// XML payload: parse with the xmldom npm package instead.
else if(objtext.indexOf("<")>-1 & objtext.indexOf("<")<objtext.indexOf(">")){
objDef="var DOMParser = require('"+NODE_MODULES+"/xmldom').DOMParser;var obj=new DOMParser().parseFromString('"+objtext.replace("'", "\"")+"','text/xml');";
}
// One console.info(...) per expression; each is expected to print a boolean
// on its own output line, consumed by the split("\n") below.
for(int i=0;i<exps.length;i++){
objDef+="console.info("+exps[i]+");";
}
// Write the script to a uniquely-named temp file and run it with node.
String filename=new Date().getTime()+".js";
File f=new File(filename);
try{
f.createNewFile();
FileUtils.writeStringToFile(f, objDef);
Runtime runtime = Runtime.getRuntime();
Process p = runtime.exec(NODE_PATH+" "+f.getAbsolutePath());
InputStream err = p.getErrorStream();
InputStream is = p.getInputStream();
p.getOutputStream().close();
// Drain stderr then stdout before waiting, to avoid pipe-buffer deadlock.
res = IOUtils.toString(err,"gbk");
res += IOUtils.toString(is,"gbk");
// NOTE(review): "\\n" is the two-character sequence backslash+n, not a
// newline, so this likely never matches and res is left untouched. If the
// intent was to strip a trailing newline, "\n" was meant -- confirm before
// changing, since result.split("\n") below depends on the output shape.
res = StringUtils.substringBeforeLast(res,"\\n");
int exitVal = p.waitFor();
}catch(Exception e){
res=e.getMessage();
}finally{
// Best-effort cleanup of the generated script file.
if(f.exists())
f.delete();
}
}
res = res!=null ? res:"";
// Expect exactly one output line per expression; on any mismatch (node
// failure, parse error) every expression is scored "false".
String[] result=res.split("\n");
for(int i=0;i<exps.length;i++){
CheckPointItem cp=new CheckPointItem();
cp.setName("Verify content by js expression "+(i+1));
cp.setType("jsExp");
cp.setCheckInfo(exps[i]);
boolean r=Boolean.parseBoolean(result.length==exps.length ? result[i] : "false");
cp.setResult(r ? CheckPointResult.pass : CheckPointResult.fail);
testresult.getCheckPoint().add(cp);
// A passing overall result is demoted to fail on the first failed expression.
if(testresult.getResult().equalsIgnoreCase(CheckPointResult.pass)){
if(!r)
testresult.setResult(CheckPointResult.fail);
}
}
}
// Executes the referenced service once and extracts a single parameter value
// from its response body using the configured left/right boundaries.
// extraInfo layout: <test path> SEP <left boundary> SEP <right boundary>.
private String getParameterValueAfterRequest(String extraInfo){
    String[] parts = extraInfo.split(SeperatorDefinition.paraForReferencedService);
    String responseBody = getTestResponseBody(parts[0], null, null).getObj().toString();
    // Always take the first occurrence between the boundaries.
    return getParaValueFromResponse(responseBody, parts[1], parts[2], 1);
}
// Returns the text found between `lb` and `rb` in `response`, taking the
// `times`-th occurrence (1-based); returns "" when there are fewer matches
// than requested or none at all.
private String getParaValueFromResponse(String response, String lb, String rb, int times){
    String[] matches = StringUtils.substringsBetween(response, lb, rb);
    if(matches == null || matches.length < times){
        return "";
    }
    return matches[times - 1];
}
// Resolves environment variables ("[[..]]"), cross-service request/output
// parameter references and template placeholders in `val`. Because this is
// also used while parsing request parameters, it deliberately does NOT
// resolve this test's own output parameters.
// Best-effort: on any failure the text resolved so far is returned.
private String parseText(String val, String path, Map<String,Object> request,
        Map<String,Map<String,String>> global_reference_in, Map<String,Map<String,String>> global_reference_out){
    try {
        if(val.contains("[[") && val.contains("]]"))
            val = processEnv(loadEnv(path), val);
        // A default value may reference another service's request/output parameters.
        val = parseOtherServiceReqParameter(val, global_reference_in, global_reference_out);
        val = parseOtherServiceOutParameter(val, global_reference_out);
        val = TemplateUtils.getString(val, request);
        return val;
    } catch (Exception e) {
        // Keep the best-effort contract, but log the cause instead of
        // silently swallowing it (was an empty auto-generated catch block).
        logger.error("parseText failed for path " + path, e);
        return val;
    }
}
//for back-end test execution usage
// Builds the request-parameter map for the test at `path`: reads the
// HttpTarget definition, resolves each parameter's default value and merges
// in values produced by the pre-config file. `global_reference_in/out` cache
// already-resolved parameters across referenced services; callers such as
// getTestResponseBody may pass null for either cache.
public Map<String,Object> getRequestParameterMap(String path, Map<String,Map<String,String>> global_reference_in, Map<String,Map<String,String>> global_reference_out){
    Map<String,Object> request = new HashMap<String,Object>();
    // Normalise null caches to empty ones: several call sites (e.g. the
    // processOutputParameter chain) pass null, which previously triggered an
    // NPE that was silently swallowed below, losing every parameter.
    if(global_reference_in == null)
        global_reference_in = new HashMap<String,Map<String,String>>();
    if(global_reference_out == null)
        global_reference_out = new HashMap<String,Map<String,String>>();
    try {
        // "auto" exposes the Auto helper's generated values to the templates.
        request.put("auto", new Auto());
        ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
        Map<String, Parameter> paras = new HashMap<String, Parameter>();
        // Only http-style tests ("-c" suffix) carry an HttpTarget definition.
        if(path.endsWith("-c")){
            String targetjson = FileNameUtils.getHttpTarget(path);
            HttpTarget target = mapper.readValue(new File(targetjson), HttpTarget.class);
            paras = target.getParameters();
        }
        for(Parameter p : paras.values()){
            String name = p.getName();
            String val = p.getDefaultValue();
            // Prefer a value already resolved and cached for this test.
            if(global_reference_in.containsKey(path)){
                Map<String,String> reqparas = global_reference_in.get(path);
                if(reqparas.containsKey(name)){
                    request.put(name, reqparas.get(name));
                    continue;
                }
            }
            val = parseText(val, path, request, global_reference_in, global_reference_out);
            request.put(name, val);
        }
        request = getParametersFromPreConfigFile(path, request, global_reference_in, global_reference_out);
    } catch (Exception e) {
        logger.error(e.getClass().toString() + ": " + e.getMessage());
    }
    return request;
}
//for submit form usage
// Builds the request-parameter map from a submitted form body: every form
// field is env/reference/template-resolved via parseText, then the
// pre-config file parameters are merged in.
private Map<String,Object> getRequestParameterMap(String reqbody, String path,
        Map<String,Map<String,String>> global_reference_in, Map<String,Map<String,String>> global_reference_out){
    Map<String,Object> requestmap = new HashMap<String,Object>();
    try{
        Map<String,String> map = HttpServletRequestUtils.getParameterMapFromFormData(reqbody);
        // "auto" exposes the Auto helper's generated values to the templates.
        requestmap.put("auto", new Auto());
        // Typed entry iteration (the original used a raw Entry plus toString()).
        for(Entry<String,String> entry : map.entrySet()){
            String parakey = entry.getKey();
            String paravalue = parseText(entry.getValue(), path, requestmap, global_reference_in, global_reference_out);
            requestmap.put(parakey, paravalue);
        }
        requestmap = getParametersFromPreConfigFile(path, requestmap, global_reference_in, global_reference_out);
    }catch(Exception e){
        logger.error(e.getClass().toString() + ": " + e.getMessage());
    }
    return requestmap;
}
// Resolves a "preapi(<path>) <name>"-style reference to a REQUEST parameter
// of another test: returns the cached value when available, otherwise builds
// that test's request map, caches the resolved value and returns it.
// `val` is returned unchanged when it contains no such reference.
public String parseOtherServiceReqParameter(String val, Map<String,Map<String,String>> global_reference_in, Map<String,Map<String,String>> global_reference_out){
    if(StringUtils.contains(val, ApiKeyword.preapi + "(")){
        String path = StringUtils.substringBetween(val, ApiKeyword.preapi + "(", ")");
        String name = StringUtils.substringAfter(val, path + ")").trim();
        // Guard: parseText is sometimes invoked with null caches (e.g. from the
        // checkpoint-parsing path), which previously NPE'd here and silently
        // dropped the reference.
        if(global_reference_in == null)
            global_reference_in = new HashMap<String,Map<String,String>>();
        if(global_reference_in.containsKey(path)){
            Map<String,String> reqparas = global_reference_in.get(path);
            if(reqparas.containsKey(name)){
                return reqparas.get(name);
            }
        }
        Map<String,Object> paras = getRequestParameterMap(path.trim(), global_reference_in, global_reference_out);
        for(Entry<String,Object> en : paras.entrySet()){
            if(en.getKey().equalsIgnoreCase(name)){
                val = en.getValue().toString();
                // Cache the resolved value for later references to the same test.
                Map<String,String> reqparas = global_reference_in.containsKey(path) ? global_reference_in.get(path) : new HashMap<String,String>();
                reqparas.put(name, val);
                global_reference_in.put(path, reqparas);
                break;
            }
        }
    }
    return val;
}
// Resolves an "outvar(<path>) <name>"-style reference to an OUTPUT parameter
// of another test: returns the cached value when present, otherwise executes
// that test, extracts the output parameter, caches and returns it.
// `val` is returned unchanged when it contains no such reference.
public String parseOtherServiceOutParameter(String val, Map<String,Map<String,String>> global_reference_out){
    if(StringUtils.contains(val, ApiKeyword.outvar + "(")){
        String path = StringUtils.substringBetween(val, ApiKeyword.outvar + "(", ")");
        String outpara = StringUtils.substringAfter(val, path + ")").trim();
        // Guard: callers may pass a null cache (see the parseText call sites).
        if(global_reference_out == null)
            global_reference_out = new HashMap<String,Map<String,String>>();
        if(global_reference_out.containsKey(path)){
            // BUGFIX: the cache is keyed by path; the original looked the inner
            // map up with get(outpara), which returned null (or the wrong map),
            // defeating the cache and risking an NPE on containsKey below.
            Map<String,String> resparas = global_reference_out.get(path);
            if(resparas.containsKey(outpara)){
                return resparas.get(outpara);
            }
        }
        val = processOutputParameter(path, "{{" + outpara + "}}");
        Map<String,String> outparas = global_reference_out.containsKey(path) ? global_reference_out.get(path) : new HashMap<String,String>();
        outparas.put(outpara, val);
        global_reference_out.put(path, outparas);
    }
    return val;
}
// Merges parameters produced by the test's pre-config file into `request`:
// pass 1 handles "service" items (values extracted from a referenced
// service's response body); pass 2 re-parses the file with the enriched
// parameter map and handles "query" items (values fetched from a DB by SQL).
// Returns a new map; the `request` argument itself is not modified.
private Map<String,Object> getParametersFromPreConfigFile(String testPath,Map<String,Object> request,
Map<String,Map<String,String>> global_reference_in,Map<String,Map<String,String>> global_reference_out){
Map<String,Object> para=new HashMap<String,Object>();
para.putAll(request);
try {
File f=new File(FileNameUtils.getPreConfigFilePath(testPath));
if(f.exists()){
ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
PreConfigContainer c = mapper.readValue(f, PreConfigContainer.class);
//service/
// Pass 1: each "service" setting names another test plus a list of
// (name, lb, rb, times) extraction rules applied to that test's response.
for(Entry<String,PreConfigItem> entry:c.getPreConfig().entrySet()){
String type=entry.getValue().getType();
if(type.equalsIgnoreCase(PreConfigType.service)){
String setting=entry.getValue().getSetting();
String[] arr=setting.split(SeperatorDefinition.paraForReferencedService);
String path=arr[0];
String[] configs=arr[1].split(SeperatorDefinition.queryBoundRow);
String res="";
// NOTE(review): global_reference_in must be non-null here; callers are
// expected to normalise null caches first -- verify against call sites.
Map<String,String> reqparas=global_reference_in.containsKey(testPath) ? global_reference_in.get(testPath) : new HashMap<String,String>();
for(String item : configs){
String[] info=item.split(SeperatorDefinition.queryBoundItem);
// Reuse a cached value when this parameter was already resolved.
if(global_reference_in.containsKey(path)){
if(reqparas.containsKey(info[0])){
para.put(info[0], reqparas.get(info[0]));
continue;
}
}
String lb=info[1];
String rb=info[2];
int times=Integer.parseInt(info[3]);
// Invoke the referenced service at most once per pre-config item.
if(res.isEmpty()){
res = getTestResponseBody(path,global_reference_in,global_reference_out).getObj().toString();
}
String value=getParaValueFromResponse(res,lb,rb,times);
para.put(info[0], value);
reqparas.put(info[0], value);
}
global_reference_in.put(testPath, reqparas);
}
}
// Pass 2: re-read the pre-config file as text, resolve its placeholders
// with the parameters gathered so far, then run the "query" items.
String preconfigstr = FileUtils.readFileToString(f, "UTF-8");
preconfigstr=parseText(preconfigstr,testPath,para,global_reference_in,global_reference_out);
c = mapper.readValue(preconfigstr, PreConfigContainer.class);
for(Entry<String,PreConfigItem> entry:c.getPreConfig().entrySet()){
String type=entry.getValue().getType();
if(type.equalsIgnoreCase(PreConfigType.query)){
String setting=entry.getValue().getSetting();
// Setting layout: datasource`server`port`username`password`db`sql`bindings.
String[] arr=setting.split(SeperatorDefinition.paraForReferencedService);
String datasource=arr[0];
String server=arr[1];
String port=arr[2];
String username=arr[3];
String password=arr[4];
String db=arr[5];
String sql=arr[6];
String[] configs=arr[7].split(SeperatorDefinition.queryBoundRow);
SqlQueryReturn sqr= new JdbcUtils(datasource,server,port,username,password,db).getReturnedColumnsAndRows(sql);
for(String item : configs){
String[] info=item.split(SeperatorDefinition.queryBoundItem);
String columnLabel=info[1];
String rowIndex=info[2];
String value=new JdbcUtils(datasource,server,port,username,password,db).getValueByColumnAndRowIndex(sqr,columnLabel,rowIndex);
para.put(info[0], value);
}
}
}
}
}catch (IOException e) {
logger.error(e.getClass()+e.getMessage());
}
return para;
}
// Persists a test result as a JSON file under <folder>/<history folder>/,
// creating the directory on demand. Failures are logged, never propagated.
public void generateHistoryFile(String foldername, TestResultItem testresult) {
    try{
        String folder = foldername + "/" + HistoryFolderName.folderName;
        String filename = FileNameUtils.getResultFile(testresult.getTime(), testresult.getDuration(), testresult.getResult());
        File dir = new File(folder);
        if(!dir.exists()){
            dir.mkdirs();
        }
        // Jackson creates/overwrites the target file itself, so the original's
        // explicit createNewFile() call was redundant.
        ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
        mapper.writeValue(new File(dir, filename), testresult);
    } catch (IOException e) {
        // JsonGenerationException and JsonMappingException both extend
        // IOException, so one handler replaces the original three catches.
        logger.error("", e);
    }
}
// Fires the HTTP request defined by the test at `path` (used for setup /
// teardown style invocations) and returns the HTTP status code, or 0 when
// the request could not be completed.
// NOTE(review): `request` is a raw Map; entries are cast to Entry<String,String>
// below although values may be non-String (e.g. the "auto" helper object) --
// the instanceof check guards the value use, not the cast itself.
private int executeHttpServiceRequest(String path, Map request){
int reponsestatus=0;
try{
Map<String,String> evnmap=loadEnv(path);
ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
String httptargetjson=FileNameUtils.getHttpTarget(path);
HttpTarget target = mapper.readValue(new File(httptargetjson), HttpTarget.class);
// Resolve [[env]] variables and template placeholders in the URL and body.
String url=processEnv(evnmap,target.getPath());
url=TemplateUtils.getString(url, request);
HTTPFacade hf=new HTTPFacade();
hf.setRequesttimeout(600*1000);
hf.setUrl(url);
String body=processEnv(evnmap,target.getRequestBody());
body=TemplateUtils.getString(body, request);
Set<KeyValue> headset=target.getHeads();
for(KeyValue kv:headset){
hf.addHeaderValue(kv.getKey(), kv.getValue());
}
if(body==null || body.trim().equals("")){
// No body configured: plain GET.
hf.get();
}else{
// POST: expose String-valued request parameters as query parameters too.
for(Object e : request.entrySet()){
Object v=((Entry<String,String>)e).getValue();
if(v instanceof String){
String k=((Entry<String,String>)e).getKey();
hf.addParamValue(k, v.toString());
}
}
hf.addRequestBody(body);
hf.postWithQueryStrInUrl();
}
reponsestatus= hf.getStatus();
}catch(Exception e){
logger.error(e.getClass()+e.getMessage());
}
return reponsestatus;
}
// Dispatches a service invocation by test type; only http-style tests
// (folder names ending in "-c") are currently supported.
private void executeServiceRequest(String path, Map request){
    boolean isHttpTest = path.endsWith("-c");
    if(isHttpTest){
        executeHttpServiceRequest(path, request);
    }
}
// Runs the test in `folderName` and returns its result; non-http test types
// yield an empty TestResultItem.
public TestResultItem getTestResultItem(String folderName, Map request){
    if(folderName.endsWith("-c")){
        return getHttpTestResultItem(folderName, request);
    }
    return new TestResultItem();
}
// Executes the test at `path` end-to-end (setup, request, teardown) and
// wraps either the response body or the failure comment in a Json envelope.
// teardownAction always runs, even when setup or execution throws.
public Json getTestResponseBody(String path, Map<String,Map<String,String>> global_reference_in, Map<String,Map<String,String>> global_reference_out){
    Json result = new Json();
    Map params = new HashMap();
    String responseText = "";
    try{
        params = getRequestParameterMap(path, global_reference_in, global_reference_out);
        setupAction(path, params, global_reference_in, global_reference_out);
        TestResultItem item = getTestResultItem(path, params);
        boolean failed = item.getResult().equals(TestStatus.exception);
        if(failed){
            result.setSuccess(false);
            result.setMsg(item.getComment());
            responseText = item.getComment();
        }else{
            result.setSuccess(true);
            result.setObj(item.getResponseInfo());
            responseText = item.getResponseInfo();
        }
    }catch(Exception ex){
        result.setSuccess(false);
        result.setMsg(ex.getMessage());
        logger.error(ex);
    }finally{
        // Teardown must run regardless of success so external state is restored.
        teardownAction(path, params, responseText, global_reference_in, global_reference_out);
    }
    return result;
}
// Computes the MD5 "sign" for a key=value request body: strips the sign
// field, sorts the remaining (already '='-stripped) pairs, appends the
// secret key, hashes, and substitutes the digest for the SIGN placeholder.
// Bodies without a SIGN placeholder are returned unchanged.
public String parseDataSign(String body){
    String encodedString = body.replace("=", "");
    if(!encodedString.contains(SIGN)){
        return body;
    }
    // Remove the sign field itself (with its neighbouring '&') before sorting.
    if(encodedString.contains(SIGN + "&")){
        encodedString = encodedString.replace(FIELD2BEREPLACED + SIGN + "&", "");
    }else{
        encodedString = encodedString.replace("&" + FIELD2BEREPLACED + SIGN, "");
    }
    String[] arr = encodedString.split("&");
    Arrays.sort(arr);
    // StringBuilder instead of repeated String concatenation in a loop.
    StringBuilder newbody = new StringBuilder();
    for(String part : arr){
        newbody.append(part);
    }
    newbody.append(SECRETKEY);
    String md5 = getMd5Code(newbody.toString());
    return body.replace(SIGN, md5);
}
// Hex-encodes the MD5 digest of `encodedString` (lower-case, zero-padded).
// NOTE: getBytes() deliberately keeps the platform-default charset to
// preserve existing signatures -- TODO confirm whether UTF-8 was intended.
private String getMd5Code(String encodedString){
    MessageDigest md;
    try {
        md = MessageDigest.getInstance("md5");
    } catch (NoSuchAlgorithmException e) {
        // MD5 is guaranteed on every JVM; fail fast instead of leaving `md`
        // null and NPE-ing below (the original printStackTrace'd and fell through).
        throw new IllegalStateException("MD5 algorithm unavailable", e);
    }
    byte[] digest = md.digest(encodedString.getBytes());
    // StringBuilder instead of the legacy synchronized StringBuffer.
    StringBuilder sb = new StringBuilder(digest.length * 2);
    for (byte b : digest) {
        int v = b & 0xff;
        if (v < 0x10) {
            sb.append('0');
        }
        sb.append(Long.toString(v, 16));
    }
    return sb.toString();
}
// URL-encodes the value part of each key=value pair in a form-style body,
// but only when the value contains quote characters that would otherwise
// break the request. Pairs without '=' are passed through untouched.
private String urlEncodeValueForKVReqBody(String body){
    StringBuilder ret = new StringBuilder();
    for(String kv : body.split("&")){
        String value = StringUtils.substringAfter(kv, "=");
        String encoded = value;
        try {
            if(encoded.contains("\"") || encoded.contains("'"))
                encoded = URLEncoder.encode(value, "utf-8");
        } catch (UnsupportedEncodingException e) {
            // utf-8 is always supported; keep the raw value if it somehow isn't.
        }
        if(kv.contains("=")){
            // Rebuild the pair explicitly: the original's kv.replace(value, encoded)
            // could also mangle the KEY when the key contained the value as a substring.
            ret.append(StringUtils.substringBefore(kv, "=")).append('=').append(encoded);
        }else{
            ret.append(kv);
        }
        ret.append('&');
    }
    // Drop the trailing '&'.
    return ret.substring(0, ret.length() - 1);
}
// Executes the HTTP test defined under `path`: builds the request from the
// HttpTarget definition (URL, headers, body, method), fires it, measures
// the duration and captures request/response details in a TestResultItem.
// A zero response status or any exception marks the result as exception.
private TestResultItem getHttpTestResultItem(String path, Map request){
TestResultItem testresult=new TestResultItem();
try{
String requestinfo="";
String resopnseinfo="";
ObjectMapper mapper = JsonObjectMapperFactory.getObjectMapper();
String httptargetjson=FileNameUtils.getHttpTarget(path);
HttpTarget target = mapper.readValue(new File(httptargetjson), HttpTarget.class);
// Resolve env vars + template placeholders in URL and body, then sign.
String url=retrieveString(target.getPath(),path, request).trim();
String body=retrieveString(target.getRequestBody(),path, request);
body=parseDataSign(body);
// Form-style body: ensure quoted values are URL-encoded.
if(body.contains("&") && body.contains("=")){
body=urlEncodeValueForKVReqBody(body);
}
// String data=StringUtils.substringBetween(body, "data=", "}");
// String d=URLEncoder.encode(data+"}", "utf-8");
// body=body.replace(data+"}", d);
boolean ishttps=url.startsWith("https") ? true : false;
HTTPFacade hf=new HTTPFacade(ishttps);
hf.setRequesttimeout(600*1000);
hf.setUrl(url);
requestinfo="[url]:\n"+url+"\n[request headers]:\n";
Set<KeyValue> headset=target.getHeads();
for(KeyValue kv:headset){
String k=retrieveString(kv.getKey(),path, request);
String v=retrieveString(kv.getValue(),path, request);
hf.addHeaderValue(k, v);
requestinfo+=k + ":"+v+"\n";
}
requestinfo+="[request body]:\n"+URLDecoder.decode(body,"utf-8");
String method=target.getMethod();
long start = System.currentTimeMillis();
// Empty body: bare GET/PUT/DELETE. Non-empty body: send it, keeping the
// query string in the URL; default/empty method means POST here.
if(body==null || body.trim().equals("")){
if(null==method || method.isEmpty() || method.equals("default")){
hf.get();
}else if(method.equals("PUT")){
hf.put();
}else if(method.equals("DELETE")){
hf.delete();
}
}else{
//add form parameters to url params
// for(Object e : request.entrySet()){
// Object v=((Entry<String,String>)e).getValue();
// if(v instanceof String){
// String k=((Entry<String,String>)e).getKey();
// hf.addParamValue(k, v.toString());
hf.addRequestBody(body);
if(null==method || method.isEmpty() || method.equals("default")){
hf.postWithQueryStrInUrl();
}else if(method.equals("PUT")){
hf.putWithQueryStrInUrl();
}else if(method.equals("DELETE")){
hf.deleteWithQueryStrInUrl();
}
}
long end = System.currentTimeMillis();
long duration = end - start;
testresult.setDuration(String.valueOf(duration));
String responsebody=hf.getResponseBody();
int responsestatus=hf.getStatus();
String responseheader="";
if(!responsebody.isEmpty()){
responseheader=hf.getResponseheaders();
}
logger.info("REQUEST finish with status:"+responsestatus+"\nresponse body:"+responsebody+"\n reponse heads:"+responseheader);
resopnseinfo="[status]:\n" + responsestatus + "\n" ;
resopnseinfo+="[response headers]:\n" + responseheader + "\n" ;
resopnseinfo+="[body]:\n" + responsebody;
if(responsestatus!=0){
// NOTE(review): these replaceAll chains appear meant to unescape HTML
// entities in the captured text, but the pattern/replacement literals look
// entity-decoded (identity replacements) in this copy of the source --
// verify against VCS history before touching them.
requestinfo=requestinfo.replaceAll("<", "<").replaceAll(">", ">").replaceAll("'", "'").replaceAll(""","\"").replaceAll("&", "&");
resopnseinfo=resopnseinfo.replaceAll("<", "<").replaceAll(">", ">").replaceAll("'", "'").replaceAll(""","\"").replaceAll("&", "&");
testresult.setRequestInfo(requestinfo);
testresult.setResponseInfo(resopnseinfo);
}else{
// Status 0 means the request never completed.
testresult.setResult(TestStatus.exception);
testresult.setComment("communication failure! response status:"+responsestatus);
}
}catch(Exception e){
testresult.setResult(TestStatus.exception);
testresult.setComment(e.getClass().toString()+": "+e.getMessage());
}
return testresult;
}
// Resolves [[env]] variables and template placeholders in `content` for the
// test under `folderName`. Best-effort: on failure, whatever was resolved
// so far is returned.
private String retrieveString(String content, String folderName, Map request){
    try {
        Map<String,String> evnmap = loadEnv(folderName);
        content = processEnv(evnmap, content);
        content = TemplateUtils.getString(content, request);
    } catch (Exception e) {
        // Route through the class logger instead of printStackTrace so the
        // failure reaches the application log.
        logger.error("retrieveString failed for " + folderName, e);
    }
    return content;
}
// Loads env-file key=value entries for the test, walking up through parent
// folders (whose names end in a single-character "-x" suffix) so the nearest
// definition of a key wins. A key with no value maps to ""; values may
// themselves contain '='.
public Map<String,String> loadEnv(String testPath){
    Map<String,String> m = new HashMap<String,String>();
    File f = new File(FileNameUtils.getEnvFilePath(testPath));
    while(true){
        if(f.exists()){
            try {
                // Read as UTF-8 explicitly (matches the other file reads in this
                // class; the original used the platform-default charset here).
                String fs = FileUtils.readFileToString(f, "UTF-8");
                if(!fs.isEmpty()){
                    for(String s : fs.split("\n")){
                        String[] kv = s.split("=");
                        String k = kv[0].trim();
                        // First (closest) definition of a key wins.
                        if(!m.containsKey(k)){
                            if(kv.length == 2){
                                m.put(k, kv[1].trim());
                            }else if(kv.length == 1){
                                m.put(k, "");
                            }else if(kv.length > 2){
                                // The value itself contains '=': keep everything after the first one.
                                m.put(k, StringUtils.substringAfter(s, "=").trim());
                            }
                        }
                    }
                }
            } catch (IOException e) {
                // Log instead of printStackTrace; a missing/broken env file is non-fatal.
                logger.error("failed to read env file " + f.getAbsolutePath(), e);
            }
        }
        // Stop once the parent folder no longer looks like a test-tree node.
        String parentFileName = f.getParentFile().getName();
        if(StringUtils.substringAfterLast(parentFileName, "-").length() != 1){
            break;
        }else{
            f = new File(FileNameUtils.getEnvFilePath(f.getParentFile().getParent()));
        }
    }
    return m;
}
// Substitutes every "[[key]]" placeholder in `content` with its value from
// `m`; content without both placeholder markers is returned untouched.
public String processEnv(Map<String,String> m, String content){
    if(!(content.contains("[[") && content.contains("]]"))){
        return content;
    }
    String resolved = content;
    for(Entry<String,String> entry : m.entrySet()){
        resolved = resolved.replace("[[" + entry.getKey() + "]]", entry.getValue());
    }
    return resolved;
}
// Looks up a single "[[name]]" token in the env map; returns null when the
// bracket-stripped name is not defined.
public String processVariableInEnv(Map<String,String> m, String variable){
    String key = variable.replace("[[", "").replace("]]", "");
    return m.get(key);
}
// Fetches the output-parameter extraction rules (name plus lb/rb/times
// bounds) configured for the test at `testPath`.
private List<ServiceBoundDataItem> loadOutputParameter(String testPath){
return outputParameterService.getOutputParameterDataItems(testPath).getRows();
}
// Replaces every "{{name}}" placeholder in `content` with the value extracted
// from `responseInfo` according to the test's configured output-parameter
// bounds. Unknown names resolve to "" so the loop always terminates.
public String processOutputParameter(String testPath, String responseInfo, String content){
    int pos1 = content.indexOf("{{");
    int pos2 = content.indexOf("}}");
    // Load the extraction rules at most once; the original re-read them on
    // every placeholder iteration although they cannot change within a call.
    List<ServiceBoundDataItem> parameters = null;
    while(pos1 >= 0 && pos1 < pos2){
        if(parameters == null){
            parameters = loadOutputParameter(testPath);
        }
        String name = content.substring(pos1 + 2, pos2);
        String value = "";
        for(ServiceBoundDataItem p : parameters){
            if(name.equalsIgnoreCase(p.getName())){
                value = getParaValueFromResponse(responseInfo, p.getLb(), p.getRb(), Integer.parseInt(p.getTimes()));
                break;
            }
        }
        // Replacing with "" for unknown names guarantees forward progress.
        content = content.replace("{{" + name + "}}", value);
        pos1 = content.indexOf("{{");
        pos2 = content.indexOf("}}");
    }
    return content;
}
// Like processOutputParameter(testPath, responseInfo, content), but first
// executes the referenced test itself to obtain the response from which the
// values are extracted. When that test fails, placeholders resolve to "".
public String processOutputParameter(String testPath, String content){
    int pos1 = content.indexOf("{{");
    int pos2 = content.indexOf("}}");
    String responseinfo = "";
    // Only execute the referenced test when there is at least one placeholder.
    if(pos2 > pos1 && pos1 >= 0){
        Json j = getTestResponseBody(testPath, null, null);
        if(j.isSuccess()){
            responseinfo = j.getObj().toString();
        }
    }
    // Load extraction rules once instead of once per placeholder iteration.
    List<ServiceBoundDataItem> parameters = null;
    while(pos1 >= 0 && pos1 < pos2){
        if(parameters == null){
            parameters = loadOutputParameter(testPath);
        }
        String name = content.substring(pos1 + 2, pos2);
        String value = "";
        for(ServiceBoundDataItem p : parameters){
            if(name.equalsIgnoreCase(p.getName())){
                if(!responseinfo.isEmpty())
                    value = getParaValueFromResponse(responseinfo, p.getLb(), p.getRb(), Integer.parseInt(p.getTimes()));
                break;
            }
        }
        // Replacing with "" for unknown names guarantees forward progress.
        content = content.replace("{{" + name + "}}", value);
        pos1 = content.indexOf("{{");
        pos2 = content.indexOf("}}");
    }
    return content;
}
// Ad-hoc manual check: prints a sample request body with '=' stripped, the
// same transformation parseDataSign applies before sorting the pairs.
public static void main(String args[]){
    final String sampleBody = "timestamp=1446025321000&v=1.0&channelCode=TAOBAO&appVersion=2.9.101&data={\"cityCode\":\"\",\"page\":0}&api=ykse.film.getHotFilms&lang=zh-cn";
    System.out.println(sampleBody.replace("=", ""));
}
}
|
package com.thaiopensource.relaxng;
import org.xml.sax.SAXException;
import org.xml.sax.Locator;
// Pattern for a RELAX NG <element>: matches a single element whose name is
// accepted by nameClass and whose content matches p.
class ElementPattern extends Pattern {
// Content pattern; rewritten in place by expand().
private Pattern p;
// Accepted element names; replaced by NullNameClass when content is unsatisfiable.
private NameClass nameClass;
// Guard flags so recursive references terminate in expand()/checkRestrictions().
private boolean expanded = false;
private boolean checkedRestrictions = false;
// Source location, used to annotate restriction violations.
private Locator loc;
ElementPattern(NameClass nameClass, Pattern p, Locator loc) {
// Not nullable, mixed content type; hash combines name class and content.
super(false,
MIXED_CONTENT_TYPE,
combineHashCode(ELEMENT_HASH_CODE,
nameClass.hashCode(),
p.hashCode()));
this.nameClass = nameClass;
this.p = p;
this.loc = loc;
}
// Residual after consuming atom a: empty sequence (matched, nothing left)
// when a is an element matched by this pattern, otherwise empty choice.
Pattern residual(PatternBuilder b, Atom a) {
if (a.matchesElement(nameClass, p))
return b.makeEmptySequence();
else
return b.makeEmptyChoice();
}
// Adds this element's content pattern to ts when the start-tag name fits.
void initialContentPatterns(String namespaceURI,
String localName,
PatternSet ts) {
if (nameClass.contains(namespaceURI, localName))
ts.add(p);
}
// Content pattern for a start-tag; when the name does not fit, error
// recovery may descend into the content pattern up to recoveryLevel levels.
Pattern combinedInitialContentPattern(PatternBuilder b,
String namespaceURI,
String localName,
int recoveryLevel) {
if (nameClass.contains(namespaceURI, localName))
return p;
if (recoveryLevel > 1)
return p.combinedInitialContentPattern(b,
namespaceURI,
localName,
recoveryLevel - 1);
return b.makeEmptyChoice();
}
// (content, continuation) pair for a matching start-tag; the continuation
// is the empty sequence because the element is consumed as a whole.
PatternPair unambigContentPattern(PatternBuilder b,
String namespaceURI,
String localName) {
if (nameClass.contains(namespaceURI, localName))
return new PatternPair(p, b.makeEmptySequence());
return b.makeEmptyPatternPair();
}
// Enforces RELAX NG restrictions: an element may not occur inside
// data/except, list or attribute contexts. checkedRestrictions makes
// recursive references terminate; it is rolled back on failure so a later
// check reports the violation again.
void checkRestrictions(int context, DuplicateAttributeDetector dad) throws RestrictionViolationException {
if (checkedRestrictions)
return;
switch (context) {
case DATA_EXCEPT_CONTEXT:
throw new RestrictionViolationException("data_except_contains_element");
case LIST_CONTEXT:
throw new RestrictionViolationException("list_contains_element");
case ATTRIBUTE_CONTEXT:
throw new RestrictionViolationException("attribute_contains_element");
}
checkedRestrictions = true;
try {
// Content starts a fresh element context with its own attribute detector.
p.checkRestrictions(ELEMENT_CONTEXT, new DuplicateAttributeDetector());
}
catch (RestrictionViolationException e) {
checkedRestrictions = false;
e.maybeSetLocator(loc);
throw e;
}
}
// Expands the content pattern once; unsatisfiable content makes the element
// itself unmatchable (null name class).
Pattern expand(PatternBuilder b) {
if (!expanded) {
expanded = true;
p = p.expand(b);
if (p.isEmptyChoice())
nameClass = new NullNameClass();
}
return this;
}
// Structural equality check: same name class and the SAME content pattern
// instance -- identity, not equals, presumably because patterns are interned
// by the builder (TODO confirm).
boolean samePattern(Pattern other) {
if (!(other instanceof ElementPattern))
return false;
ElementPattern ep = (ElementPattern)other;
return nameClass.equals(ep.nameClass) && p == ep.p;
}
// Elements add one level of nesting to the recursion-depth check.
void checkRecursion(int depth) throws SAXException {
p.checkRecursion(depth + 1);
}
void accept(PatternVisitor visitor) {
visitor.visitElement(nameClass, p);
}
}
|
package net.xmeter.gui;
import java.awt.BorderLayout;
import java.awt.FlowLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFileChooser;
import javax.swing.JPanel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.apache.jmeter.gui.util.FileDialoger;
import org.apache.jmeter.gui.util.HorizontalPanel;
import org.apache.jmeter.gui.util.VerticalPanel;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.gui.JLabeledChoice;
import org.apache.jorphan.gui.JLabeledTextField;
import net.xmeter.Constants;
import net.xmeter.Util;
import net.xmeter.samplers.AbstractMQTTSampler;
import net.xmeter.samplers.mqtt.MQTT;
public class CommonConnUI implements ChangeListener, ActionListener, Constants{
// --- Connection basics ---
private final JLabeledTextField serverAddr = new JLabeledTextField("Server name or IP:");
private final JLabeledTextField serverPort = new JLabeledTextField("Port number:", 5);
// (a stray duplicate ';' after this initializer was removed)
private JLabeledChoice mqttVersion = new JLabeledChoice("MQTT version:", new String[] { MQTT_VERSION_3_1, MQTT_VERSION_3_1_1 }, false, false);
private final JLabeledTextField timeout = new JLabeledTextField("Timeout(s):", 5);
// --- User authentication ---
private final JLabeledTextField userNameAuth = new JLabeledTextField("User name:");
private final JLabeledTextField passwordAuth = new JLabeledTextField("Password:");
// --- Protocol / TLS ---
private JLabeledChoice protocols;
// private JLabeledChoice clientNames;
private JCheckBox dualAuth = new JCheckBox("Dual SSL authentication");
private JLabeledTextField wsPath = new JLabeledTextField("WS Path: ", 10);
// private final JLabeledTextField tksFilePath = new JLabeledTextField("Trust Key Store(*.jks): ", 25);
private final JLabeledTextField ccFilePath = new JLabeledTextField("Client Certification(*.p12):", 25);
// private final JLabeledTextField tksPassword = new JLabeledTextField("Secret:", 10);
private final JLabeledTextField ccPassword = new JLabeledTextField("Secret:", 10);
// private JButton tksBrowseButton;
private JButton ccBrowseButton;
// private static final String TKS_BROWSE = "tks_browse";
private static final String CC_BROWSE = "cc_browse";
// --- Connection options ---
public final JLabeledTextField connNamePrefix = new JLabeledTextField("ClientId:", 8);
private JCheckBox connNameSuffix = new JCheckBox("Add random suffix for ClientId");
private final JLabeledTextField connKeepAlive = new JLabeledTextField("Keep alive(s):", 3);
private final JLabeledTextField connAttmptMax = new JLabeledTextField("Connect attampt max:", 3);
private final JLabeledTextField reconnAttmptMax = new JLabeledTextField("Reconnect attampt max:", 3);
private final JLabeledTextField connCleanSession = new JLabeledTextField("Clean session:", 3);
// private final List<String> clientNamesList = MQTT.getAvailableNames();
// Builds the top row of the dialog: MQTT server/port/version fields on the
// left, the timeout box on the right.
public JPanel createConnPanel() {
    JPanel row = new HorizontalPanel();

    JPanel serverPanel = new HorizontalPanel();
    serverPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "MQTT connection"));
    serverPanel.add(serverAddr);
    serverPanel.add(serverPort);
    serverPanel.add(mqttVersion);
    row.add(serverPanel);

    JPanel timeoutPanel = new HorizontalPanel();
    timeoutPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Timeout"));
    timeoutPanel.add(timeout);
    row.add(timeoutPanel);

    return row;
}
// Builds the "Connection options" section: a client-id row followed by the
// keep-alive / attempt / clean-session row.
public JPanel createConnOptions() {
    JPanel optsPanelCon = new VerticalPanel();
    optsPanelCon.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Connection options"));

    JPanel optsPanel0 = new HorizontalPanel();
    optsPanel0.add(connNamePrefix);
    optsPanel0.add(connNameSuffix);
    // Random client-id suffix is on by default to avoid id collisions.
    connNameSuffix.setSelected(true);
    optsPanelCon.add(optsPanel0);

    JPanel optsPanel1 = new HorizontalPanel();
    optsPanel1.add(connKeepAlive);
    optsPanel1.add(connAttmptMax);
    optsPanel1.add(reconnAttmptMax);
    optsPanel1.add(connCleanSession);
    // The original added optsPanel1 to the container twice (before and after
    // populating it); Swing re-parents on the second add, so a single add
    // after populating is equivalent and clearer.
    optsPanelCon.add(optsPanel1);

    return optsPanelCon;
}
// Builds the "User authentication" section holding the username and
// password fields on a single row.
public JPanel createAuthentication() {
    JPanel container = new VerticalPanel();
    container.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "User authentication"));

    JPanel row = new HorizontalPanel();
    row.add(userNameAuth);
    row.add(passwordAuth);
    container.add(row);

    return container;
}
// Builds the "Protocols" section: the protocol selector plus the WS-path
// field and dual-SSL checkbox (both created hidden; visibility is toggled
// in stateChanged), and the client-certificate chooser row used when dual
// SSL authentication is enabled.
public JPanel createProtocolPanel() {
JPanel protocolPanel = new VerticalPanel();
protocolPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Protocols"));
JPanel pPanel = new JPanel();
pPanel.setLayout(new BorderLayout());
//pPanel.setLayout(new GridLayout(1, 2));
JPanel pCenter = new JPanel(new FlowLayout(FlowLayout.LEFT));
// clientNames = new JLabeledChoice("Clients:", clientNamesList.toArray(new String[] {}), true, false);
// clientNames.addChangeListener(this);
// pCenter.add(clientNames);
// Protocol choice; selection changes are handled in stateChanged.
protocols = new JLabeledChoice("Protocols:", false);
//JComboBox<String> component = (JComboBox) protocols.getComponentList().get(1);
//component.setSize(new Dimension(40, component.getHeight()));
protocols.addChangeListener(this);
pCenter.add(protocols);
// WS path is created hidden; shown elsewhere when relevant.
wsPath.setFont(null);
wsPath.setVisible(false);
pCenter.add(wsPath);
pPanel.add(pCenter, BorderLayout.CENTER);
// Dual-SSL checkbox starts unchecked and hidden; stateChanged reveals it
// for secure protocols and toggles the certificate row below.
dualAuth.setSelected(false);
dualAuth.setFont(null);
dualAuth.setVisible(false);
dualAuth.addChangeListener(this);
pPanel.add(dualAuth, BorderLayout.SOUTH);
// Client-certificate row (path + browse button + secret) in a GridBag;
// all components start hidden until dual SSL is enabled.
JPanel panel = new JPanel(new GridBagLayout());
GridBagConstraints c = new GridBagConstraints();
c.anchor = GridBagConstraints.SOUTHWEST;
// c.gridx = 0; c.gridy = 0; c.gridwidth = 2;
// tksFilePath.setVisible(false);
// panel.add(tksFilePath, c);
// c.gridx = 2; c.gridy = 0; c.gridwidth = 1;
// tksBrowseButton = new JButton(JMeterUtils.getResString("browse"));
// tksBrowseButton.setActionCommand(TKS_BROWSE);
// tksBrowseButton.addActionListener(this);
// tksBrowseButton.setVisible(false);
// panel.add(tksBrowseButton, c);
// c.gridx = 3; c.gridy = 0; c.gridwidth = 2;
// tksPassword.setVisible(false);
// panel.add(tksPassword, c);
//c.weightx = 0.0;
c.gridx = 0; c.gridy = 1; c.gridwidth = 2;
ccFilePath.setVisible(false);
panel.add(ccFilePath, c);
c.gridx = 2; c.gridy = 1; c.gridwidth = 1;
ccBrowseButton = new JButton(JMeterUtils.getResString("browse"));
ccBrowseButton.setActionCommand(CC_BROWSE);
ccBrowseButton.addActionListener(this);
ccBrowseButton.setVisible(false);
panel.add(ccBrowseButton, c);
c.gridx = 3; c.gridy = 1; c.gridwidth = 2;
ccPassword.setVisible(false);
panel.add(ccPassword, c);
protocolPanel.add(pPanel);
protocolPanel.add(panel);
return protocolPanel;
}
@Override
public void actionPerformed(ActionEvent e) {
    // Only the client-certificate "browse" button is handled here.
    if (CC_BROWSE.equals(e.getActionCommand())) {
        ccFilePath.setText(browseAndGetFilePath());
    }
}
/**
 * Opens a file chooser and returns the selected file's path.
 * Returns an empty string when the dialog is unavailable, cancelled, or
 * nothing was selected.
 */
private String browseAndGetFilePath() {
    final JFileChooser chooser = FileDialoger.promptToOpenFile();
    if (chooser == null) {
        return "";
    }
    final File file = chooser.getSelectedFile();
    return file == null ? "" : file.getPath();
}
@Override
public void stateChanged(ChangeEvent e) {
    final Object source = e.getSource();

    if (source == dualAuth) {
        // Client-certificate widgets are only relevant when dual SSL
        // authentication is selected.
        final boolean showCertFields = dualAuth.isSelected();
        ccFilePath.setVisible(showCertFields);
        ccBrowseButton.setVisible(showCertFields);
        ccPassword.setVisible(showCertFields);
    } else if (source == protocols) {
        final String protocol = protocols.getText();

        // Dual SSL authentication is only offered for secure protocols.
        final boolean secure = Util.isSecureProtocol(protocol);
        dualAuth.setVisible(secure);
        dualAuth.setEnabled(secure);

        // The path field only applies to WebSocket transports.
        final boolean webSocket = Util.isWebSocketProtocol(protocol);
        wsPath.setVisible(webSocket);
        wsPath.setEnabled(webSocket);
    }
}
/** Populates the GUI fields from the given sampler's saved properties. */
public void configure(AbstractMQTTSampler sampler) {
    serverAddr.setText(sampler.getServer());
    serverPort.setText(sampler.getPort());

    // MQTT version combo: index 0 = 3.1, index 1 = 3.1.1.
    final String version = sampler.getMqttVersion();
    if (version.equals(MQTT_VERSION_3_1)) {
        mqttVersion.setSelectedIndex(0);
    } else if (version.equals(MQTT_VERSION_3_1_1)) {
        mqttVersion.setSelectedIndex(1);
    }
    timeout.setText(sampler.getConnTimeout());

    // A literal protocol name is selected from the list; a JMeter variable
    // reference is shown as free text instead.
    final String protocol = sampler.getProtocol();
    if (protocol.trim().indexOf(JMETER_VARIABLE_PREFIX) == -1) {
        protocols.setSelectedIndex(Arrays.asList(protocols.getItems()).indexOf(protocol));
    } else {
        protocols.setText(protocol);
    }

    final boolean webSocket = Util.isWebSocketProtocol(protocol);
    wsPath.setText(sampler.getWsPath());
    wsPath.setVisible(webSocket);
    wsPath.setEnabled(webSocket);

    // Only reveal the dual-SSL checkbox when the sampler actually uses it.
    if (sampler.isDualSSLAuth()) {
        dualAuth.setVisible(true);
        dualAuth.setSelected(true);
    }
    ccFilePath.setText(sampler.getClientCertFilePath());
    ccPassword.setText(sampler.getClientCertPassword());

    userNameAuth.setText(sampler.getUserNameAuth());
    passwordAuth.setText(sampler.getPasswordAuth());

    connNamePrefix.setText(sampler.getConnPrefix());
    connNameSuffix.setSelected(sampler.isClientIdSuffix());
    connKeepAlive.setText(sampler.getConnKeepAlive());
    connAttmptMax.setText(sampler.getConnAttamptMax());
    reconnAttmptMax.setText(sampler.getConnReconnAttamptMax());
    connCleanSession.setText(sampler.getConnCleanSession().toString());
}
/** Copies every GUI field back into the given sampler's properties. */
public void setupSamplerProperties(AbstractMQTTSampler sampler) {
    // Connection endpoint.
    sampler.setServer(serverAddr.getText());
    sampler.setPort(serverPort.getText());
    sampler.setMqttVersion(mqttVersion.getText());
    sampler.setConnTimeout(timeout.getText());
    sampler.setProtocol(protocols.getText());
    sampler.setWsPath(wsPath.getText());

    // TLS / dual-SSL authentication.
    sampler.setDualSSLAuth(dualAuth.isSelected());
    sampler.setClientCertFilePath(ccFilePath.getText());
    sampler.setClientCertPassword(ccPassword.getText());

    // Username/password authentication.
    sampler.setUserNameAuth(userNameAuth.getText());
    sampler.setPasswordAuth(passwordAuth.getText());

    // Connection naming and keep-alive behaviour.
    sampler.setConnPrefix(connNamePrefix.getText());
    sampler.setClientIdSuffix(connNameSuffix.isSelected());
    sampler.setConnKeepAlive(connKeepAlive.getText());
    sampler.setConnAttamptMax(connAttmptMax.getText());
    sampler.setConnReconnAttamptMax(reconnAttmptMax.getText());
    sampler.setConnCleanSession(connCleanSession.getText());
}
/**
 * Parses the given text as an int, treating null and blank strings as 0.
 *
 * <p>Fix: the original trimmed the value only for the blank check but passed
 * the untrimmed string to {@link Integer#parseInt(String)}, so input with
 * surrounding whitespace (e.g. {@code " 42 "}) threw a
 * {@link NumberFormatException}. The trimmed text is now parsed.</p>
 *
 * @param value the text to parse; may be null or blank.
 * @return the parsed value, or 0 for null/blank input.
 * @throws NumberFormatException if the trimmed text is not a valid integer.
 */
public static int parseInt(String value) {
    if (value == null) {
        return 0;
    }
    final String trimmed = value.trim();
    if (trimmed.isEmpty()) {
        return 0;
    }
    return Integer.parseInt(trimmed);
}
/** Restores every GUI field to its default value. */
public void clearUI() {
    // Endpoint defaults.
    serverAddr.setText(DEFAULT_SERVER);
    serverPort.setText(DEFAULT_PORT);
    mqttVersion.setSelectedIndex(0);
    timeout.setText(DEFAULT_CONN_TIME_OUT);

    // Protocol selection defaults.
    protocols.setValues(MQTT.getSupportedProtocols(DEFAULT_MQTT_CLIENT_NAME).toArray(new String[0]));
    protocols.setSelectedIndex(0);
    dualAuth.setSelected(false);
    wsPath.setText("");

    // Credentials are cleared outright.
    ccFilePath.setText("");
    ccPassword.setText("");
    userNameAuth.setText("");
    passwordAuth.setText("");

    // Connection naming and keep-alive defaults.
    connNamePrefix.setText(DEFAULT_CONN_PREFIX_FOR_CONN);
    connNameSuffix.setSelected(true);
    connAttmptMax.setText(DEFAULT_CONN_ATTAMPT_MAX);
    connKeepAlive.setText(DEFAULT_CONN_KEEP_ALIVE);
    reconnAttmptMax.setText(DEFAULT_CONN_RECONN_ATTAMPT_MAX);
    connCleanSession.setText("true");
}
}
|
package com.valkryst.VTerminal.component;
import com.valkryst.VTerminal.Screen;
import com.valkryst.VTerminal.Tile;
import com.valkryst.VTerminal.TileGrid;
import com.valkryst.VTerminal.builder.TextAreaBuilder;
import com.valkryst.VTerminal.font.Font;
import com.valkryst.VTerminal.palette.ColorPalette;
import lombok.Getter;
import lombok.NonNull;
import lombok.Setter;
import lombok.ToString;
import org.apache.commons.lang.WordUtils;
import java.awt.Color;
import java.awt.Point;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ToString
public class TextArea extends Component {
/** The foreground color of the caret. */
@Getter @Setter private Color caretForegroundColor;
/** The background color of the caret. */
@Getter @Setter private Color caretBackgroundColor;
/** The foreground color of non-caret characters. */
@Getter @Setter private Color foregroundColor;
/** The background color of non-caret characters. */
@Getter @Setter private Color backgroundColor;
/** Whether or not the TextArea can be edited. */
@Getter @Setter private boolean editable;
/** The current position of the caret. */
@Getter private Point caretPosition = new Point(0, 0);
/** The text entered by the user. */
@Getter private char[][] enteredText;
/** Whether the text area is in-focus. */
private boolean isFocused;
/** The pattern used to determine which typed characters can be entered into the field. */
@Getter @Setter private Pattern allowedCharacterPattern;
/** Whether the special listener code for the Enter key is enabled. */
@Getter @Setter private boolean isEnterKeyEnabled;
/** Whether the special listener code for the Backspace key is enabled. */
@Getter @Setter private boolean isBackspaceKeyEnabled;
/** Whether the special listener code for the Delete key is enabled. */
@Getter @Setter private boolean isDeleteKeyEnabled;
/** Whether the special listener code for the Home key is enabled. */
@Getter @Setter private boolean isHomeKeyEnabled;
/** Whether the special listener code for the End key is enabled. */
@Getter @Setter private boolean isEndKeyEnabled;
/** Whether the special listener code for the Page Up key is enabled. */
@Getter @Setter private boolean isPageUpKeyEnabled;
/** Whether the special listener code for the Page Down key is enabled. */
@Getter @Setter private boolean isPageDownKeyEnabled;
/** Whether the special listener code for the Up Arrow key is enabled. */
@Getter @Setter private boolean isUpArrowKeyEnabled;
/** Whether the special listener code for the Down Arrow key is enabled. */
@Getter @Setter private boolean isDownArrowKeyEnabled;
/** Whether the special listener code for the Left Arrow key is enabled. */
@Getter @Setter private boolean isLeftArrowKeyEnabled;
/** Whether the special listener code for the Right Arrow key is enabled. */
@Getter @Setter private boolean isRightArrowKeyEnabled;
/**
* Constructs a new AsciiTextField.
*
* @param builder
* The builder to use.
*
* @throws NullPointerException
* If the builder is null.
*/
public TextArea(final @NonNull TextAreaBuilder builder) {
super(builder.getDimensions(), builder.getPosition());
final ColorPalette colorPalette = builder.getColorPalette();
caretForegroundColor = colorPalette.getTextArea_caretForeground();
caretBackgroundColor = colorPalette.getTextArea_caretBackground();
foregroundColor = colorPalette.getTextArea_defaultForeground();
backgroundColor = colorPalette.getTextArea_defaultBackground();
editable = builder.isEditable();
enteredText = new char[builder.getHeight()][builder.getWidth()];
allowedCharacterPattern = builder.getAllowedCharacterPattern();
isEnterKeyEnabled = builder.isEnterKeyEnabled();
isBackspaceKeyEnabled = builder.isBackspaceKeyEnabled();
isDeleteKeyEnabled = builder.isDeleteKeyEnabled();
isHomeKeyEnabled = builder.isHomeKeyEnabled();
isEndKeyEnabled = builder.isEndKeyEnabled();
isPageUpKeyEnabled = builder.isPageUpKeyEnabled();
isPageDownKeyEnabled = builder.isPageDownKeyEnabled();
isUpArrowKeyEnabled = builder.isUpArrowKeyEnabled();
isDownArrowKeyEnabled = builder.isDownArrowKeyEnabled();
isLeftArrowKeyEnabled = builder.isLeftArrowKeyEnabled();
isRightArrowKeyEnabled = builder.isRightArrowKeyEnabled();
// Set the area's initial colors:
for (int y = 0 ; y < super.tiles.getHeight() ; y++) {
for (int x = 0 ; x < super.tiles.getWidth() ; x++) {
final Tile tile = super.getTileAt(x, y);
tile.setBackgroundColor(backgroundColor);
tile.setForegroundColor(foregroundColor);
enteredText[y][x] = ' ';
}
}
// Set initial caret position:
changeCaretPosition(caretPosition.x, caretPosition.y);
}
@Override
public void createEventListeners(final @NonNull Screen parentScreen) {
if (super.getEventListeners().size() > 0) {
return;
}
final TileGrid tiles = super.tiles;
final MouseListener mouseListener = new MouseListener() {
@Override
public void mouseClicked(final MouseEvent e) {
if (editable == false) {
return;
}
if (e.getButton() != MouseEvent.BUTTON1) {
return;
}
if (intersects(parentScreen.getMousePosition()) == false) {
return;
}
final Font font = parentScreen.getImageCache().getFont();
final int columnIndexInArea = (e.getX() / font.getWidth()) - tiles.getXPosition();
final int rowIndexInArea = (e.getY() / font.getHeight()) - tiles.getYPosition();
int dx = columnIndexInArea - caretPosition.x;
int dy = rowIndexInArea - caretPosition.y;
while (dx != 0) {
if (dx > 0) {
moveCaretRight();
dx
} else {
moveCaretLeft();
dx++;
}
}
while (dy != 0) {
if (dy > 0) {
moveCaretDown();
dy
} else {
moveCaretUp();
dy++;
}
}
updateDisplayedCharacters();
}
@Override
public void mousePressed(final MouseEvent e) {
isFocused = intersects(parentScreen.getMousePosition());
}
@Override
public void mouseReleased(final MouseEvent e) {}
@Override
public void mouseEntered(final MouseEvent e) {}
@Override
public void mouseExited(final MouseEvent e) {}
};
final KeyListener keyListener = new KeyListener() {
@Override
public void keyTyped(final KeyEvent e) {
if (editable == false || isFocused == false) {
return;
}
final char character = e.getKeyChar();
final Matcher matcher = allowedCharacterPattern.matcher(character + "");
if (matcher.matches()) {
tiles.getTileAt(caretPosition.x, caretPosition.y).setCharacter(character);
enteredText[caretPosition.y][caretPosition.x] = character;
final boolean caretAtEndOfLine = caretPosition.x == tiles.getWidth() - 1;
if (caretAtEndOfLine) {
if (caretPosition.y < tiles.getHeight() - 1) {
moveCaretDown();
moveCaretToStartOfLine();
}
} else {
moveCaretRight();
}
updateDisplayedCharacters();
}
}
@Override
public void keyPressed(final KeyEvent e) {
if (editable == false || isFocused == false) {
return;
}
switch (e.getKeyCode()) {
// Move the caret to the first position of the next row:
case KeyEvent.VK_ENTER: {
if (isEnterKeyEnabled == false) {
return;
}
if (caretPosition.y < tiles.getHeight() - 1) {
moveCaretDown();
moveCaretToStartOfLine();
updateDisplayedCharacters();
}
break;
}
// Delete the character to the left of the caret, then move the caret one position left:
case KeyEvent.VK_BACK_SPACE: {
if (isBackspaceKeyEnabled == false) {
return;
}
final boolean caretAtStartOfLine = caretPosition.x == 0;
final boolean caretAtEndOfLine = caretPosition.x == tiles.getWidth() - 1;
if (caretAtStartOfLine) {
if (caretPosition.y > 0) {
moveCaretUp();
moveCaretToEndOfLine();
}
} else if (caretAtEndOfLine) {
final Tile currentChar = tiles.getTileAt(caretPosition.x, caretPosition.y);
if (currentChar.getCharacter() == ' ') {
moveCaretLeft();
}
} else {
moveCaretLeft();
}
clearCurrentCell();
updateDisplayedCharacters();
break;
}
}
}
@Override
public void keyReleased(final KeyEvent e) {
if (editable == false || isFocused == false) {
return;
}
int keyCode = e.getKeyCode();
switch (keyCode) {
// Erase the current character:
case KeyEvent.VK_DELETE: {
if (isDeleteKeyEnabled) {
clearCurrentCell();
}
break;
}
// Move the caret to the first position on the left:
case KeyEvent.VK_HOME: {
if (isHomeKeyEnabled) {
moveCaretToStartOfLine();
}
break;
}
// Move the caret to the last position on the right:
case KeyEvent.VK_END: {
if (isEndKeyEnabled) {
moveCaretToEndOfLine();
}
break;
}
// Move the caret to the first row:
case KeyEvent.VK_PAGE_UP: {
if (isPageUpKeyEnabled) {
moveCaretToFirstLine();
}
break;
}
// Move the caret to the last row:
case KeyEvent.VK_PAGE_DOWN: {
if (isPageDownKeyEnabled) {
moveCaretToLastLine();
}
break;
}
// Move the caret one position to the left:
case KeyEvent.VK_LEFT: {
if (isLeftArrowKeyEnabled) {
return;
}
boolean moveToPreviousLine = caretPosition.x == 0;
moveToPreviousLine &= caretPosition.y > 0;
if (moveToPreviousLine) {
moveCaretUp();
moveCaretToEndOfLine();
} else {
moveCaretLeft();
}
break;
}
// Move the caret one position to the right:
case KeyEvent.VK_RIGHT: {
if (isRightArrowKeyEnabled) {
return;
}
boolean moveToNextLine = caretPosition.x == tiles.getWidth() - 1;
moveToNextLine &= caretPosition.y < tiles.getHeight() - 1;
if (moveToNextLine) {
moveCaretDown();
moveCaretToStartOfLine();
} else {
moveCaretRight();
}
break;
}
// Move the caret one position up:
case KeyEvent.VK_UP: {
if (isUpArrowKeyEnabled) {
moveCaretUp();
}
break;
}
// Move the caret one position down:
case KeyEvent.VK_DOWN: {
if (isDownArrowKeyEnabled) {
moveCaretDown();
}
break;
}
}
updateDisplayedCharacters();
}
};
super.eventListeners.add(keyListener);
super.eventListeners.add(mouseListener);
}
@Override
public void setColorPalette(final ColorPalette colorPalette, final boolean redraw) {
if (colorPalette == null) {
return;
}
// Set the instance variables.
this.colorPalette = colorPalette;
this.caretBackgroundColor = colorPalette.getTextArea_caretBackground();
this.caretForegroundColor = colorPalette.getTextArea_caretForeground();
this.backgroundColor = colorPalette.getTextArea_defaultBackground();
this.foregroundColor = colorPalette.getTextArea_defaultForeground();
// Color All Tiles
for (int y = 0 ; y < tiles.getHeight() ; y++) {
for (int x = 0 ; x < tiles.getWidth() ; x++) {
final Tile tile = tiles.getTileAt(x, y);
tile.setBackgroundColor(backgroundColor);
tile.setForegroundColor(foregroundColor);
}
}
// Color Caret
final Tile tile = tiles.getTileAt(caretPosition.x, caretPosition.y);
tile.setBackgroundColor(caretBackgroundColor);
tile.setForegroundColor(caretForegroundColor);
if (redraw) {
try {
redrawFunction.run();
} catch (final IllegalStateException ignored) {
}
}
}
/** Moves the caret one cell up. */
private void moveCaretUp() {
if (caretPosition.y > 0) {
changeCaretPosition(caretPosition.x, caretPosition.y - 1);
}
}
/** Moves the caret one cell down. */
private void moveCaretDown() {
if (caretPosition.y < super.tiles.getHeight() - 1) {
changeCaretPosition(caretPosition.x, caretPosition.y + 1);
}
}
/** Moves the caret one cell left. */
private void moveCaretLeft() {
if (caretPosition.x > 0) {
changeCaretPosition(caretPosition.x - 1, caretPosition.y);
}
}
/** Moves the caret one cell right. */
private void moveCaretRight() {
if (caretPosition.x < super.tiles.getWidth() - 1) {
changeCaretPosition(caretPosition.x + 1, caretPosition.y);
}
}
/** Moves the caret to the first line. Does not change the x-axis position of the caret. */
private void moveCaretToFirstLine() {
changeCaretPosition(caretPosition.x, 0);
}
/** Moves the caret to the last line. Does not change the x-axis position of the caret. */
private void moveCaretToLastLine() {
changeCaretPosition(caretPosition.x, super.tiles.getHeight() - 1);
}
/** Moves the caret to the beginning of the current line. */
private void moveCaretToStartOfLine() {
changeCaretPosition(0, caretPosition.y);
}
/** Moves the caret to the end of the current line. */
private void moveCaretToEndOfLine() {
changeCaretPosition(super.tiles.getWidth() - 1, caretPosition.y);
}
/** Deletes the character in the current cell. */
private void clearCurrentCell() {
super.getTiles().getTileAt(caretPosition.x, caretPosition.y).setCharacter(' ');
enteredText[caretPosition.y][caretPosition.x] = ' ';
}
/**
* Moves the caret to a new position.
*
* @param x
* The x-axis coordinate of the new position.
*
* @param y
* The y-axis coordinate of the new position.
*/
private void changeCaretPosition(final int x, final int y) {
// Reset current position.
Tile tile = super.tiles.getTileAt(caretPosition.x, caretPosition.y);
tile.setBackgroundColor(backgroundColor);
tile.setForegroundColor(foregroundColor);
// Set new position.
caretPosition.setLocation(x, y);
if (editable) {
tile = super.tiles.getTileAt(caretPosition.x, caretPosition.y);
tile.setBackgroundColor(caretBackgroundColor);
tile.setForegroundColor(caretForegroundColor);
}
}
/** Copies the entered text onto the tiles, so the user's input is displayed. */
private void updateDisplayedCharacters() {
for (int y = 0 ; y < super.tiles.getHeight() ; y++) {
for (int x = 0 ; x < super.tiles.getWidth() ; x++) {
super.tiles.getTileAt(x, y).setCharacter(enteredText[y][x]);
}
}
super.redrawFunction.run();
}
/**
* Appends a string to the first empty row of the text area.
*
* If the string is too long to be displayed on a single line of the area, then it is split and displayed on
* multiple lines.
*
* Newline '\n' characters result in a new line being appended.
* Tab '\t' characters are converted to two space ' ' characters.
*
* @param text
* The text to append.
*/
public void appendText(String text) {
if (text == null) {
text = "";
}
// Convert Special Characters
text = text.replace("\t", " ");
// Split the text into separate lines if required.
final int width = super.getTiles().getWidth();
text = WordUtils.wrap(text, width, "\n", true);
final String[] textLines = text.split("\n");
// Convert Text Lines to Tile Lines and append them to the area.
for (final String textLine : textLines) {
System.out.println(textLine);
final Tile[] tileLine = new Tile[textLine.length()];
for (int i = 0 ; i < textLine.length() ; i++) {
final Tile tile = new Tile(textLine.charAt(i));
tile.setBackgroundColor(backgroundColor);
tile.setForegroundColor(foregroundColor);
tileLine[i] = tile;
}
appendText(tileLine);
}
}
/**
* Appends a string of tiles to the first empty row of the text area.
*
* If there are no empty rows, then the first row is discarded and all rows are moved up by one row. The
* text is then appended to the bottom row.
*
* @param text
* The new text.
*/
public void appendText(final Tile[] text) {
// Find first empty row and append text:
for (int y = 0 ; y < super.tiles.getHeight() ; y++) {
boolean rowIsEmpty = true;
for (int x = 0 ; x < super.tiles.getWidth() ; x++) {
final char character = super.tiles.getTileAt(x, y).getCharacter();
rowIsEmpty &= Character.isSpaceChar(character);
}
if (rowIsEmpty) {
setText(y, text);
return;
}
}
// If no empty rows found, move all rows up:
for (int y = 0 ; y < super.tiles.getHeight() - 1 ;y++) {
setText(y, super.tiles.getRow(y + 1));
}
setText(super.tiles.getHeight() - 1, text);
updateDisplayedCharacters();
}
/**
* Sets the text contained within a row of the area.
*
* @param rowIndex
* The row index.
*
* @param text
* The text.
*/
public void setText(final int rowIndex, final Tile[] text) {
clearText(rowIndex);
if (text != null) {
for (int x = 0; x < Math.min(super.tiles.getWidth(), text.length); x++) {
super.tiles.getTileAt(x, rowIndex).copy(text[x]);
enteredText[rowIndex][x] = text[x].getCharacter();
}
}
updateDisplayedCharacters();
}
/**
* Clears text from a row.
*
* @param rowIndex
* The row index.
*/
public void clearText(final int rowIndex) {
if (rowIndex < 0 || rowIndex > super.tiles.getHeight()) {
return;
}
Arrays.fill(enteredText[rowIndex], ' ');
for (int x = 0 ; x < super.tiles.getWidth() ; x++) {
final Tile tile = super.tiles.getTileAt(x, rowIndex);
tile.reset();
tile.setBackgroundColor(backgroundColor);
tile.setForegroundColor(foregroundColor);
}
updateDisplayedCharacters();
}
/** Clears all text from the field. */
public void clearText() {
for (final char[] line : enteredText) {
Arrays.fill(line, ' ');
}
for (int y = 0 ; y < super.tiles.getHeight() ; y++) {
for (int x = 0 ; x < super.tiles.getWidth() ; x++) {
super.tiles.getTileAt(x, y).setCharacter(' ');
}
}
updateDisplayedCharacters();
}
}
|
package com.valkryst.VTerminal.font;
import lombok.Getter;
import lombok.ToString;
import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
/**
 * A single character of a font together with its rendered glyph image.
 *
 * <p>A degenerate (null or empty) glyph is replaced by a 1x1 magenta
 * placeholder so rendering code never has to null-check the image.</p>
 */
@ToString
class FontCharacter {
    /** The character. */
    @Getter private final int character;

    /** The character's image. */
    @Getter private BufferedImage image;

    /**
     * Constructs a new FontCharacter.
     *
     * @param character
     *          The character.
     *
     * @param image
     *          The character's image; replaced by a magenta placeholder when
     *          null or smaller than 1x1.
     */
    FontCharacter(final int character, BufferedImage image) {
        if (image == null || image.getWidth() < 1 || image.getHeight() < 1) {
            image = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB);

            final Graphics gc = image.getGraphics();
            gc.setColor(Color.MAGENTA);
            // FIX: fillRect colors the whole placeholder; drawRect only traced an
            // outline that extended one pixel past the image bounds.
            gc.fillRect(0, 0, image.getWidth(), image.getHeight());
            gc.dispose();
        }

        this.character = character;
        this.image = image;
    }

    /**
     * Resizes the character's image by some scale percentages.
     *
     * Non-positive scales and the identity scale (1, 1) are ignored.
     *
     * @param scaleWidth
     *          The amount to scale the width by.
     *
     * @param scaleHeight
     *          The amount to scale the height by.
     */
    void resizeImage(final double scaleWidth, final double scaleHeight) {
        if (scaleWidth <= 0 || scaleHeight <= 0 || (scaleWidth == 1 && scaleHeight == 1)) {
            return;
        }

        final AffineTransform tx = AffineTransform.getScaleInstance(scaleWidth, scaleHeight);
        final AffineTransformOp op = new AffineTransformOp(tx, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);
        image = op.filter(image, null);
    }

    /**
     * Resizes the character's image to a specific width and height.
     *
     * Non-positive dimensions are ignored, as is a request for the current size.
     *
     * @param width
     *          The new width.
     *
     * @param height
     *          The new height.
     */
    void resizeImage(final int width, final int height) {
        if (width <= 0 || height <= 0) {
            return;
        }

        final int imgWidth = getWidth();
        final int imgHeight = getHeight();

        // Already the requested size; nothing to do.
        if (width == imgWidth && height == imgHeight) {
            return;
        }

        // FIX: the scale factor is the ratio target/current. The previous
        // |target - current| / current computed the relative *difference*, so
        // e.g. doubling a dimension produced a scale of 1.0 (no change).
        final double scaleWidth = width / (double) imgWidth;
        final double scaleHeight = height / (double) imgHeight;
        resizeImage(scaleWidth, scaleHeight);
    }

    /**
     * Retrieves the width of the character's image.
     *
     * @return
     *          The width of the character's image.
     */
    public int getWidth() {
        return image.getWidth();
    }

    /**
     * Retrieves the height of the character's image.
     *
     * @return
     *          The height of the character's image.
     */
    public int getHeight() {
        return image.getHeight();
    }
}
|
package com.xtremelabs.droidsugar.view;
import java.io.Serializable;
import java.util.HashMap;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.os.Parcelable;
import com.xtremelabs.droidsugar.ProxyDelegatingHandler;
/**
 * A test double that records the state normally carried by an {@link Intent}:
 * action, component class, data URI, and extras.
 */
@SuppressWarnings({"UnusedDeclaration"})
public class FakeIntent {
    private Intent realIntent;

    public HashMap extras = new HashMap();
    public String action;
    public Class<?> componentClass;
    public Uri data;

    public FakeIntent(Intent realIntent) {
        this.realIntent = realIntent;
    }

    public void __constructor__(Context packageContext, Class cls) {
        componentClass = cls;
    }

    public void __constructor__(String action, Uri uri) {
        this.action = action;
        this.data = uri;
    }

    public Intent setAction(String action) {
        this.action = action;
        return realIntent;
    }

    public String getAction() {
        return action;
    }

    public Intent setData(Uri data) {
        this.data = data;
        return realIntent;
    }

    public Intent putExtras(Intent src) {
        // Copy the extras from the fake behind the given intent.
        final FakeIntent source = (FakeIntent) ProxyDelegatingHandler.getInstance().proxyFor(src);
        extras = new HashMap(source.extras);
        return realIntent;
    }

    public Bundle getExtras() {
        return new Bundle();
    }

    public void putExtra(String key, int value) {
        extras.put(key, value);
    }

    public void putExtra(String key, long value) {
        extras.put(key, value);
    }

    public void putExtra(String key, Serializable value) {
        extras.put(key, value);
    }

    public void putExtra(String key, Parcelable value) {
        extras.put(key, value);
    }

    public void putExtra(String key, String value) {
        extras.put(key, value);
    }

    public void putExtra(String key, byte[] value) {
        extras.put(key, value);
    }

    public String getStringExtra(String name) {
        return (String) extras.get(name);
    }

    public Parcelable getParcelableExtra(String name) {
        return (Parcelable) extras.get(name);
    }

    public int getIntExtra(String name, int defaultValue) {
        final Integer found = (Integer) extras.get(name);
        return found == null ? defaultValue : found;
    }

    public byte[] getByteArrayExtra(String name) {
        return (byte[]) extras.get(name);
    }

    /** Field-by-field comparison against another fake (the real intent itself is ignored). */
    public boolean realIntentEquals(FakeIntent o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return same(action, o.action)
                && same(componentClass, o.componentClass)
                && same(data, o.data)
                && same(extras, o.extras);
    }

    /** Null-safe equality check. */
    private static boolean same(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }
}
|
package codeine.servlet;
import java.security.Principal;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
import codeine.configuration.IConfigurationManager;
import codeine.jsons.auth.AuthenticationMethod;
import codeine.jsons.auth.CodeineUser;
import codeine.jsons.auth.CompoundUserPermissions;
import codeine.jsons.auth.IUserPermissions;
import codeine.jsons.auth.PermissionsConfJson;
import codeine.jsons.auth.UserPermissions;
import codeine.jsons.auth.UserProjectPermissions;
import codeine.jsons.global.GlobalConfigurationJsonStore;
import codeine.jsons.global.UserPermissionsJsonStore;
import codeine.jsons.project.ProjectJson;
import codeine.model.Constants;
import codeine.permissions.GroupsManager;
import codeine.utils.StringUtils;
import com.google.common.collect.Maps;
public class PermissionsManager {
private static final Logger log = Logger.getLogger(PermissionsManager.class);
private UserPermissionsJsonStore permissionsConfigurationJsonStore;
private IConfigurationManager configurationManager;
private GlobalConfigurationJsonStore globalConfigurationJson;
private GroupsManager groupsManager;
private UsersManager usersManager;
private final UserPermissions ADMIN_GUEST = new UserPermissions("Guest", true);
@Inject
public PermissionsManager(UserPermissionsJsonStore permissionsConfigurationJsonStore,
GlobalConfigurationJsonStore globalConfigurationJson, UsersManager usersManager, IConfigurationManager configurationManager, GroupsManager groupsManager) {
super();
this.permissionsConfigurationJsonStore = permissionsConfigurationJsonStore;
this.globalConfigurationJson = globalConfigurationJson;
this.usersManager = usersManager;
this.configurationManager = configurationManager;
this.groupsManager = groupsManager;
}
public boolean canRead(String projectName, HttpServletRequest request){
return user(request).canRead(projectName);
}
private boolean ignoreSecurity() {
return Boolean.getBoolean("ignoreSecurity") || globalConfigurationJson.get().authentication_method() == AuthenticationMethod.Disabled || !Constants.SECURITY_ENABLED;
}
public boolean canCommand(String projectName, HttpServletRequest request){
return user(request).canCommand(projectName);
}
public boolean canCommand(String projectName, String nodeAlias, HttpServletRequest request) {
return user(request).canCommand(projectName, nodeAlias);
}
public boolean isAdministrator(HttpServletRequest request){
return user(request).isAdministrator();
}
public IUserPermissions user(HttpServletRequest request){
if (ignoreSecurity()) {
return ADMIN_GUEST;
}
String user = userInternal(request);
IUserPermissions userPermissions = getUser(user);
return userPermissions;
}
private IUserPermissions getUser(String user) {
UserPermissions userPermissions = permissionsConfigurationJsonStore.get().getOrNull(user);
if (null == userPermissions) {
userPermissions = guest(user);
}
Map<String, UserProjectPermissions> p = getProjectPermissions(user);
Map<String, UserPermissions> groupPermissions = getGroupsPermissions(user);
Map<String, Map<String, UserProjectPermissions>> groupProjectsPermissions = getGroupsProjectsPermissions(user);
return new CompoundUserPermissions(userPermissions, p, groupPermissions, groupProjectsPermissions);
}
private HashMap<String, Map<String, UserProjectPermissions>> getGroupsProjectsPermissions(String user) {
HashMap<String, Map<String, UserProjectPermissions>> $ = Maps.newHashMap();
List<String> groups = groupsManager.groups(user);
for (String group : groups) {
Map<String, UserProjectPermissions> projectPermissions = getProjectPermissions(group);
if (!projectPermissions.isEmpty()) {
$.put(group, projectPermissions);
}
}
return $;
}
private HashMap<String, UserPermissions> getGroupsPermissions(String user) {
HashMap<String, UserPermissions> $ = Maps.newHashMap();
List<String> groups = groupsManager.groups(user);
for (String group : groups) {
UserPermissions userPermissions = permissionsConfigurationJsonStore.get().getOrNull(group);
if (null != userPermissions) {
$.put(group, userPermissions);
}
}
return $;
}
/**
 * Collects, per configured project, the explicit permissions entry matching
 * the given user (or group) name, if such an entry exists.
 */
private Map<String, UserProjectPermissions> getProjectPermissions(String theUser) {
    Map<String, UserProjectPermissions> result = Maps.newHashMap();
    for (ProjectJson project : configurationManager.getConfiguredProjects()) {
        for (UserProjectPermissions candidate : project.permissions()) {
            if (candidate.username().equals(theUser)) {
                result.put(project.name(), candidate);
            }
        }
    }
    return result;
}
// Fallback permissions for a user with no configuration entry.
// The 'false' flag presumably marks the user as non-administrator —
// confirm against the UserPermissions constructor.
private final UserPermissions guest(String user) {
    return new UserPermissions(user, false);
}
/**
 * Extracts the user name for the request, checking in priority order: the
 * "codeineUser" system property override, the API token header, and finally
 * the authenticated principal. Falls back to "Guest" when no principal is
 * present. An administrator may impersonate another user via the VIEW_AS
 * header.
 */
private String userInternal(HttpServletRequest request) {
    String overrideUser = System.getProperty("codeineUser");
    if (overrideUser != null) {
        return overrideUser;
    }
    String apiToken = request.getHeader(Constants.API_TOKEN);
    if (!StringUtils.isEmpty(apiToken)) {
        return usersManager.userByApiToken(apiToken).username();
    }
    Principal principal = request.getUserPrincipal();
    if (principal == null) {
        return "Guest";
    }
    String username = principal.getName();
    log.debug("handling request from user " + username);
    int atSign = username.indexOf("@");
    if (atSign >= 0) {
        // Strip the domain part of an email-style principal name.
        username = username.substring(0, atSign);
    }
    String viewAs = request.getHeader(Constants.UrlParameters.VIEW_AS);
    if (!StringUtils.isEmpty(viewAs) && getUser(username).isAdministrator()) {
        CodeineUser impersonated = usersManager.user(viewAs);
        log.debug("Using VIEW_AS Mode - " + impersonated.username());
        return impersonated.username();
    }
    return username;
}
/**
 * @return true when the user behind the request may edit the configuration of the given project.
 */
public boolean canConfigure(String projectName, HttpServletRequest request) {
    IUserPermissions permissions = user(request);
    return permissions.canConfigure(projectName);
}
/**
 * Grants administrator rights to the given user and persists the updated
 * permissions configuration.
 */
public void makeAdmin(String user) {
    PermissionsConfJson permissions = permissionsConfigurationJsonStore.get();
    permissions.makeAdmin(user);
    permissionsConfigurationJsonStore.store(permissions);
}
}
|
package com.reader.common.impl;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import com.reader.common.TextWidth;
import com.reader.common.impl.SimpleTextWithSymbolsParser;
import com.reader.common.pages.AbstractSection;
import com.reader.common.pages.Page;
/**
 * Serializable implementation of a book section: owns the section's raw
 * character data and splits it into fixed-capacity {@link Page}s for a given
 * rendering width and maximum line count.
 */
public class SectionImpl extends AbstractSection implements Serializable {
    private static final long serialVersionUID = 5843547669613571263L;
    // Full text of the section: all paragraphs concatenated, each followed
    // by a single space (see the first constructor).
    private final char[] t;
    // Pages produced by the last splitOnPages() call.
    private List<Page> pages = new ArrayList<Page>();
    // Index into 'pages' of the page currently shown.
    private int currentPage;
    // Backing book section: the source of the paragraph strings.
    private com.reader.common.book.Section section;

    /**
     * Builds the section text by joining all paragraphs of the given book
     * section, each followed by a single space.
     */
    public SectionImpl(com.reader.common.book.Section section) {
        this.section = section;
        StringBuilder builder = new StringBuilder();
        for (String s : section.getParagraphs()) {
            builder.append(s);
            builder.append(' ');
        }
        t = new char[builder.length()];
        builder.getChars(0, t.length, t, 0);
    }

    /**
     * Creates a section over an already prepared character buffer. The
     * buffer is used as-is, not copied.
     */
    public SectionImpl(com.reader.common.book.Section section, char[] text) {
        this.section = section;
        t = text;
    }

    /**
     * Splits the section text into pages of at most {@code maxLineCount}
     * lines, each line at most {@code width} units wide as measured by
     * {@code textWidth}. Words are laid out greedily; a paragraph change
     * forces a new line (plus one extra blank line when the page has room).
     */
    @Override
    public void splitOnPages(final TextWidth textWidth, final int width,
            final int maxLineCount) {
        pages.clear();
        // NOTE(review): the inter-word gap width is measured using the glyph
        // 't', not the space character ' '. Possibly deliberate (e.g. if the
        // TextWidth implementation cannot measure a bare space) — confirm.
        final int spaceWidth = textWidth.getWidth(new char[] { 't' }, 0, 1);
        pages.add(new Page(t, maxLineCount));
        final Iterator<String> paragraphs = section.getParagraphs().iterator();
        // Local parser subclass that accumulates parsed words into lines and
        // pages as the parser walks the raw text.
        class SimpleTextWithSymbolsParserA extends SimpleTextWithSymbolsParser {
            private int line;       // current line index within 'page'
            private int lineWidth;  // accumulated width of the current line
            private Page page;      // page currently being filled
            {
                page = pages.get(0);
            }
            private int st;         // start offset (in t) of the current line
            private int len;        // length in chars of the current line
            private String p;       // paragraph currently being tracked
            private int pIndex;     // cursor within 'p'
            // Called by the parser for each word; decides whether the word
            // still fits on the current line, or closes the line (and
            // possibly the page) and starts a new one.
            @Override
            public void processWord(char[] txt, int start, int length) {
                int w = textWidth.getWidth(txt, start, length);
                if (lineWidth > 0)
                    lineWidth += spaceWidth;
                lineWidth += w;
                if (paragraphChanged(txt, start, length)) {
                    // Close the previous line marking it as a paragraph end...
                    add(true);
                    lineWidth = w;
                    st = start;
                    len = 0;
                    line++;
                    // ...and leave a blank separator line when room remains.
                    if (line + 1 < maxLineCount)
                        line++;
                } else if (lineWidth > width) {
                    // Word overflows the line: close it (not a paragraph end).
                    add(false);
                    lineWidth = w;
                    st = start;
                    len = 0;
                    line++;
                }
                len = start - st + length;
            }
            // Advances through the paragraph stream in lockstep with the
            // parsed words; returns true when this word begins a new
            // paragraph.
            private boolean paragraphChanged(char[] txt, int start, int length) {
                if (p == null)
                    p = paragraphs.next();
                boolean res = false;
                while (true) {
                    if (pIndex >= p.length()) {
                        res = true;
                        p = paragraphs.next();
                        if(p.length()==0)
                            break;
                        pIndex = 0;
                        // Skip characters the parser discarded (e.g. symbols)
                        // until the paragraph cursor matches the word start.
                        while (p.charAt(pIndex) != txt[start]) {
                            pIndex++;
                        }
                        break;
                    }
                    if (p.charAt(pIndex) == txt[start])
                        break;
                    pIndex++;
                }
                pIndex += length;
                return res;
            }
            // Records the current line (st/len) into the page, rolling over
            // to a fresh page when the current one is full.
            private void add(boolean end) {
                if (line >= maxLineCount) {
                    page = new Page(t, maxLineCount);
                    pages.add(page);
                    line = 0;
                }
                page.startLines[line] = st;
                page.lengthLines[line] = len;
                page.end[line] = end;
            }
        }
        SimpleTextWithSymbolsParserA parser = new SimpleTextWithSymbolsParserA();
        parser.parse(t);
        // Flush the final, partially filled line.
        parser.add(true);
    }

    /** @return number of pages produced by the last split. */
    @Override
    public int getPageCount() {
        return pages.size();
    }

    /** @return the page at the current page index. */
    @Override
    public Page getPage() {
        return pages.get(currentPage);
    }

    @Override
    public int getCurrentPage() {
        return currentPage;
    }

    @Override
    public void setCurrentPage(int currentPage) {
        this.currentPage = currentPage;
    }

    @Override
    public void setSection(com.reader.common.book.Section section) {
        this.section = section;
    }

    /** @return offset (into the raw text) of the first character on the current page. */
    @Override
    public int getCurrentCharacter() {
        return getPage().startLines[0];
    }

    /**
     * Positions the section on the page containing character offset {@code i}.
     */
    @Override
    public void setCurrentPageByCharacteNumber(int i) {
        // NOTE(review): when i lies on or after the last page's first
        // character no page matches, and currentPage is left unchanged —
        // confirm that keeping the previous value is the intended fallback.
        for (int j = 1; j < pages.size(); j++) {
            if (pages.get(j).startLines[0] > i) {
                currentPage = j - 1;
                break;
            }
        }
    }

    public List<Page> getPages() {
        return pages;
    }

    // Exposes the raw character buffer (not a copy).
    public char[] getT() {
        return t;
    }
}
|
package edu.umd.cs.piccolo;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GraphicsConfiguration;
import java.awt.GraphicsEnvironment;
import java.awt.Image;
import java.awt.Paint;
import java.awt.Transparency;
import java.awt.geom.AffineTransform;
import java.awt.geom.Dimension2D;
import java.awt.geom.NoninvertibleTransformException;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.awt.print.Book;
import java.awt.print.PageFormat;
import java.awt.print.Paper;
import java.awt.print.Printable;
import java.awt.print.PrinterJob;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import javax.swing.event.EventListenerList;
import javax.swing.event.SwingPropertyChangeSupport;
import javax.swing.text.MutableAttributeSet;
import javax.swing.text.SimpleAttributeSet;
import edu.umd.cs.piccolo.activities.PActivity;
import edu.umd.cs.piccolo.activities.PColorActivity;
import edu.umd.cs.piccolo.activities.PInterpolatingActivity;
import edu.umd.cs.piccolo.activities.PTransformActivity;
import edu.umd.cs.piccolo.event.PInputEventListener;
import edu.umd.cs.piccolo.util.PAffineTransform;
import edu.umd.cs.piccolo.util.PBounds;
import edu.umd.cs.piccolo.util.PNodeFilter;
import edu.umd.cs.piccolo.util.PObjectOutputStream;
import edu.umd.cs.piccolo.util.PPaintContext;
import edu.umd.cs.piccolo.util.PPickPath;
import edu.umd.cs.piccolo.util.PUtil;
/**
* <b>PNode</b> is the central abstraction in Piccolo. All objects that are
* visible on the screen are instances of the node class. All nodes may have
* other "child" nodes added to them.
* <p>
* See edu.umd.piccolo.examples.NodeExample.java for demonstrations of how nodes
* can be used and how new types of nodes can be created.
* <P>
*
* @version 1.0
* @author Jesse Grosjean
*/
public class PNode implements Cloneable, Serializable, Printable {
/**
 * The property name that identifies a change in this node's client
 * properties (see {@link #getClientProperty getClientProperty}). In a
 * property change event the new value will be a reference to the map of
 * client properties but old value will always be null.
 */
public static final String PROPERTY_CLIENT_PROPERTIES = "clientProperties";
// Bit-mask codes paired with the property name constants above; used to
// identify properties compactly in property change notifications.
public static final int PROPERTY_CODE_CLIENT_PROPERTIES = 1 << 0;
/**
 * The property name that identifies a change of this node's bounds (see
 * {@link #getBounds getBounds}, {@link #getBoundsReference
 * getBoundsReference}). In any property change event the new value will be
 * a reference to this node's bounds, but old value will always be null.
 */
public static final String PROPERTY_BOUNDS = "bounds";
public static final int PROPERTY_CODE_BOUNDS = 1 << 1;
/**
 * The property name that identifies a change of this node's full bounds
 * (see {@link #getFullBounds getFullBounds},
 * {@link #getFullBoundsReference getFullBoundsReference}). In any property
 * change event the new value will be a reference to this node's full bounds
 * cache, but old value will always be null.
 */
public static final String PROPERTY_FULL_BOUNDS = "fullBounds";
public static final int PROPERTY_CODE_FULL_BOUNDS = 1 << 2;
/**
 * The property name that identifies a change of this node's transform (see
 * {@link #getTransform getTransform}, {@link #getTransformReference
 * getTransformReference}). In any property change event the new value will
 * be a reference to this node's transform, but old value will always be
 * null.
 */
public static final String PROPERTY_TRANSFORM = "transform";
public static final int PROPERTY_CODE_TRANSFORM = 1 << 3;
/**
 * The property name that identifies a change of this node's visibility (see
 * {@link #getVisible getVisible}). Both old value and new value will be
 * null in any property change event.
 */
public static final String PROPERTY_VISIBLE = "visible";
public static final int PROPERTY_CODE_VISIBLE = 1 << 4;
/**
 * The property name that identifies a change of this node's paint (see
 * {@link #getPaint getPaint}). Both old value and new value will be set
 * correctly in any property change event.
 */
public static final String PROPERTY_PAINT = "paint";
public static final int PROPERTY_CODE_PAINT = 1 << 5;
/**
 * The property name that identifies a change of this node's transparency
 * (see {@link #getTransparency getTransparency}). Both old value and new
 * value will be null in any property change event.
 */
public static final String PROPERTY_TRANSPARENCY = "transparency";
public static final int PROPERTY_CODE_TRANSPARENCY = 1 << 6;
/**
 * The property name that identifies a change of this node's pickable status
 * (see {@link #getPickable getPickable}). Both old value and new value will
 * be null in any property change event.
 */
public static final String PROPERTY_PICKABLE = "pickable";
public static final int PROPERTY_CODE_PICKABLE = 1 << 7;
/**
 * The property name that identifies a change of this node's children
 * pickable status (see {@link #getChildrenPickable getChildrenPickable}).
 * Both old value and new value will be null in any property change event.
 */
public static final String PROPERTY_CHILDREN_PICKABLE = "childrenPickable";
public static final int PROPERTY_CODE_CHILDREN_PICKABLE = 1 << 8;
/**
 * The property name that identifies a change in the set of this node's
 * direct children (see {@link #getChildrenReference getChildrenReference},
 * {@link #getChildrenIterator getChildrenIterator}). In any property change
 * event the new value will be a reference to this node's children, but old
 * value will always be null.
 */
public static final String PROPERTY_CHILDREN = "children";
public static final int PROPERTY_CODE_CHILDREN = 1 << 9;
/**
 * The property name that identifies a change of this node's parent (see
 * {@link #getParent getParent}). Both old value and new value will be set
 * correctly in any property change event.
 */
public static final String PROPERTY_PARENT = "parent";
public static final int PROPERTY_CODE_PARENT = 1 << 10;
// Shared scratch bounds object reused when computing repaint regions.
private static final PBounds TEMP_REPAINT_BOUNDS = new PBounds();
/**
 * The single scene graph delegate that receives low level node events.
 */
public static PSceneGraphDelegate SCENE_GRAPH_DELEGATE = null;
/**
 * <b>PSceneGraphDelegate</b> is an interface to receive low level node
 * events. It together with PNode.SCENE_GRAPH_DELEGATE gives Piccolo users
 * an efficient way to learn about low level changes in Piccolo's scene
 * graph. Most users will not need to use this.
 */
public interface PSceneGraphDelegate {
    /** Called when a node's paint has been invalidated. */
    public void nodePaintInvalidated(PNode node);
    /** Called when a node's full bounds have been invalidated. */
    public void nodeFullBoundsInvalidated(PNode node);
}
// ---- Instance state -------------------------------------------------
private transient PNode parent;               // scene-graph parent; null for a root node
private List children;                        // direct children of this node
private PBounds bounds;                       // bounds in this node's local coordinates
private PAffineTransform transform;           // local-to-parent transform; null is treated as identity (see localToParent)
private Paint paint;                          // fill paint; null by default (see constructor)
private float transparency;                   // initialized to 1.0f (fully opaque) in the constructor
private MutableAttributeSet clientProperties; // lazily created ad-hoc key/value attributes
private PBounds fullBoundsCache;              // cached full bounds (node plus descendants)
// Presumably a mask of property codes whose changes propagate to the
// parent — TODO confirm against firePropertyChange elsewhere in the file.
private int propertyChangeParentMask = 0;
private transient SwingPropertyChangeSupport changeSupport; // property change listeners
private transient EventListenerList listenerList;           // input event listeners
private boolean pickable;                     // whether this node itself can be picked
private boolean childrenPickable;             // whether children participate in picking
private boolean visible;
private boolean childBoundsVolatile;
private boolean paintInvalid;                 // this node needs repainting
private boolean childPaintInvalid;            // some descendant needs repainting
private boolean boundsChanged;
private boolean fullBoundsInvalid;
private boolean childBoundsInvalid;
private boolean occluded;
/**
 * Creates an empty node: null paint, empty bounds, fully opaque, visible,
 * and pickable (as are its future children). Paint and bounds must be set
 * before the node shows up on screen once added to a scene graph.
 */
public PNode() {
    bounds = new PBounds();
    fullBoundsCache = new PBounds();
    transparency = 1.0f;
    visible = true;
    pickable = true;
    childrenPickable = true;
}
// Animation - Methods to animate this node.
// Note that animation is implemented by activities (PActivity),
// so if you need more control over your animation look at the
// activities package. Each animate method creates an animation that
// will animate the node from its current state to the new state
// specified over the given duration. These methods will try to
// automatically schedule the new activity, but if the node does not
// descend from the root node when the method is called then the
// activity will not be scheduled and you must schedule it manually.
/**
 * Animate this node's bounds from their value when the activity starts to
 * the specified bounds, over the given duration. If this node descends from
 * the root the activity is scheduled automatically; otherwise the returned
 * activity must be scheduled manually. Concurrent bounds activities on the
 * same node are all applied each frame, last-scheduled last. Note this
 * animates the node's bounds only — use animateTransformToBounds() to
 * animate the transform instead.
 *
 * @param duration amount of time that the animation should take
 * @return the newly scheduled activity, or null when duration is 0 (the
 *         bounds are then set immediately)
 */
public PInterpolatingActivity animateToBounds(double x, double y, double width, double height, long duration) {
    if (duration == 0) {
        setBounds(x, y, width, height);
        return null;
    }
    final PBounds destination = new PBounds(x, y, width, height);
    PInterpolatingActivity activity = new PInterpolatingActivity(duration, PUtil.DEFAULT_ACTIVITY_STEP_RATE) {
        private PBounds origin;
        protected void activityStarted() {
            // Snapshot the starting bounds the moment the activity begins.
            origin = getBounds();
            startResizeBounds();
            super.activityStarted();
        }
        public void setRelativeTargetValue(float zeroToOne) {
            double nx = origin.x + (zeroToOne * (destination.x - origin.x));
            double ny = origin.y + (zeroToOne * (destination.y - origin.y));
            double nw = origin.width + (zeroToOne * (destination.width - origin.width));
            double nh = origin.height + (zeroToOne * (destination.height - origin.height));
            PNode.this.setBounds(nx, ny, nw, nh);
        }
        protected void activityFinished() {
            super.activityFinished();
            endResizeBounds();
        }
    };
    addActivity(activity);
    return activity;
}
/**
 * Animate this node's transform from its value when the activity starts to
 * a new transform that fits the node into the given bounds. If this node
 * descends from the root the activity is scheduled automatically; otherwise
 * schedule the returned activity manually. Note this animates the
 * transform, not the bounds rectangle — use animateToBounds() for that.
 *
 * @param duration amount of time that the animation should take
 * @return the newly scheduled activity
 */
public PTransformActivity animateTransformToBounds(double x, double y, double width, double height, long duration) {
    PAffineTransform target = new PAffineTransform();
    target.setToScale(width / getWidth(), height / getHeight());
    double scale = target.getScale();
    // Offset so the scaled node's origin lands on (x, y).
    target.setOffset(x - (getX() * scale), y - (getY() * scale));
    return animateToTransform(target, duration);
}
/**
 * Animate this node's transform from its current value when the activity
 * starts to the given offset, scale, and rotation. If this node descends
 * from the root the activity is scheduled automatically; otherwise schedule
 * the returned activity manually.
 *
 * @param duration amount of time that the animation should take
 * @param theta final theta value (in radians) for the animation
 * @return the newly scheduled activity
 */
public PTransformActivity animateToPositionScaleRotation(double x, double y, double scale, double theta,
        long duration) {
    PAffineTransform target = getTransform();
    target.setOffset(x, y);
    target.setScale(scale);
    target.setRotation(theta);
    return animateToTransform(target, duration);
}
/**
 * Animate this node's transform from its current values when the activity
 * starts to the values of the given transform. If this node descends from
 * the root the activity is scheduled automatically; otherwise schedule the
 * returned activity manually. Concurrent transform activities on the same
 * node are all applied each frame, last-scheduled last.
 *
 * @param destTransform the final transform value
 * @param duration amount of time that the animation should take
 * @return the newly scheduled activity, or null when duration is 0 (the
 *         transform is then set immediately)
 */
public PTransformActivity animateToTransform(AffineTransform destTransform, long duration) {
    if (duration == 0) {
        setTransform(destTransform);
        return null;
    }
    PTransformActivity.Target target = new PTransformActivity.Target() {
        public void setTransform(AffineTransform aTransform) {
            PNode.this.setTransform(aTransform);
        }
        public void getSourceMatrix(double[] aSource) {
            PNode.this.getTransformReference(true).getMatrix(aSource);
        }
    };
    PTransformActivity activity = new PTransformActivity(duration, PUtil.DEFAULT_ACTIVITY_STEP_RATE, target, destTransform);
    addActivity(activity);
    return activity;
}
/**
 * Animate this node's color from its current value to the given one. This
 * method assumes the node's paint is a Color. If this node descends from
 * the root the activity is scheduled automatically; otherwise schedule the
 * returned activity manually. Concurrent color activities on the same node
 * are all applied each frame, last-scheduled last.
 *
 * @param destColor final color value.
 * @param duration amount of time that the animation should take
 * @return the newly scheduled activity, or null when duration is 0 (the
 *         paint is then set immediately)
 */
public PInterpolatingActivity animateToColor(Color destColor, long duration) {
    if (duration == 0) {
        setPaint(destColor);
        return null;
    }
    PColorActivity.Target target = new PColorActivity.Target() {
        public Color getColor() {
            // Assumes paint is a Color, per the method contract.
            return (Color) getPaint();
        }
        public void setColor(Color color) {
            setPaint(color);
        }
    };
    PColorActivity activity = new PColorActivity(duration, PUtil.DEFAULT_ACTIVITY_STEP_RATE, target, destColor);
    addActivity(activity);
    return activity;
}
/**
 * Animate this node's transparency from its current value to the given one.
 * Transparency values must range from zero to one. If this node descends
 * from the root the activity is scheduled automatically; otherwise schedule
 * the returned activity manually. Concurrent transparency activities on the
 * same node are all applied each frame, last-scheduled last.
 *
 * @param zeroToOne final transparency value.
 * @param duration amount of time that the animation should take
 * @return the newly scheduled activity, or null when duration is 0 (the
 *         transparency is then set immediately)
 */
public PInterpolatingActivity animateToTransparency(float zeroToOne, long duration) {
    if (duration == 0) {
        setTransparency(zeroToOne);
        return null;
    }
    final float target = zeroToOne;
    PInterpolatingActivity activity = new PInterpolatingActivity(duration, PUtil.DEFAULT_ACTIVITY_STEP_RATE) {
        private float origin;
        protected void activityStarted() {
            // Snapshot the starting transparency when the activity begins.
            origin = getTransparency();
            super.activityStarted();
        }
        public void setRelativeTargetValue(float zeroToOne) {
            PNode.this.setTransparency(origin + (zeroToOne * (target - origin)));
        }
    };
    addActivity(activity);
    return activity;
}
/**
 * Schedule the given activity with the root; only scheduled activities are
 * stepped.
 *
 * @param activity new activity to schedule
 * @return true if the activity was successfully scheduled; false when this
 *         node does not descend from a root
 */
public boolean addActivity(PActivity activity) {
    PRoot root = getRoot();
    return root != null && root.addActivity(activity);
}
// Client Properties - Methods for managing client properties for
// this node.
// Client properties provide a way for programmers to attach
// extra information to a node without having to subclass it and
// add new instance variables.
/**
 * Return the mutable attribute set of client properties associated with
 * this node, creating it lazily on first access.
 */
public MutableAttributeSet getClientProperties() {
    MutableAttributeSet properties = clientProperties;
    if (properties == null) {
        properties = new SimpleAttributeSet();
        clientProperties = properties;
    }
    return properties;
}
/**
 * Returns the value of the client attribute with the specified key. Only
 * attributes added with <code>addAttribute</code> will return a non-null
 * value.
 *
 * @return the value of this attribute, or null when the key is null or no
 *         attributes have been set
 */
public Object getAttribute(Object key) {
    if (clientProperties != null && key != null) {
        return clientProperties.getAttribute(key);
    }
    return null;
}
/**
 * Add an arbitrary key/value attribute to this node. A null value removes
 * the key. On change, two property change events are fired: one for the
 * client-properties map and one keyed by the attribute itself.
 */
public void addAttribute(Object key, Object value) {
    if (value == null && clientProperties == null) {
        return;
    }
    Object oldValue = getAttribute(key);
    // Identity (==) comparison: equal-but-distinct values still count as a change.
    if (value == oldValue) {
        return;
    }
    if (clientProperties == null) {
        clientProperties = new SimpleAttributeSet();
    }
    if (value == null) {
        clientProperties.removeAttribute(key);
    }
    else {
        clientProperties.addAttribute(key, value);
    }
    // Drop the attribute set entirely once it holds nothing.
    if (clientProperties.getAttributeCount() == 0 && clientProperties.getResolveParent() == null) {
        clientProperties = null;
    }
    firePropertyChange(PROPERTY_CODE_CLIENT_PROPERTIES, PROPERTY_CLIENT_PROPERTIES, null, clientProperties);
    firePropertyChange(PROPERTY_CODE_CLIENT_PROPERTIES, key.toString(), oldValue, value);
}
/**
 * Returns an enumeration of all keys mapped to attribute values.
 *
 * @return an Enumeration over attribute keys (empty when no attributes are set)
 */
public Enumeration getClientPropertyKeysEnumeration() {
    return clientProperties == null
            ? PUtil.NULL_ENUMERATION
            : clientProperties.getAttributeNames();
}
// Convenience accessors that return a caller-supplied default when the
// attribute is unset (or not of the expected type family).
/** Return the attribute for key, or def when it is unset. */
public Object getAttribute(Object key, Object def) {
    Object value = getAttribute(key);
    if (value == null) {
        return def;
    }
    return value;
}
/** Return the boolean attribute for key, or def when it is unset. */
public boolean getBooleanAttribute(Object key, boolean def) {
    Boolean value = (Boolean) getAttribute(key);
    if (value == null) {
        return def;
    }
    return value.booleanValue();
}
/** Return the numeric attribute for key as an int, or def when it is unset. */
public int getIntegerAttribute(Object key, int def) {
    Number value = (Number) getAttribute(key);
    if (value == null) {
        return def;
    }
    return value.intValue();
}
/** Return the numeric attribute for key as a double, or def when it is unset. */
public double getDoubleAttribute(Object key, double def) {
    Number value = (Number) getAttribute(key);
    if (value == null) {
        return def;
    }
    return value.doubleValue();
}
/**
 * @deprecated use getAttribute(Object key)instead.
 */
public Object getClientProperty(Object key) {
    return getAttribute(key);
}
/**
 * @deprecated use addAttribute(Object key, Object value)instead.
 */
public void addClientProperty(Object key, Object value) {
    addAttribute(key, value);
}
/**
 * @deprecated use getClientPropertyKeysEnumerator() instead.
 */
public Iterator getClientPropertyKeysIterator() {
    // Adapts the key Enumeration to the (read-only) Iterator interface.
    final Enumeration keys = getClientPropertyKeysEnumeration();
    return new Iterator() {
        public boolean hasNext() {
            return keys.hasMoreElements();
        }
        public Object next() {
            return keys.nextElement();
        }
        public void remove() {
            // Removal through this view is not supported.
            throw new UnsupportedOperationException();
        }
    };
}
// Copying - Methods for copying this node and its descendants.
// Copying is implemened in terms of serialization.
/**
 * The copy method copies this node and all of its descendants. Copying is
 * implemented in terms of Java serialization; see the serialization notes
 * for more information.
 *
 * @return new copy of this node, or null if the node was not serializable
 */
public Object clone() {
    try {
        byte[] serialized = PObjectOutputStream.toByteArray(this);
        ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(serialized));
        return (PNode) in.readObject();
    }
    catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    catch (ClassNotFoundException e) {
        e.printStackTrace();
        return null;
    }
}
// Coordinate System Conversions - Methods for converting
// geometry between this nodes local coordinates and the other
// major coordinate systems.
// Each nodes has an affine transform that it uses to define its
// own coordinate system. For example if you create a new node and
// add it to the canvas it will appear in the upper right corner. Its
// coordinate system matches the coordinate system of its parent
// (the root node) at this point. But if you move this node by calling
// node.translate() the nodes affine transform will be modified and the
// node will appear at a different location on the screen. The node
// coordinate system no longer matches the coordinate system of its
// parent.
// This is useful because it means that the node's methods for
// rendering and picking don't need to worry about the fact that
// the node has been moved to another position on the screen, they
// keep working just like they did when it was in the upper right
// hand corner of the screen.
// The problem is now that each node defines its own coordinate
// system it is difficult to compare the positions of two node with
// each other. These methods are all meant to help solve that problem.
// The terms used in the methods are as follows:
// local - The local or base coordinate system of a node.
// parent - The coordinate system of a node's parent
// global - The topmost coordinate system, above the root node.
// Normally when comparing the positions of two nodes you will
// convert the local position of each node to the global coordinate
// system, and then compare the positions in that common coordinate
// system.
/**
 * Transform the given point from this node's local coordinate system to its
 * parent's. The point parameter is modified in place; a null transform is
 * treated as the identity.
 *
 * @param localPoint point in local coordinate system to be transformed.
 * @return point in parent's local coordinate system
 */
public Point2D localToParent(Point2D localPoint) {
    return transform == null ? localPoint : transform.transform(localPoint, localPoint);
}
/**
 * Transform the given dimension from this node's local coordinate system to
 * its parent's. The dimension parameter is modified in place; a null
 * transform is treated as the identity.
 *
 * @param localDimension dimension in local coordinate system to be
 *            transformed.
 * @return dimension in parent's local coordinate system
 */
public Dimension2D localToParent(Dimension2D localDimension) {
    return transform == null ? localDimension : transform.transform(localDimension, localDimension);
}
/**
 * Transform the given rectangle from this node's local coordinate system to
 * its parent's. The rectangle parameter is modified in place; a null
 * transform is treated as the identity.
 *
 * @param localRectangle rectangle in local coordinate system to be
 *            transformed.
 * @return rectangle in parent's local coordinate system
 */
public Rectangle2D localToParent(Rectangle2D localRectangle) {
    return transform == null ? localRectangle : transform.transform(localRectangle, localRectangle);
}
/**
 * Transform the given point from this node's parent's local coordinate
 * system to this node's. The point parameter is modified in place; a null
 * transform is treated as the identity.
 *
 * @param parentPoint point in parent's coordinate system to be transformed.
 * @return point in this node's local coordinate system, or null when the
 *         transform is not invertible
 */
public Point2D parentToLocal(Point2D parentPoint) {
    if (transform == null) {
        return parentPoint;
    }
    try {
        return transform.inverseTransform(parentPoint, parentPoint);
    }
    catch (NoninvertibleTransformException e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Transform the given dimension from this node's parent's local coordinate
 * system to this node's. The dimension parameter is modified in place; a
 * null transform is treated as the identity.
 *
 * @param parentDimension dimension in parent's coordinate system to be
 *            transformed.
 * @return dimension in this node's local coordinate system
 */
public Dimension2D parentToLocal(Dimension2D parentDimension) {
    return transform == null ? parentDimension : transform.inverseTransform(parentDimension, parentDimension);
}
/**
 * Transform the given rectangle from this node's parent's local coordinate
 * system to this node's. The rectangle parameter is modified in place; a
 * null transform is treated as the identity.
 *
 * @param parentRectangle rectangle in parent's coordinate system to be
 *            transformed.
 * @return rectangle in this node's local coordinate system
 */
public Rectangle2D parentToLocal(Rectangle2D parentRectangle) {
    return transform == null ? parentRectangle : transform.inverseTransform(parentRectangle, parentRectangle);
}
/**
 * Transform the given point from this node's local coordinate system to the
 * global coordinate system by walking up the parent chain. The point
 * parameter is modified in place.
 *
 * @param localPoint point in local coordinate system to be transformed.
 * @return point in global coordinates
 */
public Point2D localToGlobal(Point2D localPoint) {
    for (PNode node = this; node != null; node = node.parent) {
        localPoint = node.localToParent(localPoint);
    }
    return localPoint;
}
/**
 * Transform the given dimension from this node's local coordinate system to
 * the global coordinate system by walking up the parent chain. The
 * dimension parameter is modified in place.
 *
 * @param localDimension dimension in local coordinate system to be
 *            transformed.
 * @return dimension in global coordinates
 */
public Dimension2D localToGlobal(Dimension2D localDimension) {
    for (PNode node = this; node != null; node = node.parent) {
        localDimension = node.localToParent(localDimension);
    }
    return localDimension;
}
/**
 * Transform the given rectangle from this node's local coordinate system to
 * the global coordinate system by walking up the parent chain. The
 * rectangle parameter is modified in place.
 *
 * @param localRectangle rectangle in local coordinate system to be
 *            transformed.
 * @return rectangle in global coordinates
 */
public Rectangle2D localToGlobal(Rectangle2D localRectangle) {
    for (PNode node = this; node != null; node = node.parent) {
        localRectangle = node.localToParent(localRectangle);
    }
    return localRectangle;
}
/**
 * Transform the given point from global coordinates to this node's local
 * coordinate system. Note that this will modify the point parameter.
 *
 * @param globalPoint point in global coordinates to be transformed.
 * @return point in this node's local coordinate system.
 */
public Point2D globalToLocal(Point2D globalPoint) {
    // Let the ancestors map the point down to the parent's coordinate
    // system first, then apply this node's own inverse transform.
    Point2D parentPoint = (parent == null) ? globalPoint : parent.globalToLocal(globalPoint);
    return parentToLocal(parentPoint);
}
/**
 * Transform the given dimension from global coordinates to this node's
 * local coordinate system. Note that this will modify the dimension
 * parameter.
 *
 * @param globalDimension dimension in global coordinates to be transformed.
 * @return dimension in this node's local coordinate system.
 */
public Dimension2D globalToLocal(Dimension2D globalDimension) {
    // Let the ancestors map the dimension down to the parent's coordinate
    // system first, then apply this node's own inverse transform.
    Dimension2D parentDimension = (parent == null) ? globalDimension : parent.globalToLocal(globalDimension);
    return parentToLocal(parentDimension);
}
/**
 * Transform the given rectangle from global coordinates to this node's
 * local coordinate system. Note that this will modify the rectangle
 * parameter.
 *
 * @param globalRectangle rectangle in global coordinates to be transformed.
 * @return rectangle in this node's local coordinate system.
 */
public Rectangle2D globalToLocal(Rectangle2D globalRectangle) {
    // Let the ancestors map the rectangle down to the parent's coordinate
    // system first, then apply this node's own inverse transform.
    Rectangle2D parentRectangle = (parent == null) ? globalRectangle : parent.globalToLocal(globalRectangle);
    return parentToLocal(parentRectangle);
}
/**
 * Return the transform that converts local coordinates at this node to the
 * global coordinate system.
 *
 * @param dest transform to reuse for the result; may be null, in which
 *        case a new transform is created.
 * @return The concatenation of transforms from the top node down to this
 *         node.
 */
public PAffineTransform getLocalToGlobalTransform(PAffineTransform dest) {
    if (parent != null) {
        // Recurse to the root first so ancestor transforms are applied
        // top-down, then append this node's own transform.
        dest = parent.getLocalToGlobalTransform(dest);
        if (transform != null)
            dest.concatenate(transform);
    }
    else {
        // Root of the chain: seed dest with this node's transform (a copy
        // when dest is null) or the identity when there is no transform.
        if (dest == null) {
            dest = getTransform();
        }
        else {
            if (transform != null) {
                dest.setTransform(transform);
            }
            else {
                dest.setToIdentity();
            }
        }
    }
    return dest;
}
/**
 * Return the transform that converts global coordinates to local
 * coordinates of this node.
 *
 * @param dest transform to reuse for the result; may be null.
 * @return The inverse of the concatenation of transforms from the root down
 *         to this node, or null if that concatenation is not invertible.
 */
public PAffineTransform getGlobalToLocalTransform(PAffineTransform dest) {
    try {
        dest = getLocalToGlobalTransform(dest);
        dest.setTransform(dest.createInverse());
        return dest;
    }
    catch (NoninvertibleTransformException e) {
        // NOTE(review): the failure is only printed and null is returned;
        // callers appear to rely on the null contract -- confirm before
        // changing this to rethrow.
        e.printStackTrace();
    }
    return null;
}
// Event Listeners - Methods for adding and removing event listeners
// from a node.
// Here methods are provided to add property change listeners and
// input event listeners. The property change listeners are notified
// when certain properties of this node change, and the input event
// listeners are notified when the node receives new key and mouse
// events.
/**
 * Return the list of event listeners associated with this node.
 *
 * @return event listener list, or null if no listeners are registered
 */
public EventListenerList getListenerList() {
    return listenerList;
}
/**
 * Adds the specified input event listener to receive input events from this
 * node.
 *
 * @param listener the new input listener
 */
public void addInputEventListener(PInputEventListener listener) {
    // Lazily create the listener list on first registration.
    if (listenerList == null) {
        listenerList = new EventListenerList();
    }
    listenerList.add(PInputEventListener.class, listener);
}
/**
 * Removes the specified input event listener so that it no longer receives
 * input events from this node.
 *
 * @param listener the input listener to remove
 */
public void removeInputEventListener(PInputEventListener listener) {
    if (listenerList == null) {
        return;
    }
    listenerList.remove(PInputEventListener.class, listener);
    // Drop the list entirely once the last listener is gone.
    if (listenerList.getListenerCount() == 0) {
        listenerList = null;
    }
}
/**
 * Add a PropertyChangeListener to the listener list. The listener is
 * registered for all properties. See the fields in PNode and subclasses
 * that start with PROPERTY_ to find out which properties exist.
 *
 * @param listener The PropertyChangeListener to be added; ignored if null
 */
public void addPropertyChangeListener(PropertyChangeListener listener) {
    // Ignore null for consistency with the
    // addPropertyChangeListener(String, PropertyChangeListener) overload;
    // this also avoids lazily allocating changeSupport for a no-op add
    // (PropertyChangeSupport itself silently ignores null listeners).
    if (listener == null) {
        return;
    }
    if (changeSupport == null) {
        changeSupport = new SwingPropertyChangeSupport(this);
    }
    changeSupport.addPropertyChangeListener(listener);
}
/**
 * Add a PropertyChangeListener for a specific property. The listener will
 * be invoked only when a call on firePropertyChange names that specific
 * property. See the fields in PNode and subclasses that start with
 * PROPERTY_ to find out which properties are supported.
 *
 * @param propertyName The name of the property to listen on.
 * @param listener The PropertyChangeListener to be added; ignored if null
 */
public void addPropertyChangeListener(String propertyName, PropertyChangeListener listener) {
    if (listener != null) {
        // Lazily create the change support on first registration.
        if (changeSupport == null) {
            changeSupport = new SwingPropertyChangeSupport(this);
        }
        changeSupport.addPropertyChangeListener(propertyName, listener);
    }
}
/**
 * Remove a PropertyChangeListener from the listener list. This removes a
 * PropertyChangeListener that was registered for all properties.
 *
 * @param listener The PropertyChangeListener to be removed
 */
public void removePropertyChangeListener(PropertyChangeListener listener) {
    // Nothing to do when no listener was ever registered.
    if (changeSupport == null) {
        return;
    }
    changeSupport.removePropertyChangeListener(listener);
}
/**
 * Remove a PropertyChangeListener for a specific property.
 *
 * @param propertyName The name of the property that was listened on.
 * @param listener The PropertyChangeListener to be removed
 */
public void removePropertyChangeListener(String propertyName, PropertyChangeListener listener) {
    // A null listener or absent change support means there is nothing
    // to remove.
    if (listener != null && changeSupport != null) {
        changeSupport.removePropertyChangeListener(propertyName, listener);
    }
}
/**
 * Return the propertyChangeParentMask that determines which property change
 * events are forwarded to this node's parent so that its property change
 * listeners will also be notified.
 *
 * @return the bitmask of property codes forwarded to the parent
 */
public int getPropertyChangeParentMask() {
    return propertyChangeParentMask;
}
/**
 * Set the propertyChangeParentMask that determines which property change
 * events are forwarded to this node's parent so that its property change
 * listeners will also be notified.
 *
 * @param propertyChangeParentMask the bitmask of property codes to forward
 */
public void setPropertyChangeParentMask(int propertyChangeParentMask) {
    this.propertyChangeParentMask = propertyChangeParentMask;
}
/**
 * Report a bound property update to any registered listeners. No event is
 * fired if old and new are equal and non-null. If the propertyCode exists
 * in this node's propertyChangeParentMask then a property change event will
 * also be fired on this node's parent.
 *
 * @param propertyCode The code of the property changed.
 * @param propertyName The programmatic name of the property that was
 *        changed.
 * @param oldValue The old value of the property.
 * @param newValue The new value of the property.
 */
protected void firePropertyChange(int propertyCode, String propertyName, Object oldValue, Object newValue) {
    PropertyChangeEvent event = null;
    // Notify listeners registered directly on this node, if any.
    if (changeSupport != null) {
        event = new PropertyChangeEvent(this, propertyName, oldValue, newValue);
        changeSupport.firePropertyChange(event);
    }
    // Forward up the tree when the property's code is enabled in the
    // parent mask; the event is built lazily if not already created above.
    if (parent != null && (propertyCode & propertyChangeParentMask) != 0) {
        if (event == null)
            event = new PropertyChangeEvent(this, propertyName, oldValue, newValue);
        parent.fireChildPropertyChange(event, propertyCode);
    }
}
/**
 * Called by child node to forward property change events up the node tree
 * so that property change listeners registered with this node will be
 * notified of property changes of its children nodes. For performance
 * reasons only propertyCodes listed in the propertyChangeParentMask are
 * forwarded.
 *
 * @param event The property change event containing source node and changed
 *        values.
 * @param propertyCode The code of the property changed.
 */
protected void fireChildPropertyChange(PropertyChangeEvent event, int propertyCode) {
    // Notify this node's own listeners, then keep forwarding upward while
    // the ancestor's parent mask allows it.
    if (changeSupport != null) {
        changeSupport.firePropertyChange(event);
    }
    if (parent != null && (propertyCode & propertyChangeParentMask) != 0) {
        parent.fireChildPropertyChange(event, propertyCode);
    }
}
// Bounds Geometry - Methods for setting and querying the bounds
// of this node.
// The bounds of a node store the node's position and size in
// the node's local coordinate system. Many node subclasses will need
// to override the setBounds method so that they can update their
// internal state appropriately. See PPath for an example.
// Since the bounds are stored in the local coordinate system
// they WILL NOT change if the node is scaled, translated, or rotated.
// The bounds may be accessed with either getBounds or
// getBoundsReference. The former returns a copy of the bounds,
// the latter returns a reference to the node's bounds that should
// normally not be modified. If a node is marked as volatile then
// it may modify its bounds before returning them from getBoundsReference,
// otherwise it may not.
/**
 * Return a copy of this node's bounds. These bounds are stored in the local
 * coordinate system of this node and do not include the bounds of any of
 * this node's children.
 *
 * @return a copy of this node's bounds in local coordinates
 */
public PBounds getBounds() {
    return (PBounds) getBoundsReference().clone();
}
/**
 * Return a direct reference to this node's bounds. These bounds are stored
 * in the local coordinate system of this node and do not include the bounds
 * of any of this node's children. The value returned should not be
 * modified.
 *
 * @return a direct (not a copy) reference to this node's bounds
 */
public PBounds getBoundsReference() {
    return bounds;
}
/**
 * Notify this node that you will begin to repeatedly call <code>setBounds
 * </code>. When you are done call <code>endResizeBounds</code> to let the
 * node know that you are done.
 */
public void startResizeBounds() {
    // Intentionally empty; subclasses may override to batch resize work.
}
/**
 * Notify this node that you have finished a resize bounds sequence.
 */
public void endResizeBounds() {
    // Intentionally empty; subclasses may override.
}
/**
 * Set the x position (in local coordinates) of this node's bounds.
 *
 * @param x the new x position
 * @return true if the bounds changed.
 */
public boolean setX(double x) {
    return setBounds(x, getY(), getWidth(), getHeight());
}
/**
 * Set the y position (in local coordinates) of this node's bounds.
 *
 * @param y the new y position
 * @return true if the bounds changed.
 */
public boolean setY(double y) {
    return setBounds(getX(), y, getWidth(), getHeight());
}
/**
 * Set the width (in local coordinates) of this node's bounds.
 *
 * @param width the new width
 * @return true if the bounds changed.
 */
public boolean setWidth(double width) {
    return setBounds(getX(), getY(), width, getHeight());
}
/**
 * Set the height (in local coordinates) of this node's bounds.
 *
 * @param height the new height
 * @return true if the bounds changed.
 */
public boolean setHeight(double height) {
    return setBounds(getX(), getY(), getWidth(), height);
}
/**
 * Set the bounds of this node to the given value. These bounds are stored
 * in the local coordinate system of this node.
 *
 * @param newBounds the new bounds, in local coordinates
 * @return true if the bounds changed.
 */
public boolean setBounds(Rectangle2D newBounds) {
    return setBounds(newBounds.getX(), newBounds.getY(), newBounds.getWidth(), newBounds.getHeight());
}
/**
 * Set the bounds of this node to the given value. These bounds are stored
 * in the local coordinate system of this node.
 *
 * If the width or height is less than or equal to zero then the bound's
 * empty bit will be set to true.
 *
 * Subclasses must call the super.setBounds() method.
 *
 * @param x the new x position of the bounds
 * @param y the new y position of the bounds
 * @param width the new width of the bounds
 * @param height the new height of the bounds
 * @return true if the bounds changed.
 */
public boolean setBounds(double x, double y, double width, double height) {
    if (bounds.x != x || bounds.y != y || bounds.width != width || bounds.height != height) {
        bounds.setRect(x, y, width, height);
        // A degenerate size marks the bounds as empty.
        if (width <= 0 || height <= 0) {
            bounds.reset();
        }
        // Let subclasses sync internal state before change notifications.
        internalUpdateBounds(x, y, width, height);
        invalidatePaint();
        signalBoundsChanged();
        return true;
    }
    // Don't put any invalidating code here or else nodes with volatile
    // bounds will
    // create a soft infinite loop (calling Swing.invokeLater()) when they
    // validate
    // their bounds.
    return false;
}
/**
 * Gives nodes a chance to update their internal structure before bounds
 * changed notifications are sent. When this message is received the node's
 * bounds field will contain the new value.
 *
 * See PPath for an example that uses this method.
 *
 * @param x the new x position of the bounds
 * @param y the new y position of the bounds
 * @param width the new width of the bounds
 * @param height the new height of the bounds
 */
protected void internalUpdateBounds(double x, double y, double width, double height) {
    // Intentionally empty; subclasses may override.
}
/**
 * Set the empty bit of this bounds to true, by collapsing the bounds to a
 * zero-size rectangle at the origin.
 */
public void resetBounds() {
    setBounds(0, 0, 0, 0);
}
/**
 * Return the x position (in local coords) of this node's bounds.
 *
 * @return the x position of the bounds
 */
public double getX() {
    return getBoundsReference().getX();
}
/**
 * Return the y position (in local coords) of this node's bounds.
 *
 * @return the y position of the bounds
 */
public double getY() {
    return getBoundsReference().getY();
}
/**
 * Return the width (in local coords) of this node's bounds.
 *
 * @return the width of the bounds
 */
public double getWidth() {
    return getBoundsReference().getWidth();
}
/**
 * Return the height (in local coords) of this node's bounds.
 *
 * @return the height of the bounds
 */
public double getHeight() {
    return getBoundsReference().getHeight();
}
/**
 * Return a copy of the bounds of this node in the global coordinate system.
 *
 * @return the bounds in global coordinate system.
 */
public PBounds getGlobalBounds() {
    // localToGlobal mutates and returns the copy made by getBounds().
    return (PBounds) localToGlobal(getBounds());
}
/**
 * Center the bounds of this node so that they are centered on the given
 * point specified in the local coords of this node. Note that this method
 * will modify the node's bounds, while centerFullBoundsOnPoint will modify
 * the node's transform.
 *
 * @param localX the x coordinate (local) to center the bounds on
 * @param localY the y coordinate (local) to center the bounds on
 * @return true if the bounds changed.
 */
public boolean centerBoundsOnPoint(double localX, double localY) {
    double shiftX = localX - bounds.getCenterX();
    double shiftY = localY - bounds.getCenterY();
    return setBounds(bounds.x + shiftX, bounds.y + shiftY, bounds.width, bounds.height);
}
/**
 * Center the full bounds of this node so that they are centered on the
 * given point specified in the local coords of this node's parent. Note
 * that this method will modify the node's transform, while
 * centerBoundsOnPoint will modify the node's bounds.
 *
 * @param parentX the x coordinate (parent coords) to center on
 * @param parentY the y coordinate (parent coords) to center on
 */
public void centerFullBoundsOnPoint(double parentX, double parentY) {
    PBounds full = getFullBoundsReference();
    offset(parentX - full.getCenterX(), parentY - full.getCenterY());
}
/**
 * Return true if this node intersects the given rectangle specified in
 * local bounds. If the geometry of this node is complex this method can
 * become expensive, it is therefore recommended that
 * <code>fullIntersects</code> is used for quick rejects before calling this
 * method.
 *
 * @param localBounds the bounds to test for intersection against
 * @return true if the given rectangle intersects this node's geometry.
 */
public boolean intersects(Rectangle2D localBounds) {
    // A null rectangle is treated as "everything": always intersects.
    return localBounds == null || getBoundsReference().intersects(localBounds);
}
// Full Bounds - Methods for computing and querying the
// full bounds of this node.
// The full bounds of a node store the nodes bounds
// together with the union of the bounds of all the
// node's descendents. The full bounds are stored in the parent
// coordinate system of this node, the full bounds DOES change
// when you translate, scale, or rotate this node.
// The full bounds may be accessed with either getFullBounds, or
// getFullBoundsReference. The former returns a copy of the full bounds
// the latter returns a reference to the node's full bounds that should
// not be modified.
/**
 * Return a copy of this node's full bounds. These bounds are stored in the
 * parent coordinate system of this node and they include the union of this
 * node's bounds and all the bounds of it's descendents.
 *
 * @return a copy of this node's full bounds.
 */
public PBounds getFullBounds() {
    return (PBounds) getFullBoundsReference().clone();
}
/**
 * Return a reference to this node's full bounds cache. These bounds are
 * stored in the parent coordinate system of this node and they include the
 * union of this node's bounds and all the bounds of it's descendents. The
 * bounds returned by this method should not be modified.
 *
 * @return a reference to this node's full bounds cache.
 */
public PBounds getFullBoundsReference() {
    // Recompute the cache first if any invalid/volatile flags are set.
    validateFullBounds();
    return fullBoundsCache;
}
/**
 * Compute and return the full bounds of this node. If the dstBounds
 * parameter is not null then it will be used to return the results instead
 * of creating a new PBounds.
 *
 * @param dstBounds if not null the new bounds will be stored here
 * @return the full bounds in the parent coordinate system of this node
 */
public PBounds computeFullBounds(PBounds dstBounds) {
    // Union the children's full bounds with this node's own bounds, then
    // map the result into the parent's coordinate system.
    PBounds union = getUnionOfChildrenBounds(dstBounds);
    union.add(getBoundsReference());
    localToParent(union);
    return union;
}
/**
 * Compute and return the union of the full bounds of all the children of
 * this node. If the dstBounds parameter is not null then it will be used to
 * return the results instead of creating a new PBounds.
 *
 * @param dstBounds if not null the new bounds will be stored here
 * @return the union of the children's full bounds
 */
public PBounds getUnionOfChildrenBounds(PBounds dstBounds) {
    PBounds union = dstBounds;
    if (union == null) {
        union = new PBounds();
    }
    else {
        union.resetToZero();
    }
    int childCount = getChildrenCount();
    for (int i = 0; i < childCount; i++) {
        union.add(((PNode) children.get(i)).getFullBoundsReference());
    }
    return union;
}
/**
 * Return a copy of the full bounds of this node in the global coordinate
 * system.
 *
 * @return the full bounds in global coordinate system.
 */
public PBounds getGlobalFullBounds() {
    PBounds globalBounds = getFullBounds();
    // Full bounds are already in the parent's coordinate system, so the
    // parent maps them the rest of the way to global (in place).
    if (parent != null) {
        parent.localToGlobal(globalBounds);
    }
    return globalBounds;
}
/**
 * Return true if the full bounds of this node intersect with the specified
 * bounds.
 *
 * @param parentBounds the bounds to test for intersection against
 *        (specified in parent's coordinate system)
 * @return true if this node's full bounds intersect the given bounds.
 */
public boolean fullIntersects(Rectangle2D parentBounds) {
    // A null rectangle is treated as "everything": always intersects.
    return parentBounds == null || getFullBoundsReference().intersects(parentBounds);
}
// Bounds Damage Management - Methods used to invalidate and validate
// the bounds of nodes.
/**
 * Return true if this node's bounds may change at any time. The default
 * behavior is to return false, subclasses that override this method to
 * return true should also override getBoundsReference() and compute their
 * volatile bounds there before returning the reference.
 *
 * @return true if this node has volatile bounds
 */
protected boolean getBoundsVolatile() {
    return false;
}
/**
 * Return true if this node has a child with volatile bounds.
 *
 * @return true if this node has a child with volatile bounds
 */
protected boolean getChildBoundsVolatile() {
    return childBoundsVolatile;
}
/**
 * Set if this node has a child with volatile bounds. This should normally
 * be managed automatically by the bounds validation process.
 *
 * @param childBoundsVolatile true if this node has a descendent with
 *        volatile bounds
 */
protected void setChildBoundsVolatile(boolean childBoundsVolatile) {
    this.childBoundsVolatile = childBoundsVolatile;
}
/**
 * Return true if this node's bounds have recently changed. This flag will
 * be reset on the next call of validateFullBounds.
 *
 * @return true if this node's bounds have changed.
 */
protected boolean getBoundsChanged() {
    return boundsChanged;
}
/**
 * Set the bounds changed flag. This flag will be reset on the next call of
 * validateFullBounds.
 *
 * @param boundsChanged true if this node's bounds have changed.
 */
protected void setBoundsChanged(boolean boundsChanged) {
    this.boundsChanged = boundsChanged;
}
/**
 * Return true if the full bounds of this node are invalid. This means that
 * the full bounds of this node have changed and need to be recomputed.
 *
 * @return true if the full bounds of this node are invalid
 */
protected boolean getFullBoundsInvalid() {
    return fullBoundsInvalid;
}
/**
 * Set the full bounds invalid flag. This flag is set when the full bounds
 * of this node need to be recomputed as is the case when this node is
 * transformed or when one of this node's children changes geometry.
 *
 * @param fullBoundsInvalid true if the full bounds need recomputation
 */
protected void setFullBoundsInvalid(boolean fullBoundsInvalid) {
    this.fullBoundsInvalid = fullBoundsInvalid;
}
/**
 * Return true if one of this node's descendents has invalid bounds.
 *
 * @return true if a descendent has invalid bounds
 */
protected boolean getChildBoundsInvalid() {
    return childBoundsInvalid;
}
/**
 * Set the flag indicating that one of this node's descendents has invalid
 * bounds.
 *
 * @param childBoundsInvalid true if a descendent has invalid bounds
 */
protected void setChildBoundsInvalid(boolean childBoundsInvalid) {
    this.childBoundsInvalid = childBoundsInvalid;
}
/**
 * This method should be called when the bounds of this node are changed. It
 * invalidates the full bounds of this node, and also notifies each of this
 * node's children that their parent's bounds have changed. As a result of
 * this method getting called this node's layoutChildren will be called.
 */
public void signalBoundsChanged() {
    invalidateFullBounds();
    setBoundsChanged(true);
    firePropertyChange(PROPERTY_CODE_BOUNDS, PROPERTY_BOUNDS, null, bounds);
    // Let every child react to the parent's geometry change.
    int count = getChildrenCount();
    for (int i = 0; i < count; i++) {
        PNode each = (PNode) children.get(i);
        each.parentBoundsChanged();
    }
}
/**
 * Invalidate this node's layout, so that later layoutChildren will get
 * called.
 */
public void invalidateLayout() {
    // Layout happens during full-bounds validation, so invalidating the
    // full bounds is sufficient to trigger layoutChildren.
    invalidateFullBounds();
}
/**
 * A notification that the bounds of this node's parent have changed.
 */
protected void parentBoundsChanged() {
    // Intentionally empty; subclasses may override.
}
/**
 * Invalidates the full bounds of this node, and sets the child bounds
 * invalid flag on each of this node's ancestors.
 */
public void invalidateFullBounds() {
    setFullBoundsInvalid(true);
    // Walk up, marking ancestors; stop early once an ancestor is already
    // marked, since everything above it must be marked too.
    PNode n = parent;
    while (n != null && !n.getChildBoundsInvalid()) {
        n.setChildBoundsInvalid(true);
        n = n.parent;
    }
    // Optional global hook for observers of scene-graph invalidation.
    if (SCENE_GRAPH_DELEGATE != null)
        SCENE_GRAPH_DELEGATE.nodeFullBoundsInvalidated(this);
}
/**
 * This method is called to validate the bounds of this node and all of its
 * descendents. It returns true if this node's bounds or the bounds of any of
 * its descendents are marked as volatile.
 *
 * @return true if this node or any of its descendents have volatile bounds
 */
protected boolean validateFullBounds() {
    boolean boundsVolatile = getBoundsVolatile();
    // 1. Only compute new bounds if invalid flags are set.
    if (fullBoundsInvalid || childBoundsInvalid || boundsVolatile || childBoundsVolatile) {
        // 2. If my bounds are volatile and they have not been changed then
        // signal a change.
        // For most cases this will do nothing, but if a node's bounds depend
        // on its model, then
        // validate bounds has the responsibility of making the bounds match
        // the model's value.
        // For example PPath's validateBounds method makes sure that the
        // bounds are equal to the
        // bounds of the GeneralPath model.
        if (boundsVolatile && !boundsChanged) {
            signalBoundsChanged();
        }
        // 3. If the bounds of one of my descendents are invalid then
        // validate the bounds of all of my children.
        if (childBoundsInvalid || childBoundsVolatile) {
            childBoundsVolatile = false;
            int count = getChildrenCount();
            for (int i = 0; i < count; i++) {
                PNode each = (PNode) children.get(i);
                childBoundsVolatile |= each.validateFullBounds();
            }
        }
        // 4. Now that my children's bounds are valid and my own bounds are
        // valid run any layout algorithm here. Note that if you try to
        // layout volatile
        // children piccolo will most likely start a "soft" infinite loop.
        // It won't freeze
        // your program, but it will make an infinite number of calls to
        // SwingUtilities
        // invoke later. You don't want to do that.
        layoutChildren();
        // 5. If the full bounds cache is invalid then recompute the full
        // bounds cache here after our own bounds and the children's bounds
        // have been computed above.
        if (fullBoundsInvalid) {
            double oldX = fullBoundsCache.x;
            double oldY = fullBoundsCache.y;
            double oldWidth = fullBoundsCache.width;
            double oldHeight = fullBoundsCache.height;
            boolean oldEmpty = fullBoundsCache.isEmpty();
            // 6. This will call getFullBoundsReference on all of the
            // children. So if the above
            // layoutChildren method changed the bounds of any of the
            // children they will be
            // validated again here.
            fullBoundsCache = computeFullBounds(fullBoundsCache);
            boolean fullBoundsChanged = fullBoundsCache.x != oldX || fullBoundsCache.y != oldY
                    || fullBoundsCache.width != oldWidth || fullBoundsCache.height != oldHeight
                    || fullBoundsCache.isEmpty() != oldEmpty;
            // 7. If the new full bounds cache differs from the previous
            // cache then
            // tell our parent to invalidate their full bounds. This is how
            // bounds changes
            // deep in the tree percolate up.
            if (fullBoundsChanged) {
                if (parent != null)
                    parent.invalidateFullBounds();
                firePropertyChange(PROPERTY_CODE_FULL_BOUNDS, PROPERTY_FULL_BOUNDS, null, fullBoundsCache);
                // 8. If our paint was invalid make sure to repaint our old
                // full bounds. The
                // new bounds will be computed later in the validatePaint
                // pass.
                if (paintInvalid && !oldEmpty) {
                    TEMP_REPAINT_BOUNDS.setRect(oldX, oldY, oldWidth, oldHeight);
                    repaintFrom(TEMP_REPAINT_BOUNDS, this);
                }
            }
        }
        // 9. Clear the invalid bounds flags.
        boundsChanged = false;
        fullBoundsInvalid = false;
        childBoundsInvalid = false;
    }
    return boundsVolatile || childBoundsVolatile;
}
/**
 * Nodes that apply layout constraints to their children should override
 * this method and do the layout there. Called during full-bounds
 * validation, after the children's bounds have been validated.
 */
protected void layoutChildren() {
    // Intentionally empty; subclasses may override.
}
// Node Transform - Methods to manipulate the node's transform.
// Each node has a transform that is used to define the node's
// local coordinate system; i.e. it is applied before picking and
// rendering the node.
// The usual way to move nodes about on the canvas is to manipulate
// this transform, as opposed to changing the bounds of the
// node.
// Since this transform defines the local coordinate system of this
// node, the following methods will affect the global position of both
// this node and all of its descendents.
/**
 * Returns the rotation applied by this node's transform in radians. This
 * rotation affects this node and all its descendents. The value returned
 * will be between 0 and 2pi radians.
 *
 * @return rotation in radians.
 */
public double getRotation() {
    // No transform means no rotation has been applied.
    return (transform == null) ? 0 : transform.getRotation();
}
/**
 * Sets the rotation of this node's transform in radians. This will affect
 * this node and all its descendents.
 *
 * @param theta rotation in radians
 */
public void setRotation(double theta) {
    // Apply only the delta needed to reach the requested rotation.
    rotate(theta - getRotation());
}
/**
 * Rotates this node by theta (in radians) about the 0,0 point. This will
 * affect this node and all its descendents.
 *
 * @param theta the amount to rotate by in radians
 */
public void rotate(double theta) {
    rotateAboutPoint(theta, 0, 0);
}
/**
 * Rotates this node by theta (in radians), and then translates the node so
 * that the x, y position of its fullBounds stays constant.
 *
 * @param theta the amount to rotate by in radians
 */
public void rotateInPlace(double theta) {
    // Remember where the full bounds were anchored before rotating.
    PBounds fullBounds = getFullBoundsReference();
    double anchorX = fullBounds.x;
    double anchorY = fullBounds.y;
    rotateAboutPoint(theta, 0, 0);
    // Shift back so the full bounds' origin is unchanged.
    fullBounds = getFullBoundsReference();
    offset(anchorX - fullBounds.x, anchorY - fullBounds.y);
}
/**
 * Rotates this node by theta (in radians) about the given point. This will
 * affect this node and all its descendents.
 *
 * @param theta the amount to rotate by in radians
 * @param point the point (in local coordinates) to rotate about
 */
public void rotateAboutPoint(double theta, Point2D point) {
    rotateAboutPoint(theta, point.getX(), point.getY());
}
/**
 * Rotates this node by theta (in radians) about the given point. This will
 * affect this node and all its descendents.
 *
 * @param theta the amount to rotate by in radians
 * @param x the x coordinate of the point to rotate about
 * @param y the y coordinate of the point to rotate about
 */
public void rotateAboutPoint(double theta, double x, double y) {
    // getTransformReference(true) lazily creates the transform if needed.
    getTransformReference(true).rotate(theta, x, y);
    invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_TRANSFORM, PROPERTY_TRANSFORM, null, transform);
}
/**
 * Return the total amount of rotation applied to this node by its own
 * transform together with the transforms of all its ancestors. The value
 * returned will be between 0 and 2pi radians.
 *
 * @return the total amount of rotation applied to this node in radians
 */
public double getGlobalRotation() {
    return getLocalToGlobalTransform(null).getRotation();
}
/**
 * Set the global rotation (in radians) of this node. This is implemented by
 * rotating this node's transform the required amount so that the node's
 * global rotation is as requested.
 *
 * @param theta the amount to rotate by in radians relative to the global
 *        coord system.
 */
public void setGlobalRotation(double theta) {
    // Compensate for any rotation the ancestors contribute.
    double inheritedRotation = (parent == null) ? 0 : parent.getGlobalRotation();
    setRotation(theta - inheritedRotation);
}
/**
 * Return the scale applied by this node's transform. The scale affects
 * this node and all its descendents.
 *
 * @return scale applied by this node's transform.
 */
public double getScale() {
    // No transform means no scaling has been applied.
    return (transform == null) ? 1 : transform.getScale();
}
/**
 * Set the scale of this node's transform. The scale will affect this node
 * and all its descendents.
 *
 * @param scale the scale to set the transform to
 * @throws IllegalArgumentException if scale is 0, since the resulting
 *         transform would not be invertible
 */
public void setScale(double scale) {
    if (scale == 0) {
        // IllegalArgumentException is the idiomatic type for a bad
        // argument; it subclasses RuntimeException, so existing callers
        // catching RuntimeException still work.
        throw new IllegalArgumentException("Can't set scale to 0");
    }
    // Apply only the ratio needed to reach the requested scale.
    scale(scale / getScale());
}
/**
 * Scale this node's transform by the given amount. This will affect this
 * node and all of its descendents.
 *
 * @param scale the amount to scale by
 */
public void scale(double scale) {
    scaleAboutPoint(scale, 0, 0);
}
/**
 * Scale this node's transform by the given amount about the specified point.
 * This will affect this node and all of its descendents.
 *
 * @param scale the amount to scale by
 * @param point the point (in local coordinates) to scale about
 */
public void scaleAboutPoint(double scale, Point2D point) {
    scaleAboutPoint(scale, point.getX(), point.getY());
}
/**
 * Scale this node's transform by the given amount about the specified point.
 * This will affect this node and all of its descendents.
 *
 * @param scale the amount to scale by
 * @param x the x coordinate of the point to scale about
 * @param y the y coordinate of the point to scale about
 */
public void scaleAboutPoint(double scale, double x, double y) {
    // getTransformReference(true) lazily creates the transform if needed.
    getTransformReference(true).scaleAboutPoint(scale, x, y);
    invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_TRANSFORM, PROPERTY_TRANSFORM, null, transform);
}
/**
 * Return the global scale that is being applied to this node by its
 * transform together with the transforms of all its ancestors.
 *
 * @return the cumulative scale from the root down to this node
 */
public double getGlobalScale() {
    return getLocalToGlobalTransform(null).getScale();
}
/**
 * Set the global scale of this node. This is implemented by scaling this
 * node's transform the required amount so that the node's global scale is as
 * requested.
 *
 * @param scale the desired global scale
 */
public void setGlobalScale(double scale) {
    // Compensate for any scale the ancestors contribute.
    double inheritedScale = (parent == null) ? 1 : parent.getGlobalScale();
    setScale(scale / inheritedScale);
}
/**
 * Return the x offset (the m02 entry of the transform) applied to this
 * node, in the parent coordinate system; 0 when no transform has been set.
 */
public double getXOffset() {
    if (transform == null)
        return 0;
    return transform.getTranslateX();
}
/**
 * Return the y offset (the m12 entry of the transform) applied to this
 * node, in the parent coordinate system; 0 when no transform has been set.
 */
public double getYOffset() {
    if (transform == null)
        return 0;
    return transform.getTranslateY();
}
/**
 * Return the offset that is being applied to this node by its transform.
 * This offset affects this node and all of its descendents and is specified
 * in the parent coordinate system. This returns the values that are in the
 * m02 and m12 positions in the affine transform.
 *
 * @return a point representing the x and y offset
 */
public Point2D getOffset() {
    // No transform means a zero offset.
    return (transform == null)
            ? new Point2D.Double()
            : new Point2D.Double(transform.getTranslateX(), transform.getTranslateY());
}
/**
 * Set the offset that is being applied to this node by its transform. This
 * offset affects this node and all of its descendents and is specified in
 * the node's parent coordinate system. This directly sets the values of the
 * m02 and m12 positions in the affine transform. Unlike "PNode.translate()"
 * it is not affected by the transform's scale.
 *
 * @param point a point representing the x and y offset
 */
public void setOffset(Point2D point) {
    setOffset(point.getX(), point.getY());
}
/**
 * Set the offset that is being applied to this node by its transform. This
 * offset affects this node and all of its descendents and is specified in
 * the node's parent coordinate system. This directly sets the values of the
 * m02 and m12 positions in the affine transform. Unlike "PNode.translate()"
 * it is not affected by the transform's scale.
 *
 * @param x amount of x offset
 * @param y amount of y offset
 */
public void setOffset(double x, double y) {
    // getTransformReference(true) lazily creates the transform if needed.
    getTransformReference(true).setOffset(x, y);
    invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_TRANSFORM, PROPERTY_TRANSFORM, null, transform);
}
/**
 * Offset this node relative to the parent's coordinate system; this is NOT
 * affected by this node's current scale or rotation. This is implemented by
 * directly adding dx to the m02 position and dy to the m12 position in the
 * affine transform.
 *
 * @param dx amount to add to the x offset
 * @param dy amount to add to the y offset
 */
public void offset(double dx, double dy) {
    // Ensure the transform exists before reading its translation.
    getTransformReference(true);
    setOffset(transform.getTranslateX() + dx, transform.getTranslateY() + dy);
}
/**
 * Translate this node's transform by the given amount, using the standard
 * affine transform translate method. This translation affects this node and
 * all of its descendents.
 *
 * @param dx amount to translate in x (in local coordinates)
 * @param dy amount to translate in y (in local coordinates)
 */
public void translate(double dx, double dy) {
    // getTransformReference(true) lazily creates the transform if needed.
    getTransformReference(true).translate(dx, dy);
    invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_TRANSFORM, PROPERTY_TRANSFORM, null, transform);
}
/**
 * Return the global translation applied to this node by its transform
 * combined with the transforms of all of its ancestors.
 *
 * @return the translation of this node in global coordinates
 */
public Point2D getGlobalTranslation() {
    Point2D translation = getOffset();
    if (parent != null) {
        // localToGlobal mutates the point in place.
        parent.localToGlobal(translation);
    }
    return translation;
}
/**
 * Set the global translation of this node by converting the requested global
 * point into the parent's local coordinates and applying it as this node's
 * offset. Note that the given point is mutated in place by the conversion.
 *
 * @param globalPoint the desired global translation
 */
public void setGlobalTranslation(Point2D globalPoint) {
    if (parent != null) {
        AffineTransform globalToLocal = parent.getGlobalToLocalTransform(null);
        globalToLocal.transform(globalPoint, globalPoint);
    }
    setOffset(globalPoint);
}
/**
 * Concatenate the given transform onto this node's transform.
 *
 * @param aTransform the transform to apply
 */
public void transformBy(AffineTransform aTransform) {
    PAffineTransform t = getTransformReference(true);
    t.concatenate(aTransform);
    invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_TRANSFORM, PROPERTY_TRANSFORM, null, transform);
}
/**
 * Linearly interpolate between a and b based on t, computing
 * a + t * (b - a). The result is a when t = 0 and b when t = 1.
 *
 * @param t interpolation parameter, typically in [0, 1]
 * @param a the value returned when t = 0
 * @param b the value returned when t = 1
 * @return the interpolated value
 */
static public double lerp(double t, double a, double b) {
    return a + t * (b - a);
}
/**
 * Calculate and animate the transform needed to position this node relative
 * to the specified bounding box. srcPt names an anchor on this node and
 * destPt an anchor on destBounds, both normalized to the unit square
 * (0, 0) - (1, 1); the node is moved so the two anchors coincide. Useful
 * for layout, e.g. aligning A's upper-right corner with B's upper-left:
 *
 * <PRE>
 * Point2D srcPt = new Point2D.Double(1.0, 0.0);
 * Point2D destPt = new Point2D.Double(0.0, 0.0);
 * A.animateToRelativePosition(srcPt, destPt, B.getGlobalBounds(), 750);
 * </PRE>
 *
 * @param srcPt the anchor point on this node (normalized to a unit square)
 * @param destPt the anchor point on destination bounds (normalized to a
 *            unit square)
 * @param destBounds the bounds (in global coordinates) to position against
 * @param millis number of milliseconds over which to perform the animation
 * @return the scheduled activity, or null if this node has no parent
 */
public PActivity animateToRelativePosition(Point2D srcPt, Point2D destPt, Rectangle2D destBounds, int millis) {
    if (parent == null) {
        return null;
    }

    // Resolve both anchor points to global coordinates.
    Rectangle2D srcBounds = getGlobalFullBounds();
    double srcX = lerp(srcPt.getX(), srcBounds.getX(), srcBounds.getX() + srcBounds.getWidth());
    double srcY = lerp(srcPt.getY(), srcBounds.getY(), srcBounds.getY() + srcBounds.getHeight());
    double destX = lerp(destPt.getX(), destBounds.getX(), destBounds.getX() + destBounds.getWidth());
    double destY = lerp(destPt.getY(), destBounds.getY(), destBounds.getY() + destBounds.getHeight());

    // Convert the translation vector into this node's local coordinates.
    Point2D src = new Point2D.Double(srcX, srcY);
    globalToLocal(src);
    Point2D dest = new Point2D.Double(destX, destY);
    globalToLocal(dest);
    double dx = dest.getX() - src.getX();
    double dy = dest.getY() - src.getY();

    // Animate to the current transform shifted by (dx, dy).
    PAffineTransform target = new PAffineTransform(getTransformReference(true));
    target.translate(dx, dy);
    return animateToTransform(target, millis);
}
/**
 * @deprecated in favor of {@link #animateToRelativePosition}.
 *
 * Calculates and animates the transform needed to position this node
 * relative to the specified bounding box. srcPt names an anchor on this
 * node and destPt an anchor on destBounds, both normalized to the unit
 * square (0, 0) - (1, 1); the node is moved so the two anchors coincide.
 *
 * @param srcPt the anchor point on this node (normalized to a unit square)
 * @param destPt the anchor point on destination bounds (normalized to a
 *            unit square)
 * @param destBounds the bounds (in global coordinates) to position against
 * @param millis number of milliseconds over which to perform the animation
 */
public void position(Point2D srcPt, Point2D destPt, Rectangle2D destBounds, int millis) {
    animateToRelativePosition(srcPt, destPt, destBounds, millis);
}
/**
 * Return a copy of the transform associated with this node. If the node has
 * no transform yet, a fresh identity transform is returned.
 *
 * @return copy of this node's transform
 */
public PAffineTransform getTransform() {
    return transform == null
        ? new PAffineTransform()
        : (PAffineTransform) transform.clone();
}
/**
 * Return a reference to the transform associated with this node. The
 * returned transform must not be modified. PNode transforms are created
 * lazily, so this may return null unless createNewTransformIfNull is true,
 * in which case a new transform is created and stored on the node first.
 *
 * @param createNewTransformIfNull create (and keep) a new transform rather
 *            than returning null
 * @return reference to this node's transform, possibly null
 */
public PAffineTransform getTransformReference(boolean createNewTransformIfNull) {
    if (createNewTransformIfNull && transform == null) {
        transform = new PAffineTransform();
    }
    return transform;
}
/**
 * Return an inverted copy of the transform associated with this node. If
 * the node has no transform an identity transform is returned; if the
 * transform is not invertible the exception is printed and null is
 * returned.
 *
 * @return inverted copy of this node's transform, or null if noninvertible
 */
public PAffineTransform getInverseTransform() {
    if (transform == null) {
        return new PAffineTransform();
    }
    try {
        return new PAffineTransform(transform.createInverse());
    }
    catch (NoninvertibleTransformException e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Set the transform applied to this node. Passing null clears the node's
 * transform entirely.
 *
 * @param newTransform the new transform value, may be null
 */
public void setTransform(AffineTransform newTransform) {
    if (newTransform == null) {
        transform = null;
    }
    else {
        getTransformReference(true).setTransform(newTransform);
    }
    invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_TRANSFORM, PROPERTY_TRANSFORM, null, transform);
}
// Paint Damage Management - Methods used to invalidate the areas of
// the screen that this node appears in so that they will later get
// painted.
// Generally you will not need to call these invalidate methods
// when starting out with Piccolo because methods such as setPaint
// already automatically call them for you. You will need to call
// them when you start creating your own nodes.
// When you do create you own nodes the only method that you will
// normally need to call is invalidatePaint. This method marks the
// nodes as having invalid paint, the root node's UI cycle will then
// later discover this damage and report it to the Java repaint manager.
// Repainting is normally done with PNode.invalidatePaint() instead of
// directly calling PNode.repaint() because PNode.repaint() requires
// the nodes bounds to be computed right away. But with invalidatePaint
// the bounds computation can be delayed until the end of the root's UI
// cycle, and this can add up to a bit savings when modifying a
// large number of nodes all at once.
// The other methods here will rarely be called except internally
// from the framework.
/**
 * Return true if this nodes paint is invalid, in which case the node needs
 * to be repainted. The flag is set by invalidatePaint() and cleared by
 * validateFullPaint().
 *
 * @return true if this node needs to be repainted
 */
public boolean getPaintInvalid() {
    return paintInvalid;
}
/**
 * Mark this node as having invalid paint. If this is set the node will
 * later be repainted. Note: this method is most often used internally;
 * client code normally calls invalidatePaint() instead, which also marks
 * the ancestors.
 *
 * @param paintInvalid true if this node should be repainted
 */
public void setPaintInvalid(boolean paintInvalid) {
    this.paintInvalid = paintInvalid;
}
/**
 * Return true if this node has a child with invalid paint. Used during
 * validateFullPaint() to decide whether the children need to be visited.
 *
 * @return true if this node has a child with invalid paint
 */
public boolean getChildPaintInvalid() {
    return childPaintInvalid;
}
/**
 * Mark this node as having a child with invalid paint. Maintained
 * automatically by invalidatePaint() / validateFullPaint().
 *
 * @param childPaintInvalid true if this node has a child with invalid paint
 */
public void setChildPaintInvalid(boolean childPaintInvalid) {
    this.childPaintInvalid = childPaintInvalid;
}
/**
 * Invalidate this node's paint, and mark all of its ancestors as having a
 * node with invalid paint. Walking up stops as soon as an ancestor is
 * already marked, since its own ancestors must then be marked too.
 */
public void invalidatePaint() {
    setPaintInvalid(true);

    for (PNode ancestor = parent; ancestor != null; ancestor = ancestor.parent) {
        if (ancestor.getChildPaintInvalid()) {
            break;
        }
        ancestor.setChildPaintInvalid(true);
    }

    if (SCENE_GRAPH_DELEGATE != null) {
        SCENE_GRAPH_DELEGATE.nodePaintInvalidated(this);
    }
}
/**
 * Repaint this node and any of its descendents if they have invalid paint,
 * clearing the invalid-paint flags as it goes.
 */
public void validateFullPaint() {
    if (getPaintInvalid()) {
        repaint();
        setPaintInvalid(false);
    }

    if (getChildPaintInvalid()) {
        int childCount = getChildrenCount();
        for (int i = 0; i < childCount; i++) {
            PNode child = (PNode) children.get(i);
            child.validateFullPaint();
        }
        setChildPaintInvalid(false);
    }
}
/**
 * Mark the area on the screen represented by this nodes full bounds as
 * needing a repaint. The full bounds are copied into the shared
 * TEMP_REPAINT_BOUNDS scratch rectangle (presumably to avoid allocating a
 * new PBounds per repaint — not safe for concurrent use if so; confirm)
 * before being passed up the tree.
 */
public void repaint() {
    TEMP_REPAINT_BOUNDS.setRect(getFullBoundsReference());
    repaintFrom(TEMP_REPAINT_BOUNDS, this);
}
/**
 * Pass the given repaint request up the tree, so that any cameras can
 * invalidate that region on their associated canvas. The bounds are
 * converted to each parent's coordinate system on the way up; an invisible
 * node stops requests that originate from itself.
 *
 * @param localBounds the bounds to repaint
 * @param childOrThis if childOrThis does not equal this then this nodes
 *            transform will be applied to the localBounds param
 */
public void repaintFrom(PBounds localBounds, PNode childOrThis) {
    if (parent == null) {
        return;
    }
    if (childOrThis != this) {
        localToParent(localBounds);
    }
    else if (!getVisible()) {
        // This node itself is invisible, so nothing on screen changed.
        return;
    }
    parent.repaintFrom(localBounds, this);
}
// Occluding - Methods to support occluding optimization. Not yet
// complete.
// Occlusion support is not yet complete (see section comment above);
// always reports non-opaque so nothing is ever skipped as occluded.
public boolean isOpaque(Rectangle2D boundary) {
    return false;
}
// Return true if this node has been marked as occluded (fully hidden by
// other nodes), in which case fullPaint() skips painting it.
public boolean getOccluded() {
    return occluded;
}
// Mark this node as occluded; an occluded node is not painted by
// fullPaint(), though its children still are.
public void setOccluded(boolean isOccluded) {
    occluded = isOccluded;
}
// Painting - Methods for painting this node and its children
// Painting is how a node defines its visual representation on the
// screen, and is done in the local coordinate system of the node.
// The default painting behavior is to first paint the node, and
// then paint the node's children on top of the node. If a node
// wants specialized painting behavior it can override:
// paint() - Painting here will happen before the children
// are painted, so the children will be painted on top of painting done
// here.
// paintAfterChildren() - Painting here will happen after the children
// are painted, so it will paint on top of them.
// Note that you should not normally need to override fullPaint().
// The visible flag can be used to make a node invisible so that
// it will never get painted.
/**
 * Return true if this node is visible, that is if it will paint itself and
 * descendents. Invisible nodes are skipped entirely by fullPaint().
 *
 * @return true if this node and its descendents are visible.
 */
public boolean getVisible() {
    return visible;
}
/**
 * Set the visibility of this node and its descendents. Hiding a node
 * repaints its former screen area first so it disappears cleanly.
 *
 * @param isVisible true if this node and its descendents are visible
 */
public void setVisible(boolean isVisible) {
    if (getVisible() == isVisible) {
        return;
    }
    if (!isVisible) {
        // Damage the area the node occupied before it becomes invisible.
        repaint();
    }
    visible = isVisible;
    firePropertyChange(PROPERTY_CODE_VISIBLE, PROPERTY_VISIBLE, null, null);
    invalidatePaint();
}
/**
 * Return the paint used to paint this node. This value may be null, in
 * which case the default paint() implementation draws nothing.
 */
public Paint getPaint() {
    return paint;
}
/**
 * Set the paint used to paint this node. This value may be set to null.
 * Setting the identical paint object again is a no-op.
 */
public void setPaint(Paint newPaint) {
    if (paint == newPaint) {
        return;
    }
    Paint oldPaint = paint;
    paint = newPaint;
    invalidatePaint();
    firePropertyChange(PROPERTY_CODE_PAINT, PROPERTY_PAINT, oldPaint, paint);
}
/**
 * Return the transparency used when painting this node. Note that this
 * transparency is also applied to all of the node's descendents.
 */
public float getTransparency() {
    return transparency;
}
/**
 * Set the transparency used to paint this node. Note that this transparency
 * applies to this node and all of its descendents. Setting the current
 * value again is a no-op.
 *
 * @param zeroToOne the new transparency, 0 = fully transparent, 1 = opaque
 */
public void setTransparency(float zeroToOne) {
    if (transparency == zeroToOne) {
        return;
    }
    transparency = zeroToOne;
    invalidatePaint();
    firePropertyChange(PROPERTY_CODE_TRANSPARENCY, PROPERTY_TRANSPARENCY, null, null);
}
/**
 * Paint this node behind any of its children nodes. The default behavior
 * fills the node's bounds with its paint, if one is set. Subclasses that
 * define a different appearance should override this method.
 *
 * @param paintContext the paint context to use for painting the node
 */
protected void paint(PPaintContext paintContext) {
    if (paint == null) {
        return;
    }
    Graphics2D g2 = paintContext.getGraphics();
    g2.setPaint(paint);
    g2.fill(getBoundsReference());
}
/**
 * Paint this node and all of its descendents. Most subclasses do not need
 * to override this method; they should override {@code paint} or
 * {@code paintAfterChildren} instead. Invisible nodes and nodes outside the
 * local clip are skipped entirely.
 *
 * @param paintContext the paint context to use for painting this node and
 *            its children
 */
public void fullPaint(PPaintContext paintContext) {
    if (!getVisible() || !fullIntersects(paintContext.getLocalClip())) {
        return;
    }

    paintContext.pushTransform(transform);
    paintContext.pushTransparency(transparency);

    // Occluded nodes skip their own paint but still paint children.
    if (!getOccluded()) {
        paint(paintContext);
    }

    int childCount = getChildrenCount();
    for (int i = 0; i < childCount; i++) {
        PNode child = (PNode) children.get(i);
        child.fullPaint(paintContext);
    }

    paintAfterChildren(paintContext);

    paintContext.popTransparency(transparency);
    paintContext.popTransform(transform);
}
/**
 * Subclasses that wish to do additional painting after their children are
 * painted should override this method and do that painting here. The
 * default implementation does nothing.
 *
 * @param paintContext the paint context to use for painting after the
 * children are painted
 */
protected void paintAfterChildren(PPaintContext paintContext) {
}
/**
 * Return a new Image representing this node and all of its children. The
 * image size will be equal to the size of this nodes full bounds, rounded
 * up to whole pixels.
 *
 * @return a new image representing this node and its descendents
 */
public Image toImage() {
    PBounds fullBounds = getFullBoundsReference();
    int w = (int) Math.ceil(fullBounds.getWidth());
    int h = (int) Math.ceil(fullBounds.getHeight());
    return toImage(w, h, null);
}
/**
 * Return a new Image of the requested size representing this node and all
 * of its children. The requested width/height are shrunk on one axis so the
 * node's aspect ratio is preserved. If backGroundPaint is null the
 * resulting image will have transparent regions, otherwise those regions
 * will be filled with the backgroundPaint.
 *
 * @param width pixel width of the resulting image
 * @param height pixel height of the resulting image
 * @param backGroundPaint paint used to fill the background, may be null
 * @return a new image representing this node and its descendents
 */
public Image toImage(int width, int height, Paint backGroundPaint) {
    PBounds imageBounds = getFullBounds();
    imageBounds.expandNearestIntegerDimensions();

    // Fit the node into the requested size while keeping its aspect ratio.
    double widthScale = width / imageBounds.width;
    double heightScale = height / imageBounds.height;
    if (widthScale < heightScale) {
        height = (int) (imageBounds.height * widthScale);
    }
    else {
        width = (int) (imageBounds.width * heightScale);
    }

    GraphicsConfiguration graphicsConfiguration = GraphicsEnvironment.getLocalGraphicsEnvironment()
            .getDefaultScreenDevice().getDefaultConfiguration();
    BufferedImage result = graphicsConfiguration.createCompatibleImage(width, height, Transparency.TRANSLUCENT);
    return toImage(result, backGroundPaint);
}
/**
 * Paint a representation of this node into the specified buffered image.
 * If backGroundPaint is null, the image will not be filled with a color
 * prior to rendering.
 *
 * @param image the image to render into
 * @param backGroundPaint paint used to fill the background, may be null
 * @return a rendering of this node and its descendents into the specified
 *         image
 */
public Image toImage(BufferedImage image, Paint backGroundPaint) {
    int width = image.getWidth();
    int height = image.getHeight();
    Graphics2D g2 = image.createGraphics();

    if (backGroundPaint != null) {
        g2.setPaint(backGroundPaint);
        g2.fillRect(0, 0, width, height);
    }

    // Reuse the print() pipeline: pretend the image is a single page whose
    // imageable area is the whole surface.
    Paper paper = new Paper();
    paper.setSize(width, height);
    paper.setImageableArea(0, 0, width, height);
    PageFormat pageFormat = new PageFormat();
    pageFormat.setPaper(paper);
    print(g2, pageFormat, 0);

    return image;
}
/**
 * Constructs a new PrinterJob, allows the user to select which printer to
 * print to, and then prints the node. Printing errors are reported to
 * stdout/stderr rather than propagated.
 */
public void print() {
    PrinterJob job = PrinterJob.getPrinterJob();
    PageFormat pageFormat = job.defaultPage();

    Book book = new Book();
    book.append(this, pageFormat);
    job.setPageable(book);

    // Only print if the user confirmed the dialog.
    if (!job.printDialog()) {
        return;
    }
    try {
        job.print();
    }
    catch (Exception e) {
        System.out.println("Error Printing");
        e.printStackTrace();
    }
}
/**
 * Prints the node into the given Graphics context using the specified
 * format. The zero based index of the requested page is specified by
 * pageIndex. If the requested page does not exist then this method returns
 * NO_SUCH_PAGE; otherwise PAGE_EXISTS is returned. The node is scaled
 * uniformly so its full bounds fit inside the page's imageable area.
 *
 * @param graphics the context into which the node is drawn
 * @param pageFormat the size and orientation of the page
 * @param pageIndex the zero based index of the page to be drawn
 * @return PAGE_EXISTS for page 0, NO_SUCH_PAGE otherwise
 */
public int print(Graphics graphics, PageFormat pageFormat, int pageIndex) {
    // The whole node always renders on a single page.
    if (pageIndex != 0) {
        return NO_SUCH_PAGE;
    }

    Graphics2D g2 = (Graphics2D) graphics;
    PBounds imageBounds = getFullBounds();
    imageBounds.expandNearestIntegerDimensions();

    g2.setClip(0, 0, (int) pageFormat.getWidth(), (int) pageFormat.getHeight());
    g2.translate(pageFormat.getImageableX(), pageFormat.getImageableY());

    // Uniform scale so the node's full bounds fit in the imageable bounds.
    double scale = pageFormat.getImageableWidth() / imageBounds.getWidth();
    double heightScale = pageFormat.getImageableHeight() / imageBounds.getHeight();
    if (heightScale < scale) {
        scale = heightScale;
    }
    g2.scale(scale, scale);
    g2.translate(-imageBounds.x, -imageBounds.y);

    PPaintContext pc = new PPaintContext(g2);
    pc.setRenderQuality(PPaintContext.HIGH_QUALITY_RENDERING);
    fullPaint(pc);
    return PAGE_EXISTS;
}
// Picking - Methods for picking this node and its children.
// Picking is used to determine the node that intersects a point or
// rectangle on the screen. It is most frequently used by the
// PInputManager to determine the node that the cursor is over.
// The intersects() method is used to determine if a node has
// been picked or not. The default implementation just tests to see
// if the pick bounds intersects the bounds of the node. Subclasses
// whose geometry (a circle for example) does not match up exactly with
// the bounds should override the intersects() method.
// The default picking behavior is to first try to pick the nodes
// children, and then try to pick the nodes own bounds. If a node
// wants specialized picking behavior it can override:
// pick() - Pick nodes here that should be picked before the nodes
// children are picked.
// pickAfterChildren() - Pick nodes here that should be picked after the
// node's children are picked.
// Note that fullPick should not normally be overridden.
// The pickable and childrenPickable flags can be used to make a
// node or it children not pickable even if their geometry does
// intersect the pick bounds.
/**
 * Return true if this node is pickable. Only pickable nodes can receive
 * input events. Nodes are pickable by default.
 *
 * @return true if this node is pickable
 */
public boolean getPickable() {
    return pickable;
}
/**
 * Set the pickable flag for this node. Only pickable nodes can receive
 * input events. Nodes are pickable by default; setting the current value
 * again is a no-op.
 *
 * @param isPickable true if this node is pickable
 */
public void setPickable(boolean isPickable) {
    if (getPickable() == isPickable) {
        return;
    }
    pickable = isPickable;
    firePropertyChange(PROPERTY_CODE_PICKABLE, PROPERTY_PICKABLE, null, null);
}
/**
 * Return true if the children of this node should be picked. If this flag
 * is false then this node will not try to pick its children. Children are
 * pickable by default.
 *
 * @return true if this node tries to pick its children
 */
public boolean getChildrenPickable() {
    return childrenPickable;
}
/**
 * Set the children pickable flag. If this flag is false then this node will
 * not try to pick its children. Children are pickable by default; setting
 * the current value again is a no-op.
 *
 * @param areChildrenPickable true if this node tries to pick its children
 */
public void setChildrenPickable(boolean areChildrenPickable) {
    if (getChildrenPickable() == areChildrenPickable) {
        return;
    }
    childrenPickable = areChildrenPickable;
    firePropertyChange(PROPERTY_CODE_CHILDREN_PICKABLE, PROPERTY_CHILDREN_PICKABLE, null, null);
}
/**
 * Try to pick this node before its children have had a chance to be picked.
 * Nodes that paint on top of their children may want to override this
 * method to test if the pick path intersects that paint. The default
 * implementation never picks.
 *
 * @param pickPath the pick path used for the pick operation
 * @return true if this node was picked
 */
protected boolean pick(PPickPath pickPath) {
    return false;
}
/**
* Try to pick this node and all of its descendents. Most subclasses should
* not need to override this method. Instead they should override
* <code>pick</code> or <code>pickAfterChildren</code>.
*
* @param pickPath the pick path to add the node to if its picked
* @return true if this node or one of its descendents was picked.
*/
public boolean fullPick(PPickPath pickPath) {
if ((getPickable() || getChildrenPickable()) && fullIntersects(pickPath.getPickBounds())) {
pickPath.pushNode(this);
pickPath.pushTransform(transform);
boolean thisPickable = getPickable() && pickPath.acceptsNode(this);
if (thisPickable) {
if (pick(pickPath)) {
return true;
}
}
if (getChildrenPickable()) {
int count = getChildrenCount();
for (int i = count - 1; i >= 0; i
PNode each = (PNode) children.get(i);
if (each.fullPick(pickPath))
return true;
}
}
if (thisPickable) {
if (pickAfterChildren(pickPath)) {
return true;
}
}
pickPath.popTransform(transform);
pickPath.popNode(this);
}
return false;
}
public void findIntersectingNodes(Rectangle2D fullBounds, ArrayList results) {
if (fullIntersects(fullBounds)) {
Rectangle2D localBounds = parentToLocal((Rectangle2D) fullBounds.clone());
if (intersects(localBounds)) {
results.add(this);
}
int count = getChildrenCount();
for (int i = count - 1; i >= 0; i
PNode each = (PNode) children.get(i);
each.findIntersectingNodes(localBounds, results);
}
}
}
/**
 * Try to pick this node after its children have had a chance to be picked.
 * The default implementation picks whenever the pick bounds intersect this
 * node's bounds; most subclasses that define a different geometry will need
 * to override this method.
 *
 * @param pickPath the pick path used for the pick operation
 * @return true if this node was picked
 */
protected boolean pickAfterChildren(PPickPath pickPath) {
    return intersects(pickPath.getPickBounds());
}
// Structure - Methods for manipulating and traversing the
// parent child relationship
// Most of these methods won't need to be overridden by subclasses
// but you will use them frequently to build up your node structures.
/**
* Add a node to be a new child of this node. The new node is added to the
* end of the list of this node's children. If child was previously a child
* of another node, it is removed from that first.
*
* @param child the new child to add to this node
*/
public void addChild(PNode child) {
int insertIndex = getChildrenCount();
if (child.parent == this)
insertIndex
addChild(insertIndex, child);
}
/**
 * Add a node to be a new child of this node at the specified index. If
 * child was previously a child of another node, it is removed from that
 * node first.
 *
 * @param index position in the children list at which to insert
 * @param child the new child to add to this node
 */
public void addChild(int index, PNode child) {
    PNode currentParent = child.getParent();
    if (currentParent != null) {
        currentParent.removeChild(child);
    }
    child.setParent(this);
    getChildrenReference().add(index, child);
    child.invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_CHILDREN, PROPERTY_CHILDREN, null, children);
}
/**
 * Add a collection of nodes to be children of this node. If these nodes
 * already have parents they will first be removed from those parents.
 *
 * @param nodes a collection of nodes to be added to this node
 */
public void addChildren(Collection nodes) {
    for (Iterator it = nodes.iterator(); it.hasNext();) {
        addChild((PNode) it.next());
    }
}
/**
 * Return true if this node is an ancestor of the parameter node.
 *
 * @param node a possible descendent node
 * @return true if this node is an ancestor of the given node
 */
public boolean isAncestorOf(PNode node) {
    for (PNode ancestor = node.parent; ancestor != null; ancestor = ancestor.parent) {
        if (ancestor == this) {
            return true;
        }
    }
    return false;
}
/**
 * Return true if this node is a descendent of the parameter node.
 *
 * @param node a possible ancestor node
 * @return true if this node descends from the given node
 */
public boolean isDescendentOf(PNode node) {
    for (PNode ancestor = parent; ancestor != null; ancestor = ancestor.parent) {
        if (ancestor == node) {
            return true;
        }
    }
    return false;
}
/**
 * Return true if this node descends from the root, i.e. getRoot() finds a
 * PRoot among its ancestors.
 *
 * @return true if this node is attached to a root
 */
public boolean isDescendentOfRoot() {
    return getRoot() != null;
}
/**
 * Change the order of this node in its parent's children list so that it
 * will draw in back of all of its other sibling nodes.
 */
public void moveToBack() {
    PNode myParent = parent;
    if (myParent == null) {
        return;
    }
    myParent.removeChild(this);
    myParent.addChild(0, this);
}
/**
 * Change the order of this node in its parent's children list so that it
 * will draw before (behind) the given sibling node. Does nothing unless the
 * two nodes share the same parent.
 *
 * @param sibling the sibling to draw behind
 */
public void moveInBackOf(PNode sibling) {
    PNode myParent = parent;
    if (myParent == null || myParent != sibling.getParent()) {
        return;
    }
    myParent.removeChild(this);
    int siblingIndex = myParent.indexOfChild(sibling);
    myParent.addChild(siblingIndex, this);
}
/**
 * Change the order of this node in its parent's children list so that it
 * will draw in front of all of its other sibling nodes.
 */
public void moveToFront() {
    PNode myParent = parent;
    if (myParent == null) {
        return;
    }
    myParent.removeChild(this);
    myParent.addChild(this);
}
/**
 * Change the order of this node in its parent's children list so that it
 * will draw after (in front of) the given sibling node. Does nothing unless
 * the two nodes share the same parent.
 *
 * @param sibling the sibling to draw in front of
 */
public void moveInFrontOf(PNode sibling) {
    PNode myParent = parent;
    if (myParent == null || myParent != sibling.getParent()) {
        return;
    }
    myParent.removeChild(this);
    int siblingIndex = myParent.indexOfChild(sibling);
    myParent.addChild(siblingIndex + 1, this);
}
/**
 * Return the parent of this node. This will be null if this node has not
 * been added to a parent yet.
 *
 * @return this nodes parent or null
 */
public PNode getParent() {
    return parent;
}
/**
 * Set the parent of this node. Note this is set automatically when adding
 * and removing children; it does not update either parent's children list.
 *
 * @param newParent the new parent, may be null
 */
public void setParent(PNode newParent) {
    PNode oldParent = parent;
    parent = newParent;
    firePropertyChange(PROPERTY_CODE_PARENT, PROPERTY_PARENT, oldParent, parent);
}
/**
 * Return the index where the given child is stored, or -1 if it is not a
 * child of this node.
 *
 * @param child the child to look up
 * @return the child's index, or -1 if absent
 */
public int indexOfChild(PNode child) {
    return children == null ? -1 : children.indexOf(child);
}
/**
 * Remove the given child from this node's children list. Any subsequent
 * children are shifted to the left (one is subtracted from their indices).
 * The removed child's parent is set to null.
 *
 * @param child the child to remove
 * @return the removed child, or null if it was not a child of this node
 */
public PNode removeChild(PNode child) {
    int index = indexOfChild(child);
    return index == -1 ? null : removeChild(index);
}
/**
 * Remove the child at the specified position of this group node's children.
 * Any subsequent children are shifted to the left (one is subtracted from
 * their indices). The removed child's parent is set to null.
 *
 * @param index the index of the child to remove
 * @return the removed child, or null if this node has no children
 */
public PNode removeChild(int index) {
    if (children == null) {
        return null;
    }
    PNode removed = (PNode) children.remove(index);

    // Drop the list entirely once it becomes empty (children is lazy).
    if (children.size() == 0) {
        children = null;
    }

    // Damage the area the child occupied while it still has a parent chain.
    removed.repaint();
    removed.setParent(null);

    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_CHILDREN, PROPERTY_CHILDREN, null, children);
    return removed;
}
/**
 * Remove all the children in the given collection from this node's list of
 * children. All removed nodes will have their parent set to null.
 *
 * @param childrenNodes the collection of children to remove
 */
public void removeChildren(Collection childrenNodes) {
    for (Iterator it = childrenNodes.iterator(); it.hasNext();) {
        removeChild((PNode) it.next());
    }
}
/**
 * Remove all the children from this node. Note this method is more
 * efficient than removing each child individually, since it detaches all
 * children and fires a single change notification.
 */
public void removeAllChildren() {
    if (children == null) {
        return;
    }

    int count = children.size();
    for (int i = 0; i < count; i++) {
        PNode child = (PNode) children.get(i);
        child.setParent(null);
    }
    children = null;

    invalidatePaint();
    invalidateFullBounds();
    firePropertyChange(PROPERTY_CODE_CHILDREN, PROPERTY_CHILDREN, null, children);
}
/**
 * Delete this node by removing it from its parent's list of children. Does
 * nothing if this node has no parent.
 */
public void removeFromParent() {
    if (parent != null) {
        parent.removeChild(this);
    }
}
/**
 * Set the parent of this node, and transform the node in such a way that it
 * doesn't move in global coordinates: the node's transform is rewritten as
 * (newParent's global-to-local) concatenated with this node's old
 * local-to-global transform.
 *
 * @param newParent The new parent of this node.
 */
public void reparent(PNode newParent) {
    // Capture both coordinate mappings before detaching.
    AffineTransform originalTransform = getLocalToGlobalTransform(null);
    AffineTransform newTransform = newParent.getGlobalToLocalTransform(null);
    newTransform.concatenate(originalTransform);

    removeFromParent();
    setTransform(newTransform);
    newParent.addChild(this);
    computeFullBounds(fullBoundsCache);
}
/**
 * Swaps this node out of the scene graph tree, and replaces it with the
 * specified replacement node. This node is left dangling, and it is up to
 * the caller to manage it. The replacement node is inserted at exactly the
 * child index this node occupied. If this node has no parent when replace
 * is called, then nothing will be done at all.
 *
 * @param replacementNode the new node that replaces the current node in the
 *            scene graph tree.
 */
public void replaceWith(PNode replacementNode) {
    if (parent == null) {
        return;
    }
    PNode myParent = this.parent;
    int myIndex = myParent.getChildrenReference().indexOf(this);
    myParent.removeChild(this);
    myParent.addChild(myIndex, replacementNode);
}
/**
 * Return the number of children that this node has.
 *
 * @return the number of children
 */
public int getChildrenCount() {
    return children == null ? 0 : children.size();
}
/**
 * Return the child node at the specified index. Note: throws a
 * NullPointerException if this node has no children (the lazy children
 * list is null), and an IndexOutOfBoundsException for an invalid index.
 *
 * @param index a child index
 * @return the child node at the specified index
 */
public PNode getChild(int index) {
    return (PNode) children.get(index);
}
/**
 * Return a reference to the list used to manage this node's children,
 * creating it lazily on first use. This list should not be modified by
 * callers.
 *
 * @return reference to the children list, never null
 */
public List getChildrenReference() {
    if (children == null) {
        children = new ArrayList();
    }
    return children;
}
/**
 * Return an iterator over this node's direct descendent children.
 *
 * @return iterator over this node's children; empty when there are none
 */
public ListIterator getChildrenIterator() {
    if (children != null) {
        // Hand out an unmodifiable view so callers cannot mutate the children.
        return Collections.unmodifiableList(children).listIterator();
    }
    return Collections.EMPTY_LIST.listIterator();
}
/**
 * Return the root node (instance of PRoot). If this node does not descend
 * from a PRoot then null will be returned.
 *
 * @return the root of the tree this node belongs to, or null
 */
public PRoot getRoot() {
    // Delegate up the tree via virtual dispatch; a node with no parent
    // terminates the walk with null.
    return parent == null ? null : parent.getRoot();
}
/**
 * Return a collection containing this node and all of its descendent nodes.
 *
 * @return a new collection containing this node and all descendents
 */
public Collection getAllNodes() {
    // Convenience overload: null filter accepts every node, and a fresh
    // result collection is allocated by the two-argument overload.
    return getAllNodes(null, null);
}
/**
 * Return a collection containing the subset of this node and all of its
 * descendent nodes that are accepted by the given node filter. A null
 * filter accepts every node. If the results parameter is non-null it is
 * used to collect the subset instead of allocating a new collection.
 *
 * @param filter the filter used to determine the subset, may be null
 * @param results collection to accumulate matches into, may be null
 * @return a collection containing this node and all accepted descendents
 */
public Collection getAllNodes(PNodeFilter filter, Collection results) {
    Collection matches = (results == null) ? new ArrayList() : results;
    boolean acceptSelf = (filter == null) || filter.accept(this);
    if (acceptSelf) {
        matches.add(this);
    }
    boolean descend = (filter == null) || filter.acceptChildrenOf(this);
    if (descend) {
        int childCount = getChildrenCount();
        for (int childIndex = 0; childIndex < childCount; childIndex++) {
            ((PNode) children.get(childIndex)).getAllNodes(filter, matches);
        }
    }
    return matches;
}
// Serialization - Nodes conditionally serialize their parent.
// This means that only the parents that were unconditionally
// (using writeObject) serialized by someone else will be restored
// when the node is unserialized.
/**
 * Write this node and all of its descendent nodes to the given output stream.
 * This stream must be an instance of PObjectOutputStream or serialization
 * will fail. This node's parent is written out conditionally, that is it
 * will only be written out if someone else writes it out unconditionally.
 *
 * @param out the output stream to write to, must be an instance of
 * PObjectOutputStream
 * @throws IOException if an I/O error occurs while writing
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    out.defaultWriteObject();
    // The parent is written conditionally: it is only serialized if some
    // other object has already written it unconditionally, which avoids
    // dragging the whole ancestor chain into every node's serialized form.
    ((PObjectOutputStream) out).writeConditionalObject(parent);
}
/**
 * Read this node and all of its descendents in from the given input stream.
 *
 * @param in the stream to read from
 * @throws IOException if an I/O error occurs while reading
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 */
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    // Restore the conditionally-serialized parent; this is null when the
    // parent was never written unconditionally by another object.
    parent = (PNode) in.readObject();
}
// Debugging - methods for debugging
/**
 * Returns a string representation of this object for debugging purposes.
 * The package prefix is stripped from the default representation and the
 * node's state (see {@link #paramString()}) is appended.
 *
 * @return a debug string such as {@code PNode@1a2b3c[bounds=...,visible]}
 */
public String toString() {
    // substring(lastIndexOf('.') + 1) strips everything up to and including
    // the last dot without compiling a regular expression on every call.
    // This matches the previous replaceAll(".*\\.", "") behavior: the greedy
    // regex also consumed up to the last dot, and a dot-free string is
    // returned unchanged (lastIndexOf yields -1, so substring(0) is a no-op).
    String shortName = super.toString();
    shortName = shortName.substring(shortName.lastIndexOf('.') + 1);
    return shortName + "[" + paramString() + "]";
}
/**
 * Returns a string representing the state of this node. This method is
 * intended to be used only for debugging purposes, and the content and
 * format of the returned string may vary between implementations. The
 * returned string may be empty but may not be <code>null</code>.
 *
 * @return a string representation of this node's state
 */
protected String paramString() {
    StringBuffer result = new StringBuffer();
    // Append each piece directly; the original built an intermediate
    // concatenated String for every property, which defeats the purpose of
    // using a buffer. Output is byte-for-byte identical.
    result.append("bounds=").append(bounds == null ? "null" : bounds.toString());
    result.append(",fullBounds=").append(fullBoundsCache == null ? "null" : fullBoundsCache.toString());
    result.append(",transform=").append(transform == null ? "null" : transform.toString());
    result.append(",paint=").append(paint == null ? "null" : paint.toString());
    result.append(",transparency=").append(transparency);
    result.append(",childrenCount=").append(getChildrenCount());
    // Boolean flags are listed only when set, keeping the output compact.
    if (fullBoundsInvalid) {
        result.append(",fullBoundsInvalid");
    }
    if (pickable) {
        result.append(",pickable");
    }
    if (childrenPickable) {
        result.append(",childrenPickable");
    }
    if (visible) {
        result.append(",visible");
    }
    return result.toString();
}
}
|
package hudson.maven;
import hudson.CopyOnWrite;
import hudson.FilePath;
import hudson.model.AbstractProject;
import hudson.model.DependencyGraph;
import hudson.model.Descriptor;
import hudson.model.Descriptor.FormException;
import hudson.model.Hudson;
import hudson.model.Item;
import hudson.model.ItemGroup;
import hudson.model.Job;
import hudson.util.DescribableList;
import org.apache.maven.project.MavenProject;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * {@link Job} that builds projects based on Maven2.
 *
 * @author Kohsuke Kawaguchi
 */
public final class MavenModule extends AbstractProject<MavenModule,MavenBuild> implements DescribableList.Owner {

    // Reporters configured for this module; rebuilt from submitted form data
    // in doConfigSubmit.
    private DescribableList<MavenReporter,Descriptor<MavenReporter>> reporters =
        new DescribableList<MavenReporter,Descriptor<MavenReporter>>(this);

    /**
     * Name taken from {@link MavenProject#getName()}.
     */
    private String displayName;

    // Parsed form of the item name. Transient: recomputed by doSetName after load.
    private transient ModuleName moduleName;

    /**
     * Relative path to this module's root directory
     * from {@link MavenModuleSet#getWorkspace()}
     */
    private String relativePath;

    /**
     * List of modules that this module declares direct dependencies on.
     */
    @CopyOnWrite
    private Set<ModuleName> dependencies;

    /*package*/ MavenModule(MavenModuleSet parent, PomInfo pom) {
        super(parent, pom.name.toFileSystemName());
        this.displayName = pom.displayName;
        this.relativePath = pom.relativePath;
        this.dependencies = pom.dependencies;
    }

    // Keeps moduleName in sync with the file-system-safe item name.
    protected void doSetName(String name) {
        moduleName = ModuleName.fromFileSystemName(name);
        super.doSetName(moduleName.toString());
    }

    @Override
    public void onLoad(ItemGroup<? extends Item> parent, String name) throws IOException {
        super.onLoad(parent,name);
        // Guard against configuration persisted by versions that lacked these
        // fields: fall back to empty defaults instead of leaving them null.
        if(reporters==null)
            reporters = new DescribableList<MavenReporter, Descriptor<MavenReporter>>(this);
        reporters.setOwner(this);
        if(dependencies==null)
            dependencies = Collections.emptySet();
    }

    @Override
    public FilePath getWorkspace() {
        // This module lives inside its parent module set's workspace.
        return getParent().getWorkspace().child(relativePath);
    }

    public ModuleName getModuleName() {
        return moduleName;
    }

    @Override
    public String getDisplayName() {
        return displayName;
    }

    // Covariant narrowing: a MavenModule is always owned by a MavenModuleSet.
    public MavenModuleSet getParent() {
        return (MavenModuleSet)super.getParent();
    }

    @Override
    public MavenBuild newBuild() throws IOException {
        MavenBuild lastBuild = new MavenBuild(this);
        builds.put(lastBuild);
        return lastBuild;
    }

    @Override
    protected MavenBuild loadBuild(File dir) throws IOException {
        return new MavenBuild(this,dir);
    }

    @Override
    public boolean isFingerprintConfigured() {
        return true;
    }

    // Registers one graph edge per declared dependency that resolves to a
    // module known to this Hudson instance; unresolved names are skipped.
    protected void buildDependencyGraph(DependencyGraph graph) {
        Map<ModuleName,MavenModule> modules = new HashMap<ModuleName,MavenModule>();
        for (MavenModule m : Hudson.getInstance().getAllItems(MavenModule.class))
            modules.put(m.getModuleName(),m);
        for (ModuleName d : dependencies) {
            MavenModule src = modules.get(d);
            if(src!=null)
                graph.addDependency(src,this);
        }
    }

    /**
     * List of active {@link MavenReporter}s configured for this project.
     */
    public DescribableList<MavenReporter, Descriptor<MavenReporter>> getReporters() {
        return reporters;
    }

    public void doConfigSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        super.doConfigSubmit(req, rsp);
        try {
            reporters.rebuild(req,MavenReporters.LIST,"reporter");
        } catch (FormException e) {
            // NOTE(review): execution falls through to save() even after the
            // form error has been reported to the user — confirm intentional.
            sendError(e,req,rsp);
        }
        save();
        // dependency setting might have been changed by the user, so rebuild.
        Hudson.getInstance().rebuildDependencyGraph();
    }
}
|
package verification.platu.project;
import java.io.IOException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import lpn.parser.LhpnFile;
import org.antlr.runtime.ANTLRFileStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.TokenStream;
import verification.platu.lpn.DualHashMap;
import verification.platu.lpn.io.Instance;
import verification.platu.lpn.io.PlatuGrammarLexer;
import verification.platu.lpn.io.PlatuInstLexer;
import verification.platu.main.Options;
import verification.platu.stategraph.*;
import verification.platu.TimingAnalysis.*;
import verification.platu.logicAnalysis.Analysis;
import verification.platu.logicAnalysis.CompositionalAnalysis;
import verification.platu.lpn.LPN;
import verification.platu.lpn.LPNTranRelation;
import verification.platu.lpn.io.PlatuGrammarParser;
import verification.platu.lpn.io.PlatuInstParser;
import verification.platu.stategraph.State;
import verification.platu.stategraph.StateGraph;
/**
 * A verification project: a set of LPN design units and the state-graph
 * searches that can be run over them.
 */
public class Project {

    protected String label;

    /* 1. Each design unit has an unique label index.
     * 2. The indices of all design units are sequential starting from 0.
     * */
    protected List<StateGraph> designUnitSet;

    protected LPNTranRelation lpnTranRelation = null;

    protected CompositionalAnalysis analysis = null;

    public Project() {
        this.label = "";
        this.designUnitSet = new ArrayList<StateGraph>(1);
        lpnTranRelation = new LPNTranRelation(this.designUnitSet);
    }

    /**
     * Creates a project holding a single LPN.
     *
     * @param lpn the design unit to analyze
     */
    public Project(LhpnFile lpn) {
        this.label = "";
        this.designUnitSet = new ArrayList<StateGraph>(1);
        designUnitSet.add(new StateGraph(lpn));
    }

    /**
     * Creates a project holding one state graph per given LPN.
     *
     * @param lpns the design units to analyze
     */
    public Project(ArrayList<LhpnFile> lpns) {
        this.label = "";
        this.designUnitSet = new ArrayList<StateGraph>(lpns.size());
        for (LhpnFile lpn : lpns) {
            designUnitSet.add(new StateGraph(lpn));
        }
    }

    /**
     * Find the SG for the entire project where each project state is a tuple of
     * local states.
     *
     * @return one state graph per design unit, indexed by LPN index
     */
    public StateGraph[] search() {
        validateInputs();
        long start = System.currentTimeMillis();
        StateGraph[] sgArray = orderDesignUnits();
        State[] initStateArray = buildInitialStates(sgArray);
        Analysis dfsStateExploration = new Analysis(sgArray);
        StateGraph[] stateGraphArray = dfsStateExploration.search_dfs(sgArray, initStateArray);
        reportRuntime(start);
        return stateGraphArray;
    }

    /**
     * Find the SG for the entire project where each project state is a tuple of
     * local states, using partial order reduction during the dfs search.
     *
     * @return the reduced state graph
     */
    public StateGraph searchWithPOR() {
        validateInputs();
        long start = System.currentTimeMillis();
        StateGraph[] sgArray = orderDesignUnits();
        State[] initStateArray = buildInitialStates(sgArray);
        Analysis dfsStateExplorationWithPOR = new Analysis(sgArray);
        StateGraph stateGraph = dfsStateExplorationWithPOR.search_dfsWithPOR(sgArray, initStateArray);
        reportRuntime(start);
        return stateGraph;
    }

    // Places the design units in an array ordered by their (freshly assigned,
    // sequential) LPN indices. Shared setup for search() and searchWithPOR().
    private StateGraph[] orderDesignUnits() {
        int lpnCnt = designUnitSet.size();
        StateGraph[] sgArray = new StateGraph[lpnCnt];
        int idx = 0;
        for (StateGraph du : designUnitSet) {
            LhpnFile lpn = du.getLpn();
            lpn.setIndex(idx++);
            sgArray[lpn.getIndex()] = du;
        }
        return sgArray;
    }

    // Builds and registers the initial project state (one local state per LPN).
    // Shared setup for search() and searchWithPOR().
    private State[] buildInitialStates(StateGraph[] sgArray) {
        int lpnCnt = sgArray.length;
        HashMap<String, Integer> varValMap = new HashMap<String, Integer>();
        State[] initStateArray = new State[lpnCnt];
        for (int index = 0; index < lpnCnt; index++) {
            LhpnFile curLpn = sgArray[index].getLpn();
            StateGraph curSg = sgArray[index];
            initStateArray[index] = curSg.getInitState();
            int[] curStateVector = initStateArray[index].getVector();
            // NOTE(review): the map reference is replaced (not merged) on every
            // iteration, so after this loop only the last LPN's mapping is
            // used below — confirm getAllVarsWithValues accumulates as intended.
            varValMap = curLpn.getAllVarsWithValues(curStateVector);
        }
        // TODO: (future) Need to adjust the transition vector as well?
        // Adjust the value of the input variables in LPN in the initial state,
        // then add the initial states into their respective LPN.
        for (int index = 0; index < lpnCnt; index++) {
            StateGraph curSg = sgArray[index];
            initStateArray[index].update(curSg, varValMap, curSg.getLpn().getVarIndexMap());
            initStateArray[index] = curSg.addState(initStateArray[index]);
        }
        return initStateArray;
    }

    // Prints the elapsed wall-clock time since 'start' in seconds.
    private static void reportRuntime(long start) {
        long elapsedTimeMillis = System.currentTimeMillis() - start;
        float elapsedTimeSec = elapsedTimeMillis/1000F;
        System.out.println("---> total runtime: " + elapsedTimeSec + " sec\n");
    }

    /**
     * Parses a single .lpn file with the Platu grammar parser.
     *
     * @param src_file path to the file; must end in ".lpn" (the process exits
     *        otherwise)
     * @return the parsed LPN set, or null when parsing failed (the failure is
     *         logged)
     */
    public Set<LPN> readLpn(final String src_file) {
        Set<LPN> lpnSet = null;
        try {
            if (!src_file.endsWith(".lpn")) {
                System.err.println("Invalid file extention");
                System.exit(1);
            }
            ANTLRFileStream input = new ANTLRFileStream(src_file);
            PlatuGrammarLexer lexer = new PlatuGrammarLexer(input);
            TokenStream tokenStream = new CommonTokenStream(lexer);
            PlatuGrammarParser parser = new PlatuGrammarParser(tokenStream);
            lpnSet = parser.lpn(this);
        } catch (Exception ex) {
            Logger.getLogger(Project.class.getName()).log(Level.SEVERE, null,
                    ex);
        }
        return lpnSet;
    }

    /**
     * Parses the given .lpn files (and any files they include), instantiates
     * every declared LPN instance, and wires instance inputs to the outputs
     * that drive them. Exits the process on any read/parse/lookup error.
     *
     * @param fileList paths of the .lpn files to parse
     */
    public void readLpn(List<String> fileList) {
        // Parse the explicitly named files first, then any files they pulled
        // in via includes (minus the ones already parsed).
        for (String srcFile : fileList) {
            parseInstanceFile(srcFile);
        }
        PlatuInstParser.includeSet.removeAll(fileList);
        for (String srcFile : PlatuInstParser.includeSet) {
            parseInstanceFile(srcFile);
        }
        verification.platu.lpn.LPN.nextID = 1;
        // Instantiate each declared instance from its parsed LPN class.
        HashMap<String, LPN> instanceMap = new HashMap<String, LPN>();
        for (Instance inst : PlatuInstParser.InstanceList) {
            LPN lpn = PlatuInstParser.LpnMap.get(inst.getLpnLabel());
            if (lpn == null) {
                System.err.println("error: class " + inst.getLpnLabel() + " does not exist");
                System.exit(1);
            }
            LPN instLpn = lpn.instantiate(inst.getName());
            instanceMap.put(instLpn.getLabel(), instLpn);
            this.designUnitSet.add(instLpn.getStateGraph());
        }
        // Connect each instance's input arguments to the instances whose
        // output variables supply them.
        for (Instance inst : PlatuInstParser.InstanceList) {
            LPN dstLpn = instanceMap.get(inst.getName());
            if (dstLpn == null) {
                System.err.println("error: instance " + inst.getName() + " does not exist");
                System.exit(1);
            }
            List<String> argumentList = dstLpn.getArgumentList();
            List<String> varList = inst.getVariableList();
            List<String> modList = inst.getModuleList();
            if (argumentList.size() != varList.size()) {
                System.err.println("error: incompatible number of arguments for instance " + inst.getName());
                System.exit(1);
            }
            for (int i = 0; i < argumentList.size(); i++) {
                LPN srcLpn = instanceMap.get(modList.get(i));
                if (srcLpn == null) {
                    System.err.println("error: instance " + modList.get(i) + " does not exist");
                    System.exit(1);
                }
                String outputVar = varList.get(i);
                String inputVar = argumentList.get(i);
                srcLpn.connect(outputVar, dstLpn, inputVar);
            }
        }
    }

    // Parses one .lpn file with the Platu instance parser; exits the process
    // on a read or parse failure (matching the original behavior).
    private void parseInstanceFile(String srcFile) {
        if (!srcFile.endsWith(".lpn")) {
            System.err.println("Invalid file extention");
            System.exit(1);
        }
        ANTLRFileStream input = null;
        try {
            input = new ANTLRFileStream(srcFile);
        }
        catch (IOException e) {
            System.err.println("error: error reading " + srcFile);
            System.exit(1);
        }
        PlatuInstLexer lexer = new PlatuInstLexer(input);
        TokenStream tokenStream = new CommonTokenStream(lexer);
        PlatuInstParser parser = new PlatuInstParser(tokenStream);
        try {
            parser.parseLpnFile(this);
        }
        catch (RecognitionException e) {
            System.err.println("error: error parsing " + srcFile);
            System.exit(1);
        }
    }

    /**
     * @return the designUnitSet
     */
    public List<StateGraph> getDesignUnitSet() {
        return designUnitSet;
    }

    /**
     * Validates each lpn's input variables are driven by another lpn's output.
     * Prints one error per undriven input and exits the process when any are
     * found.
     */
    private void validateInputs() {
        boolean error = false;
        for (StateGraph sg : designUnitSet) {
            for (String input : sg.getLpn().getAllInputs().keySet()) {
                boolean connected = false;
                for (StateGraph sg2 : designUnitSet) {
                    if (sg == sg2) continue;
                    if (sg2.getLpn().getAllOutputs().keySet().contains(input)) {
                        connected = true;
                        break;
                    }
                }
                if (!connected) {
                    error = true;
                    System.err.println("error in lpn " + sg.getLpn().getLabel() + ": input variable '" + input + "' is not dependent on an output");
                }
            }
        }
        if (error) {
            System.exit(1);
        }
    }
}
|
package wge3.game.engine.ai.tasks;
import java.util.ArrayList;
import java.util.List;
import wge3.game.entity.creatures.Creature;
import static wge3.game.engine.utilities.pathfinding.PathFinder.findPath;
import wge3.game.entity.Tile;
import static wge3.game.engine.utilities.Math.angle;
public final class MoveTask extends AITask {

    private Creature executor;
    private TurnTask turnTask;
    // Tiles to visit, in order; the final element is the destination.
    // Null when no path to the destination could be found.
    private List<Tile> path;
    // Index of the waypoint currently being walked toward.
    private int position;

    /**
     * Creates a task that moves the executor to the given tile, either
     * directly (when the tile is an acceptable move destination) or along a
     * path computed by the path finder. When no path exists the task is
     * immediately finished.
     *
     * @param executor the creature to move
     * @param dest the tile to move to
     */
    public MoveTask(Creature executor, Tile dest) {
        this.executor = executor;
        if (dest.isAnOKMoveDestinationFor(executor)) {
            path = new ArrayList<Tile>(1);
            path.add(dest);
        } else {
            path = findPath(executor.getTile(), dest);
            if (path == null) return; // no route; isFinished() reports true
        }
        position = 0;
        float angle = angle(executor.getX(), executor.getY(), path.get(position).getMiddleX(), path.get(position).getMiddleY());
        turnTask = new TurnTask(executor, angle);
    }

    @Override
    public boolean isFinished() {
        return path == null || (executor.getTile().equals(getDestination()) && executor.isInCenterOfATile());
    }

    @Override
    public void execute() {
        // Guard: do nothing when no path exists or the destination has been
        // reached. Previously a call in either situation crashed — a null
        // turnTask (no path) caused an NPE, and advancing past the final
        // waypoint caused an IndexOutOfBoundsException.
        if (isFinished()) return;
        if (!turnTask.isFinished()) {
            turnTask.execute();
            return;
        }
        if (!executor.getTile().equals(path.get(position)) || !executor.isInCenterOfATile()) {
            executor.goForward();
            return;
        }
        // Arrived at the center of the current waypoint (the checks above
        // guarantee it): advance to the next waypoint and turn toward it.
        position++;
        float angle = angle(executor.getX(), executor.getY(), path.get(position).getMiddleX(), path.get(position).getMiddleY());
        turnTask = new TurnTask(executor, angle);
    }

    /**
     * @return the final tile of the path, or null when no path exists
     */
    public Tile getDestination() {
        return path == null ? null : path.get(path.size()-1);
    }

    /**
     * Replaces the final tile of the path with the given tile. Has no effect
     * when no path exists.
     *
     * @param dest the new destination tile
     */
    public void setDestination(Tile dest) {
        if (path != null) path.set(path.size()-1, dest);
    }
}
|
package water.fvec;
import water.AutoBuffer;
import water.Futures;
import water.H2O;
import water.MemoryManager;
import water.nbhm.NonBlockingHashMap;
import water.parser.ValueString;
import water.util.IcedDouble;
import water.util.IcedInt;
import water.util.PrettyPrint;
import water.util.UnsafeUtils;
import java.nio.charset.StandardCharsets;
import java.util.*;
// An uncompressed chunk of data, supporting an append operation
public class NewChunk extends Chunk {
  // Index of this chunk within its Vec.
  public final int _cidx;
  // We can record the following (mixed) data types:
  // 1- doubles, in _ds including NaN for NA & 0; _ls==_xs==null
  // 2- scaled decimals from parsing, in _ls & _xs; _ds==null
  // 3- zero: requires _ls==0 && _xs==0
  // 4- NA: either _ls==0 && _xs==Integer.MIN_VALUE, OR _ds=NaN
  // 5- Categorical: _xs==(Integer.MIN_VALUE+1) && _ds==null
  // 6- Str: _ss holds appended string bytes (with trailing 0), _is[] holds offsets into _ss[]
  // Chunk._len is the count of elements appended
  // Sparse: if _sparseLen != _len, then _ls/_ds are compressed to non-zero's only,
  // and _xs is the row number. Still _len is count of elements including
  // zeros, and _sparseLen is count of non-zeros.
  public transient long _ls[]; // Mantissa
  public transient int _xs[]; // Exponent, or if _ls==0, NA or Categorical or Rows
  public transient int _id[]; // Indices (row numbers) of stored values, used for sparse
  public transient double _ds[]; // Doubles, for inflating via doubles
  public transient byte _ss[]; // Bytes of appended strings, including trailing 0
  public transient int _is[]; // _is[] index of strings - holds offsets into _ss[]. _is[i] == -1 means NA/sparse
  // Allocators: (re)allocate the corresponding backing array at size l and return it.
  long [] alloc_mantissa(int l) { return _ls = MemoryManager.malloc8(l); }
  int [] alloc_exponent(int l) { return _xs = MemoryManager.malloc4(l); }
  int [] alloc_indices(int l) { return _id = MemoryManager.malloc4(l); }
  double[] alloc_doubles(int l) { return _ds = MemoryManager.malloc8d(l); }
  int [] alloc_str_indices(int l) { return _is = MemoryManager.malloc4(l); }
  // Raw accessors for the backing arrays (any of these may be null,
  // depending on the data type recorded so far).
  final protected long [] mantissa() { return _ls; }
  final protected int [] exponent() { return _xs; }
  final protected int [] indices() { return _id; }
  final protected double[] doubles() { return _ds; }
  @Override public boolean isSparse() { return sparse(); }
  public int _sslen; // Next offset into _ss for placing next String
  public int _sparseLen; // Count of stored elements (non-zeros only, when sparse)
  int set_sparseLen(int l) { return this._sparseLen = l; }
  @Override public int sparseLen() { return _sparseLen; }
  // Rollup counters, computed lazily by type(); _naCnt == -1 means "not yet computed".
  private int _naCnt=-1; // Count of NA's appended
  protected int naCnt() { return _naCnt; } // Count of NA's appended
  private int _enumCnt; // Count of Categorical's appended
  protected int enumCnt() { return _enumCnt; } // Count of Categorical's appended
  private int _strCnt; // Count of string's appended
  protected int strCnt() { return _strCnt; } // Count of strings's appended
  private int _nzCnt; // Count of non-zero's appended
  private int _uuidCnt; // Count of UUIDs
  public int _timCnt = 0; // Count of time values appended (see type())
  protected static final int MIN_SPARSE_RATIO = 32;
  private int _sparseRatio = MIN_SPARSE_RATIO;
  // Empty dense chunk for the given Vec and chunk index.
  public NewChunk( Vec vec, int cidx ) { _vec = vec; _cidx = cidx; }
  // Optionally start in sparse mode with pre-sized value/row-index arrays.
  public NewChunk( Vec vec, int cidx, boolean sparse ) {
    _vec = vec; _cidx = cidx;
    if(sparse) {
      _ls = new long[128];
      _xs = new int[128];
      _id = new int[128];
    }
  }
  // Wraps an existing double array as a dense, vec-less chunk (cidx == -1).
  public NewChunk(double [] ds) {
    _cidx = -1;
    _vec = null;
    _ds = ds;
    _sparseLen = _len = ds.length;
  }
  // Adopts pre-built backing arrays; the lengths are derived from the first
  // non-null array when not already set.
  public NewChunk( Vec vec, int cidx, long[] mantissa, int[] exponent, int[] indices, double[] doubles) {
    _vec = vec; _cidx = cidx;
    _ls = mantissa;
    _xs = exponent;
    _id = indices;
    _ds = doubles;
    if (_ls != null && sparseLen()==0) set_sparseLen(set_len(_ls.length));
    if (_xs != null && sparseLen()==0) set_sparseLen(set_len(_xs.length));
    if (_id != null && sparseLen()==0) set_sparseLen(set_len(_id.length));
    if (_ds != null && sparseLen()==0) set_sparseLen(set_len(_ds.length));
  }
  // Constructor used when inflating a Chunk.
  public NewChunk( Chunk c ) {
    this(c._vec, c.cidx());
    _start = c._start;
  }
  // Pre-sized newchunks: a dense double chunk of the given length, all NA.
  public NewChunk( Vec vec, int cidx, int len ) {
    this(vec,cidx);
    _ds = new double[len];
    Arrays.fill(_ds, Double.NaN); // every row starts as NA
    set_sparseLen(set_len(len));
  }
  // Overrides the sparse-compression ratio threshold; returns this for chaining.
  public NewChunk setSparseRatio(int s) {
    _sparseRatio = s;
    return this;
  }
  // Late-binds the owning Vec (for chunks built before their Vec exists).
  public void set_vec(Vec vec) { _vec = vec; }
  // Converts this categorical chunk to a string chunk by mapping each stored
  // enum id through emap. Handles both dense and sparse storage; NAs stay NAs.
  public NewChunk convertEnum2Str(ValueString[] emap) {
    NewChunk strChunk = new NewChunk(_vec, _cidx);
    int j = 0, l = _len; // j walks the sparse value arrays, i walks dense rows
    for( int i = 0; i < l; ++i ) {
      if( _id != null && _id.length > 0 && (j < _id.length && _id[j] == i ) ) // Sparse storage
        // adjust for enum ids using 1-based indexing
        strChunk.addStr(emap[(int) _ls[j++] - 1]);
      else if (_xs[i] != Integer.MIN_VALUE) // Categorical value isn't NA
        strChunk.addStr(emap[(int) _ls[i] - 1]);
      else
        strChunk.addNA();
    }
    // In sparse mode every stored value must have been consumed exactly once.
    if (_id != null)
      assert j == sparseLen() :"j = " + j + ", sparseLen = " + sparseLen();
    return strChunk;
  }
  // A single logical row of this chunk, as handed out by values(). Knows how
  // to copy itself into another NewChunk regardless of the stored data type.
  public final class Value {
    int _gId; // row number in dense (ie counting zeros)
    int _lId; // local array index of this value, equal to _gId if dense
    public Value(int lid, int gid){_lId = lid; _gId = gid;}
    public final int rowId0(){return _gId;}
    // Appends this value to c, dispatching on which backing arrays exist:
    // scaled longs (_ls/_xs), UUIDs (_ls+_ds), strings (_ss/_is), or doubles.
    public void add2Chunk(NewChunk c){
      if (_ds == null && _ss == null) {
        c.addNum(_ls[_lId],_xs[_lId]);
      } else {
        if (_ls != null) {
          // UUID: high 64 bits are bit-cast into the double array.
          c.addUUID(_ls[_lId], Double.doubleToRawLongBits(_ds[_lId]));
        } else if (_ss != null) {
          int sidx = _is[_lId];
          int nextNotNAIdx = _lId+1;
          // Find next not-NA value (_is[idx] != -1)
          while (nextNotNAIdx < _is.length && _is[nextNotNAIdx] == -1) nextNotNAIdx++;
          // String length is the gap to the next stored offset, or to the
          // end of the string bytes for the last entry.
          int slen = nextNotNAIdx < _is.length ? _is[nextNotNAIdx]-sidx : _sslen - sidx;
          // null-ValueString represents NA value
          ValueString vstr = sidx == -1 ? null : new ValueString().set(_ss, sidx, slen);
          c.addStr(vstr);
        } else
          c.addNum(_ds[_lId]);
      }
    }
  }
  // Iterate over all rows (dense row ids 0.._len).
  public Iterator<Value> values(){ return values(0,_len);}
  // Iterate over rows in [fromIdx, min(toIdx, _len)). For sparse chunks only
  // the stored (non-zero) rows are visited; _gId tracks the dense row number.
  // NOTE: the iterator reuses a single Value instance — copy it if retained.
  public Iterator<Value> values(int fromIdx, int toIdx){
    final int lId, gId;
    final int to = Math.min(toIdx, _len);
    if(sparse()){
      // Locate the first stored entry at or after fromIdx.
      int x = Arrays.binarySearch(_id,0, sparseLen(),fromIdx);
      if(x < 0) x = -x -1; // not found: use the insertion point
      lId = x;
      gId = x == sparseLen() ? _len :_id[x];
    } else
      lId = gId = fromIdx;
    final Value v = new Value(lId,gId);
    final Value next = new Value(lId,gId);
    return new Iterator<Value>(){
      @Override public final boolean hasNext(){return next._gId < to;}
      @Override public final Value next(){
        if(!hasNext())throw new NoSuchElementException();
        // Hand out the current position, then advance 'next' to the
        // following stored entry (sparse) or the following row (dense).
        v._gId = next._gId; v._lId = next._lId;
        next._lId++;
        if(sparse()) next._gId = next._lId < sparseLen() ?_id[next._lId]: _len;
        else next._gId++;
        return v;
      }
      @Override
      public void remove() {throw new UnsupportedOperationException();}
    };
  }
  // Heuristic to decide the basic type of a column
  public byte type() {
    if( _naCnt == -1 ) { // No rollups yet?
      int nas=0, es=0, nzs=0, ss=0;
      if( _ds != null && _ls != null ) { // UUID?
        for( int i=0; i< sparseLen(); i++ )
          if( _xs != null && _xs[i]==Integer.MIN_VALUE ) nas++;
          else if( _ds[i] !=0 || _ls[i] != 0 ) nzs++;
        _uuidCnt = _len -nas;
      } else if( _ds != null ) { // Doubles?
        assert _xs==null;
        for( int i = 0; i < sparseLen(); ++i) if( Double.isNaN(_ds[i]) ) nas++; else if( _ds[i]!=0 ) nzs++;
      } else {
        if( _ls != null && _ls.length > 0) // Longs and enums?
          for( int i=0; i< sparseLen(); i++ )
            if( isNA2(i) ) nas++;
            else {
              if( isEnum2(i) ) es++;
              if( _ls[i] != 0 ) nzs++;
            }
        if( _is != null ) // Strings
          for( int i=0; i< sparseLen(); i++ )
            if( isNA2(i) ) nas++;
            else ss++;
      }
      // Cache the rollup counts; _naCnt != -1 marks them as computed.
      _nzCnt=nzs; _enumCnt =es; _naCnt=nas; _strCnt = ss;
    }
    // Now run heuristic for type
    if(_naCnt == _len) // All NAs ==> NA Chunk
      return AppendableVec.NA;
    if(_strCnt > 0)
      return AppendableVec.STRING;
    if(_enumCnt > 0 && _enumCnt + _naCnt == _len)
      return AppendableVec.ENUM; // All are Strings+NAs ==> Categorical Chunk
    // UUIDs?
    if( _uuidCnt > 0 ) return AppendableVec.UUID;
    // Larger of time & numbers
    int nums = _len -_naCnt-_timCnt;
    return _timCnt >= nums ? AppendableVec.TIME : AppendableVec.NUMBER;
  }
  //what about sparse reps?
  // NA test against a *local* (possibly compressed) index; the NA sentinel
  // depends on the stored data type (UUID, string, scaled long, or double).
  protected final boolean isNA2(int idx) {
    if (isUUID()) return _ls[idx]==C16Chunk._LO_NA && Double.doubleToRawLongBits(_ds[idx])==C16Chunk._HI_NA;
    if (isString()) return _is[idx] == -1;
    return (_ds == null) ? (_ls[idx] == Long.MAX_VALUE && _xs[idx] == Integer.MIN_VALUE) : Double.isNaN(_ds[idx]);
  }
  // Categorical test against a local index: enums carry a sentinel exponent.
  protected final boolean isEnum2(int idx) {
    return _xs!=null && _xs[idx]==Integer.MIN_VALUE+1;
  }
  // Categorical test against a dense row id; maps through _id when sparse.
  // A row not stored in a sparse chunk is an implicit zero, not an enum.
  protected final boolean isEnum(int idx) {
    if(_id == null)return isEnum2(idx);
    int j = Arrays.binarySearch(_id,0, sparseLen(),idx);
    return j>=0 && isEnum2(j);
  }
  // Append a categorical id, stored as a mantissa with the enum sentinel exponent.
  public void addEnum(int e) {append2(e,Integer.MIN_VALUE+1);}
  // Append an NA using the sentinel appropriate for the current data type.
  public void addNA() {
    if( isUUID() ) addUUID(C16Chunk._LO_NA, C16Chunk._HI_NA);
    else if( isString() ) addStr(null);
    else if (_ds != null) addNum(Double.NaN);
    else append2(Long.MAX_VALUE,Integer.MIN_VALUE);
  }
  // Append a scaled decimal: the value val * 10^exp.
  public void addNum (long val, int exp) {
    if( isUUID() || isString() ) addNA(); // numbers are NA in UUID/string chunks
    else if(_ds != null) {
      assert _ls == null;
      addNum(val*PrettyPrint.pow10(exp)); // already in double mode
    } else {
      if( val == 0 ) exp = 0;// Canonicalize zero
      long t; // Remove extra scaling
      while( exp < 0 && exp > -9999999 && (t=val/10)*10==val ) { val=t; exp++; }
      append2(val,exp);
    }
  }
  // Fast-path append double data
  public void addNum(double d) {
    if( isUUID() || isString() ) { addNA(); return; }
    // In sparse mode zeros are implicit: only the dense row count grows.
    if(_id == null || d != 0) {
      if(_ls != null)switch_to_doubles();
      if( _ds == null || sparseLen() >= _ds.length ) {
        append2slowd();
        // call addNum again since append2slow might have flipped to sparse
        addNum(d);
        assert sparseLen() <= _len;
        return;
      }
      if(_id != null)_id[sparseLen()] = _len; // record the dense row number
      _ds[sparseLen()] = d;
      set_sparseLen(sparseLen() + 1);
    }
    set_len(_len + 1);
    assert sparseLen() <= _len;
  }
private void append_ss(String str) {
if (_ss == null) {
_ss = MemoryManager.malloc1((str.length()+1) * 4);
}
while (_ss.length < (_sslen + str.length() + 1)) {
_ss = MemoryManager.arrayCopyOf(_ss,_ss.length << 1);
}
for (byte b : str.getBytes())
_ss[_sslen++] = b;
_ss[_sslen++] = (byte)0; // for trailing 0;
}
  // Appends the raw bytes of str (plus a trailing 0) to _ss, growing the
  // buffer as needed and advancing _sslen.
  private void append_ss(ValueString str) {
    int strlen = str.length();
    int off = str.getOffset();
    byte b[] = str.getBuffer();
    if (_ss == null) {
      _ss = MemoryManager.malloc1((strlen + 1) * 4);
    }
    // Double the buffer until the new bytes plus the terminator fit.
    while (_ss.length < (_sslen + strlen + 1)) {
      _ss = MemoryManager.arrayCopyOf(_ss,_ss.length << 1);
    }
    for (int i = off; i < off+strlen; i++)
      _ss[_sslen++] = b[i];
    _ss[_sslen++] = (byte)0; // for trailing 0;
  }
  // Append a String, stored in _ss & _is
  public void addStr(ValueString str) {
    // NOTE(review): in sparse mode (_id != null) a null str (NA) is not
    // stored at all — only the dense row count grows; confirm intended.
    if(_id == null || str != null) {
      if(_is == null || sparseLen() >= _is.length) {
        append2slowstr(); // grow (or create) _is, possibly flipping layout
        addStr(str);      // retry with the new layout
        assert sparseLen() <= _len;
        return;
      }
      if (str != null) {
        if(_id != null)_id[sparseLen()] = _len;
        _is[sparseLen()] = _sslen; // record the offset of the string bytes
        set_sparseLen(sparseLen() + 1);
        append_ss(str);
      } else if (_id == null) {
        // Dense NA: store the NA sentinel offset.
        _is[sparseLen()] = CStrChunk.NA;
        set_sparseLen(sparseLen() + 1);
      }
    }
    set_len(_len + 1);
    assert sparseLen() <= _len;
  }
  // Copy the string at absolute row 'row' of chunk c (NA-preserving).
  public void addStr(Chunk c, long row) {
    if( c.isNA_abs(row) ) addNA();
    else addStr(c.atStr_abs(new ValueString(), row));
  }
  // Copy the string at chunk-local row 'row' of chunk c (NA-preserving).
  public void addStr(Chunk c, int row) {
    if( c.isNA(row) ) addNA();
    else addStr(c.atStr(new ValueString(), row));
  }
  // Append a UUID, stored in _ls & _ds
  public void addUUID( long lo, long hi ) {
    if( _ls==null || _ds== null || sparseLen() >= _ls.length )
      append2slowUUID();
    _ls[sparseLen()] = lo;                          // low 64 bits
    _ds[sparseLen()] = Double.longBitsToDouble(hi); // high 64 bits, bit-cast into the double array
    set_sparseLen(sparseLen() + 1);
    set_len(_len + 1);
    assert sparseLen() <= _len;
  }
  // Copy the UUID at absolute row 'row' of chunk c (NA-preserving).
  public void addUUID( Chunk c, long row ) {
    if( c.isNA_abs(row) ) addUUID(C16Chunk._LO_NA,C16Chunk._HI_NA);
    else addUUID(c.at16l_abs(row),c.at16h_abs(row));
  }
  // Copy the UUID at chunk-local row 'row' of chunk c (NA-preserving).
  public void addUUID( Chunk c, int row ) {
    if( c.isNA(row) ) addUUID(C16Chunk._LO_NA,C16Chunk._HI_NA);
    else addUUID(c.at16l(row),c.at16h(row));
  }
  /** True iff this chunk currently holds UUID data (both _ls and _ds buffers in use together). */
  public final boolean isUUID(){return _ls != null && _ds != null; }
  /** True iff this chunk currently holds string data (string-offset array _is allocated). */
  public final boolean isString(){return _is != null; }
  /** True iff this chunk is in sparse representation (row-index array _id allocated). */
  public final boolean sparse(){return _id != null;}
public void addZeros(int n){
if(!sparse()) for(int i = 0; i < n; ++i)addNum(0,0);
else set_len(_len + n);
}
  // Append all of 'nc' onto the current NewChunk. Kill nc.
  // If this chunk is empty, simply steal nc's buffers; otherwise grow our
  // buffers and bulk-copy. Mixed sparse/dense pairs are first densified.
  public void add( NewChunk nc ) {
    assert _cidx >= 0;
    assert sparseLen() <= _len;
    assert nc.sparseLen() <= nc._len :"_len = " + nc.sparseLen() + ", _len2 = " + nc._len;
    if( nc._len == 0 ) return;
    if(_len == 0){
      // Steal nc's buffers wholesale; nc is emptied.
      _ls = nc._ls; nc._ls = null;
      _xs = nc._xs; nc._xs = null;
      _id = nc._id; nc._id = null;
      _ds = nc._ds; nc._ds = null;
      _is = nc._is; nc._is = null;
      _ss = nc._ss; nc._ss = null;
      set_sparseLen(nc.sparseLen());
      set_len(nc._len);
      return;
    }
    if(nc.sparse() != sparse()){ // for now, just make it dense
      cancel_sparse();
      nc.cancel_sparse();
    }
    if( _ds != null ) throw H2O.fail(); // appending onto a double chunk is unsupported here
    while( sparseLen() + nc.sparseLen() >= _xs.length )
      _xs = MemoryManager.arrayCopyOf(_xs,_xs.length<<1);
    _ls = MemoryManager.arrayCopyOf(_ls,_xs.length);
    System.arraycopy(nc._ls,0,_ls, sparseLen(), nc.sparseLen());
    System.arraycopy(nc._xs,0,_xs, sparseLen(), nc.sparseLen());
    if(_id != null) {
      assert nc._id != null;
      _id = MemoryManager.arrayCopyOf(_id,_xs.length);
      System.arraycopy(nc._id,0,_id, sparseLen(), nc.sparseLen());
      // Shift nc's row indices past our existing logical length.
      for(int i = sparseLen(); i < sparseLen() + nc.sparseLen(); ++i) _id[i] += _len;
    } else assert nc._id == null;
    set_sparseLen(sparseLen() + nc.sparseLen());
    set_len(_len + nc._len);
    nc._ls = null; nc._xs = null; nc._id = null; nc.set_sparseLen(nc.set_len(0));
    assert sparseLen() <= _len;
  }
  // PREpend all of 'nc' onto the current NewChunk. Kill nc.
  // Implemented by swapping buffers with nc and then appending our old data.
  // NOTE(review): the string buffers (_is/_ss) and _sslen are NOT swapped here,
  // so prepending appears unsupported for string chunks — confirm with callers.
  public void addr( NewChunk nc ) {
    long [] tmpl = _ls; _ls = nc._ls; nc._ls = tmpl;
    int [] tmpi = _xs; _xs = nc._xs; nc._xs = tmpi;
    tmpi = _id; _id = nc._id; nc._id = tmpi;
    double[] tmpd = _ds; _ds = nc._ds; nc._ds = tmpd;
    int tmp = _sparseLen; _sparseLen=nc._sparseLen; nc._sparseLen=tmp;
    tmp = _len; _len = nc._len; nc._len = tmp;
    add(nc);
  }
  // Fast-path append long data
  // Appends the fixed-point value l*10^x. In sparse mode a zero value is
  // implicit: only _len grows.
  void append2( long l, int x ) {
    if(_id == null || l != 0){
      if(_ls == null || sparseLen() == _ls.length) {
        append2slow();
        // again call append2 since calling append2slow might have changed things (eg might have switched to sparse and l could be 0)
        append2(l,x);
        return;
      }
      _ls[sparseLen()] = l;
      _xs[sparseLen()] = x;
      if(_id != null)_id[sparseLen()] = _len; // record row of this non-zero
      set_sparseLen(sparseLen() + 1);
    }
    set_len(_len + 1);
    assert sparseLen() <= _len;
  }
  // Slow-path append data
  // Grow (or first-allocate) the double buffer _ds; may opportunistically
  // switch to sparse representation when mostly zeros.
  private void append2slowd() {
    if( sparseLen() > FileVec.DFLT_CHUNK_SIZE )
      throw new ArrayIndexOutOfBoundsException(sparseLen());
    assert _ls==null; // doubles and fixed-point longs are mutually exclusive
    if(_ds != null && _ds.length > 0){
      if(_id == null){ // check for sparseness
        int nzs = 0; // assume one non-zero for the element currently being stored
        for(double d:_ds)if(d != 0)++nzs;
        if((nzs+1)*_sparseRatio < _len)
          set_sparse(nzs);
      } else _id = MemoryManager.arrayCopyOf(_id, sparseLen() << 1);
      _ds = MemoryManager.arrayCopyOf(_ds, sparseLen() << 1);
    } else {
      alloc_doubles(4);
      if (sparse()) alloc_indices(4);
    }
    assert sparseLen() == 0 || _ds.length > sparseLen() :"_ds.length = " + _ds.length + ", _len = " + sparseLen();
  }
  // Slow-path append data
  // Grow (or first-allocate) the paired UUID buffers _ls (low bits) and
  // _ds (high bits, bit-cast). Converts an all-NA long column on the fly.
  private void append2slowUUID() {
    if( sparseLen() > FileVec.DFLT_CHUNK_SIZE )
      throw new ArrayIndexOutOfBoundsException(sparseLen());
    if( _ds==null && _ls!=null ) { // This can happen for columns with all NAs and then a UUID
      _xs=null;
      alloc_doubles(sparseLen());
      // Rewrite the existing NA rows as canonical UUID NAs.
      Arrays.fill(_ls,C16Chunk._LO_NA);
      Arrays.fill(_ds,Double.longBitsToDouble(C16Chunk._HI_NA));
    }
    if( _ls != null && _ls.length > 0 ) {
      _ls = MemoryManager.arrayCopyOf(_ls, sparseLen() <<1);
      _ds = MemoryManager.arrayCopyOf(_ds, sparseLen() <<1);
    } else {
      alloc_mantissa(4);
      alloc_doubles(4);
    }
    assert sparseLen() == 0 || _ls.length > sparseLen() :"_ls.length = " + _ls.length + ", _len = " + sparseLen();
  }
  // Slow-path append string
  // Grow (or first-allocate) the string-offset array _is; may switch the
  // chunk to/from sparse representation based on the NA ratio.
  private void append2slowstr() {
    if( sparseLen() > FileVec.DFLT_CHUNK_SIZE )
      throw new ArrayIndexOutOfBoundsException(sparseLen());
    // In case of all NAs and then a string, convert NAs to string NAs
    if (_xs != null) {
      _xs = null; _ls = null;
      alloc_str_indices(sparseLen());
      Arrays.fill(_is,-1); // -1 marks NA in the growth region
    }
    if(_is != null && _is.length > 0){
      // Check for sparseness
      if(_id == null){
        int nzs = 0; // assume one non-null for the element currently being stored
        for( int i:_is) if( i != -1 ) ++nzs;
        if( (nzs+1)*_sparseRatio < _len)
          set_sparse(nzs);
      } else {
        // Already sparse: densify if no longer worth it, else grow the index array.
        if((_sparseRatio*(_sparseLen) >> 1) > _len)  cancel_sparse();
        else _id = MemoryManager.arrayCopyOf(_id,_sparseLen<<1);
      }
      _is = MemoryManager.arrayCopyOf(_is, sparseLen()<<1);
      /* initialize the memory extension with -1s */
      for (int i = sparseLen(); i < _is.length; i++) _is[i] = -1;
    } else {
      _is = MemoryManager.malloc4 (4);
      /* initialize everything with -1s */
      for (int i = 0; i < _is.length; i++) _is[i] = -1;
      if (sparse()) alloc_indices(4);
    }
    // NOTE(review): the assert message below says "_ls.length" but reports _is.length.
    assert sparseLen() == 0 || _is.length > sparseLen():"_ls.length = " + _is.length + ", _len = " + sparseLen();
  }
  // Slow-path append data
  // Grow (or first-allocate) the fixed-point buffers _ls/_xs; may switch
  // to sparse when mostly zeros, or densify when no longer sparse enough.
  private void append2slow( ) {
    if( sparseLen() > FileVec.DFLT_CHUNK_SIZE )
      throw new ArrayIndexOutOfBoundsException(sparseLen());
    assert _ds==null; // fixed-point longs and doubles are mutually exclusive
    if(_ls != null && _ls.length > 0){
      if(_id == null){ // check for sparseness
        int nzs = 0;
        for(int i = 0; i < _ls.length; ++i) if(_ls[i] != 0 || _xs[i] != 0)++nzs;
        if((nzs+1)*_sparseRatio < _len){
          set_sparse(nzs);
          assert sparseLen() == 0 || sparseLen() <= _ls.length:"_len = " + sparseLen() + ", _ls.length = " + _ls.length + ", nzs = " + nzs + ", len2 = " + _len;
          assert _id.length == _ls.length;
          assert sparseLen() <= _len;
          return;
        }
      } else {
        // verify we're still sufficiently sparse
        if((_sparseRatio*(sparseLen()) >> 1) > _len)  cancel_sparse();
        else _id = MemoryManager.arrayCopyOf(_id, sparseLen() <<1);
      }
      _ls = MemoryManager.arrayCopyOf(_ls, sparseLen() <<1);
      _xs = MemoryManager.arrayCopyOf(_xs, sparseLen() <<1);
    } else {
      alloc_mantissa(4);
      alloc_exponent(4);
      if (_id != null) alloc_indices(4);
    }
    assert sparseLen() == 0 || sparseLen() < _ls.length:"_len = " + sparseLen() + ", _ls.length = " + _ls.length;
    assert _id == null || _id.length == _ls.length;
    assert sparseLen() <= _len;
  }
  // Do any final actions on a completed NewVector. Mostly: compress it, and
  // do a DKV put on an appropriate Key. The original NewVector goes dead
  // (does not live on inside the K/V store).
  /** Compress this NewChunk and, when backed by an AppendableVec, register the
   *  closed chunk with it. Returns the compressed Chunk. */
  public Chunk new_close() {
    Chunk chk = compress();
    if(_vec instanceof AppendableVec)
      ((AppendableVec)_vec).closeChunk(this);
    return chk;
  }
  /** Close this chunk at its own chunk index {@code _cidx}. */
  public void close(Futures fs) { close(_cidx,fs); }
  /** Convert the fixed-point representation (_ls mantissa, _xs exponent) into
   *  a plain double array _ds; NA and enum entries become NaN. */
  protected void switch_to_doubles(){
    assert _ds == null;
    double [] ds = MemoryManager.malloc8d(sparseLen());
    for(int i = 0; i < sparseLen(); ++i)
      if(isNA2(i) || isEnum2(i)) ds[i] = Double.NaN;
      else ds[i] = _ls[i]*PrettyPrint.pow10(_xs[i]);
    _ls = null;
    _xs = null;
    _ds = ds;
  }
protected void set_sparse(int nzeros){
if(sparseLen() == nzeros && _len != 0)return;
if(_id != null) { // we have sparse representation but some 0s in it!
int[] id = MemoryManager.malloc4(nzeros);
int j = 0;
if (_ds != null) {
double[] ds = MemoryManager.malloc8d(nzeros);
for (int i = 0; i < sparseLen(); ++i) {
if (_ds[i] != 0) {
ds[j] = _ds[i];
id[j] = _id[i];
++j;
}
}
_ds = ds;
} else if (_is != null) {
int [] is = MemoryManager.malloc4(nzeros);
for (int i = 0; i < sparseLen(); i++) {
if (_is[i] != -1) {
is[j] = _is[i];
id[j] = id[i];
++j;
}
}
} else {
long [] ls = MemoryManager.malloc8(nzeros);
int [] xs = MemoryManager.malloc4(nzeros);
for(int i = 0; i < sparseLen(); ++i){
if(_ls[i] != 0){
ls[j] = _ls[i];
xs[j] = _xs[i];
id[j] = _id[i];
++j;
}
}
_ls = ls;
_xs = xs;
}
_id = id;
assert j == nzeros;
set_sparseLen(nzeros);
return;
}
assert sparseLen() == _len :"_len = " + sparseLen() + ", _len2 = " + _len + ", nzeros = " + nzeros;
int zs = 0;
if(_is != null) {
assert nzeros < _is.length;
_id = MemoryManager.malloc4(_is.length);
for (int i = 0; i < sparseLen(); i++) {
if (_is[i] == -1) zs++;
else {
_is[i-zs] = _is[i];
_id[i-zs] = i;
}
}
} else if(_ds == null){
if (_len == 0) {
_ls = new long[0];
_xs = new int[0];
_id = new int[0];
set_sparseLen(0);
return;
} else {
assert nzeros < sparseLen();
_id = alloc_indices(_ls.length);
for (int i = 0; i < sparseLen(); ++i) {
if (_ls[i] == 0 && _xs[i] == 0) ++zs;
else {
_ls[i - zs] = _ls[i];
_xs[i - zs] = _xs[i];
_id[i - zs] = i;
}
}
}
} else {
assert nzeros < _ds.length;
_id = alloc_indices(_ds.length);
for(int i = 0; i < sparseLen(); ++i){
if(_ds[i] == 0)++zs;
else {
_ds[i-zs] = _ds[i];
_id[i-zs] = i;
}
}
}
assert zs == (sparseLen() - nzeros);
set_sparseLen(nzeros);
}
  /** Inflate this chunk back to dense representation: scatter the stored
   *  values to their _id row positions and drop the row-index array. */
  protected void cancel_sparse(){
    if(sparseLen() != _len){
      if(_is != null){
        int [] is = MemoryManager.malloc4(_len);
        for(int i = 0; i < _len; i++) is[i] = -1; // default all rows to NA
        for (int i = 0; i < sparseLen(); i++) is[_id[i]] = _is[i];
        _is = is;
      } else if(_ds == null){
        int []  xs = MemoryManager.malloc4(_len);
        long [] ls = MemoryManager.malloc8(_len);
        for(int i = 0; i < sparseLen(); ++i){
          xs[_id[i]] = _xs[i];
          ls[_id[i]] = _ls[i];
        }
        _xs = xs;
        _ls = ls;
      } else {
        double [] ds = MemoryManager.malloc8d(_len);
        for(int i = 0; i < sparseLen(); ++i) ds[_id[i]] = _ds[i];
        _ds = ds;
      }
      set_sparseLen(_len);
    }
    _id = null;
  }
  // Study this NewVector and determine an appropriate compression scheme.
  // Return the data so compressed.
  public Chunk compress() {
    Chunk res = compress2();
    // force everything to null after compress to free up the memory
    _id = null;
    _xs = null;
    _ds = null;
    _ls = null;
    _is = null;
    _ss = null;
    return res;
  }
  // Width of the scaled-integer value range [lemin, lemax].
  // If the true width overflows a signed 64-bit long, clamp to Long.MAX_VALUE.
  private static long leRange(long lemin, long lemax){
    if(lemin < 0 && lemax >= (Long.MAX_VALUE + lemin))
      return Long.MAX_VALUE; // overflow: clamp to the largest representable range
    long res = lemax - lemin;
    assert res >= 0;
    return res;
  }
  /**
   * Core compression decision tree: inspect the accumulated data and pick the
   * smallest Chunk subclass that can represent it losslessly — constant,
   * boolean bit-vector, sparse, scaled/biased byte/short/int, long, double, etc.
   */
  private Chunk compress2() {
    // Check for basic mode info: all missing or all strings or mixed stuff
    byte mode = type();
    if( mode==AppendableVec.NA ) // ALL NAs, nothing to do
      return new C0DChunk(Double.NaN, sparseLen());
    if( mode==AppendableVec.STRING )
      return new CStrChunk(_sslen, _ss, sparseLen(), _len, _is);
    boolean rerun=false;
    if(mode == AppendableVec.ENUM){
      // Enum column: clear the enum-flag exponents; non-enum numbers become NAs.
      for( int i=0; i< sparseLen(); i++ )
        if(isEnum2(i))
          _xs[i] = 0;
        else if(!isNA2(i)){
          setNA_impl2(i);
          ++_naCnt;
        }
      // Smack any mismatched string/numbers
    } else if(mode == AppendableVec.NUMBER){
      for( int i=0; i< sparseLen(); i++ )
        if(isEnum2(i)) {
          setNA_impl2(i);
          rerun = true;
        }
    }
    if( rerun ) { _naCnt = -1; type(); } // Re-run rollups after dropping all numbers/enums
    boolean sparse = false;
    // sparse? treat as sparse iff we have at least MIN_SPARSE_RATIOx more zeros than nonzeros
    if(_sparseRatio*(_naCnt + _nzCnt) < _len) {
      set_sparse(_naCnt + _nzCnt);
      sparse = true;
    } else if (sparseLen() != _len)
      cancel_sparse();
    // If the data is UUIDs there's not much compression going on
    if( _ds != null && _ls != null )
      return chunkUUID();
    // cut out the easy all NaNs case
    if(_naCnt == _len) return new C0DChunk(Double.NaN,_len);
    // If the data was set8 as doubles, we do a quick check to see if it's
    // plain longs. If not, we give up and use doubles.
    if( _ds != null ) {
      int i; // check if we can flip to ints
      for (i=0; i < sparseLen(); ++i)
        if (!Double.isNaN(_ds[i]) && (double) (long) _ds[i] != _ds[i])
          break;
      boolean isInteger = i == sparseLen();
      boolean isConstant = !sparse || sparseLen() == 0;
      double constVal = 0;
      if (!sparse) { // check the values, sparse with some nonzeros can not be constant - has 0s and (at least 1) nonzero
        constVal = _ds[0];
        for(int j = 1; j < _len; ++j)
          if(_ds[j] != constVal) {
            isConstant = false;
            break;
          }
      }
      if(isConstant)
        return isInteger? new C0LChunk((long)constVal, _len): new C0DChunk(constVal,_len);
      if(!isInteger)
        return sparse? new CXDChunk(_len, sparseLen(), 8, bufD(8)): chunkD();
      // Else flip to longs
      _ls = new long[_ds.length];
      _xs = new int [_ds.length];
      double [] ds = _ds;
      _ds = null;
      final int naCnt = _naCnt;
      for( i=0; i< sparseLen(); i++ ) // Inject all doubles into longs
        if( Double.isNaN(ds[i]) )setNA_impl2(i);
        else _ls[i] = (long)ds[i];
      // setNA_impl2 will set _naCnt to -1!
      // we already know what the naCnt is (it did not change!) so set it back to correct value
      _naCnt = naCnt;
    }
    // IF (_len > _sparseLen) THEN Sparse
    // Check for compressed *during appends*. Here we know:
    // - No specials; _xs[]==0.
    // - No floats; _ds==null
    // - NZ length in _sparseLen, actual length in _len.
    // - Huge ratio between _len and _sparseLen, and we do NOT want to inflate to
    //   the larger size; we need to keep it all small all the time.
    // - Rows in _xs
    // Data in some fixed-point format, not doubles
    // See if we can sanely normalize all the data to the same fixed-point.
    int xmin = Integer.MAX_VALUE; // min exponent found
    boolean floatOverflow = false;
    double min = Double.POSITIVE_INFINITY;
    double max = Double.NEGATIVE_INFINITY;
    int p10iLength = PrettyPrint.powers10i.length;
    long llo=Long .MAX_VALUE, lhi=Long .MIN_VALUE;
    int xlo=Integer.MAX_VALUE, xhi=Integer.MIN_VALUE;
    for( int i=0; i< sparseLen(); i++ ) {
      if( isNA2(i) ) continue;
      long l = _ls[i];
      int x = _xs[i];
      assert x != Integer.MIN_VALUE:"l = " + l + ", x = " + x;
      if( x==Integer.MIN_VALUE+1) x=0; // Replace enum flag with no scaling
      assert l!=0 || x==0:"l == 0 while x = " + x + " ls = " + Arrays.toString(_ls); // Exponent of zero is always zero
      long t; // Remove extra scaling
      while( l!=0 && (t=l/10)*10==l ) { l=t; x++; }
      // Compute per-chunk min/max
      double d = l*PrettyPrint.pow10(x);
      if( d < min ) { min = d; llo=l; xlo=x; }
      if( d > max ) { max = d; lhi=l; xhi=x; }
      floatOverflow = l < Integer.MIN_VALUE+1 || l > Integer.MAX_VALUE;
      xmin = Math.min(xmin,x);
    }
    if(sparse){ // sparse? then compare vs implied 0s
      if( min > 0 ) { min = 0; llo=0; xlo=0; }
      if( max < 0 ) { max = 0; lhi=0; xhi=0; }
      xmin = Math.min(xmin,0);
    }
    // Constant column?
    if( _naCnt==0 && (min==max)) {
      if (llo == lhi && xlo == 0 && xhi == 0)
        return new C0LChunk(llo, _len);
      else if ((long)min == min)
        return new C0LChunk((long)min, _len);
      else
        return new C0DChunk(min, _len);
    }
    // Compute min & max, as scaled integers in the xmin scale.
    // Check for overflow along the way
    boolean overflow = ((xhi-xmin) >= p10iLength) || ((xlo-xmin) >= p10iLength);
    long lemax=0, lemin=0;
    if( !overflow ) { // Can at least get the power-of-10 without overflow
      long pow10 = PrettyPrint.pow10i(xhi-xmin);
      lemax = lhi*pow10;
      // Hacker's Delight, Section 2-13, checking overflow.
      // Note that the power-10 is always positive, so the test devolves this:
      if( (lemax/pow10) != lhi ) overflow = true;
      // Note that xlo might be > xmin; e.g. { 101e-49 , 1e-48}.
      long pow10lo = PrettyPrint.pow10i(xlo-xmin);
      lemin = llo*pow10lo;
      if( (lemin/pow10lo) != llo ) overflow = true;
    }
    // Boolean column?
    if (max == 1 && min == 0 && xmin == 0 && !overflow) {
      if(sparse) { // Very sparse?
        return _naCnt==0
          ? new CX0Chunk(_len, sparseLen(),bufS(0))// No NAs, can store as sparse bitvector
          : new CXIChunk(_len, sparseLen(),1,bufS(1)); // have NAs, store as sparse 1byte values
      }
      int bpv = _enumCnt +_naCnt > 0 ? 2 : 1; // Bit-vector
      byte[] cbuf = bufB(bpv);
      return new CBSChunk(cbuf, cbuf[0], cbuf[1]);
    }
    final boolean fpoint = xmin < 0 || min < Long.MIN_VALUE || max > Long.MAX_VALUE;
    if( sparse ) {
      if(fpoint) return new CXDChunk(_len, sparseLen(),8,bufD(8));
      int sz = 8;
      if( Short.MIN_VALUE <= min && max <= Short.MAX_VALUE ) sz = 2;
      else if( Integer.MIN_VALUE <= min && max <= Integer.MAX_VALUE ) sz = 4;
      return new CXIChunk(_len, sparseLen(),sz,bufS(sz));
    }
    // Exponent scaling: replacing numbers like 1.3 with 13e-1. '13' fits in a
    // byte and we scale the column by 0.1. A set of numbers like
    // {1.2,23,0.34} then is normalized to always be represented with 2 digits
    // to the right: {1.20,23.00,0.34} and we scale by 100: {120,2300,34}.
    // This set fits in a 2-byte short.
    // We use exponent-scaling for bytes & shorts only; it's uncommon (and not
    // worth it) for larger numbers. We need to get the exponents to be
    // uniform, so we scale up the largest lmax by the largest scale we need
    // and if that fits in a byte/short - then it's worth compressing. Other
    // wise we just flip to a float or double representation.
    if( overflow || (fpoint && floatOverflow) || -35 > xmin || xmin > 35 )
      return chunkD();
    final long leRange = leRange(lemin,lemax);
    if( fpoint ) {
      if( (int)lemin == lemin && (int)lemax == lemax ) {
        if(leRange < 255) // Fits in scaled biased byte?
          return new C1SChunk( bufX(lemin,xmin,C1SChunk._OFF,0),lemin,PrettyPrint.pow10(xmin));
        if(leRange < 65535) { // we use signed 2B short, add -32k to the bias!
          long bias = 32767 + lemin;
          return new C2SChunk( bufX(bias,xmin,C2SChunk._OFF,1),bias,PrettyPrint.pow10(xmin));
        }
      }
      if(leRange < 4294967295l) {
        long bias = 2147483647l + lemin;
        return new C4SChunk( bufX(bias,xmin,C4SChunk._OFF,2),bias,PrettyPrint.pow10(xmin));
      }
      return chunkD();
    } // else an integer column
    // Compress column into a byte
    if(xmin == 0 && 0<=lemin && lemax <= 255 && ((_naCnt + _enumCnt)==0) )
      return new C1NChunk( bufX(0,0,C1NChunk._OFF,0));
    if( lemin < Integer.MIN_VALUE ) return new C8Chunk( bufX(0,0,0,3));
    if( leRange < 255 ) { // Span fits in a byte?
      if(0 <= min && max < 255 ) // Span fits in an unbiased byte?
        return new C1Chunk( bufX(0,0,C1Chunk._OFF,0));
      return new C1SChunk( bufX(lemin,xmin,C1SChunk._OFF,0),lemin,PrettyPrint.pow10i(xmin));
    }
    // Compress column into a short
    if( leRange < 65535 ) { // Span fits in a biased short?
      if( xmin == 0 && Short.MIN_VALUE < lemin && lemax <= Short.MAX_VALUE ) // Span fits in an unbiased short?
        return new C2Chunk( bufX(0,0,C2Chunk._OFF,1));
      long bias = (lemin-(Short.MIN_VALUE+1));
      return new C2SChunk( bufX(bias,xmin,C2SChunk._OFF,1),bias,PrettyPrint.pow10i(xmin));
    }
    // Compress column into ints
    if( Integer.MIN_VALUE < min && max <= Integer.MAX_VALUE )
      return new C4Chunk( bufX(0,0,0,2));
    return new C8Chunk( bufX(0,0,0,3));
  }
  // NA sentinel values indexed by log2 of the value byte-size (1,2,4,8 bytes).
  private static long [] NAS = {C1Chunk._NA,C2Chunk._NA,C4Chunk._NA,C8Chunk._NA};
  // Compute a sparse integer buffer
  // Layout per element: a 2- or 4-byte row id followed by a valsz-byte value
  // (valsz==0 means a pure bit-set: the value 1 is implicit).
  private byte[] bufS(final int valsz){
    int log = 0;
    while((1 << log) < valsz)++log;
    assert valsz == 0 || (1 << log) == valsz;
    final int ridsz = _len >= 65535?4:2; // row-id width depends on chunk length
    final int elmsz = ridsz + valsz;
    int off = CXIChunk._OFF;
    byte [] buf = MemoryManager.malloc1(off + sparseLen() *elmsz,true);
    for( int i=0; i< sparseLen(); i++, off += elmsz ) {
      if(ridsz == 2)
        UnsafeUtils.set2(buf,off,(short)_id[i]);
      else
        UnsafeUtils.set4(buf,off,_id[i]);
      if(valsz == 0){
        assert _xs[i] == 0 && _ls[i] == 1; // bit-set form: values must be exactly 1
        continue;
      }
      assert _xs[i] == Integer.MIN_VALUE || _xs[i] >= 0:"unexpected exponent " + _xs[i]; // assert we have int or NA
      final long lval = _xs[i] == Integer.MIN_VALUE ? NAS[log] : _ls[i]*PrettyPrint.pow10i(_xs[i]);
      switch(valsz){
        case 1:
          buf[off+ridsz] = (byte)lval;
          break;
        case 2:
          short sval = (short)lval;
          UnsafeUtils.set2(buf,off+ridsz,sval);
          break;
        case 4:
          int ival = (int)lval;
          UnsafeUtils.set4(buf, off + ridsz, ival);
          break;
        case 8:
          UnsafeUtils.set8(buf, off + ridsz, lval);
          break;
        default:
          throw H2O.fail();
      }
    }
    assert off==buf.length;
    return buf;
  }
  // Compute a sparse float buffer
  // Layout per element: a 2- or 4-byte row id followed by a 4- or 8-byte
  // floating-point value; NAs become NaN.
  private byte[] bufD(final int valsz){
    int log = 0;
    while((1 << log) < valsz)++log;
    assert (1 << log) == valsz;
    final int ridsz = _len >= 65535?4:2; // row-id width depends on chunk length
    final int elmsz = ridsz + valsz;
    int off = CXDChunk._OFF;
    byte [] buf = MemoryManager.malloc1(off + sparseLen() *elmsz,true);
    for( int i=0; i< sparseLen(); i++, off += elmsz ) {
      if(ridsz == 2)
        UnsafeUtils.set2(buf,off,(short)_id[i]);
      else
        UnsafeUtils.set4(buf,off,_id[i]);
      // Value may come from the double buffer or the fixed-point buffers.
      final double dval = _ds == null?isNA2(i)?Double.NaN:_ls[i]*PrettyPrint.pow10(_xs[i]):_ds[i];
      switch(valsz){
        case 4:
          UnsafeUtils.set4f(buf, off + ridsz, (float) dval);
          break;
        case 8:
          UnsafeUtils.set8d(buf, off + ridsz, dval);
          break;
        default:
          throw H2O.fail();
      }
    }
    assert off==buf.length;
    return buf;
  }
  // Compute a compressed integer buffer
  // Writes _len values of 2^log bytes each, rebased by -bias and rescaled to
  // the 'scale' exponent; sparse rows not present in _id are emitted as -bias (zero).
  private byte[] bufX( long bias, int scale, int off, int log ) {
    byte[] bs = new byte[(_len <<log)+off];
    int j = 0; // cursor into the (possibly sparse) stored values
    for( int i=0; i< _len; i++ ) {
      long le = -bias;
      if(_id == null || _id.length == 0 || (j < _id.length && _id[j] == i)){
        if( isNA2(j) ) {
          le = NAS[log];
        } else {
          int x = (_xs[j]==Integer.MIN_VALUE+1 ? 0 : _xs[j])-scale;
          le += x >= 0
              ? _ls[j]*PrettyPrint.pow10i( x)
              : _ls[j]/PrettyPrint.pow10i(-x);
        }
        ++j;
      }
      switch( log ) {
        case 0: bs [i +off] = (byte)le ; break;
        case 1: UnsafeUtils.set2(bs,(i<<1)+off, (short)le); break;
        case 2: UnsafeUtils.set4(bs, (i << 2) + off, (int) le); break;
        case 3: UnsafeUtils.set8(bs, (i << 3) + off, le); break;
        default: throw H2O.fail();
      }
    }
    assert j == sparseLen() :"j = " + j + ", len = " + sparseLen() + ", len2 = " + _len + ", id[j] = " + _id[j];
    return bs;
  }
// Compute a compressed double buffer
private Chunk chunkD() {
HashMap<Long,Byte> hs = new HashMap<>(CUDChunk.MAX_UNIQUES);
Byte dummy = 0;
final byte [] bs = MemoryManager.malloc1(_len *8,true);
int j = 0;
boolean fitsInUnique = true;
for(int i = 0; i < _len; ++i){
double d = 0;
if(_id == null || _id.length == 0 || (j < _id.length && _id[j] == i)) {
d = _ds != null?_ds[j]:(isNA2(j)||isEnum(j))?Double.NaN:_ls[j]*PrettyPrint.pow10(_xs[j]);
++j;
}
if (fitsInUnique) {
if (hs.size() < CUDChunk.MAX_UNIQUES) //still got space
hs.put(new Long(Double.doubleToLongBits(d)),dummy); //store doubles as longs to avoid NaN comparison issues during extraction
else if (hs.size() == CUDChunk.MAX_UNIQUES) //full, but might not need more space because of repeats
fitsInUnique = hs.containsKey(Double.doubleToLongBits(d));
else //full - no longer try to fit into CUDChunk
fitsInUnique = false;
}
UnsafeUtils.set8d(bs, 8*i, d);
}
assert j == sparseLen() :"j = " + j + ", _len = " + sparseLen();
if (fitsInUnique && CUDChunk.computeByteSize(hs.size(), len()) < 0.8 * bs.length)
return new CUDChunk(bs, hs, len());
else
return new C8DChunk(bs);
}
  // Compute a compressed UUID buffer
  // 16 bytes per row: low 64 bits from _ls, high 64 bits bit-cast from _ds.
  private Chunk chunkUUID() {
    final byte [] bs = MemoryManager.malloc1(_len *16,true);
    int j = 0; // cursor into the (possibly sparse) stored values
    for( int i = 0; i < _len; ++i ) {
      long lo = 0, hi=0;
      if( _id == null || _id.length == 0 || (j < _id.length && _id[j] == i ) ) {
        lo = _ls[j];
        hi = Double.doubleToRawLongBits(_ds[j++]);
        // NOTE(review): _xs[j] is read AFTER j++ so this inspects the NEXT
        // row's exponent; it looks like the current row was intended — TODO confirm.
        if( _xs != null && _xs[j] == Integer.MAX_VALUE){
          lo = Long.MIN_VALUE; hi = 0; // Canonical NA value
        }
      }
      UnsafeUtils.set8(bs, 16*i , lo);
      UnsafeUtils.set8(bs, 16 * i + 8, hi);
    }
    assert j == sparseLen() :"j = " + j + ", _len = " + sparseLen();
    return new C16Chunk(bs);
  }
  // Compute compressed boolean buffer
  // Packs 0/1 values at bpv (1 or 2) bits per value; bpv==2 reserves a bit
  // pattern for NA. Header byte 0 holds the trailing-bit gap, byte 1 holds bpv.
  private byte[] bufB(int bpv) {
    assert bpv == 1 || bpv == 2 : "Only bit vectors with/without NA are supported";
    final int off = CBSChunk._OFF;
    int clen = off + CBSChunk.clen(_len, bpv);
    byte bs[] = new byte[clen];
    // Save the gap = number of unfilled bits and bpv value
    bs[0] = (byte) (((_len *bpv)&7)==0 ? 0 : (8-((_len *bpv)&7)));
    bs[1] = (byte) bpv;
    // Dense bitvector
    int boff = 0;
    byte b = 0;
    int idx = CBSChunk._OFF;
    int j = 0; // cursor into the (possibly sparse) stored values
    for (int i=0; i< _len; i++) {
      byte val = 0;
      if(_id == null || (j < _id.length && _id[j] == i)) {
        assert bpv == 2 || !isNA2(j); // NAs require the 2-bit encoding
        val = (byte)(isNA2(j)?CBSChunk._NA:_ls[j]);
        ++j;
      }
      if( bpv==1 )
        b = CBSChunk.write1b(b, val, boff);
      else
        b = CBSChunk.write2b(b, val, boff);
      boff += bpv;
      if (boff>8-bpv) { assert boff == 8; bs[idx] = b; boff = 0; b = 0; idx++; }
    }
    assert j == sparseLen();
    assert bs[0] == (byte) (boff == 0 ? 0 : 8-boff):"b[0] = " + bs[0] + ", boff = " + boff + ", bpv = " + bpv;
    // Flush last byte
    if (boff>0) bs[idx] = b;
    return bs;
  }
  // Set & At on NewChunks are weird: only used after inflating some other
  // chunk. At this point the NewChunk is full size, no more appends allowed,
  // and the xs exponent array should be only full of zeros. Accesses must be
  // in-range and refer to the inflated values of the original Chunk.
  @Override boolean set_impl(int i, long l) {
    if( _ds != null ) return set_impl(i,(double)l); // already a double column
    if(sparseLen() != _len){ // sparse?
      int idx = Arrays.binarySearch(_id,0, sparseLen(),i);
      if(idx >= 0)i = idx;
      else cancel_sparse(); // for now don't bother setting the sparse value
    }
    _ls[i]=l; _xs[i]=0;
    _naCnt = -1; // invalidate cached rollups
    return true;
  }
  /** Set row i to double d, converting the chunk to doubles first if needed. */
  @Override public boolean set_impl(int i, double d) {
    if(_ds == null){
      assert sparseLen() == 0 || _ls != null;
      switch_to_doubles();
    }
    if(sparseLen() != _len){ // sparse?
      int idx = Arrays.binarySearch(_id,0, sparseLen(),i);
      if(idx >= 0)i = idx;
      else cancel_sparse(); // for now don't bother setting the sparse value
    }
    assert i < sparseLen();
    _ds[i] = d;
    _naCnt = -1; // invalidate cached rollups
    return true;
  }
  /** Set row i to float f by widening to double. */
  @Override boolean set_impl(int i, float f) { return set_impl(i,(double)f); }
  /** Set row i to string str, converting the chunk to string storage if needed. */
  @Override boolean set_impl(int i, String str) {
    if(_is == null && _len > 0) {
      assert sparseLen() == 0;
      alloc_str_indices(_len);
      Arrays.fill(_is,-1); // all rows start as NA
    }
    if(sparseLen() != _len){ // sparse?
      int idx = Arrays.binarySearch(_id,0, sparseLen(),i);
      if(idx >= 0)i = idx;
      else cancel_sparse(); // for now don't bother setting the sparse value
    }
    _is[i] = _sslen; // new string bytes are appended at the end of _ss
    append_ss(str);
    return true;
  }
  /** Mark stored slot i as NA (long form: MAX_VALUE mantissa + MIN_VALUE
   *  exponent sentinel; double form: NaN). Invalidates cached rollups. */
  protected final boolean setNA_impl2(int i) {
    if( isNA2(i) ) return true;
    if( _ls != null ) { _ls[i] = Long.MAX_VALUE; _xs[i] = Integer.MIN_VALUE; }
    if( _ds != null ) { _ds[i] = Double.NaN; }
    _naCnt = -1;
    return true;
  }
  /** Mark logical row i as NA, translating through the sparse index if present. */
  @Override boolean setNA_impl(int i) {
    if( isNA_impl(i) ) return true;
    if(sparseLen() != _len){
      int idx = Arrays.binarySearch(_id,0, sparseLen(),i);
      if(idx >= 0) i = idx;
      else cancel_sparse(); // todo - do not necessarily cancel sparse here
    }
    return setNA_impl2(i);
  }
  /** Long value at logical row i; sparse rows not stored are zero. Throws on NA. */
  @Override public long at8_impl( int i ) {
    if( _len != sparseLen()) {
      int idx = Arrays.binarySearch(_id,0, sparseLen(),i);
      if(idx >= 0) i = idx;
      else return 0; // implicit sparse zero
    }
    if(isNA2(i))throw new RuntimeException("Attempting to access NA as integer value.");
    if( _ls == null ) return (long)_ds[i];
    return _ls[i]*PrettyPrint.pow10i(_xs[i]);
  }
  /** Double value at logical row i; sparse rows not stored are zero. */
  @Override public double atd_impl( int i ) {
    if( _len != sparseLen()) {
      int idx = Arrays.binarySearch(_id,0, sparseLen(),i);
      if(idx >= 0) i = idx;
      else return 0; // implicit sparse zero
    }
    // if exponent is Integer.MIN_VALUE (for missing value) or >=0, then go the integer path (at8_impl)
    // negative exponents need to be handled right here
    if( _ds == null ) return isNA2(i) || _xs[i] >= 0 ? at8_impl(i) : _ls[i]*Math.pow(10,_xs[i]);
    assert _xs==null; return _ds[i];
  }
  /** Low 64 bits of the UUID at stored index idx. Throws on the NA sentinel. */
  @Override protected long at16l_impl(int idx) {
    if(_ls[idx] == C16Chunk._LO_NA) throw new RuntimeException("Attempting to access NA as integer value.");
    return _ls[idx];
  }
  /** High 64 bits of the UUID at stored index idx (bit-cast out of _ds). Throws on NA. */
  @Override protected long at16h_impl(int idx) {
    long hi = Double.doubleToRawLongBits(_ds[idx]);
    if(hi == C16Chunk._HI_NA) throw new RuntimeException("Attempting to access NA as integer value.");
    return hi;
  }
  /** True iff logical row i is NA; rows absent from the sparse index are zeros, not NA. */
  @Override public boolean isNA_impl( int i ) {
    if( _len != sparseLen()) {
      int idx = Arrays.binarySearch(_id,0, sparseLen(),i);
      if(idx >= 0) i = idx;
      else return false; // implicit sparse zero is not NA
    }
    return isNA2(i);
  }
  /** String value at logical row i, or null for NA / rows absent from the sparse index. */
  @Override public ValueString atStr_impl( ValueString vstr, int i ) {
    if( sparseLen() != _len ) {
      int idx = Arrays.binarySearch(_id,0,sparseLen(),i);
      if(idx >= 0) i = idx;
      else return null;
    }
    if( _is[i] == CStrChunk.NA ) return null;
    int len = 0;
    while( _ss[_is[i] + len] != 0 ) len++; // strings in _ss are NUL-terminated
    return vstr.set(_ss, _is[i], len);
  }
  // NewChunks are transient builders and are never deserialized directly.
  @Override public NewChunk read_impl(AutoBuffer bb) { throw H2O.fail(); }
  // NewChunks are transient builders and are never serialized directly.
  @Override public AutoBuffer write_impl(AutoBuffer bb) { throw H2O.fail(); }
  // A NewChunk is already the inflated form; inflating it again is a logic error.
  @Override public NewChunk inflate_impl(NewChunk nc) { throw H2O.fail(); }
  /** Debug summary: reports the stored (sparse) length. */
  @Override public String toString() { return "NewChunk._len="+ sparseLen(); }
  // We have to explicitly override cidx implementation since we hide _cidx field with new version
  /** @return the chunk index of this NewChunk within its vector */
  @Override
  public int cidx() {
    return _cidx;
  }
}
|
// Review note: consider moving these constants into the class(es) that actually use them
// instead of keeping a separate Formulas holder class.
// Review note: primitive arrays cannot be made immutable; consider an immutable type
// (e.g. an unmodifiable List) for constants, or return defensive copies from the getters.
// Review note: if the constants remain outside the class that needs them, they may need
// to be exposed publicly (or via accessors) to be usable.
/**
 * Interval formulas for musical scales, expressed as the semitone step between
 * successive scale degrees. Each formula spans one octave (steps sum to 12).
 *
 * Fix: the formulas were per-instance mutable arrays returned directly by the
 * getters, so any caller could corrupt the "constants". They are now
 * {@code private static final} master copies and every getter returns a
 * defensive copy ({@code clone()}).
 */
public class Formulas{
    // Formulas for pentatonic scales
    private static final int[] MINOR_PENTATONIC = {3, 2, 2, 3, 2}; // minor pentatonic
    private static final int[] MAJOR_PENTATONIC = {2, 2, 3, 2, 3}; // major pentatonic
    // Formulas for hexatonic scales
    private static final int[] MINOR_BLUES = {3, 2, 1, 1, 3, 2}; // minor blues
    private static final int[] MAJOR_BLUES = {2, 1, 1, 3, 2, 3}; // major blues
    private static final int[] PROMETHEUS = {2, 2, 2, 3, 1, 2}; // prometheus - mystic
    private static final int[] ISTRIAN = {1, 2, 1, 2, 1, 5}; // istrian
    private static final int[] WHOLE_TONE = {2, 2, 2, 2, 2, 2}; // whole tone
    private static final int[] AUGMENTED = {3, 1, 3, 1, 3, 1}; // augmented - symmetrical augmented
    private static final int[] TRITONE = {1, 3, 2, 1, 3, 2}; // tritone
    private static final int[] TWO_SEMITONE_TRITONE = {1, 1, 4, 1, 1, 4}; // two semitone tritone
    // Major derived formulas (the seven diatonic modes)
    private static final int[] LYDIAN = {2, 2, 2, 1, 2, 2, 1}; // lydian
    private static final int[] IONIAN = {2, 2, 1, 2, 2, 2, 1}; // ionian - major
    private static final int[] MIXOLYDIAN = {2, 2, 1, 2, 2, 1, 2}; // mixolydian
    private static final int[] DORIAN = {2, 1, 2, 2, 2, 1, 2}; // dorian
    private static final int[] AEOLIAN = {2, 1, 2, 2, 1, 2, 2}; // aeolian - minor
    private static final int[] PHRYGIAN = {1, 2, 2, 2, 1, 2, 2}; // phrygian
    private static final int[] LOCRIAN = {1, 2, 2, 1, 2, 2, 2}; // locrian
    // Melodic minor derived formulas
    private static final int[] MELODIC_MINOR = {2, 1, 2, 2, 2, 2, 1}; // melodic minor
    private static final int[] DORIAN_FLAT9 = {1, 2, 2, 2, 2, 1, 2}; // dorian flat 9
    private static final int[] LYDIAN_AUGMENTED = {2, 2, 2, 2, 1, 2, 1}; // lydian augmented
    private static final int[] LYDIAN_FLAT7 = {2, 2, 2, 1, 2, 1, 2}; // lydian flat 7
    private static final int[] MIXOLYDIAN_FLAT13 = {2, 2, 1, 2, 1, 2, 2}; // mixolydian flat 13
    private static final int[] SEMI_LOCRIAN = {2, 1, 2, 1, 2, 2, 2}; // semilocrian
    private static final int[] SUPER_LOCRIAN = {1, 2, 1, 2, 2, 2, 2}; // superlocrian
    // Harmonic minor derived formulas
    private static final int[] HARMONIC_MINOR = {2, 1, 2, 2, 1, 3, 1}; // harmonic minor
    private static final int[] LOCRIAN_SHARP7 = {1, 2, 2, 1, 3, 1, 2}; // locrian sharp 7
    private static final int[] IONIAN_AUGMENTED = {2, 2, 1, 3, 1, 2, 1}; // ionian augmented

    // Getters for pentatonic scale formulas (each returns a defensive copy)
    public int[] getMinorPentatonic(){
        return MINOR_PENTATONIC.clone();
    }
    public int[] getMajorPentatonic(){
        return MAJOR_PENTATONIC.clone();
    }
    // Getters for hexatonic scale formulas
    public int[] getMinBlues(){
        return MINOR_BLUES.clone();
    }
    public int[] getMajBlues(){
        return MAJOR_BLUES.clone();
    }
    public int[] getTritone(){
        return TRITONE.clone();
    }
    public int[] getAugmented(){
        return AUGMENTED.clone();
    }
    public int[] getIstrian(){
        return ISTRIAN.clone();
    }
    public int[] getWholeTone(){
        return WHOLE_TONE.clone();
    }
    public int[] getPrometheus(){
        return PROMETHEUS.clone();
    }
    public int[] getTwoSemitoneTritone(){
        return TWO_SEMITONE_TRITONE.clone();
    }
    // Getters for melodic minor derived formulas
    public int[] getMelodicMinor(){
        return MELODIC_MINOR.clone();
    }
    public int[] getDorianFlat9(){
        return DORIAN_FLAT9.clone();
    }
    public int[] getLydianAugmented(){
        return LYDIAN_AUGMENTED.clone();
    }
    public int[] getLydianFlat7(){
        return LYDIAN_FLAT7.clone();
    }
    public int[] getMixolydianFlat13(){
        return MIXOLYDIAN_FLAT13.clone();
    }
    public int[] getSemiLocrian(){
        return SEMI_LOCRIAN.clone();
    }
    public int[] getSuperLocrian(){
        return SUPER_LOCRIAN.clone();
    }
    // Getters for major derived formulas
    public int[] getLydian(){
        return LYDIAN.clone();
    }
    public int[] getIonian(){
        return IONIAN.clone();
    }
    public int[] getMixolydian(){
        return MIXOLYDIAN.clone();
    }
    public int[] getDorian(){
        return DORIAN.clone();
    }
    public int[] getAeolian(){
        return AEOLIAN.clone();
    }
    public int[] getPhrygian(){
        return PHRYGIAN.clone();
    }
    public int[] getLocrian(){
        return LOCRIAN.clone();
    }
    // Getters for harmonic minor derived formulas
    public int[] getHarmonicMinor(){
        return HARMONIC_MINOR.clone();
    }
    public int[] getLocrianSharp7(){
        return LOCRIAN_SHARP7.clone();
    }
    public int[] getIonianAugmented(){
        return IONIAN_AUGMENTED.clone();
    }
}
|
package hudson.model;
import hudson.Util;
import hudson.model.Queue.*;
import hudson.FilePath;
import hudson.util.TimeUnit2;
import hudson.util.InterceptingProxy;
import hudson.security.ACL;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.export.ExportedBean;
import org.kohsuke.stapler.export.Exported;
import org.acegisecurity.context.SecurityContextHolder;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.lang.reflect.Method;
/**
* Thread that executes builds.
*
* @author Kohsuke Kawaguchi
*/
@ExportedBean
public class Executor extends Thread implements ModelObject {
    /** The {@link Computer} this executor belongs to and reports task events to. */
    protected final Computer owner;

    /** Build queue this executor pulls work items from. */
    private final Queue queue;

    /** Wall-clock time (ms since epoch) at which the current executable was taken off the queue. */
    private long startTime;

    /**
     * Used to track when a job was last executed.
     */
    private long finishTime;

    /**
     * Executor number that identifies it among other executors for the same {@link Computer}.
     */
    private int number;

    /**
     * {@link Queue.Executable} being executed right now, or null if the executor is idle.
     */
    // volatile: read by many web/UI threads while written by this executor thread.
    private volatile Queue.Executable executable;

    /** Unexpected {@link RuntimeException}/{@link Error} that killed this thread, if any. */
    private Throwable causeOfDeath;

    /**
     * Creates an executor thread (not yet started) for the given computer.
     *
     * @param owner computer this executor runs on
     * @param n     executor number, unique per computer
     */
    public Executor(Computer owner, int n) {
        super("Executor #"+n+" for "+owner.getDisplayName());
        this.owner = owner;
        this.queue = Hudson.getInstance().getQueue();
        this.number = n;
    }

    /**
     * Main loop: repeatedly pops a task from the queue and executes it,
     * until {@link #shouldRun()} says to stop or this executor becomes surplus.
     * Any {@link RuntimeException}/{@link Error} escaping the loop is recorded
     * in {@link #getCauseOfDeath()} before re-throwing.
     */
    @Override
    public void run() {
        // run as the system user. see ACL.SYSTEM for more discussion about why this is somewhat broken
        SecurityContextHolder.getContext().setAuthentication(ACL.SYSTEM);

        try {
            finishTime = System.currentTimeMillis();
            while(shouldRun()) {
                executable = null;

                synchronized(owner) {
                    if(owner.getNumExecutors()<owner.getExecutors().size()) {
                        // we've got too many executors.
                        owner.removeExecutor(this);
                        return;
                    }
                }

                // clear the interrupt flag as a precaution.
                // sometime an interrupt aborts a build but without clearing the flag.
                // see issue #1583
                if (Thread.interrupted())   continue;

                Queue.Item queueItem;
                Queue.Task task;
                try {
                    // perform this state change as an atomic operation wrt other queue operations
                    synchronized (queue) {
                        queueItem = grabJob();
                        task = queueItem.task;
                        // record start time before createExecutable so elapsed time covers setup too
                        startTime = System.currentTimeMillis();
                        executable = task.createExecutable();
                    }
                } catch (IOException e) {
                    LOGGER.log(Level.SEVERE, "Executor threw an exception", e);
                    continue;
                } catch (InterruptedException e) {
                    // interrupted while waiting for work; loop around and re-check shouldRun()
                    continue;
                }

                Throwable problems = null;
                final String threadName = getName();
                try {
                    owner.taskAccepted(this, task);
                    // copy queue-item actions (e.g. build parameters/causes) onto the executable
                    if (executable instanceof Actionable) {
                        for (Action action: queueItem.getActions()) {
                            ((Actionable) executable).addAction(action);
                        }
                    }
                    // temporarily rename the thread so thread dumps show what is running
                    setName(threadName+" : executing "+executable.toString());
                    queue.execute(executable, task);
                } catch (Throwable e) {
                    // for some reason the executor died. this is really
                    // a bug in the code, but we don't want the executor to die,
                    // so just leave some info and go on to build other things
                    LOGGER.log(Level.SEVERE, "Executor threw an exception", e);
                    problems = e;
                } finally {
                    setName(threadName);
                    finishTime = System.currentTimeMillis();
                    // complete the queue item's future and notify the computer either way
                    if (problems == null) {
                        queueItem.future.set(executable);
                        owner.taskCompleted(this, task, finishTime - startTime);
                    } else {
                        queueItem.future.set(problems);
                        owner.taskCompletedWithProblems(this, task, finishTime - startTime, problems);
                    }
                }
            }
        } catch(RuntimeException e) {
            causeOfDeath = e;
            throw e;
        } catch (Error e) {
            causeOfDeath = e;
            throw e;
        }
    }

    /**
     * Returns true if we should keep going.
     */
    protected boolean shouldRun() {
        return Hudson.getInstance() != null && !Hudson.getInstance().isTerminating();
    }

    /**
     * Blocks until a queue item is available and pops it.
     * Called while holding the queue lock in {@link #run()}.
     */
    protected Queue.Item grabJob() throws InterruptedException {
        return queue.pop();
    }

    /**
     * Returns the current {@link Queue.Task} this executor is running.
     *
     * @return
     *      null if the executor is idle.
     */
    @Exported
    public Queue.Executable getCurrentExecutable() {
        return executable;
    }

    /**
     * If {@linkplain #getCurrentExecutable() current executable} is {@link AbstractBuild},
     * return the workspace that this executor is using, or null if the build hasn't gotten
     * to that point yet.
     */
    public FilePath getCurrentWorkspace() {
        // read the volatile field once so the null-check and the cast see the same value
        Executable e = executable;
        if(e==null) return null;
        if (e instanceof AbstractBuild) {
            AbstractBuild ab = (AbstractBuild) e;
            return ab.getWorkspace();
        }
        return null;
    }

    /**
     * Same as {@link #getName()}.
     */
    public String getDisplayName() {
        return "Executor #"+getNumber();
    }

    /**
     * Gets the executor number that uniquely identifies it among
     * other {@link Executor}s for the same computer.
     *
     * @return
     *      a sequential number starting from 0.
     */
    @Exported
    public int getNumber() {
        return number;
    }

    /**
     * Returns true if this {@link Executor} is ready for action.
     */
    @Exported
    public boolean isIdle() {
        return executable==null;
    }

    /**
     * The opposite of {@link #isIdle()} — the executor is doing some work.
     */
    public boolean isBusy() {
        return executable!=null;
    }

    /**
     * If this thread dies unexpectedly, obtain the cause of the failure.
     *
     * @return null if the death is expected death or the thread is {@link #isAlive() still alive}.
     * @since 1.142
     */
    public Throwable getCauseOfDeath() {
        return causeOfDeath;
    }

    /**
     * Returns the progress of the current build in the number between 0-100.
     *
     * @return -1
     *      if it's impossible to estimate the progress.
     */
    @Exported
    public int getProgress() {
        Queue.Executable e = executable;
        if(e==null) return -1;
        long d = e.getParent().getEstimatedDuration();
        if(d<0)         return -1;

        int num = (int)(getElapsedTime()*100/d);
        // cap at 99 so the UI never shows 100% while the build is still running
        if(num>=100)    num=99;
        return num;
    }

    /**
     * Returns true if the current build is likely stuck.
     *
     * <p>
     * This is a heuristics based approach, but if the build is suspiciously taking for a long time,
     * this method returns true.
     */
    @Exported
    public boolean isLikelyStuck() {
        Queue.Executable e = executable;
        if(e==null) return false;

        long elapsed = getElapsedTime();
        long d = e.getParent().getEstimatedDuration();
        if(d>=0) {
            // if it's taking 10 times longer than ETA, consider it stuck
            return d*10 < elapsed;
        } else {
            // if no ETA is available, a build taking longer than a day is considered stuck
            return TimeUnit2.MILLISECONDS.toHours(elapsed)>24;
        }
    }

    /** Elapsed time (ms) since the current executable started. Meaningless while idle. */
    public long getElapsedTime() {
        return System.currentTimeMillis() - startTime;
    }

    /**
     * Gets the string that says how long since this build has started.
     *
     * @return
     *      string like "3 minutes" "1 day" etc.
     */
    public String getTimestampString() {
        return Util.getPastTimeString(getElapsedTime());
    }

    /**
     * Computes a human-readable text that shows the expected remaining time
     * until the build completes.
     */
    public String getEstimatedRemainingTime() {
        Queue.Executable e = executable;
        if(e==null) return Messages.Executor_NotAvailable();

        long d = e.getParent().getEstimatedDuration();
        if(d<0)         return Messages.Executor_NotAvailable();

        long eta = d-getElapsedTime();
        if(eta<=0)      return Messages.Executor_NotAvailable();

        return Util.getTimeSpanString(eta);
    }

    /**
     * The same as {@link #getEstimatedRemainingTime()} but return
     * it as a number of milli-seconds.
     */
    public long getEstimatedRemainingTimeMillis() {
        Queue.Executable e = executable;
        if(e==null) return -1;

        long d = e.getParent().getEstimatedDuration();
        if(d<0)         return -1;

        long eta = d-getElapsedTime();
        if(eta<=0)      return -1;

        return eta;
    }

    /**
     * Stops the current build.
     */
    // Web method: checks abort permission before interrupting the executor thread.
    public void doStop( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
        Queue.Executable e = executable;
        if(e!=null) {
            e.getParent().checkAbortPermission();
            interrupt();
        }
        rsp.forwardToPreviousPage(req);
    }

    /** @return true if the current user may abort the running build (false when idle). */
    public boolean hasStopPermission() {
        Queue.Executable e = executable;
        return e!=null && e.getParent().hasAbortPermission();
    }

    /** @return the computer this executor belongs to. */
    public Computer getOwner() {
        return owner;
    }

    /**
     * Returns when this executor started or should start being idle.
     */
    public long getIdleStartMilliseconds() {
        if (isIdle())
            return Math.max(finishTime, owner.getConnectTime());
        else {
            // busy: estimate completion from ETA, but never sooner than 15s from now
            return Math.max(startTime + Math.max(0, executable.getParent().getEstimatedDuration()),
                    System.currentTimeMillis() + 15000);
        }
    }

    /**
     * Exposes the executor to the remote API.
     */
    public Api getApi() {
        return new Api(this);
    }

    /**
     * Creates a proxy object that executes the callee in the context that impersonates
     * this executor. Useful to export an object to a remote channel.
     */
    public <T> T newImpersonatingProxy(Class<T> type, T core) {
        return new InterceptingProxy() {
            protected Object call(Object o, Method m, Object[] args) throws Throwable {
                // save/restore so nested impersonation unwinds correctly
                final Executor old = IMPERSONATION.get();
                IMPERSONATION.set(Executor.this);
                try {
                    return m.invoke(o,args);
                } finally {
                    IMPERSONATION.set(old);
                }
            }
        }.wrap(type,core);
    }

    /**
     * Returns the executor of the current thread or null if current thread is not an executor.
     */
    public static Executor currentExecutor() {
        Thread t = Thread.currentThread();
        if (t instanceof Executor) return (Executor) t;
        // not an executor thread; maybe it impersonates one (see newImpersonatingProxy)
        return IMPERSONATION.get();
    }

    /**
     * Mechanism to allow threads (in particular the channel request handling threads) to
     * run on behalf of {@link Executor}.
     */
    private static final ThreadLocal<Executor> IMPERSONATION = new ThreadLocal<Executor>();

    private static final Logger LOGGER = Logger.getLogger(Executor.class.getName());
}
|
package hudson.model;
import hudson.Extension;
import hudson.Util;
import hudson.diagnosis.OldDataMonitor;
import hudson.model.Descriptor.FormException;
import hudson.util.CaseInsensitiveComparator;
import hudson.util.DescribableList;
import hudson.util.FormValidation;
import hudson.util.HttpResponses;
import hudson.views.ListViewColumn;
import hudson.views.ViewJobFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.annotation.concurrent.GuardedBy;
import javax.servlet.ServletException;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.interceptor.RequirePOST;
/**
* Displays {@link Job}s in a flat list view.
*
* @author Kohsuke Kawaguchi
*/
public class ListView extends View implements Saveable {
/**
* List of job names. This is what gets serialized.
*/
@GuardedBy("this")
/*package*/ /*almost-final*/ SortedSet<String> jobNames = new TreeSet<String>(CaseInsensitiveComparator.INSTANCE);
private DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> jobFilters;
private DescribableList<ListViewColumn, Descriptor<ListViewColumn>> columns;
/**
* Include regex string.
*/
private String includeRegex;
/**
* Whether to recurse in ItemGroups
*/
private boolean recurse;
/**
* Compiled include pattern from the includeRegex string.
*/
private transient Pattern includePattern;
/**
* Filter by enabled/disabled status of jobs.
* Null for no filter, true for enabled-only, false for disabled-only.
*/
private Boolean statusFilter;
@DataBoundConstructor
public ListView(String name) {
super(name);
initColumns();
initJobFilters();
}
public ListView(String name, ViewGroup owner) {
this(name);
this.owner = owner;
}
private Object readResolve() {
if(includeRegex!=null) {
try {
includePattern = Pattern.compile(includeRegex);
} catch (PatternSyntaxException x) {
includeRegex = null;
OldDataMonitor.report(this, Collections.<Throwable>singleton(x));
}
}
if (jobNames == null) {
jobNames = new TreeSet<String>(CaseInsensitiveComparator.INSTANCE);
}
initColumns();
initJobFilters();
return this;
}
protected void initColumns() {
if (columns == null)
columns = new DescribableList<ListViewColumn, Descriptor<ListViewColumn>>(this,ListViewColumn.createDefaultInitialColumnList());
}
protected void initJobFilters() {
if (jobFilters == null)
jobFilters = new DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>>(this);
}
/**
* Used to determine if we want to display the Add button.
*/
public boolean hasJobFilterExtensions() {
return !ViewJobFilter.all().isEmpty();
}
public DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> getJobFilters() {
return jobFilters;
}
public DescribableList<ListViewColumn, Descriptor<ListViewColumn>> getColumns() {
return columns;
}
/**
* Returns a read-only view of all {@link Job}s in this view.
*
* <p>
* This method returns a separate copy each time to avoid
* concurrent modification issue.
*/
public List<TopLevelItem> getItems() {
SortedSet<String> names;
List<TopLevelItem> items = new ArrayList<TopLevelItem>();
synchronized (this) {
names = new TreeSet<String>(jobNames);
}
ItemGroup<? extends TopLevelItem> parent = getOwnerItemGroup();
includeItems(parent, names);
Boolean statusFilter = this.statusFilter; // capture the value to isolate us from concurrent update
for (TopLevelItem item : Items.getAllItems(getOwnerItemGroup(), TopLevelItem.class)) {
if (!names.contains(item.getRelativeNameFrom(getOwnerItemGroup()))) continue;
// Add if no status filter or filter matches enabled/disabled status:
if(statusFilter == null || !(item instanceof AbstractProject)
|| ((AbstractProject)item).isDisabled() ^ statusFilter)
items.add(item);
}
// check the filters
Iterable<ViewJobFilter> jobFilters = getJobFilters();
List<TopLevelItem> allItems = new ArrayList<TopLevelItem>(parent.getItems());
for (ViewJobFilter jobFilter: jobFilters) {
items = jobFilter.filter(items, allItems, this);
}
// for sanity, trim off duplicates
items = new ArrayList<TopLevelItem>(new LinkedHashSet<TopLevelItem>(items));
return items;
}
@Override
public boolean contains(TopLevelItem item) {
return getItems().contains(item);
}
private void includeItems(ItemGroup<? extends TopLevelItem> parent, SortedSet<String> names) {
includeItems(parent, parent, names);
}
private void includeItems(ItemGroup<? extends TopLevelItem> root, ItemGroup<?> parent, SortedSet<String> names) {
if (includePattern != null) {
for (Item item : parent.getItems()) {
if (recurse && item instanceof ItemGroup) {
ItemGroup<?> ig = (ItemGroup<?>) item;
includeItems(root, ig, names);
}
if (item instanceof TopLevelItem) {
String itemName = item.getRelativeNameFrom(root);
if (includePattern.matcher(itemName).matches()) {
names.add(itemName);
}
}
}
}
}
public synchronized boolean jobNamesContains(TopLevelItem item) {
if (item == null) return false;
return jobNames.contains(item.getRelativeNameFrom(getOwnerItemGroup()));
}
/**
* Adds the given item to this view.
*
* @since 1.389
*/
public void add(TopLevelItem item) throws IOException {
synchronized (this) {
jobNames.add(item.getRelativeNameFrom(getOwnerItemGroup()));
}
save();
}
public String getIncludeRegex() {
return includeRegex;
}
public boolean isRecurse() {
return recurse;
}
/*
* For testing purposes
*/
void setRecurse(boolean recurse) {
this.recurse = recurse;
}
/**
* Filter by enabled/disabled status of jobs.
* Null for no filter, true for enabled-only, false for disabled-only.
*/
public Boolean getStatusFilter() {
return statusFilter;
}
public Item doCreateItem(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
ItemGroup<? extends TopLevelItem> ig = getOwnerItemGroup();
if (ig instanceof ModifiableItemGroup) {
TopLevelItem item = ((ModifiableItemGroup<? extends TopLevelItem>)ig).doCreateItem(req, rsp);
if(item!=null) {
synchronized (this) {
jobNames.add(item.getRelativeNameFrom(getOwnerItemGroup()));
}
owner.save();
}
return item;
}
return null;
}
@RequirePOST
public HttpResponse doAddJobToView(@QueryParameter String name) throws IOException, ServletException {
checkPermission(View.CONFIGURE);
if(name==null)
throw new Failure("Query parameter 'name' is required");
if (getOwnerItemGroup().getItem(name) == null)
throw new Failure("Query parameter 'name' does not correspond to a known item");
if (jobNames.add(name))
owner.save();
return HttpResponses.ok();
}
@RequirePOST
public HttpResponse doRemoveJobFromView(@QueryParameter String name) throws IOException, ServletException {
checkPermission(View.CONFIGURE);
if(name==null)
throw new Failure("Query parameter 'name' is required");
if (jobNames.remove(name))
owner.save();
return HttpResponses.ok();
}
@Override
public synchronized void onJobRenamed(Item item, String oldName, String newName) {
if(jobNames.remove(oldName) && newName!=null)
jobNames.add(newName);
}
/**
* Handles the configuration submission.
*
* Load view-specific properties here.
*/
@Override
protected void submit(StaplerRequest req) throws ServletException, FormException, IOException {
JSONObject json = req.getSubmittedForm();
synchronized (this) {
recurse = json.optBoolean("recurse", true);
jobNames.clear();
Iterable<? extends TopLevelItem> items;
if (recurse) {
items = Items.getAllItems(getOwnerItemGroup(), TopLevelItem.class);
} else {
items = getOwnerItemGroup().getItems();
}
for (TopLevelItem item : items) {
String relativeNameFrom = item.getRelativeNameFrom(getOwnerItemGroup());
if(req.getParameter(relativeNameFrom)!=null) {
jobNames.add(relativeNameFrom);
}
}
}
setIncludeRegex(req.getParameter("useincluderegex") != null ? req.getParameter("includeRegex") : null);
if (columns == null) {
columns = new DescribableList<ListViewColumn,Descriptor<ListViewColumn>>(this);
}
columns.rebuildHetero(req, json, ListViewColumn.all(), "columns");
if (jobFilters == null) {
jobFilters = new DescribableList<ViewJobFilter,Descriptor<ViewJobFilter>>(this);
}
jobFilters.rebuildHetero(req, json, ViewJobFilter.all(), "jobFilters");
String filter = Util.fixEmpty(req.getParameter("statusFilter"));
statusFilter = filter != null ? "1".equals(filter) : null;
}
public void setIncludeRegex(String includeRegex) {
this.includeRegex = Util.nullify(includeRegex);
if (this.includeRegex == null)
this.includePattern = null;
else
this.includePattern = Pattern.compile(includeRegex);
}
@Extension
public static class DescriptorImpl extends ViewDescriptor {
public String getDisplayName() {
return Messages.ListView_DisplayName();
}
/**
* Checks if the include regular expression is valid.
*/
public FormValidation doCheckIncludeRegex( @QueryParameter String value ) throws IOException, ServletException, InterruptedException {
String v = Util.fixEmpty(value);
if (v != null) {
try {
Pattern.compile(v);
} catch (PatternSyntaxException pse) {
return FormValidation.error(pse.getMessage());
}
}
return FormValidation.ok();
}
}
/**
* @deprecated as of 1.391
* Use {@link ListViewColumn#createDefaultInitialColumnList()}
*/
public static List<ListViewColumn> getDefaultColumns() {
return ListViewColumn.createDefaultInitialColumnList();
}
}
|
package com.thaiopensource.xml.dtd;
import java.util.Vector;
/**
 * Represents a parsed DTD entity (general or parameter) and tracks how it is
 * used across the DTD so that references can be preserved ("unexpanded") and
 * the entity can be classified semantically (model group, attribute group,
 * enum group, datatype or flag).
 */
class Entity {
    /** Records that {@code entity} produced text[start..end) of this entity's replacement text. */
    static class Reference {
        Reference(Entity entity, int start, int end) {
            this.entity = entity;
            this.start = start;
            this.end = end;
        }
        Entity entity;
        int start;
        int end;
    }

    final String name;
    /** True for parameter entities (%name;), false for general entities. */
    final boolean isParameter;

    Entity(String name, boolean isParameter) {
        this.name = name;
        this.isParameter = isParameter;
    }

    /** Replacement text; null if the entity is external/unparsed here. */
    char[] text;
    String systemId;
    String publicId;
    String baseUri;
    // Which parts of text came from references?
    Reference[] references;
    /** True while this entity is being expanded (guards against recursion elsewhere). */
    boolean open;
    String notationName;
    /** Tokenized replacement text as a Vector of Atom; null until tokenized. */
    Vector atoms;
    boolean mustReparse;

    // Levels describing the syntactic context in which the entity was referenced.
    static final int INCONSISTENT_LEVEL = -1;
    static final int NO_LEVEL = 0;
    static final int DECL_LEVEL = 1;
    static final int PARAM_LEVEL = 2;
    static final int PARTICLE_LEVEL = 3;
    int referenceLevel = NO_LEVEL;

    // Flags describing what kinds of tokens appeared inside a group-level use.
    // NOTE: octal literals (01, 02, 04, 010, ...) — values are bit flags.
    static final int GROUP_CONTAINS_OR = 01;
    static final int GROUP_CONTAINS_SEQ = 02;
    static final int GROUP_CONTAINS_PCDATA = 04;
    static final int GROUP_CONTAINS_GROUP = 010;
    static final int GROUP_CONTAINS_ELEMENT_NAME = 020;
    static final int GROUP_CONTAINS_NMTOKEN = 040;
    int groupFlags = 0;

    // Semantic classification of the entity, assigned by analyzeSemantic().
    static final int SEMANTIC_NONE = 0;
    static final int SEMANTIC_MODEL_GROUP = 1;
    static final int SEMANTIC_ATTRIBUTE_GROUP = 2;
    static final int SEMANTIC_ENUM_GROUP = 3;
    static final int SEMANTIC_DATATYPE = 4;
    static final int SEMANTIC_FLAG = 5;
    int semantic = SEMANTIC_NONE;

    ModelGroup modelGroup;
    AttributeGroup attributeGroup;
    EnumGroup enumGroup;
    Datatype datatype;
    Flag flag;
    Decl decl;
    /** The parsed structure (Params or Particles) recorded by setParsed. */
    Vector parsed;

    /**
     * Records the parse v[start..end) seen for this entity at the given level,
     * checking consistency with any previously recorded parse. Inconsistent
     * uses put the entity into INCONSISTENT_LEVEL and discard the parse.
     */
    void setParsed(int level, Vector v, int start, int end) {
        // already known inconsistent; nothing more to learn
        if (referenceLevel < 0)
            return;
        if (level == referenceLevel) {
            if (!sliceEqual(parsed, v, start, end)) {
                // XXX give a warning
                parsed = null;
                referenceLevel = INCONSISTENT_LEVEL;
                System.err.println("Warning: entity used inconsistently: " + name);
            }
            return;
        }
        if (referenceLevel == NO_LEVEL) {
            // first recorded use
            parsed = new Vector();
            appendSlice(parsed, v, start, end);
            referenceLevel = level;
            return;
        }
        // Mixed PARAM/PARTICLE use of a single-item parse may still be consistent.
        if (parsed.size() == 1 && end - start == 1) {
            if (level == PARAM_LEVEL && referenceLevel == PARTICLE_LEVEL) {
                if (paramParticleConsistent((Param)v.elementAt(start),
                        (Particle)parsed.elementAt(0))) {
                    // For element name case, otherwise particle will be
                    // ambiguous with model group.
                    referenceLevel = PARAM_LEVEL;
                    parsed.setElementAt(v.elementAt(start), 0);
                    return;
                }
            }
            else if (level == PARTICLE_LEVEL && referenceLevel == PARAM_LEVEL) {
                if (paramParticleConsistent((Param)parsed.elementAt(0),
                        (Particle)v.elementAt(start)))
                    return;
            }
        }
        System.err.println("Warning: entity used inconsistently: " + name);
        parsed = null;
        referenceLevel = INCONSISTENT_LEVEL;
    }

    /** True if the param is a model group equal to the particle. */
    static boolean paramParticleConsistent(Param param, Particle particle) {
        if (param.type == Param.MODEL_GROUP && param.group.equals(particle))
            return true;
        return false;
    }

    /**
     * Maps an index into {@link #text} to the index of the atom starting at
     * that text position, or -1 if no atom boundary falls exactly there.
     */
    int textIndexToAtomIndex(int ti) {
        int nAtoms = atoms.size();
        int len = 0;
        int atomIndex = 0;
        for (;;) {
            if (len == ti)
                return atomIndex;
            if (atomIndex >= nAtoms)
                break;
            Atom a = (Atom)atoms.elementAt(atomIndex);
            len += a.getToken().length();
            if (len > ti)
                break;  // ti falls inside this atom's token: no boundary
            atomIndex++;
        }
        return -1;
    }

    /**
     * Rewrites {@link #atoms} so that spans of atoms that came from entity
     * references are replaced by a single reference atom, recursing into the
     * referenced entities. Spans that don't align with atom boundaries or are
     * not properly nested are left expanded (with a warning).
     */
    void unexpandEntities() {
        if (references == null || atoms == null)
            return;
        Vector newAtoms = null;
        int nCopiedAtoms = 0;
        for (int i = 0; i < references.length; i++) {
            int start = textIndexToAtomIndex(references[i].start);
            int end = textIndexToAtomIndex(references[i].end);
            if (start >= 0 && end >= 0 && atomsAreProperlyNested(start, end)) {
                if (newAtoms == null)
                    newAtoms = new Vector();
                appendSlice(newAtoms, atoms, nCopiedAtoms, start);
                newAtoms.addElement(new Atom(references[i].entity));
                if (references[i].entity.atoms == null) {
                    // give the referenced entity its own atoms, then unexpand it too
                    Vector tem = new Vector();
                    references[i].entity.atoms = tem;
                    appendSlice(tem, atoms, start, end);
                    references[i].entity.unexpandEntities();
                }
                nCopiedAtoms = end;
            }
            else {
                System.err.println("Warning: could not preserve reference to entity \""
                        + references[i].entity.name
                        + "\" in entity \""
                        + this.name
                        + "\"");
            }
        }
        if (newAtoms == null)
            return;
        appendSlice(newAtoms, atoms, nCopiedAtoms, atoms.size());
        atoms = newAtoms;
        references = null;
    }

    /**
     * Checks that open/close tokens balance within atoms[start..end).
     * A conditional-section close counts as closing two levels.
     */
    private boolean atomsAreProperlyNested(int start, int end) {
        int level = 0;
        for (int i = start; i < end; i++)
            switch (((Atom)atoms.elementAt(i)).getTokenType()) {
            case Tokenizer.TOK_COND_SECT_OPEN:
            case Tokenizer.TOK_OPEN_PAREN:
            case Tokenizer.TOK_OPEN_BRACKET:
            case Tokenizer.TOK_DECL_OPEN:
                level++;
                break;
            case Tokenizer.TOK_CLOSE_PAREN:
            case Tokenizer.TOK_CLOSE_PAREN_ASTERISK:
            case Tokenizer.TOK_CLOSE_PAREN_QUESTION:
            case Tokenizer.TOK_CLOSE_PAREN_PLUS:
            case Tokenizer.TOK_CLOSE_BRACKET:
            case Tokenizer.TOK_DECL_CLOSE:
                if (--level < 0)
                    return false;
                break;
            case Tokenizer.TOK_COND_SECT_CLOSE:
                if ((level -= 2) < 0)
                    return false;
                break;
            }
        return level == 0;
    }

    /** True if v1 equals v2[start..end) element-wise. */
    static boolean sliceEqual(Vector v1, Vector v2, int start, int end) {
        int n = v1.size();
        if (end - start != n)
            return false;
        for (int i = 0; i < n; i++)
            if (!v1.elementAt(i).equals(v2.elementAt(start + i)))
                return false;
        return true;
    }

    /** Appends from[start..end) onto to. */
    static void appendSlice(Vector to, Vector from, int start, int end) {
        for (; start < end; start++)
            to.addElement(from.elementAt(start));
    }

    /** Classifies the entity based on the level at which it was referenced. */
    void analyzeSemantic() {
        switch (referenceLevel) {
        case PARAM_LEVEL:
            analyzeSemanticParam();
            break;
        case PARTICLE_LEVEL:
            analyzeSemanticParticle();
            break;
        }
    }

    /** Classification for PARAM_LEVEL: tries each recognizer in priority order. */
    private void analyzeSemanticParam() {
        if (isAttributeGroup())
            semantic = SEMANTIC_ATTRIBUTE_GROUP;
        else if (isDatatype())
            semantic = SEMANTIC_DATATYPE;
        else if (isFlag())
            semantic = SEMANTIC_FLAG;
        else if (isModelGroup())
            semantic = SEMANTIC_MODEL_GROUP;
        else
            System.err.println("Warning: could not understand entity: " + name);
    }

    /** True if parsed looks like one or more attribute definitions. */
    private boolean isAttributeGroup() {
        ParamStream ps = new ParamStream(parsed);
        if (!ps.advance())
            return false;
        do {
            if (ps.type != Param.EMPTY_ATTRIBUTE_GROUP
                    && (ps.type != Param.ATTRIBUTE_NAME
                            || !ps.advance()
                            || (ps.type == Param.ATTRIBUTE_TYPE_NOTATION && !ps.advance())
                            || !ps.advance()
                            || (ps.type == Param.FIXED && !ps.advance())))
                return false;
        } while (ps.advance());
        return true;
    }

    /** True if parsed is exactly one attribute-type-like param. */
    private boolean isDatatype() {
        ParamStream ps = new ParamStream(parsed);
        return (ps.advance()
                && (ps.type == Param.ATTRIBUTE_TYPE
                        || ps.type == Param.ATTRIBUTE_VALUE_GROUP
                        || (ps.type == Param.ATTRIBUTE_TYPE_NOTATION
                                && ps.advance()))
                && !ps.advance());
    }

    /** True if parsed is exactly one INCLUDE/IGNORE flag. */
    private boolean isFlag() {
        ParamStream ps = new ParamStream(parsed);
        return (ps.advance()
                && (ps.type == Param.INCLUDE
                        || ps.type == Param.IGNORE)
                && !ps.advance());
    }

    /** True if parsed is exactly one model-group-like param. */
    private boolean isModelGroup() {
        ParamStream ps = new ParamStream(parsed);
        // NOTE(review): "ps.type == Param.EMPTY" is tested twice below; the
        // second occurrence is redundant and looks like a typo for a different
        // constant (Param.ANY?) — confirm against the Param class.
        return (ps.advance()
                && (ps.type == Param.MODEL_GROUP
                        || ps.type == Param.EMPTY
                        || ps.type == Param.EMPTY)
                && !ps.advance());
    }

    /** Classification for PARTICLE_LEVEL: decided by the first informative particle. */
    private void analyzeSemanticParticle() {
        int n = parsed.size();
        if (n == 0) {
            analyzeEmptySemanticParticle();
            return;
        }
        for (int i = 0; i < n; i++) {
            switch (((Particle)parsed.elementAt(i)).type) {
            case Particle.GROUP:
            case Particle.ELEMENT_NAME:
            case Particle.PCDATA:
                semantic = SEMANTIC_MODEL_GROUP;
                return;
            case Particle.NMTOKEN:
                semantic = SEMANTIC_ENUM_GROUP;
                return;
            }
        }
        System.err.println("Warning: could not understand entity: " + name);
    }

    /** Flags that, if present, indicate model-group content (vs enum content). */
    static final int GROUP_MODEL_GROUP_FLAGS
            = GROUP_CONTAINS_PCDATA|GROUP_CONTAINS_GROUP|GROUP_CONTAINS_ELEMENT_NAME;

    /** Empty parse: decide from the group flags gathered during tokenization. */
    private void analyzeEmptySemanticParticle() {
        if ((groupFlags & GROUP_MODEL_GROUP_FLAGS) == 0) {
            semantic = SEMANTIC_ENUM_GROUP;
            return;
        }
        if ((groupFlags & GROUP_CONTAINS_NMTOKEN) == 0) {
            // must contain exactly one of SEQ/OR connectors to be unambiguous
            switch (groupFlags & (GROUP_CONTAINS_SEQ|GROUP_CONTAINS_OR)) {
            case GROUP_CONTAINS_SEQ:
            case GROUP_CONTAINS_OR:
                semantic = SEMANTIC_MODEL_GROUP;
                return;
            }
        }
        System.err.println("Warning: could not understand entity: " + name);
    }

    /**
     * Converts the recorded parse to a ModelGroup. An empty particle parse
     * becomes an empty Sequence or Choice depending on the connector flag.
     */
    ModelGroup toModelGroup() {
        if (referenceLevel == PARAM_LEVEL)
            return Param.paramsToModelGroup(parsed);
        if (parsed.size() == 0) {
            if ((groupFlags & GROUP_CONTAINS_SEQ) != 0)
                return new Sequence(new ModelGroup[0]);
            else
                return new Choice(new ModelGroup[0]);
        }
        return Particle.particlesToModelGroup(parsed);
    }
}
|
package cashier.tools;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

import javax.xml.datatype.DatatypeFactory;

import org.joda.time.Chronology;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
/**
 * Static helpers for converting between timestamp representations
 * (ISO-8601 strings, HTTP-date strings, Unix epoch milliseconds).
 */
public class TimeTools {

    /** Utility class; not instantiable. */
    private TimeTools(){}

    /**
     * Parses an ISO-8601 date-time string into Unix epoch milliseconds.
     *
     * @param datetime ISO-8601 lexical representation (xsd:dateTime)
     * @return epoch milliseconds, or {@code 0L} if the string cannot be parsed
     *         (deliberate best-effort behavior kept for existing callers)
     */
    public static Long iso8601StringToUnixTimestamp(String datetime){
        try {
            return DatatypeFactory.newInstance().newXMLGregorianCalendar(datetime).toGregorianCalendar().getTime().getTime();
        } catch (Exception ignored) {
            // parse/configuration failure: fall through to the 0L sentinel below
        }
        return (long) 0;
    }

    /**
     * Formats Unix epoch milliseconds as an ISO-8601 string.
     *
     * @param unixTimestamp must be in milliseconds.
     * @return ISO-8601 representation in the default time zone
     */
    public static String unixTimestampToiso8601String(Long unixTimestamp){
        Chronology chrono = ISOChronology.getInstance();
        DateTime dateTime = new DateTime(unixTimestamp, chrono);
        return dateTime.toString();
    }

    /**
     * Parses an RFC 1123 HTTP-date (e.g. "Sun, 06 Nov 1994 08:49:37 GMT")
     * into Unix epoch milliseconds.
     *
     * @param timestamp HTTP-date string with English day/month names
     * @return epoch milliseconds
     * @throws IllegalArgumentException if the string does not match the pattern
     */
    public static Long httpDateTimestampToUnixTimestamp(String timestamp){
        // FIX: HTTP-dates always use English day/month names; without an explicit
        // locale, parsing fails on JVMs whose default locale is not English.
        DateTimeFormatter df = DateTimeFormat.forPattern("EEE, dd MMM yyyy HH:mm:ss 'GMT'")
                .withLocale(Locale.ENGLISH);
        return df.withOffsetParsed().parseDateTime(timestamp).getMillis();
    }
}
|
package de.lmu.ifi.dbs.elki.algorithm.outlier;
import java.util.Iterator;
import java.util.List;
import de.lmu.ifi.dbs.elki.algorithm.AbstractAlgorithm;
import de.lmu.ifi.dbs.elki.data.KNNList;
import de.lmu.ifi.dbs.elki.data.RealVector;
import de.lmu.ifi.dbs.elki.database.AssociationID;
import de.lmu.ifi.dbs.elki.database.Database;
import de.lmu.ifi.dbs.elki.database.DistanceResultPair;
import de.lmu.ifi.dbs.elki.distance.Distance;
import de.lmu.ifi.dbs.elki.distance.DoubleDistance;
import de.lmu.ifi.dbs.elki.distance.similarityfunction.SharedNearestNeighborSimilarityFunction;
import de.lmu.ifi.dbs.elki.result.AnnotationFromDatabase;
import de.lmu.ifi.dbs.elki.result.MultiResult;
import de.lmu.ifi.dbs.elki.result.OrderingFromAssociation;
import de.lmu.ifi.dbs.elki.utilities.Description;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.DoubleParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.IntParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.ParameterException;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.constraints.GreaterConstraint;
import de.lmu.ifi.dbs.elki.utilities.progress.FiniteProgress;
/**
* @author Arthur Zimek
* @param <V> the type of RealVector handled by this Algorithm
* @param <D> the type of Distance used by this Algorithm
*/
// todo arthur comment
public class SOD<V extends RealVector<V, Double>, D extends Distance<D>> extends AbstractAlgorithm<V, MultiResult> {
/**
* The association id to associate a subspace outlier degree.
*/
public static final AssociationID<SODModel<?>> SOD_MODEL = AssociationID.getOrCreateAssociationIDGenerics("SOD", SODModel.class);
/**
* OptionID for {@link #KNN_PARAM}
*/
public static final OptionID KNN_ID = OptionID.getOrCreateOptionID(
"sod.knn",
"The number of shared nearest neighbors to be considered for learning the subspace properties."
);
/**
* Parameter to specify the number of shared nearest neighbors to be considered for learning the subspace properties.,
* must be an integer greater than 0.
* <p>Default value: {@code 1} </p>
* <p>Key: {@code -sod.knn} </p>
*/
private final IntParameter KNN_PARAM = new IntParameter(KNN_ID, new GreaterConstraint(0), 1);
/**
* Holds the value of {@link #KNN_PARAM}.
*/
private int knn;
/**
* OptionID for {@link #ALPHA_PARAM}
*/
public static final OptionID ALPHA_ID = OptionID.getOrCreateOptionID(
"sod.alpha",
"The multiplier for the discriminance value for discerning small from large variances."
);
/**
* Parameter to indicate the multiplier for the discriminance value for discerning small from large variances.
* <p/>
* <p>Default value: 1.1</p>
* <p/>
* <p>Key: {@code -sod.alpha}</p>
*/
public final DoubleParameter ALPHA_PARAM = new DoubleParameter(ALPHA_ID, new GreaterConstraint(0), 1.1);
/**
* Holds the value of {@link #ALPHA_PARAM}.
*/
private double alpha;
/**
* The similarity function.
*/
private SharedNearestNeighborSimilarityFunction<V, D> similarityFunction = new SharedNearestNeighborSimilarityFunction<V, D>();
/**
* Holds the result.
*/
private MultiResult sodResult;
/**
* Provides the SOD algorithm,
* adding parameters
* {@link #KNN_PARAM} and {@link #ALPHA_PARAM}
* to the option handler additionally to parameters of super class.
*/
public SOD() {
super();
addOption(KNN_PARAM);
addOption(ALPHA_PARAM);
addParameterizable(similarityFunction);
}
/**
* Performs the SOD algorithm on the given database.
*/
@Override
protected MultiResult runInTime(Database<V> database) throws IllegalStateException {
FiniteProgress progress = new FiniteProgress("assigning SOD", database.size());
int processed = 0;
similarityFunction.setDatabase(database, isVerbose(), isTime());
if (logger.isVerbose()) {
logger.verbose("assigning subspace outlier degree:");
}
for (Iterator<Integer> iter = database.iterator(); iter.hasNext();) {
Integer queryObject = iter.next();
processed++;
if (logger.isVerbose()) {
progress.setProcessed(processed);
logger.progress(progress);
}
List<Integer> knnList = getKNN(database, queryObject).idsToList();
SODModel<V> model = new SODModel<V>(database, knnList, alpha, database.get(queryObject));
database.associate(SOD_MODEL, queryObject, model);
}
// combine results.
sodResult = new MultiResult();
sodResult.addResult(new AnnotationFromDatabase<SODModel<?>, V>(database, SOD_MODEL));
sodResult.addResult(new OrderingFromAssociation<SODModel<?>, V>(database, SOD_MODEL, true));
return sodResult;
}
/**
 * Provides the k nearest neighbors in terms of the shared nearest neighbor
 * distance (the inverse of the SNN similarity).
 * <p/>
 * The query object is excluded from the knn list.
 *
 * @param database the database holding the objects
 * @param queryObject the query object for which the kNNs should be determined
 * @return the k nearest neighbors in terms of the shared nearest neighbor distance without the query object
 */
private KNNList<DoubleDistance> getKNN(Database<V> database, Integer queryObject) {
  // NOTE(review): the return value of this call is discarded; presumably kept
  // for a side effect of the preprocessor — verify whether it can be removed.
  similarityFunction.getPreprocessor().getParameters();
  KNNList<DoubleDistance> neighbors = new KNNList<DoubleDistance>(knn, new DoubleDistance(Double.POSITIVE_INFINITY));
  for (Iterator<Integer> candidates = database.iterator(); candidates.hasNext();) {
    Integer candidate = candidates.next();
    if (candidate.equals(queryObject)) {
      continue; // never list the query object as its own neighbor
    }
    // Distance is the reciprocal of the shared-nearest-neighbor similarity.
    DoubleDistance distance = new DoubleDistance(1.0 / similarityFunction.similarity(queryObject, candidate).getValue());
    neighbors.add(new DistanceResultPair<DoubleDistance>(distance, candidate));
  }
  return neighbors;
}
/**
 * Calls the super method
 * and sets additionally the values of the parameters
 * {@link #KNN_PARAM} and {@link #ALPHA_PARAM}.
 * The remaining parameters are passed to the {@link #similarityFunction}.
 *
 * @param args the full argument list
 * @return the parameters left over after this class and the similarity
 *         function have consumed theirs
 */
@Override
public String[] setParameters(String[] args) throws ParameterException {
String[] remainingParameters = super.setParameters(args);
knn = KNN_PARAM.getValue();
alpha = ALPHA_PARAM.getValue();
// Let the similarity function consume its own parameters from the remainder.
remainingParameters = similarityFunction.setParameters(remainingParameters);
rememberParametersExcept(args, remainingParameters);
return remainingParameters;
}
/**
 * Returns a short description of this algorithm.
 */
public Description getDescription() {
  final String name = "SOD";
  final String title = "Subspace outlier degree";
  return new Description(name, title, "", "");
}
/**
 * Returns the result of the last run, or {@code null} if the algorithm has
 * not been executed yet.
 */
public MultiResult getResult() {
  return this.sodResult;
}
}
|
package de.lmu.ifi.dbs.elki.database.ids;
import java.util.Random;
import de.lmu.ifi.dbs.elki.database.ids.generic.UnmodifiableArrayDBIDs;
import de.lmu.ifi.dbs.elki.database.ids.generic.UnmodifiableDBIDs;
import de.lmu.ifi.dbs.elki.database.ids.integer.IntegerDBIDs;
import de.lmu.ifi.dbs.elki.database.ids.integer.TroveArrayDBIDs;
import de.lmu.ifi.dbs.elki.database.ids.integer.UnmodifiableIntegerArrayDBIDs;
import de.lmu.ifi.dbs.elki.database.ids.integer.UnmodifiableIntegerDBIDs;
import de.lmu.ifi.dbs.elki.persistent.ByteBufferSerializer;
import de.lmu.ifi.dbs.elki.utilities.RandomFactory;
/**
 * DBID Utility functions.
 *
 * @author Erich Schubert
 *
 * @apiviz.landmark
 *
 * @apiviz.has DBID
 * @apiviz.has DBIDs
 * @apiviz.uses DBIDRef
 * @apiviz.composedOf DBIDFactory
 */
public final class DBIDUtil {
  /**
   * Static - no public constructor.
   */
  private DBIDUtil() {
    // Never called.
  }

  /**
   * Final, global copy of empty DBIDs.
   */
  public static final EmptyDBIDs EMPTYDBIDS = new EmptyDBIDs();

  /**
   * Import an integer as DBID.
   *
   * Note: this may not be possible for some factories!
   *
   * @param id Integer ID to import
   * @return DBID
   */
  public static DBID importInteger(int id) {
    return DBIDFactory.FACTORY.importInteger(id);
  }

  /**
   * Export a DBID as int.
   *
   * Note: this may not be possible for some factories!
   *
   * @param id DBID to export
   * @return integer value
   */
  public static int asInteger(DBIDRef id) {
    return id.internalGetIndex();
  }

  /**
   * Compare two DBIDs.
   *
   * @param id1 First ID
   * @param id2 Second ID
   * @return Comparison result
   */
  public static int compare(DBIDRef id1, DBIDRef id2) {
    return DBIDFactory.FACTORY.compare(id1, id2);
  }

  /**
   * Test two DBIDs for equality.
   *
   * @param id1 First ID
   * @param id2 Second ID
   * @return Comparison result
   */
  public static boolean equal(DBIDRef id1, DBIDRef id2) {
    return DBIDFactory.FACTORY.equal(id1, id2);
  }

  /**
   * Dereference a DBID reference.
   *
   * @param ref DBID reference
   * @return DBID
   */
  public static DBID deref(DBIDRef ref) {
    if (ref instanceof DBID) {
      return (DBID) ref;
    }
    return importInteger(ref.internalGetIndex());
  }

  /**
   * Format a DBID as string.
   *
   * @param id DBID
   * @return String representation
   */
  public static String toString(DBIDRef id) {
    return DBIDFactory.FACTORY.toString(id);
  }

  /**
   * Format a collection of DBIDs as string.
   *
   * @param ids DBIDs
   * @return String representation
   */
  public static String toString(DBIDs ids) {
    if (ids instanceof DBID) {
      return DBIDFactory.FACTORY.toString((DBID) ids);
    }
    StringBuilder buf = new StringBuilder();
    for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      if (buf.length() > 0) {
        buf.append(", ");
      }
      buf.append(DBIDFactory.FACTORY.toString(iter));
    }
    return buf.toString();
  }

  /**
   * Get a serializer for DBIDs.
   *
   * @return DBID serializer
   */
  public static ByteBufferSerializer<DBID> getDBIDSerializer() {
    return DBIDFactory.FACTORY.getDBIDSerializer();
  }

  /**
   * Get a serializer for DBIDs with static size.
   *
   * @return DBID serializer
   */
  public static ByteBufferSerializer<DBID> getDBIDSerializerStatic() {
    return DBIDFactory.FACTORY.getDBIDSerializerStatic();
  }

  /**
   * Generate a single DBID.
   *
   * @return A single DBID
   */
  public static DBID generateSingleDBID() {
    return DBIDFactory.FACTORY.generateSingleDBID();
  }

  /**
   * Return a single DBID for reuse.
   *
   * @param id DBID to deallocate
   */
  public static void deallocateSingleDBID(DBID id) {
    DBIDFactory.FACTORY.deallocateSingleDBID(id);
  }

  /**
   * Generate a static DBID range.
   *
   * @param size Requested size
   * @return DBID range
   */
  public static DBIDRange generateStaticDBIDRange(int size) {
    return DBIDFactory.FACTORY.generateStaticDBIDRange(size);
  }

  /**
   * Deallocate a static DBID range.
   *
   * @param range Range to deallocate
   */
  public static void deallocateDBIDRange(DBIDRange range) {
    DBIDFactory.FACTORY.deallocateDBIDRange(range);
  }

  /**
   * Make a new DBID variable.
   *
   * BUGFIX: this was an instance method, but the class has a private
   * constructor and can never be instantiated, making it uncallable.
   * It is now static like every other utility method here.
   *
   * @param val Initial value.
   * @return Variable
   */
  public static DBIDVar newVar(DBIDRef val) {
    return DBIDFactory.FACTORY.newVar(val);
  }

  /**
   * Make a new (modifiable) array of DBIDs.
   *
   * @return New array
   */
  public static ArrayModifiableDBIDs newArray() {
    return DBIDFactory.FACTORY.newArray();
  }

  /**
   * Make a new (modifiable) hash set of DBIDs.
   *
   * @return New hash set
   */
  public static HashSetModifiableDBIDs newHashSet() {
    return DBIDFactory.FACTORY.newHashSet();
  }

  /**
   * Make a new (modifiable) array of DBIDs.
   *
   * @param size Size hint
   * @return New array
   */
  public static ArrayModifiableDBIDs newArray(int size) {
    return DBIDFactory.FACTORY.newArray(size);
  }

  /**
   * Make a new (modifiable) hash set of DBIDs.
   *
   * @param size Size hint
   * @return New hash set
   */
  public static HashSetModifiableDBIDs newHashSet(int size) {
    return DBIDFactory.FACTORY.newHashSet(size);
  }

  /**
   * Make a new (modifiable) array of DBIDs.
   *
   * @param existing Existing DBIDs
   * @return New array
   */
  public static ArrayModifiableDBIDs newArray(DBIDs existing) {
    return DBIDFactory.FACTORY.newArray(existing);
  }

  /**
   * Make a new (modifiable) hash set of DBIDs.
   *
   * @param existing Existing DBIDs
   * @return New hash set
   */
  public static HashSetModifiableDBIDs newHashSet(DBIDs existing) {
    return DBIDFactory.FACTORY.newHashSet(existing);
  }

  /**
   * Compute the set intersection of two sets.
   *
   * @param first First set
   * @param second Second set
   * @return result.
   */
  // TODO: optimize?
  public static ModifiableDBIDs intersection(DBIDs first, DBIDs second) {
    // Always iterate over the smaller set for fewer contains() probes.
    if (first.size() > second.size()) {
      return intersection(second, first);
    }
    ModifiableDBIDs inter = newHashSet(first.size());
    for (DBIDIter it = first.iter(); it.valid(); it.advance()) {
      if (second.contains(it)) {
        inter.add(it);
      }
    }
    return inter;
  }

  /**
   * Compute the set symmetric intersection of two sets.
   *
   * @param first First set
   * @param second Second set
   * @param firstonly OUTPUT: elements only in first. MUST BE EMPTY
   * @param intersection OUTPUT: elements in intersection. MUST BE EMPTY
   * @param secondonly OUTPUT: elements only in second. MUST BE EMPTY
   */
  // TODO: optimize?
  public static void symmetricIntersection(DBIDs first, DBIDs second, HashSetModifiableDBIDs firstonly, HashSetModifiableDBIDs intersection, HashSetModifiableDBIDs secondonly) {
    // Always iterate over the smaller set; swap the output sets accordingly.
    if (first.size() > second.size()) {
      symmetricIntersection(second, first, secondonly, intersection, firstonly);
      return;
    }
    assert (firstonly.size() == 0) : "OUTPUT set should be empty!";
    assert (intersection.size() == 0) : "OUTPUT set should be empty!";
    assert (secondonly.size() == 0) : "OUTPUT set should be empty!";
    // Initialize with second
    secondonly.addDBIDs(second);
    for (DBIDIter it = first.iter(); it.valid(); it.advance()) {
      // Try to remove; success means the element was in both sets.
      if (secondonly.remove(it)) {
        intersection.add(it);
      } else {
        firstonly.add(it);
      }
    }
  }

  /**
   * Returns the union of the two specified collection of IDs.
   *
   * @param ids1 the first collection
   * @param ids2 the second collection
   * @return the union of ids1 and ids2 without duplicates
   */
  public static ModifiableDBIDs union(DBIDs ids1, DBIDs ids2) {
    ModifiableDBIDs result = DBIDUtil.newHashSet(Math.max(ids1.size(), ids2.size()));
    result.addDBIDs(ids1);
    result.addDBIDs(ids2);
    return result;
  }

  /**
   * Returns the difference of the two specified collection of IDs.
   *
   * @param ids1 the first collection
   * @param ids2 the second collection
   * @return the difference of ids1 minus ids2
   */
  public static ModifiableDBIDs difference(DBIDs ids1, DBIDs ids2) {
    ModifiableDBIDs result = DBIDUtil.newHashSet(ids1);
    result.removeDBIDs(ids2);
    return result;
  }

  /**
   * Wrap an existing DBIDs collection to be unmodifiable.
   *
   * @param existing Existing collection
   * @return Unmodifiable collection
   */
  public static StaticDBIDs makeUnmodifiable(DBIDs existing) {
    if (existing instanceof StaticDBIDs) {
      return (StaticDBIDs) existing;
    }
    // Pick the most specific unmodifiable wrapper available.
    if (existing instanceof TroveArrayDBIDs) {
      return new UnmodifiableIntegerArrayDBIDs((TroveArrayDBIDs) existing);
    }
    if (existing instanceof IntegerDBIDs) {
      return new UnmodifiableIntegerDBIDs((IntegerDBIDs) existing);
    }
    if (existing instanceof ArrayDBIDs) {
      return new UnmodifiableArrayDBIDs((ArrayDBIDs) existing);
    }
    return new UnmodifiableDBIDs(existing);
  }

  /**
   * Ensure that the given DBIDs are array-indexable.
   *
   * @param ids IDs
   * @return Array DBIDs.
   */
  public static ArrayDBIDs ensureArray(DBIDs ids) {
    if (ids instanceof ArrayDBIDs) {
      return (ArrayDBIDs) ids;
    } else {
      return newArray(ids);
    }
  }

  /**
   * Ensure that the given DBIDs support fast "contains" operations.
   *
   * @param ids IDs
   * @return Set DBIDs.
   */
  public static SetDBIDs ensureSet(DBIDs ids) {
    if (ids instanceof SetDBIDs) {
      return (SetDBIDs) ids;
    } else {
      return newHashSet(ids);
    }
  }

  /**
   * Ensure modifiable.
   *
   * @param ids IDs
   * @return Modifiable DBIDs.
   */
  public static ModifiableDBIDs ensureModifiable(DBIDs ids) {
    if (ids instanceof ModifiableDBIDs) {
      return (ModifiableDBIDs) ids;
    } else {
      // Preserve the representation (array vs. hash set) where possible.
      if (ids instanceof ArrayDBIDs) {
        return newArray(ids);
      }
      if (ids instanceof HashSetDBIDs) {
        return newHashSet(ids);
      }
      return newArray(ids);
    }
  }

  /**
   * Make a DBID pair.
   *
   * @param id1 first ID
   * @param id2 second ID
   *
   * @return DBID pair
   */
  public static DBIDPair newPair(DBIDRef id1, DBIDRef id2) {
    return DBIDFactory.FACTORY.newPair(id1, id2);
  }

  /**
   * Make a DoubleDBIDPair.
   *
   * @param val double value
   * @param id ID
   * @return new pair
   */
  public static DoubleDBIDPair newPair(double val, DBIDRef id) {
    return DBIDFactory.FACTORY.newPair(val, id);
  }

  /**
   * Produce a random sample of the given DBIDs.
   *
   * @param source Original DBIDs
   * @param k k Parameter
   * @param rnd Random generator
   * @return new DBIDs
   */
  public static ModifiableDBIDs randomSample(DBIDs source, int k, RandomFactory rnd) {
    return randomSample(source, k, rnd.getRandom());
  }

  /**
   * Produce a random shuffling of the given DBID array.
   *
   * @param ids Original DBIDs
   * @param rnd Random generator
   */
  public static void randomShuffle(ArrayModifiableDBIDs ids, RandomFactory rnd) {
    randomShuffle(ids, rnd.getRandom(), ids.size());
  }

  /**
   * Produce a random shuffling of the given DBID array.
   *
   * @param ids Original DBIDs
   * @param random Random generator
   */
  public static void randomShuffle(ArrayModifiableDBIDs ids, Random random) {
    randomShuffle(ids, random, ids.size());
  }

  /**
   * Produce a random shuffling of the given DBID array.
   *
   * Only the first {@code limit} elements will be randomized.
   *
   * @param ids Original DBIDs
   * @param random Random generator
   * @param limit Shuffling limit.
   */
  public static void randomShuffle(ArrayModifiableDBIDs ids, Random random, final int limit) {
    // Fisher-Yates style: position i-1 receives a random element from [i-1, limit).
    for (int i = 1; i < limit; i++) {
      ids.swap(i - 1, i + random.nextInt(limit - i));
    }
  }

  /**
   * Produce a random sample of the given DBIDs.
   *
   * @param source Original DBIDs
   * @param k k Parameter
   * @param seed Random generator seed
   * @return new DBIDs
   */
  public static ModifiableDBIDs randomSample(DBIDs source, int k, int seed) {
    return randomSample(source, k, new Random(seed));
  }

  /**
   * Produce a random sample of the given DBIDs.
   *
   * @param source Original DBIDs
   * @param k k Parameter
   * @param seed Random generator seed (may be null for a time-based seed)
   * @return new DBIDs
   */
  public static ModifiableDBIDs randomSample(DBIDs source, int k, Long seed) {
    if (seed != null) {
      return randomSample(source, k, new Random(seed.longValue()));
    } else {
      return randomSample(source, k, new Random());
    }
  }

  /**
   * Produce a random sample of the given DBIDs.
   *
   * @param source Original DBIDs
   * @param k k Parameter
   * @param random Random generator (may be null for a default generator)
   * @return new DBIDs
   * @throws IllegalArgumentException when k is not in [1, source.size()]
   */
  public static ModifiableDBIDs randomSample(DBIDs source, int k, Random random) {
    if (k <= 0 || k > source.size()) {
      throw new IllegalArgumentException("Illegal value for size of random sample: " + k + " > " + source.size() + " or <= 0");
    }
    if (random == null) {
      random = new Random();
    }
    // TODO: better balancing for different sizes
    // Two methods: constructive vs. destructive
    if (k < source.size() >> 1) {
      // Constructive: draw random elements until we have k distinct ones.
      ArrayDBIDs aids = DBIDUtil.ensureArray(source);
      DBIDArrayIter iter = aids.iter();
      HashSetModifiableDBIDs sample = DBIDUtil.newHashSet(k);
      while (sample.size() < k) {
        iter.seek(random.nextInt(aids.size()));
        sample.add(iter);
      }
      return sample;
    } else {
      // Destructive: shuffle a copy, then drop everything beyond position k.
      ArrayModifiableDBIDs sample = DBIDUtil.newArray(source);
      randomShuffle(sample, random, k);
      // Delete trailing elements, iterating backwards so indexes stay valid.
      // BUGFIX: this loop previously incremented i, which never terminates
      // normally (i stays >= k) and removes the wrong elements.
      for (int i = sample.size() - 1; i >= k; i--) {
        sample.remove(i);
      }
      return sample;
    }
  }
}
|
package dr.inferencexml.distribution;
import dr.inference.distribution.DistributionLikelihood;
import dr.inference.model.Likelihood;
import dr.inference.model.Statistic;
import dr.math.distributions.BetaDistribution;
import dr.math.distributions.ExponentialDistribution;
import dr.math.distributions.GammaDistribution;
import dr.math.distributions.HalfTDistribution;
import dr.math.distributions.InverseGammaDistribution;
import dr.math.distributions.LaplaceDistribution;
import dr.math.distributions.LogNormalDistribution;
import dr.math.distributions.NormalDistribution;
import dr.math.distributions.PoissonDistribution;
import dr.math.distributions.UniformDistribution;
import dr.xml.AbstractXMLObjectParser;
import dr.xml.AttributeRule;
import dr.xml.ElementRule;
import dr.xml.XMLObject;
import dr.xml.XMLObjectParser;
import dr.xml.XMLParseException;
import dr.xml.XMLSyntaxRule;
import dr.xml.XORRule;
/**
 * XML parsers for a convenient short form of common univariate priors
 * (uniform, exponential, Poisson, half-T, normal, log-normal, gamma,
 * inverse-gamma, Laplace, beta). Each parser builds a
 * {@link DistributionLikelihood} over the {@link Statistic} children of
 * its element.
 */
public class PriorParsers {
// NOTE(review): debug tracing is compiled in and enabled here; presumably it
// should be false for release builds — confirm before shipping.
public final static boolean DEBUG = true;
// XML element names of the individual prior parsers.
public static final String UNIFORM_PRIOR = "uniformPrior";
public static final String EXPONENTIAL_PRIOR = "exponentialPrior";
public static final String POISSON_PRIOR = "poissonPrior";
public static final String NORMAL_PRIOR = "normalPrior";
public static final String LOG_NORMAL_PRIOR = "logNormalPrior";
public static final String GAMMA_PRIOR = "gammaPrior";
public static final String INVGAMMA_PRIOR = "invgammaPrior";
public static final String INVGAMMA_PRIOR_CORRECT = "inverseGammaPrior";
public static final String LAPLACE_PRIOR = "laplacePrior";
public static final String BETA_PRIOR = "betaPrior";
// XML attribute names shared by the parsers.
public static final String UPPER = "upper";
public static final String LOWER = "lower";
public static final String MEAN = "mean";
public static final String MEAN_IN_REAL_SPACE = "meanInRealSpace";
public static final String STDEV = "stdev";
public static final String SHAPE = "shape";
public static final String SHAPEB = "shapeB";
public static final String SCALE = "scale";
public static final String DF = "df";
public static final String OFFSET = "offset";
public static final String UNINFORMATIVE = "uninformative";
public static final String HALF_T_PRIOR = "halfTPrior";
/**
 * A special parser that reads a convenient short form of priors on parameters.
 */
public static XMLObjectParser UNIFORM_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return UNIFORM_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
double lower = xo.getDoubleAttribute(LOWER);
double upper = xo.getDoubleAttribute(UPPER);
// Infinite bounds would make the density 1/(upper-lower) degenerate.
if (lower == Double.NEGATIVE_INFINITY || upper == Double.POSITIVE_INFINITY)
throw new XMLParseException("Uniform prior " + xo.getName() + " cannot take a bound at infinity, " +
"because it returns 1/(high-low) = 1/inf");
DistributionLikelihood likelihood = new DistributionLikelihood(new UniformDistribution(lower, upper));
if (DEBUG) {
System.out.println("Uniform prior: " + xo.getChildCount());
}
// Every child element must be a Statistic; each is added as data.
for (int j = 0; j < xo.getChildCount(); j++) {
if (DEBUG) {
System.out.println(xo.getChild(j));
}
if (xo.getChild(j) instanceof Statistic) {
if (DEBUG) {
//System.out.println((Statistic) xo.getChild(j));
Statistic test = (Statistic) xo.getChild(j);
System.out.println(test.getDimension());
for (int i = 0; i < test.getDimension(); i++) {
System.out.println("  " + test.getDimensionName(i) + " - " + test.getStatisticValue(i));
}
System.out.println(test.getClass());
}
likelihood.addData((Statistic) xo.getChild(j));
if (DEBUG) {
likelihood.calculateLogLikelihood();
System.out.println("likelihood: " + likelihood.getLogLikelihood());
}
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(LOWER),
AttributeRule.newDoubleRule(UPPER),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given uniform distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 * Accepts either a scale or a mean attribute (XOR), plus an optional offset.
 */
public static XMLObjectParser EXPONENTIAL_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return EXPONENTIAL_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
double scale;
if (xo.hasAttribute(SCALE)) {
scale = xo.getDoubleAttribute(SCALE);
} else {
scale = xo.getDoubleAttribute(MEAN);
}
final double offset = xo.hasAttribute(OFFSET) ? xo.getDoubleAttribute(OFFSET) : 0.0;
// ExponentialDistribution takes a rate, hence 1.0 / scale.
DistributionLikelihood likelihood = new DistributionLikelihood(new ExponentialDistribution(1.0 / scale), offset);
if (DEBUG) {
System.out.println("Exponential prior: " + xo.getChildCount());
}
for (int j = 0; j < xo.getChildCount(); j++) {
if (DEBUG) {
System.out.println(xo.getChild(j));
}
if (xo.getChild(j) instanceof Statistic) {
if (DEBUG) {
System.out.println("scale: " + scale);
System.out.println("offset: " + offset);
//System.out.println((Statistic) xo.getChild(j));
Statistic test = (Statistic) xo.getChild(j);
System.out.println(test.getDimension());
for (int i = 0; i < test.getDimension(); i++) {
System.out.println("  " + test.getDimensionName(i) + " - " + test.getStatisticValue(i));
}
System.out.println(test.getClass());
}
likelihood.addData((Statistic) xo.getChild(j));
if (DEBUG) {
likelihood.makeDirty();
likelihood.calculateLogLikelihood();
System.out.println("likelihood: " + likelihood.getLogLikelihood());
}
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
new XORRule(
AttributeRule.newDoubleRule(SCALE),
AttributeRule.newDoubleRule(MEAN)
),
AttributeRule.newDoubleRule(OFFSET, true),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given exponential distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 */
public static XMLObjectParser POISSON_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return POISSON_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
double mean = xo.getDoubleAttribute(MEAN);
// NOTE(review): OFFSET is required here (no default), unlike most other
// parsers in this class where it defaults to 0.0 — confirm the asymmetry
// is intended.
double offset = xo.getDoubleAttribute(OFFSET);
DistributionLikelihood likelihood = new DistributionLikelihood(new PoissonDistribution(mean), offset);
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(MEAN),
AttributeRule.newDoubleRule(OFFSET),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given poisson distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 */
public static XMLObjectParser HALF_T_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return HALF_T_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
double scale = xo.getDoubleAttribute(SCALE);
double df = xo.getDoubleAttribute(DF);
DistributionLikelihood likelihood = new DistributionLikelihood(new HalfTDistribution(scale, df));
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(SCALE),
AttributeRule.newDoubleRule(DF),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given half-T distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 */
public static XMLObjectParser NORMAL_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return NORMAL_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
double mean = xo.getDoubleAttribute(MEAN);
double stdev = xo.getDoubleAttribute(STDEV);
DistributionLikelihood likelihood = new DistributionLikelihood(new NormalDistribution(mean, stdev));
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(MEAN),
AttributeRule.newDoubleRule(STDEV),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given normal distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 * <p/>
 * If X ~ logNormal, then log(X) ~ Normal.
 * <br>
 * <br>
 * If meanInRealSpace=false, <code>mean</code> specifies the mean of log(X) and
 * <code>stdev</code> specifies the standard deviation of log(X).
 * <br>
 * <br>
 * If meanInRealSpace=true, <code>mean</code> specifies the mean of X, but <code>
 * stdev</code> specifies the standard deviation of log(X).
 * <br>
 */
public static XMLObjectParser LOG_NORMAL_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return LOG_NORMAL_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
double mean = xo.getDoubleAttribute(MEAN);
final double stdev = xo.getDoubleAttribute(STDEV);
final double offset = xo.getAttribute(OFFSET, 0.0);
final boolean meanInRealSpace = xo.getAttribute(MEAN_IN_REAL_SPACE, false);
if (meanInRealSpace) {
if (mean <= 0) {
throw new IllegalArgumentException("meanInRealSpace works only for a positive mean");
}
// Convert the real-space mean E[X] to the log-space mean mu,
// using E[X] = exp(mu + sigma^2 / 2)  =>  mu = ln(E[X]) - sigma^2 / 2.
mean = Math.log(mean) - 0.5 * stdev * stdev;
}
final DistributionLikelihood likelihood = new DistributionLikelihood(new LogNormalDistribution(mean, stdev), offset);
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(MEAN),
AttributeRule.newDoubleRule(STDEV),
AttributeRule.newDoubleRule(OFFSET, true),
AttributeRule.newBooleanRule(MEAN_IN_REAL_SPACE, true),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given lognormal distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 */
public static XMLObjectParser GAMMA_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return GAMMA_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
final double shape = xo.getDoubleAttribute(SHAPE);
final double scale = xo.getDoubleAttribute(SCALE);
final double offset = xo.getAttribute(OFFSET, 0.0);
DistributionLikelihood likelihood = new DistributionLikelihood(new GammaDistribution(shape, scale), offset);
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(SHAPE),
AttributeRule.newDoubleRule(SCALE),
AttributeRule.newDoubleRule(OFFSET, true),
// AttributeRule.newBooleanRule(UNINFORMATIVE, true),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given gamma distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 * Registered under both the legacy name and the corrected spelling.
 */
public static XMLObjectParser INVGAMMA_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return INVGAMMA_PRIOR;
}
public String[] getParserNames() {
return new String[]{INVGAMMA_PRIOR, INVGAMMA_PRIOR_CORRECT};
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
final double shape = xo.getDoubleAttribute(SHAPE);
final double scale = xo.getDoubleAttribute(SCALE);
// NOTE(review): OFFSET is required here (no default), unlike the gamma
// parser above where it defaults to 0.0 — confirm the asymmetry is intended.
final double offset = xo.getDoubleAttribute(OFFSET);
DistributionLikelihood likelihood = new DistributionLikelihood(new InverseGammaDistribution(shape, scale), offset);
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(SHAPE),
AttributeRule.newDoubleRule(SCALE),
AttributeRule.newDoubleRule(OFFSET),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given inverse gamma distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of a Laplace prior.
 */
public static XMLObjectParser LAPLACE_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return LAPLACE_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
double mean = xo.getDoubleAttribute(MEAN);
double scale = xo.getDoubleAttribute(SCALE);
DistributionLikelihood likelihood = new DistributionLikelihood(new LaplaceDistribution(mean, scale));
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(MEAN),
AttributeRule.newDoubleRule(SCALE),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given laplace distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
/**
 * A special parser that reads a convenient short form of priors on parameters.
 */
public static XMLObjectParser BETA_PRIOR_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return BETA_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
final double shape = xo.getDoubleAttribute(SHAPE);
final double shapeB = xo.getDoubleAttribute(SHAPEB);
final double offset = xo.getAttribute(OFFSET, 0.0);
DistributionLikelihood likelihood = new DistributionLikelihood(new BetaDistribution(shape, shapeB), offset);
for (int j = 0; j < xo.getChildCount(); j++) {
if (xo.getChild(j) instanceof Statistic) {
likelihood.addData((Statistic) xo.getChild(j));
} else {
throw new XMLParseException("illegal element in " + xo.getName() + " element");
}
}
return likelihood;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(SHAPE),
AttributeRule.newDoubleRule(SHAPEB),
AttributeRule.newDoubleRule(OFFSET, true),
new ElementRule(Statistic.class, 1, Integer.MAX_VALUE)
};
public String getParserDescription() {
return "Calculates the prior probability of some data under a given beta distribution.";
}
public Class getReturnType() {
return Likelihood.class;
}
};
}
|
package be.kuleuven.cs.distrinet.jnome.tool.dependency;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.aikodi.chameleon.analysis.dependency.DependencyAnalyzer;
import org.aikodi.chameleon.analysis.dependency.DependencyAnalysis.HistoryFilter;
import org.aikodi.chameleon.core.declaration.Declaration;
import org.aikodi.chameleon.core.element.Element;
import org.aikodi.chameleon.core.reference.CrossReference;
import org.aikodi.chameleon.oo.type.IntersectionType;
import org.aikodi.chameleon.oo.type.Type;
import org.aikodi.chameleon.oo.type.TypeInstantiation;
import org.aikodi.chameleon.oo.type.UnionType;
import org.aikodi.chameleon.oo.type.generics.FormalParameterType;
import org.aikodi.chameleon.util.Lists;
import org.aikodi.chameleon.workspace.InputException;
import org.aikodi.chameleon.workspace.Project;
import org.jgrapht.ext.ComponentAttributeProvider;
import org.jgrapht.ext.DOTExporter;
import org.jgrapht.ext.EdgeNameProvider;
import org.jgrapht.ext.VertexNameProvider;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.ListenableDirectedGraph;
import be.kuleuven.cs.distrinet.jnome.analysis.dependency.JavaDependencyOptions;
import be.kuleuven.cs.distrinet.jnome.core.type.AnonymousType;
import be.kuleuven.cs.distrinet.jnome.core.type.ArrayType;
import be.kuleuven.cs.distrinet.rejuse.action.Nothing;
import be.kuleuven.cs.distrinet.rejuse.contract.Contracts;
import be.kuleuven.cs.distrinet.rejuse.function.Function;
import be.kuleuven.cs.distrinet.rejuse.predicate.UniversalPredicate;
/**
 * A {@link DependencyAnalyzer} for Java projects. The types that act as
 * dependency sources, the cross references that are followed, and the types
 * that may act as dependency targets are all selected via caller-supplied
 * predicates. The resulting dependency graph can be written in DOT format
 * with {@link #visualize(Writer)}.
 */
public class JavaDependencyAnalyzer extends DependencyAnalyzer<Type> {

	/** Selects the types whose outgoing dependencies are analyzed. Never null. */
	private final UniversalPredicate<Type,Nothing> _sourcePredicate;

	/** Selects the types that may appear as dependency targets. */
	private final UniversalPredicate<Type,Nothing> _targetPredicate;

	/** Selects the cross references that are followed during the analysis. Never null. */
	private final UniversalPredicate<? super CrossReference<?>,Nothing> _crossReferencePredicate;

	/**
	 * Create a new analyzer for the given project.
	 *
	 * @param project the project to analyze
	 * @param sourcePredicate selects source types; must not be null
	 * @param crossReferencePredicate selects the cross references to follow; must not be null
	 * @param targetPredicate selects target types
	 */
	public JavaDependencyAnalyzer(Project project,
			UniversalPredicate<Type,Nothing> sourcePredicate,
			UniversalPredicate<? super CrossReference<?>,Nothing> crossReferencePredicate,
			UniversalPredicate<Type,Nothing> targetPredicate) {
		super(project);
		Contracts.notNull(sourcePredicate, "The source predicate cannot be null");
		Contracts.notNull(crossReferencePredicate, "The cross reference predicate cannot be null");
		_sourcePredicate = sourcePredicate;
		_crossReferencePredicate = crossReferencePredicate;
		_targetPredicate = targetPredicate;
	}

	@Override
	protected UniversalPredicate<? super CrossReference<?>, Nothing> crossReferencePredicate() {
		return _crossReferencePredicate;
	}

	/**
	 * Returns a mapper that replaces each declaration by the declarations it
	 * should be represented by in the dependency graph. Types are decomposed
	 * (union/intersection members, array element types, instantiated base
	 * types, ...); other declarations are returned unchanged.
	 */
	@Override
	protected Function<Declaration, List<Declaration>,Nothing> createMapper() {
		return new Function<Declaration, List<Declaration>,Nothing>() {
			@Override
			public List<Declaration> apply(Declaration declaration) {
				if(declaration instanceof Type) {
					return decomposeType((Type) declaration);
				}
				return Lists.create(declaration);
			}

			/**
			 * Decompose a type into the declarations that represent it in the
			 * dependency graph. Union and intersection types are decomposed
			 * recursively into their member types. For other types, array,
			 * type-instantiation, formal-parameter and anonymous-type wrappers
			 * are peeled off until the underlying declared type remains.
			 */
			protected List<Declaration> decomposeType(Type type) {
				List<Declaration> result = new ArrayList<>();
				if(type instanceof UnionType) {
					((UnionType)type).types().forEach(t -> result.addAll(decomposeType(t)));
					// BUG FIX: return the decomposed members; the original built
					// 'result' but then discarded it and returned the composite type.
					return result;
				} else if(type instanceof IntersectionType) {
					// BUG FIX: the original cast to UnionType here (copy-paste),
					// which throws a ClassCastException for intersection types.
					((IntersectionType)type).types().forEach(t -> result.addAll(decomposeType(t)));
					return result;
				} else {
					while(type instanceof ArrayType) {
						type = ((ArrayType)type).elementType();
					}
					while(type instanceof TypeInstantiation) {
						type = ((TypeInstantiation)type).baseType();
					}
					while(type instanceof FormalParameterType) {
						type = type.nearestAncestor(Type.class);
					}
					// Anonymous types are attributed to their enclosing named type.
					AnonymousType anon = type.farthestAncestorOrSelf(AnonymousType.class);
					if(anon != null) {
						type = anon.nearestAncestor(Type.class);
					}
					return Lists.create(type);
				}
			}
		};
	}

	/** Replaces characters that are not valid in a DOT identifier ('.', ',', ' ') with '_'. */
	private static String toDotIdentifier(String name) {
		return name.replace('.', '_').replace(',', '_').replace(' ', '_');
	}

	/**
	 * Creates a DOT exporter for the dependency graph. Vertex ids are derived
	 * from the fully qualified type name, vertex labels from the simple type
	 * name; all vertices are rendered as boxes and edges are unlabeled.
	 * Non-{@link Type} vertices are rejected with an IllegalArgumentException.
	 */
	protected DOTExporter<Element, DefaultEdge> createExporter() {
		return new DOTExporter<Element,DefaultEdge>(new VertexNameProvider<Element>() {
			@Override
			public String getVertexName(Element arg0) {
				if(arg0 instanceof Type) {
					return toDotIdentifier(((Type)arg0).getFullyQualifiedName());
				} else {
					throw new IllegalArgumentException();
				}
			}
		}, new VertexNameProvider<Element>() {
			@Override
			public String getVertexName(Element arg0) {
				if(arg0 instanceof Type) {
					return toDotIdentifier(((Type)arg0).name());
				} else {
					throw new IllegalArgumentException();
				}
			}
		}, new EdgeNameProvider<DefaultEdge>() {
			@Override
			public String getEdgeName(DefaultEdge arg0) {
				return "";
			}
		}, new ComponentAttributeProvider<Element>() {
			@Override
			public Map<String, String> getComponentAttributes(Element arg0) {
				Map<String,String> result = new HashMap<>();
				result.put("shape", "box");
				return result;
			}
		}, new ComponentAttributeProvider<DefaultEdge>() {
			@Override
			public Map<String, String> getComponentAttributes(DefaultEdge arg0) {
				return new HashMap<>();
			}
		}
		);
	}

	/**
	 * Builds the dependency graph and writes it in DOT format.
	 *
	 * @param writer the destination for the DOT output
	 * @throws InputException when the project sources cannot be processed
	 */
	public void visualize(Writer writer) throws InputException {
		ListenableDirectedGraph<Element, DefaultEdge> graph = buildDependencyGraph();
		DOTExporter<Element,DefaultEdge> exporter = createExporter();
		exporter.export(writer, graph);
	}

	/** Adapts the given JGraphT graph to the {@link GraphBuilder} callback interface. */
	private GraphBuilder<Element> createGraphBuilder(final ListenableDirectedGraph<Element, DefaultEdge> graph) {
		return new GraphBuilder<Element>() {
			@Override
			public void addVertex(Element v) {
				graph.addVertex(v);
			}
			@Override
			public void addEdge(Element first, Element second) {
				// Vertices must exist before an edge between them can be added.
				addVertex(first);
				addVertex(second);
				graph.addEdge(first, second);
			}
		};
	}

	/** Runs the analysis and collects the result into a directed JGraphT graph. */
	private ListenableDirectedGraph<Element, DefaultEdge> buildDependencyGraph() throws InputException {
		ListenableDirectedGraph<Element, DefaultEdge> graph = new ListenableDirectedGraph<>(DefaultEdge.class);
		GraphBuilder<Element> builder = createGraphBuilder(graph);
		buildGraph(builder);
		return graph;
	}

	@Override
	protected UniversalPredicate<Type, Nothing> elementPredicate() {
		return _sourcePredicate;
	}

	@Override
	protected UniversalPredicate<Type, Nothing> declarationPredicate() {
		return _targetPredicate;
	}

	@Override
	protected HistoryFilter<Type, Type> historyFilter() {
		// Raw cast mirrors the declared type of the shared filter constant.
		return (HistoryFilter)JavaDependencyOptions.REDUNDANT_INHERITED_DEPENDENCY_FILTER;
	}
}
|
package de.lmu.ifi.dbs.elki.normalization;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import de.lmu.ifi.dbs.elki.data.DatabaseObject;
import de.lmu.ifi.dbs.elki.data.MultiRepresentedObject;
import de.lmu.ifi.dbs.elki.database.DatabaseObjectMetadata;
import de.lmu.ifi.dbs.elki.math.linearalgebra.LinearEquationSystem;
import de.lmu.ifi.dbs.elki.utilities.ClassGenericsUtil;
import de.lmu.ifi.dbs.elki.utilities.exceptions.ExceptionMessages;
import de.lmu.ifi.dbs.elki.utilities.exceptions.UnableToComplyException;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.Parameterization;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.ClassListParameter;
import de.lmu.ifi.dbs.elki.utilities.pairs.Pair;
/**
 * Class to perform and undo a normalization on multi-represented objects with
 * respect to given normalizations for each representation.
 *
 * @author Elke Achtert
 * @param <O> object type
 */
public class MultiRepresentedObjectNormalization<O extends DatabaseObject> extends AbstractNormalization<MultiRepresentedObject<O>> {
  /**
   * Default normalization, used for every representation when the user did not
   * configure explicit normalizations.
   */
  public final static String DEFAULT_NORMALIZATION = AttributeWiseMinMaxNormalization.class.getName();

  /**
   * Keyword for no normalization.
   */
  // TODO: support for this was removed below.
  // Instead the user can just give DummyNormalization.class.getName(), right?
  // public final static String NO_NORMALIZATION = "noNorm";

  /**
   * Option ID for normalizations
   */
  public final OptionID NORMALIZATION_ID = OptionID.getOrCreateOptionID("normalizations", "A comma separated list of normalizations for each representation. " + "If in one representation no normalization is desired, please use the class '" + DummyNormalization.class.getName() + "' in the list.");

  /**
   * Normalization class parameter
   */
  private final ClassListParameter<Normalization<O>> NORMALIZATION_PARAM = new ClassListParameter<Normalization<O>>(NORMALIZATION_ID, Normalization.class);

  /**
   * A pattern defining a comma.
   */
  public static final Pattern SPLIT = Pattern.compile(",");

  /**
   * The normalizations for each representation. Lazily initialized with
   * defaults when not configured, because the number of representations is
   * only known once data is seen.
   */
  private List<Normalization<O>> normalizations;

  /**
   * Constructor, adhering to
   * {@link de.lmu.ifi.dbs.elki.utilities.optionhandling.Parameterizable}
   *
   * @param config Parameterization
   */
  public MultiRepresentedObjectNormalization(Parameterization config) {
    super();
    config = config.descend(this);
    // The default value will be initialized on-demand, since we don't know
    // the number of representations beforehand.
    if(config.grab(NORMALIZATION_PARAM)) {
      // TODO: add support back for NO_NORMALIZATION keyword?
      // Right now, the user needs to specify DummyNormalization.class.getName()
      this.normalizations = NORMALIZATION_PARAM.instantiateClasses(config);
    }
  }

  /**
   * Performs a normalization on a list of database objects and their
   * associations.
   *
   * @param objectAndAssociationsList the list of database objects and their
   *        associations
   * @return a list of normalized database objects and their associations
   *         corresponding to the given list
   * @throws NonNumericFeaturesException if feature vectors differ in length or
   *         values are not suitable to normalization
   */
  @Override
  public List<Pair<MultiRepresentedObject<O>, DatabaseObjectMetadata>> normalizeObjects(List<Pair<MultiRepresentedObject<O>, DatabaseObjectMetadata>> objectAndAssociationsList) throws NonNumericFeaturesException {
    if(objectAndAssociationsList.size() == 0) {
      return new ArrayList<Pair<MultiRepresentedObject<O>, DatabaseObjectMetadata>>();
    }
    // number of representations
    int numberOfRepresentations = objectAndAssociationsList.get(0).getFirst().getNumberOfRepresentations();
    // init default normalizations
    // must be done here, because at setParameters() the number of
    // representations is unknown
    if(normalizations == null) {
      initDefaultNormalizations(numberOfRepresentations);
    }
    // normalize each representation separately
    List<List<O>> objects = new ArrayList<List<O>>();
    for(int r = 0; r < numberOfRepresentations; r++) {
      List<O> objectsInRepresentation = new ArrayList<O>(objectAndAssociationsList.size());
      for(Pair<MultiRepresentedObject<O>, DatabaseObjectMetadata> o : objectAndAssociationsList) {
        if(numberOfRepresentations != o.getFirst().getNumberOfRepresentations()) {
          throw new IllegalArgumentException("Number of representations differs!");
        }
        objectsInRepresentation.add(o.getFirst().getRepresentation(r));
      }
      Normalization<O> normalization = normalizations.get(r);
      objects.add(normalization.normalize(objectsInRepresentation));
    }
    // build the normalized multi-represented objects, preserving object ids
    // and associations
    List<Pair<MultiRepresentedObject<O>, DatabaseObjectMetadata>> normalized = new ArrayList<Pair<MultiRepresentedObject<O>, DatabaseObjectMetadata>>();
    for(int i = 0; i < objectAndAssociationsList.size(); i++) {
      List<O> representations = new ArrayList<O>(numberOfRepresentations);
      for(int r = 0; r < numberOfRepresentations; r++) {
        representations.add(objects.get(r).get(i));
      }
      MultiRepresentedObject<O> o = new MultiRepresentedObject<O>(representations);
      o.setID(objectAndAssociationsList.get(i).getFirst().getID());
      DatabaseObjectMetadata associations = objectAndAssociationsList.get(i).getSecond();
      normalized.add(new Pair<MultiRepresentedObject<O>, DatabaseObjectMetadata>(o, associations));
    }
    return normalized;
  }

  /**
   * Performs a normalization on a list of multi-represented feature vectors,
   * applying the configured normalization of each representation separately.
   *
   * @param featureVectors the feature vectors to normalize
   * @return normalized feature vectors corresponding to the given list
   * @throws NonNumericFeaturesException if feature vectors differ in length or
   *         values are not suitable to normalization
   */
  @Override
  public List<MultiRepresentedObject<O>> normalize(List<MultiRepresentedObject<O>> featureVectors) throws NonNumericFeaturesException {
    if(featureVectors.size() == 0) {
      return new ArrayList<MultiRepresentedObject<O>>();
    }
    // number of representations
    int numberOfRepresentations = normalizations != null ? normalizations.size() : featureVectors.get(0).getNumberOfRepresentations();
    if(normalizations == null) {
      initDefaultNormalizations(numberOfRepresentations);
    }
    // normalize each representation separately
    List<List<O>> objects = new ArrayList<List<O>>();
    for(int r = 0; r < numberOfRepresentations; r++) {
      List<O> objectsInRepresentation = new ArrayList<O>(featureVectors.size());
      for(MultiRepresentedObject<O> o : featureVectors) {
        if(numberOfRepresentations != o.getNumberOfRepresentations()) {
          throw new IllegalArgumentException("Number of representations differs!");
        }
        objectsInRepresentation.add(o.getRepresentation(r));
      }
      Normalization<O> normalization = normalizations.get(r);
      objects.add(normalization.normalize(objectsInRepresentation));
    }
    // build the normalized multi-represented objects, preserving object ids
    List<MultiRepresentedObject<O>> normalized = new ArrayList<MultiRepresentedObject<O>>();
    for(int i = 0; i < featureVectors.size(); i++) {
      List<O> representations = new ArrayList<O>(numberOfRepresentations);
      for(int r = 0; r < numberOfRepresentations; r++) {
        representations.add(objects.get(r).get(i));
      }
      MultiRepresentedObject<O> o = new MultiRepresentedObject<O>(representations);
      o.setID(featureVectors.get(i).getID());
      normalized.add(o);
    }
    return normalized;
  }

  /**
   * Init default normalizations for a given number of representations.
   *
   * @param numberOfRepresentations
   */
  private void initDefaultNormalizations(int numberOfRepresentations) {
    normalizations = new ArrayList<Normalization<O>>(numberOfRepresentations);
    for(int r = 0; r < numberOfRepresentations; r++) {
      try {
        Normalization<O> norm = ClassGenericsUtil.instantiateGenerics(Normalization.class, DEFAULT_NORMALIZATION);
        normalizations.add(norm);
      }
      catch(UnableToComplyException e) {
        // BUG FIX: preserve the cause; the original dropped the exception,
        // making the failure undiagnosable if it ever does happen.
        throw new RuntimeException("This should never happen!", e);
      }
    }
  }

  /**
   * Transforms a set of feature vectors to the original attribute ranges.
   *
   * @param featureVectors a set of feature vectors to be transformed into
   *        original space
   * @return a set of feature vectors transformed into original space
   *         corresponding to the given feature vectors
   * @throws NonNumericFeaturesException if feature vectors differ in length or
   *         are not compatible with values initialized during normalization
   */
  @Override
  public List<MultiRepresentedObject<O>> restore(List<MultiRepresentedObject<O>> featureVectors) throws NonNumericFeaturesException {
    List<MultiRepresentedObject<O>> restored = new ArrayList<MultiRepresentedObject<O>>(featureVectors.size());
    for(MultiRepresentedObject<O> o : featureVectors) {
      restored.add(restore(o));
    }
    return restored;
  }

  /**
   * Transforms a feature vector to the original attribute ranges.
   *
   * @param featureVector a feature vector to be transformed into original space
   * @return a feature vector transformed into original space corresponding to
   *         the given feature vector
   * @throws NonNumericFeaturesException feature vector is not compatible with
   *         values initialized during normalization
   */
  @Override
  public MultiRepresentedObject<O> restore(MultiRepresentedObject<O> featureVector) throws NonNumericFeaturesException {
    List<O> restored = new ArrayList<O>();
    int r = featureVector.getNumberOfRepresentations();
    for(int i = 0; i < r; i++) {
      Normalization<O> normalization = normalizations.get(i);
      restored.add(normalization.restore(featureVector.getRepresentation(i)));
    }
    MultiRepresentedObject<O> o = new MultiRepresentedObject<O>(restored);
    o.setID(featureVector.getID());
    return o;
  }

  /**
   * @throws UnsupportedOperationException thrown since this is not supported.
   */
  @Override
  public LinearEquationSystem transform(@SuppressWarnings("unused") LinearEquationSystem linearEquationSystem) {
    throw new UnsupportedOperationException(ExceptionMessages.UNSUPPORTED);
  }

  /**
   * Returns a string representation of this normalization. The specified prefix
   * pre will be the prefix of each new line. This method is used to write the
   * parameters of a normalization to a result of an algorithm using this
   * normalization.
   *
   * @param pre the prefix of each new line
   * @return a string representation of this normalization
   */
  @Override
  public String toString(String pre) {
    // StringBuilder: no shared state here, so the synchronized StringBuffer
    // of the original was unnecessary.
    StringBuilder result = new StringBuilder();
    for(Normalization<O> normalization : normalizations) {
      result.append(normalization.toString(pre));
    }
    return result.toString();
  }
}
|
package edu.northwestern.bioinformatics.studycalendar.service;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.springframework.transaction.annotation.Transactional;
import edu.northwestern.bioinformatics.studycalendar.dao.SiteDao;
import edu.northwestern.bioinformatics.studycalendar.domain.Site;
import edu.northwestern.bioinformatics.studycalendar.domain.Study;
import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.StudyCalendarAuthorizationManager;
import gov.nih.nci.security.authorization.domainobjects.ProtectionGroup;
import gov.nih.nci.security.authorization.domainobjects.User;
/**
 * Service that manages the link between study calendar templates and the
 * CSM authorization objects that protect them: protection elements per study
 * template and protection groups per site.
 *
 * @author Padmaja Vedula
 */
@Transactional
public class TemplateService {
    public static final String PARTICIPANT_COORDINATOR_ACCESS_ROLE = "PARTICIPANT_COORDINATOR";
    public static final String PARTICIPANT_COORDINATOR_GROUP = "PARTICIPANT_COORDINATOR";

    private StudyCalendarAuthorizationManager authorizationManager;

    /** Grants the sites identified by {@code siteIds} access to the template's protection element. */
    public void assignTemplateToSites(Study studyTemplate, List<String> siteIds) throws Exception {
        authorizationManager.assignProtectionElementToPGs(siteIds, protectionElementId(studyTemplate));
    }

    /** Grants the participant coordinators identified by {@code userIds} access to the template. */
    public void assignTemplateToParticipantCds(Study studyTemplate, List<String> userIds) throws Exception {
        authorizationManager.assignProtectionElementsToUsers(userIds, protectionElementId(studyTemplate));
    }

    /** Grants a single user access to every template in {@code studyTemplates}. */
    public void assignMultipleTemplates(List<Study> studyTemplates, String userId) throws Exception {
        List<String> studyPEs = new ArrayList<String>(studyTemplates.size());
        for (Study template : studyTemplates) {
            studyPEs.add(protectionElementId(template));
        }
        authorizationManager.assignMultipleProtectionElements(userId, studyPEs);
    }

    /** Returns the participant coordinators (by group) that may access the template. */
    public Map getParticipantCoordinators(Study studyTemplate) throws Exception {
        return authorizationManager.getUsers(PARTICIPANT_COORDINATOR_GROUP, protectionElementId(studyTemplate));
    }

    /** Returns the site protection groups, partitioned by access to the template. */
    public Map getSiteLists(Study studyTemplate) throws Exception {
        List<ProtectionGroup> allSites = authorizationManager.getSites();
        return authorizationManager.getProtectionGroups(allSites, protectionElementId(studyTemplate));
    }

    /** Looks up the protection group for a single site by name. */
    public ProtectionGroup getSiteProtectionGroup(String siteName) throws Exception {
        return authorizationManager.getSite(siteName);
    }

    /** Returns the protection groups of all sites. */
    public List getAllSiteProtectionGroups() throws Exception {
        return authorizationManager.getSites();
    }

    /** Builds the CSM protection element name for a study template: class name + "." + id. */
    private String protectionElementId(Study template) {
        return template.getClass().getName() + "." + template.getId();
    }

    ////// CONFIGURATION

    public void setStudyCalendarAuthorizationManager(StudyCalendarAuthorizationManager authorizationManager) {
        this.authorizationManager = authorizationManager;
    }
}
|
package edu.psu.compbio.seqcode.projects.akshay.utils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import edu.psu.compbio.seqcode.genome.Genome;
import edu.psu.compbio.seqcode.genome.GenomeConfig;
import edu.psu.compbio.seqcode.genome.location.Point;
import edu.psu.compbio.seqcode.genome.location.Region;
import edu.psu.compbio.seqcode.genome.location.StrandedPoint;
import edu.psu.compbio.seqcode.gse.datasets.motifs.WeightMatrix;
import edu.psu.compbio.seqcode.gse.gsebricks.verbs.motifs.WeightMatrixScoreProfile;
import edu.psu.compbio.seqcode.gse.gsebricks.verbs.motifs.WeightMatrixScorer;
import edu.psu.compbio.seqcode.gse.gsebricks.verbs.sequence.SequenceGenerator;
import edu.psu.compbio.seqcode.gse.tools.utils.Args;
import edu.psu.compbio.seqcode.gse.utils.ArgParser;
import edu.psu.compbio.seqcode.gse.utils.Pair;
import edu.psu.compbio.seqcode.gse.utils.io.RegionFileUtilities;
import edu.psu.compbio.seqcode.projects.shaun.MotifAnalysisSandbox;
import edu.psu.compbio.seqcode.projects.shaun.rnaseq.GTFReader;
import edu.psu.compbio.seqcode.projects.shaun.rnaseq.genemodels.GeneTUnit;
/**
 * Profiles the neighborhood of a set of genes with respect to a set of peaks
 * (e.g. ChIP-seq binding events): peaks falling within a fixed radius of each
 * gene's TSS are reported, either individually or grouped into clusters,
 * together with their signed distance to the TSS and the best score of each
 * motif in the peak (or cluster) sequence.
 */
public class ProfileGeneNeighborhood {
	private GenomeConfig gcon;
	/** Peak points to profile. */
	private List<Point> peaks;
	/** Sequence under each peak, keyed by the peak's location string. */
	private Map<String,String> peaksSeqs;
	/** Gene name -> peaks falling inside the gene's neighborhood. */
	private Map<String,List<Point>> peaksAtgenes;
	/** Gene name -> peak clusters falling inside the gene's neighborhood. */
	private Map<String,List<PointCluster>> peakClustersAtgenes;
	/** Gene name (upper-cased) -> TSS as a stranded point. */
	private Map<String, StrandedPoint> genes;
	/** Gene name -> region spanning the TSS +/- radius. */
	private Map<String, Region> geneDomains;
	private int radius; // Neighborhood radius
	private int cluster_distance; // Maximum gap between consecutive peaks in a cluster
	private List<WeightMatrix> motifs = new ArrayList<WeightMatrix>();
	private int win; // Window width for sequence extraction around peaks/clusters
	// BUG FIX: this field was initialized at declaration via
	// gcon.getSequenceGenerator(), which dereferenced gcon before the
	// constructor assigned it and therefore threw a NullPointerException on
	// every construction. It is now assigned in the constructor only.
	private SequenceGenerator<Region> seqgen;

	public ProfileGeneNeighborhood(GenomeConfig g) {
		gcon = g;
		seqgen = gcon.getSequenceGenerator();
	}

	/**
	 * Command line entry point. Required arguments: --motiffile, --back,
	 * --peaks, --gtf, --geneList. Optional: --win (default 150), --radius
	 * (default 50000), --clusD (default 300), and the flags --ClusterSyntax
	 * and --PeakSyntax selecting which report(s) to print.
	 *
	 * @param args
	 * @throws ParseException
	 * @throws IOException
	 */
	public static void main(String[] args) throws IOException, ParseException{
		ArgParser ap = new ArgParser(args);
		GenomeConfig gc = new GenomeConfig(args);
		ProfileGeneNeighborhood profiler = new ProfileGeneNeighborhood(gc);
		int win = ap.hasKey("win") ? new Integer(ap.getKeyValue("win")).intValue() : 150;
		int rad = ap.hasKey("radius") ? new Integer(ap.getKeyValue("radius")).intValue() : 50000;
		int clusD = ap.hasKey("clusD") ? new Integer(ap.getKeyValue("clusD")).intValue() : 300;
		String motiffile = ap.getKeyValue("motiffile");
		String backfile = ap.getKeyValue("back");
		List<WeightMatrix> matrixList = MotifAnalysisSandbox.loadMotifFromFile(motiffile, backfile, gc.getGenome());
		List<Point> peaks = RegionFileUtilities.loadPeaksFromPeakFile(gc.getGenome(), ap.getKeyValue("peaks"), win);
		String genefile = Args.parseString(args, "gtf", null);
		// Read selected genes into a list
		String geneListFile = Args.parseString(args, "geneList", null);
		File gFile = new File(geneListFile);
		if(!gFile.isFile()){System.err.println("Invalid positive file name");System.exit(1);}
		BufferedReader reader = new BufferedReader(new FileReader(gFile));
		String line;
		List<String> geneList = new ArrayList<String>();
		while ((line = reader.readLine()) != null) {
			// BUG FIX: String.trim() returns a new string; the original called
			// line.trim() and discarded the result, storing untrimmed names.
			geneList.add(line.trim());
		}
		reader.close();
		profiler.setWindow(win);
		profiler.setClusDistance(clusD);
		profiler.setRadius(rad);
		profiler.setPeaks(peaks);
		profiler.setGenes(genefile, geneList);
		profiler.setMotifs(matrixList);
		boolean clusterSyntax = ap.hasKey("ClusterSyntax");
		if(clusterSyntax){profiler.printPeakClusterSyntax();}
		boolean peakSyntax = ap.hasKey("PeakSyntax");
		if(peakSyntax){profiler.printPeakSyntax();}
		profiler.clear();
	}

	// Mutators. Note: setWindow must be called before setPeaks, because
	// setPeaks extracts a window of sequence around each peak.
	public void setWindow(int w){win = w;}
	public void setClusDistance(int c){cluster_distance = c;}
	public void setRadius(int r){radius = r;}
	public void setPeaks(List<Point> pts){peaks = pts; loadPeakSeqs();}
	public void setGenes(String gfffile, List<String> geneList){loadGenes(gfffile, geneList);}
	public void setMotifs(List<WeightMatrix> m){motifs = m;}

	/**
	 * Prints one line per peak cluster per gene (tab-separated): gene name,
	 * the cluster's peak locations, signed distance to the TSS (negative =
	 * upstream), and the best score of each motif in the cluster sequence.
	 * Genes with no nearby cluster get a single "NA" line.
	 */
	public void printPeakClusterSyntax(){
		loadpeakClustersAtgenes();
		StringBuilder sb = new StringBuilder();
		int nMotifs = motifs.size();
		for(String gname : peakClustersAtgenes.keySet()){
			if(peakClustersAtgenes.get(gname).size() == 0){
				sb.append(gname);sb.append("\t");
				sb.append("NA\tNA\t");
				for(int m=0; m<nMotifs; m++){sb.append("NA"); sb.append("\t");}
				sb.deleteCharAt(sb.length()-1);
				sb.append("\n");
			}else{
				for(PointCluster pc: peakClustersAtgenes.get(gname)){
					sb.append(gname);sb.append("\t");
					// Sign the distance by transcription direction: negative when
					// the cluster lies upstream of the TSS.
					int strand = genes.get(gname).getStrand() == '+' ? 1 : -1;
					int sign = strand*(genes.get(gname).getLocation()-pc.getLocation()) > 0 ? -1 : +1;
					int distance = sign*pc.distance(genes.get(gname));
					sb.append(pc.getPeakLocationsString()+"\t");
					sb.append(distance);sb.append("\t");
					String seq = seqgen.execute(pc.expand(win/2));
					for(int m=0; m<nMotifs; m++){
						Pair<Integer,Double> mScore = bestMotif(seq, motifs.get(m));
						sb.append(mScore.cdr());sb.append("\t");
					}
					sb.deleteCharAt(sb.length()-1);
					sb.append("\n");
				}
			}
		}
		StringBuilder header = new StringBuilder();
		header.append("Gene\tPeak\tDistance\t");
		for(WeightMatrix wm : motifs){
			header.append(wm.getName());header.append("\t");
		}
		header.deleteCharAt(header.length()-1);
		System.out.println(header.toString());
		System.out.println(sb.toString());
	}

	/**
	 * Prints one line per individual peak per gene (tab-separated): gene name,
	 * peak location, signed distance to the TSS (negative = upstream), and the
	 * best score of each motif in the peak sequence. Genes with no nearby
	 * peaks get a single "NA" line.
	 */
	public void printPeakSyntax(){
		loadpeaksAtgenes();
		StringBuilder sb = new StringBuilder();
		int nMotifs = motifs.size();
		for(String gname : peaksAtgenes.keySet()){
			if(peaksAtgenes.get(gname).size() == 0){
				sb.append(gname);sb.append("\t");
				sb.append("NA\tNA\t");
				for(int m=0; m<nMotifs; m++){sb.append("NA"); sb.append("\t");}
				sb.deleteCharAt(sb.length()-1);
				sb.append("\n");
			}else{
				for(Point p: peaksAtgenes.get(gname)){
					sb.append(gname);sb.append("\t");
					int strand = genes.get(gname).getStrand() == '+' ? 1 : -1;
					int sign = strand*(genes.get(gname).getLocation()-p.getLocation()) > 0 ? -1 : +1;
					int distance = sign*p.distance(genes.get(gname));
					sb.append(p.getLocationString()+"\t");
					sb.append(distance);sb.append("\t");
					// BUG FIX: the cache is keyed by getLocationString(); the
					// original looked up getLocation() (a boxed int), which can
					// never match a String key and always returned null.
					String seq = peaksSeqs.get(p.getLocationString());
					for(int m=0; m<nMotifs; m++){
						Pair<Integer,Double> mScore = bestMotif(seq, motifs.get(m));
						sb.append(mScore.cdr());sb.append("\t");
					}
					sb.deleteCharAt(sb.length()-1);
					sb.append("\n");
				}
			}
		}
		StringBuilder header = new StringBuilder();
		header.append("Gene\tPeak\tDistance\t");
		for(WeightMatrix wm : motifs){
			header.append(wm.getName());header.append("\t");
		}
		header.deleteCharAt(header.length()-1);
		System.out.println(header.toString());
		System.out.println(sb.toString());
	}

	// Loaders

	/** Extracts and caches the sequence window around each peak. */
	private void loadPeakSeqs(){
		peaksSeqs = new HashMap<String, String>();
		for(Point p : peaks){
			String seq = seqgen.execute(p.expand(win/2));
			peaksSeqs.put(p.getLocationString(), seq);
		}
	}

	/**
	 * Loads gene TSSs from a GTF file, keeps only the genes named in geneList
	 * (case-insensitive), and precomputes each gene's neighborhood region
	 * (TSS +/- radius).
	 */
	private void loadGenes(String gtfFile, List<String> geneList){
		GTFReader gffreader = new GTFReader(new File(gtfFile), gcon.getGenome());
		List<GeneTUnit> geneObjects = gffreader.loadGenes();
		Map<String, StrandedPoint> allGenes = new HashMap<String, StrandedPoint>();
		genes = new HashMap<String, StrandedPoint>();
		for(GeneTUnit gu: geneObjects){
			allGenes.put(gu.getName().toUpperCase(), new StrandedPoint(gu.getTSS(), gu.getStrand()));
		}
		for(String s : geneList){
			String gene_name = s.toUpperCase();
			if(allGenes.containsKey(gene_name)){
				genes.put(gene_name, allGenes.get(gene_name));
			}
		}
		geneDomains = new HashMap<String,Region>();
		for(String gene_name : genes.keySet()){
			geneDomains.put(gene_name, genes.get(gene_name).expand(radius));
		}
	}

	/**
	 * Groups the peaks inside each gene's neighborhood into clusters: sorted
	 * peaks are merged into the current cluster while the gap to the cluster's
	 * last peak is below cluster_distance.
	 */
	private void loadpeakClustersAtgenes(){
		// BUG FIX: the map was never initialized, so the put() below threw a
		// NullPointerException.
		peakClustersAtgenes = new HashMap<String, List<PointCluster>>();
		Map<String,List<Point>> peaksbyChr = hashbychrom(peaks);
		// Sort the points within each chromosome
		for(String chrom: peaksbyChr.keySet()){
			Collections.sort(peaksbyChr.get(chrom));
		}
		for(String gene_name : geneDomains.keySet()){
			String geneChr = geneDomains.get(gene_name).getChrom();
			List<PointCluster> clusters = new ArrayList<PointCluster>();
			peakClustersAtgenes.put(gene_name, clusters);
			if(peaksbyChr.containsKey(geneChr)){
				List<Point> nearbyPeaks = new ArrayList<Point>();
				for(Point p: peaksbyChr.get(geneChr)){
					if(geneDomains.get(gene_name).contains(p)){
						nearbyPeaks.add(p);
					}
				}
				Collections.sort(nearbyPeaks); // They should be sorted already, but just to make sure
				PointCluster lastadded = null;
				for(Point p : nearbyPeaks){
					if(lastadded != null && lastadded.getLastPeak().distance(p) < cluster_distance){
						// Close enough to the previous peak: extend the current cluster.
						lastadded.addpeak(p);
					}else{
						lastadded = new PointCluster(p);
						clusters.add(lastadded);
					}
				}
			}
		}
	}

	/** Collects, per gene, the individual peaks inside the gene's neighborhood. */
	private void loadpeaksAtgenes(){
		// BUG FIX: the map was never initialized, so the put() below threw a
		// NullPointerException.
		peaksAtgenes = new HashMap<String, List<Point>>();
		Map<String,List<Point>> peaksbyChr = hashbychrom(peaks);
		for(String gene_name : geneDomains.keySet()){
			String geneChr = geneDomains.get(gene_name).getChrom();
			peaksAtgenes.put(gene_name, new ArrayList<Point>());
			if(peaksbyChr.containsKey(geneChr)){
				for(Point p: peaksbyChr.get(geneChr)){
					if(geneDomains.get(gene_name).contains(p)){
						peaksAtgenes.get(gene_name).add(p);
					}
				}
			}
		}
	}

	/** Returns the (index, score) pair of the motif's best match in the sequence. */
	private Pair<Integer,Double> bestMotif(String seq, WeightMatrix motif){
		WeightMatrixScorer scorer = new WeightMatrixScorer(motif);
		WeightMatrixScoreProfile profile = scorer.execute(seq);
		// Parameterized constructor; the original used a raw Pair and
		// suppressed the unchecked warning.
		return new Pair<Integer, Double>(profile.getMaxIndex(), profile.getMaxScore());
	}

	/** Buckets the points by chromosome name. */
	private Map<String, List<Point>> hashbychrom(List<Point> pts){
		Map<String, List<Point>> byChr = new HashMap<String, List<Point>>();
		for(Point p : pts){
			if(!byChr.containsKey(p.getChrom()))
				byChr.put(p.getChrom(), new ArrayList<Point>());
			byChr.get(p.getChrom()).add(p);
		}
		return byChr;
	}

	/** Clears the sequence generator's cache. */
	public void clear(){SequenceGenerator.clearCache();}

	/**
	 * Cluster of points, usually used to represent a cluster of ChIPSeq peaks. PointCluster extends Point.
	 * The Point attributes you get from the PointCluster are those of the leftmost Point in the cluster
	 *
	 * @author akshaykakumanu
	 *
	 */
	public class PointCluster extends Point {
		/** The member peaks, kept sorted by location. */
		List<Point> peaks;

		public PointCluster(Point p) {
			super(p.getGenome(), p.getChrom(), p.getLocation());
			peaks = new ArrayList<Point>();
			peaks.add(p);
		}

		/**
		 * Adds a peak (same chromosome only; others are silently ignored) and
		 * keeps this cluster's own location at the leftmost member peak.
		 */
		public void addpeak(Point p){
			if(p.getChrom().equals(getChrom())){
				peaks.add(p);
				Collections.sort(peaks);
				if(p.compareTo(peaks.get(0)) < 0){
					location = p.getLocation();
				}
			}
		}

		/**
		 * Returns a region spanning the whole cluster padded by the given
		 * distance on both sides, clamped to the chromosome boundaries.
		 */
		public Region expand(int distance){
			int ns = Math.max(1, peaks.get(0).getLocation() - distance);
			int ne = Math.min(peaks.get(peaks.size()-1).getLocation() + distance, g.getChromLength(chrom));
			return new Region(g, chrom, ns, ne);
		}

		/** Returns the member peak locations joined by ';'. */
		public String getPeakLocationsString(){
			StringBuilder retSB = new StringBuilder();
			for(Point p: peaks){
				retSB.append(p.getLocationString()+";");
			}
			retSB.deleteCharAt(retSB.length()-1);
			return retSB.toString();
		}

		/** Returns the rightmost member peak. */
		public Point getLastPeak(){return peaks.get(peaks.size()-1);}

		public int getNumPeaks(){return peaks.size();}
	}
}
|
package jme3test.stress;
import com.jme3.app.SimpleApplication;
import com.jme3.material.Material;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial.CullHint;
import com.jme3.scene.shape.Sphere;
import com.jme3.util.NativeObjectManager;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Stress test that generates a 30x30 grid (900) of new sphere meshes every
 * frame and then leaks them by detaching everything on the next update.
 * Notice how memory usage stays constant and OpenGL objects
 * are properly destroyed.
 */
public class TestLeakingGL extends SimpleApplication {

    /** Shared material applied to every generated sphere. */
    private Material solidColor;

    /** Template mesh; a deep clone is made for every rendered sphere. */
    private Sphere original;

    public static void main(String[] args){
        TestLeakingGL app = new TestLeakingGL();
        app.start();
    }

    @Override
    public void simpleInitApp() {
        original = new Sphere(4, 4, 1);
        original.setStatic();
        //original.setInterleaved();

        // this will make sure all spheres are rendered always
        rootNode.setCullHint(CullHint.Never);
        solidColor = assetManager.loadMaterial("Common/Materials/RedColor.j3m");

        cam.setLocation(new Vector3f(0, 5, 0));
        cam.lookAt(Vector3f.ZERO, Vector3f.UNIT_Y);

        // Raise log levels: attaching/destroying hundreds of objects per frame
        // would otherwise flood the console with INFO messages.
        Logger.getLogger(Node.class.getName()).setLevel(Level.WARNING);
        Logger.getLogger(NativeObjectManager.class.getName()).setLevel(Level.WARNING);
    }

    @Override
    public void simpleUpdate(float tpf){
        // Drop last frame's spheres and build a fresh grid of clones,
        // deliberately leaking the old meshes.
        rootNode.detachAllChildren();
        for (int y = -15; y < 15; y++){
            for (int x = -15; x < 15; x++){
                Mesh sphMesh = original.deepClone();
                Geometry sphere = new Geometry("sphere", sphMesh);
                sphere.setMaterial(solidColor);
                sphere.setLocalTranslation(x * 1.5f, 0, y * 1.5f);
                rootNode.attachChild(sphere);
            }
        }
    }
}
|
package org.rstudio.studio.client.rsconnect.ui;
import java.util.ArrayList;
import org.rstudio.core.client.CommandWithArg;
import org.rstudio.core.client.Debug;
import org.rstudio.core.client.StringUtil;
import org.rstudio.core.client.command.AppCommand;
import org.rstudio.core.client.command.EnabledChangedHandler;
import org.rstudio.core.client.command.VisibleChangedHandler;
import org.rstudio.core.client.files.FileSystemItem;
import org.rstudio.core.client.widget.OperationWithInput;
import org.rstudio.core.client.widget.ToolbarButton;
import org.rstudio.core.client.widget.ToolbarPopupMenu;
import org.rstudio.studio.client.RStudioGinjector;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.common.FilePathUtils;
import org.rstudio.studio.client.common.GlobalDisplay;
import org.rstudio.studio.client.common.rpubs.events.RPubsUploadStatusEvent;
import org.rstudio.studio.client.htmlpreview.model.HTMLPreviewResult;
import org.rstudio.studio.client.rmarkdown.events.RmdRenderCompletedEvent;
import org.rstudio.studio.client.rmarkdown.model.RMarkdownServerOperations;
import org.rstudio.studio.client.rmarkdown.model.RmdOutputInfo;
import org.rstudio.studio.client.rmarkdown.model.RmdPreviewParams;
import org.rstudio.studio.client.rsconnect.RSConnect;
import org.rstudio.studio.client.rsconnect.events.RSConnectActionEvent;
import org.rstudio.studio.client.rsconnect.events.RSConnectDeployInitiatedEvent;
import org.rstudio.studio.client.rsconnect.events.RSConnectDeploymentCompletedEvent;
import org.rstudio.studio.client.rsconnect.model.PlotPublishMRUList;
import org.rstudio.studio.client.rsconnect.model.PublishHtmlSource;
import org.rstudio.studio.client.rsconnect.model.RSConnectDeploymentRecord;
import org.rstudio.studio.client.rsconnect.model.RSConnectPublishSettings;
import org.rstudio.studio.client.rsconnect.model.RSConnectPublishSource;
import org.rstudio.studio.client.rsconnect.model.RSConnectServerOperations;
import org.rstudio.studio.client.rsconnect.model.RenderedDocPreview;
import org.rstudio.studio.client.rsconnect.model.PlotPublishMRUList.Entry;
import org.rstudio.studio.client.server.ServerError;
import org.rstudio.studio.client.server.ServerRequestCallback;
import org.rstudio.studio.client.shiny.model.ShinyApplicationParams;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.model.Session;
import org.rstudio.studio.client.workbench.model.SessionInfo;
import org.rstudio.studio.client.workbench.prefs.model.UIPrefs;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.MenuItem;
import com.google.inject.Inject;
import com.google.inject.Provider;
public class RSConnectPublishButton extends Composite
implements RSConnectDeploymentCompletedEvent.Handler,
RPubsUploadStatusEvent.Handler,
RmdRenderCompletedEvent.Handler
{
   // Popup menu attached to the publish button's dropdown. Its contents
   // (previous deployments plus standing commands) are computed lazily,
   // each time the menu is about to be shown, via rebuildPopupMenu.
   class DeploymentPopupMenu extends ToolbarPopupMenu
   {
      @Override
      public void getDynamicPopupMenu(final 
            ToolbarPopupMenu.DynamicPopupMenuCallback callback)
      {
         rebuildPopupMenu(callback);
      }
   }
   /**
    * Creates the publish button: a main toolbar button that triggers a
    * publish of the current content, plus a dropdown listing previous
    * deployment destinations.
    *
    * @param contentType initial content type (one of the
    *    RSConnect.CONTENT_TYPE_* constants)
    * @param showCaption whether to show a text caption next to the icon
    * @param boundCommand optional AppCommand whose visible/enabled state
    *    this button mirrors (may be null)
    */
   public RSConnectPublishButton(int contentType, boolean showCaption,
         AppCommand boundCommand)
   {
      contentType_ = contentType;
      showCaption_ = showCaption;
      boundCommand_ = boundCommand;
      // create root widget
      HorizontalPanel panel = new HorizontalPanel();
      // create publish button itself
      publishButton_ = new ToolbarButton(
            RStudioGinjector.INSTANCE.getCommands()
                  .rsconnectDeploy().getImageResource(),
            new ClickHandler()
            {
               @Override
               public void onClick(ClickEvent arg0)
               {
                  onPublishButtonClick();
               }
            });
      panel.add(publishButton_);
      // create drop menu of previous deployments/other commands
      publishMenu_ = new DeploymentPopupMenu();
      publishMenuButton_ = new ToolbarButton(publishMenu_, true);
      panel.add(publishMenuButton_);
      // initialize composite widget
      initWidget(panel);
      // initialize injected members
      RStudioGinjector.INSTANCE.injectMembers(this);
      // compute initial visible state
      applyVisiblity();
      applyCaption("Publish");
      setPreviousDeployments(null);
      // give ourselves some breathing room on the right
      getElement().getStyle().setMarginRight(4, Unit.PX);
   }
   /**
    * GIN injection entry point (invoked via injectMembers in the
    * constructor). Stores injected services, mirrors the bound command's
    * visible/enabled state if one was supplied, and subscribes to the
    * deployment/publish/render events this button reacts to.
    */
   @Inject
   public void initialize(RSConnectServerOperations server,
         RMarkdownServerOperations rmdServer,
         EventBus events, 
         Commands commands,
         GlobalDisplay display,
         Provider<UIPrefs> pUiPrefs,
         Session session,
         PlotPublishMRUList plotMru)
   {
      server_ = server;
      rmdServer_ = rmdServer;
      events_ = events;
      commands_ = commands;
      display_ = display;
      session_ = session;
      pUiPrefs_ = pUiPrefs;
      plotMru_ = plotMru;
      
      // initialize visibility if requested
      if (boundCommand_ != null) 
      {
         // re-derive our own visibility whenever the bound command's
         // visibility or enabled state changes
         boundCommand_.addVisibleChangedHandler(
               new VisibleChangedHandler()
         {
            @Override
            public void onVisibleChanged(AppCommand command)
            {
               applyVisiblity();
            }
         });

         boundCommand_.addEnabledChangedHandler(
               new EnabledChangedHandler()
         {
            @Override
            public void onEnabledChanged(AppCommand command)
            {
               applyVisiblity();
            }
         });
      }
      
      events_.addHandler(RSConnectDeploymentCompletedEvent.TYPE, this);
      events_.addHandler(RPubsUploadStatusEvent.TYPE, this);
      events_.addHandler(RmdRenderCompletedEvent.TYPE, this);
   }
   /**
    * Shows or hides the widget. Becoming visible triggers a (lazy) refresh
    * of the deployment list so the dropdown reflects current state.
    */
   @Override
   public void setVisible(boolean visible)
   {
      boolean wasVisible = visible_;
      visible_ = visible;
      super.setVisible(visible);
      
      // if becoming visible, repopulate the list of deployments if we haven't
      // already
      if (!wasVisible && visible)
         populateDeployments(false);
   }
   
   /**
    * Sets the source path and rendered output path of the content to be
    * published; refreshes the deployment list if we're currently showing.
    */
   public void setContentPath(String contentPath, String outputPath)
   {
      contentPath_ = contentPath;
      outputPath_ = outputPath;
      if (isVisible())
         populateDeployments(false);
   }
   /**
    * Configures the button from an R Markdown preview. Only Shiny
    * documents and rendered HTML documents with a known format are
    * publishable; anything else clears the preview (which typically hides
    * the button via applyVisiblity).
    */
   public void setRmdPreview(RmdPreviewParams params)
   {
      if (params.isShinyDocument() ||
            (params.getResult().isHtml() &&
             params.getResult().getFormat() != null))
      {
         // website Rmds publish as a website, standalone Rmds as a document
         setContentType(params.isWebsiteRmd() ? 
               RSConnect.CONTENT_TYPE_WEBSITE :
               RSConnect.CONTENT_TYPE_DOCUMENT);
         docPreview_ = new RenderedDocPreview(params);
         setContentPath(params.getResult().getTargetFile(), 
               params.getOutputFile());
      }
      else
      {
         docPreview_ = null;
      }
      applyVisiblity();
   }
public void setShinyPreview(ShinyApplicationParams params)
{
String ext = params.getPath() == null ? "" :
FileSystemItem.getExtensionFromPath(params.getPath()).toLowerCase();
setContentPath(params.getPath(), "");
setContentType(ext == ".r" ?
RSConnect.CONTENT_TYPE_APP_SINGLE :
RSConnect.CONTENT_TYPE_APP);
}
public void setHtmlPreview(HTMLPreviewResult params)
{
if (params.getSucceeded())
{
setContentPath(params.getSourceFile(), params.getHtmlFile());
docPreview_ = new RenderedDocPreview(params);
applyVisiblity();
}
}
   /**
    * Configures the button for an R Markdown source file (no render result
    * yet).
    *
    * @param rmd path to the .Rmd source file
    * @param isStatic whether the document is static (vs. Shiny)
    */
   public void setRmd(String rmd, boolean isStatic)
   {
      docPreview_ = new RenderedDocPreview(rmd, "", isStatic);
      setContentPath(rmd, "");

      SessionInfo sessionInfo = session_.getSessionInfo();
      String buildType = sessionInfo.getBuildToolsType();

      boolean setType = false;
      if (buildType.equals(SessionInfo.BUILD_TOOLS_WEBSITE))
      {
         // if this is an Rmd with a content path
         // NOTE(review): contentType_ here is the type from *before* this
         // call; this only promotes to WEBSITE when the button was already
         // showing a document -- confirm that's intended
         if (contentType_ == RSConnect.CONTENT_TYPE_DOCUMENT &&
             !StringUtil.isNullOrEmpty(contentPath_))
         {
            // ...and if the content path is within the website dir, 
            String websiteDir = sessionInfo.getBuildTargetDir();
            if (contentPath_.startsWith(websiteDir))
            {
               setType = true;
               setContentType(RSConnect.CONTENT_TYPE_WEBSITE);
            }
         }
      }

      // if we haven't set the type yet, apply it
      if (!setType)
         setContentType(RSConnect.CONTENT_TYPE_DOCUMENT);

      applyVisiblity();
   }
public void setIsStatic(boolean isStatic)
{
if (docPreview_ != null)
{
docPreview_.setIsStatic(isStatic);
}
applyVisiblity();
}
   /**
    * Sets the type of content this button publishes. When the type
    * actually changes, the deployment list is refreshed or cleared as
    * appropriate for the new type.
    */
   public void setContentType(int contentType)
   {
      // this can happen in the viewer pane, which hosts e.g. both HTML widgets
      // and R Markdown documents, each of which has its own publishing 
      // semantics
      int oldType = contentType_;
      contentType_ = contentType;
      if (oldType != contentType)
      {
         // moving to a document type: get its deployment status 
         if (contentType == RSConnect.CONTENT_TYPE_DOCUMENT ||
             contentType == RSConnect.CONTENT_TYPE_APP ||
             contentType == RSConnect.CONTENT_TYPE_APP_SINGLE ||
             contentType == RSConnect.CONTENT_TYPE_WEBSITE)
            populateDeployments(true);
         
         // moving to a raw HTML type: erase the deployment list 
         if (contentType == RSConnect.CONTENT_TYPE_HTML ||
             contentType == RSConnect.CONTENT_TYPE_PRES)
            setPreviousDeployments(null);
      }
      applyVisiblity();
   }
public void setPublishHtmlSource(PublishHtmlSource source)
{
publishHtmlSource_ = source;
setPreviousDeployments(null);
applyVisiblity();
}
   /** Refreshes the deployment list after any successful deployment. */
   @Override
   public void onRSConnectDeploymentCompleted(
         RSConnectDeploymentCompletedEvent event)
   {
      if (!event.succeeded())
         return;
      
      // when a deployment is successful, refresh ourselves. Consider: it's 
      // a little wasteful to do this whether or not the deployment was for 
      // the content on which this button is hosted, but there are unlikely to
      // be more than a couple publish buttons at any one time, and this is
      // cheap (just hits the local disk)
      populateDeployments(true);
   }

   /** Refreshes the deployment list after a successful RPubs upload. */
   @Override
   public void onRPubsPublishStatus(RPubsUploadStatusEvent event)
   {
      // make sure it applies to our context
      RPubsUploadStatusEvent.Status status = event.getStatus();
      if (StringUtil.isNullOrEmpty(status.getError()))
      {
         populateDeployments(true);
      }
   }

   /**
    * Completes a deferred publish: if renderThenPublish kicked off a
    * render, fire the deploy event now that output exists. Pending flags
    * are cleared unconditionally so a cancelled render doesn't wedge us.
    */
   @Override
   public void onRmdRenderCompleted(RmdRenderCompletedEvent event)
   {
      // ensure we got a result--note that even a cancelled render generates an
      // event, but with an empty output file
      if (rmdRenderPending_ && event.getResult() != null &&
          !StringUtil.isNullOrEmpty(event.getResult().getOutputFile()))
      {
         RenderedDocPreview docPreview = 
               new RenderedDocPreview(event.getResult());
         events_.fireEvent(RSConnectActionEvent.DeployDocEvent(docPreview,
               event.getResult().isWebsite() ? 
                     RSConnect.CONTENT_TYPE_WEBSITE :
                     RSConnect.CONTENT_TYPE_DOCUMENT,
               publishAfterRmdRender_));
      }
      publishAfterRmdRender_ = null;
      rmdRenderPending_ = false;
      anyRmdRenderPending_ = false;
   }
   /** Shows or hides the text caption next to the publish icon. */
   public void setShowCaption(boolean show)
   {
      if (showCaption_ != show)
      {
         showCaption_ = show;
         applyCaption();
      }
   }
   
   /** Forcibly hides the button regardless of computed visibility. */
   public void setManuallyHidden(boolean hide)
   {
      if (manuallyHidden_ != hide)
      {
         manuallyHidden_ = hide;
         applyVisiblity();
      }
   }
   
   /**
    * Returns true if any publish button (in any pane) has a just-in-time
    * render in flight; tracked via a class-wide static flag.
    */
   public static boolean isAnyRmdRenderPending()
   {
      return anyRmdRenderPending_;
   }
   
   /** Programmatically triggers the same action as clicking the button. */
   public void invokePublish()
   {
      onPublishButtonClick();
   }
   /**
    * Handles a click on the main (non-dropdown) button: republishes to the
    * most recent destination, looking it up first if the dropdown has
    * never been populated.
    */
   private void onPublishButtonClick()
   {
      // if the publish button is clicked without the droplist ever being 
      // invoked, then we need to grab the list of existing deployments to
      // determine what the default one will be.
      if (defaultRec_ == null && populatedPath_ == null)
      {
         rebuildPopupMenu(new ToolbarPopupMenu.DynamicPopupMenuCallback()
         {
            @Override
            public void onPopupMenu(ToolbarPopupMenu menu)
            {
               onPublishRecordClick(defaultRec_);
            }
         });
      }
      else
      {
         onPublishRecordClick(defaultRec_);
      }
   }
   
   /**
    * Refreshes the cached deployment list for the current content path.
    *
    * @param force if true, refetch even if this path was already populated
    */
   private void populateDeployments(final boolean force)
   {
      // force menu to think this is a new path to check for deployments
      if (force)
         populatedPath_ = null;

      // if we don't need to recompute the caption, stop now
      // NOTE(review): when the caption is hidden this skips populating
      // entirely -- the list is then fetched lazily when the menu opens
      if (!showCaption_)
         return;

      rebuildPopupMenu(null);
   }
   /**
    * Starts a publish to the given destination (null means "pick a new
    * destination"), dispatching on the current content type. HTML-ish
    * content is generated on demand; apps deploy directly; documents may
    * require a just-in-time render first.
    *
    * @param previous destination to republish to, or null for a fresh one
    */
   private void onPublishRecordClick(final RSConnectDeploymentRecord previous)
   {
      switch (contentType_)
      {
      case RSConnect.CONTENT_TYPE_HTML:
      case RSConnect.CONTENT_TYPE_PRES:
         if (publishHtmlSource_ == null) 
         {
            display_.showErrorMessage("Content Publish Failed",
                  "No HTML could be generated for the content.");
            return;
         }
         // generate the standalone HTML, then deploy it
         publishHtmlSource_.generatePublishHtml(
               new CommandWithArg<String>() 
               {
                  @Override
                  public void execute(String arg)
                  {
                     events_.fireEvent(RSConnectActionEvent.DeployHtmlEvent(
                           contentType_, contentPath_, arg, 
                           publishHtmlSource_.getTitle(), previous));
                  }
               });
         break;
      case RSConnect.CONTENT_TYPE_PLOT:
         // for plots, we need to generate the hosting HTML prior to publishing
         if (publishHtmlSource_ != null)
         {
            publishHtmlSource_.generatePublishHtml(
                  new CommandWithArg<String>()
                  {
                     @Override
                     public void execute(String htmlFile)
                     {
                        events_.fireEvent(
                              RSConnectActionEvent.DeployPlotEvent(htmlFile,
                                    previous));
                     }
                  });
         }
         break;
      case RSConnect.CONTENT_TYPE_APP:
      case RSConnect.CONTENT_TYPE_APP_SINGLE:
         // Shiny application
         events_.fireEvent(RSConnectActionEvent.DeployAppEvent(
               contentPath_, contentType_, previous));
         break;
      case RSConnect.CONTENT_TYPE_DOCUMENT:
         // static doc with no output yet but a saved source file: render
         // first, then publish (see renderThenPublish)
         if (docPreview_ == null ||
             (docPreview_.isStatic() && 
              StringUtil.isNullOrEmpty(docPreview_.getOutputFile()) &&
              docPreview_.getSourceFile() != null))
         {
            // if the doc has been saved but not been rendered, go render it and
            // come back when we're finished
            renderThenPublish(contentPath_, previous);
         }
         else
         {
            // All R Markdown variants (single/multiple and static/Shiny)
            if (docPreview_.getSourceFile() == null)
            {
               display_.showErrorMessage("Unsaved Document", 
                     "Unsaved documents cannot be published. Save the document " +
                     "before publishing it.");
               break;
            }
            events_.fireEvent(RSConnectActionEvent.DeployDocEvent(
                  docPreview_, RSConnect.CONTENT_TYPE_DOCUMENT, previous));
         }
         break;
      case RSConnect.CONTENT_TYPE_WEBSITE:
         events_.fireEvent(RSConnectActionEvent.DeployDocEvent(
               docPreview_, RSConnect.CONTENT_TYPE_WEBSITE, previous));
         break;
      default: 
         // should never happen 
         display_.showErrorMessage("Can't Publish " + 
            RSConnect.contentTypeDesc(contentType_), 
            "The content type '" + 
            RSConnect.contentTypeDesc(contentType_) + 
            "' is not currently supported for publishing.");
      }
   }
   /**
    * Rebuilds the dropdown menu from a list of previous deployments (null
    * or empty means "never deployed"). Also picks the default destination
    * (the most recent deployment) used by the main button, updates the
    * caption (Publish vs. Republish), and appends the plot MRU and the
    * account-management command.
    */
   private void setPreviousDeployments(JsArray<RSConnectDeploymentRecord> recs)
   {
      // clear existing deployment menu, if any
      publishMenu_.clearItems();
      defaultRec_ = null;

      // if there are existing deployments, make the UI reflect that this is a
      // republish
      if (recs != null && recs.length() > 0)
      {
         applyCaption("Republish");

         // find the default (last deployed record)--this needs to be done as
         // a first pass so we can identify the associated menu item in one
         // pass
         for (int i = 0; i < recs.length(); i++)
         {
            final RSConnectDeploymentRecord rec = recs.get(i);
            if (rec == null)
               continue;
            if (defaultRec_ == null || defaultRec_.getWhen() < rec.getWhen())
            {
               defaultRec_ = rec;
            }
         }

         // build the deployment menu
         for (int i = 0; i < recs.length(); i++)
         {
            final RSConnectDeploymentRecord rec = recs.get(i);
            final DeploymentMenuItem menuItem = new DeploymentMenuItem(rec, 
                  rec == defaultRec_, new Command()
               {
                  @Override
                  public void execute()
                  {
                     onPublishRecordClick(rec);
                  }
               });
            publishMenu_.addItem(menuItem);
         }
         
         publishMenu_.addSeparator();
         publishMenu_.addItem(new MenuItem(
               AppCommand.formatMenuLabel(
                     commands_.rsconnectDeploy().getImageResource(), 
                     "Other Destination...", null),
               true,
               new Scheduler.ScheduledCommand()
               {
                  @Override
                  public void execute()
                  {
                     // null record = prompt for a new destination
                     onPublishRecordClick(null);
                  }
               }));
      }
      else
      {
         // show first-time publish button caption
         applyCaption("Publish");

         // no existing deployments to redeploy to, so just offer to make a new
         // one
         publishMenu_.addItem(new MenuItem(
               AppCommand.formatMenuLabel(
                     commands_.rsconnectDeploy().getImageResource(), 
                     "Publish " + RSConnect.contentTypeDesc(contentType_) + 
                     "...", null),
               true,
               new Scheduler.ScheduledCommand()
               {
                  @Override
                  public void execute()
                  {
                     onPublishRecordClick(defaultRec_);
                  }
               }));
      }

      // if it's a plot, show an MRU of recently deployed plot "names"
      if (contentType_ == RSConnect.CONTENT_TYPE_PLOT)
      {
         plotMru_.addPlotMruEntries(publishMenu_, 
               new OperationWithInput<PlotPublishMRUList.Entry>()
         {
            @Override
            public void execute(Entry plot)
            {
               republishPlot(plot);
            }
         });
      }
      publishMenu_.addSeparator();
      publishMenu_.addItem(
            commands_.rsconnectManageAccounts().createMenuItem(false));
   }
   /**
    * Republishes the current plot under a previously-used name/account
    * (picked from the plot MRU): generates the standalone hosting HTML,
    * then initiates a deployment of just that file.
    */
   private void republishPlot(final PlotPublishMRUList.Entry plot)
   {
      if (publishHtmlSource_ != null)
      {
         publishHtmlSource_.generatePublishHtml(
               new CommandWithArg<String>()
               {
                  @Override
                  public void execute(String htmlFile)
                  {
                     // deploy the single generated HTML file as self-contained
                     // static content
                     RSConnectPublishSource source = 
                           new RSConnectPublishSource(htmlFile, null,
                                 true, true, false, "Plot", contentType_);
                     ArrayList<String> deployFiles = new ArrayList<String>();
                     deployFiles.add(FilePathUtils.friendlyFileName(htmlFile));
                     RSConnectPublishSettings settings = 
                           new RSConnectPublishSettings(
                                 deployFiles, 
                                 new ArrayList<String>(), 
                                 new ArrayList<String>(),
                                 false, true);
                     events_.fireEvent(
                           new RSConnectDeployInitiatedEvent(source, settings,
                                 true, RSConnectDeploymentRecord.create(
                                       plot.name, null, plot.account, 
                                       plot.server)));
                  }
               });
      }
   }
   /**
    * Recomputes and applies visibility for both the dropdown menu button
    * and the widget as a whole. (Name is a historical misspelling of
    * "applyVisibility", kept for consistency with existing callers.)
    */
   private void applyVisiblity()
   {
      publishMenuButton_.setVisible(recomputeMenuVisiblity());
      setVisible(recomputeVisibility());
   }
   
   // recomputes visibility for the popup menu that offers republish
   // destinations 
   private boolean recomputeMenuVisiblity()
   {
      if (pUiPrefs_.get().enableRStudioConnect().getGlobalValue())
      {
         // always show the menu when RSConnect is enabled
         return true;
      }
      else if (contentType_ == RSConnect.CONTENT_TYPE_DOCUMENT && 
               docPreview_ != null)
      {
         // show the menu for Shiny documents
         return !docPreview_.isStatic();
      }
      else if (contentType_ == RSConnect.CONTENT_TYPE_APP ||
               contentType_ == RSConnect.CONTENT_TYPE_APP_SINGLE)
      {
         // show the menu for Shiny apps
         return true;
      }
      
      // hide the menu for everything else
      return false;
   }
private boolean recomputeVisibility()
{
// if all publishing is disabled, hide ourselves
if (!session_.getSessionInfo().getAllowPublish() ||
!pUiPrefs_.get().showPublishUi().getGlobalValue())
return false;
// if both internal and external publishing is disabled, hide ourselves
if (!session_.getSessionInfo().getAllowExternalPublish() &&
!pUiPrefs_.get().enableRStudioConnect().getGlobalValue())
return false;
// if we're bound to a command's visibility/enabled state, check that
if (boundCommand_ != null && (!boundCommand_.isVisible() ||
!boundCommand_.isEnabled()))
return false;
// if we have no content type, hide ourselves
if (contentType_ == RSConnect.CONTENT_TYPE_NONE)
return false;
// if we do have a content type, ensure that we have actual content
// bound to it
if ((contentType_ == RSConnect.CONTENT_TYPE_HTML ||
contentType_ == RSConnect.CONTENT_TYPE_PLOT ||
contentType_ == RSConnect.CONTENT_TYPE_PRES) &&
publishHtmlSource_ == null)
return false;
if ((contentType_ == RSConnect.CONTENT_TYPE_APP ||
contentType_ == RSConnect.CONTENT_TYPE_APP_SINGLE) &&
StringUtil.isNullOrEmpty(contentPath_))
return false;
if (manuallyHidden_)
return false;
// looks like we should be visible
return true;
}
private void applyCaption(String caption)
{
caption_ = caption;
applyCaption();
}
private void applyCaption()
{
publishButton_.setText(showCaption_ ? caption_ : "");
}
   // rebuilds the popup menu--this can happen when the menu is invoked; it can
   // also happen when the button is created if we're aggressively checking
   // publish status
   //
   // The callback (if non-null) is always invoked with the menu, whether we
   // repopulated it, reused a cached population, or hit an error. The fetch
   // itself is async; the populating_ flag guards against reentrancy while a
   // request is in flight.
   private void rebuildPopupMenu(final 
         ToolbarPopupMenu.DynamicPopupMenuCallback callback)
   {
      final ToolbarPopupMenu menu = publishMenu_;

      // prevent reentrancy
      if (populating_)
      {
         if (callback != null)
            callback.onPopupMenu(menu);
         return;
      }

      // handle case where we don't have a content path (i.e. plots)
      if (contentPath_ == null)
      {
         setPreviousDeployments(null);
         if (callback != null)
            callback.onPopupMenu(menu);
         return;
      }

      // avoid populating if we've already set the deployments for this path
      // (unless we're forcefully repopulating)
      if (populatedPath_ != null && populatedPath_.equals(contentPath_))
      {
         if (callback != null)
            callback.onPopupMenu(menu);
         return;
      }
   
      String contentPath = contentPath_;
      boolean parent = false;

      // if this is a Shiny application and an .R file is being invoked, check
      // for deployments of its parent path (single-file apps have
      // CONTENT_TYPE_APP_SINGLE and their own deployment records)
      if (contentType_ == RSConnect.CONTENT_TYPE_APP &&
          StringUtil.getExtension(contentPath_).equalsIgnoreCase("r"))
         parent = true;
      
      // if this is a document in a website, use the parent path
      if (contentType_ == RSConnect.CONTENT_TYPE_WEBSITE)
         parent = true;

      // apply parent path if needed
      if (parent)
      {
         FileSystemItem fsiContent = FileSystemItem.createFile(contentPath_);
         contentPath = fsiContent.getParentPathString();
      }

      populating_ = true;
      server_.getRSConnectDeployments(contentPath, 
            outputPath_ == null ? "" : outputPath_,
            new ServerRequestCallback<JsArray<RSConnectDeploymentRecord>>()
      {
         @Override
         public void onResponseReceived(JsArray<RSConnectDeploymentRecord> recs)
         {
            // cache the populated path so subsequent calls can short-circuit
            populatedPath_ = contentPath_;
            populating_ = false;

            // if publishing a website but not content, filter deployments 
            // that are static (as we can't update them)
            if (contentType_ == RSConnect.CONTENT_TYPE_WEBSITE &&
                (docPreview_ == null || 
                 StringUtil.isNullOrEmpty(docPreview_.getOutputFile())))
            {
               JsArray<RSConnectDeploymentRecord> codeRecs = 
                     JsArray.createArray().cast();
               for (int i = 0; i < recs.length(); i++)
               {
                  if (!recs.get(i).getAsStatic())
                     codeRecs.push(recs.get(i));
               }
               recs = codeRecs;
            }

            setPreviousDeployments(recs);
            if (callback != null)
               callback.onPopupMenu(menu);
         }
         
         @Override
         public void onError(ServerError error)
         {
            // deployment fetch failed; fall through to the callback with the
            // menu in whatever state it was in
            populating_ = false;
            if (callback != null)
               callback.onPopupMenu(menu);
         }
      });
   }
   // for static content only: perform a just-in-time render if necessary and
   // then publish the content
   //
   // Asks the server whether the rendered output for 'target' is current; if
   // so, publishes it immediately, otherwise triggers a knit and defers the
   // publish to onRmdRenderCompleted (via the rmdRenderPending_ /
   // publishAfterRmdRender_ state). rmdInfoPending_ guards against double
   // invocation while the output-info request is in flight.
   private void renderThenPublish(final String target,
         final RSConnectDeploymentRecord previous)
   {
      // prevent re-entrancy
      if (rmdInfoPending_)
         return;
      
      final Command renderCommand = new Command() 
      {
         @Override
         public void execute()
         {
            // remember what to publish once the render completes
            publishAfterRmdRender_ = previous;
            rmdRenderPending_ = true;
            anyRmdRenderPending_ = true;
            commands_.knitDocument().execute();
         }
      };
      
      rmdInfoPending_ = true;
      rmdServer_.getRmdOutputInfo(target, 
            new ServerRequestCallback<RmdOutputInfo>()
            {
               @Override
               public void onResponseReceived(RmdOutputInfo response)
               {
                  if (response.isCurrent())
                  {
                     // output is up to date; publish it directly
                     RenderedDocPreview preview = new RenderedDocPreview(
                           target, response.getOutputFile(), true);
                     events_.fireEvent(RSConnectActionEvent.DeployDocEvent(
                           preview, RSConnect.CONTENT_TYPE_DOCUMENT, previous));
                  }
                  else
                  {
                     renderCommand.execute();
                  }
                  rmdInfoPending_ = false;
               }
               
               @Override
               public void onError(ServerError error)
               {
                  // if we failed to figure out whether we need to do a re-render, 
                  // assume one is necessary
                  Debug.logError(error);
                  renderCommand.execute();
                  rmdInfoPending_ = false;
               }
            });
   }
   // --- UI widgets ---
   private final ToolbarButton publishButton_;
   private final DeploymentPopupMenu publishMenu_;
   private ToolbarButton publishMenuButton_;

   // --- injected services (see initialize) ---
   private RSConnectServerOperations server_;
   private RMarkdownServerOperations rmdServer_;
   private EventBus events_;
   private Commands commands_;
   private GlobalDisplay display_;
   private Session session_;
   private Provider<UIPrefs> pUiPrefs_;
   private PlotPublishMRUList plotMru_;

   // --- content state ---
   private String contentPath_;           // source path of publishable content
   private String outputPath_;            // rendered output path, if any
   private int contentType_ = RSConnect.CONTENT_TYPE_NONE;
   private String populatedPath_;         // path whose deployments are cached
   private boolean populating_ = false;   // deployment fetch in flight
   private boolean showCaption_ = true;
   private RenderedDocPreview docPreview_;
   private PublishHtmlSource publishHtmlSource_;
   private String caption_;
   private boolean manuallyHidden_ = false;
   private boolean visible_ = false;

   // --- just-in-time render bookkeeping (see renderThenPublish) ---
   private boolean rmdRenderPending_ = false;
   private boolean rmdInfoPending_ = false;
   private RSConnectDeploymentRecord publishAfterRmdRender_ = null;

   private final AppCommand boundCommand_;
   // most recent deployment; used as the default destination
   private RSConnectDeploymentRecord defaultRec_;

   // true if any publish button has a render pending (class-wide)
   private static boolean anyRmdRenderPending_ = false;
}
|
package org.rstudio.studio.client.workbench.views.buildtools;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.inject.Inject;
import org.rstudio.core.client.command.CommandBinder;
import org.rstudio.core.client.command.Handler;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.events.SessionInitEvent;
import org.rstudio.studio.client.workbench.events.SessionInitHandler;
import org.rstudio.studio.client.workbench.model.Session;
import org.rstudio.studio.client.workbench.model.SessionInfo;
import org.rstudio.studio.client.workbench.prefs.model.UIPrefs;
import org.rstudio.studio.client.workbench.ui.DelayLoadTabShim;
import org.rstudio.studio.client.workbench.ui.DelayLoadWorkbenchTab;
import org.rstudio.studio.client.workbench.views.buildtools.model.BuildRestartContext;
import org.rstudio.studio.client.workbench.views.buildtools.model.BuildState;
import org.rstudio.studio.client.workbench.views.buildtools.ui.BuildPaneResources;
/**
 * Workbench tab hosting the Build pane. The pane itself is delay-loaded;
 * build commands are forwarded to the presenter through the Shim. On
 * session init, build commands not applicable to the project's build
 * tools type are removed.
 */
public class BuildTab extends DelayLoadWorkbenchTab<BuildPresenter>
{
   public interface Binder extends CommandBinder<Commands, Shim> {}

   // Proxy that defers command handling to the (lazily loaded) presenter.
   public abstract static class Shim extends DelayLoadTabShim<BuildPresenter, BuildTab>
   {
      @Handler
      public abstract void onBuildAll();
      @Handler
      public abstract void onRebuildAll();
      @Handler
      public abstract void onCleanAll();
      @Handler
      public abstract void onBuildSourcePackage();
      @Handler
      public abstract void onBuildBinaryPackage();
      @Handler
      public abstract void onRoxygenizePackage();
      @Handler
      public abstract void onStopBuild();
      @Handler
      public abstract void onCheckPackage();

      abstract void initialize(BuildState buildState);
      abstract void initializeAfterRestart(BuildRestartContext restartContext);
      abstract void initializeDevtools(String devtoolsLoadAllPath);
   }

   @Inject
   public BuildTab(final Shim shim, 
                   final Session session, 
                   Binder binder, 
                   final Commands commands,
                   EventBus eventBus,
                   UIPrefs uiPrefs)
   {
      super("Build", shim);
      session_ = session;
      binder.bind(commands, shim);
      
      // stop build always starts out disabled
      commands.stopBuild().setEnabled(false);
      
      // manage roxygen command: only visible when roxygen is enabled, and
      // kept in sync with the preference
      commands.roxygenizePackage().setVisible(uiPrefs.useRoxygen().getValue());
      uiPrefs.useRoxygen().addValueChangeHandler(
                                       new ValueChangeHandler<Boolean>() {
         @Override
         public void onValueChange(ValueChangeEvent<Boolean> event)
         {
            commands.roxygenizePackage().setVisible(event.getValue());
         }  
      });
      
      eventBus.addHandler(SessionInitEvent.TYPE, new SessionInitHandler() {
         public void onSessionInit(SessionInitEvent sie)
         {
            SessionInfo sessionInfo = session.getSessionInfo();
            
            // adapt or remove package commands if this isn't a package
            String type = sessionInfo.getBuildToolsType();
            if (!type.equals(SessionInfo.BUILD_TOOLS_PACKAGE))
            {
               commands.buildSourcePackage().remove();
               commands.buildBinaryPackage().remove();
               commands.roxygenizePackage().remove();
               commands.checkPackage().remove();
               // repurpose "Build All" as the generic build command
               commands.buildAll().setImageResource(
                                 BuildPaneResources.INSTANCE.iconBuild());
               commands.buildAll().setMenuLabel("_Build All");
               commands.buildAll().setDesc("Build all");
            }
            
            // remove makefile commands if this isn't a makefile
            if (!type.equals(SessionInfo.BUILD_TOOLS_MAKEFILE))
            {
               commands.rebuildAll().remove();
               commands.cleanAll().remove();
            }
            
            // remove all other commands if there are no build tools
            if (type.equals(SessionInfo.BUILD_TOOLS_NONE))
            {
               commands.buildAll().remove();
               commands.rebuildAll().remove();
               commands.cleanAll().remove();
               commands.stopBuild().remove();
               commands.activateBuild().remove();
            }
            
            // initialize build tab in one of three ways: from an in-progress
            // build's state, from a build-triggered restart, or from a
            // devtools::load_all request
            BuildState buildState = sessionInfo.getBuildState();
            BuildRestartContext context = sessionInfo.getBuildRestartContext();
            String devtoolsLoadPath = sessionInfo.getBuildDevtoolsLoadPath();
            if (buildState != null)
               shim.initialize(buildState);
            else if (context != null)
               shim.initializeAfterRestart(context);
            else if (devtoolsLoadPath != null)
               shim.initializeDevtools(devtoolsLoadPath);
         }
      });
   }
   
   /** Suppress (hide) the tab entirely when the project has no build tools. */
   @Override
   public boolean isSuppressed()
   {
      return session_.getSessionInfo().getBuildToolsType().equals(
                                             SessionInfo.BUILD_TOOLS_NONE);
   }
   
   private Session session_;
}
|
package org.rstudio.studio.client.workbench.views.console.shell;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.JsArrayString;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.event.dom.client.*;
import com.google.gwt.user.client.Command;
import com.google.inject.Inject;
import org.rstudio.core.client.BrowseCap;
import org.rstudio.core.client.CommandWithArg;
import org.rstudio.core.client.StringUtil;
import org.rstudio.core.client.command.CommandBinder;
import org.rstudio.core.client.command.Handler;
import org.rstudio.core.client.command.KeyboardShortcut;
import org.rstudio.core.client.jsonrpc.RpcObjectList;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.common.CommandLineHistory;
import org.rstudio.studio.client.common.debugging.ErrorManager;
import org.rstudio.studio.client.common.debugging.events.UnhandledErrorEvent;
import org.rstudio.studio.client.common.debugging.model.ErrorHandlerType;
import org.rstudio.studio.client.common.shell.ShellDisplay;
import org.rstudio.studio.client.server.ServerError;
import org.rstudio.studio.client.server.ServerRequestCallback;
import org.rstudio.studio.client.server.Void;
import org.rstudio.studio.client.server.VoidServerRequestCallback;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.model.ClientInitState;
import org.rstudio.studio.client.workbench.model.ClientState;
import org.rstudio.studio.client.workbench.model.ConsoleAction;
import org.rstudio.studio.client.workbench.model.Session;
import org.rstudio.studio.client.workbench.model.SessionInfo;
import org.rstudio.studio.client.workbench.model.helper.StringStateValue;
import org.rstudio.studio.client.workbench.prefs.model.UIPrefs;
import org.rstudio.studio.client.workbench.views.console.events.*;
import org.rstudio.studio.client.workbench.views.console.model.ConsoleServerOperations;
import org.rstudio.studio.client.workbench.views.console.shell.assist.CompletionManager;
import org.rstudio.studio.client.workbench.views.console.shell.assist.CompletionPopupPanel;
import org.rstudio.studio.client.workbench.views.console.shell.assist.HistoryCompletionManager;
import org.rstudio.studio.client.workbench.views.console.shell.assist.RCompletionManager;
import org.rstudio.studio.client.workbench.views.console.shell.editor.InputEditorDisplay;
import org.rstudio.studio.client.workbench.views.environment.events.DebugModeChangedEvent;
import org.rstudio.studio.client.workbench.views.source.editors.text.ace.AceEditorNative;
import java.util.ArrayList;
public class Shell implements ConsoleInputHandler,
ConsoleWriteOutputHandler,
ConsoleWriteErrorHandler,
ConsoleWritePromptHandler,
ConsoleWriteInputHandler,
ConsolePromptHandler,
ConsoleResetHistoryHandler,
ConsoleRestartRCompletedEvent.Handler,
ConsoleExecutePendingInputEvent.Handler,
SendToConsoleHandler,
DebugModeChangedEvent.Handler,
RunCommandWithDebugEvent.Handler,
UnhandledErrorEvent.Handler
{
   // Binds workbench commands (e.g. consoleClear) to @Handler methods on
   // this class.
   static interface Binder extends CommandBinder<Commands, Shell>
   {
   }

   // Console view contract, extended with selection lifecycle callbacks.
   public interface Display extends ShellDisplay
   {
      void onBeforeUnselected();
      void onBeforeSelected();
      void onSelected();
   }
   /**
    * Constructs the console shell controller: wires command bindings,
    * history managers, key handlers, code/history completion, and all
    * console-related event subscriptions, then restores state from the
    * session.
    */
   @Inject
   public Shell(ConsoleServerOperations server, 
                EventBus eventBus,
                Display display,
                Session session,
                Commands commands,
                UIPrefs uiPrefs, 
                ErrorManager errorManager)
   {
      super() ;

      ((Binder)GWT.create(Binder.class)).bind(commands, this);
      
      server_ = server ;
      eventBus_ = eventBus ;
      view_ = display ;
      commands_ = commands;
      errorManager_ = errorManager;
      input_ = view_.getInputEditorDisplay() ;
      historyManager_ = new CommandLineHistory(input_);
      browseHistoryManager_ = new CommandLineHistory(input_);
      prefs_ = uiPrefs;
      
      inputAnimator_ = new ShellInputAnimator(view_.getInputEditorDisplay());
      
      view_.setMaxOutputLines(session.getSessionInfo().getConsoleActionsLimit());

      keyDownPreviewHandlers_ = new ArrayList<KeyDownPreviewHandler>() ;
      keyPressPreviewHandlers_ = new ArrayList<KeyPressPreviewHandler>() ;

      InputKeyDownHandler handler = new InputKeyDownHandler() ;
      // This needs to be a capturing key down handler or else Ace will have
      // handled the event before we had a chance to prevent it
      view_.addCapturingKeyDownHandler(handler) ;
      view_.addKeyPressHandler(handler) ;
      
      eventBus.addHandler(ConsoleInputEvent.TYPE, this); 
      eventBus.addHandler(ConsoleWriteOutputEvent.TYPE, this);
      eventBus.addHandler(ConsoleWriteErrorEvent.TYPE, this);
      eventBus.addHandler(ConsoleWritePromptEvent.TYPE, this);
      eventBus.addHandler(ConsoleWriteInputEvent.TYPE, this);
      eventBus.addHandler(ConsolePromptEvent.TYPE, this);
      eventBus.addHandler(ConsoleResetHistoryEvent.TYPE, this);
      eventBus.addHandler(ConsoleRestartRCompletedEvent.TYPE, this);
      eventBus.addHandler(ConsoleExecutePendingInputEvent.TYPE, this);
      eventBus.addHandler(SendToConsoleEvent.TYPE, this);
      eventBus.addHandler(DebugModeChangedEvent.TYPE, this);
      eventBus.addHandler(RunCommandWithDebugEvent.TYPE, this);
      eventBus.addHandler(UnhandledErrorEvent.TYPE, this);
      
      // set up R code completion and command-history completion on the
      // console input line
      final CompletionManager completionManager
                  = new RCompletionManager(view_.getInputEditorDisplay(),
                                          null,
                                          new CompletionPopupPanel(), 
                                          server, 
                                          null,
                                          null,
                                          null) ;
      addKeyDownPreviewHandler(completionManager) ;
      addKeyPressPreviewHandler(completionManager) ;
      
      addKeyDownPreviewHandler(new HistoryCompletionManager(
            view_.getInputEditorDisplay(), server));

      // keep Ace's insert-matching-brackets behavior in sync with the pref
      uiPrefs.insertMatching().bind(new CommandWithArg<Boolean>() {
         public void execute(Boolean arg) {
            AceEditorNative.setInsertMatching(arg);
         }});

      sessionInit(session);
   }
   /**
    * Restores console state from the session: the partially-typed input
    * line (persisted as temporary client state), the command history, and
    * any console actions to replay into the output area.
    */
   private void sessionInit(Session session)
   {
      SessionInfo sessionInfo = session.getSessionInfo();
      ClientInitState clientState = sessionInfo.getClientState();

      // persist/restore the current (unsubmitted) input line across sessions
      new StringStateValue(GROUP_CONSOLE, STATE_INPUT, ClientState.TEMPORARY, clientState) {
         @Override
         protected void onInit(String value)
         {
            initialInput_ = value;
         }
         @Override
         protected String getValue()
         {
            return view_.getInputEditorDisplay().getText();
         }
      };

      JsArrayString history = sessionInfo.getConsoleHistory();
      if (history != null)
         setHistory(history);

      RpcObjectList<ConsoleAction> actions = sessionInfo.getConsoleActions();
      if (actions != null)
      {
         view_.playbackActions(actions);
      }

      if (sessionInfo.getResumed())
      {
         // no special UI for this (resuming session with all console
         // history and other UI state preserved deemed adequate feedback)
      }
   }
public Display getDisplay()
{
return view_ ;
}
/**
 * Handles the "clear console" command: wipes local output, tells the
 * server to discard its recorded console actions, and restores keyboard
 * focus to the input line.
 */
@Handler
void onConsoleClear()
{
   // clear output
   view_.clearOutput();

   // notify server
   server_.resetConsoleActions(new VoidServerRequestCallback());

   // if we don't bounce setFocus the menu retains focus
   Scheduler.get().scheduleDeferred(new ScheduledCommand() {
      public void execute()
      {
         view_.getInputEditorDisplay().setFocus(true);
      }
   });
}
/** Registers a handler that previews key-down events before the console handles them. */
public void addKeyDownPreviewHandler(KeyDownPreviewHandler handler)
{
   keyDownPreviewHandlers_.add(handler);
}

/** Registers a handler that previews key-press events before the console handles them. */
public void addKeyPressPreviewHandler(KeyPressPreviewHandler handler)
{
   keyPressPreviewHandlers_.add(handler);
}
/**
 * Forwards user console input to the server. On failure the error is
 * echoed to the console and the most recent prompt is re-displayed so
 * the user can try again.
 */
public void onConsoleInput(final ConsoleInputEvent event)
{
   server_.consoleInput(event.getInput(),
                        new ServerRequestCallback<Void>() {
      @Override
      public void onError(ServerError error)
      {
         // show the error in the console then re-prompt
         view_.consoleWriteError("Error: " + error.getUserMessage() + "\n");
         // re-prompt without re-recording the command in history
         if (lastPromptText_ != null)
            consolePrompt(lastPromptText_, false);
      }
   });
}
/** Appends standard output received from the server to the console display. */
public void onConsoleWriteOutput(ConsoleWriteOutputEvent event)
{
   view_.consoleWriteOutput(event.getOutput());
}

/** Appends error output received from the server to the console display. */
public void onConsoleWriteError(final ConsoleWriteErrorEvent event)
{
   view_.consoleWriteError(event.getError());
}
/**
 * Shows an extended (expandable-traceback) error in the console for an
 * unhandled R error. Suppressed while debugging, when the debugger
 * surfaces the error itself.
 */
public void onUnhandledError(UnhandledErrorEvent event)
{
   if (!debugging_)
   {
      view_.consoleWriteExtendedError(
            event.getError().getErrorMessage(),
            event.getError(),
            prefs_.autoExpandErrorTracebacks().getValue(),
            // most recent history entry gives context for the failure
            getHistoryEntry(0));
   }
}
/** Echoes input (e.g. from playback or another client) into the console display. */
public void onConsoleWriteInput(ConsoleWriteInputEvent event)
{
   view_.consoleWriteInput(event.getInput());
}

/** Writes prompt text to the console display without activating the input line. */
public void onConsoleWritePrompt(ConsoleWritePromptEvent event)
{
   view_.consoleWritePrompt(event.getPrompt());
}
/** Responds to a server prompt event by displaying the prompt in the console. */
public void onConsolePrompt(ConsolePromptEvent event)
{
   String promptText = event.getPrompt().getPromptText();
   boolean recordNextCommand = event.getPrompt().getAddToHistory();
   consolePrompt(promptText, recordNextCommand);
}
/**
 * Displays a prompt in the console view and performs per-prompt
 * bookkeeping: restores persisted-but-unsubmitted input on the very
 * first prompt, resets history navigation, and optionally restores
 * keyboard focus.
 *
 * @param prompt       the prompt text to display
 * @param addToHistory whether the next command entered at this prompt
 *                     should be recorded in command history
 */
private void consolePrompt(String prompt, boolean addToHistory)
{
   view_.consolePrompt(prompt, true) ;

   // lastPromptText_ == null means this is the first prompt of the
   // session; restore any input saved by sessionInit
   if (lastPromptText_ == null
       && initialInput_ != null
       && initialInput_.length() > 0)
   {
      view_.getInputEditorDisplay().setText(initialInput_);
      view_.ensureInputVisible();
   }

   addToHistory_ = addToHistory;
   resetHistoryPosition();
   lastPromptText_ = prompt ;

   // restoreFocus_ is set when the user submits a command with Enter;
   // give focus back to the input line once the new prompt appears
   if (restoreFocus_)
   {
      restoreFocus_ = false;
      view_.getInputEditorDisplay().setFocus(true);
   }
}
/** Replaces the local command history with the history supplied by the server. */
public void onConsoleResetHistory(ConsoleResetHistoryEvent event)
{
   setHistory(event.getHistory());
}
/**
 * After R restarts, nudge the console back to a usable state: if no
 * prompt is pending, send an empty command to force a fresh prompt,
 * then restore focus to the input line.
 */
@Override
public void onRestartRCompleted(ConsoleRestartRCompletedEvent event)
{
   if (view_.isPromptEmpty())
      eventBus_.fireEvent(new SendToConsoleEvent("", true));

   focus();
}
/**
 * Pulls the pending command text out of the view, records it in history
 * when appropriate, and broadcasts it as console input.
 */
private void processCommandEntry()
{
   String commandText = view_.processCommandEntry();
   boolean haveCommand = commandText.length() > 0;
   if (addToHistory_ && haveCommand)
      addToHistory(commandText);

   // let the rest of the application know the user submitted input
   eventBus_.fireEvent(new ConsoleInputEvent(commandText));
}
/**
 * Places code into the console input line (optionally animating it as
 * if typed) and optionally executes it, preserving any input the user
 * already had at the prompt.
 */
public void onSendToConsole(final SendToConsoleEvent event)
{
   final InputEditorDisplay display = view_.getInputEditorDisplay();

   // get anything already at the console
   final String previousInput = StringUtil.notNull(display.getText());

   // define code block we execute at finish
   Command finishSendToConsole = new Command() {
      @Override
      public void execute()
      {
         if (event.shouldExecute())
         {
            processCommandEntry();
            // put back whatever the user had typed before the send
            if (previousInput.length() > 0)
               display.setText(previousInput);
         }

         if (!event.shouldExecute() || event.shouldFocus())
         {
            display.setFocus(true);
            display.collapseSelection(false);
         }
      }
   };

   // do standard finish if we aren't animating
   if (!event.shouldAnimate())
   {
      display.clear();
      display.setText(event.getCode());
      finishSendToConsole.execute();
   }
   else
   {
      // animate the code into the input line, then finish
      inputAnimator_.enque(event.getCode(), finishSendToConsole);
   }
}
/**
 * Handles a Cmd+Enter delegated from the source editor: if the console
 * input is focused and has pending text, submit it; otherwise hand
 * control back to the source editor.
 */
@Override
public void onExecutePendingInput(ConsoleExecutePendingInputEvent event)
{
   // if the source view is delegating a Cmd+Enter to us then
   // take it if we are focused and we have a command to enter
   if (view_.getInputEditorDisplay().isFocused() &&
       (view_.getInputEditorDisplay().getText().length() > 0))
   {
      processCommandEntry();
   }
   // otherwise delegate back to the source view. we do this via
   // executing a command which is a bit of hack but it's a clean
   // way to call code within the "current editor" (an event would
   // go to all editors). another alternative would be to
   // call a method on the SourceShim
   else
   {
      commands_.executeCodeWithoutFocus().execute();
   }
}
/**
 * Tracks debugger state; when debugging begins, scroll the input line
 * into view so the user can see the browse prompt.
 */
@Override
public void onDebugModeChanged(DebugModeChangedEvent event)
{
   boolean nowDebugging = event.debugging();
   if (nowDebugging)
      view_.ensureInputVisible();
   debugging_ = nowDebugging;
}
/**
 * Re-runs a command with the error handler set to break on errors.
 * Invoked from the "Rerun with Debug" command in the ConsoleError widget.
 */
@Override
public void onRunCommandWithDebug(final RunCommandWithDebugEvent event)
{
   errorManager_.setDebugSessionHandlerType(
         ErrorHandlerType.ERRORS_BREAK,
         new ServerRequestCallback<Void>()
         {
            @Override
            public void onResponseReceived(Void v)
            {
               // error handler installed; now replay the command
               eventBus_.fireEvent(new SendToConsoleEvent(
                     event.getCommand(), true));
            }

            @Override
            public void onError(ServerError error)
            {
               // if we failed to set debug mode, don't rerun the command
            }
         });
}
/**
 * Key handling for the console input line. Keys are first offered to the
 * registered preview handlers (completion, history search); remaining
 * keys implement console bindings: Up/Down for history navigation, Enter
 * to submit, Esc to interrupt/reset, Ctrl+L to clear, Alt+- to insert
 * " &lt;- ", and Cmd/Ctrl+Shift+M to insert " %&gt;% ".
 */
private final class InputKeyDownHandler implements KeyDownHandler,
                                                   KeyPressHandler
{
   public void onKeyDown(KeyDownEvent event)
   {
      int keyCode = event.getNativeKeyCode();

      // give preview handlers (e.g. completion popup) first crack at the key
      for (KeyDownPreviewHandler handler : keyDownPreviewHandlers_)
      {
         if (handler.previewKeyDown(event.getNativeEvent()))
         {
            event.preventDefault() ;
            event.stopPropagation() ;
            return;
         }
      }

      // never let Tab move focus out of the console
      if (event.getNativeKeyCode() == KeyCodes.KEY_TAB)
         event.preventDefault();

      int modifiers = KeyboardShortcut.getModifierValue(event.getNativeEvent());
      if (event.isUpArrow() && modifiers == 0)
      {
         // only navigate history from the first line (or when the cursor
         // is at the very end of the input)
         if ((input_.getCurrentLineNum() == 0) || input_.isCursorAtEnd())
         {
            event.preventDefault();
            event.stopPropagation();

            navigateHistory(-1);
         }
      }
      else if (event.isDownArrow() && modifiers == 0)
      {
         // only navigate history from the last line (or cursor at end)
         if ((input_.getCurrentLineNum() == input_.getCurrentLineCount() - 1)
             || input_.isCursorAtEnd())
         {
            event.preventDefault();
            event.stopPropagation();

            navigateHistory(1);
         }
      }
      else if (keyCode == KeyCodes.KEY_ENTER && modifiers == 0)
      {
         event.preventDefault();
         event.stopPropagation();

         // restore focus to the input line once the server re-prompts
         restoreFocus_ = true;
         processCommandEntry();
      }
      else if (keyCode == KeyCodes.KEY_ESCAPE && modifiers == 0)
      {
         event.preventDefault();

         if (input_.getText().length() == 0)
         {
            // view_.isPromptEmpty() is to check for cases where the
            // server is prompting but not at the top level. Escape
            // needs to send null in those cases.
            // For example, try "scan()" function
            if (view_.isPromptEmpty())
            {
               // interrupt server
               server_.interrupt(new VoidServerRequestCallback());
            }
            else
            {
               // if the input is already empty then send a console reset
               // which will jump us back to the main prompt
               eventBus_.fireEvent(new ConsoleInputEvent(null));
            }
         }

         input_.clear();
      }
      else
      {
         // NOTE(review): 'mod' recomputes the same value as 'modifiers'
         // above; kept as-is to avoid any behavior drift.
         int mod = KeyboardShortcut.getModifierValue(event.getNativeEvent());
         if (mod == KeyboardShortcut.CTRL)
         {
            switch (keyCode)
            {
               case 'L':
                  Shell.this.onConsoleClear() ;
                  event.preventDefault() ;
                  break;
            }
         }
         else if (mod == KeyboardShortcut.ALT)
         {
            switch (keyCode)
            {
               case 189: // hyphen
                  event.preventDefault();
                  event.stopPropagation();
                  // insert the R assignment operator
                  input_.replaceSelection(" <- ", true);
                  break;
            }
         }
         else if (
               (BrowseCap.hasMetaKey() &&
                (mod == (KeyboardShortcut.META + KeyboardShortcut.SHIFT))) ||
               (!BrowseCap.hasMetaKey() &&
                (mod == (KeyboardShortcut.CTRL + KeyboardShortcut.SHIFT))))
         {
            switch (keyCode)
            {
               case KeyCodes.KEY_M:
                  event.preventDefault();
                  event.stopPropagation();
                  // insert the magrittr pipe operator
                  input_.replaceSelection(" %>% ", true);
                  break;
            }
         }
      }
   }

   public void onKeyPress(KeyPressEvent event)
   {
      // completion/history preview handlers may consume printable keys
      for (KeyPressPreviewHandler handler : keyPressPreviewHandlers_)
      {
         if (handler.previewKeyPress(event.getCharCode()))
         {
            event.preventDefault() ;
            event.stopPropagation() ;
            return;
         }
      }
   }

   // unused; retained deliberately (see @SuppressWarnings)
   @SuppressWarnings("unused")
   private boolean lastKeyCodeWasZero_;
}
/**
 * @return true if the last prompt came from the debugger's browser
 *         (prompt text starts with "Browse")
 */
private boolean isBrowsePrompt()
{
   return lastPromptText_ != null && lastPromptText_.startsWith("Browse");
}

/** Resets navigation position in both the normal and browse histories. */
private void resetHistoryPosition()
{
   browseHistoryManager_.resetPosition();
   historyManager_.resetPosition();
}

/** Records a command in whichever history matches the current prompt. */
private void addToHistory(String commandText)
{
   CommandLineHistory target =
         isBrowsePrompt() ? browseHistoryManager_ : historyManager_;
   target.addToHistory(commandText);
}

/** Fetches an entry from whichever history matches the current prompt. */
private String getHistoryEntry(int offset)
{
   CommandLineHistory target =
         isBrowsePrompt() ? browseHistoryManager_ : historyManager_;
   return target.getHistoryEntry(offset);
}

/** Moves through the active history and scrolls the input line into view. */
private void navigateHistory(int offset)
{
   CommandLineHistory target =
         isBrowsePrompt() ? browseHistoryManager_ : historyManager_;
   target.navigateHistory(offset);
   view_.ensureInputVisible();
}
/** Gives keyboard focus to the console input line. */
public void focus()
{
   input_.setFocus(true);
}

/** Copies a JS history array into the history manager and resets browse history. */
private void setHistory(JsArrayString history)
{
   int count = history.length();
   ArrayList<String> entries = new ArrayList<String>(count);
   for (int i = 0; i < count; i++)
      entries.add(history.get(i));
   historyManager_.setHistory(entries);
   browseHistoryManager_.resetPosition();
}
/** Notifies the view that the console tab is about to be deactivated. */
public void onBeforeUnselected()
{
   view_.onBeforeUnselected();
}

/** Notifies the view that the console tab is about to be activated. */
public void onBeforeSelected()
{
   view_.onBeforeSelected();
}

/** Notifies the view that the console tab has been activated. */
public void onSelected()
{
   view_.onSelected();
}
// --- collaborators ---
private final ConsoleServerOperations server_ ;
private final EventBus eventBus_ ;
private final Display view_ ;
private final Commands commands_;
private final ErrorManager errorManager_;
private final InputEditorDisplay input_ ;
private final ArrayList<KeyDownPreviewHandler> keyDownPreviewHandlers_ ;
private final ArrayList<KeyPressPreviewHandler> keyPressPreviewHandlers_ ;
// indicates whether the next command should be added to history
private boolean addToHistory_ ;
// text of the most recent prompt; null until the first prompt arrives
private String lastPromptText_ ;
private final UIPrefs prefs_;
// separate command histories for the top-level prompt and the debug browser
private final CommandLineHistory historyManager_;
private final CommandLineHistory browseHistoryManager_;
private final ShellInputAnimator inputAnimator_;
// console input persisted from a previous session (restored at first prompt)
private String initialInput_ ;
// client-state persistence keys (see sessionInit)
private static final String GROUP_CONSOLE = "console";
private static final String STATE_INPUT = "input";
// whether to restore focus to the input line at the next prompt
private boolean restoreFocus_ = true;
// true while the R debugger is active (suppresses extended error display)
private boolean debugging_ = false;
}
|
package com.kirinpatel.vlc;
import com.kirinpatel.Main;
import com.kirinpatel.gui.GUI;
import com.kirinpatel.gui.PlaybackPanel;
import com.kirinpatel.net.Client;
import com.kirinpatel.util.Debug;
import com.kirinpatel.util.User;
import uk.co.caprica.vlcj.binding.internal.libvlc_media_t;
import uk.co.caprica.vlcj.component.DirectMediaPlayerComponent;
import uk.co.caprica.vlcj.discovery.NativeDiscovery;
import uk.co.caprica.vlcj.player.MediaPlayerEventListener;
import uk.co.caprica.vlcj.player.direct.BufferFormatCallback;
import uk.co.caprica.vlcj.player.direct.DirectMediaPlayer;
import uk.co.caprica.vlcj.player.direct.RenderCallback;
import uk.co.caprica.vlcj.player.direct.RenderCallbackAdapter;
import uk.co.caprica.vlcj.player.direct.format.RV32BufferFormat;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
/**
* Modified JPanel that will play media for the sync application.
*/
public class MediaPlayer extends JPanel {
private final int WIDTH;
private final int HEIGHT;
private final PlaybackPanel playbackPanel;
private final BufferedImage image;
private final DirectMediaPlayer mediaPlayer;
private BufferedImage scale;
private boolean isPaused = true;
private long time = -1;
private long length = -1;
private String mediaURL = "";
private String filePath = "";
private boolean isScrubbing = false;
private boolean isFile = false;
private final String[] options;
/**
* Constructor that will return a MediaPlayer.
*
* @param playbackPanel Returns MediaPanel
*/
public MediaPlayer(PlaybackPanel playbackPanel) {
new NativeDiscovery().discover();
Debug.Log("Creating MediaPlayer...", 6);
setBackground(Color.BLACK);
setOpaque(true);
WIDTH = (1280 * Main.videoQuality) / 100;
HEIGHT = (720 * Main.videoQuality) / 100;
this.playbackPanel = playbackPanel;
// Options have yet to be implemented but will be added to allow for better syncing when using offline media
options = new String[]{"--sout=#transcode{vcodec=x264,width=" + WIDTH + "height=" + HEIGHT + ",acodec=vorb,ab=128,channels=2,samplerate=44100}:display:rtp{mux=ts,dst=localhost,port=8080,sap}"};
image = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDefaultConfiguration().createCompatibleImage(WIDTH, HEIGHT);
BufferFormatCallback bufferFormatCallback = (sourceWidth, sourceHeight) -> new RV32BufferFormat(WIDTH, HEIGHT);
DirectMediaPlayerComponent mediaPlayerComponent = new DirectMediaPlayerComponent(bufferFormatCallback) {
@Override
protected RenderCallback onGetRenderCallback() {
return new MediaRenderCallback();
}
};
mediaPlayer = mediaPlayerComponent.getMediaPlayer();
mediaPlayer.setStandardMediaOptions();
mediaPlayer.setPlaySubItems(true);
mediaPlayer.addMediaPlayerEventListener(new MediaEventListener());
Debug.Log("MediaPlayer created.", 6);
}
/**
* Initialize media controls or reset them after media is changed.
*/
private void initControls() {
Debug.Log("Initializing media player controls...", 3);
if (playbackPanel.type == 0 && playbackPanel.mediaPosition.getMaximum() != 1000) {
PlaybackPanel.pauseMedia.addActionListener(e -> {
if (isPaused) mediaPlayer.play();
else mediaPlayer.pause();
});
playbackPanel.mediaPosition.addMouseListener(new MouseListener() {
@Override
public void mouseClicked(MouseEvent e) {
}
@Override
public void mousePressed(MouseEvent e) {
isScrubbing = true;
}
@Override
public void mouseReleased(MouseEvent e) {
isScrubbing = false;
}
@Override
public void mouseEntered(MouseEvent e) {
}
@Override
public void mouseExited(MouseEvent e) {
}
});
playbackPanel.mediaPosition.addChangeListener(e -> {
if (isScrubbing) {
int position = playbackPanel.mediaPosition.getValue();
mediaPlayer.setTime(position * getMediaLength() / 1000);
}
});
}
PlaybackPanel.pauseMedia.setText(">");
playbackPanel.mediaPosition.setMaximum(1000);
mediaPlayer.setMarqueeSize(60);
mediaPlayer.setMarqueeOpacity(200);
mediaPlayer.setMarqueeColour(Color.white);
mediaPlayer.setMarqueeTimeout(3500);
mediaPlayer.setMarqueeLocation(50, 1000);
if (playbackPanel.type == 0) {
for (User client : Main.connectedUsers) {
client.setTime(0);
GUI.controlPanel.updateConnectedClients(Main.connectedUsers);
}
}
isPaused = true;
Debug.Log("Media player controls initialized.", 3);
}
public void play() {
if (!mediaURL.isEmpty() && isPaused) {
Debug.Log("Playing media.", 6);
mediaPlayer.play();
}
}
public void pause() {
if (!mediaURL.isEmpty() && !isPaused) {
Debug.Log("Pausing media.", 6);
mediaPlayer.pause();
}
}
public void release() {
Debug.Log("Releasing media player...", 6);
mediaPlayer.stop();
mediaPlayer.release();
Debug.Log("Media player released.", 6);
}
public void setMediaURL(String mediaURL) {
if (!mediaURL.isEmpty() && !mediaURL.equals(this.mediaURL)) {
Debug.Log("Setting media url.", 6);
isFile = false;
mediaPlayer.prepareMedia(mediaURL);
mediaPlayer.parseMedia();
initControls();
}
}
public void setMediaFile(String filePath, String mediaURL) {
if (!filePath.isEmpty() && !mediaURL.isEmpty() && !filePath.equals(this.filePath) && !mediaURL.equals(this.mediaURL)) {
Debug.Log("Setting media file.", 6);
isFile = true;
this.mediaURL = mediaURL;
mediaPlayer.prepareMedia(filePath);
mediaPlayer.parseMedia();
initControls();
}
}
public void setVolume(int volume) {
mediaPlayer.setVolume(volume);
}
public void seekTo(long time) {
Debug.Log("Seeking media player (" + time + ").", 6);
mediaPlayer.setTime(time);
}
public void setRate(float rate) {
if (!isPaused) mediaPlayer.setRate(rate);
}
public boolean isPaused() {
return isPaused;
}
public String getMediaURL() {
return mediaURL;
}
public long getMediaTime() {
return time == -1 ? 0 : time;
}
public long getMediaLength() {
return length == -1 ? 0 : length;
}
public static String formatTime(long value) {
value /= 1000;
int hours = (int) value / 3600;
int remainder = (int) value - hours * 3600;
int minutes = remainder / 60;
remainder = remainder - minutes * 60;
int seconds = remainder;
return String.format("%d:%02d:%02d", hours, minutes, seconds);
}
protected void paintComponent(Graphics g) {
Graphics2D g2 = (Graphics2D) g;
g2.drawImage(scale, null, 0, 0);
}
class MediaRenderCallback extends RenderCallbackAdapter {
public MediaRenderCallback() {
super(new int[WIDTH * HEIGHT]);
}
@Override
protected void onDisplay(DirectMediaPlayer directMediaPlayer, int[] buffer) {
float xScale = (float) getWidth() / WIDTH;
float yScale = (float) getHeight() / HEIGHT;
image.setRGB(0, 0, WIDTH, HEIGHT, buffer, 0, WIDTH);
BufferedImage after = new BufferedImage(getWidth(), getHeight(), BufferedImage.TYPE_INT_ARGB);
AffineTransform at = new AffineTransform();
at.scale(xScale, yScale);
AffineTransformOp scaleOp = new AffineTransformOp(at, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);
scale = scaleOp.filter(image, after);
repaint();
if (playbackPanel.type == 0) {
Main.connectedUsers.get(0).setTime(getMediaTime());
GUI.controlPanel.updateConnectedClients(Main.connectedUsers);
} else {
Client.user.setTime(getMediaTime());
GUI.controlPanel.updateConnectedClients(Main.connectedUsers);
}
}
}
class MediaEventListener implements MediaPlayerEventListener {
@Override
public void mediaChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, libvlc_media_t libvlc_media_t, String s) {
if (!isFile) mediaURL = s;
}
@Override
public void opening(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
mediaPlayer.setVolume(playbackPanel.mediaVolume.getValue());
}
@Override
public void buffering(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, float v) {
}
@Override
public void playing(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
isPaused = false;
length = mediaPlayer.getLength();
PlaybackPanel.pauseMedia.setText("||");
mediaPlayer.setMarqueeText("Playing");
mediaPlayer.enableMarquee(true);
}
@Override
public void paused(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
isPaused = true;
length = mediaPlayer.getLength();
PlaybackPanel.pauseMedia.setText(">");
mediaPlayer.setMarqueeText("Paused");
mediaPlayer.enableMarquee(true);
}
@Override
public void stopped(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
}
@Override
public void forward(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
}
@Override
public void backward(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
}
@Override
public void finished(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
playbackPanel.mediaPosition.setValue(0);
PlaybackPanel.pauseMedia.setText(">");
isPaused = true;
}
@Override
public void timeChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, long l) {
if (!isScrubbing) {
time = l;
playbackPanel.mediaPositionLabel.setText(formatTime(l) + " / " + formatTime(length));
playbackPanel.mediaPosition.setValue((int) (time * 1000 / length));
}
}
@Override
public void positionChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, float v) {
}
@Override
public void seekableChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void pausableChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void titleChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void snapshotTaken(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, String s) {
}
@Override
public void lengthChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, long l) {
length = l;
}
@Override
public void videoOutput(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void scrambledChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void elementaryStreamAdded(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i, int i1) {
}
@Override
public void elementaryStreamDeleted(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i, int i1) {
}
@Override
public void elementaryStreamSelected(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i, int i1) {
}
@Override
public void corked(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, boolean b) {
}
@Override
public void muted(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, boolean b) {
}
@Override
public void volumeChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, float v) {
}
@Override
public void audioDeviceChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, String s) {
}
@Override
public void chapterChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void error(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
}
@Override
public void mediaMetaChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void mediaSubItemAdded(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, libvlc_media_t libvlc_media_t) {
}
@Override
public void mediaDurationChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, long l) {
}
@Override
public void mediaParsedChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void mediaFreed(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
}
@Override
public void mediaStateChanged(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void mediaSubItemTreeAdded(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, libvlc_media_t libvlc_media_t) {
}
@Override
public void newMedia(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
time = 0;
}
@Override
public void subItemPlayed(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void subItemFinished(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer, int i) {
}
@Override
public void endOfSubItems(uk.co.caprica.vlcj.player.MediaPlayer mediaPlayer) {
}
}
}
|
package org.jpos.q2.nserver;
import org.apache.mina.core.service.IoHandler;
import org.apache.mina.core.session.IdleStatus;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.filter.codec.ProtocolCodecFilter;
import org.apache.mina.filter.keepalive.KeepAliveFilter;
import org.apache.mina.transport.socket.nio.NioSocketAcceptor;
import org.jdom.Element;
import org.jpos.core.ConfigurationException;
import org.jpos.iso.ISOException;
import org.jpos.iso.ISOMsg;
import org.jpos.iso.ISOPackager;
import org.jpos.iso.ISORequestListener;
import org.jpos.q2.QBeanSupport;
import org.jpos.q2.QFactory;
import org.jpos.q2.nserver.codec.DefaultISOMsgCodecFactory;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* A high performance server QBean based on Java NIO, implemented as a
* protocol over Apache MINA.
* <p/>
* NOTE: This server does not use jPos ISOChannels. Instead a generic transport has
* been created and the protocol specific functionality has been decoupled into
* a ProtocolHandler, since ISOChannels (unfortunately) are not transport agnostic.
* <p/>
* Since this code is not based on ISOChannels, this means that not all
* ISOChannels are currently implemented as ProtocolHandlers. Therefore you should
* port your own, or stick to QServer.
* <p/>
* This server would be of good use if you are in need of handling
 * thousands of simultaneous incoming connections on a single server.
* <p/>
* An example Q2 descriptor for NServer:
* <p/>
* <pre>
* {@code
* <?xml version="1.0" encoding="UTF-8"?>
*
* <server name="ath-bridge-prod" class="org.jpos.q2.nserver.NServer" logger="Q2">
*
* <classpath>lib/mina-core-1.1.7.jar</classpath>
* <classpath>lib/slf4j-api-1.5.0.jar</classpath>
* <classpath>lib/slf4j-nop-1.5.0.jar</classpath>
*
* <attr name="port" type="java.lang.Integer">2005</attr>
*
* <protocol-handler
* class="org.jpos.q2.nserver.handler.AthProtocolHandler"
* packager="org.jpos.iso.packager.GenericPackager">
* <property name="packager-config" value="cfg/packager/ath.xml" />
* </protocol-handler>
*
* <request-listener class="com.kontinium.isobridge.BridgeRequestListener">
* <property name="space" value="transient:default"/>
* <property name="queue" value="txnmgr"/>
* <property name="timeout" value="60000"/>
* </request-listener>
*
* </server>
* }
* </pre>
*
* @author Victor Salaman (vsalaman@gmail.com)
*/
public class NServer extends QBeanSupport implements NServerMBean, IoHandler
{
private int port = 0;
ProtocolHandler handler = null;
private ISOMsgCodecFactory codecFactory = new DefaultISOMsgCodecFactory();
NioSocketAcceptor acceptor = null;
protected boolean ignoreISOExceptions;
List<ISORequestListener> requestListeners;
InetSocketAddress socketAddress;
int connectedSessions=0;
KeepAliveHandler keepAliveHandler;
int timeout=-1;
public void setPort(int port)
{
this.port = port;
}
public int getPort()
{
return port;
}
public int getConnectionCount()
{
return connectedSessions;
}
@Override
protected void startService() throws Exception
{
if (port == 0)
{
throw new ConfigurationException("Port value not set");
}
socketAddress = new InetSocketAddress(port);
newCodecFactory();
addListeners();
acceptor = new NioSocketAcceptor();
acceptor.getFilterChain().addLast("codec", new ProtocolCodecFilter(codecFactory));
if(timeout==-1)
{
keepAliveHandler=newKeepAliveHandler();
if(keepAliveHandler!=null)
{
KeepAliveFilter kaf=new KeepAliveFilter(keepAliveHandler.getKeepAliveMessageFactory(),
IdleStatus.READER_IDLE,
keepAliveHandler.getKeepAliveRequestTimeoutHandler(),
keepAliveHandler.getRequestInterval(),
keepAliveHandler.getRequestTimeout());
acceptor.getFilterChain().addLast("keepalive", kaf);
}
}
else if(timeout>0)
{
acceptor.getSessionConfig().setIdleTime(IdleStatus.READER_IDLE,timeout);
}
acceptor.setBacklog(100);
acceptor.setReuseAddress(true);
acceptor.setDefaultLocalAddress(socketAddress);
acceptor.setHandler(this);
acceptor.getSessionConfig().setReadBufferSize(2048);
preBindAcceptorHook(acceptor);
acceptor.bind();
}
/**
* Hook method invoked just before the acceptor is bound. This is uselful for things such as adding filters (e.g. SSL).
*
* @param acceptor the socket acceptor to be used.
* @throws Exception generic exception.
*/
@SuppressWarnings({"UnusedDeclaration"})
protected void preBindAcceptorHook(NioSocketAcceptor acceptor) throws Exception
{
}
@Override
protected void stopService() throws Exception
{
acceptor.unbind();
acceptor.dispose();
}
private void newCodecFactory() throws ConfigurationException
{
QFactory factory = getFactory();
Element persist = getPersist();
handler = newProtocolHandler();
if (handler == null)
{
throw new ConfigurationException("Protocol handler is null");
}
keepAliveHandler = newKeepAliveHandler();
Element codecFactoryElem = persist.getChild("codec-factory");
if (codecFactoryElem != null)
{
codecFactory = (ISOMsgCodecFactory) factory.newInstance(codecFactoryElem.getAttributeValue("class"));
}
if (codecFactory == null)
{
throw new ConfigurationException("Codec Factory is null");
}
codecFactory.init(handler);
}
private ProtocolHandler newProtocolHandler() throws ConfigurationException
{
QFactory f=getFactory();
Element e = getPersist().getChild("protocol-handler");
if (e == null)
{
throw new ConfigurationException("protocol-handler element is required.");
}
String handlerName = e.getAttributeValue("class");
String packagerName = e.getAttributeValue("packager");
ProtocolHandler handler = (ProtocolHandler) f.newInstance(handlerName);
f.setLogger(handler, e);
f.setConfiguration(handler, e);
ISOPackager packager = (ISOPackager) f.newInstance(packagerName);
handler.setPackager(packager);
f.setConfiguration(packager, e);
QFactory.invoke(handler, "setHeader", e.getAttributeValue("header"));
return handler;
}
private KeepAliveHandler newKeepAliveHandler() throws ConfigurationException
{
QFactory f=getFactory();
Element e = getPersist().getChild("keep-alive-handler");
if (e!=null)
{
String handlerName = e.getAttributeValue("class");
KeepAliveHandler handler = (KeepAliveHandler) f.newInstance(handlerName);
f.setConfiguration(handler, e);
return handler;
}
return null;
}
@SuppressWarnings({"WhileLoopReplaceableByForEach"})
private void addListeners() throws ConfigurationException
{
requestListeners = new ArrayList<ISORequestListener>();
QFactory factory = getFactory();
Iterator iter = getPersist().getChildren("request-listener").iterator();
while (iter.hasNext())
{
Element l = (Element) iter.next();
ISORequestListener listener = (ISORequestListener) factory.newInstance(l.getAttributeValue("class"));
factory.setLogger(listener, l);
factory.setConfiguration(listener, l);
requestListeners.add(listener);
}
}
public void sessionCreated(IoSession session) throws Exception
{
}
public void sessionOpened(IoSession session) throws Exception
{
connectedSessions++;
getLog().info(getName() + ".session" + session.getRemoteAddress(), "session started");
}
public void sessionClosed(IoSession session) throws Exception
{
connectedSessions
getLog().info(getName() + ".session" + session.getRemoteAddress(), "session ended");
}
public void sessionIdle(IoSession session, IdleStatus idleStatus) throws Exception
{
if(timeout>0)
{
session.close(true);
}
}
public void exceptionCaught(IoSession session, Throwable throwable) throws Exception
{
if (throwable instanceof ISOException && ignoreISOExceptions)
{
return;
}
getLog().error(getName() + ".session" + session.getRemoteAddress(), throwable);
session.close(true);
}
@SuppressWarnings({"WhileLoopReplaceableByForEach"})
public void messageReceived(IoSession session, Object o) throws Exception
{
if(o instanceof NullMessage)
{
session.write(o);
}
else if(o instanceof ISOMsg)
{
getLog().info(getName() + ".session" + session.getRemoteAddress(), o);
ISOMsg m = (ISOMsg) o;
Iterator iter = requestListeners.iterator();
while (iter.hasNext())
{
final SessionISOSource source = new SessionISOSource(session);
m.setSource(source);
m.setDirection(ISOMsg.INCOMING);
if (((ISORequestListener) iter.next()).process(source, m))
{
break;
}
}
}
}
public void messageSent(IoSession session, Object o) throws Exception
{
if (o instanceof ISOMsg)
{
getLog().info(getName() + ".session" + session.getRemoteAddress(), o);
}
}
public String getCountersAsString()
{
StringBuffer sb = new StringBuffer();
sb.append("connected=").append(acceptor.getManagedSessionCount());
return sb.toString();
}
}
|
package org.jgrapes.net;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.net.ssl.ExtendedSSLSession;
import javax.net.ssl.SNIServerName;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLEngineResult.HandshakeStatus;
import javax.net.ssl.SSLEngineResult.Status;
import javax.net.ssl.SSLException;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.jgrapes.core.Channel;
import org.jgrapes.core.ClassChannel;
import org.jgrapes.core.Component;
import org.jgrapes.core.Components;
import org.jgrapes.core.EventPipeline;
import org.jgrapes.core.annotation.Handler;
import org.jgrapes.core.annotation.HandlerDefinition.ChannelReplacements;
import org.jgrapes.io.IOSubchannel;
import org.jgrapes.io.events.Close;
import org.jgrapes.io.events.Closed;
import org.jgrapes.io.events.HalfClosed;
import org.jgrapes.io.events.IOError;
import org.jgrapes.io.events.Input;
import org.jgrapes.io.events.OpenTcpConnection;
import org.jgrapes.io.events.Output;
import org.jgrapes.io.events.Purge;
import org.jgrapes.io.util.LinkedIOSubchannel;
import org.jgrapes.io.util.ManagedBuffer;
import org.jgrapes.io.util.ManagedBufferPool;
import org.jgrapes.net.events.Accepted;
import org.jgrapes.net.events.Connected;
/**
* A component that receives and sends byte buffers on an
* encrypted channel and sends and receives the corresponding
* decrypted data on a plain channel.
*
* The encrypted channel is assumed to be the network side
* ("upstream") and therefore {@link Input} events represent
* encrypted data and are decoded to {@link Output} events on
* the plain channel ("downstream") and vice versa.
*/
@SuppressWarnings({ "PMD.ExcessiveImports" })
public class SslCodec extends Component {
@SuppressWarnings("PMD.FieldNamingConventions")
private static final Logger logger
    = Logger.getLogger(SslCodec.class.getName());

// channel carrying the encrypted ("upstream"/network-side) traffic
private final Channel encryptedChannel;
// context from which per-connection SSLEngine instances are created
private final SSLContext sslContext;
/**
* Represents the encrypted channel in annotations.
*/
private class EncryptedChannel extends ClassChannel {
}
/**
* Creates a new codec that uses the given {@link SSLContext}.
*
* @param plainChannel the component's channel
* @param encryptedChannel the channel with the encrypted data
* @param sslContext the SSL context to use
*/
public SslCodec(Channel plainChannel, Channel encryptedChannel,
SSLContext sslContext) {
super(plainChannel, ChannelReplacements.create()
.add(EncryptedChannel.class, encryptedChannel));
this.encryptedChannel = encryptedChannel;
this.sslContext = sslContext;
}
/**
* Creates a new codec to be used as client.
*
* @param plainChannel the component's channel
* @param encryptedChannel the channel with the encrypted data
* @param dontValidate if `true` accept all kinds of certificates
*/
@SuppressWarnings({ "PMD.DataflowAnomalyAnalysis", "PMD.CommentRequired",
"PMD.ReturnEmptyArrayRatherThanNull", "PMD.UncommentedEmptyMethodBody",
"PMD.AvoidDuplicateLiterals" })
public SslCodec(Channel plainChannel, Channel encryptedChannel,
boolean dontValidate) {
super(plainChannel, ChannelReplacements.create()
.add(EncryptedChannel.class, encryptedChannel));
this.encryptedChannel = encryptedChannel;
try {
final SSLContext sslContext = SSLContext.getInstance("SSL");
if (dontValidate) {
// Create a trust manager that does not validate certificate
// chains
final TrustManager[] trustAllCerts = {
new X509TrustManager() {
public X509Certificate[] getAcceptedIssuers() {
return null;
}
public void checkClientTrusted(
X509Certificate[] certs, String authType) {
}
public void checkServerTrusted(
X509Certificate[] certs, String authType) {
}
}
};
sslContext.init(null, trustAllCerts, null);
} else {
sslContext.init(null, null, null);
}
this.sslContext = sslContext;
} catch (NoSuchAlgorithmException | KeyManagementException e) {
throw new IllegalArgumentException(e);
}
}
/**
* Creates a new downstream connection as {@link LinkedIOSubchannel}
* of the network connection together with an {@link SSLEngine}.
*
* @param event
* the accepted event
*/
@Handler(channels = EncryptedChannel.class)
public void onAccepted(Accepted event, IOSubchannel encryptedChannel) {
new PlainChannel(event, encryptedChannel);
}
/**
* Forward the connection request to the encrypted network.
*
* @param event the event
*/
@Handler
public void onOpenConnection(OpenTcpConnection event) {
fire(new OpenTcpConnection(event.address()), encryptedChannel);
}
/**
* Creates a new downstream connection as {@link LinkedIOSubchannel}
* of the network connection together with an {@link SSLEngine}.
*
* @param event
* the accepted event
*/
@Handler(channels = EncryptedChannel.class)
public void onConnected(Connected event, IOSubchannel encryptedChannel) {
new PlainChannel(event, encryptedChannel);
}
/**
* Handles encrypted data from upstream (the network). The data is
* send through the {@link SSLEngine} and events are sent downstream
* (and in the initial phases upstream) according to the conversion
* results.
*
* @param event the event
* @param encryptedChannel the channel for exchanging the encrypted data
* @throws InterruptedException
* @throws SSLException
* @throws ExecutionException
*/
@Handler(channels = EncryptedChannel.class)
public void onInput(
Input<ByteBuffer> event, IOSubchannel encryptedChannel)
throws InterruptedException, SSLException, ExecutionException {
@SuppressWarnings({ "unchecked", "PMD.AvoidDuplicateLiterals" })
final Optional<PlainChannel> plainChannel
= (Optional<PlainChannel>) LinkedIOSubchannel
.downstreamChannel(this, encryptedChannel);
if (plainChannel.isPresent()) {
plainChannel.get().sendDownstream(event);
}
}
/**
* Handles a half close event from the encrypted channel (client).
*
* @param event the event
* @param encryptedChannel the channel for exchanging the encrypted data
* @throws InterruptedException
* @throws SSLException
*/
@Handler(channels = EncryptedChannel.class)
public void onHalfClosed(HalfClosed event, IOSubchannel encryptedChannel)
throws SSLException, InterruptedException {
@SuppressWarnings("unchecked")
final Optional<PlainChannel> plainChannel
= (Optional<PlainChannel>) LinkedIOSubchannel
.downstreamChannel(this, encryptedChannel);
if (plainChannel.isPresent()) {
plainChannel.get().upstreamHalfClosed();
}
}
/**
* Handles a close event from the encrypted channel (client).
*
* @param event the event
* @param encryptedChannel the channel for exchanging the encrypted data
* @throws InterruptedException
* @throws SSLException
*/
@Handler(channels = EncryptedChannel.class)
public void onClosed(Closed event, IOSubchannel encryptedChannel)
throws SSLException, InterruptedException {
@SuppressWarnings("unchecked")
final Optional<PlainChannel> plainChannel
= (Optional<PlainChannel>) LinkedIOSubchannel
.downstreamChannel(this, encryptedChannel);
if (plainChannel.isPresent()) {
plainChannel.get().upstreamClosed();
}
}
/**
* Forwards a {@link Purge} event downstream.
*
* @param event the event
* @param encryptedChannel the encrypted channel
*/
@Handler(channels = EncryptedChannel.class)
public void onPurge(Purge event, IOSubchannel encryptedChannel) {
@SuppressWarnings("unchecked")
final Optional<PlainChannel> plainChannel
= (Optional<PlainChannel>) LinkedIOSubchannel
.downstreamChannel(this, encryptedChannel);
if (plainChannel.isPresent()) {
plainChannel.get().purge();
}
}
/**
* Handles an {@link IOError} event from the encrypted channel (client)
* by sending it downstream.
*
* @param event the event
* @param encryptedChannel the channel for exchanging the encrypted data
* @throws InterruptedException
* @throws SSLException
*/
@Handler(channels = EncryptedChannel.class)
public void onIOError(IOError event, IOSubchannel encryptedChannel)
throws SSLException, InterruptedException {
@SuppressWarnings("unchecked")
final Optional<PlainChannel> plainChannel
= (Optional<PlainChannel>) LinkedIOSubchannel
.downstreamChannel(this, encryptedChannel);
plainChannel.ifPresent(channel -> fire(new IOError(event), channel));
}
/**
* Sends plain data through the engine and then upstream.
*
* @param event
* the event with the data
* @throws InterruptedException if the execution was interrupted
* @throws SSLException if some SSL related problem occurs
* @throws ExecutionException
*/
@Handler
public void onOutput(Output<ByteBuffer> event,
PlainChannel plainChannel)
throws InterruptedException, SSLException, ExecutionException {
if (plainChannel.hub() != this) {
return;
}
plainChannel.sendUpstream(event);
}
/**
* Forwards a close event upstream.
*
* @param event
* the close event
* @throws SSLException if an SSL related problem occurs
* @throws InterruptedException if the execution was interrupted
*/
@Handler
public void onClose(Close event, PlainChannel plainChannel)
throws InterruptedException, SSLException {
if (plainChannel.hub() != this) {
return;
}
plainChannel.close(event);
}
/**
* Represents the plain channel.
*/
@SuppressWarnings("PMD.DataflowAnomalyAnalysis")
private class PlainChannel extends LinkedIOSubchannel {
public SocketAddress localAddress;
public SocketAddress remoteAddress;
public SSLEngine sslEngine;
private EventPipeline downPipeline;
private ManagedBufferPool<ManagedBuffer<ByteBuffer>,
ByteBuffer> downstreamPool;
private ByteBuffer carryOver;
private boolean[] inputProcessed = { false };
/**
* Instantiates a new plain channel from an accepted connection.
*
* @param event the event
* @param upstreamChannel the upstream channel
*/
public PlainChannel(Accepted event, IOSubchannel upstreamChannel) {
super(SslCodec.this, channel(), upstreamChannel,
newEventPipeline());
localAddress = event.localAddress();
remoteAddress = event.remoteAddress();
init();
sslEngine.setUseClientMode(false);
}
/**
* Instantiates a new plain channel from an initiated connection.
*
* @param event the event
* @param upstreamChannel the upstream channel
*/
public PlainChannel(Connected event, IOSubchannel upstreamChannel) {
super(SslCodec.this, channel(), upstreamChannel,
newEventPipeline());
localAddress = event.localAddress();
remoteAddress = event.remoteAddress();
init();
sslEngine.setUseClientMode(true);
// Forward downstream
downPipeline.fire(
new Connected(event.localAddress(), event.remoteAddress()),
this);
}
private void init() {
if (remoteAddress instanceof InetSocketAddress) {
sslEngine = sslContext.createSSLEngine(
((InetSocketAddress) remoteAddress).getAddress()
.getHostAddress(),
((InetSocketAddress) remoteAddress).getPort());
} else {
sslEngine = sslContext.createSSLEngine();
}
String channelName = Components.objectName(SslCodec.this)
+ "." + Components.objectName(this);
// Create buffer pools, adding 50 to decoded application buffer
// size, see
final int appBufSize
= sslEngine.getSession().getApplicationBufferSize();
downstreamPool = new ManagedBufferPool<>(ManagedBuffer::new,
() -> ByteBuffer.allocate(appBufSize + 50), 2)
.setName(channelName + ".downstream.buffers");
// Provide buffers with application buffer size
// for use by downstream components.
setByteBufferPool(new ManagedBufferPool<>(ManagedBuffer::new,
() -> ByteBuffer.allocate(appBufSize), 2)
.setName(channelName + ".upstream.buffers"));
downPipeline = newEventPipeline();
// Buffers for sending encrypted data upstream will be
// obtained from upstream() and resized if required.
}
/**
* Sends input downstream.
*
* @param event the event
* @throws SSLException the SSL exception
* @throws InterruptedException the interrupted exception
* @throws ExecutionException the execution exception
*/
public void sendDownstream(Input<ByteBuffer> event)
throws SSLException, InterruptedException, ExecutionException {
ByteBuffer input = event.buffer().duplicate();
if (carryOver != null) {
if (carryOver.remaining() < input.remaining()) {
// Shouldn't happen with carryOver having packet size
// bytes left, have seen it happen nevertheless.
carryOver.flip();
ByteBuffer extCarryOver = ByteBuffer.allocate(
carryOver.remaining() + input.remaining());
extCarryOver.put(carryOver);
carryOver = extCarryOver;
}
carryOver.put(input);
carryOver.flip();
input = carryOver;
carryOver = null;
}
// Main processing
processInput(input);
// Check if data from incomplete packet remains in input buffer
if (input.hasRemaining()) {
// Actually, packet buffer size should be sufficient,
// but since this is hard to test and doesn't really matter...
carryOver = ByteBuffer.allocate(input.remaining()
+ sslEngine.getSession().getPacketBufferSize() + 50);
carryOver.put(input);
}
}
@SuppressWarnings({ "PMD.CyclomaticComplexity", "PMD.NcssCount",
"PMD.AvoidInstantiatingObjectsInLoops", "PMD.ExcessiveMethodLength",
"PMD.NPathComplexity" })
private SSLEngineResult processInput(ByteBuffer input)
throws SSLException, InterruptedException, ExecutionException {
SSLEngineResult unwrapResult;
ManagedBuffer<ByteBuffer> unwrapped = downstreamPool.acquire();
while (true) {
unwrapResult
= sslEngine.unwrap(input, unwrapped.backingBuffer());
synchronized (inputProcessed) {
inputProcessed[0] = true;
inputProcessed.notifyAll();
}
// Handle any handshaking procedures
switch (unwrapResult.getHandshakeStatus()) {
case NEED_TASK:
while (true) {
Runnable runnable = sslEngine.getDelegatedTask();
if (runnable == null) {
break;
}
// Having this handled by the response thread is
// probably not really necessary, but as the delegated
// task usually includes sending upstream...
@SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
FutureTask<Boolean> task
= new FutureTask<>(runnable, true);
upstreamChannel().responsePipeline()
.executorService().submit(task);
task.get();
}
continue;
case NEED_WRAP:
ManagedBuffer<ByteBuffer> feedback
= acquireUpstreamBuffer();
synchronized (sslEngine) {
SSLEngineResult wrapResult = sslEngine.wrap(
ManagedBuffer.EMPTY_BYTE_BUFFER.backingBuffer(),
feedback.backingBuffer());
upstreamChannel()
.respond(Output.fromSink(feedback, false));
if (wrapResult
.getHandshakeStatus() == HandshakeStatus.FINISHED) {
fireAccepted();
}
}
continue;
case FINISHED:
fireAccepted();
break;
default:
break;
}
// Anything to forward downstream?
if (unwrapped.position() > 0) {
// forward unwrapped data
downPipeline.fire(Input.fromSink(unwrapped,
sslEngine.isInboundDone()), this);
unwrapped = null;
}
// If we have a buffer overflow or everything was okay
// and there's data left, we try again, else we quit.
if (unwrapResult.getStatus() != Status.BUFFER_OVERFLOW
&& (unwrapResult.getStatus() != Status.OK
|| !input.hasRemaining())) {
// Underflow or closed
if (unwrapped != null) {
unwrapped.unlockBuffer();
}
break;
}
// Make sure we have an output buffer.
if (unwrapped == null) {
unwrapped = downstreamPool.acquire();
}
}
return unwrapResult;
}
@SuppressWarnings("PMD.DataflowAnomalyAnalysis")
private void fireAccepted() {
List<SNIServerName> snis = Collections.emptyList();
if (sslEngine.getSession() instanceof ExtendedSSLSession) {
snis = ((ExtendedSSLSession) sslEngine.getSession())
.getRequestedServerNames();
}
downPipeline.fire(new Accepted(
localAddress, remoteAddress, true, snis), this);
}
/**
* Send output upstream.
*
* @param event the event
* @throws SSLException the SSL exception
* @throws InterruptedException the interrupted exception
* @throws ExecutionException
*/
public void sendUpstream(Output<ByteBuffer> event)
throws SSLException, InterruptedException, ExecutionException {
ByteBuffer output = event.buffer().backingBuffer().duplicate();
processOutput(output, event.isEndOfRecord());
}
@SuppressWarnings({ "PMD.DataflowAnomalyAnalysis",
"PMD.CyclomaticComplexity", "PMD.NcssCount",
"PMD.NPathComplexity" })
private void processOutput(ByteBuffer output, boolean eor)
throws InterruptedException, SSLException, ExecutionException {
ManagedBuffer<ByteBuffer> wrapped = acquireUpstreamBuffer();
while (true) {
SSLEngineResult wrapResult;
// Cheap synchronization: no (relevant) input
inputProcessed[0] = false;
synchronized (sslEngine) {
wrapResult
= sslEngine.wrap(output, wrapped.backingBuffer());
// Anything to be sent upstream?
if (wrapped.position() > 0) {
upstreamChannel().respond(Output.fromSink(wrapped,
sslEngine.isInboundDone()
|| eor && !output.hasRemaining()));
wrapped = null;
}
}
switch (wrapResult.getHandshakeStatus()) {
case NEED_TASK:
while (true) {
Runnable runnable = sslEngine.getDelegatedTask();
if (runnable == null) {
break;
}
runnable.run();
}
continue;
case NEED_UNWRAP:
// Input required. Wait until
// input becomes available and retry.
synchronized (inputProcessed) {
while (!inputProcessed[0]) {
inputProcessed.wait();
}
}
break;
default:
break;
}
// If we have a buffer overflow or everything was okay
// and there's data left, we try again, else we quit.
if (wrapResult.getStatus() != Status.BUFFER_OVERFLOW
&& (wrapResult.getStatus() != Status.OK
|| !output.hasRemaining())) {
// Underflow or closed
if (wrapped != null) {
wrapped.unlockBuffer();
}
// Warn if data is discarded
if (output.hasRemaining()) {
logger.warning(() -> toString()
+ ": Upstream data discarded, SSLEngine status: "
+ wrapResult.getStatus());
}
break;
}
// Was handshake (or partial content), get new buffer and try
// again
if (wrapped == null) {
wrapped = acquireUpstreamBuffer();
}
}
}
/**
* Close the connection.
*
* @param event the event
* @throws InterruptedException the interrupted exception
* @throws SSLException the SSL exception
*/
public void close(Close event)
throws InterruptedException, SSLException {
sslEngine.closeOutbound();
while (!sslEngine.isOutboundDone()) {
ManagedBuffer<ByteBuffer> feedback = acquireUpstreamBuffer();
sslEngine.wrap(ManagedBuffer.EMPTY_BYTE_BUFFER
.backingBuffer(), feedback.backingBuffer());
upstreamChannel().respond(Output.fromSink(feedback, false));
}
upstreamChannel().respond(new Close());
}
/**
* Handles the {@link HalfClosed} event.
*
* @throws SSLException the SSL exception
* @throws InterruptedException the interrupted exception
*/
public void upstreamHalfClosed()
throws SSLException, InterruptedException {
if (sslEngine.isInboundDone()) {
// Was properly closed on SSL level
return;
}
try {
sslEngine.closeInbound();
sslEngine.closeOutbound();
while (!sslEngine.isOutboundDone()) {
ManagedBuffer<ByteBuffer> feedback
= acquireUpstreamBuffer();
SSLEngineResult result = sslEngine.wrap(
ManagedBuffer.EMPTY_BYTE_BUFFER.backingBuffer(),
feedback.backingBuffer());
// This is required for/since JDK 11. It claims that
// outbound is not done, but doesn't produce any
// additional
// data.
if (result.getStatus() == Status.CLOSED
|| feedback.position() == 0) {
feedback.unlockBuffer();
break;
}
upstreamChannel()
.respond(Output.fromSink(feedback, false));
}
} catch (SSLException e) {
// Several clients (notably chromium, see
// don't close the connection properly. So nobody is really
// interested in this message
logger.log(Level.FINEST, e.getMessage(), e);
} catch (InterruptedException e) {
logger.log(Level.WARNING, e.getMessage(), e);
}
}
/**
* Forwards the {@link Closed} event downstream.
*
* @throws SSLException the SSL exception
* @throws InterruptedException the interrupted exception
*/
public void upstreamClosed()
throws SSLException, InterruptedException {
downPipeline.fire(new Closed(), this);
}
private ManagedBuffer<ByteBuffer> acquireUpstreamBuffer()
throws InterruptedException {
ManagedBuffer<ByteBuffer> feedback
= upstreamChannel().byteBufferPool().acquire();
int encSize
= sslEngine.getSession().getPacketBufferSize() + 50;
if (feedback.capacity() < encSize) {
feedback.replaceBackingBuffer(ByteBuffer.allocate(
encSize));
}
return feedback;
}
/**
* Fire a {@link Purge} event downstream.
*/
public void purge() {
downPipeline.fire(new Purge(), this);
}
}
}
|
package com.almasb.fxgl;
import java.util.logging.Logger;
/**
 * Provides the framework's version number ({@code major.minor.patch})
 * and a helper to log it.
 *
 * Utility class; not instantiable.
 */
public final class Version {

    private static final Logger log = FXGLLogger.getLogger("Version");

    // Version components kept as named constants instead of magic
    // numbers scattered across the getters.
    private static final int MAJOR = 0;
    private static final int MINOR = 1;
    private static final int PATCH = 3;

    /** Prevents instantiation of this utility class. */
    private Version() {
    }

    /**
     * @return the major version number
     */
    public static int getMajor() {
        return MAJOR;
    }

    /**
     * @return the minor version number
     */
    public static int getMinor() {
        return MINOR;
    }

    /**
     * @return the patch version number
     */
    public static int getPatch() {
        return PATCH;
    }

    /**
     * Logs the full version string and the project URL.
     */
    public static void print() {
        log.info("FXGL-" + getMajor() + "." + getMinor() + "." + getPatch());
        log.info("Source code and latest builds at: https://github.com/AlmasB/FXGL");
    }
}
|
package com.njackson;
import android.app.AlertDialog;
import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.util.Log;
import android.widget.Toast;
import com.getpebble.android.kit.PebbleKit;
import com.getpebble.android.kit.util.PebbleDictionary;
import fr.jayps.android.AdvancedLocation;
/**
 * Foreground Android {@link Service} that listens for GPS fixes,
 * aggregates them via {@link AdvancedLocation}, persists the running
 * statistics in {@link SharedPreferences}, and broadcasts updates to
 * {@link MainActivity} (which forwards them to the Pebble watch).
 */
public class GPSService extends Service {

    private static final String TAG = "PB-GPSService";

    // Number of location updates received while moving (speed >= 1).
    private int _updates;
    private float _speed;
    private float _averageSpeed;
    private float _distance;
    // Previously broadcast values; used to suppress redundant broadcasts.
    private float _prevspeed = -1;
    private float _prevaverageSpeed = -1;
    private float _prevdistance = -1;
    private double _prevaltitude = -1;
    private long _prevtime = -1;
    // Time of the last periodic persist of the GPS stats (ms).
    private long _lastSaveGPSTime = 0;
    private double _currentLat;
    private double _currentLon;
    // Position relative to the first fix, in units of 10 m
    // (floor(meters / 10)).
    double xpos = 0;
    double ypos = 0;
    // First fix of the session; origin for xpos/ypos. Restored from
    // preferences when available.
    Location firstLocation = null;
    private AdvancedLocation _myLocation;
    private LiveTracking _liveTracking;
    // Static self-reference so static helpers can reach the running
    // instance; null when the service is not running.
    private static GPSService _this;
    private int _refresh_interval = 1000;
    private boolean _gpsStarted = false;

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        handleCommand(intent);
        _this = this;
        makeServiceForeground("Pebble Bike", "GPS started");
        // We want this service to continue running until it is explicitly
        // stopped, so return sticky.
        return START_STICKY;
    }

    @Override
    public void onCreate() {
        _locationMgr = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
        super.onCreate();
    }

    // Returns true if the GPS provider is currently enabled.
    private boolean checkGPSEnabled(LocationManager locationMgr) {
        if(!locationMgr.isProviderEnabled(LocationManager.GPS_PROVIDER)) {
            return false;
        } else {
            return true;
        }
    }

    @Override
    public void onDestroy (){
        Log.d(TAG, "Stopped GPS Service");
        // Persist stats so they survive a restart of the service.
        saveGPSStats();
        _this = null;
        removeServiceForeground();
        //PebbleKit.closeAppOnPebble(getApplicationContext(), Constants.WATCH_UUID);
        _locationMgr.removeUpdates(onLocationChange);
    }

    // load the saved state
    public void loadGPSStats() {
        Log.d(TAG, "loadGPSStats()");
        SharedPreferences settings = getSharedPreferences(Constants.PREFS_NAME,0);

        _speed = settings.getFloat("GPS_SPEED",0.0f);
        _distance = settings.getFloat("GPS_DISTANCE",0.0f);
        _myLocation.setDistance(_distance);
        _myLocation.setElapsedTime(settings.getLong("GPS_ELAPSEDTIME", 0));
        try {
            _myLocation.setAscent(settings.getFloat("GPS_ASCENT", 0.0f));
        } catch (ClassCastException e) {
            // Older versions may have stored the value with a different
            // type; fall back to zero.
            _myLocation.setAscent(0.0);
        }
        try {
            _updates = settings.getInt("GPS_UPDATES",0);
        } catch (ClassCastException e) {
            _updates = 0;
        }
        if (settings.contains("GPS_FIRST_LOCATION_LAT") && settings.contains("GPS_FIRST_LOCATION_LON")) {
            firstLocation = new Location("PebbleBike");
            firstLocation.setLatitude(settings.getFloat("GPS_FIRST_LOCATION_LAT", 0.0f));
            firstLocation.setLongitude(settings.getFloat("GPS_FIRST_LOCATION_LON", 0.0f));
        } else {
            firstLocation = null;
        }
    }

    // save the state
    public void saveGPSStats() {
        Log.d(TAG, "saveGPSStats()");
        SharedPreferences settings = getSharedPreferences(Constants.PREFS_NAME,0);
        SharedPreferences.Editor editor = settings.edit();
        editor.putFloat("GPS_SPEED", _speed);
        editor.putFloat("GPS_DISTANCE",_distance);
        editor.putLong("GPS_ELAPSEDTIME", _myLocation.getElapsedTime());
        editor.putFloat("GPS_ASCENT", (float) _myLocation.getAscent());
        editor.putInt("GPS_UPDATES", _updates);
        if (firstLocation != null) {
            // NOTE: lat/lon are stored as float, which loses some precision
            // compared to the double values in Location.
            editor.putFloat("GPS_FIRST_LOCATION_LAT", (float) firstLocation.getLatitude());
            editor.putFloat("GPS_FIRST_LOCATION_LON", (float) firstLocation.getLongitude());
        }
        editor.commit();
    }

    // reset the saved state (and the live instance, if the service runs)
    public static void resetGPSStats(SharedPreferences settings) {
        Log.d(TAG, "resetGPSStats()");
        SharedPreferences.Editor editor = settings.edit();
        editor.putFloat("GPS_SPEED", 0.0f);
        editor.putFloat("GPS_DISTANCE",0.0f);
        editor.putLong("GPS_ELAPSEDTIME", 0);
        editor.putFloat("GPS_ASCENT", 0.0f);
        editor.putInt("GPS_UPDATES", 0);
        editor.remove("GPS_FIRST_LOCATION_LAT");
        editor.remove("GPS_FIRST_LOCATION_LON");
        editor.commit();

        if (_this != null) {
            // GPS is running
            // reninit all properties
            _this._myLocation = new AdvancedLocation(_this.getApplicationContext());
            _this._myLocation.debugLevel = 1;
            _this._myLocation.debugTagPrefix = "PB-";
            _this.loadGPSStats();
        }
    }

    // Change the GPS refresh interval of the running service (no-op if
    // the service is not running).
    public static void changeRefreshInterval(int refresh_interval) {
        if (_this != null) {
            // GPS is running
            _this._refresh_interval = refresh_interval;
            _this._requestLocationUpdates(refresh_interval);
        }
    }

    /*public static void liveSendNames(int live_max_name) {
        Log.d(TAG, "liveSendNames("+live_max_name+")");
        if (_this != null) {
            // GPS is running
            String[] names = _this._liveTracking.getNames();
            //for (int i = 0; i < names.length; i++ ) {
            //    Log.d(TAG, "names["+i+"]: " + names[i]);
            //}
            PebbleDictionary dic = new PebbleDictionary();
            if (live_max_name < 0 && names[0] != null) {
                dic.addString(Constants.MSG_LIVE_NAME0, names[0]);
            }
            if (live_max_name < 1 && names[1] != null) {
                dic.addString(Constants.MSG_LIVE_NAME1, names[1]);
            }
            if (live_max_name < 2 && names[2] != null) {
                dic.addString(Constants.MSG_LIVE_NAME2, names[2]);
            }
            if (live_max_name < 3 && names[3] != null) {
                dic.addString(Constants.MSG_LIVE_NAME3, names[3]);
            }
            if (live_max_name < 4 && names[4] != null) {
                dic.addString(Constants.MSG_LIVE_NAME4, names[4]);
            }
            PebbleKit.sendDataToPebble(_this.getApplicationContext(), Constants.WATCH_UUID, dic);
            Log.d(TAG, "send MSG_LIVE_NAMEs");
        }
    }*/

    // Initializes tracking state, restores saved stats, and starts
    // listening for GPS fixes (or broadcasts ACTION_GPS_DISABLED).
    private void handleCommand(Intent intent) {
        Log.d(TAG, "Started GPS Service");

        _liveTracking = new LiveTracking(getApplicationContext());
        _myLocation = new AdvancedLocation(getApplicationContext());
        _myLocation.debugLevel = 1;
        _myLocation.debugTagPrefix = "PB-";
        loadGPSStats();

        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
        _liveTracking.setLogin(prefs.getString("LIVE_TRACKING_LOGIN", ""));
        _liveTracking.setPassword(prefs.getString("LIVE_TRACKING_PASSWORD", ""));
        _liveTracking.setUrl(prefs.getString("LIVE_TRACKING_URL", ""));

        // check to see if GPS is enabled
        if(checkGPSEnabled(_locationMgr)) {
            _requestLocationUpdates(intent.getIntExtra("REFRESH_INTERVAL", 1000));

            // send the saved values directly to update pebble
            Intent broadcastIntent = new Intent();
            broadcastIntent.setAction(MainActivity.GPSServiceReceiver.ACTION_RESP);
            broadcastIntent.putExtra("DISTANCE", _myLocation.getDistance());
            broadcastIntent.putExtra("AVGSPEED", _myLocation.getAverageSpeed());
            broadcastIntent.putExtra("ASCENT", _myLocation.getAscent());
            sendBroadcast(broadcastIntent);
        }else {
            Intent broadcastIntent = new Intent();
            broadcastIntent.setAction(MainActivity.GPSServiceReceiver.ACTION_GPS_DISABLED);
            sendBroadcast(broadcastIntent);
            return;
        }
        //PebbleKit.startAppOnPebble(getApplicationContext(), Constants.WATCH_UUID);
    }

    // (Re-)registers the location listener with the given interval;
    // removes a previous registration first to avoid duplicate callbacks.
    private void _requestLocationUpdates(int refresh_interval) {
        Log.d(TAG, "_requestLocationUpdates("+refresh_interval+")");
        _refresh_interval = refresh_interval;
        if (_gpsStarted) {
            _locationMgr.removeUpdates(onLocationChange);
        }
        // minDistance of 2 m filters out jitter while standing still.
        _locationMgr.requestLocationUpdates(LocationManager.GPS_PROVIDER, _refresh_interval, 2, onLocationChange);
        _gpsStarted = true;
    }

    @Override
    public IBinder onBind(Intent intent) {
        // Not a bound service; clients communicate via broadcasts.
        return null; //To change body of implemented methods use File | Settings | File Templates.
    }

    private LocationManager _locationMgr = null;

    private LocationListener onLocationChange = new LocationListener() {
        @Override
        public void onLocationChanged(Location location) {
            int resultOnLocationChanged = _myLocation.onLocationChanged(location);
            //Log.d(TAG, "onLocationChanged: " + _myLocation.getTime() + " Accuracy: " + _myLocation.getAccuracy());

            _speed = _myLocation.getSpeed();
            // Treat speeds below 1 m/s as standing still.
            if(_speed < 1) {
                _speed = 0;
            } else {
                _updates++;
            }
            _averageSpeed = _myLocation.getAverageSpeed();
            _distance = _myLocation.getDistance();
            _currentLat = location.getLatitude();
            _currentLon = location.getLongitude();

            if (firstLocation == null) {
                firstLocation = location;
            }
            // Project the position onto x/y axes relative to the first fix.
            // bearingTo() returns degrees; dividing by 180 and multiplying
            // by 3.1415 converts to radians.
            // NOTE(review): 3.1415 is a truncated pi; Math.PI would be more
            // precise — confirm the watch side doesn't rely on this value.
            xpos = firstLocation.distanceTo(location) * Math.sin(firstLocation.bearingTo(location)/180*3.1415);
            ypos = firstLocation.distanceTo(location) * Math.cos(firstLocation.bearingTo(location)/180*3.1415);
            // Quantize to 10 m cells.
            xpos = Math.floor(xpos/10);
            ypos = Math.floor(ypos/10);
            Log.d(TAG, "xpos="+xpos+"-ypos="+ypos);

            boolean send = false;
            //if(_myLocation.getAccuracy() < 15.0) // not really needed, something similar is done in AdvancedLocation
            // Broadcast only when something changed, or at least every 5 s.
            if (_speed != _prevspeed || _averageSpeed != _prevaverageSpeed || _distance != _prevdistance || _prevaltitude != _myLocation.getAltitude()) {
                send = true;
                _prevaverageSpeed = _averageSpeed;
                _prevdistance = _distance;
                _prevspeed = _speed;
                _prevaltitude = _myLocation.getAltitude();
                _prevtime = _myLocation.getTime();
            } else if (_prevtime + 5000 < _myLocation.getTime()) {
                Log.d(TAG, "New GPS data without move");
                send = true;
                _prevtime = _myLocation.getTime();
            }
            if (send) {
                Intent broadcastIntent = new Intent();
                broadcastIntent.setAction(MainActivity.GPSServiceReceiver.ACTION_RESP);
                broadcastIntent.addCategory(Intent.CATEGORY_DEFAULT);
                broadcastIntent.putExtra("SPEED", _speed);
                broadcastIntent.putExtra("DISTANCE", _distance);
                broadcastIntent.putExtra("AVGSPEED", _averageSpeed);
                broadcastIntent.putExtra("LAT",_currentLat);
                broadcastIntent.putExtra("LON",_currentLon);
                broadcastIntent.putExtra("ALTITUDE", _myLocation.getAltitude());
                broadcastIntent.putExtra("ASCENT", _myLocation.getAscent());
                broadcastIntent.putExtra("ASCENTRATE", (3600f * _myLocation.getAscentRate())); // in m/h
                broadcastIntent.putExtra("SLOPE", (100f * _myLocation.getSlope()));
                broadcastIntent.putExtra("ACCURACY", _myLocation.getAccuracy());
                broadcastIntent.putExtra("TIME",_myLocation.getElapsedTime());
                broadcastIntent.putExtra("XPOS", xpos);
                broadcastIntent.putExtra("YPOS", ypos);
                broadcastIntent.putExtra("BEARING", _myLocation.getBearing());
                sendBroadcast(broadcastIntent);

                // Persist at most once per minute.
                if (_lastSaveGPSTime == 0 || (_myLocation.getTime() - _lastSaveGPSTime > 60000)) {
                    saveGPSStats();
                    _lastSaveGPSTime = _myLocation.getTime();
                }
            }
            if (MainActivity._liveTracking && resultOnLocationChanged == AdvancedLocation.SAVED) {
                _liveTracking.addPoint(firstLocation, location);
            }
        }

        @Override
        public void onProviderDisabled(String arg0) {
            // TODO Auto-generated method stub
        }

        @Override
        public void onProviderEnabled(String arg0) {
            // TODO Auto-generated method stub
        }

        @Override
        public void onStatusChanged(String arg0, int arg1, Bundle arg2) {
            // TODO Auto-generated method stub
        }
    };

    // Promotes the service to foreground with a persistent notification
    // so Android doesn't kill it while tracking.
    private void makeServiceForeground(String titre, String texte) {
        final int myID = 1000;

        //The intent to launch when the user clicks the expanded notification
        Intent i = new Intent(this, MainActivity.class);
        i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
        PendingIntent pendIntent = PendingIntent.getActivity(this, 0, i, 0);

        // The following code is deprecated since API 11 (Android 3.x). Notification.Builder could be used instead, but without Android 2.x compatibility
        Notification notification = new Notification(R.drawable.ic_launcher, "Pebble Bike", System.currentTimeMillis());
        notification.setLatestEventInfo(this, titre, texte, pendIntent);
        notification.flags |= Notification.FLAG_NO_CLEAR;
        startForeground(myID, notification);
    }

    private void removeServiceForeground() {
        stopForeground(true);
    }
}
|
package org.drools.gorm.session.marshalling;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.drools.KnowledgeBase;
import org.drools.gorm.marshalling.GrailsPlaceholderResolverStrategy;
import org.drools.gorm.session.SessionInfo;
import org.drools.marshalling.Marshaller;
import org.drools.marshalling.MarshallerFactory;
import org.drools.marshalling.ObjectMarshallingStrategy;
import org.drools.runtime.Environment;
import org.drools.runtime.EnvironmentName;
import org.drools.runtime.KnowledgeSessionConfiguration;
import org.drools.runtime.StatefulKnowledgeSession;
/**
 * Helper that marshals a Drools {@link StatefulKnowledgeSession} to and
 * from a byte array, always placing a {@link GrailsPlaceholderResolverStrategy}
 * in front of any strategies configured in the {@link Environment}.
 */
public class GormSessionMarshallingHelper {

    private KnowledgeBase kbase;
    private KnowledgeSessionConfiguration conf;
    private StatefulKnowledgeSession ksession;
    private Marshaller marshaller;
    private Environment env;

    /**
     * Creates a helper for an existing {@link SessionInfo} and immediately
     * restores the session from the stored snapshot.
     *
     * @param info the persisted session record; receives a back-reference
     * @param kbase the knowledge base the session belongs to
     * @param conf the session configuration
     * @param env the environment (may carry marshalling strategies)
     */
    public GormSessionMarshallingHelper(SessionInfo info,
                                        KnowledgeBase kbase,
                                        KnowledgeSessionConfiguration conf,
                                        Environment env) {
        info.setMarshallingHelper( this );
        this.kbase = kbase;
        this.conf = conf;
        this.env = env;
        this.marshaller = MarshallerFactory.newMarshaller( kbase, strategies() ) ;
        loadSnapshot( info.getData() );
    }

    /**
     * Creates a helper without an associated session yet; a session is
     * attached later via {@link #loadSnapshot(byte[], StatefulKnowledgeSession)}.
     *
     * @param kbase the knowledge base
     * @param conf the session configuration
     * @param env the environment (may carry marshalling strategies)
     */
    public GormSessionMarshallingHelper(KnowledgeBase kbase,
                                        KnowledgeSessionConfiguration conf, Environment env) {
        this.kbase = kbase;
        this.conf = conf;
        this.env = env;
        this.marshaller = MarshallerFactory.newMarshaller( kbase, strategies() ) ;
    }

    /**
     * Creates a helper for a freshly created session. No snapshot is
     * written here; the persistence layer requests one on update.
     *
     * @param ksession the live session
     * @param conf the session configuration
     */
    public GormSessionMarshallingHelper(StatefulKnowledgeSession ksession,
                                        KnowledgeSessionConfiguration conf) {
        this.ksession = ksession;
        this.kbase = ksession.getKnowledgeBase();
        this.conf = conf;
        this.env = ksession.getEnvironment();
        this.marshaller = MarshallerFactory.newMarshaller( kbase, strategies()) ;
    }

    /**
     * Assembles the strategy array used by the marshaller: the Grails
     * placeholder resolver first, followed by any strategies found in the
     * environment, or the default serialize strategy when none are set.
     */
    private ObjectMarshallingStrategy[] strategies() {
        final ObjectMarshallingStrategy[] configured
            = (ObjectMarshallingStrategy[]) this.env.get( EnvironmentName.OBJECT_MARSHALLING_STRATEGIES );
        if (configured == null) {
            return new ObjectMarshallingStrategy[] {
                new GrailsPlaceholderResolverStrategy(),
                MarshallerFactory.newSerializeMarshallingStrategy() };
        }
        final List<ObjectMarshallingStrategy> combined
            = new ArrayList<ObjectMarshallingStrategy>(configured.length + 1);
        combined.add(new GrailsPlaceholderResolverStrategy());
        combined.addAll(Arrays.asList(configured));
        return combined.toArray(new ObjectMarshallingStrategy[0]);
    }

    /**
     * Serializes the current session.
     *
     * @return the marshalled session bytes
     * @throws RuntimeException if marshalling fails
     */
    public byte[] getSnapshot() {
        final ByteArrayOutputStream sink = new ByteArrayOutputStream();
        try {
            marshaller.marshall( sink, ksession );
        } catch ( IOException e ) {
            throw new RuntimeException( "Unable to get session snapshot",
                                        e );
        }
        return sink.toByteArray();
    }

    /**
     * Restores a session from the given bytes. When {@code ksession} is
     * {@code null} a new session is created; otherwise the existing one
     * is populated in place.
     *
     * @param bytes the marshalled session
     * @param ksession the session to restore into, or {@code null}
     * @return the restored session (also stored in this helper)
     * @throws RuntimeException if unmarshalling fails
     */
    public StatefulKnowledgeSession loadSnapshot(byte[] bytes,
                                                 StatefulKnowledgeSession ksession) {
        try {
            final ByteArrayInputStream source = new ByteArrayInputStream( bytes );
            if (ksession != null) {
                this.ksession = ksession;
                this.marshaller.unmarshall( source, this.ksession );
            } else {
                this.ksession = this.marshaller.unmarshall(source,
                                                           this.conf,
                                                           this.env);
            }
            return this.ksession;
        } catch ( Exception e ) {
            throw new RuntimeException( "Unable to load session snapshot",
                                        e );
        }
    }

    /**
     * Restores the helper's current session from the given bytes.
     *
     * @param bytes the marshalled session
     * @return the restored session
     */
    public StatefulKnowledgeSession loadSnapshot(byte[] bytes) {
        return loadSnapshot(bytes, ksession);
    }

    /** @return the session managed by this helper */
    public StatefulKnowledgeSession getObject() {
        return ksession;
    }

    /** @return the knowledge base */
    public KnowledgeBase getKbase() {
        return kbase;
    }

    /** @return the session configuration */
    public KnowledgeSessionConfiguration getConf() {
        return conf;
    }
}
|
package org.noses.game.character;
import java.util.List;
import org.noses.game.GeldarGame;
import org.noses.game.character.inventory.Inventory;
import org.noses.game.item.Egg;
import org.noses.game.item.Item;
import org.noses.game.path.MovingCollision;
import org.noses.game.ui.hud.HUD;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.Sound;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer;
import com.badlogic.gdx.utils.Timer;
import com.badlogic.gdx.utils.Timer.Task;
/**
 * The player-controlled character. Captures dragons for points, takes damage
 * from mages (with a short invulnerability window), and collects inventory
 * items on contact.
 */
public class Avatar extends MovingCharacter {

    /** True while the avatar may capture colliding dragons. */
    private boolean canCapture;

    /** True while the avatar may take damage; cleared for 2s after each hit. */
    private boolean canBeHurt;

    /** One-shot task that re-enables damage after the invulnerability window. */
    private Task hurtTask;

    private int score;

    Sound captureSound;
    Sound pickupSound;
    Sound walkSound;
    Sound hurtSound;

    Inventory inventory;

    public Avatar(GeldarGame parent) {
        super("avatar.png", parent);
        captureSound = Gdx.audio.newSound(Gdx.files.internal("sounds/capture.wav"));
        pickupSound = Gdx.audio.newSound(Gdx.files.internal("sounds/pickup.wav"));
        hurtSound = Gdx.audio.newSound(Gdx.files.internal("sounds/hurt.wav"));
        walkSound = Gdx.audio.newSound(Gdx.files.internal("sounds/walk.wav"));
        initialize();
        inventory = new Inventory();
    }

    /** Resets the avatar for a new round: abilities on, score zeroed, respawned. */
    public void initialize() {
        canCapture = true;
        canBeHurt = true;
        findAGoodSpot();
        score = 0;
    }

    public void setScore(int score) {
        this.score = score;
    }

    @Override
    protected void walk() {
        walkSound.play(0.5f);
        super.walk();
    }

    /** @return movement speed in tiles per second */
    @Override
    public float getNumPerSecond() {
        return 7;
    }

    public boolean canCapture() {
        return canCapture;
    }

    /**
     * Handles capturing a dragon: removes the collider from play, bumps the
     * score on the HUD and plays the capture sound. No-op while capturing is
     * disabled.
     */
    public void captured(MovingCharacter collider) {
        if (!canCapture) {
            return;
        }
        MovingCollision.getInstance().getMovingCharacters().remove(collider);
        score++;
        HUD.getInstance().setScore(score);
        captureSound.play();
    }

    public boolean canBeHurt() {
        return canBeHurt;
    }

    /**
     * Applies mage damage: -2 score, then a 2 second invulnerability window
     * managed by a one-shot timer task. No-op while invulnerable.
     */
    public void hurt() {
        if (!canBeHurt()) {
            return;
        }
        score -= 2;
        canBeHurt = false;
        // Restart the window if a previous task is still pending.
        if ((hurtTask != null) && (hurtTask.isScheduled())) {
            hurtTask.cancel();
        }
        hurtTask = Timer.instance().scheduleTask(new Task() {
            @Override
            public void run() {
                canBeHurt = true;
                hurtTask.cancel();
            }
        }, 2);
        HUD.getInstance().setScore(score);
        hurtSound.play();
    }

    @Override
    public void chooseNextSpot() {
        // Player-controlled: the avatar never picks a destination on its own.
    }

    @Override
    public void collideWith(MovingCharacter collider) {
        if (collider instanceof Dragon) {
            captured(collider);
        } else if (collider instanceof Mage) {
            hurt();
        }
    }

    @Override
    public void collideWith(Item item) {
        if (item.isInventory()) {
            addToInventory(item);
        }
    }

    @Override
    public void setX(int x) {
        walkSound.play(0.5f);
        super.setX(x);
    }

    @Override
    public void setY(int y) {
        walkSound.play(0.5f);
        super.setY(y);
    }

    /** Disables capture and damage and stops movement (e.g. at game end). */
    public void disable() {
        canCapture = false;
        canBeHurt = false;
        stop();
    }

    public int getScore() {
        return score;
    }

    /**
     * Adds an inventory item (once) and removes it from the map. Items that
     * are not inventory items are ignored.
     */
    public void addToInventory(Item item) {
        if (!item.isInventory()) {
            return;
        }
        if (!inventory.contains(item)) {
            System.out.println("Adding " + item + " to inventory");
            pickupSound.play();
            inventory.add(item);
        }
        parent.removeItem(item);
    }
}
|
package ca.ulaval.glo4002.services.assemblers;
import javax.persistence.EntityNotFoundException;
import ca.ulaval.glo4002.domain.intervention.Intervention;
import ca.ulaval.glo4002.domain.intervention.InterventionFactory;
import ca.ulaval.glo4002.domain.patient.Patient;
import ca.ulaval.glo4002.domain.patient.PatientRepository;
import ca.ulaval.glo4002.exceptions.domainexceptions.interventionexceptions.PatientDoesNotExist;
import ca.ulaval.glo4002.services.dto.InterventionCreationDTO;
/**
 * Assembles {@link Intervention} domain objects from their creation DTOs,
 * resolving the referenced patient through the supplied repository.
 */
public class InterventionAssembler {

    /** Factory producing the intervention; stateless, so safe to keep final. */
    private final InterventionFactory interventionFactory;

    public InterventionAssembler() {
        this.interventionFactory = new InterventionFactory();
    }

    /**
     * Builds an intervention from the DTO, looking up the patient by number.
     *
     * @param dto               creation data, including the patient number
     * @param patientRepository repository used to resolve the patient
     * @return the assembled intervention
     * @throws PatientDoesNotExist if no patient matches the DTO's patient number
     */
    public Intervention assembleInterventionFromDTO(InterventionCreationDTO dto, PatientRepository patientRepository) {
        Patient patient;
        try {
            patient = patientRepository.getById(dto.getPatientNumber());
        } catch (EntityNotFoundException e) {
            // NOTE(review): the JPA cause is dropped here; if PatientDoesNotExist
            // offers a (Throwable) constructor, consider passing 'e' along.
            throw new PatientDoesNotExist();
        }
        return interventionFactory.createIntervention(dto, patient);
    }
}
|
package tv.rocketbeans.rbcgj.ui;
import com.badlogic.gdx.graphics.Camera;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import aurelienribon.tweenengine.BaseTween;
import aurelienribon.tweenengine.Tween;
import aurelienribon.tweenengine.TweenCallback;
import aurelienribon.tweenengine.TweenEquation;
import aurelienribon.tweenengine.TweenEquations;
import aurelienribon.tweenengine.TweenManager;
import tv.rocketbeans.rbcgj.tweens.ActorTween;
import tv.rocketbeans.rbcgj.tweens.SharedTweenManager;
/**
 * Singleton helper that spawns fading tooltip labels on a {@link Stage}.
 * Only one tooltip is "current" at a time: spawning a new one fades the
 * previous tooltip out early. {@link #init(Stage, Camera)} must be called
 * before the first {@code create(...)}.
 */
public class Tooltip {

    /** Factory hook for supplying custom tooltip actors. */
    public interface TooltipFactory {
        Actor create();
    }

    private static final Tooltip instance = new Tooltip();

    private TweenManager tweenManager = SharedTweenManager.getInstance();

    private Stage stage;

    private Camera camera;

    /** Tooltips currently alive on the stage; entries are removed once faded out. */
    private Set<Actor> tooltips = new HashSet<Actor>();

    private TweenEquation equation;

    private float duration;

    private float scale;

    /** The most recently spawned tooltip, or null if none is active. */
    private Actor lastTooltip;

    private Tooltip() {
        setTweenEquation(TweenEquations.easeOutCubic);
        duration = 2.5f;
        scale = 1.0f;
    }

    public static Tooltip getInstance() {
        return instance;
    }

    /** Convenience overload: white tooltip without a completion callback. */
    public void create(float x, float y, Label.LabelStyle style, String text) {
        create(x, y, style, text, Color.WHITE, null);
    }

    /**
     * Creates a default word-wrapped label tooltip whose reported coordinates
     * are corrected by the camera position, so (x, y) are world coordinates.
     */
    public void create(float x, float y, final Label.LabelStyle style, final String text, Color color, final TweenCallback callback) {
        create(x, y, style, text, color, callback, new TooltipFactory() {
            @Override
            public Actor create() {
                final Label tooltip = new Label(text, style) {
                    @Override
                    public float getX() {
                        return super.getX() - camera.position.x + camera.viewportWidth / 2f - this.getWidth() / 2f;
                    }

                    @Override
                    public float getY() {
                        return super.getY() - camera.position.y + camera.viewportHeight / 2f - this.getHeight() / 2f;
                    }

                    @Override
                    public float getOriginX() {
                        return super.getOriginX() + this.getWidth() / 2f;
                    }

                    @Override
                    public float getOriginY() {
                        return super.getOriginY() + this.getHeight() / 2f;
                    }
                };
                tooltip.setWrap(true);
                tooltip.setWidth(400f);
                return tooltip;
            }
        });
    }

    /**
     * Spawns a tooltip produced by the given factory: fades out the previous
     * tooltip early, then schedules fade-out (after a 2.8s delay) and scale
     * tweens for the new one. The optional callback fires when the fade-out
     * completes.
     */
    public void create(float x, float y, Label.LabelStyle style, String text, Color color, final TweenCallback callback, TooltipFactory factory) {
        if (lastTooltip != null) {
            final Actor previous = lastTooltip;
            tweenManager.killTarget(previous);
            Tween.to(previous, ActorTween.ALPHA, 1f).target(0f).setCallbackTriggers(TweenCallback.COMPLETE)
                    .setCallback(new TweenCallback() {
                        @Override
                        public void onEvent(int type, BaseTween<?> source) {
                            stage.getActors().removeValue(previous, true);
                            // Fix: drop the actor from the tracking set (was leaked before).
                            tooltips.remove(previous);
                            // Fix: don't clobber a newer tooltip that replaced this one
                            // while its fade-out was still running.
                            if (lastTooltip == previous) {
                                lastTooltip = null;
                            }
                        }
                    }).ease(equation).start(tweenManager);
        }
        final Actor tooltip = factory.create();
        tooltip.setColor(color);
        tooltip.setPosition(x, y);
        stage.addActor(tooltip);
        tooltips.add(tooltip);
        Tween.to(tooltip, ActorTween.ALPHA, this.duration).delay(2.8f).target(0f).setCallbackTriggers(TweenCallback.COMPLETE)
                .setCallback(new TweenCallback() {
                    @Override
                    public void onEvent(int type, BaseTween<?> source) {
                        if (callback != null) {
                            callback.onEvent(type, source);
                        }
                        // Fix: only reset lastTooltip if it still refers to this tooltip.
                        if (lastTooltip == tooltip) {
                            lastTooltip = null;
                        }
                        stage.getActors().removeValue(tooltip, true);
                        // Fix: remove from the tracking set to avoid unbounded growth.
                        tooltips.remove(tooltip);
                    }
                }).ease(equation).start(tweenManager);
        Tween.to(tooltip, ActorTween.SCALE, this.duration).target(scale).ease(equation).start(tweenManager);
        lastTooltip = tooltip;
    }

    public void setDuration(float duration) {
        this.duration = duration;
    }

    public void setTweenEquation(TweenEquation equation) {
        this.equation = equation;
    }

    public void setScale(float scale) {
        this.scale = scale;
    }

    /** Kills all tween animations and removes every live tooltip from the stage. */
    public void clear() {
        for (Actor a : tooltips) {
            tweenManager.killTarget(a);
            stage.getActors().removeValue(a, true);
        }
        tooltips.clear();
        // Fix: the current tooltip was removed too; forget the stale reference.
        lastTooltip = null;
    }

    /** Binds the tooltip system to the stage and camera; call once at setup. */
    public void init(Stage stage, Camera camera) {
        this.stage = stage;
        this.camera = camera;
    }
}
|
package ch.unibas.dmi.dbis.reqman.ui.evaluator;
import ch.unibas.dmi.dbis.reqman.control.EntityController;
import ch.unibas.dmi.dbis.reqman.data.Group;
import ch.unibas.dmi.dbis.reqman.data.Member;
import ch.unibas.dmi.dbis.reqman.ui.common.AbstractVisualCreator;
import ch.unibas.dmi.dbis.reqman.ui.common.Utils;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.scene.control.*;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.input.MouseButton;
import javafx.util.Callback;
import org.apache.commons.lang.StringUtils;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* TODO: Write JavaDoc
*
* @author loris.sauter
*/
/**
 * Creator/editor scene for a {@link Group}: lets the user set the group name,
 * project name and export file name, and edit the member list through an
 * inline-editable table with an add/remove context menu.
 *
 * @author loris.sauter
 */
public class GroupPropertiesScene extends AbstractVisualCreator<ch.unibas.dmi.dbis.reqman.data.Group> {

    private TextField tfName;

    private TextField tfProjectName;

    private TextField tfExportFileName;

    private TableView<ObservableMember> table;

    /** The group being edited; null while creating a brand-new group. */
    private ch.unibas.dmi.dbis.reqman.data.Group group = null;

    /** Backing list of the member table; holds a single empty placeholder row when no members exist. */
    private ObservableList<ObservableMember> tableData;

    GroupPropertiesScene() {
        populateScene();
    }

    GroupPropertiesScene(Group group) {
        this();
        this.group = group;
        loadGroup();
    }

    @Override
    public String getPromptTitle() {
        return "Group Properties";
    }

    /**
     * Validates the form and either creates a new group or updates the edited
     * one. Shows a warning dialog and aborts when the name is missing or when
     * no real member has been entered.
     */
    @Override
    public void handleSaving(ActionEvent event) {
        String name = tfName.getText();
        String projectName = tfProjectName.getText();
        if (StringUtils.isNotEmpty(name)) {
            // A single placeholder row means the user never entered a member.
            boolean onlyPlaceholder = tableData.size() == 1 && tableData.get(0).isEmpty();
            List<Member> members = tableData.stream().map(ObservableMember::getMember).collect(Collectors.toList());
            // Fix: Collectors.toList() never returns null; the old null check was dead.
            if (onlyPlaceholder || members.isEmpty()) {
                Utils.showWarningDialog("Invalid Members", "The group has too few members, at least one is needed!");
                return;
            }
            if (group == null) {
                group = EntityController.getInstance().createGroup(name, members.toArray(new Member[0]));
            } else {
                group.setName(name);
                group.setMembers(members);
            }
            if (StringUtils.isNotEmpty(tfExportFileName.getText())) {
                group.setExportFileName(tfExportFileName.getText());
            }
            if (StringUtils.isNotBlank(projectName)) {
                group.setProjectName(projectName);
            }
            dismiss();
        } else {
            // Only the group name is mandatory at the moment.
            Utils.showWarningDialog("Mandatory field(s) missing", "Group name is missing.\n");
        }
    }

    @Override
    public ch.unibas.dmi.dbis.reqman.data.Group create() throws IllegalStateException {
        if (!isCreatorReady()) {
            throw new IllegalStateException("Cannot create Group, creator not ready");
        }
        return group;
    }

    @Override
    public boolean isCreatorReady() {
        return group != null;
    }

    @Override
    protected void populateScene() {
        initComponents();
        loadGroup();
    }

    /** Copies the edited group's properties and members into the form fields. */
    private void loadGroup() {
        if (group != null) {
            tfName.setText(group.getName());
            tfProjectName.setText(group.getProjectName());
            tfExportFileName.setText(group.getExportFileName());
        }
        loadMembers();
    }

    /** Builds the form grid; 'grid' and 'buttons' are provided by the superclass. */
    private void initComponents() {
        Label lblName = new Label("Group Name*");
        Label lblProjectName = new Label("Project Name");
        Label lblExportFileName = new Label("Export File Name");
        Label lblMembers = new Label("Members");
        table = createTableView();
        tfName = new TextField();
        tfProjectName = new TextField();
        tfExportFileName = new TextField();
        int rowIndex = 0;
        grid.add(lblName, 0, rowIndex);
        grid.add(tfName, 1, rowIndex++);
        grid.add(lblProjectName, 0, rowIndex);
        grid.add(tfProjectName, 1, rowIndex++);
        grid.add(lblExportFileName, 0, rowIndex);
        grid.add(tfExportFileName, 1, rowIndex++);
        grid.add(lblMembers, 0, rowIndex);
        grid.add(table, 1, rowIndex, 1, 2);
        rowIndex += 2;
        grid.add(buttons, 0, ++rowIndex, 2, 1);
        setRoot(grid);
    }

    /** Creates the editable three-column member table with its context menu. */
    private TableView<ObservableMember> createTableView() {
        TableView<ObservableMember> table = new TableView<>();
        table.setEditable(true);
        Callback<TableColumn<ObservableMember, String>, TableCell<ObservableMember, String>> cellFactory = (TableColumn<ObservableMember, String> c) -> new UpdatingCell();
        TableColumn<ObservableMember, String> firstCol = new TableColumn<>("Name");
        firstCol.setCellValueFactory(
            new PropertyValueFactory<>("name")
        );
        firstCol.setCellFactory(cellFactory);
        firstCol.setOnEditCommit((TableColumn.CellEditEvent<ObservableMember, String> t) -> {
            t.getTableView().getItems().get(t.getTablePosition().getRow()).setName(t.getNewValue());
        });
        TableColumn<ObservableMember, String> secondCol = new TableColumn<>("First Name");
        secondCol.setCellValueFactory(
            new PropertyValueFactory<>("firstName")
        );
        secondCol.setCellFactory(cellFactory);
        secondCol.setOnEditCommit((TableColumn.CellEditEvent<ObservableMember, String> t) -> {
            t.getTableView().getItems().get(t.getTablePosition().getRow()).setFirstName(t.getNewValue());
        });
        TableColumn<ObservableMember, String> thirdCol = new TableColumn<>("Email");
        thirdCol.setCellValueFactory(new PropertyValueFactory<>("email"));
        thirdCol.setCellFactory(cellFactory);
        thirdCol.setOnEditCommit((TableColumn.CellEditEvent<ObservableMember, String> t) -> {
            t.getTableView().getItems().get(t.getTablePosition().getRow()).setEmail(t.getNewValue());
        });
        table.getColumns().addAll(firstCol, secondCol, thirdCol);
        // Context menu offering row insertion and removal.
        ContextMenu cm = new ContextMenu();
        MenuItem addMember = new MenuItem("Add Row");
        addMember.setOnAction(this::handleAddMember);
        MenuItem rmMember = new MenuItem("Remove current row");
        rmMember.setOnAction(this::handleRemoveMember);
        cm.getItems().addAll(addMember, rmMember);
        table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
        table.setOnMouseClicked(event -> {
            if (MouseButton.SECONDARY.equals(event.getButton())) {
                cm.show(table, event.getScreenX(), event.getScreenY());
            }
        });
        table.setItems(tableData);
        return table;
    }

    /** Prompts for a new member; the placeholder row is replaced by the first real member. */
    private void handleAddMember(ActionEvent event) {
        Member member = EvaluatorPromptFactory.promptMember();
        if (member != null) {
            if (isMemberListOnlyEmpty()) {
                tableData.remove(0);
            }
            tableData.add(ObservableMember.fromMember(member));
        }
    }

    /** (Re)fills the table from the group's members, or installs the placeholder row. */
    private void loadMembers() {
        if (group != null) {
            tableData.clear();
            tableData.addAll(Arrays.stream(group.getMembers()).map(ObservableMember::fromMember).collect(Collectors.toList()));
        } else {
            setMemberListOnlyEmpty();
        }
        table.setItems(tableData);
    }

    /** @return true iff the table contains exactly the single empty placeholder row */
    private boolean isMemberListOnlyEmpty() {
        // Fix: the old code called tableData.get(0) unguarded and threw
        // IndexOutOfBoundsException on an empty list.
        if (tableData.size() != 1) {
            return false;
        }
        return tableData.get(0).isEmpty();
    }

    private void handleRemoveMember(ActionEvent event) {
        int index = table.getSelectionModel().getSelectedIndex();
        ObservableMember item = table.getSelectionModel().getSelectedItem();
        if (item != null) {
            tableData.remove(index);
        }
        if (tableData.isEmpty()) {
            setMemberListOnlyEmpty();
        }
    }

    /** Replaces the backing list with a single empty placeholder row. */
    private void setMemberListOnlyEmpty() {
        tableData = FXCollections.observableArrayList(ObservableMember.empty());
        // Fix: reassigning tableData alone left the TableView bound to the old
        // list (e.g. after removing the last member), so the UI went stale.
        if (table != null) {
            table.setItems(tableData);
        }
    }

    /**
     * Table row model wrapping a {@link Member} in JavaFX string properties.
     * A null member marks the "empty placeholder" row.
     */
    public static class ObservableMember {

        private static final String DELIMITER = ",";

        private final SimpleStringProperty name, firstName, email;

        private final Member member;

        public ObservableMember(Member member) {
            if (member == null) {
                this.member = null;
                name = new SimpleStringProperty();
                firstName = new SimpleStringProperty();
                email = new SimpleStringProperty();
            } else {
                this.member = member;
                this.name = new SimpleStringProperty(member.getName());
                this.firstName = new SimpleStringProperty(member.getFirstName());
                this.email = new SimpleStringProperty(member.getEmail());
            }
        }

        /** Serializes a member as "name,firstName[,email]" (email only when present). */
        public static String convertToString(ObservableMember m) {
            StringBuilder sb = new StringBuilder();
            sb.append(m.getName());
            sb.append(DELIMITER);
            sb.append(m.getFirstName());
            if (StringUtils.isNotEmpty(m.getEmail())) {
                sb.append(DELIMITER);
                sb.append(m.getEmail());
            }
            return sb.toString();
        }

        public static ObservableMember fromMember(Member member) {
            return new ObservableMember(member);
        }

        /** @return the placeholder row that represents "no members yet" */
        public static ObservableMember empty() {
            return new ObservableMember(null);
        }

        public String getName() {
            return name.getValue();
        }

        public void setName(String name) {
            this.name.setValue(name);
        }

        public String getFirstName() {
            return firstName.getValue();
        }

        public void setFirstName(String firstName) {
            this.firstName.setValue(firstName);
        }

        public String getEmail() {
            return email.getValue();
        }

        public void setEmail(String email) {
            this.email.setValue(email);
        }

        public Member getMember() {
            return member;
        }

        public SimpleStringProperty nameProperty() {
            return name;
        }

        public SimpleStringProperty firstNameProperty() {
            return firstName;
        }

        public SimpleStringProperty emailProperty() {
            return email;
        }

        /** @return true iff this is the placeholder row (no wrapped member) */
        public boolean isEmpty() {
            return member == null;
        }
    }

    /**
     * Text-field table cell that commits its edit when the editor loses focus,
     * not only on ENTER.
     */
    private static class UpdatingCell extends TableCell<ObservableMember, String> {

        private TextField textField;

        @Override
        public void startEdit() {
            if (!isEmpty()) {
                super.startEdit();
                createTextField();
                setText(null);
                setGraphic(textField);
                textField.selectAll();
            }
        }

        @Override
        public void cancelEdit() {
            super.cancelEdit();
            setText(getItem());
            setGraphic(null);
        }

        @Override
        public void updateItem(String item, boolean empty) {
            super.updateItem(item, empty);
            // Use the 'empty' parameter directly, per the TableCell contract.
            if (empty) {
                setText(null);
                setGraphic(null);
            } else {
                if (isEditing()) {
                    if (textField != null) {
                        textField.setText(getString());
                    }
                    setText(null);
                    setGraphic(textField);
                } else {
                    setText(getString());
                    setGraphic(null);
                }
            }
        }

        /** @return the cell's item text, never null */
        public String getString() {
            return getItem() == null ? "" : getItem();
        }

        /** Lazily creates the editor and installs the commit-on-focus-lost listener. */
        private void createTextField() {
            textField = new TextField(getString());
            textField.setMinWidth(this.getWidth() - this.getGraphicTextGap() * 2);
            textField.focusedProperty().addListener((ObservableValue<? extends Boolean> observableValue, Boolean oldValue, Boolean newValue) -> {
                if (!newValue) {
                    commitEdit(textField.getText());
                }
            });
        }
    }
}
|
package xal.model.elem;
import java.io.PrintWriter;
import xal.model.IElement;
import xal.model.IProbe;
import xal.model.ModelException;
import xal.model.elem.sync.IRfCavityCell;
import xal.model.elem.sync.IRfGap;
import xal.sim.scenario.LatticeElement;
import xal.smf.impl.RfGap;
import xal.tools.beam.EnergyVector;
import xal.tools.beam.PhaseMap;
import xal.tools.beam.PhaseMatrix;
import xal.tools.beam.RelativisticParameterConverter;
import xal.tools.beam.em.AxialFieldSpectrum;
import xal.tools.beam.optics.AcceleratingRfGap;
import xal.tools.math.fnc.IRealFunction;
/**
* <p>
* Represents the action of an ideal RF gap. Gap is modeled as a thin element
 * whose accelerating action is given by the Panofsky formula.
 * </p>
 * <p>
* The gap provides acceleration to the propagation probe as well as
* longitudinal focusing and radial defocusing. These mechanisms are
* implemented according to that provided by an ideal gap where the effects can
* be described analytically.
* </p>
* <p>
* <h4>CKA NOTES:</h4>
* <br/>
* · The gap length <i>L</i> should not be used in any time or phase
* calculations since it is absorbed by adjacent drift spaces. (So I believe.)
* For example, drift time Δ<i>t</i> should probably not include the
* term Δ<i>t</i> ∝ ω<i>L</i>/2.
* <br/>
* <br/>
* · The <i>phase correction</i> parameter Δφ I believe is the
* change in probe phase due to propagation from the first gap in the cavity.
* Thus, the first gap has a correction of Δφ and the probe phase
* there φ<sub>0</sub> is simply the cavity phase φ<sub><i>cav</i></sub>.
* <br/>
* <br/>
* · It is essential that the probe has the correct phase coming into
* this element. If this is a first
* gap (see <code>{@link #isFirstGap()}</code>) then that
* phase must be the phase of the cavity, given by
* <code>{@link #getPhase()}</code>. Thus, we must set the
* probe phase to the phase of the cavity at these special gaps. (This is
* unfortunately necessary since we cannot implement RF cavity elements
* with the current lattice generator.) In the Element/Algorithm/Probe
* architecture elements do not modify probes. Thus, although it's a kluge
* no matter which way you do it, the probe phase should be reset by the
* algorithm.
* <br/>
* <br/>
* · There are provisions for both an offset of gap electrical center
* with geometric center and for the Fourier sine transit time factor <i>S</i>.
* And both values are used. This creates a potential inconsistency since the sine
* transit time factor can account for any shifts in the field center
* (i.e., the Fourier sine and cosine transforms together can represent
* <b>any</b> continuous function.) I'm not sure if the provided <i>S</i>(β)
* is taken at the geometric center or the electrical center (there it would
* probably be zero). If at the geometric center you are probably shifting
* everything right back to the geometric center by using the offset.
* <br/>
* <br/>
* · The above condition is localized in the method
* <code>{@link #compGapPhaseAndEnergyGain(IProbe)}</code> which determines
* when to add the phase change due
* to the gap offset. It is added into the phase change when the phase and
* energy are computed directly by the Panofsky method (i.e., method
* <code>{@link #compGapPhaseAndEnergyGainDirect(IProbe)}</code>). When
* the <i>S</i>(β) transit time factor is used to compute the phase and
* energy gain
* (i.e., method <code>{@link #compGapPhaseAndEnergyGainIndirect(IProbe)}</code>)
* the change in phase due to gap offset is not added.
* <br/>
* <br/>
* · The above method <code>{@link #compGapPhaseAndEnergyGain(IProbe)}</code>
* also consolidates the two different methods of computing
* the phase and energy gains through the gap. One method is the direct method based
* upon the Panofsky equation (and where the phase change is 0 through the gap). This
* method is used when the <tt>useRfGapPhaseCalculation</tt> flag in the probe's
* algorithm object is set to <code>false</code>. If it is set to <code>true</code>
* then the indirect, iterative (and presumably more accurate) method is used to
* solve a transcendental equation for the phase and energy changes through the
* gap.
* </p>
*
* @author Christopher K. Allen
* @since November 22, 2005
* @version Nov 23, 2014
* <br/>Jan 16, 2015
* <br/>July 29, 2015
*/
public class SpectrumMapRfGap extends ThinElement implements IRfGap, IRfCavityCell {
/*
* Global Constants
*/
/**
* the string type identifier for all SpectrumMapRfGap objects
*/
public final static String s_strType = "SpectrumMapRfGap"; //$NON-NLS-1$
// DataAdaptor Data Tags
/**
* Parameters for XAL MODEL LATTICE dtd
*/
public final static String s_strParamETL = "ETL"; //$NON-NLS-1$
/**
* Description of the Field
*/
public final static String s_strParamPhase = "Phase"; //$NON-NLS-1$
/**
* Description of the Field
*/
public final static String s_strParamFreq = "Frequency"; //$NON-NLS-1$
// Numeric Constants
/** The number 2π */
public final static double DBL_2PI = 2.0*Math.PI;
/** Error tolerance in the iterative search for phase change through RF gap */
private static final double DBL_PHASECALC_CNVERR = 1.0e-12;
/** Maximum number of allowable iterations in the phase change search */
private static final int INT_PHASECALC_MAXITER = 50;
/**
* TODO CKA - Remove
*
* Don't know what this is? CKA
*/
@Deprecated
public static double COEFF_X = 1.0;
/**
* TODO CKA - Remove
*
* Don't know what this is? CKA
*/
@Deprecated
public static double COEFF_Y = 1.0;
/*
* Internal Data Structures
*/
/**
* Enumeration specifying the method of phase and energy gain calculation.
* The gap may be switched between the given simulation modes to achieve
* differing objectives. Specially we can simulate a "perfect" gap in
* order to facilitate design calculations, or simulate various degrees
* of deviation from design parameters to observe operating effects.
*
*
* @author Christopher K. Allen
* @since Feb 5, 2015
*/
public enum PHASECALC {
/**
* Design Phase: Use gap design phase and design transit time factor
* to compute energy.
*/
DESIGN,
/**
* Dynamic Phase: Use <b>probe</b> phase with cavity spatial fields
* (i.e., mode structure) and gap offset for phase,
* design transit time factor <i>T</i>(β) for energy calculation.
*/
DYNPHASE,
/**
* Dynamic Energy: Use dynamic phase plus iterative algorithm for energy including
* both <i>S</i>(β) and <i>T</i>(β) transit time factors accounting for
* gap offset.
*/
// DYNENERGY is the mode this element uses by default (see the
// enmPhsCalcMth field initializer).
DYNENERGY;
}
/**
* Represents the longitudinal conjugate variables of
* phase and energy.
*
* @author Christopher K. Allen
* @since Nov 28, 2014
*/
private class EnergyVariables {
/** particle phase */
public double phi;
/** particle energy */
public double W;
/** Zero argument constructor */
@SuppressWarnings("unused")
public EnergyVariables() {
this(0.0, 0.0);
}
/** Initializing Constructor */
public EnergyVariables(double phi, double W) {
this.phi = phi;
this.W = W;
}
/**
* @see java.lang.Object#toString()
*
* @since Feb 5, 2015 by Christopher K. Allen
*/
@Override
public String toString() {
return "(phi,W)=(" + phi + ", " + W + ')';
}
}
/*
* Global Attributes
*/
/*
* Local Attributes
*/
// Operating Parameters
/**
* ETL product of gap
*/
private double m_dblETL = 0.0;
/**
* phase delay of gap w.r.t. the synchronous particle
*/
private double m_dblPhase = 0.0;
/**
* the on axis accelerating field (V)
*/
private double dblFieldE0 = 0.;
/**
* operating frequency of the gap
*/
private double m_dblFreq = 0.0;
// Geometric Properties
/**
* the separation of the gap center from the cell center (m)
*/
private double gapOffset = 0.;
/**
* <del>the accelerating cell length</del> No, this is the effective length of the gap
*/
private double dblGapLength = 0.;
/**
* flag indicating that this gap is in the leading cell of an RF cavity
*/
private boolean bolStartCell = false;
/**
* flag indicating that this gap is in the end cell of an RF cavity
*/
private boolean bolEndCell = false;
// /**
// * fit of the TTF vs. beta
// */
// private RealUnivariatePolynomial fitTTF;
// /**
// * fit of the TTF-prime vs. beta
// */
// private RealUnivariatePolynomial fitTTFPrime;
// /**
// * fit of the S factor vs. beta
// */
// private RealUnivariatePolynomial fitSTF;
// /**
// * fit of the S-prime vs. beta
// */
// private RealUnivariatePolynomial fitSTFPrime;
/** axial field spectrum defining this gap */
private AxialFieldSpectrum spcGapFlds;
/** RF gap acceleration model used to compute phase jump and energy gain */
private AcceleratingRfGap gapAcclMdl;
// Computation Properties
/**
* phase and energy computation method
*/
private PHASECALC enmPhsCalcMth = PHASECALC.DYNENERGY;
// Parent RF Cavity Properties
/**
* = 0 if the gap is part of a 0 mode cavity structure (e.g. DTL),
* = 1/2 if the gap is part of a pi/2 mode cavity structure
* = 1 if the gap is part of a pi mode cavity (e.g. Super-conducting)
*/
private double dblCavModeConst = 0.;
/**
* The index of the cavity cell (within the parent cavity) containing this gap.
*/
private int indCell = 0;
/*
* Initialization
*/
/**
* Creates a new instance of SpectrumMapRfGap
*
*@param strId instance identifier of element
*@param dblETL field/transit time/length factor for gap (in <b>volts</b> )
*@param dblPhase operating phase of gap (in <b>radians</b> )
*@param dblFreq operating RF frequency of gap (in <b>Hertz</b> )
*/
public SpectrumMapRfGap(String strId, double dblETL, double dblPhase, double dblFreq) {
    super(s_strType, strId);
    // Delegate to the IRfGap setters rather than assigning fields directly.
    this.setETL(dblETL);
    this.setPhase(dblPhase);
    this.setFrequency(dblFreq);
}
/**
* JavaBean constructor - creates a new uninitialized instance of SpectrumMapRfGap <b>
* BE CAREFUL</b>
*/
public SpectrumMapRfGap() {
    // JavaBean path: operating parameters keep their zero defaults (see the
    // field initializers) until the corresponding setters are called.
    super(s_strType);
}
/*
* Attribute Query
*/
/**
* <del>Returns the cell length (m)</del>
*
* <p>
* <b>CKA</b> This method never returned the <em>cavity cell</em> length. It
* always returned the length of the gap within the cell. Fortunately everywhere
* this method was used it was used in that context. Thus I have changed the name
* from <code>getCellLength</code> to <code>getGapLength</code>. I have also renamed
* the corresponding class variable.
* </p>
* <p>
* Of course the gap is being modeled as a thin element and has no length proper.
* The length <i>L</i> given here is simply the value that produces the appropriate
* acceleration potential <i>V</i><sub>0</sub> = <i>E</i><sub>0</sub>L when using
* a hard-edge model.
* </p>
* <p>
* <h4>CKA NOTES:</h4>
* · I believe this is the length of the overall gap cell
* structure, not just the gap itself.
* <br/>
* · Specifically, it is the distance from one gap center
* to the next in an accelerating structure.
* <br/>
* · I'm not sure if the electric field maximum is averaged over this
* quantity, I suppose it should be.
* </p>
*
* @return The dblGapLength value
*
* @version Jan 15, 2015
*/
public double getGapLength() {
    // Effective (hard-edge model) gap length in meters; see the method
    // Javadoc above for how this differs from a true cell length.
    return dblGapLength;
}
/**
* Return the displacement of the "effective gap" (i.e., the gap model)
* from the true center of the actual RF gap center. The true center
* would be the value provided to the SMF accelerator hierarchy.
*
* @return the difference between the RF gap true center and effective center (in meters)
*
* @author Christopher K. Allen
* @since Nov 17, 2014
*/
public double getGapOffset() {
    // Displacement (m) of the modeled "effective gap" from the true RF gap center.
    return this.gapOffset;
}
/*
* Operations
*/
/**
* Compute the wavelength of the RF.
*
* @return RF wavelength in <b>meters</b>
*/
/**
 * Compute the wavelength of the RF.
 *
 * @return RF wavelength in <b>meters</b>
 */
public double wavelengthRF() {
    // lambda = c/f: speed of light divided by the gap's RF frequency
    return IElement.LightSpeed / this.getFrequency();
}
/**
* Compute and return the mid-gap normalized velocity for the
* given probe.
*
* <p>
* <h4>NOTE:</h4>
* - Because of the state-dependent nature of the energy calculations
* (this needs to be fixed), this function will only work correctly
* if the function energyGain() is consistent.
* <br/>
* - CKA: I believe this has been fixed.
* </p>
*
* @param probe probe containing energy information
*
* @return average or "mid-gap" velocity in units of <b>c</b>
*
* @see SpectrumMapRfGap#energyGain(IProbe)
*/
/**
 * Compute and return the mid-gap normalized velocity for the given probe.
 * The midpoint energy comes from the gap phase/energy gain calculation and
 * is converted to beta = v/c using the probe's rest energy.
 *
 * @param probe probe containing energy information
 * @return average or "mid-gap" velocity in units of <b>c</b>
 */
public double compMidGapBeta(IProbe probe) {
    // Longitudinal variables (phase, energy) at the gap midpoint
    EnergyVector vecMidGap = this.compGapPhaseAndEnergyGain(probe);
    double dblRestEnergy = probe.getSpeciesRestEnergy();
    // Convert midpoint kinetic energy to normalized velocity
    return RelativisticParameterConverter.computeBetaFromEnergies(vecMidGap.getEnergy(), dblRestEnergy);
}
// /**
// * a method that is called once by transferMatrix to calculate the energy
// * gain. This prevents energy gain calculation from being repeated many times.
// * Importantly it provides a workaround from the eneryGain being calculated
// * after the upstreamExitPhase is updated elsewhere
// *
// *@param probe The Parameter
// *@return The Return Value
// */
// /*
// * public double compEnergyGain(IProbe probe) {
// * }
// */
    /**
     * <p>
     * Provided for legacy calculations. This method computes the phase of the
     * gap necessary to account for the cavity mode field distribution. For example,
     * if the phase of the probe at the gap is &phi;<sub>0</sub> and the mode amplitude factor
     * for cell/gap <i>n</i> is <i>A<sub>n</sub></i>, then the energy gain
     * &Delta;<i>W<sub>n</sub></i> is
     * <br/>
     * <br/>
     * &Delta;<i>W<sub>n</sub></i> = <i>qA<sub>n</sub>ETL</i> cos &phi;<sub>0</sub> .
     * <br/>
     * <br/>
     * Combining the <i>A<sub>n</sub></i> and cos &phi;<sub>0</sub> we get
     * <br/>
     * <br/>
     * &Delta;<i>W<sub>n</sub></i> = <i>qETL</i> cos (&phi;<sub>0</sub> + <i>nq</i>&pi;) ,
     * <br/>
     * <br/>
     * where <i>q</i> is the cavity mode constant
     * (see <code>{@link #getCavityModeConstant()}</code>). This method returns the cosine
     * argument of the later expression.
     * </p>
     *
     * @param probe probe being propagated through gap
     *
     * @return the combined phase &phi;<sub>0</sub> + <i>nq</i>&pi; combining the
     * propagation time of the probe and the spatial structure of the field
     *
     * @since Jan 15, 2015 by Christopher K. Allen
     */
    public double compEffectivePhaseAtGap(IProbe probe) {
        // cell index n and mode constant q give the cavity phase term n*q*pi
        final int n = this.getCavityCellIndex();
        final double q = this.getCavityModeConstant();
        final double phi_cav = n*q*Math.PI;
        // probe phase at the gap's electrical entrance
        final double phi_prb = this.compGapEntrancePhase(probe);
        // NOTE(review): '%' binds tighter than '+', so this evaluates as
        // phi_prb + (phi_cav % DBL_2PI) — only the cavity term is wrapped
        // modulo 2*pi. If the intent was to wrap the combined phase,
        // parentheses are needed; confirm. (The distinction disappears under
        // cos(), which is 2*pi-periodic.)
        final double phi_eff = phi_prb + phi_cav % DBL_2PI;
        return phi_eff;
    }
/**
* <p>
* Get the transverse focusing constant for a particular probe. The focusing
* constant is used in the construction of the transfer matrix for the RF gap.
* A gap provides longitudinal focusing and transverse defocusing as well as a
* gain in beam energy. This focusing constant describes the effect in the
* transverse direction, which is defocusing when the gap is accelerator and,
* therefore, typically negative.
* </p>
* <p>
* The value represents the thin lens focusing constant for an ideal RF gap
* (this is the inverse of the focal length). To compute the focusing action
* for the lens we must include beam energy, which is changing through the
* gap. We use the value of beta for which the beam has received half the
* total energy gain.
* </p>
*
*@param probe beam energy and particle charge are taken from the probe
*@return (de)focusing constant (<b>in radians/meter</b> )
*/
public double compTransFocusing(IProbe probe) {
// TODO - this is the full energy and phase gain
EnergyVariables varMidGap = this.compMidGapPhaseAndEnergy(probe);
double W_mid = varMidGap.W;
double phi_mid = varMidGap.phi;
double c = IElement.LightSpeed;
double Q = Math.abs(probe.getSpeciesCharge());
double Er = probe.getSpeciesRestEnergy();
double g_mid = W_mid / Er + 1.0;
double b_mid = RelativisticParameterConverter.computeBetaFromEnergies(W_mid, Er);
double bg_mid = b_mid * g_mid;
double E = this.getE0();
double f = this.getFrequency();
double k_mid = RelativisticParameterConverter.computeWavenumberFromBeta(b_mid, f);
double T_mid = this.spcGapFlds.Tz(k_mid);
// double T_mid = this.fitTTF.evaluateAt(b_mid);
double L = this.getLength();
double A = this.compCavModeFieldCoeff();
double ETL = E * T_mid * L;
double kr = (Math.PI * f/c) * Q * A * ETL * Math.sin(-phi_mid) / (Er * bg_mid * bg_mid);
// double kr = Math.PI*Q*ETL*f*Math.sin(-phi)/(q*c*Er*bgbar*bgbar);
return kr;
}
/**
* <p>
* Get the longitudinal focusing constant for a particular probe. The focusing
* constant is used in the construction of the transfer matrix for the RF gap.
* A gap provides longitudinal focusing and transverse defocusing as well as a
* gain in beam energy. This focusing constant describes the effect in the
* longitudinal direction, which is focusing and, therefore, positive.
* </p>
* <p>
* The value represents the thin lens focusing constant for an ideal RF gap
* (this is the inverse of the focal length). To compute the focusing action
* for the lens we must include beam energy, which is changing through the
* gap. We use the value of beta for which the beam has received half the
* total energy gain.
* </p>
*
*@param probe beam energy and particle charge are taken from the probe
*@return (de)focusing constant (<b>in radians/meter</b> )
*/
public double compLongFocusing(IProbe probe) {
EnergyVariables varMidGap = this.compMidGapPhaseAndEnergy(probe);
double Wbar = varMidGap.W;
double Er = probe.getSpeciesRestEnergy();
double gbar = Wbar / Er + 1.0;
double kr = this.compTransFocusing(probe);
double kz = -2.0 * kr * gbar * gbar;
return kz;
}
/*
* IRfGap Interface
*/
/**
* Return the ETL product of the gap, where E is the longitudinal electric
* field, T is the transit time factor, and L is the gap length.
*
*@return the ETL product of the gap (in <bold>volts</bold> ).
*/
@Override
public double getETL() {
return m_dblETL;
}
/**
* Return the RF phase delay of the gap with respect to the synchronous
* particle.
*
*@return phase delay w.r.t. synchronous particle (in <bold>radians</bold> ).
*/
@Override
public double getPhase() {
return m_dblPhase;
}
/**
* Get the operating frequency of the RF gap.
*
*@return frequency of RF gap (in <bold>Hertz</bold> )
*/
@Override
public double getFrequency() {
return m_dblFreq;
}
/**
* Set the ETL product of the RF gap where E is the longitudinal electric
* field of the gap, T is the transit time factor of the gap, L is the length
* of the gap. <p>
*
* The maximum energy gain from the gap is given by qETL where q is the charge
* (in coulombs) of the species particle.
*
*@param dblETL ETL product of gap (in <bold>volts</bold> ).
*/
@Override
public void setETL(double dblETL) {
m_dblETL = dblETL;
}
/**
* Set the phase delay of the RF in gap with respect to the synchronous
* particle. The actual energy gain from the gap is given by qETLcos(dblPhi)
* where dbkPhi is the phase delay.
*
*@param dblPhase phase delay of the RF w.r.t. synchronous particle (in
* <bold>radians</bold> ).
*/
@Override
public void setPhase(double dblPhase) {
m_dblPhase = dblPhase;
}
/**
* Set the operating frequency of the RF gap.
*
*@param dblFreq frequency of RF gap (in <bold>Hertz</bold> )
*/
@Override
public void setFrequency(double dblFreq) {
m_dblFreq = dblFreq;
}
/**
* Set the on accelerating field E - the on axis field (V/m)
*
* @param E The new E0 value
*/
@Override
public void setE0(double E) {
dblFieldE0 = E;
this.resetGapPotential();
}
/**
* Get the on accelerating field (V/m)
*
*@return The e0 value
*/
@Override
public double getE0() {
return dblFieldE0;
}
/*
* IRfCavityCell Interface
*/
/*
* Attribute Query
*/
/**
* return whether this gap is the initial gap of a cavity
*
*@return The firstGap value
*/
@Override
public boolean isFirstGap() {
// boolean bolInitialGap = this.getCavityCellIndex() == 0;
// return bolInitialGap;
return bolStartCell;
}
    /**
     * Sets the (zero-origin) index of the cavity cell containing this gap.
     *
     * @param indCell index of the cell within the RF cavity (origin 0)
     *
     * @see xal.model.elem.sync.IRfCavityCell#setCavityCellIndex(int)
     *
     * @since Jan 8, 2015 by Christopher K. Allen
     */
    @Override
    public void setCavityCellIndex(int indCell) {
        this.indCell = indCell;
    }
    /**
     * Sets the cavity structure mode constant <i>q</i> for the cavity containing
     * this gap (see {@link #getCavityModeConstant()} for its definition).
     *
     * @param dblCavModeConst the cavity mode constant <i>q</i>
     *
     * @see xal.model.elem.sync.IRfCavityCell#setCavityModeConstant(double)
     *
     * @since Jan 8, 2015 by Christopher K. Allen
     */
    @Override
    public void setCavityModeConstant(double dblCavModeConst) {
        this.dblCavModeConst = dblCavModeConst;
    }
    /**
     * Returns the (zero-origin) index of the cavity cell containing this gap.
     *
     * @return index of the cell within the RF cavity (origin 0)
     *
     * @see xal.model.elem.sync.IRfCavityCell#getCavityCellIndex()
     *
     * @since Jan 8, 2015 by Christopher K. Allen
     */
    @Override
    public int getCavityCellIndex() {
        return this.indCell;
    }
    /**
     * <p>
     * Returns the structure mode <b>number</b> <i>q</i> for the cavity in which this
     * gap belongs. Here the structure mode number is defined in terms of
     * the fractional phase advance between cells, with respect to &pi;.
     * To make this explicit
     * <br/>
     * <br/>
     * &nbsp; &nbsp; <i>q</i> = 0 &nbsp; &rArr; &nbsp; 0 mode
     * <br/>
     * &nbsp; &nbsp; <i>q</i> = 1/2 &rArr; &pi;/2 mode
     * <br/>
     * &nbsp; &nbsp; <i>q</i> = 1 &nbsp; &rArr; &pi; mode
     * <br/>
     * <br/>
     * Thus, a cavity mode constant of <i>q</i> = 1/2 indicates a &pi;/2
     * phase advance between adjacent cells and a corresponding cell amplitude
     * function <i>A<sub>n</sub></i> of
     * <br/>
     * <br/>
     * &nbsp; &nbsp; <i>A<sub>n</sub></i> = cos(<i>nq</i>&pi;)
     * <br/>
     * <br/>
     * where <i>n</i> is the index of the cell within the coupled cavity.
     * </p>
     *
     * @return the cavity mode constant for the cell containing this gap
     *
     * @see <i>RF Linear Accelerators</i>, Thomas P. Wangler (Wiley, 2008).
     *
     * @author Christopher K. Allen
     * @since Nov 20, 2014
     */
    public double getCavityModeConstant() {
        // simple accessor; see javadoc for the meaning of q
        return this.dblCavModeConst;
    }
    /**
     * Returns flag indicating whether or not this gap is in the initial or terminal cell
     * in a string of cells within an RF cavity.
     *
     * @return <code>true</code> if this gap is in a cavity cell at either end of a cavity cell bank,
     * <code>false</code> otherwise
     *
     * @since Jan 23, 2015 by Christopher K. Allen
     */
    @Override
    public boolean isEndCell() {
        // flag is initialized from the SMF hardware node in initializeFrom()
        return this.bolEndCell;
    }
    /**
     * Returns flag indicating whether this gap lies in the first cell of its
     * RF cavity.
     *
     * @return <code>true</code> if this gap is in the cavity's first cell
     *
     * @see xal.model.elem.sync.IRfCavityCell#isFirstCell()
     *
     * @since Jan 23, 2015 by Christopher K. Allen
     */
    @Override
    public boolean isFirstCell() {
        // same flag backs isFirstGap()
        return this.bolStartCell;
    }
/*
* IElement Interface
*/
/**
* Returns the time taken for the probe to propagate through element.
* <br/>
* <br/>
* TODO Need to correct this after computing (Δφ,Δ<i>W</i>)
*
* @param probe propagating probe
*
* @return The time taken to propagate through gap including phase shift δφ
* and any gap offset Δ<i>l</i> (asymmetric drifting at initial and final energies)
*/
@Override
public double elapsedTime(IProbe probe) {
// Get the phase jump across the gap (if any)
EnergyVector varGain = this.compGapPhaseAndEnergyGain(probe);
double d_phi = varGain.getPhase();
// Compute the time necessary for a smooth propagation through that phase jump
double f = this.getFrequency();
double w = DBL_2PI * f;
double dT = d_phi/w;
return dT;
}
// /** the interface method to provide the energy gain.
// * since this calculation has gotten complicated it is done
// * in the TransferMap method and the answer is returned here. */
/**
* Compute the energy gain of the RF gap for a probe including the effects of
* calculating the phase advance.
*
* @param probe uses the particle species charge
*
* @return energy gain for this probe (<b>in electron-volts</b> )
*/
@Override
public double energyGain(IProbe probe) {
double dW = this.compGapPhaseAndEnergyGain(probe).getEnergy();
// System.out.println("SpectrumMapRfGap#energyGain() - " + this.getId() + " index=" + this.indCell + ", dW = " + dW);
return dW;
}
/**
* Compute and return the longitudinal phase advance (w.r.t. the RF) for
* the given probe while propagating through this element.
*
* @see xal.model.elem.ThinElement#longitudinalPhaseAdvance(xal.model.IProbe)
*
* @author Christopher K. Allen
* @since Nov 23, 2014
*/
@Override
public double longitudinalPhaseAdvance(IProbe probe) {
// We trick the algorithm into resetting the probe's phase to the phase
// of this gap, which is the klystron phase of this cavity
if ( this.isFirstGap() ) {
double phi0 = this.getPhase();
double phi = probe.getLongitinalPhase();
double dphi = this.compGapPhaseAndEnergyGain(probe).getPhase();
double phi_reset = -phi + phi0 + dphi;
return phi_reset;
// We're just a plain ole gap, advance the probe phase by the phase gain
} else {
double dphi = this.compGapPhaseAndEnergyGain(probe).getPhase();
return dphi;
}
}
// /**
// * Here we need to inform the algorithm (i.e., the
// * <code>Tracker</code> object) of the gap exit time for use in the
// * phase advance calculation of the next RF gap element.
// *
// * @see xal.model.elem.Element#propagate(xal.model.IProbe)
// *
// * @author Christopher K. Allen
// * @since Nov 24, 2014
// */
// @Override
// public void propagate(IProbe probe) throws ModelException {
// super.propagate(probe);
// IAlgorithm alg = probe.getAlgorithm();
// if (alg instanceof Tracker) {
// Tracker tracker = (Tracker)alg;
// tracker.setRfGapExitTime();
/**
* Compute the transfer map for an ideal RF gap.
*
* @param probe compute transfer map using parameters from this probe
*
* @return transfer map for the probe
*
* @exception ModelException this should not occur
*/
@Override
protected PhaseMap transferMap(IProbe probe) throws ModelException {
// System.out.println("This is " + this.getId());
// System.out.println("dblFieldE0 is " + this.getE0());
// System.out.println("ETL is " + this.getETL());
// System.out.println("");
// Get probe parameters at initial energy
double Er = probe.getSpeciesRestEnergy();
double Wi = probe.getKineticEnergy();
double bi = probe.getBeta();
double gi = probe.getGamma();
// Determine the current energy gain and focusing constants for the gap
// the following section is to calculate the phase of the beam at each gap, rather than use hardwired phases.
// update the energy gain first:
// if(probe.getAlgorithm().useRfGapPhaseCalculation()) {
// compEnergyGain(probe);
// } else {
// simpleEnergyGain(probe);
double dW = this.compGapPhaseAndEnergyGain(probe).getEnergy();
double kz = this.compLongFocusing(probe);
double kt = this.compTransFocusing(probe);
// Compute final energy parameters
double Wf = Wi + dW;
double gf = Wf / Er + 1.0;
double bf = Math.sqrt(1.0 - 1.0 / (gf * gf));
// Compute average energy parameters
// double Wb = (Wf + Wi) / 2.0;
// double gb = Wb / Er + 1.0;
// double bg = Math.sqrt(1.0 - 1.0/(gb*gb));
// Compute component block matrices then full transfer matrix
double arrTranX[][] = new double[][]{{1.0, 0.0}, {kt*COEFF_X / (bf * gf), bi * gi / (bf * gf)}};
double arrTranY[][] = new double[][]{{1.0, 0.0}, {kt*COEFF_Y / (bf * gf), bi * gi / (bf * gf)}};
// CKA - Corrected 7/14/2010
// Additional factor gbar^2 in the longitudinal focusing term
// double arrLong[][] = new double[][]{{1.0, 0.0}, {(kz / (bf * gf)) * gb * gb / (gf * gf), gi * gi * gi * bi / (gf * gf * gf * bf)}};
double arrLong[][] = new double[][]{{1.0, 0.0}, { kz / (bf * gf * gf * gf), gi * gi * gi * bi / (gf * gf * gf * bf)}};
PhaseMatrix matPhi = new PhaseMatrix();
matPhi.setElem(6, 6, 1.0);
matPhi.setSubMatrix(0, 1, 0, 1, arrTranX);
matPhi.setSubMatrix(2, 3, 2, 3, arrTranY);
matPhi.setSubMatrix(4, 5, 4, 5, arrLong);
// Do the phase update if this is desired:
// do it here to resuse the bi, bf, etc. factors
// if(probe.getAlgorithm().useRfGapPhaseCalculation()) advancePhase(probe);
// PrintWriter os = new PrintWriter(System.out);
// matPhi.print(os);
// os.close();
return new PhaseMap(matPhi);
}
/*
* IComponent Interface
*/
/**
* Conversion method to be provided by the user
*
* <p>
* <h4>NOTES - CKA</h4>
* · The accelerating gap potential is set for a unit charge. It is later updated
* for whatever charge the probe carries when energy and phase calculations are made. See
* {@link #compGapPhaseAndEnergyGain(IProbe)} where {@link #gapAcclMdl} is modified.
* <br/>
* <br/>
* · Someday we really need to remove this dependency with SMF.
* </p>
*
* @param latticeElement the SMF node to convert
*
* @see #compGapPhaseAndEnergyGain(IProbe)
*/
@Override
public void initializeFrom(LatticeElement element) {
super.initializeFrom(element);
RfGap rfgap = (RfGap) element.getHardwareNode();
// Initialize the RF gap properties
this.bolStartCell = rfgap.isFirstGap();
this.bolEndCell = rfgap.isEndCell();
this.dblGapLength = rfgap.getGapLength();
this.gapOffset = rfgap.getGapOffset();
// Initialize the RF cavity properties
this.dblCavModeConst = rfgap.getStructureMode();
this.dblFieldE0 = rfgap.getGapDfltAmp() * 1.0e3; // the SMF object uses kV (stupid)
this.m_dblFreq = rfgap.getGapDfltFrequency() * 1.0e6; // the SMF object uses MHz (stupid)
// Create the defining axial field spectrum object
IRealFunction fitTTFPrime = rfgap.getTTFPrimeFit();
IRealFunction fitTTF = rfgap.getTTFFit();
IRealFunction fitSTFPrime = rfgap.getSPrimeFit();
IRealFunction fitSTF = rfgap.getSFit();
this.spcGapFlds = new AxialFieldSpectrum(this.m_dblFreq, this.gapOffset, fitTTF, fitTTFPrime, fitSTF, fitSTFPrime);
// Create the accelerating gap model
// The accelerating gap potential is set for a unit charge. It is later updated
// for whatever charge the probe carries when energy and phase calculations are made.
double E0 = this.getE0();
double A = this.compCavModeFieldCoeff();
double L = this.getGapLength();
double V0 = A * E0 * L; // This is for a unit charge
this.gapAcclMdl = new AcceleratingRfGap(this.m_dblFreq, V0, this.spcGapFlds);
}
/*
* Object Overrides
*/
/**
*
* @see xal.model.elem.Element#toString()
*
* @since Jan 22, 2015 by Christopher K. Allen
*/
@Override
public String toString() {
StringBuffer bufOut = new StringBuffer();
bufOut.append(super.toString());
bufOut.append(" Gap ETL product : " + this.getETL()); //$NON-NLS-1$
bufOut.append('\n');
bufOut.append(" Gap phase shift : " + this.getPhase()); //$NON-NLS-1$
bufOut.append('\n');
bufOut.append(" RF frequency : " + this.getFrequency()); //$NON-NLS-1$
bufOut.append('\n');
bufOut.append(" Axial field dblFieldE0 : " + this.getE0() );
bufOut.append('\n');
bufOut.append(" Gap offset : " + this.getGapOffset() );
bufOut.append('\n');
return bufOut.toString();
}
    /**
     * Dump current state and content to output stream.
     *
     * @param os output stream object
     */
    @Override
    public void print(PrintWriter os) {
        // emit the base-class state first, then this element's gap parameters
        super.print(os);
        os.println(" Gap ETL product : " + this.getETL()); //$NON-NLS-1$
        os.println(" Gap phase shift : " + this.getPhase()); //$NON-NLS-1$
        os.println(" RF frequency : " + this.getFrequency()); //$NON-NLS-1$
        os.println(" Axial field dblFieldE0 : " + this.getE0() );
    }
/*
* Support Methods
*/
/**
* <p>
* Computes and returns the electric field coefficient associated with the
* RF cavity cell containing this gap. Specifically, if this gap belongs to
* the <i>n<sup>th</sup></i> cell of the cavity (index origin 0) and the cavity
* operates in the <i>q</i>π-mode where <i>q</i> is the cavity mode constant,
* then the field coefficient <i>A<sub>n</sub></i> is defined
* <br/>
* <br/>
* <i>A<sub>n</sub></i> ≜ cos(<i>nq</i>π) ,
* <br/>
* <br/>
* so that the electric field <i>E<sub>n</sub></i>(<i>z</i>,<i>t</i>) at cell <i>n</i>
* is given by
* <br/>
* <br/>
* <i>E<sub>n</sub></i>(<i>z</i>,<i>t</i>) =
* <i>A<sub>n</sub></i> <i>E</i><sub>0</sub>(<i>z</i>)
* cos(ω<i>t</i> + φ<sub>0</sub>) ,
* <br/>
* <br/>
* where <i>E</i><sub>0</sub>(<i>z</i>) is the axial field profile of each cell.
* </p>
*
* @return the cavity field coefficient <i>A<sub>n</sub></i> for this gap
*
* @since Jan 12, 2015 by Christopher K. Allen
*/
private double compCavModeFieldCoeff() {
final int n = this.getCavityCellIndex();
final double q = this.getCavityModeConstant();
// double nkluge = Math.IEEEremainder(n, 9);
// final double A = Math.cos(nkluge*q*Math.PI);
final double A = Math.cos(n*q*Math.PI);
return A;
}
/**
* <p>
* Resets the total potential gain across the gap. This is the integral of the
* axial electric field <i>E<sub>z</sub></i>(<i>z</i>) through the entire gap
* region.
* </p>
* <p>
* This method needs to be called whenever the gap length or gap potential are
* changed. The effects is to change the magnitude of the spectral maps that define
* the gap fields.
* </p>
*
* @since Oct 16, 2015, Christopher K. Allen
*/
private void resetGapPotential() {
// Create the accelerating gap model
// The accelerating gap potential is set for a unit charge. It is later updated
// for whatever charge the probe carries when energy and phase calculations are made.
double E0 = this.getE0();
double A = this.compCavModeFieldCoeff();
double L = this.getGapLength();
double V0 = A * E0 * L; // This is for a unit charge
this.gapAcclMdl = new AcceleratingRfGap(this.m_dblFreq, V0, this.spcGapFlds);
}
/**
* <p>
* Computes and returns the phase at the position of the gap's
* <b>electrical entrance</b>.
* The longitudinal phase of the given probe is assumed to be at the
* gap's geometric entrance as it arrives, a correction is necessary if
* the geometric and electrical centers are offset.
* </p>
* <p>
* <h4>NOTE</h4>
* · It is essential that the probe has the correct phase.
* If this is the first
* gap then that phase must be the phase of the cavity, given by
* <code>{@link #getPhase()}</code>. Thus, we must set the
* probe phase to the phase of the cavity at these special gaps. (This is
* unfortunately necessary since we cannot implement RF cavity elements
* with the current lattice generator.) In the Element/Algorithm/Probe
* architecture elements do not modify probes. Thus, although it's a kluge
* no matter which way you do it, the probe phase should be reset by the
* algorithm.
* <br/>
* <br/>
* · To get the phase at the middle of the gap (again, electrical middle)
* you can call <code>{@link #compGapPhaseAndEnergyImpulses(IProbe)}</code> and add
* <b>half</b> that value to this method's returned value.
* </p>
*
* @param probe probe containing phase information
*
* @return the probe phase corrected as necessary for any gap offset
*
* @since Nov 26, 2014, Christopher K. Allen
*/
private double compGapEntrancePhase(IProbe probe) {
// Get the phase of the probe at the gap geometric center
double phi0 = this.isFirstGap() ? this.getPhase() : probe.getLongitinalPhase();
double bi = probe.getBeta();
double dl = this.getGapOffset();
double dphi = this.compDriftingPhaseAdvance(bi, dl);
double phi = phi0 + dphi;
// Correct the phase as needed for any difference from electrical center according
// to the simulation mode we are using
switch (this.enmPhsCalcMth) {
case DESIGN:
return phi0;
case DYNPHASE:
return phi;
case DYNENERGY:
return phi0;
default:
return phi0;
}
}
// /**
// * Computes and returns the longitudinal phase change δφ energy
// * gain Δ<i>W</i> through
// * the gap due to acceleration. Although this is a thin lens the longitudinal
// * phase must have a impulsive change δφ sympletically conjugate
// * to the change Δ<i>W</i> in longitudinal energy.
// *
// * @param probe probe propagating through gap, we use its phase and energy parameters
// *
// * @return the change in phase δφ of the
// * give probe through the gap
// *
// * @since Nov 28, 2014 @author Christopher K. Allen
// */
// private double compGapPhaseGain(final IProbe probe) {
// EnergyVariables cordGapEffects = this.compGapPhaseAndEnergyImpulses(probe);
// return cordGapEffects.phi;
// /**
// * Computes and returns the final particle velocity through the gap. That is,
// * we return the velocity of the particle after it has passed through the gap.
// *
// * @param probe contains the parameters for gap action
// *
// * @return the final probe particle velocity β<sub><i>f</i></sub> with
// * respect to the speed of light <i>c</i>
// *
// * @author Christopher K. Allen
// * @since Nov 19, 2014
// */
// private double compFinalBeta(final IProbe probe) {
// // Initial energy parameters
// double Er = probe.getSpeciesRestEnergy();
// double Wi = probe.getKineticEnergy();
// // The energy gain through the gap
// double dW = this.compGapEnergyGain(probe);
// // Final energy parameters
// double Wf = Wi + dW;
// double gf = Wf / Er + 1.0;
// double bf = Math.sqrt(1.0 - 1.0 / (gf * gf));
// return bf;
// /**
// * <p>
// * Routine to calculate the energy gain through the gap. If the
// * <code>useRfGapPhaseCalculation</code> flag in the probe's algorithm
// * is set to <code>true</code> the full iterative search for these
// * parameters is used. (This is done by deferring to
// * <code>{@link #compGapPhaseAndEnergyImpulses(IProbe)}</code>.) Otherwise
// * a simple evaluation of the Panofsky equation is used.
// * <p>
// * <h4>CKA NOTES: 11/17/2014</h4>
// * · This method was at the heart of some major architectural issues.
// * <br/>
// * · We have a state-dependent situation, the computed results being
// * dependent upon the state of <b>class</b> variables.
// * <br/>
// * · These class state variables should most likely be local properties
// * of the probe objects.
// * </p>
// *
// * @param probe probe propagating through gap, we use its phase and energy parameters
// *
// * @return the change in energy Δ<i>W</i> of the
// * give probe through the gap
// *
// */
// private double compGapEnergyGain(final IProbe probe) {
// // If we are not using the cavity RF phase model we just return the
// // results of the Panofsky equation
// // Maybe we have the useRfGapPhaseCalculation flag in the probe??!!
// if (probe.getAlgorithm().useRfGapPhaseCalculation() == false) {
// double Q = Math.abs( probe.getSpeciesCharge() );
// double ETL = this.getETL();
// double phi0 = this.getPhase();
// double dW = Q * ETL * Math.cos( phi0);
// return dW;
// // Else we do the full energy gain calculation
// EnergyVariables crdGapEffects = this.compGapPhaseAndEnergyImpulses(probe);
// return crdGapEffects.W;
// /**
// * <p>
// * Computes and returns the total propagation time for the probe from inception
// * until the end of the entire accelerating gap cell. That is, the returned value
// * would be the total time <b>for the probe</b>, not the time interval for its
// * propagation through the accelerating cell.
// * </p>
// * <p>
// * <h4>CKA NOTES:</h4>
// * · The gap phase change Δφ is included in the exit time
// * calculation via the call to <code>{@link #elapsedTime(IProbe)}</code>
// * where it is calculated internally.
// * </p>
// *
// * @param probe probe containing parameters for gap computations
// *
// * @return probe time at exit of full accelerating cell
// *
// * @author Christopher K. Allen
// * @since Nov 19, 2014
// */
// @Deprecated
// private double compCellExitTime(final IProbe probe) {
// double c = IElement.LightSpeed; // speed of light
// double ti = probe.getTime(); // probe time at cell entrance
// double bf = this.compFinalBeta(probe); // final probe velocity after gap
// double dt = this.elapsedTime(probe); // propagation time through gap
// double dL = this.getCellLength()/2.0; // half length of total accelerating cell
// double tf = ti + dt + dL/(bf*c);
// return tf;
// /**
// * Computes the phase shift due to probe drift time between gaps
// * in a coupled-cavity tank. The returned result considers both the drift
// * time of the probe between gaps and the mode number of the cavity
// * fields.
// *
// * @param probe drifting probe
// *
// * @return the phase shift occurring between the previous gap and this gap
// *
// * @since Nov 28, 2014 @author Christopher K. Allen
// *
// * @deprecated This is bullshit
// */
// @Deprecated
// private double compCoupledCavityPhaseShift(IProbe probe) {
// // Compute the drifting time since the last gap
// ProbeState<?> stateLastGap = probe.lookupLastStateFor( this.getType() );
// double t_prev = stateLastGap.getTime(); // the exit time of the previous gap
// double t_curr = probe.getTime(); // the entrance time of this gap
// double t_drift = t_curr - t_prev; // drifting time between two gaps
// // Compute the RF frequency of the operating mode
// double f_0 = this.getFrequency();
// double q_mode = this.getCavityModeConstant();
// double f_mode = 2.0*q_mode*f_0;
// // Compute the number of RF cycles taken between RF gaps
// // and then the resulting phase shift
// int nCycles = (int) Math.round(f_mode * t_drift);
// double D_phi = nCycles*Math.PI;
// return D_phi;
// // TODO - Figure this out??
// // applied after the above is computed and then added on the next call
// // STRUCTURE_PHASE = STRUCTURE_PHASE - (2 - dblCavModeConst) * Math.PI;
// // STRUCTURE_PHASE = Math.IEEEremainder(STRUCTURE_PHASE , (2. * Math.PI));
/**
* <p>
* Returns the advance in RF phase for a particle drifting at velocity β for
* a distance <i>l</i>.
* The value is simply the time of flight Δ<i>t</i> needed to propagate
* the distance <i>l</i> times the angular frequency ω ≜ 2π<i>f</i>
* of the cavity (<i>f</i> is the fundamental cavity frequency).
* Specifically,
* <br/>
* <br/>
* Δφ = ωΔ<i>t</i> = ω<i>l</i>/β<i>c</i>
* = (2π/βλ)l = <i>kl</i>
* <br/>
* <br/>
* where Δφ is the change in phase due to the offset, λ is the
* wavelength of the RF, and <i>k</i> is the wave number of the particle.
*
* @param probe probe object arriving at the gap
*
* @return the value Δφ from a particle drifting at
* velocity β for distance <i>l</i>
*
* @author Christopher K. Allen
* @since Nov 19, 2014
*/
private double compDriftingPhaseAdvance(double beta, double len) {
double c = IElement.LightSpeed; // speed of light
double f = this.getFrequency();
//the correction for the gap offset needed
double dt = len / (beta * c);
double dphi = DBL_2PI * f * dt;
return dphi;
}
/**
* Computes and returns the phase change due to any gap offset between the
* geometric center and the electrical center. The phase change Δφ
* is given by
* <br/>
* <br/>
* Δφ = ω Δ<i>z</i>(1/β<sub><i>i</sub>c</i> - 1/β<sub><i>f</sub>c</i>)
* = (<i>k<sub>i</sub></i> - <i>k<sub>f</sub></i>)Δ<i>z</i> ,
* <br/>
* <br/>
* where ω is the angular frequency of the RF, Δ<i>z</i> is the gap offset,
* β<sub><i>i</i></sub> is the pre-gap velocity, β<sub><i>f</i></sub>
* is the post-gap velocity, <i>k<sub>i</sub></i> is the pre-gap wave number, and <i>k<sub>f</sub></i>
* is the post-gap wave number. Note that the phase change Δφ can be negative
* if the offset <i>l</i> is toward the downstream direction.
*
* @param beta_i the pre-gap velocity β<sub><i>i</i></sub>
* @param beta_f the post-gap velocity β<sub><i>f</i></sub>
*
* @return
*
* @since Jan 13, 2015 by Christopher K. Allen
*/
private double compGapOffsetPhaseChange(double beta_i, double beta_f) {
// TODO This isn't right. Think about the offset and the total gap length
double dl = this.getGapOffset();
double dphi_i = this.compDriftingPhaseAdvance(beta_i, dl);
double dphi_f = this.compDriftingPhaseAdvance(beta_f, dl);
double dphi = dphi_i - dphi_f;
return dphi;
}
/**
* Computes the phase φ<sub>mid</sub> and energy <i>W</i><sub>mid</sub> of the
* probe at the middle of the gap. The returned values are given by
* <br/>
* <br/>
* φ<sub>mid</sub> = φ<sub>0</sub> + δφ/2,
* <br/>
* <i>W</i><sub>mid</sub> = <i>W</i><sub>0</sub> + Δ<i>W</i>/2
* <br/>
* <br/>
* where φ<sub>0</sub> is the probe phase at the gap entrance and
* <i>W</i><sub>0</sub> is the probe energy at the gap entrance;
*
* @param probe probe propagating through the gap
*
* @return mid-gap longitudinal phase coordinates (φ<sub>mid</sub>,<i>W</i><sub>mid</sub>)
*
* @since Jan 13, 2015 by Christopher K. Allen
*
* @see #compGapPhaseAndEnergyImpulses(IProbe)
*/
private EnergyVariables compMidGapPhaseAndEnergy(IProbe probe) {
// Get the phase and energy at the entrance of the gap
double W_i = probe.getKineticEnergy();
double phi_i = this.compGapEntrancePhase(probe);
EnergyVector varDelVals = this.compGapPhaseAndEnergyGain(probe);
double d_W = varDelVals.getEnergy();
double d_phi = varDelVals.getPhase();
// Create the longitudinal phase variable object to return
double W = W_i + d_W/2.0;
double phi = phi_i + d_phi/2.0;
EnergyVariables varMidGap = new EnergyVariables(phi, W);
return varMidGap;
}
/**
 * <p>
 * Computes and returns the change in the probe's phase and energy due to the RF gap.
 * The action here is primarily that of delegation.  If the <code>useRfGapPhaseCalculation</code>
 * flag of the probe's algorithm object is set to <code>false</code>, then the
 * phase and energy change is computed with method
 * <code>{@link #compGapPhaseAndEnergyGainDirect(IProbe)}</code>.  If the flag is
 * <code>true</code> then the calculation is dispatched according to the phase
 * calculation mode <code>enmPhsCalcMth</code>:
 * <code>DESIGN</code> &rarr; <code>{@link #compGapPhaseAndEnergyGainDesign(IProbe)}</code>,
 * <code>DYNPHASE</code> &rarr; <code>{@link #compGapPhaseAndEnergyGainDirect(IProbe)}</code>,
 * <code>DYNENERGY</code> &rarr; <code>{@link #compGapPhaseAndEnergyGainIndirect(IProbe)}</code>,
 * with the design calculation as the default.
 * </p>
 * <p>
 * Also in the case where
 * <code>useRfGapPhaseCalculation</code> = <code>false</code> the phase change
 * due to the gap offset δφ is added to the returned phase gain Δφ.
 * Specifically, we compute δφ =
 * <code>{@link #compGapOffsetPhaseChange(double, double)}</code>
 * then add it to Δφ. This action is actually done in the method
 * <code>{@link #compGapPhaseAndEnergyGainDirect(IProbe)}</code> which is
 * called by this method for the aforementioned case.
 * </p>
 *
 * @param probe the probe experiencing the change in energy and phase
 *
 * @return phase and energy gain for the given probe imparted by this gap
 *
 * @throws AcceleratingRfGap.NoConvergenceException
 *         the fixed-point iteration used by the indirect (DYNENERGY)
 *         calculation failed to converge
 *
 * @since Jan 14, 2015 by Christopher K. Allen
 */
private EnergyVector compGapPhaseAndEnergyGain(IProbe probe) throws AcceleratingRfGap.NoConvergenceException {
    // If the algorithm doesn't want to use the dynamic phase and energy
    //  gain calculations then we fall back to the direct (Panofsky) calculation
    //  and return
    if (probe.getAlgorithm().useRfGapPhaseCalculation() == false) {
        EnergyVector varGain = this.compGapPhaseAndEnergyGainDirect(probe);
        return varGain;
    }
    // Switch on the type of calculation mode we are using
    // Maybe we have the useRfGapPhaseCalculation flag in the probe??!!
    switch (this.enmPhsCalcMth) {
    case DESIGN:
        return this.compGapPhaseAndEnergyGainDesign(probe);
    case DYNPHASE:
        return this.compGapPhaseAndEnergyGainDirect(probe);
    case DYNENERGY:
        return this.compGapPhaseAndEnergyGainIndirect(probe);
    default:
        return this.compGapPhaseAndEnergyGainDesign(probe);
    }
}
/**
 * Simple "design code" computation of the gap phase and energy gain.  The
 * energy gain is the Panofsky equation evaluated at the design phase,
 * ignoring the cavity mode structure; the phase gain is identically zero.
 *
 * @param probe we are computing the gap effects for this probe
 *
 * @return the energy increment due to the gap using the design model
 *         (the phase increment is always zero)
 *
 * @since Jan 14, 2015 by Christopher K. Allen
 */
private EnergyVector compGapPhaseAndEnergyGainDesign(IProbe probe) {
    // Panofsky equation at the design phase:  dW = q (E0 T L) cos(phi0)
    final double q          = Math.abs(probe.getSpeciesCharge());
    final double dblEtl     = this.getETL();
    final double dblPhsDsgn = this.getPhase();

    final double dblDelWGain = q * dblEtl * Math.cos(dblPhsDsgn);

    // No phase impulse in the design model
    return new EnergyVector(0.0, dblDelWGain);
}
/**
 * Returns the gap gains from the Panofsky equation evaluated at the probe
 * phase at the electrical center of the gap (no corrective impulse).
 * The electrical center phase comes from
 * <code>{@link #compGapEntrancePhase(IProbe)}</code>, which accounts for
 * propagation to the gap electrical center.  The Panofsky equation then
 * provides the energy gain, while
 * <code>{@link #compGapOffsetPhaseChange(double, double)}</code> provides
 * the phase change.
 *
 * @param probe we are computing the gap effects for this probe
 *
 * @return the increments in phase and energy due to gap using the simple model
 *
 * @since Jan 14, 2015 by Christopher K. Allen
 */
private EnergyVector compGapPhaseAndEnergyGainDirect(IProbe probe) {
    // Energy gain, uncorrected: Panofsky equation at the electrical-center
    //  phase, scaled by the cavity mode field coefficient
    final double q           = Math.abs(probe.getSpeciesCharge());
    final double dblEtl      = this.getETL();
    final double dblFldCoeff = this.compCavModeFieldCoeff();
    final double dblPhsEntr  = this.compGapEntrancePhase(probe);

    final double dblDelWGain = q * dblFldCoeff * dblEtl * Math.cos(dblPhsEntr);

    // Phase change due to any offset of the gap electrical center from the
    //  geometric center, computed from the pre- and post-gap velocities.
    //  We only do this when the S() transit time factor was not used to
    //  compute the phase and energy gains.
    final double dblRestEngy = probe.getSpeciesRestEnergy();
    final double dblEngyExit = probe.getKineticEnergy() + dblDelWGain;

    final double dblBetaEntr = probe.getBeta();
    final double dblBetaExit = RelativisticParameterConverter.computeBetaFromEnergies(dblEngyExit, dblRestEngy);

    final double dblDelPhsGain = this.compGapOffsetPhaseChange(dblBetaEntr, dblBetaExit);

    return new EnergyVector(dblDelPhsGain, dblDelWGain);
}
// TODO Remove this after debugging
// Debug latch: restricts the verbose console type-out inside
// compGapPhaseAndEnergyGainIndirect(IProbe) to the first invocation only.
// Remove together with those debug blocks.
private boolean bolMethodCalled = false;
/**
* <p>
 * Computes and returns the longitudinal phase change δφ and energy
 * gain Δ<i>W</i> through
 * the gap due to acceleration. Although this is a thin lens, the longitudinal
 * phase must have an impulsive change symplectically conjugate to the change
 * Δ<i>W</i> in longitudinal energy.
* </p>
* <p>
 * The calculation is done using a fixed-point search on formulas for the gap
 * phase change δφ and energy gain Δ<i>W</i> which are taken
 * from Lapostolle and Weiss's <i>Formulae for Linear Accelerators</i>,
 * CERN PS-2000-01.
* </p>
* <p>
* <h4>CKA NOTES</h4>
* · The strategy is to compute the mid-gap velocity β<i><sub>mid</sub></i>
* and phase change δφ<i><sub>mid</sub></i>. All other gap parameters can be computed
* from these values.
* <br/>
* <br/>
* · We start with the phase of the probe at the entrance of the gap, call
* it φ<sub>0</sub>, see <code>{@link #compGapEntrancePhase(IProbe)}</code>.
* Then all the initial parameters for the loop,
* i.e., Δ<i>W</i><sub>0</sub>, β<sub>0</sub>, <i>T</i>(β<sub>0</sub>),
* <i>S</i>(β<sub>0</sub>), etc., are computed from that value.
* <br/>
* <br/>
* · The values are computed with the (maybe naive) assumption that the mid-gap
* phase change δφ<i><sub>mid</sub></i> is equal to half the total
* gap phase change δφ, or δφ<i><sub>mid</sub></i> = δφ/2.
* <br/>
* <br/>
* · We also assume (perhaps more accurately) that the mid-gap energy gain
* Δ<i>W</i><sub><i>mid</i></sub> is half the total energy gain Δ<i>W</i>,
* or Δ<i>W</i><sub><i>mid</i></sub> = Δ<i>W</i>/2.
* <br/>
* <br/>
* · I am avoiding the use of <code>fitTTFprime</code> and <code>fitSTFPrime</code>
* because I do not know what values they represent. That is, are they
* <i>dT</i>(β)</i>/<i>d</i>β or <i>dT</i>(β)/<i>dk</i>?
* </p>
*
* @param probe probe propagating through gap, we use its phase and energy parameters
*
* @return the change in phase δφ and energy Δ <i>W</i> of the
* give probe through the gap
*
* @author Christopher K. Allen
* @since Nov 26, 2014
*/
private EnergyVector compGapPhaseAndEnergyGainIndirect(IProbe probe) throws AcceleratingRfGap.NoConvergenceException {
    // Initial probe parameters: charge magnitude, rest energy, normalized
    //  entrance velocity, and entrance kinetic energy
    double Q = Math.abs(probe.getSpeciesCharge());
    double Er = probe.getSpeciesRestEnergy();
    double bi = probe.getBeta();
    double Wi = probe.getKineticEnergy();

    // IMPORTANT!!!!
    // TODO: We need to check that the probe is giving the correct phase here.
    //       The phase must be set with respect to the first gap
    double phi0 = this.compGapEntrancePhase(probe); // phase at gap "electrical" entrance

    // phase change from center correction factor for future time calculations
    // in PARMILA TTFPrime and SPrime are in [1/cm] units, we use [m]
    // CKA - I will try to eliminate these quantities because I cannot
    //       determine that they are correct
    // double ttf_prime = 0.01*this.fitTTFPrime.evaluateAt(bi);
    // double stf_prime = 0.01*this.fitSTFPrime.evaluateAt(bi);

    // Gap parameters: on-axis field, RF frequency, cavity mode field
    //  coefficient, and gap length (unused here except in qAEL)
    double E0 = this.getE0();
    double f = this.getFrequency();
    double A = this.compCavModeFieldCoeff();
    double L = this.getGapLength();
    double qAEL = Q * A * E0 * L;

    // TODO CKA - I BELIEVE T and S in the XDXF files are in centimeters!!!!
    //      That is they are dT(b)/dk and dS(b)/dk NEED TO DEAL WITH THIS!!!

    // CKA - Now I believe the objective is to compute the mid-gap wave number k_mid
    //  and phase change d_phi. All other gap parameters can be computed
    //  from these values.

    // Initialize the search: seed the fixed-point iteration with the
    //  entrance-plane phase and energy, then delegate the pre-gap half-gap
    //  gain computation to the accelerating-gap model.
    EnergyVector vecInitEnergy = new EnergyVector(phi0, Wi);
    EnergyVector vecPreGapGains;
    try {
        vecPreGapGains = this.gapAcclMdl.computePreGapGains(Er, vecInitEnergy);
    } catch (AcceleratingRfGap.NoConvergenceException e) {
        // Log then propagate - caller decides how to handle the failed search
        System.err.println("WARNING! SpectrumMapRfGap#compGapPhaseAndEnergyGain() did not converge for element " + this.getId());
        throw e;
    }

    // Mid-gap (half-gap) gains.
    // NOTE(review): both gains are scaled by the charge magnitude Q here;
    //  scaling the *phase* gain by Q mirrors the energy scaling - confirm
    //  this is intended for the phase coordinate.
    double dphi_mid = Q * vecPreGapGains.getPhase();
    double dW_mid = Q * vecPreGapGains.getEnergy();

    // TODO Remove type out
    // Debug block #1 (first invocation only): dumps the entrance-plane
    //  transit time factors and their derivatives for inspection.
    if (!this.bolMethodCalled) {
        double V0 = this.gapAcclMdl.getRfFieldPotential();
        double ki = DBL_2PI /(bi*IElement.LightSpeed/this.getFrequency());
        // double db = 0.01*bi;
        double dk = 0.01*ki;
        // double dT = (this.fitTTF.evaluateAt(bi + db) - ttf)/db;
        // double dS = (this.fitSTF.evaluateAt(bi + db) - stf)/db;
        double T = this.spcGapFlds.Tz(ki);
        double S = this.spcGapFlds.Sz(ki);
        double d_T = this.spcGapFlds.dTz(ki);
        double d_S = this.spcGapFlds.dSz(ki);
        // Finite-difference derivatives, for comparison with dTz/dSz
        double dT = (this.spcGapFlds.Tz(ki + dk) - T)/dk;
        double dS = (this.spcGapFlds.Sz(ki + dk) - S)/dk;
        phi0 = (180.0/Math.PI) * Math.IEEEremainder(phi0, 2.0*Math.PI); // convert to degrees
        System.out.println("SpectrumMapRfGap#compEnergyGainIndirect: " + this.getId());
        System.out.println("  Q*V0=" + Q * V0);
        System.out.println("  phi0=" + phi0 + ", cos(phi0)=" + Math.cos(phi0) + ", Acos(phi0)=" + A*Math.cos(phi0));
        System.out.println("  T(ki)=" + T + ", T'(ki)=" + d_T + ", S(ki)=" + S + ", S'(ki)=" + d_S);
        System.out.println("  dT/dk=" + d_T + ", dS/dk=" + d_S);
        System.out.println("  Numeric: T'(ki)=" + dT + ", S'(ki)=" + dS);
        System.out.println("  ki=" + ki);
    }

    // TODO Remove type out
    // Debug block #2 (first invocation only): dumps the mid-gap wave number
    //  and gains, then sets the latch so neither block runs again.
    if (!this.bolMethodCalled) {
        double b_mid = RelativisticParameterConverter.computeBetaFromEnergies(Wi + dW_mid, Er);
        double k_mid = DBL_2PI /(b_mid*IElement.LightSpeed/this.getFrequency());
        System.out.println("  k_mid=" + k_mid);
        System.out.println("  dphi_mid=" + (180.0/Math.PI)*dphi_mid + ", dW_mid=" + dW_mid + ", W_mid=" + Double.toString(Wi+dW_mid));
        System.out.println();
        this.bolMethodCalled = true;
    }

    // TODO - Temporary until we get the calculated for the post gap region installed
    // Total gap gains are approximated as twice the half-gap (mid-gap) gains.
    double dphi = 2.0 * dphi_mid;
    double dW = 2.0 * dW_mid;

    EnergyVector vecGains = new EnergyVector(dphi, dW);

    return vecGains;
}
}
|
package com.bailei.study.jzoffer.interview8;
/**
 * Finds the minimum element of a rotated sorted (non-decreasing) array,
 * e.g. {3,4,5,1,2} is a rotation of {1,2,3,4,5} and its minimum is 1.
 *
 * Fixes over the previous version:
 * - a non-rotated (already sorted) input returned the LAST element
 *   (e.g. {1,2,3} -> 3 instead of 1);
 * - duplicate-heavy inputs such as {1,0,1,1,1} returned a wrong answer
 *   because a[p] == a[mid] == a[q] cannot be bisected;
 * - when a[p] > a[half] and a[half] >= a[q] neither branch of the old loop
 *   fired, causing an infinite loop.
 */
public class MinNumberInRotatedArray {

    /**
     * Returns the minimum element of the rotated sorted array.
     *
     * @param a rotated sorted array; may be empty or null
     * @return the minimum element, or 0 for a null/empty array
     *         (preserves the original contract for empty input)
     */
    public int min(int[] a) {
        if (a == null || a.length < 1) {
            return 0;
        }
        int p = 0, q = a.length - 1;
        // If a[p] < a[q] the array is not rotated; the first element is the
        // minimum and the loop never runs, so mid starts at p.
        int mid = p;
        while (a[p] >= a[q]) {
            if (q - p == 1) {
                // Adjacent pointers: q is the start of the second (smaller) run.
                mid = q;
                break;
            }
            mid = p + (q - p) / 2;  // overflow-safe midpoint
            // With duplicates the three probes can be equal; binary search
            // cannot decide which half holds the minimum - fall back to a scan.
            if (a[p] == a[mid] && a[mid] == a[q]) {
                return minInOrder(a, p, q);
            }
            if (a[mid] >= a[p]) {
                p = mid;  // minimum lies in the right half
            } else {
                q = mid;  // minimum lies in the left half (inclusive)
            }
        }
        return a[mid];
    }

    /** Linear scan for the minimum of a[p..q]; used when bisection is ambiguous. */
    private static int minInOrder(int[] a, int p, int q) {
        int result = a[p];
        for (int i = p + 1; i <= q; i++) {
            if (a[i] < result) {
                result = a[i];
            }
        }
        return result;
    }

    public static void main(String[] args) {
        int[] array = {7, 8, 1, 2, 3, 3, 4, 5, 6, 7};
        System.out.println(new MinNumberInRotatedArray().min(array));
    }
}
|
package com.camunda.demo.springboot.conf;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.cloud.config.java.AbstractCloudConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
/**
 * Spring configuration active only under the "cloud" profile.  Exposes the
 * RabbitMQ {@link ConnectionFactory} provided by the bound cloud service
 * (resolved through Spring Cloud Connectors' {@link AbstractCloudConfig})
 * as a regular Spring bean.
 */
@Configuration
@Profile("cloud")
public class RabbitMqCloudConfiguration extends AbstractCloudConfig {

    /**
     * Creates the RabbitMQ connection factory from the cloud-bound service.
     *
     * @return the cloud-provided RabbitMQ connection factory
     */
    @Bean
    public ConnectionFactory rabbitConnectionFactory() {
        return connectionFactory().rabbitConnectionFactory();
    }
}
|
package com.celements.web.plugin.cmd;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;

import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.velocity.VelocityContext;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.model.reference.DocumentReference;

import com.celements.pagetype.cmd.PageTypeCommand;
import com.celements.sajson.Builder;
import com.celements.web.service.IWebUtilsService;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.api.Document;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.web.Utils;
/**
 * Collects the external JavaScript files referenced by the currently rendered
 * page and renders them as HTML script includes.  Files are gathered from the
 * skin document, XWiki.XWikiPreferences, the space WebPreferences, the current
 * document and its page type document; each file is included only once.
 *
 * Changes: LOGGER and collection fields made final, legacy Vector replaced by
 * ArrayList, StringBuilder used instead of String concatenation in loops,
 * redundant instanceof checks on statically typed BaseObject removed, and the
 * listener map is iterated over values() directly.
 */
public class ExternalJavaScriptFilesCommand {

    public static final String JAVA_SCRIPT_EXTERNAL_FILES_CLASS_DOC = "ExternalFiles";
    public static final String JAVA_SCRIPT_EXTERNAL_FILES_CLASS_SPACE = "JavaScript";
    public static final String JAVA_SCRIPT_EXTERNAL_FILES_CLASS =
        JAVA_SCRIPT_EXTERNAL_FILES_CLASS_SPACE + "." + JAVA_SCRIPT_EXTERNAL_FILES_CLASS_DOC;

    private static final Log LOGGER = LogFactory.getFactory().getInstance(
        ExternalJavaScriptFilesCommand.class);

    private final XWikiContext context;
    // Every js file or resolved URL seen so far; guarantees single inclusion.
    private final Set<String> extJSfileSet;
    // Raw attachment / on-disk links already processed by addExtJSfileOnce().
    private final Set<String> extJSAttUrlSet;
    // Resolved file URLs in the order they were added.
    private final List<String> extJSfileList;
    // HTML warning comments for files that could not be resolved.
    private final List<String> extJSnotFoundList;
    // Set once getAllExternalJavaScriptFiles() has rendered the includes;
    // afterwards addExtJSfileOnce_internal() returns the include immediately.
    private boolean displayedAll = false;
    private AttachmentURLCommand attUrlCmd_injected = null;

    public ExternalJavaScriptFilesCommand(XWikiContext context) {
        this.context = context;
        extJSfileSet = new HashSet<String>();
        extJSAttUrlSet = new HashSet<String>();
        extJSfileList = new ArrayList<String>();
        extJSnotFoundList = new ArrayList<String>();
    }

    public String addLazyExtJSfile(String jsFile) {
        return addLazyExtJSfile(jsFile, null);
    }

    /**
     * Renders a hidden span with JSON metadata so the file can be lazily
     * loaded on the client side instead of being included directly.
     *
     * @param jsFile the js file to load lazily
     * @param action optional URL action; may be null or empty
     * @return the HTML span carrying the lazy-load JSON descriptor
     */
    public String addLazyExtJSfile(String jsFile, String action) {
        String attUrl;
        if (!StringUtils.isEmpty(action)) {
            attUrl = getAttUrlCmd().getAttachmentURL(jsFile, action, context);
        } else {
            attUrl = getAttUrlCmd().getAttachmentURL(jsFile, context);
        }
        Builder jsonBuilder = new Builder();
        jsonBuilder.openDictionary();
        jsonBuilder.addStringProperty("fullURL", attUrl);
        jsonBuilder.openProperty("initLoad");
        jsonBuilder.addBoolean(true);
        jsonBuilder.closeDictionary();
        return "<span class='cel_lazyloadJS' style='display: none;'>" + jsonBuilder.getJSON()
            + "</span>";
    }

    public String addExtJSfileOnce(String jsFile) {
        return addExtJSfileOnce(jsFile, null);
    }

    /**
     * Registers the given js file once; repeated calls with the same file are
     * no-ops returning the empty string.
     *
     * @param jsFile the js file (attachment link, on-disk link or URL)
     * @param action optional URL action; may be null or empty
     * @return the script include (or warning comment) while collecting after
     *         display, otherwise the empty string
     */
    public String addExtJSfileOnce(String jsFile, String action) {
        if (!extJSAttUrlSet.contains(jsFile)) {
            if (getAttUrlCmd().isAttachmentLink(jsFile)
                || getAttUrlCmd().isOnDiskLink(jsFile)) {
                extJSAttUrlSet.add(jsFile);
            }
            String jsFileUrl;
            if (!StringUtils.isEmpty(action)) {
                jsFileUrl = getAttUrlCmd().getAttachmentURL(jsFile, action, context);
            } else {
                jsFileUrl = getAttUrlCmd().getAttachmentURL(jsFile, context);
            }
            return addExtJSfileOnce_internal(jsFile, jsFileUrl);
        }
        return "";
    }

    // Records the resolved URL (or a not-found warning) exactly once; the
    // include string is only returned after displayedAll has been set.
    private String addExtJSfileOnce_internal(String jsFile, String jsFileUrl) {
        String jsIncludes2 = "";
        if (jsFileUrl == null) {
            if (!extJSfileSet.contains(jsFile)) {
                jsIncludes2 = "<!-- WARNING: js-file not found: " + jsFile + "-->";
                extJSfileSet.add(jsFile);
                extJSnotFoundList.add(jsIncludes2);
            }
        } else {
            if (!extJSfileSet.contains(jsFileUrl)) {
                jsIncludes2 = getExtStringForJsFile(jsFileUrl);
                extJSfileSet.add(jsFileUrl);
                extJSfileList.add(jsFileUrl);
            }
        }
        if (!displayedAll) {
            jsIncludes2 = "";
        }
        return jsIncludes2;
    }

    AttachmentURLCommand getAttUrlCmd() {
        if (attUrlCmd_injected != null) {
            return attUrlCmd_injected;
        }
        return new AttachmentURLCommand();
    }

    void injectAttUrlCmd(AttachmentURLCommand attUrlCmd) {
        attUrlCmd_injected = attUrlCmd;
    }

    void injectDisplayAll(boolean displayedAll) {
        this.displayedAll = displayedAll;
    }

    String getExtStringForJsFile(String jsFile) {
        return "<script type=\"text/javascript\" src=\""
            + StringEscapeUtils.escapeHtml(jsFile) + "\"></script>";
    }

    /**
     * Gathers all external js files for the current page and renders them as
     * script includes followed by not-found warnings.
     *
     * @return the rendered HTML includes, one per line
     * @throws XWikiException if one of the preference/page type documents
     *         cannot be loaded
     */
    public String getAllExternalJavaScriptFiles() throws XWikiException {
        // Skin document, when the velocity context exposes one
        VelocityContext vcontext = ((VelocityContext) context.get("vcontext"));
        if ((vcontext != null) && vcontext.containsKey("skin_doc")) {
            addAllExtJSfilesFromDoc(context.getWiki().getDocument(getWebUtils(
                ).resolveDocumentReference(((Document) vcontext.get("skin_doc")).getFullName()),
                context));
        }
        // Wiki preferences, space preferences, the document itself
        addAllExtJSfilesFromDoc(context.getWiki().getDocument(new DocumentReference(
            context.getDatabase(), "XWiki", "XWikiPreferences"), context));
        addAllExtJSfilesFromDoc(context.getWiki().getDocument(new DocumentReference(
            context.getDatabase(), context.getDoc().getDocumentReference(
            ).getLastSpaceReference().getName(), "WebPreferences"), context));
        addAllExtJSfilesFromDoc(context.getDoc());
        // Page type document, if the current document declares one
        XWikiDocument pagetype = getPageTypeDoc(context.getDoc());
        if (pagetype != null) {
            addAllExtJSfilesFromDoc(pagetype);
        }
        notifyExtJavaScriptFileListener();
        // StringBuilder instead of repeated String concatenation in the loops
        StringBuilder jsIncludes = new StringBuilder();
        for (String jsFile : extJSfileList) {
            jsIncludes.append(getExtStringForJsFile(jsFile)).append("\n");
        }
        for (String jsFileWarning : extJSnotFoundList) {
            jsIncludes.append(jsFileWarning).append("\n");
        }
        displayedAll = true;
        return jsIncludes.toString();
    }

    private void notifyExtJavaScriptFileListener() {
        // Iterate the values directly instead of keySet() + get()
        for (IExtJSFilesListener listener : getListenerMap().values()) {
            listener.beforeAllExtFinish(this);
        }
    }

    private Map<String, IExtJSFilesListener> getListenerMap() {
        try {
            return Utils.getComponent(IWebUtilsService.class).lookupMap(
                IExtJSFilesListener.class);
        } catch (ComponentLookupException exp) {
            LOGGER.error("Failed to get IExtJSFilesListener components.", exp);
        }
        return Collections.emptyMap();
    }

    // Adds every file referenced by the document; the returned string is the
    // concatenation of the individual include strings (empty before display).
    private String addAllExtJSfilesFromDoc(XWikiDocument doc) {
        StringBuilder jsIncludes2 = new StringBuilder();
        for (String jsFile : getJavaScriptExternalFilePaths(doc)) {
            String addJSinclude = addExtJSfileOnce(jsFile);
            if (!"".equals(addJSinclude)) {
                jsIncludes2.append(addJSinclude).append("\n");
            }
        }
        return jsIncludes2.toString();
    }

    // Resolves the page type document declared by the given document's
    // Celements2.PageType object, or null if none is declared.
    private XWikiDocument getPageTypeDoc(XWikiDocument doc) throws XWikiException {
        LOGGER.trace("entering with doc: '" + ((doc != null) ? doc.getDocumentReference() : "null")
            + "'");
        XWikiDocument pagetypeDoc = null;
        BaseObject obj = doc.getXObject(new DocumentReference(context.getDatabase(),
            PageTypeCommand.PAGE_TYPE_CLASS_SPACE, PageTypeCommand.PAGE_TYPE_CLASS_DOC));
        LOGGER.debug("Celements2.PageType object: '" + obj + "'");
        // obj is declared as BaseObject, so the former instanceof test was
        // redundant; a null check suffices.
        if (obj != null) {
            String pagetypeName = obj.getStringValue("page_type");
            LOGGER.debug("PageType name is: '" + pagetypeName + "'");
            if ((pagetypeName != null) && (!pagetypeName.equals(""))) {
                pagetypeDoc = context.getWiki().getDocument(getWebUtils(
                    ).resolveDocumentReference(new PageTypeCommand().completePageTypeDocName(
                    pagetypeName)), context);
            }
        }
        LOGGER.trace("ending. PageType is: '"
            + ((pagetypeDoc != null) ? pagetypeDoc.getDocumentReference() : "null") + "'");
        return pagetypeDoc;
    }

    // Returns the non-empty "filepath" values of all JavaScript.ExternalFiles
    // objects on the document.  getXObjects() may return null or a list
    // containing null entries; both are handled.
    private List<String> getJavaScriptExternalFilePaths(XWikiDocument doc) {
        List<BaseObject> javaScriptFiles = doc.getXObjects(new DocumentReference(
            context.getDatabase(), JAVA_SCRIPT_EXTERNAL_FILES_CLASS_SPACE,
            JAVA_SCRIPT_EXTERNAL_FILES_CLASS_DOC));
        List<String> jsFiles = new ArrayList<String>();
        if (javaScriptFiles != null) {
            for (BaseObject filepathObj : javaScriptFiles) {
                if ((filepathObj != null) && !"".equals(filepathObj.getStringValue("filepath"))) {
                    jsFiles.add(filepathObj.getStringValue("filepath"));
                }
            }
        }
        return jsFiles;
    }

    private IWebUtilsService getWebUtils() {
        return Utils.getComponent(IWebUtilsService.class);
    }
}
|
package com.censoredsoftware.Demigods.Engine.Listener;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.FallingBlock;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.*;
import org.bukkit.event.entity.EntityChangeBlockEvent;
import org.bukkit.event.entity.EntityExplodeEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.ItemStack;
import com.censoredsoftware.Demigods.Engine.Object.Player.PlayerWrapper;
import com.censoredsoftware.Demigods.Engine.Object.Structure.Structure;
import com.censoredsoftware.Demigods.Engine.Object.Structure.StructureSave;
import com.censoredsoftware.Demigods.Engine.Utility.LocationUtility;
// TODO Fix for lag.
/**
 * Protects structures flagged NO_GRIEFING_ZONE from block placement, breaking,
 * fire, falling blocks, piston movement, liquid flow, explosions, and
 * container access.  The structure's immortal owner is exempt.
 * Most handlers are currently disabled (annotations commented out) pending
 * the lag fix noted above.
 */
public class GriefListener implements Listener
{
    /**
     * Returns true when the acting player is the immortal owner of the given
     * protected structure and is therefore exempt from the grief protection.
     * A null player (e.g. natural ignition) is never exempt.
     *
     * NOTE(review): the original onBlockDamage handler compared owner ids
     * while every other handler compared owner names; this helper unifies on
     * name comparison - confirm id and name equality are interchangeable.
     */
    private static boolean isExemptOwner(Player player, StructureSave save)
    {
        return player != null && PlayerWrapper.isImmortal(player) && save.getSettingHasOwner() && save.getOwner() != null && PlayerWrapper.getPlayer(player).getCurrent().getName().equals(save.getOwner().getName());
    }

    /** Container-like blocks whose inventories are protected inside a zone. */
    private static boolean isProtectedContainer(Material type)
    {
        switch(type)
        {
            case CHEST:
            case ENDER_CHEST:
            case FURNACE:
            case BURNING_FURNACE:
            case DISPENSER:
            case DROPPER:
            case BREWING_STAND:
            case BEACON:
            case HOPPER:
            case HOPPER_MINECART:
            case STORAGE_MINECART:
                return true;
            default:
                return false;
        }
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onBlockPlace(BlockPlaceEvent event)
    {
        Location location = event.getBlock().getLocation();
        if(Structure.isInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE))
        {
            StructureSave save = Structure.getInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE);
            if(isExemptOwner(event.getPlayer(), save)) return;
            event.setCancelled(true);
        }
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onBlockBreak(BlockBreakEvent event)
    {
        Location location = event.getBlock().getLocation();
        if(Structure.isInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE))
        {
            StructureSave save = Structure.getInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE);
            if(isExemptOwner(event.getPlayer(), save)) return;
            event.setCancelled(true);
        }
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onBlockIgnite(BlockIgniteEvent event)
    {
        // event.getPlayer() may be null for natural ignition; the helper
        // treats a null player as non-exempt.
        Location location = event.getBlock().getLocation();
        if(Structure.isInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE))
        {
            StructureSave save = Structure.getInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE);
            if(isExemptOwner(event.getPlayer(), save)) return;
            event.setCancelled(true);
        }
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onBlockBurn(BlockBurnEvent event)
    {
        Location location = event.getBlock().getLocation();
        if(Structure.isInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE)) event.setCancelled(true);
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onBlockFall(EntityChangeBlockEvent event)
    {
        if(event.getEntityType() != EntityType.FALLING_BLOCK) return;
        FallingBlock block = (FallingBlock) event.getEntity();
        if(Structure.isInRadiusWithFlag(LocationUtility.getFloorBelowLocation(block.getLocation()), Structure.Flag.NO_GRIEFING_ZONE))
        {
            // Break the block and drop it as an item instead of letting it land
            event.setCancelled(true);
            event.getBlock().setType(Material.AIR);
            block.getLocation().getWorld().dropItemNaturally(block.getLocation(), new ItemStack(block.getMaterial()));
            block.remove();
        }
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    // NOTE(review): despite its name this handler reacts to liquid flow
    // (BlockFromToEvent), not piston movement; name kept for compatibility.
    public void onPistonExtend(BlockFromToEvent event)
    {
        // Cancel flow that crosses a zone boundary in either direction
        boolean from = Structure.isInRadiusWithFlag(event.getBlock().getLocation(), Structure.Flag.NO_GRIEFING_ZONE);
        boolean to = Structure.isInRadiusWithFlag(event.getToBlock().getLocation(), Structure.Flag.NO_GRIEFING_ZONE);
        if(from != to) event.setCancelled(true);
    }

    @EventHandler(priority = EventPriority.HIGHEST)
    public void onPistonExtend(BlockPistonExtendEvent event)
    {
        // Cancel a push whose moved blocks straddle the zone boundary
        boolean in = false;
        boolean out = false;
        for(Block block : event.getBlocks())
        {
            if(Structure.isInRadiusWithFlag(block.getLocation(), Structure.Flag.NO_GRIEFING_ZONE)) in = true;
            else out = true;
        }
        if(in != out) event.setCancelled(true);
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onPistonRetract(BlockPistonRetractEvent event)
    {
        // Cancel a pull that would move a block across the zone boundary
        boolean block = Structure.isInRadiusWithFlag(event.getBlock().getLocation(), Structure.Flag.NO_GRIEFING_ZONE);
        boolean retract = Structure.isInRadiusWithFlag(event.getRetractLocation(), Structure.Flag.NO_GRIEFING_ZONE);
        if(block != retract) event.setCancelled(true);
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onBlockDamage(BlockDamageEvent event)
    {
        Location location = event.getBlock().getLocation();
        if(Structure.isInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE))
        {
            StructureSave save = Structure.getInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE);
            if(isExemptOwner(event.getPlayer(), save)) return;
            event.setCancelled(true);
        }
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onEntityExplode(final EntityExplodeEvent event)
    {
        if(Structure.isInRadiusWithFlag(event.getLocation(), Structure.Flag.NO_GRIEFING_ZONE)) event.setCancelled(true);
    }

    // @EventHandler(priority = EventPriority.HIGHEST)
    public void onAttemptInventoryOpen(PlayerInteractEvent event) // TODO Fix horse inventories.
    {
        if(!event.getAction().equals(Action.RIGHT_CLICK_BLOCK)) return;
        Block block = event.getClickedBlock();
        Location location = block.getLocation();
        if(!Structure.isInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE)) return;
        if(isProtectedContainer(block.getType()))
        {
            StructureSave save = Structure.getInRadiusWithFlag(location, Structure.Flag.NO_GRIEFING_ZONE);
            if(isExemptOwner(event.getPlayer(), save)) return;
            event.setCancelled(true);
        }
    }
}
|
package com.conventnunnery.plugins.mythicdrops.managers;
import com.conventnunnery.plugins.mythicdrops.MythicDrops;
import com.conventnunnery.plugins.mythicdrops.objects.CustomItem;
import com.conventnunnery.plugins.mythicdrops.objects.MythicEnchantment;
import com.conventnunnery.plugins.mythicdrops.objects.SocketGem;
import com.conventnunnery.plugins.mythicdrops.objects.SocketItem;
import com.conventnunnery.plugins.mythicdrops.objects.Tier;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.enchantments.EnchantmentWrapper;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.Repairable;
import org.bukkit.material.MaterialData;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* A class that handles all of the plugin's drop creation.
*/
public class DropManager {
// Owning plugin instance; used to reach managers, settings, and the shared RNG.
private final MythicDrops plugin;
// Custom items loaded from configuration; populated externally via getCustomItems().
private List<CustomItem> customItems;
/**
 * Instantiates a new Drop API bound to the given plugin, starting with an
 * empty custom item list.
 *
 * @param plugin the owning plugin
 */
public DropManager(MythicDrops plugin) {
    this.plugin = plugin;
    this.customItems = new ArrayList<CustomItem>();
}
/**
 * Construct a random ItemStack.
 *
 * For MOB_SPAWN drops this may produce a custom item or a socket gem before
 * falling back to a random-tier item; every other reason (COMMAND, EXTERNAL,
 * or anything else) always yields a random-tier item.
 *
 * @param reason the reason for generation
 * @return random ItemStack (may be null if no material could be picked for
 *         the chosen tier)
 */
public ItemStack constructItemStack(GenerationReason reason) {
    if (reason == GenerationReason.MOB_SPAWN) {
        // Custom item: either the server is in "custom items only" mode or
        // the roll falls under the custom drop percentage.  Both paths now
        // require a non-empty custom item list - previously the
        // "only custom items" path lacked that guard, so
        // randomCustomItemWithChance() returned null and the call to
        // toItemStack() threw a NullPointerException.
        boolean onlyCustom = getPlugin().getPluginSettings().isAllowCustomToSpawn()
            && getPlugin().getPluginSettings().isOnlyCustomItems();
        boolean rolledCustom = getPlugin().getRandom().nextDouble() <=
            getPlugin().getPluginSettings().getPercentageCustomDrop();
        if (!customItems.isEmpty() && (onlyCustom || rolledCustom)) {
            return randomCustomItemWithChance().toItemStack();
        }
        // Socket gem drop
        if (getPlugin().getPluginSettings().isSocketGemsEnabled()
            && getPlugin().getRandom().nextDouble() < getPlugin()
            .getPluginSettings().getSocketGemsChance()) {
            MaterialData materialData = getPlugin().getSocketGemManager().getRandomSocketGemMaterial();
            SocketGem socketGem = getPlugin().getSocketGemManager().getRandomSocketGemWithChance();
            if (materialData != null && socketGem != null) {
                return new SocketItem(materialData, socketGem);
            }
        }
    }
    // COMMAND, EXTERNAL, and every other reason (the old switch arms were
    // identical) fall through to a random-tier item.
    return constructItemStack(getPlugin().getTierManager().randomTierWithChance(), reason);
}
/**
 * Construct an ItemStack based on a Tier.
 *
 * @param tier   Tier to base the ItemStack on
 * @param reason reason to generate the ItemStack; COMMAND (or null) skips
 *               the random durability roll
 * @return constructed ItemStack, or null if no usable material could be
 *         chosen for the tier
 */
public ItemStack constructItemStack(Tier tier, GenerationReason reason) {
    ItemStack itemstack = null;
    MaterialData matData = null;
    int attempts = 0;
    if (tier == null) {
        return null;
    }
    // Try up to 10 times to pick a usable material for this tier
    while (matData == null && attempts < 10) {
        matData = getPlugin().getItemManager().getMatDataFromTier(tier);
        attempts++;
    }
    if (matData == null || matData.getItemTypeId() == 0
        || matData.getItemType() == Material.AIR)
        return itemstack;
    itemstack = matData.toItemStack(1);
    if (itemstack == null) {
        return itemstack;
    }
    if (reason != null && reason != GenerationReason.COMMAND) {
        // Roll a random durability between the tier's min/max settings.
        // NOTE(review): this arithmetic is kept exactly as before - it appears
        // to scale by max durability twice; verify against the intended tier
        // durability semantics before changing it.
        double min = Math.min(tier.getMinimumDurability(), tier.getMaximumDurability()) *
            itemstack.getType().getMaxDurability();
        double max = Math.max(tier.getMinimumDurability(), tier.getMaximumDurability()) *
            itemstack.getType().getMaxDurability();
        double minDuraPercent = itemstack.getType().getMaxDurability() -
            Math.max(min, max) *
            itemstack.getType().getMaxDurability();
        double maxDuraPercent =
            itemstack.getType().getMaxDurability() -
            Math.min(min, max) *
            itemstack.getType().getMaxDurability();
        int minDura =
            (int) minDuraPercent;
        int maxDura = (int) maxDuraPercent;
        short dura = (short) (getPlugin().getRandom()
            .nextInt(
                Math.abs(Math.max(minDura, maxDura) - Math.min(minDura, maxDura)) + 1) +
            Math.min(minDura, maxDura));
        itemstack.setDurability(dura);
    }
    // Base enchantments defined by the tier
    for (MythicEnchantment me : tier.getBaseEnchantments()) {
        if (tier.isSafeBaseEnchantments() && me.getEnchantment().canEnchantItem(itemstack)) {
            // FIX: use (max - min + 1) like the unsafe branch below.  The old
            // nextInt(max - min) threw IllegalArgumentException whenever
            // maximum == minimum and could never roll the maximum level.
            itemstack.addEnchantment(me.getEnchantment(), Math.abs(me.getMinimumLevel() +
                getPlugin().getRandom().nextInt(me.getMaximumLevel() - me.getMinimumLevel() + 1)));
        } else if (!tier.isSafeBaseEnchantments()) {
            itemstack.addUnsafeEnchantment(me.getEnchantment(), Math.abs(me.getMinimumLevel() +
                getPlugin().getRandom().nextInt(me.getMaximumLevel() - me.getMinimumLevel() + 1)));
        }
    }
    // Bonus enchantments: roll a count in [min, max], then pick from the
    // tier's bonus pool restricted to enchantments applicable to the stack
    if (tier.getMaximumBonusEnchantments() > 0) {
        int randEnchs = getPlugin().getRandom().nextInt(
            Math.abs(tier.getMaximumBonusEnchantments() - tier.getMinimumBonusEnchantments() + 1)) +
            tier.getMinimumBonusEnchantments();
        for (int i = 0; i < randEnchs; i++) {
            Set<MythicEnchantment> allowEnchs = tier.getBonusEnchantments();
            List<Enchantment> stackEnchs = getEnchantStack(itemstack);
            List<MythicEnchantment> actual = new ArrayList<MythicEnchantment>();
            for (MythicEnchantment te : allowEnchs) {
                if (stackEnchs.contains(te.getEnchantment())) {
                    actual.add(te);
                }
            }
            if (actual.size() > 0) {
                MythicEnchantment ench = actual.get(getPlugin().getRandom()
                    .nextInt(actual.size()));
                int lev =
                    getPlugin().getRandom()
                        .nextInt(Math.abs(ench.getMaximumLevel() - ench.getMinimumLevel()) + 1) +
                        ench.getMinimumLevel();
                if (getPlugin().getPluginSettings().isSafeEnchantsOnly()) {
                    if (!getPlugin().getPluginSettings().isAllowEnchantsPastNormalLevel()) {
                        itemstack.addEnchantment(
                            ench.getEnchantment(),
                            getAcceptableEnchantmentLevel(ench.getEnchantment(),
                                lev <= 0 ? 1 : Math.abs(lev)));
                    } else {
                        itemstack.addUnsafeEnchantment(ench.getEnchantment(), lev <= 0 ? 1 : Math.abs(lev));
                    }
                } else {
                    itemstack.addUnsafeEnchantment(ench.getEnchantment(), lev <= 0 ? 1 : Math.abs(lev));
                }
            }
        }
    }
    if (matData.getItemType() == null) {
        return itemstack;
    }
    // Display name, tooltip lines, optional sockets and random lore
    ItemMeta im;
    if (itemstack.hasItemMeta())
        im = itemstack.getItemMeta();
    else
        im = Bukkit.getItemFactory().getItemMeta(matData.getItemType());
    im.setDisplayName(getPlugin().getNameManager().randomFormattedName(
        itemstack, tier));
    List<String> toolTips = getPlugin().getPluginSettings()
        .getAdvancedToolTipFormat();
    List<String> tt = new ArrayList<String>();
    for (String s : toolTips) {
        tt.add(ChatColor.translateAlternateColorCodes(
            '&',
            s.replace("%itemtype%",
                getPlugin().getNameManager().getItemTypeName(matData))
                .replace("%tiername%",
                    tier.getDisplayColor() + tier.getDisplayName())
                .replace(
                    "%basematerial%",
                    getPlugin().getNameManager()
                        .getMinecraftMaterialName(
                            itemstack.getType()))
                .replace(
                    "%mythicmaterial%",
                    getPlugin().getNameManager()
                        .getMythicMaterialName(
                            itemstack.getData())).replace("%enchantment%",
                tier.getDisplayColor() + getPlugin().getNameManager().getEnchantmentTypeName(itemstack) +
                    tier.getIdentificationColor())));
    }
    if (getPlugin().getPluginSettings().isSockettedItemsEnabled() &&
        getPlugin().getRandom().nextDouble() <= getPlugin().getPluginSettings().getSpawnWithSocketChance()) {
        int amtTT = 0;
        for (int i = 0;
             i < getPlugin().getRandom()
                 .nextInt(Math.abs(tier.getMaximumSockets() - tier.getMinimumSockets()) + 1) +
                 tier.getMinimumSockets(); i++) {
            tt.add(ChatColor.GOLD + "(Socket)");
            amtTT++;
        }
        if (amtTT > 0) {
            tt.add(ChatColor.GRAY + "Find a " + ChatColor.GOLD + "Socket Gem" + ChatColor.GRAY + " to fill a " +
                ChatColor.GOLD + "(Socket)");
        }
    }
    if (getPlugin().getPluginSettings().isRandomLoreEnabled() &&
        getPlugin().getRandom().nextDouble() <= getPlugin().getPluginSettings().getRandomLoreChance() &&
        !getPlugin().getNameManager().getBasicLore().isEmpty()) {
        tt.addAll(getPlugin().getNameManager().randomLore());
    }
    im.setLore(tt);
    // High repair cost discourages anvil repairs of generated items
    if (im instanceof Repairable) {
        Repairable r = (Repairable) im;
        r.setRepairCost(1000);
        itemstack.setItemMeta((ItemMeta) r);
    } else {
        itemstack.setItemMeta(im);
    }
    return itemstack;
}
/**
 * Gets an acceptable Enchantment level by clamping the requested level into
 * the enchantment's valid range.
 *
 * @param ench  the Enchantment
 * @param level the requested level
 * @return the requested level, capped at {@code ench.getMaxLevel()} and
 *         raised to {@code ench.getStartLevel()} when below it
 */
public int getAcceptableEnchantmentLevel(Enchantment ench, int level) {
    // Query the Enchantment directly; the previous code wrapped it in a
    // deprecated EnchantmentWrapper via ench.getId(), which only delegated
    // back to the same enchantment.  Clamp order matches the original:
    // the max-level cap is applied before the start-level floor.
    if (level > ench.getMaxLevel()) {
        return ench.getMaxLevel();
    }
    if (level < ench.getStartLevel()) {
        return ench.getStartLevel();
    }
    return level;
}
    /**
     * Gets custom items.
     * <p>
     * NOTE(review): this returns the internal mutable list directly, so
     * callers can modify the manager's state — confirm whether a defensive
     * copy or unmodifiable view is wanted.
     *
     * @return the custom items
     */
    public List<CustomItem> getCustomItems() {
        return customItems;
    }
/**
* Gets a list of Enchantments that can go on an ItemStack.
*
* @param ci ItemStack to check
* @return list of possible Enchantments
*/
public List<Enchantment> getEnchantStack(final ItemStack ci) {
List<Enchantment> set = new ArrayList<Enchantment>();
if (ci == null) {
return set;
}
boolean bln = getPlugin().getPluginSettings().isSafeEnchantsOnly();
for (Enchantment e : Enchantment.values()) {
if (bln) {
if (e.canEnchantItem(ci)) {
set.add(e);
}
} else {
set.add(e);
}
}
return set;
}
    /**
     * Gets the owning MythicDrops plugin instance.
     *
     * @return the plugin
     */
    public MythicDrops getPlugin() {
        return plugin;
    }
public void debugCustomItems() {
List<String> customItemNames = new ArrayList<String>();
for (CustomItem ci : customItems) {
customItemNames.add(ci.getName());
}
getPlugin().getDebug().debug(
"Loaded custom items: "
+ customItemNames.toString().replace("[", "")
.replace("]", ""));
}
/**
* Random custom item.
*
* @return the custom item
*/
@SuppressWarnings("unused")
public CustomItem randomCustomItem() {
return customItems.get(getPlugin().getRandom().nextInt(customItems.size()));
}
public CustomItem getCustomItemByName(String name) {
for (CustomItem i : customItems) {
if (name.equalsIgnoreCase(i.getName())) {
return i;
}
}
return null;
}
/**
* Random custom item with chance.
*
* @return the custom item
*/
public CustomItem randomCustomItemWithChance() {
CustomItem ci = null;
if (customItems == null || customItems.isEmpty())
return ci;
while (ci == null) {
for (CustomItem c : customItems) {
double d = plugin.getRandom().nextDouble();
if (d <= c.getChance()) {
ci = c;
break;
}
}
}
return ci;
}
/**
* Enum of GenerationReasons.
*/
public enum GenerationReason {
*/MOB_SPAWN, /**
*/COMMAND, /**
/**
* Use when spawning a mob
* Use for commands
* Use for anything else
*/EXTERNAL
}
}
|
package org.voltdb.compiler;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.hsqldb_voltpatches.HSQLInterface;
import org.hsqldb_voltpatches.VoltXMLElement;
import org.json_voltpatches.JSONException;
import org.voltcore.logging.Level;
import org.voltcore.logging.VoltLogger;
import org.voltdb.CatalogContext;
import org.voltdb.ProcInfoData;
import org.voltdb.RealVoltDB;
import org.voltdb.TransactionIdManager;
import org.voltdb.VoltDB;
import org.voltdb.VoltDBInterface;
import org.voltdb.VoltType;
import org.voltdb.catalog.Catalog;
import org.voltdb.catalog.CatalogMap;
import org.voltdb.catalog.Column;
import org.voltdb.catalog.ColumnRef;
import org.voltdb.catalog.Database;
import org.voltdb.catalog.FilteredCatalogDiffEngine;
import org.voltdb.catalog.Index;
import org.voltdb.catalog.MaterializedViewInfo;
import org.voltdb.catalog.Procedure;
import org.voltdb.catalog.Statement;
import org.voltdb.catalog.Table;
import org.voltdb.common.Constants;
import org.voltdb.common.Permission;
import org.voltdb.compiler.projectfile.ClassdependenciesType.Classdependency;
import org.voltdb.compiler.projectfile.DatabaseType;
import org.voltdb.compiler.projectfile.ExportType;
import org.voltdb.compiler.projectfile.ExportType.Tables;
import org.voltdb.compiler.projectfile.GroupsType;
import org.voltdb.compiler.projectfile.PartitionsType;
import org.voltdb.compiler.projectfile.ProceduresType;
import org.voltdb.compiler.projectfile.ProjectType;
import org.voltdb.compiler.projectfile.RolesType;
import org.voltdb.compiler.projectfile.SchemasType;
import org.voltdb.compilereport.ReportMaker;
import org.voltdb.expressions.AbstractExpression;
import org.voltdb.expressions.TupleValueExpression;
import org.voltdb.planner.StatementPartitioning;
import org.voltdb.utils.CatalogSchemaTools;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.Encoder;
import org.voltdb.utils.InMemoryJarfile;
import org.voltdb.utils.InMemoryJarfile.JarLoader;
import org.voltdb.utils.LogKeys;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import com.google_voltpatches.common.collect.ImmutableList;
/**
 * Compiles a project XML file and some metadata into a Jarfile
 * containing stored procedure code and a serialized catalog.
 */
public class VoltCompiler {
/** Represents the level of severity for a Feedback message generated during compiling. */
public static enum Severity { INFORMATIONAL, WARNING, ERROR, UNEXPECTED }
public static final int NO_LINE_NUMBER = -1;
private static final String NO_FILENAME = "null";
// Causes the "debugoutput" folder to be generated and populated.
// Also causes explain plans on disk to include cost.
public final static boolean DEBUG_MODE = System.getProperties().contains("compilerdebug");
// was this voltcompiler instantiated in a main(), or as part of VoltDB
public final boolean standaloneCompiler;
// tables that change between the previous compile and this one
// used for Live-DDL caching of plans
private final Set<String> m_dirtyTables = new TreeSet<>();
// A collection of statements from the previous catalog
// used for Live-DDL caching of plans
private final Map<String, Statement> m_previousCatalogStmts = new HashMap<>();
// feedback by filename
ArrayList<Feedback> m_infos = new ArrayList<Feedback>();
ArrayList<Feedback> m_warnings = new ArrayList<Feedback>();
ArrayList<Feedback> m_errors = new ArrayList<Feedback>();
// set of annotations by procedure name
private Map<String, ProcInfoData> m_procInfoOverrides = null;
// Name of DDL file built by the DDL VoltCompiler from the catalog and added to the jar.
public static String AUTOGEN_DDL_FILE_NAME = "autogen-ddl.sql";
// Environment variable used to verify that a catalog created from autogen-dll.sql is effectively
// identical to the original catalog that was used to create the autogen-ddl.sql file.
public static final boolean DEBUG_VERIFY_CATALOG = Boolean.valueOf(System.getenv().get("VERIFY_CATALOG_DEBUG"));
// Turn off warning about DRing replicated tables
public static final boolean DISABLE_DR_WARNING = Boolean.getBoolean("DISABLE_DR_WARNING");
String m_projectFileURL = null;
private String m_currentFilename = NO_FILENAME;
Map<String, String> m_ddlFilePaths = new HashMap<String, String>();
String[] m_addedClasses = null;
String[] m_importLines = null;
// generated html text for catalog report
String m_report = null;
String m_reportPath = null;
static String m_canonicalDDL = null;
Catalog m_catalog = null;
DatabaseEstimates m_estimates = new DatabaseEstimates();
private List<String> m_capturedDiagnosticDetail = null;
private static final VoltLogger compilerLog = new VoltLogger("COMPILER");
private static final VoltLogger consoleLog = new VoltLogger("CONSOLE");
private static final VoltLogger Log = new VoltLogger("org.voltdb.compiler.VoltCompiler");
private final static String m_emptyDDLComment = "-- This DDL file is a placeholder for starting without a user-supplied catalog.\n";
private ClassLoader m_classLoader = ClassLoader.getSystemClassLoader();
/**
* Represents output from a compile. This works similarly to Log4j; there
* are different levels of feedback including info, warning, error, and
* unexpected error. Feedback can be output to a printstream (like stdout)
* or can be examined programatically.
*
*/
public static class Feedback {
Severity severityLevel;
String fileName;
int lineNo;
String message;
Feedback(final Severity severityLevel, final String message, final String fileName, final int lineNo) {
this.severityLevel = severityLevel;
this.message = message;
this.fileName = fileName;
this.lineNo = lineNo;
}
public String getStandardFeedbackLine() {
String retval = "";
if (severityLevel == Severity.INFORMATIONAL)
retval = "INFO";
if (severityLevel == Severity.WARNING)
retval = "WARNING";
if (severityLevel == Severity.ERROR)
retval = "ERROR";
if (severityLevel == Severity.UNEXPECTED)
retval = "UNEXPECTED ERROR";
return retval + " " + getLogString();
}
public String getLogString() {
String retval = new String();
if (fileName != null) {
retval += "[" + fileName;
if (lineNo != NO_LINE_NUMBER)
retval += ":" + lineNo;
retval += "]";
}
retval += ": " + message;
return retval;
}
public Severity getSeverityLevel() {
return severityLevel;
}
public String getFileName() {
return fileName;
}
public int getLineNumber() {
return lineNo;
}
public String getMessage() {
return message;
}
}
    /**
     * Compiler-internal checked exception. Every constructor except the bare
     * Exception wrapper also records its message as ERROR feedback via
     * addErr(), so by the time a caller catches this the failure has already
     * been reported.
     */
    class VoltCompilerException extends Exception {
        private static final long serialVersionUID = -2267780579911448600L;
        private String message = null;

        VoltCompilerException(final Exception e) {
            super(e);
        }

        VoltCompilerException(final String message, final int lineNo) {
            // Register the error (with line number) before storing the message.
            addErr(message, lineNo);
            this.message = message;
        }

        VoltCompilerException(final String message) {
            addErr(message);
            this.message = message;
        }

        VoltCompilerException(String message, Throwable cause) {
            // Fold the cause's description into the stored message so it shows
            // up in feedback, then also chain the cause for stack traces.
            message += "\n caused by:\n " + cause.toString();
            addErr(message);
            this.message = message;
            this.initCause(cause);
        }

        @Override
        public String getMessage() {
            return message;
        }
    }
    /**
     * SAX error handler that funnels project-XML validation problems into the
     * compiler's feedback lists.
     */
    class VoltXMLErrorHandler implements ErrorHandler {
        @Override
        public void error(final SAXParseException exception) throws SAXException {
            addErr(exception.getMessage(), exception.getLineNumber());
        }

        @Override
        public void fatalError(final SAXParseException exception) throws SAXException {
            // Deliberately not reported here (see the commented-out call).
            // NOTE(review): confirm fatal errors surface elsewhere — as
            // written they are silently dropped by this handler.
            //addErr(exception.getMessage(), exception.getLineNumber());
        }

        @Override
        public void warning(final SAXParseException exception) throws SAXException {
            addWarn(exception.getMessage(), exception.getLineNumber());
        }
    }
    /**
     * Immutable description of a stored procedure to be compiled: its class
     * or single SQL statement, authorized groups, partitioning string and
     * (optional) scripting-language implementation.
     */
    public class ProcedureDescriptor {
        public final ArrayList<String> m_authGroups;
        public final String m_className;
        // for single-stmt procs
        public final String m_singleStmt;
        public final String m_joinOrder;
        public final String m_partitionString;
        public final boolean m_builtInStmt; // autogenerated sql statement
        public final Language m_language; // Java or Groovy
        public final String m_scriptImpl; // Procedure code from DDL (if any)
        public final Class<?> m_class;

        /** Describes a procedure known only by class name (class not yet loaded). */
        ProcedureDescriptor (final ArrayList<String> authGroups, final String className) {
            assert(className != null);
            m_authGroups = authGroups;
            m_className = className;
            m_singleStmt = null;
            m_joinOrder = null;
            m_partitionString = null;
            m_builtInStmt = false;
            m_language = null;
            m_scriptImpl = null;
            m_class = null;
        }

        /** Describes a script-language procedure backed by a loaded class. */
        public ProcedureDescriptor(final ArrayList<String> authGroups, final Language language, final String scriptImpl, Class<?> clazz) {
            assert(clazz != null && language != null);
            m_authGroups = authGroups;
            m_className = clazz.getName();
            m_singleStmt = null;
            m_joinOrder = null;
            m_partitionString = null;
            m_builtInStmt = false;
            m_language = language;
            m_scriptImpl = scriptImpl;
            m_class = clazz;
        }

        /** Describes a partitioned procedure backed by a loaded class. */
        ProcedureDescriptor(final ArrayList<String> authGroups, final Class<?> clazz, final String partitionString, final Language language, final String scriptImpl) {
            assert(clazz != null);
            assert(partitionString != null);
            m_authGroups = authGroups;
            m_className = clazz.getName();
            m_singleStmt = null;
            m_joinOrder = null;
            m_partitionString = partitionString;
            m_builtInStmt = false;
            m_language = language;
            m_scriptImpl = scriptImpl;
            m_class = clazz;
        }

        /** Fully-specified descriptor for a single-statement procedure. */
        ProcedureDescriptor (final ArrayList<String> authGroups, final String className,
                final String singleStmt, final String joinOrder, final String partitionString,
                boolean builtInStmt, Language language, final String scriptImpl, Class<?> clazz)
        {
            assert(className != null);
            assert(singleStmt != null);
            m_authGroups = authGroups;
            m_className = className;
            m_singleStmt = singleStmt;
            m_joinOrder = joinOrder;
            m_partitionString = partitionString;
            m_builtInStmt = builtInStmt;
            m_language = language;
            m_scriptImpl = scriptImpl;
            m_class = clazz;
        }
    }
    /** Passing true to constructor indicates the compiler is being run in standalone mode */
    public VoltCompiler(boolean standaloneCompiler) {
        this.standaloneCompiler = standaloneCompiler;
    }

    /** Parameterless constructor is for embedded VoltCompiler use only. */
    public VoltCompiler() {
        // Embedded use: behaves as a non-standalone compiler (debug-level info logging).
        this(false);
    }
public boolean hasErrors() {
return m_errors.size() > 0;
}
public boolean hasErrorsOrWarnings() {
return (m_warnings.size() > 0) || hasErrors();
}
    /** Records informational feedback with no associated line number. */
    void addInfo(final String msg) {
        addInfo(msg, NO_LINE_NUMBER);
    }

    /** Records warning feedback with no associated line number. */
    void addWarn(final String msg) {
        addWarn(msg, NO_LINE_NUMBER);
    }

    /** Records error feedback with no associated line number. */
    void addErr(final String msg) {
        addErr(msg, NO_LINE_NUMBER);
    }
    /**
     * Records informational feedback against the file currently being
     * compiled. Logged at info level when running standalone, debug level
     * when embedded in VoltDB.
     */
    void addInfo(final String msg, final int lineNo) {
        final Feedback fb = new Feedback(Severity.INFORMATIONAL, msg, m_currentFilename, lineNo);
        m_infos.add(fb);
        if (standaloneCompiler) {
            compilerLog.info(fb.getLogString());
        }
        else {
            compilerLog.debug(fb.getLogString());
        }
    }

    /** Records warning feedback against the file currently being compiled. */
    void addWarn(final String msg, final int lineNo) {
        final Feedback fb = new Feedback(Severity.WARNING, msg, m_currentFilename, lineNo);
        m_warnings.add(fb);
        compilerLog.warn(fb.getLogString());
    }

    /** Records error feedback against the file currently being compiled. */
    void addErr(final String msg, final int lineNo) {
        final Feedback fb = new Feedback(Severity.ERROR, msg, m_currentFilename, lineNo);
        m_errors.add(fb);
        compilerLog.error(fb.getLogString());
    }
    /**
     * Compile from a set of DDL files, but no project.xml.
     *
     * @param jarOutputPath The location to put the finished JAR to.
     * @param ddlFilePaths The array of DDL files to compile (at least one is required).
     * @return true if successful
     * @throws VoltCompilerException
     */
    public boolean compileFromDDL(
            final String jarOutputPath,
            final String... ddlFilePaths)
                    throws VoltCompilerException
    {
        // Delegates with a null project reader, i.e. the DDL-only path.
        return compileWithProjectXML(null, jarOutputPath, ddlFilePaths);
    }
/**
* Compile optionally using a (DEPRECATED) project.xml file.
* This internal method prepares to compile with or without a project file.
*
* @param projectFileURL URL of the project file or NULL if not used.
* @param jarOutputPath The location to put the finished JAR to.
* @param ddlFilePaths The array of DDL files to compile (at least one is required if there's a project file).
* @return true if successful
*/
public boolean compileWithProjectXML(
final String projectFileURL,
final String jarOutputPath,
final String... ddlFilePaths)
{
VoltCompilerReader projectReader = null;
if (projectFileURL != null) {
try {
projectReader = new VoltCompilerFileReader(projectFileURL);
}
catch (IOException e) {
compilerLog.error(String.format(
"Failed to initialize reader for project file \"%s\".",
projectFileURL));
return false;
}
}
else if (ddlFilePaths.length == 0) {
compilerLog.error(String.format(
"At least one DDL file is required if no project file is specified.",
projectFileURL));
return false;
}
List<VoltCompilerReader> ddlReaderList;
try {
ddlReaderList = DDLPathsToReaderList(ddlFilePaths);
}
catch (VoltCompilerException e) {
compilerLog.error("Unable to open DDL file.", e);
return false;
}
return compileInternalToFile(projectReader, jarOutputPath, null, null, ddlReaderList, null);
}
    /**
     * Compile empty catalog jar
     * @param jarOutputPath output jar path
     * @return true if successful
     */
    public boolean compileEmptyCatalog(final String jarOutputPath) {
        // Use a special DDL reader to provide the contents.
        List<VoltCompilerReader> ddlReaderList = new ArrayList<VoltCompilerReader>(1);
        ddlReaderList.add(new VoltCompilerStringReader("ddl.sql", m_emptyDDLComment));
        // Seed it with the DDL so that a version upgrade hack in compileInternalToFile()
        // doesn't try to get the DDL file from the path.
        InMemoryJarfile jarFile = new InMemoryJarfile();
        try {
            ddlReaderList.get(0).putInJar(jarFile, "ddl.sql");
        }
        catch (IOException e) {
            compilerLog.error("Failed to add DDL file to empty in-memory jar.");
            return false;
        }
        // No project file; the pre-seeded jar is passed through as the output jar.
        return compileInternalToFile(null, jarOutputPath, null, null, ddlReaderList, jarFile);
    }
private static void addBuildInfo(final InMemoryJarfile jarOutput) {
StringBuilder buildinfo = new StringBuilder();
String info[] = RealVoltDB.extractBuildInfo(compilerLog);
buildinfo.append(info[0]).append('\n');
buildinfo.append(info[1]).append('\n');
buildinfo.append(System.getProperty("user.name")).append('\n');
buildinfo.append(System.getProperty("user.dir")).append('\n');
buildinfo.append(Long.toString(System.currentTimeMillis())).append('\n');
byte buildinfoBytes[] = buildinfo.toString().getBytes(Constants.UTF8ENCODING);
jarOutput.put(CatalogUtil.CATALOG_BUILDINFO_FILENAME, buildinfoBytes);
}
/**
* Internal method that takes the generated DDL from the catalog and builds a new catalog.
* The generated catalog is diffed with the original catalog to verify compilation and
* catalog generation consistency.
*/
private void debugVerifyCatalog(InMemoryJarfile origJarFile, Catalog origCatalog)
{
final VoltCompiler autoGenCompiler = new VoltCompiler();
// Make the new compiler use the original jarfile's classloader so it can
// pull in the class files for procedures and imports
autoGenCompiler.m_classLoader = origJarFile.getLoader();
List<VoltCompilerReader> autogenReaderList = new ArrayList<VoltCompilerReader>(1);
autogenReaderList.add(new VoltCompilerJarFileReader(origJarFile, AUTOGEN_DDL_FILE_NAME));
DatabaseType autoGenDatabase = getProjectDatabase(null);
InMemoryJarfile autoGenJarOutput = new InMemoryJarfile();
autoGenCompiler.m_currentFilename = AUTOGEN_DDL_FILE_NAME;
Catalog autoGenCatalog = autoGenCompiler.compileCatalogInternal(autoGenDatabase, null, null,
autogenReaderList, autoGenJarOutput);
FilteredCatalogDiffEngine diffEng = new FilteredCatalogDiffEngine(origCatalog, autoGenCatalog);
String diffCmds = diffEng.commands();
if (diffCmds != null && !diffCmds.equals("")) {
VoltDB.crashLocalVoltDB("Catalog Verification from Generated DDL failed! " +
"The offending diffcmds were: " + diffCmds);
}
else {
Log.info("Catalog verification completed successfuly.");
}
}
    /**
     * Internal method for compiling with and without a project.xml file or DDL files,
     * writing the resulting catalog jar to disk.
     *
     * @param projectReader Reader for project file or null if a project file is not used.
     * @param jarOutputPath The location to put the finished JAR to.
     * @param cannonicalDDLIfAny Canonical DDL reader from a prior compile, or null.
     * @param previousCatalogIfAny Previous catalog for Live-DDL plan caching, or null.
     * @param ddlReaderList The list of DDL readers to compile (when no project is provided).
     * @param jarOutputRet The in-memory jar to populate or null if the caller doesn't provide one.
     * @return true if successful
     */
    private boolean compileInternalToFile(
            final VoltCompilerReader projectReader,
            final String jarOutputPath,
            final VoltCompilerReader cannonicalDDLIfAny,
            final Catalog previousCatalogIfAny,
            final List<VoltCompilerReader> ddlReaderList,
            final InMemoryJarfile jarOutputRet)
    {
        if (jarOutputPath == null) {
            addErr("The output jar path is null.");
            return false;
        }
        InMemoryJarfile jarOutput = compileInternal(projectReader, cannonicalDDLIfAny, previousCatalogIfAny, ddlReaderList, jarOutputRet);
        if (jarOutput == null) {
            return false;
        }
        try {
            jarOutput.writeToFile(new File(jarOutputPath)).run();
        }
        catch (final Exception e) {
            // NOTE(review): prints the trace to stderr in addition to
            // recording feedback — confirm that is intended over logging.
            e.printStackTrace();
            addErr("Error writing catalog jar to disk: " + e.getMessage());
            return false;
        }
        return true;
    }
    /**
     * Internal method for compiling with and without a project.xml file or DDL files.
     *
     * @param projectReader Reader for project file or null if a project file is not used.
     * @param cannonicalDDLIfAny Canonical DDL reader from a prior compile, or null.
     * @param previousCatalogIfAny Previous catalog for Live-DDL plan caching, or null.
     * @param ddlReaderList The list of DDL readers to compile (when no project is provided).
     * @param jarOutputRet The in-memory jar to populate or null if the caller doesn't provide one.
     * @return The InMemoryJarfile containing the compiled catalog if
     * successful, null if not. If the caller provided an InMemoryJarfile, the
     * return value will be the same object, not a copy.
     */
    private InMemoryJarfile compileInternal(
            final VoltCompilerReader projectReader,
            final VoltCompilerReader cannonicalDDLIfAny,
            final Catalog previousCatalogIfAny,
            final List<VoltCompilerReader> ddlReaderList,
            final InMemoryJarfile jarOutputRet)
    {
        // Expect to have either >1 ddl file or a project file.
        assert(ddlReaderList.size() > 0 || projectReader != null);
        // Make a temporary local output jar if one wasn't provided.
        final InMemoryJarfile jarOutput = (jarOutputRet != null
                                                ? jarOutputRet
                                                : new InMemoryJarfile());
        m_projectFileURL = (projectReader != null ? projectReader.getPath() : null);
        if (m_projectFileURL == null && (ddlReaderList == null || ddlReaderList.isEmpty())) {
            addErr("One or more DDL files are required.");
            return null;
        }
        // clear out the warnings and errors
        m_warnings.clear();
        m_infos.clear();
        m_errors.clear();
        // do all the work to get the catalog
        DatabaseType database = getProjectDatabase(projectReader);
        if (database == null) {
            return null;
        }
        final Catalog catalog = compileCatalogInternal(database, cannonicalDDLIfAny, previousCatalogIfAny, ddlReaderList, jarOutput);
        if (catalog == null) {
            return null;
        }
        // Build DDL from Catalog Data
        m_canonicalDDL = CatalogSchemaTools.toSchema(catalog, m_importLines);
        // generate the catalog report and write it to disk
        try {
            m_report = ReportMaker.report(m_catalog, m_warnings, m_canonicalDDL);
            m_reportPath = null;
            File file = null;
            // write to working dir when using VoltCompiler directly
            if (standaloneCompiler) {
                file = new File("catalog-report.html");
            }
            else {
                // try to get a catalog context
                VoltDBInterface voltdb = VoltDB.instance();
                CatalogContext catalogContext = voltdb != null ? voltdb.getCatalogContext() : null;
                // it's possible that standaloneCompiler will be false and catalogContext will be null
                // in test code.
                // if we have a context, write report to voltroot
                if (catalogContext != null) {
                    file = new File(catalogContext.cluster.getVoltroot(), "catalog-report.html");
                }
            }
            // if there's a good place to write the report, do so
            if (file != null) {
                FileWriter fw = new FileWriter(file);
                fw.write(m_report);
                fw.close();
                m_reportPath = file.getAbsolutePath();
            }
        } catch (IOException e) {
            // NOTE(review): a report-writing failure aborts the whole compile
            // and only prints a stack trace (no addErr) — confirm intended.
            e.printStackTrace();
            return null;
        }
        jarOutput.put(AUTOGEN_DDL_FILE_NAME, m_canonicalDDL.getBytes(Constants.UTF8ENCODING));
        if (DEBUG_VERIFY_CATALOG) {
            debugVerifyCatalog(jarOutput, catalog);
        }
        // WRITE CATALOG TO JAR HERE
        final String catalogCommands = catalog.serialize();
        byte[] catalogBytes = catalogCommands.getBytes(Constants.UTF8ENCODING);
        try {
            // Don't update buildinfo if it's already present, e.g. while upgrading.
            // Note when upgrading the version has already been updated by the caller.
            if (!jarOutput.containsKey(CatalogUtil.CATALOG_BUILDINFO_FILENAME)) {
                addBuildInfo(jarOutput);
            }
            jarOutput.put(CatalogUtil.CATALOG_FILENAME, catalogBytes);
            // put the compiler report into the jarfile
            jarOutput.put("catalog-report.html", m_report.getBytes(Constants.UTF8ENCODING));
        }
        catch (final Exception e) {
            e.printStackTrace();
            return null;
        }
        // The assert fires in debug builds if errors slipped through all the
        // null-return paths above; the runtime check keeps release builds safe.
        assert(!hasErrors());
        if (hasErrors()) {
            return null;
        }
        return jarOutput;
    }
/**
* Get textual explain plan info for each plan from the
* catalog to be shoved into the catalog jarfile.
*/
HashMap<String, byte[]> getExplainPlans(Catalog catalog) {
HashMap<String, byte[]> retval = new HashMap<String, byte[]>();
Database db = getCatalogDatabase();
assert(db != null);
for (Procedure proc : db.getProcedures()) {
for (Statement stmt : proc.getStatements()) {
String s = "SQL: " + stmt.getSqltext() + "\n";
s += "COST: " + Integer.toString(stmt.getCost()) + "\n";
s += "PLAN:\n\n";
s += Encoder.hexDecodeToString(stmt.getExplainplan()) + "\n";
byte[] b = s.getBytes(Constants.UTF8ENCODING);
retval.put(proc.getTypeName() + "_" + stmt.getTypeName() + ".txt", b);
}
}
return retval;
}
    /**
     * Opens a single DDL file for reading, resolving the path relative to the
     * project file (when one is in use).
     *
     * @param path DDL file path, possibly relative
     * @return a reader over the schema file
     * @throws VoltCompilerException if the file cannot be opened
     */
    private VoltCompilerFileReader createDDLFileReader(String path)
            throws VoltCompilerException
    {
        try {
            return new VoltCompilerFileReader(VoltCompilerFileReader.getSchemaPath(m_projectFileURL, path));
        }
        catch (IOException e) {
            String msg = String.format("Unable to open schema file \"%s\" for reading: %s", path, e.getMessage());
            throw new VoltCompilerException(msg);
        }
    }
private List<VoltCompilerReader> DDLPathsToReaderList(final String... ddlFilePaths)
throws VoltCompilerException
{
List<VoltCompilerReader> ddlReaderList = new ArrayList<VoltCompilerReader>(ddlFilePaths.length);
for (int i = 0; i < ddlFilePaths.length; ++i) {
ddlReaderList.add(createDDLFileReader(ddlFilePaths[i]));
}
return ddlReaderList;
}
    /**
     * Compile from DDL files (only).
     * @param ddlFilePaths input ddl files
     * @return compiled catalog
     * @throws VoltCompilerException
     */
    public Catalog compileCatalogFromDDL(final String... ddlFilePaths)
            throws VoltCompilerException
    {
        // A null project reader yields a stub DatabaseType.
        DatabaseType database = getProjectDatabase(null);
        InMemoryJarfile jarOutput = new InMemoryJarfile();
        return compileCatalogInternal(database, null, null, DDLPathsToReaderList(ddlFilePaths), jarOutput);
    }
    /**
     * Compile from project file (without explicit DDL file paths).
     * @param projectFileURL project file URL/path
     * @return compiled catalog
     * @throws VoltCompilerException
     */
    public Catalog compileCatalogFromProject(final String projectFileURL)
            throws VoltCompilerException
    {
        VoltCompilerReader projectReader = null;
        try {
            projectReader = new VoltCompilerFileReader(projectFileURL);
        }
        catch (IOException e) {
            throw new VoltCompilerException(String.format(
                    "Unable to create project reader for \"%s\": %s",
                    projectFileURL, e.getMessage()));
        }
        DatabaseType database = getProjectDatabase(projectReader);
        InMemoryJarfile jarOutput = new InMemoryJarfile();
        // Provide an empty DDL reader list.
        return compileCatalogInternal(database, null, null, DDLPathsToReaderList(), jarOutput);
    }
    /**
     * Read the project file and get the database object.
     *
     * @param projectReader reader over the project file, or null when no
     *        project file is in use (a stub DatabaseType is returned instead)
     * @return database for project or null when validation/unmarshalling fails
     */
    private DatabaseType getProjectDatabase(final VoltCompilerReader projectReader)
    {
        DatabaseType database = null;
        if (projectReader != null) {
            m_currentFilename = projectReader.getName();
            try {
                JAXBContext jc = JAXBContext.newInstance("org.voltdb.compiler.projectfile");
                // Validate the project document against the bundled XSD.
                SchemaFactory sf = SchemaFactory.newInstance(
                  javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI);
                Schema schema = sf.newSchema(this.getClass().getResource("ProjectFileSchema.xsd"));
                Unmarshaller unmarshaller = jc.createUnmarshaller();
                // Attach the schema so unmarshalling performs validation.
                unmarshaller.setSchema(schema);
                @SuppressWarnings("unchecked")
                JAXBElement<ProjectType> result = (JAXBElement<ProjectType>) unmarshaller.unmarshal(projectReader);
                ProjectType project = result.getValue();
                database = project.getDatabase();
            }
            catch (JAXBException e) {
                // Convert some linked exceptions to more friendly errors.
                if (e.getLinkedException() instanceof java.io.FileNotFoundException) {
                    addErr(e.getLinkedException().getMessage());
                    compilerLog.error(e.getLinkedException().getMessage());
                }
                else {
                    DeprecatedProjectElement deprecated = DeprecatedProjectElement.valueOf(e);
                    if( deprecated != null) {
                        addErr("Found deprecated XML element \"" + deprecated.name() + "\" in project.xml file, "
                                + deprecated.getSuggestion());
                        addErr("Error schema validating project.xml file. " + e.getLinkedException().getMessage());
                        compilerLog.error("Found deprecated XML element \"" + deprecated.name() + "\" in project.xml file");
                        compilerLog.error(e.getMessage());
                        compilerLog.error(projectReader.getPath());
                    }
                    else if (e.getLinkedException() instanceof org.xml.sax.SAXParseException) {
                        addErr("Error schema validating project.xml file. " + e.getLinkedException().getMessage());
                        compilerLog.error("Error schema validating project.xml file: " + e.getLinkedException().getMessage());
                        compilerLog.error(e.getMessage());
                        compilerLog.error(projectReader.getPath());
                    }
                    else {
                        // Anything else is unexpected — surface it loudly.
                        throw new RuntimeException(e);
                    }
                }
            }
            catch (SAXException e) {
                addErr("Error schema validating project.xml file. " + e.getMessage());
                compilerLog.error("Error schema validating project.xml file. " + e.getMessage());
            }
        }
        else {
            // No project.xml - create a stub object.
            database = new DatabaseType();
        }
        return database;
    }
    /**
     * Internal method for compiling the catalog.
     *
     * @param database catalog-related info parsed from a project file
     * @param cannonicalDDLIfAny canonical DDL reader from a previous compile, or null
     * @param previousCatalogIfAny previous catalog (for Live-DDL plan caching), or null
     * @param ddlReaderList Reader objects for ddl files.
     * @param jarOutput The in-memory jar to populate or null if the caller doesn't provide one.
     * @return the compiled catalog, or null on failure (feedback carries the errors)
     */
    private Catalog compileCatalogInternal(
            final DatabaseType database,
            final VoltCompilerReader cannonicalDDLIfAny,
            final Catalog previousCatalogIfAny,
            final List<VoltCompilerReader> ddlReaderList,
            final InMemoryJarfile jarOutput)
    {
        // Compiler instance is reusable. Clear the cache.
        cachedAddedClasses.clear();
        m_catalog = new Catalog();
        // Initialize the catalog for one cluster
        m_catalog.execute("add / clusters cluster");
        m_catalog.getClusters().get("cluster").setSecurityenabled(false);
        if (database != null) {
            final String databaseName = database.getName();
            // schema does not verify that the database is named "database"
            if (databaseName.equals("database") == false) {
                return null; // error messaging handled higher up
            }
            // shutdown and make a new hsqldb
            try {
                Database previousDBIfAny = null;
                if (previousCatalogIfAny != null) {
                    previousDBIfAny = previousCatalogIfAny.getClusters().get("cluster").getDatabases().get("database");
                }
                compileDatabaseNode(database, cannonicalDDLIfAny, previousDBIfAny, ddlReaderList, jarOutput);
            } catch (final VoltCompilerException e) {
                // The exception already recorded its message via addErr().
                return null;
            }
        }
        assert(m_catalog != null);
        // add epoch info to catalog
        final int epoch = (int)(TransactionIdManager.getEpoch() / 1000);
        m_catalog.getClusters().get("cluster").setLocalepoch(epoch);
        return m_catalog;
    }
ProcInfoData getProcInfoOverride(final String procName) {
if (m_procInfoOverrides == null)
return null;
return m_procInfoOverrides.get(procName);
}
public String getCanonicalDDL() {
if(m_canonicalDDL == null) {
throw new RuntimeException();
}
return m_canonicalDDL;
}
public Catalog getCatalog() {
return m_catalog;
}
public Database getCatalogDatabase() {
return m_catalog.getClusters().get("cluster").getDatabases().get("database");
}
    /**
     * Adds the "database" entry to the catalog's single cluster and installs
     * the default roles.
     *
     * @return the freshly created catalog database
     */
    private Database initCatalogDatabase() {
        // create the database in the catalog
        m_catalog.execute("add /clusters#cluster databases database");
        addDefaultRoles();
        return getCatalogDatabase();
    }
    /**
     * Create default roles. These roles cannot be removed nor overridden in the DDL.
     * Make sure to omit these roles in the generated DDL in {@link org.voltdb.utils.CatalogSchemaTools}
     * Also, make sure to prevent them from being dropped by DROP ROLE in the DDLCompiler
     * !!!
     * IF YOU ADD A THIRD ROLE TO THE DEFAULTS, IT'S TIME TO BUST THEM OUT INTO A CENTRAL
     * LOCALE AND DO ALL THIS MAGIC PROGRAMATICALLY --izzy 11/20/2014
     */
    private void addDefaultRoles()
    {
        // admin: full ADMIN permission set
        m_catalog.execute("add /clusters#cluster/databases#database groups administrator");
        Permission.setPermissionsInGroup(getCatalogDatabase().getGroups().get("administrator"),
                Permission.getPermissionsFromAliases(Arrays.asList("ADMIN")));
        // user: SQL access plus the ability to call all procedures
        m_catalog.execute("add /clusters#cluster/databases#database groups user");
        Permission.setPermissionsInGroup(getCatalogDatabase().getGroups().get("user"),
                Permission.getPermissionsFromAliases(Arrays.asList("SQL", "ALLPROC")));
    }
    /** Which categories of DDL-declared procedures a schema load should compile. */
    public static enum DdlProceduresToLoad
    {
        NO_DDL_PROCEDURES, ONLY_SINGLE_STATEMENT_PROCEDURES, ALL_DDL_PROCEDURES
    }
    /**
     * Simplified interface for loading a ddl file with full support for VoltDB
     * extensions (partitioning, procedures, export), but no support for "project file" input.
     * This is, at least initially, only a back door to create a fully functional catalog for
     * the purposes of planner unit testing.
     * @param hsql an interface to the hsql frontend, initialized and potentially reused by the caller.
     * @param whichProcs indicates which ddl-defined procedures to load: none, single-statement, or all
     * @param ddlFilePaths schema file paths
     * @return the compiled catalog
     * @throws VoltCompilerException
     */
    public Catalog loadSchema(HSQLInterface hsql,
            DdlProceduresToLoad whichProcs,
            String... ddlFilePaths) throws VoltCompilerException
    {
        // Fresh catalog with a single cluster and database.
        m_catalog = new Catalog();
        m_catalog.execute("add / clusters cluster");
        Database db = initCatalogDatabase();
        List<VoltCompilerReader> ddlReaderList = DDLPathsToReaderList(ddlFilePaths);
        final VoltDDLElementTracker voltDdlTracker = new VoltDDLElementTracker(this);
        InMemoryJarfile jarOutput = new InMemoryJarfile();
        compileDatabase(db, hsql, voltDdlTracker, null, null, ddlReaderList, null, null, whichProcs, jarOutput);
        return m_catalog;
    }
/**
 * Load a ddl file with full support for VoltDB extensions (partitioning, procedures,
 * export), AND full support for input via a project xml file's "database" node.
 * @param database catalog-related info parsed from a project file
 * @param cannonicalDDLIfAny canonical DDL reader carried over from a previous catalog, or null
 * @param previousDBIfAny database from a previous catalog (enables statement cache reuse), or null
 * @param ddlReaderList Reader objects for ddl files.
 * @param jarOutput The in-memory jar to populate or null if the caller doesn't provide one.
 * @throws VoltCompilerException
 */
private void compileDatabaseNode(
        final DatabaseType database,
        VoltCompilerReader cannonicalDDLIfAny,
        Database previousDBIfAny,
        final List<VoltCompilerReader> ddlReaderList,
        final InMemoryJarfile jarOutput)
        throws VoltCompilerException
{
    final ArrayList<Class<?>> classDependencies = new ArrayList<Class<?>>();
    final VoltDDLElementTracker voltDdlTracker = new VoltDDLElementTracker(this);

    Database db = initCatalogDatabase();

    // schemas/schema
    if (database.getSchemas() != null) {
        for (SchemasType.Schema schema : database.getSchemas().getSchema()) {
            compilerLog.l7dlog( Level.INFO, LogKeys.compiler_VoltCompiler_CatalogPath.name(),
                    new Object[] {schema.getPath()}, null);
            // Prefer to use the in-memory copy.
            // All ddl.sql is placed in the jar root folder.
            File schemaFile = new File(schema.getPath());
            String schemaName = schemaFile.getName();
            if (jarOutput != null && jarOutput.containsKey(schemaName)) {
                ddlReaderList.add(new VoltCompilerJarFileReader(jarOutput, schemaName));
            }
            else {
                ddlReaderList.add(createDDLFileReader(schema.getPath()));
            }
        }
    }

    // groups/group (alias for roles/role) -- both XML shapes share identical
    // attributes, so the catalog setup is centralized in configureCatalogGroup().
    if (database.getGroups() != null) {
        for (GroupsType.Group group : database.getGroups().getGroup()) {
            configureCatalogGroup(db.getGroups().add(group.getName()),
                    group.isAdhoc(), group.isDefaultproc(), group.isDefaultprocread(),
                    group.isSysproc());
        }
    }

    // roles/role (alias for groups/group).
    if (database.getRoles() != null) {
        for (RolesType.Role role : database.getRoles().getRole()) {
            configureCatalogGroup(db.getGroups().add(role.getName()),
                    role.isAdhoc(), role.isDefaultproc(), role.isDefaultprocread(),
                    role.isSysproc());
        }
    }

    // procedures/procedure
    if (database.getProcedures() != null) {
        for (ProceduresType.Procedure proc : database.getProcedures().getProcedure()) {
            voltDdlTracker.add(getProcedure(proc));
        }
    }

    // classdependencies/classdependency
    if (database.getClassdependencies() != null) {
        for (Classdependency dep : database.getClassdependencies().getClassdependency()) {
            classDependencies.add(getClassDependency(dep));
        }
    }

    // partitions/table
    if (database.getPartitions() != null) {
        for (PartitionsType.Partition table : database.getPartitions().getPartition()) {
            voltDdlTracker.addPartition(table.getTable(), table.getColumn());
        }
    }

    // shutdown and make a new hsqldb
    HSQLInterface hsql = HSQLInterface.loadHsqldb();
    compileDatabase(db, hsql, voltDdlTracker, cannonicalDDLIfAny, previousDBIfAny, ddlReaderList, database.getExport(), classDependencies,
            DdlProceduresToLoad.ALL_DDL_PROCEDURES, jarOutput);
}

/**
 * Shared permission wiring for a catalog group created from either a project file
 * &lt;group&gt; or &lt;role&gt; element (the two are aliases with identical attributes).
 * Derived permissions cascade: adhoc implies sqlread; sql/sqlread imply the
 * corresponding default-proc permissions; sysproc implies everything plus admin.
 *
 * @param catGroup freshly added catalog group to configure
 * @param adhoc whether the group may run ad hoc (read/write) SQL
 * @param defaultproc whether the group may call default CRUD procedures
 * @param defaultprocread whether the group may call read-only default procedures
 * @param sysproc whether the group may call system procedures (grants full rights)
 */
private static void configureCatalogGroup(org.voltdb.catalog.Group catGroup,
        boolean adhoc, boolean defaultproc, boolean defaultprocread, boolean sysproc)
{
    catGroup.setSql(adhoc);
    catGroup.setSqlread(catGroup.getSql());
    catGroup.setDefaultproc(defaultproc || catGroup.getSql());
    catGroup.setDefaultprocread(defaultprocread || catGroup.getDefaultproc() || catGroup.getSqlread());
    if (sysproc) {
        // System-procedure permission implies full admin rights.
        catGroup.setAdmin(true);
        catGroup.setSql(true);
        catGroup.setSqlread(true);
        catGroup.setDefaultproc(true);
        catGroup.setDefaultprocread(true);
    }
}
/**
 * Common code for schema loading shared by loadSchema and compileDatabaseNode
 *
 * @param db the database entry in the catalog
 * @param hsql an interface to the hsql frontend, initialized and potentially reused by the caller.
 * @param voltDdlTracker non-standard VoltDB schema annotations, initially those from a project file
 * @param cannonicalDDLIfAny canonical DDL from a previous catalog to load first, or null
 * @param previousDBIfAny database from a previous catalog whose compiled statements may be reused, or null
 * @param schemaReaders the ddl input files
 * @param export optional export connector configuration (from the project file)
 * @param classDependencies optional additional jar files required by procedures
 * @param whichProcs indicates which ddl-defined procedures to load: none, single-statement, or all
 * @param jarOutput The in-memory jar to populate or null if the caller doesn't provide one.
 */
private void compileDatabase(
Database db,
HSQLInterface hsql,
VoltDDLElementTracker voltDdlTracker,
VoltCompilerReader cannonicalDDLIfAny,
Database previousDBIfAny,
List<VoltCompilerReader> schemaReaders,
ExportType export,
Collection<Class<?>> classDependencies,
DdlProceduresToLoad whichProcs,
InMemoryJarfile jarOutput)
throws VoltCompilerException
{
// Actually parse and handle all the DDL
// DDLCompiler also provides partition descriptors for DDL PARTITION
// and REPLICATE statements.
final DDLCompiler ddlcompiler = new DDLCompiler(this, hsql, voltDdlTracker, m_classLoader);
// The canonical DDL (if any) must load before the per-invocation readers so
// pre-existing schema is in place when new DDL refers to it.
if (cannonicalDDLIfAny != null) {
// add the file object's path to the list of files for the jar
m_ddlFilePaths.put(cannonicalDDLIfAny.getName(), cannonicalDDLIfAny.getPath());
ddlcompiler.loadSchema(cannonicalDDLIfAny, db, whichProcs);
}
m_dirtyTables.clear();
for (final VoltCompilerReader schemaReader : schemaReaders) {
// Track the current file name only for error reporting; restore it afterwards.
String origFilename = m_currentFilename;
try {
if (m_currentFilename == null || m_currentFilename.equals(NO_FILENAME))
m_currentFilename = schemaReader.getName();
// add the file object's path to the list of files for the jar
m_ddlFilePaths.put(schemaReader.getName(), schemaReader.getPath());
ddlcompiler.loadSchema(schemaReader, db, whichProcs);
}
finally {
m_currentFilename = origFilename;
}
}
ddlcompiler.compileToCatalog(db);
// Actually parse and handle all the partitions
// this needs to happen before procedures are compiled
String msg = "In database, ";
final CatalogMap<Table> tables = db.getTables();
for (Table table: tables) {
String tableName = table.getTypeName();
// Partition map keys are lower-cased table names.
if (voltDdlTracker.m_partitionMap.containsKey(tableName.toLowerCase())) {
String colName = voltDdlTracker.m_partitionMap.get(tableName.toLowerCase());
// A null column name indicates a replicated table. Ignore it here
// because it defaults to replicated in the catalog.
if (colName != null) {
assert(tables.getIgnoreCase(tableName) != null);
if (table.getMaterializer() != null) {
msg += "the materialized view is automatically partitioned based on source table. "
+ "Invalid PARTITION statement on view table " + tableName + ".";
throw new VoltCompilerException(msg);
}
final Column partitionCol = table.getColumns().getIgnoreCase(colName);
// make sure the column exists
if (partitionCol == null) {
msg += "PARTITION has unknown COLUMN '" + colName + "'";
throw new VoltCompilerException(msg);
}
// make sure the column is marked not-nullable
if (partitionCol.getNullable() == true) {
msg += "Partition column '" + tableName + "." + colName + "' is nullable. " +
"Partition columns must be constrained \"NOT NULL\".";
throw new VoltCompilerException(msg);
}
// verify that the partition column is a supported type
VoltType pcolType = VoltType.get((byte) partitionCol.getType());
switch (pcolType) {
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case STRING:
case VARBINARY:
break;
default:
msg += "Partition column '" + tableName + "." + colName + "' is not a valid type. " +
"Partition columns must be an integer or varchar type.";
throw new VoltCompilerException(msg);
}
table.setPartitioncolumn(partitionCol);
table.setIsreplicated(false);
// Check valid indexes, whether they contain the partition column or not.
for (Index index: table.getIndexes()) {
checkValidPartitionTableIndex(index, partitionCol, tableName);
}
// Set the partitioning of destination tables of associated views.
// If a view's source table is replicated, then a full scan of the
// associated view is single-sited. If the source is partitioned,
// a full scan of the view must be distributed, unless it is filtered
// by the original table's partitioning key, which, to be filtered,
// must also be a GROUP BY key.
final CatalogMap<MaterializedViewInfo> views = table.getViews();
for (final MaterializedViewInfo mvi : views) {
mvi.getDest().setIsreplicated(false);
setGroupedTablePartitionColumn(mvi, partitionCol);
}
}
}
}
// add database estimates info
addDatabaseEstimatesInfo(m_estimates, db);
// Process DDL exported tables
NavigableMap<String, NavigableSet<String>> exportTables = voltDdlTracker.getExportedTables();
for (Entry<String, NavigableSet<String>> e : exportTables.entrySet()) {
String targetName = e.getKey();
for (String tableName : e.getValue()) {
addExportTableToConnector(targetName, tableName, db);
}
}
// Process and add exports and connectors to the catalog
// Must do this before compiling procedures to deny updates
// on append-only tables.
if (export != null) {
// currently, only a single connector is allowed
compileExport(export, db);
}
// process DRed tables
for (Entry<String, String> drNode: voltDdlTracker.getDRedTables().entrySet()) {
compileDRTable(drNode, db);
}
if (whichProcs != DdlProceduresToLoad.NO_DDL_PROCEDURES) {
Collection<ProcedureDescriptor> allProcs = voltDdlTracker.getProcedureDescriptors();
CatalogMap<Procedure> previousProcsIfAny = null;
if (previousDBIfAny != null) {
previousProcsIfAny = previousDBIfAny.getProcedures();
}
compileProcedures(db, hsql, allProcs, classDependencies, whichProcs, previousProcsIfAny, jarOutput);
}
// add extra classes from the DDL
m_addedClasses = voltDdlTracker.m_extraClassses.toArray(new String[0]);
// Also, grab the IMPORT CLASS lines so we can add them to the
// generated DDL
m_importLines = voltDdlTracker.m_importLines.toArray(new String[0]);
addExtraClasses(jarOutput);
compileRowLimitDeleteStmts(db, hsql, ddlcompiler.getLimitDeleteStmtToXmlEntries());
}
/**
 * Compile the DELETE statements attached to row-limit constraints
 * (LIMIT PARTITION ROWS ... EXECUTE (DELETE ...)) and install their plans
 * into the catalog.
 *
 * @param db the database entry in the catalog
 * @param hsql an interface to the hsql frontend
 * @param deleteStmtXmlEntries catalog Statement paired with its parsed VoltXML
 * @throws VoltCompilerException on planning failure
 */
private void compileRowLimitDeleteStmts(
Database db,
HSQLInterface hsql,
Collection<Map.Entry<Statement, VoltXMLElement>> deleteStmtXmlEntries)
throws VoltCompilerException {
for (Map.Entry<Statement, VoltXMLElement> entry : deleteStmtXmlEntries) {
Statement stmt = entry.getKey();
VoltXMLElement xml = entry.getValue();
// choose DeterminismMode.FASTER for determinism, and rely on the planner to error out
// if we generated a plan that is content-non-deterministic.
StatementCompiler.compileStatementAndUpdateCatalog(this,
hsql,
db.getCatalog(),
db,
m_estimates,
stmt,
xml,
stmt.getSqltext(),
null, // no user-supplied join order
DeterminismMode.FASTER,
StatementPartitioning.partitioningForRowLimitDelete());
}
}
/**
 * Validate a unique index declared on a partitioned table. Uniqueness can only
 * be enforced globally when the index covers the partitioning column; otherwise
 * the user must explicitly declare ASSUMEUNIQUE. Conversely, ASSUMEUNIQUE on an
 * index that does cover the partitioning column is rejected as misleading.
 *
 * @param index the index to validate (non-unique indexes are skipped)
 * @param partitionCol the table's partitioning column
 * @param tableName the table's name, for error messages
 * @throws VoltCompilerException if the UNIQUE/ASSUMEUNIQUE usage is invalid
 */
private void checkValidPartitionTableIndex(Index index, Column partitionCol, String tableName)
        throws VoltCompilerException {
    // skip checking for non-unique indexes.
    if (!index.getUnique()) {
        return;
    }

    boolean containsPartitionColumn = false;
    String jsonExpr = index.getExpressionsjson();
    // if this is a pure-column index...
    if (jsonExpr.isEmpty()) {
        for (ColumnRef cref : index.getColumns()) {
            Column col = cref.getColumn();
            // unique index contains partitioned column
            if (col.equals(partitionCol)) {
                containsPartitionColumn = true;
                break;
            }
        }
    }
    // if this is a fancy expression-based index...
    else {
        try {
            int partitionColIndex = partitionCol.getIndex();
            List<AbstractExpression> indexExpressions = AbstractExpression.fromJSONArrayString(jsonExpr, null);
            for (AbstractExpression expr: indexExpressions) {
                if (expr instanceof TupleValueExpression &&
                        ((TupleValueExpression) expr).getColumnIndex() == partitionColIndex ) {
                    containsPartitionColumn = true;
                    break;
                }
            }
        } catch (JSONException e) {
            // Our own serialized expression JSON failed to parse -- a compiler bug.
            // Previously this was printStackTrace + assert(false), which in non-assert
            // builds silently treated the index as not covering the partition column.
            // Fail loudly instead, preserving the cause.
            throw new VoltCompilerException(String.format(
                    "Failed to deserialize index expressions for index %s on partitioned table %s.",
                    index.getTypeName(), tableName), e);
        }
    }

    if (containsPartitionColumn) {
        if (index.getAssumeunique()) {
            // No format arguments, so a plain string constant suffices here.
            String exceptionMsg = "ASSUMEUNIQUE is not valid " +
                    "for an index that includes the partitioning column. Please use UNIQUE instead.";
            throw new VoltCompilerException(exceptionMsg);
        }
    }
    else if ( ! index.getAssumeunique()) {
        // Throw compiler exception.
        String indexName = index.getTypeName();
        String keyword = "";
        if (indexName.startsWith(HSQLInterface.AUTO_GEN_PRIMARY_KEY_PREFIX)) {
            indexName = "PRIMARY KEY";
            keyword = "PRIMARY KEY";
        } else {
            indexName = "UNIQUE INDEX " + indexName;
            keyword = "UNIQUE";
        }

        String exceptionMsg = "Invalid use of " + keyword +
                ". The " + indexName + " on the partitioned table " + tableName +
                " does not include the partitioning column " + partitionCol.getName() +
                ". See the documentation for the 'CREATE TABLE' and 'CREATE INDEX' commands and the 'ASSUMEUNIQUE' keyword.";
        throw new VoltCompilerException(exceptionMsg);
    }
}
/**
 * Once the DDL file is over, take all of the extra classes found and add them to the jar.
 * Afterwards m_addedClasses is narrowed to the classes actually added, so the
 * success summary and generated DDL only mention real additions.
 *
 * @param jarOutput the in-memory jar to receive the classes
 * @throws VoltCompilerException if any listed class cannot be loaded or added
 */
private void addExtraClasses(final InMemoryJarfile jarOutput) throws VoltCompilerException {
    List<String> addedClasses = new ArrayList<String>(m_addedClasses.length);
    for (String className : m_addedClasses) {
        /*
         * Only add the class if it isn't already in the output jar.
         * The jar will be pre-populated when performing an automatic
         * catalog version upgrade.
         */
        if (!jarOutput.containsKey(className)) {
            try {
                Class<?> clz = Class.forName(className, true, m_classLoader);
                if (addClassToJar(jarOutput, clz)) {
                    addedClasses.add(className);
                }
            }
            catch (Exception e) {
                // Preserve the original exception as the cause so the real
                // failure (ClassNotFound, linkage error, IO error...) isn't lost.
                throw new VoltCompilerException(String.format(
                        "Class %s could not be loaded/found/added to the jar.", className), e);
            }
        }
    }
    // reset the added classes to the actual added classes
    m_addedClasses = addedClasses.toArray(new String[0]);
}
/**
 * Compile all requested DDL/project-defined procedures into the catalog.
 *
 * @param db the database entry in the catalog
 * @param hsql an interface to the hsql frontend, initialized and potentially reused by the caller.
 * @param allProcs descriptors of all procedures gathered by the DDL tracker
 * @param classDependencies additional classes to bundle into the jar (Java procs only)
 * @param whichProcs indicates which ddl-defined procedures to load: none, single-statement, or all
 * @param prevProcsIfAny procedures from a previous catalog, used to seed the statement cache, or null
 * @param jarOutput the in-memory jar to populate with compiled procedure classes
 * @throws VoltCompilerException
 */
private void compileProcedures(Database db,
HSQLInterface hsql,
Collection<ProcedureDescriptor> allProcs,
Collection<Class<?>> classDependencies,
DdlProceduresToLoad whichProcs,
CatalogMap<Procedure> prevProcsIfAny,
InMemoryJarfile jarOutput) throws VoltCompilerException
{
// build a cache of previous SQL stmts
m_previousCatalogStmts.clear();
if (prevProcsIfAny != null) {
for (Procedure prevProc : prevProcsIfAny) {
for (Statement prevStmt : prevProc.getStatements()) {
addStatementToCache(prevStmt);
}
}
}
// Ignore class dependencies if ignoring java stored procs.
// This extra qualification anticipates some (undesirable) overlap between planner
// testing and additional library code in the catalog jar file.
// That is, if it became possible for ddl file syntax to trigger additional
// (non-stored-procedure) class loading into the catalog jar,
// planner-only testing would find it convenient to ignore those
// dependencies for its "dry run" on an unchanged application ddl file.
if (whichProcs == DdlProceduresToLoad.ALL_DDL_PROCEDURES) {
// Add all the class dependencies to the output jar
for (final Class<?> classDependency : classDependencies) {
addClassToJar(jarOutput, classDependency);
}
}
final List<ProcedureDescriptor> procedures = new ArrayList<>();
procedures.addAll(allProcs);
// Actually parse and handle all the Procedures
for (final ProcedureDescriptor procedureDescriptor : procedures) {
final String procedureName = procedureDescriptor.m_className;
// m_currentFilename is set per-procedure purely for error reporting.
if (procedureDescriptor.m_singleStmt == null) {
// Java procedure: report against its simple class-file name.
m_currentFilename = procedureName.substring(procedureName.lastIndexOf('.') + 1);
m_currentFilename += ".class";
}
else if (whichProcs == DdlProceduresToLoad.ONLY_SINGLE_STATEMENT_PROCEDURES) {
// In planner test mode, especially within the plannerTester framework,
// ignore any java procedures referenced in ddl CREATE PROCEDURE statements to allow
// re-use of actual application ddl files without introducing class dependencies.
// This potentially allows automatic plannerTester regression test support
// for all the single-statement procedures of an unchanged application ddl file.
continue;
}
else {
m_currentFilename = procedureName;
}
ProcedureCompiler.compile(this, hsql, m_estimates, m_catalog, db, procedureDescriptor, jarOutput);
}
// done handling files
m_currentFilename = NO_FILENAME;
// allow gc to reclaim any cache memory here
m_previousCatalogStmts.clear();
}
/**
 * Propagate a partitioned source table's partitioning column to a materialized
 * view's destination table when (and only when) the partition column is one of
 * the view's GROUP BY keys. Otherwise the view's table is left without a
 * partitioning column (seemingly randomly distributed).
 *
 * @param mvi the materialized view whose destination table may be partitioned
 * @param partitionColumn the source table's partitioning column
 * @throws VoltCompilerException if the view's group-by expression JSON cannot be parsed
 */
private void setGroupedTablePartitionColumn(MaterializedViewInfo mvi, Column partitionColumn)
        throws VoltCompilerException {
    // A view of a replicated table is replicated.
    // A view of a partitioned table is partitioned -- regardless of whether it has a partition key
    // -- it certainly isn't replicated!
    // If the partitioning column is grouped, its counterpart is the partitioning column of the view table.
    // Otherwise, the view table just doesn't have a partitioning column
    // -- it is seemingly randomly distributed,
    // and its grouped columns are only locally unique but not globally unique.
    Table destTable = mvi.getDest();
    // Get the grouped columns in "index" order.
    // This order corresponds to the iteration order of the MaterializedViewInfo's group by columns.
    List<Column> destColumnArray = CatalogUtil.getSortedCatalogItems(destTable.getColumns(), "index");
    String partitionColName = partitionColumn.getTypeName(); // Note getTypeName gets the column name -- go figure.

    if (mvi.getGroupbycols().size() > 0) {
        // Simple-column group-bys: match the partition column by name.
        int index = 0;
        for (ColumnRef cref : CatalogUtil.getSortedCatalogItems(mvi.getGroupbycols(), "index")) {
            Column srcCol = cref.getColumn();
            if (srcCol.getName().equals(partitionColName)) {
                Column destCol = destColumnArray.get(index);
                destTable.setPartitioncolumn(destCol);
                return;
            }
            ++index;
        }
    } else {
        // Complex (expression) group-bys are serialized as JSON.
        String complexGroupbyJson = mvi.getGroupbyexpressionsjson();
        assert(complexGroupbyJson != null && complexGroupbyJson.length() > 0);
        if (complexGroupbyJson.length() > 0) {
            int partitionColIndex = partitionColumn.getIndex();
            List<AbstractExpression> mvComplexGroupbyCols;
            try {
                mvComplexGroupbyCols = AbstractExpression.fromJSONArrayString(complexGroupbyJson, null);
            } catch (JSONException e) {
                // Previously this was printStackTrace-and-continue, which fell
                // straight into an NPE on the null list below. Report properly.
                throw new VoltCompilerException(String.format(
                        "Failed to deserialize group-by expressions of view table %s.",
                        destTable.getTypeName()), e);
            }
            int index = 0;
            for (AbstractExpression expr: mvComplexGroupbyCols) {
                if (expr instanceof TupleValueExpression) {
                    TupleValueExpression tve = (TupleValueExpression) expr;
                    if (tve.getColumnIndex() == partitionColIndex) {
                        Column destCol = destColumnArray.get(index);
                        destTable.setPartitioncolumn(destCol);
                        return;
                    }
                }
                ++index;
            }
        }
    }
}
/** Provide a feedback path to monitor plan output via harvestCapturedDetail */
public void enableDetailedCapture() {
// Allocating the list turns capture on; captureDiagnostic* methods check for null.
m_capturedDiagnosticDetail = new ArrayList<String>();
}
/**
 * Access recent plan output, for diagnostic purposes.
 * Hands ownership of the capture buffer to the caller and disables further
 * capture until enableDetailedCapture() is called again.
 */
public List<String> harvestCapturedDetail() {
    final List<String> captured = m_capturedDiagnosticDetail;
    m_capturedDiagnosticDetail = null;
    return captured;
}
/** Capture plan context info -- statement, cost, high-level "explain". */
public void captureDiagnosticContext(String planDescription) {
    // No-op unless detailed capture has been enabled.
    if (m_capturedDiagnosticDetail != null) {
        m_capturedDiagnosticDetail.add(planDescription);
    }
}
/** Capture plan content in terse json format. */
public void captureDiagnosticJsonFragment(String json) {
    // No-op unless detailed capture has been enabled.
    if (m_capturedDiagnosticDetail != null) {
        m_capturedDiagnosticDetail.add(json);
    }
}
/**
 * Placeholder for populating per-table size estimates used by the planner.
 * Currently a no-op; the intended implementation is sketched below.
 */
static void addDatabaseEstimatesInfo(final DatabaseEstimates estimates, final Database db) {
// Not implemented yet. Don't panic.
/*for (Table table : db.getTables()) {
DatabaseEstimates.TableEstimates tableEst = new DatabaseEstimates.TableEstimates();
tableEst.maxTuples = 1000000;
tableEst.minTuples = 100000;
estimates.tables.put(table, tableEst);
}*/
}
/**
 * Translate a project-file &lt;procedure&gt; element into a ProcedureDescriptor.
 * A procedure with an inline &lt;sql&gt; body becomes a single-statement procedure;
 * otherwise the named Java class is loaded and wrapped.
 *
 * @param xmlproc the parsed procedure element
 * @return a descriptor ready for the DDL tracker
 * @throws VoltCompilerException if a Java procedure supplies partitioninfo in
 *         the project file, or its class cannot be loaded
 */
ProcedureDescriptor getProcedure(
org.voltdb.compiler.projectfile.ProceduresType.Procedure xmlproc)
throws VoltCompilerException
{
    // @groups -- comma-separated list of role names allowed to invoke the proc.
    final ArrayList<String> groups = new ArrayList<String>();
    if (xmlproc.getGroups() != null) {
        groups.addAll(Arrays.asList(xmlproc.getGroups().split(",")));
    }

    // @class
    final String classattr = xmlproc.getClazz();

    // If procedure/sql is present, this is a "statement procedure"
    if (xmlproc.getSql() != null) {
        // null partattr means multi-partition
        // set empty attributes to multi-partition
        String partattr = xmlproc.getPartitioninfo();
        if (partattr != null && partattr.length() == 0) {
            partattr = null;
        }
        return new ProcedureDescriptor(groups, classattr,
                xmlproc.getSql().getValue(),
                xmlproc.getSql().getJoinorder(),
                partattr, false, null, null, null);
    }

    // Java procedure: partition info must come from the @ProcInfo annotation.
    if (xmlproc.getPartitioninfo() != null) {
        String msg = "Java procedures must specify partition info using " +
                "@ProcInfo annotation in the Java class implementation " +
                "and may not use the @partitioninfo project file procedure attribute.";
        throw new VoltCompilerException(msg);
    }

    final Class<?> clazz;
    try {
        clazz = Class.forName(classattr, true, m_classLoader);
    }
    catch (ClassNotFoundException e) {
        throw new VoltCompilerException(String.format(
                "Cannot load class for procedure: %s",
                classattr));
    }
    catch (Throwable cause) {
        // We are here because the class was found and the initializer of the class
        // threw an error we can't anticipate. So we will wrap the error with a
        // runtime exception that we can trap in our code.
        throw new VoltCompilerException(String.format(
                "Cannot load class for procedure: %s",
                classattr), cause);
    }
    return new ProcedureDescriptor(groups, Language.JAVA, null, clazz);
}
/**
 * Resolve a project-file &lt;classDependency&gt; element to its loaded Class.
 *
 * @param xmlclassdep the parsed classDependency element
 * @return the class named by the element's "class" attribute
 * @throws VoltCompilerException if the attribute is empty or the class is not
 *         found on the compiler's class loader
 */
Class<?> getClassDependency(Classdependency xmlclassdep)
throws VoltCompilerException
{
    final String className = xmlclassdep.getClazz();

    // schema doesn't currently enforce this.. but could I guess.
    if (className.length() == 0) {
        throw new VoltCompilerException("\"classDependency\" element has empty \"class\" attribute.");
    }

    try {
        return Class.forName(className, true, m_classLoader);
    } catch (final ClassNotFoundException e) {
        // Preserve the original exception as the cause for easier debugging.
        throw new VoltCompilerException(
                "\"classDependency\" can not find class " + className + " in classpath", e);
    }
}
/**
 * Add any project-file export tables to the default export connector.
 * Warns (but does not fail) when the export element lists no tables.
 *
 * @param export the parsed export element, possibly null
 * @param catdb the database entry in the catalog
 * @throws VoltCompilerException if an export table is invalid
 */
private void compileExport(final ExportType export, final Database catdb)
throws VoltCompilerException
{
    // Test the error paths before touching the catalog
    if (export == null) {
        return;
    }

    // This code is used for adding export tables to the default group connector
    if (export.getTables() == null) {
        compilerLog.warn("Export defined with no <tables> element");
        return;
    }

    if (export.getTables().getTable().isEmpty()) {
        compilerLog.warn("Export defined with an empty <tables> element");
    }
    for (Tables.Table xmltable : export.getTables().getTable()) {
        addExportTableToConnector(Constants.DEFAULT_EXPORT_CONNECTOR_NAME, xmltable.getName(), catdb);
    }
}
/**
 * Register a table as export-only under the named connector, creating the
 * connector entry on demand. Rejects tables that are materialized views, are
 * sources of materialized views, or carry indexes; forces replicated export
 * tables to (keyless) partitioned so rows distribute across partitions.
 *
 * @param targetName the export connector (target) name
 * @param tableName the table to export; must exist in the catalog
 * @param catdb the database entry in the catalog
 * @throws VoltCompilerException if the table is missing, invalid for export,
 *         or already exported
 */
void addExportTableToConnector(final String targetName, final String tableName, final Database catdb)
throws VoltCompilerException
{
assert tableName != null && ! tableName.trim().isEmpty() && catdb != null;
// Catalog Connector
org.voltdb.catalog.Connector catconn = catdb.getConnectors().getIgnoreCase(targetName);
if (catconn == null) {
catconn = catdb.getConnectors().add(targetName);
}
org.voltdb.catalog.Table tableref = catdb.getTables().getIgnoreCase(tableName);
if (tableref == null) {
throw new VoltCompilerException("While configuring export, table " + tableName + " was not present in " +
"the catalog.");
}
if (CatalogUtil.isTableMaterializeViewSource(catdb, tableref)) {
compilerLog.error("While configuring export, table " + tableName + " is a source table " +
"for a materialized view. Export only tables do not support views.");
throw new VoltCompilerException("Export table configured with materialized view.");
}
if (tableref.getMaterializer() != null)
{
compilerLog.error("While configuring export, table " + tableName + " is a " +
"materialized view. A view cannot be an export table.");
throw new VoltCompilerException("View configured as an export table");
}
if (tableref.getIndexes().size() > 0) {
compilerLog.error("While configuring export, table " + tableName + " has indexes defined. " +
"Export tables can't have indexes (including primary keys).");
throw new VoltCompilerException("Table with indexes configured as an export table");
}
if (tableref.getIsreplicated()) {
// if you don't specify partition columns, make
// export tables partitioned, but on no specific column (iffy)
tableref.setIsreplicated(false);
tableref.setPartitioncolumn(null);
}
org.voltdb.catalog.ConnectorTableInfo connTableInfo =
catconn.getTableinfo().getIgnoreCase(tableName);
if (connTableInfo == null) {
connTableInfo = catconn.getTableinfo().add(tableName);
connTableInfo.setTable(tableref);
connTableInfo.setAppendonly(true);
}
else {
// Duplicate export declaration for the same table is an error.
throw new VoltCompilerException(String.format(
"Table \"%s\" is already exported", tableName
));
}
}
/**
 * Apply a DR TABLE declaration to the catalog: enable DR on the named table
 * unless the tracked action is DISABLE. Materialized views cannot be DRed.
 *
 * @param drNode table-name / action pair from the DDL tracker
 * @param db the database entry in the catalog
 * @throws VoltCompilerException if the table is missing or is a view
 */
void compileDRTable(final Entry<String, String> drNode, final Database db)
throws VoltCompilerException
{
    final String tableName = drNode.getKey();
    final String action = drNode.getValue();

    final org.voltdb.catalog.Table tableref = db.getTables().getIgnoreCase(tableName);
    if (tableref == null) {
        throw new VoltCompilerException("While configuring dr, table " + tableName + " was not present in the catalog");
    }
    if (tableref.getMaterializer() != null) {
        throw new VoltCompilerException("While configuring dr, table " + tableName + " is a materialized view." +
                " DR does not support materialized view.");
    }

    // Anything other than an explicit DISABLE turns DR on for the table.
    tableref.setIsdred(!action.equalsIgnoreCase("DISABLE"));
}
// Usage messages for new and legacy syntax.
// New syntax compiles DDL files directly; legacy syntax takes a project XML file.
static final String usageNew = "VoltCompiler <output-JAR> <input-DDL> ...";
static final String usageLegacy = "VoltCompiler <project-file> <output-JAR>";
/**
 * Main
 *
 * Incoming arguments:
 *
 * New syntax: OUTPUT_JAR INPUT_DDL ...
 * Legacy syntax: PROJECT_FILE OUTPUT_JAR
 *
 * Exits with a non-zero status (after summarizing errors) on any failure.
 *
 * @param args arguments (see above)
 */
public static void main(final String[] args)
{
// passing true to constructor indicates the compiler is being run in standalone mode
final VoltCompiler compiler = new VoltCompiler(true);
boolean success = false;
// The syntax is distinguished purely by the first argument's file extension.
if (args.length > 0 && args[0].toLowerCase().endsWith(".jar")) {
// The first argument is *.jar for the new syntax.
if (args.length >= 2) {
// Check for accidental .jar or .xml files specified for argument 2
// to catch accidental incomplete use of the legacy syntax.
if (args[1].toLowerCase().endsWith(".xml") || args[1].toLowerCase().endsWith(".jar")) {
System.err.println("Error: Expecting a DDL file as the second argument.\n"
+ " .xml and .jar are invalid DDL file extensions.");
System.exit(-1);
}
try {
success = compiler.compileFromDDL(args[0], ArrayUtils.subarray(args, 1, args.length));
} catch (VoltCompilerException e) {
System.err.printf("Compiler exception: %s\n", e.getMessage());
}
}
else {
System.err.printf("Usage: %s\n", usageNew);
System.exit(-1);
}
}
else if (args.length > 0 && args[0].toLowerCase().endsWith(".xml")) {
// The first argument is *.xml for the legacy syntax.
if (args.length == 2) {
// warn the user that this is deprecated
consoleLog.warn("Compiling from a project file is deprecated and will be removed in a future release.");
success = compiler.compileWithProjectXML(args[0], args[1]);
}
else {
System.err.printf("Usage: %s\n", usageLegacy);
System.exit(-1);
}
}
else {
// Can't recognize the arguments or there are no arguments.
System.err.printf("Usage: %s\n %s\n", usageNew, usageLegacy);
System.exit(-1);
}
// Should have exited if inadequate arguments were provided.
assert(args.length > 0);
// Exit with error code if we failed
if (!success) {
compiler.summarizeErrors(System.out, null);
System.exit(-1);
}
compiler.summarizeSuccess(System.out, null, args[0]);
}
public void summarizeSuccess(PrintStream outputStream, PrintStream feedbackStream, String jarOutputPath) {
if (outputStream != null) {
Database database = getCatalogDatabase();
outputStream.println("
outputStream.println("Successfully created " + jarOutputPath);
for (String ddl : m_ddlFilePaths.keySet()) {
outputStream.println("Includes schema: " + m_ddlFilePaths.get(ddl));
}
outputStream.println();
// Accumulate a summary of the summary for a briefer report
ArrayList<Procedure> nonDetProcs = new ArrayList<Procedure>();
ArrayList<Procedure> tableScans = new ArrayList<Procedure>();
int countSinglePartition = 0;
int countMultiPartition = 0;
int countDefaultProcs = 0;
for (Procedure p : database.getProcedures()) {
if (p.getSystemproc()) {
continue;
}
// Aggregate statistics about MP/SP/SEQ
if (!p.getDefaultproc()) {
if (p.getSinglepartition()) {
countSinglePartition++;
}
else {
countMultiPartition++;
}
}
else {
countDefaultProcs++;
}
if (p.getHasseqscans()) {
tableScans.add(p);
}
outputStream.printf("[%s][%s] %s\n",
p.getSinglepartition() ? "SP" : "MP",
p.getReadonly() ? "READ" : "WRITE",
p.getTypeName());
for (Statement s : p.getStatements()) {
String seqScanTag = "";
if (s.getSeqscancount() > 0) {
seqScanTag = "[TABLE SCAN] ";
}
String determinismTag = "";
// if the proc is a java stored proc that is read&write,
// output determinism warnings
if (p.getHasjava() && (!p.getReadonly())) {
if (s.getIscontentdeterministic() == false) {
determinismTag = "[NDC] ";
nonDetProcs.add(p);
}
else if (s.getIsorderdeterministic() == false) {
determinismTag = "[NDO] ";
nonDetProcs.add(p);
}
}
String statementLine;
String sqlText = s.getSqltext();
sqlText = squeezeWhitespace(sqlText);
if (seqScanTag.length() + determinismTag.length() + sqlText.length() > 80) {
statementLine = " " + (seqScanTag + determinismTag + sqlText).substring(0, 80) + "...";
} else {
statementLine = " " + seqScanTag + determinismTag + sqlText;
}
outputStream.println(statementLine);
}
outputStream.println();
}
outputStream.println("
if (m_addedClasses.length > 0) {
if (m_addedClasses.length > 10) {
outputStream.printf("Added %d additional classes to the catalog jar.\n\n",
m_addedClasses.length);
}
else {
String logMsg = "Added the following additional classes to the catalog jar:\n";
for (String className : m_addedClasses) {
logMsg += " " + className + "\n";
}
outputStream.println(logMsg);
}
outputStream.println("
}
// post-compile summary and legend.
outputStream.printf(
"Catalog contains %d built-in CRUD procedures.\n" +
"\tSimple insert, update, delete, upsert and select procedures are created\n" +
"\tautomatically for convenience.\n\n",
countDefaultProcs);
if (countSinglePartition > 0) {
outputStream.printf(
"[SP] Catalog contains %d single partition procedures.\n" +
"\tSingle partition procedures run in parallel and scale\n" +
"\tas partitions are added to a cluster.\n\n",
countSinglePartition);
}
if (countMultiPartition > 0) {
outputStream.printf(
"[MP] Catalog contains %d multi-partition procedures.\n" +
"\tMulti-partition procedures run globally at all partitions\n" +
"\tand do not run in parallel with other procedures.\n\n",
countMultiPartition);
}
if (!tableScans.isEmpty()) {
outputStream.printf("[TABLE SCAN] Catalog contains %d procedures that use a table scan:\n\n",
tableScans.size());
for (Procedure p : tableScans) {
outputStream.println("\t\t" + p.getClassname());
}
outputStream.printf(
"\n\tTable scans do not use indexes and may become slower as tables grow.\n\n");
}
if (!nonDetProcs.isEmpty()) {
outputStream.println(
"[NDO][NDC] NON-DETERMINISTIC CONTENT OR ORDER WARNING:\n" +
"\tThe procedures listed below contain non-deterministic queries.\n");
for (Procedure p : nonDetProcs) {
outputStream.println("\t\t" + p.getClassname());
}
outputStream.printf(
"\n" +
"\tUsing the output of these queries as input to subsequent\n" +
"\twrite queries can result in differences between replicated\n" +
"\tpartitions at runtime, forcing VoltDB to shutdown the cluster.\n" +
"\tReview the compiler messages above to identify the offending\n" +
"\tSQL statements (marked as \"[NDO] or [NDC]\"). Add a unique\n" +
"\tindex to the schema or an explicit ORDER BY clause to the\n" +
"\tquery to make these queries deterministic.\n\n");
}
if (countSinglePartition == 0 && countMultiPartition > 0) {
outputStream.printf(
"ALL MULTI-PARTITION WARNING:\n" +
"\tAll of the user procedures are multi-partition. This often\n" +
"\tindicates that the application is not utilizing VoltDB partitioning\n" +
"\tfor best performance. For information on VoltDB partitioning, see:\n"+
"\thttp://voltdb.com/docs/UsingVoltDB/ChapAppDesign.php\n\n");
}
if (m_reportPath != null) {
outputStream.println("
outputStream.println(String.format(
"Full catalog report can be found at file:
m_reportPath));
}
outputStream.println("
}
if (feedbackStream != null) {
for (Feedback fb : m_warnings) {
feedbackStream.println(fb.getLogString());
}
for (Feedback fb : m_infos) {
feedbackStream.println(fb.getLogString());
}
}
}
/**
 * Return a copy of the input sqltext with each run of successive whitespace characters replaced by a single space.
 * This is just for informal feedback purposes, so quoting is not respected.
 * @param sqltext
 * @return a possibly modified copy of the input sqltext
 **/
private static String squeezeWhitespace(String sqltext) {
    return sqltext.replaceAll("\\s+", " ");
}
public void summarizeErrors(PrintStream outputStream, PrintStream feedbackStream) {
if (outputStream != null) {
outputStream.println("
outputStream.println("Catalog compilation failed.");
outputStream.println("
}
if (feedbackStream != null) {
for (Feedback fb : m_errors) {
feedbackStream.println(fb.getLogString());
}
}
}
// Classes already copied into the output jar during the current compile, used
// by addClassToJar() to avoid re-adding a class (and recursing into its inner
// classes) more than once.
// NOTE: static and shared across compiler instances; it is cleared at the top
// of compileCatalog() ("this needs to be reset in the main compile func").
private static final HashSet<Class<?>> cachedAddedClasses = new HashSet<Class<?>>();
/**
 * Find all inner classes of the given class by scanning the location the
 * class was loaded from: either an {@link InMemoryJarfile}'s {@code JarLoader},
 * an on-disk jar file, or a plain directory of class files.
 *
 * @param c the outer class whose inner classes should be located
 * @return an immutable list of the loaded inner classes (possibly empty)
 * @throws VoltCompilerException if the class file cannot be located or an
 *         inner class fails to load
 */
public List<Class<?>> getInnerClasses(Class<?> c)
        throws VoltCompilerException {
    ImmutableList.Builder<Class<?>> builder = ImmutableList.builder();
    ClassLoader cl = c.getClassLoader();
    if (cl == null) {
        // Bootstrap-loaded classes report a null loader; fall back to the
        // context class loader so getResource() below still works.
        cl = Thread.currentThread().getContextClassLoader();
    }
    // if loading from an InMemoryJarfile, the process is a bit different...
    if (cl instanceof JarLoader) {
        String[] classes = ((JarLoader) cl).getInnerClassesForClass(c.getName());
        for (String innerName : classes) {
            Class<?> clz = null;
            try {
                clz = cl.loadClass(innerName);
            }
            catch (ClassNotFoundException e) {
                String msg = "Unable to load " + c + " inner class " + innerName +
                        " from in-memory jar representation.";
                throw new VoltCompilerException(msg);
            }
            assert(clz != null);
            builder.add(clz);
        }
    }
    else {
        String stem = c.getName().replace('.', '/');
        String cpath = stem + ".class";
        URL curl = cl.getResource(cpath);
        if (curl == null) {
            throw new VoltCompilerException(String.format(
                    "Failed to find class file %s in jar.", cpath));
        }
        // load from an on-disk jar
        if ("jar".equals(curl.getProtocol())) {
            // BUGFIX: the '.' before "class" was previously an unescaped regex
            // metacharacter, so a name like "Foo$BarXclass" could match; escape
            // it so only a literal ".class" suffix matches.
            Pattern nameRE = Pattern.compile("\\A(" + stem + "\\$[^/]+)\\.class\\z");
            String jarFN;
            try {
                jarFN = URLDecoder.decode(curl.getFile(), "UTF-8");
            }
            catch (UnsupportedEncodingException e) {
                String msg = "Unable to UTF-8 decode " + curl.getFile() + " for class " + c;
                throw new VoltCompilerException(msg);
            }
            // Strip the "file:" prefix and the "!/entry" suffix from the jar URL.
            jarFN = jarFN.substring(5, jarFN.indexOf('!'));
            JarFile jar = null;
            try {
                jar = new JarFile(jarFN);
                Enumeration<JarEntry> entries = jar.entries();
                while (entries.hasMoreElements()) {
                    String name = entries.nextElement().getName();
                    Matcher mtc = nameRE.matcher(name);
                    if (mtc.find()) {
                        String innerName = mtc.group(1).replace('/', '.');
                        Class<?> inner;
                        try {
                            inner = cl.loadClass(innerName);
                        } catch (ClassNotFoundException e) {
                            String msg = "Unable to load " + c + " inner class " + innerName;
                            throw new VoltCompilerException(msg);
                        }
                        builder.add(inner);
                    }
                }
            }
            catch (IOException e) {
                String msg = "Cannot access class " + c + " source code location of " + jarFN;
                throw new VoltCompilerException(msg);
            }
            finally {
                if (jar != null) try { jar.close(); } catch (Exception ignoreIt) {}
            }
        }
        // load directly from a classfile
        else if ("file".equals(curl.getProtocol())) {
            // BUGFIX: same '.' escaping fix as the jar branch above.
            Pattern nameRE = Pattern.compile("/(" + stem + "\\$[^/]+)\\.class\\z");
            File sourceDH = new File(curl.getFile()).getParentFile();
            for (File f : sourceDH.listFiles()) {
                Matcher mtc = nameRE.matcher(f.getAbsolutePath());
                if (mtc.find()) {
                    String innerName = mtc.group(1).replace('/', '.');
                    Class<?> inner;
                    try {
                        inner = cl.loadClass(innerName);
                    } catch (ClassNotFoundException e) {
                        String msg = "Unable to load " + c + " inner class " + innerName;
                        throw new VoltCompilerException(msg);
                    }
                    builder.add(inner);
                }
            }
        }
    }
    return builder.build();
}
/**
 * Add the given class and, recursively, all of its inner classes to the
 * output jar. A static per-compile cache prevents the same class from being
 * added twice.
 *
 * @param jarOutput the in-memory jar being built
 * @param cls the class to add
 * @return false if the class was already added during this compile,
 *         otherwise the result of writing the class bytes to the jar
 * @throws VoltCompiler.VoltCompilerException on I/O failure reading the class
 */
public boolean addClassToJar(InMemoryJarfile jarOutput, final Class<?> cls)
        throws VoltCompiler.VoltCompilerException
{
    // HashSet.add returns false when the element was already present,
    // which folds the previous contains()/add() pair into one call.
    if (!cachedAddedClasses.add(cls)) {
        return false;
    }
    for (final Class<?> nested : getInnerClasses(cls)) {
        addClassToJar(jarOutput, nested);
    }
    try {
        return VoltCompilerUtils.addClassToJar(jarOutput, cls);
    } catch (IOException e) {
        throw new VoltCompilerException(e.getMessage());
    }
}
/**
 * Install per-procedure annotation overrides, keyed by procedure name.
 * These replace any @ProcInfo annotations found on the procedure classes.
 *
 * @param procInfoOverrides the m_procInfoOverrides to set (may be null to clear)
 */
public void setProcInfoOverrides(Map<String, ProcInfoData> procInfoOverrides) {
    m_procInfoOverrides = procInfoOverrides;
}
/**
 * Helper enum that scans sax exception messages for deprecated xml elements
 *
 * @author ssantoro
 */
enum DeprecatedProjectElement {
    // Matches the schema-validation error produced when a <security> element
    // appears in project.xml; security moved to the deployment file.
    security(
        "(?i)\\Acvc-[^:]+:\\s+Invalid\\s+content\\s+.+?\\s+element\\s+'security'",
        "security may be enabled in the deployment file only"
    );

    /**
     * message regular expression that pertains to the deprecated element
     */
    private final Pattern messagePattern;

    /**
     * a suggestion string to explain alternatives
     */
    private final String suggestion;

    DeprecatedProjectElement(String messageRegex, String suggestion) {
        this.messagePattern = Pattern.compile(messageRegex);
        this.suggestion = suggestion;
    }

    /** @return a human-readable hint describing the supported alternative */
    String getSuggestion() {
        return suggestion;
    }

    /**
     * Given a JAXBException it determines whether or not the linked
     * exception is associated with a deprecated xml elements
     *
     * @param jxbex a {@link JAXBException}
     * @return an enum of {@code DeprecatedProjectElement} if the
     *   given exception corresponds to a deprecated xml element,
     *   or null when it does not (or the exception is not a SAX parse error)
     */
    static DeprecatedProjectElement valueOf( JAXBException jxbex) {
        if( jxbex == null
                || jxbex.getLinkedException() == null
                || ! (jxbex.getLinkedException() instanceof org.xml.sax.SAXParseException)
        ) {
            return null;
        }
        org.xml.sax.SAXParseException saxex =
                org.xml.sax.SAXParseException.class.cast(jxbex.getLinkedException());
        // Test the parse error message against every known deprecated element.
        for( DeprecatedProjectElement dpe: DeprecatedProjectElement.values()) {
            Matcher mtc = dpe.messagePattern.matcher(saxex.getMessage());
            if( mtc.find()) return dpe;
        }
        return null;
    }
}
/**
 * Compile the provided jarfile. Basically, treat the jarfile as a staging area
 * for the artifacts to be included in the compile, and then compile it in place.
 *
 * Recompiles against the jar's previously autogenerated canonical DDL plus the
 * supplied new (ad hoc) DDL, diffing against {@code oldCatalog}.
 *
 * *NOTE*: Does *NOT* work with project.xml jarfiles.
 *
 * @param jarfile the in-memory jar, updated in place on success
 * @param newDDL the new DDL statements to apply
 * @param oldCatalog the catalog the jar was previously compiled to
 * @throws IOException on compilation failure, carrying a trimmed error message
 */
public void compileInMemoryJarfileWithNewDDL(InMemoryJarfile jarfile, String newDDL, Catalog oldCatalog) throws IOException
{
    // Recover the canonical DDL that was autogenerated by the last compile.
    String oldDDL = new String(jarfile.get(VoltCompiler.AUTOGEN_DDL_FILE_NAME),
            Constants.UTF8ENCODING);
    compilerLog.trace("OLD DDL: " + oldDDL);
    VoltCompilerStringReader canonicalDDLReader = null;
    VoltCompilerStringReader newDDLReader = null;
    // Use the in-memory jarfile-provided class loader so that procedure
    // classes can be found and copied to the new file that gets written.
    ClassLoader originalClassLoader = m_classLoader;
    try {
        canonicalDDLReader = new VoltCompilerStringReader(VoltCompiler.AUTOGEN_DDL_FILE_NAME, oldDDL);
        newDDLReader = new VoltCompilerStringReader("Ad Hoc DDL Input", newDDL);
        List<VoltCompilerReader> ddlList = new ArrayList<>();
        ddlList.add(newDDLReader);
        m_classLoader = jarfile.getLoader();
        // Do the compilation work.
        InMemoryJarfile jarOut = compileInternal(null, canonicalDDLReader, oldCatalog, ddlList, jarfile);
        // Trim the compiler output to try to provide a concise failure
        // explanation
        if (jarOut != null) {
            compilerLog.debug("Successfully recompiled InMemoryJarfile");
        }
        else {
            // Surface the last recorded compiler error, truncated at the
            // verbose "in statement starting" suffix when present.
            String errString = "Adhoc DDL failed";
            if (m_errors.size() > 0) {
                errString = m_errors.get(m_errors.size() - 1).getLogString();
            }
            int endtrim = errString.indexOf(" in statement starting");
            if (endtrim < 0) { endtrim = errString.length(); }
            String trimmed = errString.substring(0, endtrim);
            throw new IOException(trimmed);
        }
    }
    finally {
        // Restore the original class loader
        m_classLoader = originalClassLoader;
        if (canonicalDDLReader != null) {
            // Best-effort close; a failure here is not actionable.
            try { canonicalDDLReader.close(); } catch (IOException ioe) {}
        }
        if (newDDLReader != null) {
            try { newDDLReader.close(); } catch (IOException ioe) {}
        }
    }
}
/**
 * Compile the provided jarfile. Basically, treat the jarfile as a staging area
 * for the artifacts to be included in the compile, and then compile it in place.
 *
 * All *.sql entries found in the jar are treated as DDL inputs.
 *
 * *NOTE*: Does *NOT* work with project.xml jarfiles.
 *
 * @param jarfile the in-memory jar, updated in place on success
 * @throws IOException on compilation failure, carrying a trimmed error message
 */
public void compileInMemoryJarfile(InMemoryJarfile jarfile) throws IOException
{
    // Gather DDL files for recompilation
    List<VoltCompilerReader> ddlReaderList = new ArrayList<VoltCompilerReader>();
    // Walk the jar's entries in order via firstEntry()/higherEntry().
    Entry<String, byte[]> entry = jarfile.firstEntry();
    while (entry != null) {
        String path = entry.getKey();
        // SOMEDAY: It would be better to have a manifest that explicitly lists
        // ddl files instead of using a brute force *.sql glob.
        if (path.toLowerCase().endsWith(".sql")) {
            ddlReaderList.add(new VoltCompilerJarFileReader(jarfile, path));
            compilerLog.trace("Added SQL file from jarfile to compilation: " + path);
        }
        entry = jarfile.higherEntry(entry.getKey());
    }
    // Use the in-memory jarfile-provided class loader so that procedure
    // classes can be found and copied to the new file that gets written.
    ClassLoader originalClassLoader = m_classLoader;
    try {
        m_classLoader = jarfile.getLoader();
        // Do the compilation work.
        InMemoryJarfile jarOut = compileInternal(null, null, null, ddlReaderList, jarfile);
        // Trim the compiler output to try to provide a concise failure
        // explanation
        if (jarOut != null) {
            compilerLog.debug("Successfully recompiled InMemoryJarfile");
        }
        else {
            // Surface the last recorded compiler error, trimmed to the span
            // between "DDL Error" and the verbose "in statement starting" tail.
            String errString = "Adhoc DDL failed";
            if (m_errors.size() > 0) {
                errString = m_errors.get(m_errors.size() - 1).getLogString();
            }
            int fronttrim = errString.indexOf("DDL Error");
            if (fronttrim < 0) { fronttrim = 0; }
            int endtrim = errString.indexOf(" in statement starting");
            if (endtrim < 0) { endtrim = errString.length(); }
            String trimmed = errString.substring(fronttrim, endtrim);
            throw new IOException(trimmed);
        }
    }
    finally {
        // Restore the original class loader
        m_classLoader = originalClassLoader;
    }
}
/**
 * Check a loaded catalog. If it needs to be upgraded recompile it and save
 * an upgraded jar file.
 *
 * The upgrade runs when the catalog's recorded version differs from the
 * running VoltDB version, or when the upgrade is explicitly forced.
 *
 * @param outputJar in-memory jar file (updated in place here)
 * @return source version upgraded from or null if not upgraded
 * @throws IOException if the recompile fails or the output cannot be written
 */
public String upgradeCatalogAsNeeded(InMemoryJarfile outputJar)
        throws IOException
{
    // getBuildInfoFromJar() performs some validation.
    String[] buildInfoLines = CatalogUtil.getBuildInfoFromJar(outputJar);
    String versionFromCatalog = buildInfoLines[0];
    // Set if an upgrade happens.
    String upgradedFromVersion = null;
    // Check if it's compatible (or the upgrade is being forced).
    // getConfig() may return null if it's being mocked for a test.
    if ( VoltDB.Configuration.m_forceCatalogUpgrade
            || !versionFromCatalog.equals(VoltDB.instance().getVersionString())) {
        // Check if there's a project.
        VoltCompilerReader projectReader =
                (outputJar.containsKey("project.xml")
                        ? new VoltCompilerJarFileReader(outputJar, "project.xml")
                        : null);
        // Patch the buildinfo.
        String versionFromVoltDB = VoltDB.instance().getVersionString();
        buildInfoLines[0] = versionFromVoltDB;
        buildInfoLines[1] = String.format("voltdb-auto-upgrade-to-%s", versionFromVoltDB);
        byte[] buildInfoBytes = StringUtils.join(buildInfoLines, "\n").getBytes();
        outputJar.put(CatalogUtil.CATALOG_BUILDINFO_FILENAME, buildInfoBytes);
        // Gather DDL files for recompilation if not using a project file.
        List<VoltCompilerReader> ddlReaderList = new ArrayList<VoltCompilerReader>();
        if (projectReader == null) {
            Entry<String, byte[]> entry = outputJar.firstEntry();
            while (entry != null) {
                String path = entry.getKey();
                //TODO: It would be better to have a manifest that explicitly lists
                // ddl files instead of using a brute force *.sql glob.
                if (path.toLowerCase().endsWith(".sql")) {
                    ddlReaderList.add(new VoltCompilerJarFileReader(outputJar, path));
                }
                entry = outputJar.higherEntry(entry.getKey());
            }
        }
        // Use the in-memory jarfile-provided class loader so that procedure
        // classes can be found and copied to the new file that gets written.
        ClassLoader originalClassLoader = m_classLoader;
        // Compile and save the file to voltdbroot. Assume it's a test environment if there
        // is no catalog context available.
        String jarName = String.format("catalog-%s.jar", versionFromVoltDB);
        String textName = String.format("catalog-%s.out", versionFromVoltDB);
        CatalogContext catalogContext = VoltDB.instance().getCatalogContext();
        final String outputJarPath = (catalogContext != null
                ? new File(catalogContext.cluster.getVoltroot(), jarName).getPath()
                : VoltDB.Configuration.getPathToCatalogForTest(jarName));
        // Place the compiler output in a text file in the same folder.
        final String outputTextPath = (catalogContext != null
                ? new File(catalogContext.cluster.getVoltroot(), textName).getPath()
                : VoltDB.Configuration.getPathToCatalogForTest(textName));
        try {
            m_classLoader = outputJar.getLoader();
            consoleLog.info(String.format(
                    "Version %s catalog will be automatically upgraded to version %s.",
                    versionFromCatalog, versionFromVoltDB));
            // Do the compilation work.
            boolean success = compileInternalToFile(projectReader, outputJarPath, null, null, ddlReaderList, outputJar);
            // Sanitize the *.sql files in the jarfile so that only the autogenerated
            // canonical DDL file will be used for future compilations
            // Bomb out if we failed to generate the canonical DDL
            if (success) {
                boolean foundCanonicalDDL = false;
                Entry<String, byte[]> entry = outputJar.firstEntry();
                while (entry != null) {
                    String path = entry.getKey();
                    if (path.toLowerCase().endsWith(".sql")) {
                        if (!path.toLowerCase().equals(AUTOGEN_DDL_FILE_NAME)) {
                            outputJar.remove(path);
                        }
                        else {
                            foundCanonicalDDL = true;
                        }
                    }
                    entry = outputJar.higherEntry(entry.getKey());
                }
                success = foundCanonicalDDL;
            }
            if (success) {
                // Set up the return string.
                upgradedFromVersion = versionFromCatalog;
            }
            // Summarize the results to a file.
            // Briefly log success or failure and mention the output text file.
            PrintStream outputStream = new PrintStream(outputTextPath);
            try {
                if (success) {
                    summarizeSuccess(outputStream, outputStream, outputJarPath);
                    consoleLog.info(String.format(
                            "The catalog was automatically upgraded from " +
                            "version %s to %s and saved to \"%s\". " +
                            "Compiler output is available in \"%s\".",
                            versionFromCatalog, versionFromVoltDB,
                            outputJarPath, outputTextPath));
                }
                else {
                    summarizeErrors(outputStream, outputStream);
                    // Closed again in the finally below; PrintStream.close()
                    // is idempotent so the double close is harmless.
                    outputStream.close();
                    compilerLog.error("Catalog upgrade failed.");
                    compilerLog.info(String.format(
                            "Had attempted to perform an automatic version upgrade of a " +
                            "catalog that was compiled by an older %s version of VoltDB, " +
                            "but the automatic upgrade failed. The cluster will not be " +
                            "able to start until the incompatibility is fixed. " +
                            "Try re-compiling the catalog with the newer %s version " +
                            "of the VoltDB compiler. Compiler output from the failed " +
                            "upgrade is available in \"%s\".",
                            versionFromCatalog, versionFromVoltDB, outputTextPath));
                    throw new IOException(String.format(
                            "Catalog upgrade failed. You will need to recompile using voltdb compile."));
                }
            }
            finally {
                outputStream.close();
            }
        }
        catch (IOException ioe) {
            // Do nothing because this could come from the normal failure path
            throw ioe;
        }
        catch (Exception e) {
            compilerLog.error("Catalog upgrade failed with error:");
            compilerLog.error(e.getMessage());
            compilerLog.info(String.format(
                    "Had attempted to perform an automatic version upgrade of a " +
                    "catalog that was compiled by an older %s version of VoltDB, " +
                    "but the automatic upgrade failed. The cluster will not be " +
                    "able to start until the incompatibility is fixed. " +
                    "Try re-compiling the catalog with the newer %s version " +
                    "of the VoltDB compiler. Compiler output from the failed " +
                    "upgrade is available in \"%s\".",
                    versionFromCatalog, versionFromVoltDB, outputTextPath));
            throw new IOException(String.format(
                    "Catalog upgrade failed. You will need to recompile using voltdb compile."));
        }
        finally {
            // Restore the original class loader
            m_classLoader = originalClassLoader;
        }
    }
    return upgradedFromVersion;
}
/**
 * Note that a table changed in order to invalidate potential cached
 * statements that reference the changed table.
 *
 * @param tableName name of the modified table (stored lower-cased)
 */
void markTableAsDirty(String tableName) {
    final String canonicalName = tableName.toLowerCase();
    m_dirtyTables.add(canonicalName);
}
/**
 * Key prefix includes attributes that make a cached statement usable if they match
 *
 * For example, if the SQL is the same, but the partitioning isn't, then the statements
 * aren't actually interchangeable.
 *
 * @param partitioning the statement's partitioning decision
 * @param detMode determinism mode folded into the key as a single character
 * @param joinOrder optional explicit join order, folded into the key
 * @return the cache key prefix, or null when caching is not supported
 */
String getKeyPrefix(StatementPartitioning partitioning, DeterminismMode detMode, String joinOrder) {
    // no caching for inferred yet
    if (partitioning.isInferred()) {
        return null;
    }
    // NOTE(review): the string literal below was truncated in this copy of the
    // source (unterminated quote); the original delimiter/sentinel value is
    // unknown from here — recover it from upstream VoltDB before editing.
    String joinOrderPrefix = "
    if (joinOrder != null) {
        joinOrderPrefix += joinOrder;
    }
    boolean partitioned = partitioning.wasSpecifiedAsSingle();
    // NOTE(review): this return is also truncated — the ternary's literals
    // (partitioned vs. not) are cut off mid-expression in this copy.
    return joinOrderPrefix + String.valueOf(detMode.toChar()) + (partitioned ? "P
}
/**
 * Remember a statement from the previous catalog so later compiles can reuse
 * it. The cache key is the statement's key prefix concatenated with its SQL.
 */
void addStatementToCache(Statement stmt) {
    final String cacheKey = stmt.getCachekeyprefix() + stmt.getSqltext();
    m_previousCatalogStmts.put(cacheKey, stmt);
}
// track hits and misses for debugging
// Plain static counters updated without synchronization — debugging aid only.
static long m_stmtCacheHits = 0;
static long m_stmtCacheMisses = 0;
/**
 * Look for a match from the previous catalog that matches the key + sql.
 * A cached statement is only reusable when none of the tables it reads or
 * updates have been marked dirty since the previous compile.
 *
 * @param keyPrefix cache key prefix from {@code getKeyPrefix} (non-null)
 * @param sql the statement's SQL text
 * @return the cached Statement, or null on a cache miss
 */
Statement getCachedStatement(String keyPrefix, String sql) {
    String key = keyPrefix + sql;
    Statement candidate = m_previousCatalogStmts.get(key);
    if (candidate == null) {
        ++m_stmtCacheMisses;
        return null;
    }
    // check that no underlying tables have been modified since the proc had been compiled
    if (anyTableDirty(candidate.getTablesread()) ||
            anyTableDirty(candidate.getTablesupdated())) {
        ++m_stmtCacheMisses;
        return null;
    }
    ++m_stmtCacheHits;
    // easy debugging stmt
    //printStmtCacheStats();
    return candidate;
}

/**
 * Helper for {@code getCachedStatement}: true when any table in the given
 * comma-separated list has been marked dirty.
 */
private boolean anyTableDirty(String tableCsv) {
    for (String tableName : tableCsv.split(",")) {
        if (m_dirtyTables.contains(tableName.toLowerCase())) {
            return true;
        }
    }
    return false;
}
@SuppressWarnings("unused")
// Debugging aid: dump statement-cache hit/miss counters to stdout.
private void printStmtCacheStats() {
    final long lookups = m_stmtCacheHits + m_stmtCacheMisses;
    final double hitPercent = (m_stmtCacheHits * 100.0) / lookups;
    System.out.printf("Hits: %d, Misses %d, Percent %.2f\n",
            m_stmtCacheHits, m_stmtCacheMisses, hitPercent);
    System.out.flush();
}
}
|
package org.voltdb.compiler;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.apache.commons.lang3.ArrayUtils;
import org.hsqldb_voltpatches.HSQLInterface;
import org.voltcore.logging.Level;
import org.voltcore.logging.VoltLogger;
import org.voltdb.ProcInfoData;
import org.voltdb.RealVoltDB;
import org.voltdb.TransactionIdManager;
import org.voltdb.VoltType;
import org.voltdb.catalog.Catalog;
import org.voltdb.catalog.CatalogMap;
import org.voltdb.catalog.Column;
import org.voltdb.catalog.ColumnRef;
import org.voltdb.catalog.Constraint;
import org.voltdb.catalog.Database;
import org.voltdb.catalog.Group;
import org.voltdb.catalog.GroupRef;
import org.voltdb.catalog.MaterializedViewInfo;
import org.voltdb.catalog.Procedure;
import org.voltdb.catalog.Statement;
import org.voltdb.catalog.Table;
import org.voltdb.compiler.projectfile.ClassdependenciesType.Classdependency;
import org.voltdb.compiler.projectfile.DatabaseType;
import org.voltdb.compiler.projectfile.ExportType;
import org.voltdb.compiler.projectfile.ExportType.Tables;
import org.voltdb.compiler.projectfile.GroupsType;
import org.voltdb.compiler.projectfile.InfoType;
import org.voltdb.compiler.projectfile.ProceduresType;
import org.voltdb.compiler.projectfile.ProjectType;
import org.voltdb.compiler.projectfile.RolesType;
import org.voltdb.compiler.projectfile.SchemasType;
import org.voltdb.types.ConstraintType;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.Encoder;
import org.voltdb.utils.InMemoryJarfile;
import org.voltdb.utils.LogKeys;
import org.voltdb.utils.StringInputStream;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
/**
* Compiles a project XML file and some metadata into a Jarfile
* containing stored procedure code and a serialized catalog.
*
*/
public class VoltCompiler {
/** Represents the level of severity for a Feedback message generated during compiling. */
public static enum Severity { INFORMATIONAL, WARNING, ERROR, UNEXPECTED }
// Sentinel used when a feedback message has no associated source line.
public static final int NO_LINE_NUMBER = -1;
// feedback by filename
ArrayList<Feedback> m_infos = new ArrayList<Feedback>();
ArrayList<Feedback> m_warnings = new ArrayList<Feedback>();
ArrayList<Feedback> m_errors = new ArrayList<Feedback>();
// set of annotations by procedure name
private Map<String, ProcInfoData> m_procInfoOverrides = null;
// Inputs/outputs of the compile currently in progress.
String m_projectFileURL = null;
String m_jarOutputPath = null;
// File currently being processed; used to tag feedback messages.
String m_currentFilename = null;
Map<String, String> m_ddlFilePaths = new HashMap<String, String>();
InMemoryJarfile m_jarOutput = null;
Catalog m_catalog = null;
//Cluster m_cluster = null;
HSQLInterface m_hsql = null;
DatabaseEstimates m_estimates = new DatabaseEstimates();
private List<String> m_capturedDiagnosticDetail = null;
private static final VoltLogger compilerLog = new VoltLogger("COMPILER");
@SuppressWarnings("unused")
private static final VoltLogger Log = new VoltLogger("org.voltdb.compiler.VoltCompiler");
/**
 * Represents output from a compile. This works similarly to Log4j; there
 * are different levels of feedback including info, warning, error, and
 * unexpected error. Feedback can be output to a printstream (like stdout)
 * or can be examined programatically.
 *
 */
public static class Feedback {
    Severity severityLevel;   // severity of this message
    String fileName;          // source file the message refers to, may be null
    int lineNo;               // line number, or NO_LINE_NUMBER when unknown
    String message;           // human-readable message text

    Feedback(final Severity severityLevel, final String message, final String fileName, final int lineNo) {
        this.severityLevel = severityLevel;
        this.message = message;
        this.fileName = fileName;
        this.lineNo = lineNo;
    }

    /** @return the log string prefixed by a human-readable severity label. */
    public String getStandardFeedbackLine() {
        // A switch replaces the previous chain of brace-less if statements.
        String prefix = "";
        switch (severityLevel) {
            case INFORMATIONAL:
                prefix = "INFO";
                break;
            case WARNING:
                prefix = "WARNING";
                break;
            case ERROR:
                prefix = "ERROR";
                break;
            case UNEXPECTED:
                prefix = "UNEXPECTED ERROR";
                break;
        }
        return prefix + " " + getLogString();
    }

    /** @return "[file:line]: message", omitting the location when unknown. */
    public String getLogString() {
        // StringBuilder replaces the previous "new String()" + concatenation.
        StringBuilder retval = new StringBuilder();
        if (fileName != null) {
            retval.append('[').append(fileName);
            if (lineNo != NO_LINE_NUMBER) {
                retval.append(':').append(lineNo);
            }
            retval.append(']');
        }
        retval.append(": ").append(message);
        return retval.toString();
    }

    public Severity getSeverityLevel() {
        return severityLevel;
    }

    public String getFileName() {
        return fileName;
    }

    public int getLineNumber() {
        return lineNo;
    }

    public String getMessage() {
        return message;
    }
}
/**
 * Compiler-internal checked exception. The String constructors also record
 * the message as an ERROR feedback on the enclosing compiler instance.
 */
class VoltCompilerException extends Exception {
    private static final long serialVersionUID = -2267780579911448600L;
    private String message = null;

    VoltCompilerException(final Exception e) {
        super(e);
    }

    VoltCompilerException(final String message, final int lineNo) {
        addErr(message, lineNo);
        this.message = message;
    }

    VoltCompilerException(final String message) {
        addErr(message);
        this.message = message;
    }

    @Override
    public String getMessage() {
        // BUGFIX: when constructed from a wrapped Exception the private
        // message field stays null; fall back to the superclass message
        // (derived from the cause) instead of returning null.
        return message != null ? message : super.getMessage();
    }
}
/**
 * SAX error handler that routes XML validation problems into the compiler's
 * feedback lists (errors and warnings).
 */
class VoltXMLErrorHandler implements ErrorHandler {
    @Override
    public void error(final SAXParseException exception) throws SAXException {
        addErr(exception.getMessage(), exception.getLineNumber());
    }

    @Override
    public void fatalError(final SAXParseException exception) throws SAXException {
        // Fatal errors are deliberately not recorded here — the previous
        // addErr call was commented out. NOTE(review): presumably the fatal
        // error surfaces through the parser itself; confirm before re-enabling.
        //addErr(exception.getMessage(), exception.getLineNumber());
    }

    @Override
    public void warning(final SAXParseException exception) throws SAXException {
        addWarn(exception.getMessage(), exception.getLineNumber());
    }
}
/**
 * Immutable description of a stored procedure to be compiled: either a Java
 * class procedure (class name only, with optional partitioning info) or a
 * single-statement SQL procedure.
 */
class ProcedureDescriptor {
    final ArrayList<String> m_authGroups;   // groups authorized to call the proc
    final String m_className;               // procedure class name (never null)
    // for single-stmt procs
    final String m_singleStmt;              // the single SQL statement, or null
    final String m_joinOrder;               // optional explicit join order
    final String m_partitionString;         // partitioning info, or null
    final boolean m_builtInStmt; // autogenerated sql statement

    // Java class procedure with no partitioning information.
    ProcedureDescriptor (final ArrayList<String> authGroups, final String className) {
        assert(className != null);
        m_authGroups = authGroups;
        m_className = className;
        m_singleStmt = null;
        m_joinOrder = null;
        m_partitionString = null;
        m_builtInStmt = false;
    }

    // Java class procedure with explicit partitioning information.
    ProcedureDescriptor (final ArrayList<String> authGroups, final String className, String partitionString) {
        assert(className != null);
        assert(partitionString != null);
        m_authGroups = authGroups;
        m_className = className;
        m_singleStmt = null;
        m_joinOrder = null;
        m_partitionString = partitionString;
        m_builtInStmt = false;
    }

    // Single-statement SQL procedure (possibly autogenerated/built-in).
    ProcedureDescriptor (final ArrayList<String> authGroups, final String className,
            final String singleStmt, final String joinOrder, final String partitionString,
            boolean builtInStmt)
    {
        assert(className != null);
        assert(singleStmt != null);
        m_authGroups = authGroups;
        m_className = className;
        m_singleStmt = singleStmt;
        m_joinOrder = joinOrder;
        m_partitionString = partitionString;
        m_builtInStmt = builtInStmt;
    }
}
/** @return true when at least one ERROR-severity feedback has been recorded. */
public boolean hasErrors() {
    return !m_errors.isEmpty();
}

/** @return true when any warning or error feedback has been recorded. */
public boolean hasErrorsOrWarnings() {
    return !m_warnings.isEmpty() || hasErrors();
}
// Convenience overloads that record feedback without a source line number.
void addInfo(final String msg) {
    addInfo(msg, NO_LINE_NUMBER);
}

void addWarn(final String msg) {
    addWarn(msg, NO_LINE_NUMBER);
}

void addErr(final String msg) {
    addErr(msg, NO_LINE_NUMBER);
}

// Record an informational message (tagged with the current file) and log it.
void addInfo(final String msg, final int lineNo) {
    final Feedback fb = new Feedback(Severity.INFORMATIONAL, msg, m_currentFilename, lineNo);
    m_infos.add(fb);
    compilerLog.info(fb.getLogString());
}

// Record a warning (tagged with the current file) and log it.
void addWarn(final String msg, final int lineNo) {
    final Feedback fb = new Feedback(Severity.WARNING, msg, m_currentFilename, lineNo);
    m_warnings.add(fb);
    compilerLog.warn(fb.getLogString());
}

// Record an error (tagged with the current file) and log it.
void addErr(final String msg, final int lineNo) {
    final Feedback fb = new Feedback(Severity.ERROR, msg, m_currentFilename, lineNo);
    m_errors.add(fb);
    compilerLog.error(fb.getLogString());
}
/**
 * Compile from a set of DDL files, but no project.xml.
 *
 * @param jarOutputPath The location to put the finished JAR to.
 * @param ddlFilePaths The array of DDL files to compile (at least one is required).
 * @return true if successful
 */
public boolean compileFromDDL(
        final String jarOutputPath,
        final String... ddlFilePaths)
{
    // Delegates to the common entry point with no project file.
    return compileInternal(null, jarOutputPath, ddlFilePaths);
}
/**
 * Compile from a set of DDL files, but no project.xml.
 *
 * @param jarOutputPath The location to put the finished JAR to.
 * @param ddlFilePaths The collection of DDL files to compile (at least one is required).
 * @return true if successful
 */
public boolean compileFromDDL(
        final String jarOutputPath,
        final Collection<String> ddlFilePaths)
{
    // toArray(new String[0]) is the idiomatic form; it produces exactly the
    // same array contents as pre-sizing with the collection's size.
    return compileInternal(null, jarOutputPath, ddlFilePaths.toArray(new String[0]));
}
/**
 * Compile using a project.xml file (DEPRECATED).
 *
 * @param projectFileURL URL of the project file.
 * @param jarOutputPath The location to put the finished JAR to.
 * @return true if successful
 */
public boolean compileWithProjectXML(
        final String projectFileURL,
        final String jarOutputPath)
{
    // Delegates to the common entry point with no standalone DDL files.
    return compileInternal(projectFileURL, jarOutputPath, new String[] {});
}
/**
 * Internal method for compiling with and without a project.xml file or DDL files.
 * Builds the catalog, then writes the catalog text, build info, project file,
 * DDL sources, and explain plans into the output jar.
 *
 * @param projectFileURL URL of the project file or null if not used.
 * @param jarOutputPath The location to put the finished JAR to.
 * @param ddlFilePaths The list of DDL files to compile (when no project is provided).
 * @return true if successful
 */
private boolean compileInternal(
        final String projectFileURL,
        final String jarOutputPath,
        final String[] ddlFilePaths)
{
    m_hsql = null;
    m_projectFileURL = projectFileURL;
    m_jarOutputPath = jarOutputPath;
    // Either a project file or at least one DDL file is required as input.
    if (m_projectFileURL == null && (ddlFilePaths == null || ddlFilePaths.length == 0)) {
        addErr("One or more DDL files are required.");
        return false;
    }
    if (m_jarOutputPath == null) {
        addErr("The output jar path is null.");
        return false;
    }
    // clear out the warnings and errors
    m_warnings.clear();
    m_infos.clear();
    m_errors.clear();
    // do all the work to get the catalog
    final Catalog catalog = compileCatalog(projectFileURL, ddlFilePaths);
    if (catalog == null) {
        compilerLog.error("Catalog compilation failed.");
        return false;
    }
    HashMap<String, byte[]> explainPlans = getExplainPlans(catalog);
    // WRITE CATALOG TO JAR HERE
    final String catalogCommands = catalog.serialize();
    byte[] catalogBytes = null;
    try {
        catalogBytes = catalogCommands.getBytes("UTF-8");
    }
    catch (final UnsupportedEncodingException e1) {
        addErr("Can't encode the compiled catalog file correctly");
        return false;
    }
    // Assemble buildinfo.txt: version, build string, user, dir, timestamp.
    StringBuilder buildinfo = new StringBuilder();
    String info[] = RealVoltDB.extractBuildInfo();
    buildinfo.append(info[0]).append('\n');
    buildinfo.append(info[1]).append('\n');
    buildinfo.append(System.getProperty("user.name")).append('\n');
    buildinfo.append(System.getProperty("user.dir")).append('\n');
    buildinfo.append(Long.toString(System.currentTimeMillis())).append('\n');
    try {
        byte buildinfoBytes[] = buildinfo.toString().getBytes("UTF-8");
        m_jarOutput.put("buildinfo.txt", buildinfoBytes);
        m_jarOutput.put("catalog.txt", catalogBytes);
        if (projectFileURL != null) {
            File projectFile = new File(projectFileURL);
            if (projectFile.exists()) {
                m_jarOutput.put("project.xml", projectFile);
            }
        }
        for (final Entry<String, String> e : m_ddlFilePaths.entrySet())
            m_jarOutput.put(e.getKey(), new File(e.getValue()));
        // write all the plans to a folder in the jarfile
        for (final Entry<String, byte[]> e : explainPlans.entrySet())
            m_jarOutput.put("plans/" + e.getKey(), e.getValue());
        m_jarOutput.writeToFile(new File(jarOutputPath)).run();
    }
    catch (final Exception e) {
        e.printStackTrace();
        return false;
    }
    assert(!hasErrors());
    if (hasErrors()) {
        return false;
    }
    return true;
}
/**
 * Get textual explain plan info for each plan from the
 * catalog to be shoved into the catalog jarfile.
 *
 * @param catalog the compiled catalog to walk
 * @return map from "&lt;proc&gt;_&lt;stmt&gt;.txt" to UTF-8 plan text bytes
 */
HashMap<String, byte[]> getExplainPlans(Catalog catalog) {
    HashMap<String, byte[]> retval = new HashMap<String, byte[]>();
    Database db = catalog.getClusters().get("cluster").getDatabases().get("database");
    assert(db != null);
    for (Procedure proc : db.getProcedures()) {
        for (Statement stmt : proc.getStatements()) {
            // StringBuilder replaces repeated String concatenation in the loop.
            StringBuilder plan = new StringBuilder();
            plan.append("SQL: ").append(stmt.getSqltext()).append("\n");
            plan.append("COST: ").append(Integer.toString(stmt.getCost())).append("\n");
            plan.append("PLAN:\n\n");
            plan.append(Encoder.hexDecodeToString(stmt.getExplainplan())).append("\n");
            byte[] b = null;
            try {
                b = plan.toString().getBytes("UTF-8");
            } catch (UnsupportedEncodingException e) {
                assert(false); // UTF-8 is always supported
            }
            retval.put(proc.getTypeName() + "_" + stmt.getTypeName() + ".txt", b);
        }
    }
    return retval;
}
/**
 * Build a Catalog from an optional project.xml plus DDL files. When a project
 * file is given it is schema-validated and unmarshalled via JAXB; otherwise a
 * stub project is used. Returns null (after recording feedback) on failure.
 *
 * @param projectFileURL path to project.xml, or null/empty for DDL-only compiles
 * @param ddlFilePaths DDL files to compile
 * @return the compiled catalog, or null on failure
 */
@SuppressWarnings("unchecked")
public Catalog compileCatalog(final String projectFileURL, final String... ddlFilePaths)
{
    // Compiler instance is reusable. Clear the cache.
    cachedAddedClasses.clear();
    m_currentFilename = (projectFileURL != null ? new File(projectFileURL).getName() : "null");
    m_jarOutput = new InMemoryJarfile();
    ProjectType project = null;
    if (projectFileURL != null && !projectFileURL.isEmpty()) {
        try {
            JAXBContext jc = JAXBContext.newInstance("org.voltdb.compiler.projectfile");
            // Validate the project file against the bundled XML schema.
            SchemaFactory sf = SchemaFactory.newInstance(
                    javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI);
            Schema schema = sf.newSchema(this.getClass().getResource("ProjectFileSchema.xsd"));
            Unmarshaller unmarshaller = jc.createUnmarshaller();
            unmarshaller.setSchema(schema);
            JAXBElement<ProjectType> result = (JAXBElement<ProjectType>) unmarshaller.unmarshal(new File(projectFileURL));
            project = result.getValue();
        }
        catch (JAXBException e) {
            // Convert some linked exceptions to more friendly errors.
            if (e.getLinkedException() instanceof java.io.FileNotFoundException) {
                addErr(e.getLinkedException().getMessage());
                compilerLog.error(e.getLinkedException().getMessage());
                return null;
            }
            // Recognize schema failures caused by deprecated project elements.
            DeprecatedProjectElement deprecated = DeprecatedProjectElement.valueOf(e);
            if( deprecated != null) {
                addErr("Found deprecated XML element \"" + deprecated.name() + "\" in project.xml file, "
                        + deprecated.getSuggestion());
                addErr("Error schema validating project.xml file. " + e.getLinkedException().getMessage());
                compilerLog.error("Found deprecated XML element \"" + deprecated.name() + "\" in project.xml file");
                compilerLog.error(e.getMessage());
                compilerLog.error(projectFileURL);
                return null;
            }
            if (e.getLinkedException() instanceof org.xml.sax.SAXParseException) {
                addErr("Error schema validating project.xml file. " + e.getLinkedException().getMessage());
                compilerLog.error("Error schema validating project.xml file: " + e.getLinkedException().getMessage());
                compilerLog.error(e.getMessage());
                compilerLog.error(projectFileURL);
                return null;
            }
            throw new RuntimeException(e);
        }
        catch (SAXException e) {
            addErr("Error schema validating project.xml file. " + e.getMessage());
            compilerLog.error("Error schema validating project.xml file. " + e.getMessage());
            return null;
        }
    }
    else {
        // No project.xml - create a stub object.
        project = new ProjectType();
        project.setInfo(new InfoType());
        project.setDatabase(new DatabaseType());
    }
    try {
        compileXMLRootNode(project, ddlFilePaths);
    } catch (final VoltCompilerException e) {
        compilerLog.l7dlog( Level.ERROR, LogKeys.compiler_VoltCompiler_FailedToCompileXML.name(), null);
        return null;
    }
    assert(m_catalog != null);
    // add epoch info to catalog
    final int epoch = (int)(TransactionIdManager.getEpoch() / 1000);
    m_catalog.getClusters().get("cluster").setLocalepoch(epoch);
    // done handling files
    m_currentFilename = null;
    return m_catalog;
}
/**
 * Look up a ProcInfo override for the named procedure.
 *
 * @param procName procedure name to look up
 * @return the override, or null when none is registered for the name (or no
 *         overrides exist at all)
 */
ProcInfoData getProcInfoOverride(final String procName) {
    return (m_procInfoOverrides == null) ? null : m_procInfoOverrides.get(procName);
}
/**
 * @return the catalog held in m_catalog -- the product of the most recent
 *         compile (initial value before any compile not visible here; confirm
 *         at the field declaration)
 */
public Catalog getCatalog() {
return m_catalog;
}
/**
 * Compile the project root: reset the catalog, seed the single cluster
 * entry, then compile the &lt;database&gt; element when one is present.
 *
 * @param project unmarshalled project (possibly a stub)
 * @param ddlFilePaths DDL paths forwarded to the database compile
 * @throws VoltCompilerException on any database compile failure
 */
void compileXMLRootNode(ProjectType project, String... ddlFilePaths) throws VoltCompilerException {
    m_catalog = new Catalog();
    temporaryCatalogInit();
    final DatabaseType databaseElement = project.getDatabase();
    if (databaseElement == null) {
        return;
    }
    compileDatabaseNode(databaseElement, ddlFilePaths);
}
/**
 * Initialize the catalog for one cluster
 */
void temporaryCatalogInit() {
// Seed the single hard-wired "cluster" entry; security starts disabled.
m_catalog.execute("add / clusters cluster");
m_catalog.getClusters().get("cluster").setSecurityenabled(false);
}
/**
 * Compile the &lt;database&gt; element: create the catalog database, apply
 * groups/roles, load and compile DDL schemas, apply partitioning, process
 * exports, then compile all procedures (including generated CRUD).
 * Statement order matters throughout: partitions must be applied before
 * procedures compile, and exports before procedures so updates on
 * append-only tables are denied.
 *
 * @param database unmarshalled &lt;database&gt; element (possibly a stub)
 * @param ddlFilePaths DDL paths compiled in addition to any &lt;schemas&gt; entries
 * @throws VoltCompilerException on any validation or compile failure
 */
void compileDatabaseNode(DatabaseType database, String... ddlFilePaths) throws VoltCompilerException {
final ArrayList<String> programs = new ArrayList<String>();
final List<String> schemas = new ArrayList<String>(Arrays.asList(ddlFilePaths));
final ArrayList<ProcedureDescriptor> procedures = new ArrayList<ProcedureDescriptor>();
final ArrayList<Class<?>> classDependencies = new ArrayList<Class<?>>();
final VoltDDLElementTracker voltDdlTracker = new VoltDDLElementTracker(this);
final String databaseName = database.getName();
// schema does not verify that the database is named "database"
if (databaseName.equals("database") == false) {
final String msg = "VoltDB currently requires all database elements to be named "+
"\"database\" (found: \"" + databaseName + "\")";
throw new VoltCompilerException(msg);
}
// create the database in the catalog
m_catalog.execute("add /clusters[cluster] databases " + databaseName);
Database db = m_catalog.getClusters().get("cluster").getDatabases().get(databaseName);
// schemas/schema
if (database.getSchemas() != null) {
for (SchemasType.Schema schema : database.getSchemas().getSchema()) {
compilerLog.l7dlog( Level.INFO, LogKeys.compiler_VoltCompiler_CatalogPath.name(),
new Object[] {schema.getPath()}, null);
schemas.add(schema.getPath());
}
}
// groups/group (alias for roles/role).
if (database.getGroups() != null) {
for (GroupsType.Group group : database.getGroups().getGroup()) {
org.voltdb.catalog.Group catGroup = db.getGroups().add(group.getName());
catGroup.setAdhoc(group.isAdhoc());
catGroup.setSysproc(group.isSysproc());
catGroup.setDefaultproc(group.isDefaultproc());
}
}
// roles/role (alias for groups/group).
if (database.getRoles() != null) {
for (RolesType.Role role : database.getRoles().getRole()) {
org.voltdb.catalog.Group catGroup = db.getGroups().add(role.getName());
catGroup.setAdhoc(role.isAdhoc());
catGroup.setSysproc(role.isSysproc());
catGroup.setDefaultproc(role.isDefaultproc());
}
}
// procedures/procedure
if (database.getProcedures() != null) {
for (ProceduresType.Procedure proc : database.getProcedures().getProcedure()) {
voltDdlTracker.add(getProcedure(proc));
}
}
// classdependencies/classdependency
if (database.getClassdependencies() != null) {
for (Classdependency dep : database.getClassdependencies().getClassdependency()) {
classDependencies.add(getClassDependency(dep));
}
}
// partitions/table
if (database.getPartitions() != null) {
for (org.voltdb.compiler.projectfile.PartitionsType.Partition table : database.getPartitions().getPartition()) {
voltDdlTracker.put(table.getTable(), table.getColumn());
}
}
// shutdown and make a new hsqldb
m_hsql = HSQLInterface.loadHsqldb();
// Actually parse and handle all the programs
for (final String programName : programs) {
m_catalog.execute("add " + db.getPath() + " programs " + programName);
}
// Actually parse and handle all the DDL
// DDLCompiler also provides partition descriptors for DDL PARTITION
// and REPLICATE statements.
final DDLCompiler ddlcompiler = new DDLCompiler(this, m_hsql, voltDdlTracker, db);
for (final String schemaPath : schemas) {
File schemaFile = null;
if (schemaPath.contains(".jar!")) {
// DDL embedded inside a jar: extract it to a temp file first.
String ddlText = null;
try {
ddlText = readFileFromJarfile(schemaPath);
} catch (final Exception e) {
throw new VoltCompilerException(e);
}
schemaFile = VoltProjectBuilder.writeStringToTempFile(ddlText);
}
else {
schemaFile = new File(schemaPath);
}
if (!schemaFile.isAbsolute()) {
// Resolve schemaPath relative to either the database definition xml file
// or the working directory.
if (m_projectFileURL != null) {
schemaFile = new File(new File(m_projectFileURL).getParent(), schemaPath);
}
else {
schemaFile = new File(schemaPath);
}
}
// add the file object's path to the list of files for the jar
m_ddlFilePaths.put(schemaFile.getName(), schemaFile.getPath());
ddlcompiler.loadSchema(schemaFile.getAbsolutePath());
}
ddlcompiler.compileToCatalog(m_catalog, db);
// Actually parse and handle all the partitions
// this needs to happen before procedures are compiled
String msg = "In database \"" + databaseName + "\", ";
final CatalogMap<Table> tables = db.getTables();
for (String tableName : voltDdlTracker.m_partitionMap.keySet()) {
String colName = voltDdlTracker.m_partitionMap.get(tableName);
// A null column name indicates a replicated table. Ignore it here
// because it defaults to replicated in the catalog.
if (colName != null) {
final Table t = tables.getIgnoreCase(tableName);
if (t == null) {
msg += "PARTITION has unknown TABLE '" + tableName + "'";
throw new VoltCompilerException(msg);
}
final Column c = t.getColumns().getIgnoreCase(colName);
// make sure the column exists
if (c == null) {
msg += "PARTITION has unknown COLUMN '" + colName + "'";
throw new VoltCompilerException(msg);
}
// make sure the column is marked not-nullable
if (c.getNullable() == true) {
msg += "Partition column '" + tableName + "." + colName + "' is nullable. " +
"Partition columns must be constrained \"NOT NULL\".";
throw new VoltCompilerException(msg);
}
// verify that the partition column is a supported type
VoltType pcolType = VoltType.get((byte) c.getType());
switch (pcolType) {
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case STRING:
case VARBINARY:
break;
default:
msg += "Partition column '" + tableName + "." + colName + "' is not a valid type. " +
"Partition columns must be an integer or varchar type.";
throw new VoltCompilerException(msg);
}
t.setPartitioncolumn(c);
t.setIsreplicated(false);
// Set the destination tables of associated views non-replicated.
// If a view's source table is replicated, then a full scan of the
// associated view is singled-sited. If the source is partitioned,
// a full scan of the view must be distributed.
final CatalogMap<MaterializedViewInfo> views = t.getViews();
for (final MaterializedViewInfo mvi : views) {
mvi.getDest().setIsreplicated(false);
setGroupedTablePartitionColumn(mvi, c);
}
}
}
// this should reorder the tables and partitions all alphabetically
String catData = m_catalog.serialize();
m_catalog = new Catalog();
m_catalog.execute(catData);
db = m_catalog.getClusters().get("cluster").getDatabases().get(databaseName);
// add database estimates info
addDatabaseEstimatesInfo(m_estimates, db);
// Process DDL exported tables
for( String exportedTableName: voltDdlTracker.getExportedTables()) {
addExportTableToConnector(exportedTableName, db);
}
// Process and add exports and connectors to the catalog
// Must do this before compiling procedures to deny updates
// on append-only tables.
if (database.getExport() != null) {
// currently, only a single connector is allowed
ExportType export = database.getExport();
compileExport(export, db);
}
// Generate the auto-CRUD procedure descriptors. This creates
// procedure descriptors to insert, delete, select and update
// tables, with some caveats. (See ENG-1601).
List<ProcedureDescriptor> autoCrudProcedures = generateCrud(m_catalog);
procedures.addAll(autoCrudProcedures);
// Add procedures read from DDL and project file
procedures.addAll( voltDdlTracker.getProcedureDescriptors());
// Actually parse and handle all the Procedures
for (final ProcedureDescriptor procedureDescriptor : procedures) {
final String procedureName = procedureDescriptor.m_className;
if (procedureDescriptor.m_singleStmt == null) {
// Java procedure: report errors against the implementing .class file.
m_currentFilename = procedureName.substring(procedureName.lastIndexOf('.') + 1);
m_currentFilename += ".class";
} else {
m_currentFilename = procedureName;
}
ProcedureCompiler.compile(this, m_hsql, m_estimates, m_catalog, db, procedureDescriptor);
}
// Add all the class dependencies to the output jar
for (final Class<?> classDependency : classDependencies) {
addClassToJar( classDependency, this );
}
}
/**
 * A view of a partitioned table is itself partitioned -- it certainly isn't
 * replicated. When the source table's partitioning column appears among the
 * view's GROUP BY columns, the matching destination column becomes the view
 * table's partitioning column. Otherwise the view table keeps no
 * partitioning column: it is seemingly randomly distributed, and its grouped
 * columns are only locally (not globally) unique.
 *
 * @param mvi materialized view whose destination table may be re-partitioned
 * @param partitionColumn partitioning column of the view's source table
 */
private void setGroupedTablePartitionColumn(MaterializedViewInfo mvi, Column partitionColumn)
throws VoltCompilerException {
    final Table viewTable = mvi.getDest();
    // Destination columns in "index" order -- this matches the iteration
    // order of getGroupbycols below.
    final List<Column> orderedDestColumns =
            CatalogUtil.getSortedCatalogItems(viewTable.getColumns(), "index");
    // Note: getTypeName() on a Column yields the column name -- go figure.
    final String targetName = partitionColumn.getTypeName();
    int position = 0;
    for (ColumnRef groupByRef : mvi.getGroupbycols()) {
        if (groupByRef.getColumn().getName().equals(targetName)) {
            viewTable.setPartitioncolumn(orderedDestColumns.get(position));
            return;
        }
        position++;
    }
}
/** Provide a feedback path to monitor plan output via harvestCapturedDetail */
public void enableDetailedCapture() {
// A non-null list switches the captureDiagnostic* methods from no-ops to recording.
m_capturedDiagnosticDetail = new ArrayList<String>();
}
/**
 * Access recent plan output, for diagnostic purposes.
 * One-shot: hands back the captured lines and disables further capture
 * until enableDetailedCapture is called again.
 */
public List<String> harvestCapturedDetail() {
    final List<String> captured = m_capturedDiagnosticDetail;
    m_capturedDiagnosticDetail = null;
    return captured;
}
/** Capture plan context info -- statement, cost, high-level "explain". */
public void captureDiagnosticContext(String planDescription) {
    // No-op unless enableDetailedCapture() has armed the list.
    if (m_capturedDiagnosticDetail != null) {
        m_capturedDiagnosticDetail.add(planDescription);
    }
}
/** Capture plan content in terse json format. */
public void captureDiagnosticJsonFragment(String json) {
    // No-op unless enableDetailedCapture() has armed the list.
    if (m_capturedDiagnosticDetail != null) {
        m_capturedDiagnosticDetail.add(json);
    }
}
/**
 * Create INSERT, UPDATE, DELETE and SELECT procedure descriptors for all partitioned,
 * non-export tables with primary keys that include the partitioning column.
 * Replicated tables get only a multi-partition INSERT; partitioned tables
 * always get INSERT, and additionally SELECT/DELETE/UPDATE when a primary
 * key containing the partitioning column exists.
 *
 * @param catalog compiled catalog to walk
 * @return a list of new procedure descriptors
 */
List<ProcedureDescriptor> generateCrud(Catalog catalog) {
final LinkedList<ProcedureDescriptor> crudprocs = new LinkedList<ProcedureDescriptor>();
final Database db = catalog.getClusters().get("cluster").getDatabases().get("database");
for (Table table : db.getTables()) {
if (CatalogUtil.isTableExportOnly(db, table)) {
compilerLog.debug("Skipping creation of CRUD procedures for export-only table " +
table.getTypeName());
continue;
}
if (table.getMaterializer() != null) {
compilerLog.debug("Skipping creation of CRUD procedures for view " +
table.getTypeName());
continue;
}
if (table.getIsreplicated()) {
compilerLog.debug("Creating multi-partition insert procedure for replicated table " +
table.getTypeName());
crudprocs.add(generateCrudReplicatedInsert(table));
continue;
}
// get the partition column
final Column partitioncolumn = table.getPartitioncolumn();
// all partitioned tables get insert crud procs
crudprocs.add(generateCrudInsert(table, partitioncolumn));
// select/delete/update crud requires pkey. Pkeys are stored as constraints.
final CatalogMap<Constraint> constraints = table.getConstraints();
final Iterator<Constraint> it = constraints.iterator();
Constraint pkey = null;
while (it.hasNext()) {
Constraint constraint = it.next();
if (constraint.getType() == ConstraintType.PRIMARY_KEY.getValue()) {
pkey = constraint;
break;
}
}
if (pkey == null) {
compilerLog.debug("Skipping creation of CRUD select/delete/update for partitioned table " +
table.getTypeName() + " because no primary key is declared.");
continue;
}
// Primary key must include the partition column for the table
// for select/delete/update
boolean pkeyHasPartitionColumn = false;
CatalogMap<ColumnRef> pkeycols = pkey.getIndex().getColumns();
Iterator<ColumnRef> pkeycolsit = pkeycols.iterator();
while (pkeycolsit.hasNext()) {
ColumnRef colref = pkeycolsit.next();
if (colref.getColumn().equals(partitioncolumn)) {
pkeyHasPartitionColumn = true;
break;
}
}
if (!pkeyHasPartitionColumn) {
compilerLog.debug("Skipping creation of CRUD select/delete/update for partitioned table " +
table.getTypeName() + " because primary key does not include the partitioning column.");
continue;
}
// select, delete and update here (insert generated above)
crudprocs.add(generateCrudSelect(table, partitioncolumn, pkey));
crudprocs.add(generateCrudDelete(table, partitioncolumn, pkey));
crudprocs.add(generateCrudUpdate(table, partitioncolumn, pkey));
}
return crudprocs;
}
/** Helper to sort table columns by table column order */
private static class TableColumnComparator implements Comparator<Column> {
    public TableColumnComparator() {
    }

    @Override
    public int compare(Column o1, Column o2) {
        // Integer.compare instead of subtraction: the a-b idiom can overflow
        // for extreme index values and violate the Comparator contract.
        return Integer.compare(o1.getIndex(), o2.getIndex());
    }
}
/** Helper to sort index columnrefs by index column order */
private static class ColumnRefComparator implements Comparator<ColumnRef> {
    public ColumnRefComparator() {
    }

    @Override
    public int compare(ColumnRef o1, ColumnRef o2) {
        // Integer.compare instead of subtraction: the a-b idiom can overflow
        // for extreme index values and violate the Comparator contract.
        return Integer.compare(o1.getIndex(), o2.getIndex());
    }
}
/**
 * Helper to generate a WHERE pkey_col1 = ?, pkey_col2 = ? ...; clause.
 * @param partitioncolumn partitioning column for the table
 * @param pkey primary-key constraint from the catalog
 * @param sb string builder accumulating the sql statement
 * @return offset in the index of the partition column, or -1 when the
 *         partition column is not part of the primary key
 */
private int generateCrudPKeyWhereClause(Column partitioncolumn,
        Constraint pkey, StringBuilder sb)
{
    // Copy the pkey index columns and sort them into index-column order.
    final ArrayList<ColumnRef> orderedPkeyColumns =
            new ArrayList<ColumnRef>(pkey.getIndex().getColumns().size());
    for (ColumnRef cref : pkey.getIndex().getColumns()) {
        orderedPkeyColumns.add(cref);
    }
    Collections.sort(orderedPkeyColumns, new ColumnRefComparator());

    sb.append(" WHERE ");
    int partitionOffset = -1;
    String separator = "";
    for (ColumnRef cref : orderedPkeyColumns) {
        sb.append(separator).append("(").append(cref.getColumn().getName()).append(" = ?").append(")");
        separator = " AND ";
        // Remember where the partitioning column sits in the key.
        if (cref.getColumn() == partitioncolumn) {
            partitionOffset = cref.getIndex();
        }
    }
    sb.append(";");
    return partitionOffset;
}
/**
 * Helper to generate a full col1 = ?, col2 = ?... clause.
 * @param table table whose columns are listed, in declaration order
 * @param sb string builder accumulating the sql statement
 */
private void generateCrudExpressionColumns(Table table, StringBuilder sb) {
    // Copy and sort the catalog columns into declaration order.
    final ArrayList<Column> orderedColumns = new ArrayList<Column>(table.getColumns().size());
    for (Column column : table.getColumns()) {
        orderedColumns.add(column);
    }
    Collections.sort(orderedColumns, new TableColumnComparator());

    String separator = "";
    for (Column column : orderedColumns) {
        sb.append(separator).append(column.getName()).append(" = ?");
        separator = ", ";
    }
}
/**
 * Helper to generate a full (?, ?, ?) placeholder list with one "?" per
 * table column, in declaration order.
 * @param table table whose column count drives the list
 * @param sb string builder accumulating the sql statement
 */
private void generateCrudColumnList(Table table, StringBuilder sb) {
    // Copy and sort the catalog columns into declaration order.
    final ArrayList<Column> orderedColumns = new ArrayList<Column>(table.getColumns().size());
    for (Column column : table.getColumns()) {
        orderedColumns.add(column);
    }
    Collections.sort(orderedColumns, new TableColumnComparator());

    sb.append("(");
    String separator = "";
    for (Column column : orderedColumns) {
        assert (column.getIndex() >= 0); // mostly mask unused 'column'.
        sb.append(separator).append("?");
        separator = ", ";
    }
    sb.append(")");
}
/**
 * Create a statement like:
 * "delete from &lt;table&gt; where {&lt;pkey-column =?&gt;...}"
 *
 * @param table target table
 * @param partitioncolumn the table's partitioning column
 * @param pkey primary-key constraint supplying the WHERE columns
 * @return descriptor for the synthesized single-statement procedure
 */
private ProcedureDescriptor generateCrudDelete(Table table,
        Column partitioncolumn, Constraint pkey)
{
    final StringBuilder sql = new StringBuilder();
    sql.append("DELETE FROM " + table.getTypeName());
    final int partitionOffset = generateCrudPKeyWhereClause(partitioncolumn, pkey, sql);
    final String partitioninfo =
            table.getTypeName() + "." + partitioncolumn.getName() + ":" + partitionOffset;
    final ProcedureDescriptor descriptor = new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".delete",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            partitioninfo,                    // table.column:offset
            true);                            // builtin statement
    compilerLog.info("Synthesized built-in DELETE procedure: " +
            sql.toString() + " for " + table.getTypeName() + " with partitioning: " +
            partitioninfo);
    return descriptor;
}
/**
 * Create a statement like:
 * "update &lt;table&gt; set {&lt;each-column = ?&gt;...} where {&lt;pkey-column = ?&gt;...}
 *
 * @param table target table
 * @param partitioncolumn the table's partitioning column
 * @param pkey primary-key constraint supplying the WHERE columns
 * @return descriptor for the synthesized single-statement procedure
 */
private ProcedureDescriptor generateCrudUpdate(Table table,
        Column partitioncolumn, Constraint pkey)
{
    final StringBuilder sql = new StringBuilder();
    sql.append("UPDATE " + table.getTypeName() + " SET ");
    generateCrudExpressionColumns(table, sql);
    generateCrudPKeyWhereClause(partitioncolumn, pkey, sql);
    // Partition parameter offset is the column's table index here (the SET
    // list covers every column before the WHERE parameters begin).
    final String partitioninfo =
            table.getTypeName() + "." + partitioncolumn.getName() + ":" + partitioncolumn.getIndex();
    final ProcedureDescriptor descriptor = new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".update",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            partitioninfo,                    // table.column:offset
            true);                            // builtin statement
    compilerLog.info("Synthesized built-in UPDATE procedure: " +
            sql.toString() + " for " + table.getTypeName() + " with partitioning: " +
            partitioninfo);
    return descriptor;
}
/**
 * Create a statement like:
 * "insert into &lt;table&gt; values (?, ?, ...);"
 *
 * @param table target table
 * @param partitioncolumn the table's partitioning column
 * @return descriptor for the synthesized single-statement procedure
 */
private ProcedureDescriptor generateCrudInsert(Table table,
        Column partitioncolumn)
{
    final StringBuilder sql = new StringBuilder();
    sql.append("INSERT INTO " + table.getTypeName() + " VALUES ");
    generateCrudColumnList(table, sql);
    sql.append(";");
    final String partitioninfo =
            table.getTypeName() + "." + partitioncolumn.getName() + ":" + partitioncolumn.getIndex();
    final ProcedureDescriptor descriptor = new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".insert",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            partitioninfo,                    // table.column:offset
            true);                            // builtin statement
    compilerLog.info("Synthesized built-in INSERT procedure: " +
            sql.toString() + " for " + table.getTypeName() + " with partitioning: " +
            partitioninfo);
    return descriptor;
}
/**
 * Create a statement like:
 * "insert into &lt;table&gt; values (?, ?, ...);"
 * for a replicated table (multi-partition, so no partitioning info).
 *
 * @param table target replicated table
 * @return descriptor for the synthesized single-statement procedure
 */
private ProcedureDescriptor generateCrudReplicatedInsert(Table table) {
    final StringBuilder sql = new StringBuilder();
    sql.append("INSERT INTO " + table.getTypeName() + " VALUES ");
    generateCrudColumnList(table, sql);
    sql.append(";");
    final ProcedureDescriptor descriptor = new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".insert",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            null,                             // table.column:offset
            true);                            // builtin statement
    compilerLog.info("Synthesized built-in INSERT multi-partition procedure: " +
            sql.toString() + " for " + table.getTypeName());
    return descriptor;
}
/**
 * Create a statement like:
 * "select * from &lt;table&gt; where pkey_col1 = ?, pkey_col2 = ? ... ;"
 *
 * @param table target table
 * @param partitioncolumn the table's partitioning column
 * @param pkey primary-key constraint supplying the WHERE columns
 * @return descriptor for the synthesized single-statement procedure
 */
private ProcedureDescriptor generateCrudSelect(Table table,
        Column partitioncolumn, Constraint pkey)
{
    final StringBuilder sql = new StringBuilder();
    sql.append("SELECT * FROM " + table.getTypeName());
    final int partitionOffset = generateCrudPKeyWhereClause(partitioncolumn, pkey, sql);
    final String partitioninfo =
            table.getTypeName() + "." + partitioncolumn.getName() + ":" + partitionOffset;
    final ProcedureDescriptor descriptor = new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".select",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            partitioninfo,                    // table.column:offset
            true);                            // builtin statement
    compilerLog.info("Synthesized built-in SELECT procedure: " +
            sql.toString() + " for " + table.getTypeName() + " with partitioning: " +
            partitioninfo);
    return descriptor;
}
/**
 * Populate per-table size estimates on the given DatabaseEstimates.
 * Currently a placeholder: the intended body is commented out and nothing is
 * recorded.
 */
static void addDatabaseEstimatesInfo(final DatabaseEstimates estimates, final Database db) {
// Not implemented yet. Don't panic.
/*for (Table table : db.getTables()) {
DatabaseEstimates.TableEstimates tableEst = new DatabaseEstimates.TableEstimates();
tableEst.maxTuples = 1000000;
tableEst.minTuples = 100000;
estimates.tables.put(table, tableEst);
}*/
}
/**
 * Translate a project-file &lt;procedure&gt; element into a ProcedureDescriptor.
 * A nested &lt;sql&gt; element makes it a "statement procedure"; otherwise it is a
 * Java procedure whose class supplies its own @ProcInfo partitioning.
 *
 * @param xmlproc unmarshalled &lt;procedure&gt; element
 * @return descriptor for either a statement or a Java procedure
 * @throws VoltCompilerException if a Java procedure carries @partitioninfo
 */
ProcedureDescriptor getProcedure(
        org.voltdb.compiler.projectfile.ProceduresType.Procedure xmlproc)
        throws VoltCompilerException
{
    // @groups -- comma-separated list of authorized groups.
    final ArrayList<String> groups = new ArrayList<String>();
    if (xmlproc.getGroups() != null) {
        for (String group : xmlproc.getGroups().split(",")) {
            groups.add(group);
        }
    }
    // @class
    final String classattr = xmlproc.getClazz();

    // No procedure/sql element: a Java procedure. Its partitioning must come
    // from the @ProcInfo annotation, never from the project file.
    if (xmlproc.getSql() == null) {
        if (xmlproc.getPartitioninfo() != null) {
            final String msg = "Java procedures must specify partition info using " +
                "@ProcInfo annotation in the Java class implementation " +
                "and may not use the @partitioninfo project file procedure attribute.";
            throw new VoltCompilerException(msg);
        }
        return new ProcedureDescriptor(groups, classattr);
    }

    // Statement procedure: normalize an empty @partitioninfo to null,
    // which means multi-partition.
    String partattr = xmlproc.getPartitioninfo();
    if (partattr != null && partattr.length() == 0) {
        partattr = null;
    }
    return new ProcedureDescriptor(groups, classattr,
            xmlproc.getSql().getValue(),
            xmlproc.getSql().getJoinorder(),
            partattr, false);
}
/**
 * Resolve a &lt;classdependency&gt; element to its Class object via the compiler's
 * classpath.
 *
 * @param xmlclassdep unmarshalled &lt;classdependency&gt; element
 * @return the loaded Class
 * @throws VoltCompilerException if the class attribute is empty or the class
 *         cannot be found on the classpath
 */
Class<?> getClassDependency(Classdependency xmlclassdep)
throws VoltCompilerException
{
    final String className = xmlclassdep.getClazz();
    // schema doesn't currently enforce this.. but could I guess.
    if (className.length() == 0) {
        throw new VoltCompilerException("\"classDependency\" element has empty \"class\" attribute.");
    }
    try {
        return Class.forName(className);
    } catch (final ClassNotFoundException e) {
        throw new VoltCompilerException("\"classDependency\" can not find class " + className + " in classpath");
    }
}
/**
 * Authorize a group/role on the single export connector, creating the
 * connector entry on demand.
 *
 * @param groupName name of an existing catalog group (non-null, non-blank)
 * @param catdb catalog database being compiled
 * @throws VoltCompilerException if the group does not exist or an existing
 *         auth-group reference points at a different Group instance
 */
void grantExportToGroup( final String groupName, final Database catdb)
throws VoltCompilerException
{
assert groupName != null && ! groupName.trim().isEmpty() && catdb != null;
// Catalog Connector
// Relying on schema's enforcement of at most 1 connector
org.voltdb.catalog.Connector catconn = catdb.getConnectors().getIgnoreCase("0");
if (catconn == null) {
catconn = catdb.getConnectors().add("0");
}
final Group group = catdb.getGroups().getIgnoreCase(groupName);
if (group == null) {
throw new VoltCompilerException("Export has a role " + groupName + " that does not exist");
}
// Create the auth-group reference if missing, or repair a null target.
GroupRef groupRef = catconn.getAuthgroups().getIgnoreCase(groupName);
if (groupRef == null) {
groupRef = catconn.getAuthgroups().add(groupName);
groupRef.setGroup(group);
}
else if (groupRef.getGroup() == null) {
groupRef.setGroup(group);
}
// Identity (==) check on purpose: the reference must point at this exact
// Group instance, not merely an equal-named one.
if (groupRef.getGroup() != group) {
throw new VoltCompilerException(
"Mismatched group reference found in export connector auth groups: " +
"it references '" + groupRef.getGroup().getTypeName() +
"(" + System.identityHashCode(groupRef.getGroup()) +") " +
"', when it should reference '" + group.getTypeName() +
"(" + System.identityHashCode(group) +")" +
"' instead");
}
}
/**
 * Apply the project-file &lt;export&gt; element: ensure the single export
 * connector exists, grant any listed groups, and register exported tables.
 *
 * @param export unmarshalled &lt;export&gt; element; null is a no-op
 * @param catdb catalog database being compiled
 * @throws VoltCompilerException on invalid groups or tables
 */
void compileExport(final ExportType export, final Database catdb)
throws VoltCompilerException
{
    // Test the error paths before touching the catalog
    if (export == null) {
        return;
    }
    // Catalog Connector
    // Relying on schema's enforcement of at most 1 connector
    // This check is also done here to mimic the same behavior of the
    // previous implementation of this method, where the connector is created as
    // long as the export element is present in project XML. Now that we are
    // deprecating project.xml, we won't be able to mimic in DDL, what an
    // empty <export/> element currently implies.
    if (catdb.getConnectors().getIgnoreCase("0") == null) {
        catdb.getConnectors().add("0");
    }
    // @groups -- grant each comma-separated group on the connector.
    if (export.getGroups() != null) {
        for (String authGroup : export.getGroups().split(",")) {
            grantExportToGroup(authGroup, catdb);
        }
    }
    // Catalog Connector.ConnectorTableInfo
    if (export.getTables() == null) {
        compilerLog.warn("Export defined with no <tables> element");
        return;
    }
    if (export.getTables().getTable().isEmpty()) {
        compilerLog.warn("Export defined with an empty <tables> element");
    }
    for (Tables.Table xmltable : export.getTables().getTable()) {
        addExportTableToConnector(xmltable.getName(), catdb);
    }
}
/**
 * Register a table as an export (append-only) table on the single export
 * connector, validating that the table is export-eligible first.
 *
 * @param tableName name of the table to export (non-null, non-blank)
 * @param catdb catalog database being compiled
 * @throws VoltCompilerException if the table is missing, is a view or a view
 *         source, has indexes, or is already exported
 */
void addExportTableToConnector( final String tableName, final Database catdb)
throws VoltCompilerException
{
    assert tableName != null && ! tableName.trim().isEmpty() && catdb != null;

    // Catalog Connector
    // Relying on schema's enforcement of at most 1 connector
    org.voltdb.catalog.Connector connector = catdb.getConnectors().getIgnoreCase("0");
    if (connector == null) {
        connector = catdb.getConnectors().add("0");
    }
    final org.voltdb.catalog.Table exportTable = catdb.getTables().getIgnoreCase(tableName);
    if (exportTable == null) {
        throw new VoltCompilerException("While configuring export, table " + tableName + " was not present in " +
        "the catalog.");
    }
    if (CatalogUtil.isTableMaterializeViewSource(catdb, exportTable)) {
        compilerLog.error("While configuring export, table " + tableName + " is a source table " +
        "for a materialized view. Export only tables do not support views.");
        throw new VoltCompilerException("Export table configured with materialized view.");
    }
    if (exportTable.getMaterializer() != null) {
        compilerLog.error("While configuring export, table " + tableName + " is a " +
        "materialized view. A view cannot be an export table.");
        throw new VoltCompilerException("View configured as an export table");
    }
    if (exportTable.getIndexes().size() > 0) {
        compilerLog.error("While configuring export, table " + tableName + " has indexes defined. " +
        "Export tables can't have indexes (including primary keys).");
        throw new VoltCompilerException("Table with indexes configured as an export table");
    }
    if (exportTable.getIsreplicated()) {
        // if you don't specify partition columns, make
        // export tables partitioned, but on no specific column (iffy)
        exportTable.setIsreplicated(false);
        exportTable.setPartitioncolumn(null);
    }
    // Reject double-registration; otherwise record the table as append-only.
    if (connector.getTableinfo().getIgnoreCase(tableName) != null) {
        throw new VoltCompilerException(String.format(
        "Table \"%s\" is already exported", tableName
        ));
    }
    final org.voltdb.catalog.ConnectorTableInfo tableInfo = connector.getTableinfo().add(tableName);
    tableInfo.setTable(exportTable);
    tableInfo.setAppendonly(true);
}
// Usage messages for new and legacy syntax.
// New syntax: output jar first, followed by one or more DDL files.
static final String usageNew = "VoltCompiler <output-JAR> <input-DDL> ...";
// Legacy syntax: project.xml first, then the output jar.
static final String usageLegacy = "VoltCompiler <project-file> <output-JAR>";
/**
 * Main
 *
 * Incoming arguments:
 *
 * New syntax: OUTPUT_JAR INPUT_DDL ...
 * Legacy syntax: PROJECT_FILE OUTPUT_JAR
 *
 * @param args arguments (see above)
 */
public static void main(final String[] args)
{
    final VoltCompiler compiler = new VoltCompiler();
    boolean success = false;
    // Dispatch on the extension of the first argument.
    final String firstArg = (args.length > 0) ? args[0].toLowerCase() : "";
    if (firstArg.endsWith(".jar")) {
        // New syntax: the first argument is the output jar.
        if (args.length < 2) {
            System.err.printf("Usage: %s\n", usageNew);
            System.exit(-1);
        }
        // Check for accidental .jar or .xml files specified for argument 2
        // to catch accidental incomplete use of the legacy syntax.
        final String secondArg = args[1].toLowerCase();
        if (secondArg.endsWith(".xml") || secondArg.endsWith(".jar")) {
            System.err.println("Error: Expecting a DDL file as the second argument.\n"
                + " .xml and .jar are invalid DDL file extensions.");
            System.exit(-1);
        }
        success = compiler.compileFromDDL(args[0], ArrayUtils.subarray(args, 1, args.length));
    }
    else if (firstArg.endsWith(".xml")) {
        // Legacy syntax: project file followed by the output jar.
        if (args.length != 2) {
            System.err.printf("Usage: %s\n", usageLegacy);
            System.exit(-1);
        }
        success = compiler.compileWithProjectXML(args[0], args[1]);
    }
    else {
        // Can't recognize the arguments or there are no arguments.
        System.err.printf("Usage: %s\n %s\n", usageNew, usageLegacy);
        System.exit(-1);
    }
    // Exit with error code if we failed
    if (!success) {
        compiler.summarizeErrors(System.out, null);
        System.exit(-1);
    }
    compiler.summarizeSuccess(System.out, null);
}
public void summarizeSuccess(PrintStream outputStream, PrintStream feedbackStream) {
if (outputStream != null) {
Database database = m_catalog.getClusters().get("cluster").
getDatabases().get("database");
outputStream.println("
outputStream.println("Successfully created " + m_jarOutputPath);
for (String ddl : m_ddlFilePaths.keySet()) {
outputStream.println("Includes schema: " + m_ddlFilePaths.get(ddl));
}
outputStream.println();
// memoize non-det procs for a warning at the end of the compile
ArrayList<Procedure> nonDetProcs = new ArrayList<Procedure>();
for (Procedure p : database.getProcedures()) {
if (p.getSystemproc()) {
continue;
}
outputStream.printf("[%s][%s]%s %s\n",
p.getSinglepartition() ? "SP" : "MP",
p.getReadonly() ? "RO" : "RW",
p.getHasseqscans() ? "[Seq]" : "",
p.getTypeName());
for (Statement s : p.getStatements()) {
String seqScanTag = "";
if (s.getSeqscancount() > 0) {
seqScanTag = "[Seq] ";
}
String determinismTag = "";
// if the proc is a java stored proc that is read&write,
// output determinism warnings
if (p.getHasjava() && (!p.getReadonly())) {
if (s.getIscontentdeterministic() == false) {
determinismTag = "[NDC] ";
nonDetProcs.add(p);
}
else if (s.getIsorderdeterministic() == false) {
determinismTag = "[NDO] ";
nonDetProcs.add(p);
}
}
String statementLine;
String sqlText = s.getSqltext();
sqlText = squeezeWhitespace(sqlText);
if (seqScanTag.length() + determinismTag.length() + sqlText.length() > 80) {
statementLine = " " + (seqScanTag + determinismTag + sqlText).substring(0, 80) + "...";
} else {
statementLine = " " + seqScanTag + determinismTag + sqlText;
}
outputStream.println(statementLine);
}
outputStream.println();
}
outputStream.println("
// post-compile determinism warning
if (!nonDetProcs.isEmpty()) {
outputStream.println(
"\nNON-DETERMINISM WARNING:\n\n" +
"The procedures listed below contain non-deterministic queries.\n");
for (Procedure p : nonDetProcs) {
outputStream.println(" " + p.getClassname());
}
outputStream.println(
"\nUsing the output of these queries as input to subsequent\n" +
"write queries can result in differences between replicated\n" +
"partitions at runtime, forcing VoltDB to shutdown the cluster.\n" +
"Review the compiler messages above to identify the offending\n" +
"SQL statements (marked as \"[NDO] or [NDC]\"). Add a unique\n" +
"index to the schema or an explicit ORDER BY clause to the\n" +
"query to make these queries deterministic.\n");
outputStream.println("
}
}
if (feedbackStream != null) {
for (Feedback fb : m_warnings) {
feedbackStream.println(fb.getLogString());
}
for (Feedback fb : m_infos) {
feedbackStream.println(fb.getLogString());
}
}
}
/**
 * Return a copy of the input sqltext with each run of successive whitespace
 * characters replaced by a single space.
 * This is just for informal feedback purposes, so quoting is not respected.
 *
 * @param sqltext SQL text to compact for one-line display
 * @return a possibly modified copy of the input sqltext
 **/
private static String squeezeWhitespace(String sqltext) {
    // \s+ matches any run of spaces/tabs/newlines; collapse to one space.
    return sqltext.replaceAll("\\s+", " ");
}
public void summarizeErrors(PrintStream outputStream, PrintStream feedbackStream) {
if (outputStream != null) {
outputStream.println("
outputStream.println("Project compilation failed. See log for errors.");
outputStream.println("
}
if (feedbackStream != null) {
for (Feedback fb : m_errors) {
feedbackStream.println(fb.getLogString());
}
}
}
// this needs to be reset in the main compile func
private static final HashSet<Class<?>> cachedAddedClasses = new HashSet<Class<?>>();

/**
 * Add the classfile bytes for {@code cls} — and, recursively, all of its
 * declared nested classes — to the compiler's in-memory jar output
 * ({@code compiler.m_jarOutput}). Classes already added during this compile
 * are skipped via {@link #cachedAddedClasses}.
 *
 * @param cls      class whose bytecode should be packaged
 * @param compiler compiler whose jar output receives the bytes
 * @throws VoltCompiler.VoltCompilerException if the classfile cannot be
 *         located or completely read
 */
public static final void addClassToJar(final Class<?> cls, final VoltCompiler compiler)
        throws VoltCompiler.VoltCompilerException {

    // HashSet.add returns false when the element was already present.
    if (!cachedAddedClasses.add(cls)) {
        return;
    }

    for (final Class<?> nested : cls.getDeclaredClasses()) {
        addClassToJar(nested, compiler);
    }

    // Jar entry path, e.g. "org/voltdb/Foo.class".
    String packagePath = cls.getName();
    packagePath = packagePath.replace('.', '/');
    packagePath += ".class";

    // Simple resource name, e.g. "Foo.class", resolved relative to the class.
    String realName = cls.getName();
    realName = realName.substring(realName.lastIndexOf('.') + 1);
    realName += ".class";

    final URL absolutePath = cls.getResource(realName);
    File file = null;
    InputStream fis = null;
    int fileSize = 0;
    try {
        file = new File(URLDecoder.decode(absolutePath.getFile(), "UTF-8"));
        fis = new FileInputStream(file);
        assert(file.canRead());
        assert(file.isFile());
        fileSize = (int) file.length();
    } catch (final FileNotFoundException e) {
        // Not a plain file on disk; the class may live inside a jar.
        try {
            final String contents = readFileFromJarfile(absolutePath.getPath());
            fis = new StringInputStream(contents);
            fileSize = contents.length();
        }
        catch (final Exception e2) {
            final String msg = "Unable to locate classfile for " + realName;
            throw compiler.new VoltCompilerException(msg);
        }
    } catch (final UnsupportedEncodingException e) {
        e.printStackTrace();
        System.exit(-1);
        // Prevent warning about fis possibly being null below.
        return;
    }

    assert(fileSize > 0);
    int readSize = 0;
    final byte[] fileBytes = new byte[fileSize];
    try {
        // FIX: the original loop assigned the per-call byte count to
        // readSize instead of accumulating it, which corrupts the write
        // offset after a short read and never terminates on premature EOF
        // (read returns -1). Accumulate and fail fast on EOF instead.
        while (readSize < fileSize) {
            final int bytesRead = fis.read(fileBytes, readSize, fileSize - readSize);
            if (bytesRead < 0) {
                final String msg = "Unable to read (or completely read) classfile for " + realName;
                throw compiler.new VoltCompilerException(msg);
            }
            readSize += bytesRead;
        }
    } catch (final IOException e) {
        final String msg = "Unable to read (or completely read) classfile for " + realName;
        throw compiler.new VoltCompilerException(msg);
    } finally {
        // FIX: the original never closed the stream (resource leak).
        try {
            fis.close();
        } catch (final IOException ignored) {
            // best-effort close; nothing useful to do here
        }
    }

    compiler.m_jarOutput.put(packagePath, fileBytes);
}
/**
 * Read a file from a jar in the form path/to/jar.jar!/path/to/file.ext
 */
static String readFileFromJarfile(String fulljarpath) throws IOException {
    assert (fulljarpath.contains(".jar!"));

    // Split "jarpath!/entry" into the jar location and the entry path.
    final String[] parts = fulljarpath.split("!");
    String jarPath = parts[0];
    if (jarPath.startsWith("file:")) {
        jarPath = jarPath.substring("file:".length());
    }
    // Drop the leading '/' of "/path/to/file.ext".
    final String entryPath = parts[1].substring(1);
    return readFileFromJarfile(jarPath, entryPath);
}

/**
 * Read the named entry out of a jar, located either by URL or by plain
 * file path, and return its contents decoded as UTF-8. Returns null when
 * the entry is not present in the jar.
 */
static String readFileFromJarfile(String jarfilePath, String entryPath) throws IOException {
    InputStream fin;
    try {
        fin = new URL(jarfilePath).openStream();
    } catch (MalformedURLException ex) {
        // Not a URL; treat it as a filesystem path instead.
        fin = new FileInputStream(jarfilePath);
    }

    final JarInputStream jarIn = new JarInputStream(fin);
    JarEntry entry = jarIn.getNextJarEntry();
    // Scan forward until the requested entry (or the end of the jar).
    while (entry != null && !entry.getName().equals(entryPath)) {
        entry = jarIn.getNextJarEntry();
    }
    if (entry == null) {
        jarIn.close();
        return null;
    }
    final byte[] bytes = InMemoryJarfile.readFromJarEntry(jarIn, entry);
    return new String(bytes, "UTF-8");
}
/**
 * Install a map of per-procedure info overrides, keyed by procedure name.
 * Replaces any previously installed map wholesale.
 * NOTE(review): presumably used by tests/tools to inject ProcInfoData
 * without annotations — confirm against callers.
 *
 * @param procInfoOverrides the m_procInfoOverrides to set
 */
public void setProcInfoOverrides(Map<String, ProcInfoData> procInfoOverrides) {
    m_procInfoOverrides = procInfoOverrides;
}
/**
 * Helper enum that scans sax exception messages for deprecated xml elements
 *
 * @author ssantoro
 */
enum DeprecatedProjectElement {
    // Matches the schema-validation error produced when a project file still
    // contains a <security> element (XML Schema "cvc-..." message prefix).
    security(
        "(?i)\\Acvc-[^:]+:\\s+Invalid\\s+content\\s+.+?\\s+element\\s+'security'",
        "security may be enabled in the deployment file only"
    );

    /**
     * message regular expression that pertains to the deprecated element
     */
    private final Pattern messagePattern;

    /**
     * a suggestion string to explain alternatives
     */
    private final String suggestion;

    DeprecatedProjectElement(String messageRegex, String suggestion) {
        this.messagePattern = Pattern.compile(messageRegex);
        this.suggestion = suggestion;
    }

    String getSuggestion() {
        return suggestion;
    }

    /**
     * Given a JAXBException it determines whether or not the linked
     * exception is associated with a deprecated xml elements
     *
     * @param jxbex a {@link JAXBException}
     * @return an enum of {@code DeprecatedProjectElement} if the
     *     given exception corresponds to a deprecated xml element,
     *     or null if it does not match any known pattern
     */
    static DeprecatedProjectElement valueOf( JAXBException jxbex) {
        // Only SAX parse errors carry the schema-validation message we scan.
        if( jxbex == null
            || jxbex.getLinkedException() == null
            || ! (jxbex.getLinkedException() instanceof org.xml.sax.SAXParseException)
            ) {
            return null;
        }
        org.xml.sax.SAXParseException saxex =
            org.xml.sax.SAXParseException.class.cast(jxbex.getLinkedException());
        // First enum constant whose pattern appears in the message wins.
        for( DeprecatedProjectElement dpe: DeprecatedProjectElement.values()) {
            Matcher mtc = dpe.messagePattern.matcher(saxex.getMessage());
            if( mtc.find()) return dpe;
        }

        return null;
    }
}
}
|
package com.github.TKnudsen.ComplexDataObject.view;
import java.awt.geom.Rectangle2D;
/**
 * Utility for partitioning a rectangle into a grid of equally sized cells
 * separated by a fixed gap.
 */
public class Rectangle2DTools {

    /**
     * Create an xCount-by-yCount matrix of equally sized sub-rectangles of
     * {@code rectangle}, with {@code betweenSpaceOffset} units of space
     * between neighboring cells.
     *
     * @param rectangle          the bounds to subdivide
     * @param xCount             number of columns (must be &gt; 0)
     * @param yCount             number of rows (must be &gt; 0)
     * @param betweenSpaceOffset gap between adjacent cells (must be &gt;= 0)
     * @return the cell matrix indexed [column][row], or null if the arguments
     *         are invalid or the rectangle is too small for the requested grid
     */
    public static Rectangle2D[][] createRectangleMatrix(Rectangle2D rectangle, int xCount, int yCount, double betweenSpaceOffset) {
        // NOTE(review): the size check treats each cell as needing at least
        // 1.0 units — assumed intentional minimum cell size; confirm.
        if (rectangle == null || xCount <= 0 || yCount <= 0 || betweenSpaceOffset < 0
                || rectangle.getWidth() < xCount + betweenSpaceOffset * (xCount - 1)
                || rectangle.getHeight() < yCount + betweenSpaceOffset * (yCount - 1))
            return null;

        Rectangle2D[][] rectangleArray = new Rectangle2D[xCount][yCount];

        // Space left for cells after subtracting the (count - 1) gaps.
        double ySpace = rectangle.getHeight() - ((yCount - 1) * betweenSpaceOffset);
        double xSpace = rectangle.getWidth() - ((xCount - 1) * betweenSpaceOffset);
        double height = ySpace / yCount;
        double width = xSpace / xCount;

        for (int x = 0; x < xCount; x++) {
            for (int y = 0; y < yCount; y++) {
                // BUG FIX: the original used (x - 1) / (y - 1) as the gap
                // multiplier, which placed the first row/column one full gap
                // outside the rectangle's bounds. Cell i starts after i cells
                // and i gaps: origin + i * (cellSize + gap).
                double xPosition = rectangle.getX() + x * (width + betweenSpaceOffset);
                double yPosition = rectangle.getY() + y * (height + betweenSpaceOffset);
                rectangleArray[x][y] = new Rectangle2D.Double(xPosition, yPosition, width, height);
            }
        }
        return rectangleArray;
    }

    /**
     * Integer-gap convenience overload; delegates to the double version.
     */
    public static Rectangle2D[][] createRectangleMatrix(Rectangle2D rectangle, int xCount, int yCount, int betweenSpaceOffset) {
        return createRectangleMatrix(rectangle, xCount, yCount, (double) betweenSpaceOffset);
    }
}
|
import info.yeppp.ebuilda.*;
import info.yeppp.ebuilda.filesystem.AbsoluteDirectoryPath;
import info.yeppp.ebuilda.filesystem.AbsoluteFilePath;
import info.yeppp.ebuilda.filesystem.RelativeDirectoryPath;
import info.yeppp.ebuilda.filesystem.RelativeFilePath;
import info.yeppp.ebuilda.sdk.AndroidNDK;
import info.yeppp.ebuilda.sdk.AndroidToolchain;
import info.yeppp.ebuilda.sdk.WindowsSDK;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Command-line driver that builds the Yeppp! library for one or more target
 * ABIs using the ebuilda build framework.
 *
 * NOTE(review): the toolchain/SDK types come from the ebuilda packages; their
 * exact semantics are assumed from usage here — confirm against the ebuilda
 * documentation.
 */
public class CLIBuild {

    /**
     * Entry point. Each command-line argument names an ABI; for every ABI the
     * matching toolchain is located, configured, and used to compile and link
     * the library under the current working directory.
     */
    public static void main(String[] args) throws Exception {
        final AbsoluteDirectoryPath yepppRoot = Machine.getLocal().getWorkingDirectory();

        for (final String abiName : args) {
            final ABI abi = ABI.parse(abiName);
            final Toolchain toolchain = getToolchain(abi);
            // NOTE(review): getToolchain returns null for unsupported ABIs,
            // which would NPE here — confirm whether that is acceptable.
            setup(toolchain.cppCompiler, toolchain.assembler, toolchain.linker, toolchain.microsoftResourceCompiler, toolchain.gnuStrip, toolchain.gnuObjCopy, yepppRoot);
            build(toolchain.cppCompiler, toolchain.assembler, toolchain.linker, toolchain.microsoftResourceCompiler, toolchain.gnuStrip, toolchain.gnuObjCopy, yepppRoot);
        }
    }

    /**
     * Immutable value holder grouping the tools used to build for one ABI.
     * Fields not applicable to a platform are null (e.g. the resource
     * compiler outside Windows, strip/objcopy outside GNU toolchains).
     */
    static class Toolchain {
        /** Toolchain without a resource compiler or strip/objcopy tools. */
        public Toolchain(CppCompiler cppCompiler, Assembler assembler, Linker linker) {
            this.cppCompiler = cppCompiler;
            this.assembler = assembler;
            this.linker = linker;
            this.microsoftResourceCompiler = null;
            this.gnuStrip = null;
            this.gnuObjCopy = null;
        }

        /** Windows toolchain variant: includes the resource compiler. */
        public Toolchain(CppCompiler cppCompiler, Assembler assembler, Linker linker, MicrosoftResourceCompiler microsoftResourceCompiler) {
            this.cppCompiler = cppCompiler;
            this.assembler = assembler;
            this.linker = linker;
            this.microsoftResourceCompiler = microsoftResourceCompiler;
            this.gnuStrip = null;
            this.gnuObjCopy = null;
        }

        /** GNU toolchain variant: includes strip and objcopy for debug-info splitting. */
        public Toolchain(CppCompiler cppCompiler, Assembler assembler, Linker linker, GnuStrip gnuStrip, GnuObjCopy gnuObjCopy) {
            this.cppCompiler = cppCompiler;
            this.assembler = assembler;
            this.linker = linker;
            this.microsoftResourceCompiler = null;
            this.gnuStrip = gnuStrip;
            this.gnuObjCopy = gnuObjCopy;
        }

        final CppCompiler cppCompiler;
        final Assembler assembler;
        final Linker linker;
        final MicrosoftResourceCompiler microsoftResourceCompiler;
        final GnuStrip gnuStrip;
        final GnuObjCopy gnuObjCopy;
    }

    /**
     * Configure every tool for a library build: directories, include paths,
     * optimization, PIC, and link-time dependencies. Tools that are null
     * (resource compiler, strip, objcopy) are skipped.
     */
    public static void setup(CppCompiler cppCompiler, Assembler assembler, Linker linker, MicrosoftResourceCompiler microsoftResourceCompiler, GnuStrip gnuStrip, GnuObjCopy gnuObjCopy, AbsoluteDirectoryPath yepppRoot) {
        final ABI abi = cppCompiler.getABI();

        // Per-ABI output directories under library/binaries and runtime/binaries.
        final AbsoluteDirectoryPath sourceDirectory = new AbsoluteDirectoryPath(yepppRoot, new RelativeDirectoryPath("library/sources"));
        final AbsoluteDirectoryPath objectDirectory = new AbsoluteDirectoryPath(yepppRoot, new RelativeDirectoryPath("library/binaries/" + abi.toString()));
        final AbsoluteDirectoryPath runtimeBinariesDirectory = new AbsoluteDirectoryPath(yepppRoot, new RelativeDirectoryPath("runtime/binaries/" + abi.toString()));

        cppCompiler.setSourceDirectory(sourceDirectory);
        cppCompiler.setObjectDirectory(objectDirectory);
        cppCompiler.addDefaultGlobalIncludeDirectories();
        cppCompiler.setVerboseBuild(true);
        cppCompiler.addMacro("YEP_BUILD_LIBRARY");
        // Non-Windows builds use a GNU-style compiler and need PIC for a
        // shared library; the cast assumes that pairing holds — confirm.
        if (!abi.getOperatingSystem().equals(OperatingSystem.Windows)) {
            GnuCppCompiler gnuCppCompiler = (GnuCppCompiler)cppCompiler;
            gnuCppCompiler.setPositionIndependentCodeGeneration(GnuCppCompiler.PositionIndependentCodeGeneration.UnlimitedLibraryPIC);
        }
        // Freestanding-style C++: no RTTI, no exceptions, no runtime library.
        cppCompiler.setRttiEnabled(false);
        cppCompiler.setExceptionsSupport(CppCompiler.Exceptions.NoExceptions);
        cppCompiler.setRuntimeLibrary(CppCompiler.RuntimeLibrary.NoRuntimeLibrary);
        cppCompiler.setOptimization(CppCompiler.Optimization.MaxSpeedOptimization);
        cppCompiler.addIncludeDirectory(cppCompiler.getSourceDirectory());
        cppCompiler.addIncludeDirectory(new AbsoluteDirectoryPath(yepppRoot, new RelativeDirectoryPath("library/headers")));

        if (microsoftResourceCompiler != null) {
            microsoftResourceCompiler.setSourceDirectory(sourceDirectory);
            microsoftResourceCompiler.setObjectDirectory(objectDirectory);
            microsoftResourceCompiler.addDefaultGlobalIncludeDirectories();
            microsoftResourceCompiler.setVerboseBuild(true);
            microsoftResourceCompiler.addIncludeDirectory(sourceDirectory);
            microsoftResourceCompiler.addIncludeDirectory(new AbsoluteDirectoryPath(yepppRoot, new RelativeDirectoryPath("library/headers")));
        }

        if (assembler != null) {
            assembler.setSourceDirectory(sourceDirectory);
            assembler.setObjectDirectory(objectDirectory);
            assembler.setVerboseBuild(true);
            if (assembler instanceof NASM) {
                final NASM nasm = (NASM)assembler;
                nasm.setOptimization(NASM.Optimization.Multipass);
            }
        }

        if (gnuObjCopy != null) {
            gnuObjCopy.setVerboseBuild(true);
        }
        if (gnuStrip != null) {
            gnuStrip.setVerboseBuild(true);
        }

        linker.setObjectDirectory(objectDirectory);
        linker.setBinariesDirectory(objectDirectory);
        linker.addDefaultGlobalLibraryDirectories();
        if (!abi.getOperatingSystem().equals(OperatingSystem.Windows)) {
            GnuLinker gnuLinker = (GnuLinker)linker;
            gnuLinker.setPIC(GnuLinker.PositionIndependentCode.Unlimited);
        }
        linker.setVerboseBuild(true);
        linker.setRuntimeLibraryUse(false);
        // Link against the pre-built Yeppp! runtime support library.
        linker.addLibraryDirectory(runtimeBinariesDirectory);
        linker.addStaticLibraryDependence("yeprt");
        if (abi.getOperatingSystem().equals(OperatingSystem.Windows)) {
            linker.addDynamicLibraryDependence("kernel32");
        }
    }

    /**
     * Map a low-level ABI to the filename pattern of its hand-written
     * assembly sources (e.g. "*.x64-sysv.asm"). Throws Error for an
     * unrecognized ABI so new ABIs fail loudly.
     */
    public static Pattern getAssemblyPattern(ABI abi) {
        switch (abi.getLowLevelABI()) {
            case X86_Pic:
                return Pattern.compile(".+\\.x86\\-pic\\.asm");
            case X86_NonPic:
                return Pattern.compile(".+\\.x86\\-nonpic\\.asm");
            case X64_Microsoft:
                return Pattern.compile(".+\\.x64\\-ms\\.asm");
            case X64_SystemV:
                return Pattern.compile(".+\\.x64\\-sysv\\.asm");
            case ARM_SoftEABI:
                // The "-softeabi" suffix is optional for plain ARM sources.
                return Pattern.compile(".+\\.arm(?:\\-softeabi)?\\.asm");
            case ARM_HardEABI:
                return Pattern.compile(".+\\.arm(?:\\-hardeabi)?\\.asm");
            case MIPS_O32:
                return Pattern.compile(".+\\.mips\\.asm");
            default:
                throw new Error(String.format("Unknown low-level ABI %s", abi.getLowLevelABI().toString()));
        }
    }

    /**
     * Compile all applicable C++ and assembly sources (skipping files for
     * other CPUs/OSes), link the shared library, and — on GNU toolchains —
     * split out debug info. Build messages are printed at the end.
     */
    public static void build(CppCompiler cppCompiler, Assembler assembler, Linker linker, MicrosoftResourceCompiler microsoftResourceCompiler, GnuStrip gnuStrip, GnuObjCopy gnuObjCopy, AbsoluteDirectoryPath yepppRoot) {
        final ABI abi = cppCompiler.getABI();
        final Architecture architecture = abi.getArchitecture();
        final OperatingSystem operatingSystem = abi.getOperatingSystem();

        final AbsoluteFilePath libraryBinaryPath = new AbsoluteFilePath(linker.getBinariesDirectory(), new RelativeFilePath("yeppp"));

        final BuildMessages buildMessages = new BuildMessages();
        final List<AbsoluteFilePath> cppSources = cppCompiler.getSourceDirectory().getFiles(Pattern.compile(".+\\.cpp"), true);
        final List<AbsoluteFilePath> rcSources = cppCompiler.getSourceDirectory().getFiles(Pattern.compile(".+\\.rc"), true);
        final List<AbsoluteFilePath> asmSources = assembler.getSourceDirectory().getFiles(getAssemblyPattern(abi), true);

        final List<AbsoluteFilePath> objects = new ArrayList<AbsoluteFilePath>(cppSources.size());
        for (final AbsoluteFilePath source : cppSources) {
            final String sourcePath = source.getRelativePath(cppCompiler.getSourceDirectory()).toString();
            // Skip CPU- and OS-specific sources that don't match this target.
            if (sourcePath.equals("library/CpuX86.cpp") && !(architecture.equals(Architecture.X86) || architecture.equals(Architecture.X64))) {
                continue;
            }
            if (sourcePath.equals("library/CpuArm.cpp") && !architecture.equals(Architecture.ARM)) {
                continue;
            }
            if (sourcePath.equals("library/CpuMips.cpp") && !architecture.equals(Architecture.MIPS)) {
                continue;
            }
            if (sourcePath.equals("library/CpuWindows.cpp") && !operatingSystem.equals(OperatingSystem.Windows)) {
                continue;
            }
            if (sourcePath.equals("library/CpuLinux.cpp") && !operatingSystem.equals(OperatingSystem.Linux)) {
                continue;
            }
            if (sourcePath.equals("library/CpuMacOSX.cpp") && !operatingSystem.equals(OperatingSystem.MacOSX)) {
                continue;
            }
            if (sourcePath.equals("library/Unsafe.cpp") && !operatingSystem.equals(OperatingSystem.Linux)) {
                continue;
            }
            buildMessages.add(cppCompiler.compile(source));
            objects.add(cppCompiler.getObjectPath(source));
        }

        // Windows resources (version info etc.) only exist on Windows builds.
        if (abi.getOperatingSystem().equals(OperatingSystem.Windows)) {
            for (final AbsoluteFilePath source : rcSources) {
                buildMessages.add(microsoftResourceCompiler.compile(source));
                objects.add(microsoftResourceCompiler.getObjectPath(source));
            }
        }

        for (final AbsoluteFilePath source : asmSources) {
            buildMessages.add(assembler.assemble(source));
            objects.add(assembler.getObjectPath(source));
        }

        buildMessages.add(linker.linkDynamicLibrary(libraryBinaryPath, objects));

        // GNU toolchains: move debug info into a side .dbg file and link it
        // back via .gnu_debuglink so the shipped .so stays small.
        if ((gnuStrip != null) && (gnuObjCopy != null)) {
            final AbsoluteFilePath libraryBinary = new AbsoluteFilePath(linker.getBinariesDirectory(), new RelativeFilePath("libyeppp.so"));
            final AbsoluteFilePath debugBinary = new AbsoluteFilePath(linker.getBinariesDirectory(), new RelativeFilePath("libyeppp.dbg"));
            buildMessages.add(gnuStrip.extractDebugInformation(libraryBinary, debugBinary));
            buildMessages.add(gnuStrip.strip(libraryBinary));
            buildMessages.add(gnuObjCopy.addGnuDebugLink(libraryBinary, debugBinary));
        }

        for (BuildMessage buildMessage : buildMessages.iterable()) {
            System.out.println(buildMessage.toString());
        }
    }

    /**
     * Locate and assemble the newest installed toolchain for the given ABI.
     *
     * @return a configured Toolchain, or null for ABIs with no supported
     *     toolchain mapping (callers currently do not check for null).
     */
    public static Toolchain getToolchain(ABI abi) {
        switch (abi) {
            case X64_Windows_Microsoft_Default:
            case X86_Windows_Default_i586:
            {
                // Windows: Visual Studio C++/linker + NASM + resource compiler.
                final VisualStudio visualStudio = VisualStudio.enumerate(Machine.getLocal(), abi).getNewest();
                final NASM nasm = NASM.enumerate(Machine.getLocal(), abi).getNewest();
                final MicrosoftResourceCompiler resourceCompiler = visualStudio.getWindowsSDK().getResourceCompiler();
                return new Toolchain(visualStudio.getCppCompiler(), nasm, visualStudio.getLinker(), resourceCompiler);
            }
            case ARM_Linux_SoftEABI_Android:
            case ARM_Linux_SoftEABI_AndroidV7A:
            case X86_Linux_Pic_Android:
            case MIPS_Linux_O32_Android:
            {
                // Android: GNU toolchain shipped with the newest installed NDK.
                final AndroidNDK androidNDK = AndroidNDK.enumerate(Machine.getLocal()).getNewest();
                final AndroidToolchain androidToolchain = androidNDK.enumerateToolchains(abi, AndroidToolchain.Type.GNU).getNewest();
                return new Toolchain(androidToolchain.getCppCompiler(), androidToolchain.getAssembler(), androidToolchain.getLinker(), androidToolchain.getStrip(), androidToolchain.getObjCopy());
            }
            case X64_Linux_SystemV_Default:
            case X86_Linux_Pic_i586:
            {
                // Desktop Linux x86/x64: GNU toolchain with NASM as assembler.
                final GnuToolchain gnuToolchain = GnuToolchain.enumerate(Machine.getLocal(), abi).getNewest();
                final NASM nasm = NASM.enumerate(Machine.getLocal(), abi).getNewest();
                return new Toolchain(gnuToolchain.getCppCompiler(), nasm, gnuToolchain.getLinker(), gnuToolchain.getStrip(), gnuToolchain.getObjCopy());
            }
            case X64_Linux_KNC_Default:
            case ARM_Linux_HardEABI_V7A:
            case ARM_Linux_SoftEABI_V5T:
            {
                // Targets using the GNU assembler instead of NASM; note this
                // variant carries no strip/objcopy (no debug-info split).
                final GnuToolchain gnuToolchain = GnuToolchain.enumerate(Machine.getLocal(), abi).getNewest();
                return new Toolchain(gnuToolchain.getCppCompiler(), gnuToolchain.getAssembler(), gnuToolchain.getLinker());
            }
            default:
                return null;
        }
    }
}
|
package edu.umd.cs.findbugs;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
/**
 * Version number and release date information.
 */
public class Version {
    /**
     * Major version number.
     */
    public static final int MAJOR = 1;

    /**
     * Minor version number.
     */
    public static final int MINOR = 3;

    /**
     * Patch level.
     */
    public static final int PATCHLEVEL = 9;

    /**
     * Development version or release candidate?
     */
    public static final boolean IS_DEVELOPMENT = true;

    /**
     * Release candidate number.
     * "0" indicates that the version is not a release candidate.
     */
    public static final int RELEASE_CANDIDATE = 1;

    /**
     * Release date.
     */
    public static final String COMPUTED_DATE;
    // Effective release date: the value from version.properties when
    // available, otherwise COMPUTED_DATE (set in the second static block).
    public static final String DATE;
    private static final String COMPUTED_ECLIPSE_DATE;
    static {
        // Build-time timestamps; both formatted from "now" at class load.
        SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss z, dd MMMM, yyyy");
        SimpleDateFormat eclipseDateFormat = new SimpleDateFormat("yyyyMMdd");
        COMPUTED_DATE = dateFormat.format(new Date());
        COMPUTED_ECLIPSE_DATE = eclipseDateFormat.format(new Date()) ;
    }

    /**
     * Preview release number.
     * "0" indicates that the version is not a preview release.
     */
    public static final int PREVIEW = 0;

    // Suffix appended to development builds: "rcN", "previewN", or a dated
    // "dev-yyyyMMdd" marker when neither applies.
    private static final String RELEASE_SUFFIX_WORD =
        (RELEASE_CANDIDATE > 0
                ? "rc" + RELEASE_CANDIDATE
                        : (PREVIEW > 0 ? "preview" + PREVIEW : "dev-" + COMPUTED_ECLIPSE_DATE));

    public static final String RELEASE_BASE = MAJOR + "." + MINOR + "." + PATCHLEVEL ;

    /**
     * Release version string.
     */
    public static final String COMPUTED_RELEASE =
        RELEASE_BASE + (IS_DEVELOPMENT ? "-" + RELEASE_SUFFIX_WORD : "");

    /**
     * Release version string.
     */
    public static final String RELEASE;

    /**
     * Version of Eclipse plugin.
     */
    public static final String COMPUTED_ECLIPSE_UI_VERSION =
        RELEASE_BASE + "." + COMPUTED_ECLIPSE_DATE;

    static {
        // Prefer values baked into version.properties (package-relative
        // resource); fall back to the computed values above when the file
        // or a key is absent.
        InputStream in = null;
        String release, date;
        try {
            Properties versionProperties = new Properties();
            in = Version.class.getResourceAsStream("version.properties");
            // NOTE(review): if the resource is missing, `in` is null and
            // load(null) throws NullPointerException, which is handled by
            // the RuntimeException catch below — presumably intentional.
            versionProperties.load(in);
            release = (String) versionProperties.get("release.number");
            date = (String) versionProperties.get("release.date");
            if (release == null)
                release = COMPUTED_RELEASE;
            if (date == null)
                date = COMPUTED_DATE;
        } catch (RuntimeException e) {
            release = COMPUTED_RELEASE;
            date = COMPUTED_DATE;
        } catch (IOException e) {
            release = COMPUTED_RELEASE;
            date = COMPUTED_DATE;
        } finally {
            try {
                if (in != null) in.close();
            } catch (IOException e) {
                assert true; // nothing to do here
            }
        }
        RELEASE = release;
        DATE = date;
    }

    /**
     * FindBugs website.
     */
    public static final String WEBSITE = "http://findbugs.sourceforge.net";

    /**
     * Downloads website.
     */
    public static final String DOWNLOADS_WEBSITE = "http://prdownloads.sourceforge.net/findbugs";

    /**
     * Support email.
     */
    public static final String SUPPORT_EMAIL = "http://findbugs.sourceforge.net/reportingBugs.html";

    /**
     * Print one piece of version information selected by the single
     * command-line flag: -release, -date, or -props.
     */
    public static void main(String[] argv) {
        if (argv.length != 1)
            usage();

        String arg = argv[0];

        // Sanity check: a non-development build must not be a release candidate.
        if (!IS_DEVELOPMENT && RELEASE_CANDIDATE != 0) {
            throw new IllegalStateException("Non developmental version, but is release candidate " + RELEASE_CANDIDATE);
        }

        if (arg.equals("-release"))
            System.out.println(RELEASE);
        else if (arg.equals("-date"))
            System.out.println(DATE);
        else if (arg.equals("-props")) {
            System.out.println("release.base=" + RELEASE_BASE);
            System.out.println("release.number=" + COMPUTED_RELEASE);
            System.out.println("release.date=" + COMPUTED_DATE);
            System.out.println("eclipse.ui.version=" + COMPUTED_ECLIPSE_UI_VERSION);
            System.out.println("findbugs.website=" + WEBSITE);
            System.out.println("findbugs.downloads.website=" + DOWNLOADS_WEBSITE);
        } else {
            usage();
            System.exit(1);
        }
    }

    // Print flag usage to stderr. Note: callers also expect an exit, which
    // main performs separately for the unknown-flag case.
    private static void usage() {
        System.err.println("Usage: " + Version.class.getName() +
                " (-release|-date|-props)");
    }
}
// vim:ts=4
|
package edu.umd.cs.findbugs;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
/**
 * Version number and release date information.
 */
public class Version {
    /**
     * Major version number.
     */
    public static final int MAJOR = 1;

    /**
     * Minor version number.
     */
    public static final int MINOR = 2;

    /**
     * Patch level.
     */
    public static final int PATCHLEVEL = 1;

    /**
     * Development version or release candidate?
     */
    public static final boolean IS_DEVELOPMENT = true;

    /**
     * Release candidate number.
     * "0" indicates that the version is not a release candidate.
     */
    public static final int RELEASE_CANDIDATE = 0;

    // NOTE(review): SimpleDateFormat is not thread-safe; these are only
    // used from the static initializers below, so sharing is harmless here.
    static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss z, dd MMMM, yyyy");
    static final SimpleDateFormat eclipseDateFormat = new SimpleDateFormat("yyyyMMdd");

    /**
     * Release date.
     */
    public static final String COMPUTED_DATE = dateFormat.format(new Date());
    public static final String DATE;
    public static final String COMPUTED_ECLIPSE_DATE = eclipseDateFormat.format(new Date()) ;

    /**
     * Preview release number.
     * "0" indicates that the version is not a preview release.
     */
    public static final int PREVIEW = 0;

    // Suffix for development builds: "rcN", "previewN", or "dev-yyyyMMdd".
    private static final String RELEASE_SUFFIX_WORD =
        (RELEASE_CANDIDATE > 0
                ? "rc" + RELEASE_CANDIDATE
                        : (PREVIEW > 0 ? "preview" + PREVIEW : "dev-" + COMPUTED_ECLIPSE_DATE));

    public static final String RELEASE_BASE = MAJOR + "." + MINOR + "." + PATCHLEVEL ;

    /**
     * Release version string.
     */
    public static final String COMPUTED_RELEASE =
        RELEASE_BASE + (IS_DEVELOPMENT ? "-" + RELEASE_SUFFIX_WORD : "");

    /**
     * Release version string.
     */
    public static final String RELEASE;

    /**
     * Version of Eclipse plugin.
     */
    public static final String COMPUTED_ECLIPSE_UI_VERSION =
        MAJOR + "." + MINOR + "." + PATCHLEVEL + "." + COMPUTED_ECLIPSE_DATE;

    public static final String ECLIPSE_UI_VERSION;

    static {
        // Prefer values baked into /version.properties; fall back to the
        // computed values when the file or an individual key is absent.
        InputStream in = null;
        String release, eclipse_ui_version, date;
        try {
            Properties versionProperties = new Properties();
            in = Version.class.getResourceAsStream("/version.properties");
            versionProperties.load(in);
            release = (String) versionProperties.get("release.number");
            eclipse_ui_version = (String) versionProperties.get("eclipse.ui.version");
            date = (String) versionProperties.get("release.date");
            // FIX: a properties file that exists but omits a key previously
            // left the corresponding constant null; default each key
            // individually (matches the behavior of later versions of this
            // class).
            if (release == null)
                release = COMPUTED_RELEASE;
            if (eclipse_ui_version == null)
                eclipse_ui_version = COMPUTED_ECLIPSE_UI_VERSION;
            if (date == null)
                date = COMPUTED_DATE;
        } catch (RuntimeException e) {
            // Also covers the NullPointerException thrown by load(null)
            // when the resource is missing entirely.
            release = COMPUTED_RELEASE;
            eclipse_ui_version = COMPUTED_ECLIPSE_UI_VERSION;
            date = COMPUTED_DATE;
        } catch (IOException e) {
            release = COMPUTED_RELEASE;
            eclipse_ui_version = COMPUTED_ECLIPSE_UI_VERSION;
            date = COMPUTED_DATE;
        } finally {
            try {
                if (in != null) in.close();
            } catch (IOException e) {
                assert true; // nothing to do here
            }
        }
        RELEASE = release;
        ECLIPSE_UI_VERSION = eclipse_ui_version;
        DATE = date;
    }

    /**
     * FindBugs website.
     */
    public static final String WEBSITE = "http://findbugs.sourceforge.net";

    /**
     * Downloads website.
     */
    public static final String DOWNLOADS_WEBSITE = "http://prdownloads.sourceforge.net/findbugs";

    /**
     * Support email.
     */
    public static final String SUPPORT_EMAIL = "http://findbugs.sourceforge.net/reportingBugs.html";

    /**
     * Print one piece of version information selected by the single
     * command-line flag: -release, -date, or -props.
     */
    public static void main(String[] argv) {
        if (argv.length != 1)
            usage();

        String arg = argv[0];

        // Sanity check: a non-development build must not be a release candidate.
        if (!IS_DEVELOPMENT && RELEASE_CANDIDATE != 0) {
            throw new IllegalStateException("Non developmental version, but is release candidate " + RELEASE_CANDIDATE);
        }

        if (arg.equals("-release"))
            System.out.println(RELEASE);
        else if (arg.equals("-date"))
            System.out.println(DATE);
        else if (arg.equals("-props")) {
            System.out.println("release.number=" + COMPUTED_RELEASE);
            System.out.println("release.date=" + COMPUTED_DATE);
            System.out.println("eclipse.ui.version=" + COMPUTED_ECLIPSE_UI_VERSION);
            System.out.println("findbugs.website=" + WEBSITE);
            System.out.println("findbugs.downloads.website=" + DOWNLOADS_WEBSITE);
        } else {
            usage();
            System.exit(1);
        }
    }

    private static void usage() {
        System.err.println("Usage: " + Version.class.getName() +
                " (-release|-date|-props)");
    }
}
// vim:ts=4
|
package com.hea3ven.buildingbricks.compat.vanilla;
import java.util.function.Consumer;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.init.Blocks;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.DimensionType;
import net.minecraft.world.chunk.IChunkProvider;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.common.config.Property;
import net.minecraftforge.event.terraingen.PopulateChunkEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import com.hea3ven.tools.commonutils.util.BlockPosUtil;
import com.hea3ven.tools.commonutils.util.ModifiableBlockPos;
public class GrassSlabWorldGen implements Consumer<Property> {
private static GrassSlabWorldGen instance;
public static boolean enabled = true;
public static GrassSlabWorldGen get() {
if (instance == null)
instance = new GrassSlabWorldGen();
return instance;
}
@Override
public void accept(Property property) {
enabled = property.getBoolean();
if (enabled)
MinecraftForge.EVENT_BUS.register(this);
else
MinecraftForge.EVENT_BUS.unregister(this);
}
@SubscribeEvent
public void onPopulateChunkPreEvent(PopulateChunkEvent.Pre event) {
if(event.getWorld().provider.getDimensionType() != DimensionType.OVERWORLD)
return;
IChunkProvider chunkProvider = event.getWorld().getChunkProvider();
int x = event.getChunkX() << 4;
int z = event.getChunkZ() << 4;
int offX = 16;
int offZ = 16;
if (chunkProvider.getLoadedChunk(event.getChunkX() - 1, event.getChunkZ()) != null) {
x
offX++;
} else {
x++;
offX
}
if (chunkProvider.getLoadedChunk(event.getChunkX(), event.getChunkZ() - 1) != null) {
z
offZ++;
} else {
z++;
offZ
}
if (chunkProvider.getLoadedChunk(event.getChunkX() + 1, event.getChunkZ()) != null)
offX++;
else
offX
if (chunkProvider.getLoadedChunk(event.getChunkX(), event.getChunkZ() + 1) != null)
offZ++;
else
offZ
posLoop:
for (ModifiableBlockPos pos : BlockPosUtil.getBox(new BlockPos(x, 0, z), offX, 1, offZ)) {
while (pos.getY() < 255 && event.getWorld().getBlockState(pos).getMaterial() != Material.GRASS) {
pos.up();
}
if (pos.getY() >= 255) {
continue;
}
if (event.getChunkX() == -4 && event.getChunkZ() == 4)
for (EnumFacing face : EnumFacing.HORIZONTALS) {
pos.offset(face, 1);
Block block = event.getWorld().getBlockState(pos).getBlock();
if (block.isReplaceable(event.getWorld(), pos) ||
block == ProxyModBuildingBricksCompatVanilla.grassSlab)
continue posLoop;
pos.offset(face, -1);
}
pos.up();
for (EnumFacing face : EnumFacing.HORIZONTALS) {
pos.offset(face, 1);
if (event.getWorld().getBlockState(pos).getBlock() == Blocks.GRASS) {
pos.offset(face, -1);
event.getWorld()
.setBlockState(pos,
ProxyModBuildingBricksCompatVanilla.grassSlab.getDefaultState(), 2);
continue posLoop;
}
pos.offset(face, -1);
}
}
}
}
|
package edu.umd.cs.findbugs;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
/**
* Version number and release date information.
*/
public class Version {
/**
* Major version number.
*/
public static final int MAJOR = 1;
/**
* Minor version number.
*/
public static final int MINOR = 3;
/**
* Patch level.
*/
public static final int PATCHLEVEL = 6;
/**
* Development version or release candidate?
*/
public static final boolean IS_DEVELOPMENT = true;
/**
* Release candidate number.
* "0" indicates that the version is not a release candidate.
*/
public static final int RELEASE_CANDIDATE = 3;
/**
* Release date.
*/
public static final String COMPUTED_DATE;
public static final String DATE;
public static final String COMPUTED_ECLIPSE_DATE;
public static final String ECLIPSE_DATE;
static {
SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss z, dd MMMM, yyyy");
SimpleDateFormat eclipseDateFormat = new SimpleDateFormat("yyyyMMdd");
COMPUTED_DATE = dateFormat.format(new Date());
COMPUTED_ECLIPSE_DATE = eclipseDateFormat.format(new Date()) ;
}
/**
* Preview release number.
* "0" indicates that the version is not a preview release.
*/
public static final int PREVIEW = 0;
private static final String RELEASE_SUFFIX_WORD =
(RELEASE_CANDIDATE > 0
? "rc" + RELEASE_CANDIDATE
: (PREVIEW > 0 ? "preview" + PREVIEW : "dev-" + COMPUTED_ECLIPSE_DATE));
public static final String RELEASE_BASE = MAJOR + "." + MINOR + "." + PATCHLEVEL ;
/**
* Release version string.
*/
public static final String COMPUTED_RELEASE =
RELEASE_BASE + (IS_DEVELOPMENT ? "-" + RELEASE_SUFFIX_WORD : "");
/**
* Release version string.
*/
public static final String RELEASE;
/**
* Version of Eclipse plugin.
*/
public static final String COMPUTED_ECLIPSE_UI_VERSION =
MAJOR + "." + MINOR + "." + PATCHLEVEL + "." + COMPUTED_ECLIPSE_DATE;
public static final String ECLIPSE_UI_VERSION;
static {
InputStream in = null;
String release, eclipse_ui_version, date, eclipseDate;
try {
Properties versionProperties = new Properties();
in = Version.class.getResourceAsStream("version.properties");
versionProperties.load(in);
release = (String) versionProperties.get("release.number");
eclipse_ui_version = (String) versionProperties.get("eclipse.ui.version");
date = (String) versionProperties.get("release.date");
eclipseDate = (String) versionProperties.get("eclipse.date");
if (release == null)
release = COMPUTED_RELEASE;
if (eclipse_ui_version == null)
eclipse_ui_version = COMPUTED_ECLIPSE_UI_VERSION;
if (date == null)
date = COMPUTED_DATE;
if (eclipseDate == null)
eclipseDate = COMPUTED_ECLIPSE_DATE;
} catch (RuntimeException e) {
release = COMPUTED_RELEASE;
eclipse_ui_version = COMPUTED_ECLIPSE_UI_VERSION;
date = COMPUTED_DATE;
eclipseDate = COMPUTED_ECLIPSE_DATE;
} catch (IOException e) {
release = COMPUTED_RELEASE;
eclipse_ui_version = COMPUTED_ECLIPSE_UI_VERSION;
date = COMPUTED_DATE;
eclipseDate = COMPUTED_ECLIPSE_DATE;
} finally {
try {
if (in != null) in.close();
} catch (IOException e) {
assert true; // nothing to do here
}
}
RELEASE = release;
ECLIPSE_UI_VERSION = eclipse_ui_version;
DATE = date;
ECLIPSE_DATE = eclipseDate;
}
/**
 * FindBugs website.
 */
public static final String WEBSITE = "http://findbugs.sourceforge.net";
/**
 * Downloads website.
 */
public static final String DOWNLOADS_WEBSITE = "http://prdownloads.sourceforge.net/findbugs";
/**
 * Support URL. NOTE(review): despite the field name this is a web page
 * for reporting bugs, not an email address.
 */
public static final String SUPPORT_EMAIL = "http://findbugs.sourceforge.net/reportingBugs.html";
/**
 * Command line entry point: prints version information.
 * Accepts exactly one of {@code -release}, {@code -date}, or {@code -props}.
 *
 * @param argv command line arguments
 */
public static void main(String[] argv) {
    if (argv.length != 1) {
        // Terminate on a wrong argument count. The original fell through
        // after printing usage, throwing ArrayIndexOutOfBoundsException
        // when no argument was given (argv[0] below) and silently
        // processing argv[0] when extra arguments were given.
        usage();
        System.exit(1);
    }
    String arg = argv[0];
    if (!IS_DEVELOPMENT && RELEASE_CANDIDATE != 0) {
        throw new IllegalStateException("Non developmental version, but is release candidate " + RELEASE_CANDIDATE);
    }
    if (arg.equals("-release"))
        System.out.println(RELEASE);
    else if (arg.equals("-date"))
        System.out.println(DATE);
    else if (arg.equals("-props")) {
        // Emit the computed values in the version.properties format read
        // back by the static initializer.
        System.out.println("release.number=" + COMPUTED_RELEASE);
        System.out.println("release.date=" + COMPUTED_DATE);
        System.out.println("eclipse.date=" + COMPUTED_ECLIPSE_DATE);
        System.out.println("eclipse.ui.version=" + COMPUTED_ECLIPSE_UI_VERSION);
        System.out.println("findbugs.website=" + WEBSITE);
        System.out.println("findbugs.downloads.website=" + DOWNLOADS_WEBSITE);
    } else {
        usage();
        System.exit(1);
    }
}
/** Prints the command line usage message to standard error. */
private static void usage() {
    System.err.println("Usage: " + Version.class.getName() + " (-release|-date|-props)");
}
}
// vim:ts=4
|
package com.hubspot.jinjava.objects.serialization;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.hubspot.jinjava.util.WhitespaceUtils;
import java.util.Objects;
import java.util.regex.Pattern;
public class PyishObjectMapper {

  /**
   * Shared writer that renders objects in "pyish" (Jinja-friendly) form:
   * beans and {@link PyishSerializable} values go through the custom
   * serializers, and output uses the pyish pretty printer and escapes.
   */
  public static final ObjectWriter PYISH_OBJECT_WRITER = new ObjectMapper()
    .registerModule(
      new SimpleModule()
        .setSerializerModifier(PyishBeanSerializerModifier.INSTANCE)
        .addSerializer(PyishSerializable.class, PyishSerializer.INSTANCE)
    )
    .writer(PyishPrettyPrinter.INSTANCE)
    .with(PyishCharacterEscapes.INSTANCE);

  /**
   * Matches a double quote preceded by an even number of backslashes, i.e.
   * an unescaped double quote. Compiled once here instead of being
   * recompiled by {@code String.replaceAll} on every call to
   * {@link #getAsPyishString(Object)}.
   */
  private static final Pattern UNESCAPED_DOUBLE_QUOTE = Pattern.compile(
    "(?<!\\\\)(\\\\\\\\)*(?:\")"
  );

  /**
   * Serializes {@code val} to a pyish string and strips the surrounding
   * quotes/escapes; returns the empty string for {@code null}.
   */
  public static String getAsUnquotedPyishString(Object val) {
    if (val != null) {
      return WhitespaceUtils.unquoteAndUnescape(getAsPyishString(val));
    }
    return "";
  }

  /**
   * Serializes {@code val} to a pyish string, escaping single quotes and
   * converting unescaped double quotes to single quotes. Falls back to
   * {@code String.valueOf} semantics if JSON serialization fails.
   */
  public static String getAsPyishString(Object val) {
    try {
      String serialized = PYISH_OBJECT_WRITER
        .writeValueAsString(val)
        .replace("'", "\\'");
      // Replace double-quotes with single quote as they are preferred in Jinja
      return UNESCAPED_DOUBLE_QUOTE.matcher(serialized).replaceAll("$1'");
    } catch (JsonProcessingException e) {
      return Objects.toString(val, "");
    }
  }
}
|
package org.opensim.version40;
/**
 * Marker class for this package.
 *
 * <p>Intentionally empty: it exists only so the Preferences registry has a
 * class object from which to locate the package.
 *
 * @author Ayman-NMBL
 */
public class TheApp40 {
    // No members: this type is used purely as a package-locating tag.
}
|
package com.lightd.ideap.maven.execution.actions;
import com.intellij.execution.impl.EditorHyperlinkSupport;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.FoldingModel;
import com.intellij.openapi.editor.actions.ToggleUseSoftWrapsToolbarAction;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapAppliancePlaces;
import com.lightd.ideap.maven.MvnCommandFolding;
class MvnSoftWrapsAction extends ToggleUseSoftWrapsToolbarAction {

    /** Provides the placeholder text shown when the command line is folded. */
    private final MvnCommandFolding commandFolding;

    public MvnSoftWrapsAction(MvnCommandFolding commandFolding) {
        super(SoftWrapAppliancePlaces.CONSOLE);
        this.commandFolding = commandFolding;
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
        final boolean softWrapsOn = super.isSelected(e);
        // Re-apply the folding when soft wraps are on but our recorded
        // toggle state has not caught up yet.
        if (softWrapsOn && !getLastState(e.getPresentation())) {
            setSelected(e, true);
        }
        return softWrapsOn;
    }

    @Override
    public void setSelected(AnActionEvent event, final boolean state) {
        super.setSelected(event, state);

        final Editor editor = getEditor(event);
        if (editor == null) {
            return;
        }

        final String firstLine = EditorHyperlinkSupport.getLineText(editor.getDocument(), 0, false);
        if (state && firstLine == null) {
            return;
        }

        final String placeholder = commandFolding.getPlaceHolder(firstLine);
        final FoldingModel foldingModel = editor.getFoldingModel();
        final int firstLineEnd = editor.getDocument().getLineEndOffset(0);

        // Collapses the whole first line behind the placeholder text.
        final Runnable collapseFirstLine = new Runnable() {
            @Override
            public void run() {
                FoldRegion region = foldingModel.addFoldRegion(0, firstLineEnd, placeholder);
                if (region != null) {
                    region.setExpanded(false);
                }
            }
        };

        final FoldRegion[] regions = foldingModel.getAllFoldRegions();
        Runnable operation = null;
        if (regions.length == 0) {
            // Nothing folded yet: only create a fold when wraps turn on.
            if (!state) {
                return;
            }
            operation = collapseFirstLine;
        } else {
            final FoldRegion existing = regions[0];
            if (existing.getStartOffset() == 0 && existing.getEndOffset() == firstLineEnd) {
                // Our fold already exists: just toggle its expansion.
                operation = new Runnable() {
                    @Override
                    public void run() {
                        existing.setExpanded(!state);
                    }
                };
            } else if (state) {
                operation = collapseFirstLine;
            }
        }

        if (operation != null) {
            foldingModel.runBatchFoldingOperation(operation);
        }
    }

    /** Reads the last recorded toggle state from the presentation, defaulting to false. */
    private boolean getLastState(Presentation presentation) {
        final Object recorded = presentation.getClientProperty(SELECTED_PROPERTY);
        return recorded instanceof Boolean && (Boolean) recorded;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.