package org.sagebionetworks.repo.web.controller;
import com.amazonaws.AmazonServiceException;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.sagebionetworks.StackConfigurationSingleton;
import org.sagebionetworks.repo.manager.UserCertificationRequiredException;
import org.sagebionetworks.repo.manager.authentication.PasswordResetViaEmailRequiredException;
import org.sagebionetworks.repo.manager.loginlockout.UnsuccessfulLoginLockoutException;
import org.sagebionetworks.repo.manager.oauth.OAuthClientNotVerifiedException;
import org.sagebionetworks.repo.manager.password.InvalidPasswordException;
import org.sagebionetworks.repo.manager.table.InvalidTableQueryFacetColumnRequestException;
import org.sagebionetworks.repo.manager.trash.EntityInTrashCanException;
import org.sagebionetworks.repo.manager.trash.ParentInTrashCanException;
import org.sagebionetworks.repo.model.ACLInheritanceException;
import org.sagebionetworks.repo.model.AsynchJobFailedException;
import org.sagebionetworks.repo.model.ConflictingUpdateException;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.ErrorResponse;
import org.sagebionetworks.repo.model.ErrorResponseCode;
import org.sagebionetworks.repo.model.InvalidModelException;
import org.sagebionetworks.repo.model.LockedException;
import org.sagebionetworks.repo.model.NameConflictException;
import org.sagebionetworks.repo.model.NotReadyException;
import org.sagebionetworks.repo.model.OAuthErrorResponse;
import org.sagebionetworks.repo.model.TermsOfUseException;
import org.sagebionetworks.repo.model.TooManyRequestsException;
import org.sagebionetworks.repo.model.UnauthenticatedException;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.asynch.AsynchronousJobStatus;
import org.sagebionetworks.repo.model.drs.DrsErrorResponse;
import org.sagebionetworks.repo.model.ses.QuarantinedEmailException;
import org.sagebionetworks.repo.model.table.TableStatus;
import org.sagebionetworks.repo.model.table.TableUnavailableException;
import org.sagebionetworks.repo.queryparser.ParseException;
import org.sagebionetworks.repo.web.DeprecatedServiceException;
import org.sagebionetworks.repo.web.FileHandleLinkedException;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.repo.web.OAuthBadRequestException;
import org.sagebionetworks.repo.web.OAuthException;
import org.sagebionetworks.repo.web.OAuthForbiddenException;
import org.sagebionetworks.repo.web.OAuthUnauthenticatedException;
import org.sagebionetworks.repo.web.ServiceUnavailableException;
import org.sagebionetworks.repo.web.TemporarilyUnavailableException;
import org.sagebionetworks.repo.web.UrlHelpers;
import org.sagebionetworks.repo.web.filter.ByteLimitExceededException;
import org.sagebionetworks.schema.adapter.JSONEntity;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.springframework.beans.TypeMismatchException;
import org.springframework.dao.TransientDataAccessException;
import org.springframework.http.HttpStatus;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.transaction.UnexpectedRollbackException;
import org.springframework.web.HttpMediaTypeNotAcceptableException;
import org.springframework.web.HttpMediaTypeNotSupportedException;
import org.springframework.web.HttpRequestMethodNotSupportedException;
import org.springframework.web.bind.MissingServletRequestParameterException;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.servlet.NoHandlerFoundException;
import org.springframework.web.util.NestedServletException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.PrintStream;
@ControllerAdvice
public class BaseControllerExceptionHandlerAdvice {
// Generic reason returned with a 503 when the failure is transient (e.g. DB deadlock).
// NOTE(review): "UNAVAIABLE" is a typo, but the constant is package-visible — confirm no
// external references before renaming.
static final String SERVICE_TEMPORARILY_UNAVAIABLE_PLEASE_TRY_AGAIN_LATER = "Service temporarily unavailable, please try again later.";
// Logger shared by every handler in this advice class.
private static Logger log = LogManager.getLogger(BaseControllerExceptionHandlerAdvice.class);
/**
 * Maps {@link OAuthBadRequestException} to a 400 with an OAuth2/OIDC-shaped error body.
 */
@ExceptionHandler(OAuthBadRequestException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public OAuthErrorResponse handleOAuthBadRequestException(OAuthBadRequestException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleOAuthException(ex, request, includeFullTrace);
}
/**
 * Maps {@link OAuthUnauthenticatedException} to a 401 with an OAuth2/OIDC-shaped error body.
 */
@ExceptionHandler(OAuthUnauthenticatedException.class)
@ResponseStatus(HttpStatus.UNAUTHORIZED)
@ResponseBody
public OAuthErrorResponse handleOAuthUnauthenticatedException(OAuthUnauthenticatedException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleOAuthException(ex, request, includeFullTrace);
}
/**
 * Maps {@link OAuthForbiddenException} to a 403 with an OAuth2/OIDC-shaped error body.
 */
@ExceptionHandler(OAuthForbiddenException.class)
@ResponseStatus(HttpStatus.FORBIDDEN)
@ResponseBody
public OAuthErrorResponse handleOAuthForbiddenException(OAuthForbiddenException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleOAuthException(ex, request, includeFullTrace);
}
/**
 * Returns 202 ACCEPTED with the table's current status when the table is not yet available:
 * the call was accepted, but the resource is still being built.
 * NOTE(review): "Unavilable" in the method name is a typo, kept because renaming a public
 * method is an interface change.
 */
@ExceptionHandler(TableUnavailableException.class)
@ResponseStatus(HttpStatus.ACCEPTED)
@ResponseBody
public TableStatus handleTableUnavilableException(TableUnavailableException ex, HttpServletRequest request) {
    return ex.getStatus();
}
/**
 * Returns 202 ACCEPTED with the asynchronous job status when the result is not ready yet:
 * the call was accepted, but the resource is still being computed.
 */
@ExceptionHandler(NotReadyException.class)
@ResponseStatus(HttpStatus.ACCEPTED)
@ResponseBody
public AsynchronousJobStatus handleResultNotReadyException(NotReadyException ex, HttpServletRequest request) {
    return ex.getStatus();
}
/**
 * Maps a missing required request parameter to a 400.
 */
@ExceptionHandler(MissingServletRequestParameterException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleMissingServletRequestParameterException(MissingServletRequestParameterException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link NotFoundException} (request references an entity that does not exist) to a 404.
 *
 * @param ex the exception to be handled
 * @param request the client request
 * @return a JSONEntity containing a human-readable reason
 */
@ExceptionHandler(NotFoundException.class)
@ResponseStatus(HttpStatus.NOT_FOUND)
@ResponseBody
public JSONEntity handleNotFoundException(NotFoundException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link ServiceUnavailableException} (service down, or read-only mode hit by a write
 * call) to a 503; the full stack trace is logged.
 */
@ExceptionHandler(ServiceUnavailableException.class)
@ResponseStatus(HttpStatus.SERVICE_UNAVAILABLE)
@ResponseBody
public JSONEntity handleServiceUnavailableException(ServiceUnavailableException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link ConflictingUpdateException} (resource updated more recently than the version
 * referenced by this update) to a 412 PRECONDITION FAILED.
 */
@ExceptionHandler(ConflictingUpdateException.class)
@ResponseStatus(HttpStatus.PRECONDITION_FAILED)
@ResponseBody
public JSONEntity handleConflictingUpdateException(ConflictingUpdateException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link UnauthorizedException} (caller lacks permission) to a 403 FORBIDDEN.
 */
@ExceptionHandler(UnauthorizedException.class)
@ResponseStatus(HttpStatus.FORBIDDEN)
@ResponseBody
public JSONEntity handleUnauthorizedException(UnauthorizedException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link UnauthenticatedException} (authentication failed; caller should correct their
 * credentials and retry) to a 401 UNAUTHORIZED.
 */
@ExceptionHandler(UnauthenticatedException.class)
@ResponseStatus(HttpStatus.UNAUTHORIZED)
@ResponseBody
public JSONEntity handleUnauthenticatedException(UnauthenticatedException ex, HttpServletRequest request, HttpServletResponse response) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link NameConflictException} (an entity with the given name already exists) to a 409.
 */
@ExceptionHandler(NameConflictException.class)
@ResponseStatus(HttpStatus.CONFLICT)
@ResponseBody
public JSONEntity handleNameConflictException(NameConflictException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link AsynchJobFailedException} (an async job failed) to a 400.
 */
@ExceptionHandler(AsynchJobFailedException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleAsynchJobFailedException(AsynchJobFailedException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link IllegalArgumentException} (bad parameter values) to a 400.
 */
@ExceptionHandler(IllegalArgumentException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleIllegalArgumentException(IllegalArgumentException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link QuarantinedEmailException} (recipient of an email is quarantined) to a 409;
 * the full stack trace is logged.
 */
@ExceptionHandler(QuarantinedEmailException.class)
@ResponseStatus(HttpStatus.CONFLICT)
@ResponseBody
public JSONEntity handleQuarantinedEmailException(QuarantinedEmailException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link FileHandleLinkedException} (deleting a file handle that is still linked) to a 409.
 */
@ExceptionHandler(FileHandleLinkedException.class)
@ResponseStatus(HttpStatus.CONFLICT)
@ResponseBody
public JSONEntity handleFileHandleLinkedException(FileHandleLinkedException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link InvalidModelException} (model object failed validity checks) to a 400.
 */
@ExceptionHandler(InvalidModelException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleInvalidModelException(InvalidModelException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link TypeMismatchException} (e.g. a URL segment expected to be an integer held a
 * string) to a 400.
 */
@ExceptionHandler(TypeMismatchException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleTypeMismatchException(TypeMismatchException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link JSONObjectAdapterException} (e.g. unknown fields in the deserialized payload)
 * to a 400.
 */
@ExceptionHandler(JSONObjectAdapterException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleJSONObjectAdapterException(JSONObjectAdapterException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link EOFException} (e.g. a POST/PUT with no body) to a 400; the full stack trace
 * is logged.
 */
@ExceptionHandler(EOFException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleEofException(EOFException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link HttpMessageNotReadableException} (e.g. invalid JSON in the request body) to a 400.
 */
@ExceptionHandler(HttpMessageNotReadableException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleNotReadableException(HttpMessageNotReadableException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps query {@link ParseException} (user query the system cannot parse) to a 400.
 */
@ExceptionHandler(ParseException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleParseException(ParseException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link HttpMediaTypeNotAcceptableException} (client asked for an unsupported response
 * content type) to a 406.
 */
@ExceptionHandler(HttpMediaTypeNotAcceptableException.class)
@ResponseStatus(HttpStatus.NOT_ACCEPTABLE)
@ResponseBody
public JSONEntity handleNotAcceptableException(HttpMediaTypeNotAcceptableException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link HttpMediaTypeNotSupportedException} to a 415 UNSUPPORTED MEDIA TYPE.
 */
@ExceptionHandler(HttpMediaTypeNotSupportedException.class)
@ResponseStatus(HttpStatus.UNSUPPORTED_MEDIA_TYPE)
@ResponseBody
public JSONEntity handleNotSupportedException(HttpMediaTypeNotSupportedException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * PLFM-3574: maps the wrong HTTP verb on an existing call to a 405 METHOD NOT ALLOWED.
 */
@ExceptionHandler(HttpRequestMethodNotSupportedException.class)
@ResponseStatus(HttpStatus.METHOD_NOT_ALLOWED)
@ResponseBody
public JSONEntity handleHttpRequestMethodNotSupportedException(HttpRequestMethodNotSupportedException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link DatastoreException} (problems persisting/retrieving objects) to a 500; the
 * full stack trace is logged.
 */
@ExceptionHandler(DatastoreException.class)
@ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
@ResponseBody
public JSONEntity handleDatastoreException(DatastoreException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Catch-all for {@link ServletException}s not handled elsewhere; mapped to a 500 with the
 * full stack trace logged.
 */
@ExceptionHandler(ServletException.class)
@ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
@ResponseBody
public JSONEntity handleServletException(ServletException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link NestedServletException} to a 500 with the full stack trace logged.
 */
@ExceptionHandler(NestedServletException.class)
@ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
@ResponseBody
public JSONEntity handleNestedServletException(NestedServletException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link IllegalStateException} to a 500 with the full stack trace logged.
 */
@ExceptionHandler(IllegalStateException.class)
@ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
@ResponseBody
public JSONEntity handleIllegalStateException(IllegalStateException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link ACLInheritanceException} to a 404 whose reason points the caller at the
 * benefactor's ACL URL.
 *
 * @throws IOException if the redirect URL cannot be built
 */
@ExceptionHandler(ACLInheritanceException.class)
@ResponseStatus(HttpStatus.NOT_FOUND)
@ResponseBody
public JSONEntity handleAccessControlListInheritanceException(ACLInheritanceException ex, HttpServletRequest request, HttpServletResponse response) throws IOException {
    // Build the redirect message from the benefactor's ACL URL.
    String redirectMessage = ACLInheritanceException.DEFAULT_MSG_PREFIX
            + UrlHelpers.createACLRedirectURL(request, ex.getBenefactorId());
    return handleException(ex, request, redirectMessage, false, null);
}
/**
 * Maps NPEs to a 500 whose reason includes a truncated stack trace, so callers can file a
 * useful bug report.
 */
@ExceptionHandler(NullPointerException.class)
@ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
@ResponseBody
public JSONEntity handleNullPointerException(NullPointerException ex, HttpServletRequest request, HttpServletResponse response) {
    log.error("Handling " + request.toString(), ex);
    // Keep the embedded trace short; the full trace is already in the server log above.
    final int maxTraceLength = 256;
    String trace = stackTraceToString(ex);
    String truncatedTrace = trace.substring(0, Math.min(maxTraceLength, trace.length()));
    ErrorResponse errorResponse = new ErrorResponse();
    errorResponse.setReason("Send a Jira bug report to the platform team with this message: " + truncatedTrace);
    return errorResponse;
}
/**
 * Fallback for exceptions with no dedicated handler: 500, full trace logged, plus an extra
 * high-severity log line so new exception types get noticed and handled explicitly.
 */
@ExceptionHandler(Exception.class)
@ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
@ResponseBody
public JSONEntity handleAllOtherExceptions(Exception ex, HttpServletRequest request) {
    log.error("Consider specifically handling exceptions of type " + ex.getClass().getName());
    return handleException(ex, request, true);
}
/**
 * Maps transient datastore failures (e.g. deadlock) to a 503 with a generic "try again
 * later" reason — the underlying error is deliberately not exposed to the caller.
 */
@ExceptionHandler(TransientDataAccessException.class)
@ResponseStatus(HttpStatus.SERVICE_UNAVAILABLE)
@ResponseBody
public JSONEntity handleTransientDataAccessExceptions(TransientDataAccessException ex, HttpServletRequest request) {
    log.error("Handling " + request.toString(), ex);
    ErrorResponse body = new ErrorResponse();
    body.setReason(SERVICE_TEMPORARILY_UNAVAIABLE_PLEASE_TRY_AGAIN_LATER);
    return body;
}
/**
 * Logs the exception (via the generic handler) and builds an {@link OAuthErrorResponse}
 * whose JSON shape matches OAuth2/OIDC error messages.
 *
 * @param ex the OAuth exception to report
 * @param request the client request
 * @param fullTrace whether the full stack trace should be written to the log
 * @return the OAuth2/OIDC-compliant error body
 */
OAuthErrorResponse handleOAuthException(OAuthException ex, HttpServletRequest request, boolean fullTrace) {
    // Delegate to the generic handler purely for its logging side effect.
    handleException(ex, request, fullTrace);
    String errorName = ex.getError().name();
    String description = ex.getErrorDescription();
    OAuthErrorResponse errorResponse = new OAuthErrorResponse();
    errorResponse.setError(errorName);
    errorResponse.setError_description(description);
    errorResponse.setReason(description == null ? errorName : errorName + ". " + description);
    return errorResponse;
}
/**
 * Logs the exception and returns a JSONEntity error body with no associated
 * {@link ErrorResponseCode}. Child classes may override to change behavior for all
 * exceptions.
 *
 * @param ex the exception to be handled
 * @param request the client request
 * @param fullTrace whether the full stack trace should be written to the log
 * @return a JSONEntity containing a human-readable reason
 */
JSONEntity handleException(Throwable ex, HttpServletRequest request, boolean fullTrace) {
    final ErrorResponseCode noErrorCode = null;
    return handleException(ex, request, fullTrace, noErrorCode);
}
/**
 * Logs the exception and returns an error body, using the exception's message as the
 * reason (falling back to the exception class name when the message is null).
 *
 * @param ex the exception to be handled
 * @param request the client request
 * @param fullTrace whether the full stack trace should be written to the log
 * @param associatedErrorCode optional ErrorResponseCode to attach to the response
 * @return a JSONEntity containing a human-readable reason
 */
JSONEntity handleException(Throwable ex, HttpServletRequest request, boolean fullTrace, ErrorResponseCode associatedErrorCode) {
    String reason = (ex.getMessage() == null) ? ex.getClass().getName() : ex.getMessage();
    return handleException(ex, request, reason, fullTrace, associatedErrorCode);
}
/**
 * Logs the exception and builds the error body. DRS requests (path under /ga4gh/drs/v1)
 * get a {@link DrsErrorResponse}; everything else gets an {@link ErrorResponse}.
 *
 * @param ex the exception to be handled
 * @param request the client request (may be null)
 * @param message the human-readable reason to return
 * @param fullTrace whether the full stack trace should be written to the log
 * @param associatedErrorCode optional ErrorResponseCode to attach to the response
 * @return a JSONEntity containing the exception reason
 */
private JSONEntity handleException(Throwable ex, HttpServletRequest request, String message, boolean fullTrace, ErrorResponseCode associatedErrorCode) {
    // Bug fix: the DRS check below already treats 'request' as possibly null, but the
    // logging dereferenced it unconditionally — guard before calling toString().
    String requestDescription = (request == null) ? "null request" : request.toString();
    // TODO: why do we need this logging behavior difference?
    // Always log the stack trace on develop stacks
    if (fullTrace || StackConfigurationSingleton.singleton().isDevelopStack()) {
        // Print the full stack trace
        log.error("Handling " + requestDescription, ex);
    } else {
        // Only print one line
        log.error("Handling " + requestDescription);
    }
    if (request != null &&
            !StringUtils.isEmpty(request.getPathInfo()) && request.getPathInfo().startsWith("/ga4gh/drs/v1")) {
        return getDrsErrorResponse(ex, message);
    }
    ErrorResponse er = new ErrorResponse();
    er.setReason(message);
    er.setErrorCode(associatedErrorCode);
    return er;
}
/**
 * Builds the DRS-spec error body (message plus numeric HTTP status) for /ga4gh/drs/v1 calls.
 */
private DrsErrorResponse getDrsErrorResponse(final Throwable ex, final String message) {
    final DrsErrorResponse body = new DrsErrorResponse();
    body.setStatus_code(getHttpStatusCode(ex));
    body.setMsg(message);
    return body;
}
/**
 * Maps an exception to the HTTP status code reported inside a DRS error body.
 *
 * Bug fix: the 401/403 mapping was inverted relative to this class's own handlers —
 * {@code handleUnauthenticatedException} returns 401 and {@code handleUnauthorizedException}
 * returns 403, so the DRS body must agree (not-logged-in -> 401, no-permission -> 403).
 */
private long getHttpStatusCode(final Throwable ex) {
    if (ex instanceof NotFoundException) {
        return HttpStatus.NOT_FOUND.value();
    } else if (ex instanceof IllegalArgumentException) {
        return HttpStatus.BAD_REQUEST.value();
    } else if (ex instanceof UnauthenticatedException) {
        return HttpStatus.UNAUTHORIZED.value();
    } else if (ex instanceof UnauthorizedException) {
        return HttpStatus.FORBIDDEN.value();
    } else {
        return HttpStatus.INTERNAL_SERVER_ERROR.value();
    }
}
/**
 * Renders a throwable's stack trace as a string.
 *
 * Fixes: the previous version left the PrintStream unclosed and wrapped a flush() in a
 * dead catch — ByteArrayOutputStream.flush() never throws, so the RuntimeException path
 * was unreachable. try-with-resources closes (and flushes) the stream deterministically.
 *
 * @param ex the throwable to render
 * @return stack trace as a string
 */
public static String stackTraceToString(Throwable ex) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (PrintStream printStream = new PrintStream(baos)) {
        ex.printStackTrace(printStream);
    }
    return baos.toString();
}
/**
 * Maps {@link EntityInTrashCanException} (entity is in the trash can) to a 404; the full
 * stack trace is logged.
 */
@ExceptionHandler(EntityInTrashCanException.class)
@ResponseStatus(HttpStatus.NOT_FOUND)
@ResponseBody
public JSONEntity handleEntityInTrashCanException(EntityInTrashCanException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link ParentInTrashCanException} (entity's parent is in the trash can) to a 403;
 * the full stack trace is logged.
 */
@ExceptionHandler(ParentInTrashCanException.class)
@ResponseStatus(HttpStatus.FORBIDDEN)
@ResponseBody
public JSONEntity handleParentInTrashCanException(ParentInTrashCanException ex, HttpServletRequest request) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link TooManyRequestsException} (request rate over threshold) to a 429; the full
 * stack trace is logged.
 */
@ExceptionHandler(TooManyRequestsException.class)
@ResponseStatus(HttpStatus.TOO_MANY_REQUESTS)
@ResponseBody
public JSONEntity handleTooManyRequestsException(TooManyRequestsException ex, HttpServletRequest request, HttpServletResponse response) {
    final boolean includeFullTrace = true;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link ByteLimitExceededException} (request body over the maximum size) to a 413.
 * NOTE(review): the method name looks copy-pasted from the 429 handler; it is kept because
 * renaming a public method is an interface change.
 */
@ExceptionHandler(ByteLimitExceededException.class)
@ResponseStatus(HttpStatus.PAYLOAD_TOO_LARGE)
@ResponseBody
public JSONEntity handleTooManyRequestsException(ByteLimitExceededException ex, HttpServletRequest request, HttpServletResponse response) {
    return handleException(ex, request, false);
}
/**
 * Maps {@link TermsOfUseException} (user has not accepted the terms of use) to a 403.
 */
@ExceptionHandler(TermsOfUseException.class)
@ResponseStatus(HttpStatus.FORBIDDEN)
@ResponseBody
public JSONEntity handleTermsOfUseException(TermsOfUseException ex, HttpServletRequest request, HttpServletResponse response) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link LockedException} (requested object is locked) to a 423 LOCKED.
 */
@ExceptionHandler(LockedException.class)
@ResponseStatus(HttpStatus.LOCKED)
@ResponseBody
public JSONEntity handleLockedException(LockedException ex, HttpServletRequest request, HttpServletResponse response) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link UnsuccessfulLoginLockoutException} (account temporarily locked after failed
 * logins) to a 423 LOCKED.
 */
@ExceptionHandler(UnsuccessfulLoginLockoutException.class)
@ResponseStatus(HttpStatus.LOCKED)
@ResponseBody
public JSONEntity handleLockedException(UnsuccessfulLoginLockoutException ex, HttpServletRequest request, HttpServletResponse response) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link DeprecatedServiceException} (call to a retired service) to a 410 GONE.
 */
@ExceptionHandler(DeprecatedServiceException.class)
@ResponseStatus(HttpStatus.GONE)
@ResponseBody
public JSONEntity handleDeprecatedServiceException(DeprecatedServiceException ex, HttpServletRequest request, HttpServletResponse response) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link UnexpectedRollbackException} to a 400, reporting the underlying cause.
 *
 * Bug fix: getCause() can be null; passing null to handleException() would NPE at
 * ex.getMessage(), so fall back to the rollback exception itself.
 */
@ExceptionHandler(UnexpectedRollbackException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleUnexpectedRollbackException(UnexpectedRollbackException ex, HttpServletRequest request, HttpServletResponse response) {
    Throwable toReport = (ex.getCause() != null) ? ex.getCause() : ex;
    return handleException(toReport, request, true);
}
/**
 * See PLFM-4332. Maps {@link TemporarilyUnavailableException} to a 503 (service
 * unavailable), reporting the underlying cause.
 *
 * Bug fix: getCause() can be null; passing null to handleException() would NPE at
 * ex.getMessage(), so fall back to the exception itself.
 */
@ExceptionHandler(TemporarilyUnavailableException.class)
@ResponseStatus(HttpStatus.SERVICE_UNAVAILABLE)
@ResponseBody
public JSONEntity handleTemporarilyUnavailableException(TemporarilyUnavailableException ex, HttpServletRequest request, HttpServletResponse response) {
    Throwable toReport = (ex.getCause() != null) ? ex.getCause() : ex;
    return handleException(toReport, request, true);
}
/**
 * See PLFM-4292. Maps all {@link AmazonServiceException}s to a 502 (bad gateway).
 *
 * Bug fix: an AmazonServiceException is frequently the root failure itself, so getCause()
 * is often null; passing null to handleException() would NPE at ex.getMessage(). Fall back
 * to the exception itself when there is no cause.
 */
@ExceptionHandler(AmazonServiceException.class)
@ResponseStatus(HttpStatus.BAD_GATEWAY)
@ResponseBody
public JSONEntity handleAmazonServiceException(AmazonServiceException ex, HttpServletRequest request, HttpServletResponse response) {
    Throwable toReport = (ex.getCause() != null) ? ex.getCause() : ex;
    return handleException(toReport, request, true);
}
/**
 * Maps {@link InvalidPasswordException} to a 400.
 */
@ExceptionHandler(InvalidPasswordException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleInvalidPasswordException(InvalidPasswordException ex, HttpServletRequest request) {
    final boolean includeFullTrace = false;
    return handleException(ex, request, includeFullTrace);
}
/**
 * Maps {@link PasswordResetViaEmailRequiredException} to a 401 tagged with the
 * PASSWORD_RESET_VIA_EMAIL_REQUIRED error code so clients can branch on it.
 */
@ExceptionHandler(PasswordResetViaEmailRequiredException.class)
@ResponseStatus(HttpStatus.UNAUTHORIZED)
@ResponseBody
public JSONEntity handlePasswordChangeRequiredException(PasswordResetViaEmailRequiredException ex, HttpServletRequest request) {
    return handleException(ex, request, false, ErrorResponseCode.PASSWORD_RESET_VIA_EMAIL_REQUIRED);
}
/**
 * Maps {@link UserCertificationRequiredException} to a 403 tagged with the
 * USER_CERTIFICATION_REQUIRED error code so clients can branch on it.
 */
@ExceptionHandler(UserCertificationRequiredException.class)
@ResponseStatus(HttpStatus.FORBIDDEN)
@ResponseBody
public JSONEntity handleUserCertificationRequiredException(UserCertificationRequiredException ex, HttpServletRequest request) {
    return handleException(ex, request, false, ErrorResponseCode.USER_CERTIFICATION_REQUIRED);
}
/**
 * Maps {@link InvalidTableQueryFacetColumnRequestException} to a 400 tagged with the
 * INVALID_TABLE_QUERY_FACET_COLUMN_REQUEST error code.
 */
@ExceptionHandler(InvalidTableQueryFacetColumnRequestException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
@ResponseBody
public JSONEntity handleInvalidTableQueryFacetColumnRequestException(InvalidTableQueryFacetColumnRequestException ex, HttpServletRequest request) {
    return handleException(ex, request, false, ErrorResponseCode.INVALID_TABLE_QUERY_FACET_COLUMN_REQUEST);
}
/**
 * Map Spring's NoHandlerFoundException (no controller matched the request path)
 * to 404 (not found), with a message pointing callers at the REST API docs.
 *
 * @param ex carries the HTTP method of the unmatched request
 * @param request the current HTTP request
 * @return the serialized error body for the client
 */
@ExceptionHandler(NoHandlerFoundException.class)
@ResponseStatus(HttpStatus.NOT_FOUND)
public @ResponseBody
JSONEntity handleNoHandlerFoundException(NoHandlerFoundException ex, HttpServletRequest request){
return handleException(ex,
request,
ex.getHttpMethod() +" was not found. Please reference API documentation at https://docs.synapse.org/rest/",
false,
null);
}
/**
 * Map OAuthClientNotVerifiedException to 403 (forbidden) with the
 * machine-readable code OAUTH_CLIENT_NOT_VERIFIED. Note the third argument is
 * {@code true} here, unlike the surrounding handlers which pass {@code false}.
 *
 * @param ex the failure raised for an unverified OAuth client
 * @param request the current HTTP request
 * @return the serialized error body for the client
 */
@ExceptionHandler(OAuthClientNotVerifiedException.class)
@ResponseStatus(HttpStatus.FORBIDDEN)
public @ResponseBody
JSONEntity handleOAuthClientNotVerifiedException(OAuthClientNotVerifiedException ex,
HttpServletRequest request){
return handleException(ex, request, true, ErrorResponseCode.OAUTH_CLIENT_NOT_VERIFIED);
}
} |
package net.krazyweb.cataclysm.mapeditor;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.image.Image;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.scene.transform.Rotate;
import net.krazyweb.cataclysm.mapeditor.events.*;
import net.krazyweb.cataclysm.mapeditor.map.CataclysmMap;
import net.krazyweb.cataclysm.mapeditor.map.PlaceGroupZone;
import net.krazyweb.cataclysm.mapeditor.tools.PencilTool;
import net.krazyweb.cataclysm.mapeditor.tools.Tool;
import java.util.List;
public class MapDisplay {
private static final Tile.AdditionalTileType[] BITWISE_TYPES = {
Tile.AdditionalTileType.UNCONNECTED,
Tile.AdditionalTileType.END_PIECE,
Tile.AdditionalTileType.END_PIECE,
Tile.AdditionalTileType.CORNER,
Tile.AdditionalTileType.END_PIECE,
Tile.AdditionalTileType.EDGE,
Tile.AdditionalTileType.CORNER,
Tile.AdditionalTileType.T_CONNECTION,
Tile.AdditionalTileType.END_PIECE,
Tile.AdditionalTileType.CORNER,
Tile.AdditionalTileType.EDGE,
Tile.AdditionalTileType.T_CONNECTION,
Tile.AdditionalTileType.CORNER,
Tile.AdditionalTileType.T_CONNECTION,
Tile.AdditionalTileType.T_CONNECTION,
Tile.AdditionalTileType.CENTER
};
private static final int[] BITWISE_ROTATIONS = {
0, 0, 270, 0, 180, 0, 270, 270, 90, 90, 90, 0, 180, 90, 180, 0
};
@FXML
private StackPane root;
@FXML
private Canvas terrain, overlays, groups;
private int lastHoverX, lastHoverY;
private boolean dragging = false;
private CataclysmMap map;
private EventBus eventBus;
private Tool tool = new PencilTool(); //TODO Set to last tool used on startup
private Tile currentTile = Tile.tiles.get("t_grass");
//TODO Condense these handlers
private final EventHandler<MouseEvent> clickEvent = event -> {
updateInfo(event.getX(), event.getY()); //TODO Let the tool define where to draw the overlays
tool.click(event, currentTile, groups, map);
};
private final EventHandler<MouseEvent> releaseEvent = event -> {
updateInfo(event.getX(), event.getY()); //TODO Let the tool define where to draw the overlays
tool.release(event, currentTile, groups, map);
};
private final EventHandler<MouseEvent> dragEvent = event -> {
updateInfo(event.getX(), event.getY()); //TODO Let the tool define where to draw the overlays
tool.drag(event, currentTile, groups, map);
};
private final EventHandler<MouseEvent> dragStartEvent = event -> {
updateInfo(event.getX(), event.getY()); //TODO Let the tool define where to draw the overlays
tool.dragStart(event, currentTile, groups, map);
};
private final EventHandler<MouseEvent> dragFinishEvent = event -> {
updateInfo(event.getX(), event.getY()); //TODO Let the tool define where to draw the overlays
tool.dragEnd(event, currentTile, groups, map);
};
public void setEventBus(final EventBus eventBus) {
this.eventBus = eventBus;
}
@Subscribe
public void tilePickedEventListener(final TilePickedEvent event) {
currentTile = event.getTile();
}
@Subscribe
public void mapRedrawRequestEventListener(final MapRedrawRequestEvent event) {
drawMap();
}
@Subscribe
public void tileRedrawRequestEventListener(final TileRedrawRequestEvent event) {
drawTile(event.getX(), event.getY());
drawTile(event.getX() + 1, event.getY());
drawTile(event.getX() - 1, event.getY());
drawTile(event.getX(), event.getY() + 1);
drawTile(event.getX(), event.getY() - 1);
}
@Subscribe
public void placeGroupRedrawRequestEventListener(final PlaceGroupRedrawRequestEvent event) {
drawPlaceGroups();
}
@Subscribe
public void toolSelectedEventListener(final ToolSelectedEvent event) {
tool = event.getTool();
}
private void clearOverlay() {
overlays.getGraphicsContext2D().clearRect(lastHoverX * 32 - 5, lastHoverY * 32 - 5, 42, 42); //TODO Use tileset size
}
private void updateInfo(final int mouseX, final int mouseY) {
int eventX = (mouseX / 32); //TODO Use tileset size
int eventY = (mouseY / 32); //TODO Use tileset size
if (eventX < 0 || eventY < 0 || eventX >= CataclysmMap.SIZE || eventY >= CataclysmMap.SIZE) {
clearOverlay();
lastHoverX = eventX;
lastHoverY = eventY;
return;
}
if (eventX != lastHoverX || eventY != lastHoverY) {
clearOverlay();
StringBuilder info = new StringBuilder()
.append(map.getTerrainAt(eventX, eventY)).append(" | ")
.append(map.getFurnitureAt(eventX, eventY)).append(" | ");
List<PlaceGroupZone> zones = map.getPlaceGroupZonesAt(eventX, eventY);
zones.forEach(zone -> info.append(" (").append(zone.group.type).append(" ").append(zone.group.group).append(")"));
eventBus.post(new TileHoverEvent(info.toString(), eventX, eventY)); //TODO Pass tiles to event-not formatting; have the consumers format the text instead
lastHoverX = eventX;
lastHoverY = eventY;
overlays.getGraphicsContext2D().setStroke(Color.WHITE);
overlays.getGraphicsContext2D().strokeRect(lastHoverX * 32, lastHoverY * 32, 32, 32); //TODO Use tileset size
}
}
private void updateInfo(final double mouseX, final double mouseY) {
updateInfo((int) mouseX, (int) mouseY);
}
@Subscribe
public void mapLoadedEventListener(final MapLoadedEvent event) {
//TODO Move this?
if (map != null) {
eventBus.unregister(map);
}
try {
map = event.getMap();
drawMap();
} catch (Exception e) {
e.printStackTrace();
}
root.setOnMouseMoved(mouseEvent -> updateInfo(mouseEvent.getX(), mouseEvent.getY()));
root.setOnMouseExited(mouseEvent -> clearOverlay());
root.setOnMouseReleased(mouseEvent -> {
if (dragging) {
dragging = false;
dragFinishEvent.handle(mouseEvent);
} else {
releaseEvent.handle(mouseEvent);
}
});
root.setOnMousePressed(clickEvent);
root.setOnMouseDragged(mouseEvent -> {
if (!dragging) {
dragging = true;
dragStartEvent.handle(mouseEvent);
}
dragEvent.handle(mouseEvent);
});
}
private void drawMap() {
for (int x = 0; x < CataclysmMap.SIZE; x++) {
for (int y = 0; y < CataclysmMap.SIZE; y++) {
drawTile(x, y);
}
}
drawPlaceGroups();
}
private void drawPlaceGroups() {
GraphicsContext graphicsContext = groups.getGraphicsContext2D();
graphicsContext.clearRect(0, 0, 768, 768); //TODO Use calculated size
List<PlaceGroupZone> placeGroupZones = map.getPlaceGroupZones();
for (int i = placeGroupZones.size() - 1; i >= 0; i
PlaceGroupZone placeGroupZone = placeGroupZones.get(i);
graphicsContext.setFill(placeGroupZone.fillColor);
graphicsContext.setStroke(placeGroupZone.strokeColor);
graphicsContext.fillRect(placeGroupZone.x * 32, placeGroupZone.y * 32, placeGroupZone.w * 32, placeGroupZone.h * 32); //TODO Use tileset size
graphicsContext.strokeRect(placeGroupZone.x * 32, placeGroupZone.y * 32, placeGroupZone.w * 32, placeGroupZone.h * 32); //TODO Use tileset size
}
}
private void drawTile(final int x, final int y) {
terrain.getGraphicsContext2D().setFill(Color.BLACK);
terrain.getGraphicsContext2D().fillRect(x * 32, y * 32, 32, 32); //TODO Use tileset size
drawTile(x, y, terrain.getGraphicsContext2D());
}
private void drawTile(final int x, final int y, final GraphicsContext graphicsContext) {
if (x < 0 || y < 0 || x >= CataclysmMap.SIZE || y >= CataclysmMap.SIZE) {
return;
}
//Fallback for tiles not supported by tileset
if (Tile.tiles.get(map.getTerrainAt(x, y)) == null) {
graphicsContext.setFill(Color.FUCHSIA);
graphicsContext.fillRect(x * 32, y * 32, 32, 32); //TODO Use tileset size
return;
}
//TODO Don't duplicate these sections
if (Tile.tiles.get(map.getTerrainAt(x, y)).isMultiTile()) {
int bitwiseMapping = map.getBitwiseMapping(x, y, CataclysmMap.Layer.TERRAIN);
Image texture = TileSet.textures.get(Tile.tiles.get(map.getTerrainAt(x, y)).getTile(BITWISE_TYPES[bitwiseMapping]).getID());
int rotation = BITWISE_ROTATIONS[bitwiseMapping];
drawRotatedImage(graphicsContext, texture, rotation, x * 32, y * 32); //TODO Use tileset size
} else {
Image texture = TileSet.textures.get(Tile.tiles.get(map.getTerrainAt(x, y)).getTile().getID());
graphicsContext.drawImage(texture, x * 32, y * 32); //TODO Use tileset size
}
//TODO Don't duplicate these sections
if (map.getFurnitureAt(x, y) != null) {
if (Tile.tiles.get(map.getFurnitureAt(x, y)).isMultiTile()) {
int bitwiseMapping = map.getBitwiseMapping(x, y, CataclysmMap.Layer.FURNITURE);
Image texture = TileSet.textures.get(Tile.tiles.get(map.getFurnitureAt(x, y)).getTile(BITWISE_TYPES[bitwiseMapping]).getID());
int rotation = BITWISE_ROTATIONS[bitwiseMapping];
drawRotatedImage(graphicsContext, texture, rotation, x * 32, y * 32); //TODO Use tileset size
} else {
Image texture = TileSet.textures.get(Tile.tiles.get(map.getFurnitureAt(x, y)).getTile().getID());
graphicsContext.drawImage(texture, x * 32, y * 32); //TODO Use tileset size
}
}
}
private void rotate(final GraphicsContext graphicsContext, final double angle, final double x, final double y) {
Rotate rotation = new Rotate(angle, x, y);
graphicsContext.setTransform(rotation.getMxx(), rotation.getMyx(), rotation.getMxy(), rotation.getMyy(), rotation.getTx(), rotation.getTy());
}
private void drawRotatedImage(final GraphicsContext graphicsContext, final Image image, final double angle, final int x, final int y) {
if (image == null) {
return;
}
graphicsContext.save(); // saves the current state on stack, including the current transform
rotate(graphicsContext, angle, x + image.getWidth() / 2, y + image.getHeight() / 2);
graphicsContext.drawImage(image, x, y);
graphicsContext.restore(); // back to original state (before rotation)
}
} |
package net.minecraftforge.client.model;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.BlockModelShapes;
import net.minecraft.client.renderer.ItemMeshDefinition;
import net.minecraft.client.renderer.ItemModelMesher;
import net.minecraft.client.renderer.block.model.ItemCameraTransforms;
import net.minecraft.client.renderer.block.model.ItemModelGenerator;
import net.minecraft.client.renderer.block.model.ModelBlock;
import net.minecraft.client.renderer.block.model.ModelBlockDefinition;
import net.minecraft.client.renderer.block.model.ModelBlockDefinition.MissingVariantException;
import net.minecraft.client.renderer.block.model.ModelBlockDefinition.Variant;
import net.minecraft.client.renderer.block.model.ModelBlockDefinition.Variants;
import net.minecraft.client.renderer.block.statemap.IStateMapper;
import net.minecraft.client.renderer.texture.IIconCreator;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.client.renderer.texture.TextureMap;
import net.minecraft.client.renderer.vertex.VertexFormat;
import net.minecraft.client.resources.IResourceManager;
import net.minecraft.client.resources.model.BuiltInModel;
import net.minecraft.client.resources.model.ModelBakery;
import net.minecraft.client.resources.model.ModelResourceLocation;
import net.minecraft.client.resources.model.ModelRotation;
import net.minecraft.client.resources.model.WeightedBakedModel;
import net.minecraft.item.Item;
import net.minecraft.util.IRegistry;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.client.event.TextureStitchEvent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.FMLLog;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.registry.GameData;
import net.minecraftforge.fml.common.registry.RegistryDelegate;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.Level;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
public class ModelLoader extends ModelBakery
{
private final Map<ModelResourceLocation, IModel> stateModels = new HashMap<ModelResourceLocation, IModel>();
private final Set<ResourceLocation> textures = new HashSet<ResourceLocation>();
private final Set<ResourceLocation> loadingModels = new HashSet<ResourceLocation>();
private final Set<ModelResourceLocation> missingVariants = Sets.newHashSet();
private boolean isLoading = false;
public boolean isLoading()
{
return isLoading;
}
public ModelLoader(IResourceManager manager, TextureMap map, BlockModelShapes shapes)
{
super(manager, map, shapes);
VanillaLoader.instance.setLoader(this);
ModelLoaderRegistry.clearModelCache();
}
@Override
public IRegistry setupModelRegistry()
{
isLoading = true;
loadBlocks();
loadItems();
stateModels.put(MODEL_MISSING, getModel(new ResourceLocation(MODEL_MISSING.getResourceDomain(), MODEL_MISSING.getResourcePath())));
textures.remove(TextureMap.LOCATION_MISSING_TEXTURE);
textures.addAll(LOCATIONS_BUILTIN_TEXTURES);
textureMap.loadSprites(resourceManager, new IIconCreator()
{
public void registerSprites(TextureMap map)
{
for(ResourceLocation t : textures)
{
sprites.put(t, map.registerSprite(t));
}
}
});
sprites.put(new ResourceLocation("missingno"), textureMap.getMissingSprite());
Function<ResourceLocation, TextureAtlasSprite> textureGetter = Functions.forMap(sprites, textureMap.getMissingSprite());
for(Entry<ModelResourceLocation, IModel> e : stateModels.entrySet())
{
bakedRegistry.putObject(e.getKey(), e.getValue().bake(e.getValue().getDefaultState(), Attributes.DEFAULT_BAKED_FORMAT, textureGetter));
}
return bakedRegistry;
}
private void loadBlocks()
{
Map<IBlockState, ModelResourceLocation> stateMap = blockModelShapes.getBlockStateMapper().putAllStateModelLocations();
Collection<ModelResourceLocation> variants = Lists.newArrayList(stateMap.values());
variants.add(new ModelResourceLocation("minecraft:item_frame", "normal")); //Vanilla special cases item_frames so must we
variants.add(new ModelResourceLocation("minecraft:item_frame", "map"));
loadVariants(variants);
}
@Override
protected void registerVariant(ModelBlockDefinition definition, ModelResourceLocation location)
{
Variants variants = null;
try
{
variants = definition.getVariants(location.getVariant());
}
catch(MissingVariantException e)
{
missingVariants.add(location);
}
if(variants != null && !variants.getVariants().isEmpty())
{
try
{
stateModels.put(location, new WeightedRandomModel(variants));
}
catch(Throwable e)
{
throw new RuntimeException(e);
}
}
}
private void loadItems()
{
registerVariantNames();
for(Item item : GameData.getItemRegistry().typeSafeIterable())
{
for(String s : (List<String>)getVariantNames(item))
{
ResourceLocation file = getItemLocation(s);
ModelResourceLocation memory = new ModelResourceLocation(s, "inventory");
IModel model = getModel(file);
if(model == null || model == getMissingModel())
{
missingVariants.add(memory);
}
else stateModels.put(memory, model);
}
}
}
public IModel getModel(ResourceLocation location)
{
if(!ModelLoaderRegistry.loaded(location)) loadAnyModel(location);
return ModelLoaderRegistry.getModel(location);
}
@Override
protected ResourceLocation getModelLocation(ResourceLocation model)
{
return new ResourceLocation(model.getResourceDomain(), model.getResourcePath() + ".json");
}
private void loadAnyModel(ResourceLocation location)
{
if(loadingModels.contains(location))
{
throw new IllegalStateException("circular model dependencies involving model " + location);
}
loadingModels.add(location);
IModel model = ModelLoaderRegistry.getModel(location);
for(ResourceLocation dep : model.getDependencies())
{
getModel(dep);
}
textures.addAll(model.getTextures());
loadingModels.remove(location);
}
private class VanillaModelWrapper implements IModel
{
private final ResourceLocation location;
private final ModelBlock model;
public VanillaModelWrapper(ResourceLocation location, ModelBlock model)
{
this.location = location;
this.model = model;
}
public Collection<ResourceLocation> getDependencies()
{
if(model.getParentLocation() == null || model.getParentLocation().getResourcePath().startsWith("builtin/")) return Collections.emptyList();
return Collections.singletonList(model.getParentLocation());
}
public Collection<ResourceLocation> getTextures()
{
// setting parent here to make textures resolve properly
if(model.getParentLocation() != null)
{
IModel parent = getModel(model.getParentLocation());
if(parent instanceof VanillaModelWrapper)
{
model.parent = ((VanillaModelWrapper) parent).model;
}
else
{
throw new IllegalStateException("vanilla model" + model + "can't have non-vanilla parent");
}
}
ImmutableSet.Builder<ResourceLocation> builder = ImmutableSet.builder();
if(hasItemModel(model))
{
for(String s : (List<String>)ItemModelGenerator.LAYERS)
{
String r = model.resolveTextureName(s);
ResourceLocation loc = new ResourceLocation(r);
if(!r.equals(s))
{
builder.add(loc);
}
// mojang hardcode
if(model.getRootModel() == MODEL_COMPASS && !loc.equals(TextureMap.LOCATION_MISSING_TEXTURE))
{
TextureAtlasSprite.setLocationNameCompass(loc.toString());
}
else if(model.getRootModel() == MODEL_CLOCK && !loc.equals(TextureMap.LOCATION_MISSING_TEXTURE))
{
TextureAtlasSprite.setLocationNameClock(loc.toString());
}
}
}
for(String s : (Iterable<String>)model.textures.values())
{
if(!s.startsWith("
{
builder.add(new ResourceLocation(s));
}
}
return builder.build();
}
public IFlexibleBakedModel bake(IModelState state, VertexFormat format, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter)
{
if(!Attributes.moreSpecific(format, Attributes.DEFAULT_BAKED_FORMAT))
{
throw new IllegalArgumentException("can't bake vanilla models to the format that doesn't fit into the default one: " + format);
}
ModelBlock model = this.model;
if(hasItemModel(model)) model = makeItemModel(model);
if(model == null) return getMissingModel().bake(state, format, bakedTextureGetter);
if(isCustomRenderer(model)) return new IFlexibleBakedModel.Wrapper(new BuiltInModel(new ItemCameraTransforms(model.getThirdPersonTransform(), model.getFirstPersonTransform(), model.getHeadTransform(), model.getInGuiTransform())), Attributes.DEFAULT_BAKED_FORMAT);
return new IFlexibleBakedModel.Wrapper(bakeModel(model, state.apply(this), state instanceof UVLock), Attributes.DEFAULT_BAKED_FORMAT);
}
public IModelState getDefaultState()
{
return ModelRotation.X0_Y0;
}
}
public static class UVLock implements IModelState
{
private final IModelState state;
public UVLock(IModelState state)
{
this.state = state;
}
public TRSRTransformation apply(IModelPart part)
{
return state.apply(part);
}
}
// Weighted models can contain multiple copies of 1 model with different rotations - this is to make it work with IModelState (different copies will be different objects).
private static class WeightedPartWrapper implements IModel
{
private final IModel model;
public WeightedPartWrapper(IModel model)
{
this.model = model;
}
public Collection<ResourceLocation> getDependencies()
{
return model.getDependencies();
}
public Collection<ResourceLocation> getTextures()
{
return model.getTextures();
}
public IFlexibleBakedModel bake(IModelState state, VertexFormat format, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter)
{
return model.bake(state, format, bakedTextureGetter);
}
public IModelState getDefaultState()
{
return model.getDefaultState();
}
}
private class WeightedRandomModel implements IModel
{
private final List<Variant> variants;
private final List<ResourceLocation> locations = new ArrayList<ResourceLocation>();
private final List<IModel> models = new ArrayList<IModel>();
private final IModelState defaultState;
public WeightedRandomModel(Variants variants)
{
this.variants = variants.getVariants();
ImmutableMap.Builder<IModelPart, TRSRTransformation> builder = ImmutableMap.builder();
for(Variant v : (List<Variant>)variants.getVariants())
{
ResourceLocation loc = v.getModelLocation();
locations.add(loc);
IModel model = new WeightedPartWrapper(getModel(loc));
models.add(model);
builder.put(model, new TRSRTransformation(v.getRotation()));
}
defaultState = new MapModelState(builder.build());
}
public Collection<ResourceLocation> getDependencies()
{
return ImmutableList.copyOf(locations);
}
public Collection<ResourceLocation> getTextures()
{
return Collections.emptyList();
}
private IModelState addUV(boolean uv, IModelState state)
{
if(uv) return new UVLock(state);
return state;
}
public IFlexibleBakedModel bake(IModelState state, VertexFormat format, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter)
{
if(!Attributes.moreSpecific(format, Attributes.DEFAULT_BAKED_FORMAT))
{
throw new IllegalArgumentException("can't bake vanilla weighted models to the format that doesn't fit into the default one: " + format);
}
if(variants.size() == 1)
{
Variant v = variants.get(0);
IModel model = models.get(0);
return model.bake(addUV(v.isUvLocked(), state.apply(model)), format, bakedTextureGetter);
}
WeightedBakedModel.Builder builder = new WeightedBakedModel.Builder();
for(int i = 0; i < variants.size(); i++)
{
IModel model = models.get(i);
Variant v = variants.get(i);
builder.add(model.bake(addUV(v.isUvLocked(), state.apply(model)), format, bakedTextureGetter), variants.get(i).getWeight());
}
return new FlexibleWeightedBakedModel(builder.build(), Attributes.DEFAULT_BAKED_FORMAT);
}
public IModelState getDefaultState()
{
return defaultState;
}
}
private static class FlexibleWeightedBakedModel extends WeightedBakedModel implements IFlexibleBakedModel
{
private final WeightedBakedModel parent;
private final VertexFormat format;
public FlexibleWeightedBakedModel(WeightedBakedModel parent, VertexFormat format)
{
super(parent.models);
this.parent = parent;
this.format = format;
}
public VertexFormat getFormat()
{
return format;
}
}
private boolean isBuiltinModel(ModelBlock model)
{
return model == MODEL_GENERATED || model == MODEL_COMPASS || model == MODEL_CLOCK || model == MODEL_ENTITY;
}
public IModel getMissingModel()
{
return getModel(new ResourceLocation(MODEL_MISSING.getResourceDomain(), MODEL_MISSING.getResourcePath()));
}
static enum VanillaLoader implements ICustomModelLoader
{
instance;
private ModelLoader loader;
void setLoader(ModelLoader loader)
{
this.loader = loader;
}
ModelLoader getLoader()
{
return loader;
}
public void onResourceManagerReload(IResourceManager resourceManager)
{
// do nothing, cause loader will store the reference to the resourceManager
}
public boolean accepts(ResourceLocation modelLocation)
{
return true;
}
public IModel loadModel(ResourceLocation modelLocation)
{
try
{
return loader.new VanillaModelWrapper(modelLocation, loader.loadModel(modelLocation));
}
catch(IOException e)
{
if(loader.isLoading)
{
// holding error until onPostBakeEvent
}
else FMLLog.log(Level.ERROR, e, "Exception loading model %s with vanilla loader, skipping", modelLocation);
return loader.getMissingModel();
}
}
}
public static class White extends TextureAtlasSprite
{
public static ResourceLocation loc = new ResourceLocation("white");
public static White instance = new White();
protected White()
{
super(loc.toString());
}
@Override
public boolean hasCustomLoader(IResourceManager manager, ResourceLocation location)
{
return true;
}
@Override
public boolean load(IResourceManager manager, ResourceLocation location)
{
BufferedImage image = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB);
image.setRGB(0, 0, 0xFFFFFFFF);
BufferedImage[] images = new BufferedImage[Minecraft.getMinecraft().gameSettings.mipmapLevels + 1];
images[0] = image;
loadSprite(images, null);
return false;
}
public void register(TextureMap map)
{
map.setTextureEntry(White.loc.toString(), White.instance);
}
}
public void onPostBakeEvent(IRegistry modelRegistry)
{
for(ModelResourceLocation missing : missingVariants)
{
if(modelRegistry.getObject(missing) == null)
{
FMLLog.severe("Model definition for location %s not found", missing);
}
}
isLoading = false;
}
private static final Map<RegistryDelegate<Block>, IStateMapper> customStateMappers = Maps.newHashMap();
public static void setCustomStateMapper(Block block, IStateMapper mapper)
{
customStateMappers.put(block.delegate, mapper);
}
public static void onRegisterAllBlocks(BlockModelShapes shapes)
{
for (Entry<RegistryDelegate<Block>, IStateMapper> e : customStateMappers.entrySet())
{
shapes.registerBlockWithStateMapper(e.getKey().get(), e.getValue());
}
}
private static final Map<RegistryDelegate<Item>, ItemMeshDefinition> customMeshDefinitions = com.google.common.collect.Maps.newHashMap();
private static final Map<Pair<RegistryDelegate<Item>, Integer>, ModelResourceLocation> customModels = com.google.common.collect.Maps.newHashMap();
public static void setCustomModelResourceLocation(Item item, int metadata, ModelResourceLocation model)
{
customModels.put(Pair.of(item.delegate, metadata), model);
}
public static void setCustomMeshDefinition(Item item, ItemMeshDefinition meshDefinition)
{
customMeshDefinitions.put(item.delegate, meshDefinition);
}
public static void onRegisterItems(ItemModelMesher mesher)
{
for (Map.Entry<RegistryDelegate<Item>, ItemMeshDefinition> e : customMeshDefinitions.entrySet())
{
mesher.register(e.getKey().get(), e.getValue());
}
for (Entry<Pair<RegistryDelegate<Item>, Integer>, ModelResourceLocation> e : customModels.entrySet())
{
mesher.register(e.getKey().getLeft().get(), e.getKey().getRight(), e.getValue());
}
}
} |
package net.peelo.kahvi.compiler.ast.type;
import net.peelo.kahvi.compiler.ast.AtomVisitor;
import net.peelo.kahvi.compiler.util.Name;
import net.peelo.kahvi.compiler.util.SourcePosition;
import java.util.Collections;
import java.util.List;
public final class ClassType extends ReferenceType
{
/** Qualified name of the type. */
private final Name className;
/** Optional type arguments. */
private final List<TypeArgument> typeArguments;
public ClassType(SourcePosition position, Name className)
{
this(position, className, Collections.<TypeArgument>emptyList());
}
public ClassType(SourcePosition position,
Name className,
List<TypeArgument> typeArguments)
{
super(position);
this.className = className;
for (TypeArgument ta : (this.typeArguments = typeArguments))
{
ta.setEnclosingScope(this);
}
}
/**
* Returns qualified name of the class.
*/
public Name getClassName()
{
return this.className;
}
/**
* Returns list of optional type arguments.
*/
public List<TypeArgument> getTypeArguments()
{
return this.typeArguments;
}
@Override
public <R, P> R accept(TypeVisitor<R, P> visitor, P p)
{
return visitor.visitClassType(this, p);
}
@Override
public <R, P> R accept(TypeArgumentVisitor<R, P> visitor, P p)
{
return visitor.visitClassType(this, p);
}
@Override
public <R, P> R accept(AtomVisitor<R, P> visitor, P p)
{
return visitor.visitClassType(this, p);
}
@Override
public String toString()
{
StringBuilder sb = new StringBuilder();
sb.append(this.className);
if (!this.typeArguments.isEmpty())
{
boolean first = true;
sb.append('<');
for (TypeArgument ta : this.typeArguments)
{
if (first)
{
first = false;
} else {
sb.append(", ");
}
sb.append(ta);
}
sb.append('>');
}
return sb.toString();
}
} |
package net.twasi.core.api.ws;
import com.google.gson.*;
import net.twasi.core.api.ws.models.TwasiWebsocketAnswer;
import net.twasi.core.api.ws.models.TwasiWebsocketClient;
import net.twasi.core.api.ws.models.WebsocketHandledException;
import net.twasi.core.logger.TwasiLogger;
import org.eclipse.jetty.websocket.api.CloseStatus;
import org.eclipse.jetty.websocket.api.Session;
import org.eclipse.jetty.websocket.api.WebSocketException;
import org.eclipse.jetty.websocket.api.annotations.*;
import org.eclipse.jetty.websocket.servlet.WebSocketServlet;
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@WebSocket
public class TwasiWebsocketServlet extends WebSocketServlet {
public static final TwasiWebsocketTopicManager topicManager = new TwasiWebsocketTopicManager();
private static List<TwasiWebsocketClient> clients = new ArrayList<>();
private Session session;
private TwasiWebsocketClient client;
private boolean keepAlive = true;
public TwasiWebsocketServlet() {
JsonObject ob = new JsonObject();
ob.add("type", new JsonPrimitive("shutdown"));
ob.add("reason", new JsonPrimitive("Twasi Core is shutting down. This hasn't to do with you, my friend. Maybe it's restarting, we will find out!"));
Runtime.getRuntime().addShutdownHook(new Thread(() -> clients.forEach(c -> {
c.getConnection().close(new CloseStatus(1012, "Twasi Core is shutting down. This hasn't to do with you, my friend. Maybe it's restarting, we will find out!")); // 1012 = Service restarting
})));
}
@Override
public void configure(WebSocketServletFactory webSocketServletFactory) {
webSocketServletFactory.register(getClass());
}
@OnWebSocketClose
public void onClose(int statusCode, String reason) {
clients = clients.stream().filter(el -> !el.getConnection().equals(session)).collect(Collectors.toList());
}
@OnWebSocketError
public void onError(Throwable t) {
TwasiLogger.log.error("Websocket API error: ", t);
}
@OnWebSocketConnect
public void onConnect(Session session) throws IOException {
this.session = session;
TwasiWebsocketClient client = new TwasiWebsocketClient(session, null);
clients.add(this.client = client);
client.send(TwasiWebsocketAnswer.success(new JsonPrimitive("Connection established sucessfully")).toString());
Thread thread = new Thread(() -> {
while (keepAlive) {
try {
TimeUnit.SECONDS.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
JsonObject ob = new JsonObject();
ob.add("type", new JsonPrimitive("keepalive"));
ob.add("timestamp", new JsonPrimitive(Calendar.getInstance().getTime().getTime()));
try {
client.send(ob.toString());
} catch (IOException ignored) {
} catch (WebSocketException e) {
this.onClose(0, "");
}
}
});
thread.setDaemon(true);
thread.start();
}
@OnWebSocketMessage
public void onMessage(String s) throws IOException {
String ref = null;
JsonElement result;
try {
JsonObject element = new JsonParser().parse(s).getAsJsonObject();
if (element.has("ref")) ref = element.get("ref").getAsString();
result = topicManager.handle(client, element);
} catch (JsonParseException e) {
JsonObject ob = new JsonObject();
ob.add("status", new JsonPrimitive("INVALID_INPUT"));
ob.add("description", new JsonPrimitive("Please send a valid JSON string."));
result = ob;
} catch (WebsocketHandledException e) {
JsonObject ob = new JsonObject();
ob.add("status", new JsonPrimitive("ERROR"));
ob.add("description", new JsonPrimitive("A known Error occurred: " + e.getMessage() + " Error ref-id: " /* TODO Add ref id */));
result = ob;
} catch (Exception e) {
JsonObject ob = new JsonObject();
ob.add("status", new JsonPrimitive("ERROR"));
ob.add("description", new JsonPrimitive("An Error occurred. If this keeps happening please inform the team with error ref-id: " /* TODO Add ref id */));
result = ob;
}
JsonObject response = new JsonObject();
if (ref != null) response.add("ref", new JsonPrimitive(ref));
else response.add("ref", null);
response.add("result", result);
session.getRemote().sendString(response.toString());
}
} |
package net.wizardsoflua.wol.spell;
import java.util.Collections;
import java.util.Deque;
import java.util.List;
import net.minecraft.command.CommandException;
import net.minecraft.command.ICommandSender;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.math.BlockPos;
import net.wizardsoflua.WizardsOfLua;
import net.wizardsoflua.WolAnnouncementMessage;
import net.wizardsoflua.wol.menu.CommandAction;
import net.wizardsoflua.wol.menu.MenuEntry;
public class SpellBreakAction extends MenuEntry implements CommandAction {
private static final String ALL = "all";
private static final String BY_SID = "bySid";
private static final String BY_NAME = "byName";
private static final String BY_OWNER = "byOwner";
private final WizardsOfLua wol;
public SpellBreakAction(WizardsOfLua wol) {
this.wol = wol;
}
@Override
public List<String> getTabCompletions(MinecraftServer server, ICommandSender sender,
Deque<String> argList, BlockPos targetPos) {
if (argList.size() == 1) {
String next = argList.poll();
return getMatchingTokens(next, ALL, BY_NAME, BY_OWNER, BY_SID);
}
String filterKey = argList.poll();
if (argList.size() == 1) {
if (BY_OWNER.equals(filterKey)) {
return getMatchingTokens(argList.poll(), server.getOnlinePlayerNames());
}
if (BY_SID.equals(filterKey)) {
return getMatchingTokens(argList.poll(), wol.getSpellRegistry().getActiveSids());
}
if (BY_NAME.equals(filterKey)) {
return getMatchingTokens(argList.poll(), wol.getSpellRegistry().getActiveNames());
}
}
return Collections.emptyList();
}
@Override
public void execute(ICommandSender sender, Deque<String> argList) throws CommandException {
String option = argList.poll();
if (ALL.equalsIgnoreCase(option)) {
wol.getSpellRegistry().breakAll();
// TODO I18n
sender.sendMessage(new WolAnnouncementMessage("Broke all spells"));
} else if (BY_SID.equalsIgnoreCase(option)) {
String sidString = argList.poll();
// TODO throw command exception if value is not an integer or null
int sid = Integer.parseInt(sidString);
boolean found = wol.getSpellRegistry().breakBySid(sid);
if (found) {
// TODO I18n
sender.sendMessage(new WolAnnouncementMessage(String.format("Broke %s spell", 1)));
} else {
throw new CommandException("No matching spell found!");
}
} else if (BY_NAME.equalsIgnoreCase(option)) {
String name = argList.poll();
// TODO support names with white spaces!
// TODO throw command exception if value is null
int count = wol.getSpellRegistry().breakByName(name);
if (count == 1) {
// TODO I18n
sender.sendMessage(new WolAnnouncementMessage(String.format("Broke %s spell", count)));
} else if (count > 1) {
// TODO I18n
sender.sendMessage(new WolAnnouncementMessage(String.format("Broke %s spells", count)));
} else {
throw new CommandException("No matching spells found!");
}
} else if (BY_OWNER.equalsIgnoreCase(option)) {
String ownerName = argList.poll();
// TODO throw command exception if value is null
int count = wol.getSpellRegistry().breakByOwner(ownerName);
if (count == 1) {
// TODO I18n
sender.sendMessage(new WolAnnouncementMessage(String.format("Broke %s spell", count)));
} else if (count > 1) {
// TODO I18n
sender.sendMessage(new WolAnnouncementMessage(String.format("Broke %s spells", count)));
} else {
throw new CommandException("No matching spells found!");
}
} else {
// TODO I18n
throw new CommandException("Illegal spell break option: %s!", option);
}
}
} |
package net.xprova.propertylanguage;
import java.util.ArrayList;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.ParseTree;
import net.xprova.propertylanguage.PropertyLanguageParser.AtomContext;
import net.xprova.propertylanguage.PropertyLanguageParser.PropertyContext;
public class PropertyBuilder {
// these must be the same as their correspondents in grammar:
public static final String NOT = "~";
public static final String AND = "&";
public static final String XOR = "^";
public static final String OR = "|";
public static final String EQ = "==";
public static final String NEQ = "!=";
public static final String IMPLY = "|->";
public static final String IMPLY_NEXT = "|=>";
public static final String LPAREN = "(";
public static final String AT = "@";
public static final String HASH = "
public static final String DOUBLE_HASH = "
public static final String ROSE = "$rose";
public static final String FELL = "$fell";
public static final String STABLE = "$stable";
public static final String CHANGED = "$changed";
public static final String ALWAYS = "$always";
public static final String NEVER = "$never";
public static final String EVENTUALLY = "$eventually";
private static void rewriteSyntaticSugar(Property root) {
for (Property c : root.children)
rewriteSyntaticSugar(c);
// change (x |=> #n y) into (x |-> #n+1 y)
if (root.name.equals(IMPLY_NEXT)) {
root.name = IMPLY;
root.children.get(1).delay -= 1;
}
// change (x |-> y) into (~x | y)
if (root.name.equals(IMPLY)) {
Property c1 = root.children.get(0);
Property c2 = root.children.get(1);
Property notC1 = Property.build(NOT).child(c1);
root.children(notC1, c2).name = OR;
}
// change ($rose(x)) into (~x & #1 x)
if (root.name.equals(ROSE)) {
Property c1 = root.children.get(0);
Property notC1 = Property.build(NOT).child(new Property(c1));
notC1.delay += 2;
root.children(c1, notC1).name = AND;
}
// change ($fell(x)) into (x & #1 ~x)
if (root.name.equals(FELL)) {
Property c1 = root.children.get(0);
Property notC1 = Property.build(NOT).child(new Property(c1));
c1.delay += 1;
root.children(c1, notC1).name = AND;
}
// $stable(x) into ~(x ^ #1 x)
if (root.name.equals(STABLE)) {
Property c1 = new Property(root.children.get(0));
Property c2 = new Property(root.children.get(0));
c2.delay += 1;
Property xorN = Property.build(XOR).children(c1, c2);
root.child(xorN).name = NOT;
}
// $changed(x) into (x ^ #1 x)
if (root.name.equals(CHANGED)) {
Property c1 = new Property(root.children.get(0));
Property c2 = new Property(root.children.get(0));
c2.delay += 1;
root.children(c1, c2).name = XOR;
}
// (x == y) into ~(x ^ y)
if (root.name.equals(EQ)) {
Property xorNode = new Property(root);
xorNode.name = XOR;
root.child(xorNode).name = NOT;
}
// (x != y) into (x ^ y)
if (root.name.equals(NEQ)) {
root.name = XOR;
}
// $never(x) into $always(~x)
if (root.name.equals(NEVER)) {
Property notChild = Property.build(NOT).children(root.children);
root.child(notChild).name = ALWAYS;
}
}
private static Property parseAST(ParseTree root) throws Exception {
ArrayList<Property> children = new ArrayList<Property>();
if (root.getChildCount() == 1) {
if (root.getPayload() instanceof AtomContext) {
return Property.build(root.getText());
} else {
return parseAST(root.getChild(0));
}
}
if (root.getChildCount() == 2) {
// NOT
children.add(parseAST(root.getChild(1)));
return Property.build(root.getChild(0).getText()).children(children);
}
String c0 = root.getChild(0).getText();
String c1 = root.getChild(1).getText();
if (ROSE.equals(c0) || FELL.equals(c0) || STABLE.equals(c0) || CHANGED.equals(c0) || ALWAYS.equals(c0)
|| NEVER.equals(c0)) {
children.add(parseAST(root.getChild(2)));
return Property.build(c0).children(children);
}
if (AND.equals(c1) || XOR.equals(c1) || OR.equals(c1)) {
for (int i = 0; i < root.getChildCount(); i += 2)
children.add(parseAST(root.getChild(i)));
return Property.build(c1).children(children);
}
if (DOUBLE_HASH.equals(c1)) {
int cumDelay = 0;
for (int i = 0; i < root.getChildCount(); i++) {
ParseTree ci = root.getChild(i);
if (ci.getPayload() instanceof Token) {
Token pl = (Token) ci.getPayload();
if (DOUBLE_HASH.equals(pl.getText())) {
cumDelay += 1;
} else {
// token is NUM
// mind the (-1): we've incremented cumDelay when
// we processed the preceding DOUBLE_DASH so this
// is to make the total increase due to ##n equal
// to n
cumDelay += Integer.valueOf(pl.getText()) - 1;
}
} else {
// this is an identifier
Property childNode = parseAST(ci);
childNode.delay -= cumDelay;
children.add(childNode);
}
}
return Property.build(AND).children(children);
}
if (EQ.equals(c1) || NEQ.equals(c1) || IMPLY.equals(c1) || IMPLY_NEXT.equals(c1)) {
children.add(parseAST(root.getChild(0)));
children.add(parseAST(root.getChild(2)));
return Property.build(c1).children(children);
}
if (c0.equals(LPAREN)) {
children.add(parseAST(root.getChild(1)));
return Property.build(LPAREN).children(children);
}
if (c0.equals(AT)) {
children.add(parseAST(root.getChild(2)));
int delay = Integer.valueOf(c1);
return Property.build(LPAREN).children(children).delay(delay);
}
if (c0.equals(HASH)) {
children.add(parseAST(root.getChild(2)));
int delay = -Integer.valueOf(c1);
return Property.build(LPAREN).children(children).delay(delay);
}
System.out.println(root.getText());
throw new Exception("error while traversing property AST");
}
public static Property build(String str) throws Exception {
// step 1: generate property AST
ANTLRInputStream antlr = new ANTLRInputStream(str);
PropertyLanguageLexer lexer1 = new PropertyLanguageLexer(antlr);
CommonTokenStream tokenStream = new CommonTokenStream(lexer1);
PropertyLanguageParser p1 = new PropertyLanguageParser(tokenStream);
PropertyContext e = p1.property();
// step 2: traverse AST to generate expression tree
Property root = parseAST(e.getChild(0));
// step 3: process syntactic sugar
rewriteSyntaticSugar(root);
// step 4: normalise delays
root.flattenDelays(0);
root.addDelayRecur(-root.getMinDelay(0));
root.groupDelays();
root.print();
return root;
}
} |
package archimulator.sim.uncore.cache.replacement.prefetchAware;
import archimulator.sim.common.report.ReportNode;
import archimulator.sim.uncore.MemoryHierarchyAccess;
import archimulator.sim.uncore.cache.EvictableCache;
import archimulator.sim.uncore.cache.replacement.AbstractCacheReplacementPolicy;
import archimulator.sim.uncore.cache.replacement.CacheReplacementPolicy;
import archimulator.sim.uncore.cache.setDueling.AbstractSetDuelingUnit;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
/**
* Abstract Set dueling based cache replacement policy.
*
* @param <StateT> the state type of the parent evictable cache
* @author Min Cai
*/
public abstract class AbstractSetDuelingCacheReplacementPolicy<StateT extends Serializable> extends AbstractCacheReplacementPolicy<StateT> implements SetDuelingCacheReplacementPolicy<StateT> {
// Chooses which candidate policy governs a given set.
private AbstractSetDuelingUnit setDuelingUnit;
// Candidate replacement policies being dueled; index matches the unit's policy ids.
private List<CacheReplacementPolicy<StateT>> policies;
/**
 * Create a set dueling based cache replacement policy for the specified evictable cache.
 *
 * @param cache the parent evictable cache
 * @param policies the candidate replacement policies to duel between
 */
@SuppressWarnings("unchecked")
public AbstractSetDuelingCacheReplacementPolicy(EvictableCache<StateT> cache, CacheReplacementPolicy<StateT>... policies) {
super(cache);
this.policies = Arrays.asList(policies);
// Subclass decides the concrete dueling scheme for this cache/policy count.
this.setDuelingUnit = this.createSetDuelingUnit(cache, policies.length);
}
/**
 * Create an set dueling unit.
 *
 * @param cache the parent evictable cache
 * @param numPolicies the number of policies
 * @return the newly created set dueling unit
 */
protected abstract AbstractSetDuelingUnit createSetDuelingUnit(EvictableCache<StateT> cache, int numPolicies);
/**
 * Get the cache replacement policy for the specified access and set index.
 *
 * <p>NOTE(review): the {@code access} parameter is currently unused and the
 * second argument to getPolicyId is hard-coded to 0 -- confirm this is intended.
 *
 * @param access the memory hierarchy access
 * @param set the set index
 * @return the cache replacement policy for the specified access and set index
 */
public CacheReplacementPolicy<StateT> getPolicy(MemoryHierarchyAccess access, int set) {
return this.policies.get(this.setDuelingUnit.getPolicyId(set, 0));
}
// Intentionally empty: this base class contributes no statistics of its own.
@Override
public void dumpStats(ReportNode reportNode) {
}
/**
 * Get the set dueling unit.
 *
 * @return the set dueling unit
 */
public AbstractSetDuelingUnit getSetDuelingUnit() {
return setDuelingUnit;
}
}
package org.animotron.expression;
import org.animotron.graph.builder.GraphBuilder;
import org.neo4j.graphdb.Relationship;
/**
* @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
* @author <a href="mailto:gazdovsky@gmail.com">Evgeny Gazdovsky</a>
*
*/
public abstract class AbstractExpression extends Expression {
private Relationship relationship = null;
protected final GraphBuilder builder;
public AbstractExpression(GraphBuilder builder) {
this.builder = builder;
}
public AbstractExpression(Relationship r) {
builder = null;
relationship = r;
}
public abstract void build() throws Throwable;
@Override
protected synchronized Relationship relationship() {
if (relationship == null) {
try {
builder.build(this);
relationship = builder.relationship();
} catch (Throwable t) {
t.printStackTrace();
throw new RuntimeException(t);
}
}
return relationship;
}
} |
package stroom.processor.impl.db;
import stroom.docref.DocRef;
import stroom.processor.shared.ProcessorFilter;
import stroom.processor.shared.QueryData;
import stroom.query.api.v2.ExpressionOperator;
import stroom.query.api.v2.ExpressionTerm;
import stroom.query.api.v2.ExpressionTerm.Condition;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.UUID;
import javax.xml.bind.JAXBException;
class TestProcessorFilterMarshaller {
private static final Logger LOGGER = LoggerFactory.getLogger(TestProcessorFilterMarshaller.class);
@Test
void testMarshall() throws JAXBException {
final QueryData queryData = new QueryData();
queryData.setDataSource(DocRef.builder()
.uuid(UUID.randomUUID().toString())
.type("Index")
.name("Some idx")
.build());
queryData.setExpression(
ExpressionOperator.builder()
.addTerm(ExpressionTerm.builder()
.field("SomeField")
.condition(Condition.EQUALS)
.value("xxxx")
.build())
.build()
);
queryData.setParams("");
final ProcessorFilter processorFilter = new ProcessorFilter();
// Blank tracker
processorFilter.setReprocess(true);
processorFilter.setEnabled(true);
processorFilter.setPriority(1);
processorFilter.setProcessor(null);
processorFilter.setQueryData(queryData);
processorFilter.setMinMetaCreateTimeMs(System.currentTimeMillis());
processorFilter.setMaxMetaCreateTimeMs(System.currentTimeMillis());
final ProcessorFilterMarshaller processorFilterMarshaller = new ProcessorFilterMarshaller();
final ProcessorFilter marshalled = processorFilterMarshaller.marshal(processorFilter);
Assertions.assertThat(marshalled.getData())
.isNotBlank();
LOGGER.debug("marshalled:\n{}", marshalled.getData());
}
} |
package org.blackbananacoin.common.bitcoin;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.text.SimpleDateFormat;
import java.util.Date;
import com.google.bitcoin.core.ECKey;
import com.google.bitcoin.core.NetworkParameters;
import com.google.bitcoin.core.Utils;
public class Bitcoins {
public static final long COIN = 100000000L;;
public static final SimpleDateFormat FMDateKeyContent = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'");
/**
* @param addr
* @param amount
* @return
*/
public static String buildUri(String addr, double amount) {
checkNotNull(addr);
final StringBuilder uri = new StringBuilder("bitcoin:");
uri.append(addr);
uri.append("?amount=").append(String.format("%.8f", amount));
return uri.toString();
}
public static ECKey createECKeyByDoubleDigestString(String seed)
throws UnsupportedEncodingException {
ECKey eckey = new ECKey(new BigInteger(1, Utils.doubleDigest(seed
.getBytes("UTF-8"))));
return eckey;
}
public static String walletPrivateKeyFormat(ECKey k,
NetworkParameters params, int hrAgo) {
// wallet key save to /mnt/sdcard/datelabel.key
// private key file content
// BitcoinxxxxxxxxxxxxxxxAddress 2014-01-28T09:13:54Z
long time = new Date().getTime();
long createTime = time - hrAgo * 60 * 60 * 1000;
String timeInKey = FMDateKeyContent.format(new Date(createTime));
String keyContent = "# KEEP YOUR PRIVATE KEYS SAFE !\n"
+ k.getPrivateKeyEncoded(params).toString() + " " + timeInKey
+ "\n# End of private keys";
return keyContent;
}
} |
package peergos.server.storage;
import com.amazonaws.*;
import com.amazonaws.auth.*;
import com.amazonaws.client.builder.*;
import com.amazonaws.services.s3.*;
import com.amazonaws.services.s3.model.*;
import peergos.server.corenode.*;
import peergos.server.sql.*;
import peergos.server.util.*;
import peergos.shared.cbor.*;
import peergos.shared.crypto.hash.*;
import peergos.shared.io.ipfs.cid.*;
import peergos.shared.io.ipfs.multibase.binary.*;
import peergos.shared.storage.*;
import peergos.shared.io.ipfs.multihash.*;
import io.prometheus.client.Histogram;
import peergos.shared.util.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.*;
public class S3BlockStorage implements ContentAddressedStorage {
// Global JVM logger (shared across the process).
private static final Logger LOG = Logger.getGlobal();
// Prometheus histogram of block read latencies (16 exponential buckets from 10 ms).
private static final Histogram readTimerLog = Histogram.build()
.labelNames("filesize")
.name("block_read_seconds")
.help("Time to read a block from immutable storage")
.exponentialBuckets(0.01, 2, 16)
.register();
// Prometheus histogram of S3 block write latencies (same bucket layout).
private static final Histogram writeTimerLog = Histogram.build()
.labelNames("filesize")
.name("s3_block_write_seconds")
.help("Time to write a block to immutable storage")
.exponentialBuckets(0.01, 2, 16)
.register();
// Identity of this storage node, returned by id().
private final Multihash id;
private final AmazonS3 s3Client;
// Target bucket and key prefix ("folder", empty or ending in '/') for all block objects.
private final String bucket, folder;
// Tracks open write transactions so in-flight blocks are visible to GC.
private final TransactionStore transactions;
/**
 * Creates a block store backed by the configured S3 bucket and key prefix.
 */
public S3BlockStorage(S3Config config, Multihash id, TransactionStore transactions) {
    this.id = id;
    this.transactions = transactions;
    this.bucket = config.bucket;
    // Normalise the prefix so it is either empty or ends with exactly one '/'.
    this.folder = config.path.isEmpty() || config.path.endsWith("/") ? config.path : config.path + "/";
    BasicAWSCredentials credentials = new BasicAWSCredentials(config.accessKey, config.secretKey);
    AwsClientBuilder.EndpointConfiguration endpoint =
            new AwsClientBuilder.EndpointConfiguration(config.regionEndpoint, config.region);
    this.s3Client = AmazonS3ClientBuilder.standard()
            .withEndpointConfiguration(endpoint)
            .withCredentials(new AWSStaticCredentialsProvider(credentials))
            .build();
    LOG.info("Using S3 Block Storage at " + config.regionEndpoint + ", bucket " + config.bucket + ", path: " + config.path);
}
// To stay compatible with IPFS, keys are the cid bytes encoded as unpadded
// uppercase base32.
private static String hashToKey(Multihash hash) {
    String encoded = new Base32().encodeAsString(hash.toBytes());
    int firstPad = encoded.indexOf("=");
    if (firstPad > 0)
        return encoded.substring(0, firstPad);
    return encoded;
}
// Inverse of hashToKey: strip the folder prefix, then decode the base32 cid
// bytes (same scheme as IPFS).
private Multihash keyToHash(String key) {
    String encoded = key.substring(folder.length());
    return Cid.cast(new Base32().decode(encoded));
}
/**
 * Fetches and decodes a cbor block. Raw cids are rejected: those must go
 * through getRaw.
 */
@Override
public CompletableFuture<Optional<CborObject>> get(Multihash hash) {
    boolean isRawCid = hash instanceof Cid && ((Cid) hash).codec == Cid.Codec.Raw;
    if (isRawCid)
        throw new IllegalStateException("Need to call getRaw if cid is not cbor!");
    return getRaw(hash).thenApply(opt -> opt.map(CborObject::fromByteArray));
}
/**
 * Reads a block's raw bytes from S3, timing the read; a missing key maps to
 * Optional.empty() via {@link #map}.
 */
@Override
public CompletableFuture<Optional<byte[]>> getRaw(Multihash hash) {
    return Futures.of(map(hash, h -> {
        GetObjectRequest request = new GetObjectRequest(bucket, folder + hashToKey(hash));
        Histogram.Timer timer = readTimerLog.labels("read").startTimer();
        try (S3Object object = s3Client.getObject(request);
             DataInputStream din = new DataInputStream(new BufferedInputStream(object.getObjectContent()))) {
            return Optional.of(Serialize.readFully(din));
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            timer.observeDuration();
        }
    }, e -> Optional.empty()));
}
// Pinning is a no-op for this store: each call just acknowledges its argument.
@Override
public CompletableFuture<List<Multihash>> pinUpdate(PublicKeyHash owner, Multihash existing, Multihash updated) {
return Futures.of(Collections.singletonList(updated));
}
@Override
public CompletableFuture<List<Multihash>> recursivePin(PublicKeyHash owner, Multihash hash) {
return Futures.of(Collections.singletonList(hash));
}
@Override
public CompletableFuture<List<Multihash>> recursiveUnpin(PublicKeyHash owner, Multihash hash) {
return Futures.of(Collections.singletonList(hash));
}
// GC is not available through this interface; see collectGarbage below.
@Override
public CompletableFuture<Boolean> gc() {
return Futures.errored(new IllegalStateException("S3 doesn't implement GC!"));
}
// Mark-and-sweep over all stored blocks: anything unreachable from the current
// pointer roots and not part of an open transaction is deleted.
private void collectGarbage(JdbcIpnsAndSocial pointers) {
// TODO: do this more efficiently with a bloom filter, and streaming
List<Multihash> present = getFiles(Integer.MAX_VALUE);
List<Multihash> pending = transactions.getOpenTransactionBlocks();
// This pointers call must happen AFTER the previous two for correctness
List<Multihash> currentRoots = pointers.getAllTargets(this);
// Mark phase: bit i set means present.get(i) is reachable.
BitSet reachable = new BitSet(present.size());
for (Multihash root : currentRoots) {
markReachable(root, present, reachable);
}
// Blocks in open transactions are protected even though no root reaches them yet.
for (Multihash additional : pending) {
int index = present.indexOf(additional);
if (index >= 0)
reachable.set(index);
}
// Sweep phase: delete everything left unmarked.
for (int i=0; i < present.size(); i++)
if (! reachable.get(i))
delete(present.get(i));
}
/**
 * Marks {@code root} and everything reachable from it in {@code reachable}
 * (bit index = position in {@code present}).
 */
private void markReachable(Multihash root, List<Multihash> present, BitSet reachable) {
    int index = present.indexOf(root);
    if (index >= 0) {
        // Already-marked nodes were fully traversed before; skipping them avoids
        // re-walking shared subtrees (previously this could blow up on DAGs with
        // heavy sharing, and would never terminate on a cycle through stored blocks).
        if (reachable.get(index))
            return;
        reachable.set(index);
    }
    List<Multihash> links = getLinks(root).join();
    for (Multihash link : links) {
        markReachable(link, present, reachable);
    }
}
/**
 * Returns the stored size of a block via an S3 HEAD request; absent keys map
 * to Optional.empty().
 */
@Override
public CompletableFuture<Optional<Integer>> getSize(Multihash hash) {
    return Futures.of(map(hash, h -> {
        // Identity hashes embed their content in the cid; nothing is stored for them.
        if (hash.isIdentity())
            return Optional.of(0);
        Histogram.Timer timer = readTimerLog.labels("size").startTimer();
        try {
            ObjectMetadata metadata = s3Client.getObjectMetadata(bucket, folder + hashToKey(hash));
            return Optional.of((int) metadata.getContentLength());
        } finally {
            timer.observeDuration();
        }
    }, e -> Optional.empty()));
}
// True if an object for this block exists in the bucket; any S3 failure maps to false.
public boolean contains(Multihash key) {
return map(key, h -> s3Client.doesObjectExist(bucket, folder + hashToKey(h)), e -> false);
}
/**
 * Applies {@code success} to {@code hash}, translating AWS failures: a
 * "NoSuchKey" rejection is routed to {@code absent}; every other AWS error is
 * logged and rethrown as a RuntimeException with the cause preserved.
 */
public <T> T map(Multihash hash, Function<Multihash, T> success, Function<Throwable, T> absent) {
    try {
        return success.apply(hash);
    } catch (AmazonServiceException e) {
        // The request reached S3 but was rejected.
        if ("NoSuchKey".equals(e.getErrorCode())) {
            // Record the miss in the read histogram (duration ~0, label "absent").
            Histogram.Timer readTimer = readTimerLog.labels("absent").startTimer();
            readTimer.observeDuration();
            return absent.apply(e);
        }
        LOG.warning("AmazonServiceException: " + e.getMessage());
        LOG.warning("AWS Error Code: " + e.getErrorCode());
        throw new RuntimeException(e.getMessage(), e);
    } catch (AmazonClientException e) {
        // Client-side failure before reaching S3 (e.g. no network).
        LOG.severe("AmazonClientException: " + e.getMessage());
        // getCause() can be null; String.valueOf prevents an NPE here from
        // masking the original error.
        LOG.severe("Thrown at:" + String.valueOf(e.getCause()));
        throw new RuntimeException(e.getMessage(), e);
    }
}
// The node identity supplied at construction time.
@Override
public CompletableFuture<Multihash> id() {
return Futures.of(id);
}
// Transactions simply delegate to the transaction store; they exist so GC can
// protect blocks that are mid-upload.
@Override
public CompletableFuture<TransactionId> startTransaction(PublicKeyHash owner) {
return CompletableFuture.completedFuture(transactions.startTransaction(owner));
}
@Override
public CompletableFuture<Boolean> closeTransaction(PublicKeyHash owner, TransactionId tid) {
transactions.closeTransaction(owner, tid);
return CompletableFuture.completedFuture(true);
}
// Stores cbor blocks (DagCbor codec); delegates to the private overload.
@Override
public CompletableFuture<List<Multihash>> put(PublicKeyHash owner,
PublicKeyHash writer,
List<byte[]> signatures,
List<byte[]> blocks,
TransactionId tid) {
return put(owner, writer, signatures, blocks, false, tid);
}
// Stores raw (non-cbor) blocks; delegates to the private overload.
@Override
public CompletableFuture<List<Multihash>> putRaw(PublicKeyHash owner,
PublicKeyHash writer,
List<byte[]> signatures,
List<byte[]> blocks,
TransactionId tid) {
return put(owner, writer, signatures, blocks, true, tid);
}
// Stores each block in order and returns the resulting cids, preserving order.
private CompletableFuture<List<Multihash>> put(PublicKeyHash owner,
                                               PublicKeyHash writer,
                                               List<byte[]> signatures,
                                               List<byte[]> blocks,
                                               boolean isRaw,
                                               TransactionId tid) {
    List<Multihash> hashes = new ArrayList<>();
    for (byte[] block : blocks)
        hashes.add(put(block, isRaw, tid, owner));
    return CompletableFuture.completedFuture(hashes);
}
/** Must be atomic relative to reads of the same key.
 *
 * Hashes {@code data} (sha2-256), derives the cid, registers the block with the
 * open transaction, then uploads it to S3 under the cid-derived key.
 *
 * @param data  the block bytes
 * @param isRaw whether to use the Raw codec (vs DagCbor)
 * @param tid   the open transaction protecting this block from GC
 * @param owner the owner of the block
 * @return the cid of the stored block
 */
public Multihash put(byte[] data, boolean isRaw, TransactionId tid, PublicKeyHash owner) {
    Histogram.Timer writeTimer = writeTimerLog.labels("write").startTimer();
    try {
        Multihash hash = new Multihash(Multihash.Type.sha2_256, Hash.sha256(data));
        Cid cid = new Cid(1, isRaw ? Cid.Codec.Raw : Cid.Codec.DagCbor, hash.type, hash.getHash());
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(data.length);
        PutObjectRequest put = new PutObjectRequest(bucket, folder + hashToKey(cid), new ByteArrayInputStream(data), metadata);
        // Register with the transaction before the upload so a concurrent GC
        // sweep never sees the new object as unaccounted-for.
        transactions.addBlock(cid, tid, owner);
        // Result was previously captured in an unused local; it is not needed.
        s3Client.putObject(put);
        return cid;
    } catch (AmazonServiceException e) {
        // The request reached S3 but was rejected.
        LOG.severe("AmazonServiceException: " + e.getMessage());
        LOG.severe("AWS Error Code: " + e.getErrorCode());
        throw new RuntimeException(e.getMessage(), e);
    } catch (AmazonClientException e) {
        // Client-side failure before reaching S3.
        LOG.severe("AmazonClientException: " + e.getMessage());
        // getCause() can be null; String.valueOf avoids a secondary NPE.
        LOG.severe("Thrown at:" + String.valueOf(e.getCause()));
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        writeTimer.observeDuration();
    }
}
// Lists up to maxReturned stored blocks as multihashes.
private List<Multihash> getFiles(long maxReturned) {
    List<Multihash> hashes = new ArrayList<>();
    applyToAll(summary -> hashes.add(keyToHash(summary.getKey())), maxReturned);
    return hashes;
}

// Lists up to maxReturned raw S3 object keys.
private List<String> getFilenames(long maxReturned) {
    List<String> keys = new ArrayList<>();
    applyToAll(summary -> keys.add(summary.getKey()), maxReturned);
    return keys;
}
/**
 * Streams every object under the folder prefix through {@code processor},
 * paging with listObjectsV2 (10k keys per page), stopping after
 * {@code maxObjects}. Directory placeholder keys (ending in '/') are skipped.
 * AWS failures are logged and swallowed (best-effort listing), preserving the
 * original behavior.
 */
private void applyToAll(Consumer<S3ObjectSummary> processor, long maxObjects) {
    try {
        LOG.log(Level.FINE, "Listing blobs");
        final ListObjectsV2Request req = new ListObjectsV2Request()
                .withBucketName(bucket)
                .withPrefix(folder)
                .withMaxKeys(10_000);
        ListObjectsV2Result result;
        long processedObjects = 0;
        do {
            result = s3Client.listObjectsV2(req);
            for (S3ObjectSummary objectSummary : result.getObjectSummaries()) {
                if (objectSummary.getKey().endsWith("/")) {
                    LOG.fine(" - " + objectSummary.getKey() + " " + "(directory)");
                    continue;
                }
                LOG.fine(" - " + objectSummary.getKey() + " " +
                        "(size = " + objectSummary.getSize() +
                        "; modified: " + objectSummary.getLastModified() + ")");
                processor.accept(objectSummary);
                processedObjects++;
                if (processedObjects >= maxObjects)
                    return;
            }
            LOG.log(Level.FINE, "Next Continuation Token : " + result.getNextContinuationToken());
            req.setContinuationToken(result.getNextContinuationToken());
        } while (result.isTruncated());
    } catch (AmazonServiceException ase) {
        // The request reached S3 but was rejected.
        LOG.severe("AmazonServiceException: " + ase.getMessage());
        LOG.severe("AWS Error Code: " + ase.getErrorCode());
    } catch (AmazonClientException ace) {
        // Client-side failure before reaching S3.
        LOG.severe("AmazonClientException: " + ace.getMessage());
        // getCause() can be null; String.valueOf prevents an NPE from masking
        // the original failure.
        LOG.severe("Thrown at:" + String.valueOf(ace.getCause()));
    }
}
// Permanently removes the S3 object backing this block.
public void delete(Multihash hash) {
    s3Client.deleteObject(new DeleteObjectRequest(bucket, folder + hashToKey(hash)));
}
public static void main(String[] args) throws Exception {
String b32 = "AFKQBEBABZVZ5Q7UPEQUXHIR64UR7J3KAL75JCSBC5WPCKAJXLKPSIAKW6REEQAFGC5IDWOYC46X3IXL7HRDPF6VSCEPENWMATFJUCVU57XAO2XD2EKD7OEVXEUM2VP7XHGXD2MKXKHWPCTAUM57NUYWS3FRYWICVFSPWVAYITJLBWL4VXD5SJ2DVISS2AY4ZRW232HZNNNWE4ZYFF6CC7TN27RNZVBAJV2GFZAK3PNOIPK7ENG7J67IMQEKJTTCJORVOEBDYUOFZ6RL3JCEQIRH6NCSQYTITKYQXXFOLJO4FLOD4ZHGTI34JJGQGQYS5VCL5RHSVWFCYNU5ZH54LXE6DYNCKJYSXI3TIZRQEK5ADGGD4WE55S53EJCI5VMDKPRNILMHU2VMEGH67KWH7BUQ5ZR4GZE34RS6I33ZA7ATNRL5SUIJXG6ZT2MXGT2NHFXRULUNC5N7LYHV452FOBU572YNVFW4BBWD4AC6PVZDQNUXKUAVNNW7IJW45XHCVZQP2LODBNMG5YTS627TXBQTHETK32VJM33BYFYJ7JWHHB7GPS24MRDPARVLIVFW7FX5X5RC56U754IKNHBBURGPVM5F4YRG64KNG3T2EWGPII76BQO3KF67S4ZXGI5HMJKLFBQTC7AR5LSIFE2IF33ZZ7NPVXNP2K37VODGFUWDVZXXHXKR5TH75U6IWYT43XM5U4M2L7RXXGJC5RC5C2FXLYEBQXPZZCIKSLP3Y5VHBXDSIO5277LLOHFYSZRWHGYZXWGEHRVRQAL64Y6ARBTBQNRCALASOB6MMVUCB5BMT2STS3MHR6M7RSVFBWVVEUWHXLPSGFOUOJG2F3VET4MGZJIEJSQVQLLQ5PFA272URQ2OO7YYV5XP7OBLXQKB3BN54YQXOGAS52RFI4V55VVCGVK5VOBGLS3HBSU5CCSKDZXRCLVHD5CAEZLITV64BY5KBDQ3WU3IJ4XY3TMVFNSSTC2HABMTOJJZWMKXWWS4WBS3HZYOBZKQ45DC4GGHEE4PD6WKOEWGPXG674RPZPJWJU6DVFZH2XKK5LN5CE2H7HWOW2K342HSVD5A6WEVWZSCOU5VSSQT23VJAB3KY3QE25OTVVE7A7DRBF23N3RHUUVUADDD64BZI775IGMRCPNPWYI2L6EJH2HOWDJ3JRQKHH6G2AZI5JNNSM66EGB6MAPATFOKIQUC2O2UI4SP7XYY2TSX3ZVHO57FCZEG7YONNPJRHGKM7JRUIH3OPMJ34RKO2P5NCXN6MUOKN6P5LGFQLOK2SZXQB46K42JRULEX3MNATBRDM6LC5XQOAWYNBQG2AVSMJMTTICDXW7WDNCRMLU3ULLPUJVK5K7AGUFLA3FST5ZWAYWM7VEKKHLBG27NQOUVN4KGP7JNZMVLRRJU5ZAFIS76I2N3AICGR7JKN75HWTSMED5D6DNE5GGT6MSN466O2HRDCJ4VH6H4QNDRBG6ZKX57ODNYZJ7XEZJQLIOM5R72SGY77PTK6YPXY7GPWMQ2IJB5Y55OIARXQDCOQAKYIKXI44GBXVJMGUFKFWVEZ2ARWFT7ZLY4N6KLZMWO4A5KZHKXQP7J3QWQGMEAB4J5QIZNWCO3BJ7XTR6FLNBQNOSBY4SHAG32HBTRXORPOVV3SVKF7CHHJU4S5WMDTUMC2MT2KN2NKJY4V5QZOBCYMOBTGPL2R5QEMHTPZ46PJHO6OWAYXO3XJXHBCCLXYEXAQVFMPE5UIKBJ7KWCDKTSPPELAZ2IYLRL7HPUYXTDGK6X2M5P42X4GGK4VTQDKWAZ62JIWC7BEZ54FYIA2OOKEVXO2W6A77LN3JXW2TTJOHB73OZO4YLDB3JMODKGBEJZYEEGWDLCOFW5MC3BY7CBBGNHIPWOYOEPUBGGXIVVQTIBZOLRINNDXIYAXHKZ2TADIHQALFWZRIWQLL5OXAIBOQCO4IZXJ3MU3SW5V4E7OU46OJB7LR5FEL3462E
HP24GEWLDKVHVTTKT4UJLTACBBBHVUWF4X56JFWAXWQQZUMEHOL4ANO4IRRMMHQSKXCY6BUSVRHUPUCQEY7KNE2WDMILYR6HM5E5UNQUHWMXBDTY5ZS42P32NTSM22VFYX3A6J56C2QKAOHTD27XDDQGDX7QKHJFKOGC55LIPNKV4RQX44AINMCJQYPVVPQL52WYMGEMLQAVTJYOC7AHPVMET3BF7EYPMUW6OHYECBF327C6PYDU5EH2KJCPLCCDRCBHOG5OB7NVE7O7JY4J67U44KBOLWWCQILM2H32J2RRTIAB6UAXRVWXAII2VIHR5HMAIXOXKXXN5WRMWEKQZDNUZRAVK6HMF6COWBXDVBNAKRNRGJHIMD7FPZFPEGYTGE5UBNISC5F37F32XMBVV22ZOHKYKRPSTCN6FVQBIMGK4DZOQPUPJQJ3E3WRR665Y4N3ZPFWFFESUHFJGSSW4LNOIYC7C7ZCD7IUJTZ3D32LVOOKLLFBJQBNOIAYVOWZJS2FPTRB4TLF6WBCFI3FWM42YJN564XBR2BLSQZVOWUM5YM2PYHCPRG5VPUXAPYKTRNQIS4COV5SB65AYPWX3BBVGGICZZUCA3FTLB3J6HDNWYGO6NPTG5AGX5P7SRBMZRHH3JRK6ESPJDUYX5L24UUFIFAED25SGENLSIVA656W3AINQFYDGQCPCKKBXZBLRQTO74ZLDC7ZNZAUADUT2T36LUTKUTEQSZTTP2WVMKT3HLNTYQQMGIAVZWRSJJYTVQCPG7XMDCSSPTHCUGU7XHGN42AHR3D42FHGINNLEY3QJTENOTCVOWVZQOQVPGFKMVRJDVV4PX57WXOM3T2JZ34CFXYP6GHMHJ57LOY77A5W4WFWRU3VGSZ4A6HBKGBR6TCA2HHA3FGSZ33U2IP6UCBGETFNKDQ7FS43SDSJU3K6DP4L2R2OQ6ZIZPUWUO2IIZW2GYM7GAOO5PFB7VFJHLIX2CKCNDL4LUOSI2HZ7AG3IK2DEWY64VJM6OK3PBE2MNYL53E5LMOAZ3R2YCO5T6PEANYIEC2BKK23XHGJGL4Z5F3PDEP4FZFNE2THBRHMBTCQP2NRQRJ4IRDD6HESAF47JZC4EHRIJT6MGCS3MQFLT3TWQCXVCIHSC6VX7FRKCNRT2HEGSRWSLCOGVSRNBKIISSKWXK6ALNRW4YNTFGEN47GPFGUVIXUVCGSAB5BZFFIKHWHRV3RGL4GD5YDLNDBIRYFEUNCLD4CC76VXYOCOXJ6BJ3VW7INAKST7E6GAOTVY6FUD42RQ6RULGNRSUOOF5HB34VSLQ4WNKRUSLFSPNUT5NFP25S2MDG6XGIT3565SVDGXR6DPRCZTHZGH2LSPBYSF3IBYHK3VNY6EPYMXOOVLTE6V3B4JB7A7KTYSB23MZLT3YALOHFEU4T6E2QMBMQQB5BF5PRWEPVPMCHCW2GOI4KCJY5BI2UKV62HIJ5RHZ7WC4YIURV4BS3JZSRPYXH75CAED5BKA7R235LLSFU4LNUPJ7PIG7ZO5GQCOEI52WIVTHA6VT2DS37IGZRHDLMPC5TWDU7CLOSS24LOOFZ7I2JE25V7PXMCEPWOXGF47E5PF4KZATFCXDHQ54K4GMEDIRNVCFNKFZUBVDFTW2LCARRMYDYIETVXLXHSOWRPDGEU333FTYHNQ2WQC2B774MKOD7C776KYZZFZZUJG3P5MJJ4E6FB7JIRZDD6OPNZWUBD53FZKTV6BE3EWO3P7KNESYBHCUUCPKC4336DFOPJFQSEPJPSBXZDZIZHNXOPMBEP4WDUUIDMGTGM2UOHPK64I42Q6ZE6RRFBV6NHUJAQ2UL7IMBAZ362366IRA7R4NRPUCOW2O7F2GN2YB6F6AHKYPBOTCRR6Q6346WUDYKTWYAZKUB57JWHXUBYJSASXXRN5RFTKTI7DHFINDRZAAI4LHSVMY4HA2FGOYXJ2G6T2WGH4HTG2YCIP35YPVOYYOUPEBUBN7CZG
7ASUIVHXXN7QG2IFZT5CA3JEHI7CAAYAXRVHSIKCQHWNRKGTSER7KM6ZPGQ3RPTYAEXI3ENYVF5AP5KIEJR7WJQWJ364TDZXWQYY7NYKDCSANWJ7WT2VTAXBH3BYJ64MPXRIDO3LWEAZ2BZ2AXFXS34XOWX4TCMEM2VDGFMZ2RIMT2IUOZNMD3UJE4GY6ARMCP7ISQ3PXASWFLVZJFIRAFJPK5BNP62XIM34EXQTGQJXNHYKBL7KOEREVXXRDYE3YFPUOSO7MA5Q2LOTQV2OGVDP5TQKZVCFG733TOXD3CHHLO55JPQIOLI5HCCUP7X4HW3UOBKYZX43BOPNFNNQ6EY3MPDNPRYGTVS7E4AETRO4624UEU3UTTZXN5ZE4HFMTTLSTXRTKD6VVDN6BNCRNPVHFRKUBJFRQL2HMJVBKGUNUA4TAXBHGB4PFOAOYRQT6JKCTEQMEFTDXMIEZJ6QFEAD4OGTMNWKYSFROSS5NVMBD7MC4HJJO2P32Z6HIHH5UWEJN43V7RI2RIG6CVMUAB52ETRZUWIRUJE37CM6AWGHDTWESFJMOZ7G4PTLMT347OCOAGPAYMQJOEO55V3CCOSO5J5VW3VRAE3NEFBNOFLKOWQK2Y5DZW2RQ7PJHVXDEDKZF4MFRB7RWVRZHYHMAH4A5D66VCW3D2ACIHKT6A5KGWRONNUPW7LS7MNPCIZEBD2IDFOL6OBNM4AWTBUHDPHH5BEEDDUX4VCBVIZYVKRFGZP3R6ZXTJBBZXCKMYWVXBC2LJANN62NU4E4ODE4LDEJRTZZ3ZJ4K5W6FLGZNISAXCUPEKRBS2XKTPKS4RBQTWXSROMNX2NNHQACVLWL77WNQFV4F5JAOCDGLUITQPZHSZX3COFZQ2D32LEQPB2EKEEITJOJO26KPUZ6VJQITOFHKYR4PHMS5XG2F5OFFHYWYJRZXD5AOWPWJWR6CKMEH3VKF6XGOGH55N7ZEWYG73L5MOB7PEQP5GEOSHMHLFMR3YLTASD7CJOUBUYLVJNCJHZAUVEAQLHRKJ6LNWLH24YVHH25CR7G75YLP2X2UUEYSB6NIWSHQDSOUNIKX3FIFCX6PEU63FXV6IPVZVYLFZ2FNQ5GFIPS5XWEE25DUPUDA3VD6AZIVOX2KVMZ5TOS2VMITW53RDVGOOX6BWJGKHPTZTCAWE2MQ2UICOHYG4B5C5P2LBT5ET7PP3YLCWYJWJHPKXQFKVPOVSJIETOUQF5WGIZJRXY5C3OUUZILFEUW3TSLHARNHFQMMETAQJXUKHRQYYGTDTVKKUXD2N54PIKDEP4GOEV6RXCUFCKRTYHM2RJYEEIXU7WGBI2TGJDJX647LTYRU7EVXSYCLK5HWLOZHY2OHE23MZW32NILGN4352UBFYKUGJ7QJEADMRHN2YAFR32UNRXRDYJGVDLWV4EECIP5EVDOOPJOJLUQXRC762AFBSRERT3CJASED5DEVZ2T5MMLB7WVAN33CWQXIDMS5NXFLDW6Q6OE3KMMB65NAV3ERYWQPUPFH4Z5JOVRSO6JJTAG2AB6W7M5BP6AHB4YTDTT4BR7KXOKLVKLQCFQAOJEQC4O63PJQ4FDKD6N5Q3ZBM3EXTRU7X2QMVBEWLAUTCJQQPVWRHUR2MDKS2MDR7UCB6MAFYRTYQJHW54KKITHSOVABJ77CKQH4SKEZ6PMBVPWGHFF5XJHUIJZLFDOFQZB3Z4MLJMYIZF6FDRBRONATKULMGS2QZRUQSGY5PPIJHIPDDCHX5FLT6ZAAYVPCSMV5O7FUHOCB6DSYIDF53QJ4XCS5YJ5FV4UVMYH22X4EW5S6WLGJ5CWDEEZSSHJWFLH4BKIGF7QE3MMPWCEZJH5UO3JBBWXFY47XKHMDQPRC4TGDP5HKHVPNCGWGZ6MPIM3WTKT3NZMEKBJ6JT44O23ZNNER5UUQHOJ5E4NFYHOXF6OMJAGPEJQZU6LG52NZPZID4YNOWOV7LXCURXF5PYQSBSUUWYDQP6W
5U3XI6VYHF3BPLISZBLRWPFMFVI5DMMTQT3IMBAVVKBTJB2C65AAO2ZRXREBCPIIQA4VKX7QYWDN3U6EOQ3JFFI3O54EOOFA7K5HBQQ5ZS5ALQG74UKU6AN7NFZPYEKG7OJKI6EW5AUIDGHEW5UNEMKC4XSWNK6TJKCQNBZ4BKMXP66AH7GVB3OLLDFY5JSU4VBD3PVCX6GVT5SB62T4NRCV6IPJOWCB6G6C6KBFQ4R5B42N6N2VHM5ANEYTUKRSYG5EUL2A2EU5LZ2SYGDUHZBTV7ZMLOYLPLC35X4JKNRHNJCQK4PPY4ILIHORYU2OVJG3YFGT3Z2LQ3ATI2FJMAHEIIPYUSH2QOKSMZHWSFH25XLK4XRINN6FRJ2AMTEDZVQHKJY72WXWTTU5CDX65UAJRBBVVB23D7EJYZDX6W5EUT6UID63M7FZJNAMY55FTWLY23UXTB3TWFZTTGCCUHNOGDNGZNI7ZL4VHIRQJGWAVA2KGVDQIF2T35Y7HEH4X6URMORIAGUM57GVS2J3MNBOA33SRA5BFONIBSTDZYYLTHNUVZVW6WNDY45PQKMHO7JCXZIB55A2P2AJBU25NBNKLQ";
byte[] decoded = new Base32().decode(b32);
Cid cid = Cid.cast(decoded);
System.out.println();
// Use this method to test access to a bucket
S3Config config = S3Config.build(Args.parse(args));
System.out.println("Testing S3 bucket: " + config.bucket + " in region " + config.region + " with base dir: " + config.path);
Multihash id = new Multihash(Multihash.Type.sha2_256, RAMStorage.hash("S3Storage".getBytes()));
TransactionStore transactions = JdbcTransactionStore.build(Sqlite.build(":memory:"), new SqliteCommands());
S3BlockStorage s3 = new S3BlockStorage(config, id, transactions);
System.out.println("***** Testing ls and read");
System.out.println("Testing ls...");
List<Multihash> files = s3.getFiles(1000);
System.out.println("Success! found " + files.size());
System.out.println("Testing read...");
byte[] data = s3.getRaw(files.get(0)).join().get();
System.out.println("Success: read blob of size " + data.length);
System.out.println("Testing write...");
byte[] uploadData = new byte[10 * 1024];
new Random().nextBytes(uploadData);
PublicKeyHash owner = PublicKeyHash.NULL;
TransactionId tid = s3.startTransaction(owner).join();
Multihash put = s3.put(uploadData, true, tid, owner);
System.out.println("Success!");
System.out.println("Testing delete...");
s3.delete(put);
System.out.println("Success!");
}
/** Human-readable identifier: bucket and folder prefix of this block store. */
@Override
public String toString() {
    return String.format("S3BlockStore[%s:%s]", bucket, folder);
}
} |
package org.bouncycastle.crypto.tls;
import java.io.IOException;
/**
 * DTLS record layer over an unreliable {@link DatagramTransport}.
 * Handles record framing, epoch/cipher transitions, replay detection,
 * peer-version discovery, alert handling, and servicing retransmits of
 * the final handshake flight for a bounded period after the handshake.
 */
class DTLSRecordLayer implements DatagramTransport {
    // DTLS record header: type(1) + version(2) + epoch(2) + sequence_number(6) + length(2) bytes
    private static final int RECORD_HEADER_LENGTH = 13;
    // Maximum plaintext fragment length permitted by TLS (2^14 bytes)
    private static final int MAX_FRAGMENT_LENGTH = 1 << 14;
    // TCP maximum segment lifetime: 2 minutes, in milliseconds
    private static final long TCP_MSL = 1000L * 60 * 2;
    // Keep servicing retransmits of our final flight for 2 * MSL after handshake completion
    private static final long RETRANSMIT_TIMEOUT = TCP_MSL * 2;
    private final DatagramTransport transport;
    private final TlsContext context;
    private final TlsPeer peer;
    // Buffers the tail of a datagram that carried more than one record
    private final ByteQueue recordQueue = new ByteQueue();
    private volatile boolean closed = false;
    private volatile boolean failed = false;
    // Protocol version seen in the first successfully authenticated record from the peer
    private volatile ProtocolVersion discoveredPeerVersion = null;
    private volatile boolean inHandshake;
    private DTLSEpoch currentEpoch, pendingEpoch;
    private DTLSEpoch readEpoch, writeEpoch;
    // Non-null while the peer may still need our final handshake flight re-sent
    private DTLSHandshakeRetransmit retransmit = null;
    private DTLSEpoch retransmitEpoch = null;
    private long retransmitExpiry = 0;

    /**
     * @param transport   the underlying unreliable datagram transport
     * @param context     the TLS context for this connection
     * @param peer        callback target for alerts raised and received
     * @param contentType NOTE(review): parameter appears unused — confirm whether it can be dropped
     */
    DTLSRecordLayer(DatagramTransport transport, TlsContext context, TlsPeer peer, short contentType) {
        this.transport = transport;
        this.context = context;
        this.peer = peer;
        this.inHandshake = true;
        // Epoch 0 is unencrypted (null cipher) until the handshake negotiates real keys
        this.currentEpoch = new DTLSEpoch(0, new TlsNullCipher(context));
        this.pendingEpoch = null;
        this.readEpoch = currentEpoch;
        this.writeEpoch = currentEpoch;
    }

    /** Returns the peer's protocol version, or null if no authenticated record has arrived yet. */
    ProtocolVersion getDiscoveredPeerVersion() {
        return discoveredPeerVersion;
    }

    /** Installs the negotiated cipher as the pending epoch (activated on change_cipher_spec). */
    void initPendingEpoch(TlsCipher pendingCipher) {
        if (pendingEpoch != null) {
            throw new IllegalStateException();
        }
        /*
         * TODO "In order to ensure that any given sequence/epoch pair is unique, implementations
         * MUST NOT allow the same epoch value to be reused within two times the TCP maximum segment
         * lifetime."
         */
        // TODO Check for overflow
        this.pendingEpoch = new DTLSEpoch(writeEpoch.getEpoch() + 1, pendingCipher);
    }

    /**
     * Marks the handshake complete: promotes the pending epoch to current and,
     * if a retransmit handler is supplied, keeps the old epoch around so the
     * final flight can be re-sent until {@code RETRANSMIT_TIMEOUT} elapses.
     */
    void handshakeSuccessful(DTLSHandshakeRetransmit retransmit) {
        if (readEpoch == currentEpoch || writeEpoch == currentEpoch) {
            // TODO
            throw new IllegalStateException();
        }
        if (retransmit != null) {
            this.retransmit = retransmit;
            this.retransmitEpoch = currentEpoch;
            this.retransmitExpiry = System.currentTimeMillis() + RETRANSMIT_TIMEOUT;
        }
        this.inHandshake = false;
        this.currentEpoch = pendingEpoch;
        this.pendingEpoch = null;
    }

    /** Re-points the write epoch at the retransmit epoch if retransmission is still live. */
    void resetWriteEpoch() {
        if (retransmitEpoch != null) {
            this.writeEpoch = retransmitEpoch;
        } else {
            this.writeEpoch = currentEpoch;
        }
    }

    /** Largest plaintext we can receive: transport limit minus header, after cipher expansion. */
    public int getReceiveLimit() throws IOException {
        return Math.min(MAX_FRAGMENT_LENGTH,
                readEpoch.getCipher().getPlaintextLimit(transport.getReceiveLimit() - RECORD_HEADER_LENGTH));
    }

    /** Largest plaintext we can send: transport limit minus header, after cipher expansion. */
    public int getSendLimit() throws IOException {
        return Math.min(MAX_FRAGMENT_LENGTH,
                writeEpoch.getCipher().getPlaintextLimit(transport.getSendLimit() - RECORD_HEADER_LENGTH));
    }

    /**
     * Receives the next application-data plaintext into {@code buf}, silently
     * consuming (or dropping) alert/change_cipher_spec/handshake records and any
     * record that fails length, epoch, replay, version, or MAC checks.
     *
     * @return number of plaintext bytes written to {@code buf}, or a negative
     *         value propagated from the underlying transport
     */
    public int receive(byte[] buf, int off, int len, int waitMillis) throws IOException {
        byte[] record = null;
        for (;;) {
            int receiveLimit = Math.min(len, getReceiveLimit()) + RECORD_HEADER_LENGTH;
            if (record == null || record.length < receiveLimit) {
                record = new byte[receiveLimit];
            }
            try {
                // Expire the final-flight retransmit state once 2*MSL has passed
                if (retransmit != null && System.currentTimeMillis() > retransmitExpiry) {
                    retransmit = null;
                    retransmitEpoch = null;
                }
                int received = receiveRecord(record, 0, receiveLimit, waitMillis);
                if (received < 0) {
                    return received;
                }
                // Datagrams shorter than a record header are silently dropped
                if (received < RECORD_HEADER_LENGTH) {
                    continue;
                }
                // Length field lives at offset 11; drop records whose size doesn't match
                int length = TlsUtils.readUint16(record, 11);
                if (received != (length + RECORD_HEADER_LENGTH)) {
                    continue;
                }
                short type = TlsUtils.readUint8(record, 0);
                // TODO Support user-specified custom protocols?
                switch (type) {
                case ContentType.alert:
                case ContentType.application_data:
                case ContentType.change_cipher_spec:
                case ContentType.handshake:
                    break;
                default:
                    // TODO Exception?
                    continue;
                }
                // Epoch field at offset 3; only the read epoch (or the retransmit
                // epoch, for handshake records) is acceptable
                int epoch = TlsUtils.readUint16(record, 3);
                DTLSEpoch recordEpoch = null;
                if (epoch == readEpoch.getEpoch()) {
                    recordEpoch = readEpoch;
                } else if (type == ContentType.handshake && retransmitEpoch != null
                        && epoch == retransmitEpoch.getEpoch()) {
                    recordEpoch = retransmitEpoch;
                }
                if (recordEpoch == null)
                    continue;
                // Sequence number at offset 5 (48 bits); reject replays before decrypting
                long seq = TlsUtils.readUint48(record, 5);
                if (recordEpoch.getReplayWindow().shouldDiscard(seq))
                    continue;
                ProtocolVersion version = TlsUtils.readVersion(record, 1);
                if (discoveredPeerVersion != null && !discoveredPeerVersion.equals(version))
                    continue;
                byte[] plaintext = recordEpoch.getCipher().decodeCiphertext(
                        getMacSequenceNumber(recordEpoch.getEpoch(), seq), type, record, RECORD_HEADER_LENGTH,
                        received - RECORD_HEADER_LENGTH);
                // Only authenticated records advance the replay window
                recordEpoch.getReplayWindow().reportAuthenticated(seq);
                if (discoveredPeerVersion == null) {
                    discoveredPeerVersion = version;
                }
                switch (type) {
                case ContentType.alert: {
                    if (plaintext.length == 2) {
                        short alertLevel = plaintext[0];
                        short alertDescription = plaintext[1];
                        peer.notifyAlertReceived(alertLevel, alertDescription);
                        if (alertLevel == AlertLevel.fatal) {
                            fail(alertDescription);
                            throw new TlsFatalAlert(alertDescription);
                        }
                        // TODO Can close_notify be a fatal alert?
                        if (alertDescription == AlertDescription.close_notify) {
                            closeTransport();
                        }
                    } else {
                        // TODO What exception?
                    }
                    continue;
                }
                case ContentType.application_data: {
                    if (inHandshake) {
                        // TODO Consider buffering application data for new epoch that arrives
                        // out-of-order with the Finished message
                        continue;
                    }
                    break;
                }
                case ContentType.change_cipher_spec: {
                    // Implicitly receive change_cipher_spec and change to pending cipher state
                    if (plaintext.length != 1 || plaintext[0] != 1) {
                        continue;
                    }
                    if (pendingEpoch != null) {
                        readEpoch = pendingEpoch;
                    }
                    continue;
                }
                case ContentType.handshake: {
                    if (!inHandshake) {
                        if (retransmit != null) {
                            retransmit.receivedHandshakeRecord(epoch, plaintext, 0, plaintext.length);
                        }
                        // TODO Consider support for HelloRequest
                        continue;
                    }
                }
                }
                /*
                 * NOTE: Receiving any non-handshake data in the new epoch implies the peer has
                 * received our final flight, so retransmission is no longer needed.
                 */
                if (!inHandshake && retransmit != null) {
                    this.retransmit = null;
                    this.retransmitEpoch = null;
                }
                System.arraycopy(plaintext, 0, buf, off, plaintext.length);
                return plaintext.length;
            } catch (IOException e) {
                // NOTE: Assume this is a timeout for the moment
                throw e;
            }
        }
    }

    /**
     * Sends {@code len} bytes from {@code buf} as one record. During the
     * handshake (or while retransmitting), records go out as handshake content,
     * and a Finished message implicitly emits change_cipher_spec first and
     * switches to the next write epoch.
     */
    public void send(byte[] buf, int off, int len) throws IOException {
        short contentType = ContentType.application_data;
        if (this.inHandshake || this.writeEpoch == this.retransmitEpoch) {
            contentType = ContentType.handshake;
            short handshakeType = TlsUtils.readUint8(buf, off);
            if (handshakeType == HandshakeType.finished) {
                DTLSEpoch nextEpoch = null;
                if (this.inHandshake) {
                    nextEpoch = pendingEpoch;
                }
                else if (this.writeEpoch == this.retransmitEpoch) {
                    nextEpoch = currentEpoch;
                }
                if (nextEpoch == null) {
                    // TODO
                    throw new IllegalStateException();
                }
                // Implicitly send change_cipher_spec and change to pending cipher state
                // TODO Send change_cipher_spec and finished records in single datagram?
                byte[] data = new byte[] { 1 };
                sendRecord(ContentType.change_cipher_spec, data, 0, data.length);
                writeEpoch = nextEpoch;
            }
        }
        sendRecord(contentType, buf, off, len);
    }

    /** Closes the layer, warning the peer with user_canceled if still mid-handshake. */
    public void close() throws IOException {
        if (!closed) {
            if (inHandshake) {
                warn(AlertDescription.user_canceled, "User canceled handshake");
            }
            closeTransport();
        }
    }

    /** Sends a fatal alert (best effort) and shuts the transport down. */
    void fail(short alertDescription) {
        if (!closed) {
            try {
                raiseAlert(AlertLevel.fatal, alertDescription, null, null);
            } catch (Exception e) {
                // Ignore
            }
            failed = true;
            closeTransport();
        }
    }

    /** Sends a warning-level alert without closing the connection. */
    void warn(short alertDescription, String message) throws IOException {
        raiseAlert(AlertLevel.warning, alertDescription, message, null);
    }

    private void closeTransport() {
        if (!closed) {
            /*
             * RFC 5246 7.2.1. Unless some other fatal alert has been transmitted, each party is
             * required to send a close_notify alert before closing the write side of the
             * connection. The other party MUST respond with a close_notify alert of its own and
             * close down the connection immediately, discarding any pending writes.
             */
            try {
                if (!failed) {
                    warn(AlertDescription.close_notify, null);
                }
                transport.close();
            } catch (Exception e) {
                // Ignore
            }
            closed = true;
        }
    }

    /** Notifies the peer callback, then sends a 2-byte alert record (level, description). */
    private void raiseAlert(short alertLevel, short alertDescription, String message, Exception cause)
            throws IOException {
        peer.notifyAlertRaised(alertLevel, alertDescription, message, cause);
        byte[] error = new byte[2];
        error[0] = (byte) alertLevel;
        error[1] = (byte) alertDescription;
        sendRecord(ContentType.alert, error, 0, 2);
    }

    /**
     * Reads one record: first drains any record buffered from an earlier
     * multi-record datagram, otherwise reads from the transport and queues
     * any surplus bytes beyond the first record for the next call.
     */
    private int receiveRecord(byte[] buf, int off, int len, int waitMillis) throws IOException {
        if (recordQueue.size() > 0) {
            int length = 0;
            if (recordQueue.size() >= RECORD_HEADER_LENGTH) {
                // Peek the 16-bit length field at header offset 11
                byte[] lengthBytes = new byte[2];
                recordQueue.read(lengthBytes, 0, 2, 11);
                length = TlsUtils.readUint16(lengthBytes, 0);
            }
            int received = Math.min(recordQueue.size(), RECORD_HEADER_LENGTH + length);
            recordQueue.read(buf, off, received, 0);
            recordQueue.removeData(received);
            return received;
        }
        int received = transport.receive(buf, off, len, waitMillis);
        if (received >= RECORD_HEADER_LENGTH) {
            int fragmentLength = TlsUtils.readUint16(buf, off + 11);
            int recordLength = RECORD_HEADER_LENGTH + fragmentLength;
            // A datagram may hold several records; keep the rest for later calls
            if (received > recordLength) {
                recordQueue.addData(buf, off + recordLength, received - recordLength);
                received = recordLength;
            }
        }
        return received;
    }

    /** Encrypts and frames one record in the write epoch and sends it on the transport. */
    private void sendRecord(short contentType, byte[] buf, int off, int len) throws IOException {
        /*
         * RFC 5246 6.2.1 Implementations MUST NOT send zero-length fragments of Handshake, Alert,
         * or ChangeCipherSpec content types.
         */
        if (len < 1 && contentType != ContentType.application_data) {
            throw new TlsFatalAlert(AlertDescription.internal_error);
        }
        int recordEpoch = writeEpoch.getEpoch();
        long recordSequenceNumber = writeEpoch.allocateSequenceNumber();
        byte[] ciphertext = writeEpoch.getCipher().encodePlaintext(
                getMacSequenceNumber(recordEpoch, recordSequenceNumber), contentType, buf, off, len);
        if (ciphertext.length > MAX_FRAGMENT_LENGTH) {
            throw new TlsFatalAlert(AlertDescription.internal_error);
        }
        // Assemble header fields at their fixed offsets, then the ciphertext
        byte[] record = new byte[ciphertext.length + RECORD_HEADER_LENGTH];
        TlsUtils.writeUint8(contentType, record, 0);
        // Before version discovery, fall back to the client's proposed version
        ProtocolVersion version = discoveredPeerVersion != null ? discoveredPeerVersion : context.getClientVersion();
        TlsUtils.writeVersion(version, record, 1);
        TlsUtils.writeUint16(recordEpoch, record, 3);
        TlsUtils.writeUint48(recordSequenceNumber, record, 5);
        TlsUtils.writeUint16(ciphertext.length, record, 11);
        System.arraycopy(ciphertext, 0, record, RECORD_HEADER_LENGTH, ciphertext.length);
        transport.send(record, 0, record.length);
    }

    /** Combines epoch (top 16 bits) and sequence number (low 48 bits) into the MAC sequence. */
    private static long getMacSequenceNumber(int epoch, long sequence_number) {
        return ((long) epoch << 48) | sequence_number;
    }
}
package peergos.server.storage;
import com.amazonaws.*;
import com.amazonaws.auth.*;
import com.amazonaws.client.builder.*;
import com.amazonaws.services.s3.*;
import com.amazonaws.services.s3.model.*;
import peergos.server.corenode.*;
import peergos.server.sql.*;
import peergos.server.util.*;
import peergos.shared.*;
import peergos.shared.cbor.*;
import peergos.shared.crypto.asymmetric.*;
import peergos.shared.crypto.hash.*;
import peergos.shared.io.ipfs.cid.*;
import peergos.shared.io.ipfs.multibase.binary.*;
import peergos.shared.mutable.*;
import peergos.shared.storage.*;
import peergos.shared.io.ipfs.multihash.*;
import io.prometheus.client.Histogram;
import peergos.shared.util.*;
import java.io.*;
import java.nio.file.*;
import java.sql.*;
import java.time.*;
import java.util.*;
import java.util.Base64;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.*;
public class S3BlockStorage implements ContentAddressedStorage {
private static final Logger LOG = Logger.getGlobal();
// Prometheus histogram of block-read latencies, exponential buckets from 10ms, doubling, 16 buckets
private static final Histogram readTimerLog = Histogram.build()
        .labelNames("filesize")
        .name("block_read_seconds")
        .help("Time to read a block from immutable storage")
        .exponentialBuckets(0.01, 2, 16)
        .register();
// Prometheus histogram of block-write latencies, same bucket scheme as reads
private static final Histogram writeTimerLog = Histogram.build()
        .labelNames("filesize")
        .name("s3_block_write_seconds")
        .help("Time to write a block to immutable storage")
        .exponentialBuckets(0.01, 2, 16)
        .register();
// Identity of this storage node, fixed at construction
private final Multihash id;
private final AmazonS3 s3Client;
// All keys are stored under 'folder' (empty, or ending in "/") within 'bucket'
private final String bucket, folder;
// Tracks blocks written inside open transactions so GC won't reclaim them mid-write
private final TransactionStore transactions;
/**
 * Creates a block store backed by the configured S3 bucket.
 *
 * @param config       bucket name, region/endpoint, credentials and base path
 * @param id           the identity of this storage node
 * @param transactions store used to protect in-flight blocks from GC
 */
public S3BlockStorage(S3Config config, Multihash id, TransactionStore transactions) {
    this.id = id;
    this.bucket = config.bucket;
    // Normalise the base path so it is either empty or ends with a single '/'
    String path = config.path;
    this.folder = (path.isEmpty() || path.endsWith("/")) ? path : path + "/";
    BasicAWSCredentials credentials = new BasicAWSCredentials(config.accessKey, config.secretKey);
    this.s3Client = AmazonS3ClientBuilder.standard()
            .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(config.regionEndpoint, config.region))
            .withCredentials(new AWSStaticCredentialsProvider(credentials))
            .build();
    LOG.info("Using S3 Block Storage at " + config.regionEndpoint + ", bucket " + config.bucket + ", path: " + config.path);
    this.transactions = transactions;
}
/**
 * Encodes a hash as an S3 key compatible with IPFS naming:
 * the cid bytes as uppercase base32 with any '=' padding stripped.
 */
private static String hashToKey(Multihash hash) {
    String encoded = new Base32().encodeAsString(hash.toBytes());
    int padIndex = encoded.indexOf("=");
    if (padIndex > 0)
        return encoded.substring(0, padIndex);
    return encoded;
}
/** Inverse of {@link #hashToKey}: strips the folder prefix and decodes the base32 cid bytes. */
private Multihash keyToHash(String key) {
    // To be compatible with IPFS we use the same scheme here, the cid bytes encoded as uppercase base32
    byte[] decoded = new Base32().decode(key.substring(folder.length()));
    return Cid.cast(decoded);
}
/**
 * Fetches and parses a cbor block. Raw cids must be read via getRaw instead.
 *
 * @throws IllegalStateException if the cid's codec is Raw
 */
@Override
public CompletableFuture<Optional<CborObject>> get(Multihash hash) {
    boolean isRawCid = hash instanceof Cid && ((Cid) hash).codec == Cid.Codec.Raw;
    if (isRawCid)
        throw new IllegalStateException("Need to call getRaw if cid is not cbor!");
    return getRaw(hash).thenApply(bytesOpt -> bytesOpt.map(CborObject::fromByteArray));
}
/**
 * Fetches the raw bytes of the block addressed by {@code hash}.
 * Returns Optional.empty() when the key is absent from the bucket.
 * Read latency is recorded under the "read" histogram label.
 */
@Override
public CompletableFuture<Optional<byte[]>> getRaw(Multihash hash) {
    return Futures.of(map(hash, h -> {
        // Use the lambda parameter consistently (previously closed over the outer 'hash')
        GetObjectRequest get = new GetObjectRequest(bucket, folder + hashToKey(h));
        Histogram.Timer readTimer = readTimerLog.labels("read").startTimer();
        try (S3Object res = s3Client.getObject(get);
             DataInputStream din = new DataInputStream(new BufferedInputStream(res.getObjectContent()))) {
            return Optional.of(Serialize.readFully(din));
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        } finally {
            readTimer.observeDuration();
        }
    }, e -> Optional.empty()));
}
/** Pinning is implicit in this store: report the updated root as the only pin change. */
@Override
public CompletableFuture<List<Multihash>> pinUpdate(PublicKeyHash owner, Multihash existing, Multihash updated) {
    List<Multihash> changed = Collections.singletonList(updated);
    return Futures.of(changed);
}
/** Pinning is a no-op here: every stored block persists until GC, so just echo the root. */
@Override
public CompletableFuture<List<Multihash>> recursivePin(PublicKeyHash owner, Multihash hash) {
    List<Multihash> pinned = Collections.singletonList(hash);
    return Futures.of(pinned);
}
/** Unpinning is a no-op here (blocks are reclaimed by offline GC); just echo the root. */
@Override
public CompletableFuture<List<Multihash>> recursiveUnpin(PublicKeyHash owner, Multihash hash) {
    List<Multihash> unpinned = Collections.singletonList(hash);
    return Futures.of(unpinned);
}
/** Online GC is not supported; use the offline {@code collectGarbage} path instead. */
@Override
public CompletableFuture<Boolean> gc() {
    IllegalStateException unsupported = new IllegalStateException("S3 doesn't implement GC!");
    return Futures.errored(unsupported);
}
/** The result of this method is a snapshot of the mutable pointers that is consistent with the blocks store
 * after GC has completed (saved to a file which can be independently backed up).
 *
 * @param pointers the mutable pointer store from which reachability is computed
 * @throws IOException if the pointer snapshot file cannot be written
 */
private void collectGarbage(JdbcIpnsAndSocial pointers) throws IOException {
    // TODO: do this more efficiently with a bloom filter, and streaming
    List<Multihash> present = getFiles(Integer.MAX_VALUE);
    List<Multihash> pending = transactions.getOpenTransactionBlocks();
    // This pointers call must happen AFTER the previous two for correctness
    Map<PublicKeyHash, byte[]> allPointers = pointers.getAllEntries();
    BitSet reachable = new BitSet(present.size());
    for (PublicKeyHash writerHash : allPointers.keySet()) {
        byte[] signedRawCas = allPointers.get(writerHash);
        PublicSigningKey writer = getSigningKey(writerHash).join().get();
        byte[] bothHashes = writer.unsignMessage(signedRawCas);
        HashCasPair cas = HashCasPair.fromCbor(CborObject.fromByteArray(bothHashes));
        MaybeMultihash updated = cas.updated;
        if (updated.isPresent())
            markReachable(updated.get(), present, reachable);
    }
    // Blocks in open transactions aren't referenced by any pointer yet, but must survive
    for (Multihash additional : pending) {
        int index = present.indexOf(additional);
        if (index >= 0)
            reachable.set(index);
    }
    // Save pointers snapshot to file
    Path pointerSnapshotFile = Paths.get("pointers-snapshot-" + LocalDateTime.now() + ".txt");
    for (Map.Entry<PublicKeyHash, byte[]> entry : allPointers.entrySet()) {
        // CREATE is required: APPEND alone throws NoSuchFileException for the brand-new snapshot file
        Files.write(pointerSnapshotFile, (entry.getKey() + ":" +
                ArrayOps.bytesToHex(entry.getValue()) + "\n").getBytes(),
                StandardOpenOption.CREATE, StandardOpenOption.APPEND);
    }
    long deletedBlocks = 0;
    long deletedSize = 0;
    // Delete everything not marked reachable, totting up what we freed
    for (int i=0; i < present.size(); i++)
        if (! reachable.get(i)) {
            Multihash hash = present.get(i);
            int size = getSize(hash).join().get();
            deletedBlocks++;
            deletedSize += size;
            delete(hash);
        }
    System.out.println("GC complete. Freed " + deletedBlocks + " blocks totalling " + deletedSize + " bytes");
}
/**
 * Marks {@code root} and everything transitively linked from it as reachable.
 * Bits in {@code reachable} correspond to indices in {@code present}.
 * Already-marked nodes are skipped: their subtree was fully traversed when the
 * bit was set (pointer traversal precedes the pending-block marking in
 * collectGarbage), so this avoids re-walking shared subtrees — which was
 * exponential on DAGs — and terminates even if a cycle were encountered.
 * NOTE: still recursive, so an extremely deep DAG could exhaust the stack.
 */
private void markReachable(Multihash root, List<Multihash> present, BitSet reachable) {
    int index = present.indexOf(root);
    if (index >= 0) {
        if (reachable.get(index))
            return;
        reachable.set(index);
    }
    List<Multihash> links = getLinks(root).join();
    for (Multihash link : links) {
        markReachable(link, present, reachable);
    }
}
/**
 * Returns the stored size in bytes of the block for {@code hash},
 * or Optional.empty() if it is absent. Identity hashes report size 0
 * since their content is embedded in the hash itself.
 */
@Override
public CompletableFuture<Optional<Integer>> getSize(Multihash hash) {
    return Futures.of(map(hash, h -> {
        if (hash.isIdentity()) // Identity hashes are not actually stored explicitly
            return Optional.of(0);
        Histogram.Timer sizeTimer = readTimerLog.labels("size").startTimer();
        try {
            ObjectMetadata metadata = s3Client.getObjectMetadata(bucket, folder + hashToKey(hash));
            int size = (int) metadata.getContentLength();
            return Optional.of(size);
        } finally {
            sizeTimer.observeDuration();
        }
    }, e -> Optional.empty()));
}
/** Returns true if a block for {@code key} exists in the bucket; S3 errors map to false. */
public boolean contains(Multihash key) {
    return map(key, h -> {
        String objectKey = folder + hashToKey(h);
        return s3Client.doesObjectExist(bucket, objectKey);
    }, e -> false);
}
/**
 * Runs {@code success} with {@code hash}, translating S3 failures:
 * a missing key ("NoSuchKey") is routed through {@code absent}; any other
 * AWS failure is logged and rethrown wrapped in a RuntimeException.
 *
 * @param success the S3 operation to perform
 * @param absent  fallback applied when the key does not exist
 */
public <T> T map(Multihash hash, Function<Multihash, T> success, Function<Throwable, T> absent) {
    try {
        return success.apply(hash);
    } catch (AmazonServiceException e) {
        // The request reached S3 but was rejected with an error response
        if ("NoSuchKey".equals(e.getErrorCode())) {
            // Record the miss in the read histogram (near-zero duration)
            Histogram.Timer readTimer = readTimerLog.labels("absent").startTimer();
            readTimer.observeDuration();
            return absent.apply(e);
        }
        LOG.warning("AmazonServiceException: " + e.getMessage());
        LOG.warning("AWS Error Code: " + e.getErrorCode());
        throw new RuntimeException(e.getMessage(), e);
    } catch (AmazonClientException e) {
        // The client could not communicate with S3 (e.g. no network)
        LOG.severe("AmazonClientException: " + e.getMessage());
        // getCause() may be null; guard so we don't replace the real error with an NPE
        LOG.severe("Thrown at:" + (e.getCause() == null ? "unknown" : e.getCause().toString()));
        throw new RuntimeException(e.getMessage(), e);
    }
}
/** The fixed identity of this storage node, supplied at construction. */
@Override
public CompletableFuture<Multihash> id() {
    return Futures.of(this.id);
}
/** Opens a write transaction for {@code owner}; blocks added under it are GC-protected. */
@Override
public CompletableFuture<TransactionId> startTransaction(PublicKeyHash owner) {
    TransactionId tid = transactions.startTransaction(owner);
    return CompletableFuture.completedFuture(tid);
}
/** Closes an open write transaction, releasing its GC protection. Always completes true. */
@Override
public CompletableFuture<Boolean> closeTransaction(PublicKeyHash owner, TransactionId tid) {
    transactions.closeTransaction(owner, tid);
    return CompletableFuture.completedFuture(Boolean.TRUE);
}
/** Stores a batch of dag-cbor blocks inside transaction {@code tid}; returns their hashes. */
@Override
public CompletableFuture<List<Multihash>> put(PublicKeyHash owner,
                                              PublicKeyHash writer,
                                              List<byte[]> signatures,
                                              List<byte[]> blocks,
                                              TransactionId tid) {
    boolean isRaw = false;
    return put(owner, writer, signatures, blocks, isRaw, tid);
}
/** Stores a batch of raw (non-cbor) blocks inside transaction {@code tid}; returns their hashes. */
@Override
public CompletableFuture<List<Multihash>> putRaw(PublicKeyHash owner,
                                                 PublicKeyHash writer,
                                                 List<byte[]> signatures,
                                                 List<byte[]> blocks,
                                                 TransactionId tid) {
    boolean isRaw = true;
    return put(owner, writer, signatures, blocks, isRaw, tid);
}
/** Uploads each block in order, recording them in {@code tid}, and returns their cids. */
private CompletableFuture<List<Multihash>> put(PublicKeyHash owner,
                                               PublicKeyHash writer,
                                               List<byte[]> signatures,
                                               List<byte[]> blocks,
                                               boolean isRaw,
                                               TransactionId tid) {
    List<Multihash> stored = new ArrayList<>(blocks.size());
    for (byte[] block : blocks)
        stored.add(put(block, isRaw, tid, owner));
    return CompletableFuture.completedFuture(stored);
}
/** Must be atomic relative to reads of the same key
 *
 * @param data  the serialised block to store
 * @param isRaw whether the block is raw bytes (true) or dag-cbor (false)
 * @param tid   the open transaction protecting this block from GC
 * @param owner the key owning this block
 * @return the cid the block was stored under
 */
public Multihash put(byte[] data, boolean isRaw, TransactionId tid, PublicKeyHash owner) {
    Histogram.Timer writeTimer = writeTimerLog.labels("write").startTimer();
    try {
        Multihash hash = new Multihash(Multihash.Type.sha2_256, Hash.sha256(data));
        Cid cid = new Cid(1, isRaw ? Cid.Codec.Raw : Cid.Codec.DagCbor, hash.type, hash.getHash());
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(data.length);
        PutObjectRequest put = new PutObjectRequest(bucket, folder + hashToKey(cid), new ByteArrayInputStream(data), metadata);
        // Record the block in the open transaction BEFORE uploading, so a concurrent GC can't reclaim it
        transactions.addBlock(cid, tid, owner);
        s3Client.putObject(put); // result was never used; dropped the dead local
        return cid;
    } catch (AmazonServiceException e) {
        // The request reached S3 but was rejected with an error response
        LOG.severe("AmazonServiceException: " + e.getMessage());
        LOG.severe("AWS Error Code: " + e.getErrorCode());
        throw new RuntimeException(e.getMessage(), e);
    } catch (AmazonClientException e) {
        // The client could not communicate with S3 (e.g. no network)
        LOG.severe("AmazonClientException: " + e.getMessage());
        // getCause() may be null; guard so we don't mask the real failure with an NPE
        LOG.severe("Thrown at:" + (e.getCause() == null ? "unknown" : e.getCause().toString()));
        throw new RuntimeException(e.getMessage(), e);
    } finally {
        writeTimer.observeDuration();
    }
}
/** Returns the hashes of up to {@code maxReturned} blocks stored under this store's folder. */
private List<Multihash> getFiles(long maxReturned) {
    List<Multihash> hashes = new ArrayList<>();
    applyToAll(summary -> hashes.add(keyToHash(summary.getKey())), maxReturned);
    return hashes;
}
/** Lists the raw S3 object keys under this store's folder, up to {@code maxReturned} of them. */
private List<String> getFilenames(long maxReturned) {
    List<String> keys = new ArrayList<>();
    applyToAll(summary -> keys.add(summary.getKey()), maxReturned);
    return keys;
}
/**
 * Applies {@code processor} to every object stored under {@code folder},
 * paging through the bucket with ListObjectsV2 (10,000 keys per page) and
 * stopping once {@code maxObjects} objects have been processed.
 * Directory placeholder keys (ending in "/") are skipped.
 * S3 failures are logged and swallowed, preserving the best-effort contract.
 *
 * @param processor  callback invoked once per object summary
 * @param maxObjects upper bound on the number of objects processed
 */
private void applyToAll(Consumer<S3ObjectSummary> processor, long maxObjects) {
    try {
        LOG.log(Level.FINE, "Listing blobs");
        final ListObjectsV2Request req = new ListObjectsV2Request()
                .withBucketName(bucket)
                .withPrefix(folder)
                .withMaxKeys(10_000);
        ListObjectsV2Result result;
        long processedObjects = 0;
        do {
            result = s3Client.listObjectsV2(req);
            for (S3ObjectSummary objectSummary : result.getObjectSummaries()) {
                // Zero-byte "directory" placeholders are not blocks; skip them
                if (objectSummary.getKey().endsWith("/")) {
                    LOG.fine(" - " + objectSummary.getKey() + " " + "(directory)");
                    continue;
                }
                LOG.fine(" - " + objectSummary.getKey() + " " +
                        "(size = " + objectSummary.getSize() +
                        "; modified: " + objectSummary.getLastModified() + ")");
                processor.accept(objectSummary);
                processedObjects++;
                if (processedObjects >= maxObjects)
                    return;
            }
            LOG.log(Level.FINE, "Next Continuation Token : " + result.getNextContinuationToken());
            // Continue from where the previous page left off
            req.setContinuationToken(result.getNextContinuationToken());
        } while (result.isTruncated());
    } catch (AmazonServiceException ase) {
        // The request reached S3 but was rejected with an error response
        LOG.severe("AmazonServiceException: " + ase.getMessage());
        LOG.severe("AWS Error Code: " + ase.getErrorCode());
    } catch (AmazonClientException ace) {
        // The client could not communicate with S3 (e.g. no network)
        LOG.severe("AmazonClientException: " + ace.getMessage());
        // getCause() may be null; guard so we don't mask the real failure with an NPE
        LOG.severe("Thrown at:" + (ace.getCause() == null ? "unknown" : ace.getCause().toString()));
    }
}
/** Removes the stored block addressed by {@code hash} from the backing bucket. */
public void delete(Multihash hash) {
    String key = folder + hashToKey(hash);
    s3Client.deleteObject(new DeleteObjectRequest(bucket, key));
}
/**
 * Offline garbage-collection entry point. Wires up the mutable-pointer and
 * transaction databases (Postgres or Sqlite, per args), builds the S3 block
 * store, and runs a full mark-and-sweep via collectGarbage.
 */
public static void main(String[] args) throws Exception {
    System.out.println("Performing GC on block store...");
    Args a = Args.parse(args);
    S3Config config = S3Config.build(a);
    boolean usePostgres = a.getBoolean("use-postgres", false);
    SqlSupplier sqlCommands;
    Connection database;
    if (usePostgres) {
        sqlCommands = new PostgresCommands();
        String postgresHost = a.getArg("postgres.host");
        int postgresPort = a.getInt("postgres.port", 5432);
        String databaseName = a.getArg("postgres.database", "peergos");
        String postgresUsername = a.getArg("postgres.username");
        String postgresPassword = a.getArg("postgres.password");
        database = Postgres.build(postgresHost, postgresPort, databaseName, postgresUsername, postgresPassword);
    } else {
        sqlCommands = new SqliteCommands();
        database = Sqlite.build(Sqlite.getDbPath(a, "mutable-pointers-file"));
    }
    // Postgres keeps everything in one database; Sqlite uses a separate transactions file
    Connection transactionsDb = usePostgres
            ? database
            : Sqlite.build(Sqlite.getDbPath(a, "transactions-sql-file"));
    TransactionStore transactions = JdbcTransactionStore.build(transactionsDb, sqlCommands);
    S3BlockStorage store = new S3BlockStorage(config, Cid.decode(a.getArg("ipfs.id")), transactions);
    JdbcIpnsAndSocial rawPointers = new JdbcIpnsAndSocial(database, sqlCommands);
    store.collectGarbage(rawPointers);
}
/**
 * Smoke-tests access to the configured bucket: lists keys, reads the first
 * blob, writes a random 10 KiB blob, then deletes it.
 *
 * @param args command-line arguments parsed via {@code Args}
 * @throws Exception if any S3 or database operation fails
 */
public static void test(String[] args) throws Exception {
    S3Config config = S3Config.build(Args.parse(args));
    System.out.println("Testing S3 bucket: " + config.bucket + " in region " + config.region + " with base dir: " + config.path);
    Multihash id = new Multihash(Multihash.Type.sha2_256, RAMStorage.hash("S3Storage".getBytes()));
    TransactionStore transactions = JdbcTransactionStore.build(Sqlite.build(":memory:"), new SqliteCommands());
    S3BlockStorage store = new S3BlockStorage(config, id, transactions);
    System.out.println("***** Testing ls and read");
    System.out.println("Testing ls...");
    List<Multihash> files = store.getFiles(1000);
    System.out.println("Success! found " + files.size());
    System.out.println("Testing read...");
    byte[] blob = store.getRaw(files.get(0)).join().get();
    System.out.println("Success: read blob of size " + blob.length);
    System.out.println("Testing write...");
    byte[] payload = new byte[10 * 1024];
    new Random().nextBytes(payload);
    PublicKeyHash owner = PublicKeyHash.NULL;
    TransactionId tid = store.startTransaction(owner).join();
    Multihash written = store.put(payload, true, tid, owner);
    System.out.println("Success!");
    System.out.println("Testing delete...");
    store.delete(written);
    System.out.println("Success!");
}
/** @return a short diagnostic description of this store's bucket and folder */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("S3BlockStore[");
    sb.append(bucket).append(':').append(folder).append(']');
    return sb.toString();
}
} |
package org.datazup.pathextractor;
import com.github.jknack.handlebars.Handlebars;
import com.github.jknack.handlebars.Template;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.datazup.template.engine.HandlerBarRenderer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Base class for extractors that resolve dot/bracket path expressions
 * (e.g. {@code "order.items[0].name"}) against nested Map/List data.
 * If an expression contains Handlebars delimiters it is rendered as a
 * template instead of being walked as a path.
 *
 * NOTE(review): the recursive extraction below is order-sensitive (remove vs.
 * read, map vs. list resolution); comments were added without touching logic.
 */
public abstract class PathExtractorBase implements AbstractVariableSet {
    // Shared renderer used to compile and apply Handlebars templates.
    HandlerBarRenderer handlerBarRenderer = new HandlerBarRenderer();

    /** Extracts the value at {@code path} from this extractor's backing data object. */
    public abstract Object extractObjectValue(String path);

    /** Converts {@code o} into a Map view; implementation-specific — may return null. */
    public abstract Map resolveToMap(Object o);

    /** Converts {@code o} into a List view; implementation-specific — may return null. */
    public abstract List resolveToList(Object o);

    /** @return the backing data object used when no explicit map is supplied */
    public abstract Map<String,Object> getDataObject();

    /**
     * Extracts the value at {@code path} from {@code objMap}.
     * If {@code path} contains Handlebars delimiters it is compiled as a template;
     * on template IO failure the error is printed and the method falls through to
     * plain path extraction.
     */
    public Object extractObjectValue(Map<String, Object> objMap, String path) {
        if (path.contains(Handlebars.DELIM_START) && path.contains(Handlebars.DELIM_END)) {
            try {
                Object o = compileString(objMap, path);
                return o;
            } catch (IOException e) {
                // Best-effort: log and fall back to plain path extraction.
                e.printStackTrace();
            }
        }
        return extractObjectValue(objMap, path, false, false);
    }

    /**
     * Walks {@code path} through {@code objMap}, recursing on '.' segments and
     * delegating "[...]" segments to list handling.
     *
     * @param objMap       map to walk; returned unchanged when null/empty input
     * @param path         dot/bracket path (may be wrapped in '$'; normalized first)
     * @param shouldRemove when true, the matched leaf entry is removed from its container
     * @param returnRowMap when true, returns the containing map/list instead of the leaf value
     * @return the extracted value, the containing row, or null when the path does not match
     */
    public Object extractObjectValue(Map<String, Object> objMap, String path, boolean shouldRemove, boolean returnRowMap) {
        if (null == objMap || null == path || path.isEmpty()) return objMap;
        path = normalizePath(path);
        String tmp = path;
        if (path.contains(".")) {
            // Split off the first segment and recurse into the rest.
            String key = path.substring(0, path.indexOf("."));
            String rest = path.substring(path.indexOf(".") + 1, path.length());
            if (objMap.containsKey(key)) {
                Object keyObj = objMap.get(key);
                Map resolvedMap = resolveToMap(keyObj);
                Object obj = extractObjectValue(resolvedMap, rest, shouldRemove, returnRowMap);
                return obj;
            } else if (key.endsWith("]")) {
                // First segment is an indexed list access, e.g. "items[0].name".
                return handleListParenthesisExtraction(objMap, path, shouldRemove, returnRowMap);
            }
        } else {
            if (path.endsWith("]")) {
                // Single indexed segment, e.g. "items[0]" or "items[last]".
                return handleListParenthesisExtraction(objMap, path, shouldRemove, returnRowMap);
            }
            if (objMap.containsKey(tmp)) {
                if (returnRowMap) {
                    // Caller wants the containing row, not the leaf value.
                    return objMap;
                } else {
                    if (shouldRemove) {
                        return objMap.remove(tmp);
                    } else {
                        return objMap.get(tmp);
                    }
                }
            }
        }
        return null;
    }

    /**
     * Handles a path whose next segment is a bracketed list access
     * ({@code listKey[parameter].rest}). {@code parameter} may be an index,
     * "last", or empty (meaning: apply {@code rest} to every element).
     */
    private Object handleListParenthesisExtraction(Map<String, Object> objMap, String path, boolean shouldRemove, boolean returnRowMap) {
        if (path.contains("]")) {
            String listKey = path.substring(0, path.indexOf("["));
            String parameter = path.substring(path.indexOf("[") + 1, path.indexOf("]"));
            String rest = path.substring(path.indexOf("]") + 1, path.length());
            if (rest.startsWith(".")) {
                rest = rest.substring(1, rest.length());
            }
            // we need to escape '.' dot after ] (sample is: list[0].item
            if (objMap.containsKey(listKey)) {
                Object listObj = objMap.get(listKey);
                List list = resolveToList(listObj);
                Object res = handleReturnFromList(list, objMap, listKey, parameter, rest, shouldRemove, returnRowMap);
                return res;
                /*if (listObj instanceof List) {
                    List list = (List) listObj;
                    Object res = handleReturnFromList(list, objMap, listKey, parameter, rest, shouldRemove, returnRowMap);
                    return res;
                } else if (listObj instanceof JsonArray) {
                    JsonArray list = (JsonArray) listObj;
                    List lst = list.getList();
                    Object res = handleReturnFromList(lst, objMap, listKey, parameter, rest, shouldRemove, returnRowMap);
                    return res;
                }*/
            }
        }
        return null;
    }

    /**
     * Resolves a bracketed access against an already-resolved {@code list}.
     * With a valid index: recurses into the element via {@code rest} (removing
     * the element itself when {@code rest} is empty and {@code shouldRemove}).
     * Without an index: applies {@code rest} across all elements, or returns
     * the whole list.
     *
     * NOTE(review): {@code list.remove(index.intValue())} deliberately forces the
     * remove-by-position overload rather than remove-by-object.
     */
    private Object handleReturnFromList(List list, Map<String, Object> objMap, String listKey, String parameter, String rest, boolean shouldRemove, boolean returnRowMap) {
        Integer index = null;
        if (StringUtils.isNotEmpty(parameter)) {
            index = getListIndex(parameter, list.size());
            if (null != index) {
                Object itemFromList = list.get(index);
                if (StringUtils.isEmpty(rest) && shouldRemove){
                    // Leaf is the element itself: remove and return it.
                    return list.remove(index.intValue());
                }else if (null != itemFromList) {
                    // Try the element as a map first, then as a list.
                    Object obj = resolveToMap(itemFromList);
                    if (obj instanceof Map){
                        return extractObjectValue((Map) obj, rest, shouldRemove, returnRowMap);
                    }else {
                        obj = resolveToList(itemFromList);
                        if (obj instanceof List) {
                            Object o = extractFieldValues((List) obj, rest, shouldRemove, returnRowMap);
                            return o;
                        }
                    }
                    /* if (itemFromList instanceof Map) {
                        return extractObjectValue((Map) itemFromList, rest, shouldRemove, returnRowMap);
                    } else if (itemFromList instanceof JsonObject) {
                        return extractObjectValue(((JsonObject) itemFromList).getMap(), rest, shouldRemove, returnRowMap);
                    } else if (itemFromList instanceof List) {
                        Object o = extractFieldValues((List) itemFromList, rest, shouldRemove, returnRowMap);
                        return o;
                    } else if (itemFromList instanceof JsonArray) {
                        JsonArray jsonArray = (JsonArray) itemFromList;
                        Object o = extractFieldValues(jsonArray.getList(), rest, shouldRemove, returnRowMap);
                        return o;
                    }*/
                } else {
                    // Element at the index is null.
                    if (returnRowMap) {
                        return list;
                    } else {
                        return itemFromList;
                    }
                }
            }
        }
        if (StringUtils.isNotEmpty(rest)) {
            // extract list of single object based on rest value
            Object listOfObjects = extractFieldValues(list, rest, shouldRemove, returnRowMap);
            return listOfObjects;
        } else {
            if (shouldRemove) {
                if (null==index){
                    return null;
                }else{
                    Object removed = list.remove(index.intValue());
                    return removed;
                }
            } else {
                return list;
            }
        }
        // return null;
    }

    /**
     * Applies {@code rest} to every element of {@code list}, collecting matches.
     * When {@code rest} itself contains further path syntax ('.' or ']'),
     * extraction recurses per element; plain keys are looked up directly.
     *
     * @return the collected values, the original list when {@code rest} is empty,
     *         or null for an empty input list
     */
    private Object extractFieldValues(List<Object> list, String rest, boolean shouldRemove, boolean returnRowMap) {
        List<Object> listOfObjects = new ArrayList<>();
        if (StringUtils.isEmpty(rest)) {
            return list;
        }
        if (list.size()==0)
            return null;
        Iterator<Object> iter = list.iterator();
        while(iter.hasNext()){
            Object objInList = iter.next();
            Map<String,Object> map = resolveToMap(objInList);
            if (rest.contentEquals(".") || rest.contains("]")) {
                // need to extract further
                Object restObj = extractObjectValue(map, rest, shouldRemove, returnRowMap);
                if (null != restObj) {
                    Object obj = resolveToMap(restObj);
                    if (null!=obj && obj instanceof Map){
                        Map m = (Map) restObj;
                        if (m.containsKey(rest) && null != m.get(rest)) {
                            listOfObjects.add(restObj);
                        }
                    }else {
                        obj = resolveToList(restObj);
                        if (null != obj && obj instanceof List) {
                            // A nested list result short-circuits the collection loop.
                            return restObj;
                        }
                    }
                }
            } else {
                // Simple key lookup on each element.
                if (map.containsKey(rest)) {
                    Object o = map.get(rest);
                    listOfObjects.add(o);
                }
            }
        }
        return listOfObjects;
    }

    /**
     * Parses a bracket parameter into a list index: "last" maps to the final
     * element, numeric strings are parsed; anything else yields null.
     */
    private Integer getListIndex(String parameter, Integer listSize) {
        Integer index = null;
        if (parameter.equalsIgnoreCase("last")) {
            index = listSize - 1;
        } else if (NumberUtils.isNumber(parameter)) {
            index = NumberUtils.createInteger(parameter);
        }
        return index;
    }

    /**
     * Strips a single leading and trailing '$' wrapper from a path, if present.
     */
    public static String normalizePath(String path) {
        if (null==path || path.isEmpty()) return path;
        if (path.startsWith("$") && path.endsWith("$")) {
            path = path.substring(1, path.length() - 1);
        }
        return path;
    }

    /** Compiles {@code expression} against this extractor's backing data object. */
    public Object compileString(String expression) throws IOException {
        return compileString(getDataObject(), expression);
    }

    /**
     * If {@code expression} contains Handlebars delimiters it is rendered as a
     * template; otherwise it is treated as a path and, failing that, returned
     * verbatim as a literal.
     */
    public Object compileString(Map<String,Object> dataObject, String expression) throws IOException {
        if (expression.contains(Handlebars.DELIM_START) && expression.contains(Handlebars.DELIM_END)) {
            return renderTemplate(dataObject, expression);
        }else{
            Object o = extractObjectValue(dataObject, expression);
            if (null!=o)
                return o;
            // Not a resolvable path: return the expression itself.
            return expression;
        }
    }

    /** Renders {@code item} as a Handlebars template against the backing data object. */
    public String renderTemplate(String item) throws IOException {
        /* Template tmpl = handlerBarRenderer.getNext().compileInline(item);
        String alertSubjectResult = tmpl.apply(getDataObject());
        return alertSubjectResult;*/
        return renderTemplate(getDataObject(), item);
    }

    /**
     * Compiles {@code item} as an inline Handlebars template and applies it to
     * {@code dataObject}.
     *
     * @throws IOException if template compilation or application fails
     */
    public String renderTemplate(Map<String,Object> dataObject, String item) throws IOException {
        Template tmpl = handlerBarRenderer.getNext().compileInline(item);
        String alertSubjectResult = tmpl.apply(dataObject);
        return alertSubjectResult;
    }
}
package com.rcarrillocruz.android.openstackdroid;
import android.app.Application;
public class OpenstackdroidApplication extends Application {
private String token;
private String computeEndpoint;
private String volumeEndpoint;
private String identityEndpoint;
private String imageEndpoint;
@Override
public void onCreate() {
// TODO Auto-generated method stub
super.onCreate();
}
@Override
public void onTerminate() {
// TODO Auto-generated method stub
super.onTerminate();
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public String getComputeEndpoint() {
return computeEndpoint;
}
public void setComputeEndpoint(String computeEndpoint) {
this.computeEndpoint = computeEndpoint;
}
public String getVolumeEndpoint() {
return volumeEndpoint;
}
public void setVolumeEndpoint(String volumeEndpoint) {
this.volumeEndpoint = volumeEndpoint;
}
public String getIdentityEndpoint() {
return identityEndpoint;
}
public void setIdentityEndpoint(String identityEndpoint) {
this.identityEndpoint = identityEndpoint;
}
public String getImageEndpoint() {
return imageEndpoint;
}
public void setImageEndpoint(String imageEndpoint) {
this.imageEndpoint = imageEndpoint;
}
} |
package org.embulk.filter.stdout;
import com.google.common.base.Optional;
import org.embulk.config.Config;
import org.embulk.config.ConfigDefault;
import org.embulk.config.ConfigSource;
import org.embulk.config.Task;
import org.embulk.config.TaskSource;
import org.embulk.spi.Column;
import org.embulk.spi.FilterPlugin;
import org.embulk.spi.PageOutput;
import org.embulk.spi.PageBuilder;
import org.embulk.spi.PageReader;
import org.embulk.spi.Page;
import org.embulk.spi.Exec;
import org.embulk.spi.util.PagePrinter;
import org.embulk.spi.Schema;
import org.embulk.spi.ColumnVisitor;
public class StdoutFilterPlugin
implements FilterPlugin
{
public interface PluginTask
extends Task
{
@Config("timezone")
@ConfigDefault("null")
public Optional<String> getTimezone();
}
@Override
public void transaction(ConfigSource config, Schema inputSchema,
FilterPlugin.Control control)
{
PluginTask task = config.loadConfig(PluginTask.class);
Schema outputSchema = inputSchema;
control.run(task.dump(), outputSchema);
}
@Override
public PageOutput open(final TaskSource taskSource, final Schema inputSchema,
final Schema outputSchema, final PageOutput output)
{
final PluginTask task = taskSource.loadTask(PluginTask.class);
return new PageOutput() {
private final PageReader pageReader = new PageReader(inputSchema);
private final PageBuilder pageBuilder = new PageBuilder(Exec.getBufferAllocator(), outputSchema, output);
private final PagePrinter pagePrinter = new PagePrinter(inputSchema, task.getTimezone().or("UTC"));
private final ColumnVisitorImpl visitor = new ColumnVisitorImpl(pageBuilder);
@Override
public void finish() {
pageBuilder.finish();
}
@Override
public void close() {
pageBuilder.close();
}
@Override
public void add(Page page) {
pageReader.setPage(page);
while (pageReader.nextRecord()) {
System.out.println(pagePrinter.printRecord(pageReader, ",")); // here!
outputSchema.visitColumns(visitor);
pageBuilder.addRecord();
}
}
class ColumnVisitorImpl implements ColumnVisitor {
private final PageBuilder pageBuilder;
ColumnVisitorImpl(PageBuilder pageBuilder) {
this.pageBuilder = pageBuilder;
}
@Override
public void booleanColumn(Column outputColumn) {
if (pageReader.isNull(outputColumn)) {
pageBuilder.setNull(outputColumn);
} else {
pageBuilder.setBoolean(outputColumn, pageReader.getBoolean(outputColumn));
}
}
@Override
public void longColumn(Column outputColumn) {
if (pageReader.isNull(outputColumn)) {
pageBuilder.setNull(outputColumn);
} else {
pageBuilder.setLong(outputColumn, pageReader.getLong(outputColumn));
}
}
@Override
public void doubleColumn(Column outputColumn) {
if (pageReader.isNull(outputColumn)) {
pageBuilder.setNull(outputColumn);
} else {
pageBuilder.setDouble(outputColumn, pageReader.getDouble(outputColumn));
}
}
@Override
public void stringColumn(Column outputColumn) {
if (pageReader.isNull(outputColumn)) {
pageBuilder.setNull(outputColumn);
} else {
pageBuilder.setString(outputColumn, pageReader.getString(outputColumn));
}
}
@Override
public void timestampColumn(Column outputColumn) {
if (pageReader.isNull(outputColumn)) {
pageBuilder.setNull(outputColumn);
} else {
pageBuilder.setTimestamp(outputColumn, pageReader.getTimestamp(outputColumn));
}
}
@Override
public void jsonColumn(Column outputColumn) {
if (pageReader.isNull(outputColumn)) {
pageBuilder.setNull(outputColumn);
} else {
pageBuilder.setJson(outputColumn, pageReader.getJson(outputColumn));
}
}
}
};
}
} |
package org.irmacard.cardemu.protocols;
import android.os.AsyncTask;
import android.util.Log;
import com.google.gson.reflect.TypeToken;
import org.acra.ACRA;
import org.irmacard.cardemu.*;
import org.irmacard.cardemu.httpclient.HttpClient;
import org.irmacard.cardemu.httpclient.HttpClientException;
import org.irmacard.cardemu.httpclient.HttpResultHandler;
import org.irmacard.credentials.CredentialsException;
import org.irmacard.credentials.idemix.messages.IssueCommitmentMessage;
import org.irmacard.credentials.idemix.messages.IssueSignatureMessage;
import org.irmacard.credentials.idemix.proofs.ProofList;
import org.irmacard.api.common.IssuingRequest;
import org.irmacard.api.common.DisclosureProofRequest;
import org.irmacard.api.common.DisclosureProofResult;
import org.irmacard.api.common.util.GsonUtil;
import org.irmacard.credentials.info.InfoException;
import java.lang.reflect.Type;
import java.util.ArrayList;
public class JsonProtocol extends Protocol {
private static String TAG = "CardEmuJson";
private String server;
public void connect(String url) {
if (!url.endsWith("/"))
url = url + "/";
server = url;
if (server.contains("/verification/"))
startDisclosure();
else if (server.contains("/issue/"))
startIssuance();
}
/**
* Report the specified exception to the MainActivity
*/
private void fail(HttpClientException e) {
String feedback;
if (e.getCause() != null)
feedback = e.getCause().getMessage();
else
feedback = "Server returned status " + e.status;
fail(feedback);
}
/**
* Report the specified exception to the MainActivity
*/
private void fail(Exception e) {
fail(e.getMessage());
}
/**
* Report the specified feedback as a failure to the MainActivity
*/
private void fail(String feedback) {
Log.w(TAG, feedback);
activity.setFeedback(feedback, "failure");
activity.setState(MainActivity.STATE_IDLE);
}
/**
* Retrieve an {@link IssuingRequest} from the server
*/
public void startIssuance() {
Log.i(TAG, "Retrieving issuing request: " + server);
activity.setState(MainActivity.STATE_CONNECTING_TO_SERVER);
final String server = this.server;
final HttpClient client = new HttpClient(GsonUtil.getGson());
client.get(IssuingRequest.class, server, new HttpResultHandler<IssuingRequest>() {
@Override public void onSuccess(IssuingRequest result) {
Log.i(TAG, result.toString());
finishIssuance(result, client);
}
@Override public void onError(HttpClientException exception) {
fail(exception);
}
});
}
/**
* Given an {@link IssuingRequest}, compute the first issuing message and post it to the server
* (using the specified {@link HttpClient}). If the server returns corresponding CL signatures,
* construct and save the new Idemix credentials.
*/
private void finishIssuance(IssuingRequest request, final HttpClient client) {
Log.i(TAG, "Posting issuing commitments");
IssueCommitmentMessage msg;
try {
msg = CredentialManager.getIssueCommitments(request);
} catch (InfoException e) {
e.printStackTrace();
activity.setFeedback("Issuing failed: wrong credential type", "failure");
activity.setState(MainActivity.STATE_IDLE);
return;
}
Type t = new TypeToken<ArrayList<IssueSignatureMessage>>(){}.getType();
client.post(t, server + "commitments", msg, new HttpResultHandler<ArrayList<IssueSignatureMessage>>() {
@Override public void onSuccess(ArrayList<IssueSignatureMessage> result) {
try {
CredentialManager.constructCredentials(result);
activity.setFeedback("Issuing was successfull", "success");
activity.setState(MainActivity.STATE_IDLE);
done();
} catch (InfoException|CredentialsException e) {
fail(e);
}
}
@Override public void onError(HttpClientException exception) {
fail(exception);
}
});
}
/**
* Retrieve a {@link DisclosureProofRequest} from the server, see if we can satisfy it, and if so,
* ask the user which attributes she wants to disclose.
*/
private void startDisclosure() {
Log.i(TAG, "Retrieving disclosure request: " + server);
activity.setState(MainActivity.STATE_CONNECTING_TO_SERVER);
final String server = this.server;
final HttpClient client = new HttpClient(GsonUtil.getGson());
client.get(DisclosureProofRequest.class, server, new HttpResultHandler<DisclosureProofRequest>() {
@Override public void onSuccess(DisclosureProofRequest result) {
if (result.getContent().size() == 0 || result.getNonce() == null || result.getContext() == null) {
activity.setFeedback("Got malformed disclosure request", "failure");
cancelDisclosure();
return;
}
activity.setState(MainActivity.STATE_READY);
askForVerificationPermission(result);
}
@Override public void onError(HttpClientException exception) {
cancelDisclosure();
fail(exception);
}
});
}
/**
* Given a {@link DisclosureProofRequest} with selected attributes, perform the disclosure.
*/
public void disclose(final DisclosureProofRequest request) {
activity.setState(MainActivity.STATE_COMMUNICATING);
Log.i(TAG, "Sending disclosure proofs to " + server);
ProofList proofs;
try {
proofs = CredentialManager.getProofs(request);
} catch (CredentialsException e) {
e.printStackTrace();
cancelDisclosure();
fail(e);
return;
}
HttpClient client = new HttpClient(GsonUtil.getGson());
client.post(DisclosureProofResult.Status.class, server + "proofs", proofs,
new HttpResultHandler<DisclosureProofResult.Status>() {
@Override public void onSuccess(DisclosureProofResult.Status result) {
if (result == DisclosureProofResult.Status.VALID) {
activity.setFeedback("Successfully disclosed attributes", "success");
activity.setState(MainActivity.STATE_IDLE);
done();
} else { // We successfully computed a proof but server rejects it? That's fishy, report it
String feedback = "Server rejected proof: " + result.name().toLowerCase();
ACRA.getErrorReporter().handleException(new Exception(feedback));
fail(feedback);
}
}
@Override public void onError(HttpClientException exception) {
fail(exception);
}
});
}
/**
* Cancels the current disclosure session by DELETE-ing the specified url and setting the state to idle.
*/
@Override
public void cancelDisclosure() {
super.cancelDisclosure();
Log.i(TAG, "Canceling disclosure to " + server);
new AsyncTask<String,Void,Void>() {
@Override protected Void doInBackground(String... params) {
try {
new HttpClient(GsonUtil.getGson()).doDelete(params[0]);
} catch (HttpClientException e) {
e.printStackTrace();
}
return null;
}
}.execute(server);
server = null;
}
} |
package org.jboss.msc.service;
import static java.lang.Thread.holdsLock;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.jboss.msc.service.management.ServiceStatus;
import org.jboss.msc.value.Value;
/**
* The service controller implementation.
*
* @param <S> the service type
*
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
* @author <a href="mailto:flavia.rainone@jboss.com">Flavia Rainone</a>
* @author <a href="mailto:ropalka@redhat.com">Richard Opalka</a>
*/
final class ServiceControllerImpl<S> implements ServiceController<S>, Dependent {
    // Shared message used when an operation is attempted in a disallowed state.
    private static final String ILLEGAL_CONTROLLER_STATE = "Illegal controller state";
    /**
     * The service itself.
     */
    private final Value<? extends Service<S>> serviceValue;
    /**
     * The dependencies of this service.
     */
    private final Dependency[] dependencies;
    /**
     * The injections of this service.
     */
    private final ValueInjection<?>[] injections;
    /**
     * The out injections of this service.
     */
    private final ValueInjection<?>[] outInjections;
    /**
     * The set of registered service listeners.
     */
    private final IdentityHashSet<ServiceListener<? super S>> listeners;
    /**
     * The set of registered stability monitors.
     */
    private final IdentityHashSet<StabilityMonitor> monitors;
    /**
     * The primary registration of this service.
     */
    private final ServiceRegistrationImpl primaryRegistration;
    /**
     * The alias registrations of this service.
     */
    private final ServiceRegistrationImpl[] aliasRegistrations;
    /**
     * The parent of this service.
     */
    private final ServiceControllerImpl<?> parent;
    /**
     * The children of this service (only valid during {@link State#UP}).
     */
    private final IdentityHashSet<ServiceControllerImpl<?>> children;
    /**
     * The immediate unavailable dependencies of this service.
     */
    private final IdentityHashSet<ServiceName> immediateUnavailableDependencies;
    /**
     * The start exception.
     */
    private StartException startException;
    /**
     * The controller mode.
     */
    private ServiceController.Mode mode = ServiceController.Mode.NEVER;
    /**
     * The controller state.
     */
    private Substate state = Substate.NEW;
    /**
     * The number of registrations which place a demand-to-start on this
     * instance. If this value is >0, propagate a demand up to all parent
     * dependents. If this value is >0 and mode is ON_DEMAND, we should start.
     */
    private int demandedByCount;
    /**
     * Count for dependencies that are trying to stop. If this count is greater than zero then
     * dependents will be notified that a stop is necessary.
     */
    private int stoppingDependencies;
    /**
     * The number of dependents that are currently running. The deployment will
     * not execute the {@code stop()} method (and subsequently leave the
     * {@link org.jboss.msc.service.ServiceController.State#STOPPING} state)
     * until all running dependents (and listeners) are stopped.
     */
    private int runningDependents;
    /**
     * Count for failure notification. It indicates how many services have
     * failed to start and are not recovered so far. This count monitors
     * failures that happen when starting this service, and dependency related
     * failures as well. When incremented from 0 to 1, it is time to notify
     * dependents and listeners that a failure occurred. When decremented from 1
     * to 0, the dependents and listeners are notified that the affected
     * services are retrying to start. Values larger than 1 are ignored to avoid
     * multiple notifications.
     */
    private int failCount;
    /**
     * Indicates if this service has one or more transitive dependencies that
     * are not available. Count for notification of unavailable dependencies.
     * Its value indicates how many transitive dependencies are unavailable.
     * When incremented from 0 to 1, dependents are notified of the unavailable
     * dependency unless immediateUnavailableDependencies is not empty. When
     * decremented from 1 to 0, a notification that the unavailable dependencies
     * are now available is sent to dependents, unless immediateUnavailableDependencies
     * is not empty. Values larger than 1 are ignored to avoid multiple
     * notifications.
     */
    private int transitiveUnavailableDepCount;
    /**
     * Indicates whether dependencies have been demanded.
     */
    private boolean dependenciesDemanded = false;
    /**
     * The number of asynchronous tasks that are currently running. This
     * includes listeners, start/stop methods, outstanding asynchronous
     * start/stops, and internal tasks.
     */
    private int asyncTasks;
    /**
     * The service target for adding child services (can be {@code null} if none
     * were added).
     */
    private volatile ChildServiceTarget childTarget;
    /**
     * The system nanotime of the moment in which the last lifecycle change was
     * initiated.
     */
    @SuppressWarnings("VolatileLongOrDoubleField")
    private volatile long lifecycleTime;
    // Shared empty arrays, avoiding repeated zero-length allocations.
    private static final Dependent[] NO_DEPENDENTS = new Dependent[0];
    private static final ServiceControllerImpl<?>[] NO_CONTROLLERS = new ServiceControllerImpl<?>[0];
    private static final String[] NO_STRINGS = new String[0];
    // Maximum supported number of dependencies per service: (1 << 14) - 1.
    static final int MAX_DEPENDENCIES = (1 << 14) - 1;
    /**
     * Creates a controller wired to its dependencies, registrations, monitors,
     * listeners and optional parent. Construction only assigns state and
     * registers with monitors; the controller is not installed until
     * {@code startInstallation()}/{@code commitInstallation()} run.
     *
     * @param serviceValue        the service instance value
     * @param dependencies        dependencies of this service (at most {@code MAX_DEPENDENCIES})
     * @param injections          in-bound value injections
     * @param outInjections       out-bound value injections
     * @param primaryRegistration the primary name registration
     * @param aliasRegistrations  alias name registrations
     * @param monitors            stability monitors to register with
     * @param listeners           service listeners to notify
     * @param parent              parent controller, or {@code null} for a top-level service
     */
    ServiceControllerImpl(final Value<? extends Service<S>> serviceValue, final Dependency[] dependencies, final ValueInjection<?>[] injections, final ValueInjection<?>[] outInjections, final ServiceRegistrationImpl primaryRegistration, final ServiceRegistrationImpl[] aliasRegistrations, final Set<StabilityMonitor> monitors, final Set<? extends ServiceListener<? super S>> listeners, final ServiceControllerImpl<?> parent) {
        assert dependencies.length <= MAX_DEPENDENCIES;
        this.serviceValue = serviceValue;
        this.dependencies = dependencies;
        this.injections = injections;
        this.outInjections = outInjections;
        this.primaryRegistration = primaryRegistration;
        this.aliasRegistrations = aliasRegistrations;
        this.listeners = new IdentityHashSet<ServiceListener<? super S>>(listeners);
        this.monitors = new IdentityHashSet<StabilityMonitor>(monitors);
        // We also need to register this controller with monitors explicitly.
        // This allows inherited monitors to have registered all child controllers
        // and later to remove them when inherited stability monitor is cleared.
        for (final StabilityMonitor monitor : monitors) {
            monitor.addControllerNoCallback(this);
        }
        this.parent = parent;
        int depCount = dependencies.length;
        // A parent, when present, counts as one extra stopping dependency.
        stoppingDependencies = parent == null? depCount : depCount + 1;
        children = new IdentityHashSet<ServiceControllerImpl<?>>();
        immediateUnavailableDependencies = new IdentityHashSet<ServiceName>();
    }
    // Returns the current substate. The "Locked" suffix suggests callers hold
    // this controller's lock -- TODO confirm; no assert enforces it here.
    Substate getSubstateLocked() {
        return state;
    }
/**
* Start this service installation, connecting it to its parent and dependencies. Also,
* set the instance in primary and alias registrations.
* <p>
* All notifications from dependencies, parents, and registrations will be ignored until the
* installation is {@link #commitInstallation(org.jboss.msc.service.ServiceController.Mode) committed}.
*/
void startInstallation() {
for (Dependency dependency : dependencies) {
dependency.addDependent(this);
}
if (parent != null) parent.addChild(this);
// Install the controller in each registration
primaryRegistration.setInstance(this);
for (ServiceRegistrationImpl aliasRegistration: aliasRegistrations) {
aliasRegistration.setInstance(this);
}
}
    /**
     * Commit the service install, kicking off the mode set and listener execution.
     * Runs in two locked phases: first the LISTENER_ADDED tasks and initial mode
     * are scheduled; then install-time dependency availability/failure is
     * propagated and the state moves from NEW to DOWN.
     *
     * @param initialMode the initial service mode
     */
    void commitInstallation(Mode initialMode) {
        assert (state == Substate.NEW);
        assert initialMode != null;
        assert !holdsLock(this);
        final ArrayList<Runnable> listenerAddedTasks = new ArrayList<Runnable>(16);
        final ArrayList<Runnable> tasks = new ArrayList<Runnable>(16);
        synchronized (this) {
            final boolean leavingRestState = isStableRestState();
            getListenerTasks(ListenerNotification.LISTENER_ADDED, listenerAddedTasks);
            internalSetMode(initialMode, tasks);
            // placeholder async task for running listener added tasks
            addAsyncTasks(listenerAddedTasks.size() + tasks.size() + 1);
            updateStabilityState(leavingRestState);
        }
        doExecute(tasks);
        tasks.clear();
        // Run the LISTENER_ADDED notifications outside the lock.
        for (Runnable listenerAddedTask : listenerAddedTasks) {
            listenerAddedTask.run();
        }
        synchronized (this) {
            final boolean leavingRestState = isStableRestState();
            // Tell each dependent that this service (its dependency) is now available.
            for (Map.Entry<ServiceName, Dependent[]> dependentEntry : getDependentsByDependencyName().entrySet()) {
                ServiceName serviceName = dependentEntry.getKey();
                for (Dependent dependent : dependentEntry.getValue()) {
                    if (dependent != null) dependent.immediateDependencyAvailable(serviceName);
                }
            }
            Dependent[][] dependents = getDependents();
            // Propagate any unavailability / failures already known at install time.
            if (!immediateUnavailableDependencies.isEmpty() || transitiveUnavailableDepCount > 0) {
                propagateTransitiveUnavailability(dependents);
            }
            if (failCount > 0) {
                tasks.add(new DependencyFailedTask(dependents, false));
            }
            state = Substate.DOWN;
            // subtract one to compensate for +1 above
            decrementAsyncTasks();
            transition(tasks);
            addAsyncTasks(tasks.size());
            updateStabilityState(leavingRestState);
        }
        doExecute(tasks);
    }
/**
* Roll back the service install.
*/
void rollbackInstallation() {
synchronized(this) {
final boolean leavingRestState = isStableRestState();
mode = Mode.REMOVE;
incrementAsyncTasks();
state = Substate.CANCELLED;
updateStabilityState(leavingRestState);
}
(new RemoveTask()).run();
}
/**
* Return {@code true} only if this service controller installation is committed.
*
* @return true if this service controller installation is committed
*/
boolean isInstallationCommitted() {
assert holdsLock(this);
// should not be NEW nor CANCELLED
return state.compareTo(Substate.CANCELLED) > 0;
}
/**
* Determine whether a stopped controller should start.
*
* @return {@code true} if so
*/
private boolean shouldStart() {
assert holdsLock(this);
return mode == Mode.ACTIVE || mode == Mode.PASSIVE || demandedByCount > 0 && (mode == Mode.ON_DEMAND || mode == Mode.LAZY);
}
/**
* Determine whether a running controller should stop.
*
* @return {@code true} if so
*/
private boolean shouldStop() {
assert holdsLock(this);
return mode == Mode.REMOVE || demandedByCount == 0 && mode == Mode.ON_DEMAND || mode == Mode.NEVER;
}
/**
* Returns true if controller is in rest state and no async tasks are running, false otherwise.
* @return true if stable rest state, false otherwise
*/
boolean isStableRestState() {
assert holdsLock(this);
return asyncTasks == 0 && state.isRestState();
}
void updateStabilityState(final boolean leavingStableRestState) {
assert holdsLock(this);
final boolean enteringStableRestState = state.isRestState() && asyncTasks == 0;
if (leavingStableRestState) {
if (!enteringStableRestState) {
primaryRegistration.getContainer().incrementUnstableServices();
for (StabilityMonitor monitor : monitors) {
monitor.incrementUnstableServices();
}
}
} else {
if (enteringStableRestState) {
primaryRegistration.getContainer().decrementUnstableServices();
for (StabilityMonitor monitor : monitors) {
monitor.decrementUnstableServices();
}
}
}
}
/**
 * Identify the transition to take. Call under lock.
 * <p>
 * Pure decision function: inspects the current substate together with the
 * mode, demand/dependency counters and failure flags, and never mutates any
 * state itself (mutation happens in {@code transition(...)}).
 *
 * @return the transition or {@code null} if none is needed at this time
 */
private Transition getTransition() {
    assert holdsLock(this);
    switch (state) {
        case DOWN: {
            if (mode == ServiceController.Mode.REMOVE) {
                return Transition.DOWN_to_REMOVING;
            } else if (mode == ServiceController.Mode.NEVER) {
                return Transition.DOWN_to_WONT_START;
            } else if (shouldStart() && (mode != Mode.PASSIVE || stoppingDependencies == 0)) {
                return Transition.DOWN_to_START_REQUESTED;
            } else {
                // mode is either LAZY or ON_DEMAND with demandedByCount == 0, or mode is PASSIVE and downDep > 0
                return Transition.DOWN_to_WAITING;
            }
        }
        case WAITING: {
            // leave WAITING once the condition that parked us here no longer holds
            if (((mode != Mode.ON_DEMAND && mode != Mode.LAZY) || demandedByCount > 0) &&
                (mode != Mode.PASSIVE || stoppingDependencies == 0)) {
                return Transition.WAITING_to_DOWN;
            }
            break;
        }
        case WONT_START: {
            if (mode != ServiceController.Mode.NEVER) {
                return Transition.WONT_START_to_DOWN;
            }
            break;
        }
        case STOPPING: {
            return Transition.STOPPING_to_DOWN;
        }
        case STOP_REQUESTED: {
            // a stop may be cancelled if the service should start again and
            // no dependency is currently stopping
            if (shouldStart() && stoppingDependencies == 0) {
                return Transition.STOP_REQUESTED_to_UP;
            }
            // only actually stop once every dependent has stopped
            if (runningDependents == 0) {
                return Transition.STOP_REQUESTED_to_STOPPING;
            }
            break;
        }
        case UP: {
            if (shouldStop() || stoppingDependencies > 0) {
                return Transition.UP_to_STOP_REQUESTED;
            }
            break;
        }
        case START_FAILED: {
            if (shouldStart()) {
                if (stoppingDependencies == 0) {
                    // retry only after the start exception has been cleared (see retry())
                    if (startException == null) {
                        return Transition.START_FAILED_to_STARTING;
                    }
                } else {
                    return Transition.START_FAILED_to_DOWN;
                }
            } else {
                return Transition.START_FAILED_to_DOWN;
            }
            break;
        }
        case START_INITIATING: {
            return Transition.START_INITIATING_to_STARTING;
        }
        case STARTING: {
            if (startException == null) {
                return Transition.STARTING_to_UP;
            } else {
                return Transition.STARTING_to_START_FAILED;
            }
        }
        case START_REQUESTED: {
            if (shouldStart()) {
                if (mode == Mode.PASSIVE && stoppingDependencies > 0) {
                    return Transition.START_REQUESTED_to_DOWN;
                }
                // any unavailable or failed dependency sends us to PROBLEM
                if (!immediateUnavailableDependencies.isEmpty() || transitiveUnavailableDepCount > 0 || failCount > 0) {
                    return Transition.START_REQUESTED_to_PROBLEM;
                }
                else if (stoppingDependencies == 0) {
                    return Transition.START_REQUESTED_to_START_INITIATING;
                }
            } else {
                return Transition.START_REQUESTED_to_DOWN;
            }
            break;
        }
        case PROBLEM: {
            // leave PROBLEM when no longer starting, when all problems cleared,
            // or when PASSIVE (a PASSIVE service never stays parked on problems)
            if (! shouldStart() || (immediateUnavailableDependencies.isEmpty() && transitiveUnavailableDepCount == 0 && failCount == 0) || mode == Mode.PASSIVE) {
                return Transition.PROBLEM_to_START_REQUESTED;
            }
            break;
        }
        case REMOVING: {
            return Transition.REMOVING_to_REMOVED;
        }
        case CANCELLED: {
            return Transition.CANCELLED_to_REMOVED;
        }
        case REMOVED:
        {
            // no possible actions
            break;
        }
    }
    return null;
}
/**
* Run the locked portion of a transition. Call under lock.
*
* @param tasks the list to which async tasks should be appended
*/
void transition(final ArrayList<Runnable> tasks) {
assert holdsLock(this);
if (asyncTasks != 0 || state == Substate.NEW) {
// no movement possible
return;
}
Transition transition;
do {
// first of all, check if parents should be demanded/undemanded
switch (mode) {
case NEVER:
case REMOVE:
if (dependenciesDemanded) {
tasks.add(new UndemandDependenciesTask());
dependenciesDemanded = false;
}
break;
case LAZY: {
if (state.getState() == State.UP && state != Substate.STOP_REQUESTED) {
if (!dependenciesDemanded) {
tasks.add(new DemandDependenciesTask());
dependenciesDemanded = true;
}
break;
}
// fall thru!
}
case ON_DEMAND:
case PASSIVE: {
if (demandedByCount > 0 && !dependenciesDemanded) {
tasks.add(new DemandDependenciesTask());
dependenciesDemanded = true;
} else if (demandedByCount == 0 && dependenciesDemanded) {
tasks.add(new UndemandDependenciesTask());
dependenciesDemanded = false;
}
break;
}
case ACTIVE: {
if (!dependenciesDemanded) {
tasks.add(new DemandDependenciesTask());
dependenciesDemanded = true;
}
break;
}
}
transition = getTransition();
if (transition == null) {
return;
}
switch (transition) {
case DOWN_to_WAITING: {
getListenerTasks(transition, tasks);
break;
}
case WAITING_to_DOWN: {
getListenerTasks(transition, tasks);
break;
}
case DOWN_to_WONT_START: {
getListenerTasks(transition, tasks);
tasks.add(new ServiceUnavailableTask());
break;
}
case WONT_START_to_DOWN: {
getListenerTasks(transition, tasks);
tasks.add(new ServiceAvailableTask());
break;
}
case STOPPING_to_DOWN: {
getListenerTasks(transition, tasks);
tasks.add(new DependentStoppedTask());
break;
}
case START_REQUESTED_to_DOWN: {
getListenerTasks(transition, tasks);
break;
}
case START_REQUESTED_to_START_INITIATING: {
getListenerTasks(transition, tasks);
tasks.add(new DependentStartedTask());
break;
}
case START_REQUESTED_to_PROBLEM: {
getPrimaryRegistration().getContainer().addProblem(this);
for (StabilityMonitor monitor : monitors) {
monitor.addProblem(this);
}
if (!immediateUnavailableDependencies.isEmpty()) {
getListenerTasks(ListenerNotification.IMMEDIATE_DEPENDENCY_UNAVAILABLE, tasks);
}
if (transitiveUnavailableDepCount > 0) {
getListenerTasks(ListenerNotification.TRANSITIVE_DEPENDENCY_UNAVAILABLE, tasks);
}
if (failCount > 0) {
getListenerTasks(ListenerNotification.DEPENDENCY_FAILURE, tasks);
}
getListenerTasks(transition, tasks);
break;
}
case UP_to_STOP_REQUESTED: {
if (mode == Mode.LAZY && demandedByCount == 0) {
assert dependenciesDemanded;
tasks.add(new UndemandDependenciesTask());
dependenciesDemanded = false;
}
getListenerTasks(transition, tasks);
lifecycleTime = System.nanoTime();
tasks.add(new DependencyStoppedTask(getDependents()));
break;
}
case STARTING_to_UP: {
getListenerTasks(transition, tasks);
tasks.add(new DependencyStartedTask(getDependents()));
break;
}
case STARTING_to_START_FAILED: {
getPrimaryRegistration().getContainer().addFailed(this);
for (StabilityMonitor monitor : monitors) {
monitor.addFailed(this);
}
ChildServiceTarget childTarget = this.childTarget;
if (childTarget != null) {
childTarget.valid = false;
this.childTarget = null;
}
getListenerTasks(transition, tasks);
tasks.add(new DependencyFailedTask(getDependents(), true));
break;
}
case START_FAILED_to_STARTING: {
getPrimaryRegistration().getContainer().removeFailed(this);
for (StabilityMonitor monitor : monitors) {
monitor.removeFailed(this);
}
getListenerTasks(transition, tasks);
tasks.add(new DependencyRetryingTask(getDependents()));
break;
}
case START_INITIATING_to_STARTING: {
getListenerTasks(transition, tasks);
tasks.add(new StartTask());
break;
}
case START_FAILED_to_DOWN: {
getPrimaryRegistration().getContainer().removeFailed(this);
for (StabilityMonitor monitor : monitors) {
monitor.removeFailed(this);
}
startException = null;
failCount
getListenerTasks(transition, tasks);
tasks.add(new DependencyRetryingTask(getDependents()));
tasks.add(new StopTask(true));
tasks.add(new DependentStoppedTask());
break;
}
case STOP_REQUESTED_to_UP: {
getListenerTasks(transition, tasks);
tasks.add(new DependencyStartedTask(getDependents()));
break;
}
case STOP_REQUESTED_to_STOPPING: {
ChildServiceTarget childTarget = this.childTarget;
if (childTarget != null) {
childTarget.valid = false;
this.childTarget = null;
}
getListenerTasks(transition, tasks);
tasks.add(new StopTask(false));
break;
}
case DOWN_to_REMOVING: {
getListenerTasks(transition, tasks);
tasks.add(new ServiceUnavailableTask());
Dependent[][] dependents = getDependents();
// Clear all dependency uninstalled flags from dependents
if (!immediateUnavailableDependencies.isEmpty() || transitiveUnavailableDepCount > 0) {
propagateTransitiveAvailability(dependents);
}
if (failCount > 0) {
tasks.add(new DependencyRetryingTask(dependents));
}
tasks.add(new RemoveTask());
break;
}
case CANCELLED_to_REMOVED:
case REMOVING_to_REMOVED: {
getListenerTasks(transition, tasks);
listeners.clear();
for (final StabilityMonitor monitor : monitors) {
monitor.removeControllerNoCallback(this);
}
break;
}
case DOWN_to_START_REQUESTED: {
getListenerTasks(transition, tasks);
break;
}
case PROBLEM_to_START_REQUESTED: {
getPrimaryRegistration().getContainer().removeProblem(this);
for (StabilityMonitor monitor : monitors) {
monitor.removeProblem(this);
}
if (!immediateUnavailableDependencies.isEmpty()) {
getListenerTasks(ListenerNotification.IMMEDIATE_DEPENDENCY_AVAILABLE, tasks);
}
if (transitiveUnavailableDepCount > 0) {
getListenerTasks(ListenerNotification.TRANSITIVE_DEPENDENCY_AVAILABLE, tasks);
}
if (failCount > 0) {
getListenerTasks(ListenerNotification.DEPENDENCY_FAILURE_CLEAR, tasks);
}
getListenerTasks(transition, tasks);
lifecycleTime = System.nanoTime();
break;
}
default: {
throw new IllegalStateException();
}
}
state = transition.getAfter();
} while (tasks.isEmpty());
// Notify waiters that a transition occurred
notifyAll();
}
/**
 * Append one transition-notification task per registered listener.
 *
 * @param transition the transition being notified
 * @param tasks      the accumulator for async tasks
 */
private void getListenerTasks(final Transition transition, final ArrayList<Runnable> tasks) {
    for (final ServiceListener<? super S> listener : listeners) {
        tasks.add(new ListenerTask(listener, transition));
    }
}
/**
 * Append one notification task per registered listener.
 *
 * @param notification the notification to deliver
 * @param tasks        the accumulator for async tasks
 */
private void getListenerTasks(final ListenerNotification notification, final ArrayList<Runnable> tasks) {
    for (final ServiceListener<? super S> listener : listeners) {
        tasks.add(new ListenerTask(listener, notification));
    }
}
/**
 * Submit the accumulated tasks to the container's executor. Must be called
 * without holding the controller lock.
 *
 * @param tasks the tasks to run; may be {@code null}, in which case this is a no-op
 */
void doExecute(final ArrayList<Runnable> tasks) {
    assert !holdsLock(this);
    if (tasks == null) {
        return;
    }
    final Executor executor = primaryRegistration.getContainer().getExecutor();
    for (final Runnable task : tasks) {
        try {
            executor.execute(task);
        } catch (RejectedExecutionException e) {
            // executor saturated or shut down — fall back to running inline
            task.run();
        }
    }
}
/**
 * Unconditionally change this controller's mode.
 *
 * @param newMode the mode to switch to; must not be {@code null}
 */
public void setMode(final ServiceController.Mode newMode) {
    // null expected-mode means "no compare, just set"
    internalSetMode(null, newMode);
}
/**
 * Atomically (optionally compare-and-) set the mode, run any resulting
 * transitions, and execute the produced tasks outside the lock.
 *
 * @param expectedMode the mode that must currently be in effect, or {@code null} to set unconditionally
 * @param newMode the new mode; must not be {@code null}
 * @return {@code false} if {@code expectedMode} was given and did not match; {@code true} otherwise
 * @throws IllegalArgumentException if {@code newMode} is null, or if the container
 *         is shutting down and {@code newMode} is not REMOVE
 */
private boolean internalSetMode(final ServiceController.Mode expectedMode, final ServiceController.Mode newMode) {
    assert !holdsLock(this);
    if (newMode == null) {
        throw new IllegalArgumentException("newMode is null");
    }
    // NOTE(review): IllegalArgumentException for a shutting-down container is
    // arguably an IllegalStateException — preserved as-is since callers may catch it.
    if (newMode != Mode.REMOVE && primaryRegistration.getContainer().isShutdown()) {
        throw new IllegalArgumentException("Container is shutting down");
    }
    final ArrayList<Runnable> tasks = new ArrayList<Runnable>(4);
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        final Mode oldMode = mode;
        if (expectedMode != null && expectedMode != oldMode) {
            // compare failed — no change
            return false;
        }
        if (oldMode == newMode) {
            // no-op change still reports success
            return true;
        }
        internalSetMode(newMode, tasks);
        transition(tasks);
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    // run the produced tasks only after releasing the lock
    doExecute(tasks);
    return true;
}
/**
 * Apply a mode change under lock, queueing remove-request listener
 * notifications when transitioning to or away from REMOVE.
 *
 * @param newMode the new mode to store
 * @param taskList accumulator for listener-notification tasks
 * @throws IllegalStateException if the service removal has already begun
 */
private void internalSetMode(final Mode newMode, final ArrayList<Runnable> taskList) {
    assert holdsLock(this);
    final ServiceController.Mode oldMode = mode;
    if (oldMode == Mode.REMOVE) {
        // leaving REMOVE is only legal before removal has actually started
        if (state.compareTo(Substate.REMOVING) >= 0) {
            throw new IllegalStateException("Service already removed");
        }
        getListenerTasks(ListenerNotification.REMOVE_REQUEST_CLEARED, taskList);
    }
    if (newMode == Mode.REMOVE) {
        getListenerTasks(ListenerNotification.REMOVE_REQUESTED, taskList);
    }
    mode = newMode;
}
@Override
public void immediateDependencyAvailable(ServiceName dependencyName) {
    // Called by a dependency when it becomes available again.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        assert immediateUnavailableDependencies.contains(dependencyName);
        immediateUnavailableDependencies.remove(dependencyName);
        // only act when the count just dropped to zero and the controller is live
        if (!immediateUnavailableDependencies.isEmpty() || state.compareTo(Substate.CANCELLED) <= 0 || state.compareTo(Substate.REMOVING) >= 0) {
            return;
        }
        // we dropped it to 0
        tasks = new ArrayList<Runnable>(16);
        if (state == Substate.PROBLEM) {
            getListenerTasks(ListenerNotification.IMMEDIATE_DEPENDENCY_AVAILABLE, tasks);
        }
        // both unavailable dep counts are 0
        if (transitiveUnavailableDepCount == 0) {
            transition(tasks);
            // tell our own dependents the transitive chain is clear again
            propagateTransitiveAvailability(getDependents());
        }
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
@Override
public void immediateDependencyUnavailable(ServiceName dependencyName) {
    // Called by a dependency when it becomes unavailable.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        immediateUnavailableDependencies.add(dependencyName);
        // only act on the 0 -> 1 edge, and only while the controller is live
        if (immediateUnavailableDependencies.size() != 1 || state.compareTo(Substate.CANCELLED) <= 0 || state.compareTo(Substate.REMOVING) >= 0) {
            return;
        }
        // we raised it to 1
        tasks = new ArrayList<Runnable>(16);
        if (state == Substate.PROBLEM) {
            getListenerTasks(ListenerNotification.IMMEDIATE_DEPENDENCY_UNAVAILABLE, tasks);
        }
        // if this is the first unavailable dependency, we need to notify dependents;
        // otherwise, they have already been notified
        if (transitiveUnavailableDepCount == 0) {
            transition(tasks);
            propagateTransitiveUnavailability(getDependents());
        }
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
/**
 * Notify every dependent in the snapshot that a transitive dependency has
 * become unavailable. Call under lock.
 *
 * @param dependentsSnapshot dependents grouped by registration; entries may be {@code null}
 */
private void propagateTransitiveUnavailability(final Dependent[][] dependentsSnapshot) {
    assert Thread.holdsLock(this);
    for (final Dependent[] row : dependentsSnapshot) {
        for (final Dependent dependent : row) {
            if (dependent != null) {
                dependent.transitiveDependencyUnavailable();
            }
        }
    }
}
/**
 * Notify every dependent in the snapshot that all transitive dependencies
 * are available again. Call under lock.
 *
 * @param dependentsSnapshot dependents grouped by registration; entries may be {@code null}
 */
private void propagateTransitiveAvailability(final Dependent[][] dependentsSnapshot) {
    assert Thread.holdsLock(this);
    for (final Dependent[] row : dependentsSnapshot) {
        for (final Dependent dependent : row) {
            if (dependent != null) {
                dependent.transitiveDependencyAvailable();
            }
        }
    }
}
@Override
public void transitiveDependencyAvailable() {
    // Called by a dependency whose own transitive-unavailability cleared.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        // only act on the 1 -> 0 edge while the controller is live
        if (-- transitiveUnavailableDepCount != 0 || state.compareTo(Substate.CANCELLED) <= 0 || state.compareTo(Substate.REMOVING) >= 0) {
            return;
        }
        // we dropped it to 0
        tasks = new ArrayList<Runnable>(16);
        if (state == Substate.PROBLEM) {
            getListenerTasks(ListenerNotification.TRANSITIVE_DEPENDENCY_AVAILABLE, tasks);
        }
        // there are no immediate nor transitive unavailable dependencies
        if (immediateUnavailableDependencies.isEmpty()) {
            transition(tasks);
            propagateTransitiveAvailability(getDependents());
        }
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
@Override
public void transitiveDependencyUnavailable() {
    // Called by a dependency when one of its own (transitive) dependencies
    // became unavailable.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        // only act on the 0 -> 1 edge while the controller is live
        if (++ transitiveUnavailableDepCount != 1 || state.compareTo(Substate.CANCELLED) <= 0 || state.compareTo(Substate.REMOVING) >= 0) {
            return;
        }
        // we raised it to 1
        tasks = new ArrayList<Runnable>(16);
        if (state == Substate.PROBLEM) {
            getListenerTasks(ListenerNotification.TRANSITIVE_DEPENDENCY_UNAVAILABLE, tasks);
        }
        //if this is the first unavailable dependency, we need to notify dependents;
        // otherwise, they have already been notified
        if (immediateUnavailableDependencies.isEmpty()) {
            transition(tasks);
            propagateTransitiveUnavailability(getDependents());
        }
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
/** {@inheritDoc} */
public ServiceControllerImpl<?> getController() {
    // a controller is its own controller
    return this;
}
@Override
public void immediateDependencyUp() {
    // Called by a dependency that has come up; decrements the count of
    // dependencies that are (still) stopping.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        if (--stoppingDependencies != 0) {
            return;
        }
        // we dropped it to 0
        tasks = new ArrayList<Runnable>();
        transition(tasks);
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
@Override
public void immediateDependencyDown() {
    // Called by a dependency that has gone down; increments the count of
    // dependencies that are stopping.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        if (++stoppingDependencies != 1) {
            return;
        }
        // we raised it to 1 (first stopping dependency)
        tasks = new ArrayList<Runnable>();
        transition(tasks);
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
@Override
public void dependencyFailed() {
    // Called when a (possibly transitive) dependency has failed to start.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        // only act on the 0 -> 1 edge while the controller is live
        if (++failCount != 1 || state.compareTo(Substate.CANCELLED) <= 0) {
            return;
        }
        // we raised it to 1
        tasks = new ArrayList<Runnable>();
        if (state == Substate.PROBLEM) {
            getListenerTasks(ListenerNotification.DEPENDENCY_FAILURE, tasks);
        }
        // propagate the failure further down the dependent chain
        tasks.add(new DependencyFailedTask(getDependents(), false));
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
@Override
public void dependencyFailureCleared() {
    // Called when a previously failed dependency is being retried.
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        // only act on the 1 -> 0 edge, and not once cancelled
        if (--failCount != 0 || state == Substate.CANCELLED) {
            return;
        }
        // we dropped it to 0
        tasks = new ArrayList<Runnable>();
        if (state == Substate.PROBLEM) {
            getListenerTasks(ListenerNotification.DEPENDENCY_FAILURE_CLEAR, tasks);
        }
        // propagate the retry further down the dependent chain
        tasks.add(new DependencyRetryingTask(getDependents()));
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
/**
 * Record that one more dependent of this service has started. Must be
 * called without holding the controller lock.
 */
void dependentStarted() {
    assert !holdsLock(this);
    synchronized (this) {
        runningDependents++;
    }
}
// Record that a dependent has stopped; when the last one stops, a pending
// STOP_REQUESTED transition may proceed (see getTransition()).
void dependentStopped() {
    assert !holdsLock(this);
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        if (--runningDependents != 0) {
            return;
        }
        // last dependent has stopped — try to move the state machine
        tasks = new ArrayList<Runnable>();
        transition(tasks);
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
/**
 * Bring a newly added dependent up to date with this service's current
 * failure/availability/up status. Call under lock.
 *
 * @param dependencyName the name under which the dependent depends on us
 * @param dependent the dependent to notify
 */
void newDependent(final ServiceName dependencyName, final Dependent dependent) {
    assert holdsLock(this);
    if (failCount > 0 && state != Substate.STARTING) {
        // if starting and failCount is 1, dependents have not been notified yet...
        // hence, skip it to avoid duplicate notification
        dependent.dependencyFailed();
    }
    if (!immediateUnavailableDependencies.isEmpty() || transitiveUnavailableDepCount > 0) {
        dependent.transitiveDependencyUnavailable();
    }
    if (state == Substate.WONT_START) {
        dependent.immediateDependencyUnavailable(dependencyName);
    } else if (state.getState() == State.UP && state != Substate.STOP_REQUESTED) {
        // service is genuinely up (and not on its way down)
        dependent.immediateDependencyUp();
    }
}
/** Add a single demand for this service (see {@code addDemands}). */
void addDemand() {
    addDemands(1);
}
/**
 * Add the given number of demands for this service. A 0 -> positive edge
 * may trigger a transition for ON_DEMAND, PASSIVE, or not-yet-started LAZY
 * services. Must be called without holding the controller lock.
 *
 * @param demandedByCount number of demands to add
 */
void addDemands(final int demandedByCount) {
    assert !holdsLock(this);
    final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
    final boolean propagate;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        final int cnt = this.demandedByCount;
        this.demandedByCount += demandedByCount;
        // a LAZY service only reacts to demand while it is not already up
        boolean notStartedLazy = mode == Mode.LAZY && !(state.getState() == State.UP && state != Substate.STOP_REQUESTED);
        propagate = cnt == 0 && (mode == Mode.ON_DEMAND || notStartedLazy || mode == Mode.PASSIVE);
        if (propagate) {
            transition(tasks);
        }
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
/**
 * Remove a single demand for this service. A 1 -> 0 edge may trigger a
 * transition for ON_DEMAND, PASSIVE, or not-yet-started LAZY services.
 * Must be called without holding the controller lock.
 */
void removeDemand() {
    assert !holdsLock(this);
    final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
    final boolean propagate;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        final int cnt = --demandedByCount;
        // a LAZY service only reacts to demand while it is not already up
        boolean notStartedLazy = mode == Mode.LAZY && !(state.getState() == State.UP && state != Substate.STOP_REQUESTED);
        propagate = cnt == 0 && (mode == Mode.ON_DEMAND || notStartedLazy || mode == Mode.PASSIVE);
        if (propagate) {
            transition(tasks);
        }
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
/**
 * Register a child service of this service. Children may only be added
 * while this service is starting, up, or stop-requested.
 *
 * @param child the child controller to add
 * @throws IllegalStateException if the current state does not accept children
 */
void addChild(ServiceControllerImpl<?> child) {
    assert !holdsLock(this);
    synchronized (this) {
        switch (state) {
            case START_INITIATING:
            case STARTING:
            case UP:
            case STOP_REQUESTED: {
                children.add(child);
                // the child immediately becomes a dependent of this service
                newDependent(primaryRegistration.getName(), child);
                break;
            }
            default: throw new IllegalStateException("Children cannot be added in state " + state.getState());
        }
    }
}
/**
 * Unregister a child service. When the last child is removed while this
 * service is in START_FAILED or STOPPING, the async task that was held open
 * for the children is released and the state machine is advanced.
 *
 * @param child the child controller to remove
 */
void removeChild(ServiceControllerImpl<?> child) {
    assert !holdsLock(this);
    final ArrayList<Runnable> tasks;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        children.remove(child);
        if (children.isEmpty()) {
            switch (state) {
                case START_FAILED:
                case STOPPING:
                    // last child was removed; drop async count
                    decrementAsyncTasks();
                    transition(tasks = new ArrayList<Runnable>());
                    break;
                default:
                    // nothing else to do in other states
                    return;
            }
        } else {
            // children remain — no state change possible yet
            return;
        }
        addAsyncTasks(tasks.size());
        updateStabilityState(leavingRestState);
    }
    doExecute(tasks);
}
/**
 * Get the live set of child controllers. Call under lock; the caller must
 * keep holding the lock while using the returned set.
 */
IdentityHashSet<ServiceControllerImpl<?>> getChildren() {
    assert holdsLock(this);
    return children;
}
/** Get the parent controller, or {@code null} if this is not a child service. */
public ServiceControllerImpl<?> getParent() {
    return parent;
}
/** Get the container this service is installed into. */
public ServiceContainerImpl getServiceContainer() {
    return primaryRegistration.getContainer();
}
/** Get the coarse public state corresponding to the current substate. */
public ServiceController.State getState() {
    return state.getState();
}
/**
 * Get the current service value.
 *
 * @throws IllegalStateException if no value is currently available
 */
public S getValue() throws IllegalStateException {
    return serviceValue.getValue().getValue();
}
/**
 * Block until this service reaches a terminal state, returning the value
 * once the service is UP.
 *
 * @return the service value
 * @throws IllegalStateException if the service failed to start or was removed
 * @throws InterruptedException if interrupted while waiting
 */
public S awaitValue() throws IllegalStateException, InterruptedException {
    assert !holdsLock(this);
    synchronized (this) {
        for (;;) switch (state.getState()) {
            case UP: {
                return serviceValue.getValue().getValue();
            }
            case START_FAILED: {
                throw new IllegalStateException("Failed to start service", startException);
            }
            case REMOVED: {
                throw new IllegalStateException("Service was removed");
            }
            default: {
                // transition() calls notifyAll() on every state change
                wait();
            }
        }
    }
}
/**
 * Block up to the given timeout until this service reaches a terminal
 * state, returning the value once the service is UP.
 *
 * @param time the maximum time to wait
 * @param unit the unit of {@code time}
 * @return the service value
 * @throws IllegalStateException if the service failed to start or was removed
 * @throws InterruptedException if interrupted while waiting
 * @throws TimeoutException if the timeout elapses first
 */
public S awaitValue(final long time, final TimeUnit unit) throws IllegalStateException, InterruptedException, TimeoutException {
    assert !holdsLock(this);
    long now;
    long then = System.nanoTime();
    long remaining = unit.toNanos(time);
    // NOTE(review): if time <= 0 the do-while still runs once and wait(0, 0)
    // blocks indefinitely instead of timing out immediately — confirm intent.
    synchronized (this) {
        do {
            switch (state.getState()) {
                case UP: {
                    return serviceValue.getValue().getValue();
                }
                case START_FAILED: {
                    throw new IllegalStateException("Failed to start service", startException);
                }
                case REMOVED: {
                    throw new IllegalStateException("Service was removed");
                }
                default: {
                    // split the remaining nanos into Object.wait's (millis, nanos) form
                    wait(remaining / 1000000L, (int) (remaining % 1000000L));
                }
            }
            // When will then be now?
            now = System.nanoTime();
            remaining -= now - then;
            // soon...
            then = now;
        } while (remaining > 0L);
        throw new TimeoutException("Operation timed out");
    }
}
/**
 * Get the service instance itself.
 *
 * @throws IllegalStateException if no service instance is currently available
 */
public Service<S> getService() throws IllegalStateException {
    return serviceValue.getValue();
}
public ServiceName getName() {
return primaryRegistration.getName();
}
// Shared empty result so alias-free services allocate nothing.
private static final ServiceName[] NO_NAMES = new ServiceName[0];

/** Get the names this service is aliased under (possibly empty, never null). */
public ServiceName[] getAliases() {
    final ServiceRegistrationImpl[] regs = this.aliasRegistrations;
    if (regs.length == 0) {
        return NO_NAMES;
    }
    final ServiceName[] names = new ServiceName[regs.length];
    for (int i = 0; i < regs.length; i++) {
        names[i] = regs[i].getName();
    }
    return names;
}
/**
 * Add a listener to this controller. The listener immediately receives a
 * LISTENER_ADDED callback; if the service was already removed it also
 * receives a synthetic REMOVING_to_REMOVED transition.
 *
 * @param listener the listener to add
 * @throws IllegalArgumentException if the listener is already registered
 */
public void addListener(final ServiceListener<? super S> listener) {
    assert !holdsLock(this);
    final Substate state;
    synchronized (this) {
        final boolean leavingRestState = isStableRestState();
        if (listeners.contains(listener)) {
            // Duplicates not allowed
            throw new IllegalArgumentException("Listener " + listener + " already present on controller for " + primaryRegistration.getName());
        }
        listeners.add(listener);
        // one async task per pending listener invocation below
        incrementAsyncTasks();
        state = this.state;
        if (state == Substate.REMOVED) {
            incrementAsyncTasks();
        }
        updateStabilityState(leavingRestState);
    }
    // invoke outside the lock; invokeListener decrements the async count
    invokeListener(listener, ListenerNotification.LISTENER_ADDED, null);
    if (state == Substate.REMOVED) {
        invokeListener(listener, ListenerNotification.TRANSITION, Transition.REMOVING_to_REMOVED);
    }
}
/** Remove a listener from this controller; a no-op if it was not registered. */
public void removeListener(final ServiceListener<? super S> listener) {
    synchronized (this) {
        listeners.remove(listener);
    }
}
/** Get the exception from the most recent start failure, or {@code null}. */
@Override
public synchronized StartException getStartException() {
    return startException;
}
@Override
public void retry() {
assert !holdsLock(this);
final ArrayList<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
if (state.getState() != ServiceController.State.START_FAILED) {
return;
}
failCount
assert failCount == 0;
startException = null;
transition(tasks = new ArrayList<Runnable>());
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
@Override
public synchronized Set<ServiceName> getImmediateUnavailableDependencies() {
    // return a defensive copy so callers cannot mutate internal state
    return immediateUnavailableDependencies.clone();
}
/** Get the current mode, read under the controller lock for a consistent view. */
public synchronized ServiceController.Mode getMode() {
    return mode;
}
/**
 * Atomically change the mode only if the current mode equals {@code expectedMode}.
 *
 * @param expectedMode the mode that must currently be in effect; must not be {@code null}
 * @param newMode the mode to switch to
 * @return {@code true} if the mode matched and was (or already is) set
 * @throws IllegalArgumentException if {@code expectedMode} is null
 */
public boolean compareAndSetMode(final Mode expectedMode, final Mode newMode) {
    if (expectedMode == null) {
        throw new IllegalArgumentException("expectedMode is null");
    }
    return internalSetMode(expectedMode, newMode);
}
/**
 * Build an immutable snapshot of this controller's status (names, mode,
 * state, dependencies, failure information) under the controller lock.
 *
 * @return the status snapshot
 */
ServiceStatus getStatus() {
    synchronized (this) {
        final String parentName = parent == null ? null : parent.getName().getCanonicalName();
        final String name = primaryRegistration.getName().getCanonicalName();
        final ServiceRegistrationImpl[] aliasRegistrations = this.aliasRegistrations;
        final int aliasLength = aliasRegistrations.length;
        final String[] aliases;
        if (aliasLength == 0) {
            aliases = NO_STRINGS;
        } else {
            aliases = new String[aliasLength];
            for (int i = 0; i < aliasLength; i++) {
                aliases[i] = aliasRegistrations[i].getName().getCanonicalName();
            }
        }
        // the service value may be unresolvable; report "<unknown>" rather than fail
        String serviceClass = "<unknown>";
        try {
            final Service<? extends S> value = serviceValue.getValue();
            if (value != null) {
                serviceClass = value.getClass().getName();
            }
        } catch (RuntimeException ignored) {
        }
        final Dependency[] dependencies = this.dependencies;
        final int dependenciesLength = dependencies.length;
        final String[] dependencyNames;
        if (dependenciesLength == 0) {
            dependencyNames = NO_STRINGS;
        } else {
            dependencyNames = new String[dependenciesLength];
            for (int i = 0; i < dependenciesLength; i++) {
                dependencyNames[i] = dependencies[i].getName().getCanonicalName();
            }
        }
        StartException startException = this.startException;
        return new ServiceStatus(
            parentName,
            name,
            aliases,
            serviceClass,
            mode.name(),
            state.getState().name(),
            state.name(),
            dependencyNames,
            failCount != 0,
            startException != null ? startException.toString() : null,
            !immediateUnavailableDependencies.isEmpty() || transitiveUnavailableDepCount != 0
        );
    }
}
/**
 * Produce a human-readable diagnostic dump of this controller: dependents
 * (per registration), children, state, mode, service value, counters, and
 * dependencies. Locks are taken one object at a time (never nested across
 * controllers) to avoid deadlock, so the dump is not a single atomic snapshot.
 *
 * @return the multi-line dump text
 */
String dumpServiceDetails() {
    final StringBuilder b = new StringBuilder();
    IdentityHashSet<Dependent> dependents;
    synchronized (primaryRegistration) {
        dependents = primaryRegistration.getDependents();
        synchronized (dependents) {
            // clone so we can iterate without holding the registration's locks
            dependents = dependents.clone();
        }
    }
    b.append("Service Name: ").append(primaryRegistration.getName().toString()).append(" - Dependents: ").append(dependents.size()).append('\n');
    for (Dependent dependent : dependents) {
        final ServiceControllerImpl<?> controller = dependent.getController();
        synchronized (controller) {
            b.append("    ").append(controller.getName().toString()).append(" - State: ").append(controller.state.getState()).append(" (Substate: ").append(controller.state).append(")\n");
        }
    }
    b.append("Service Aliases: ").append(aliasRegistrations.length).append('\n');
    for (ServiceRegistrationImpl registration : aliasRegistrations) {
        synchronized (registration) {
            dependents = registration.getDependents();
            synchronized (dependents) {
                dependents = dependents.clone();
            }
        }
        b.append("    ").append(registration.getName().toString()).append(" - Dependents: ").append(dependents.size()).append('\n');
        for (Dependent dependent : dependents) {
            final ServiceControllerImpl<?> controller = dependent.getController();
            b.append("        ").append(controller.getName().toString()).append(" - State: ").append(controller.state.getState()).append(" (Substate: ").append(controller.state).append(")\n");
        }
    }
    synchronized (this) {
        b.append("Children: ").append(children.size()).append('\n');
        for (ServiceControllerImpl<?> child : children) {
            synchronized (child) {
                b.append("    ").append(child.getName().toString()).append(" - State: ").append(child.state.getState()).append(" (Substate: ").append(child.state).append(")\n");
            }
        }
        final Substate state = this.state;
        b.append("State: ").append(state.getState()).append(" (Substate: ").append(state).append(")\n");
        if (parent != null) {
            b.append("Parent Name: ").append(parent.getPrimaryRegistration().getName().toString()).append('\n');
        }
        b.append("Service Mode: ").append(mode).append('\n');
        if (startException != null) {
            b.append("Start Exception: ").append(startException.getClass().getName()).append(" (Message: ").append(startException.getMessage()).append(")\n");
        }
        // toString()/getValue() may throw from arbitrary user code; swallow and
        // report "(indeterminate)" so the dump itself never fails
        String serviceValueString = "(indeterminate)";
        try {
            serviceValueString = serviceValue.toString();
        } catch (Throwable ignored) {}
        b.append("Service Value: ").append(serviceValueString).append('\n');
        String serviceObjectString = "(indeterminate)";
        Object serviceObjectClass = "(indeterminate)";
        try {
            Object serviceObject = serviceValue.getValue();
            if (serviceObject != null) {
                serviceObjectClass = serviceObject.getClass();
                serviceObjectString = serviceObject.toString();
            }
        } catch (Throwable ignored) {}
        b.append("Service Object: ").append(serviceObjectString).append('\n');
        b.append("Service Object Class: ").append(serviceObjectClass).append('\n');
        b.append("Demanded By: ").append(demandedByCount).append('\n');
        b.append("Stopping Dependencies: ").append(stoppingDependencies).append('\n');
        b.append("Running Dependents: ").append(runningDependents).append('\n');
        b.append("Fail Count: ").append(failCount).append('\n');
        b.append("Immediate Unavailable Dep Count: ").append(immediateUnavailableDependencies.size()).append('\n');
        for (ServiceName name : immediateUnavailableDependencies) {
            b.append("    ").append(name.toString()).append('\n');
        }
        b.append("Transitive Unavailable Dep Count: ").append(transitiveUnavailableDepCount).append('\n');
        b.append("Dependencies Demanded: ").append(dependenciesDemanded ? "yes" : "no").append('\n');
        b.append("Async Tasks: ").append(asyncTasks).append('\n');
        if (lifecycleTime != 0L) {
            // convert the recorded nanoTime stamp to a wall-clock timestamp
            final long elapsedNanos = System.nanoTime() - lifecycleTime;
            final long now = System.currentTimeMillis();
            final long stamp = now - (elapsedNanos / 1000000L);
            b.append("Lifecycle Timestamp: ").append(lifecycleTime).append(String.format(" = %tb %<td %<tH:%<tM:%<tS.%<tL%n", stamp));
        }
    }
    b.append("Dependencies: ").append(dependencies.length).append('\n');
    for (int i = 0; i < dependencies.length; i ++) {
        final Dependency dependency = dependencies[i];
        final ServiceControllerImpl<?> controller = dependency.getDependencyController();
        b.append("    ").append(dependency.getName().toString());
        if (controller == null) {
            b.append(" (missing)\n");
        } else {
            synchronized (controller) {
                b.append(" - State: ").append(controller.state.getState()).append(" (Substate: ").append(controller.state).append(")\n");
            }
        }
    }
    return b.toString();
}
/**
 * Attach a stability monitor, immediately synchronizing it with this
 * controller's current instability/failure/problem status.
 *
 * @param stabilityMonitor the monitor to attach
 */
void addMonitor(final StabilityMonitor stabilityMonitor) {
    assert !holdsLock(this);
    synchronized (this) {
        // only update the monitor if it was actually newly added and we are unstable
        if (monitors.add(stabilityMonitor) && !isStableRestState()) {
            stabilityMonitor.incrementUnstableServices();
            if (state == Substate.START_FAILED) {
                stabilityMonitor.addFailed(this);
            } else if (state == Substate.PROBLEM) {
                stabilityMonitor.addProblem(this);
            }
        }
    }
}
/**
 * Detach a stability monitor, withdrawing any problem/failed/unstable
 * contributions this controller made to it.
 *
 * @param stabilityMonitor the monitor to detach
 */
void removeMonitor(final StabilityMonitor stabilityMonitor) {
    assert !holdsLock(this);
    synchronized (this) {
        if (monitors.remove(stabilityMonitor) && !isStableRestState()) {
            // remove* calls are safe even if this controller was never added
            stabilityMonitor.removeProblem(this);
            stabilityMonitor.removeFailed(this);
            stabilityMonitor.decrementUnstableServices();
        }
    }
}
/**
 * Detach a stability monitor without adjusting its counters — used when the
 * monitor itself is driving the removal and handles its own bookkeeping.
 */
void removeMonitorNoCallback(final StabilityMonitor stabilityMonitor) {
    assert !holdsLock(this);
    synchronized (this) {
        monitors.remove(stabilityMonitor);
    }
}
/**
 * Get the live set of attached stability monitors. Call under lock; the
 * caller must keep holding the lock while using the returned set.
 */
Set<StabilityMonitor> getMonitors() {
    assert holdsLock(this);
    return monitors;
}
/**
 * The kinds of notification that {@code invokeListener} can dispatch to a
 * {@code ServiceListener}.
 */
private enum ListenerNotification {
    /** Notify the listener that is has been added. */
    LISTENER_ADDED,
    /** Notifications related to the current state. */
    TRANSITION,
    /** Notify the listener that a dependency failure occurred. */
    DEPENDENCY_FAILURE,
    /** Notify the listener that all dependency failures are cleared. */
    DEPENDENCY_FAILURE_CLEAR,
    /** Notify the listener that an immediate dependency is unavailable. */
    IMMEDIATE_DEPENDENCY_UNAVAILABLE,
    /** Notify the listener that all previously unavailable immediate dependencies are now available. */
    IMMEDIATE_DEPENDENCY_AVAILABLE,
    /** Notify the listener a transitive dependency is unavailable. */
    TRANSITIVE_DEPENDENCY_UNAVAILABLE,
    /** Notify the listener that all previously unavailable transitive dependencies are now available. */
    TRANSITIVE_DEPENDENCY_AVAILABLE,
    /** Notify the listener that the service is going to be removed. */
    REMOVE_REQUESTED,
    /** Notify the listener that the service is no longer going to be removed. */
    REMOVE_REQUEST_CLEARED
}
/**
 * Invokes the listener, performing the notification specified.
 * <p>
 * The listener runs with its own class's TCCL set; any throwable it raises
 * is logged, never propagated. On completion (success or failure) the async
 * task held for this invocation is released and the state machine advanced.
 *
 * @param listener listener to be invoked
 * @param notification specified notification
 * @param transition the transition to be notified, only relevant if {@code notification} is
 * {@link ListenerNotification#TRANSITION}
 */
private void invokeListener(final ServiceListener<? super S> listener, final ListenerNotification notification, final Transition transition) {
    assert !holdsLock(this);
    // first set the TCCL
    final ClassLoader contextClassLoader = setTCCL(getCL(listener.getClass()));
    try {
        switch (notification) {
            case TRANSITION: {
                listener.transition(this, transition);
                break;
            }
            case LISTENER_ADDED: {
                listener.listenerAdded(this);
                break;
            }
            case IMMEDIATE_DEPENDENCY_UNAVAILABLE: {
                listener.immediateDependencyUnavailable(this);
                break;
            }
            case IMMEDIATE_DEPENDENCY_AVAILABLE: {
                listener.immediateDependencyAvailable(this);
                break;
            }
            case TRANSITIVE_DEPENDENCY_UNAVAILABLE: {
                listener.transitiveDependencyUnavailable(this);
                break;
            }
            case TRANSITIVE_DEPENDENCY_AVAILABLE: {
                listener.transitiveDependencyAvailable(this);
                break;
            }
            case DEPENDENCY_FAILURE: {
                listener.dependencyFailed(this);
                break;
            }
            case DEPENDENCY_FAILURE_CLEAR: {
                listener.dependencyFailureCleared(this);
                break;
            }
            case REMOVE_REQUESTED: {
                listener.serviceRemoveRequested(this);
                break;
            }
            case REMOVE_REQUEST_CLEARED: {
                listener.serviceRemoveRequestCleared(this);
                break;
            }
            default: throw new IllegalStateException();
        }
    } catch (Throwable t) {
        // listener failures are logged, never allowed to break the controller
        ServiceLogger.SERVICE.listenerFailed(t, listener);
    } finally {
        // reset TCCL
        setTCCL(contextClassLoader);
        // perform transition tasks
        final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
        synchronized (this) {
            final boolean leavingRestState = isStableRestState();
            // Subtract one for this executing listener
            decrementAsyncTasks();
            transition(tasks);
            addAsyncTasks(tasks.size());
            updateStabilityState(leavingRestState);
        }
        doExecute(tasks);
    }
}
/** Returns the current internal substate; synchronized for a consistent read. */
public Substate getSubstate() {
    synchronized (this) {
        return state;
    }
}
/** Returns the registration for this service's primary (canonical) name. */
ServiceRegistrationImpl getPrimaryRegistration() {
    return primaryRegistration;
}
/** Returns the registrations for this service's alias names (may be empty, not copied). */
ServiceRegistrationImpl[] getAliasRegistrations() {
    return aliasRegistrations;
}
/**
 * Returns a compiled array of all dependents of this service instance.
 * <p>
 * Each registration's dependent set is snapshotted while holding that set's
 * own monitor; slot 0 holds the primary registration's dependents, slot 1 the
 * children, and the remaining slots one alias registration each. The inner
 * arrays are "scattered" (may contain nulls), so callers must null-check
 * elements.
 *
 * @return an array of dependents, including children
 */
private Dependent[][] getDependents() {
    IdentityHashSet<Dependent> dependentSet = primaryRegistration.getDependents();
    if (aliasRegistrations.length == 0) {
        // common case: no aliases, snapshot primary dependents and children together
        synchronized (dependentSet) {
            return new Dependent[][] { dependentSet.toScatteredArray(NO_DEPENDENTS),
                children.toScatteredArray(NO_DEPENDENTS)};
        }
    }
    Dependent[][] dependents = new Dependent[aliasRegistrations.length + 2][];
    synchronized (dependentSet) {
        dependents[0] = dependentSet.toScatteredArray(NO_DEPENDENTS);
    }
    dependents[1] = children.toScatteredArray(NO_DEPENDENTS);
    for (int i = 0; i < aliasRegistrations.length; i++) {
        final ServiceRegistrationImpl alias = aliasRegistrations[i];
        final IdentityHashSet<Dependent> aliasDependentSet = alias.getDependents();
        synchronized (aliasDependentSet) {
            dependents[i + 2] = aliasDependentSet.toScatteredArray(NO_DEPENDENTS);
        }
    }
    return dependents;
}
/**
 * Returns a compiled map of all dependents of this service mapped by the dependency name.
 * This map can be used when it is necessary to perform notifications to these dependents that require
 * the name of the dependency issuing notification.
 * <br> The returned result does not include children.
 *
 * @return a map from each registered name (primary and aliases) to the dependents on that name
 */
// children are not included in this result
private Map<ServiceName, Dependent[]> getDependentsByDependencyName() {
    final Map<ServiceName, Dependent[]> dependents = new HashMap<ServiceName, Dependent[]>();
    addDependentsByName(primaryRegistration, dependents);
    for (ServiceRegistrationImpl aliasRegistration: aliasRegistrations) {
        addDependentsByName(aliasRegistration, dependents);
    }
    return dependents;
}
/**
 * Snapshots {@code registration}'s dependent set (under that set's own monitor)
 * and records it in {@code dependentsByName} keyed by the registration's name.
 * The stored array is "scattered" and may contain null slots.
 */
private void addDependentsByName(ServiceRegistrationImpl registration, Map<ServiceName, Dependent[]> dependentsByName) {
    final IdentityHashSet<Dependent> deps = registration.getDependents();
    synchronized (deps) {
        dependentsByName.put(registration.getName(), deps.toScatteredArray(NO_DEPENDENTS));
    }
}
private void performInjections() {
final int injectionsLength = injections.length;
boolean ok = false;
int i = 0;
try {
for (; i < injectionsLength; i++) {
final ValueInjection<?> injection = injections[i];
doInject(injection);
}
ok = true;
} finally {
if (! ok) {
for (; i >= 0; i
injections[i].getTarget().uninject();
}
}
}
}
/**
 * Performs all "out" injections (values published after service start).
 * Each injection is attempted independently: a failure is logged and the
 * remaining injections still run, since the service is already complete.
 */
private void performOutInjections() {
    for (final ValueInjection<?> outInjection : outInjections) {
        try {
            doInject(outInjection);
        } catch (Throwable t) {
            // best-effort: log and continue with the rest
            ServiceLogger.SERVICE.exceptionAfterComplete(t, primaryRegistration.getName());
        }
    }
}
/** Lifecycle-context states used by StartContextImpl/StopContextImpl to track sync/async completion. */
enum ContextState {
    // mid transition states
    SYNC_ASYNC_COMPLETE,
    SYNC_ASYNC_FAILED,
    // final transition states
    SYNC,
    ASYNC,
    COMPLETE,
    FAILED,
}
/** Resolves the injection's source value and injects it into its target. */
private static <T> void doInject(final ValueInjection<T> injection) {
    injection.getTarget().inject(injection.getSource().getValue());
}
/**
 * Installs {@code newTCCL} as the current thread's context class loader,
 * using a privileged action when a security manager is present.
 *
 * @return the previously installed context class loader
 */
private static ClassLoader setTCCL(ClassLoader newTCCL) {
    final SetTCCLAction swap = new SetTCCLAction(newTCCL);
    return System.getSecurityManager() != null
            ? AccessController.doPrivileged(swap)
            : swap.run();
}
/**
 * Returns {@code clazz}'s defining class loader, going through a privileged
 * action when a security manager is installed.
 */
private static ClassLoader getCL(final Class<?> clazz) {
    if (System.getSecurityManager() == null) {
        return clazz.getClassLoader();
    }
    return AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
        @Override
        public ClassLoader run() {
            return clazz.getClassLoader();
        }
    });
}
/** Human-readable identity: service name plus this controller's identity hash in hex. */
@Override
public String toString() {
    // autoboxing supplies the Integer argument for %x
    return String.format("Controller for %s@%x", getName(), hashCode());
}
/**
 * Base class for the controller's asynchronously executed tasks. A task is
 * created while holding the controller lock (asserted in the constructor) and
 * later run without it. After {@link #execute()} returns {@code true}, the
 * task performs the standard accounting: decrement the async-task count for
 * itself, run a transition, schedule any resulting tasks, and update the
 * stability state. A {@code false} return means the subclass has taken over
 * (or skipped) that accounting. Any throwable is logged, never propagated.
 */
private abstract class ControllerTask implements Runnable {
    private ControllerTask() {
        assert holdsLock(ServiceControllerImpl.this);
    }
    public final void run() {
        assert !holdsLock(ServiceControllerImpl.this);
        try {
            if (!execute()) return;
            final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks);
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        } catch (Throwable t) {
            ServiceLogger.SERVICE.internalServiceError(t, primaryRegistration.getName());
        }
    }
    /** Performs the task's work; returns whether the standard post-run transition should occur. */
    abstract boolean execute();
}
/** Propagates a demand from this service to every dependency and to the parent, if any. */
private class DemandDependenciesTask extends ControllerTask {
    boolean execute() {
        for (final Dependency dep : dependencies) {
            dep.addDemand();
        }
        if (parent != null) {
            parent.addDemand();
        }
        return true;
    }
}
/** Retracts this service's demand from every dependency and from the parent, if any. */
private class UndemandDependenciesTask extends ControllerTask {
    boolean execute() {
        for (final Dependency dep : dependencies) {
            dep.removeDemand();
        }
        if (parent != null) {
            parent.removeDemand();
        }
        return true;
    }
}
/** Notifies every dependency and the parent, if any, that this dependent has stopped. */
private class DependentStoppedTask extends ControllerTask {
    boolean execute() {
        for (final Dependency dep : dependencies) {
            dep.dependentStopped();
        }
        if (parent != null) {
            parent.dependentStopped();
        }
        return true;
    }
}
/** Notifies every dependency and the parent, if any, that this dependent has started. */
private class DependentStartedTask extends ControllerTask {
    boolean execute() {
        for (final Dependency dep : dependencies) {
            dep.dependentStarted();
        }
        if (parent != null) {
            parent.dependentStarted();
        }
        return true;
    }
}
/**
 * Notifies all dependents (per registered name) and all children that this
 * service is unavailable as an immediate dependency. The dependent/children
 * snapshots are captured in the constructor, which runs while the controller
 * lock is held (see ControllerTask), so the notification set is fixed at
 * scheduling time.
 */
private class ServiceUnavailableTask extends ControllerTask {
    private final Map<ServiceName, Dependent[]> dependents;
    private final Dependent[] children;
    ServiceUnavailableTask() {
        dependents = getDependentsByDependencyName();
        children = ServiceControllerImpl.this.children.toScatteredArray(NO_DEPENDENTS);
    }
    boolean execute() {
        for (Map.Entry<ServiceName, Dependent[]> dependentEntry : dependents.entrySet()) {
            ServiceName serviceName = dependentEntry.getKey();
            for (Dependent dependent : dependentEntry.getValue()) {
                // scattered array: null slots are expected
                if (dependent != null) dependent.immediateDependencyUnavailable(serviceName);
            }
        }
        final ServiceName primaryRegistrationName = primaryRegistration.getName();
        for (Dependent child : children) {
            if (child != null) child.immediateDependencyUnavailable(primaryRegistrationName);
        }
        return true;
    }
}
/**
 * Notifies all dependents (per registered name) and all children that this
 * service is available as an immediate dependency. Mirror image of
 * ServiceUnavailableTask; snapshots are captured in the constructor while the
 * controller lock is held.
 */
private class ServiceAvailableTask extends ControllerTask {
    private final Map<ServiceName, Dependent[]> dependents;
    private final Dependent[] children;
    ServiceAvailableTask() {
        dependents = getDependentsByDependencyName();
        children = ServiceControllerImpl.this.children.toScatteredArray(NO_DEPENDENTS);
    }
    boolean execute() {
        for (Map.Entry<ServiceName, Dependent[]> dependentEntry : dependents.entrySet()) {
            ServiceName serviceName = dependentEntry.getKey();
            for (Dependent dependent : dependentEntry.getValue()) {
                // scattered array: null slots are expected
                if (dependent != null) dependent.immediateDependencyAvailable(serviceName);
            }
        }
        final ServiceName primaryRegistrationName = primaryRegistration.getName();
        for (Dependent child : children) {
            if (child != null) child.immediateDependencyAvailable(primaryRegistrationName);
        }
        return true;
    }
}
/**
 * Performs the service start sequence: inject dependencies, invoke
 * {@code Service.start} (under the service's own TCCL), then — unless the
 * context went asynchronous — mark the context complete, run the out
 * injections, and perform the standard transition/async-task accounting.
 * Failures are funneled through {@link #startFailed}.
 */
private class StartTask implements Runnable {
    public void run() {
        assert !holdsLock(ServiceControllerImpl.this);
        final ServiceName serviceName = primaryRegistration.getName();
        final StartContextImpl context = new StartContextImpl();
        try {
            performInjections();
            final Service<? extends S> service = serviceValue.getValue();
            if (service == null) {
                throw new IllegalArgumentException("Service is null");
            }
            startService(service, context);
            synchronized (context.lock) {
                // if start() called asynchronous()/complete()/failed(), the
                // context owns the rest of the lifecycle — bail out here
                if (context.state != ContextState.SYNC) {
                    return;
                }
                context.state = ContextState.COMPLETE;
            }
            performOutInjections();
            final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks);
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        } catch (StartException e) {
            e.setServiceName(serviceName);
            startFailed(e, serviceName, context);
        } catch (Throwable t) {
            StartException e = new StartException("Failed to start service", t, serviceName);
            startFailed(e, serviceName, context);
        }
    }
    /** Invokes {@code service.start(context)} with the service class's loader as TCCL. */
    private void startService(Service<? extends S> service, StartContext context) throws StartException {
        final ClassLoader contextClassLoader = setTCCL(service.getClass().getClassLoader());
        try {
            service.start(context);
        } finally {
            setTCCL(contextClassLoader);
        }
    }
    /**
     * Records a start failure: marks the context FAILED (unless it already
     * finished, in which case the exception is only logged), stores the
     * exception, bumps the fail count, and runs the standard transition.
     */
    private void startFailed(StartException e, ServiceName serviceName, StartContextImpl context) {
        ServiceLogger.FAIL.startFailed(e, serviceName);
        synchronized (context.lock) {
            final ContextState oldState = context.state;
            if (oldState != ContextState.SYNC && oldState != ContextState.ASYNC) {
                ServiceLogger.FAIL.exceptionAfterComplete(e, serviceName);
                return;
            }
            context.state = ContextState.FAILED;
        }
        final ArrayList<Runnable> tasks;
        synchronized (ServiceControllerImpl.this) {
            final boolean leavingRestState = isStableRestState();
            startException = e;
            failCount++;
            // Subtract one for this task
            decrementAsyncTasks();
            transition(tasks = new ArrayList<Runnable>());
            addAsyncTasks(tasks.size());
            updateStabilityState(leavingRestState);
        }
        doExecute(tasks);
    }
}
/**
 * Performs the service stop sequence. When {@code onlyUninject} is false the
 * task also requests removal of all child services (snapshotted in the
 * constructor, which additionally registers a placeholder async task that the
 * last removed child decrements) and invokes {@code Service.stop}. In all
 * cases the injections and out-injections are uninjected and the standard
 * transition/async-task accounting runs — unless the stop context went
 * asynchronous, in which case the context owns completion.
 */
private class StopTask implements Runnable {
    private final boolean onlyUninject;
    private final ServiceControllerImpl<?>[] children;
    StopTask(final boolean onlyUninject) {
        this.onlyUninject = onlyUninject;
        if (!onlyUninject && !ServiceControllerImpl.this.children.isEmpty()) {
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                this.children = ServiceControllerImpl.this.children.toScatteredArray(NO_CONTROLLERS);
                // placeholder async task for child removal; last removed child will decrement this count
                // see removeChild method to verify when this count is decremented
                incrementAsyncTasks();
                updateStabilityState(leavingRestState);
            }
        }
        else {
            this.children = null;
        }
    }
    public void run() {
        assert !holdsLock(ServiceControllerImpl.this);
        final ServiceName serviceName = primaryRegistration.getName();
        final StopContextImpl context = new StopContextImpl();
        boolean ok = false;
        try {
            if (! onlyUninject) {
                try {
                    if (children != null) {
                        for (ServiceController<?> child: children) {
                            if (child != null) child.setMode(Mode.REMOVE);
                        }
                    }
                    final Service<? extends S> service = serviceValue.getValue();
                    if (service != null) {
                        stopService(service, context);
                        ok = true;
                    } else {
                        ServiceLogger.ROOT.stopServiceMissing(serviceName);
                    }
                } catch (Throwable t) {
                    // stop failures are logged; shutdown continues regardless
                    ServiceLogger.FAIL.stopFailed(t, serviceName);
                }
            }
        } finally {
            final ArrayList<Runnable> tasks;
            synchronized (context.lock) {
                if (ok && context.state != ContextState.SYNC) {
                    // We want to discard the exception anyway, if there was one. Which there can't be.
                    //noinspection ReturnInsideFinallyBlock
                    return;
                }
                context.state = ContextState.COMPLETE;
            }
            uninject(serviceName, injections);
            uninject(serviceName, outInjections);
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks = new ArrayList<Runnable>());
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        }
    }
    /** Invokes {@code service.stop(context)} with the service class's loader as TCCL. */
    private void stopService(Service<? extends S> service, StopContext context) {
        final ClassLoader contextClassLoader = setTCCL(service.getClass().getClassLoader());
        try {
            service.stop(context);
        } finally {
            setTCCL(contextClassLoader);
        }
    }
    /** Uninjects every injection, logging (not propagating) individual failures. */
    private void uninject(final ServiceName serviceName, ValueInjection<?>[] injections) {
        for (ValueInjection<?> injection : injections) try {
            injection.getTarget().uninject();
        } catch (Throwable t) {
            ServiceLogger.ROOT.uninjectFailed(t, serviceName, injection);
        }
    }
}
/**
 * Runnable that delivers a single notification to a single listener via
 * {@link #invokeListener}. One constructor form carries a state transition
 * (implying a {@code TRANSITION} notification); the other carries an explicit
 * notification kind with no transition.
 */
private class ListenerTask implements Runnable {
    private final ListenerNotification notification;
    private final ServiceListener<? super S> listener;
    private final Transition transition;
    ListenerTask(final ServiceListener<? super S> listener, final Transition transition) {
        this(listener, ListenerNotification.TRANSITION, transition);
    }
    ListenerTask(final ServiceListener<? super S> listener, final ListenerNotification notification) {
        this(listener, notification, null);
    }
    private ListenerTask(final ServiceListener<? super S> listener, final ListenerNotification notification, final Transition transition) {
        this.listener = listener;
        this.notification = notification;
        this.transition = transition;
    }
    public void run() {
        assert !holdsLock(ServiceControllerImpl.this);
        invokeListener(listener, notification, transition);
    }
}
/**
 * Notifies a pre-captured set of dependents that this (their immediate
 * dependency) is now up, then performs the standard transition/async-task
 * accounting. Errors are logged, never propagated.
 */
private class DependencyStartedTask implements Runnable {
    private final Dependent[][] dependents;
    DependencyStartedTask(final Dependent[][] dependents) {
        this.dependents = dependents;
    }
    public void run() {
        try {
            for (Dependent[] dependentArray : dependents) {
                for (Dependent dependent : dependentArray) {
                    // scattered arrays may contain null slots
                    if (dependent != null) dependent.immediateDependencyUp();
                }
            }
            final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks);
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        } catch (Throwable t) {
            ServiceLogger.SERVICE.internalServiceError(t, primaryRegistration.getName());
        }
    }
}
/**
 * Notifies a pre-captured set of dependents that this (their immediate
 * dependency) is now down, then performs the standard transition/async-task
 * accounting. Errors are logged, never propagated.
 */
private class DependencyStoppedTask implements Runnable {
    private final Dependent[][] dependents;
    DependencyStoppedTask(final Dependent[][] dependents) {
        this.dependents = dependents;
    }
    public void run() {
        try {
            for (Dependent[] dependentArray : dependents) {
                for (Dependent dependent : dependentArray) {
                    // scattered arrays may contain null slots
                    if (dependent != null) dependent.immediateDependencyDown();
                }
            }
            final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks);
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        } catch (Throwable t) {
            ServiceLogger.SERVICE.internalServiceError(t, primaryRegistration.getName());
        }
    }
}
/**
 * Propagates a dependency failure to a pre-captured set of dependents, and
 * optionally requests removal of all child services. When children are to be
 * removed, the constructor snapshots them under the controller lock and
 * registers a placeholder async task that the last removed child decrements.
 */
private class DependencyFailedTask implements Runnable {
    private final Dependent[][] dependents;
    private final ServiceControllerImpl<?>[] children;
    DependencyFailedTask(final Dependent[][] dependents, final boolean removeChildren) {
        this.dependents = dependents;
        if (removeChildren && !ServiceControllerImpl.this.children.isEmpty()) {
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                this.children = ServiceControllerImpl.this.children.toScatteredArray(NO_CONTROLLERS);
                // placeholder async task for child removal; last removed child will decrement this count
                // see removeChild method to verify when this count is decremented
                incrementAsyncTasks();
                updateStabilityState(leavingRestState);
            }
        }
        else {
            this.children = null;
        }
    }
    public void run() {
        try {
            if (children != null) {
                for (ServiceControllerImpl<?> child: children) {
                    if (child != null) child.setMode(Mode.REMOVE);
                }
            }
            for (Dependent[] dependentArray : dependents) {
                for (Dependent dependent : dependentArray) {
                    // scattered arrays may contain null slots
                    if (dependent != null) dependent.dependencyFailed();
                }
            }
            final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks);
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        } catch (Throwable t) {
            ServiceLogger.SERVICE.internalServiceError(t, primaryRegistration.getName());
        }
    }
}
/**
 * Notifies a pre-captured set of dependents that this dependency's failure
 * has been cleared (retry is possible), then performs the standard
 * transition/async-task accounting. Errors are logged, never propagated.
 */
private class DependencyRetryingTask implements Runnable {
    private final Dependent[][] dependents;
    DependencyRetryingTask(final Dependent[][] dependents) {
        this.dependents = dependents;
    }
    public void run() {
        try {
            for (Dependent[] dependentArray : dependents) {
                for (Dependent dependent : dependentArray) {
                    // scattered arrays may contain null slots
                    if (dependent != null) dependent.dependencyFailureCleared();
                }
            }
            final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks);
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        } catch (Throwable t) {
            ServiceLogger.SERVICE.internalServiceError(t, primaryRegistration.getName());
        }
    }
}
/**
 * Final teardown of the controller: clears this instance from its primary and
 * alias registrations, detaches it from all dependencies and from its parent,
 * then performs the standard transition/async-task accounting. Only runs in
 * REMOVE mode while in the REMOVING or CANCELLED substate (asserted).
 */
private class RemoveTask implements Runnable {
    public void run() {
        try {
            assert getMode() == ServiceController.Mode.REMOVE;
            assert getSubstate() == Substate.REMOVING || getSubstate() == Substate.CANCELLED;
            primaryRegistration.clearInstance(ServiceControllerImpl.this);
            for (ServiceRegistrationImpl registration : aliasRegistrations) {
                registration.clearInstance(ServiceControllerImpl.this);
            }
            for (Dependency dependency : dependencies) {
                dependency.removeDependent(ServiceControllerImpl.this);
            }
            final ServiceControllerImpl<?> parent = ServiceControllerImpl.this.parent;
            if (parent != null) parent.removeChild(ServiceControllerImpl.this);
            final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
            synchronized (ServiceControllerImpl.this) {
                final boolean leavingRestState = isStableRestState();
                // Subtract one for this task
                decrementAsyncTasks();
                transition(tasks);
                addAsyncTasks(tasks.size());
                updateStabilityState(leavingRestState);
            }
            doExecute(tasks);
        } catch (Throwable t) {
            ServiceLogger.SERVICE.internalServiceError(t, primaryRegistration.getName());
        }
    }
}
/**
 * StartContext handed to {@code Service.start}. Tracks whether start completed
 * synchronously, went asynchronous, completed, or failed, guarding the state
 * with a private lock. The SYNC_ASYNC_* states record a complete()/failed()
 * call that arrived before asynchronous() did, so a late asynchronous() call
 * can still resolve to the right terminal state.
 */
private class StartContextImpl implements StartContext {
    private ContextState state = ContextState.SYNC;
    private final Object lock = new Object();
    /**
     * Marks the start as failed with the given reason (a null reason is
     * replaced with a synthetic StartException), records the exception and
     * fail count on the controller, and runs the standard transition.
     *
     * @throws IllegalStateException if the context already completed or failed
     */
    public void failed(StartException reason) throws IllegalStateException {
        synchronized (lock) {
            if (state == ContextState.COMPLETE || state == ContextState.FAILED
                    || state == ContextState.SYNC_ASYNC_COMPLETE || state == ContextState.SYNC_ASYNC_FAILED) {
                throw new IllegalStateException(ILLEGAL_CONTROLLER_STATE);
            }
            if (state == ContextState.ASYNC) {
                state = ContextState.FAILED;
            }
            if (state == ContextState.SYNC) {
                // failed() arrived before asynchronous(); remember for later
                state = ContextState.SYNC_ASYNC_FAILED;
            }
        }
        if (reason == null) {
            reason = new StartException("Start failed, and additionally, a null cause was supplied");
        }
        final ServiceName serviceName = getName();
        reason.setServiceName(serviceName);
        ServiceLogger.FAIL.startFailed(reason, serviceName);
        final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
        synchronized (ServiceControllerImpl.this) {
            final boolean leavingRestState = isStableRestState();
            startException = reason;
            failCount ++;
            // Subtract one for this task
            decrementAsyncTasks();
            transition(tasks);
            addAsyncTasks(tasks.size());
            updateStabilityState(leavingRestState);
        }
        doExecute(tasks);
    }
    /**
     * Returns (lazily creating) the target used to install child services of
     * this service. Fails once the context is no longer live.
     */
    public ServiceTarget getChildTarget() {
        synchronized (lock) {
            if (state == ContextState.COMPLETE || state == ContextState.FAILED) {
                throw new IllegalStateException("Lifecycle context is no longer valid");
            }
            synchronized (ServiceControllerImpl.this) {
                if (childTarget == null) {
                    childTarget = new ChildServiceTarget(getServiceContainer());
                }
                return childTarget;
            }
        }
    }
    /**
     * Switches the context to asynchronous mode; if complete()/failed() was
     * already called while still SYNC, resolves directly to that terminal state.
     *
     * @throws IllegalStateException if already asynchronous
     */
    public void asynchronous() throws IllegalStateException {
        synchronized (lock) {
            if (state == ContextState.SYNC) {
                state = ContextState.ASYNC;
            } else if (state == ContextState.SYNC_ASYNC_COMPLETE) {
                state = ContextState.COMPLETE;
            } else if (state == ContextState.SYNC_ASYNC_FAILED) {
                state = ContextState.FAILED;
            } else if (state == ContextState.ASYNC) {
                throw new IllegalStateException(ILLEGAL_CONTROLLER_STATE);
            }
        }
    }
    /**
     * Marks the start complete, runs out-injections, and performs the standard
     * transition/async-task accounting.
     *
     * @throws IllegalStateException if the context already completed or failed
     */
    public void complete() throws IllegalStateException {
        synchronized (lock) {
            if (state == ContextState.COMPLETE || state == ContextState.FAILED
                    || state == ContextState.SYNC_ASYNC_COMPLETE || state == ContextState.SYNC_ASYNC_FAILED) {
                throw new IllegalStateException(ILLEGAL_CONTROLLER_STATE);
            }
            if (state == ContextState.ASYNC) {
                state = ContextState.COMPLETE;
            }
            if (state == ContextState.SYNC) {
                // complete() arrived before asynchronous(); remember for later
                state = ContextState.SYNC_ASYNC_COMPLETE;
            }
        }
        performOutInjections();
        final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
        synchronized (ServiceControllerImpl.this) {
            final boolean leavingRestState = isStableRestState();
            // Subtract one for this task
            decrementAsyncTasks();
            transition(tasks);
            addAsyncTasks(tasks.size());
            updateStabilityState(leavingRestState);
        }
        doExecute(tasks);
    }
    /** Nanoseconds elapsed since this lifecycle phase began. */
    public long getElapsedTime() {
        return System.nanoTime() - lifecycleTime;
    }
    public ServiceController<?> getController() {
        return ServiceControllerImpl.this;
    }
    /** Runs {@code command} with its own class's loader as the TCCL. */
    public void execute(final Runnable command) {
        final ClassLoader contextClassLoader = setTCCL(command.getClass().getClassLoader());
        try {
            command.run();
        } finally {
            setTCCL(contextClassLoader);
        }
    }
}
/**
 * Service target for children of this service. Installs are parented to this
 * controller and rejected once the target is invalidated; sub-targets inherit
 * the same behavior.
 */
private final class ChildServiceTarget extends ServiceTargetImpl {
    // flipped to false when this child target is invalidated; volatile for unsynchronized reads
    private volatile boolean valid = true;
    private ChildServiceTarget(final ServiceTargetImpl parentTarget) {
        super(parentTarget);
    }
    <T> ServiceController<T> install(final ServiceBuilderImpl<T> serviceBuilder) throws ServiceRegistryException {
        if (! valid) {
            throw new IllegalStateException("Service target is no longer valid");
        }
        return super.install(serviceBuilder);
    }
    // forces the enclosing controller as the parent of every child built here
    protected <T> ServiceBuilder<T> createServiceBuilder(final ServiceName name, final Value<? extends Service<T>> value, final ServiceControllerImpl<?> parent) throws IllegalArgumentException {
        return super.createServiceBuilder(name, value, ServiceControllerImpl.this);
    }
    @Override
    public ServiceTarget subTarget() {
        return new ChildServiceTarget(this);
    }
}
/**
 * StopContext handed to {@code Service.stop}. Mirrors StartContextImpl's
 * sync/async state machine (minus failure states, since stop cannot fail).
 */
private class StopContextImpl implements StopContext {
    private ContextState state = ContextState.SYNC;
    private final Object lock = new Object();
    /**
     * Switches the context to asynchronous mode; a complete() that already
     * arrived while SYNC resolves directly to COMPLETE.
     *
     * @throws IllegalStateException if already asynchronous
     */
    public void asynchronous() throws IllegalStateException {
        synchronized (lock) {
            if (state == ContextState.SYNC) {
                state = ContextState.ASYNC;
            } else if (state == ContextState.SYNC_ASYNC_COMPLETE) {
                state = ContextState.COMPLETE;
            } else if (state == ContextState.ASYNC) {
                throw new IllegalStateException(ILLEGAL_CONTROLLER_STATE);
            }
        }
    }
    /**
     * Marks the stop complete, uninjects all injections, and performs the
     * standard transition/async-task accounting.
     *
     * @throws IllegalStateException if the context already completed
     */
    public void complete() throws IllegalStateException {
        synchronized (lock) {
            if (state == ContextState.COMPLETE || state == ContextState.SYNC_ASYNC_COMPLETE) {
                throw new IllegalStateException(ILLEGAL_CONTROLLER_STATE);
            }
            if (state == ContextState.ASYNC) {
                state = ContextState.COMPLETE;
            }
            if (state == ContextState.SYNC) {
                // complete() arrived before asynchronous(); remember for later
                state = ContextState.SYNC_ASYNC_COMPLETE;
            }
        }
        for (ValueInjection<?> injection : injections) {
            injection.getTarget().uninject();
        }
        final ArrayList<Runnable> tasks = new ArrayList<Runnable>();
        synchronized (ServiceControllerImpl.this) {
            final boolean leavingRestState = isStableRestState();
            // Subtract one for this task
            decrementAsyncTasks();
            transition(tasks);
            addAsyncTasks(tasks.size());
            updateStabilityState(leavingRestState);
        }
        doExecute(tasks);
    }
    public ServiceController<?> getController() {
        return ServiceControllerImpl.this;
    }
    /** Runs {@code command} with its own class's loader as the TCCL. */
    public void execute(final Runnable command) {
        final ClassLoader contextClassLoader = setTCCL(command.getClass().getClassLoader());
        try {
            command.run();
        } finally {
            setTCCL(contextClassLoader);
        }
    }
    /** Nanoseconds elapsed since this lifecycle phase began. */
    public long getElapsedTime() {
        return System.nanoTime() - lifecycleTime;
    }
}
/**
 * Adds {@code size} pending asynchronous tasks to the counter. Caller must
 * hold this controller's lock; {@code size} must be non-negative (asserted).
 */
void addAsyncTasks(final int size) {
    assert holdsLock(this);
    assert size >= 0;
    if (size != 0) {
        asyncTasks += size;
    }
}
/** Registers one additional pending asynchronous task. Caller must hold this controller's lock. */
void incrementAsyncTasks() {
    assert holdsLock(this);
    asyncTasks += 1;
}
void decrementAsyncTasks() {
assert holdsLock(this);
assert asyncTasks > 0;
asyncTasks
}
} |
package io.lumify.securegraph.model.ontology;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import io.lumify.core.exception.LumifyException;
import io.lumify.core.model.ontology.*;
import io.lumify.core.model.properties.LumifyProperties;
import io.lumify.core.model.user.AuthorizationRepository;
import io.lumify.core.util.LumifyLogger;
import io.lumify.core.util.LumifyLoggerFactory;
import io.lumify.core.util.TimingCallable;
import org.json.JSONObject;
import org.securegraph.*;
import org.securegraph.property.StreamingPropertyValue;
import org.securegraph.util.ConvertingIterable;
import org.securegraph.util.FilterIterable;
import org.securegraph.util.IterableUtils;
import org.semanticweb.owlapi.io.OWLOntologyDocumentSource;
import org.semanticweb.owlapi.io.ReaderDocumentSource;
import org.semanticweb.owlapi.model.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.lumify.core.model.ontology.OntologyLumifyProperties.*;
import static io.lumify.core.model.properties.LumifyProperties.DISPLAY_NAME;
import static io.lumify.core.model.properties.LumifyProperties.TITLE;
import static org.securegraph.util.IterableUtils.*;
@Singleton
public class SecureGraphOntologyRepository extends OntologyRepositoryBase {
private static final LumifyLogger LOGGER = LumifyLoggerFactory.getLogger(SecureGraphOntologyRepository.class);
// Vertex-id prefixes used for ontology elements stored in the graph.
public static final String ID_PREFIX = "ontology_";
public static final String ID_PREFIX_PROPERTY = ID_PREFIX + "prop_";
public static final String ID_PREFIX_RELATIONSHIP = ID_PREFIX + "rel_";
public static final String ID_PREFIX_CONCEPT = ID_PREFIX + "concept_";
private Graph graph;
private Authorizations authorizations;
// Single-entry caches (keyed by "") of the full concept/property/relationship
// lists; entries expire after one hour and are flushed by clearCache().
private Cache<String, List<Concept>> allConceptsWithPropertiesCache = CacheBuilder.newBuilder()
        .expireAfterWrite(1, TimeUnit.HOURS)
        .build();
private Cache<String, List<OntologyProperty>> allPropertiesCache = CacheBuilder.newBuilder()
        .expireAfterWrite(1, TimeUnit.HOURS)
        .build();
private Cache<String, List<Relationship>> relationshipLabelsCache = CacheBuilder.newBuilder()
        .expireAfterWrite(1, TimeUnit.HOURS)
        .build();
/**
 * Builds the repository: registers the ontology visibility string with the
 * graph's authorization repository, creates the Authorizations used for all
 * ontology reads/writes, and bootstraps the base ontology if it is not yet
 * defined in the graph.
 */
@Inject
public SecureGraphOntologyRepository(final Graph graph,
                                    final AuthorizationRepository authorizationRepository) {
    this.graph = graph;
    authorizationRepository.addAuthorizationToGraph(SecureGraphOntologyRepository.VISIBILITY_STRING);
    Set<String> authorizationsSet = new HashSet<String>();
    authorizationsSet.add(VISIBILITY_STRING);
    this.authorizations = authorizationRepository.createAuthorizations(authorizationsSet);
    if (!isOntologyDefined()) {
        LOGGER.info("Base ontology not defined. Creating a new ontology.");
        defineOntology(authorizations);
    } else {
        LOGGER.info("Base ontology already defined.");
    }
}
/** Flushes pending graph writes and invalidates all three ontology caches. */
@Override
public void clearCache() {
    LOGGER.info("clearing ontology cache");
    // flush first so subsequent cache reloads see the latest graph state
    graph.flush();
    this.allConceptsWithPropertiesCache.invalidateAll();
    this.allPropertiesCache.invalidateAll();
    this.relationshipLabelsCache.invalidateAll();
}
/**
 * Stores the raw glyph-icon bytes on the entity concept as a streaming
 * property value, excluded from the search index, and flushes the graph.
 */
@Override
protected void addEntityGlyphIconToEntityConcept(Concept entityConcept, byte[] rawImg) {
    StreamingPropertyValue raw = new StreamingPropertyValue(new ByteArrayInputStream(rawImg), byte[].class);
    raw.searchIndex(false);
    entityConcept.setProperty(LumifyProperties.GLYPH_ICON.getPropertyName(), raw, authorizations);
    graph.flush();
}
/**
 * Persists an ontology document on the root concept vertex as a multi-valued
 * "ontologyFile" property keyed by the document IRI. An "index" metadata entry
 * (the current count of stored files) preserves load order for
 * getOntologyFiles().
 */
@Override
public void storeOntologyFile(InputStream in, IRI documentIRI) {
    StreamingPropertyValue value = new StreamingPropertyValue(in, byte[].class);
    value.searchIndex(false);
    Map<String, Object> metadata = new HashMap<String, Object>();
    Vertex rootConceptVertex = ((SecureGraphConcept) getRootConcept()).getVertex();
    metadata.put("index", toList(rootConceptVertex.getProperties("ontologyFile")).size());
    rootConceptVertex.addPropertyValue(documentIRI.toString(), "ontologyFile", value, metadata, VISIBILITY.getVisibility(), authorizations);
    graph.flush();
}
/**
 * Loads every stored ontology file (in stored "index" order) into the given
 * OWL ontology manager, skipping the document whose IRI equals
 * {@code excludedIRI}. An UnloadableImportException for a single document is
 * logged and that document is skipped; other failures propagate.
 *
 * @return the ontologies that loaded successfully
 */
@Override
public List<OWLOntology> loadOntologyFiles(OWLOntologyManager m, OWLOntologyLoaderConfiguration config, IRI excludedIRI) throws OWLOntologyCreationException, IOException {
    List<OWLOntology> loadedOntologies = new ArrayList<OWLOntology>();
    Iterable<Property> ontologyFiles = getOntologyFiles();
    for (Property ontologyFile : ontologyFiles) {
        IRI ontologyFileIRI = IRI.create(ontologyFile.getKey());
        if (excludedIRI != null && excludedIRI.equals(ontologyFileIRI)) {
            continue;
        }
        InputStream lumifyBaseOntologyIn = ((StreamingPropertyValue) ontologyFile.getValue()).getInputStream();
        try {
            // Decode explicitly as UTF-8 instead of the platform default
            // charset so ontology files parse identically on every host.
            Reader lumifyBaseOntologyReader = new InputStreamReader(lumifyBaseOntologyIn, "UTF-8");
            LOGGER.info("Loading existing ontology: %s", ontologyFile.getKey());
            OWLOntologyDocumentSource lumifyBaseOntologySource = new ReaderDocumentSource(lumifyBaseOntologyReader, ontologyFileIRI);
            try {
                OWLOntology o = m.loadOntologyFromOntologyDocument(lumifyBaseOntologySource, config);
                loadedOntologies.add(o);
            } catch (UnloadableImportException ex) {
                LOGGER.error("Could not load %s", ontologyFileIRI, ex);
            }
        } finally {
            lumifyBaseOntologyIn.close();
        }
    }
    return loadedOntologies;
}
/**
 * Returns all "ontologyFile" properties from the root concept vertex, sorted
 * ascending by their "index" metadata (the order they were stored in by
 * storeOntologyFile).
 */
private Iterable<Property> getOntologyFiles() {
    List<Property> ontologyFiles = toList(((SecureGraphConcept) getRootConcept()).getVertex().getProperties("ontologyFile"));
    Collections.sort(ontologyFiles, new Comparator<Property>() {
        @Override
        public int compare(Property ontologyFile1, Property ontologyFile2) {
            Integer index1 = (Integer) ontologyFile1.getMetadata().get("index");
            Integer index2 = (Integer) ontologyFile2.getMetadata().get("index");
            return index1.compareTo(index2);
        }
    });
    return ontologyFiles;
}
/**
 * Returns all relationship definitions in the ontology, served from the
 * hour-long cache and rebuilt on miss by querying relationship-type vertices
 * (capped at 10000) and resolving each one's source/destination concept IRIs
 * and inverse-of IRIs.
 */
@Override
public Iterable<Relationship> getRelationshipLabels() {
    try {
        return relationshipLabelsCache.get("", new TimingCallable<List<Relationship>>("getRelationshipLabels") {
            @Override
            public List<Relationship> callWithTime() throws Exception {
                Iterable<Vertex> vertices = graph.query(getAuthorizations())
                        .has(CONCEPT_TYPE.getPropertyName(), TYPE_RELATIONSHIP)
                        .limit(10000)
                        .vertices();
                return toList(new ConvertingIterable<Vertex, Relationship>(vertices) {
                    @Override
                    protected Relationship convert(Vertex vertex) {
                        Vertex sourceVertex = single(vertex.getVertices(Direction.IN, LabelName.HAS_EDGE.toString(), getAuthorizations()));
                        String sourceConceptIRI = ONTOLOGY_TITLE.getPropertyValue(sourceVertex);
                        Vertex destVertex = single(vertex.getVertices(Direction.OUT, LabelName.HAS_EDGE.toString(), getAuthorizations()));
                        String destConceptIRI = ONTOLOGY_TITLE.getPropertyValue(destVertex);
                        final List<String> inverseOfIRIs = getRelationshipInverseOfIRIs(vertex);
                        return new SecureGraphRelationship(vertex, sourceConceptIRI, destConceptIRI, inverseOfIRIs);
                    }
                });
            }
        });
    } catch (ExecutionException e) {
        // preserve the cause (was dropped before), matching getProperties()
        throw new LumifyException("Could not get relationship labels", e);
    }
}
/**
 * Collects the ontology-title IRIs of all vertices reachable from
 * {@code vertex} over outgoing INVERSE_OF edges.
 */
private List<String> getRelationshipInverseOfIRIs(final Vertex vertex) {
    return IterableUtils.toList(new ConvertingIterable<Vertex, String>(vertex.getVertices(Direction.OUT, LabelName.INVERSE_OF.toString(), getAuthorizations())) {
        @Override
        protected String convert(Vertex inverseOfVertex) {
            return OntologyLumifyProperties.ONTOLOGY_TITLE.getPropertyValue(inverseOfVertex);
        }
    });
}
@Override
public String getDisplayNameForLabel(String relationshipIRI) {
    // Guard clause: blank or missing IRIs have no display name.
    if (relationshipIRI == null || relationshipIRI.trim().isEmpty()) {
        return null;
    }
    try {
        Relationship relationship = getRelationshipByIRI(relationshipIRI);
        return relationship == null ? null : relationship.getDisplayName();
    } catch (IllegalArgumentException iae) {
        // getRelationshipByIRI signals multiple matches with IllegalArgumentException.
        throw new IllegalStateException(String.format("Found multiple vertices for relationship label \"%s\"", relationshipIRI),
                iae);
    }
}
@Override
public Iterable<OntologyProperty> getProperties() {
    try {
        // Single-key cache; loader pulls every property-type vertex from the graph.
        return allPropertiesCache.get("", new TimingCallable<List<OntologyProperty>>("getProperties") {
            @Override
            public List<OntologyProperty> callWithTime() throws Exception {
                Iterable<Vertex> propertyVertices = graph.query(getAuthorizations())
                        .has(CONCEPT_TYPE.getPropertyName(), TYPE_PROPERTY)
                        .vertices();
                return toList(new ConvertingIterable<Vertex, OntologyProperty>(propertyVertices) {
                    @Override
                    protected OntologyProperty convert(Vertex propertyVertex) {
                        return new SecureGraphOntologyProperty(propertyVertex);
                    }
                });
            }
        });
    } catch (ExecutionException e) {
        throw new LumifyException("Could not get properties", e);
    }
}
@Override
public OntologyProperty getProperty(String propertyIRI) {
    try {
        // Expect at most one property-type vertex with this IRI.
        Iterable<Vertex> matches = graph.query(getAuthorizations())
                .has(CONCEPT_TYPE.getPropertyName(), TYPE_PROPERTY)
                .has(ONTOLOGY_TITLE.getPropertyName(), propertyIRI)
                .vertices();
        Vertex propertyVertex = singleOrDefault(matches, null);
        if (propertyVertex == null) {
            return null;
        }
        return new SecureGraphOntologyProperty(propertyVertex);
    } catch (IllegalArgumentException iae) {
        // singleOrDefault throws IllegalArgumentException on multiple matches.
        throw new IllegalStateException(String.format("Too many \"%s\" properties", propertyIRI), iae);
    }
}
@Override
public Relationship getRelationshipByIRI(String relationshipIRI) {
    Vertex relationshipVertex = singleOrDefault(graph.query(getAuthorizations())
            .has(CONCEPT_TYPE.getPropertyName(), TYPE_RELATIONSHIP)
            .has(ONTOLOGY_TITLE.getPropertyName(), relationshipIRI)
            .vertices(), null);
    if (relationshipVertex == null) {
        return null;
    }
    // Source concept is connected via an incoming edge, destination via an outgoing one.
    Object sourceId = single(relationshipVertex.getVertexIds(Direction.IN, getAuthorizations()));
    Object destId = single(relationshipVertex.getVertexIds(Direction.OUT, getAuthorizations()));
    List<String> inverseOfIRIs = getRelationshipInverseOfIRIs(relationshipVertex);
    return new SecureGraphRelationship(relationshipVertex, sourceId.toString(), destId.toString(), inverseOfIRIs);
}
@Override
public Iterable<Concept> getConcepts() {
// Concepts only (no properties / parent IRIs); see getConcepts(boolean).
return getConcepts(false);
}
@Override
public Iterable<Concept> getConceptsWithProperties() {
try {
// Cached under a single key; the loader eagerly materializes the full
// concept list, including each concept's properties and parent IRI.
return allConceptsWithPropertiesCache.get("", new TimingCallable<List<Concept>>("getConceptsWithProperties") {
@Override
public List<Concept> callWithTime() throws Exception {
return toList(getConcepts(true));
}
});
} catch (ExecutionException e) {
throw new LumifyException("could not get concepts with properties", e);
}
}
private Iterable<Concept> getConcepts(final boolean withProperties) {
    // Lazily converts every concept-type vertex into a Concept as it is iterated.
    Iterable<Vertex> conceptVertices = graph.query(getAuthorizations())
            .has(CONCEPT_TYPE.getPropertyName(), TYPE_CONCEPT)
            .vertices();
    return new ConvertingIterable<Vertex, Concept>(conceptVertices) {
        @Override
        protected Concept convert(Vertex conceptVertex) {
            if (!withProperties) {
                return new SecureGraphConcept(conceptVertex);
            }
            // Eagerly resolve the concept's direct properties and its parent IRI.
            List<OntologyProperty> conceptProperties = getPropertiesByVertexNoRecursion(conceptVertex);
            Vertex parentVertex = getParentConceptVertex(conceptVertex);
            String parentIRI = ONTOLOGY_TITLE.getPropertyValue(parentVertex);
            return new SecureGraphConcept(conceptVertex, parentIRI, conceptProperties);
        }
    };
}
// Looks up the ontology root concept by its well-known IRI; null if not yet loaded.
private Concept getRootConcept() {
return getConceptByIRI(SecureGraphOntologyRepository.ROOT_CONCEPT_IRI);
}
@Override
// Looks up the base "entity" concept by its well-known IRI; null if not yet loaded.
public Concept getEntityConcept() {
return getConceptByIRI(SecureGraphOntologyRepository.ENTITY_CONCEPT_IRI);
}
// Returns the direct children of the given concept. IS_A edges point from child
// to parent (see getOrCreateConcept), so children arrive on incoming edges.
private List<Concept> getChildConcepts(Concept concept) {
Vertex conceptVertex = ((SecureGraphConcept) concept).getVertex();
return toConcepts(conceptVertex.getVertices(Direction.IN, LabelName.IS_A.toString(), getAuthorizations()));
}
@Override
public Concept getParentConcept(final Concept concept) {
    // Follow the IS_A edge from this concept's vertex; null when it has no parent.
    Vertex conceptVertex = ((SecureGraphConcept) concept).getVertex();
    Vertex parentVertex = getParentConceptVertex(conceptVertex);
    return parentVertex == null ? null : new SecureGraphConcept(parentVertex);
}
// Materializes an iterable of vertices into a list of Concept wrappers.
private List<Concept> toConcepts(Iterable<Vertex> vertices) {
    List<Concept> result = new ArrayList<Concept>();
    for (Vertex conceptVertex : vertices) {
        result.add(new SecureGraphConcept(conceptVertex));
    }
    return result;
}
@Override
public Concept getConceptByIRI(String conceptIRI) {
// use the query API instead of the getVertex API to ensure we use the search index
// to ensure the ontology has been indexed.
// Returns null when no concept-type vertex carries this IRI.
Vertex conceptVertex = singleOrDefault(graph.query(getAuthorizations())
.has(CONCEPT_TYPE.getPropertyName(), TYPE_CONCEPT)
.has(ONTOLOGY_TITLE.getPropertyName(), conceptIRI)
.vertices(), null);
return conceptVertex != null ? new SecureGraphConcept(conceptVertex) : null;
}
// Properties attached directly to this vertex via HAS_PROPERTY edges only;
// does not walk up the concept hierarchy (hence "NoRecursion").
private List<OntologyProperty> getPropertiesByVertexNoRecursion(Vertex vertex) {
    Iterable<Vertex> propertyVertices =
            vertex.getVertices(Direction.OUT, LabelName.HAS_PROPERTY.toString(), getAuthorizations());
    return toList(new ConvertingIterable<Vertex, OntologyProperty>(propertyVertices) {
        @Override
        protected OntologyProperty convert(Vertex propertyVertex) {
            return new SecureGraphOntologyProperty(propertyVertex);
        }
    });
}
@Override
public List<Concept> getConceptAndChildrenByIRI(String conceptIRI) {
    // Null (not empty list) when the IRI is unknown, preserving the existing contract.
    Concept concept = getConceptByIRI(conceptIRI);
    if (concept == null) {
        return null;
    }
    // The concept itself first, followed by its direct children only.
    List<Concept> result = new ArrayList<Concept>();
    result.add(concept);
    result.addAll(getChildConcepts(concept));
    return result;
}
@Override
public List<Concept> getAllLeafNodesByConcept(Concept concept) {
    // NOTE(review): despite the name, this returns the given concept followed by
    // ALL of its descendants (depth-first), not only the leaves.
    List<Concept> result = Lists.newArrayList(concept);
    for (Concept childConcept : getChildConcepts(concept)) {
        result.addAll(getAllLeafNodesByConcept(childConcept));
    }
    return result;
}
@Override
// Returns the existing concept for this IRI, or creates a new concept vertex
// (with display name and, for the base entity concept, default formulas) and
// links it to its parent via an IS_A edge. The inDir parameter is unused here.
public Concept getOrCreateConcept(Concept parent, String conceptIRI, String displayName, File inDir) {
Concept concept = getConceptByIRI(conceptIRI);
if (concept != null) {
return concept;
}
// Deterministic vertex id derived from the IRI keeps creation idempotent.
VertexBuilder builder = graph.prepareVertex(ID_PREFIX_CONCEPT + conceptIRI, VISIBILITY.getVisibility());
CONCEPT_TYPE.setProperty(builder, TYPE_CONCEPT, VISIBILITY.getVisibility());
ONTOLOGY_TITLE.setProperty(builder, conceptIRI, VISIBILITY.getVisibility());
DISPLAY_NAME.setProperty(builder, displayName, VISIBILITY.getVisibility());
if (conceptIRI.equals(OntologyRepository.ENTITY_CONCEPT_IRI)) {
// The root entity concept gets default title/subtitle/time display formulas.
OntologyLumifyProperties.TITLE_FORMULA.setProperty(builder, "prop('http://lumify.io#title') || ''", VISIBILITY.getVisibility());
OntologyLumifyProperties.SUBTITLE_FORMULA.setProperty(builder, "prop('http://lumify.io#source') || ''", VISIBILITY.getVisibility());
OntologyLumifyProperties.TIME_FORMULA.setProperty(builder, "prop('http://lumify.io#publishedDate') || ''", VISIBILITY.getVisibility());
}
Vertex vertex = builder.save(getAuthorizations());
concept = new SecureGraphConcept(vertex);
if (parent != null) {
// IS_A edge points from the new child concept to its parent.
findOrAddEdge(((SecureGraphConcept) concept).getVertex(), ((SecureGraphConcept) parent).getVertex(), LabelName.IS_A.toString());
}
graph.flush();
return concept;
}
/**
 * Ensures an edge with the given label exists from {@code fromVertex} to
 * {@code toVertex}, creating it (with a deterministic id) if absent.
 */
protected void findOrAddEdge(Vertex fromVertex, final Vertex toVertex, String edgeLabel) {
    // Improvement: short-circuit on the first match instead of materializing
    // every adjacent vertex into a list before checking its size.
    for (Vertex adjacentVertex : fromVertex.getVertices(Direction.OUT, edgeLabel, getAuthorizations())) {
        if (adjacentVertex.getId().equals(toVertex.getId())) {
            return; // edge already present
        }
    }
    // Deterministic edge id keeps repeated calls idempotent.
    String edgeId = fromVertex.getId() + "-" + toVertex.getId();
    fromVertex.getGraph().addEdge(edgeId, fromVertex, toVertex, edgeLabel, VISIBILITY.getVisibility(), getAuthorizations());
}
@Override
// Ensures the named property type exists (creating it if needed) and attaches
// it to the given concept with a HAS_PROPERTY edge. Returns the property.
public OntologyProperty addPropertyTo(
Concept concept,
String propertyIRI,
String displayName,
PropertyType dataType,
JSONObject possibleValues,
Collection<TextIndexHint> textIndexHints,
boolean userVisible,
boolean searchable,
Boolean displayTime,
Double boost) {
checkNotNull(concept, "vertex was null");
OntologyProperty property = getOrCreatePropertyType(concept, propertyIRI, dataType, displayName, possibleValues, textIndexHints, userVisible, searchable, displayTime, boost);
checkNotNull(property, "Could not find property: " + propertyIRI);
findOrAddEdge(((SecureGraphConcept) concept).getVertex(), ((SecureGraphOntologyProperty) property).getVertex(), LabelName.HAS_PROPERTY.toString());
graph.flush();
return property;
}
@Override
protected void getOrCreateInverseOfRelationship(Relationship fromRelationship, Relationship inverseOfRelationship) {
    // Inverse-of is symmetric, so link the two relationship vertices both ways.
    Vertex fromVertex = ((SecureGraphRelationship) fromRelationship).getVertex();
    Vertex inverseVertex = ((SecureGraphRelationship) inverseOfRelationship).getVertex();
    findOrAddEdge(fromVertex, inverseVertex, LabelName.INVERSE_OF.toString());
    findOrAddEdge(inverseVertex, fromVertex, LabelName.INVERSE_OF.toString());
}
@Override
// Returns the existing relationship for this IRI, or creates a relationship-type
// vertex wired between the two concepts with HAS_EDGE edges.
public Relationship getOrCreateRelationshipType(Concept from, Concept to, String relationshipIRI, String displayName) {
Relationship relationship = getRelationshipByIRI(relationshipIRI);
if (relationship != null) {
return relationship;
}
// NOTE(review): the vertex id is derived from the from/to concept IRIs rather
// than the relationship IRI, so two different relationships between the same
// pair of concepts would collide on id -- confirm this is intended.
VertexBuilder builder = graph.prepareVertex(ID_PREFIX_RELATIONSHIP + from.getIRI() + "-" + to.getIRI(), VISIBILITY.getVisibility());
CONCEPT_TYPE.setProperty(builder, TYPE_RELATIONSHIP, VISIBILITY.getVisibility());
ONTOLOGY_TITLE.setProperty(builder, relationshipIRI, VISIBILITY.getVisibility());
DISPLAY_NAME.setProperty(builder, displayName, VISIBILITY.getVisibility());
Vertex relationshipVertex = builder.save(getAuthorizations());
// HAS_EDGE points source-concept -> relationship -> dest-concept.
findOrAddEdge(((SecureGraphConcept) from).getVertex(), relationshipVertex, LabelName.HAS_EDGE.toString());
findOrAddEdge(relationshipVertex, ((SecureGraphConcept) to).getVertex(), LabelName.HAS_EDGE.toString());
List<String> inverseOfIRIs = new ArrayList<String>(); // no inverse of because this relationship is new
graph.flush();
return new SecureGraphRelationship(relationshipVertex, from.getTitle(), to.getTitle(), inverseOfIRIs);
}
/**
 * Returns the existing property type for {@code propertyName}, or defines it in
 * the graph schema and creates its property-type vertex. Optional attributes
 * (display time, boost, display name, possible values) are only set when provided.
 */
private OntologyProperty getOrCreatePropertyType(
        final Concept concept,
        final String propertyName,
        final PropertyType dataType,
        final String displayName,
        JSONObject possibleValues,
        Collection<TextIndexHint> textIndexHints,
        boolean userVisible,
        boolean searchable,
        Boolean displayTime,
        Double boost) {
    OntologyProperty typeProperty = getProperty(propertyName);
    if (typeProperty == null) {
        // Register the property in the graph's schema first.
        DefinePropertyBuilder definePropertyBuilder = graph.defineProperty(propertyName);
        definePropertyBuilder.dataType(PropertyType.getTypeClass(dataType));
        if (dataType == PropertyType.STRING) {
            // Text index hints only apply to string-valued properties.
            definePropertyBuilder.textIndexHint(textIndexHints);
        }
        if (boost != null) {
            if (graph.isFieldBoostSupported()) {
                definePropertyBuilder.boost(boost);
            } else {
                // Fixed typo in log message ("not support by" -> "not supported by").
                LOGGER.warn("Field boosting is not supported by the graph");
            }
        }
        definePropertyBuilder.define();
        // Then create the ontology vertex describing the property.
        VertexBuilder builder = graph.prepareVertex(ID_PREFIX_PROPERTY + concept.getIRI() + "_" + propertyName, VISIBILITY.getVisibility());
        CONCEPT_TYPE.setProperty(builder, TYPE_PROPERTY, VISIBILITY.getVisibility());
        ONTOLOGY_TITLE.setProperty(builder, propertyName, VISIBILITY.getVisibility());
        DATA_TYPE.setProperty(builder, dataType.toString(), VISIBILITY.getVisibility());
        USER_VISIBLE.setProperty(builder, userVisible, VISIBILITY.getVisibility());
        SEARCHABLE.setProperty(builder, searchable, VISIBILITY.getVisibility());
        if (displayTime != null) {
            DISPLAY_TIME.setProperty(builder, displayTime, VISIBILITY.getVisibility());
        }
        if (boost != null) {
            BOOST.setProperty(builder, boost, VISIBILITY.getVisibility());
        }
        if (displayName != null && !displayName.trim().isEmpty()) {
            DISPLAY_NAME.setProperty(builder, displayName.trim(), VISIBILITY.getVisibility());
        }
        if (possibleValues != null) {
            POSSIBLE_VALUES.setProperty(builder, possibleValues, VISIBILITY.getVisibility());
        }
        typeProperty = new SecureGraphOntologyProperty(builder.save(getAuthorizations()));
        graph.flush();
    }
    return typeProperty;
}
// Follows the single outgoing IS_A edge to the parent concept vertex, or null
// if there is none. More than one parent is treated as a data error.
// NOTE(review): the error message reads TITLE while lookups elsewhere in this
// file use ONTOLOGY_TITLE -- confirm which property names a concept.
private Vertex getParentConceptVertex(Vertex conceptVertex) {
try {
return singleOrDefault(conceptVertex.getVertices(Direction.OUT, LabelName.IS_A.toString(), getAuthorizations()), null);
} catch (IllegalArgumentException iae) {
throw new IllegalStateException(String.format("Unexpected number of parents for concept %s",
TITLE.getPropertyValue(conceptVertex)), iae);
}
}
// Accessor for the authorizations used on every graph read/write in this class.
private Authorizations getAuthorizations() {
return authorizations;
}
} |
package org.jboss.msc.service;
import static java.lang.Thread.holdsLock;
import static org.jboss.msc.service.SecurityUtils.getCL;
import static org.jboss.msc.service.SecurityUtils.setTCCL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.jboss.msc.service.management.ServiceStatus;
/**
* The service controller implementation.
*
* @param <S> the service type
*
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
* @author <a href="mailto:flavia.rainone@jboss.com">Flavia Rainone</a>
* @author <a href="mailto:ropalka@redhat.com">Richard Opalka</a>
*/
final class ServiceControllerImpl<S> implements ServiceController<S>, Dependent {
private static final String ILLEGAL_CONTROLLER_STATE = "Illegal controller state";
// Bit flags identifying the dependent-notification task types; presumably
// combined into execFlags (which tracks scheduled/finished tasks) -- confirm.
private static final int DEPENDENCY_AVAILABLE_TASK = 1;
private static final int DEPENDENCY_UNAVAILABLE_TASK = 1 << 1;
private static final int DEPENDENCY_STARTED_TASK = 1 << 2;
private static final int DEPENDENCY_STOPPED_TASK = 1 << 3;
private static final int DEPENDENCY_FAILED_TASK = 1 << 4;
private static final int DEPENDENCY_RETRYING_TASK = 1 << 5;
/**
* The service container.
*/
private final ServiceContainerImpl container;
/**
* The service itself.
*/
private final Service<S> service;
/**
* The dependencies of this service.
*/
private final Dependency[] dependencies;
/**
* The injections of this service.
*/
private final ValueInjection<?>[] injections;
/**
* The out injections of this service.
*/
private final ValueInjection<?>[] outInjections;
/**
* The set of registered service listeners.
*/
private final IdentityHashSet<ServiceListener<? super S>> listeners;
/**
* Lifecycle listeners.
*/
private final IdentityHashSet<LifecycleListener> lifecycleListeners;
/**
* Container shutdown listener.
*/
private ContainerShutdownListener shutdownListener;
/**
* The set of registered stability monitors.
*/
private final IdentityHashSet<StabilityMonitor> monitors;
/**
* The primary registration of this service.
*/
private final ServiceRegistrationImpl primaryRegistration;
/**
* The alias registrations of this service.
*/
private final ServiceRegistrationImpl[] aliasRegistrations;
/**
* The parent of this service.
*/
private final ServiceControllerImpl<?> parent;
/**
* The children of this service (only valid during {@link State#UP}).
*/
private final IdentityHashSet<ServiceControllerImpl<?>> children;
/**
* The unavailable dependencies of this service.
*/
private final IdentityHashSet<ServiceName> unavailableDependencies;
/**
* The start exception.
*/
private StartException startException;
/**
* The controller mode.
*/
private ServiceController.Mode mode = ServiceController.Mode.NEVER;
/**
* The controller state.
*/
private Substate state = Substate.NEW;
/**
* Tracks which dependent tasks have completed its execution.
* First 16 bits track if dependent task have been scheduled.
* Second 16 bits track whether scheduled dependent task finished its execution.
*/
private int execFlags;
/**
* The number of registrations which place a demand-to-start on this
* instance. If this value is >0, propagate a demand up to all parent
* dependents. If this value is >0 and mode is ON_DEMAND, we should start.
*/
private int demandedByCount;
/**
* Count for dependencies that are trying to stop. If this count is greater than zero then
* dependents will be notified that a stop is necessary.
*/
private int stoppingDependencies;
/**
* The number of dependents that are currently running. The deployment will
* not execute the {@code stop()} method (and subsequently leave the
* {@link org.jboss.msc.service.ServiceController.State#STOPPING} state)
* until all running dependents (and listeners) are stopped.
*/
private int runningDependents;
/**
* Count for failure notification. It indicates how many services have
* failed to start and are not recovered so far. This count monitors
* failures that happen when starting this service, and dependency related
* failures as well. When incremented from 0 to 1, it is time to notify
* dependents and listeners that a failure occurred. When decremented from 1
* to 0, the dependents and listeners are notified that the affected
* services are retrying to start. Values larger than 1 are ignored to avoid
* multiple notifications.
*/
private int failCount;
/**
* Indicates whether dependencies have been demanded.
*/
private boolean dependenciesDemanded = false;
/**
* The number of asynchronous tasks that are currently running. This
* includes listeners, start/stop methods, outstanding asynchronous
* start/stops, and internal tasks.
*/
private int asyncTasks;
/**
* Tasks executed last on transition outside the lock.
*/
private final List<Runnable> listenerTransitionTasks = new ArrayList<Runnable>();
/**
* The service target for adding child services (can be {@code null} if none
* were added).
*/
private volatile ChildServiceTarget childTarget;
/**
* The system nanotime of the moment in which the last lifecycle change was
* initiated.
*/
@SuppressWarnings("VolatileLongOrDoubleField")
private volatile long lifecycleTime;
// Shared empty array, avoiding a fresh allocation per use.
private static final String[] NO_STRINGS = new String[0];
// Maximum number of dependencies a single service may declare (14 bits).
static final int MAX_DEPENDENCIES = (1 << 14) - 1;
/**
 * Creates a controller for the given service, wiring up its dependencies,
 * injections, registrations, monitors and listeners. The controller starts in
 * state NEW / mode NEVER; installation is completed separately via
 * startInstallation()/startConfiguration()/commitInstallation().
 */
ServiceControllerImpl(final ServiceContainerImpl container, final Service<S> service, final Dependency[] dependencies, final ValueInjection<?>[] injections, final ValueInjection<?>[] outInjections, final ServiceRegistrationImpl primaryRegistration, final ServiceRegistrationImpl[] aliasRegistrations, final Set<StabilityMonitor> monitors, final Set<? extends ServiceListener<? super S>> listeners, final Set<LifecycleListener> lifecycleListeners, final ServiceControllerImpl<?> parent) {
assert dependencies.length <= MAX_DEPENDENCIES;
this.container = container;
this.service = service;
this.dependencies = dependencies;
this.injections = injections;
this.outInjections = outInjections;
this.primaryRegistration = primaryRegistration;
this.aliasRegistrations = aliasRegistrations;
// Defensive copies: callers' sets are not retained.
this.listeners = new IdentityHashSet<ServiceListener<? super S>>(listeners);
this.lifecycleListeners = new IdentityHashSet<LifecycleListener>(lifecycleListeners);
this.monitors = new IdentityHashSet<StabilityMonitor>(monitors);
// We also need to register this controller with monitors explicitly.
// This allows inherited monitors to have registered all child controllers
// and later to remove them when inherited stability monitor is cleared.
for (final StabilityMonitor monitor : monitors) {
monitor.addControllerNoCallback(this);
}
this.parent = parent;
int depCount = dependencies.length;
// A parent counts as one extra implicit dependency that must be up.
stoppingDependencies = parent == null ? depCount : depCount + 1;
children = new IdentityHashSet<ServiceControllerImpl<?>>();
unavailableDependencies = new IdentityHashSet<ServiceName>();
}
// Returns the current substate. NOTE(review): the "Locked" suffix suggests
// callers must hold this controller's monitor -- confirm; no sync is done here.
Substate getSubstateLocked() {
return state;
}
/**
 * Set this instance into primary and alias registrations.
 * <p></p>
 * All notifications from registrations will be ignored until the
 * installation is {@link #commitInstallation(org.jboss.msc.service.ServiceController.Mode) committed}.
 */
void startInstallation() {
    installInstanceInto(primaryRegistration);
    for (final ServiceRegistrationImpl aliasRegistration : aliasRegistrations) {
        installInstanceInto(aliasRegistration);
    }
}

/**
 * Sets this controller as the instance of a single registration, under the
 * registration's write lock.
 */
private void installInstanceInto(final ServiceRegistrationImpl registration) {
    final Lockable lock = registration.getLock();
    synchronized (lock) {
        lock.acquireWrite();
        try {
            registration.setInstance(this);
        } finally {
            lock.releaseWrite();
        }
    }
}
/**
 * Start this service configuration connecting it to its parent and dependencies.
 * <p></p>
 * All notifications from dependencies and parents will be ignored until the
 * installation is {@link #commitInstallation(org.jboss.msc.service.ServiceController.Mode) committed}.
 */
void startConfiguration() {
    // Register as a dependent on each dependency, under that dependency's write lock.
    for (final Dependency dependency : dependencies) {
        final Lockable lock = dependency.getLock();
        synchronized (lock) {
            lock.acquireWrite();
            try {
                dependency.addDependent(this);
            } finally {
                lock.releaseWrite();
            }
        }
    }
    // A child service also registers itself with its parent controller.
    if (parent != null) {
        parent.addChild(this);
    }
}
/**
* Commit the service install, kicking off the mode set and listener execution.
*
* @param initialMode the initial service mode
*/
void commitInstallation(Mode initialMode) {
assert (state == Substate.NEW);
assert initialMode != null;
assert !holdsLock(this);
final List<Runnable> listenerAddedTasks = new ArrayList<Runnable>();
synchronized (this) {
if (container.isShutdown()) {
throw new IllegalStateException ("Container is down");
}
final boolean leavingRestState = isStableRestState();
getListenerTasks(ListenerNotification.LISTENER_ADDED, listenerAddedTasks);
internalSetMode(initialMode);
// placeholder async task for running listener added tasks
addAsyncTasks(listenerAddedTasks.size() + 1);
updateStabilityState(leavingRestState);
}
// LISTENER_ADDED callbacks run outside the controller lock.
for (Runnable listenerAddedTask : listenerAddedTasks) {
listenerAddedTask.run();
}
final List<Runnable> tasks;
synchronized (this) {
if (container.isShutdown()) {
throw new IllegalStateException ("Container is down");
}
final boolean leavingRestState = isStableRestState();
// subtract one to compensate for +1 above
decrementAsyncTasks();
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
// Transition tasks also execute outside the lock.
doExecute(tasks);
}
/**
* Roll back the service install.
*/
void rollbackInstallation() {
final Runnable removeTask;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
// Force the controller straight to CANCELLED/REMOVE and schedule removal.
mode = Mode.REMOVE;
state = Substate.CANCELLED;
removeTask = new RemoveTask();
incrementAsyncTasks();
updateStabilityState(leavingRestState);
}
// The removal task runs outside the controller lock.
removeTask.run();
}
/**
* Return {@code true} only if this service controller installation is committed.
*
* @return true if this service controller installation is committed
*/
boolean isInstallationCommitted() {
assert holdsLock(this);
// should not be NEW nor CANCELLED
// Relies on Substate's enum ordering: NEW and CANCELLED presumably precede
// all committed states -- confirm against the Substate declaration.
return state.compareTo(Substate.CANCELLED) > 0;
}
/**
* Controller notifications are ignored (we do not create new tasks on notification) if
* controller didn't finish its installation process.
*
* @return true if notification must be ignored, false otherwise
*/
private boolean ignoreNotification() {
assert holdsLock(this);
// Only pre-installation (NEW) controllers ignore notifications.
return state == Substate.NEW;
}
/**
 * Determine whether a stopped controller should start.
 *
 * @return {@code true} if so
 */
private boolean shouldStart() {
    assert holdsLock(this);
    // ACTIVE and PASSIVE services always want to start.
    if (mode == Mode.ACTIVE || mode == Mode.PASSIVE) {
        return true;
    }
    // ON_DEMAND and LAZY services start only while someone demands them.
    return demandedByCount > 0 && (mode == Mode.ON_DEMAND || mode == Mode.LAZY);
}
/**
 * Determine whether a running controller should stop.
 *
 * @return {@code true} if so
 */
private boolean shouldStop() {
    assert holdsLock(this);
    // REMOVE and NEVER services always want to stop.
    if (mode == Mode.REMOVE || mode == Mode.NEVER) {
        return true;
    }
    // ON_DEMAND services stop once nothing demands them any longer.
    return mode == Mode.ON_DEMAND && demandedByCount == 0;
}
/**
* Returns true if controller is in rest state and no async tasks are running, false otherwise.
* @return true if stable rest state, false otherwise
*/
private boolean isStableRestState() {
assert holdsLock(this);
return asyncTasks == 0 && state.isRestState();
}
// Propagates transitions between "stable rest" and "unstable" to the container
// and all registered stability monitors. Call under the controller lock, with
// leavingStableRestState computed via isStableRestState() BEFORE the state change.
private void updateStabilityState(final boolean leavingStableRestState) {
assert holdsLock(this);
final boolean enteringStableRestState = state.isRestState() && asyncTasks == 0;
if (leavingStableRestState) {
// Was stable before the change; if no longer stable, bump unstable counters.
if (!enteringStableRestState) {
container.incrementUnstableServices();
for (StabilityMonitor monitor : monitors) {
monitor.incrementUnstableServices();
}
}
} else {
// Was unstable before the change; if now stable, drop unstable counters.
if (enteringStableRestState) {
container.decrementUnstableServices();
for (StabilityMonitor monitor : monitors) {
monitor.decrementUnstableServices();
}
// A REMOVED controller reaching rest notifies the container shutdown listener once.
if (shutdownListener != null && state == Substate.REMOVED) {
shutdownListener.controllerDied();
shutdownListener = null;
}
}
}
}
/**
* Identify the transition to take. Call under lock.
*
* @return the transition or {@code null} if none is needed at this time
*/
private Transition getTransition() {
assert holdsLock(this);
// Pure function of the controller's current state, mode and counters;
// it performs no side effects -- transition() applies the result.
switch (state) {
case NEW: {
if (!container.isShutdown()) {
return Transition.NEW_to_DOWN;
}
break;
}
case DOWN: {
if (mode == ServiceController.Mode.REMOVE) {
return Transition.DOWN_to_REMOVING;
} else if (mode == ServiceController.Mode.NEVER) {
return Transition.DOWN_to_WONT_START;
} else if (shouldStart() && (mode != Mode.PASSIVE || stoppingDependencies == 0)) {
return Transition.DOWN_to_START_REQUESTED;
} else {
// mode is either LAZY or ON_DEMAND with demandedByCount == 0, or mode is PASSIVE and downDep > 0
return Transition.DOWN_to_WAITING;
}
}
case WAITING: {
if (((mode != Mode.ON_DEMAND && mode != Mode.LAZY) || demandedByCount > 0) &&
(mode != Mode.PASSIVE || stoppingDependencies == 0)) {
return Transition.WAITING_to_DOWN;
}
break;
}
case WONT_START: {
if (mode != ServiceController.Mode.NEVER){
return Transition.WONT_START_to_DOWN;
}
break;
}
case STOPPING: {
// Cannot finish stopping while child services still exist.
if (children.isEmpty()) {
return Transition.STOPPING_to_DOWN;
}
break;
}
case STOP_REQUESTED: {
if (shouldStart() && stoppingDependencies == 0) {
return Transition.STOP_REQUESTED_to_UP;
} else if (runningDependents == 0) {
return Transition.STOP_REQUESTED_to_STOPPING;
}
break;
}
case UP: {
if (shouldStop() || stoppingDependencies > 0) {
return Transition.UP_to_STOP_REQUESTED;
}
break;
}
case START_FAILED: {
if (children.isEmpty()) {
if (shouldStart() && stoppingDependencies == 0) {
// startException == null here means a retry was requested.
if (startException == null) {
return Transition.START_FAILED_to_STARTING;
}
} else if (runningDependents == 0) {
return Transition.START_FAILED_to_DOWN;
}
}
break;
}
case START_INITIATING: {
if (shouldStart() && runningDependents == 0 && stoppingDependencies == 0 && failCount == 0) {
return Transition.START_INITIATING_to_STARTING;
} else {
// it is possible runningDependents > 0 if this service is optional dependency to some other service
return Transition.START_INITIATING_to_START_REQUESTED;
}
}
case STARTING: {
if (startException == null) {
return Transition.STARTING_to_UP;
} else {
return Transition.STARTING_to_START_FAILED;
}
}
case START_REQUESTED: {
if (shouldStart()) {
if (mode == Mode.PASSIVE && stoppingDependencies > 0) {
return Transition.START_REQUESTED_to_DOWN;
}
if (!unavailableDependencies.isEmpty() || failCount > 0) {
return Transition.START_REQUESTED_to_PROBLEM;
}
if (stoppingDependencies == 0 && runningDependents == 0) {
// it is possible runningDependents > 0 if this service is optional dependency to some other service
return Transition.START_REQUESTED_to_START_INITIATING;
}
} else {
return Transition.START_REQUESTED_to_DOWN;
}
break;
}
case PROBLEM: {
if (! shouldStart() || (unavailableDependencies.isEmpty() && failCount == 0) || mode == Mode.PASSIVE) {
return Transition.PROBLEM_to_START_REQUESTED;
}
break;
}
case REMOVING: {
return Transition.REMOVING_to_REMOVED;
}
case CANCELLED: {
return Transition.CANCELLED_to_REMOVED;
}
case REMOVED:
{
// no possible actions
break;
}
}
// No transition applies in the current configuration.
return null;
}
/**
 * Moves any pending listener transition tasks into {@code tasks} and clears the
 * pending list. Call under the controller lock.
 *
 * @return {@code true} if any listener tasks were moved
 */
private boolean postTransitionTasks(final List<Runnable> tasks) {
    assert holdsLock(this);
    // Listener transition tasks are executed last for ongoing transition and outside of intrinsic lock
    if (!listenerTransitionTasks.isEmpty()) { // idiom: isEmpty() over size() > 0
        tasks.addAll(listenerTransitionTasks);
        listenerTransitionTasks.clear();
        return true;
    }
    return false;
}
/**
 * Run the locked portion of a transition. Call under the lock.
 * <p>
 * Repeatedly applies {@link #getTransition()} until either a transition
 * produces work (async tasks or listener tasks) or no further transition
 * applies, also (un)demanding dependencies as required by the current mode.
 *
 * @return returns list of async tasks to execute
 */
private List<Runnable> transition() {
    assert holdsLock(this);
    if (asyncTasks != 0) {
        // no movement possible
        // Idiom fix: typed empty list instead of the raw Collections.EMPTY_LIST.
        return Collections.<Runnable>emptyList();
    }
    final List<Runnable> tasks = new ArrayList<Runnable>();
    if (postTransitionTasks(tasks)) {
        // no movement possible
        return tasks;
    }
    // clean up tasks execution flags
    execFlags = 0;
    Transition transition;
    do {
        // first of all, check if dependencies & parent should be un/demanded
        switch (mode) {
            case NEVER:
            case REMOVE:
                if (dependenciesDemanded) {
                    tasks.add(new UndemandDependenciesTask());
                    dependenciesDemanded = false;
                }
                break;
            case LAZY: {
                if (state == Substate.UP) {
                    if (!dependenciesDemanded) {
                        tasks.add(new DemandDependenciesTask());
                        dependenciesDemanded = true;
                    }
                    break;
                }
                // fall thru!
            }
            case ON_DEMAND:
            case PASSIVE: {
                if (demandedByCount > 0 && !dependenciesDemanded) {
                    tasks.add(new DemandDependenciesTask());
                    dependenciesDemanded = true;
                } else if (demandedByCount == 0 && dependenciesDemanded) {
                    tasks.add(new UndemandDependenciesTask());
                    dependenciesDemanded = false;
                }
                break;
            }
            case ACTIVE: {
                if (!dependenciesDemanded) {
                    tasks.add(new DemandDependenciesTask());
                    dependenciesDemanded = true;
                }
                break;
            }
        }
        transition = getTransition();
        if (transition == null) {
            return tasks;
        }
        // Collect per-transition listener tasks; they run after the async tasks.
        getListenerTasks(transition, listenerTransitionTasks);
        switch (transition) {
            case NEW_to_DOWN: {
                getListenerTasks(LifecycleEvent.DOWN, listenerTransitionTasks);
                tasks.add(new DependencyAvailableTask());
                break;
            }
            case DOWN_to_WAITING: {
                tasks.add(new DependencyUnavailableTask());
                break;
            }
            case WAITING_to_DOWN: {
                tasks.add(new DependencyAvailableTask());
                break;
            }
            case DOWN_to_WONT_START: {
                tasks.add(new DependencyUnavailableTask());
                break;
            }
            case WONT_START_to_DOWN: {
                tasks.add(new DependencyAvailableTask());
                break;
            }
            case STOPPING_to_DOWN: {
                getListenerTasks(LifecycleEvent.DOWN, listenerTransitionTasks);
                tasks.add(new DependentStoppedTask());
                break;
            }
            case START_REQUESTED_to_DOWN: {
                break;
            }
            case START_REQUESTED_to_START_INITIATING: {
                lifecycleTime = System.nanoTime();
                tasks.add(new DependentStartedTask());
                break;
            }
            case START_REQUESTED_to_PROBLEM: {
                tasks.add(new DependencyUnavailableTask());
                container.addProblem(this);
                for (StabilityMonitor monitor : monitors) {
                    monitor.addProblem(this);
                }
                break;
            }
            case UP_to_STOP_REQUESTED: {
                lifecycleTime = System.nanoTime();
                if (mode == Mode.LAZY && demandedByCount == 0) {
                    assert dependenciesDemanded;
                    tasks.add(new UndemandDependenciesTask());
                    dependenciesDemanded = false;
                }
                tasks.add(new DependencyStoppedTask());
                break;
            }
            case STARTING_to_UP: {
                getListenerTasks(LifecycleEvent.UP, listenerTransitionTasks);
                tasks.add(new DependencyStartedTask());
                break;
            }
            case STARTING_to_START_FAILED: {
                getListenerTasks(LifecycleEvent.FAILED, listenerTransitionTasks);
                container.addFailed(this);
                for (StabilityMonitor monitor : monitors) {
                    monitor.addFailed(this);
                }
                // Invalidate the child target so no further children can be added.
                ChildServiceTarget childTarget = this.childTarget;
                if (childTarget != null) {
                    childTarget.valid = false;
                    this.childTarget = null;
                }
                tasks.add(new DependencyFailedTask());
                tasks.add(new RemoveChildrenTask());
                break;
            }
            case START_FAILED_to_STARTING: {
                container.removeFailed(this);
                for (StabilityMonitor monitor : monitors) {
                    monitor.removeFailed(this);
                }
                tasks.add(new DependencyRetryingTask());
                tasks.add(new StartTask());
                break;
            }
            case START_INITIATING_to_STARTING: {
                tasks.add(new StartTask());
                break;
            }
            case START_INITIATING_to_START_REQUESTED: {
                tasks.add(new DependentStoppedTask());
                break;
            }
            case START_FAILED_to_DOWN: {
                getListenerTasks(LifecycleEvent.DOWN, listenerTransitionTasks);
                container.removeFailed(this);
                for (StabilityMonitor monitor : monitors) {
                    monitor.removeFailed(this);
                }
                startException = null;
                tasks.add(new DependencyRetryingTask());
                tasks.add(new DependentStoppedTask());
                break;
            }
            case STOP_REQUESTED_to_UP: {
                tasks.add(new DependencyStartedTask());
                break;
            }
            case STOP_REQUESTED_to_STOPPING: {
                ChildServiceTarget childTarget = this.childTarget;
                if (childTarget != null) {
                    childTarget.valid = false;
                    this.childTarget = null;
                }
                tasks.add(new StopTask());
                tasks.add(new RemoveChildrenTask());
                break;
            }
            case DOWN_to_REMOVING: {
                tasks.add(new DependencyUnavailableTask());
                break;
            }
            case CANCELLED_to_REMOVED:
                getListenerTasks(LifecycleEvent.REMOVED, listenerTransitionTasks);
                for (StabilityMonitor monitor : monitors) {
                    monitor.removeControllerNoCallback(this);
                }
                listeners.clear();
                lifecycleListeners.clear();
                break;
            case REMOVING_to_REMOVED: {
                getListenerTasks(LifecycleEvent.REMOVED, listenerTransitionTasks);
                tasks.add(new RemoveTask());
                for (StabilityMonitor monitor : monitors) {
                    monitor.removeControllerNoCallback(this);
                }
                listeners.clear();
                lifecycleListeners.clear();
                break;
            }
            case DOWN_to_START_REQUESTED: {
                break;
            }
            case PROBLEM_to_START_REQUESTED: {
                tasks.add(new DependencyAvailableTask());
                container.removeProblem(this);
                for (StabilityMonitor monitor : monitors) {
                    monitor.removeProblem(this);
                }
                break;
            }
            default: {
                throw new IllegalStateException();
            }
        }
        state = transition.getAfter();
    } while (tasks.isEmpty() && listenerTransitionTasks.isEmpty());
    // Notify waiters that a transition occurred
    notifyAll();
    if (tasks.isEmpty()) {
        // No async tasks scheduled: flush listener transition tasks immediately.
        postTransitionTasks(tasks);
    }
    // Otherwise the listener transition tasks remain postponed until the
    // scheduled async tasks complete.
    return tasks;
}
// Queues one transition-notification task per registered service listener.
private void getListenerTasks(final Transition transition, final List<Runnable> tasks) {
    for (final ServiceListener<? super S> serviceListener : listeners) {
        tasks.add(new ListenerTask(serviceListener, transition));
    }
}
// Queues one notification task (e.g. LISTENER_ADDED) per registered service listener.
private void getListenerTasks(final ListenerNotification notification, final List<Runnable> tasks) {
    for (final ServiceListener<? super S> serviceListener : listeners) {
        tasks.add(new ListenerTask(serviceListener, notification));
    }
}
// Queues one lifecycle-event task per registered lifecycle listener.
private void getListenerTasks(final LifecycleEvent event, final List<Runnable> tasks) {
    for (final LifecycleListener lifecycleListener : lifecycleListeners) {
        tasks.add(new LifecycleListenerTask(lifecycleListener, event));
    }
}
/**
 * Submits the given tasks to the container's executor. Must be called without
 * holding this controller's monitor, since tasks may re-acquire it. If the
 * executor rejects a task (e.g. during shutdown), the task is run inline so
 * no transition work is ever silently dropped.
 */
void doExecute(final List<Runnable> tasks) {
    assert !holdsLock(this);
    if (tasks.isEmpty()) {
        return;
    }
    final Executor containerExecutor = container.getExecutor();
    for (final Runnable task : tasks) {
        try {
            containerExecutor.execute(task);
        } catch (final RejectedExecutionException rejected) {
            // Fall back to inline execution rather than losing the task.
            task.run();
        }
    }
}
// Unconditionally changes the controller mode (no expected-mode precondition).
public void setMode(final ServiceController.Mode newMode) {
internalSetMode(null, newMode);
}
/**
 * Changes the mode, optionally as a compare-and-set.
 *
 * @param expectedMode if non-null, the change only happens when the current mode matches
 * @param newMode the mode to set; must not be null
 * @return false only when expectedMode was given and did not match; true otherwise
 */
private boolean internalSetMode(final ServiceController.Mode expectedMode, final ServiceController.Mode newMode) {
assert !holdsLock(this);
if (newMode == null) {
throw new IllegalArgumentException("newMode is null");
}
// Only removal is permitted once the container starts shutting down.
if (newMode != Mode.REMOVE && container.isShutdown()) {
throw new IllegalArgumentException("Container is shutting down");
}
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
final Mode oldMode = mode;
if (expectedMode != null && expectedMode != oldMode) {
return false;
}
if (oldMode == newMode) {
// No-op change still reports success.
return true;
}
internalSetMode(newMode);
// Compute follow-up transition work while still holding the lock ...
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
// ... and execute it outside the lock.
doExecute(tasks);
return true;
}
// Writes the new mode. Caller must hold the controller lock. Once removal has
// begun (substate >= REMOVING), the mode can no longer be changed away from REMOVE.
private void internalSetMode(final Mode newMode) {
assert holdsLock(this);
final ServiceController.Mode oldMode = mode;
if (oldMode == Mode.REMOVE) {
if (state.compareTo(Substate.REMOVING) >= 0) {
throw new IllegalStateException("Service already removed");
}
}
mode = newMode;
}
@Override
// Dependency callback: a previously unavailable dependency became available.
// Only triggers a transition when the unavailable count drops to zero.
public void dependencyAvailable(final ServiceName dependencyName) {
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
assert unavailableDependencies.contains(dependencyName);
unavailableDependencies.remove(dependencyName);
if (ignoreNotification() || !unavailableDependencies.isEmpty()) return;
// we dropped it to 0
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
@Override
// Dependency callback: a dependency became unavailable. Only triggers a
// transition on the first unavailable dependency (count rising 0 -> 1).
public void dependencyUnavailable(final ServiceName dependencyName) {
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
assert !unavailableDependencies.contains(dependencyName);
unavailableDependencies.add(dependencyName);
if (ignoreNotification() || unavailableDependencies.size() != 1) return;
// we raised it to 1
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
/** {@inheritDoc} */
// This controller is its own dependent-side representative.
public ServiceControllerImpl<?> getDependentController() {
return this;
}
@Override
// Dependency callback: a stopping dependency finished starting. Only triggers
// a transition when the stopping-dependency count drops to zero.
public void dependencyUp() {
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
assert stoppingDependencies > 0;
--stoppingDependencies;
if (ignoreNotification() || stoppingDependencies != 0) return;
// we dropped it to 0
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
@Override
// Dependency callback: a dependency began stopping. Only triggers a transition
// on the first stopping dependency (count rising 0 -> 1).
public void dependencyDown() {
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
++stoppingDependencies;
if (ignoreNotification() || stoppingDependencies != 1) return;
// we raised it to 1
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
@Override
// Dependency callback: a (transitive) dependency failed to start. Only
// triggers a transition on the first failure (count rising 0 -> 1).
public void dependencyFailed() {
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
++failCount;
if (ignoreNotification() || failCount != 1) return;
// we raised it to 1
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
@Override
// Dependency callback: a previously failed dependency is being retried. Only
// triggers a transition when the failure count drops back to zero.
public void dependencySucceeded() {
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
assert failCount > 0;
--failCount;
if (ignoreNotification() || failCount != 0) return;
// we dropped it to 0
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
// Records that a single dependent of this service has started.
void dependentStarted() {
dependentsStarted(1);
}
// Bulk-records started dependents. A pure counter update: never triggers a
// transition (only dependentStopped can, when the count returns to zero).
void dependentsStarted(final int count) {
assert !holdsLock(this);
synchronized (this) {
runningDependents += count;
}
}
// Records that a dependent has stopped. Only triggers a transition when the
// running-dependent count drops to zero (this service may now stop).
void dependentStopped() {
assert !holdsLock(this);
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
assert runningDependents > 0;
--runningDependents;
if (ignoreNotification() || runningDependents != 0) return;
// we dropped it to 0
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
// Brings a newly registered dependent up to date with this service's current
// condition, replaying the notifications it would have received earlier.
void newDependent(final ServiceName dependencyName, final Dependent dependent) {
assert holdsLock(this);
if (isFailed()) dependent.dependencyFailed();
if (isUnavailable()) dependent.dependencyUnavailable(dependencyName);
if (isUp()) dependent.dependencyUp();
}
// True if dependents should currently consider this service failed, judged by
// substate plus the scheduling/completion flags of the relevant notification tasks.
private boolean isFailed() {
assert holdsLock(this);
if (state == Substate.START_FAILED && finishedTask(DEPENDENCY_FAILED_TASK)) return true;
if (state == Substate.STARTING && unfinishedTask(DEPENDENCY_RETRYING_TASK)) return true;
if (state == Substate.DOWN && unfinishedTask(DEPENDENCY_RETRYING_TASK)) return true;
return false;
}
// True if dependents should currently consider this service unavailable, judged
// by substate plus the scheduling/completion flags of the availability tasks.
private boolean isUnavailable() {
assert holdsLock(this);
if (state == Substate.WAITING && finishedTask(DEPENDENCY_UNAVAILABLE_TASK)) return true;
if (state == Substate.WONT_START && finishedTask(DEPENDENCY_UNAVAILABLE_TASK)) return true;
if (state == Substate.REMOVING && finishedTask(DEPENDENCY_UNAVAILABLE_TASK)) return true;
if (state == Substate.PROBLEM && finishedTask(DEPENDENCY_UNAVAILABLE_TASK)) return true;
if (state == Substate.DOWN && unfinishedTask(DEPENDENCY_AVAILABLE_TASK)) return true;
if (state == Substate.START_REQUESTED && unfinishedTask(DEPENDENCY_AVAILABLE_TASK)) return true;
// Terminal / pre-installation substates are always unavailable.
if (state == Substate.NEW || state == Substate.CANCELLED || state == Substate.REMOVED) return true;
return false;
}
// True if dependents should currently consider this service up, judged by
// substate plus the scheduling/completion flags of the started/stopped tasks.
private boolean isUp() {
assert holdsLock(this);
if (state == Substate.UP && finishedTask(DEPENDENCY_STARTED_TASK)) return true;
if (state == Substate.STOP_REQUESTED && unfinishedTask(DEPENDENCY_STOPPED_TASK)) return true;
return false;
}
/**
 * True when the task identified by {@code taskFlag} has been scheduled but has
 * not completed. The high 16 bits of {@code execFlags} record scheduling and
 * the low bits record completion.
 */
private boolean unfinishedTask(final int taskFlag) {
    assert holdsLock(this);
    final boolean scheduled = (execFlags & (taskFlag << 16)) != 0;
    final boolean notYetCompleted = (execFlags & taskFlag) == 0;
    return scheduled && notYetCompleted;
}
/**
 * True when the task identified by {@code taskFlag} is not pending: either it
 * was never scheduled (high-bit clear) or it has already completed (low-bit set).
 */
private boolean finishedTask(final int taskFlag) {
    assert holdsLock(this);
    final boolean neverScheduled = (execFlags & (taskFlag << 16)) == 0;
    final boolean completed = (execFlags & taskFlag) != 0;
    return neverScheduled || completed;
}
// Adds a single demand on this service.
void addDemand() {
addDemands(1);
}
// Adds demands on this service. A transition is propagated only when the count
// rises from zero AND the mode is demand-sensitive (ON_DEMAND, PASSIVE, or a
// LAZY service that is not already up).
void addDemands(final int demandedByCount) {
assert !holdsLock(this);
final boolean propagate;
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
final int cnt = this.demandedByCount;
this.demandedByCount += demandedByCount;
if (ignoreNotification()) return;
// LAZY services react to demand only until they have actually started.
boolean notStartedLazy = mode == Mode.LAZY && !(state.getState() == State.UP && state != Substate.STOP_REQUESTED);
propagate = cnt == 0 && (mode == Mode.ON_DEMAND || notStartedLazy || mode == Mode.PASSIVE);
if (!propagate) return;
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
// Removes one demand. Mirrors addDemands: a transition is propagated only when
// the count drops to zero and the mode is demand-sensitive.
void removeDemand() {
assert !holdsLock(this);
final boolean propagate;
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
assert demandedByCount > 0;
final int cnt = --demandedByCount;
if (ignoreNotification()) return;
// LAZY services react to demand only until they have actually started.
boolean notStartedLazy = mode == Mode.LAZY && !(state.getState() == State.UP && state != Substate.STOP_REQUESTED);
propagate = cnt == 0 && (mode == Mode.ON_DEMAND || notStartedLazy || mode == Mode.PASSIVE);
if (!propagate) return;
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
// Registers a child service. Children may only be added while this (parent)
// service is STARTING or UP; the new child is immediately brought up to date
// with the parent's condition via newDependent.
void addChild(final ServiceControllerImpl<?> child) {
assert !holdsLock(this);
synchronized (this) {
if (state.getState() != State.STARTING && state.getState() != State.UP) {
throw new IllegalStateException("Children cannot be added in state " + state.getState());
}
children.add(child);
newDependent(primaryRegistration.getName(), child);
}
}
// Deregisters a child service. Only triggers a transition when the last child
// is removed (the parent may be waiting on children to stop/remove).
void removeChild(final ServiceControllerImpl<?> child) {
assert !holdsLock(this);
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
if (!children.remove(child)) return; // may happen if child installation process failed
if (ignoreNotification() || children.size() > 0) return;
// we dropped it to 0
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
// Returns the live child set (not a copy); caller must hold the lock.
IdentityHashSet<ServiceControllerImpl<?>> getChildren() {
assert holdsLock(this);
return children;
}
// Returns the parent controller, or null for a top-level service.
public ServiceControllerImpl<?> getParent() {
return parent;
}
// Returns the container this controller is installed in.
public ServiceContainerImpl getServiceContainer() {
return container;
}
// Returns the coarse-grained public state derived from the current substate.
public synchronized ServiceController.State getState() {
    return state.getState();
}
// Returns the service value; delegates state checking to the service itself.
public S getValue() throws IllegalStateException {
return service.getValue();
}
// Blocks until the service is UP and returns its value; throws if the service
// failed to start or was removed. Relies on transition() calling notifyAll().
public S awaitValue() throws IllegalStateException, InterruptedException {
assert !holdsLock(this);
synchronized (this) {
for (;;) switch (state.getState()) {
case UP: {
return service.getValue();
}
case START_FAILED: {
throw new IllegalStateException("Failed to start service", startException);
}
case REMOVED: {
throw new IllegalStateException("Service was removed");
}
default: {
// Re-checks the state after every wakeup (handles spurious wakeups).
wait();
}
}
}
}
/**
 * Waits up to the given time for the service to come up and returns its value.
 *
 * Fix: the previous implementation called {@code wait(0, 0)} when the remaining
 * time was zero (e.g. {@code time == 0}), which waits indefinitely instead of
 * timing out; it also threw TimeoutException at the deadline without one final
 * state check. Remaining time is now validated before each wait, and the state
 * is always re-examined before the timeout is reported.
 *
 * @param time the maximum time to wait
 * @param unit the unit of {@code time}
 * @return the service value once the service is UP
 * @throws IllegalStateException if the service failed to start or was removed
 * @throws InterruptedException if interrupted while waiting
 * @throws TimeoutException if the service does not come up in time
 */
public S awaitValue(final long time, final TimeUnit unit) throws IllegalStateException, InterruptedException, TimeoutException {
    assert !holdsLock(this);
    long now;
    long then = System.nanoTime();
    long remaining = unit.toNanos(time);
    synchronized (this) {
        for (;;) {
            switch (state.getState()) {
                case UP: {
                    return service.getValue();
                }
                case START_FAILED: {
                    throw new IllegalStateException("Failed to start service", startException);
                }
                case REMOVED: {
                    throw new IllegalStateException("Service was removed");
                }
                default: {
                    if (remaining <= 0L) {
                        throw new TimeoutException("Operation timed out");
                    }
                    wait(remaining / 1000000L, (int) (remaining % 1000000L));
                }
            }
            now = System.nanoTime();
            remaining -= now - then;
            then = now;
        }
    }
}
// Returns the underlying service instance.
public Service<S> getService() throws IllegalStateException {
return service;
}
// Returns this service's primary (canonical) name.
public ServiceName getName() {
return primaryRegistration.getName();
}
private static final ServiceName[] NO_NAMES = new ServiceName[0];
/** Returns the alias names of this service (a fresh array; never null). */
public ServiceName[] getAliases() {
    // Snapshot the field once; it does not change after construction.
    final ServiceRegistrationImpl[] regs = this.aliasRegistrations;
    if (regs.length == 0) {
        return NO_NAMES;
    }
    final ServiceName[] result = new ServiceName[regs.length];
    for (int idx = 0; idx < result.length; idx++) {
        result[idx] = regs[idx].getName();
    }
    return result;
}
// Registers the container-shutdown listener (at most one). If the controller is
// already fully dead (REMOVED with no async tasks), registration is skipped so
// the listener is never left waiting on a controller that will not call back.
void addListener(final ContainerShutdownListener listener) {
assert !holdsLock(this);
synchronized (this) {
if (state == Substate.REMOVED && asyncTasks == 0) {
return; // controller is dead
}
if (shutdownListener != null) {
return; // register listener only once
}
shutdownListener = listener;
shutdownListener.controllerAlive();
}
}
// Registers a lifecycle listener. If the controller is already in a stable
// notifiable substate, the listener immediately receives the matching event so
// late registrants do not miss the current lifecycle position.
public void addListener(final LifecycleListener listener) {
if (listener == null) return;
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
if (lifecycleListeners.contains(listener)) return;
lifecycleListeners.add(listener);
if (state == Substate.UP) {
listenerTransitionTasks.add(new LifecycleListenerTask(listener, LifecycleEvent.UP));
} else if (state == Substate.DOWN) {
listenerTransitionTasks.add(new LifecycleListenerTask(listener, LifecycleEvent.DOWN));
} else if (state == Substate.START_FAILED) {
listenerTransitionTasks.add(new LifecycleListenerTask(listener, LifecycleEvent.FAILED));
} else if (state == Substate.REMOVED) {
listenerTransitionTasks.add(new LifecycleListenerTask(listener, LifecycleEvent.REMOVED));
}
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
// Registers a service listener (duplicates rejected). The LISTENER_ADDED
// notification runs synchronously on the calling thread; if the controller is
// already REMOVED, the removal transition is replayed to the listener as well.
public void addListener(final ServiceListener<? super S> listener) {
assert !holdsLock(this);
ListenerTask listenerAddedTask, listenerRemovedTask = null;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
if (listeners.contains(listener)) {
// Duplicates not allowed
throw new IllegalArgumentException("Listener " + listener + " already present on controller for " + primaryRegistration.getName());
}
listeners.add(listener);
listenerAddedTask = new ListenerTask(listener, ListenerNotification.LISTENER_ADDED);
incrementAsyncTasks();
if (state == Substate.REMOVED) {
listenerRemovedTask = new ListenerTask(listener, Transition.REMOVING_to_REMOVED);
incrementAsyncTasks();
}
updateStabilityState(leavingRestState);
}
// Run outside the lock; finally guarantees the removal replay even if
// the listener's listenerAdded callback throws.
try { listenerAddedTask.run(); } finally { if (listenerRemovedTask != null) listenerRemovedTask.run(); }
}
/** Deregisters a lifecycle listener; no-op if it was not registered. */
public synchronized void removeListener(final LifecycleListener listener) {
    lifecycleListeners.remove(listener);
}
/** Deregisters a service listener; no-op if it was not registered. */
public synchronized void removeListener(final ServiceListener<? super S> listener) {
    listeners.remove(listener);
}
@Override
// Returns the exception from the most recent failed start, or null.
public synchronized StartException getStartException() {
    return startException;
}
@Override
// Retries a failed start. Ignored unless the service is in START_FAILED with
// no outstanding dependency failures; clears the recorded start exception.
public void retry() {
assert !holdsLock(this);
final List<Runnable> tasks;
synchronized (this) {
final boolean leavingRestState = isStableRestState();
if (failCount > 0 || state.getState() != ServiceController.State.START_FAILED) return;
startException = null;
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
@Override
public Set<ServiceName> getImmediateUnavailableDependencies() {
    synchronized (this) {
        // Defensive copy so callers never observe concurrent mutation.
        return unavailableDependencies.clone();
    }
}
@Override
// Alias for the immediate unavailable dependencies (already a defensive copy).
public Collection<ServiceName> getUnavailableDependencies() {
return getImmediateUnavailableDependencies();
}
// Returns the current controller mode.
public synchronized ServiceController.Mode getMode() {
    return mode;
}
/**
 * Atomically changes the mode only if it currently equals {@code expectedMode}.
 *
 * @return true if the mode was changed (or already equal to newMode), false otherwise
 */
public boolean compareAndSetMode(final Mode expectedMode, final Mode newMode) {
    if (expectedMode == null) {
        throw new IllegalArgumentException("expectedMode is null");
    }
    return internalSetMode(expectedMode, newMode);
}
// Builds a consistent point-in-time snapshot of this controller for reporting.
// Everything is read under the controller lock so the fields agree with each other.
ServiceStatus getStatus() {
synchronized (this) {
final String parentName = parent == null ? null : parent.getName().getCanonicalName();
final String name = primaryRegistration.getName().getCanonicalName();
final ServiceRegistrationImpl[] aliasRegistrations = this.aliasRegistrations;
final int aliasLength = aliasRegistrations.length;
final String[] aliases;
if (aliasLength == 0) {
aliases = NO_STRINGS;
} else {
aliases = new String[aliasLength];
for (int i = 0; i < aliasLength; i++) {
aliases[i] = aliasRegistrations[i].getName().getCanonicalName();
}
}
String serviceClass = "<unknown>";
try {
final Service<? extends S> value = service;
if (value != null) {
serviceClass = value.getClass().getName();
}
// Best-effort: a misbehaving service must not break status reporting.
} catch (RuntimeException ignored) {
}
final Dependency[] dependencies = this.dependencies;
final int dependenciesLength = dependencies.length;
final String[] dependencyNames;
if (dependenciesLength == 0) {
dependencyNames = NO_STRINGS;
} else {
dependencyNames = new String[dependenciesLength];
for (int i = 0; i < dependenciesLength; i++) {
dependencyNames[i] = dependencies[i].getName().getCanonicalName();
}
}
StartException startException = this.startException;
return new ServiceStatus(
parentName,
name,
aliases,
serviceClass,
mode.name(),
state.getState().name(),
state.name(),
dependencyNames,
failCount != 0,
startException != null ? startException.toString() : null,
!unavailableDependencies.isEmpty()
);
}
}
// Produces a multi-line human-readable diagnostic dump of this controller:
// dependents (per registration), children, state, mode, failure info, counters,
// and dependency states. Intended for debugging/diagnostics output only.
String dumpServiceDetails() {
final StringBuilder b = new StringBuilder();
IdentityHashSet<Dependent> dependents;
// Clone the dependent set under the registration lock, then iterate unlocked.
synchronized (primaryRegistration) {
dependents = primaryRegistration.getDependents().clone();
}
b.append("Service Name: ").append(primaryRegistration.getName().toString()).append(" - Dependents: ").append(dependents.size()).append('\n');
for (Dependent dependent : dependents) {
final ServiceControllerImpl<?> controller = dependent.getDependentController();
synchronized (controller) {
b.append("    ").append(controller.getName().toString()).append(" - State: ").append(controller.state.getState()).append(" (Substate: ").append(controller.state).append(")\n");
}
}
b.append("Service Aliases: ").append(aliasRegistrations.length).append('\n');
for (ServiceRegistrationImpl registration : aliasRegistrations) {
synchronized (registration) {
dependents = registration.getDependents().clone();
}
b.append("    ").append(registration.getName().toString()).append(" - Dependents: ").append(dependents.size()).append('\n');
for (Dependent dependent : dependents) {
final ServiceControllerImpl<?> controller = dependent.getDependentController();
// NOTE(review): unlike the primary-registration loop above, this reads
// controller.state without synchronizing on the controller — possibly a
// stale/racy read; confirm whether that asymmetry is intentional.
b.append("        ").append(controller.getName().toString()).append(" - State: ").append(controller.state.getState()).append(" (Substate: ").append(controller.state).append(")\n");
}
}
synchronized (this) {
b.append("Children: ").append(children.size()).append('\n');
for (ServiceControllerImpl<?> child : children) {
synchronized (child) {
b.append("    ").append(child.getName().toString()).append(" - State: ").append(child.state.getState()).append(" (Substate: ").append(child.state).append(")\n");
}
}
final Substate state = this.state;
b.append("State: ").append(state.getState()).append(" (Substate: ").append(state).append(")\n");
if (parent != null) {
b.append("Parent Name: ").append(parent.getPrimaryRegistration().getName().toString()).append('\n');
}
b.append("Service Mode: ").append(mode).append('\n');
if (startException != null) {
b.append("Start Exception: ").append(startException.getClass().getName()).append(" (Message: ").append(startException.getMessage()).append(")\n");
}
String serviceObjectString = "(indeterminate)";
Object serviceObjectClass = "(indeterminate)";
try {
Object serviceObject = service;
if (serviceObject != null) {
serviceObjectClass = serviceObject.getClass();
serviceObjectString = serviceObject.toString();
}
// Best-effort: a throwing toString() must not break the dump.
} catch (Throwable ignored) {}
b.append("Service Object: ").append(serviceObjectString).append('\n');
b.append("Service Object Class: ").append(serviceObjectClass).append('\n');
b.append("Demanded By: ").append(demandedByCount).append('\n');
b.append("Stopping Dependencies: ").append(stoppingDependencies).append('\n');
b.append("Running Dependents: ").append(runningDependents).append('\n');
b.append("Fail Count: ").append(failCount).append('\n');
b.append("Unavailable Dep Count: ").append(unavailableDependencies.size()).append('\n');
for (ServiceName name : unavailableDependencies) {
b.append("    ").append(name.toString()).append('\n');
}
b.append("Dependencies Demanded: ").append(dependenciesDemanded ? "yes" : "no").append('\n');
b.append("Async Tasks: ").append(asyncTasks).append('\n');
if (lifecycleTime != 0L) {
// Convert the nanoTime-based stamp into a wall-clock timestamp for display.
final long elapsedNanos = System.nanoTime() - lifecycleTime;
final long now = System.currentTimeMillis();
final long stamp = now - (elapsedNanos / 1000000L);
b.append("Lifecycle Timestamp: ").append(lifecycleTime).append(String.format(" = %tb %<td %<tH:%<tM:%<tS.%<tL%n", stamp));
}
}
b.append("Dependencies: ").append(dependencies.length).append('\n');
for (int i = 0; i < dependencies.length; i ++) {
final Dependency dependency = dependencies[i];
final ServiceControllerImpl<?> controller = dependency.getDependencyController();
b.append("    ").append(dependency.getName().toString());
if (controller == null) {
b.append(" (missing)\n");
} else {
synchronized (controller) {
b.append(" - State: ").append(controller.state.getState()).append(" (Substate: ").append(controller.state).append(")\n");
}
}
}
return b.toString();
}
// Attaches a stability monitor. If this controller is not currently at rest,
// the monitor's unstable count is bumped and any failed/problem condition is
// replayed so the monitor's view matches the controller's present state.
void addMonitor(final StabilityMonitor stabilityMonitor) {
assert !holdsLock(this);
synchronized (this) {
if (monitors.add(stabilityMonitor) && !isStableRestState()) {
stabilityMonitor.incrementUnstableServices();
if (state == Substate.START_FAILED) {
stabilityMonitor.addFailed(this);
} else if (state == Substate.PROBLEM) {
stabilityMonitor.addProblem(this);
}
}
}
}
// Detaches a stability monitor, undoing any bookkeeping addMonitor performed
// (problem/failed membership and the unstable-services count).
void removeMonitor(final StabilityMonitor stabilityMonitor) {
assert !holdsLock(this);
synchronized (this) {
if (monitors.remove(stabilityMonitor) && !isStableRestState()) {
stabilityMonitor.removeProblem(this);
stabilityMonitor.removeFailed(this);
stabilityMonitor.decrementUnstableServices();
}
}
}
// Detaches a stability monitor without adjusting its counters; used when the
// monitor itself initiated the removal and already did its own bookkeeping.
void removeMonitorNoCallback(final StabilityMonitor stabilityMonitor) {
    assert !holdsLock(this);
    synchronized (this) {
        final boolean removed = monitors.remove(stabilityMonitor);
        assert removed || true; // removal is best-effort; absence is not an error
    }
}
// Returns the live monitor set (not a copy); caller must hold the lock.
Set<StabilityMonitor> getMonitors() {
assert holdsLock(this);
return monitors;
}
// Discriminates the two kinds of callbacks a ListenerTask can deliver.
private enum ListenerNotification {
/** Notify the listener that is has been added. */
LISTENER_ADDED,
/** Notifications related to the current state. */
TRANSITION,
}
// Returns the fine-grained internal substate.
public synchronized Substate getSubstate() {
    return state;
}
// Returns the registration for the service's primary name.
ServiceRegistrationImpl getPrimaryRegistration() {
return primaryRegistration;
}
// Returns the registrations for the service's alias names (shared array, not a copy).
ServiceRegistrationImpl[] getAliasRegistrations() {
return aliasRegistrations;
}
private static void inject(final ServiceName serviceName, final ValueInjection<?>[] injections) {
boolean ok = false;
int i = 0;
try {
for (; i < injections.length; i++) {
inject(serviceName, injections[i]);
}
ok = true;
} finally {
if (!ok) {
for (; i >= 0; i
uninject(serviceName, injections[i]);
}
}
}
}
// Performs a single injection; logs and rethrows any failure (precise rethrow
// keeps the method free of a checked `throws Throwable` clause).
private static <T> void inject(final ServiceName serviceName, final ValueInjection<T> injection) {
    try {
        final T value = injection.getSource().getValue();
        injection.getTarget().inject(value);
    } catch (final Throwable cause) {
        ServiceLogger.SERVICE.injectFailed(cause, serviceName);
        throw cause;
    }
}
// Uninjects all injections; best-effort, each element's failure is swallowed
// and logged by the single-element overload.
private static void uninject(final ServiceName serviceName, final ValueInjection<?>[] injections) {
    for (int idx = 0; idx < injections.length; idx++) {
        uninject(serviceName, injections[idx]);
    }
}
// Performs a single uninjection. Never propagates failures — uninjection runs
// during teardown where an exception would mask the original problem.
private static <T> void uninject(final ServiceName serviceName, final ValueInjection<T> injection) {
    try {
        injection.getTarget().uninject();
    } catch (final Throwable cause) {
        ServiceLogger.ROOT.uninjectFailed(cause, serviceName, injection);
    }
}
@Override
// Identifies the controller by service name plus identity hash for debugging.
public String toString() {
    return String.format("Controller for %s@%x", getName(), hashCode());
}
// Base class for all asynchronous controller work. Subclasses implement
// execute(); when it returns true the task is considered finished and the
// controller's async-task count is decremented, which may unlock the next
// transition. Returning false defers completion (asynchronous start/stop).
private abstract class ControllerTask implements Runnable {
private ControllerTask() {
// Tasks are only constructed while the transition lock is held.
assert holdsLock(ServiceControllerImpl.this);
}
public final void run() {
assert !holdsLock(ServiceControllerImpl.this);
try {
beforeExecute();
if (!execute()) return;
final List<Runnable> tasks;
synchronized (ServiceControllerImpl.this) {
final boolean leavingRestState = isStableRestState();
// Subtract one for this task
decrementAsyncTasks();
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
} catch (Throwable t) {
// Last-resort guard: task failures are logged, never propagated to the executor.
ServiceLogger.SERVICE.internalServiceError(t, primaryRegistration.getName());
} finally {
afterExecute();
}
}
// Hooks for subclasses that must acquire/release locks around execute().
void afterExecute() {}
void beforeExecute() {}
abstract boolean execute();
}
// Task that informs every *dependency* of this service (plus the parent, if
// any) of some event. Each dependency is informed under its own write lock.
private abstract class DependenciesControllerTask extends ControllerTask {
final boolean execute() {
Lockable lock;
for (Dependency dependency : dependencies) {
lock = dependency.getLock();
synchronized (lock) {
lock.acquireWrite();
try {
inform(dependency);
} finally {
lock.releaseWrite();
}
}
}
// The parent acts as an implicit dependency of a child service.
if (parent != null) inform(parent);
return true;
}
abstract void inform(Dependency dependency);
abstract void inform(ServiceControllerImpl parent);
}
// Task that informs every *dependent* of this service (via primary and alias
// registrations, plus child services) of some event. Records its scheduling in
// the high 16 bits of execFlags on construction and its completion in the low
// bits after all dependents were informed (see unfinishedTask/finishedTask).
private abstract class DependentsControllerTask extends ControllerTask {
private final int execFlag;
private DependentsControllerTask(final int execFlag) {
this.execFlag = execFlag;
// Mark "scheduled" (constructor runs under the controller lock).
execFlags |= (execFlag << 16);
}
final boolean execute() {
for (Dependent dependent : primaryRegistration.getDependents()) {
inform(dependent, primaryRegistration.getName());
}
for (ServiceRegistrationImpl aliasRegistration : aliasRegistrations) {
for (Dependent dependent : aliasRegistration.getDependents()) {
inform(dependent, aliasRegistration.getName());
}
}
synchronized (ServiceControllerImpl.this) {
for (Dependent child : children) {
inform(child, primaryRegistration.getName());
}
// Mark "completed".
execFlags |= execFlag;
}
return true;
}
// Subclasses override exactly one of these, depending on whether the
// notification carries the dependency name.
void inform(final Dependent dependent, final ServiceName serviceName) { inform(dependent); }
void inform(final Dependent dependent) {}
// Hold read locks on all registrations for the duration of execute() so the
// dependent sets cannot be structurally modified mid-notification.
void beforeExecute() {
Lockable lock = primaryRegistration.getLock();
synchronized (lock) { lock.acquireRead(); }
for (ServiceRegistrationImpl aliasRegistration : aliasRegistrations) {
lock = aliasRegistration.getLock();
synchronized (lock) { lock.acquireRead(); }
}
}
void afterExecute() {
Lockable lock = primaryRegistration.getLock();
synchronized (lock) { lock.releaseRead(); }
for (ServiceRegistrationImpl aliasRegistration : aliasRegistrations) {
lock = aliasRegistration.getLock();
synchronized (lock) { lock.releaseRead(); }
}
}
}
// Propagates a demand from this service to all of its dependencies.
private final class DemandDependenciesTask extends DependenciesControllerTask {
void inform(final Dependency dependency) { dependency.addDemand(); }
void inform(final ServiceControllerImpl parent) { parent.addDemand(); }
}
// Withdraws this service's demand from all of its dependencies.
private final class UndemandDependenciesTask extends DependenciesControllerTask {
void inform(final Dependency dependency) { dependency.removeDemand(); }
void inform(final ServiceControllerImpl parent) { parent.removeDemand(); }
}
// Tells each dependency that this service (their dependent) has started.
private final class DependentStartedTask extends DependenciesControllerTask {
void inform(final Dependency dependency) { dependency.dependentStarted(); }
void inform(final ServiceControllerImpl parent) { parent.dependentStarted(); }
}
// Tells each dependency that this service (their dependent) has stopped.
private final class DependentStoppedTask extends DependenciesControllerTask {
void inform(final Dependency dependency) { dependency.dependentStopped(); }
void inform(final ServiceControllerImpl parent) { parent.dependentStopped(); }
}
// Tells each dependent that this dependency has become available.
private final class DependencyAvailableTask extends DependentsControllerTask {
DependencyAvailableTask() { super(DEPENDENCY_AVAILABLE_TASK); }
void inform(final Dependent dependent, final ServiceName name) { dependent.dependencyAvailable(name); }
}
// Tells each dependent that this dependency has become unavailable.
private final class DependencyUnavailableTask extends DependentsControllerTask {
DependencyUnavailableTask() { super(DEPENDENCY_UNAVAILABLE_TASK); }
void inform(final Dependent dependent, final ServiceName name) { dependent.dependencyUnavailable(name); }
}
// Tells each dependent that this dependency is now up.
private final class DependencyStartedTask extends DependentsControllerTask {
private DependencyStartedTask() { super(DEPENDENCY_STARTED_TASK); }
void inform(final Dependent dependent) { dependent.dependencyUp(); }
}
// Tells each dependent that this dependency is going down.
private final class DependencyStoppedTask extends DependentsControllerTask {
private DependencyStoppedTask() { super(DEPENDENCY_STOPPED_TASK); }
void inform(final Dependent dependent) { dependent.dependencyDown(); }
}
// Tells each dependent that this dependency failed to start.
private final class DependencyFailedTask extends DependentsControllerTask {
private DependencyFailedTask() { super(DEPENDENCY_FAILED_TASK); }
void inform(final Dependent dependent) { dependent.dependencyFailed(); }
}
// Tells each dependent that this failed dependency is being retried.
private final class DependencyRetryingTask extends DependentsControllerTask {
private DependencyRetryingTask() { super(DEPENDENCY_RETRYING_TASK); }
void inform(final Dependent dependent) { dependent.dependencySucceeded(); }
}
// Performs injection and invokes Service.start(). Returns false when start was
// made asynchronous and neither complete() nor failed() has been called yet,
// deferring the controller transition until the context completes.
private final class StartTask extends ControllerTask {
boolean execute() {
final ServiceName serviceName = primaryRegistration.getName();
final StartContextImpl context = new StartContextImpl();
try {
inject(serviceName, injections);
startService(service, context);
boolean startFailed;
synchronized (context.lock) {
// No further context mutations are accepted after this point.
context.state |= AbstractContext.CLOSED;
if ((context.state & AbstractContext.ASYNC) != 0) {
// asynchronous() was called
if ((context.state & (AbstractContext.COMPLETED | AbstractContext.FAILED)) == 0) {
// Neither complete() nor failed() have been called yet
return false;
}
} else {
// asynchronous() was not called
if ((context.state & (AbstractContext.COMPLETED | AbstractContext.FAILED)) == 0) {
// Neither complete() nor failed() have been called yet
context.state |= AbstractContext.COMPLETED;
}
}
startFailed = (context.state & AbstractContext.FAILED) != 0;
}
if (startFailed) {
// Roll back the inbound injections; no out-injections were made.
uninject(serviceName, injections);
} else {
// Start succeeded: publish the service's outbound values.
inject(serviceName, outInjections);
}
return true;
} catch (StartException e) {
e.setServiceName(serviceName);
return startFailed(e, serviceName, context);
} catch (Throwable t) {
// Wrap any unexpected failure so callers always see a StartException.
StartException e = new StartException("Failed to start service", t, serviceName);
return startFailed(e, serviceName, context);
}
}
// Runs service.start() with the TCCL set to the service class's defining loader.
private void startService(Service<? extends S> service, StartContext context) throws StartException {
final ClassLoader contextClassLoader = setTCCL(getCL(service.getClass()));
try {
service.start(context);
} finally {
setTCCL(contextClassLoader);
}
}
// Records the failure (under both the context and controller locks), rolls
// back all injections, and reports the task as finished.
private boolean startFailed(final StartException e, final ServiceName serviceName, final StartContextImpl context) {
ServiceLogger.FAIL.startFailed(e, serviceName);
synchronized (context.lock) {
context.state |= (AbstractContext.FAILED | AbstractContext.CLOSED);
synchronized (ServiceControllerImpl.this) {
startException = e;
}
}
uninject(serviceName, injections);
uninject(serviceName, outInjections);
return true;
}
}
// Invokes Service.stop() and performs uninjection. Returns false when the stop
// was made asynchronous and complete() has not been called yet, deferring the
// controller transition until the stop context completes.
//
// Fix: replaced the non-short-circuit boolean `&` with the idiomatic `&&`
// (both operands are side-effect-free booleans, so behavior is unchanged).
private final class StopTask extends ControllerTask {
    boolean execute() {
        final ServiceName serviceName = primaryRegistration.getName();
        final StopContextImpl context = new StopContextImpl();
        boolean ok = false;
        try {
            stopService(service, context);
            ok = true;
        } catch (Throwable t) {
            // stop() should never fail; log and continue tearing the service down.
            ServiceLogger.FAIL.stopFailed(t, serviceName);
        } finally {
            synchronized (context.lock) {
                // No further context mutations are accepted after this point.
                context.state |= AbstractContext.CLOSED;
                if (ok && (context.state & AbstractContext.ASYNC) != 0) {
                    // No exception was thrown and asynchronous() was called.
                    if ((context.state & AbstractContext.COMPLETED) == 0) {
                        // complete() has not been called yet; defer the transition.
                        return false;
                    }
                } else {
                    // An exception was thrown or asynchronous() was not called.
                    if ((context.state & AbstractContext.COMPLETED) == 0) {
                        // complete() has not been called yet; mark completion ourselves.
                        context.state |= AbstractContext.COMPLETED;
                    }
                }
            }
            uninject(serviceName, injections);
            uninject(serviceName, outInjections);
        }
        return true;
    }
    // Runs service.stop() with the TCCL set to the service class's defining loader.
    private void stopService(Service<? extends S> service, StopContext context) {
        final ClassLoader contextClassLoader = setTCCL(getCL(service.getClass()));
        try {
            service.stop(context);
        } finally {
            setTCCL(contextClassLoader);
        }
    }
}
// Delivers one notification (a state transition or LISTENER_ADDED) to a single
// service listener, with the TCCL set to the listener class's defining loader.
// Listener failures are logged and never propagated.
private final class ListenerTask extends ControllerTask {
private final ListenerNotification notification;
private final ServiceListener<? super S> listener;
// Non-null only for TRANSITION notifications.
private final Transition transition;
ListenerTask(final ServiceListener<? super S> listener, final Transition transition) {
this.listener = listener;
this.transition = transition;
notification = ListenerNotification.TRANSITION;
}
ListenerTask(final ServiceListener<? super S> listener, final ListenerNotification notification) {
this.listener = listener;
transition = null;
this.notification = notification;
}
boolean execute() {
invokeListener(listener, notification, transition);
return true;
}
private void invokeListener(final ServiceListener<? super S> listener, final ListenerNotification notification, final Transition transition) {
// first set the TCCL
final ClassLoader contextClassLoader = setTCCL(getCL(listener.getClass()));
try {
switch (notification) {
case TRANSITION: {
listener.transition(ServiceControllerImpl.this, transition);
break;
}
case LISTENER_ADDED: {
listener.listenerAdded(ServiceControllerImpl.this);
break;
}
default: throw new IllegalStateException();
}
} catch (Throwable t) {
ServiceLogger.SERVICE.listenerFailed(t, listener);
} finally {
// reset TCCL
setTCCL(contextClassLoader);
}
}
}
/**
 * Task that dispatches a single {@link LifecycleEvent} to a {@link LifecycleListener}.
 */
private final class LifecycleListenerTask extends ControllerTask {

    private final LifecycleListener listener;
    private final LifecycleEvent event;

    LifecycleListenerTask(final LifecycleListener listener, final LifecycleEvent event) {
        this.event = event;
        this.listener = listener;
    }

    /** Runs the callback under the listener's class loader; failures are only logged. */
    boolean execute() {
        final ClassLoader previousLoader = setTCCL(getCL(listener.getClass()));
        try {
            listener.handleEvent(ServiceControllerImpl.this, event);
        } catch (Throwable cause) {
            ServiceLogger.SERVICE.listenerFailed(cause, listener);
        } finally {
            setTCCL(previousLoader);
        }
        return true;
    }
}
/**
 * Task that cascades removal to all child controllers by flipping each one
 * into {@code REMOVE} mode while holding this controller's monitor.
 */
private final class RemoveChildrenTask extends ControllerTask {
    boolean execute() {
        synchronized (ServiceControllerImpl.this) {
            for (final ServiceControllerImpl<?> child : children) {
                child.setMode(Mode.REMOVE);
            }
        }
        return true;
    }
}
/**
 * Final removal of this controller: detaches it from its primary registration,
 * every alias registration, every dependency, and finally from its parent.
 * Each detach is performed while holding that object's monitor and write lock,
 * one object at a time (locks are never nested).
 */
private final class RemoveTask extends ControllerTask {
boolean execute() {
assert getMode() == ServiceController.Mode.REMOVE;
assert getSubstate() == Substate.REMOVED || getSubstate() == Substate.CANCELLED;
Lockable lock = primaryRegistration.getLock();
synchronized (lock) {
lock.acquireWrite();
try {
// Drop this controller from the name it was primarily registered under.
primaryRegistration.clearInstance(ServiceControllerImpl.this);
} finally {
lock.releaseWrite();
}
}
for (ServiceRegistrationImpl aliasRegistration : aliasRegistrations) {
lock = aliasRegistration.getLock();
synchronized (lock) {
lock.acquireWrite();
try {
// Same detach for each alias name.
aliasRegistration.clearInstance(ServiceControllerImpl.this);
} finally {
lock.releaseWrite();
}
}
}
for (Dependency dependency : dependencies) {
lock = dependency.getLock();
synchronized (lock) {
lock.acquireWrite();
try {
// Stop tracking this controller as a dependent of each dependency.
dependency.removeDependent(ServiceControllerImpl.this);
} finally {
lock.releaseWrite();
}
}
}
if (parent != null) parent.removeChild(ServiceControllerImpl.this);
return true;
}
}
/**
 * Base lifecycle context handed to services during start/stop. Tracks a small
 * bit-flag state machine guarded by {@link #lock}:
 * ASYNC (asynchronous() called), CLOSED (the invoking task has returned),
 * COMPLETED (complete() called), FAILED (failure reported).
 */
private abstract class AbstractContext implements LifecycleContext {
static final int ASYNC = 1;          // asynchronous() was requested
static final int CLOSED = 1 << 1;    // invoking task has finished with this context
static final int COMPLETED = 1 << 2; // complete() has been called
static final int FAILED = 1 << 3;    // failure has been reported
int state;
final Object lock = new Object();
/** Hook run exactly once when the lifecycle action finishes successfully. */
abstract void onComplete();
/**
 * Atomically merges {@code newState} into {@code state}, rejecting illegal
 * transitions: ASYNC may be set only once and only before CLOSED;
 * COMPLETED/FAILED may be set only once, and after CLOSED only when ASYNC
 * was previously requested.
 *
 * @return the combined state after the update
 * @throws IllegalStateException on an illegal transition
 */
final int setState(final int newState) {
synchronized (lock) {
if (((newState & ASYNC) != 0 && ((state & ASYNC) != 0 || (state & CLOSED) != 0)) ||
((newState & (COMPLETED | FAILED)) != 0 && (state & (COMPLETED | FAILED)) != 0) ||
((newState & (COMPLETED | FAILED)) != 0 && (state & CLOSED) != 0 && (state & ASYNC) == 0)) {
throw new IllegalStateException(ILLEGAL_CONTROLLER_STATE);
}
return state |= newState;
}
}
/**
 * Accounts for this async task's completion on the owning controller and
 * kicks off any transition tasks that became runnable as a result.
 */
final void taskCompleted() {
final List<Runnable> tasks;
synchronized (ServiceControllerImpl.this) {
final boolean leavingRestState = isStableRestState();
// Subtract one for this task
decrementAsyncTasks();
tasks = transition();
addAsyncTasks(tasks.size());
updateStabilityState(leavingRestState);
}
doExecute(tasks);
}
/**
 * Marks the lifecycle action as complete. If the invoking task has already
 * returned (CLOSED set), the completion work runs here on the caller's thread.
 */
public final void complete() {
final int state = setState(COMPLETED);
if ((state & CLOSED) != 0) {
onComplete();
taskCompleted();
}
}
/** Switches this context into asynchronous mode; see setState() for legality rules. */
public final void asynchronous() {
setState(ASYNC);
}
/** Nanoseconds elapsed since the lifecycle action began. */
public final long getElapsedTime() {
return System.nanoTime() - lifecycleTime;
}
public final ServiceController<?> getController() {
return ServiceControllerImpl.this;
}
/**
 * Runs {@code command} via the controller's executor with the TCCL switched
 * to the command's own class loader for the duration of the run.
 */
public final void execute(final Runnable command) {
doExecute(Collections.<Runnable>singletonList(new Runnable() {
public void run() {
final ClassLoader contextClassLoader = setTCCL(getCL(command.getClass()));
try {
command.run();
} finally {
setTCCL(contextClassLoader);
}
}
}));
}
}
/**
 * Start-phase lifecycle context. Adds failure reporting and access to a child
 * service target on top of {@link AbstractContext}.
 */
private final class StartContextImpl extends AbstractContext implements StartContext {
/**
 * Records a start failure. A null reason is replaced with a generic
 * StartException so callers always get a cause. If the invoking task has
 * already returned (CLOSED set), injections are rolled back here.
 */
public void failed(StartException reason) throws IllegalStateException {
if (reason == null) {
reason = new StartException("Start failed, and additionally, a null cause was supplied");
}
final ServiceName serviceName = getName();
reason.setServiceName(serviceName);
ServiceLogger.FAIL.startFailed(reason, serviceName);
final int state;
synchronized (lock) {
state = setState(FAILED);
// Context lock is taken before the controller monitor; keep this order.
synchronized (ServiceControllerImpl.this) {
startException = reason;
}
}
if ((state & CLOSED) != 0) {
uninject(serviceName, injections);
taskCompleted();
}
}
/**
 * Lazily creates the target used to install child services of this service.
 *
 * @throws IllegalStateException once the context has completed or failed
 */
public ServiceTarget getChildTarget() {
synchronized (lock) {
if ((state & (COMPLETED | FAILED)) != 0) {
throw new IllegalStateException("Lifecycle context is no longer valid");
}
synchronized (ServiceControllerImpl.this) {
if (childTarget == null) {
childTarget = new ChildServiceTarget(container);
}
return childTarget;
}
}
}
/** On successful start, publish the service's outbound injections. */
void onComplete() {
final ServiceName serviceName = primaryRegistration.getName();
inject(serviceName, outInjections);
}
}
/**
 * Stop-phase lifecycle context: once the stop action completes, both inbound
 * and outbound injections are undone.
 */
private final class StopContextImpl extends AbstractContext implements StopContext {
    void onComplete() {
        final ServiceName name = primaryRegistration.getName();
        uninject(name, injections);
        uninject(name, outInjections);
    }
}
/**
 * Service target whose installs create children of this controller; it can be
 * invalidated once the owning lifecycle context is no longer usable.
 */
private final class ChildServiceTarget extends ServiceTargetImpl {

    /** Flipped to false when installs through this target must be rejected. */
    private volatile boolean valid = true;

    private ChildServiceTarget(final ServiceTargetImpl parentTarget) {
        super(parentTarget);
    }

    <T> ServiceController<T> install(final ServiceBuilderImpl<T> serviceBuilder) throws ServiceRegistryException {
        if (valid) {
            return super.install(serviceBuilder);
        }
        throw new IllegalStateException("Service target is no longer valid");
    }

    /** Forces every builder created here to parent its service on this controller. */
    protected <T> ServiceBuilder<T> createServiceBuilder(final ServiceName name, final Service<T> service, final ServiceControllerImpl<?> parent) throws IllegalArgumentException {
        return super.createServiceBuilder(name, service, ServiceControllerImpl.this);
    }

    @Override
    public ServiceTarget subTarget() {
        return new ChildServiceTarget(this);
    }
}
/** Adds {@code count} pending async tasks; caller must hold this controller's monitor. */
private void addAsyncTasks(final int count) {
    assert holdsLock(this);
    assert count >= 0;
    if (count > 0) {
        asyncTasks += count;
    }
}
/** Registers one more pending async task; caller must hold this controller's monitor. */
private void incrementAsyncTasks() {
    assert holdsLock(this);
    asyncTasks += 1;
}
private void decrementAsyncTasks() {
assert holdsLock(this);
assert asyncTasks > 0;
asyncTasks
}
} |
package fr.liglab.adele.cream.runtime.handler.behavior.manager;
import fr.liglab.adele.cream.annotations.internal.BehaviorReference;
import fr.liglab.adele.cream.annotations.internal.HandlerReference;
import fr.liglab.adele.cream.model.ContextEntity;
import fr.liglab.adele.cream.utils.SuccessorStrategy;
import org.apache.felix.ipojo.*;
import org.apache.felix.ipojo.annotations.*;
import org.apache.felix.ipojo.annotations.Handler;
import org.apache.felix.ipojo.architecture.HandlerDescription;
import org.apache.felix.ipojo.handlers.providedservice.ProvidedServiceHandler;
import org.apache.felix.ipojo.metadata.Element;
import org.apache.felix.ipojo.parser.FieldMetadata;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;
@Handler(name = HandlerReference.BEHAVIOR_MANAGER_HANDLER, namespace = HandlerReference.NAMESPACE)
public class BehaviorManagerHandler extends PrimitiveHandler implements InvocationHandler,ContextListener {
private final Map<String,RequiredBehavior> myRequiredBehaviorById = new ConcurrentHashMap<>();
private final Set<String> stateVariable = new ConcurrentSkipListSet<>();
private String myContextId;
private final Object lockValidity = new Object();
@Override
public void configure(Element metadata, Dictionary configuration) throws ConfigurationException {
myContextId = (String) configuration.get(ContextEntity.CONTEXT_ENTITY_ID);
Element[] behaviorElements = metadata.getElements(HandlerReference.BEHAVIOR_MANAGER_HANDLER,HandlerReference.NAMESPACE);
if (behaviorElements == null) {
throw new ConfigurationException("Behavior Elements are null ");
}
for (Element element:behaviorElements){
Element[] behaviorIndividualElements = element.getElements(BehaviorReference.BEHAVIOR_INDIVIDUAL_ELEMENT_NAME,"");
if (behaviorIndividualElements == null){
throw new ConfigurationException("Behavior Individual Element is null ");
}
for (Element individualBehaviorElement:behaviorIndividualElements) {
RequiredBehavior requiredBehavior = new RequiredBehavior(individualBehaviorElement.getAttribute(BehaviorReference.SPECIFICATION_ATTRIBUTE_NAME), individualBehaviorElement.getAttribute(BehaviorReference.IMPLEMEMENTATION_ATTRIBUTE_NAME), configuration);
myRequiredBehaviorById.put(individualBehaviorElement.getAttribute(BehaviorReference.ID_ATTRIBUTE_NAME),requiredBehavior);
String fieldAttribute = individualBehaviorElement.getAttribute(BehaviorReference.FIELD_ATTRIBUTE_NAME);
FieldMetadata fieldMetadata = null;
if (fieldAttribute != null){
fieldMetadata = getPojoMetadata().getField(fieldAttribute);
}
if (fieldMetadata != null){
getInstanceManager().register(fieldMetadata,requiredBehavior.getBehaviorInterceptor());
}
}
}
setValidity(false);
}
@Override
public synchronized void stop() {
for (Map.Entry<String,RequiredBehavior> entry : myRequiredBehaviorById.entrySet()){
entry.getValue().tryDispose();
}
stateVariable.clear();
}
@Override
public void start() {
//Do nothing
}
private ProvidedServiceHandler getProvideServiceHandler(){
return (ProvidedServiceHandler) getHandler(HandlerFactory.IPOJO_NAMESPACE + ":provides");
}
@Validate
public void validate(){
checkValidity();
}
@Invalidate
public void invalidate(){
stateVariable.clear();
}
/**
* Issue : behavior must be deactivate before instance become Invalid ...
*/
@Override
public void stateChanged(int newState) {
if (newState == ComponentInstance.VALID){
for (Map.Entry<String,RequiredBehavior> behavior: myRequiredBehaviorById.entrySet()){
behavior.getValue().tryStartBehavior();
}
}
if (newState == ComponentInstance.INVALID){
for (Map.Entry<String,RequiredBehavior> behavior: myRequiredBehaviorById.entrySet()){
behavior.getValue().tryInvalid();
}
}
}
@Bind(id = "behaviorF",specification = Factory.class,optional = false,proxy = false,aggregate = true,filter = "("+BehaviorReference.BEHAVIOR_FACTORY_TYPE_PROPERTY +"="+BehaviorReference.BEHAVIOR_FACTORY_TYPE_PROPERTY_VALUE +")")
public void bindBehaviorFactory(Factory behaviorFactory, Map prop){
for (Map.Entry<String,RequiredBehavior> entry : myRequiredBehaviorById.entrySet()){
if (match(entry.getValue(),prop)){
entry.getValue().setFactory(behaviorFactory);
entry.getValue().addManager();
entry.getValue().registerBehaviorListener(this);
checkValidity();
}
}
}
@Unbind(id = "behaviorF")
public void unbindBehaviorFactory(Factory behaviorFactory,Map prop){
for (Map.Entry<String,RequiredBehavior> entry : myRequiredBehaviorById.entrySet()){
if (match(entry.getValue(),prop)){
entry.getValue().unRef();
}
}
checkValidity();
}
private void checkValidity(){
for (Map.Entry<String,RequiredBehavior> entry : myRequiredBehaviorById.entrySet()){
if (entry.getValue().isOperationnal()){
continue;
}
synchronized (lockValidity) {
setValidity(false);
return;
}
}
synchronized (lockValidity) {
if (isOperationnal()) {
for (Map.Entry<String, RequiredBehavior> entry : myRequiredBehaviorById.entrySet()) {
entry.getValue().tryStartBehavior();
}
}
setValidity(true);
}
}
public boolean isOperationnal(){
for (org.apache.felix.ipojo.Handler handler : this.getInstanceManager().getRegisteredHandlers()){
HandlerFactory fact = (HandlerFactory) handler.getHandlerManager().getFactory();
if (fact.getHandlerName().equals(HandlerReference.NAMESPACE+":"+HandlerReference.BEHAVIOR_MANAGER_HANDLER)) {
continue;
}
if (!handler.getValidity()){
return false;
}
}
return true;
}
protected boolean match(RequiredBehavior req, Map prop) {
String spec = (String) prop.get(BehaviorReference.SPECIFICATION_ATTRIBUTE_NAME);
String impl = (String) prop.get(BehaviorReference.IMPLEMEMENTATION_ATTRIBUTE_NAME);
return req.getSpecName().equalsIgnoreCase(spec) && req.getImplName().equalsIgnoreCase(impl);
}
@Override
public HandlerDescription getDescription() {
return new BehaviorHandlerDescription();
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
for (Map.Entry<String,RequiredBehavior> behaviorEntry : myRequiredBehaviorById.entrySet()){
Object returnObj = behaviorEntry.getValue().invoke(proxy,method,args);
if (SuccessorStrategy.NO_FOUND_CODE.equals(returnObj)){
continue;
}
return returnObj;
}
return SuccessorStrategy.NO_FOUND_CODE;
}
/**
* Context Listener Implem
*/
@Override
public synchronized void update(ContextSource contextSource, String s, Object o) {
/** if (getInstanceManager().getState() <= InstanceManager.INVALID)
return;**/
ProvidedServiceHandler providerHandler = getProvideServiceHandler();
if (providerHandler == null){
return;
}
Hashtable<String,Object> property = new Hashtable<String,Object>();
property.put(s, o);
if (o == null){
stateVariable.remove(s);
getProvideServiceHandler().removeProperties(property);
return;
}
if (stateVariable.contains(s)){
providerHandler.reconfigure(property);
}else {
stateVariable.add(s);
providerHandler.addProperties(property);
}
}
public class BehaviorHandlerDescription extends HandlerDescription {
public BehaviorHandlerDescription(){
super(BehaviorManagerHandler.this);
}
@Override
public Element getHandlerInfo() {
Element element = super.getHandlerInfo();
for (Map.Entry<String,RequiredBehavior> entry : myRequiredBehaviorById.entrySet()){
entry.getValue().getBehaviorDescription(element);
}
return element;
}
}
} |
package org.jenkinsci.plugins.ghprb;
import hudson.BulkChange;
import hudson.XmlFile;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Items;
import hudson.model.Saveable;
import hudson.model.TaskListener;
import hudson.model.listeners.SaveableListener;
import hudson.util.Secret;
import jenkins.model.Jenkins;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.plugins.ghprb.extensions.GhprbCommentAppender;
import org.jenkinsci.plugins.ghprb.extensions.GhprbCommitStatusException;
import org.jenkinsci.plugins.ghprb.extensions.GhprbExtension;
import org.jenkinsci.plugins.ghprb.extensions.comments.GhprbBuildStatus;
import org.kohsuke.github.GHCommitState;
import org.kohsuke.github.GHEvent;
import org.kohsuke.github.GHEventPayload.IssueComment;
import org.kohsuke.github.GHEventPayload.PullRequest;
import org.kohsuke.github.GHHook;
import org.kohsuke.github.GHIssueState;
import org.kohsuke.github.GHPullRequest;
import org.kohsuke.github.GHRepository;
import org.kohsuke.github.GitHub;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
public class GhprbRepository implements Saveable{
private static final transient Logger logger = Logger.getLogger(GhprbRepository.class.getName());
private static final transient EnumSet<GHEvent> HOOK_EVENTS = EnumSet.of(GHEvent.ISSUE_COMMENT, GHEvent.PULL_REQUEST);
private final String reponame;
private final Map<Integer, GhprbPullRequest> pullRequests;
private transient GHRepository ghRepository;
private transient GhprbTrigger trigger;
public GhprbRepository(String reponame, GhprbTrigger trigger) {
this.pullRequests = new ConcurrentHashMap<Integer, GhprbPullRequest>();
this.reponame = reponame;
this.trigger = trigger;
}
public void addPullRequests(Map<Integer, GhprbPullRequest> prs) {
pullRequests.putAll(prs);
}
public void init() {
for (Entry<Integer, GhprbPullRequest> next : pullRequests.entrySet()) {
GhprbPullRequest pull = next.getValue();
pull.init(trigger.getHelper(), this);
}
}
private boolean initGhRepository() {
if (ghRepository != null) {
return true;
}
GitHub gitHub = null;
try {
gitHub = trigger.getGitHub();
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error while accessing rate limit API", ex);
return false;
}
if (gitHub == null) {
logger.log(Level.SEVERE, "No connection returned to GitHub server!");
return false;
}
try {
if (gitHub.getRateLimit().remaining == 0) {
logger.log(Level.INFO, "Exceeded rate limit for repository");
return false;
}
} catch (FileNotFoundException ex) {
logger.log(Level.INFO, "Rate limit API not found.");
return false;
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error while accessing rate limit API", ex);
return false;
}
try {
ghRepository = gitHub.getRepository(reponame);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Could not retrieve GitHub repository named " + reponame + " (Do you have properly set 'GitHub project' field in job configuration?)", ex);
return false;
}
return true;
}
// This method is used when not running with webhooks. We pull in the
// active PRs for the repo associated with the trigger and check the
// comments/hashes that have been added since the last time we checked.
public void check() {
if (!trigger.isActive()) {
logger.log(Level.FINE, "Project is not active, not checking github state");
return;
}
if (!initGhRepository()) {
return;
}
GHRepository repo = getGitHubRepo();
List<GHPullRequest> openPulls;
try {
openPulls = repo.getPullRequests(GHIssueState.OPEN);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Could not retrieve open pull requests.", ex);
return;
}
Set<Integer> closedPulls = new HashSet<Integer>(pullRequests.keySet());
for (GHPullRequest pr : openPulls) {
if (pr.getHead() == null) { // Not sure if we need this, but leaving it for now.
try {
pr = getActualPullRequest(pr.getNumber());
} catch (IOException ex) {
logger.log(Level.SEVERE, "Could not retrieve pr " + pr.getNumber(), ex);
return;
}
}
check(pr);
closedPulls.remove(pr.getNumber());
}
// remove closed pulls so we don't check them again
for (Integer id : closedPulls) {
pullRequests.remove(id);
}
try {
this.save();
} catch (IOException e) {
logger.log(Level.SEVERE, "Unable to save repository!", e);
}
}
private void check(GHPullRequest pr) {
int number = pr.getNumber();
try {
GhprbPullRequest pull = getPullRequest(null, number);
pull.check(pr, false);
} catch (IOException e) {
logger.log(Level.SEVERE, "Unable to check pr: " + number, e);
}
try {
this.save();
} catch (IOException e) {
logger.log(Level.SEVERE, "Unable to save repository!", e);
}
}
public void commentOnFailure(AbstractBuild<?, ?> build, TaskListener listener, GhprbCommitStatusException ex) {
PrintStream stream = null;
if (listener != null) {
stream = listener.getLogger();
}
GHCommitState state = ex.getState();
Exception baseException = ex.getException();
String newMessage;
if (baseException instanceof FileNotFoundException) {
newMessage = "FileNotFoundException means that the credentials Jenkins is using is probably wrong. Or the user account does not have write access to the repo.";
} else {
newMessage = "Could not update commit status of the Pull Request on GitHub.";
}
if (stream != null) {
stream.println(newMessage);
baseException.printStackTrace(stream);
} else {
logger.log(Level.INFO, newMessage, baseException);
}
if (GhprbTrigger.getDscp().getUseComments()) {
StringBuilder msg = new StringBuilder(ex.getMessage());
if (build != null) {
msg.append("\n");
GhprbTrigger trigger = Ghprb.extractTrigger(build);
for (GhprbExtension ext : Ghprb.matchesAll(trigger.getExtensions(), GhprbBuildStatus.class)) {
if (ext instanceof GhprbCommentAppender) {
msg.append(((GhprbCommentAppender) ext).postBuildComment(build, null));
}
}
}
if (GhprbTrigger.getDscp().getUseDetailedComments() || (state == GHCommitState.SUCCESS || state == GHCommitState.FAILURE)) {
logger.log(Level.INFO, "Trying to send comment.", baseException);
addComment(ex.getId(), msg.toString());
}
} else {
logger.log(Level.SEVERE, "Could not update commit status of the Pull Request on GitHub.");
}
}
public String getName() {
return reponame;
}
public void addComment(int id, String comment) {
addComment(id, comment, null, null);
}
public void addComment(int id, String comment, AbstractBuild<?, ?> build, TaskListener listener) {
if (comment.trim().isEmpty())
return;
if (build != null && listener != null) {
try {
comment = build.getEnvironment(listener).expand(comment);
} catch (Exception e) {
logger.log(Level.SEVERE, "Error", e);
}
}
try {
GHRepository repo = getGitHubRepo();
GHPullRequest pr = repo.getPullRequest(id);
pr.comment(comment);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Couldn't add comment to pull request #" + id + ": '" + comment + "'", ex);
}
}
public void closePullRequest(int id) {
try {
GHRepository repo = getGitHubRepo();
GHPullRequest pr = repo.getPullRequest(id);
pr.close();
} catch (IOException ex) {
logger.log(Level.SEVERE, "Couldn't close the pull request #" + id + ": '", ex);
}
}
private boolean hookExist() throws IOException {
GHRepository ghRepository = getGitHubRepo();
for (GHHook h : ghRepository.getHooks()) {
if (!"web".equals(h.getName())) {
continue;
}
if (!getHookUrl().equals(h.getConfig().get("url"))) {
continue;
}
return true;
}
return false;
}
public static Object createHookLock = new Object();
public boolean createHook() {
try {
// Avoid a race to update the hooks in a repo (we could end up with
// multiple hooks). Lock on before we try this
synchronized (createHookLock) {
if (hookExist()) {
return true;
}
Map<String, String> config = new HashMap<String, String>();
String secret = getSecret();
config.put("url", new URL(getHookUrl()).toExternalForm());
config.put("insecure_ssl", "1");
if (!StringUtils.isEmpty(secret)) {
config.put("secret",secret);
}
getGitHubRepo().createHook("web", config, HOOK_EVENTS, true);
return true;
}
} catch (IOException ex) {
logger.log(Level.SEVERE, "Couldn''t create web hook for repository {0}. Does the user (from global configuration) have admin rights to the repository?", reponame);
return false;
}
}
private String getSecret() {
Secret secret = trigger.getGitHubApiAuth().getSecret();
return secret == null ? "" : secret.getPlainText();
}
private String getHookUrl() {
String baseUrl = trigger.getGitHubApiAuth().getJenkinsUrl();
if (baseUrl == null) {
baseUrl = Jenkins.getInstance().getRootUrl();
}
return baseUrl + GhprbRootAction.URL + "/";
}
public GhprbPullRequest getPullRequest(int id) {
return pullRequests.get(id);
}
public GHPullRequest getActualPullRequest(int id) throws IOException {
return getGitHubRepo().getPullRequest(id);
}
void onIssueCommentHook(IssueComment issueComment) throws IOException {
if (!trigger.isActive()) {
logger.log(Level.FINE, "Not checking comments since build is disabled");
return;
}
int number = issueComment.getIssue().getNumber();
logger.log(Level.FINER, "Comment on issue #{0} from {1}: {2}",
new Object[] { number, issueComment.getComment().getUser(), issueComment.getComment().getBody() });
if (!"created".equals(issueComment.getAction())) {
return;
}
GhprbPullRequest pull = getPullRequest(null, number);
pull.check(issueComment.getComment());
try {
this.save();
} catch (IOException e) {
logger.log(Level.SEVERE, "Unable to save repository!", e);
}
}
private GhprbPullRequest getPullRequest(GHPullRequest ghpr, Integer number) throws IOException {
if (number == null) {
number = ghpr.getNumber();
}
synchronized (this) {
GhprbPullRequest pr = pullRequests.get(number);
if (pr == null) {
if (ghpr == null) {
GHRepository repo = getGitHubRepo();
ghpr = repo.getPullRequest(number);
}
pr = new GhprbPullRequest(ghpr, trigger.getHelper(), this);
pullRequests.put(number, pr);
}
return pr;
}
}
void onPullRequestHook(PullRequest pr) throws IOException {
GHPullRequest ghpr = pr.getPullRequest();
int number = pr.getNumber();
String action = pr.getAction();
boolean doSave = false;
if ("closed".equals(action)) {
pullRequests.remove(number);
doSave = true;
} else if (!trigger.isActive()) {
logger.log(Level.FINE, "Not processing Pull request since the build is disabled");
} else if ("edited".equals(action) || "opened".equals(action) || "reopened".equals(action) || "synchronize".equals(action)) {
GhprbPullRequest pull = getPullRequest(ghpr, number);
pull.check(ghpr, true);
doSave = true;
} else {
logger.log(Level.WARNING, "Unknown Pull Request hook action: {0}", action);
}
if (doSave) {
try {
this.save();
} catch (IOException e) {
logger.log(Level.SEVERE, "Unable to save repository!", e);
}
}
}
public GHRepository getGitHubRepo() {
if (ghRepository == null) {
initGhRepository();
}
return ghRepository;
}
public void load() throws IOException {
XmlFile xml = getConfigXml(trigger.getActualProject());
if(xml.exists()){
xml.unmarshal(this);
}
save();
}
public void save() throws IOException {
if(BulkChange.contains(this)) {
return;
}
XmlFile config = getConfigXml(trigger.getActualProject());
config.write(this);
SaveableListener.fireOnChange(this, config);
}
protected XmlFile getConfigXml(AbstractProject<?, ?> project) throws IOException {
try {
String escapedRepoName = URLEncoder.encode(reponame, "UTF8");
File file = new File(project.getBuildDir() + "/pullrequests", escapedRepoName);
return Items.getConfigFile(file);
} catch (UnsupportedEncodingException e) {
throw new IOException(e);
}
}
} |
package fr.liglab.adele.cream.runtime.handler.entity.utils;
import org.apache.felix.ipojo.ConfigurationException;
import org.apache.felix.ipojo.FieldInterceptor;
import org.apache.felix.ipojo.InstanceManager;
import org.apache.felix.ipojo.MethodInterceptor;
import org.apache.felix.ipojo.metadata.Element;
import org.apache.felix.ipojo.parser.FieldMetadata;
import org.apache.felix.ipojo.parser.MethodMetadata;
import org.apache.felix.ipojo.parser.PojoMetadata;
import java.lang.reflect.Member;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* Interceptor to handle state fields that are not handler by direct access, but using synchronization
* functions (push,pull,apply)
*/
public class SynchronisationInterceptor extends AbstractStateInterceptor implements StateInterceptor, FieldInterceptor, MethodInterceptor {
/**
* The invocation handlers used in every field access
*/
private final Map<String,BiConsumer<Object,Object>> applyFunctions = new HashMap<>();
private final Map<String,Function<Object,Object>> pullFunctions = new HashMap<>();
/**
* The periodic tasks associated to pull fields
*/
private final Map<String,AbstractContextHandler.PeriodicTask> pullTasks = new HashMap<>();
/**
* The associated entity handler in charge of keeping the context state
*/
private final AbstractContextHandler abstractContextHandler;
/**
* The mapping from methods handled by this interceptor to states of the context
*/
private final Map<String,String> methodToState = new HashMap<>();
/**
* @param abstractContextHandler
*/
public SynchronisationInterceptor(AbstractContextHandler abstractContextHandler) {
this.abstractContextHandler = abstractContextHandler;
}
@Override
public Object onGet(Object pojo, String fieldName, Object value) {
Function<Object,Object> pullFunction = pullFunctions.get(fieldName);
if (pullFunction != null) {
this.abstractContextHandler.update(fieldToState.get(fieldName),pullFunction.apply(pojo));
}
return abstractContextHandler.getStateValue(fieldToState.get(fieldName));
}
@Override
public void onSet(Object pojo, String fieldName, Object value) {
BiConsumer<Object,Object> applyFunction = applyFunctions.get(fieldName);
if (applyFunction != null && pojo != null && value != null) {
applyFunction.accept(pojo,value);
}
}
@Override
public void onExit(Object pojo, Member method, Object returnedValue) {
if (returnedValue != null){
this.abstractContextHandler.update(methodToState.get(method.getName()),returnedValue);
}
}
@SuppressWarnings("unchecked")
@Override
public void handleState(InstanceManager component, PojoMetadata componentMetadata, Element state) throws ConfigurationException {
super.handleState( component, componentMetadata,state);
String stateId = state.getAttribute("id");
String stateField = state.getAttribute("field");
/*
* If a pull function was defined, register a function that will be invoked on every field access
*/
String pull = state.getAttribute("pull");
if (pull != null) {
/*
* Verify the type of the pull field is a Supplier
*
* TODO iPOJO metadata doesn't handle generic types. We could use reflection on the component class to validate
* that the pull field is a Supplier of the type of the state field
*/
FieldMetadata pullFieldMetadata = componentMetadata.getField(pull);
String pullFieldType = FieldMetadata.getReflectionType(pullFieldMetadata.getFieldType());
if (! pullFieldType.equals(Supplier.class.getCanonicalName())) {
throw new ConfigurationException("Malformed Manifest : the specified pull field "+pull+" must be of type "+Supplier.class.getName());
}
/*
* The field access handler.
*
* Notice that the lambda expression used capture the value of some variables from configuration time to actual
* access time.
*/
pullFunctions.put(stateField, (Object pojo) -> {
Supplier<Object> supplier = (Supplier<Object>) component.getFieldValue(pull,pojo);
return supplier.get();
});
/*
* Register a task associated with the pull function to periodically update the state
*/
Long period = Long.valueOf(state.getAttribute("period"));
TimeUnit unit = TimeUnit.valueOf(state.getAttribute("unit"));
AbstractContextHandler.PeriodicTask pullTask = abstractContextHandler.schedule( (InstanceManager instance) -> {
Function<Object,Object> pullFunction = pullFunctions.get(stateField);
if (pullFunction != null) {
Object pulledValue = pullFunction.apply(instance.getPojoObject());
abstractContextHandler.update(stateId,pulledValue);
}
},period,unit);
pullTasks.put(stateField,pullTask);
}
/*
* If an apply function was defined, register a function that will be invoked on every field access
*/
String apply = state.getAttribute("apply");
if (apply != null) {
/*
* Verify the type of the apply field is a Consumer
*
* TODO iPOJO metadata doesn't handle generic types. We could use reflection on the component class to validate
* that the apply field is a Consumer of the type of the state field
*/
FieldMetadata applyFieldMetadata = componentMetadata.getField(apply);
String applyFieldType = FieldMetadata.getReflectionType(applyFieldMetadata.getFieldType());
if (! applyFieldType.equals(Consumer.class.getCanonicalName())) {
throw new ConfigurationException("Malformed Manifest : the specified apply field "+apply+" must be of type "+Consumer.class.getName());
}
/*
* The field access handler.
*
* Notice that the lambda expression used capture the value of some variables from configuration time to actual
* access time.
*/
applyFunctions.put(stateField, (Object pojo, Object value) -> {
Consumer<Object> supplier = (Consumer<Object>) component.getFieldValue(apply,pojo);
supplier.accept(value);
});
}
String push = state.getAttribute("push");
if (push != null) {
/*
* Verify the push method is correctly defined
*
* TODO we should verify the return type if the method matches the type of the state field
*/
MethodMetadata stateMethod = componentMetadata.getMethod(push);
if (stateMethod == null) {
throw new ConfigurationException("Malformed Manifest : the specified method doesn't exists "+stateMethod);
}
methodToState.put(push,stateId);
component.register(stateMethod,this);
}
}
@Override
public void validate() {
for (AbstractContextHandler.PeriodicTask pullTask : pullTasks.values()) {
pullTask.start();
}
}
@Override
public void invalidate() {
for (AbstractContextHandler.PeriodicTask pullTask : pullTasks.values()) {
pullTask.stop();
}
}
@Override
public void onEntry(Object pojo, Member method, Object[] args) {
//Do nothing
}
@Override
public void onError(Object pojo, Member method, Throwable throwable) {
//Do nothing
}
@Override
public void onFinally(Object pojo, Member method) {
//Do nothing
}
} |
package org.jenkinsci.plugins.p4.tasks;
import hudson.AbortException;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.model.Run;
import hudson.model.TaskListener;
import org.jenkinsci.plugins.p4.client.ClientHelper;
import org.jenkinsci.plugins.p4.client.ConnectionHelper;
import org.jenkinsci.plugins.p4.credentials.P4BaseCredentials;
import org.jenkinsci.plugins.p4.workspace.TemplateWorkspaceImpl;
import org.jenkinsci.plugins.p4.workspace.Workspace;
import java.io.IOException;
import java.io.Serializable;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
public abstract class AbstractTask implements Serializable {
private static final long serialVersionUID = 1L;
private static Logger logger = Logger.getLogger(AbstractTask.class.getName());
private P4BaseCredentials credential;
private TaskListener listener;
private String client;
private String syncID;
private String charset;
transient private Workspace workspace;
/**
* Implements the Perforce task to retry if necessary
*
* @param p4 Perforce connection helper
* @return Task object
* @throws Exception push up stack
*/
public abstract Object task(ClientHelper p4) throws Exception;
public P4BaseCredentials getCredential() {
return credential;
}
public void setCredential(String credential) {
this.credential = ConnectionHelper.findCredential(credential);
}
public TaskListener getListener() {
return listener;
}
public void setListener(TaskListener listener) {
this.listener = listener;
}
public void setWorkspace(Workspace workspace) throws AbortException {
this.workspace = workspace;
this.client = workspace.getFullName();
this.syncID = workspace.getSyncID();
this.charset = workspace.getCharset();
// setup the client workspace to use for the build.
ClientHelper p4 = getConnection();
// Check connection (might be on remote slave)
// if (!checkConnection(p4)) {
// String err = "P4: Abort, no server connection.\n";
// logger.severe(err);
// p4.log(err);
// throw new AbortException(err);
// Set the client
try {
p4.setClient(workspace);
p4.log("... client: " + getClient());
} catch (Exception e) {
String err = "P4: Unable to setup workspace: " + e;
logger.severe(err);
p4.log(err);
throw new AbortException(err);
} finally {
p4.disconnect();
}
}
public Workspace setEnvironment(Run<?, ?> run, Workspace wsType, FilePath buildWorkspace)
throws IOException, InterruptedException {
Workspace ws = (Workspace) wsType.clone();
// Set environment
EnvVars envVars = run.getEnvironment(listener);
envVars.put("NODE_NAME", envVars.get("NODE_NAME", "master"));
ws.setExpand(envVars);
// Set workspace root (check for parallel execution)
String root = buildWorkspace.getRemote();
if (root.contains("@")) {
root = root.replace("@", "%40");
}
// Template workspace for parallel execution
String name = buildWorkspace.getName();
if (name.contains("@")) {
String[] parts = name.split("@");
if (parts.length == 2) {
String exec = parts[1];
// Update Workspace before cloning
setWorkspace(ws);
// Template workspace to .cloneN (where N is the @ number)
try {
int n = Integer.parseInt(exec);
String charset = ws.getCharset();
boolean pin = ws.isPinHost();
String fullName = ws.getFullName();
String template = fullName + ".clone" + n;
ws = new TemplateWorkspaceImpl(charset, pin, fullName, template);
ws.setExpand(envVars);
} catch (NumberFormatException e) {
// do not template; e.g. 'script' keeps original name
}
}
}
ws.setRootPath(root);
if (ws.isPinHost()) {
String hostname = getHostName(buildWorkspace);
ws.setHostName(hostname);
} else {
ws.setHostName("");
}
return ws;
}
/**
* Remote execute to find hostname.
*
* @param buildWorkspace Jenkins remote path
* @return Hostname
*/
private static String getHostName(FilePath buildWorkspace) {
try {
HostnameTask task = new HostnameTask();
String hostname = buildWorkspace.act(task);
return hostname;
} catch (Exception e) {
return "";
}
}
public String getClient() {
return client;
}
public String getSyncID() {
return syncID;
}
protected Workspace getWorkspace() {
return workspace;
}
protected ClientHelper getConnection() {
ClientHelper p4 = new ClientHelper(credential, listener, client, charset);
return p4;
}
protected boolean checkConnection(ClientHelper p4) {
p4.log("\nP4 Task: establishing connection.");
// test server connection
if (!p4.isConnected()) {
p4.log("P4: Server connection error: " + getCredential().getP4port());
return false;
}
p4.log("... server: " + getCredential().getP4port());
// test node hostname
String host;
try {
host = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
host = "unknown";
}
p4.log("... node: " + host);
return true;
}
protected Object tryTask() throws AbortException {
ClientHelper p4 = getConnection();
if (p4.hasAborted()) {
String msg = "P4: Previous Task Aborted!";
logger.warning(msg);
p4.log(msg);
p4.disconnect();
throw new AbortException(msg);
}
// Check connection (might be on remote slave)
if (!checkConnection(p4)) {
String msg = "\nP4 Task: Unable to connect.";
logger.warning(msg);
p4.log(msg);
throw new AbortException(msg);
}
int trys = 0;
int attempt = p4.getRetry();
Exception last = null;
while (trys <= attempt) {
trys++;
try {
Object result = task(p4);
p4.disconnect();
if (p4.hasAborted()) {
String msg = "P4: Task Aborted!";
logger.warning(msg);
p4.log(msg);
throw new AbortException(msg);
}
return result;
} catch (AbortException e) {
p4.disconnect();
throw e;
} catch (Exception e) {
last = e;
String msg = "P4 Task: attempt: " + trys;
logger.severe(msg);
p4.log(msg);
// back off n^2 seconds, before retry
try {
TimeUnit.SECONDS.sleep(trys ^ 2);
} catch (InterruptedException e2) {
Thread.currentThread().interrupt();
}
}
}
p4.disconnect();
String msg = "P4 Task: failed: " + last;
last.printStackTrace();
logger.warning(msg);
p4.log(msg);
throw new AbortException(msg);
}
} |
package org.lanyonm.grabbag.web.form;
/**
 * Form-backing bean for creating or editing an ingredient.
 */
public class IngredientForm {

    private String name;
    private String description;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Returns the legacy debug format:
     * {@code web.form.IngredientForm: name="...", description="..."}.
     */
    @Override
    public String toString() {
        // Single concatenation expression; the compiler emits the StringBuilder.
        return "web.form.IngredientForm: name=\"" + this.name
                + "\", description=\"" + this.description + "\"";
    }
}
package org.hisp.dhis.notification;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.common.DeliveryChannel;
import org.hisp.dhis.common.RegexUtils;
import org.hisp.dhis.system.util.DateUtils;
import org.joda.time.DateTime;
import org.joda.time.Days;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Template formats supported:
* A{uid-of-attribute}
* V{name-of-variable}
*
* The implementing superclass defines how these are resolved.
*
* @param <T> the type of the root object used for resolving expression values.
*
* @author Halvdan Hoem Grelland
*/
public abstract class BaseNotificationMessageRenderer<T>
implements NotificationMessageRenderer<T>
{
private static Log log = LogFactory.getLog( BaseNotificationMessageRenderer.class );
protected static final int SMS_CHAR_LIMIT = 160 * 4; // Four concatenated SMS messages
protected static final int EMAIL_CHAR_LIMIT = 10000; // Somewhat arbitrarily chosen limits
protected static final int SUBJECT_CHAR_LIMIT = 100;
protected static final String CONFIDENTIAL_VALUE_REPLACEMENT = "[CONFIDENTIAL]"; // TODO reconsider this...
protected static final String MISSING_VALUE_REPLACEMENT = "[N/A]";
protected static final Pattern VAR_CONTENT_PATTERN = Pattern.compile( "^[A-Za-z0-9_]+$" );
protected static final Pattern ATTR_CONTENT_PATTERN = Pattern.compile( "[A-Za-z][A-Za-z0-9]{10}" );
private static final Pattern VARIABLE_PATTERN = Pattern.compile( "V\\{([a-z_]*)}" ); // Matches the variable in group 1
private static final Pattern ATTRIBUTE_PATTERN = Pattern.compile( "A\\{([A-Za-z][A-Za-z0-9]{10})}" ); // Matches the uid in group 1
private ImmutableMap<ExpressionType, BiFunction<T, Set<String>, Map<String, String>>> EXPRESSION_TO_VALUE_RESOLVERS =
new ImmutableMap.Builder<ExpressionType, BiFunction<T, Set<String>, Map<String, String>>>()
.put( ExpressionType.VARIABLE, (entity, keys) -> resolveVariableValues( keys, entity ) )
.put( ExpressionType.ATTRIBUTE, (entity, keys) -> resolveAttributeValues( keys, entity ) )
.build();
protected enum ExpressionType
{
VARIABLE( VARIABLE_PATTERN, VAR_CONTENT_PATTERN ),
ATTRIBUTE( ATTRIBUTE_PATTERN, ATTR_CONTENT_PATTERN );
private final Pattern expressionPattern;
private final Pattern contentPattern;
ExpressionType( Pattern expressionPattern, Pattern contentPattern )
{
this.expressionPattern = expressionPattern;
this.contentPattern = contentPattern;
}
public Pattern getExpressionPattern()
{
return expressionPattern;
}
boolean isValidExpressionContent( String content )
{
return content != null && contentPattern.matcher( content ).matches();
}
}
// Public methods
public NotificationMessage render( T entity, NotificationTemplate template )
{
final String collatedTemplate = template.getSubjectTemplate() + " " + template.getMessageTemplate();
Map<String, String> expressionToValueMap =
extractExpressionsByType( collatedTemplate ).entrySet().stream()
.map( entry -> resolveValuesFromExpressions( entry.getValue(), entry.getKey(), entity ) )
.collect( HashMap::new, Map::putAll, Map::putAll );
return createNotificationMessage( template, expressionToValueMap );
}
// Overrideable logic
protected boolean isValidExpressionContent( String content, ExpressionType type )
{
return content != null && getSupportedExpressionTypes().contains( type ) && type.isValidExpressionContent( content );
}
// Abstract methods
/**
* Gets a Map of variable resolver functions, keyed by the Template Variable.
* The returned Map should not be mutable.
*/
protected abstract Map<TemplateVariable, Function<T, String>> getVariableResolvers();
/**
* Resolves values for the given attribute UIDs.
*
* @param attributeKeys the Set of attribute UIDs.
* @param entity the entity to resolve the values from/for.
* @return a Map of values, keyed by the corresponding attribute UID.
*/
protected abstract Map<String, String> resolveAttributeValues( Set<String> attributeKeys, T entity );
/**
* Converts a string to the TemplateVariable supported by the implementor.
*/
protected abstract TemplateVariable fromVariableName( String name );
/**
* Returns the set of ExpressionTypes supported by the implementor.
*/
protected abstract Set<ExpressionType> getSupportedExpressionTypes();
// Internal methods
private Map<String, String> resolveValuesFromExpressions( Set<String> expressions, ExpressionType type, T entity )
{
return EXPRESSION_TO_VALUE_RESOLVERS.getOrDefault( type, (e, s) -> Maps.newHashMap() ).apply( entity, expressions );
}
private Map<String, String> resolveVariableValues( Set<String> variables, T entity )
{
return variables.stream()
.collect( Collectors.toMap(
v -> v,
v -> {
Function<T, String> resolver = getVariableResolvers().get( fromVariableName( v ) );
return resolver != null ? resolver.apply( entity ) : "";
}
) );
}
private NotificationMessage createNotificationMessage( NotificationTemplate template, Map<String, String> expressionToValueMap )
{
String subject = replaceExpressions( template.getSubjectTemplate(), expressionToValueMap );
subject = chop( subject, SUBJECT_CHAR_LIMIT );
boolean hasSmsRecipients = template.getDeliveryChannels().contains( DeliveryChannel.SMS );
String message = replaceExpressions( template.getMessageTemplate(), expressionToValueMap );
message = chop( message, hasSmsRecipients ? SMS_CHAR_LIMIT : EMAIL_CHAR_LIMIT );
return new NotificationMessage( subject, message );
}
private static String replaceExpressions( String input, Map<String, String> expressionToValueMap )
{
if ( StringUtils.isEmpty( input ) )
{
return StringUtils.EMPTY;
}
return replaceWithValues( input, expressionToValueMap );
}
private static String replaceWithValues( String input, final Map<String, String> expressionToValueMap )
{
return Stream.of( ExpressionType.values() )
.map( ExpressionType::getExpressionPattern )
.reduce(
input,
( str, pattern ) -> {
StringBuffer sb = new StringBuffer( str.length() );
Matcher matcher = pattern.matcher( str );
while ( matcher.find() )
{
String key = matcher.group( 1 );
String value = expressionToValueMap.getOrDefault( key, MISSING_VALUE_REPLACEMENT );
matcher.appendReplacement( sb, value );
}
return matcher.appendTail( sb ).toString();
},
( oldStr, newStr ) -> newStr
);
}
private Map<ExpressionType, Set<String>> extractExpressionsByType( String template )
{
return Arrays.stream( ExpressionType.values() )
.collect( Collectors.toMap( Function.identity(), type -> extractExpressions( template, type ) ) );
}
private Set<String> extractExpressions( String template, ExpressionType type )
{
Map<Boolean, Set<String>> groupedExpressions = RegexUtils.getMatches( type.getExpressionPattern(), template, 1 )
.stream().collect( Collectors.groupingBy( expr -> isValidExpressionContent( expr, type ), Collectors.toSet() ) );
warnOfUnrecognizedExpressions( groupedExpressions.get( false ), type );
Set<String> expressions = groupedExpressions.get( true );
if ( expressions == null || expressions.isEmpty() )
{
return Collections.emptySet();
}
return expressions;
}
private static void warnOfUnrecognizedExpressions( Set<String> unrecognized, ExpressionType type )
{
if ( unrecognized != null && !unrecognized.isEmpty() )
{
log.warn( String.format( "%d unrecognized expressions of type %s were ignored: %s",
unrecognized.size(), type.name(), Arrays.toString( unrecognized.toArray() ) ) );
}
}
// Supportive methods
protected static String chop( String input, int limit )
{
return input.substring( 0, Math.min( input.length(), limit ) );
}
protected static String daysUntil( Date date )
{
return String.valueOf( Days.daysBetween( DateTime.now(), new DateTime( date ) ).getDays() );
}
protected static String daysSince( Date date )
{
return String.valueOf( Days.daysBetween( new DateTime( date ) , DateTime.now() ).getDays() );
}
protected static String formatDate( Date date )
{
return DateUtils.getMediumDateString( date );
}
} |
package org.mac.sim.mediator;
import org.mac.sim.domain.SimulationConfigurations;
import org.mac.sim.exception.TurnoverException;
import org.mac.sim.simulation.LinearQueueSimulationBuilder;
import org.mac.sim.simulation.ProbabilityQueueSimulationBuilder;
import org.mac.sim.simulation.SimpleQueueSimulationBuilder;
import org.mac.sim.simulation.Simulation;
import org.springframework.stereotype.Component;
@Component
public class SimulationMediatorImpl implements SimulationMediator {
private static final String SIMPLE_TYPE_NAME = "simple";
private static final String PROBABILITY_TYPE_NAME = "probability";
private static final String LINEAR_TYPE_NAME = "linear";
public Simulation runSimulationByType(String type, SimulationConfigurations configurations)
throws TurnoverException {
Simulation sim = null;
if (SIMPLE_TYPE_NAME.equalsIgnoreCase(type)) {
// Simple simulations only rely on a single task and worker
// configuration
SimpleQueueSimulationBuilder qsb = new SimpleQueueSimulationBuilder(configurations);
qsb.addWorkers(configurations.getWorkerConfigurations().get(0).getTotal(),
configurations.getWorkerConfigurations().get(0).getRestTime());
try {
sim = qsb.build();
} catch (TurnoverException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else if (PROBABILITY_TYPE_NAME.equalsIgnoreCase(type)) {
ProbabilityQueueSimulationBuilder pqsb = new ProbabilityQueueSimulationBuilder(configurations);
try {
sim = pqsb.build();
} catch (TurnoverException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else if (LINEAR_TYPE_NAME.equalsIgnoreCase(type)) {
LinearQueueSimulationBuilder lqsb = new LinearQueueSimulationBuilder(configurations);
try {
sim = lqsb.build();
} catch (TurnoverException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return sim;
}
} |
package org.neo4j.graphmatching;
import java.util.regex.Pattern;
public abstract class CommonValueMatchers
{
private static final ValueMatcher HAS = new ValueMatcher()
{
public boolean matches( Object value )
{
return value != null;
}
};
/**
* Checks for equality between a value and {@code valueToMatch}. Returns
* {@code true} if the value isn't null and is equal to
* {@code valueToMatch}, else {@code false}.
*
* @param valueToMatch the expected value.
* @return whether or not a value is equal to {@code valueToMatch}.
*/
public static ValueMatcher exact( Object valueToMatch )
{
return new ExactMatcher( valueToMatch );
}
/**
* Checks for equality between a value and {@code valueToMatch}.
* If the value is an array each item in the array is matched against
* {@code valueToMatch} and if any of those matches it's considered
* a match.
*
* @param valueToMatch the expected value.
* @return whether or not a value is equal to {@code valueToMatch}.
*/
public static ValueMatcher exactAny( Object valueToMatch )
{
return new ExactAnyMatcher( valueToMatch );
}
public static ValueMatcher has()
{
return HAS;
}
public static ValueMatcher regex( Pattern pattern )
{
return new RegexMatcher( pattern );
}
private static class ExactMatcher implements ValueMatcher
{
private final Object valueToMatch;
public ExactMatcher( Object valueToMatch )
{
this.valueToMatch = valueToMatch;
}
public boolean matches( Object value )
{
return value != null && this.valueToMatch.equals( value );
}
}
private static class ExactAnyMatcher implements ValueMatcher
{
private final Object valueToMatch;
public ExactAnyMatcher( Object valueToMatch )
{
this.valueToMatch = valueToMatch;
}
public boolean matches( Object value )
{
if ( value != null )
{
if ( value.getClass().isArray() )
{
for ( Object item : ArrayPropertyUtil.propertyValueToCollection( value ) )
{
if ( item != null && item.equals( valueToMatch ) )
{
return true;
}
}
}
else if ( value.equals( valueToMatch ) )
{
return true;
}
}
return false;
}
}
private static class RegexMatcher implements ValueMatcher
{
private final Pattern pattern;
public RegexMatcher( Pattern pattern )
{
this.pattern = pattern;
}
public boolean matches( Object value )
{
return value != null && pattern.matcher( value.toString() ).matches();
}
}
} |
package org.openforis.ceo.postgres;
import static javax.servlet.http.HttpServletResponse.SC_NO_CONTENT;
import static org.openforis.ceo.utils.DatabaseUtils.connect;
import static org.openforis.ceo.utils.JsonUtils.expandResourcePath;
import static org.openforis.ceo.utils.JsonUtils.parseJson;
import static org.openforis.ceo.utils.PartUtils.partToString;
import static org.openforis.ceo.utils.PartUtils.writeFilePartBase64;
import static org.openforis.ceo.utils.ProjectUtils.padBounds;
import static org.openforis.ceo.utils.ProjectUtils.reprojectBounds;
import static org.openforis.ceo.utils.ProjectUtils.createGriddedPointsInBounds;
import static org.openforis.ceo.utils.ProjectUtils.createGriddedSampleSet;
import static org.openforis.ceo.utils.ProjectUtils.createRandomPointsInBounds;
import static org.openforis.ceo.utils.ProjectUtils.createRandomSampleSet;
import static org.openforis.ceo.utils.ProjectUtils.outputAggregateCsv;
import static org.openforis.ceo.utils.ProjectUtils.outputRawCsv;
import static org.openforis.ceo.utils.ProjectUtils.getOrEmptyString;
import static org.openforis.ceo.utils.ProjectUtils.getOrFalse;
import static org.openforis.ceo.utils.ProjectUtils.getOrZero;
import static org.openforis.ceo.utils.ProjectUtils.getValueDistribution;
import static org.openforis.ceo.utils.ProjectUtils.makeGeoJsonPoint;
import static org.openforis.ceo.utils.ProjectUtils.makeGeoJsonPolygon;
import static org.openforis.ceo.utils.ProjectUtils.getSampleValueTranslations;
import static org.openforis.ceo.utils.ProjectUtils.deleteShapeFileDirectories;
import static org.openforis.ceo.utils.ProjectUtils.runBashScriptForProject;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import java.io.StringWriter;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.PreparedStatement;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.UUID;
import javax.servlet.http.HttpServletResponse;
import org.openforis.ceo.db_api.Projects;
import spark.Request;
import spark.Response;
public class PostgresProjects implements Projects {
// Builds the JSON representation of a single project row.
// NOTE: property insertion order is preserved by Gson's JsonObject and is part
// of the serialized output, so the addProperty sequence below must not change.
// On SQLException the partially-built object is returned (error printed only).
private static JsonObject buildProjectJson(ResultSet rs) {
    var newProject = new JsonObject();
    try {
        newProject.addProperty("id", rs.getInt("project_id"));
        newProject.addProperty("institution", rs.getInt("institution_id"));
        newProject.addProperty("availability", rs.getString("availability"));
        newProject.addProperty("name", rs.getString("name"));
        newProject.addProperty("description", rs.getString("description"));
        newProject.addProperty("privacyLevel", rs.getString("privacy_level"));
        newProject.addProperty("boundary", rs.getString("boundary"));
        newProject.addProperty("baseMapSource", rs.getString("base_map_source"));
        newProject.addProperty("plotDistribution", rs.getString("plot_distribution"));
        newProject.addProperty("numPlots", rs.getInt("num_plots"));
        newProject.addProperty("plotSpacing", rs.getDouble("plot_spacing"));
        newProject.addProperty("plotShape", rs.getString("plot_shape"));
        newProject.addProperty("plotSize", rs.getDouble("plot_size"));
        // "archived" is derived from availability rather than stored directly.
        newProject.addProperty("archived", rs.getString("availability").equals("archived"));
        newProject.addProperty("sampleDistribution", rs.getString("sample_distribution"));
        newProject.addProperty("samplesPerPlot", rs.getInt("samples_per_plot"));
        newProject.addProperty("sampleResolution", rs.getDouble("sample_resolution"));
        newProject.addProperty("classification_times", "");
        newProject.addProperty("editable", rs.getBoolean("editable"));
        newProject.addProperty("validBoundary", rs.getBoolean("valid_boundary"));
        // Survey questions/rules are stored as JSON text columns.
        newProject.add("sampleValues", parseJson(rs.getString("survey_questions")).getAsJsonArray());
        newProject.add("surveyRules", parseJson(rs.getString("survey_rules")).getAsJsonArray());
    } catch (SQLException e) {
        System.out.println(e.getMessage());
    }
    return newProject;
}
/**
 * Executes the prepared project query and collects every row into a JSON
 * array string; returns "[]" when the query fails.
 */
private static String queryProjectGet(PreparedStatement pstmt) {
    var projects = new JsonArray();
    try (var results = pstmt.executeQuery()) {
        while (results.next()) {
            projects.add(buildProjectJson(results));
        }
        return projects.toString();
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return "[]";
    }
}
/**
 * Lists projects, optionally filtered by user and/or institution.
 * Dispatches to the stored procedure matching the supplied filters;
 * returns "[]" on database errors.
 */
public String getAllProjects(Request req, Response res) {
    var userId = req.queryParams("userId");
    var institutionId = req.queryParams("institutionId");
    var hasUser = userId != null && !userId.isEmpty();
    var hasInstitution = institutionId != null && !institutionId.isEmpty();
    try (var conn = connect()) {
        if (hasUser && hasInstitution) {
            try (var pstmt = conn.prepareStatement("SELECT * FROM select_institution_projects_with_roles(?,?)")) {
                pstmt.setInt(1, Integer.parseInt(userId));
                pstmt.setInt(2, Integer.parseInt(institutionId));
                return queryProjectGet(pstmt);
            }
        } else if (hasUser) {
            try (var pstmt = conn.prepareStatement("SELECT * FROM select_all_user_projects(?)")) {
                pstmt.setInt(1, Integer.parseInt(userId));
                return queryProjectGet(pstmt);
            }
        } else if (hasInstitution) {
            try (var pstmt = conn.prepareStatement("SELECT * FROM select_all_institution_projects(?)")) {
                pstmt.setInt(1, Integer.parseInt(institutionId));
                return queryProjectGet(pstmt);
            }
        } else {
            try (var pstmt = conn.prepareStatement("SELECT * FROM select_all_projects()")) {
                return queryProjectGet(pstmt);
            }
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return "[]";
    }
}
/**
 * Fetches a single project as a JSON string, or "" when the project
 * does not exist or a database error occurs.
 */
private static String projectById(Integer projectId) {
    try (var conn = connect();
         var pstmt = conn.prepareStatement("SELECT * FROM select_project(?)")) {
        pstmt.setInt(1, projectId);
        try (var rs = pstmt.executeQuery()) {
            return rs.next() ? buildProjectJson(rs).toString() : "";
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return "";
    }
}
/** Route handler: looks up a project by the :id path parameter. */
public String getProjectById(Request req, Response res) {
    return projectById(Integer.parseInt(req.params(":id")));
}
// Route handler: returns project completion statistics as a JSON object string.
// An empty object "{}" is returned when the project has no statistics row,
// and "" on database errors. Property order below is preserved in the output.
public String getProjectStats(Request req, Response res) {
    var projectId = req.params(":id");
    var stats = new JsonObject();
    try (var conn = connect();
         var pstmt = conn.prepareStatement("SELECT * FROM select_project_statistics(?)")) {
        pstmt.setInt(1,Integer.parseInt(projectId));
        try (var rs = pstmt.executeQuery()) {
            if (rs.next()) {
                stats.addProperty("flaggedPlots",rs.getInt("flagged_plots"));
                stats.addProperty("analyzedPlots",rs.getInt("assigned_plots"));
                stats.addProperty("unanalyzedPlots",rs.getInt("unassigned_plots"));
                stats.addProperty("members",rs.getInt("members"));
                stats.addProperty("contributors",rs.getInt("contributors"));
                stats.addProperty("createdDate",rs.getString("created_date"));
                stats.addProperty("publishedDate",rs.getString("published_date"));
                stats.addProperty("closedDate",rs.getString("closed_date"));
                stats.addProperty("archivedDate",rs.getString("archived_date"));
                // user_stats is stored as a JSON text column.
                stats.add("userStats",parseJson(rs.getString("user_stats")).getAsJsonArray());
            }
        }
        return stats.toString();
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return "";
    }
}
/** Maps SQL NULL (or the literal string "null") to the empty string. */
private static String valueOrBlank(String input) {
    if (input == null || input.equals("null")) {
        return "";
    }
    return input;
}
/**
 * Returns the extra (user-supplied) plot CSV columns for a project,
 * excluding geometry/identity columns which are emitted separately.
 * Returns an empty list on database errors.
 */
private static ArrayList<String> getPlotHeaders(Connection conn, Integer projectId) {
    // Hoisted out of the loop: previously rebuilt for every row.
    final var excluded = List.of("GID", "GEOM", "PLOT_GEOM", "LAT", "LON");
    var plotHeaders = new ArrayList<String>();
    try (var pstmt = conn.prepareStatement("SELECT * FROM get_plot_headers(?)")) {
        pstmt.setInt(1, projectId);
        try (var rs = pstmt.executeQuery()) {
            while (rs.next()) {
                final var columnName = rs.getString("column_names");
                if (!excluded.contains(columnName.toUpperCase())) {
                    plotHeaders.add(columnName);
                }
            }
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
    }
    return plotHeaders;
}
/**
 * Returns the extra (user-supplied) sample CSV columns for a project,
 * excluding geometry/identity columns which are emitted separately.
 * Returns an empty list on database errors.
 */
private static ArrayList<String> getSampleHeaders(Connection conn, Integer projectId) {
    // Hoisted out of the loop: previously rebuilt for every row.
    final var excluded = List.of("GID", "GEOM", "LAT", "LON", "SAMPLE_GEOM");
    var sampleHeaders = new ArrayList<String>();
    try (var pstmt = conn.prepareStatement("SELECT * FROM get_sample_headers(?)")) {
        pstmt.setInt(1, projectId);
        try (var rs = pstmt.executeQuery()) {
            while (rs.next()) {
                final var columnName = rs.getString("column_names");
                if (!excluded.contains(columnName.toUpperCase())) {
                    sampleHeaders.add(columnName);
                }
            }
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
    }
    return sampleHeaders;
}
/**
 * Streams the per-plot aggregate CSV for a project. Responds with
 * 204 (no content) when the project does not exist, or an unmodified
 * response on database errors.
 */
public HttpServletResponse dumpProjectAggregateData(Request req, Response res) {
    var projectId = Integer.parseInt(req.params(":id"));
    try (var conn = connect();
         var pstmt = conn.prepareStatement("SELECT * FROM select_project(?)")) {
        // check if project exists
        pstmt.setInt(1, projectId);
        try (var rs = pstmt.executeQuery()) {
            if (rs.next()) {
                var plotSummaries = new JsonArray();
                final var sampleValueGroups = parseJson(rs.getString("survey_questions")).getAsJsonArray();
                final var projectName = rs.getString("name").replace(" ", "-").replace(",", "").toLowerCase();
                var plotHeaders = getPlotHeaders(conn, projectId);
                try (var pstmtDump = conn.prepareStatement("SELECT * FROM dump_project_plot_data(?)")) {
                    pstmtDump.setInt(1, projectId);
                    try (var rsDump = pstmtDump.executeQuery()) {
                        while (rsDump.next()) {
                            var plotSummary = new JsonObject();
                            plotSummary.addProperty("plot_id", rsDump.getString("plot_id"));
                            plotSummary.addProperty("center_lon", rsDump.getString("lon"));
                            plotSummary.addProperty("center_lat", rsDump.getString("lat"));
                            plotSummary.addProperty("size_m", rsDump.getDouble("plot_size"));
                            plotSummary.addProperty("shape", rsDump.getString("plot_shape"));
                            plotSummary.addProperty("flagged", rsDump.getInt("flagged") > 0);
                            plotSummary.addProperty("analyses", rsDump.getInt("assigned"));
                            var samples = parseJson(rsDump.getString("samples")).getAsJsonArray();
                            plotSummary.addProperty("sample_points", samples.size());
                            plotSummary.addProperty("user_id", valueOrBlank(rsDump.getString("email")));
                            plotSummary.addProperty("analysis_duration", valueOrBlank(rsDump.getString("analysis_duration")));
                            plotSummary.addProperty("collection_time", valueOrBlank(rsDump.getString("collection_time")));
                            plotSummary.add("distribution",
                                    getValueDistribution(samples, getSampleValueTranslations(sampleValueGroups)));
                            // BUG FIX: was 'valueOrBlank(...) != ""', a reference comparison.
                            // JDBC strings are not interned, so an empty DB string slipped
                            // through and parseJson("") would be attempted. Compare content.
                            if (!valueOrBlank(rsDump.getString("ext_plot_data")).isEmpty()) {
                                var ext_plot_data = parseJson(rsDump.getString("ext_plot_data")).getAsJsonObject();
                                plotHeaders.forEach(head ->
                                        plotSummary.addProperty("pl_" + head, getOrEmptyString(ext_plot_data, head).getAsString())
                                );
                            }
                            plotSummaries.add(plotSummary);
                        }
                    }
                    // Prefix external plot columns with "pl_" unless already prefixed.
                    var combinedHeaders = plotHeaders.stream()
                            .map(head -> head.contains("pl_") ? head : "pl_" + head)
                            .toArray(String[]::new);
                    return outputAggregateCsv(res, sampleValueGroups, plotSummaries, projectName, combinedHeaders);
                }
            }
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return res.raw();
    }
    res.raw().setStatus(SC_NO_CONTENT);
    return res.raw();
}
/**
 * Streams the per-sample raw CSV for a project. Responds with 204
 * (no content) when the project does not exist, or an unmodified
 * response on database errors.
 */
public HttpServletResponse dumpProjectRawData(Request req, Response res) {
    var projectId = Integer.parseInt(req.params(":id"));
    try (var conn = connect();
         var pstmt = conn.prepareStatement("SELECT * FROM select_project(?)")) {
        // check if project exists
        pstmt.setInt(1, projectId);
        try (var rs = pstmt.executeQuery()) {
            if (rs.next()) {
                var sampleSummaries = new JsonArray();
                final var sampleValueGroups = parseJson(rs.getString("survey_questions")).getAsJsonArray();
                final var projectName = rs.getString("name").replace(" ", "-").replace(",", "").toLowerCase();
                final var plotHeaders = getPlotHeaders(conn, projectId);
                final var sampleHeaders = getSampleHeaders(conn, projectId);
                // Columns that only appear when at least one row supplies them.
                var optionalHeaders = new ArrayList<String>();
                try (var pstmtDump = conn.prepareStatement("SELECT * FROM dump_project_sample_data(?)")) {
                    pstmtDump.setInt(1, projectId);
                    try (var rsDump = pstmtDump.executeQuery()) {
                        while (rsDump.next()) {
                            var plotSummary = new JsonObject();
                            plotSummary.addProperty("plot_id", rsDump.getString("plot_id"));
                            plotSummary.addProperty("sample_id", rsDump.getString("sample_id"));
                            plotSummary.addProperty("lon", rsDump.getString("lon"));
                            plotSummary.addProperty("lat", rsDump.getString("lat"));
                            plotSummary.addProperty("flagged", rsDump.getInt("flagged") > 0);
                            plotSummary.addProperty("analyses", rsDump.getInt("assigned"));
                            plotSummary.addProperty("user_id", valueOrBlank(rsDump.getString("email")));
                            // "value" may be a bare primitive or a JSON object.
                            if (rsDump.getString("value") != null && parseJson(rsDump.getString("value")).isJsonPrimitive()) {
                                plotSummary.addProperty("value", rsDump.getString("value"));
                            } else {
                                plotSummary.add("value", rsDump.getString("value") == null ? null : parseJson(rsDump.getString("value")).getAsJsonObject());
                            }
                            if (!valueOrBlank(rsDump.getString("collection_time")).equals("")) {
                                plotSummary.addProperty("collection_time", valueOrBlank(rsDump.getString("collection_time")));
                                if (!optionalHeaders.contains("collection_time")) optionalHeaders.add("collection_time");
                            }
                            if (!valueOrBlank(rsDump.getString("analysis_duration")).equals("")) {
                                plotSummary.addProperty("analysis_duration", valueOrBlank(rsDump.getString("analysis_duration")));
                                if (!optionalHeaders.contains("analysis_duration")) optionalHeaders.add("analysis_duration");
                            }
                            // BUG FIX (three sites below): was 'valueOrBlank(...) != ""',
                            // a reference comparison. JDBC strings are not interned, so
                            // empty DB strings slipped through. Compare content instead.
                            if (!valueOrBlank(rsDump.getString("imagery_title")).isEmpty()) {
                                plotSummary.addProperty("imagery_title", rsDump.getString("imagery_title"));
                                if (!optionalHeaders.contains("imagery_title")) optionalHeaders.add("imagery_title");
                                // Attributes shorter than "{}" cannot hold any keys.
                                if (valueOrBlank(rsDump.getString("imagery_attributes")).length() > 2) {
                                    var attributes = parseJson(rsDump.getString("imagery_attributes")).getAsJsonObject();
                                    attributes.keySet().forEach(key -> {
                                        plotSummary.addProperty(key, attributes.get(key).getAsString());
                                        if (!optionalHeaders.contains(key)) optionalHeaders.add(key);
                                    });
                                }
                            }
                            if (!valueOrBlank(rsDump.getString("ext_plot_data")).isEmpty()) {
                                var ext_plot_data = parseJson(rsDump.getString("ext_plot_data")).getAsJsonObject();
                                plotHeaders.forEach(head ->
                                        plotSummary.addProperty("pl_" + head, getOrEmptyString(ext_plot_data, head).getAsString())
                                );
                            }
                            if (!valueOrBlank(rsDump.getString("ext_sample_data")).isEmpty()) {
                                var ext_sample_data = parseJson(rsDump.getString("ext_sample_data")).getAsJsonObject();
                                sampleHeaders.forEach(head ->
                                        plotSummary.addProperty("smpl_" + head, getOrEmptyString(ext_sample_data, head).getAsString())
                                );
                            }
                            sampleSummaries.add(plotSummary);
                        }
                    }
                    // Prefix plot/sample columns unless already prefixed.
                    var combinedHeaders =
                            Stream.concat(
                                    optionalHeaders.stream(),
                                    Stream.concat(
                                            plotHeaders.stream()
                                                    .map(head -> head.contains("pl_") ? head : "pl_" + head),
                                            sampleHeaders.stream()
                                                    .map(head -> head.contains("smpl_") ? head : "smpl_" + head)
                                    )
                            ).toArray(String[]::new);
                    return outputRawCsv(res, sampleValueGroups, sampleSummaries, projectName, combinedHeaders);
                }
            }
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return res.raw();
    }
    res.raw().setStatus(SC_NO_CONTENT);
    return res.raw();
}
/**
 * Publishes the project identified by the ":id" route parameter via the
 * publish_project stored procedure and returns the refreshed project JSON.
 * Returns "" if the database call fails.
 */
public String publishProject(Request req, Response res) {
    final var idParam = req.params(":id");
    try (var conn = connect();
         var pstmt = conn.prepareStatement("SELECT * FROM publish_project(?)")) {
        pstmt.setInt(1, Integer.parseInt(idParam));
        try (var rs = pstmt.executeQuery()) {
            var publishedId = 0;
            if (rs.next()) {
                publishedId = rs.getInt("publish_project");
            }
            return projectById(publishedId);
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return "";
    }
}
/**
 * Marks the project identified by the ":id" route parameter as closed via
 * the close_project stored procedure and returns the refreshed project JSON.
 * Returns "" if the database call fails.
 */
public String closeProject(Request req, Response res) {
    final var idParam = req.params(":id");
    try (var conn = connect();
         var pstmt = conn.prepareStatement("SELECT * FROM close_project(?)")) {
        pstmt.setInt(1, Integer.parseInt(idParam));
        try (var rs = pstmt.executeQuery()) {
            var closedId = 0;
            if (rs.next()) {
                closedId = rs.getInt("close_project");
            }
            return projectById(closedId);
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return "";
    }
}
/**
 * Archives the project identified by the ":id" route parameter via the
 * archive_project stored procedure and returns the refreshed project JSON.
 * Returns "" if the database call fails.
 */
public String archiveProject(Request req, Response res) {
    final var idParam = req.params(":id");
    try (var conn = connect();
         var pstmt = conn.prepareStatement("SELECT * FROM archive_project(?)")) {
        pstmt.setInt(1, Integer.parseInt(idParam));
        try (var rs = pstmt.executeQuery()) {
            var archivedId = 0;
            if (rs.next()) {
                archivedId = rs.getInt("archive_project");
            }
            return projectById(archivedId);
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
        return "";
    }
}
/**
 * Updates a project's basic metadata (name, description, privacy level,
 * base map source) from the request body JSON. Always returns "".
 */
public String updateProject(Request req, Response res) {
    final var sql = "SELECT * FROM update_project(?,?,?,?,?)";
    try (var conn = connect();
         var pstmt = conn.prepareStatement(sql)) {
        final var jsonInputs = parseJson(req.body()).getAsJsonObject();
        pstmt.setInt(1, Integer.parseInt(req.params(":id")));
        pstmt.setString(2, getOrEmptyString(jsonInputs, "name").getAsString());
        pstmt.setString(3, getOrEmptyString(jsonInputs, "description").getAsString());
        pstmt.setString(4, getOrEmptyString(jsonInputs, "privacyLevel").getAsString());
        pstmt.setString(5, getOrEmptyString(jsonInputs, "baseMapSource").getAsString());
        pstmt.execute();
    } catch (SQLException e) {
        System.out.println(e.getMessage());
    }
    return "";
}
/**
 * Reads the header row of a CSV under /csv/, validates that all required
 * fields are present, and returns a SQL column-definition list
 * (e.g. "LON float,LAT float,PLOTID integer,...") inferring types from
 * well-known column names.
 *
 * @param filename    CSV file name under the /csv resource directory
 * @param mustInclude field names that must appear in the header (case-insensitive)
 * @throws RuntimeException if a required field is missing or the file cannot be read
 */
private static String loadCsvHeaders(String filename, List<String> mustInclude) {
    try (var lines = Files.lines(Paths.get(expandResourcePath("/csv/" + filename)))) {
        // Upper-case every header once up front; later comparisons rely on it.
        // (The original re-called toUpperCase() on already-uppercased values.)
        final var colList = Arrays.stream(lines.findFirst().orElse("").split(","))
            .map(String::toUpperCase)
            .collect(Collectors.toList());
        // Fail fast if any required field is missing from the CSV header.
        mustInclude.forEach(field -> {
            if (!colList.contains(field.toUpperCase())) {
                throw new RuntimeException("Malformed plot CSV. Fields must be " + String.join(",", mustInclude));
            }
        });
        // Map each header to "NAME type" based on known coordinate/id columns.
        return colList.stream()
            .map(col -> {
                if (List.of("LON", "LAT", "LATITUDE", "LONGITUDE", "LONG", "CENTER_X", "CENTER_Y").contains(col)) {
                    return col + " float";
                } else if (List.of("PLOTID", "SAMPLEID").contains(col)) {
                    return col + " integer";
                }
                return col + " text";
            })
            .collect(Collectors.joining(","));
    } catch (Exception e) {
        throw new RuntimeException("Error reading csv file", e);
    }
}
/**
 * Returns the first two header names of a CSV under /csv/ — the source
 * columns that will be renamed to the canonical lon/lat column names.
 *
 * @throws RuntimeException (with cause preserved) if the file cannot be read
 *         or has fewer than two header columns
 */
private static String[] loadCsvHeadersToRename(String filename) {
    try (var lines = Files.lines(Paths.get(expandResourcePath("/csv/" + filename)))) {
        final var colList = lines.findFirst().orElse("").split(",");
        return new String[] {colList[0], colList[1]};
    } catch (Exception e) {
        // Preserve the underlying exception so the real failure is diagnosable.
        throw new RuntimeException("Invalid headers", e);
    }
}
/**
 * Best-effort deletion of a single file under the /csv resource directory.
 * Failures are logged and swallowed on purpose — cleanup must not abort
 * the caller's workflow.
 */
private static void deleteFile(String csvFile) {
    var csvFilePath = Paths.get(expandResourcePath("/csv"), csvFile);
    try {
        // deleteIfExists collapses the original exists()-then-delete()
        // check into one call, removing the race between check and delete.
        Files.deleteIfExists(csvFilePath);
    } catch (Exception e) {
        System.out.println("Error deleting directory at: " + csvFilePath);
    }
}
/**
 * Removes every uploaded artifact (plots/samples, csv/zip) for a project.
 */
private static void deleteFiles(int projectId) {
    for (var suffix : new String[] {"-plots.csv", "-samples.csv", "-plots.zip", "-samples.zip"}) {
        deleteFile("project-" + projectId + suffix);
    }
}
/**
 * Imports an external plots/samples file into its own database table named
 * "project_&lt;id&gt;_&lt;plotsOrSamples&gt;_&lt;format&gt;" and returns that table name.
 *
 * For "csv": creates an empty table from the CSV headers, bulk-loads it via
 * csv2postgres.sh, renames the first two columns to lon/lat, and adds an
 * index column. For "shp": loads via shp2postgres.sh. Any other
 * distribution returns "".
 *
 * @throws RuntimeException (with SQLException cause preserved) on DB failure
 */
private static String loadExternalData(Connection conn, String distribution, Integer projectId, String extFile, String plotsOrSamples, List<String> mustInclude) {
    try {
        System.out.println(distribution);
        if (distribution.equals("csv")) {
            final var table_name = "project_" + projectId + "_" + plotsOrSamples + "_csv";
            // add empty table to the database
            System.out.println("create");
            try (var pstmt = conn.prepareStatement("SELECT * FROM create_new_table(?,?)")) {
                pstmt.setString(1, table_name);
                pstmt.setString(2, loadCsvHeaders(extFile, mustInclude));
                pstmt.execute();
            }
            // import csv file
            runBashScriptForProject(projectId, plotsOrSamples, "csv2postgres.sh", "/csv");
            var renameFrom = loadCsvHeadersToRename(extFile);
            // rename the first two csv columns to the canonical lon/lat names
            System.out.println("rename");
            try (var pstmt = conn.prepareStatement("SELECT * FROM rename_col(?,?,?)")) {
                pstmt.setString(1, table_name);
                pstmt.setString(2, renameFrom[0]);
                pstmt.setString(3, "lon");
                pstmt.execute();
            }
            System.out.println("rename2");
            try (var pstmt = conn.prepareStatement("SELECT * FROM rename_col(?,?,?)")) {
                pstmt.setString(1, table_name);
                pstmt.setString(2, renameFrom[1]);
                pstmt.setString(3, "lat");
                pstmt.execute();
            }
            // add index for reference
            System.out.println("index");
            try (var pstmt = conn.prepareStatement("SELECT * FROM add_index_col(?)")) {
                pstmt.setString(1, table_name);
                pstmt.execute();
            }
            return table_name;
        } else if (distribution.equals("shp")) {
            runBashScriptForProject(projectId, plotsOrSamples, "shp2postgres.sh", "/shp");
            return "project_" + projectId + "_" + plotsOrSamples + "_shp";
        } else {
            return "";
        }
    } catch (SQLException s) {
        System.out.println(s.getMessage());
        // FIX: keep the SQLException as the cause instead of discarding it.
        throw new RuntimeException("Error importing file into SQL", s);
    }
}
/**
 * Loads the plots file into its own table and verifies the loaded data has
 * the expected columns/types. Returns the new table name.
 *
 * @throws RuntimeException with a format-specific user-facing message
 *         (cause preserved) if loading or validation fails
 */
private static String checkLoadPlots(Connection conn, String plotDistribution, Integer projectId, String plotsFile) {
    try {
        // check if data is also correct after being loaded
        final var plots_table = loadExternalData(conn, plotDistribution, projectId, plotsFile, "plots", List.of("plotId"));
        try (var pstmt = conn.prepareStatement("SELECT * FROM select_partial_table_by_name(?)")) {
            pstmt.setString(1, plots_table);
            pstmt.execute();
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            // FIX: keep the SQLException as the cause instead of discarding it.
            throw new RuntimeException("Incorrect sql columns or datatypes", s);
        }
        return plots_table;
    } catch (Exception e) {
        // Surface a message tailored to the upload format.
        if (plotDistribution.equals("csv")) {
            throw new RuntimeException("Malformed plot CSV. Fields must be LON,LAT,PLOTID.", e);
        } else {
            throw new RuntimeException("Malformed plot Shapefile. All features must be of type polygon and include a PLOTID field.", e);
        }
    }
}
/**
 * Loads the samples file (csv/shp) into its own table and verifies the
 * loaded data. Returns the new table name, or null when the sample
 * distribution is not file-based.
 *
 * @throws RuntimeException with a format-specific user-facing message
 *         (cause preserved) if loading or validation fails
 */
private static String checkLoadSamples(Connection conn, String sampleDistribution, Integer projectId, String samplesFile) {
    try {
        final var samples_table = loadExternalData(conn, sampleDistribution, projectId, samplesFile, "samples", List.of("plotId", "sampleId"));
        if (List.of("csv", "shp").contains(sampleDistribution)) {
            // check if data is also correct after being loaded
            try (var pstmt = conn.prepareStatement("SELECT * FROM select_partial_table_by_name(?)")) {
                pstmt.setString(1, samples_table);
                pstmt.execute();
            } catch (SQLException s) {
                // FIX: keep the SQLException as the cause instead of discarding it.
                throw new RuntimeException("Missing sql columns", s);
            }
            return samples_table;
        } else {
            return null;
        }
    } catch (Exception e) {
        // Surface a message tailored to the upload format.
        if (sampleDistribution.equals("csv")) {
            throw new RuntimeException("Malformed sample CSV. Fields must be LON,LAT,PLOTID,SAMPLEID.", e);
        } else {
            throw new RuntimeException("Malformed sample Shapefile. All features must be of type polygon and include PLOTID and SAMPLEID fields.", e);
        }
    }
}
// Generates plots and samples for a newly created project, either from
// user-uploaded csv/shp files or procedurally (random/gridded) inside the
// project's lat/lon bounding box. Finishes by validating that every plot
// has at least one sample and by syncing stored plot/sample counts.
// SQLExceptions from the outer connection are logged and swallowed.
private static void createProjectPlots(JsonObject newProject) {
    // Store the parameters needed for plot generation in local variables with nulls set to 0
    var projectId = newProject.get("id").getAsInt();
    var lonMin = newProject.get("lonMin").getAsDouble();
    var latMin = newProject.get("latMin").getAsDouble();
    var lonMax = newProject.get("lonMax").getAsDouble();
    var latMax = newProject.get("latMax").getAsDouble();
    var plotDistribution = newProject.get("plotDistribution").getAsString();
    var numPlots = newProject.get("numPlots").getAsInt();
    var plotSpacing = newProject.get("plotSpacing").getAsDouble();
    var plotShape = newProject.get("plotShape").getAsString();
    var plotSize = newProject.get("plotSize").getAsDouble();
    var sampleDistribution = newProject.get("sampleDistribution").getAsString();
    var samplesPerPlot = newProject.get("samplesPerPlot").getAsInt();
    var sampleResolution = newProject.get("sampleResolution").getAsDouble();
    var plotsFile = getOrEmptyString(newProject, "plotsFile").getAsString();
    var samplesFile = getOrEmptyString(newProject, "samplesFile").getAsString();
    try (var conn = connect()) {
        if (List.of("csv", "shp").contains(plotDistribution)) {
            // load files into the database (loadPlot, loadSamples) and update projects
            try (var pstmt =
                conn.prepareStatement("SELECT * FROM update_project_tables(?,?::text,?::text)")) {
                pstmt.setInt(1, projectId);
                pstmt.setString(2, checkLoadPlots(conn, plotDistribution, projectId, plotsFile));
                pstmt.setString(3, checkLoadSamples(conn, sampleDistribution, projectId, samplesFile));
                pstmt.execute();
            } catch (SQLException e) {
                System.out.println("catch update");
                throw new RuntimeException(e);
            }
            // Clean up the imported tables (cleanup_project_tables takes the
            // project id and plot size; exact behavior defined in SQL).
            try (var pstmt =
                conn.prepareStatement("SELECT * FROM cleanup_project_tables(?,?)")) {
                pstmt.setInt(1, projectId);
                pstmt.setDouble(2, plotSize);
                pstmt.execute();
            } catch (SQLException e) {
                System.out.println("catch clean");
                throw new RuntimeException(e);
            }
            // if both are files, adding plots and samples is done inside PG
            if (List.of("csv", "shp").contains(sampleDistribution)) {
                try (var pstmt =
                    conn.prepareStatement("SELECT * FROM samples_from_plots_with_files(?)")) {
                    pstmt.setInt(1, projectId);
                    pstmt.execute();
                } catch (SQLException e) {
                    System.out.println("catch adding 2");
                    throw new RuntimeException(e);
                }
            // Add plots from file and use returned plot ID to create samples
            } else {
                try (var pstmt =
                    conn.prepareStatement("SELECT * FROM add_file_plots(?)")) {
                    pstmt.setInt(1,projectId);
                    try (var rs = pstmt.executeQuery()) {
                        while (rs.next()) {
                            // Each returned row is one plot; generate its samples in Java.
                            var plotCenter = new Double[] {rs.getDouble("lon"), rs.getDouble("lat")};
                            createProjectSamples(conn, rs.getInt("plot_uid"), sampleDistribution,
                                plotCenter, plotShape, plotSize, samplesPerPlot, sampleResolution, plotDistribution.equals("shp"));
                        }
                    }
                } catch (SQLException e) {
                    System.out.println("catch adding 1");
                    throw new RuntimeException(e);
                }
            }
        } else {
            // Convert the lat/lon boundary coordinates to Web Mercator (units: meters) and apply an interior buffer of plotSize / 2
            final var bounds = reprojectBounds(lonMin, latMin, lonMax, latMax, 4326, 3857);
            final var paddedBounds = padBounds(bounds[0], bounds[1], bounds[2], bounds[3], plotSize / 2.0);
            final var left = paddedBounds[0];
            final var bottom = paddedBounds[1];
            final var right = paddedBounds[2];
            final var top = paddedBounds[3];
            // Generate the plot objects and their associated sample points
            final var newPlotCenters =
                plotDistribution.equals("random")
                ? createRandomPointsInBounds(left, bottom, right, top, numPlots)
                : createGriddedPointsInBounds(left, bottom, right, top, plotSpacing);
            Arrays.stream(newPlotCenters)
                .forEach(plotCenter -> {
                    // Insert the plot, then fan out its samples using the returned id.
                    final var SqlPlots = "SELECT * FROM create_project_plot(?,ST_SetSRID(ST_GeomFromGeoJSON(?), 4326))";
                    try (var pstmtPlots = conn.prepareStatement(SqlPlots)) {
                        pstmtPlots.setInt(1, projectId);
                        pstmtPlots.setString(2, makeGeoJsonPoint(plotCenter[0], plotCenter[1]).toString());
                        try (var rsPlots = pstmtPlots.executeQuery()) {
                            if (rsPlots.next()) {
                                final var newPlotId = rsPlots.getInt("create_project_plot");
                                createProjectSamples(conn, newPlotId, sampleDistribution,
                                    plotCenter, plotShape, plotSize, samplesPerPlot, sampleResolution, false);
                            }
                        }
                    } catch (SQLException e) {
                        System.out.println(e.getMessage());
                    }
                });
        }
        //Check if all plots have a sample
        try (var pstmt = conn.prepareStatement("SELECT * FROM plots_missing_samples(?)")) {
            pstmt.setInt(1, projectId);
            try (var rs = pstmt.executeQuery()) {
                if (rs.next() && rs.getInt("plots_missing_samples") > 0) {
                    throw new RuntimeException("The uploaded plot and sample files do not have correctly overlapping data. "
                        + rs.getInt("plots_missing_samples")
                        + " plots have no samples.");
                }
            }
        }
        // Update numPlots and samplesPerPlot to match the numbers that were generated
        try (var pstmt = conn.prepareStatement("SELECT * FROM update_project_counts(?)")) {
            pstmt.setInt(1, projectId);
            pstmt.execute();
        }
    } catch (SQLException e) {
        System.out.println(e.getMessage());
    }
}
/**
 * Inserts the sample points for one plot. For shp plots or non-random/
 * non-gridded distributions the single sample is the plot center; otherwise
 * a random or gridded point set is generated inside the plot geometry.
 * SQL failures per sample are logged and skipped.
 */
private static void createProjectSamples(Connection conn, Integer newPlotId, String sampleDistribution, Double[] plotCenter, String plotShape, Double plotSize, Integer samplesPerPlot, Double sampleResolution, Boolean isShp) {
    Double[][] samplePoints;
    if (isShp || !List.of("random", "gridded").contains(sampleDistribution)) {
        samplePoints = new Double[][] {plotCenter};
    } else if (sampleDistribution.equals("random")) {
        samplePoints = createRandomSampleSet(plotCenter, plotShape, plotSize, samplesPerPlot);
    } else {
        samplePoints = createGriddedSampleSet(plotCenter, plotShape, plotSize, sampleResolution);
    }
    final var sql = "SELECT * FROM create_project_plot_sample(?,ST_SetSRID(ST_GeomFromGeoJSON(?), 4326))";
    for (var samplePoint : samplePoints) {
        try (var pstmtSamples = conn.prepareStatement(sql)) {
            pstmtSamples.setInt(1, newPlotId);
            pstmtSamples.setString(2, makeGeoJsonPoint(samplePoint[0], samplePoint[1]).toString());
            pstmtSamples.execute();
        } catch (SQLException e) {
            System.out.println(e.getMessage());
        }
    }
}
// Creates a new project from the request-body JSON: inserts the project row,
// then either copies widgets/plots from a template project or generates
// fresh plots/samples (possibly from uploaded base64 csv/shp files).
// Returns the new project id as a string on success; on failure, deletes
// any uploaded files and the partially created project row, then returns
// the exception message.
public String createProject(Request req, Response res) {
    var newProjectId = 0;
    try {
        final var jsonInputs = parseJson(req.body()).getAsJsonObject();
        var newProject = new JsonObject();
        // Copy user-supplied fields, defaulting absent values to ""/0/false.
        newProject.addProperty("baseMapSource", getOrEmptyString(jsonInputs, "baseMapSource").getAsString());
        newProject.addProperty("description", getOrEmptyString(jsonInputs, "description").getAsString());
        newProject.addProperty("institution", getOrZero(jsonInputs, "institution").getAsInt());
        newProject.addProperty("lonMin", getOrZero(jsonInputs, "lonMin").getAsDouble());
        newProject.addProperty("latMin", getOrZero(jsonInputs, "latMin").getAsDouble());
        newProject.addProperty("lonMax", getOrZero(jsonInputs, "lonMax").getAsDouble());
        newProject.addProperty("latMax", getOrZero(jsonInputs, "latMax").getAsDouble());
        newProject.addProperty("name", getOrEmptyString(jsonInputs, "name").getAsString());
        newProject.addProperty("numPlots", getOrZero(jsonInputs, "numPlots").getAsInt());
        newProject.addProperty("plotDistribution", getOrEmptyString(jsonInputs, "plotDistribution").getAsString());
        newProject.addProperty("plotShape", getOrEmptyString(jsonInputs, "plotShape").getAsString());
        newProject.addProperty("plotSize", getOrZero(jsonInputs, "plotSize").getAsDouble());
        newProject.addProperty("plotSpacing", getOrZero(jsonInputs, "plotSpacing").getAsDouble());
        newProject.addProperty("privacyLevel", getOrEmptyString(jsonInputs, "privacyLevel").getAsString());
        newProject.addProperty("projectTemplate", getOrZero(jsonInputs, "projectTemplate").getAsInt());
        newProject.addProperty("sampleDistribution", getOrEmptyString(jsonInputs, "sampleDistribution").getAsString());
        newProject.addProperty("samplesPerPlot", getOrZero(jsonInputs, "samplesPerPlot").getAsInt());
        newProject.addProperty("sampleResolution", getOrZero(jsonInputs, "sampleResolution").getAsDouble());
        newProject.add("sampleValues", jsonInputs.get("sampleValues").getAsJsonArray());
        newProject.add("surveyRules", jsonInputs.get("surveyRules").getAsJsonArray());
        newProject.addProperty("useTemplatePlots", getOrFalse(jsonInputs, "useTemplatePlots").getAsBoolean());
        newProject.addProperty("useTemplateWidgets", getOrFalse(jsonInputs, "useTemplateWidgets").getAsBoolean());
        // file part properties
        newProject.addProperty("plotFileName", getOrEmptyString(jsonInputs, "plotFileName").getAsString());
        newProject.addProperty("plotFileBase64", getOrEmptyString(jsonInputs, "plotFileBase64").getAsString());
        newProject.addProperty("sampleFileName", getOrEmptyString(jsonInputs, "sampleFileName").getAsString());
        newProject.addProperty("sampleFileBase64", getOrEmptyString(jsonInputs, "sampleFileBase64").getAsString());
        // Add constant values
        newProject.addProperty("availability", "unpublished");
        newProject.addProperty("createdDate", LocalDate.now().toString());
        // Build the project's GeoJSON boundary polygon from the bounding box.
        final var lonMin = getOrZero(newProject, "lonMin").getAsDouble();
        final var latMin = getOrZero(newProject, "latMin").getAsDouble();
        final var lonMax = getOrZero(newProject, "lonMax").getAsDouble();
        final var latMax = getOrZero(newProject, "latMax").getAsDouble();
        newProject.addProperty("boundary", makeGeoJsonPolygon(lonMin, latMin, lonMax, latMax).toString());
        var SQL = "SELECT * FROM create_project(?,?,?,?,?,ST_SetSRID(ST_GeomFromGeoJSON(?), 4326),?,?,?,?,?,?,?,?,?,?::JSONB,?::JSONB,?::date,?::JSONB)";
        try (var conn = connect();
             var pstmt = conn.prepareStatement(SQL)) {
            pstmt.setInt(1, newProject.get("institution").getAsInt());
            pstmt.setString(2, newProject.get("availability").getAsString());
            pstmt.setString(3, newProject.get("name").getAsString());
            pstmt.setString(4, newProject.get("description").getAsString());
            pstmt.setString(5, newProject.get("privacyLevel").getAsString());
            pstmt.setString(6, newProject.get("boundary").getAsString());
            pstmt.setString(7, newProject.get("baseMapSource").getAsString());
            pstmt.setString(8, newProject.get("plotDistribution").getAsString());
            pstmt.setInt(9, newProject.get("numPlots").getAsInt());
            pstmt.setDouble(10, newProject.get("plotSpacing").getAsDouble());
            pstmt.setString(11, newProject.get("plotShape").getAsString());
            pstmt.setDouble(12, newProject.get("plotSize").getAsDouble());
            pstmt.setString(13, newProject.get("sampleDistribution").getAsString());
            pstmt.setInt(14, newProject.get("samplesPerPlot").getAsInt());
            pstmt.setDouble(15, newProject.get("sampleResolution").getAsDouble());
            pstmt.setString(16, newProject.get("sampleValues").getAsJsonArray().toString());
            pstmt.setString(17, newProject.get("surveyRules").getAsJsonArray().toString());
            pstmt.setString(18, newProject.get("createdDate").getAsString());
            pstmt.setString(19, null); //classification times
            try (var rs = pstmt.executeQuery()) {
                if (rs.next()) {
                    newProjectId = rs.getInt("create_project");
                    newProject.addProperty("id", newProjectId);
                    if (newProject.get("projectTemplate").getAsInt() > 0
                        && newProject.get("useTemplateWidgets").getAsBoolean()) {
                        // Copy existing widgets
                        try (var pstmt1 = conn.prepareStatement("SELECT * FROM get_project_widgets_by_project_id(?)")) {
                            pstmt1.setInt(1, newProject.get("projectTemplate").getAsInt());
                            try (var rs1 = pstmt1.executeQuery()) {
                                // One shared dashboard UUID for all copied widgets.
                                var newUUID = UUID.randomUUID();
                                while (rs1.next()) {
                                    try (var preparedStatement = conn.prepareStatement("SELECT * FROM add_project_widget(?, ?, ?::JSONB)")) {
                                        preparedStatement.setInt(1, newProjectId);
                                        preparedStatement.setObject(2, newUUID);
                                        preparedStatement.setString(3, rs1.getString("widget"));
                                        preparedStatement.execute();
                                    }
                                }
                            }
                        }
                    }
                    if (newProject.get("projectTemplate").getAsInt() > 0
                        && newProject.get("useTemplatePlots").getAsBoolean()) {
                        // Copy existing plots
                        try (var copyPstmt = conn.prepareStatement("SELECT * FROM copy_template_plots(?,?)")) {
                            copyPstmt.setInt(1, newProject.get("projectTemplate").getAsInt());
                            copyPstmt.setInt(2, newProjectId);
                            copyPstmt.execute();
                        }
                    } else {
                        // Decode uploaded base64 file parts to disk, then generate plots.
                        if (List.of("csv", "shp").contains(newProject.get("plotDistribution").getAsString())) {
                            newProject.addProperty("plotsFile", writeFilePartBase64(
                                newProject.get("plotFileName").getAsString(),
                                newProject.get("plotFileBase64").getAsString(),
                                expandResourcePath("/" + newProject.get("plotDistribution").getAsString()),
                                "project-" + newProjectId + "-plots"
                            ));
                        }
                        if (List.of("csv", "shp").contains(newProject.get("sampleDistribution").getAsString())) {
                            newProject.addProperty("samplesFile", writeFilePartBase64(
                                newProject.get("sampleFileName").getAsString(),
                                newProject.get("sampleFileBase64").getAsString(),
                                expandResourcePath("/" + newProject.get("sampleDistribution").getAsString()),
                                "project-" + newProjectId + "-samples"
                            ));
                        }
                        createProjectPlots(newProject);
                        // Remove the temporary upload files once imported.
                        deleteFiles(newProjectId);
                        deleteShapeFileDirectories(newProjectId);
                    }
                }
                // Indicate that the project was created successfully
                return Integer.toString(newProjectId);
            }
        } catch (SQLException e) {
            System.out.println(e.getMessage());
            // Indicate that an error occurred with project creation
            throw new RuntimeException(e);
        }
    }
    catch (Exception e) {
        // Indicate that an error occurred with project creation:
        // roll back uploaded files and the partially created project row.
        deleteFiles(newProjectId);
        deleteShapeFileDirectories(newProjectId);
        try (var conn = connect()) {
            try (var pstmt = conn.prepareStatement("SELECT delete_project(?)")) {
                pstmt.setInt(1, newProjectId);
                pstmt.execute();
            }
        } catch (SQLException sql) {
            // Best-effort cleanup; deletion failures are intentionally ignored.
        }
        StringWriter outError = new StringWriter();
        e.printStackTrace(new PrintWriter(outError));
        System.out.println(outError.toString());
        return e.getMessage();
    }
}
} |
import htsjdk.samtools.SAMRecord;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Queue;
import java.util.LinkedList;
import java.util.Collection;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import org.jgrapht.*;
import org.jgrapht.graph.*;
public class HLAGraph{
// ANSI terminal escape codes used for colored console output.
public static final String ANSI_RESET = "\u001B[0m";
public static final String ANSI_RED = "\u001B[31m";
//A B C ... //HLA-DPA1, HLA-DPB1, HLA-DQA1, HLA-DQB1, HLA-DRA, and HLA-DRB1
// Name of the HLA gene this graph models.
private String HLAGeneName;
// All allele sequences for this gene; index 0 is used as the reference allele.
private ArrayList<Sequence> alleles;
// Lookup from allele name to its Sequence.
private HashMap<String, Sequence> alleleHash;
//private SimpleDirectedWeightedGraph<Node, DefaultWeightedEdge> g;
// The weighted graph of per-column base nodes connected by read-support edges.
private SimpleDirectedWeightedGraph<Node, CustomWeightedEdge> g;
private ArrayList<StringBuffer> interBubbleSequences;
//private ArrayList<Path> interBubblePaths;
private ArrayList<TmpPath> interBubblePaths2;
//private ArrayList<HashMap<Character, Node>> nodeHashList;//list index = columnIndex-1.
// Per-column node lookup keyed by integer base; list index = columnIndex - 1.
private ArrayList<HashMap<Integer, Node>> nodeHashList;// list index = columnIndex - 1;
// Typing sequences (G-group) written out to a FASTA file for this gene.
private ArrayList<HLASequence> typingSequences;
// Artificial source (column 0) and sink (last column + 1) nodes.
private Node sNode;
private Node tNode;
private int columnLen;
private String outputfilename;
private StringBuffer resultBuffer;
// Keeps track of excess lengths added to the head and tail of typing
// regions (exons) due to bubbles at the beginning and end.
private int[] headerExcessLengthBeyondTypingBoundary;
private int[] tailExcessLengthBeyondTypingBoundary;
/* Outer list index = columnIndex - 1 --> insertion point */
/* Inner list index = insertion length */
//private ArrayList<ArrayList<HashMap<Character, Node>>> insertionNodeHashList;
private ArrayList<ArrayList<HashMap<Integer, Node>>> insertionNodeHashList;
// Stores the typing sequences and immediately writes them out to a FASTA file.
public void setTypingSequences(ArrayList<HLASequence> seqs){
    this.typingSequences = seqs;
    this.writeTypingSequences();
}
// Writes all typing sequences to "<gene>_typingSequences_G_group.fa".
// FIX: try-with-resources guarantees the writer is closed even when a
// write fails (the original only closed on the success path, leaking
// the file handle on exception). IOExceptions are still logged and
// swallowed, matching the original contract.
private void writeTypingSequences(){
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(HLAGeneName + "_typingSequences_G_group.fa"))) {
        for (HLASequence hs : this.typingSequences) {
            bw.write(hs.toString());
        }
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }
}
// Returns the underlying weighted graph.
public SimpleDirectedWeightedGraph<Node, CustomWeightedEdge> getGraph(){
    return this.g;
}
/* DO NOT use this to add sNode and tNode */
// Adds a node to the graph and indexes it in nodeHashList by its column
// (columnIndex - 1) and integer base.
// FIX: removed the unused local `ci` and the deprecated new Integer(int)
// constructor (autoboxing/Integer.valueOf is the supported form).
public void addVertex(Node n){
    this.g.addVertex(n);
    this.nodeHashList.get(n.getColIndex() - 1).put(Integer.valueOf(n.getIBase()), n);
}
/* DO NOT use this to remove sNode and tNode */
// Removes a node: first drop it from the per-column index, then from the graph.
public void removeVertex(Node n){
    this.removeVertexFromNodeHashList(n);
    this.g.removeVertex(n);
}
// Removes a node from nodeHashList only. We don't touch insertionNodeHashList
// because any node added to insertionNodeHashList must have weights.
// FIX: replaced the deprecated new Integer(int) constructor with Integer.valueOf.
private void removeVertexFromNodeHashList(Node n){
    this.nodeHashList.get(n.getColIndex() - 1).remove(Integer.valueOf(n.getIBase()));
}
// Sets the HLA gene name this graph models.
public void setHLAGeneName(String gn){
    this.HLAGeneName = gn;
}
// Returns the reference allele (the first sequence in the allele list).
public Sequence getRefAllele(){
    return this.alleles.get(0);
}
// Builds the graph for one HLA gene from its allele sequences: indexes
// alleles by name, creates source/sink nodes bracketing all columns,
// then calls buildGraph() and traverse() (defined elsewhere in this class).
public HLAGraph(ArrayList<Sequence> seqs){
    //int numTypingExons = 1;
    //if(this.isClassI())
    //    numTypingExons = 2;
    this.headerExcessLengthBeyondTypingBoundary = new int[2];
    this.tailExcessLengthBeyondTypingBoundary = new int[2];//numTypingExons];
    this.alleles = seqs;
    this.alleleHash = new HashMap<String, Sequence>();
    // Index every allele by name for O(1) lookup when processing reads.
    for(int i=0;i<this.alleles.size();i++){
        this.alleleHash.put(this.alleles.get(i).getAlleleName(), this.alleles.get(i));
    }
    //this.g = new SimpleDirectedWeightedGraph<Node, DefaultWeightedEdge>(DefaultWeightedEdge.class);
    this.g = new SimpleDirectedWeightedGraph<Node, CustomWeightedEdge>(CustomWeightedEdge.class);
    // Source node at column 0; sink at (reference column length + 1).
    this.sNode = new Node('s', 0);
    this.tNode = new Node('t', this.alleles.get(0).getColLength() + 1);
    this.g.addVertex(sNode);
    this.g.addVertex(tNode);
    //this.nodeHashList = new ArrayList<HashMap<Character, Node>>();
    this.nodeHashList = new ArrayList<HashMap<Integer, Node>>();
    //this.insertionNodeHashList = new ArrayList<ArrayList<HashMap<Character, Node>>>();
    this.insertionNodeHashList = new ArrayList<ArrayList<HashMap<Integer, Node>>>();
    this.buildGraph();
    this.traverse();
}
/*
 * Finds all s-t paths in this graph with a BFS-style queue expansion.
 * Should only be used within a single bubble (path count explodes otherwise).
 */
public ArrayList<Path> findAllSTPath(Node s, Node t){
    ArrayList<Path> allPaths = new ArrayList<Path>();
    Queue<Path> frontier = new LinkedList<Path>();
    // Seed the queue with a single-edge path for every edge leaving s.
    for (CustomWeightedEdge edge : this.g.outgoingEdgesOf(s)) {
        frontier.add(new Path(edge));
    }
    Path current;
    // Expand each partial path until it reaches t.
    while ((current = frontier.poll()) != null) {
        Node tail = current.getLastVertex(this.g);
        if (tail.equals(t)) {
            // Reached the sink: this path is complete.
            allPaths.add(current);
        } else {
            // Extend the path by every outgoing edge of its last vertex.
            for (CustomWeightedEdge edge : this.g.outgoingEdgesOf(tail)) {
                Path extended = current.deepCopy();
                extended.appendEdge(edge);
                frontier.add(extended);
            }
        }
    }
    return allPaths;
}
/*
 * BFS-style enumeration of all s-t paths with read-set pruning. Each
 * queued path carries the running intersection of the read sets of its
 * edges; a branch is extended only while at least one read supports the
 * whole path, which greatly prunes the search space.
 */
public ArrayList<Path> findAllSTPathPruning(Node s, Node t){
    ArrayList<Path> results = new ArrayList<Path>();
    Queue<Path> pathsQ = new LinkedList<Path>();
    Queue<CustomHashMap> readsetQ = new LinkedList<CustomHashMap>(); //we need to keep track of readset to prune branches based on the size
    Iterator<CustomWeightedEdge> itr = this.g.outgoingEdgesOf(s).iterator();
    //first load all outgoing edges as paths in the paths queue.
    while(itr.hasNext()){
        CustomWeightedEdge curE = itr.next();
        pathsQ.add(new Path(curE));
        readsetQ.add(curE.getReadHashSet().clone());
    }
    Path firstPath = null;
    CustomHashMap firstReadSet = null;
    //while we have paths to explore further in the queue
    while((firstPath = pathsQ.poll())!=null){
        firstReadSet = readsetQ.poll();
        //obtain the vertex at the end of this path
        Node lastVertex = firstPath.getLastVertex(this.g);
        //if the last vertex is t, then we add this path to the results
        if(lastVertex.equals(t)){
            results.add(firstPath);
        }else{//otherwise, we need to explore the paths further
            itr = this.g.outgoingEdgesOf(lastVertex).iterator();
            while(itr.hasNext()){
                Path tmpP = firstPath.deepCopy();
                CustomHashMap tmpReadSet = firstReadSet.clone();
                CustomWeightedEdge nextE = itr.next();
                tmpReadSet.intersectionPE(nextE.getReadHashSet());
                // BUG FIX: prune on the INTERSECTED set (tmpReadSet), not on
                // the parent's unmodified set (firstReadSet) — the old check
                // never reflected the intersection, so pruning was ineffective.
                if(tmpReadSet.size() > 0){ // only extend when >=1 read supports the whole path
                    tmpP.appendEdge(nextE);
                    pathsQ.add(tmpP);
                    readsetQ.add(tmpReadSet);
                }
            }
        }
    }
    return results;
}
// Creates a node for a base not present in the reference graph at this
// column, registers it in the graph and in nodeHashList, and (when a
// predecessor exists) connects it with a weighted edge carrying this read.
// If pre is null, the node is created but no edge is attempted.
// FIX: replaced the deprecated new Integer(int) constructor with
// Integer.valueOf and removed dead commented-out code.
private Node addMissingNode(char b, int colPos, Node cur, Node pre, boolean isRefStrand, byte qual, int readNum){
    cur = new Node(b, colPos);
    this.g.addVertex(cur);
    this.nodeHashList.get(colPos - 1).put(Integer.valueOf(Base.char2ibase(b)), cur);
    if(pre != null){
        this.addAndIncrement(pre, cur, isRefStrand, qual, readNum);
    }
    return cur;
}
// Creates a new source->target edge, records the read on it, zeroes its
// weight, and applies the first weight increment.
// NOTE(review): JGraphT's addEdge returns null when the edge already exists
// or cannot be added — callers appear responsible for only calling this when
// the edge is new (incrementWeight checks getEdge first). TODO confirm.
private void addAndIncrement(Node source, Node target, boolean isRefStrand, byte qual, int readNum){
    //target.addRead(readNum); //moved readHash to edges
    CustomWeightedEdge e = this.g.addEdge(source,target);
    e.addRead(readNum, qual);
    this.g.setEdgeWeight(e, 0.0d);
    e.incrementWeight(this.g, isRefStrand, qual);
}
// Records read support on the source->target edge, creating the edge first
// if it does not exist yet.
private void incrementWeight(Node source, Node target, boolean isRefStrand, byte qual, int readNum){
    CustomWeightedEdge edge = g.getEdge(source, target);
    if (edge != null) {
        // Edge already present: register the read and bump the weight.
        edge.addRead(readNum, qual);
        edge.incrementWeight(this.g, isRefStrand, qual);
    } else {
        // No edge yet between these nodes: create it with this read's support.
        this.addAndIncrement(source, target, isRefStrand, qual, readNum);
    }
}
//readNum is a readIdentifier [int]
/*
 * Adds one aligned read's evidence to the MSA graph by walking its CIGAR.
 * For every M (match/mismatch), D (deletion) and I (insertion) base, the
 * corresponding node-to-node edge gets this read's id, base quality and an
 * incremented weight. S (soft clip) advances the read cursor only; H (hard
 * clip) is skipped entirely.
 *
 * sr      - the aligned read (htsjdk SAMRecord); its reference name is used to
 *           look up the allele (MSA column mapping) in this.alleleHash
 * readNum - integer identifier of this read, stored on each touched edge
 * returns - number of M/D/I positions processed (0 if the record has no CIGAR)
 */
public int addWeight(SAMRecord sr, int readNum){
    int numOp = 0;
    Cigar cigar = sr.getCigar();
    byte[] bases = sr.getReadBases(); //ASCII bytes ACGTN=.
    byte[] quals = sr.getBaseQualities();
    // floor base qualities at 2 so later probability math never sees qual 0/1
    for(int i=0; i<quals.length; i++){
        if(quals[i] < 2)
            quals[i] = 2;
    }
    int baseIndex = 0;                          // cursor into bases[]/quals[]
    int refBasePos = sr.getAlignmentStart();    // 1-based reference coordinate
    Node prevnode = null;                       // last node visited for this read
    Node curnode = null;
    Base curbase = null;                        // NOTE(review): never assigned below; appears unused
    Sequence curAllele = this.alleleHash.get(sr.getReferenceName());
    // colPos is the 1-based MSA column matching the read's alignment start
    int colPos = curAllele.getColPosFromBasePos(refBasePos);
    boolean isRefStrand = !sr.getReadNegativeStrandFlag();
    /*
    System.err.println(sr.toString());
    System.err.println("start position:\t" + refBasePos);
    System.err.println("Mapped Allele:\t" + sr.getReferenceName());
    System.err.println("Allele Name:\t" + curAllele.getAlleleName());
    System.err.println("CIGAR:\t" + sr.getCigar());
    System.err.println("READ:\t" + sr.getReadString());
    System.err.println("READL:\t" + bases.length);
    System.err.println("ColPos:\t" + colPos);
    for(int i=0; i<bases.length; i++){
        System.err.print(Base.char2ibase((char)bases[i]));
    }
    System.err.println();
    */
    //curAllele.printPositions(colPos-1, bases.length);
    if(cigar==null) return 0;
    for(final CigarElement ce : cigar.getCigarElements()){
        //System.err.println(ce.toString() + "\t" + ce.getLength());
        CigarOperator op = ce.getOperator();
        int cigarLen = ce.getLength();
        switch(op)
        {
        case S :    // soft clip: bases present in the read but not aligned
            {
                baseIndex += cigarLen;
                break;
            }
        case H :    // hard clip: bases not present in the read at all
            break;
        case M :    // alignment match or mismatch: one graph node per column
            {
                for(int i=0; i<cigarLen; i++){
                    numOp++;
                    /* takes care of jumping over padding area (gaps) in MSA */
                    int tmpColPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
                    if(tmpColPos > colPos){
                        // hop through gap ('.') nodes so the read path stays connected
                        for(int j=colPos;j<tmpColPos;j++){
                            HLA.HOPPING++;
                            curnode = this.nodeHashList.get(j-1).get(new Integer(Base.char2ibase('.')));
                            // quals[baseIndex-1]: reuse the previous base's quality for the gap hop
                            this.incrementWeight(prevnode,curnode,isRefStrand, quals[baseIndex-1], readNum);
                            prevnode=curnode;
                        }
                        colPos = tmpColPos;
                    }
                    curnode = this.nodeHashList.get(colPos -1).get(new Integer(Base.char2ibase((char)bases[baseIndex])));
                    /* if NO such node is found, we add new node and add edge from prevnode.
                       mismatch that is not covered by reference sequence */
                    if(curnode == null){
                        HLA.NEW_NODE_ADDED++;
                        curnode = this.addMissingNode((char)bases[baseIndex], colPos, curnode, prevnode, isRefStrand, quals[baseIndex], readNum);
                        if(curnode == null)
                            System.err.println("IMPOSSIBLE: curnode NULL again after adding missing node!");
                    }
                    else if(prevnode != null)/* if prevnode is not set. firstBase*/
                        this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
                    prevnode=curnode;
                    baseIndex++;
                    //refBasePos++;
                    colPos++;
                    //colPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
                    //colPos++;
                }
                break;
            }
        case D :    // deletion from the reference: walk gap nodes, read cursor stays put
            {
                for(int i=0; i<cigarLen; i++){
                    numOp++;
                    /* takes care of jumping over padding area (gaps) in MSA */
                    int tmpColPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
                    if(tmpColPos > colPos){
                        for(int j=colPos;j<tmpColPos;j++){
                            HLA.HOPPING++;
                            curnode = this.nodeHashList.get(j-1).get(new Integer(Base.char2ibase('.')));
                            this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex-1], readNum);
                            prevnode=curnode;
                        }
                        colPos = tmpColPos;
                    }
                    /* need to grab gap node at current column */
                    curnode = this.nodeHashList.get(colPos - 1).get(new Integer(Base.char2ibase('.')));
                    /* if NO such node is found, we add new node and add edge from prevnode */
                    if(curnode == null){
                        HLA.NEW_NODE_ADDED++;
                        curnode = this.addMissingNode('.', colPos, curnode, prevnode, isRefStrand, quals[baseIndex-1], readNum);
                    }else
                        this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex-1], readNum);
                    prevnode=curnode;
                    //refBasePos++;
                    colPos++;
                }
                break;
            }
        case I :    // insertion relative to the reference
            {
                // Need to check the colPos distance to nextBase in the allele to see if there are spaces for insertion.
                // If there are spaces for insertion, must insert into those spaces first then insert into insertionNodeHash.
                int insertionIndex = -1;    // position within the per-column insertion list
                for(int i=0; i<cigarLen; i++){
                    HLA.INSERTION++;
                    numOp++;
                    int tmpColPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
                    if(tmpColPos == colPos){//then we must insert into insertionNodeHashList
                        insertionIndex++;
                        if(this.insertionNodeHashList.get(colPos - 1).size() > insertionIndex){
                            //curnode = this.insertionNodeHashList.get(colPos - 1).get(i).get(new Character((char)bases[baseIndex]));
                            curnode = this.insertionNodeHashList.get(colPos - 1).get(insertionIndex).get(new Integer(Base.char2ibase((char)bases[baseIndex])));
                        }else{//we need to add extra position (insertion length)
                            //this.insertionNodeHashList.get(colPos - 1).add(new HashMap<Character, Node>());
                            this.insertionNodeHashList.get(colPos - 1).add(new HashMap<Integer, Node>());
                            curnode = null;
                        }
                        if(curnode == null){
                            // first time this inserted base is seen at this insertion slot
                            curnode = new Node((char)bases[baseIndex], colPos);
                            HLA.INSERTION_NODE_ADDED++;
                            this.g.addVertex(curnode);
                            this.insertionNodeHashList.get(colPos - 1).get(insertionIndex).put(new Integer(Base.char2ibase((char)bases[baseIndex])), curnode);
                            this.addAndIncrement(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
                            //DefaultWeightedEdge e = this.g.addEdge(prevnode, curnode);
                            //this.g.setEdgeWeight(e, 0.0d);
                            //this.incrementWeight(prevnode, curnode, isRefStrand,quals[baseIndex]);
                        }else{
                            //this.incrementWeight(prevnode, curnode);
                            this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
                        }
                        prevnode = curnode;
                        baseIndex++;
                    }else if(tmpColPos > colPos){//then we must insert here.
                        // MSA already has gap columns here, so the insertion fits in-column
                        curnode = this.nodeHashList.get(colPos - 1).get(new Integer(Base.char2ibase((char)bases[baseIndex])));
                        if(curnode == null){
                            HLA.NEW_NODE_ADDED++;
                            //curnode = this.addMissingNode((char)bases[baseIndex], colPos, curnode, prevnode);
                            curnode = this.addMissingNode((char)bases[baseIndex], colPos, curnode, prevnode, isRefStrand, quals[baseIndex], readNum);
                            if(curnode == null){
                                System.err.println("IMPOSSIBLE: curnode NULL again after adding missing node! (1)[addWeight]");
                                System.exit(9);
                            }
                        }else if(prevnode !=null){
                            HLA.INSERTION_WITH_NO_NEW_NODE++;
                            //this.incrementWeight(prevnode, curnode);
                            this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
                        }else if(prevnode == null){
                            System.err.println("SHOULD NOT HAPPEND (2)[addWeight]");//can't start with insertion
                            System.exit(9);
                        }
                        prevnode = curnode;
                        baseIndex++;
                        colPos++;
                        insertionIndex = -1;
                    }else{//should not happen.
                        System.err.println("SHOULD NOT HAPPEND (3)[addWeight]");
                        System.exit(9);
                    }
                }
                break;
            }
        default: System.err.println("UNKNOWN CIGAROP:\t" + ce.toString());
            break;
        }
    }
    return numOp;
}
/**
 * Builds the backbone MSA graph from the known allele sequences.
 * Each MSA column gets a hash of base-code -&gt; Node; consecutive bases of each
 * allele are connected by edges. Edges out of sNode and into tNode get
 * Double.MAX_VALUE weight so they never constrain flow; all interior edges
 * start at weight 0 and are raised later by read evidence (addWeight).
 * Fix: removed the unused local {@code firstAllele} (dead store).
 */
private void buildGraph(){
    int numAlleles = this.alleles.size();
    /* for each allele */
    for(int i=0; i<numAlleles; i++){
        Sequence curSeq = this.alleles.get(i);
        /* for each base in allele */
        Node prevNode = sNode;
        for(int j=0; j<curSeq.getColLength(); j++){
            if(i==0){
                // the first allele defines the column structure: one hash per MSA column
                this.nodeHashList.add(new HashMap<Integer, Node>());
                this.insertionNodeHashList.add(new ArrayList<HashMap<Integer, Node>>());
            }
            HashMap<Integer, Node> curHash = nodeHashList.get(j);
            Integer curInt = new Integer(curSeq.baseAt(j).getIBase());
            Node tmpNode = curHash.get(curInt); //retrieve node
            if(tmpNode == null){ //if we have not added this node
                tmpNode = new Node(curSeq.baseAt(j));
                this.g.addVertex(tmpNode);
                curHash.put(curInt,tmpNode);
            }
            //add an edge from the previous column's node
            CustomWeightedEdge e;
            if(!this.g.containsEdge(prevNode, tmpNode)){
                e = this.g.addEdge(prevNode,tmpNode);
                if(prevNode.equals(sNode)){
                    // source edges must never be the bottleneck
                    this.g.setEdgeWeight(e, Double.MAX_VALUE);
                }else
                    this.g.setEdgeWeight(e, 0.0d);
            }
            prevNode = tmpNode;
        }
        //connect the allele's last node to the sink
        if(!this.g.containsEdge(prevNode, tNode))
            this.g.setEdgeWeight(this.g.addEdge(prevNode, tNode), Double.MAX_VALUE);
    }
}
/** Logs in/out degree of the source and sink nodes (debug aid).
 *  Fix: the original output had unbalanced parentheses ("|ind(N:outd(M)"). */
public void printStartEndNodeInfo(){
    System.err.println(this.sNode.toString() + "|ind(" + this.g.inDegreeOf(this.sNode) + "):outd(" + this.g.outDegreeOf(this.sNode) + ")");
    System.err.println(this.tNode.toString() + "|ind(" + this.g.inDegreeOf(this.tNode) + "):outd(" + this.g.outDegreeOf(this.tNode) + ")");
}
/**
 * Sums the weights of all edges from {@code preNode} into the nodes of one MSA
 * column. The column hash is keyed by integer base codes 0..5; missing bases or
 * absent edges simply contribute nothing.
 */
public double getTotalWeightForColumn(HashMap<Integer, Node> m, Node preNode){
    double sum = 0;
    for(int ibase = 0; ibase < 6; ibase++){
        Node columnNode = m.get(new Integer(ibase));
        CustomWeightedEdge edge = this.g.getEdge(preNode, columnNode);
        if(edge != null)
            sum += this.g.getEdgeWeight(edge);
    }
    return sum;
}
/**
 * Returns the 1-based [start, end) typing intervals for this gene, taken from
 * the reference allele's feature boundaries: exons 2 and 3 for class I genes,
 * exon 2 only for class II genes.
 */
public ArrayList<int[]> obtainTypingIntervals(){
    Sequence ref = this.alleles.get(0);
    ArrayList<int[]> typingIntervals = new ArrayList<int[]>();
    if(this.isClassI()){
        /* typing exon 2 and exon 3 as two separate intervals */
        typingIntervals.add(new int[]{ref.getBoundaries()[3], ref.getBoundaries()[4]});
        typingIntervals.add(new int[]{ref.getBoundaries()[5], ref.getBoundaries()[6]});
    }else if(this.isClassII()){
        /* typing exon 2 only */
        typingIntervals.add(new int[]{ref.getBoundaries()[3], ref.getBoundaries()[4]});
    }
    return typingIntervals;
}
/**
 * Walks every known allele through the weighted graph over the typing exons and
 * reports, per allele: edge-weight sum, number of zero-weight edges, number of
 * missing edges, and the minimum edge weight (max-flow bound). Alleles whose
 * path hits a missing node are skipped silently.
 *
 * Fix: the column total passed to getTotalWeightForColumn used
 * nodeHashList.get(j) — j is the typing-interval index (0 or 1) — instead of
 * the current MSA column k, so the LOWPROB ratio was computed against the
 * wrong column. Also removed the unused local curHash.
 *
 * @return always true (result is reported via stderr)
 */
public boolean traverseAndWeights(){
    System.err.println("=========================");
    System.err.println("= " + this.HLAGeneName);
    System.err.println("=========================");
    ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
    Node preNode = null;
    Node curNode = null;
    for(int i=0; i<this.alleles.size(); i++){
        preNode = null;
        curNode = null;
        Sequence curseq = this.alleles.get(i);
        double exonSum = 0.0d;
        double exonSump = 0.0d;     // sum over "confident" edges only (ratio >= 0.25)
        int exonNumZero = 0;
        int noEdge = 0;
        double exonFlow = Double.MAX_VALUE;     // min edge weight along the path
        StringBuffer out = new StringBuffer();
        out.append(curseq.getAlleleName() + "\n");
        boolean intact = true;
        eachallele:
        for(int j=0; j<typingIntervals.size(); j++){
            int start = typingIntervals.get(j)[0];
            int end = typingIntervals.get(j)[1];
            preNode = null;
            out.append("\nNEXT_EXON\n");
            //need to start a node before the exon start, hence -2, rather than -1 transformation from 1-based to 0-based index
            //k should be 0-based. start and end are 1-based (inclusive, exclusive) index.
            for(int k=start-2; k<end-1; k++){
                char uchar = Character.toUpperCase(curseq.baseAt(k).getBase());
                curNode = this.nodeHashList.get(k).get(new Integer(Base.char2ibase(uchar)));
                if(curNode == null){
                    // allele base absent from the graph: give up on this allele
                    preNode = curNode;
                    intact = false;
                    break eachallele;
                }
                if(preNode != null){
                    CustomWeightedEdge e = this.g.getEdge(preNode, curNode);
                    if(e == null){
                        noEdge++;
                        out.append(uchar + "[NO_EDGE]->");
                        exonFlow = -1.0d;
                    }else{
                        double tmpw = this.g.getEdgeWeight(e);
                        // BUGFIX: was this.nodeHashList.get(j) (interval index), must be column k
                        double total = this.getTotalWeightForColumn(this.nodeHashList.get(k), preNode);
                        if(tmpw > 0.0d){
                            exonSum+=tmpw;
                            if(tmpw/total < 0.25d){
                                out.append(("(E)LOWPROB ->\t" + e.getGroupErrorProb() + "\t" + (tmpw/total)) + "\n");
                            }else{
                                exonSump+=tmpw;
                            }
                        }
                        if(tmpw == 0.0d)
                            exonNumZero++;
                        if(tmpw < exonFlow){
                            exonFlow = tmpw;
                        }
                        out.append(uchar + "[" + tmpw + "]->");
                    }
                }
                preNode = curNode;
            }
        }
        if(intact){
            out.append(("\n" + curseq.getAlleleName() + "\tNO_EDGE:\t" + noEdge +"\tE_SUM:\t" + exonSum + "\tE_ZERO:\t" + exonNumZero + "\tE_SUM_P\t" + exonSump + "\tMAXFLOW\t" + exonFlow + "\n"));
            System.err.println(out.toString());
        }
    }
    return true;
}
/**
 * Debug walk: verifies every allele and follows its base path through the
 * graph, trying an uppercase lookup first and a lowercase lookup as fallback.
 *
 * Fix: {@code lchar} was computed with toUpperCase (copy-paste of the line
 * above), which made the lowercase fallback branch dead code; it now uses
 * toLowerCase as the variable name intends.
 */
public void traverse(){
    System.err.println("Traversing (" + this.alleles.size() + ")");
    Node preNode;
    Node curNode;
    for(int i=0; i<this.alleles.size(); i++){
        this.alleles.get(i).verify();
        preNode = this.sNode;
        Sequence curseq = this.alleles.get(i);
        for(int j=0; j<curseq.getColLength(); j++){
            char uchar = Character.toUpperCase(curseq.baseAt(j).getBase());
            // BUGFIX: was Character.toUpperCase, making the fallback below unreachable
            char lchar = Character.toLowerCase(curseq.baseAt(j).getBase());
            HashMap<Integer, Node> curHash = this.nodeHashList.get(j);
            if(curHash.get(new Integer(Base.char2ibase(uchar))) != null){
                curNode = curHash.get(new Integer(Base.char2ibase(uchar)));
                preNode = curNode;
            }else if(curHash.get(new Integer(Base.char2ibase(lchar))) != null){
                curNode = curHash.get(new Integer(Base.char2ibase(lchar)));
                preNode = curNode;
            }else{
                ;//node not found in the graph at this column
            }
        }
    }
    System.err.println("DONE Traversing");
}
/** Recomputes the group error probability on every edge of the graph. */
public void updateEdgeWeightProb(){
    for(CustomWeightedEdge edge : g.edgeSet())
        edge.computeGroupErrorProb();
}
/** @return true iff this gene is an HLA class I gene (A, B or C). */
public boolean isClassI(){
    String name = this.HLAGeneName;
    return name.equals("A") || name.equals("B") || name.equals("C");
}
/**
 * @return true iff this gene is one of the typed HLA class II genes
 *         (DQA1, DQB1, DRB1). DPA1, DPB1 and DRA are intentionally excluded.
 */
public boolean isClassII(){
    String name = this.HLAGeneName;
    return name.equals("DQA1")
        || name.equals("DQB1")
        || name.equals("DRB1");
}
/*
* countBubbles() --> returns ArrayList of simple bubbles (NOT merged)
* and sets interbubbleSequences in this class.
*
* processBubbles() --> merges bubbles by checking reads support information.
*/
/**
 * Entry point for typing: counts the simple bubbles of the graph and merges
 * them via read-support phasing. Typing results are appended to {@code rb}.
 */
public void countBubblesAndMerge(StringBuffer rb){
    this.resultBuffer = rb;
    ArrayList<Bubble> simpleBubbles = this.countBubbles();
    this.processBubbles(simpleBubbles);
}
public void processBubbles(ArrayList<Bubble> bubbles){
/* to load actual bubble sequence in each paths found in each bubble */
System.err.println("**************************");
System.err.println("Checking numBubbles: " + bubbles.size());
for(int i=0; i<bubbles.size(); i++){
if(bubbles.get(i).isFirstBubble()){
System.err.println("Bubble (" + i + "):\t[FB]" );
}
bubbles.get(i).initBubbleSequences();
}
/* superBubble is a merged bubbles. Ideally, you want to have just one bubble. */
ArrayList<Bubble> superBubbles = new ArrayList<Bubble>();
Bubble curSuperBubble = bubbles.get(0);
Bubble lastMergedBubble = curSuperBubble;
int lastSegregationColumnIndex = curSuperBubble.getStart().get(0);
System.err.println("(iteration 0):\t" + curSuperBubble.getNumPaths());
for(int i=1; i<bubbles.size(); i++){
System.err.println("\t(attempting merging)\t" + bubbles.get(i).getNumPaths());
bubbles.get(i).printBubbleSequence();
System.err.print("(SB)\t");
curSuperBubble.printBubbleSequenceSizes();
System.err.print("(OB)\t");
bubbles.get(i).printBubbleSequenceSizes();
//boolean phased = curSuperBubble.mergeBubble(bubbles.get(i));
MergeStatus ms = null;
if(!bubbles.get(i).isFirstBubble()){
ms = curSuperBubble.mergeBubble(bubbles.get(i), lastSegregationColumnIndex, this.isClassII(), lastMergedBubble);
lastMergedBubble = bubbles.get(i);
}
//if we are cutting here
if(bubbles.get(i).isFirstBubble() || ms.isSplit()){
if(bubbles.get(i).isFirstBubble())
System.out.println("NOT PHASING OVER DIFFERENT EXONS --> setting OB as curSuperBubble");
else
System.out.println("CANT PHASE --> setting OB as curSuperBubble.");
superBubbles.add(curSuperBubble);
curSuperBubble = bubbles.get(i);
lastMergedBubble = curSuperBubble;
//need to update segregationColumnIndex
lastSegregationColumnIndex = curSuperBubble.getStart().get(0);
}
//if not cutting
else{
//if we have a segreation, need to updated segregationColumnIndex
if(ms.isSegregating())
lastSegregationColumnIndex = ms.getLastSegregationColumnIndex();
System.err.println("**********************************");
curSuperBubble.printBubbleSequenceSizes();
System.err.println("**********************************");
curSuperBubble.printBubbleSequence();
}
System.err.println("(iteration " + i + "):\t" + curSuperBubble.getNumPaths());
}
superBubbles.add(curSuperBubble);
System.err.println("\n\n<
this.checkSuperBubbleLinkages(superBubbles);
//this.printBubbleResults(superBubbles, bubbles);
//this.compareInterBubbles(superBubbles);
ArrayList<ArrayList<AllelePath>> fracturedPaths = this.getFracturedPaths(superBubbles, bubbles);
this.allelePathPrintTest(fracturedPaths);//print test of fractured candidate. print super bubble sequences
this.allelePathToFastaFile(fracturedPaths);//writes superbubble sequences as fasta file
ArrayList<SuperAllelePath> superpaths = this.generateSuperAllelePaths(fracturedPaths);
this.superAllelePathToFastaFile(superpaths); //writes full length candidate allele concatenating super bubbles as fasta file
this.printScoreForMaxLikeliPair(superpaths, superBubbles);
this.pathAlign(superpaths); // aligns to DB for typing.
}
public void printScoreForMaxLikeliPair(ArrayList<SuperAllelePath> superpaths, ArrayList<Bubble> superBubbles){
//allProduct, jointProduct, avgProduct, MAXFLOW
double[] curBest = {Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, 0.0d};
double[] curSecondBest = {Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, 0.0d};
int[][] bestIndicies = new int[4][2];
int[][] secondBestIndicies = new int[4][2];
for(int i = 0; i<superpaths.size(); i++){
for(int j=i; j<superpaths.size(); j++){
double[] scores = superpaths.get(i).getJointProbability(superpaths.get(j), superBubbles);
double[] jointWeightFlow = superpaths.get(i).jointTraverse(superpaths.get(j), this.g);
System.err.println("AllelePair [" + i + ":" + j + "]\t{ + "
+ scores[0] + "\t"
+ scores[1] + "\t"
+ scores[2] + "\t"
+ scores[3] + "\t"
+ scores[4] + "\t"
+ scores[5]
+ "\tE_SUM:" + jointWeightFlow[0]
+ "\tMAXFLOW:" + jointWeightFlow[1]
+ "}");
//higher the better
for(int k=0; k<3; k++){
if(curBest[k] < scores[k+3]){
curSecondBest[k] = curBest[k];
curBest[k] = scores[k+3];
secondBestIndicies[k][0] = bestIndicies[k][0];
secondBestIndicies[k][1] = bestIndicies[k][1];
bestIndicies[k][0] = i;
bestIndicies[k][1] = j;
}else if(curSecondBest[k] < scores[k+3]){
curBest[k] = scores[k+3];
bestIndicies[k][0] = i;
bestIndicies[k][1] = j;
}
}
if(curBest[3] < jointWeightFlow[1]){
curSecondBest[3] = curBest[3];
curBest[3] = jointWeightFlow[1];
secondBestIndicies[3][0] = bestIndicies[3][0];
secondBestIndicies[3][1] = bestIndicies[3][1];
bestIndicies[3][0] = i;
bestIndicies[3][1] = j;
}else if(curSecondBest[3] < jointWeightFlow[1]){
curBest[3] = jointWeightFlow[1];
bestIndicies[3][0] = i;
bestIndicies[3][1] = j;
}
}
}
System.err.println(ANSI_RED + "
System.err.print("RANK 1:\t");
this.printBest(bestIndicies, curBest, 0);
System.err.print("RANK 2:\t");
this.printBest(secondBestIndicies, curSecondBest, 0);
System.err.println("
System.err.print("RANK 1:\t");
this.printBest(bestIndicies, curBest, 1);
System.err.print("RANK 2:\t");
this.printBest(secondBestIndicies, curSecondBest, 1);
System.err.println("
System.err.print("RANK 1:\t");
this.printBest(bestIndicies, curBest, 2);
System.err.print("RANK 2:\t");
this.printBest(secondBestIndicies, curSecondBest, 2);
System.err.println("
System.err.print("RANK 1:\t");
this.printBest(bestIndicies, curBest, 3);
System.err.print("RANK 2:\t");
this.printBest(secondBestIndicies, curSecondBest, 3);
/* superAllelePath-wise best score printing */
/*
int count = 0;
for(SuperAllelePath sap : superpaths){
double[] weightFlow = sap.traverse(this.g);
System.err.println("Superpath[" + count + "]\tE_SUM:" + weightFlow[0] + "\tMAXFLOW:" + weightFlow[1]);
count++;
}*/
}
/**
 * Prints one ranked allele-pair line for the given criterion index.
 * Fix: output label typo "AllelePari" -> "AllelePair" and missing colon after "MF".
 */
private void printBest(int[][] indicies, double[] curBest, int typeIndex){
    System.err.println("AllelePair[" + indicies[typeIndex][0] + ":" + indicies[typeIndex][1] + "]\t{AP:"
                       + curBest[0] + "\tJP:" + curBest[1] + "\tAVP:" + curBest[2] + "\tMF:" + curBest[3] + "}"
                       );
}
/**
 * Aligns each candidate super-allele sequence against the typing database.
 * An exact string match short-circuits the alignment; otherwise every database
 * sequence is aligned with Needleman-Wunsch and all co-best hits (by identical
 * length) are reported to stderr and appended to resultBuffer.
 *
 * Fix: the resultBuffer record concatenated the score directly onto sapname
 * with no separator ("...<score><sapname>") — a tab is now inserted, matching
 * the tab-separated format of the other fields.
 */
public void pathAlign(ArrayList<SuperAllelePath> superpaths){
    int count = 1;
    for(SuperAllelePath sap : superpaths){
        String candidate = sap.getSequenceBuffer().toString();
        count++;
        String sapname = sap.toSimpleString();
        String subject = null;
        ArrayList<String> maxName = new ArrayList<String>();
        int maxIdenticalLen = 0;
        ArrayList<Result> maxR = new ArrayList<Result>();
        boolean foundPerfect = false;
        // fast path: exact sequence match needs no alignment
        for(HLASequence subjscan : this.typingSequences){
            subject = subjscan.getSequence();
            if(candidate.equals(subject)){
                Result curR = new Result(candidate.length(), subject);
                maxR.add(curR);
                maxName.add(subjscan.getGroup().getGroupString());
                System.err.println("Found perfect match.");
                foundPerfect = true;
                break;
            }
        }
        if(!foundPerfect){
            for(HLASequence subj : this.typingSequences){
                subject = subj.getSequence();
                Result curR = NWAlign.runDefault(candidate, subject);
                if(curR.getIdenticalLen() >= maxIdenticalLen){
                    if(curR.getIdenticalLen() > maxIdenticalLen){
                        // strictly better: restart the co-best lists
                        maxName = new ArrayList<String>();
                        maxIdenticalLen = curR.getIdenticalLen();
                        maxR = new ArrayList<Result>();
                    }
                    maxName.add(subj.getGroup().getGroupString());
                    maxR.add(curR);
                }
            }
        }
        for(int i=0;i<maxR.size();i++){
            System.err.println("["+ sapname+ "]BEST MATCH:\t" + maxName.get(i) + "\t" + maxR.get(i).getIdenticalLen() + "\t" + maxR.get(i).getIdentity());
            // BUGFIX: tab inserted between score and sapname
            this.resultBuffer.append(maxName.get(i) + "\t" + maxR.get(i).getIdenticalLen() + "\t" + maxR.get(i).getIdentity() + "\t" + maxR.get(i).getScore() + "\t" + sapname + "\n");
        }
    }
}
/*
public void printBubbleResults(ArrayList<Bubble> superBubbles){
int startIndex = 0;
System.out.println("Printing\t" + superBubbles.size() + "\tfractured super bubbles.");
int count = 0;
for(Bubble sb : superBubbles){
System.out.println("\tSuperBubble\t" + count);
startIndex = sb.printResults(this.interBubbleSequences, startIndex);
count++;
}
}
*/
/**
 * Reports, for every pair of super-bubbles, whether any reads phase paths of
 * one with paths of the other. Each phased-pair record is
 * {firstPathIndex, secondPathIndex, supportingReadCount}.
 */
public void checkSuperBubbleLinkages(ArrayList<Bubble> superBubbles){
    int n = superBubbles.size();
    ArrayList<int[]>[] pLists = new ArrayList[(n-1)*n/2];
    int pairCount = 0;
    for(int i=0; i<n; i++){
        Bubble first = superBubbles.get(i);
        for(int j=i+1; j<n; j++){
            Bubble second = superBubbles.get(j);
            ArrayList<int[]> phasedList = first.getPhasedSuperBubbles(second);
            pLists[pairCount] = phasedList;
            pairCount++;
            if(phasedList.size() > 0){
                System.err.println("Phasing evidence FOUND between SB(" + i + ") : SB(" + j + ")" );
                for(int[] index : phasedList)
                    System.err.println("SB(" + i + ")-" + index[0] + " : SB(" + j + ")-" + index[1]);
            }else{
                System.err.println("NO phasing evidence between SB(" + i + ") : SB(" + j + ")" );
            }
        }
    }
}
// NOTE(review): unimplemented stub. Presumably intended to greedily choose
// super-bubble linkings from the per-pair phasing lists produced by
// checkSuperBubbleLinkages — confirm before implementing or removing.
public void selectGreedyForSuperBubbleLinking(ArrayList<int[]>[] phasedLists){
    //for(ArrayList<int[]>)
}
/*
public void compareInterBubbles(ArrayList<Bubble> superBubbles){
//System.out.println(">>>>>>>>>>>>>>>> Checking interbubbles <<<<<<<<<<");
//for(int i=0; i<this.interBubbleSequences.size();i++){
// System.out.println("[I" + i + "]:\t" + this.interBubbleSequences.get(i).toString() + "\t" + this.interBubblePaths.get(i).toSimplePathString(this));
// }
int k = 0;
for(int i=0; i<superBubbles.size(); i++){
Bubble sb = superBubbles.get(i);
Path firstPath = sb.getPaths().get(0);
ArrayList<StringBuffer> bubbleSequences = firstPath.getBubbleSequences();
ArrayList<CustomWeightedEdge> orderedEdgeList = firstPath.getOrderedEdgeList();
int curEdgePos = 0;
int curMaxPos = 0;
for(int j=0; j<bubbleSequences.size(); j++){
System.out.println("[I" + k + "]:\t" + this.interBubbleSequences.get(k).toString() + "\t" + this.interBubblePaths.get(k).toSimplePathString(this));
k++;
System.out.print("[B:" + j +"]" + bubbleSequences.get(j).toString() + "\t");
curMaxPos += sb.getBubbleLengths().get(j).intValue();
for(;curEdgePos < curMaxPos; curEdgePos++){
System.out.print(this.g.getEdgeTarget(orderedEdgeList.get(curEdgePos)).getBase());
}
System.out.println();
}
}
}
*/
/** Sets the base name used for all output files written by this gene's typing run. */
public void setFileName(String f){
    this.outputfilename = f;
}
/**
 * Builds full-length candidate sequences as the cartesian product of the
 * per-super-bubble sequence groups, writes them to
 * "&lt;outputfilename&gt;_&lt;gene&gt;.typed.fa.candidates", and returns them.
 *
 * Fix: the writer was closed inside the try body, so it leaked whenever a
 * write failed — close now happens in finally.
 */
public ArrayList<DNAString> generateCandidates(ArrayList<ArrayList<DNAString>> fracturedSequences){
    ArrayList<DNAString> sequences = new ArrayList<DNAString>();
    for(DNAString ds : fracturedSequences.get(0)){
        sequences.add(ds.deepCopy());
    }
    //extend every partial candidate with each sequence of the next super-bubble
    for(int i=1; i<fracturedSequences.size(); i++){
        ArrayList<DNAString> otherSequences = fracturedSequences.get(i);
        ArrayList<DNAString> results = new ArrayList<DNAString>();
        for(int j=0; j < sequences.size(); j++){
            for(int k=0; k < otherSequences.size(); k++){
                results.add(sequences.get(j).mergeDeep(otherSequences.get(k)));
            }
        }
        sequences = results;
    }
    BufferedWriter bw = null;
    try{
        bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa.candidates"));
        for(DNAString seq : sequences)
            bw.write(seq.toFasta().toString());
    }catch(IOException ioe){
        ioe.printStackTrace();
    }finally{
        if(bw != null){
            try{
                bw.close();
            }catch(IOException ioe){
                ioe.printStackTrace();
            }
        }
    }
    return sequences;
}
/*
public ArrayList<ArrayList<Path>> mergePathsOverSuperBubbles(ArrayList<Bubble> superBubbles){
int startIndex = 0;
int count = 0;
ArrayList<ArrayList<Path>> fracturedPaths = new ArrayList<ArrayList<Path>>();
for(Bubble sb : superBubbles){
ArrayList<Path> paths = new ArrayList<Path>();
fracturedPaths.add(paths);
}
}
*/
/*
public void printBubbleResults(ArrayList<Bubble> superBubbles, ArrayList<Bubble> bubbles){
//StringBuffer output = new StringBuffer();
int startIndex = 0;
System.out.println("Printing\t" + superBubbles.size() + "\tfractured super bubbles.");
//output.append(superBubbles.size() + "\tfractured SuperBubbles\n");
int count = 0;
//over each super bubble
ArrayList<ArrayList<DNAString>> fracturedSequences = new ArrayList<ArrayList<DNAString>>();
int bubbleOffset = 0;
Bubble pre = null;
for(Bubble sb : superBubbles){
if(pre != null){
bubbleOffset += pre.numBubbles();
}
ArrayList<DNAString> sequences = new ArrayList<DNAString>();
fracturedSequences.add(sequences);
System.out.println("\tSuperBubble\t" + count);
System.out.println("\t\tbubbleOffset:\t" + bubbleOffset);
startIndex = sb.printResults(this.interBubbleSequences, startIndex, sequences, this.HLAGeneName , count, bubbles, bubbleOffset);
count++;
pre = sb;
}
BufferedWriter bw = null;
try{
bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa"));
for(ArrayList<DNAString> fseq : fracturedSequences){
for(DNAString ds : fseq)
bw.write(ds.toFasta().toString());
}
//bw.write(output.toString());
bw.close();
}catch(IOException ioe){
ioe.printStackTrace();
}
ArrayList<DNAString> candidateAlleles = this.generateCandidates(fracturedSequences);
//this.candidateAlign(candidateAlleles);
}
*/
/**
 * Extracts candidate allele paths per super-bubble. The outer list has one
 * entry per super-bubble; each inner list holds that super-bubble's merged
 * (and trimmed) AllelePaths.
 */
public ArrayList<ArrayList<AllelePath>> getFracturedPaths(ArrayList<Bubble> superBubbles, ArrayList<Bubble> bubbles){
    System.out.println("Printing\t" + superBubbles.size() + "\tfractured super bubbles.");
    ArrayList<ArrayList<AllelePath>> fractured = new ArrayList<ArrayList<AllelePath>>();
    int interBubbleIndex = 0;   // running index into interBubblePaths2
    int sbCount = 0;
    int bubbleOffset = 0;       // simple-bubble offset of the current super-bubble
    Bubble previous = null;
    for(Bubble sb : superBubbles){
        if(previous != null)
            bubbleOffset += previous.numBubbles();
        ArrayList<AllelePath> pathsForThisSB = new ArrayList<AllelePath>();
        fractured.add(pathsForThisSB);
        //header/tail trimming is handled inside mergePathsInSuperBubbles
        interBubbleIndex = sb.mergePathsInSuperBubbles(this.interBubblePaths2, interBubbleIndex, pathsForThisSB, this.HLAGeneName, sbCount, this.g, bubbles, bubbleOffset);
        sbCount++;
        previous = sb;
    }
    return fractured;
}
/**
 * Forms full-length SuperAllelePaths as the cartesian product of the
 * per-super-bubble AllelePath groups: each group in turn multiplies the set of
 * partial super-paths by cloning and extending every combination.
 */
public ArrayList<SuperAllelePath> generateSuperAllelePaths(ArrayList<ArrayList<AllelePath>> fracturedSequences){
    ArrayList<SuperAllelePath> superpaths = new ArrayList<SuperAllelePath>();
    ArrayList<AllelePath> firstGroup = fracturedSequences.get(0);
    for(int i=0; i<firstGroup.size(); i++){
        SuperAllelePath seed = new SuperAllelePath(this.HLAGeneName);
        superpaths.add(seed);
        seed.addAllelePath(firstGroup.get(i), i);
    }
    for(int i=1; i<fracturedSequences.size(); i++){
        ArrayList<AllelePath> nextGroup = fracturedSequences.get(i);
        ArrayList<SuperAllelePath> expanded = new ArrayList<SuperAllelePath>();
        for(int j=0; j<superpaths.size(); j++){
            for(int k=0; k < nextGroup.size(); k++){
                SuperAllelePath extended = superpaths.get(j).clone();
                expanded.add(extended);
                extended.addAllelePath(nextGroup.get(k), k);
            }
        }
        superpaths = expanded;
    }
    return superpaths;
}
/** Debug print: dumps every AllelePath of every super-bubble to stdout. */
public void allelePathPrintTest(ArrayList<ArrayList<AllelePath>> fracturedAllelePaths){
    int sbIndex = 0;
    for(ArrayList<AllelePath> paths : fracturedAllelePaths){
        System.out.println("SUPER BUBBLE [" + sbIndex + "]");
        for(int j=0; j<paths.size(); j++)
            paths.get(j).printPath(this.g, sbIndex, j);
        sbIndex++;
    }
}
/**
 * Writes every super-bubble's AllelePath sequences as FASTA to
 * "&lt;outputfilename&gt;_&lt;gene&gt;.typed.fa".
 *
 * Fix: the writer was closed inside the try body, leaking the file handle when
 * a write threw — close now happens in finally.
 */
public void allelePathToFastaFile(ArrayList<ArrayList<AllelePath>> fracturedAllelePaths){
    BufferedWriter bw = null;
    try{
        bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa"));
        for(ArrayList<AllelePath> faps : fracturedAllelePaths){
            for(AllelePath ap : faps){
                bw.write(ap.toFasta().toString());
            }
        }
    }catch(IOException ioe){
        ioe.printStackTrace();
    }finally{
        if(bw != null){
            try{
                bw.close();
            }catch(IOException ioe){
                ioe.printStackTrace();
            }
        }
    }
}
/**
 * Writes the full-length candidate super-allele sequences as FASTA to
 * "&lt;outputfilename&gt;_&lt;gene&gt;.typed.fa.candidates".
 *
 * Fixes: filename suffix typo ".candiates" -&gt; ".candidates" (now consistent
 * with generateCandidates); writer close moved to finally so the handle no
 * longer leaks when a write throws.
 */
public void superAllelePathToFastaFile(ArrayList<SuperAllelePath> superAllelePaths){
    BufferedWriter bw = null;
    try{
        bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa.candidates"));
        for(SuperAllelePath sap : superAllelePaths)
            bw.write(sap.toFasta().toString());
    }catch(IOException ioe){
        ioe.printStackTrace();
    }finally{
        if(bw != null){
            try{
                bw.close();
            }catch(IOException ioe){
                ioe.printStackTrace();
            }
        }
    }
}
/*
public void getFracturedPathsOLD(ArrayList<Bubble> superBubbles, int[] headerExcessArr, int[] tailExcessArr){
int startIndex = 0;
int count = 0;
//inner list holds paths found for one superBubble
//outer list holds multiple superBubbles
ArrayList<ArrayList<Path>> fracturedPaths = new ArrayList<ArrayList<Path>>();
Bubble presb = null;
ArrayList<Path> prePaths = null;
Bubble sb = null;
int firstBubbleCount = 0;
int headerExcess,tailExcess;
//for(sb : superBubbles){
for(int i=0;i<superBubbles.size(); i++){
sb = superBubbles.get(i);
ArrayList<Path> paths = new ArrayList<Path>();
fracturedPaths.add(paths);
startIndex = sb.mergePathsInSuperBubbles(this.interBubblePaths, startIndex, paths, this.HLAGeneName, count);
if(sb.isFirstBubble()){
headerExcess = headerExcessArr[firstBubbleCount];
tailExcess = (firstBubbleCount > 0 ? tailExcessArr[firstBubbleCount-1] : 0);
if(presb != null){
for(Path p : prePaths)
p.trimExcess(0, tailExcess);
}
for(Path p: paths)
p.trimExcess(headerExcess, 0);
firstBubbleCount++;
presb = sb;
prePaths = paths;
}
count++;
}
if(sb !=null && tailExcessArr[firstBubbleCount-1] > 0){
for(Path p : prePaths)
p.trimExcess(0, tailExcessArr[firstBubbleCount-1]);
}
//this.pathPrintTest(this.generateCandidatePaths(fracturedPaths));
this.pathAlign(this.generateCandidatePaths(fracturedPaths));
}
public void pathPrintTest(ArrayList<Path> ps){
int count = 1;
for(Path p : ps){
p.printPath(this.g, count);//, this.headerExcessLengthBeyondTypingBoundary, this.tailExcessLengthBeyondTypingBoundary);
count++;
}
}
*/
/**
 * Aligns each candidate sequence against every known typing (reference)
 * sequence with Needleman-Wunsch (NWAlign.runDefault) and records the best
 * hit per candidate in {@code this.resultBuffer}; progress goes to stderr.
 *
 * @param candidates assembled candidate sequences to classify
 */
public void candidateAlign(ArrayList<DNAString> candidates){
    for(DNAString candidateDNA : candidates){
        String candidate = candidateDNA.getSequence();
        String subject = null;
        String maxName = null;
        String maxHit = null;
        int maxIdenticalLen = 0;
        Result maxR = null;
        for(HLASequence subj : this.typingSequences){
            subject = subj.getSequence();
            Result curR = NWAlign.runDefault(candidate, subject);
            // ">=" keeps the LAST best-scoring subject on ties (original behavior).
            if(curR.getIdenticalLen() >= maxIdenticalLen){
                maxIdenticalLen = curR.getIdenticalLen();
                maxName = subj.getGroup().getGroupString();
                maxR = curR;
                maxHit = subject;
                // Identity of 1.0 cannot be beaten; stop scanning subjects.
                if(curR.getIdentity() == 1.0d){
                    System.err.println("Found perfect match.");
                    break;
                }
            }
        }
        if(maxR == null){
            // typingSequences was empty: the original dereferenced maxR here and
            // threw a NullPointerException. Report and skip this candidate instead.
            System.err.println("NO MATCH: no typing sequences to align against.");
            continue;
        }
        System.err.println("BEST MATCH:\t" + maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity());
        System.err.println("Query:\n"+candidate);
        System.err.println("Hit:\n"+maxHit);
        this.resultBuffer.append(maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity() + "\t" + maxR.getScore() + "\n");
        this.resultBuffer.append(maxR.toAlignmentString() + "\n");
    }
}
/*
public void pathAlign(ArrayList<Path> ps){
int count = 1;
for(Path p : ps){
String candidate = p.toString(this.g, count);//, this.headerExcessLengthBeyondTypingBoundary, this.tailExcessLengthBeyondTypingBoundary);
count++;
String subject = null;
String maxName = null;
int maxIdenticalLen = 0;
Result maxR = null;
for(HLASequence subj : this.typingSequences){
subject = subj.getSequence();
Result curR = NWAlign.runDefault(candidate, subject);
if(curR.getIdenticalLen() >= maxIdenticalLen){
maxIdenticalLen = curR.getIdenticalLen();
maxName = subj.getGroup().getGroupString();
maxR = curR;
if(curR.getIdentity() == 1.0d){
System.err.println("Found perfect match.");
break;
}
}
}
System.err.println("BEST MATCH:\t" + maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity());
this.resultBuffer.append(maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity() + "\t" + maxR.getScore() + "\n");
this.resultBuffer.append(maxR.toAlignmentString() + "\n");
}
}
public ArrayList<Path> generateCandidatePaths(ArrayList<ArrayList<Path>> fracturedPaths){
ArrayList<Path> paths = new ArrayList<Path>();
//add paths of the first superBubble
for(Path p : fracturedPaths.get(0)){
paths.add(p.deepCopy());
}
//for each of next superBubble
for(int i=1; i<fracturedPaths.size(); i++){
ArrayList<Path> otherPaths = fracturedPaths.get(i);
ArrayList<Path> results = new ArrayList<Path>();
//for each current path
for(int j=0; j < paths.size(); j++){
//for each next option
for(int k=0; k < otherPaths.size(); k++){
results.add(paths.get(j).combinePaths(otherPaths.get(k)));
}
}
paths = results;
}
return paths;
}
public void selectBestHits(ArrayList<DNAString> candidates){
ArrayList<Integer> score = new ArrayList<Integer>();
for(DNAString seq:candidates){
score.add(findBestHit(seq));
}
}
public int findBestHit(DNAString seq){
int score = 0;
//run alignment
return score;
}
*/
public ArrayList<Bubble> countBubbles(){
System.err.println("=========================");
System.err.println("= " + this.HLAGeneName);
System.err.println("=========================");
ArrayList<Bubble> bubbles = new ArrayList<Bubble>();
ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
/* counters */
int numBubbles = 0;
int curBubbleLength = 1;
int lastStartOfBubble = 0;
//ArrayList<Integer> numPaths = new ArrayList<Integer>();
ArrayList<Integer> bubbleLengths = new ArrayList<Integer>(); // keeps track of bubble lengths. Bubble length is length excluding collapsing nodes. L-2
ArrayList<Integer> coordinates = new ArrayList<Integer>(); //keeps track of start coordinates of bubbles
/* counters */
Node curSNode = null;
this.interBubbleSequences = new ArrayList<StringBuffer>();
//this.interBubblePaths = new ArrayList<Path>();
this.interBubblePaths2 = new ArrayList<TmpPath>();
StringBuffer curbf = new StringBuffer("");
TmpPath tp = new TmpPath();
for(int i=0; i<typingIntervals.size(); i++){
int start = typingIntervals.get(i)[0];
int end = typingIntervals.get(i)[1];
curBubbleLength = 1;
lastStartOfBubble = start - 2;
//boolean headerBubble = false;
boolean firstBubble = true; // to demarcate the first bubble of the interval
//Node preNode = null;
int k;
/* FOR EACH POSITION in a TYPING INTERVAL*/
for(k=start-1;k<end-1;k++){
HashMap<Integer, Node> columnHash = this.nodeHashList.get(k);
Integer[] keys = columnHash.keySet().toArray(new Integer[0]);
/*it's a collapsing node if curBubbleLength > 2
else it's a possible start of bubble.*/
if(keys.length == 1){
//headerBubble = false;
/* then it must be a collapsing node; */
if(curBubbleLength > 1){
this.interBubbleSequences.add(curbf);
//this.interBubblePaths.add(tp.toPath(this.g));
this.interBubblePaths2.add(tp);
//this.interBubblePaths.add(curP);
curBubbleLength++;
numBubbles++;
//numPaths.add(new Integer(this.analyzeBubble(lastStartOfBubble, k)));
bubbleLengths.add(new Integer(curBubbleLength-2));
coordinates.add(new Integer(lastStartOfBubble));
if(firstBubble){
//if(i>0)//if it's not first interval, we need to update last bubble
// bubbles.get(bubbles.size()-1).trimPaths(0,this.tailExcessLengthBeyondTypingBoundary[i-1]);
bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0]), firstBubble, this.headerExcessLengthBeyondTypingBoundary[i], 0));
//bubbles.get(bubbles.size()-1).trimPath(this.headerExcessLengthBeyongTypingBoundary[i], 0);
firstBubble = false;
}else
bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0])));
curSNode = columnHash.get(keys[0]);
//preNode = curSNode;
lastStartOfBubble = k;
curBubbleLength = 1;
//curP = new Path();
curbf = new StringBuffer("");
curbf.append(curSNode.getBase());
tp = new TmpPath();
tp.appendNode(curSNode);
}
/* Possible Start of a Bubble or straight path */
else{
curSNode = columnHash.get(keys[0]);
curbf.append(curSNode.getBase());
tp.appendNode(curSNode);
/*if(prNode == null)
preNode = curSNode;
else{
curP.appendEdge(this.g.getEdge(preNode, curSNode));
preNode = curSNode;
}*/
lastStartOfBubble = k;
curBubbleLength = 1;
}
}else if(keys.length > 1){//middle of bubble
/* NEED TO FIX THIS TO ALLOW BUBBLE TO BE USED at the boundaries*/
if(k==(start-1)){// || headerBubble){
System.err.println("[k] = " + k);
int tmpBubbleLength = 1;
for(int l=start-2;;l
System.err.println("trying new k: [k] = " + l);
tmpBubbleLength++;
HashMap<Integer, Node> tmpHash = this.nodeHashList.get(l);
Integer[] tmpKeys = tmpHash.keySet().toArray(new Integer[0]);
if(tmpKeys.length == 1){
System.err.println("Found the new start!");
curSNode = tmpHash.get(tmpKeys[0]);
curbf.append(curSNode.getBase());// this is actually unecessary
//curbf=new StringBuffer("");
tp.appendNode(curSNode);
lastStartOfBubble = l;
curBubbleLength = tmpBubbleLength;
this.headerExcessLengthBeyondTypingBoundary[i] = curBubbleLength - 1;
System.err.println("Setting Trimming length(header):\t" + this.headerExcessLengthBeyondTypingBoundary[i]);
break;
}
}
//this.interBubbleSequences.add(new StringBuffer(""));
//headerBubble = true;
/*curSNode = columnHash.get(keys[0]);
curbf.append(curSNode.getBase());
tp.appendNode(curSNode);
lastStartOfBubble = k;
curBubbleLength = 1;
*/
}else{ //mid-bubble: just increment bubble length
curBubbleLength++;
//preNode = null;
}
}else{//disconnected graph.
System.err.println("This should NOT HAPPEN");
}
}
//need to update here to handle "End-Bubble" (bubble sitting at the end and not concluded)
if(curBubbleLength > 1){
System.err.println(">>>>>>>Bubble at the end:\t[curBubbleLength]:"+ curBubbleLength);
int preLength = curBubbleLength;
for(;;k++){
HashMap<Integer, Node> columnHash = this.nodeHashList.get(k);
Integer[] keys = columnHash.keySet().toArray(new Integer[0]);
curBubbleLength++;
if(keys.length == 1){
this.interBubbleSequences.add(curbf);
//this.interBubblePaths.add(tp.toPath(this.g));
this.interBubblePaths2.add(tp);
System.err.println("Found the new end!");
numBubbles++;
bubbleLengths.add(new Integer(curBubbleLength-2));
coordinates.add(new Integer(lastStartOfBubble));
//if(firstBubble){
// bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0]), firstBubble));
// firstBubble = false;
//}else
this.tailExcessLengthBeyondTypingBoundary[i] = curBubbleLength - preLength;
System.err.println("Setting Trimming length(tail):\t" + this.tailExcessLengthBeyondTypingBoundary[i]);
bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0]), false, 0, this.tailExcessLengthBeyondTypingBoundary[i]));
curSNode = columnHash.get(keys[0]);
lastStartOfBubble = k;
curBubbleLength = 1;
curbf = new StringBuffer("");
curbf.append(curSNode.getBase());
tp = new TmpPath();
tp.appendNode(curSNode);
break;
}
}
}//else{
this.interBubbleSequences.add(curbf);
//this.interBubblePaths.add(tp.toPath(this.g));
this.interBubblePaths2.add(tp);
curbf = new StringBuffer("");
tp = new TmpPath();
/*
this.interBubbleSequences.add(curbf);
this.interBubblePaths.add(tp.toPath(this.g));
curbf = new StringBuffer("");
tp = new TmpPath();
if(curBubbleLength > 1){
System.err.println(">>>>>>>Bubble at the end:\t[curBubbleLength]:"+ curBubbleLength);
}
*/
}
System.err.println("NumBubbles:\t" + numBubbles + "\tfound");
for(int i=0; i<bubbleLengths.size(); i++){
System.err.print(bubbleLengths.get(i).intValue() + "\t");
}
System.err.println();
for(int i=0; i<bubbleLengths.size(); i++){
System.err.print(coordinates.get(i).intValue() + "\t");
}
System.err.println();
return bubbles;
}
//write code to find number of paths and
//return the number of paths in the bubble.
//move column-wise and update number of paths going through each vertex.
/*
private int analyzeBubble(int start, int end){
Integer[] keys = this.nodeHashList.get(start).keySet().toArray(new Integer[0]);
for(int i=start+1; i<=end; i++){
//HashMap<Integer, Node> columnHash = this.nodeHashList.get(i);
//Integer[] keys = columnHash.keySet().toArray(new Integer[0]);
this.updateNumPathFwd(i-1, i);
}
return 0;
}*/
//update numPathFwd in current column
/*
private void updateNumPathFwd(int pre, int cur){
Collection<Node> preNodes = this.nodeHashList.get(pre).values();
Collection<Node> curNodes = this.nodeHashList.get(cur).values();
Iterator<Node> curItr = curNodes.iterator();
while(curItr.hasNext()){
Node curNode = curItr.next();
Iterator<Node> preItr = preNodes.iterator();
while(preItr.hasNext()){
Node preNode = preItr.next();
if(this.g.getEdge(preNode, curNode) != null){
curNode.incrementNumPathInBubbleFwd(preNode.getNumInBubbleFwd());
}
}
}
}
*/
// Counts bubbles over either the typing exons only or the whole graph, printing
// per-bubble size/path statistics to stderr. Nothing is returned or stored.
// NOTE(review): bubbleSize is never incremented anywhere in this method, so the
// "bubbleSize > 1" branch appears unreachable — confirm intent vs. countBubbles().
// NOTE(review): the loop bound "i <= endIndex" indexes nodeHashList.get(endIndex),
// which equals nodeHashList.size() when typingExonOnly is false — verify this
// cannot overrun the list.
public void countBubbles(boolean typingExonOnly){
int startIndex, endIndex;
if(typingExonOnly){
// Exon boundaries come from the first (reference) allele.
int[] boundaries = this.alleles.get(0).getBoundaries();
if(this.alleles.get(0).isClassI()){//if class I : type exon 2 and 3
startIndex = boundaries[3];
endIndex = boundaries[6];
}else{// if class II : type exon 2
startIndex = boundaries[3];
endIndex = boundaries[4];
}
}else{
startIndex = 0;
endIndex = this.nodeHashList.size();
}
int numBubbles = 0;
// Synthetic start node (base code 4) seeds the previous-column set.
Node sNode = new Node(4, startIndex);
ArrayList<Node> preNodes = new ArrayList<Node>();
preNodes.add(sNode);
boolean preStart = true;
int bubbleSize = 1;
int numPath = 1;
for(int i = startIndex; i <= endIndex; i++){
HashMap<Integer, Node> curHash = this.nodeHashList.get(i);
//Set<Integer> keyset = curHash.keySet();
Integer[] keys = curHash.keySet().toArray(new Integer[0]);
if(keys.length == 1){//only one option --> it's collaping node or part of just a straight path
if(bubbleSize > 1){//if bublleSize > 1, then it's the end end of bubble
numBubbles++;
System.err.println("Bubble[" + numBubbles + "]:Size(" + bubbleSize + "):numPath(" + numPath + ")" );
preNodes = new ArrayList<Node>();
preNodes.add(curHash.get(keys[0]));
preStart = false;
bubbleSize = 1;
numPath = 1;
}else{
// NOTE(review): preNodes is reset to EMPTY here (unlike the branch above,
// which re-seeds it with the collapsing node) — confirm this is intended.
preNodes = new ArrayList<Node>();
preStart = false;
}
}else if(keys.length > 1){
//checking previous column nodes to this column node
for(int p=0; p < preNodes.size(); p++){
Node pNode = preNodes.get(p);
int branching=0;
// Count positive-weight edges from this previous-column node into the
// current column.
for(int q=0; q<keys.length; q++){
Node qNode = curHash.get(keys[q]);
CustomWeightedEdge e = this.g.getEdge(pNode, qNode);
if(e != null && this.g.getEdgeWeight(e) > 0)
branching++;
}
if(branching > 2){
if(preStart){
numPath += (branching - 1);
}else{
// Each incoming path into pNode fans out over the branches.
int ind = this.g.inDegreeOf(pNode);
numPath += ind*branching - ind;
}
}
}
}
}
}
//insertionNodes are indexed at same position as endColumns
//meaning: insertionNodes should be inserted in between startColumns and endColumns.
public void flattenInsertionNodes(){
ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
int fCount = 0;
for(int i=typingIntervals.size()-1; i>-1; i
int start = typingIntervals.get(i)[0];
int end = typingIntervals.get(i)[1];
for(int j=end-1; j >= start; j
int insSize = this.insertionNodeHashList.get(j).size();
//there is insertion, we need to flatten.
if(insSize > 0 && this.isThereConnectionToInsertionNodes(insSize, j)){
fCount++;
this.shiftColumnsByInsertionSize(insSize, j);
}
}
}
System.err.println(this.HLAGeneName + "\t>>>>> FLATTENED InsertionBubble:\t" + fCount );
}
//fromColumnIndex is 0-based columnIndex
/**
 * Checks whether the insertion nodes recorded at {@code fromColumnIndex}
 * (0-based) are wired into the main graph on BOTH sides: an edge from the
 * preceding column into the first insertion slice AND an edge from the last
 * insertion slice into the column itself.
 *
 * @param insSize         number of insertion slices at this column
 * @param fromColumnIndex 0-based column index carrying the insertion
 * @return true only when both the entry and exit connections exist
 */
private boolean isThereConnectionToInsertionNodes(int insSize, int fromColumnIndex){
    System.err.println("[isThereConnection] Checking at fromColumnIndex : " + fromColumnIndex + "\tInsSize: " + insSize);
    HashMap<Integer, Node> preColumn = nodeHashList.get(fromColumnIndex-1);
    HashMap<Integer, Node> firstSlice = this.insertionNodeHashList.get(fromColumnIndex).get(0);
    HashMap<Integer, Node> lastSlice = this.insertionNodeHashList.get(fromColumnIndex).get(insSize - 1);
    HashMap<Integer, Node> curColumn = nodeHashList.get(fromColumnIndex);
    System.out.println("[isThereConnectionToInsertionNodes] HashIndex: " + (fromColumnIndex - 1) );
    boolean entryLinked = this.isThereConnection(preColumn, firstSlice);
    boolean exitLinked = this.isThereConnection(lastSlice, curColumn);
    // Diagnostics are only emitted when at least one side is connected.
    if(entryLinked || exitLinked){
        System.err.println(entryLinked
                           ? "[isThereConnection] connection between startNodes and sInsHash found!"
                           : "[isThereConnection] NO connection between startNodes and sInsHash found!");
        System.err.println(exitLinked
                           ? "[isThereConnection] connection between eInsHash and endNodes found!"
                           : "[isThereConnection] NO connection between eInsHash and endNodes found!");
    }
    return entryLinked && exitLinked;
}
//just to check if there edges between s and t
/**
 * Returns true iff at least one edge exists in the graph from a node in
 * column {@code s} to a node in column {@code t}.
 * Nodes keyed by 4 are skipped on both sides (4 appears to be the gap/space
 * base code — see the synthetic Node(4, ...) usage elsewhere; confirm).
 */
private boolean isThereConnection(HashMap<Integer, Node> s, HashMap<Integer, Node> t){
    for(Integer sKey : s.keySet()){
        if(sKey.intValue() == 4)
            continue;
        Node from = s.get(sKey);
        for(Integer tKey : t.keySet()){
            if(tKey.intValue() == 4)
                continue;
            if(this.g.getEdge(from, t.get(tKey)) != null)
                return true;
        }
    }
    return false;
}
/* fromColumnIndex is 0-based index --> this is where insertion happens */
/* 0based(List index): 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 */
/* 1based(CI in Node and Base): 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 */
/* from ColumnIndex at 5, insSize of 2*/
// Expands an insertion in place: inserts insSize new columns (taken from
// insertionNodeHashList) between the column at fromColumnIndex-1 and the column
// at fromColumnIndex, adds a chain of '.' gap nodes spanning the new columns,
// pads every allele with blanks, renumbers all downstream columns, and finally
// re-routes the old start->end edges through the gap chain (weights/read sets
// transferred by getWeightSumsBetween2Columns).
// fromColumnIndex is the 0-based index where the insertion happens; Node/Base
// column indices are 1-based (see the table in the comment above this method).
private void shiftColumnsByInsertionSize(int insSize, int fromColumnIndex){
HashMap<Integer, Node> startNodes = nodeHashList.get(fromColumnIndex-1);
HashMap<Integer, Node> endNodes = nodeHashList.get(fromColumnIndex);
// DEBUG: dump the two columns being bridged.
Iterator<Integer> itr_s = startNodes.keySet().iterator();
System.err.println("\n**STARTNODES:");
while(itr_s.hasNext()){
System.err.println(startNodes.get(itr_s.next()).toString());
}
Iterator<Integer> itr_e = endNodes.keySet().iterator();
System.err.println("\n**ENDNODES:");
while(itr_e.hasNext()){
System.err.println(endNodes.get(itr_e.next()).toString());
}
Node pre = null;
Node[] gapNodes = new Node[insSize];
/* here we first shift endNodes and all nodes after that by insSize
* to acquire insSize many column space for insertionNodeHashList.
*/
for(int i=0; i<insSize;i++){
HashMap<Integer, Node> insHash_i = this.insertionNodeHashList.get(fromColumnIndex).get(i);
this.adjustColumnIndex(insHash_i, fromColumnIndex + i + 1);//1-base column position
nodeHashList.add(fromColumnIndex+i, insHash_i); //insert insHash_i
// One '.' gap node per inserted column, chained left-to-right so reads
// without the insertion can pass straight through.
Node cur = new Node('.', fromColumnIndex + i + 1); // 1-base column position
this.addVertex(cur);//add vertex and add it to nodeHashList;
if(pre !=null)
this.g.addEdge(pre,cur);
gapNodes[i] = cur;
pre = cur;
}
// Sanity check: the gap chain must be fully connected.
System.err.println("checking edges between gapNodes[]");
for(int i=1;i<gapNodes.length;i++){
CustomWeightedEdge e = this.g.getEdge(gapNodes[i-1], gapNodes[i]);
if(e == null)
System.err.println("No edges found between between gapNodes["+(i-1) + "] and gapNodes[" + i + "]");
}
/* adding spaces to Alleles as well */
for(int i=0; i<this.alleles.size(); i++)
this.alleles.get(i).insertBlanks(fromColumnIndex, insSize);
/* we shift all columns after insertion, so updating all columnIndex */
for(int i=fromColumnIndex+insSize; i<this.nodeHashList.size(); i++)
this.adjustColumnIndex(this.nodeHashList.get(i), i+1);//need to updated with 1-base column position
/* remove all edges between start node and end nodes and re-route them through gap nodes by adding new edges and assign weights and readset accordingly*/
double weightSum = this.getWeightSumsBetween2Columns(startNodes, endNodes, gapNodes);
/* DEBUGGING prints*/
itr_s = startNodes.keySet().iterator();
System.err.println("\n**STARTNODES:");
while(itr_s.hasNext()){
System.err.println(startNodes.get(itr_s.next()).toString());
}
System.err.println("**CONNECTED NODES TO START-GAP:");
CustomWeightedEdge[] inEdges = this.g.incomingEdgesOf(gapNodes[0]).toArray(new CustomWeightedEdge[1]);
for(CustomWeightedEdge e : inEdges){
System.err.println(this.g.getEdgeSource(e).toString());
}
itr_e = endNodes.keySet().iterator();
System.err.println("\n**ENDNODES:");
while(itr_e.hasNext()){
System.err.println(endNodes.get(itr_e.next()).toString());
}
System.err.println("**CONNECTED NODES TO END-GAP:");
CustomWeightedEdge[] outEdges = this.g.outgoingEdgesOf(gapNodes[gapNodes.length -1]).toArray(new CustomWeightedEdge[1]);
for(CustomWeightedEdge e : outEdges){
System.err.println(this.g.getEdgeTarget(e).toString());
}
}
// Legacy variant of shiftColumnsByInsertionSize, kept for reference. Differs in
// its column offsets (uses fromColumnIndex-2/-1 as the bridged columns) and in
// re-running getWeightSumsBetween2Columns over successively shorter gap-node
// suffixes when insSize > 1. NOTE(review): appears superseded by the method
// above — confirm it is unused before relying on its offset conventions.
private void shiftColumnsByInsertionSizeOLD(int insSize, int fromColumnIndex){
HashMap<Integer, Node> startNodes = nodeHashList.get(fromColumnIndex-2);
HashMap<Integer, Node> endNodes = nodeHashList.get(fromColumnIndex-1);
//we need to insert <insSize>-many columns first
Node pre = null;
Node[] gapNodes = new Node[insSize];
ArrayList<Base> insBases = new ArrayList<Base>();//new Base[insSize];
//insert insSize-many columns with gapNodes and transfer insertionNodes to nodeHashList.
for(int i=0; i<insSize; i++){
//add a space first then add the vertex --> gets the space(HashMap) from insertionNodeHashList
HashMap<Integer, Node> insHash_i = this.insertionNodeHashList.get(fromColumnIndex-1).get(i);
this.adjustColumnIndex(insHash_i, fromColumnIndex + i);//this.adjustColumnIndex(insHash_i, fromColumnIndex + i + 1);
nodeHashList.add(fromColumnIndex + i, insHash_i);
Node cur = new Node('.', fromColumnIndex + i + 1);
this.addVertex(cur);//add vertex and add to nodeHashList
if(pre != null)
this.g.addEdge(pre, cur);
gapNodes[i] = cur;
pre = cur;
// insBases is built but only used via the commented-out insertBlanks call below.
insBases.add(new Base('-', 0,0,0,true,1));
}
/* adding spaces to Alleles as well*/
for(int i=0; i<this.alleles.size(); i++){
//this.alleles.get(i).insertBlanks(fromColumnIndex, insBases);
this.alleles.get(i).insertBlanks(fromColumnIndex-1, insSize);
}
/*
//insert insSize-many columns with gapNodes
for(int i=0; i<insSize; i++){
//add a space first then add the vertex
nodeHashList.add(fromColumnIndex + i, new HashMap<Integer, Node>);
Node cur = new Node('.', fromColumnIndex + i + 1);
this.addVertex(cur);//add vertex and add to nodeHashList
if(pre != null)
this.g.addEdge(pre, cur);
gapNodes[i] = cur;
pre = cur;
}
*/
//NEED TO SHIFT all columns after insertion, so updating all columnIndex (originalIndex+insSize.
for(int i=fromColumnIndex+insSize; i<this.nodeHashList.size(); i++)
this.adjustColumnIndex(this.nodeHashList.get(i), i);//this.adjustColumnIndex(i);
//remove all edges between start nodes and end nodes and add new edges connecting through gap nodes.
double weightSum = this.getWeightSumsBetween2Columns(startNodes, endNodes, gapNodes);
if(insSize > 1){
// Re-bridge each intermediate inserted column to endNodes through the
// remaining suffix of gap nodes.
for(int i=fromColumnIndex; i<fromColumnIndex+insSize-1; i++){
gapNodes = Arrays.copyOfRange(gapNodes, 1, gapNodes.length);
this.getWeightSumsBetween2Columns(this.nodeHashList.get(i), endNodes, gapNodes);
}
}
}
//removes all edges betweend start nodes and end nodes
//connect edges to newly added gap nodes with correct weights
// Removes every edge between the start column and the end column and re-routes
// the traffic through the gapNodes chain: per-base outgoing weight/score/read
// bookkeeping is accumulated while the old edges are deleted, then replayed as
// start->sGap edges, eGap->end edges, and weight on the intra-gap edges.
// Returns the total weight moved. Arrays are sized 6 — presumably one slot per
// base code (A/C/G/T plus gap and one more); TODO confirm the encoding.
private double getWeightSumsBetween2Columns(HashMap<Integer, Node> start, HashMap<Integer, Node> end, Node[] gapNodes){
Node sGap = gapNodes[0];
Node eGap = gapNodes[gapNodes.length-1];
double[] outweight = new double[6]; /* for each nucleotide */
//ArrayList<Byte>[] outFScore = new ArrayList<Byte>[5];
//ArrayList<Byte>[] outRScore = new ArrayList<Byte>[5];
ArrayList<ArrayList<Byte>> outFScore = new ArrayList<ArrayList<Byte>>();
ArrayList<ArrayList<Byte>> outRScore = new ArrayList<ArrayList<Byte>>();
double[] inweight = new double[6];
//ArrayList<Byte>[] inFScore = new ArrayList<Byte>[5];
//ArrayList<Byte>[] inRScore = new ArrayList<Byte>[5];
ArrayList<ArrayList<Byte>> inFScore = new ArrayList<ArrayList<Byte>>();
ArrayList<ArrayList<Byte>> inRScore = new ArrayList<ArrayList<Byte>>();
//ArrayList<HashSet<Integer>> outRHash = new ArrayList<HashSet<Integer>>();
//ArrayList<HashSet<Integer>> inRHash = new ArrayList<HashSet<Integer>>();
ArrayList<CustomHashMap> outRHash = new ArrayList<CustomHashMap>();
ArrayList<CustomHashMap> inRHash = new ArrayList<CustomHashMap>();
//for each nucleotide
for(int i=0; i<6; i++){
outFScore.add(new ArrayList<Byte>());
outRScore.add(new ArrayList<Byte>());
inFScore.add(new ArrayList<Byte>());
inRScore.add(new ArrayList<Byte>());
//outRHash.add(new HashSet<Integer>());
//inRHash.add(new HashSet<Integer>());
outRHash.add(new CustomHashMap());
inRHash.add(new CustomHashMap());
}
double sum = 0.0d;
//HashSet<Integer> rHashForGapNodes = new HashSet<Integer>();
// Union of all read IDs crossing the old edges; replayed onto the gap chain.
CustomHashMap rHashForGapNodes = new CustomHashMap();//new HashSet<Integer>();
Integer[] sKeys = new Integer[0];
Integer[] eKeys = new Integer[0];
sKeys = start.keySet().toArray(sKeys);
eKeys = end.keySet().toArray(eKeys);
/*
for(int i=0;i<eKeys.length; i++){
rHashForGapNodes.addAll(end.get(eKeys[i].intValue()).getReadHashSet());
}*/
boolean[] sEdgePresent = new boolean[6];
boolean[] eEdgePresent = new boolean[6];
boolean isThereConnection = false;
//check all edges between starNodes and endNodes and sum up baseWise.
// Deletion happens in the same pass as accumulation: each old edge's weight,
// F/R quality scores, and read set are folded into the per-base tallies
// before this.g.removeEdge(e).
for(int i=0; i < sKeys.length; i++){
int sVal = sKeys[i].intValue();
//if(sVal != 4){//edges between gap nodes are skipped, taken care of separately
Node stNode = start.get(sKeys[i]);
for(int j=0; j < eKeys.length; j++){
int eVal = eKeys[j].intValue();
//if(eVal != 4){//edges between gap nodes are skipped, taken care of separately
Node eNode = end.get(eKeys[j]);
CustomWeightedEdge e = this.g.getEdge(stNode, eNode);
if(e != null){
sEdgePresent[sVal] = true;
eEdgePresent[eVal] = true;
isThereConnection = true;
double w = this.g.getEdgeWeight(e);
outweight[sVal] += w;
outFScore.get(sVal).addAll(e.getFScores());
outRScore.get(sVal).addAll(e.getRScores());
inweight[eVal] += w;
inFScore.get(eVal).addAll(e.getFScores());
inRScore.get(eVal).addAll(e.getRScores());
outRHash.get(sVal).addAll(e.getReadHashSet());
inRHash.get(eVal).addAll(e.getReadHashSet());
rHashForGapNodes.addAll(e.getReadHashSet());
sum += w;
this.g.removeEdge(e);
}
}
}
//we only need to add edges if there were edges between start and end
if(isThereConnection){
//setting outgoing edges from start nodes to newly added gapNode( sGap ).
for(int i=0; i<sKeys.length; i++){
if(sEdgePresent[sKeys[i].intValue()]){
Node stNode = start.get(sKeys[i]);
CustomWeightedEdge e = this.g.getEdge(stNode, sGap);
if(e == null){
e = this.g.addEdge(stNode, sGap);
this.g.setEdgeWeight(e, 0.0d);
}
// Weight is ADDED to any existing edge rather than overwritten.
this.g.setEdgeWeight(e, this.g.getEdgeWeight(e) + outweight[sKeys[i].intValue()]);//this.setEdgeWeight(e, outweight[sKeys[i].intValue()]);
e.addAllReadsFrom(outRHash.get(sKeys[i].intValue()));
e.addAllFScores(outFScore.get(sKeys[i].intValue()));
e.addAllRScores(outRScore.get(sKeys[i].intValue()));
}
}
//setting incoming edges from newly added gapNode( eGap ) to end nodes.
for(int i=0; i<eKeys.length; i++){
if(eEdgePresent[eKeys[i].intValue()]){
Node eNode = end.get(eKeys[i]);
CustomWeightedEdge e = this.g.getEdge(eGap, eNode);//this.g.addEdge(eGap, eNode);
if(e == null){
e = this.g.addEdge(eGap, eNode);
this.g.setEdgeWeight(e, 0.0d);
}
this.g.setEdgeWeight(e, this.g.getEdgeWeight(e) + inweight[eKeys[i].intValue()]);
e.addAllReadsFrom(inRHash.get(eKeys[i].intValue()));
e.addAllFScores(inFScore.get(eKeys[i].intValue()));
e.addAllRScores(inRScore.get(eKeys[i].intValue()));
}
}
//set edgeWeight between newly inserted gap nodes.
//and add read identifiers to gapNodes
for(int i=0; i<gapNodes.length; i++){
if(i>0){
CustomWeightedEdge e = this.g.getEdge(gapNodes[i-1], gapNodes[i]);
this.g.setEdgeWeight(e, this.g.getEdgeWeight(e) + sum);//this.g.getEdge(gapNodes[i-1], gapNodes[i]), sum);
e.addAllReadsFrom(rHashForGapNodes);
}
//gapNodes[i].addAllReadsFrom(rHashForGapNodes);
}
}
return sum;
}
//set columnIndex to newIndex.
/*
private void adjustColumnIndex(int newIndex){
HashMap<Integer, Node> curHash = this.nodeHashList.get(newIndex);
Iterator<Integer> keys = curHash.keySet().iterator();
while(keys.hasNext())
curHash.get(keys.next()).setColIndex(newIndex);
}
*/
/**
 * Sets the column index of every node in the given column hash to
 * {@code newIndex} (callers pass the 1-based column position).
 */
private void adjustColumnIndex(HashMap<Integer, Node> hash, int newIndex){
    for(Node n : hash.values())
        n.setColIndex(newIndex);
}
/**
 * Prunes the graph in two phases: drop low-weight edges first, then drop the
 * vertices those removals left fully isolated (order matters — vertex pruning
 * depends on the degrees that edge removal produces).
 */
public void removeUnused(){
    this.removeUnusedEdges();
    this.removeUnusedVertices();
}
/* remove low frequency edges */
/**
 * Removes every edge whose weight is below 1.0 (no real read support).
 * Collects first, then removes, to avoid mutating the edge set mid-iteration.
 */
private void removeUnusedEdges(){
    ArrayList<CustomWeightedEdge> toRemove = new ArrayList<CustomWeightedEdge>();
    for(CustomWeightedEdge edge : this.g.edgeSet()){
        if(this.g.getEdgeWeight(edge) < 1.0d)
            toRemove.add(edge);
    }
    System.err.println(this.HLAGeneName +"\t:removed\t" + toRemove.size() + "\tEdges." );
    for(CustomWeightedEdge edge : toRemove)
        this.g.removeEdge(edge);
}
/* remove island vertices */
/**
 * Removes island vertices (in-degree and out-degree both zero).
 * The source and sink nodes (sNode/tNode) are never removed, even when
 * isolated. Collects first, then removes, to avoid concurrent modification.
 */
private void removeUnusedVertices(){
    ArrayList<Node> toRemove = new ArrayList<Node>();
    for(Node v : this.g.vertexSet()){
        if(v.equals(this.sNode) || v.equals(this.tNode))
            continue;
        if(this.g.inDegreeOf(v) == 0 && this.g.outDegreeOf(v) == 0)
            toRemove.add(v);
    }
    System.err.println(this.HLAGeneName +"\t:removed\t" + toRemove.size() + "\tVertices." );
    for(Node v : toRemove)
        this.removeVertex(v);
}
//removing stems. (unreachable stems and dead-end stems)
/* remove any stems */
// Removes "stems" — linear dead-end chains hanging off the graph:
//   [DE] terminal stems:    chains ending in a node with out-degree 0, in-degree 1
//   [UN] unreachable stems: chains starting at a node with in-degree 0, out-degree 1
// Each chain is walked and deleted node by node until the chain's attachment
// point (a node whose degrees no longer match the stem pattern) is reached.
// Vertices are snapshotted into an array up front because removal mutates the
// vertex set; dNodes guards against revisiting already-deleted nodes.
public void removeStems(){
ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
//Set<Node> vSet = this.g.vertexSet();
Node[] nodes = this.g.vertexSet().toArray(new Node[0]);//new Node[vSet.size()];
HashSet<Node> dNodes = new HashSet<Node>();
Node n = null;
int terminalStem = 0;
int unreachableStem = 0;
for(int i=0; i<nodes.length; i++){
n = nodes[i];
if(!n.equals(this.sNode) && !n.equals(this.tNode) && !dNodes.contains(n)){
// Terminal (dead-end) stem: walk BACKWARD along incoming edges.
if(this.g.outDegreeOf(n) == 0 && this.g.inDegreeOf(n) == 1){
int stemSize = 0;
terminalStem++;
Node curNode = n;
while(true){
if(!this.alleles.get(0).withinTypingRegion(curNode, typingIntervals))
;//System.err.println("NOT IN TYPING INTERVAL!!");
else
System.err.print("YES! IN TYPING INTERVAL!!");
stemSize++;
// in-degree is exactly 1 here, so the single incoming edge is [0].
CustomWeightedEdge e = this.g.incomingEdgesOf(curNode).toArray(new CustomWeightedEdge[1])[0];
System.err.print("\t" + this.g.getEdgeWeight(e));
Node nextNode = this.g.getEdgeSource(e);
dNodes.add(curNode);
this.removeVertex(curNode);
// Continue only while the predecessor still matches the stem shape
// (degrees re-checked AFTER curNode's removal).
if(this.g.outDegreeOf(nextNode) == 0 && this.g.inDegreeOf(nextNode) == 1)
curNode = nextNode;
else
break;
}
System.err.println("[DE]stemSize:\t" + stemSize);
}
// Unreachable stem: walk FORWARD along outgoing edges.
else if(this.g.outDegreeOf(n) == 1 && this.g.inDegreeOf(n) == 0){
int stemSize = 0;
unreachableStem++;
Node curNode = n;
while(true){
if(!this.alleles.get(0).withinTypingRegion(curNode, typingIntervals))
;//System.err.println("NOT IN TYPING INTERVAL!!");
else
System.err.println("YES! IN TYPING INTERVAL!!");
stemSize++;
CustomWeightedEdge e = this.g.outgoingEdgesOf(curNode).toArray(new CustomWeightedEdge[1])[0];
System.err.print("\t" + this.g.getEdgeWeight(e));
Node nextNode = this.g.getEdgeTarget(e);
dNodes.add(curNode);
this.removeVertex(curNode);
if(this.g.outDegreeOf(nextNode) == 1 && this.g.inDegreeOf(nextNode) == 0)
curNode = nextNode;
else
break;
}
System.err.println("[UN]stemSize:\t" + stemSize);
}
}
}
System.err.println(this.HLAGeneName + "\t:removed\t[DE]:" + terminalStem + "\t[UN]:" + unreachableStem + "\t[NumVertices]:" + dNodes.size());
}
/**
 * Counts (without removing) stem tips: terminal-type vertices (one incoming,
 * no outgoing edge) and start-type vertices (no incoming, one outgoing edge).
 * The source and sink nodes are excluded. Results are printed to stderr only.
 */
public void countStems(){
    int terminalType = 0;
    int startType = 0;
    for(Node n : this.g.vertexSet()){
        if(n.equals(this.sNode) || n.equals(this.tNode))
            continue;
        if(this.g.inDegreeOf(n) == 1 && this.g.outDegreeOf(n) == 0){
            terminalType++;
        }else if(this.g.inDegreeOf(n) == 0 && this.g.outDegreeOf(n) == 1){
            startType++;
            System.err.println("startType:\t" + n.toString());
        }
    }
    System.err.println("Stems\t" + terminalType + "\t" + startType);
}
/*
private void initNumPathForColumn(HashMap){
}*/
} |
package org.elasticsearch.xpack.watcher.transport.actions.delete;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.watcher.support.init.proxy.WatcherClientProxy;
import org.elasticsearch.xpack.watcher.trigger.TriggerService;
import org.elasticsearch.xpack.watcher.watch.Watch;
public class TransportDeleteWatchAction extends TransportMasterNodeAction<DeleteWatchRequest, DeleteWatchResponse> {
private final WatcherClientProxy client;
private final TriggerService triggerService;
@Inject
public TransportDeleteWatchAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, WatcherClientProxy client,
TriggerService triggerService) {
super(settings, DeleteWatchAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,
DeleteWatchRequest::new);
this.client = client;
this.triggerService = triggerService;
}
@Override
protected String executor() {
return ThreadPool.Names.MANAGEMENT;
}
@Override
protected DeleteWatchResponse newResponse() {
return new DeleteWatchResponse();
}
@Override
protected void masterOperation(DeleteWatchRequest request, ClusterState state, ActionListener<DeleteWatchResponse> listener) throws
ElasticsearchException {
client.deleteWatch(request.getId(), ActionListener.wrap(deleteResponse -> {
boolean deleted = deleteResponse.getResult() == DocWriteResponse.Result.DELETED;
DeleteWatchResponse response = new DeleteWatchResponse(deleteResponse.getId(), deleteResponse.getVersion(), deleted);
if (deleted) {
triggerService.remove(request.getId());
}
listener.onResponse(response);
},
listener::onFailure));
}
@Override
protected ClusterBlockException checkBlock(DeleteWatchRequest request, ClusterState state) {
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, Watch.INDEX);
}
} |
package org.opentosca.csarrepo.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation.Builder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.glassfish.jersey.client.ClientConfig;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.json.JSONArray;
import org.json.JSONException;
import org.opentosca.csarrepo.filesystem.FileSystem;
import org.opentosca.csarrepo.model.CsarFile;
public class WineryApiClient {
private static final Logger LOGGER = LogManager.getLogger(WineryApiClient.class);
private Client client;
private String url;
public WineryApiClient(URL url) {
this.url = url.toExternalForm();
if (this.url.charAt(this.url.length() - 1) != '/') {
this.url += "/";
}
this.client = ClientBuilder.newClient(new ClientConfig()
.register(MultiPartFeature.class));
}
public void uploadToWinery(CsarFile file) throws Exception {
FileSystem fs = new FileSystem();
File f = fs.getFile(file.getHashedFile().getFilename());
if (f == null) {
throw new FileNotFoundException(file.getName() + " not found");
}
// build form data
FormDataMultiPart multiPart = new FormDataMultiPart();
FormDataContentDisposition.FormDataContentDispositionBuilder dispositionBuilder = FormDataContentDisposition
.name("file");
dispositionBuilder.fileName(file.getName());
dispositionBuilder.size(f.getTotalSpace());
FormDataContentDisposition formDataContentDisposition = dispositionBuilder
.build();
multiPart.bodyPart(new FormDataBodyPart("file", f,
MediaType.APPLICATION_OCTET_STREAM_TYPE)
.contentDisposition(formDataContentDisposition));
Entity<FormDataMultiPart> entity = Entity.entity(multiPart,
MediaType.MULTIPART_FORM_DATA_TYPE);
// send request
WebTarget target = client.target(this.url);
Builder request = target.request();
Response response = request.post(entity);
// handle response
if (Status.NO_CONTENT.getStatusCode() == response.getStatus()) {
return;
}
throw new Exception("failed to push to winery");
}
public InputStream pullFromWinery(String id) throws Exception {
// send request
WebTarget target = client.target(this.url + "servicetemplates/" + id);
Builder request = target.request();
request.accept("application/zip");
Response response = request.get();
if (Status.NOT_FOUND.getStatusCode() == response.getStatus()) {
// 404
throw new Exception("No corresponding servicetemplate found");
}
if (Status.OK.getStatusCode() == response.getStatus()) {
// 200
try {
InputStream stream = (InputStream) response.getEntity();
return stream;
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
// other status code
throw new Exception("Error connecting to winery");
}
public List<Servicetemplate> getServiceTemplates() throws Exception {
// send request
WebTarget target = client.target(this.url + "servicetemplates/");
Builder request = target.request();
request.accept("application/json");
Response response = request.get();
if (Status.NOT_FOUND.getStatusCode() == response.getStatus()) {
// 404
throw new Exception("Invalid call to winery");
}
if(Status.OK.getStatusCode() == response.getStatus()) {
// 200
String json = response.readEntity(String.class);
return this.parseServicetemplateJsonToList(json);
}
throw new Exception("Error connecting to winery");
}
private List<Servicetemplate> parseServicetemplateJsonToList(String json) {
JSONArray jsonArray;
List<Servicetemplate> result = new ArrayList<Servicetemplate>();
try {
jsonArray = new JSONArray(json);
for(int i = 0; i < jsonArray.length(); i++) {
String tmpId = jsonArray.getJSONObject(i).getString("id");
String tmpNamespace = jsonArray.getJSONObject(i).getString("namespace");
String tmpName = jsonArray.getJSONObject(i).getString("name");
result.add(new Servicetemplate(tmpId, tmpNamespace, tmpName));
}
} catch (JSONException e) {
LOGGER.error(e);
return null;
}
return result;
}
} |
package org.monarchinitiative.exomiser.data.genome.model.parsers;
import org.monarchinitiative.exomiser.core.model.pathogenicity.ClinVarData;
import org.monarchinitiative.exomiser.core.model.pathogenicity.ClinVarData.ClinSig;
import org.monarchinitiative.exomiser.data.genome.model.Allele;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import static org.monarchinitiative.exomiser.core.model.pathogenicity.ClinVarData.ClinSig.*;
/**
* @author Jules Jacobsen <j.jacobsen@qmul.ac.uk>
*/
public class ClinVarAlleleParser extends VcfAlleleParser {
private static final Logger logger = LoggerFactory.getLogger(ClinVarAlleleParser.class);
@Override
List<Allele> parseInfoField(List<Allele> alleles, String info) {
ClinVarData clinVarData = parseClinVarData(info);
for (Allele allele : alleles) {
if (!clinVarData.isEmpty()) {
allele.setClinVarData(clinVarData);
}
logger.debug("{}", allele);
}
return alleles;
}
/**
*
* @param info
* @return
*/
private ClinVarData parseClinVarData(String info) {
//
// ##INFO=<ID=CLNSIG,Number=.,Type=String,Description="Clinical significance for this single variant">
// ##INFO=<ID=CLNSIGINCL,Number=.,Type=String,Description="Clinical significance for a haplotype or genotype that includes this variant. Reported as pairs of VariationID:clinical significance.">
ClinVarData.Builder clinVarBuilder = ClinVarData.builder();
String [] fields = info.split(";");
for (String field : fields) {
String[] keyValue = field.split("=");
String key = keyValue[0];
String value = keyValue[1];
switch (key) {
case "ALLELEID":
clinVarBuilder.alleleId(value);
break;
case "CLNSIG":
String[] clinsigs = value.split(",_");
ClinSig primary = parseClinSig(clinsigs[0]);
Set<ClinSig> secondary = parseSecondaryClinSig(clinsigs);
clinVarBuilder.primaryInterpretation(primary);
clinVarBuilder.secondaryInterpretations(secondary);
break;
case "CLNREVSTAT":
//CLNREVSTAT criteria_provided,_conflicting_interpretations, criteria_provided,_multiple_submitters,_no_conflicts, criteria_provided,_single_submitter, no_assertion_criteria_provided, no_assertion_provided, no_interpretation_for_the_single_variant, practice_guideline, reviewed_by_expert_panel
//CLNREVSTAT counts: criteria_provided,_conflicting_interpretations=12678, criteria_provided,_multiple_submitters,_no_conflicts=34967, criteria_provided,_single_submitter=197277, no_assertion_criteria_provided=34308, no_assertion_provided=10980, no_interpretation_for_the_single_variant=500, practice_guideline=23, reviewed_by_expert_panel=8786
clinVarBuilder.reviewStatus(value);
break;
case "CLNSIGINCL":
Map<String, ClinVarData.ClinSig> includedAlleles = parseIncludedAlleles(value);
clinVarBuilder.includedAlleles(includedAlleles);
break;
default:
break;
}
}
return clinVarBuilder.build();
}
private Map<String,ClinSig> parseIncludedAlleles(String value) {
//15127:other|15128:other|15334:Pathogenic|
Map<String,ClinSig> includedAlleles = new HashMap<>();
String[] incls = value.split("\\|");
for (String inc : incls) {
String[] fields = inc.split(":");
includedAlleles.put(fields[0], parseClinSig(fields[1]));
}
return includedAlleles;
}
private Set<ClinSig> parseSecondaryClinSig(String[] clinsigs) {
if (clinsigs.length > 1) {
Set<ClinSig> secondaryClinSigs = EnumSet.noneOf(ClinSig.class);
for (int i = 1; i < clinsigs.length; i++) {
secondaryClinSigs.add(parseClinSig(clinsigs[i]));
}
return secondaryClinSigs;
}
return Collections.emptySet();
}
private ClinSig parseClinSig(String clinsig) {
// Unique CLNSIG counts
// Affects=100, Benign=23963, Benign/Likely_benign=10827, Conflicting_interpretations_of_pathogenicity=12784,
// Likely_benign=52064, Likely_pathogenic=15127, Pathogenic=46803, Pathogenic/Likely_pathogenic=3278,
// Uncertain_significance=120418, association=148, drug_response=290, not_provided=10980, other=1796, protective=30,
// risk_factor=411
switch (clinsig) {
case "Uncertain_significance":
return UNCERTAIN_SIGNIFICANCE;
case "Benign":
return BENIGN;
case "Benign/Likely_benign":
return BENIGN_OR_LIKELY_BENIGN;
case "Likely_benign":
return LIKELY_BENIGN;
case "Conflicting_interpretations_of_pathogenicity":
return CONFLICTING_PATHOGENICITY_INTERPRETATIONS;
case "Likely_pathogenic":
return LIKELY_PATHOGENIC;
case "Pathogenic/Likely_pathogenic":
return PATHOGENIC_OR_LIKELY_PATHOGENIC;
case "Pathogenic":
return PATHOGENIC;
case "Affects":
return AFFECTS;
case "association":
return ASSOCIATION;
case "drug_response":
return DRUG_RESPONSE;
case "other":
return OTHER;
case "protective":
return PROTECTIVE;
case "risk_factor":
return RISK_FACTOR;
case "not_provided":
default:
return NOT_PROVIDED;
}
}
} |
package org.pfaa.chemica.block;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.material.MapColor;
import net.minecraft.block.material.Material;
import net.minecraft.client.particle.EntityFX;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.init.Blocks;
import net.minecraft.util.IIcon;
import net.minecraft.util.MathHelper;
import net.minecraft.util.Vec3;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.fluids.BlockFluidBase;
import net.minecraftforge.fluids.BlockFluidClassic;
import org.pfaa.chemica.fluid.FluidMaterial;
import org.pfaa.chemica.fluid.IndustrialFluid;
import org.pfaa.chemica.model.Compound.Compounds;
import org.pfaa.chemica.model.Constants;
import org.pfaa.chemica.render.EntityDropParticleFX;
import com.mojang.realmsclient.dto.McoServer.WorldType;
import cpw.mods.fml.client.FMLClientHandler;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/* FIXME: It would be cool to use this for chemical spills, but
* this is not going to scale for arbitrary chemicals.
* We may need to extend BlockFluidClassic, but catch all calls
* in order to query NBT information and set the fluid-specific properties
* on the block instance before delegating to the superclass.
*/
public class IndustrialFluidBlock extends BlockFluidClassic {
private IndustrialFluid fluid;
private Random rand;
public IndustrialFluidBlock(IndustrialFluid fluid) {
super(fluid, materialForIndustrialFluid(fluid));
this.fluid = fluid;
}
/* We want our own material for liquids, because Material.water will cause:
* - Water bottles to be filled with water
* - Ideally, they would fill with the liquid
* - Crops to be irrigated
* - One idea is to reverse this: create polluted soil that spreads through dirt/farmland
* - Inability of lava to ignite flammable liquids
* - On liquid block update, look +/- 2 in X/Z for lava, call its updateTick() while spoofing flammable material.
* - Water mobs to spawn [block with EntityJoinWorldEvent]
* - Water and beach crops to be sustained [can disable at block-level]
*
* But if we use a custom material, then:
* - Movement breaks
* - Tough to fix; might sort of work for players, but not other entities
* - Burning entities are not extinguished (when block does not change to fire)
* - Could be fixed if we fix the movement issue
* - Witches will not use a water breathing potion
* - Entities are not forced to dismount when riding another entity
* - Easy to code this with the drowning code
* - Dogs will not shake off the liquid upon exiting
* - Drowning breaks [fixed; had to do this for gases]
* - Sand/gravel do not fall when placed directly above [also bug for gases]
*/
private static Material materialForIndustrialFluid(IndustrialFluid fluid) {
boolean flammable = fluid.getProperties().hazard.flammability > 0;
return fluid.isGaseous() ? new FluidMaterial(MapColor.airColor, flammable, false) :
fluid.getTemperature() > Constants.FLESH_IGNITION_TEMPERATURE ? Material.lava :
Material.water;
}
@Override
@SideOnly(Side.CLIENT)
public int colorMultiplier(IBlockAccess world, int x, int y, int z) {
return this.fluid.getColor();
}
@Override
@SideOnly(Side.CLIENT)
public IIcon getIcon(int side, int meta) {
return side != 0 && side != 1 ? this.fluid.getFlowingIcon() : this.fluid.getStillIcon();
}
@Override
@SideOnly(Side.CLIENT)
public void registerBlockIcons(IIconRegister register) {
String prefix = fluid.isGaseous() ? "gas" : fluid.isSuperHeated() ? "molten" : "fluid";
String postfix = fluid.isOpaque() ? "_opaque" : "";
fluid.setStillIcon(register.registerIcon("chemica:" + prefix + "_still" + postfix));
fluid.setFlowingIcon(register.registerIcon("chemica:" + prefix + "_flow" + postfix));
}
@Override
@SideOnly(Side.CLIENT)
public void randomDisplayTick(World world, int x, int y, int z, Random rand) {
super.randomDisplayTick(world, x, y, z, rand);
this.trySeepEffect(world, x, y, z, rand);
}
@SideOnly(Side.CLIENT)
private void trySeepEffect(World world, int x, int y, int z, Random rand) {
if (rand.nextInt(10) == 0
&& World.doesBlockHaveSolidTopSurface(world, x, y - 1, z)
&& !world.getBlock(x, y - 2, z).getMaterial().blocksMovement()) {
double px = x + rand.nextFloat();
double py = y - 1.05D;
double pz = z + rand.nextFloat();
int color = fluid.getColor();
float red = (color >> 16 & 255) / 255.0F;
float green = (color >> 8 & 255) / 255.0F;
float blue = (color & 255) / 255.0F;
EntityFX fx = new EntityDropParticleFX(world, px, py, pz, red, green, blue);
FMLClientHandler.instance().getClient().effectRenderer.addEffect(fx);
}
}
@Override
public IndustrialFluid getFluid() {
return this.fluid;
}
/**
* Ticks the block if it's been scheduled
*/
@Override
public void updateTick(World world, int x, int y, int z, Random rand)
{
// TODO: super-heated blocks should add sound and particle effects, like BlocksLiquid.func_149799_m()
this.rand = rand;
if (this.getFluid().isGaseous()) {
this.updateGas(world, x, y, z, rand);
} else {
super.updateTick(world, x, y, z, rand);
}
}
private void tryToChangePhase(World world, int x, int y, int z, Random rand) {
// TODO: if we have a chemical, check condition at this position and
// check for phase change, at rate inversely proportional to
// the heat capacity and enthalpy of transition. This will require
// having a solid block for every chemical. Currently, we are not
// sure if we will even have fluid blocks for arbitrary chemicals.
}
private void updateGas(World world, int x, int y, int z, Random rand)
{
int quantaRemaining = getQuantaValue(world, x, y, z);
int expQuanta = -101;
/* Algorithm:
* Look in the opposite of the density direction
* If there is a block
* quantaRemaining = block.quantaRemaining
* If any of the laterally adjacent blocks have < quanta than it
* quantaRemaining--
* Else
* quantaRemaining = max(laterallyAdjacentBlocks.quantaRemaining)-1
*
* First decision: move vertically, depends on density
* P(moveInDensityDir) = density / (atmosphericDensity + density)
*
* We use atmospheric density instead of the destination block density, because
* we are not able to truly displace other gases. Denser gases
* above are not replaced, while less dense gases are replaced, which is sort
* of opposite. If we had true displacement, then we could have
* less dense gases rise above denser gases, but we are a long way from that.
* This algorithm will at least make less dense gases rise faster.
*
* Second decision: diffuse, depends on:
* * quanta value, i.e., concentration
* * sqrt(temperature)
*
* We ignore molecular mass/diameter considerations.
*/
boolean source = isSourceBlock(world, x, y, z);
if (!source)
{
int y2 = y - densityDir;
if (world.getBlock(x, y2, z) == this)
{
expQuanta = getQuantaValue(world, x, y2, z);
if (quantaRemaining < quantaPerBlock - 1 ||
isSourceBlock(world, x, y2, z) ||
hasSmallerQuanta(world, x - 1, y2, z, expQuanta) ||
hasSmallerQuanta(world, x + 1, y2, z, expQuanta) ||
hasSmallerQuanta(world, x, y2, z - 1, expQuanta) ||
hasSmallerQuanta(world, x, y2, z + 1, expQuanta))
{
expQuanta
}
}
else
{
int maxQuanta = -100;
maxQuanta = getLargerQuanta(world, x - 1, y, z, maxQuanta);
maxQuanta = getLargerQuanta(world, x + 1, y, z, maxQuanta);
maxQuanta = getLargerQuanta(world, x, y, z - 1, maxQuanta);
maxQuanta = getLargerQuanta(world, x, y, z + 1, maxQuanta);
expQuanta = maxQuanta - 1;
}
// decay calculation
if (expQuanta != quantaRemaining)
{
quantaRemaining = expQuanta;
if (expQuanta <= 0)
{
world.setBlock(x, y, z, Blocks.air);
}
else
{
world.setBlockMetadataWithNotify(x, y, z, quantaPerBlock - expQuanta, 3);
world.scheduleBlockUpdate(x, y, z, this, tickRate);
world.notifyBlocksOfNeighborChange(x, y, z, this);
}
}
if (world.getBlock(x, y + densityDir, z) == this) {
return;
}
}
else if (source)
{
world.setBlockMetadataWithNotify(x, y, z, 0, 2);
}
boolean[] canFlowLaterally = new boolean[] {
canDisplaceInDirection(world, x, y, z, ForgeDirection.VALID_DIRECTIONS[2]),
canDisplaceInDirection(world, x, y, z, ForgeDirection.VALID_DIRECTIONS[3]),
canDisplaceInDirection(world, x, y, z, ForgeDirection.VALID_DIRECTIONS[4]),
canDisplaceInDirection(world, x, y, z, ForgeDirection.VALID_DIRECTIONS[5])
};
int numFeasibleLateralFlows = 0;
for (boolean dir : canFlowLaterally) {
if (dir) numFeasibleLateralFlows++;
}
boolean flowVertically = shouldGasFlowVertically(world, x, y, z, rand, numFeasibleLateralFlows);
int numLateralFlows = flowVertically ? 1 : 2;
boolean diffuse = shouldGasDiffuse(world, x, y, z, rand, numFeasibleLateralFlows, numLateralFlows);
int flowMeta = Math.max(1, quantaPerBlock - quantaRemaining +
(diffuse || quantaRemaining < quantaPerBlock - 1 ? 1 : 0));
if (flowMeta >= quantaPerBlock) { // dissipated
return;
}
if (!diffuse) {
numLateralFlows
}
numLateralFlows = Math.min(numLateralFlows, numFeasibleLateralFlows);
while(numLateralFlows > 0) {
int i = rand.nextInt(canFlowLaterally.length);
if (canFlowLaterally[i]) {
ForgeDirection dir = ForgeDirection.VALID_DIRECTIONS[i+2];
flowIntoBlock(world, x + dir.offsetX, y, z + dir.offsetZ, flowMeta);
canFlowLaterally[i] = false;
numLateralFlows
}
}
if (flowVertically)
{
flowIntoBlock(world, x, y + densityDir, z, flowMeta);
}
}
@Override
public int getFlammability(IBlockAccess world, int x, int y, int z, ForgeDirection face) {
int flammability = 0;
if (!this.isSourceBlock(world, x, y, z)) {
flammability = Math.max(0, this.fluid.getProperties().hazard.flammability - 1) * 100;
BiomeGenBase.TempCategory tempCategory = ((World)world).getBiomeGenForCoords(x, z).getTempCategory();
if (tempCategory == BiomeGenBase.TempCategory.COLD) {
flammability -= 50;
} else if (tempCategory == BiomeGenBase.TempCategory.WARM) {
flammability += 50;
}
}
return flammability;
}
@Override
public int getFireSpreadSpeed(IBlockAccess world, int x, int y, int z,
ForgeDirection face) {
return this.getFlammability(world, x, y, z, face) / 3;
}
@Override
protected void flowIntoBlock(World world, int x, int y, int z, int meta) {
Block block = world.getBlock(x, y, z);
if (block == Blocks.torch || block.getMaterial() == Material.fire) {
this.tryToIgnite(world, x, y, z);
} else {
super.flowIntoBlock(world, x, y, z, meta);
}
}
private boolean canDisplaceInDirection(World world, int x, int y, int z, ForgeDirection dir) {
return this.canDisplace(world, x + dir.offsetX, y, z + dir.offsetZ);
}
private boolean shouldGasDiffuse(World world, int x, int y, int z, Random rand,
int numFeasibleFlows, int minFlows) {
double steamTemperature = Compounds.H2O.getVaporization().getTemperature();
float pDiffuse = (float)(Math.sqrt(this.temperature) /
(Math.sqrt(steamTemperature) * 3));
return rand.nextFloat() < pDiffuse && numFeasibleFlows >= minFlows;
}
private boolean shouldGasFlowVertically(World world, int x, int y, int z,
Random rand, int numFeasibleLateralFlows) {
// Forge density units are too large for gases when rounding to integer
double materialDensity = this.fluid.getProperties().density;
double conc = this.getQuantaValue(world, x, y, z) / (double)this.quantaPerBlock;
// FIXME: this air density will need to be the atmospheric density of the dimension
double pVertical = Math.abs(conc*(Constants.AIR_DENSITY - conc*materialDensity)) /
Constants.AIR_DENSITY;
boolean laterallyConstrained = numFeasibleLateralFlows == 0;
boolean flowVertically = canDisplace(world, x, y + densityDir, z) &&
(rand.nextFloat() < 2*pVertical || laterallyConstrained);
return flowVertically;
}
private int getDensityAt(World world, int x, int y, int z) {
Block block = world.getBlock(x, y, z);
int density = Integer.MAX_VALUE;
if (block instanceof BlockFluidBase) {
density = ((BlockFluidBase)block).getFluid().getDensity(world, x, y, z);
}
return density;
}
private boolean hasSmallerQuanta(IBlockAccess world, int x, int y, int z, int quanta) {
int other = this.getQuantaValue(world, x, y, z);
return other > 0 && other < quanta;
}
/*
* We disallow self-replacement (source blocks destroying other source blocks is bad).
*
* This has to be done in three places, due to gratuitous code duplication.
*/
private boolean shouldDisplace(IBlockAccess world, int x, int y, int z) {
Block block = world.getBlock(x, y, z);
return block != this;
}
@Override
public boolean canFlowInto(IBlockAccess world, int x, int y, int z) {
if (!shouldDisplace(world, x, y, z)) {
return false;
}
return super.canFlowInto(world, x, y, z);
}
@Override
public boolean canDisplace(IBlockAccess world, int x, int y, int z) {
if (!shouldDisplace(world, x, y, z)) {
return false;
}
return super.canDisplace(world, x, y, z);
}
@Override
public boolean displaceIfPossible(World world, int x, int y, int z) {
if (!shouldDisplace(world, x, y, z)) {
return false;
}
return super.displaceIfPossible(world, x, y, z);
}
@Override
public void onEntityCollidedWithBlock(World world, int x, int y, int z, Entity entity)
{
if (entity.isBurning()) {
if (!this.tryToIgnite(world, x, y, z)) {
entity.extinguish();
}
}
}
private boolean tryToIgnite(World world, int x, int y, int z) {
if (this.rand.nextInt(300) < this.getFlammability(world, x, y, z, ForgeDirection.UNKNOWN)) {
world.setBlock(x, y, z, Blocks.fire);
return true;
}
return false;
}
public static IndustrialFluidBlock atEyeLevel(EntityLivingBase entity) {
double j0 = entity.posY + (entity.worldObj.isRemote ? 0 : entity.getEyeHeight());
int i = MathHelper.floor_double(entity.posX);
int j = MathHelper.floor_double(j0);
int k = MathHelper.floor_double(entity.posZ);
Block block = entity.worldObj.getBlock(i, j, k);
if (block instanceof IndustrialFluidBlock) {
IndustrialFluidBlock fluidBlock = (IndustrialFluidBlock)block;
float filled = fluidBlock.getFilledPercentage(entity.worldObj, i, j, k);
float density = fluidBlock.getFluid().getDensity();
Block nbor = entity.worldObj.getBlock(i, j - (density > 0 ? -1 : 1), k);
if (nbor == block) {
filled = 1.0F;
}
if (positionInFilledPortion(j0, filled)) {
return fluidBlock;
}
}
return null;
}
private static boolean positionInFilledPortion(double j0, float filled) {
if (filled < 0)
{
filled *= -1;
return j0 > (double)(MathHelper.floor_double(j0) + (1 - filled));
}
else
{
return j0 < (double)(MathHelper.floor_double(j0) + filled);
}
}
} |
package org.eclipse.persistence.internal.oxm.record;
import java.util.Stack;
import org.eclipse.persistence.oxm.NamespaceResolver;
import org.eclipse.persistence.oxm.record.DOMRecord;
import org.eclipse.persistence.oxm.record.UnmarshalRecord;
/**
* @version $Header: XMLTransformationRecord.java 09-aug-2007.15:35:19 dmccann Exp $
* @author mmacivor
* @since release specific (what release of product did this appear in)
*/
public class XMLTransformationRecord extends DOMRecord {
private UnmarshalRecord owningRecord;
private NamespaceResolver resolver;
public XMLTransformationRecord(UnmarshalRecord owner) {
super();
owningRecord = owner;
initializeNamespaceMaps();
}
public XMLTransformationRecord(String rootName, UnmarshalRecord owner) {
super(rootName);
owningRecord = owner;
session = owner.getSession();
resolver = new NamespaceResolver();
initializeNamespaceMaps();
}
public String resolveNamespacePrefix(String prefix) {
return resolver.resolveNamespacePrefix(prefix);
}
public void initializeNamespaceMaps() {
//When the transformation record is created, initialize the namespace resolver
//to contain the namespaces from the current state of the owning record.
//Start at the root and work down.
Stack records = new Stack();
UnmarshalRecord next = owningRecord;
while(next != null) {
records.push(next);
next = next.getParentRecord();
}
for(int i = 0; i < records.size(); i++) {
next = (UnmarshalRecord)records.pop();
if(next.getNamespaceMap() != null) {
java.util.Iterator prefixes = next.getNamespaceMap().keySet().iterator();
while(prefixes.hasNext()) {
String prefix = (String)prefixes.next();
Stack uriStack = (Stack)next.getNamespaceMap().get(prefix);
if(uriStack.size() > 0) {
this.resolver.put(prefix, (String)uriStack.peek());
}
}
}
}
}
} |
package org.scijava.annotations;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Name;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic.Kind;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
import org.scijava.annotations.AbstractIndexWriter.StreamFactory;
/**
* The annotation processor for use with Java 6 and above.
*
* @author Johannes Schindelin
*/
@SupportedSourceVersion(SourceVersion.RELEASE_6)
@SupportedAnnotationTypes("*")
public class AnnotationProcessor extends AbstractProcessor {
private RoundEnvironment roundEnv;
@Override
public boolean process(final Set<? extends TypeElement> elements,
final RoundEnvironment env)
{
roundEnv = env;
final Writer writer = new Writer();
for (final TypeElement element : elements) {
writer.add(element);
}
try {
writer.write(writer);
}
catch (final IOException e) {
final ByteArrayOutputStream out = new ByteArrayOutputStream();
e.printStackTrace(new PrintStream(out));
try {
out.close();
processingEnv.getMessager().printMessage(Kind.ERROR, out.toString());
}
catch (final IOException e2) {
processingEnv.getMessager().printMessage(Kind.ERROR,
e2.getMessage() + " while printing " + e.getMessage());
}
}
return false;
}
private class Writer extends AbstractIndexWriter implements StreamFactory {
private final Map<String, List<Element>> originatingElements =
new HashMap<String, List<Element>>();
private final Filer filer = processingEnv.getFiler();
private final Elements utils = processingEnv.getElementUtils();
private final Types typeUtils = processingEnv.getTypeUtils();
public void add(final TypeElement element) {
final AnnotationMirror mirror = getMirror(element);
if (mirror != null) {
final String annotationName = utils.getBinaryName(element).toString();
// remember originating elements
List<Element> originating = originatingElements.get(annotationName);
if (originating == null) {
originating = new ArrayList<Element>();
originatingElements.put(annotationName, originating);
}
for (final Element annotated : roundEnv
.getElementsAnnotatedWith(element))
{
switch (annotated.getKind()) {
case ANNOTATION_TYPE:
case CLASS:
case ENUM:
case INTERFACE:
final String className =
utils.getBinaryName((TypeElement) annotated).toString();
final Map<String, Object> values =
adapt(annotated.getAnnotationMirrors(), element.asType());
super.add(values, annotationName, className);
originating.add(annotated);
break;
default:
processingEnv.getMessager().printMessage(
Kind.ERROR,
"Cannot handle annotated element of kind " +
annotated.getKind());
}
}
}
}
@SuppressWarnings("unchecked")
private Map<String, Object> adapt(
final List<? extends AnnotationMirror> mirrors,
final TypeMirror annotationType)
{
final Map<String, Object> result = new TreeMap<String, Object>();
for (final AnnotationMirror mirror : mirrors) {
if (typeUtils.isSameType(mirror.getAnnotationType(), annotationType)) {
return (Map<String, Object>) adapt(mirror);
}
}
return result;
}
@Override
protected Object adapt(final Object o) {
if (o instanceof AnnotationMirror) {
final AnnotationMirror mirror = (AnnotationMirror) o;
final Map<String, Object> result = new TreeMap<String, Object>();
for (final Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : mirror
.getElementValues().entrySet())
{
final String key = entry.getKey().getSimpleName().toString();
final Object value = adapt(entry.getValue().getValue());
result.put(key, value);
}
return result;
}
else if (o instanceof List) {
final List<?> list = (List<?>) o;
final List<Object> result = new ArrayList<Object>(list.size());
for (final Object item : list) {
result.add(adapt(item));
}
return result;
}
else if (o instanceof TypeMirror) {
final TypeMirror mirror = (TypeMirror) o;
return utils.getBinaryName((TypeElement) typeUtils.asElement(mirror))
.toString();
}
else if (o instanceof VariableElement) {
final VariableElement element = (VariableElement) o;
final Map<String, Object> result = new TreeMap<String, Object>();
final String enumName =
utils.getBinaryName((TypeElement) element.getEnclosingElement())
.toString();
final String valueName = element.getSimpleName().toString();
result.put("enum", enumName);
result.put("value", valueName);
return result;
}
else {
return super.adapt(o);
}
}
private AnnotationMirror getMirror(final TypeElement element) {
for (final AnnotationMirror candidate : utils
.getAllAnnotationMirrors(element))
{
final Name binaryName =
utils.getBinaryName((TypeElement) candidate.getAnnotationType()
.asElement());
if (binaryName.contentEquals(Indexable.class.getName())) {
return candidate;
}
}
return null;
}
@Override
public InputStream openInput(final String annotationName)
throws IOException
{
try {
return filer.getResource(StandardLocation.CLASS_OUTPUT, "",
Index.INDEX_PREFIX + annotationName).openInputStream();
}
catch (final FileNotFoundException e) {
return null;
}
}
/**
 * Opens an output stream for the index resource of the given annotation,
 * working around a javac file-manager quirk (see below).
 *
 * @param annotationName binary name of the indexed annotation
 * @return a stream the caller must close
 * @throws IOException if the resource or its parent directory cannot be created
 */
@Override
public OutputStream openOutput(final String annotationName)
throws IOException
{
// Elements that contributed to this index entry, passed to the Filer so
// build tools can track which sources the generated resource depends on.
final List<Element> originating = originatingElements.get(annotationName);
final String path = Index.INDEX_PREFIX + annotationName;
final FileObject fileObject =
filer.createResource(StandardLocation.CLASS_OUTPUT, "", path,
originating.toArray(new Element[originating.size()]));
// Verify that the generated file is in the META-INF/json/ subdirectory;
// Despite our asking for it explicitly, the DefaultFileManager will
// strip out the directory if javac was called without an explicit
// output directory (i.e. without <code>-d</code> option).
final String uri = fileObject.toUri().toString();
if (uri != null && uri.endsWith("/" + path)) {
// Filer placed the file where we asked; use its stream directly.
return fileObject.openOutputStream();
}
// NOTE(review): uri is null-checked above but dereferenced unconditionally
// here; toUri().toString() presumably never returns null — confirm.
// Re-derive the intended location by replacing the stripped suffix.
final String prefix =
uri.substring(0, uri.length() - annotationName.length());
final File file = new File(prefix + path);
final File parent = file.getParentFile();
// Create the META-INF/json/ directory ourselves since the Filer did not.
if (parent != null && !parent.isDirectory() && !parent.mkdirs()) {
throw new IOException("Could not create directory: " + parent);
}
return new FileOutputStream(file);
}
/**
 * {@inheritDoc}
 *
 * <p>Always {@code false}: during annotation processing every class being
 * compiled is current, so nothing is ever treated as obsolete here.</p>
 */
@Override
public boolean isClassObsolete(final String className) {
return false;
}
}
} |
package edu.kit.iti.formal.pse.worthwhile.model.ast.util;
import java.util.Iterator;
import java.util.List;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ASTNode;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Addition;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ArrayFunction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ArrayLiteral;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ArrayType;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Assertion;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Assignment;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Assumption;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Axiom;
import edu.kit.iti.formal.pse.worthwhile.model.ast.BinaryExpression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Block;
import edu.kit.iti.formal.pse.worthwhile.model.ast.BooleanLiteral;
import edu.kit.iti.formal.pse.worthwhile.model.ast.BooleanType;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Conditional;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Conjunction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Disjunction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Equal;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ExistsQuantifier;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Expression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ForAllQuantifier;
import edu.kit.iti.formal.pse.worthwhile.model.ast.FunctionCall;
import edu.kit.iti.formal.pse.worthwhile.model.ast.FunctionDeclaration;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Greater;
import edu.kit.iti.formal.pse.worthwhile.model.ast.GreaterOrEqual;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Implication;
import edu.kit.iti.formal.pse.worthwhile.model.ast.IntegerLiteral;
import edu.kit.iti.formal.pse.worthwhile.model.ast.IntegerType;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Invariant;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Less;
import edu.kit.iti.formal.pse.worthwhile.model.ast.LessOrEqual;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Loop;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Minus;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Multiplication;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Negation;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Postcondition;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Precondition;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Program;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ReturnStatement;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ReturnValueReference;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Statement;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Subtraction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Unequal;
import edu.kit.iti.formal.pse.worthwhile.model.ast.VariableDeclaration;
import edu.kit.iti.formal.pse.worthwhile.model.ast.VariableReference;
import edu.kit.iti.formal.pse.worthwhile.model.ast.visitor.HierarchialASTNodeVisitor;
/**
* Implements toString methods for {@link ASTNode}s.
*
* @author fabian
*
*/
public final class AstNodeToStringHelper extends HierarchialASTNodeVisitor {
/**
* A single instance of {@link ASTNodeToStringHelper}.
*/
private static AstNodeToStringHelper singleton = new AstNodeToStringHelper();
/**
* The state of the {@link String} result returned by {@link toString}.
*/
private StringBuffer buf;
/**
* Private default constructor.
*/
private AstNodeToStringHelper() {
super();
}
/**
* Returns a {@link String} representing an {@link ASTNode}.
*
* @param node
* the ASTNode that is represented by the returned String
* @return a String representing <code>node</code>
*/
public static String toString(final ASTNode node) {
AstNodeToStringHelper.singleton.buf = new StringBuffer();
node.accept(AstNodeToStringHelper.singleton);
return AstNodeToStringHelper.singleton.buf.toString();
}
/**
* Appends <code>`(' binaryExpression.left operatorString binaryExpression.right `)'</code> to the buffer.
*
* @param binaryExpression
* the {@link BinaryExpression} that is to be represented as {@link String}
* @param operatorString
* the <code>String</code> representation for the <code>binaryExpression</code>'s operator
*/
private void appendBinaryExpression(final BinaryExpression binaryExpression, final String operatorString) {
this.buf.append("(");
binaryExpression.getLeft().accept(this);
this.buf.append(" " + operatorString + " ");
binaryExpression.getRight().accept(this);
this.buf.append(")");
}
@Override
public void visit(final ASTNode aSTNode) {
this.buf.append(aSTNode.toString());
}
@Override
public void visit(final Addition addition) {
this.appendBinaryExpression(addition, "+");
}
@Override
public void visit(final Subtraction subtraction) {
this.appendBinaryExpression(subtraction, "-");
}
@Override
public void visit(final ArrayFunction arrayFunction) {
this.buf.append("{ ");
ArrayFunction next = arrayFunction;
do {
if (next.getIndex() != null) {
next.getIndex().accept(this);
} else {
this.buf.append("any");
}
this.buf.append(" => ");
next.getValue().accept(this);
next = next.getChainedFunction();
} while (next != null);
this.buf.append(" }");
}
@Override
public void visit(final ArrayLiteral arrayLiteral) {
this.buf.append("{ ");
final Iterator<Expression> i = arrayLiteral.getValues().iterator();
if (i.hasNext()) {
i.next().accept(this);
while (i.hasNext()) {
this.buf.append(", ");
i.next().accept(this);
}
}
this.buf.append(" }");
}
@Override
public void visit(final ArrayType arrayType) {
arrayType.getBaseType().accept(this);
this.buf.append("[]");
}
@Override
public void visit(final Assertion assertion) {
this.buf.append("_assert ");
assertion.getExpression().accept(this);
}
@Override
public void visit(final Assignment assignment) {
assignment.getVariable().accept(this);
this.buf.append(" := ");
assignment.getValue().accept(this);
}
@Override
public void visit(final Assumption assumption) {
this.buf.append("_assume ");
assumption.getExpression().accept(this);
}
@Override
public void visit(final Block block) {
this.buf.append("{\n");
for (Statement stmt : block.getStatements()) {
stmt.accept(this);
this.buf.append("\n");
}
this.buf.append("}\n");
}
@Override
public void visit(final BooleanLiteral booleanLiteral) {
this.buf.append(booleanLiteral.getValue());
}
@Override
public void visit(final BooleanType booleanType) {
this.buf.append("Boolean");
}
@Override
public void visit(final Conjunction conjunction) {
this.appendBinaryExpression(conjunction, "∧");
}
@Override
public void visit(final Equal equal) {
this.appendBinaryExpression(equal, "=");
}
@Override
public void visit(final FunctionCall functionCall) {
this.buf.append(functionCall.getFunction().getName());
this.buf.append("(");
final List<Expression> actuals = functionCall.getActuals();
if (!actuals.isEmpty()) {
final Iterator<Expression> i = actuals.iterator();
i.next().accept(this);
while (i.hasNext()) {
this.buf.append(", ");
i.next().accept(this);
}
}
this.buf.append(")");
}
@Override
public void visit(final IntegerLiteral integerLiteral) {
this.buf.append(integerLiteral.getValue());
}
@Override
public void visit(final IntegerType integerType) {
this.buf.append("Integer");
}
@Override
public void visit(final Less less) {
this.appendBinaryExpression(less, "<");
}
@Override
public void visit(final LessOrEqual lessOrEqual) {
this.appendBinaryExpression(lessOrEqual, "≤");
}
@Override
public void visit(final Loop loop) {
this.buf.append("while ");
loop.getCondition().accept(this);
for (final Invariant i : loop.getInvariants()) {
this.buf.append("\n_invariant ");
i.getExpression().accept(this);
}
this.buf.append(" ");
loop.getBody().accept(this);
}
@Override
public void visit(final Conditional conditional) {
this.buf.append("if ");
conditional.getCondition().accept(this);
conditional.getTrueBlock().accept(this);
if (conditional.getFalseBlock() != null) {
this.buf.append("else ");
conditional.getFalseBlock().accept(this);
}
}
@Override
public void visit(final Minus minus) {
this.buf.append("-");
minus.getOperand().accept(this);
}
@Override
public void visit(final Program program) {
for (final Axiom a : program.getAxioms()) {
a.accept(this);
this.buf.append("\n");
}
for (final FunctionDeclaration f : program.getFunctionDeclarations()) {
f.accept(this);
}
program.getMainBlock().accept(this);
}
@Override
public void visit(final VariableDeclaration variableDeclaration) {
variableDeclaration.getType().accept(this);
this.buf.append(" ");
this.buf.append(variableDeclaration.getName());
Expression initialValue = variableDeclaration.getInitialValue();
if (initialValue != null) {
this.buf.append(" := ");
initialValue.accept(this);
}
}
@Override
public void visit(final VariableReference variableReference) {
this.buf.append(variableReference.getVariable().getName());
if (variableReference.getIndex() != null) {
this.buf.append("[");
variableReference.getIndex().accept(this);
this.buf.append("]");
}
}
@Override
public void visit(final Implication implication) {
this.appendBinaryExpression(implication, "⇒");
}
@Override
public void visit(final Negation negation) {
this.buf.append("¬");
negation.getOperand().accept(this);
}
@Override
public void visit(final Multiplication multiplication) {
this.appendBinaryExpression(multiplication, "∙");
}
@Override
public void visit(final Axiom axiom) {
this.buf.append("_axiom ");
axiom.getExpression().accept(this);
}
@Override
public void visit(final ForAllQuantifier forAllQuantifier) {
this.buf.append("∀ ");
forAllQuantifier.getParameter().accept(this);
Expression condition = forAllQuantifier.getCondition();
if (condition != null) {
this.buf.append(", ");
condition.accept(this);
}
this.buf.append(" : ");
forAllQuantifier.getExpression().accept(this);
}
@Override
public void visit(final ExistsQuantifier existsQuantifier) {
this.buf.append("∃ ");
existsQuantifier.getParameter().accept(this);
Expression condition = existsQuantifier.getCondition();
if (condition != null) {
this.buf.append(", ");
condition.accept(this);
}
this.buf.append(" : ");
existsQuantifier.getExpression().accept(this);
}
@Override
public void visit(final FunctionDeclaration functionDeclaration) {
this.buf.append("function ");
this.buf.append(functionDeclaration.getName());
this.buf.append("(");
List<VariableDeclaration> params = functionDeclaration.getParameters();
if (!params.isEmpty()) {
Iterator<VariableDeclaration> i = functionDeclaration.getParameters().iterator();
// params is not empty, i has next
i.next().accept(this);
while (i.hasNext()) {
this.buf.append(", ");
i.next().accept(this);
}
}
this.buf.append(") ");
for (final Precondition p : functionDeclaration.getPreconditions()) {
this.buf.append("\n");
p.accept(this);
}
for (final Postcondition p : functionDeclaration.getPostconditions()) {
this.buf.append("\n");
p.accept(this);
}
functionDeclaration.getBody().accept(this);
}
@Override
public void visit(final Postcondition postcondition) {
this.buf.append("_ensures ");
postcondition.getExpression().accept(this);
}
@Override
public void visit(final Precondition precondition) {
this.buf.append("_requires ");
precondition.getExpression().accept(this);
}
@Override
public void visit(final ReturnStatement returnStatement) {
this.buf.append("return ");
returnStatement.getReturnValue().accept(this);
}
@Override
public void visit(final ReturnValueReference node) {
this.buf.append("_return");
if (node.getIndex() != null) {
this.buf.append("[");
node.getIndex().accept(this);
this.buf.append("]");
}
}
@Override
public void visit(final Unequal unequal) {
this.appendBinaryExpression(unequal, "≠");
}
@Override
public void visit(final GreaterOrEqual greaterOrEqual) {
this.appendBinaryExpression(greaterOrEqual, "≥");
}
@Override
public void visit(final Greater greater) {
this.appendBinaryExpression(greater, ">");
}
@Override
public void visit(final Disjunction disjunction) {
this.appendBinaryExpression(disjunction, "∨");
}
} |
package org.spongepowered.api.status;
import com.google.common.base.Optional;
import org.spongepowered.api.GameProfile;
import org.spongepowered.api.MinecraftVersion;
import org.spongepowered.api.event.server.StatusPingEvent;
import org.spongepowered.api.text.Text;
import java.util.List;
/**
 * Represents the response to a status request. Unlike {@link StatusPingEvent}
 * this is immutable.
 * <p>
 * This interface exists mostly for convenience and can be implemented in a
 * library pinging other servers for example.
 * </p>
 *
 * @see StatusPingEvent
 */
public interface StatusResponse {
/**
 * Gets the description (MOTD) of the status response.
 *
 * @return The description to display
 */
Text getDescription();
/**
 * Gets player count and the list of players currently playing on the
 * server.
 *
 * @return The player information, or {@link Optional#absent()} if not
 * available
 */
Optional<? extends Players> getPlayers();
/**
 * Gets the version of the server displayed when the client or the server
 * are outdated.
 *
 * @return The server version
 */
MinecraftVersion getVersion();
/**
 * Gets the {@link Favicon} of the server.
 *
 * @return The favicon, or {@link Optional#absent()} if not available
 */
Optional<Favicon> getFavicon();
/**
 * Represents the player count, slots and a list of players current playing
 * on a server.
 */
interface Players {
/**
 * Gets the amount of online players on the server.
 *
 * @return The amount of online players
 */
int getOnline();
/**
 * Gets the maximum amount of allowed players on the server.
 *
 * @return The maximum amount of allowed players
 */
int getMax();
/**
 * Gets an immutable list of online players on the server to display on
 * the client.
 *
 * @return An immutable list of online players
 */
List<GameProfile> getProfiles();
// NOTE(review): "flowerpot" has no apparent relation to a status response;
// this looks like a copy-paste artifact or easter egg. Confirm intent with
// the upstream API before relying on it (removal would break implementers).
/**
 * Checks if this is a flowerpot.
 *
 * @return Whether this is a flowerpot
 */
boolean isFlowerPot();
}
// NOTE(review): duplicate of Players#isFlowerPot at the top level — same
// concern as above; presumably unintended. Verify against upstream.
/**
 * Checks if this is a flowerpot.
 *
 * @return Whether this is a flowerpot
 */
boolean isFlowerPot();
}
package hudson.matrix;
import java.util.List;
import java.util.ArrayList;
import java.util.AbstractList;
import java.util.Map;
import java.util.HashMap;
/**
* Used to assist thegeneration of config table.
*
* <p>
* {@link Axis Axes} are split into four groups.
* {@link #x Ones that are displayed as columns},
* {@link #y Ones that are displayed as rows},
* {@link #z Ones that are listed as bullet items inside table cell},
* and those which only have one value, and therefore doesn't show up
* in the table.
*
* <p>
* Because of object reuse inside {@link Layouter}, this class is not thread-safe.
*
* @author Kohsuke Kawaguchi
*/
public abstract class Layouter<T> {
public final List<Axis> x,y,z;
/**
* Axes that only have one value.
*/
private final List<Axis> trivial = new ArrayList<Axis>();
/**
* Number of data columns and rows.
*/
private int xSize, ySize, zSize;
public Layouter(List<Axis> x, List<Axis> y, List<Axis> z) {
this.x = x;
this.y = y;
this.z = z;
init();
}
/**
* Automatically split axes to x,y, and z.
*/
public Layouter(AxisList axisList) {
x = new ArrayList<Axis>();
y = new ArrayList<Axis>();
z = new ArrayList<Axis>();
List<Axis> nonTrivialAxes = new ArrayList<Axis>();
for (Axis a : axisList) {
if(a.size()>1)
nonTrivialAxes.add(a);
else
trivial.add(a);
}
switch(nonTrivialAxes.size()) {
case 0:
break;
case 1:
z.add(nonTrivialAxes.get(0));
break;
case 2:
// use the longer axis in Y
Axis a = nonTrivialAxes.get(0);
Axis b = nonTrivialAxes.get(1);
x.add(a.size() > b.size() ? b : a);
y.add(a.size() > b.size() ? a : b);
break;
default:
// for size > 3, use x and y, and try to pack y more
for( int i=0; i<nonTrivialAxes.size(); i++ )
(i%3==1?x:y).add(nonTrivialAxes.get(i));
}
init();
}
private void init() {
xSize = calc(x,-1);
ySize = calc(y,-1);
zSize = calc(z,-1);
}
/**
* Computes the width of n-th X-axis.
*/
public int width(int n) {
return calc(x,n);
}
/**
* Computes the repeat count of n-th X-axis.
*/
public int repeatX(int n) {
int w = 1;
for( n--; n>=0; n-- )
w *= x.get(n).size();
return w;
}
/**
* Computes the width of n-th Y-axis.
*/
public int height(int n) {
return calc(y,n);
}
private int calc(List<Axis> l, int n) {
int w = 1;
for( n++ ; n<l.size(); n++ )
w *= l.get(n).size();
return w;
}
/**
* Gets list of {@link Row}s to be displayed.
*
* The {@link Row} object is reused, so every value
* in collection returns the same object (but with different values.)
*/
public List<Row> getRows() {
return new AbstractList<Row>() {
final Row row = new Row();
public Row get(int index) {
row.index = index;
return row;
}
public int size() {
return ySize;
}
};
}
/**
* Represents a row, which is a collection of {@link Column}s.
*/
public final class Row extends AbstractList<Column> {
private int index;
final Column col = new Column();
@Override
public Column get(int index) {
col.xp = index;
col.yp = Row.this.index;
return col;
}
@Override
public int size() {
return xSize;
}
public String drawYHeader(int n) {
int base = calc(y,n);
if(index/base==(index-1)/base && index!=0) return null; // no need to draw a new value
Axis axis = y.get(n);
return axis.value((index/base)%axis.values.size());
}
}
protected abstract T getT(Combination c);
public final class Column extends AbstractList<T> {
/**
* Cell position.
*/
private int xp,yp;
private final Map<String,String> m = new HashMap<String,String>();
public T get(int zp) {
m.clear();
buildMap(xp,x);
buildMap(yp,y);
buildMap(zp,z);
for (Axis a : trivial)
m.put(a.name,a.value(0));
return getT(new Combination(m));
}
private void buildMap(int p, List<Axis> axes) {
int n = p;
for( int i= axes.size()-1; i>=0; i
Axis a = axes.get(i);
m.put(a.name, a.value(n%a.size()));
n /= a.size();
}
}
public int size() {
return zSize;
}
}
} |
package org.spongepowered.common.entity;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.BlockPos;
import net.minecraft.world.World;
/**
 * A deliberately inert {@link Entity}: every lifecycle hook is a no-op and
 * the entity id is a fixed sentinel. Presumably used as a placeholder where
 * an Entity instance is required but must have no in-world behavior —
 * confirm against callers.
 */
public final class EntityDummy extends Entity {
protected EntityDummy(World worldIn) {
super(worldIn);
}
// Sentinel id; never a real network/world entity id.
@Override
public int getEntityId() {
return -1337;
}
// No data watcher fields to register.
@Override
protected void entityInit() {
}
// No per-tick behavior.
@Override
public void onUpdate() {
}
// Silent: no step sounds.
@Override
protected void playStepSound(BlockPos pos, Block blockIn) {
}
// Silent: ignores all sound requests.
@Override
public void playSound(String name, float volume, float pitch) {
}
// No persistent state to read.
@Override
protected void readEntityFromNBT(NBTTagCompound tagCompund) {
}
// No persistent state to write.
@Override
protected void writeEntityToNBT(NBTTagCompound tagCompound) {
}
}
package org.safehaus.kiskis.mgmt.ui.mongodb.tracker;
import com.vaadin.data.Item;
import com.vaadin.data.util.IndexedContainer;
import com.vaadin.terminal.Sizeable;
import com.vaadin.terminal.ThemeResource;
import com.vaadin.ui.Alignment;
import com.vaadin.ui.Button;
import com.vaadin.ui.Component;
import com.vaadin.ui.DateField;
import com.vaadin.ui.Embedded;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.PopupDateField;
import com.vaadin.ui.Table;
import com.vaadin.ui.TextArea;
import com.vaadin.ui.VerticalLayout;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.safehaus.kiskis.mgmt.shared.protocol.ProductOperationState;
import org.safehaus.kiskis.mgmt.shared.protocol.ProductOperationView;
import org.safehaus.kiskis.mgmt.api.mongodb.Config;
import org.safehaus.kiskis.mgmt.shared.protocol.Util;
import org.safehaus.kiskis.mgmt.ui.mongodb.MongoUI;
/**
*
* @author dilshat
*/
public class Tracker {
private final VerticalLayout contentRoot;
private final Table operationsTable;
private final TextArea outputTxtArea;
private final String okIconSource = "icons/16/ok.png";
private final String errorIconSource = "icons/16/cancel.png";
private final String loadIconSource = "../base/common/img/loading-indicator.gif";
private final PopupDateField fromDate, toDate;
private volatile UUID trackID;
private volatile boolean track = false;
private List<ProductOperationView> currentOperations = new ArrayList<ProductOperationView>();
public Tracker() {
contentRoot = new VerticalLayout();
contentRoot.setSpacing(true);
contentRoot.setWidth(90, Sizeable.UNITS_PERCENTAGE);
contentRoot.setHeight(100, Sizeable.UNITS_PERCENTAGE);
contentRoot.setMargin(true);
VerticalLayout content = new VerticalLayout();
content.setWidth(100, Sizeable.UNITS_PERCENTAGE);
content.setSpacing(true);
contentRoot.addComponent(content);
contentRoot.setComponentAlignment(content, Alignment.TOP_CENTER);
HorizontalLayout filterLayout = new HorizontalLayout();
filterLayout.setSpacing(true);
Calendar cal = Calendar.getInstance();
cal.add(Calendar.DAY_OF_MONTH, -1);
fromDate = new PopupDateField("From", cal.getTime());
toDate = new PopupDateField("To", new Date());
fromDate.setDateFormat("yyyy-MM-dd HH:mm:ss");
fromDate.setInvalidAllowed(false);
fromDate.setInvalidCommitted(false);
toDate.setDateFormat("yyyy-MM-dd HH:mm:ss");
toDate.setInvalidAllowed(false);
toDate.setInvalidCommitted(false);
filterLayout.addComponent(fromDate);
filterLayout.addComponent(toDate);
operationsTable = createTableTemplate("Operations", 250);
outputTxtArea = new TextArea("Operation output");
outputTxtArea.setSizeFull();
outputTxtArea.setRows(20);
outputTxtArea.setImmediate(true);
outputTxtArea.setWordwrap(true);
content.addComponent(filterLayout);
content.addComponent(operationsTable);
content.addComponent(outputTxtArea);
content.setComponentAlignment(operationsTable, Alignment.TOP_CENTER);
content.setComponentAlignment(outputTxtArea, Alignment.TOP_CENTER);
}
public Component getContent() {
return contentRoot;
}
public void setTrackId(UUID trackID) {
this.trackID = trackID;
}
public void startTracking() {
if (!track) {
track = true;
MongoUI.getExecutor().execute(new Runnable() {
public void run() {
while (track) {
populateOperations();
populateLogs();
try {
Thread.sleep(1000);
} catch (InterruptedException ex) {
break;
}
}
}
});
}
}
public void stopTracking() {
track = false;
}
private void populateLogs() {
if (trackID != null) {
ProductOperationView po = MongoUI.getMongoManager().getProductOperationView(trackID);
if (po != null) {
setOutput(po.getDescription() + "\nState: " + po.getState() + "\nLogs:\n" + po.getLog());
if (po.getState() != ProductOperationState.RUNNING) {
trackID = null;
}
} else {
setOutput("Product operation not found. Check logs");
}
}
}
private void populateOperations() {
Calendar cal = Calendar.getInstance();
cal.add(Calendar.DAY_OF_MONTH, -1);
List<ProductOperationView> operations = MongoUI.getDbManager().getProductOperations(
Config.PRODUCT_KEY, (Date) fromDate.getValue(), (Date) toDate.getValue(), 100);
IndexedContainer container = (IndexedContainer) operationsTable.getContainerDataSource();
currentOperations.removeAll(operations);
for (ProductOperationView po : currentOperations) {
container.removeItem(po.getId());
}
boolean sortNeeded = false;
for (final ProductOperationView po : operations) {
Embedded progressIcon;
if (po.getState() == ProductOperationState.RUNNING) {
progressIcon = new Embedded("", new ThemeResource(loadIconSource));
} else if (po.getState() == ProductOperationState.FAILED) {
progressIcon = new Embedded("", new ThemeResource(errorIconSource));
} else {
progressIcon = new Embedded("", new ThemeResource(okIconSource));
}
Item item = container.getItem(po.getId());
if (item == null) {
final Button trackLogsBtn = new Button("View logs");
trackLogsBtn.addListener(new Button.ClickListener() {
public void buttonClick(Button.ClickEvent event) {
setTrackId(po.getId());
}
});
item = container.addItem(po.getId());
item.getItemProperty("Date").setValue(po.getCreateDate());
item.getItemProperty("Operation").setValue(po.getDescription());
item.getItemProperty("Check").setValue(trackLogsBtn);
item.getItemProperty("Status").setValue(progressIcon);
sortNeeded = true;
} else {
if (!((Embedded) item.getItemProperty("Status").getValue()).getSource().equals(progressIcon.getSource())) {
item.getItemProperty("Status").setValue(progressIcon);
}
}
}
if (sortNeeded) {
Object[] properties = {"Date"};
boolean[] ordering = {false};
operationsTable.sort(properties, ordering);
}
currentOperations = operations;
}
private Table createTableTemplate(String caption, int height) {
Table table = new Table(caption);
table.setContainerDataSource(new IndexedContainer());
table.addContainerProperty("Date", Date.class, null);
table.addContainerProperty("Operation", String.class, null);
table.addContainerProperty("Check", Button.class, null);
table.addContainerProperty("Status", Embedded.class, null);
table.setWidth(100, Sizeable.UNITS_PERCENTAGE);
table.setHeight(height, Sizeable.UNITS_PIXELS);
table.setPageLength(10);
table.setSelectable(false);
table.setImmediate(true);
return table;
}
private void setOutput(String output) {
if (!Util.isStringEmpty(output)) {
outputTxtArea.setValue(output);
outputTxtArea.setCursorPosition(outputTxtArea.getValue().toString().length() - 1);
}
}
} |
package org.zalando.nakadi.service;
import com.google.common.collect.ImmutableSet;
import org.everit.json.schema.Schema;
import org.everit.json.schema.SchemaException;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.zalando.nakadi.config.NakadiSettings;
import org.zalando.nakadi.domain.CompatibilityMode;
import org.zalando.nakadi.domain.EventCategory;
import org.zalando.nakadi.domain.EventType;
import org.zalando.nakadi.domain.EventTypeBase;
import org.zalando.nakadi.domain.EventTypeStatistics;
import org.zalando.nakadi.domain.Subscription;
import org.zalando.nakadi.enrichment.Enrichment;
import org.zalando.nakadi.exceptions.InternalNakadiException;
import org.zalando.nakadi.exceptions.InvalidEventTypeException;
import org.zalando.nakadi.exceptions.NakadiException;
import org.zalando.nakadi.exceptions.NoSuchEventTypeException;
import org.zalando.nakadi.exceptions.NoSuchPartitionStrategyException;
import org.zalando.nakadi.exceptions.TopicDeletionException;
import org.zalando.nakadi.partitioning.PartitionResolver;
import org.zalando.nakadi.repository.EventTypeRepository;
import org.zalando.nakadi.repository.TopicRepository;
import org.zalando.nakadi.repository.db.SubscriptionDbRepository;
import org.zalando.nakadi.repository.kafka.PartitionsCalculator;
import org.zalando.nakadi.security.Client;
import org.zalando.nakadi.service.timeline.TimelineService;
import org.zalando.nakadi.service.timeline.TimelineSync;
import org.zalando.nakadi.util.FeatureToggleService;
import org.zalando.nakadi.util.JsonUtils;
import org.zalando.nakadi.validation.SchemaEvolutionService;
import org.zalando.nakadi.validation.SchemaIncompatibility;
import org.zalando.problem.Problem;
import javax.ws.rs.core.Response;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import static org.zalando.nakadi.util.FeatureToggleService.Feature.CHECK_PARTITIONS_KEYS;
@Component
public class EventTypeService {
private static final Logger LOG = LoggerFactory.getLogger(EventTypeService.class);
private final EventTypeRepository eventTypeRepository;
private final TimelineService timelineService;
private final PartitionResolver partitionResolver;
private final Enrichment enrichment;
private final SubscriptionDbRepository subscriptionRepository;
private final SchemaEvolutionService schemaEvolutionService;
private final PartitionsCalculator partitionsCalculator;
private final FeatureToggleService featureToggleService;
private final TimelineSync timelineSync;
private final NakadiSettings nakadiSettings;
/**
 * Creates the service with all collaborators injected by Spring.
 *
 * @param eventTypeRepository   persistence for event type definitions
 * @param timelineService       provides topic repositories per event type
 * @param partitionResolver     validates partitioning strategy/keys
 * @param enrichment            validates enrichment strategy configuration
 * @param subscriptionRepository used to check for existing subscriptions
 * @param schemaEvolutionService computes schema evolution on updates
 * @param partitionsCalculator  derives partition counts from statistics
 * @param featureToggleService  runtime feature switches
 * @param timelineSync          coordinates exclusive work on an event type
 * @param nakadiSettings        global configuration (timeouts etc.)
 */
@Autowired
public EventTypeService(final EventTypeRepository eventTypeRepository,
final TimelineService timelineService,
final PartitionResolver partitionResolver,
final Enrichment enrichment,
final SubscriptionDbRepository subscriptionRepository,
final SchemaEvolutionService schemaEvolutionService,
final PartitionsCalculator partitionsCalculator,
final FeatureToggleService featureToggleService,
final TimelineSync timelineSync,
final NakadiSettings nakadiSettings) {
this.eventTypeRepository = eventTypeRepository;
this.timelineService = timelineService;
this.partitionResolver = partitionResolver;
this.enrichment = enrichment;
this.subscriptionRepository = subscriptionRepository;
this.schemaEvolutionService = schemaEvolutionService;
this.partitionsCalculator = partitionsCalculator;
this.featureToggleService = featureToggleService;
this.timelineSync = timelineSync;
this.nakadiSettings = nakadiSettings;
}
/**
 * Lists all registered event types.
 *
 * @return every event type known to the repository
 */
public List<EventType> list() {
    final List<EventType> eventTypes = eventTypeRepository.list();
    return eventTypes;
}
/**
 * Creates a new event type: validates schema, enrichment and partitioning,
 * provisions the backing topic, then persists the definition.
 *
 * <p>If any step fails after the topic was provisioned, the topic is deleted
 * again on a best-effort basis so no orphan topic is left behind (the event
 * type itself was never saved in that case).</p>
 *
 * @param eventType the event type to create
 * @return {@code Result.ok()} on success, otherwise a problem result
 */
public Result<Void> create(final EventTypeBase eventType) {
    final TopicRepository topicRepository = timelineService.getDefaultTopicRepository();
    try {
        validateSchema(eventType);
        enrichment.validate(eventType);
        partitionResolver.validate(eventType);
        final String topicName = topicRepository.createTopic(
                partitionsCalculator.getBestPartitionsCount(eventType.getDefaultStatistic()),
                eventType.getOptions().getRetentionTime());
        eventType.setTopic(topicName);
        eventTypeRepository.saveEventType(eventType);
        return Result.ok();
    } catch (final InvalidEventTypeException | NoSuchPartitionStrategyException e) {
        LOG.debug("Failed to create EventType.", e);
        deleteTopicQuietly(topicRepository, eventType.getTopic());
        return Result.problem(e.asProblem());
    } catch (final NakadiException e) {
        LOG.error("Error creating event type " + eventType, e);
        // FIX: this branch previously leaked the already-created topic
        // (e.g. when saveEventType failed); clean it up here as well.
        deleteTopicQuietly(topicRepository, eventType.getTopic());
        return Result.problem(e.asProblem());
    }
}

/**
 * Best-effort removal of a topic provisioned during a failed creation;
 * a deletion failure is logged but not propagated.
 */
private void deleteTopicQuietly(final TopicRepository topicRepository, final String topic) {
    if (topic == null) {
        // Failure happened before the topic was created; nothing to clean up.
        return;
    }
    try {
        topicRepository.deleteTopic(topic);
    } catch (final TopicDeletionException ex) {
        LOG.warn("failed to delete topic for event type that failed to be created", ex);
    }
}
/**
 * Deletes an event type together with its backing topic.
 *
 * <p>Acquires an exclusive timeline lock on the event type first; refuses the
 * deletion when the caller does not own the event type or when subscriptions
 * still reference it. The lock is released in the finally block.</p>
 *
 * @param eventTypeName name of the event type to delete
 * @param client        the authenticated caller, checked against the owning application
 * @return ok, or a not-found/forbidden/conflict/problem result
 */
public Result<Void> delete(final String eventTypeName, final Client client) {
Closeable deletionCloser = null;
try {
// Block timeline switches for this event type while deleting.
deletionCloser = timelineSync.workWithEventType(eventTypeName, nakadiSettings.getTimelineWaitTimeoutMs());
final Optional<EventType> eventTypeOpt = eventTypeRepository.findByNameO(eventTypeName);
if (!eventTypeOpt.isPresent()) {
return Result.notFound("EventType \"" + eventTypeName + "\" does not exist.");
}
final EventType eventType = eventTypeOpt.get();
if (!client.idMatches(eventType.getOwningApplication())) {
return Result.forbidden("You don't have access to this event type");
}
// Probe for at least one subscription (limit 1) to reject the deletion.
final List<Subscription> subscriptions = subscriptionRepository.listSubscriptions(
ImmutableSet.of(eventTypeName), Optional.empty(), 0, 1);
if (!subscriptions.isEmpty()) {
return Result.conflict("Not possible to remove event-type as it has subscriptions");
}
final TopicRepository topicRepository = timelineService.getTopicRepository(eventType);
// TODO: Cascading delete. Event Type must be deleted with all it's timelines.
// DB record is removed before the topic so a topic-deletion failure
// cannot leave a usable event type pointing at a deleted topic.
eventTypeRepository.removeEventType(eventTypeName);
topicRepository.deleteTopic(eventType.getTopic());
return Result.ok();
} catch (final InterruptedException e) {
// Restore the interrupt flag before reporting the failure.
Thread.currentThread().interrupt();
LOG.error("Failed to wait for timeline switch", e);
return Result.problem(Problem.valueOf(Response.Status.SERVICE_UNAVAILABLE,
"Event type is currently in maintenance, please repeat request"));
} catch (final TimeoutException e) {
LOG.error("Failed to wait for timeline switch", e);
return Result.problem(Problem.valueOf(Response.Status.SERVICE_UNAVAILABLE,
"Event type is currently in maintenance, please repeat request"));
} catch (final TopicDeletionException e) {
LOG.error("Problem deleting kafka topic " + eventTypeName, e);
return Result.problem(e.asProblem());
} catch (final NakadiException e) {
LOG.error("Error deleting event type " + eventTypeName, e);
return Result.problem(e.asProblem());
} finally {
try {
// Always release the timeline lock, even on failure paths.
if (deletionCloser != null) {
deletionCloser.close();
}
} catch (final IOException e) {
LOG.error("Exception occurred when releasing usage of event-type", e);
}
}
}
/**
 * Updates an existing event type after authorization, name/schema validation and
 * schema evolution. A timeline-sync handle is held while the update runs so a
 * concurrent timeline switch cannot interfere.
 *
 * @param eventTypeName name of the event type being updated (must match the payload name)
 * @param eventTypeBase the new definition supplied by the caller
 * @param client        the calling client, checked against the owning application
 * @return {@code Result.ok()} on success, otherwise a problem result
 */
public Result<Void> update(final String eventTypeName, final EventTypeBase eventTypeBase, final Client client) {
    // todo: FIXME Timelines: we need to fix updating of retention time before we release timelines feature
    Closeable closer = null;
    try {
        closer = timelineSync.workWithEventType(eventTypeName, nakadiSettings.getTimelineWaitTimeoutMs());
        final EventType existing = eventTypeRepository.findByName(eventTypeName);
        if (!client.idMatches(existing.getOwningApplication())) {
            return Result.forbidden("You don't have access to this event type");
        }
        validateName(eventTypeName, eventTypeBase);
        validateSchema(eventTypeBase);
        final EventType evolved = schemaEvolutionService.evolve(existing, eventTypeBase);
        // Default statistics may be omitted but never changed.
        evolved.setDefaultStatistic(
                validateStatisticsUpdate(existing.getDefaultStatistic(), evolved.getDefaultStatistic()));
        eventTypeRepository.update(evolved);
        return Result.ok();
    } catch (final InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Failed to wait for timeline switch", e);
        return maintenanceProblem();
    } catch (final TimeoutException e) {
        LOG.error("Failed to wait for timeline switch", e);
        return maintenanceProblem();
    } catch (final InvalidEventTypeException e) {
        return Result.problem(e.asProblem());
    } catch (final NoSuchEventTypeException e) {
        LOG.debug("Could not find EventType: {}", eventTypeName);
        return Result.problem(e.asProblem());
    } catch (final NakadiException e) {
        LOG.error("Unable to update event type", e);
        return Result.problem(e.asProblem());
    } finally {
        try {
            if (closer != null) {
                closer.close();
            }
        } catch (final IOException e) {
            LOG.error("Exception occurred when releasing usage of event-type", e);
        }
    }
}

/** Uniform 503 response returned while a timeline switch blocks modifications. */
private Result<Void> maintenanceProblem() {
    return Result.problem(Problem.valueOf(Response.Status.SERVICE_UNAVAILABLE,
            "Event type is currently in maintenance, please repeat request"));
}
/**
 * Looks up a single event type by name.
 *
 * @param eventTypeName name to look up
 * @return {@code Result.ok(eventType)} when found, otherwise a problem result
 */
public Result<EventType> get(final String eventTypeName) {
    try {
        return Result.ok(eventTypeRepository.findByName(eventTypeName));
    } catch (final NoSuchEventTypeException e) {
        LOG.debug("Could not find EventType: {}", eventTypeName);
        return Result.problem(e.asProblem());
    } catch (final InternalNakadiException e) {
        LOG.error("Problem loading event type " + eventTypeName, e);
        return Result.problem(e.asProblem());
    }
}
/**
 * Ensures default statistics are never changed on update.
 * Omitting them (null) keeps the stored value; any other difference is rejected.
 *
 * @param existing      statistics currently stored (may be null)
 * @param newStatistics statistics from the update payload (may be null)
 * @return the statistics to persist
 * @throws InvalidEventTypeException when the payload attempts to change the statistics
 */
private EventTypeStatistics validateStatisticsUpdate(
        final EventTypeStatistics existing,
        final EventTypeStatistics newStatistics) throws InvalidEventTypeException {
    // A missing value on update means "keep what is stored".
    if (newStatistics == null && existing != null) {
        return existing;
    }
    if (Objects.equals(existing, newStatistics)) {
        return newStatistics;
    }
    throw new InvalidEventTypeException("default statistics must not be changed");
}
/**
 * Rejects updates where the name in the URL path differs from the name in the payload.
 *
 * @throws InvalidEventTypeException when the two names do not match
 */
private void validateName(final String name, final EventTypeBase eventType) throws InvalidEventTypeException {
    final boolean matches = eventType.getName().equals(name);
    if (!matches) {
        throw new InvalidEventTypeException("path does not match resource name");
    }
}
/**
 * Validates the event type's JSON schema.
 * <p>
 * Checks, in order: the schema text is well-formed and loadable as a JSON schema;
 * business event types do not define the reserved "metadata" property; partition key
 * fields exist in the schema (behind the CHECK_PARTITIONS_KEYS feature toggle); and
 * COMPATIBLE-mode schemas contain no forbidden json-schema constructs.
 *
 * @throws InvalidEventTypeException on any validation failure
 */
private void validateSchema(final EventTypeBase eventType) throws InvalidEventTypeException {
    try {
        final String eventTypeSchema = eventType.getSchema().getSchema();
        JsonUtils.checkEventTypeSchemaValid(eventTypeSchema);
        final JSONObject schemaAsJson = new JSONObject(eventTypeSchema);
        final Schema schema = SchemaLoader.load(schemaAsJson);
        // "metadata" is injected by Nakadi for business events and must not be user-defined.
        if (eventType.getCategory() == EventCategory.BUSINESS && schema.definesProperty("#/metadata")) {
            throw new InvalidEventTypeException("\"metadata\" property is reserved");
        }
        if (featureToggleService.isFeatureEnabled(CHECK_PARTITIONS_KEYS)) {
            validatePartitionKeys(schema, eventType);
        }
        if (eventType.getCompatibilityMode() == CompatibilityMode.COMPATIBLE) {
            validateJsonSchemaConstraints(schemaAsJson);
        }
    } catch (final JSONException e) {
        // Schema text could not even be parsed as JSON.
        throw new InvalidEventTypeException("schema must be a valid json");
    } catch (final SchemaException e) {
        // JSON parsed, but is not a valid json-schema document.
        throw new InvalidEventTypeException("schema must be a valid json-schema");
    }
}
/**
 * Rejects schemas that use json-schema constructs forbidden in COMPATIBLE mode.
 *
 * @throws InvalidEventTypeException listing every incompatibility found
 */
private void validateJsonSchemaConstraints(final JSONObject schema) throws InvalidEventTypeException {
    final List<SchemaIncompatibility> found = schemaEvolutionService.collectIncompatibilities(schema);
    if (found.isEmpty()) {
        return;
    }
    throw new InvalidEventTypeException("Invalid schema: "
            + found.stream().map(Object::toString).collect(Collectors.joining(", ")));
}
/**
 * Verifies every declared partition key field is actually defined in the schema.
 *
 * @throws InvalidEventTypeException listing all partition key fields missing from the schema
 */
private void validatePartitionKeys(final Schema schema, final EventTypeBase eventType)
        throws InvalidEventTypeException, JSONException, SchemaException {
    // Collect every partition key the schema does not define.
    final List<String> missing = eventType.getPartitionKeyFields().stream()
            .filter(key -> !schema.definesProperty(convertToJSONPointer(key)))
            .collect(Collectors.toList());
    if (!missing.isEmpty()) {
        throw new InvalidEventTypeException("partition_key_fields " + missing + " absent in schema");
    }
}
/**
 * Converts a dotted field path (e.g. {@code a.b.c}) into the slash-separated form
 * ({@code a/b/c}) accepted by {@code Schema.definesProperty}.
 */
private String convertToJSONPointer(final String value) {
    // The dot is a literal separator here; String#replace does a plain substitution
    // without compiling a regex on every call (replaceAll("\\.", "/") was equivalent
    // but went through Pattern each time).
    return value.replace(".", "/");
}
} |
package org.jeo.map;
import java.io.IOException;
import java.util.Iterator;
import org.jeo.data.Dataset;
import org.jeo.data.Workspace;
import org.jeo.data.Workspaces;
import org.osgeo.proj4j.CoordinateReferenceSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.vividsolutions.jts.geom.Envelope;
public class MapBuilder {
static Logger LOG = LoggerFactory.getLogger(MapBuilder.class);
Map map;
boolean size = false, bounds = false, crs = false;
public MapBuilder() {
map = new Map();
}
public MapBuilder size(int width, int height) {
map.setWidth(width);
map.setHeight(height);
size = true;
return this;
}
public MapBuilder bounds(Envelope bounds) {
//TODO: set size based on bbox aspect ratio
map.setBounds(bounds);
this.bounds = true;
return this;
}
public MapBuilder crs(CoordinateReferenceSystem crs) {
map.setCRS(crs);
this.crs = true;
return this;
}
public MapBuilder layer(Dataset data) {
return layer(data.getName(), data.getTitle(), data);
}
public MapBuilder layer(String name, Dataset data) {
return layer(name, data.getTitle() != null ? data.getTitle() : name, data);
}
public MapBuilder layer(String name, String title, Dataset data) {
Layer l = new Layer();
l.setName(name);
l.setTitle(title);
l.setData(data);
map.getLayers().add(l);
return this;
}
public MapBuilder layer(String name, java.util.Map<String,Object> params) throws IOException {
return layer(name, name, params);
}
public MapBuilder layer(String name, String title, java.util.Map<String,Object> params)
throws IOException {
Workspace ws = Workspaces.create(params);
if (ws == null) {
throw new IllegalArgumentException("Unable to obtqin workspace: " + params);
}
Dataset data = ws.get(name);
if (data == null) {
ws.dispose();
throw new IllegalArgumentException(
"No dataset named " + name + " in worksoace: " + params);
}
layer(name, title, data);
map.getCleanup().add(ws);
return this;
}
public MapBuilder style(Stylesheet style) {
map.setStyle(style);
return this;
}
public Map map() {
if (!bounds || !crs) {
//set from layers
Iterator<Layer> it = map.getLayers().iterator();
while(it.hasNext() && !bounds && !crs) {
Layer l = it.next();
try {
if (!bounds) {
Envelope e = l.getData().bounds();
if (e != null && !e.isNull()) {
map.setBounds(e);
bounds = true;
}
}
if (!crs) {
CoordinateReferenceSystem c = l.getData().getCRS();
if (c != null) {
map.setCRS(c);
crs = true;
}
}
} catch (IOException ex) {
LOG.debug("Error deriving bounds/crs from map layers", ex);
}
}
}
if (!size) {
//set from bounds
Envelope e = map.getBounds();
if (e != null) {
map.setWidth(Map.DEFAULT_WIDTH);
map.setHeight((int)(map.getWidth() * e.getHeight() / e.getWidth()));
}
}
return map;
}
} |
package ro.robertgabriel.controllers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import ro.robertgabriel.dao.MongoDBListDao;
import ro.robertgabriel.frontend.Configuration;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@Controller
public class HomeController {
@Autowired
private Configuration configuration;
@ResponseBody
@RequestMapping("/")
public String getHome(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
MongoDBListDao mongoDBListDao = new MongoDBListDao();
// List lists = mongoDBListDao.readAllList() ;
// request.setAttribute("lists", lists);
// request.setAttribute("configuration", getConfiguration());
// request.getRequestDispatcher("WEB-INF/jsp/index.jsp").forward(request, response);
return "Home test";
}
public Configuration getConfiguration() {
return configuration;
}
public void setConfiguration(Configuration configuration) {
this.configuration = configuration;
}
} |
package org.eclipse.emf.emfstore.client.ui.handlers;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.emf.emfstore.client.ui.util.EMFStoreMessageDialog;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.swt.widgets.Shell;
public abstract class AbstractEMFStoreUIController {
protected Shell shell;
private ProgressMonitorDialog progressDialog;
public AbstractEMFStoreUIController(Shell shell) {
setShell(shell);
}
public Shell getShell() {
return shell;
}
public void setShell(Shell shell) {
this.shell = shell;
}
protected ProgressMonitorDialog openProgress() {
progressDialog = new ProgressMonitorDialog(getShell());
progressDialog.setCancelable(true);
progressDialog.open();
return progressDialog;
}
protected void closeProgress() {
if (progressDialog != null) {
progressDialog.close();
}
}
protected IProgressMonitor getProgressMonitor() {
if (progressDialog != null) {
return progressDialog.getProgressMonitor();
}
return new NullProgressMonitor();
}
protected boolean confirmationDialog(String message) {
MessageDialog dialog = new MessageDialog(null, "Confirmation", null, message, MessageDialog.QUESTION,
new String[] { "Yes", "No" }, 0);
return dialog.open() == Dialog.OK;
}
public void handleException(Exception exception) {
EMFStoreMessageDialog.showExceptionDialog(exception);
closeProgress();
}
} |
package org.metaborg.spoofax.eclipse.language;
import java.util.Collection;
import java.util.Set;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IEditorRegistry;
import org.eclipse.ui.PlatformUI;
import org.metaborg.core.context.IContextProcessor;
import org.metaborg.core.language.ILanguageCache;
import org.metaborg.core.language.ILanguageComponent;
import org.metaborg.core.language.ILanguageIdentifierService;
import org.metaborg.core.language.ILanguageImpl;
import org.metaborg.core.language.LanguageFileSelector;
import org.metaborg.core.language.ResourceExtensionFacet;
import org.metaborg.core.language.dialect.IDialectProcessor;
import org.metaborg.core.processing.LanguageChangeProcessor;
import org.metaborg.spoofax.eclipse.editor.IEclipseEditor;
import org.metaborg.spoofax.eclipse.resource.IEclipseResourceService;
import org.metaborg.spoofax.eclipse.util.EditorMappingUtils;
import org.metaborg.spoofax.eclipse.util.MarkerUtils;
import org.metaborg.spoofax.eclipse.util.ResourceUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
/**
* Extends the {@code LanguageChangeProcessor} to include Eclipse-specific operations such as changing editor
* associations and resource markers.
*/
public class EclipseLanguageChangeProcessor extends LanguageChangeProcessor {
    private static final Logger logger = LoggerFactory.getLogger(EclipseLanguageChangeProcessor.class);

    private final IEclipseResourceService resourceService;
    private final ILanguageIdentifierService languageIdentifier;

    private final IWorkspace workspace;
    private final IEditorRegistry eclipseEditorRegistry;
    private final Display display;

    /**
     * @param resourceService    resolves between Metaborg and Eclipse resources
     * @param languageIdentifier identifies which language a resource belongs to
     * @param dialectProcessor   forwarded to the base processor
     * @param contextProcessor   forwarded to the base processor
     * @param editorRegistry     Metaborg's editor registry (forwarded to the base processor;
     *                           distinct from the Eclipse editor registry obtained below)
     * @param languageCaches     forwarded to the base processor
     */
    @Inject public EclipseLanguageChangeProcessor(IEclipseResourceService resourceService,
        ILanguageIdentifierService languageIdentifier, IDialectProcessor dialectProcessor,
        IContextProcessor contextProcessor, org.metaborg.core.editor.IEditorRegistry editorRegistry,
        Set<ILanguageCache> languageCaches) {
        super(dialectProcessor, contextProcessor, editorRegistry, languageCaches);

        this.resourceService = resourceService;
        this.languageIdentifier = languageIdentifier;

        // Eclipse workbench singletons; must be available when this processor is constructed.
        this.workspace = ResourcesPlugin.getWorkspace();
        this.eclipseEditorRegistry = PlatformUI.getWorkbench().getEditorRegistry();
        this.display = Display.getDefault();
    }

    /**
     * Associates the component's file extensions with the Spoofax editor (on the UI
     * thread, asynchronously) before delegating to the base implementation.
     */
    @Override public void addedComponent(ILanguageComponent component) {
        logger.debug("Running component added job for {}", component);

        final Set<String> extensions = getExtensions(component);
        if(!extensions.isEmpty()) {
            logger.debug("Associating extension(s) {} to Spoofax editor", Joiner.on(", ").join(extensions));
            // Editor-registry mutation must happen on the SWT UI thread.
            display.asyncExec(new Runnable() {
                @Override public void run() {
                    EditorMappingUtils.set(eclipseEditorRegistry, IEclipseEditor.id, extensions);
                }
            });
        }

        super.addedComponent(component);
    }

    /**
     * Diffs the old and new components' extensions and updates the Eclipse editor
     * associations accordingly, then delegates to the base implementation.
     */
    @Override public void reloadedComponent(ILanguageComponent oldComponent, ILanguageComponent newComponent) {
        logger.debug("Running component reloaded job for {}", newComponent);

        final Set<String> oldExtensions = getExtensions(oldComponent);
        final Set<String> newExtensions = getExtensions(newComponent);

        if(!oldExtensions.isEmpty() || !newExtensions.isEmpty()) {
            // Only extensions that disappeared are removed; ones that appeared are added.
            final Set<String> removeExtensions = Sets.difference(oldExtensions, newExtensions);
            final Set<String> addExtensions = Sets.difference(newExtensions, oldExtensions);
            if(removeExtensions.size() > 0) {
                logger.debug("Unassociating extension(s) {} from Spoofax editor", Joiner.on(", ")
                    .join(removeExtensions));
            }
            if(addExtensions.size() > 0) {
                logger.debug("Associating extension(s) {} to Spoofax editor", Joiner.on(", ").join(addExtensions));
            }
            display.asyncExec(new Runnable() {
                @Override public void run() {
                    EditorMappingUtils.remove(eclipseEditorRegistry, IEclipseEditor.id, removeExtensions);
                    EditorMappingUtils.set(eclipseEditorRegistry, IEclipseEditor.id, addExtensions);
                }
            });
        }

        super.reloadedComponent(oldComponent, newComponent);
    }

    /**
     * Removes the component's extension associations from the Spoofax editor, then
     * delegates to the base implementation.
     */
    @Override protected void removedComponent(ILanguageComponent component) {
        logger.debug("Running component removed job for {}", component);

        final Set<String> extensions = getExtensions(component);
        if(!extensions.isEmpty()) {
            logger.debug("Unassociating extension(s) {} from Spoofax editor", Joiner.on(", ").join(extensions));
            display.asyncExec(new Runnable() {
                @Override public void run() {
                    EditorMappingUtils.remove(eclipseEditorRegistry, IEclipseEditor.id, extensions);
                }
            });
        }

        super.removedComponent(component);
    }

    /**
     * Clears all markers from workspace resources that belong to the removed language
     * implementation, then delegates to the base implementation. Failures are logged
     * per-resource and do not abort the removal.
     */
    @Override public void removedImpl(ILanguageImpl language) {
        try {
            final Collection<FileObject> resources =
                ResourceUtils.workspaceResources(resourceService,
                    new LanguageFileSelector(languageIdentifier, language), workspace.getRoot());
            final Collection<IResource> eclipseResources = ResourceUtils.toEclipseResources(resourceService, resources);
            logger.debug("Removing markers from {} workspace resources", resources.size());
            for(IResource resource : eclipseResources) {
                try {
                    MarkerUtils.clearAll(resource);
                } catch(CoreException e) {
                    // Best-effort: keep clearing the remaining resources.
                    final String message = String.format("Cannot remove markers for resource %s", resource);
                    logger.error(message, e);
                }
            }
        } catch(FileSystemException e) {
            final String message = String.format("Cannot retrieve all workspace resources for %s", language);
            logger.error(message, e);
        }

        super.removedImpl(language);
    }

    /** Collects all file extensions declared by the component's resource-extension facets. */
    private Set<String> getExtensions(ILanguageComponent component) {
        final Set<String> extensions = Sets.newHashSet();
        for(ResourceExtensionFacet facet : component.facets(ResourceExtensionFacet.class)) {
            Iterables.addAll(extensions, facet.extensions());
        }
        return extensions;
    }
}
package com.fillumina.performance.consumer.assertion;
import com.fillumina.performance.consumer.PerformanceConsumer;
import com.fillumina.performance.producer.LoopPerformances;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
* Asserts condition about a specific test in a suite.
*
* @author Francesco Illuminati <fillumina@gmail.com>
*/
public class AssertPerformanceForExecutionSuite
implements PerformanceConsumer, Serializable, SuiteExecutionAssertion {
private static final long serialVersionUID = 1L;
private final Map<String, AssertPerformance> map = new HashMap<>();
private final float percentageTolerance;
public static SuiteExecutionAssertion withTolerance(
final float tolerancePercentage) {
return new AssertPerformanceForExecutionSuite(tolerancePercentage);
}
public static AssertPerformanceForExecutionSuite
createSuiteConsumerWithTolerance(final float tolerancePercentage) {
return new AssertPerformanceForExecutionSuite(tolerancePercentage);
}
public AssertPerformanceForExecutionSuite() {
this(AssertPerformance.SAFE_TOLERANCE);
}
private AssertPerformanceForExecutionSuite(
final float percentageTolerance) {
this.percentageTolerance = percentageTolerance;
}
@Override
public PerformanceAssertion forDefaultExecution() {
return forExecution(null);
}
/** Sets the test to assert. */
@Override
public PerformanceAssertion forExecution(final String testName) {
final AssertPerformance assertPerformance =
AssertPerformance.withTolerance(percentageTolerance);
map.put(testName, assertPerformance);
return assertPerformance;
}
@Override
public void consume(final String testName,
final LoopPerformances loopPerformances) {
final AssertPerformance assertPerformance = map.get(testName);
if (assertPerformance != null) {
assertPerformance.consume(testName, loopPerformances);
}
}
} |
package se.callistaenterprise.async;
import com.ning.http.client.AsyncCompletionHandler;
import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.Response;
import rx.Observable;
import rx.schedulers.Schedulers;
import javax.servlet.AsyncContext;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
@WebServlet(urlPatterns = "/example6", asyncSupported = true)
public class Example6Servlet extends HttpServlet {
private ExecutorService executorService;
private AsyncHttpClient httpClient;
@Override
public void init() throws ServletException {
executorService = (ExecutorService) getServletContext().getAttribute("executorService");
httpClient = (AsyncHttpClient) getServletContext().getAttribute("httpClient");
}
protected void doGet(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
final AsyncContext asyncContext = request.startAsync();
List<String> urls = Arrays.asList(
"http://gp.se",
"http://dn.se",
"http://svd.se",
"http://expressen.se",
"http://aftonbladet.se"
);
Observable
.from(urls)
.subscribeOn(Schedulers.from(executorService))
.concatMap(url ->
observable(url).onErrorReturn((t) -> "Error:" + t.toString()))
.subscribe(
(v) -> write(asyncContext, v),
(e) -> write(asyncContext, e.toString()),
asyncContext::complete);
}
private void write(AsyncContext asyncContext, String s) {
try {
asyncContext.getResponse().getWriter().write(s);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private Observable<String> observable(String url) {
return Observable.create(subscriber -> {
try {
httpClient.prepareHead(url).execute(new AsyncCompletionHandler<Response>() {
@Override
public Response onCompleted(Response response) throws Exception {
subscriber.onNext(url + " >>> " + response.getHeader("Server") + " (" + Thread.currentThread() + ")<br>");
subscriber.onCompleted();
return response;
}
@Override
public void onThrowable(Throwable t) {
subscriber.onError(t);
}
});
} catch (Exception e) {
System.out.println("Err");
subscriber.onError(e);
}
});
}
} |
package skyhussars.engine.terrain;
import java.util.Optional;
public class TerrainDefinition {
private String name;
private int size;
private String heightmapPath;
private String textureMap;
private TerrainTexture tx1,tx2,tx3;
public TerrainDefinition(){};
public TerrainDefinition(TerrainDefinition td){
this.heightmapPath = td.heightmapPath;
this.textureMap = td.textureMap;
this.size = td.size;
this.name = td.name;
this.tx1 = td.tx1;
this.tx2 = td.tx2;
this.tx3 = td.tx3;
}
public TerrainDefinition heightMapPath(String path){
TerrainDefinition td = new TerrainDefinition(this);
td.heightmapPath = path;
return td;
}
public TerrainDefinition textureMap(String path){
TerrainDefinition td = new TerrainDefinition(this);
td.textureMap = path;
return td;
}
public TerrainDefinition size(int size){
TerrainDefinition td = new TerrainDefinition(this);
td.size = size;
return td;
}
public TerrainDefinition name(String path){
TerrainDefinition td = new TerrainDefinition(this);
td.name = name;
return td;
}
public TerrainDefinition tx1(TerrainTexture tx){
TerrainDefinition td = new TerrainDefinition(this);
td.tx1 = tx;
return td;
}
public TerrainDefinition tx2(TerrainTexture tx){
TerrainDefinition td = new TerrainDefinition(this);
td.tx2 = tx;
return td;
}
public TerrainDefinition tx3(TerrainTexture tx){
TerrainDefinition td = new TerrainDefinition(this);
td.tx3 = tx;
return td;
}
public Optional<String> name(){return Optional.of(name);}
public Optional<Integer> size(){return Optional.of(size);}
public Optional<String> heightMapPath(){return Optional.of(heightmapPath);}
public TerrainTexture tx1(){return tx1;}
public TerrainTexture tx2(){return tx2;}
public TerrainTexture tx3(){return tx3;}
public Optional<String> textureMap(){return Optional.ofNullable(textureMap);}
} |
package es.tid.cosmos.platform.injection.server.hadoopfs;
import java.io.*;
import java.net.URI;
import java.util.List;
import com.google.common.io.Files;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.AccessControlException;
import org.apache.sshd.server.SshFile;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Matchers;
import es.tid.cosmos.base.util.Logger;
import es.tid.cosmos.platform.injection.server.BaseSftpTest;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* @author logc
*/
public class HadoopSshFileTest extends BaseSftpTest {
private static final org.apache.log4j.Logger LOGGER =
        Logger.get(HadoopSshFile.class);
private static final String USERNAME = System.getProperty("user.name");
// Hadoop configuration key selecting the default filesystem.
private static final String DEFAULT_FILESYSTEM_NAME = "fs.default.name";
// NOTE(review): this literal arrived truncated/unterminated in the source ("file:).
// "file://" is assumed from how setUp() prepends it to a URI — confirm against
// version control.
private static final String FILE_URI_PREAMBLE = "file://";

// File and directory under test, backed by a real local FileSystem (see setUp).
private HadoopSshFile hadoopSshFile;
private HadoopSshFile hadoopSshDir;
private FileSystem hadoopFS;
// Mockito-backed filesystem used by the "does not throw" tests.
private FileSystem mockedFileSystem;
private HadoopSshFile neverExists;
private File tempDir;

public HadoopSshFileTest() {
    super(LOGGER);
}
@Before
public void setUp() throws IOException, InterruptedException{
    // Point Hadoop's default filesystem at a fresh, world-writable temp directory
    // so the tests exercise real FS operations without HDFS.
    Configuration configuration = new Configuration();
    this.tempDir = Files.createTempDir();
    boolean success = this.tempDir.setWritable(true, false);
    if (!success) {
        throw new IllegalStateException("could not set to writable: " +
                this.tempDir.toString());
    }
    configuration.set(
            DEFAULT_FILESYSTEM_NAME, FILE_URI_PREAMBLE
            + this.tempDir.toURI());
    // Per-user directory and a file inside it, both under test.
    String foodir = createUserTempURI("").toString();
    this.hadoopFS = FileSystem.get(configuration);
    this.hadoopSshDir = new HadoopSshFile(foodir, USERNAME, this.hadoopFS);
    this.hadoopSshFile = new HadoopSshFile(foodir + "/file01",
            USERNAME, this.hadoopFS);
    // A path that never exists, backed by a mock so IO failures can be injected.
    this.mockedFileSystem = mock(FileSystem.class);
    this.neverExists = new HadoopSshFile("/in/fantasy", "whatever_user",
            this.mockedFileSystem);
}
@After
public void tearDown() throws IOException {
    // Remove whatever the test created, asserting deletions actually succeed.
    if (this.hadoopSshFile.doesExist()) {
        this.hadoopSshFile.handleClose();
        assertTrue(this.hadoopSshFile.delete());
    }
    if (this.hadoopSshDir.doesExist()) {
        assertTrue(this.hadoopSshDir.delete());
    }
    // The temp dir must be empty by now or delete() returns false.
    boolean success = this.tempDir.delete();
    if (!success) {
        throw new IllegalStateException("could not delete: " +
                this.tempDir.toString());
    }
    this.hadoopFS.close();
    this.mockedFileSystem.close();
}
// The absolute path is the temp-dir URI plus the per-user subpath.
@Test
public void testGetAbsolutePath() throws Exception {
    assertEquals(this.tempDir.toURI().toString().concat(USERNAME)
            .concat("/file01"), this.hadoopSshFile.getAbsolutePath());
}

// getName returns only the last path segment.
@Test
public void testGetName() throws Exception {
    assertEquals("file01", this.hadoopSshFile.getName());
}

/**
 * Show that the owner is the logged in user as long as the file does not
 * exist. This complies with clients that request information about a path
 * before creating it.
 *
 * The owner is the username of the Java process after creation, since these
 * tests use the native filesystem underneath.
 *
 * @throws Exception
 */
@Test
public void testGetOwner() throws Exception {
    assertEquals(USERNAME, this.hadoopSshFile.getOwner());
    assertFalse(this.hadoopSshFile.doesExist());
    this.hadoopSshFile.create();
    assertNotSame(USERNAME, this.hadoopSshFile.getOwner());
}
@Test
public void testIsDirectory() throws Exception {
    this.hadoopSshDir.mkdir();
    assertTrue(this.hadoopSshDir.isDirectory());
}

// A non-existent path reports "not a directory" rather than throwing.
@Test
public void testIsDirectoryDoesNotThrowException() throws Exception {
    assertFalse(this.hadoopSshDir.doesExist());
    assertFalse(this.hadoopSshDir.isDirectory());
}

@Test
public void testDoesExist() throws Exception {
    assertFalse(this.hadoopSshFile.doesExist());
    this.hadoopSshFile.create();
    assertTrue(this.hadoopSshFile.doesExist());
}

/**
 * Shows that doesExist swallows an underlying IOException and reports
 * non-existence instead of propagating the exception.
 */
@Test
public void testExceptionDoesExist() throws Exception {
    when(this.mockedFileSystem.exists(Matchers.<Path>any()))
            .thenThrow(new IOException("you have been mocked"));
    assertFalse(this.neverExists.doesExist());
}
@Test
public void testIsReadable() throws Exception {
    this.hadoopSshFile.create();
    assertTrue(this.hadoopSshFile.isReadable());
}

@Test
public void testIsFile() throws Exception {
    this.hadoopSshFile.create();
    assertTrue(this.hadoopSshFile.isFile());
}

/**
 * Show that when isFile finds an IOException, e.g. the file is not found,
 * the function returns false instead of re-throwing the exception.
 *
 * @throws Exception
 */
@Test
public void testIsFileDoesNotThrowException() throws Exception {
    when(this.mockedFileSystem.isFile(Matchers.<Path>any()))
            .thenThrow(new IOException("you have been mocked"));
    assertFalse(this.neverExists.isFile());
}

@Test
public void testIsWritable() throws Exception {
    this.hadoopSshFile.create();
    assertTrue(this.hadoopSshFile.isWritable());
}

// A not-yet-existing file is writable when its parent directory is writable.
@Test
public void testIsWritableWhenNotYetExisting() throws Exception {
    this.hadoopSshDir.create();
    assertTrue(this.hadoopSshDir.isWritable());
    assertFalse(this.hadoopSshFile.doesExist());
    assertTrue(this.hadoopSshFile.isWritable());
}
// Directories are "executable" (traversable); plain files are not.
@Test
public void testIsExecutable() throws Exception {
    assertFalse(this.hadoopSshFile.isExecutable());
    this.hadoopSshDir.mkdir();
    assertTrue(this.hadoopSshDir.isExecutable());
}

@Test
public void testIsRemovable() throws Exception {
    this.hadoopSshFile.create();
    assertTrue(this.hadoopSshFile.isRemovable());
}

@Test
public void testGetParentFile() throws Exception {
    this.hadoopSshFile.create();
    assertNotNull(this.hadoopSshFile.getParentFile());
}

@Test
public void testGetLastModified() throws Exception {
    this.hadoopSshFile.create();
    assertNotSame(0, this.hadoopSshFile.getLastModified());
}

@Test
public void testSetLastModified() throws Exception {
    this.hadoopSshFile.create();
    long fixedTime = System.currentTimeMillis();
    this.hadoopSshFile.setLastModified(fixedTime);
    // This assertion is not assertEquals because there is a precision
    // mismatch between HDFS and System.currentTimeMillis; we try to
    // write with more decimal places than can be read. What we can say is
    // that this difference has an upper limit.
    long retrievedTime = this.hadoopSshFile.getLastModified();
    assertTrue(String.format("sent: %s, got: %s, diff: %s", fixedTime,
            retrievedTime, fixedTime - retrievedTime),
            Math.abs(fixedTime - retrievedTime) < 1000);
}
// setLastModified reports failure via its return value rather than throwing.
@Test
public void testSetLastModifiedDoesNotThrowException() throws Exception {
    doThrow(new IOException("times could not be set"))
            .when(this.mockedFileSystem).setTimes(
                    Matchers.<Path>any(),
                    Matchers.anyLong(),
                    Matchers.anyLong());
    assertFalse(this.neverExists.setLastModified(123L));
}

@Test
public void testGetSize() throws Exception {
    this.hadoopSshFile.create();
    OutputStream ostream = this.hadoopSshFile.createOutputStream(0);
    ostream.write("Hello world".getBytes());
    ostream.close();
    // "Hello world" is 11 bytes.
    assertEquals(11, this.hadoopSshFile.getSize());
}

// When the file status cannot be read, getSize reports 0 instead of throwing.
@Test
public void testGetSizeDoesNotThrowException() throws Exception {
    when(this.mockedFileSystem.getFileStatus(Matchers.any(Path.class)))
            .thenThrow(new IOException("mocked"));
    this.neverExists.create();
    assertEquals(0L, this.neverExists.getSize());
}

@Test
public void testMkdir() throws Exception {
    assertTrue(this.hadoopSshDir.mkdir());
}

// mkdir reports failure via its return value rather than throwing.
@Test
public void testMkdirDoesNotThrowException() throws Exception {
    when(this.mockedFileSystem.mkdirs(Matchers.<Path>any()))
            .thenThrow(new IOException("could not create dir"));
    this.neverExists.create();
    assertFalse(this.neverExists.mkdir());
}
@Test
public void testDelete() throws Exception {
    this.hadoopSshFile.create();
    assertTrue(this.hadoopSshFile.doesExist());
    assertTrue(this.hadoopSshFile.delete());
    assertFalse(this.hadoopSshFile.doesExist());
}

// NOTE(review): this test asserts on a full Mockito mock of HadoopSshFile, whose
// delete() returns the default false regardless of the mockedFileSystem stubbing
// below — so it does not actually exercise the exception-handling path it claims
// to. Consider using spy(...) on a real instance (as testListFilesWhenDirectoryNotReadable
// does) so the stubbed filesystem is reached.
@Test
public void testDeleteDoesNotThrowException() throws Exception {
    HadoopSshFile mockFile = mock(HadoopSshFile.class);
    when(mockFile.isDirectory()).thenReturn(true);
    mockFile.create();
    when(this.mockedFileSystem.delete(Matchers.<Path>any(),
            Matchers.anyBoolean()))
            .thenThrow(new IOException("could not delete path"));
    assertFalse(mockFile.delete());
}

@Test
public void testCreate() throws Exception {
    assertTrue(this.hadoopSshFile.create());
    assertTrue(this.hadoopSshFile.doesExist());
}
@Test
public void testTruncate() throws Exception {
this.hadoopSshFile.create();
OutputStream ostream = this.hadoopSshFile.createOutputStream(0);
ostream.write("Hello world".getBytes());
ostream.close();
assertEquals(11, this.hadoopSshFile.getSize());
this.hadoopSshFile.truncate();
assertEquals(0, this.hadoopSshFile.getSize());
}
@Test
public void testMove() throws Exception {
    // Moving an existing file onto a non-existing target makes the target appear.
    final String targetUri = createUserTempURI("/new/file01").toString();
    final HadoopSshFile target = new HadoopSshFile(
            targetUri, USERNAME, this.hadoopFS);
    this.hadoopSshFile.create();
    assertFalse(target.doesExist());
    this.hadoopSshFile.move(target);
    assertTrue(target.doesExist());
}
@Test
public void testMoveDoesNotThrowException() throws Exception {
    // A failing FileSystem.rename() must be reported as "false", never propagated.
    when(this.mockedFileSystem.rename(Matchers.<Path>any(),
            Matchers.<Path>any())).thenThrow(new IOException("could not " +
            "rename this path"));
    final HadoopSshFile destination = new HadoopSshFile("/wherever",
            "some_user", this.mockedFileSystem);
    assertFalse(this.neverExists.move(destination));
}
@Test
public void testListSshFiles() throws Exception {
    // Listing the parent directory must yield exactly the one file we created.
    this.hadoopSshFile.create();
    final List<SshFile> listing = this.hadoopSshDir.listSshFiles();
    assertEquals(1, listing.size());
    final SshFile onlyEntry = listing.get(0);
    assertEquals(this.hadoopSshFile.getAbsolutePath(), onlyEntry.getAbsolutePath());
}
@Test
public void testListFilesWhenDirectoryNotReadable() throws Exception {
    // When listing is denied, listSshFiles() must not throw.
    when(this.mockedFileSystem.listStatus(Matchers.<Path>any()))
            .thenThrow(new AccessControlException("not authorized"));
    final HadoopSshFile unreadableDir = spy(this.neverExists);
    doReturn(true).when(unreadableDir).isDirectory();
    // There is only one object in the parent folder, namely this folder
    assertEquals(1, unreadableDir.listSshFiles().size());
}
@Test
public void testWriteToFile() throws Exception {
    // handleClose() is expected to flush pending writes; afterwards the
    // reported size must match what was written.
    this.hadoopSshFile.create();
    final OutputStream out = this.hadoopSshFile.createOutputStream(0L);
    out.write("Hello world".getBytes());
    this.hadoopSshFile.handleClose();
    assertEquals(11, this.hadoopSshFile.getSize());
}
@Test
public void testReadFromFile() throws Exception {
    this.hadoopSshFile.create();
    // A brand-new file is empty: the very first read must hit EOF (-1).
    InputStream istream = this.hadoopSshFile.createInputStream(0L);
    int read = istream.read();
    istream.close();
    assertEquals(-1, read);
    // Write content, flush via handleClose(), then read it back byte by byte.
    String written = "Hello world";
    OutputStream outputStream = this.hadoopSshFile.createOutputStream(0L);
    outputStream.write(written.getBytes());
    this.hadoopSshFile.handleClose();
    InputStream inputStream = this.hadoopSshFile.createInputStream(0L);
    StringWriter writer = new StringWriter();
    try {
        int byteRead;
        while ((byteRead = inputStream.read()) != -1) {
            writer.write(byteRead);
        }
    } finally {
        // Fix: this second input stream used to be left open (resource leak).
        inputStream.close();
    }
    assertEquals(written, writer.toString());
}
/**
 * Builds a URI inside the per-test temporary directory for the given
 * user-relative path suffix.
 */
private URI createUserTempURI(String pathExtension) {
    final String base = this.tempDir.toURI().toString();
    return URI.create(base + USERNAME + pathExtension);
}
} |
package summer.ioc.compiler;
import java.lang.reflect.Method;
import java.util.List;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtField;
import javassist.CtMethod;
import javassist.LoaderClassPath;
import summer.aop.AopType;
import summer.ioc.SummerCompiler;
import summer.ioc.compiler.util.JavassistSummerCompilerUtil;
import summer.log.Logger;
import summer.util.JavassistUtil;
import summer.util.Log;
import summer.util.Reflect;
/**
* @author li
* @version 1 (20151011 9:49:52)
* @since Java7
*/
// com.alibaba.dubbo.common.compiler.support.JavassistCompiler
public class JavassistSummerCompiler implements SummerCompiler {
private static final String AOP_TYPE_NAME_SUFFIX = "_Aop";
private static final Logger log = Log.slf4j();
public Class<?> compileClass(Class<?> originalType) {
String originalTypeName = originalType.getName();
ClassPool classPool = new ClassPool(true);
classPool.appendClassPath(new LoaderClassPath(getClass().getClassLoader()));
String subClassName = originalTypeName + AOP_TYPE_NAME_SUFFIX;
CtClass ctClass = classPool.makeClass(subClassName);
CtClass superCtClass = JavassistUtil.getCtClass(classPool, originalTypeName);
if (originalType.isInterface()) {
ctClass.addInterface(superCtClass);
} else {
JavassistUtil.ctClassSetSuperclass(ctClass, superCtClass);
}
CtClass iocContextAwareCtClass = JavassistUtil.getCtClass(classPool, AopType.class.getName());
ctClass.addInterface(iocContextAwareCtClass);
addAopTypeMetaField(ctClass);
addAopTypeMetaGetter(ctClass);
List<Method> methods = Reflect.getPublicMethods(originalType);
for (Method method : methods) {
addOverrideAopMethod(ctClass, method);
}
if (!originalType.isInterface()) {
addAopTypeInvokeMethod(ctClass, methods);
addAopTypeCallMethod(ctClass, methods);
}
log.info("compileClass for" + originalTypeName);
return JavassistUtil.ctClassToClass(ctClass);
}
private static void addAopTypeCallMethod(CtClass ctClass, List<Method> methods) {
String callDelegateOverrideMethodSrc = JavassistSummerCompilerUtil.buildAopTypeCallMethodSrc(methods);
CtMethod callDelegateOverrideMethod = JavassistUtil.ctNewMethodMake(callDelegateOverrideMethodSrc, ctClass);
JavassistUtil.ctClassAddMethod(ctClass, callDelegateOverrideMethod);
}
private static void addAopTypeInvokeMethod(CtClass ctClass, List<Method> methods) {
String callDelegateOverrideMethodSrc = JavassistSummerCompilerUtil.buildAopTypeInvokeMethodSrc(methods);
CtMethod callDelegateOverrideMethod = JavassistUtil.ctNewMethodMake(callDelegateOverrideMethodSrc, ctClass);
JavassistUtil.ctClassAddMethod(ctClass, callDelegateOverrideMethod);
}
private static void addAopTypeMetaGetter(CtClass ctClass) {
String iocContextFieldSetterSrc = "public summer.aop.AopTypeMeta getAopTypeMeta() { return this.aopTypeMeta; } ";
CtMethod iocContextFieldSetter = JavassistUtil.ctNewMethodMake(iocContextFieldSetterSrc, ctClass);
JavassistUtil.ctClassAddMethod(ctClass, iocContextFieldSetter);
}
private static void addAopTypeMetaField(CtClass ctClass) {
CtField ctField = JavassistUtil.ctFieldWithInitMake("private summer.aop.AopTypeMeta aopTypeMeta=new summer.aop.AopTypeMeta(); ", ctClass);
JavassistUtil.ctClassAddField(ctClass, ctField);
}
private static void addOverrideAopMethod(CtClass ctClass, Method method) {
String overrideMethodSrc = JavassistSummerCompilerUtil.buildOverrideAopMethodSrc(method);
CtMethod overrideSuperMethod = JavassistUtil.ctNewMethodMake(overrideMethodSrc, ctClass);
JavassistUtil.ctClassAddMethod(ctClass, overrideSuperMethod);
}
} |
package models;
import java.io.Serializable;
import javax.persistence.*;
import java.util.Date;
import java.sql.Timestamp;
import java.util.List;
/**
* The persistent class for the users database table.
*
*/
@Entity
@Table(name="users")
@NamedQueries
(
{
@NamedQuery(name="User.findAll", query="SELECT u FROM User u"),
@NamedQuery(name = "User.findByEmailAndPassword", query = "SELECT u FROM User u WHERE u.email = :email AND u.password = :password")
}
)
public class User implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
private int id;
@Column(name="activation_key")
private String activationKey;
private boolean active;
@Temporal(TemporalType.TIMESTAMP)
@Column(name="creation_date")
private Date creationDate;
private String email;
@Column(name="last_update")
private Timestamp lastUpdate;
private String password;
@Column(name="picture_path")
private String picturePath;
//bi-directional many-to-one association to Reminder
@OneToMany(mappedBy="user", fetch=FetchType.LAZY)
private List<Reminder> reminders;
public User() {
}
public int getId() {
return this.id;
}
public void setId(int id) {
this.id = id;
}
public String getActivationKey() {
return this.activationKey;
}
public void setActivationKey(String activationKey) {
this.activationKey = activationKey;
}
public boolean getActive() {
return this.active;
}
public void setActive(boolean active) {
this.active = active;
}
public Date getCreationDate() {
return this.creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public String getEmail() {
return this.email;
}
public void setEmail(String email) {
this.email = email;
}
public Timestamp getLastUpdate() {
return this.lastUpdate;
}
public void setLastUpdate(Timestamp lastUpdate) {
this.lastUpdate = lastUpdate;
}
public String getPassword() {
return this.password;
}
public void setPassword(String password) {
this.password = password;
}
public String getPicturePath() {
return this.picturePath;
}
public void setPicturePath(String picturePath) {
this.picturePath = picturePath;
}
public List<Reminder> getReminders() {
return this.reminders;
}
public void setReminders(List<Reminder> reminders) {
this.reminders = reminders;
}
public Reminder addReminder(Reminder reminder) {
getReminders().add(reminder);
reminder.setUser(this);
return reminder;
}
public Reminder removeReminder(Reminder reminder) {
getReminders().remove(reminder);
return reminder;
}
} |
package dataspaces;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.commons.vfs2.FileContent;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.NameScope;
import org.apache.commons.vfs2.Selectors;
import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
import org.apache.commons.vfs2.provider.temp.TemporaryFileProvider;
import org.apache.commons.vfs2.util.RandomAccessMode;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.objectweb.proactive.extensions.dataspaces.vfs.AbstractLimitingFileObject;
import org.objectweb.proactive.extensions.dataspaces.vfs.VFSFactory;
/**
* Test for general access and write access limiting and keeping (Abstract)FileObject behavior
* (which is not so obvious, especially regarding unusual behavior like non-existing file, null
* array etc.).
*/
public class AbstractLimitingFileObjectTest {
@Rule
public TestName name = new TestName();
private static final String CHILD_NAME = "abc";
private FileObject realFile;
private FileObject readOnlyFile;
private FileObject readWriteFile;
private FileObject ancestorLimitedFile;
private FileObject anotherFile;
private DefaultFileSystemManager manager;
@Before
public void setUp() throws Exception {
manager = VFSFactory.createDefaultFileSystemManager();
manager.addProvider("tmpfs", new TemporaryFileProvider());
realFile = manager.resolveFile("tmpfs:///test1/" + name.getMethodName());
readWriteFile = new ConstantlyLimitingFileObject(realFile, false, true);
readOnlyFile = new ConstantlyLimitingFileObject(realFile, true, true);
ancestorLimitedFile = new ConstantlyLimitingFileObject(realFile, false, false);
anotherFile = manager.resolveFile("tmpfs:///" + name.getMethodName());
anotherFile.createFile();
realFile.delete();
assertFalse(readWriteFile.exists());
assertFalse(readWriteFile.exists());
assertTrue(anotherFile.exists());
}
private void createRealFile() throws FileSystemException {
realFile.createFile();
assertTrue(readOnlyFile.exists());
}
private void createRealFolder() throws FileSystemException {
realFile.createFolder();
assertTrue(readOnlyFile.exists());
}
private void createRealFileChild() throws FileSystemException {
realFile.createFolder();
final FileObject childFile = realFile.resolveFile(CHILD_NAME);
childFile.createFile();
assertTrue(childFile.exists());
}
@After
public void tearDown() throws Exception {
if (realFile != null) {
realFile.close();
realFile = null;
}
if (manager != null) {
manager.close();
manager = null;
}
}
@Test
public void testReadOnlyCreateFile() throws FileSystemException {
try {
readOnlyFile.createFile();
fail("Expected exception");
} catch (FileSystemException e) {
}
assertFalse(readOnlyFile.exists());
}
@Test
public void testReadOnlyCreateFolder() throws FileSystemException {
try {
readOnlyFile.createFolder();
fail("Expected exception");
} catch (FileSystemException e) {
}
assertFalse(readOnlyFile.exists());
}
@Test
public void testReadOnlyDelete() throws FileSystemException {
createRealFile();
try {
readOnlyFile.delete();
fail("Expected exception");
} catch (FileSystemException e) {
}
assertTrue(readOnlyFile.exists());
}
@Test
public void testReadOnlyDeleteFileSelector() throws FileSystemException {
createRealFile();
try {
readOnlyFile.delete(Selectors.SELECT_ALL);
fail("Expected exception");
} catch (FileSystemException e) {
}
assertTrue(readOnlyFile.exists());
}
@Test
public void testReadOnlyIsWriteable() throws FileSystemException {
assertFalse(readOnlyFile.isWriteable());
}
@Test
public void testReadOnlyCanRenameToFileObjectSource() {
assertFalse(readOnlyFile.canRenameTo(anotherFile));
}
// limitation of AbstractLimitingFileObject
@Ignore
@Test
public void testReadOnlyCanRenameToFileObjectDesination() {
assertFalse(anotherFile.canRenameTo(readOnlyFile));
}
@Test
public void testReadOnlyCopyFromFileObjectFileSelector() throws FileSystemException {
try {
readOnlyFile.copyFrom(anotherFile, Selectors.SELECT_ALL);
fail("Expected exception");
} catch (FileSystemException e) {
}
assertFalse(readOnlyFile.exists());
}
@Test
public void testReadOnlyMoveToFileObjectSource() throws FileSystemException {
createRealFile();
try {
readOnlyFile.moveTo(anotherFile);
fail("Expected exception");
} catch (FileSystemException e) {
}
assertTrue(readOnlyFile.exists());
}
// limitation of AbstractLimitingFileObject (rely on rename operation internally)
@Ignore
@Test
public void testReadOnlyMoveToFileObjectDestination() throws FileSystemException {
createRealFile();
try {
anotherFile.moveTo(readOnlyFile);
fail("Expected exception");
} catch (FileSystemException e) {
}
assertTrue(anotherFile.exists());
}
@Test
public void testReadOnlyFindFilesFileSelector() throws FileSystemException {
createRealFileChild();
final FileObject result[] = readOnlyFile.findFiles(Selectors.SELECT_CHILDREN);
assertEquals(1, result.length);
assertFalse(result[0].isWriteable());
}
@Test
public void testReadOnlyFindFilesFileSelectorNonExisting() throws FileSystemException {
final FileObject result[] = readOnlyFile.findFiles(Selectors.SELECT_CHILDREN);
assertNull(result);
}
@Test
public void testReadOnlyFindFilesFileSelectorBooleanList() throws FileSystemException {
createRealFileChild();
final ArrayList<FileObject> result = new ArrayList<FileObject>();
readOnlyFile.findFiles(Selectors.SELECT_CHILDREN, true, result);
assertEquals(1, result.size());
assertFalse(result.get(0).isWriteable());
}
@Test
public void testReadOnlyResolveFileString() throws FileSystemException {
createRealFileChild();
final FileObject childFile = readOnlyFile.resolveFile(CHILD_NAME);
assertNotNull(childFile);
assertFalse(childFile.isWriteable());
}
@Test
public void testReadOnlyResolveFileStringNameScope() throws FileSystemException {
createRealFileChild();
final FileObject childFile = readOnlyFile.resolveFile(CHILD_NAME, NameScope.CHILD);
assertNotNull(childFile);
assertFalse(childFile.isWriteable());
}
@Test
public void testReadOnlyGetChildStringExisting() throws FileSystemException {
createRealFileChild();
final FileObject childFile = readOnlyFile.getChild(CHILD_NAME);
assertNotNull(childFile);
assertFalse(childFile.isWriteable());
}
@Test
public void testReadOnlyGetChildStringNonExisting() throws FileSystemException {
createRealFolder();
final FileObject childFile = readOnlyFile.getChild(CHILD_NAME);
assertNull(childFile);
}
@Test
public void testReadOnlyGetChildrenExisting() throws FileSystemException {
createRealFileChild();
final FileObject childrenFiles[] = readOnlyFile.getChildren();
assertNotNull(childrenFiles);
assertEquals(1, childrenFiles.length);
assertFalse(childrenFiles[0].isWriteable());
}
@Test
public void testReadOnlyGetChildrenNonExisting() throws FileSystemException {
createRealFolder();
final FileObject childrenFiles[] = readOnlyFile.getChildren();
assertNotNull(childrenFiles);
assertEquals(0, childrenFiles.length);
}
@Test
public void testReadOnlyGetParent() throws FileSystemException {
final FileObject parent = readOnlyFile.getParent();
assertNotNull(parent);
assertFalse(parent.isWriteable());
}
@Test
public void testReadOnlyGetParentForRoot() throws FileSystemException {
final FileObject rawRoot = readOnlyFile.getFileSystem().getRoot();
final ConstantlyLimitingFileObject root = new ConstantlyLimitingFileObject(rawRoot, true, true);
final FileObject parent = root.getParent();
assertNull(parent);
}
@Test
public void testReadOnlyGetContentInputStream() throws IOException {
createRealFile();
final FileContent content = readOnlyFile.getContent();
try {
content.getInputStream().close();
} finally {
content.close();
}
}
@Test
public void testReadOnlyGetContentOutputStream() throws IOException {
createRealFile();
final FileContent content = readOnlyFile.getContent();
try {
content.getOutputStream();
fail("Expected exception");
} catch (FileSystemException x) {
} finally {
content.close();
}
}
@Test
public void testReadOnlyGetContentRandomInputStream() throws IOException {
createRealFile();
final FileContent content = readOnlyFile.getContent();
try {
content.getRandomAccessContent(RandomAccessMode.READ).close();
} finally {
content.close();
}
}
@Test
public void testReadOnlyGetContentRandomOutputStream() throws IOException {
createRealFile();
final FileContent content = readOnlyFile.getContent();
try {
content.getRandomAccessContent(RandomAccessMode.READWRITE).close();
fail("Expected exception");
} catch (FileSystemException x) {
} finally {
content.close();
}
}
//FIXME: depends on VFS-259, fixed in VFS fork
@Test
public void testReadOnlyGetContentGetFile() throws FileSystemException {
final FileObject sameFile = readOnlyFile.getContent().getFile();
assertFalse(sameFile.isWriteable());
}
@Test
public void testReadWriteCreateFile() throws FileSystemException {
readWriteFile.createFile();
assertTrue(readWriteFile.exists());
}
@Test
public void testReadWriteCreateFolder() throws FileSystemException {
readWriteFile.createFolder();
assertTrue(readWriteFile.exists());
}
@Test
public void testReadWriteDelete() throws FileSystemException {
createRealFile();
readWriteFile.delete();
assertFalse(readWriteFile.exists());
}
@Test
public void testReadWriteDeleteFileSelector() throws FileSystemException {
createRealFile();
readWriteFile.delete(Selectors.SELECT_ALL);
assertFalse(readWriteFile.exists());
}
@Test
public void testReadWriteIsWriteable() throws FileSystemException {
createRealFile();
assertTrue(readWriteFile.isWriteable());
}
@Test
public void testReadWriteCanRenameToFileObjectSource() {
assertTrue(readWriteFile.canRenameTo(anotherFile));
}
@Test
public void testReadWriteCanRenameToFileObjectDesination() {
assertTrue(anotherFile.canRenameTo(readWriteFile));
}
@Test
public void testReadWriteCopyFromFileObjectFileSelector() throws FileSystemException {
readWriteFile.copyFrom(anotherFile, Selectors.SELECT_ALL);
assertTrue(readWriteFile.exists());
}
@Test
public void testReadWriteMoveToFileObjectSource() throws FileSystemException {
createRealFile();
readWriteFile.moveTo(anotherFile);
assertFalse(readWriteFile.exists());
}
@Test
public void testReadWriteMoveToFileObjectDestination() throws FileSystemException {
createRealFile();
anotherFile.moveTo(readWriteFile);
assertFalse(anotherFile.exists());
}
@Test
public void testReadWriteFindFilesFileSelector() throws FileSystemException {
createRealFileChild();
final FileObject result[] = readWriteFile.findFiles(Selectors.SELECT_CHILDREN);
assertEquals(1, result.length);
assertTrue(result[0].isWriteable());
}
@Test
public void testReadWriteFindFilesFileSelectorNonExisting() throws FileSystemException {
final FileObject result[] = readWriteFile.findFiles(Selectors.SELECT_CHILDREN);
assertNull(result);
}
@Test
public void testReadWriteFindFilesFileSelectorBooleanList() throws FileSystemException {
createRealFileChild();
final ArrayList<FileObject> result = new ArrayList<FileObject>();
readWriteFile.findFiles(Selectors.SELECT_CHILDREN, true, result);
assertEquals(1, result.size());
assertTrue(result.get(0).isWriteable());
}
@Test
public void testReadWriteResolveFileString() throws FileSystemException {
createRealFileChild();
final FileObject childFile = readWriteFile.resolveFile(CHILD_NAME);
assertNotNull(childFile);
assertTrue(childFile.isWriteable());
}
@Test
public void testReadWriteResolveFileStringNameScope() throws FileSystemException {
createRealFileChild();
final FileObject childFile = readWriteFile.resolveFile(CHILD_NAME, NameScope.CHILD);
assertNotNull(childFile);
assertTrue(childFile.isWriteable());
}
@Test
public void testReadWriteGetChildStringExisting() throws FileSystemException {
createRealFileChild();
final FileObject childFile = readWriteFile.getChild(CHILD_NAME);
assertNotNull(childFile);
assertTrue(childFile.isWriteable());
}
@Test
public void testReadWriteGetChildStringNonExisting() throws FileSystemException {
createRealFolder();
final FileObject childFile = readWriteFile.getChild(CHILD_NAME);
assertNull(childFile);
}
@Test
public void testReadWriteGetChildrenExisting() throws FileSystemException {
createRealFileChild();
final FileObject childrenFiles[] = readWriteFile.getChildren();
assertNotNull(childrenFiles);
assertEquals(1, childrenFiles.length);
assertTrue(childrenFiles[0].isWriteable());
}
@Test
public void testReadWriteGetChildrenNonExisting() throws FileSystemException {
createRealFolder();
final FileObject childrenFiles[] = readWriteFile.getChildren();
assertNotNull(childrenFiles);
assertEquals(0, childrenFiles.length);
}
@Test
public void testWriteOnlyGetParent() throws FileSystemException {
final FileObject parent = readWriteFile.getParent();
assertNotNull(parent);
assertTrue(parent.isWriteable());
}
@Test
public void testReadWriteGetParentForRoot() throws FileSystemException {
final FileObject rawRoot = readWriteFile.getFileSystem().getRoot();
final ConstantlyLimitingFileObject root = new ConstantlyLimitingFileObject(rawRoot, true, true);
final FileObject parent = root.getParent();
assertNull(parent);
}
@Test
public void testReadWriteGetContentInputStream() throws IOException {
createRealFile();
final FileContent content = readWriteFile.getContent();
try {
content.getInputStream().close();
} finally {
content.close();
}
}
@Test
public void testReadWriteGetContentOutputStream() throws IOException {
createRealFile();
final FileContent content = readWriteFile.getContent();
try {
content.getOutputStream();
} finally {
content.close();
}
}
@Test
public void testReadWriteGetContentRandomInputStream() throws IOException {
createRealFile();
final FileContent content = readWriteFile.getContent();
try {
content.getRandomAccessContent(RandomAccessMode.READ).close();
} finally {
content.close();
}
}
@Test
public void testReadWriteGetContentRandomOutputStream() throws IOException {
createRealFile();
final FileContent content = readWriteFile.getContent();
try {
content.getRandomAccessContent(RandomAccessMode.READWRITE).close();
} finally {
content.close();
}
}
@Test
public void testReadWriteGetContentGetFile() throws FileSystemException {
final FileObject sameFile = readWriteFile.getContent().getFile();
assertTrue(sameFile.isWriteable());
}
@Test
public void testAncestorLimitedGetParent() throws FileSystemException {
assertNull(ancestorLimitedFile.getParent());
}
@Test(expected = FileSystemException.class)
public void testAncestorLimitedResolveFileParent() throws FileSystemException {
ancestorLimitedFile.resolveFile("../");
}
@Test
public void testAncestorLimitedResolveFileChild() throws FileSystemException {
final FileObject child = ancestorLimitedFile.resolveFile("unexisting_file");
assertNotNull(child);
}
private static class ConstantlyLimitingFileObject
extends AbstractLimitingFileObject<ConstantlyLimitingFileObject> {
private final boolean readOnly;
private boolean allowReturnAncestor;
public ConstantlyLimitingFileObject(final FileObject fileObject, final boolean readOnly,
final boolean allowReturnAncestor) {
super(fileObject);
this.readOnly = readOnly;
this.allowReturnAncestor = allowReturnAncestor;
}
@Override
protected boolean isReadOnly() {
return readOnly;
}
@Override
protected ConstantlyLimitingFileObject doDecorateFile(FileObject file) {
return new ConstantlyLimitingFileObject(file, readOnly, allowReturnAncestor);
}
@Override
protected boolean canReturnAncestor(ConstantlyLimitingFileObject decoratedAncestor) {
return allowReturnAncestor;
}
}
} |
package techreborn.tiles.tier0;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.*;
import net.minecraft.tileentity.TileEntityFurnace;
import net.minecraft.util.EnumFacing;
import reborncore.api.IToolDrop;
import reborncore.api.recipe.IBaseRecipeType;
import reborncore.api.recipe.RecipeHandler;
import reborncore.api.tile.IInventoryProvider;
import reborncore.common.recipes.RecipeTranslator;
import reborncore.common.registration.RebornRegistry;
import reborncore.common.tile.TileLegacyMachineBase;
import reborncore.common.util.Inventory;
import reborncore.common.util.ItemUtils;
import techreborn.api.Reference;
import techreborn.api.recipe.machines.AlloySmelterRecipe;
import reborncore.client.containerBuilder.IContainerProvider;
import reborncore.client.containerBuilder.builder.BuiltContainer;
import reborncore.client.containerBuilder.builder.ContainerBuilder;
import techreborn.init.ModBlocks;
import techreborn.lib.ModInfo;
@RebornRegistry(modID = ModInfo.MOD_ID)
public class TileIronAlloyFurnace extends TileLegacyMachineBase
implements IToolDrop, IInventoryProvider, IContainerProvider {
public int tickTime;
public Inventory inventory = new Inventory(4, "TileIronAlloyFurnace", 64, this);
public int burnTime;
public int currentItemBurnTime;
public int cookTime;
int input1 = 0;
int input2 = 1;
int output = 2;
int fuel = 3;
public TileIronAlloyFurnace() {
}
/**
* Returns the number of ticks that the supplied fuel item will keep the
* alloy furnace burning, or 0 if the item isn't fuel
* @param stack Itemstack of fuel
* @return Integer Number of ticks
*/
public static int getItemBurnTime(ItemStack stack) {
if (stack.isEmpty()) {
return 0;
}
return (int) (TileEntityFurnace.getItemBurnTime(stack) * 1.25);
}
@Override
public void update() {
super.update();
final boolean flag = this.burnTime > 0;
boolean flag1 = false;
if (this.burnTime > 0) {
--this.burnTime;
}
if (!this.world.isRemote) {
if (this.burnTime != 0 || !this.getStackInSlot(this.input1).isEmpty()&& !this.getStackInSlot(this.fuel).isEmpty()) {
if (this.burnTime == 0 && this.canSmelt()) {
this.currentItemBurnTime = this.burnTime = TileIronAlloyFurnace.getItemBurnTime(this.getStackInSlot(this.fuel));
if (this.burnTime > 0) {
flag1 = true;
if (!this.getStackInSlot(this.fuel).isEmpty()) {
this.decrStackSize(this.fuel, 1);
}
}
}
if (this.isBurning() && this.canSmelt()) {
++this.cookTime;
if (this.cookTime == 200) {
this.cookTime = 0;
this.smeltItem();
flag1 = true;
}
} else {
this.cookTime = 0;
}
}
if (flag != this.burnTime > 0) {
flag1 = true;
// TODO sync on/off
}
}
if (flag1) {
this.markDirty();
}
}
public boolean hasAllInputs(final IBaseRecipeType recipeType) {
if (recipeType == null) {
return false;
}
for (final Object input : recipeType.getInputs()) {
boolean hasItem = false;
boolean useOreDict = input instanceof String || recipeType.useOreDic();
boolean checkSize = input instanceof ItemStack;
for (int inputslot = 0; inputslot < 2; inputslot++) {
if (ItemUtils.isInputEqual(input, inventory.getStackInSlot(inputslot), true, true,
useOreDict)) {
ItemStack stack = RecipeTranslator.getStackFromObject(input);
if (!checkSize || inventory.getStackInSlot(inputslot).getCount() >= stack.getCount()) {
hasItem = true;
}
}
}
if (!hasItem)
return false;
}
return true;
}
private boolean canSmelt() {
if (this.getStackInSlot(this.input1).isEmpty() || this.getStackInSlot(this.input2).isEmpty()) {
return false;
} else {
ItemStack itemstack = null;
for (final IBaseRecipeType recipeType : RecipeHandler.getRecipeClassFromName(Reference.ALLOY_SMELTER_RECIPE)) {
if (this.hasAllInputs(recipeType)) {
itemstack = recipeType.getOutput(0);
break;
}
}
if (itemstack == null)
return false;
if (this.getStackInSlot(this.output).isEmpty())
return true;
if (!this.getStackInSlot(this.output).isItemEqual(itemstack))
return false;
final int result = this.getStackInSlot(this.output).getCount() + itemstack.getCount();
return result <= this.getInventoryStackLimit() && result <= this.getStackInSlot(this.output).getMaxStackSize(); // Forge
// BugFix:
// Make
// respect
// stack
// sizes
// properly.
}
}
/**
* Turn one item from the furnace source stack into the appropriate smelted
* item in the furnace result stack
*/
public void smeltItem() {
if (this.canSmelt()) {
ItemStack itemstack = ItemStack.EMPTY;
for (final IBaseRecipeType recipeType : RecipeHandler.getRecipeClassFromName(Reference.ALLOY_SMELTER_RECIPE)) {
if (this.hasAllInputs(recipeType)) {
itemstack = recipeType.getOutput(0);
break;
}
if (!itemstack.isEmpty()) {
break;
}
}
if (this.getStackInSlot(this.output).isEmpty()) {
this.setInventorySlotContents(this.output, itemstack.copy());
} else if (this.getStackInSlot(this.output).getItem() == itemstack.getItem()) {
this.decrStackSize(this.output, -itemstack.getCount());
}
for (final IBaseRecipeType recipeType : RecipeHandler.getRecipeClassFromName(Reference.ALLOY_SMELTER_RECIPE)) {
boolean hasAllRecipes = true;
if (this.hasAllInputs(recipeType)) {
} else {
hasAllRecipes = false;
}
if (hasAllRecipes) {
for (Object input : recipeType.getInputs()) {
boolean useOreDict = input instanceof String || recipeType.useOreDic();
for (int inputSlot = 0; inputSlot < 2; inputSlot++) {
if (ItemUtils.isInputEqual(input, this.inventory.getStackInSlot(inputSlot), true, true, useOreDict)) {
int count = 1;
if (input instanceof ItemStack) {
count = RecipeTranslator.getStackFromObject(input).getCount();
}
inventory.decrStackSize(inputSlot, count);
break;
}
}
}
}
}
}
}
/**
* Furnace isBurning
* @return Boolean True if furnace is burning
*/
public boolean isBurning() {
return this.burnTime > 0;
}
public int getBurnTimeRemainingScaled(final int scale) {
if (this.currentItemBurnTime == 0) {
this.currentItemBurnTime = 200;
}
return this.burnTime * scale / this.currentItemBurnTime;
}
public int getCookProgressScaled(final int scale) {
return this.cookTime * scale / 200;
}
@Override
public EnumFacing getFacing() {
return this.getFacingEnum();
}
@Override
public ItemStack getToolDrop(final EntityPlayer entityPlayer) {
return new ItemStack(ModBlocks.IRON_ALLOY_FURNACE, 1);
}
public boolean isComplete() {
return false;
}
@Override
public Inventory getInventory() {
return this.inventory;
}
public int getBurnTime() {
return this.burnTime;
}
public void setBurnTime(final int burnTime) {
this.burnTime = burnTime;
}
public int getCurrentItemBurnTime() {
return this.currentItemBurnTime;
}
public void setCurrentItemBurnTime(final int currentItemBurnTime) {
this.currentItemBurnTime = currentItemBurnTime;
}
public int getCookTime() {
return this.cookTime;
}
public void setCookTime(final int cookTime) {
this.cookTime = cookTime;
}
@Override
public BuiltContainer createContainer(final EntityPlayer player) {
return new ContainerBuilder("alloyfurnace").player(player.inventory).inventory(8, 84).hotbar(8, 142)
.addInventory().tile(this)
.filterSlot(0, 47, 17,
stack -> RecipeHandler.recipeList.stream()
.anyMatch(recipe -> recipe instanceof AlloySmelterRecipe
&& ItemUtils.isInputEqual(recipe.getInputs().get(0), stack, true, true, true)))
.filterSlot(1, 65, 17,
stack -> RecipeHandler.recipeList.stream()
.anyMatch(recipe -> recipe instanceof AlloySmelterRecipe
&& ItemUtils.isInputEqual(recipe.getInputs().get(1), stack, true, true, true)))
.outputSlot(2, 116, 35).fuelSlot(3, 56, 53).syncIntegerValue(this::getBurnTime, this::setBurnTime)
.syncIntegerValue(this::getCookTime, this::setCookTime)
.syncIntegerValue(this::getCurrentItemBurnTime, this::setCurrentItemBurnTime).addInventory().create(this);
}
@Override
public boolean canBeUpgraded() {
return false;
}
} |
package com.opengamma.analytics.financial.credit.index;
import static com.opengamma.analytics.financial.credit.options.CDSIndexPrvider.CDX_NA_HY_20140213_RECOVERY_RATES;
import static com.opengamma.analytics.financial.credit.options.CDSIndexPrvider.getCDX_NA_HY_20140213_CreditCurves;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Arrays;
import java.util.BitSet;
import org.testng.annotations.Test;
import com.opengamma.analytics.financial.credit.isdastandardmodel.ISDABaseTest;
import com.opengamma.analytics.financial.credit.isdastandardmodel.ISDACompliantCreditCurve;
import com.opengamma.util.test.TestGroup;
@Test(groups = TestGroup.UNIT)
public class IntrinsicIndexDataBundleTest extends ISDABaseTest {
private static final ISDACompliantCreditCurve[] CREDIT_CURVES = getCDX_NA_HY_20140213_CreditCurves();
private static final double[] RECOVERY_RATES = CDX_NA_HY_20140213_RECOVERY_RATES;
@SuppressWarnings("unused")
@Test
public void defaultedNamesTest() {
final int[] defaultedIndex = new int[] {5, 14, 22, 45 };
final int indexSize = CREDIT_CURVES.length;
BitSet defaulted = new BitSet(indexSize);
IntrinsicIndexDataBundle intrinsicData = new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES);
final IntrinsicIndexDataBundle intrinsicDataNoDefault = new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, defaulted);
assertTrue(checkEqual(intrinsicData, intrinsicDataNoDefault, 1.e-15));
final double[] rrCp = Arrays.copyOf(RECOVERY_RATES, indexSize);
rrCp[33] = -0.4;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[33] + " given at index " + 33, e.getMessage());
}
rrCp[33] = 1.4;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[33] + " given at index " + 33, e.getMessage());
}
IntrinsicIndexDataBundle intrinsicDataRec = new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES);
intrinsicData = intrinsicData.withDefault(defaultedIndex);
final ISDACompliantCreditCurve[] creditCurveDefaulted = Arrays.copyOf(CREDIT_CURVES, indexSize);
for (int i = 0; i < defaultedIndex.length; ++i) {
defaulted.set(defaultedIndex[i]);
creditCurveDefaulted[defaultedIndex[i]] = null;
intrinsicDataRec = intrinsicDataRec.withDefault(defaultedIndex[i]);
}
final IntrinsicIndexDataBundle intrinsicDataDefaulted = new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, defaulted);
assertTrue(checkEqual(intrinsicData, intrinsicDataDefaulted, 1.e-13));
assertEquals(intrinsicData.getCreditCurves(), intrinsicDataDefaulted.getCreditCurves());
assertTrue(checkEqual(intrinsicData, intrinsicDataRec, 1.e-13));
assertEquals(intrinsicData.getCreditCurves(), intrinsicDataRec.getCreditCurves());
final IntrinsicIndexDataBundle intrinsicDataWithCurves = intrinsicDataDefaulted.withCreditCurves(creditCurveDefaulted);
assertTrue(checkEqual(intrinsicData, intrinsicDataWithCurves, 1.e-13));
/*
* throw exception
*/
BitSet longSet = new BitSet(indexSize + 10);
longSet.set(indexSize + 3);
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, longSet);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of defaulted (" + (indexSize + 3 + 1) + ") is greater than index size (" + indexSize + ")", e.getMessage());
}
final double[] shortRR = Arrays.copyOf(RECOVERY_RATES, indexSize - 1);
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, shortRR);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of recoveryRates (" + (indexSize - 1) + ") does not match index size (" + indexSize + ")", e.getMessage());
}
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, shortRR, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of recoveryRates (" + (indexSize - 1) + ") does not match index size (" + indexSize + ")", e.getMessage());
}
creditCurveDefaulted[12] = null;
try {
intrinsicData.withCreditCurves(creditCurveDefaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("null curve at index 12, but this is not listed as defaulted", e.getMessage());
}
try {
new IntrinsicIndexDataBundle(creditCurveDefaulted, RECOVERY_RATES, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Null credit curve, but not set as defaulted in alive list. Index is " + 12, e.getMessage());
}
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[33] + " given at index " + 33, e.getMessage());
}
rrCp[33] = -2.4;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[33] + " given at index " + 33, e.getMessage());
}
try {
intrinsicData.withDefault(14);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Index " + 14 + " is already defaulted", e.getMessage());
}
try {
intrinsicData.withDefault(new int[] {11, 15, 22 });
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Index " + 22 + " is already defaulted", e.getMessage());
}
final ISDACompliantCreditCurve[] shortCC = Arrays.copyOf(CREDIT_CURVES, indexSize - 2);
try {
intrinsicDataNoDefault.withCreditCurves(shortCC);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("wrong number of curves. Require " + indexSize + ", but " + (indexSize - 2) + " given", e.getMessage());
}
try {
intrinsicDataNoDefault.withDefault(indexSize);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("index (" + indexSize + ") should be smaller than index size (" + indexSize + ")", e.getMessage());
}
try {
intrinsicDataNoDefault.withDefault(1, 4, indexSize + 2);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("index (" + (indexSize + 2) + ") should be smaller than index size (" + indexSize + ")", e.getMessage());
}
}
@SuppressWarnings("unused")
@Test
public void differentWeightTest() {
final int indexSize = CREDIT_CURVES.length;
final double[] weights = new double[indexSize];
Arrays.fill(weights, 1.0 / indexSize);
final double[] diff = new double[] {weights[0] * 0.1, weights[0] * 0.2, weights[0] * 0.3, weights[0] * 0.4, weights[0] * 0.5 };
weights[2] += diff[0];
weights[5] -= diff[0];
weights[13] += diff[1];
weights[22] -= diff[1];
weights[42] += diff[2];
weights[55] -= diff[2];
weights[58] += diff[3];
weights[78] -= diff[3];
weights[79] += diff[4];
weights[82] -= diff[4];
final IntrinsicIndexDataBundle bundle = new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights);
for (int i = 0; i < indexSize; ++i) {
assertEquals(weights[i], bundle.getWeight(i));
}
final int[] defaultedIndex = new int[] {15, 34, 22, 65 };
final IntrinsicIndexDataBundle bundleDefaulted = bundle.withDefault(defaultedIndex);
IntrinsicIndexDataBundle bundleToDefaulted = new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights);
BitSet defaulted = new BitSet(indexSize);
final IntrinsicIndexDataBundle bundleNoDefault = new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights, defaulted);
assertTrue(checkEqual(bundle, bundleNoDefault, 1.e-13));
final ISDACompliantCreditCurve[] ccCopy = Arrays.copyOf(CREDIT_CURVES, indexSize);
ccCopy[defaultedIndex[1]] = null;//null is allowed if defaulted
for (int i = 0; i < defaultedIndex.length; ++i) {
defaulted.set(defaultedIndex[i]);
bundleToDefaulted = bundleToDefaulted.withDefault(defaultedIndex[i]);
}
final IntrinsicIndexDataBundle bundleWithBitSet = new IntrinsicIndexDataBundle(ccCopy, RECOVERY_RATES, weights, defaulted);
assertTrue(checkEqual(bundleWithBitSet, bundleDefaulted, 1.e-13));
assertTrue(checkEqual(bundleWithBitSet, bundleToDefaulted, 1.e-13));
/*
* throw exception
*/
BitSet longSet = new BitSet(indexSize + 10);
longSet.set(indexSize + 4);
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights, longSet);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of defaulted (" + (indexSize + 4 + 1) + ") is greater than index size (" + indexSize + ")", e.getMessage());
}
final double[] shortRR = Arrays.copyOf(RECOVERY_RATES, indexSize - 1);
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, shortRR, weights, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of recoveryRates (" + (indexSize - 1) + ") does not match index size (" + indexSize + ")", e.getMessage());
}
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, shortRR, weights);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of recoveryRates (" + (indexSize - 1) + ") does not match index size (" + indexSize + ")", e.getMessage());
}
final double[] longWeights = new double[indexSize + 1];
Arrays.fill(longWeights, 1. / (indexSize + 1.));
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, longWeights);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of weights (" + (indexSize + 1) + ") does not match index size (" + indexSize + ")", e.getMessage());
}
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, longWeights, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Length of weights (" + (indexSize + 1) + ") does not match index size (" + indexSize + ")", e.getMessage());
}
weights[14] *= -1.;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("weights must be positive. Value of " + weights[14] + " given at index " + 14, e.getMessage());
}
weights[14] *= -1.;
weights[14] *= 10.;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights);
throw new RuntimeException();
} catch (final Exception e) {
assertTrue(e instanceof IllegalArgumentException);
}
weights[14] *= 0.1;
final double[] rrCp = Arrays.copyOf(RECOVERY_RATES, indexSize);
rrCp[24] *= -1.;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp, weights);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[24] + " given at index " + 24, e.getMessage());
}
rrCp[24] = 3.;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp, weights);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[24] + " given at index " + 24, e.getMessage());
}
final ISDACompliantCreditCurve[] cc = Arrays.copyOf(CREDIT_CURVES, indexSize);
cc[23] = null;
try {
new IntrinsicIndexDataBundle(cc, RECOVERY_RATES, weights, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("Null credit curve, but not set as defaulted in alive list. Index is " + 23, e.getMessage());
}
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp, weights, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[24] + " given at index " + 24, e.getMessage());
}
rrCp[24] = -3.;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, rrCp, weights, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("recovery rate must be between 0 and 1.Value of " + rrCp[24] + " given at index " + 24, e.getMessage());
}
weights[56] *= -1.;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertEquals("weights must be positive. Value of " + weights[56] + " given at index " + 56, e.getMessage());
}
weights[56] = 5.1;
try {
new IntrinsicIndexDataBundle(CREDIT_CURVES, RECOVERY_RATES, weights, defaulted);
throw new RuntimeException();
} catch (final Exception e) {
assertTrue(e instanceof IllegalArgumentException);
}
}
@Test
private boolean checkEqual(final IntrinsicIndexDataBundle bundle1, final IntrinsicIndexDataBundle bundle2, final double tol) {
if (bundle1.getIndexSize() != bundle2.getIndexSize()) {
return false;
}
if (bundle1.getNumOfDefaults() != bundle2.getNumOfDefaults()) {
return false;
}
if (Math.abs(bundle1.getIndexFactor() - bundle2.getIndexFactor()) > tol) {
return false;
}
final int size = bundle1.getIndexSize();
for (int i = 0; i < size; ++i) {
if (Math.abs(bundle1.getWeight(i) - bundle2.getWeight(i)) > tol) {
return false;
}
if (Math.abs(bundle1.getLGD(i) - bundle2.getLGD(i)) > tol) {
return false;
}
if (bundle1.isDefaulted(i) != bundle2.isDefaulted(i)) {
return false;
}
/*
* Null is allowed if defaulted
*/
if (!bundle1.isDefaulted(i)) {
if (!(bundle1.getCreditCurve(i).equals(bundle2.getCreditCurve(i)))) {
return false;
}
}
}
return true;
}
} |
package pfg.kraken;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import pfg.kraken.astar.DirectionStrategy;
import pfg.kraken.astar.TentacularAStar;
import pfg.kraken.exceptions.NoPathException;
import pfg.kraken.exceptions.NotInitializedPathfindingException;
import pfg.kraken.obstacles.Obstacle;
import pfg.kraken.obstacles.RectangularObstacle;
import pfg.kraken.robot.Cinematique;
import pfg.kraken.utils.XYO;
import pfg.kraken.utils.XY_RW;
/**
* Various test on search
* @author pf
*
*/
public class Test_Search extends JUnit_Test
{
private TentacularAStar pathfinding;
@Before
public void setUp() throws Exception
{
List<Obstacle> obs = new ArrayList<Obstacle>();
obs.add(new RectangularObstacle(new XY_RW(50,1050), 500, 500));
obs.add(new RectangularObstacle(new XY_RW(400,200), 200, 200));
obs.add(new RectangularObstacle(new XY_RW(-1000,1050), 200, 200));
obs.add(new RectangularObstacle(new XY_RW(100,410), 200, 200));
obs.add(new RectangularObstacle(new XY_RW(-600,300), 200, 200));
obs.add(new RectangularObstacle(new XY_RW(-1000,1900), 200, 200));
super.setUpWith(obs, "default", "graphic");
pathfinding = injector.getService(TentacularAStar.class);
}
@Test(expected=NoPathException.class)
public void test_out_of_bounds() throws Exception
{
pathfinding.initializeNewSearch(new Cinematique(new XYO(0, 200, 0)), new Cinematique(new XYO(10000, 10000, 0)), DirectionStrategy.FASTEST, "XY", null, 3000);
}
@Test(expected=NoPathException.class)
public void test_start_inside_obstacle() throws Exception
{
pathfinding.initializeNewSearch(new Cinematique(new XYO(50,1050, 0)), new Cinematique(new XYO(0, 200, 0)), DirectionStrategy.FASTEST, "XY", null, 3000);
}
@Test(expected=NoPathException.class)
public void test_finish_inside_obstacle() throws Exception
{
pathfinding.initializeNewSearch(new Cinematique(new XYO(0, 200, 0)), new Cinematique(new XYO(50,1050, 0)), DirectionStrategy.FASTEST, "XY", null, 3000);
}
@Test
public void test_exemple_1() throws Exception
{
pathfinding.initializeNewSearch(new Cinematique(new XYO(1000, 200, 0)), new Cinematique(new XYO(1000, 1000, 0)), DirectionStrategy.FASTEST, "XY", null, 3000);
pathfinding.searchWithoutReplanning();
}
@Test(expected=NotInitializedPathfindingException.class)
public void test_no_initialization() throws Exception
{
pathfinding.searchWithoutReplanning();
}
} |
package org.sagebionetworks.repo.manager.table;
import java.util.LinkedList;
import java.util.List;
import org.sagebionetworks.common.util.progress.ProgressCallback;
import org.sagebionetworks.common.util.progress.ProgressingCallable;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.table.TableUnavailableException;
import org.sagebionetworks.repo.model.table.TableUpdateRequest;
import org.sagebionetworks.repo.model.table.TableUpdateResponse;
import org.sagebionetworks.repo.model.table.TableUpdateTransactionRequest;
import org.sagebionetworks.repo.model.table.TableUpdateTransactionResponse;
import org.sagebionetworks.util.ValidateArgument;
import org.sagebionetworks.workers.util.aws.message.RecoverableMessageException;
import org.sagebionetworks.workers.util.semaphore.LockUnavilableException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
/**
 * Transaction manager for table-entity update transactions.
 * Flow: validate the request and caller access, acquire the table's exclusive lock,
 * validate each change (optionally against a temporary copy of the table index),
 * then apply all changes inside a single database transaction.
 */
public class TableEntityTransactionManager implements TableTransactionManager {
// Maximum wait for the table's exclusive lock: 5 minutes, in milliseconds.
private static final int EXCLUSIVE_LOCK_TIMEOUT_MS = 5*1000*60;
@Autowired
TableManagerSupport tableManagerSupport;
@Autowired
TransactionTemplate readCommitedTransactionTemplate;
@Autowired
TableEntityManager tableEntityManager;
@Autowired
TableIndexConnectionFactory tableIndexConnectionFactory;
/**
 * Entry point: validates arguments and write access, then runs the whole update
 * while holding the table's exclusive lock.
 *
 * @param progressCallback used to signal liveness during long-running work
 * @param userInfo the user making the update; must have write access to the table
 * @param request the transaction request (validated by TableTransactionUtils)
 * @return per-change responses for the transaction
 * @throws RecoverableMessageException propagated so the caller can retry later
 * @throws TableUnavailableException propagated when the table is not available
 */
@Override
public TableUpdateTransactionResponse updateTableWithTransaction(
final ProgressCallback<Void> progressCallback, final UserInfo userInfo,
final TableUpdateTransactionRequest request)
throws RecoverableMessageException, TableUnavailableException {
ValidateArgument.required(progressCallback, "callback");
ValidateArgument.required(userInfo, "userInfo");
TableTransactionUtils.validateRequest(request);
String tableId = request.getEntityId();
tableManagerSupport.validateTableWriteAccess(userInfo, tableId);
try {
return tableManagerSupport.tryRunWithTableExclusiveLock(progressCallback, tableId, EXCLUSIVE_LOCK_TIMEOUT_MS, new ProgressingCallable<TableUpdateTransactionResponse, Void>() {
@Override
public TableUpdateTransactionResponse call(ProgressCallback<Void> callback) throws Exception {
return updateTableWithTransactionWithExclusiveLock(callback, userInfo, request);
}
});
// The exception types below are re-thrown unchanged so callers/workers can react
// to them specifically; any other checked exception is wrapped as unchecked.
}catch (TableUnavailableException e) {
throw e;
}catch (LockUnavilableException e) {
throw e;
}catch (RecoverableMessageException e) {
throw e;
}catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
 * This method should only be called from while holding the lock on the table.
 * Validates the request outside any transaction, then applies it inside the primary
 * transaction.
 * @param callback progress callback forwarded to validation and execution
 * @param userInfo the user making the update
 * @param request the validated transaction request
 * @return per-change responses for the transaction
 */
TableUpdateTransactionResponse updateTableWithTransactionWithExclusiveLock(
final ProgressCallback<Void> callback, final UserInfo userInfo,
final TableUpdateTransactionRequest request) {
/*
 * Request validation can take a long time and may not involve the primary database.
 * Therefore the primary transaction is not started until after validation succeeds.
 * A transaction template is used to allow for finer control of the transaction boundary.
 */
validateUpdateRequests(callback, userInfo, request);
// the update is valid so the primary transaction can be started.
return readCommitedTransactionTemplate.execute(new TransactionCallback<TableUpdateTransactionResponse>() {
@Override
public TableUpdateTransactionResponse doInTransaction(
TransactionStatus status) {
return doIntransactionUpdateTable(status, callback, userInfo, request);
}
} );
}
/**
 * Validate the passed update request.
 * A temporary copy of the table index is created first when any change requires one
 * for validation; the copy is always deleted afterwards (finally block).
 * @param callback progress callback forwarded to each validation step
 * @param userInfo the user making the update
 * @param request the transaction to validate
 */
void validateUpdateRequests(ProgressCallback<Void> callback,
UserInfo userInfo, TableUpdateTransactionRequest request) {
// Determine if a temporary table is needed to validate any of the requests.
boolean isTemporaryTableNeeded = isTemporaryTableNeeded(callback, request);
// setup a temporary table if needed.
if(isTemporaryTableNeeded){
TableIndexManager indexManager = tableIndexConnectionFactory.connectToTableIndex(request.getEntityId());
indexManager.createTemporaryTableCopy(callback);
try{
// validate while the temp table exists.
validateEachUpdateRequest(callback, userInfo, request, indexManager);
}finally{
indexManager.deleteTemporaryTableCopy(callback);
}
}else{
// we do not need a temporary copy to validate this request.
validateEachUpdateRequest(callback, userInfo, request, null);
}
}
/**
 * Validate each update request in the transaction, in order.
 * @param callback progress is reported before each change is validated
 * @param userInfo the user making the update
 * @param request the transaction whose changes are validated
 * @param indexManager connection to the temporary table copy, or null when none is needed
 */
void validateEachUpdateRequest(ProgressCallback<Void> callback,
UserInfo userInfo, TableUpdateTransactionRequest request,
TableIndexManager indexManager) {
for(TableUpdateRequest change: request.getChanges()){
// progress before each check.
callback.progressMade(null);
tableEntityManager.validateUpdateRequest(callback, userInfo, change, indexManager);
}
}
/**
 * Is a temporary table needed to validate any of the changes for the given request.
 * Short-circuits on the first change that needs one.
 *
 * @param callback progress is reported before each change is inspected
 * @param request the transaction to inspect
 * @return true when at least one change requires a temporary table to validate
 */
boolean isTemporaryTableNeeded(ProgressCallback<Void> callback,
TableUpdateTransactionRequest request) {
for(TableUpdateRequest change: request.getChanges()){
// progress before each check.
callback.progressMade(null);
boolean tempNeeded = tableEntityManager.isTemporaryTableNeededToValidate(change);
if(tempNeeded){
return true;
}
}
return false;
}
/**
 * Called after the update has been validated and from within a transaction.
 * Applies each change in order and collects the per-change responses.
 *
 * @param status the surrounding Spring transaction status (unused here, kept for the callback signature)
 * @param callback progress is reported before each change is applied
 * @param userInfo the user making the update
 * @param request the validated transaction request
 * @return response holding one result per change, in request order
 */
TableUpdateTransactionResponse doIntransactionUpdateTable(TransactionStatus status,
ProgressCallback<Void> callback, UserInfo userInfo,
TableUpdateTransactionRequest request) {
// execute each request
List<TableUpdateResponse> results = new LinkedList<TableUpdateResponse>();
TableUpdateTransactionResponse response = new TableUpdateTransactionResponse();
response.setResults(results);
for(TableUpdateRequest change: request.getChanges()){
callback.progressMade(null);
TableUpdateResponse changeResponse = tableEntityManager.updateTable(callback, userInfo, change);
results.add(changeResponse);
}
return response;
}
}
package org.rstudio.studio.client.workbench.views.source.editors.codebrowser;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ResizeComposite;
import com.google.gwt.user.client.ui.Widget;
import org.rstudio.core.client.command.KeyboardShortcut;
import org.rstudio.core.client.regex.Match;
import org.rstudio.core.client.regex.Pattern;
import org.rstudio.core.client.regex.Pattern.ReplaceOperation;
import org.rstudio.core.client.widget.Toolbar;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.common.GlobalDisplay;
import org.rstudio.studio.client.common.GlobalProgressDelayer;
import org.rstudio.studio.client.common.SimpleRequestCallback;
import org.rstudio.studio.client.common.codetools.CodeToolsServerOperations;
import org.rstudio.studio.client.common.filetypes.FileTypeRegistry;
import org.rstudio.studio.client.common.filetypes.TextFileType;
import org.rstudio.studio.client.server.ServerError;
import org.rstudio.studio.client.server.ServerRequestCallback;
import org.rstudio.studio.client.server.Void;
import org.rstudio.studio.client.workbench.codesearch.model.SearchPathFunctionDefinition;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.views.console.shell.assist.CompletionManager;
import org.rstudio.studio.client.workbench.views.console.shell.editor.InputEditorLineWithCursorPosition;
import org.rstudio.studio.client.workbench.views.console.shell.editor.InputEditorUtil;
import org.rstudio.studio.client.workbench.views.source.PanelWithToolbar;
import org.rstudio.studio.client.workbench.views.source.editors.EditingTargetToolbar;
import org.rstudio.studio.client.workbench.views.source.editors.text.DocDisplay;
import org.rstudio.studio.client.workbench.views.source.events.CodeBrowserNavigationEvent;
import org.rstudio.studio.client.workbench.views.source.model.SourcePosition;
/**
 * Read-only code-browser widget: displays the body of a function found on the R
 * search path, with a toolbar context label, F1 (help at cursor) / F2 (go to
 * definition) key handling, and leading-space-to-tab normalization of deparsed code.
 */
public class CodeBrowserEditingTargetWidget extends ResizeComposite
implements CodeBrowserEditingTarget.Display
{
public CodeBrowserEditingTargetWidget(Commands commands,
final GlobalDisplay globalDisplay,
final EventBus eventBus,
final CodeToolsServerOperations server,
final DocDisplay docDisplay)
{
commands_ = commands;
docDisplay_ = docDisplay;
panel_ = new PanelWithToolbar(createToolbar(),
docDisplay_.asWidget());
panel_.setSize("100%", "100%");
docDisplay_.setReadOnly(true);
// setup custom completion manager for executing F1 and F2 actions
docDisplay_.setFileType(FileTypeRegistry.R, new CompletionManager() {
@Override
public boolean previewKeyDown(NativeEvent event)
{
int modifier = KeyboardShortcut.getModifierValue(event);
if (modifier == KeyboardShortcut.NONE)
{
// key code 112 = F1: request help for the symbol at the cursor
if (event.getKeyCode() == 112)
{
InputEditorLineWithCursorPosition linePos =
InputEditorUtil.getLineWithCursorPosition(docDisplay);
server.getHelpAtCursor(
linePos.getLine(), linePos.getPosition(),
new SimpleRequestCallback<Void>("Help"));
}
// key code 113 = F2: navigate to the function definition
else if (event.getKeyCode() == 113)
{
goToFunctionDefinition();
}
}
// never swallow the key event
return false;
}
@Override
public void goToFunctionDefinition()
{
// determine current line and cursor position
InputEditorLineWithCursorPosition lineWithPos =
InputEditorUtil.getLineWithCursorPosition(docDisplay);
// delayed progress indicator
final GlobalProgressDelayer progress = new GlobalProgressDelayer(
globalDisplay, 1000, "Searching for function definition...");
server.findFunctionInSearchPath(
lineWithPos.getLine(),
lineWithPos.getPosition(),
currentFunctionNamespace_,
new ServerRequestCallback<SearchPathFunctionDefinition>() {
@Override
public void onResponseReceived(SearchPathFunctionDefinition def)
{
// dismiss progress
progress.dismiss();
// if we got a hit
if (def.getName() != null)
{
// try to search for the function locally
SourcePosition position =
docDisplay.findFunctionPositionFromCursor(
def.getName());
if (position != null)
{
docDisplay.navigateToPosition(position, true);
}
// not in the current document: fire a navigation event so the
// browser loads the definition from its namespace
else if (def.getNamespace() != null)
{
docDisplay.recordCurrentNavigationPosition();
eventBus.fireEvent(new CodeBrowserNavigationEvent(
def));
}
}
}
@Override
public void onError(ServerError error)
{
progress.dismiss();
globalDisplay.showErrorMessage("Error Searching for Function",
error.getUserMessage());
}
});
}
@Override
public boolean previewKeyPress(char charCode)
{
// no completion popup in the read-only browser
return false;
}
@Override
public void close()
{
// nothing to clean up
}
});
initWidget(panel_);
}
@Override
public Widget asWidget()
{
return this;
}
/** Re-applies the file type so the editor picks up the matching mode. */
@Override
public void adaptToFileType(TextFileType fileType)
{
docDisplay_.setFileType(fileType, true);
}
@Override
public void setFontSize(double size)
{
docDisplay_.setFontSize(size);
}
@Override
public void onActivate()
{
docDisplay_.onActivate();
}
/**
 * Shows the given function: remembers its namespace (used by later F2 lookups),
 * loads its formatted code read-only, and updates the toolbar context label.
 */
@Override
public void showFunction(SearchPathFunctionDefinition functionDef)
{
currentFunctionNamespace_ = functionDef.getNamespace();
docDisplay_.setCode(formatCode(functionDef), false);
contextLabel_.setCurrentFunction(functionDef);
}
@Override
public void scrollToLeft()
{
// the 100ms delay presumably lets the editor finish layout first -- TODO confirm
new Timer() {
@Override
public void run()
{
docDisplay_.scrollToX(0);
}
}.schedule(100);
}
/** Builds the toolbar: context label on the left, "(Read-only)" hint on the right. */
private Toolbar createToolbar()
{
Toolbar toolbar = new EditingTargetToolbar(commands_);
toolbar.addLeftWidget(
contextLabel_ = new CodeBrowserContextLabel(RES.styles()));
Label readOnlyLabel = new Label("(Read-only)");
readOnlyLabel.addStyleName(RES.styles().readOnly());
toolbar.addRightWidget(readOnlyLabel);
return toolbar;
}
/**
 * Converts the leading-space indentation of deparsed R code to tabs.
 * Code that carries a source reference is returned untouched.
 */
private String formatCode(SearchPathFunctionDefinition functionDef)
{
// deal with null
String code = functionDef.getCode();
if (code == null)
return "";
// if this is from a source ref then leave it alone
if (functionDef.isCodeFromSrcAttrib())
return code;
// create regex pattern used to find leading space
// NOTE: the 4 spaces comes from the implementation of printtab2buff
// in deparse.c -- it is hard-coded to use 4 spaces for the first 4
// levels of indentation and then 2 spaces for subsequent levels.
Pattern pattern = Pattern.create("^( ){1,4}");
// each leading space in the match becomes a tab
code = pattern.replaceAll(code, new ReplaceOperation()
{
@Override
public String replace(Match m)
{
return m.getValue().replace(" ", "\t");
}
});
// beyond four tabs, the remaining 2-space levels also become tabs
Pattern pattern2 = Pattern.create("^\t{4}( )+");
code = pattern2.replaceAll(code, new ReplaceOperation()
{
@Override
public String replace(Match m)
{
return m.getValue().replace(" ", "\t");
}
});
return code;
}
public static void ensureStylesInjected()
{
RES.styles().ensureInjected();
}
interface Resources extends ClientBundle
{
@Source("CodeBrowserEditingTargetWidget.css")
Styles styles();
}
interface Styles extends CssResource
{
String functionName();
String functionNamespace();
String readOnly();
}
static Resources RES = GWT.create(Resources.class);
private final PanelWithToolbar panel_;
private CodeBrowserContextLabel contextLabel_;
private final Commands commands_;
private final DocDisplay docDisplay_;
// namespace of the function currently shown; scopes F2 definition lookups
private String currentFunctionNamespace_ = null;
}
package net.sf.mzmine.modules.rawdatamethods.filtering.baselinecorrection.correctors;
import javax.annotation.Nonnull;
import net.sf.mzmine.datamodel.RawDataFile;
import net.sf.mzmine.modules.rawdatamethods.filtering.baselinecorrection.BaselineCorrector;
import net.sf.mzmine.parameters.ParameterSet;
import net.sf.mzmine.util.R.RSessionWrapper;
import net.sf.mzmine.util.R.RSessionWrapperException;
/**
 * Baseline corrector delegating to the R package "PROcess" (bslnoff): builds a
 * two-column (index, intensity) matrix from the chromatogram and subtracts the
 * baseline that bslnoff returns.
 */
public class LocMinLoessCorrector extends BaselineCorrector {
// Lower bound enforced on the "bw" parameter for non-"approx" methods (see computeBaseline).
private static final double BW_MIN_VAL = 0.001d;
/** R packages that must be available in the R session before computeBaseline runs. */
@Override
public String[] getRequiredRPackages() {
return new String[] { /* "rJava", "Rserve", */"PROcess" };
}
/**
 * Computes the baseline of the given chromatogram inside the provided R session.
 *
 * @param rSession open R session used for all evaluations
 * @param origDataFile source raw data file (unused here, part of the corrector contract)
 * @param chromatogram intensity values, one per scan
 * @param parameters the LocMinLoessCorrectorParameters values to use
 * @return baseline values, same length/order as the input chromatogram
 * @throws RSessionWrapperException if any R assignment/evaluation fails
 */
@Override
public double[] computeBaseline(final RSessionWrapper rSession,
final RawDataFile origDataFile, double[] chromatogram,
ParameterSet parameters) throws RSessionWrapperException {
// Local Minima parameters.
String method = parameters.getParameter(
LocMinLoessCorrectorParameters.METHOD).getValue();
double bw = parameters.getParameter(LocMinLoessCorrectorParameters.BW)
.getValue();
int breaks = parameters.getParameter(
LocMinLoessCorrectorParameters.BREAKS).getValue();
int breaks_width = parameters.getParameter(
LocMinLoessCorrectorParameters.BREAK_WIDTH).getValue();
double qntl = parameters.getParameter(
LocMinLoessCorrectorParameters.QNTL).getValue();
final double[] baseline;
// Set chromatogram.
rSession.assign("chromatogram", chromatogram);
// Transform chromatogram.
// Build a 2-column matrix "mat": column 1 = scan index 1..n, column 2 = intensity.
int mini = 1;
int maxi = chromatogram.length;
rSession.eval("mat <- cbind(matrix(seq(" + ((double) mini) + ", "
+ ((double) maxi) + ", by = 1.0), ncol=1), "
+ "matrix(chromatogram[" + mini + ":" + maxi + "], ncol=1))");
// Breaks
// Prefer deriving the break count from the break width when one was given (> 0);
// otherwise use the explicit break-count parameter.
rSession.eval("breaks <- "
+ ((breaks_width > 0) ? (int) Math.round((double) (maxi - mini)
/ (double) breaks_width) : breaks));
// Calculate baseline.
// bw is clamped up to BW_MIN_VAL unless method is "approx" (which takes bw as-is).
rSession.eval("bseoff <- bslnoff(mat, method=\"" + method + "\", bw="
+ ((method.equals("approx") || bw >= BW_MIN_VAL) ? bw : BW_MIN_VAL) + ", breaks=breaks, qntl=" + qntl + ")");
// bslnoff returns the baseline-corrected signal; recover the baseline by subtraction.
rSession.eval("baseline <- mat[,2] - bseoff[,2]");
baseline = (double[]) rSession.collect("baseline");
return baseline;
}
/** Human-readable corrector name shown in the module UI. */
@Override
public @Nonnull
String getName() {
return "Local minima + LOESS baseline corrector";
}
/** Parameter set class paired with this corrector. */
@Override
public @Nonnull
Class<? extends ParameterSet> getParameterSetClass() {
return LocMinLoessCorrectorParameters.class;
}
}
package org.ow2.proactive.resourcemanager.frontend.topology.descriptor;
import java.util.List;
import org.objectweb.proactive.annotation.PublicAPI;
import org.objectweb.proactive.core.node.Node;
import org.ow2.proactive.resourcemanager.frontend.topology.DistanceFunction;
@PublicAPI
public class BestProximityDescriptor extends TopologyDescriptor {
// Pivot nodes: the nodes from which proximity should be best (null when not specified).
protected List<Node> pivot = null;
// Distance function used for clustering; may be null (getDistanceFunction then falls back).
protected DistanceFunction function;
/**
 * AVG - the mean distance between elements of each cluster (also called average
 * linkage clustering): the similarity of two clusters is taken as the average of
 * the two distances. (The original comment described this as a "complete-link"
 * criterion, which is the MAX function below.)
 * Returns -1 (not connected) when either input distance is negative.
 */
public final static DistanceFunction AVG = new DistanceFunction() {
public long distance(long d1, long d2) {
// not connected
if (d1 < 0 || d2 < 0)
return -1;
return (d1 + d2) / 2;
}
};
/**
 * MAX - the maximum distance between elements of each cluster (also called complete linkage clustering)
 * The similarity of two clusters is the similarity of their most dissimilar members.
 * This complete-link merge criterion is non-local; the entire structure of the
 * clustering can influence merge decisions.
 * Returns -1 (not connected) when either input distance is negative.
 */
public final static DistanceFunction MAX = new DistanceFunction() {
public long distance(long d1, long d2) {
// not connected
if (d1 < 0 || d2 < 0)
return -1;
return Math.max(d1, d2);
}
};
/**
* MIN - the minimum distance between elements of each cluster (also called single-linkage clustering)
* The similarity of two clusters is the similarity of their most similar members.
* This single-link merge criterion is local. We pay attention solely to the area where
* the two clusters come closest to each other.
*/
public final static DistanceFunction MIN = new DistanceFunction() {
public long distance(long d1, long d2) {
return Math.min(d1, d2);
}
};
/**
* Constructs new instance of the class.
* In this case the function for clustering is BestProximityDescriptor.MAX, pivot is empty.
*/
public BestProximityDescriptor() {
this(MAX, null);
}
/**
* Constructs new instance of the class with specified distance function and empty pivot.
*
* @param function - the distance function used for clustering
*/
public BestProximityDescriptor(DistanceFunction function) {
this(function, null);
}
/**
* Constructs new instance of the class with specified pivot.
* The function for clustering is BestProximityDescriptor.MAX.
*
* @param pivot - the set of nodes from which the proximity should be the best.
*/
public BestProximityDescriptor(List<Node> pivot) {
this(MAX, pivot);
}
/**
* Constructs new instance of the class with specified distance function and pivot.
*
* @param function - the distance function used for clustering
* @param pivot - the set of nodes from which the proximity should be the best.
*/
public BestProximityDescriptor(DistanceFunction function, List<Node> pivot) {
super(true);
this.function = function;
this.pivot = pivot;
}
/**
* Gets pivot nodes
* @return pivot nodes
*/
public List<Node> getPivot() {
return pivot;
}
/**
* Gets the distance function. AVG by default.
* @return the distance function
*/
public DistanceFunction getDistanceFunction() {
return function == null ? MAX : function;
}
} |
package com.splicemachine.derby.impl.sql.execute.actions;
import com.splicemachine.derby.test.framework.SpliceSchemaWatcher;
import com.splicemachine.derby.test.framework.SpliceTableWatcher;
import com.splicemachine.derby.test.framework.SpliceWatcher;
import org.junit.*;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
public class ConstraintConstantOperationTest {
private static final String CLASS_NAME = ConstraintConstantOperationTest.class.getSimpleName().toUpperCase();
protected static SpliceWatcher spliceClassWatcher = new SpliceWatcher();
protected static SpliceSchemaWatcher tableSchema = new SpliceSchemaWatcher(CLASS_NAME);
// primary key constraint
// column-level check constraint
private static final String EMP_PRIV_TABLE_NAME = "EmpPriv";
private static final String EMP_PRIV_TABLE_DEF =
"(empId int not null CONSTRAINT EMP_ID_PK PRIMARY KEY, dob varchar(10) not null, ssn varchar(12) not null, SALARY DECIMAL(9,2) CONSTRAINT SAL_CK CHECK (SALARY >= 10000))";
protected static SpliceTableWatcher empPrivTable = new SpliceTableWatcher(EMP_PRIV_TABLE_NAME,CLASS_NAME, EMP_PRIV_TABLE_DEF);
// foreign key constraint
private static final String EMP_NAME_TABLE_NAME = "EmpName";
private static final String EMP_NAME_TABLE_DEF = "(empId int not null CONSTRAINT EMP_ID_FK REFERENCES "+
CLASS_NAME+"."+EMP_PRIV_TABLE_NAME+" ON UPDATE RESTRICT, fname varchar(8) not null, lname varchar(10) not null)";
protected static SpliceTableWatcher empNameTable = new SpliceTableWatcher(EMP_NAME_TABLE_NAME,CLASS_NAME, EMP_NAME_TABLE_DEF);
// table-level check constraint
private static final String TASK_TABLE_NAME = "Tasks";
private static final String TASK_TABLE_DEF =
"(TaskId INT UNIQUE not null, empId int not null, StartedAt INT not null, FinishedAt INT not null, CONSTRAINT CHK_StartedAt_Before_FinishedAt CHECK (StartedAt < FinishedAt))";
protected static SpliceTableWatcher taskTable = new SpliceTableWatcher(TASK_TABLE_NAME,CLASS_NAME, TASK_TABLE_DEF);
@ClassRule
public static TestRule chain = RuleChain.outerRule(spliceClassWatcher)
.around(tableSchema)
.around(empPrivTable)
.around(empNameTable)
.around(taskTable);
@Rule
public SpliceWatcher methodWatcher = new SpliceWatcher();
/**
* Test we get no exception when we insert following defined constraints.
* @throws Exception
*/
@Test
public void testGoodInsertConstraint() throws Exception {
String query = String.format("select * from %s.%s", tableSchema.schemaName, EMP_NAME_TABLE_NAME);
Connection connection = methodWatcher.createConnection();
connection.setAutoCommit(false);
Statement statement = connection.createStatement();
// insert good data
statement.execute(
String.format("insert into %s.%s (EmpId, dob, ssn, salary) values (101, '04/08', '999-22-1234', 10001)",
tableSchema.schemaName, EMP_PRIV_TABLE_NAME));
connection.commit();
// insert good data
statement.execute(
String.format("insert into %s.%s (EmpId, fname, lname) values (101, 'Jeff', 'Cunningham')",
tableSchema.schemaName, EMP_NAME_TABLE_NAME));
connection.commit();
ResultSet resultSet = connection.createStatement().executeQuery(query);
Assert.assertTrue("Connection should see its own writes",resultSet.next());
connection.commit();
}
/**
* Test primary key constraint - we can't add row to a table where primary key already exist.
* @throws Exception
*/
@Test
public void testBadInsertPrimaryKeyConstraint() throws Exception {
Connection connection = methodWatcher.createConnection();
connection.setAutoCommit(false);
Statement statement = connection.createStatement();
// insert good data
statement.execute(
String.format("insert into %s.%s (EmpId, dob, ssn, salary) values (102, '02/14', '444-33-4321', 10001)",
tableSchema.schemaName, EMP_PRIV_TABLE_NAME));
connection.commit();
// insert bad row - no 103 empID in referenced table where FK constraint defined
try {
statement.execute(
String.format("insert into %s.%s (EmpId, dob, ssn, salary) values (102, '03/14', '444-33-1212', 10001)",
tableSchema.schemaName, EMP_PRIV_TABLE_NAME));
Assert.fail("Expected exception inserting row with FK constraint violation.");
} catch (SQLException e) {
// expected
}
}
/**
* Test foreign key constraint - we can't add row to a table where foreign key DNE reference.
* @throws Exception
*/
@Test
@Ignore("FK constraint not yet implemented")
public void testBadInsertForeignKeyConstraint() throws Exception {
Connection connection = methodWatcher.createConnection();
connection.setAutoCommit(false);
Statement statement = connection.createStatement();
// insert good data
statement.execute(
String.format("insert into %s.%s (EmpId, dob, ssn, salary) values (102, '02/14', '444-33-4321', 10001)",
tableSchema.schemaName, EMP_PRIV_TABLE_NAME));
connection.commit();
// insert bad row - no 103 empID in referenced table where FK constraint defined
try {
statement.execute(
String.format("insert into %s.%s (EmpId, fname, lname) values (103, 'Bo', 'Diddly')",
tableSchema.schemaName, EMP_NAME_TABLE_NAME));
Assert.fail("Expected exception inserting row with FK constraint violation.");
} catch (SQLException e) {
// expected
}
}
/**
* Test we can add a foreign key constraint to a table.
* @throws Exception
*/
@Test
@Ignore("FK constraint not yet implemented")
public void testAddForeignKeyConstraint() throws Exception {
methodWatcher.getStatement().execute(String.format("alter table %s.%s add foreign key (empId) references %s.%s (empId)",
tableSchema.schemaName,
TASK_TABLE_NAME,
tableSchema.schemaName,
EMP_PRIV_TABLE_NAME));
}
/**
* Test we get an exception when violating a unique constraint - insert duplicate task ID.
* @throws Exception
*/
@Test
public void testUniqueConstraint() throws Exception {
Statement statement = methodWatcher.createConnection().createStatement();
// insert good data
statement.execute(
String.format("insert into %s.%s (TaskId, empId, StartedAt, FinishedAt) values (%d, %d,%d,%d)",
tableSchema.schemaName, TASK_TABLE_NAME, 1246, 101, 0600, 0700));
// insert bad row - non-unique task ID
try {
statement.execute(
String.format("insert into %s.%s (TaskId, empId, StartedAt, FinishedAt) values (%d, %d,%d,%d)",
tableSchema.schemaName, TASK_TABLE_NAME, 1246, 102, 0201, 0300));
Assert.fail("Expected exception inserting non-unique value on unique constrained col");
} catch (SQLException e) {
// expected
}
}
/**
* Test we insert good row on constrained table.
* @throws Exception
*/
@Test
public void testInsertGoodInsertTableConstraint() throws Exception {
String query = String.format("select * from %s.%s", tableSchema.schemaName, TASK_TABLE_NAME);
Connection connection = methodWatcher.createConnection();
connection.setAutoCommit(false);
Statement statement = connection.createStatement();
// insert good data
statement.execute(
String.format("insert into %s.%s (TaskId, empId, StartedAt, FinishedAt) values (%d, %d,%d,%d)",
tableSchema.schemaName, TASK_TABLE_NAME, 1244, 101, 0600, 0700));
ResultSet resultSet = statement.executeQuery(query);
Assert.assertTrue("Connection should see its own writes",resultSet.next());
connection.commit();
}
/**
* Test we get an exception when violating a check constraint - start time after finish.
* @throws Exception
*/
@Test
@Ignore("Check Constraints not yet implemented.")
public void testInsertBadRowIntoTableWithConstraint() throws Exception {
Connection connection = methodWatcher.createConnection();
connection.setAutoCommit(false);
Statement statement = connection.createStatement();
// insert bad row - start time after finished time
try {
statement.execute(
String.format("insert into %s.%s (TaskId, empId, StartedAt, FinishedAt) values (%d,%d,%d,%d)",
tableSchema.schemaName, TASK_TABLE_NAME, 1245, 101, 0700, 0600));
Assert.fail("Expected exception inserting check constraint violation.");
} catch (SQLException e) {
// expected
}
connection.commit();
}
} |
package org.jboss.hal.testsuite.test.configuration.management.ssl;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeoutException;
import org.apache.commons.io.FileUtils;
import org.jboss.arquillian.core.api.annotation.Inject;
import org.jboss.arquillian.graphene.page.Page;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.hal.resources.Ids;
import org.jboss.hal.testsuite.Console;
import org.jboss.hal.testsuite.Random;
import org.jboss.hal.testsuite.creaper.ManagementClientProvider;
import org.jboss.hal.testsuite.creaper.ResourceVerifier;
import org.jboss.hal.testsuite.fragment.FormFragment;
import org.jboss.hal.testsuite.fragment.ssl.EnableSslWizard;
import org.jboss.hal.testsuite.page.runtime.StandaloneServerPage;
import org.jboss.hal.testsuite.util.Library;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.extras.creaper.commands.foundation.online.SnapshotBackup;
import org.wildfly.extras.creaper.core.CommandFailedException;
import org.wildfly.extras.creaper.core.online.ModelNodeResult;
import org.wildfly.extras.creaper.core.online.OnlineManagementClient;
import org.wildfly.extras.creaper.core.online.SslOptions;
import org.wildfly.extras.creaper.core.online.operations.Address;
import org.wildfly.extras.creaper.core.online.operations.Batch;
import org.wildfly.extras.creaper.core.online.operations.Operations;
import org.wildfly.extras.creaper.core.online.operations.Values;
import static org.jboss.hal.dmr.ModelDescriptionConstants.*;
import static org.jboss.hal.testsuite.test.configuration.elytron.ElytronFixtures.keyManagerAddress;
import static org.jboss.hal.testsuite.test.configuration.elytron.ElytronFixtures.keyStoreAddress;
import static org.jboss.hal.testsuite.test.configuration.elytron.ElytronFixtures.serverSslContextAddress;
import static org.jboss.hal.testsuite.test.configuration.elytron.ElytronFixtures.trustManagerAddress;
import static org.jboss.hal.testsuite.test.configuration.management.ssl.KeyEntryType.PRIVATE_KEY_ENTRY;
import static org.jboss.hal.testsuite.test.configuration.management.ssl.KeyEntryType.TRUSTED_CERTIFICATE_ENTRY;
import static org.jboss.hal.testsuite.test.configuration.management.ssl.SslFixtures.*;
@RunWith(Arquillian.class)
public class HttpManagementInterfaceTest {
// Names of key-store/trust-store files created by the tests; deleted again in cleanUp().
private static final List<String> FILE_NAMES_TO_BE_DELETED = new ArrayList<>();
// Management-model address of the HTTP management interface under test.
static final Address HTTP_INTERFACE_ADDRESS = Address.coreService(MANAGEMENT).and(MANAGEMENT_INTERFACE, HTTP_INTERFACE);
// NOTE(review): client/ops/sslOps are presumably initialized by changeClientToNonSSL()
// called from setUp() — confirm, as no assignment is visible in this class body.
private static OnlineManagementClient client;
private static Operations ops;
private static SslOperations sslOps;
// Directory containing the generated key-store files (obtained from sslOps in setUp()).
private static File keyStoresDirectory;
// Per-test configuration snapshot: taken in backup(), re-applied in restoreBackup().
private SnapshotBackup snapshot = new SnapshotBackup();
@Inject
private Console console;
@Page
private StandaloneServerPage page;
@BeforeClass
public static void setUp() throws IOException {
    // Switch the management client to plain (non-SSL) mode before any test runs;
    // NOTE(review): this call presumably also initializes 'client', 'ops' and 'sslOps' — confirm.
    changeClientToNonSSL();
    // Remember where generated key-store files live so cleanUp() can delete them.
    keyStoresDirectory = sslOps.getKeyStoresDirectory();
}
@AfterClass
public static void cleanUp() throws IOException {
    // Best-effort removal of every key-store/trust-store file the tests created;
    // FileUtils.deleteQuietly never throws, so one failure cannot abort the loop.
    // (Iterable.forEach replaces the redundant .stream().forEach of the original.)
    FILE_NAMES_TO_BE_DELETED.forEach(fileName -> FileUtils.deleteQuietly(new File(keyStoresDirectory, fileName)));
    client.close();
}
@Before
public void backup() throws CommandFailedException, IOException {
    // Snapshot the server configuration so restoreBackup() can undo whatever the test changed.
    client.apply(snapshot.backup());
}
@After
public void restoreBackup() throws IOException, InterruptedException, TimeoutException, CommandFailedException {
    // Re-apply the configuration snapshot taken in backup(), undoing the test's changes.
    client.apply(snapshot.restore());
}
/**
 * Testing configuration of http management interface with
 * <ul>
 * <li>just one-way server authentication</li>
 * <li>server certificate key store to be created</li>
 * <li>server self-signed certificate to be generated</li>
 * </ul>
 */
@Test
public void enableOneWayGenerated() throws Exception {
    // Random, unique names for every Elytron resource the wizard will create.
    String
    keyStoreNameValue = Ids.build(KEY_STORE, NAME, Random.name()),
    keyStorePasswordValue = Ids.build(KEY_STORE, PASS, Random.name()),
    keyStorePathValue = Ids.build(KEY_STORE, PATH, Random.name()),
    keyAliasValue = Ids.build(KEY_ALIAS, Random.name()),
    keyManagerValue = Ids.build(KEY_MANAGER, Random.name()),
    serverSslContextValue = Ids.build(SERVER_SSL_CONTEXT, Random.name());
    // Register the generated key-store file for deletion in cleanUp().
    FILE_NAMES_TO_BE_DELETED.add(keyStorePathValue);
    // Walk the wizard: first verify it refuses to advance until authentication mode
    // and key-store strategy are both selected, then pick one-way auth + create-all.
    EnableSslWizard wizard = page.enableSslWizard()
        .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_AUTHENTICATION_AS_WELL_AS_KEY_STORE_MANIPULATION_STATEGY)
        .disableMutualAuthentication()
        .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_KEY_STORE_MANIPULATION_STATEGY)
        .createAllResources()
        .nextConfiguration();
    FormFragment configForm = wizard.getConfigurationForm();
    configForm.editTextFiringExtraChangeEvent(KEY_DN_ORGANIZATION, HAL);
    wizard.next();
    // Required fields validation test — all mandatory inputs must be flagged when empty.
    configForm
        .expectError(KEY_STORE_NAME)
        .expectError(KEY_STORE_PASSWORD)
        .expectError(KEY_STORE_PATH)
        .expectError(KEY_ALIAS)
        .expectError(KEY_MANAGER)
        .expectError(SERVER_SSL_CONTEXT);
    // Fill in the configuration form with the generated values.
    configForm
        .editTextFiringExtraChangeEvent(KEY_STORE_NAME, keyStoreNameValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PASSWORD, keyStorePasswordValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PATH, keyStorePathValue)
        .editTextFiringExtraChangeEvent(KEY_ALIAS, keyAliasValue)
        .editTextFiringExtraChangeEvent(KEY_MANAGER, keyManagerValue)
        .editTextFiringExtraChangeEvent(SERVER_SSL_CONTEXT, serverSslContextValue);
    try {
        wizard
            .nextReview()
            .finishStayOpen()
            .verifySuccess()
            .close();
        // NOTE(review): reload step is commented out — the @After snapshot restore
        // presumably covers cleanup; confirm before re-enabling.
        // reloadOneWaySecuredManagementWithClientUpdate(keyStoreNameValue, keyAliasValue);
        // Verify every resource the wizard promised to create actually exists
        // with the expected attribute values.
        Address keyStoreAddress = keyStoreAddress(keyStoreNameValue);
        new ResourceVerifier(keyStoreAddress, client).verifyExists()
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue)
            .verifyAttribute(PATH, keyStorePathValue);
        new ResourceVerifier(keyManagerAddress(keyManagerValue), client).verifyExists()
            .verifyAttribute(KEY_STORE, keyStoreNameValue)
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        new ResourceVerifier(serverSslContextAddress(serverSslContextValue), client).verifyExists()
            .verifyAttribute(KEY_MANAGER, keyManagerValue);
        new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client)
            .verifyAttribute(SSL_CONTEXT, serverSslContextValue)
            .verifyAttribute(SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS);
        // Alias validation — the generated self-signed certificate must exist in the
        // key store, with HAL as the DN organization of both issuer and subject.
        ModelNodeResult aliasResult = ops.invoke(READ_ALIAS, keyStoreAddress, Values.of(ALIAS, keyAliasValue));
        aliasResult.assertSuccess();
        sslOps.assertHALisDNOrganizationUnitOf(ISSUER, aliasResult, PRIVATE_KEY_ENTRY)
            .assertHALisDNOrganizationUnitOf(SUBJECT, aliasResult, PRIVATE_KEY_ENTRY);
    } finally {
        // disableHttpInterfaceSsl();
    }
}
/**
 * Testing configuration of http management interface with
 * <ul>
 * <li>just one-way server authentication</li>
 * <li>server key store to be created</li>
 * <li>existing server certificate to be provided by user</li>
 * </ul>
 */
@Test
public void enableOneWayForExistingCertificate() throws Exception {
    // Random, unique names for every Elytron resource the wizard will create.
    String
    keyStoreNameValue = Ids.build(KEY_STORE, NAME, Random.name()),
    keyStorePasswordValue = Ids.build(KEY_STORE, PASS, Random.name()),
    keyStorePathValue = Ids.build(KEY_STORE, PATH, Random.name()),
    keyAliasValue = Ids.build(KEY_ALIAS, Random.name()),
    keyManagerValue = Ids.build(KEY_MANAGER, Random.name()),
    serverSslContextValue = Ids.build(SERVER_SSL_CONTEXT, Random.name());
    // FIX: register the key-store file for deletion in cleanUp() — the sibling
    // test enableOneWayGenerated() does this, but it was missing here, leaving
    // the file created below behind after the test run.
    FILE_NAMES_TO_BE_DELETED.add(keyStorePathValue);
    sslOps.createKeyStoreWithCertificate(Random.name(), keyStorePathValue, keyStorePasswordValue, keyAliasValue); // we need just cert store
    // Walk the wizard: one-way auth, create a new key store around the existing certificate.
    EnableSslWizard wizard = page.enableSslWizard()
        .disableMutualAuthentication()
        .createKeyStore()
        .nextConfiguration();
    FormFragment configForm = wizard.getConfigurationForm();
    wizard.next();
    // Required fields validation test — all mandatory inputs must be flagged when empty.
    configForm
        .expectError(KEY_STORE_NAME)
        .expectError(KEY_STORE_PASSWORD)
        .expectError(KEY_STORE_PATH)
        .expectError(KEY_MANAGER)
        .expectError(SERVER_SSL_CONTEXT);
    // Fill in the configuration form with the generated values.
    configForm
        .editTextFiringExtraChangeEvent(KEY_STORE_NAME, keyStoreNameValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PASSWORD, keyStorePasswordValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PATH, keyStorePathValue)
        .editTextFiringExtraChangeEvent(KEY_MANAGER, keyManagerValue)
        .editTextFiringExtraChangeEvent(SERVER_SSL_CONTEXT, serverSslContextValue);
    try {
        wizard
            .nextReview()
            .finishStayOpen()
            .verifySuccess()
            .close();
        // reloadOneWaySecuredManagementWithClientUpdate(keyStoreNameValue, keyAliasValue);
        // Verify every resource the wizard promised to create actually exists
        // with the expected attribute values.
        Address keyStoreAddress = keyStoreAddress(keyStoreNameValue);
        new ResourceVerifier(keyStoreAddress, client).verifyExists()
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue)
            .verifyAttribute(PATH, keyStorePathValue);
        new ResourceVerifier(keyManagerAddress(keyManagerValue), client)
            .verifyExists()
            .verifyAttribute(KEY_STORE, keyStoreNameValue)
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        new ResourceVerifier(serverSslContextAddress(serverSslContextValue), client)
            .verifyExists()
            .verifyAttribute(KEY_MANAGER, keyManagerValue);
        new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client)
            .verifyAttribute(SSL_CONTEXT, serverSslContextValue)
            .verifyAttribute(SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS);
    } finally {
        // disableHttpInterfaceSsl();
    }
}
/**
 * Testing configuration of http management interface with
 * <ul>
 * <li>just one-way server authentication</li>
 * <li>existing server key store with certificate to be provided by user</li>
 * </ul>
 */
@Test
public void enableOneWayForExistingKeyStoreAndCerificate() throws Exception {
    // Random, unique names for every Elytron resource the wizard will reference/create.
    String
    keyStoreNameValue = Ids.build(KEY_STORE, NAME, Random.name()),
    keyStorePasswordValue = Ids.build(KEY_STORE, PASS, Random.name()),
    keyAliasValue = Ids.build(KEY_ALIAS, Random.name()),
    keyManagerValue = Ids.build(KEY_MANAGER, Random.name()),
    serverSslContextValue = Ids.build(SERVER_SSL_CONTEXT, Random.name());
    // NOTE(review): the key-store file path here is an anonymous Random.name() that is
    // never added to FILE_NAMES_TO_BE_DELETED, so the file survives cleanUp() — confirm
    // whether that leftover is acceptable or the path should be captured and registered.
    sslOps.createKeyStoreWithCertificate(keyStoreNameValue, Random.name(), keyStorePasswordValue, keyAliasValue); // we need just cert store
    // Walk the wizard: one-way auth, reuse the key store created above.
    EnableSslWizard wizard = page.enableSslWizard()
        .disableMutualAuthentication()
        .reuseKeyStore()
        .nextConfiguration();
    FormFragment configForm = wizard.getConfigurationForm();
    wizard.next();
    // Required fields validation test — all mandatory inputs must be flagged when empty.
    configForm
        .expectError(KEY_STORE)
        .expectError(KEY_STORE_PASSWORD)
        .expectError(KEY_MANAGER)
        .expectError(SERVER_SSL_CONTEXT);
    // Fill in the configuration form with the generated values.
    configForm
        .editTextFiringExtraChangeEvent(KEY_STORE, keyStoreNameValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PASSWORD, keyStorePasswordValue)
        .editTextFiringExtraChangeEvent(KEY_MANAGER, keyManagerValue)
        .editTextFiringExtraChangeEvent(SERVER_SSL_CONTEXT, serverSslContextValue);
    try {
        wizard
            .nextReview()
            .finishStayOpen()
            .verifySuccess()
            .close();
        // reloadOneWaySecuredManagementWithClientUpdate(keyStoreNameValue, keyAliasValue);
        // Verify every resource the wizard promised to create actually exists
        // with the expected attribute values.
        Address keyStoreAddress = keyStoreAddress(keyStoreNameValue);
        new ResourceVerifier(keyStoreAddress, client).verifyExists()
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        new ResourceVerifier(keyManagerAddress(keyManagerValue), client)
            .verifyExists()
            .verifyAttribute(KEY_STORE, keyStoreNameValue)
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        new ResourceVerifier(serverSslContextAddress(serverSslContextValue), client)
            .verifyExists()
            .verifyAttribute(KEY_MANAGER, keyManagerValue);
        new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client)
            .verifyAttribute(SSL_CONTEXT, serverSslContextValue)
            .verifyAttribute(SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS);
    } finally {
        // disableHttpInterfaceSsl();
    }
}
/**
 * Testing configuration of http management interface with
 * <ul>
 * <li>mutual authentication with client trust-store to be created and existing client certificate to be provided by user</li>
 * <li>server key store to be created</li>
 * <li>server self-signed certificate to be generated</li>
 * </ul>
 */
@Test
public void enableMutualGenerated() throws Exception {
    // Random, unique names for every Elytron resource involved (server key store,
    // trust store, managers, SSL context, plus the client-side certificate material).
    String
    keyStoreNameValue = Ids.build(KEY_STORE, NAME, Random.name()),
    keyStorePasswordValue = Ids.build(KEY_STORE, PASS, Random.name()),
    keyStorePathValue = Ids.build(KEY_STORE, PATH, Random.name()),
    keyAliasValue = Ids.build(KEY_ALIAS, Random.name()),
    keyManagerValue = Ids.build(KEY_MANAGER, Random.name()),
    trustManagerValue = Ids.build(TRUST_MANAGER, Random.name()),
    serverSslContextValue = Ids.build(SERVER_SSL_CONTEXT, Random.name()),
    clientCertificateAliasValue = Ids.build(CLIENT_CERTIFICATE_ALIAS, Random.name()),
    clientCertificatePathValue = Ids.build(CLIENT_CERTIFICATE_PATH, '.', Random.name(), CERT),
    clientKeyStorePathValue = Ids.build(CLIENT, KEY_STORE_PATH, Random.name()),
    clientKeyStorePasswordValue = Ids.build(CLIENT, KEY_STORE, PASS, Random.name()),
    trustStoreNameValue = Ids.build(TRUST_STORE, Random.name()),
    trustStorePasswordValue = Ids.build(TRUST_STORE_PASSWORD, Random.name()),
    trustStorePathValue = Ids.build(TRUST_STORE_PATH, Random.name());
    // Register generated store files for deletion in cleanUp().
    // NOTE(review): clientKeyStorePathValue (created just below) is not registered —
    // confirm whether that file is expected to survive the test run.
    FILE_NAMES_TO_BE_DELETED.add(keyStorePathValue);
    FILE_NAMES_TO_BE_DELETED.add(trustStorePathValue);
    Address clientKeyStoreAddress = sslOps.createKeyStoreWithCertificate(Random.name(), clientKeyStorePathValue,
        clientKeyStorePasswordValue, clientCertificateAliasValue);
    // Creating client certificate which needs to be provided by user
    sslOps.createCertificate(clientCertificatePathValue, clientCertificateAliasValue, clientKeyStoreAddress);
    // Walk the wizard: verify it refuses to advance until authentication mode and
    // key-store strategy are selected, then pick mutual auth + create-all.
    EnableSslWizard wizard = page.enableSslWizard()
        .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_AUTHENTICATION_AS_WELL_AS_KEY_STORE_MANIPULATION_STATEGY)
        .enableMutualAuthentication()
        .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_KEY_STORE_MANIPULATION_STATEGY)
        .createAllResources()
        .nextConfiguration();
    FormFragment configForm = wizard.getConfigurationForm();
    configForm.editTextFiringExtraChangeEvent(KEY_DN_ORGANIZATION, HAL);
    wizard.next();
    // Required fields validation test — all mandatory inputs must be flagged when empty.
    configForm
        .expectError(KEY_STORE_NAME)
        .expectError(KEY_STORE_PASSWORD)
        .expectError(KEY_STORE_PATH)
        .expectError(KEY_ALIAS)
        .expectError(KEY_MANAGER)
        .expectError(TRUST_MANAGER)
        .expectError(SERVER_SSL_CONTEXT)
        .expectError(CLIENT_CERTIFICATE_ALIAS)
        .expectError(CLIENT_CERTIFICATE_PATH)
        .expectError(TRUST_STORE_NAME)
        .expectError(TRUST_STORE_PASSWORD)
        .expectError(TRUST_STORE_PATH);
    // Fill in the configuration form; certificate validation is switched off since the
    // client certificate is self-created.
    configForm
        .editTextFiringExtraChangeEvent(KEY_STORE_NAME, keyStoreNameValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PASSWORD, keyStorePasswordValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PATH, keyStorePathValue)
        .editTextFiringExtraChangeEvent(KEY_ALIAS, keyAliasValue)
        .editTextFiringExtraChangeEvent(KEY_MANAGER, keyManagerValue)
        .editTextFiringExtraChangeEvent(TRUST_MANAGER, trustManagerValue)
        .editTextFiringExtraChangeEvent(SERVER_SSL_CONTEXT, serverSslContextValue)
        .editTextFiringExtraChangeEvent(CLIENT_CERTIFICATE_ALIAS, clientCertificateAliasValue)
        .editTextFiringExtraChangeEvent(CLIENT_CERTIFICATE_PATH, clientCertificatePathValue)
        .flip(CLIENT_CERTIFICATE_VALIDATE, false)
        .editTextFiringExtraChangeEvent(TRUST_STORE_NAME, trustStoreNameValue)
        .editTextFiringExtraChangeEvent(TRUST_STORE_PASSWORD, trustStorePasswordValue)
        .editTextFiringExtraChangeEvent(TRUST_STORE_PATH, trustStorePathValue);
    try {
        wizard
            .nextReview()
            .finishStayOpen()
            .verifySuccess()
            .close();
        // reloadMutualSecuredManagementWithClientUpdate(keyStoreNameValue, keyAliasValue, clientCertificateAliasValue,
        // clientKeyStorePathValue, clientKeyStorePasswordValue);
        // Verify every resource the wizard promised to create actually exists
        // with the expected attribute values.
        Address keyStoreAddress = keyStoreAddress(keyStoreNameValue);
        new ResourceVerifier(keyStoreAddress, client).verifyExists()
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue)
            .verifyAttribute(PATH, keyStorePathValue);
        Address trustStoreAddress = keyStoreAddress(trustStoreNameValue);
        new ResourceVerifier(trustStoreAddress, client).verifyExists()
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, trustStorePasswordValue)
            .verifyAttribute(PATH, trustStorePathValue);
        new ResourceVerifier(trustManagerAddress(trustManagerValue), client).verifyExists()
            .verifyAttribute(KEY_STORE, trustStoreNameValue);
        new ResourceVerifier(keyManagerAddress(keyManagerValue), client).verifyExists()
            .verifyAttribute(KEY_STORE, keyStoreNameValue)
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        new ResourceVerifier(serverSslContextAddress(serverSslContextValue), client).verifyExists()
            .verifyAttribute(KEY_MANAGER, keyManagerValue).verifyAttribute(TRUST_MANAGER, trustManagerValue);
        new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client).verifyAttribute(SSL_CONTEXT, serverSslContextValue)
            .verifyAttribute(SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS);
        // Alias validation — server alias must be a private-key entry in the key store,
        // client alias a trusted-certificate entry in the trust store, both with HAL
        // as DN organization of issuer and subject.
        ModelNodeResult serverAliasResult = ops.invoke(READ_ALIAS, keyStoreAddress,
            Values.of(ALIAS, keyAliasValue));
        serverAliasResult.assertSuccess();
        sslOps.assertHALisDNOrganizationUnitOf(ISSUER, serverAliasResult, PRIVATE_KEY_ENTRY)
            .assertHALisDNOrganizationUnitOf(SUBJECT, serverAliasResult, PRIVATE_KEY_ENTRY);
        ModelNodeResult clientAliasResult = ops.invoke(READ_ALIAS, trustStoreAddress,
            Values.of(ALIAS, clientCertificateAliasValue));
        clientAliasResult.assertSuccess();
        sslOps.assertHALisDNOrganizationUnitOf(ISSUER, clientAliasResult, TRUSTED_CERTIFICATE_ENTRY)
            .assertHALisDNOrganizationUnitOf(SUBJECT, clientAliasResult, TRUSTED_CERTIFICATE_ENTRY);
    } finally {
        // disableHttpInterfaceSsl();
    }
}
/**
 * Testing configuration of http management interface with
 * <ul>
 * <li>mutual authentication with client trust-store to be created and existing client certificate to be provided by user</li>
 * <li>server key store to be created</li>
 * <li>existing server certificate to be provided by user</li>
 * </ul>
 */
@Test
public void enableMutualForExistingServerCertificate() throws Exception {
    // Random, unique names for every Elytron resource involved (server key store,
    // trust store, managers, SSL context, plus the client-side certificate material).
    String
    keyStoreNameValue = Ids.build(KEY_STORE, NAME, Random.name()),
    keyStorePasswordValue = Ids.build(KEY_STORE, PASS, Random.name()),
    keyStorePathValue = Ids.build(KEY_STORE, PATH, Random.name()),
    keyAliasValue = Ids.build(KEY_ALIAS, Random.name()),
    keyManagerValue = Ids.build(KEY_MANAGER, Random.name()),
    trustManagerValue = Ids.build(TRUST_MANAGER, Random.name()),
    serverSslContextValue = Ids.build(SERVER_SSL_CONTEXT, Random.name()),
    clientCertificateAliasValue = Ids.build(CLIENT_CERTIFICATE_ALIAS, Random.name()),
    clientCertificatePathValue = Ids.build(CLIENT_CERTIFICATE_PATH, '.', Random.name(), CERT),
    clientKeyStorePathValue = Ids.build(CLIENT, KEY_STORE_PATH, Random.name()),
    clientKeyStorePasswordValue = Ids.build(CLIENT, KEY_STORE, PASS, Random.name()),
    trustStoreNameValue = Ids.build(TRUST_STORE, Random.name()),
    trustStorePasswordValue = Ids.build(TRUST_STORE_PASSWORD, Random.name()),
    trustStorePathValue = Ids.build(TRUST_STORE_PATH, Random.name());
    // Register generated store files for deletion in cleanUp().
    // FIX: keyStorePathValue was missing here — the server key-store file created just
    // below was never cleaned up, unlike in the sibling tests that register their paths.
    FILE_NAMES_TO_BE_DELETED.add(keyStorePathValue);
    FILE_NAMES_TO_BE_DELETED.add(trustStorePathValue);
    sslOps.createKeyStoreWithCertificate(Random.name(), keyStorePathValue, keyStorePasswordValue, keyAliasValue); // we need just cert store
    Address clientKeyStoreAddress = sslOps.createKeyStoreWithCertificate(Random.name(), clientKeyStorePathValue,
        clientKeyStorePasswordValue, clientCertificateAliasValue);
    // Creating client certificate which needs to be provided by user
    sslOps.createCertificate(clientCertificatePathValue, clientCertificateAliasValue, clientKeyStoreAddress);
    // Walk the wizard: verify it refuses to advance until authentication mode and
    // key-store strategy are selected, then pick mutual auth + create-key-store.
    EnableSslWizard wizard = page.enableSslWizard()
        .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_AUTHENTICATION_AS_WELL_AS_KEY_STORE_MANIPULATION_STATEGY)
        .enableMutualAuthentication()
        .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_KEY_STORE_MANIPULATION_STATEGY)
        .createKeyStore()
        .nextConfiguration();
    FormFragment configForm = wizard.getConfigurationForm();
    wizard.next();
    // Required fields validation test — all mandatory inputs must be flagged when empty
    // (no KEY_ALIAS here: the server certificate already exists).
    configForm
        .expectError(KEY_STORE_NAME)
        .expectError(KEY_STORE_PASSWORD)
        .expectError(KEY_STORE_PATH)
        .expectError(KEY_MANAGER)
        .expectError(TRUST_MANAGER)
        .expectError(SERVER_SSL_CONTEXT)
        .expectError(CLIENT_CERTIFICATE_ALIAS)
        .expectError(CLIENT_CERTIFICATE_PATH)
        .expectError(TRUST_STORE_NAME)
        .expectError(TRUST_STORE_PASSWORD)
        .expectError(TRUST_STORE_PATH);
    // Fill in the configuration form; certificate validation is switched off since the
    // client certificate is self-created.
    configForm
        .editTextFiringExtraChangeEvent(KEY_STORE_NAME, keyStoreNameValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PASSWORD, keyStorePasswordValue)
        .editTextFiringExtraChangeEvent(KEY_STORE_PATH, keyStorePathValue)
        .editTextFiringExtraChangeEvent(KEY_MANAGER, keyManagerValue)
        .editTextFiringExtraChangeEvent(TRUST_MANAGER, trustManagerValue)
        .editTextFiringExtraChangeEvent(SERVER_SSL_CONTEXT, serverSslContextValue)
        .editTextFiringExtraChangeEvent(CLIENT_CERTIFICATE_ALIAS, clientCertificateAliasValue)
        .editTextFiringExtraChangeEvent(CLIENT_CERTIFICATE_PATH, clientCertificatePathValue)
        .flip(CLIENT_CERTIFICATE_VALIDATE, false)
        .editTextFiringExtraChangeEvent(TRUST_STORE_NAME, trustStoreNameValue)
        .editTextFiringExtraChangeEvent(TRUST_STORE_PASSWORD, trustStorePasswordValue)
        .editTextFiringExtraChangeEvent(TRUST_STORE_PATH, trustStorePathValue);
    try {
        wizard
            .nextReview()
            .finishStayOpen()
            .verifySuccess()
            .close();
        // reloadMutualSecuredManagementWithClientUpdate(keyStoreNameValue, keyAliasValue, clientCertificateAliasValue,
        // clientKeyStorePathValue, clientKeyStorePasswordValue);
        // Verify every resource the wizard promised to create actually exists
        // with the expected attribute values.
        Address keyStoreAddress = keyStoreAddress(keyStoreNameValue);
        new ResourceVerifier(keyStoreAddress, client).verifyExists()
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue)
            .verifyAttribute(PATH, keyStorePathValue);
        Address trustStoreAddress = keyStoreAddress(trustStoreNameValue);
        new ResourceVerifier(trustStoreAddress, client).verifyExists()
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, trustStorePasswordValue)
            .verifyAttribute(PATH, trustStorePathValue);
        new ResourceVerifier(trustManagerAddress(trustManagerValue), client)
            .verifyExists()
            .verifyAttribute(KEY_STORE, trustStoreNameValue);
        new ResourceVerifier(keyManagerAddress(keyManagerValue), client)
            .verifyExists()
            .verifyAttribute(KEY_STORE, keyStoreNameValue)
            .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        new ResourceVerifier(serverSslContextAddress(serverSslContextValue), client)
            .verifyExists()
            .verifyAttribute(KEY_MANAGER, keyManagerValue)
            .verifyAttribute(TRUST_MANAGER, trustManagerValue);
        new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client)
            .verifyAttribute(SSL_CONTEXT, serverSslContextValue)
            .verifyAttribute(SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS);
        // Alias validation — server alias must be a private-key entry in the key store,
        // client alias a trusted-certificate entry in the trust store.
        ModelNodeResult serverAliasResult = ops.invoke(READ_ALIAS, keyStoreAddress,
            Values.of(ALIAS, keyAliasValue));
        serverAliasResult.assertSuccess();
        sslOps.assertHALisDNOrganizationUnitOf(ISSUER, serverAliasResult, PRIVATE_KEY_ENTRY)
            .assertHALisDNOrganizationUnitOf(SUBJECT, serverAliasResult, PRIVATE_KEY_ENTRY);
        ModelNodeResult clientAliasResult = ops.invoke(READ_ALIAS, trustStoreAddress,
            Values.of(ALIAS, clientCertificateAliasValue));
        clientAliasResult.assertSuccess();
        sslOps.assertHALisDNOrganizationUnitOf(ISSUER, clientAliasResult, TRUSTED_CERTIFICATE_ENTRY)
            .assertHALisDNOrganizationUnitOf(SUBJECT, clientAliasResult, TRUSTED_CERTIFICATE_ENTRY);
    } finally {
        // disableHttpInterfaceSsl();
    }
}
/**
 * Testing configuration of http management interface with
 * <ul>
 * <li>mutual authentication with client trust-store to be created and client certificate to be provided by user</li>
 * <li>existing server key store with existing certificate to be provided by user</li>
 * </ul>
 */
@Test
public void enableMutualForExistingServerKeyStore() throws Exception {
    // Unique, random names/paths for every resource this wizard run creates,
    // so repeated runs do not collide with leftovers of previous ones.
    String
            keyStoreNameValue = Ids.build(KEY_STORE, NAME, Random.name()),
            keyStorePasswordValue = Ids.build(KEY_STORE, PASS, Random.name()),
            keyAliasValue = Ids.build(KEY_ALIAS, Random.name()),
            keyManagerValue = Ids.build(KEY_MANAGER, Random.name()),
            trustManagerValue = Ids.build(TRUST_MANAGER, Random.name()),
            serverSslContextValue = Ids.build(SERVER_SSL_CONTEXT, Random.name()),
            clientCertificateAliasValue = Ids.build(CLIENT_CERTIFICATE_ALIAS, Random.name()),
            clientCertificatePathValue = Ids.build(CLIENT_CERTIFICATE_PATH, '.', Random.name(), CERT),
            clientKeyStorePathValue = Ids.build(CLIENT, KEY_STORE_PATH, Random.name()),
            clientKeyStorePasswordValue = Ids.build(CLIENT, KEY_STORE, PASS, Random.name()),
            trustStoreNameValue = Ids.build(TRUST_STORE, Random.name()),
            trustStorePasswordValue = Ids.build(TRUST_STORE_PASSWORD, Random.name()),
            trustStorePathValue = Ids.build(TRUST_STORE_PATH, Random.name());
    FILE_NAMES_TO_BE_DELETED.add(trustStorePathValue);
    // NOTE(review): clientCertificatePathValue is written to disk below by
    // createCertificate() but is not added to FILE_NAMES_TO_BE_DELETED here —
    // confirm sslOps cleans it up internally.
    // Pre-existing server key store (the "existing" part of this scenario);
    // only the certificate inside it matters for the wizard.
    sslOps.createKeyStoreWithCertificate(keyStoreNameValue, Random.name(), keyStorePasswordValue, keyAliasValue); // we need just cert store
    Address clientKeyStoreAddress = sslOps.createKeyStoreWithCertificate(Random.name(), clientKeyStorePathValue,
            clientKeyStorePasswordValue, clientCertificateAliasValue);
    // Creating client certificate which needs to be provided by user
    sslOps.createCertificate(clientCertificatePathValue, clientCertificateAliasValue, clientKeyStoreAddress);
    // Drive the wizard: first verify the strategy-selection validation errors,
    // then pick mutual auth + key-store reuse.
    EnableSslWizard wizard = page.enableSslWizard()
            .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_AUTHENTICATION_AS_WELL_AS_KEY_STORE_MANIPULATION_STATEGY)
            .enableMutualAuthentication()
            .tryNextToConfigurationWithExpectError(YOU_NEED_TO_SELECT_KEY_STORE_MANIPULATION_STATEGY)
            .reuseKeyStore()
            .nextConfiguration();
    FormFragment configForm = wizard.getConfigurationForm();
    // Advancing with an empty form must flag every required field.
    wizard.next();
    // Required fields validation test
    configForm
            .expectError(KEY_STORE)
            .expectError(KEY_STORE_PASSWORD)
            .expectError(KEY_MANAGER)
            .expectError(TRUST_MANAGER)
            .expectError(SERVER_SSL_CONTEXT)
            .expectError(CLIENT_CERTIFICATE_ALIAS)
            .expectError(CLIENT_CERTIFICATE_PATH)
            .expectError(TRUST_STORE_NAME)
            .expectError(TRUST_STORE_PASSWORD)
            .expectError(TRUST_STORE_PATH);
    // Fill the configuration; certificate validation is switched off because
    // the test certificate is self-signed.
    configForm
            .editTextFiringExtraChangeEvent(KEY_STORE, keyStoreNameValue)
            .editTextFiringExtraChangeEvent(KEY_STORE_PASSWORD, keyStorePasswordValue)
            .editTextFiringExtraChangeEvent(KEY_MANAGER, keyManagerValue)
            .editTextFiringExtraChangeEvent(TRUST_MANAGER, trustManagerValue)
            .editTextFiringExtraChangeEvent(SERVER_SSL_CONTEXT, serverSslContextValue)
            .editTextFiringExtraChangeEvent(CLIENT_CERTIFICATE_ALIAS, clientCertificateAliasValue)
            .editTextFiringExtraChangeEvent(CLIENT_CERTIFICATE_PATH, clientCertificatePathValue)
            .flip(CLIENT_CERTIFICATE_VALIDATE, false)
            .editTextFiringExtraChangeEvent(TRUST_STORE_NAME, trustStoreNameValue)
            .editTextFiringExtraChangeEvent(TRUST_STORE_PASSWORD, trustStorePasswordValue)
            .editTextFiringExtraChangeEvent(TRUST_STORE_PATH, trustStorePathValue);
    try {
        wizard
                .nextReview()
                .finishStayOpen()
                .verifySuccess()
                .close();
        // NOTE(review): the client reconnect after reload is disabled — presumably
        // because the management reload cannot be followed by this client; confirm.
        // reloadMutualSecuredManagementWithClientUpdate(keyStoreNameValue, keyAliasValue, clientCertificateAliasValue,
        // clientKeyStorePathValue, clientKeyStorePasswordValue);
        // Verify each Elytron resource the wizard was expected to create or wire up.
        Address keyStoreAddress = keyStoreAddress(keyStoreNameValue);
        new ResourceVerifier(keyStoreAddress, client)
                .verifyExists()
                .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        Address trustStoreAddress = keyStoreAddress(trustStoreNameValue);
        new ResourceVerifier(trustStoreAddress, client)
                .verifyExists()
                .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, trustStorePasswordValue)
                .verifyAttribute(PATH, trustStorePathValue);
        new ResourceVerifier(trustManagerAddress(trustManagerValue), client)
                .verifyExists()
                .verifyAttribute(KEY_STORE, trustStoreNameValue);
        new ResourceVerifier(keyManagerAddress(keyManagerValue), client)
                .verifyExists()
                .verifyAttribute(KEY_STORE, keyStoreNameValue)
                .verifyAttribute(CREDENTIAL_REFERENCE_CLEAR_TEXT_ATTR, keyStorePasswordValue);
        new ResourceVerifier(serverSslContextAddress(serverSslContextValue), client)
                .verifyExists()
                .verifyAttribute(KEY_MANAGER, keyManagerValue)
                .verifyAttribute(TRUST_MANAGER, trustManagerValue);
        new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client)
                .verifyAttribute(SSL_CONTEXT, serverSslContextValue)
                .verifyAttribute(SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS);
        // Alias validation
        ModelNodeResult serverAliasResult = ops.invoke(READ_ALIAS, keyStoreAddress,
                Values.of(ALIAS, keyAliasValue));
        serverAliasResult.assertSuccess();
        sslOps.assertHALisDNOrganizationUnitOf(ISSUER, serverAliasResult, PRIVATE_KEY_ENTRY)
                .assertHALisDNOrganizationUnitOf(SUBJECT, serverAliasResult, PRIVATE_KEY_ENTRY);
        ModelNodeResult clientAliasResult = ops.invoke(READ_ALIAS, trustStoreAddress,
                Values.of(ALIAS, clientCertificateAliasValue));
        clientAliasResult.assertSuccess();
        sslOps.assertHALisDNOrganizationUnitOf(ISSUER, clientAliasResult, TRUSTED_CERTIFICATE_ENTRY)
                .assertHALisDNOrganizationUnitOf(SUBJECT, clientAliasResult, TRUSTED_CERTIFICATE_ENTRY);
    } finally {
        // NOTE(review): SSL teardown is disabled together with the client reconnect
        // above — confirm a later test/suite hook restores the http-interface.
        // disableHttpInterfaceSsl();
    }
}
/**
 * Testing the ability to disable SSL for the http management interface.
 * Configures a working server-ssl-context on the http-interface out of band,
 * disables SSL through the UI and verifies both attributes are undefined again.
 */
@Test
public void disableSSL() throws Exception {
    String
            keyStoreNameValue = Ids.build(KEY_STORE, NAME, Random.name()),
            keyStorePasswordValue = Ids.build(KEY_STORE, PASS, Random.name()),
            keyManagerValue = Ids.build(KEY_MANAGER, Random.name()),
            serverSslContextValue = Ids.build(SERVER_SSL_CONTEXT, Random.name());
    // Build key-store -> key-manager -> server-ssl-context and bind it to the interface.
    sslOps.createKeyStoreWithCertificate(keyStoreNameValue, Random.name(), keyStorePasswordValue, Random.name());
    ops.add(keyManagerAddress(keyManagerValue), Values.of(KEY_STORE, keyStoreNameValue)
            .andObject(CREDENTIAL_REFERENCE, Values.of(CLEAR_TEXT, keyStorePasswordValue))).assertSuccess();
    ops.add(serverSslContextAddress(serverSslContextValue), Values.of(KEY_MANAGER, keyManagerValue)).assertSuccess();
    ops.batch(new Batch()
            .writeAttribute(HTTP_INTERFACE_ADDRESS, SSL_CONTEXT, serverSslContextValue)
            .writeAttribute(HTTP_INTERFACE_ADDRESS, SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS))
            .assertSuccess();
    ResourceVerifier httpInterfaceVerifier = new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client)
            .verifyExists()
            .verifyAttribute(SECURE_SOCKET_BINDING, MANAGEMENT_HTTPS)
            .verifyAttribute(SSL_CONTEXT, serverSslContextValue);
    page.navigateToHttpManagementPage()
            .disableSslWithReload();
    sslOps.waitUntilServerIsRunning();
    // Disabling SSL must undefine both attributes configured above. This previously
    // verified SECURITY_REALM (never set in this test) instead of SSL_CONTEXT, which
    // made the check vacuous; now consistent with disableHttpInterfaceSsl().
    httpInterfaceVerifier
            .verifyAttributeIsUndefined(SECURE_SOCKET_BINDING)
            .verifyAttributeIsUndefined(SSL_CONTEXT);
}
/**
 * Reloads the server after one-way TLS was enabled and reconnects the test client.
 * For one-way TLS the client only needs a trust store with the server certificate.
 *
 * @param serverKeyStoreNameValue name of the server key store holding the certificate
 * @param serverKeyAliasValue alias of the server key to trust
 */
private void reloadOneWaySecuredManagementWithClientUpdate(String serverKeyStoreNameValue, String serverKeyAliasValue)
        throws IOException, InterruptedException, TimeoutException {
    SslOptionsCreator trustOnlyOptions = (creaperTrustStorePath, creaperTrustStorePassword) ->
            new SslOptions.Builder()
                    .trustStore(new File(keyStoresDirectory, creaperTrustStorePath))
                    .trustStorePassword(creaperTrustStorePassword)
                    .build();
    reloadWithClientUpdate(serverKeyStoreNameValue, serverKeyAliasValue, trustOnlyOptions);
}
/**
 * Reloads the server after mutual TLS was enabled and reconnects the test client.
 * The client presents its own key/certificate and trusts the server certificate.
 *
 * @param serverKeyStoreNameValue name of the server key store holding the certificate
 * @param serverKeyAliasValue alias of the server key to trust
 * @param clientCertificateAliasValue alias of the client key in the client key store
 * @param clientKeyStorePathValue file name of the client key store under keyStoresDirectory
 * @param clientKeyStorePasswordValue password for both the client key store and the key
 */
private void reloadMutualSecuredManagementWithClientUpdate(String serverKeyStoreNameValue, String serverKeyAliasValue,
        String clientCertificateAliasValue, String clientKeyStorePathValue, String clientKeyStorePasswordValue)
        throws IOException, InterruptedException, TimeoutException {
    SslOptionsCreator mutualOptions = (creaperTrustStorePath, creaperTrustStorePassword) ->
            new SslOptions.Builder()
                    .keyStore(new File(keyStoresDirectory, clientKeyStorePathValue))
                    .keyStorePassword(clientKeyStorePasswordValue)
                    .key(clientCertificateAliasValue, clientKeyStorePasswordValue)
                    .trustStore(new File(keyStoresDirectory, creaperTrustStorePath))
                    .trustStorePassword(creaperTrustStorePassword)
                    .build();
    reloadWithClientUpdate(serverKeyStoreNameValue, serverKeyAliasValue, mutualOptions);
}
/**
 * Exports the server certificate, imports it into a fresh Creaper trust store,
 * reloads the server and replaces the test client with an SSL-capable one built
 * by the supplied factory.
 *
 * @param serverKeyStoreNameValue key store containing the server certificate
 * @param serverKeyAliasValue alias of the certificate to export
 * @param sslOptionsCreator factory producing the new client's SSL options
 */
private void reloadWithClientUpdate(String serverKeyStoreNameValue, String serverKeyAliasValue, SslOptionsCreator sslOptionsCreator)
        throws IOException, InterruptedException, TimeoutException {
    String
            serverCertificatePathValue = Ids.build(SERVER_CERTIFICATE_PATH, '.', Random.name(), CERT),
            creaperTrustStoreName = Ids.build(TRUST_STORE, Random.name()),
            creaperTrustStorePath = Ids.build(TRUST_STORE_PATH, Random.name()),
            creaperTrustStorePassword = Ids.build(TRUST_STORE_PASSWORD, Random.name());
    FILE_NAMES_TO_BE_DELETED.add(serverCertificatePathValue);
    ops.invoke(EXPORT_CERTIFICATE, keyStoreAddress(serverKeyStoreNameValue),
            Values.of(ALIAS, serverKeyAliasValue).and(PATH, serverCertificatePathValue)).assertSuccess();
    Address creaperTrustStoreAddress = sslOps.createKeyStore(creaperTrustStoreName, creaperTrustStorePath,
            creaperTrustStorePassword);
    // Fail fast on import/store: these results were previously ignored, which let a
    // broken trust store surface only later as an opaque client-connection failure.
    ops.invoke(IMPORT_CERTIFICATE, creaperTrustStoreAddress,
            Values.of(ALIAS, serverKeyAliasValue).and(PATH, serverCertificatePathValue).and(VALIDATE, false)
                    .and(TRUST_CACERTS, false)
                    .andObject(CREDENTIAL_REFERENCE, Values.of(CLEAR_TEXT, creaperTrustStorePassword)))
            .assertSuccess();
    ops.invoke(STORE, creaperTrustStoreAddress).assertSuccess();
    // Short pause so the store hits disk before the reload tears the connection down.
    Library.letsSleep(200);
    // The reload result is deliberately not asserted: the management connection may
    // drop mid-operation while the server restarts.
    ops.invoke(RELOAD, Address.root());
    changeClient(sslOptionsCreator.create(creaperTrustStorePath, creaperTrustStorePassword));
    sslOps.waitUntilServerIsRunning();
}
/**
 * Reverts the http management interface to plain HTTP: undefines
 * secure-socket-binding and ssl-context, reloads the server and switches the
 * test client back to a non-SSL connection.
 * NOTE(review): within the visible tests this helper is referenced only from
 * commented-out finally blocks — confirm whether suite-level teardown covers it.
 */
private void disableHttpInterfaceSsl() throws Exception {
    ops.undefineAttribute(HTTP_INTERFACE_ADDRESS, SECURE_SOCKET_BINDING).assertSuccess();
    ops.undefineAttribute(HTTP_INTERFACE_ADDRESS, SSL_CONTEXT).assertSuccess();
    new ResourceVerifier(HTTP_INTERFACE_ADDRESS, client)
            .verifyAttributeIsUndefined(SECURE_SOCKET_BINDING)
            .verifyAttributeIsUndefined(SSL_CONTEXT);
    // Reload so the interface change takes effect, then reconnect without SSL.
    ops.invoke(RELOAD, Address.root());
    changeClientToNonSSL();
    sslOps.waitUntilServerIsRunning();
}
/**
 * Replaces the shared management client with an SSL-configured one and rebuilds
 * the Operations/SslOperations wrappers around it.
 * Mutates the static test fixtures {@code client}, {@code ops} and {@code sslOps}.
 *
 * @param sslOptions SSL settings for the new client connection
 */
private static void changeClient(SslOptions sslOptions) throws IOException {
    if (client != null) {
        client.close();
    }
    client = SslOperations.createSslClient(sslOptions);
    ops = new Operations(client);
    sslOps = new SslOperations(client).filesToBeCleanedUp(FILE_NAMES_TO_BE_DELETED);
}
/**
 * Replaces the shared management client with a plain (non-SSL) one and rebuilds
 * the Operations/SslOperations wrappers around it.
 * Mutates the static test fixtures {@code client}, {@code ops} and {@code sslOps}.
 */
private static void changeClientToNonSSL() throws IOException {
    if (client != null) {
        client.close();
    }
    client = ManagementClientProvider.createOnlineManagementClient();
    ops = new Operations(client);
    sslOps = new SslOperations(client).filesToBeCleanedUp(FILE_NAMES_TO_BE_DELETED);
}
@FunctionalInterface
private static interface SslOptionsCreator {
SslOptions create(String creaperTrustStorePath, String creaperTrustStorePassword);
}
} |
package org.mockitousage;
import static org.mockito.Mockito.*;
import java.util.LinkedList;
import java.util.List;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import org.mockito.exceptions.verification.TooLittleActualInvocations;
import org.mockito.exceptions.verification.VerifcationInOrderFailure;
import org.mockitoutil.TestBase;
@SuppressWarnings("unchecked")
public class MockingRealObjectsTest extends TestBase {

    // Partial mocking: the spy wraps a real LinkedList, so unstubbed calls hit
    // the real list while every interaction is still recorded for verification.
    List list = new LinkedList();
    List spy = Mockito.spy(list);

    @Test
    public void shouldVerify() {
        spy.add("one");
        spy.add("two");
        // Real behavior is preserved: the elements really landed in the list.
        assertEquals("one", spy.get(0));
        assertEquals("two", spy.get(1));
        verify(spy).add("one");
        verify(spy).add("two");
    }

    @Test
    public void shouldStub() {
        spy.add("one");
        // stub() is the legacy stubbing API; chained toReturn() calls queue
        // consecutive return values for get(0).
        stub(spy.get(0))
            .toReturn("1")
            .toReturn("1 again");
        assertEquals("1", spy.get(0));
        assertEquals("1 again", spy.get(0));
        // Methods that were not stubbed still delegate to the real list.
        assertEquals("one", spy.iterator().next());
        assertEquals(1, spy.size());
    }

    @SuppressWarnings("deprecation")
    @Test
    public void shouldStubVoid() {
        // First clear() is swallowed (toReturn), the second one throws.
        stubVoid(spy)
            .toReturn()
            .toThrow(new RuntimeException())
            .on().clear();
        spy.add("one");
        spy.clear();
        try {
            spy.clear();
            fail();
        } catch (RuntimeException e) {}
        // clear() was stubbed away, so the element added above is still present.
        assertEquals(1, spy.size());
    }

    @Test
    public void shouldStubWithDoReturnAndVerify() {
        // doReturn chaining: consecutive calls yield consecutive values.
        doReturn("foo")
            .doReturn("bar")
            .when(spy).get(0);
        assertEquals("foo", spy.get(0));
        assertEquals("bar", spy.get(0));
        verify(spy, times(2)).get(0);
        verifyNoMoreInteractions(spy);
    }

    @Test
    public void shouldVerifyInOrder() {
        spy.add("one");
        spy.add("two");
        InOrder inOrder = inOrder(spy);
        inOrder.verify(spy).add("one");
        inOrder.verify(spy).add("two");
        verifyNoMoreInteractions(spy);
    }

    @Test
    public void shouldVerifyInOrderAndFail() {
        spy.add("one");
        spy.add("two");
        InOrder inOrder = inOrder(spy);
        // Verifying "two" first consumes it; "one" is then out of order.
        // (VerifcationInOrderFailure is the actual, historically misspelled,
        // Mockito exception class imported above.)
        inOrder.verify(spy).add("two");
        try {
            inOrder.verify(spy).add("one");
            fail();
        } catch (VerifcationInOrderFailure f) {}
    }

    @Test
    public void shouldVerifyNumberOfTimes() {
        spy.add("one");
        spy.add("one");
        verify(spy, times(2)).add("one");
        verifyNoMoreInteractions(spy);
    }

    @Test
    public void shouldVerifyNumberOfTimesAndFail() {
        spy.add("one");
        spy.add("one");
        // Only two invocations happened, so times(3) must fail.
        try {
            verify(spy, times(3)).add("one");
            fail();
        } catch (TooLittleActualInvocations e) {}
    }

    @Test
    public void shouldToString() {
        // toString() is delegated to the real list, not mocked.
        spy.add("foo");
        assertEquals("[foo]" , spy.toString());
    }
}
package org.wildfly.test.integration.elytron.sasl;
import java.util.ArrayList;
import java.util.List;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.as.arquillian.api.ServerSetup;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.security.auth.client.AuthenticationConfiguration;
import org.wildfly.security.auth.client.AuthenticationContext;
import org.wildfly.security.auth.client.MatchRule;
import org.wildfly.test.integration.elytron.sasl.AbstractSaslTestBase.JmsSetup;
import org.wildfly.test.security.common.AbstractElytronSetupTask;
import org.wildfly.test.security.common.elytron.ConfigurableElement;
import org.wildfly.test.security.common.other.SimpleRemotingConnector;
import org.wildfly.test.security.common.other.SimpleSocketBinding;
/**
* Elytron SASL mechanisms tests which use Naming + JMS client. The server setup adds for each tested SASL configuration a new
* native remoting port and client tests functionality against it.
*
* @author Josef Cacek
*/
@RunWith(Arquillian.class)
@RunAsClient
@ServerSetup({ JmsSetup.class, DefaultSaslConfigTestCase.ServerSetup.class })
@Ignore("WFLY-8801")
public class DefaultSaslConfigTestCase extends AbstractSaslTestBase {
private static final String DEFAULT_SASL_AUTHENTICATION = "application-sasl-authentication";
private static final String DEFAULT = "DEFAULT";
private static final int PORT_DEFAULT = 10568;
/**
* Tests that ANONYMOUS SASL mechanism can't be used for authentication in default server configuration.
*/
@Test
public void testAnonymousFailsInDefault() throws Exception {
// Anonymous not supported in the default configuration
AuthenticationContext.empty()
.with(MatchRule.ALL,
AuthenticationConfiguration.EMPTY.useDefaultProviders().allowSaslMechanisms("ANONYMOUS").useAnonymous())
.run(() -> sendAndReceiveMsg(PORT_DEFAULT, true));
}
/**
* Tests that JBOSS-LOCAL-USER SASL mechanism can be used for authentication in default server configuration.
*/
@Test
@Ignore("WFLY-8742")
public void testJBossLocalInDefault() throws Exception {
AuthenticationContext.empty()
.with(MatchRule.ALL,
AuthenticationConfiguration.EMPTY.useDefaultProviders().allowSaslMechanisms("JBOSS-LOCAL-USER"))
.run(() -> sendAndReceiveMsg(PORT_DEFAULT, false));
}
/**
* Tests that DIGEST-MD5 SASL mechanism can be used for authentication in default server configuration.
*/
@Test
public void testDigestInDefault() throws Exception {
AuthenticationContext.empty()
.with(MatchRule.ALL,
AuthenticationConfiguration.EMPTY.useDefaultProviders().allowSaslMechanisms("DIGEST-MD5")
.useName("guest").usePassword("guest"))
.run(() -> sendAndReceiveMsg(PORT_DEFAULT, false, "guest", "guest"));
}
/**
* Setup task which configures remoting connectors for this test.
*/
public static class ServerSetup extends AbstractElytronSetupTask {
@Override
protected ConfigurableElement[] getConfigurableElements() {
List<ConfigurableElement> elements = new ArrayList<>();
elements.add(SimpleSocketBinding.builder().withName(DEFAULT).withPort(PORT_DEFAULT).build());
elements.add(SimpleRemotingConnector.builder().withName(DEFAULT).withSocketBinding(DEFAULT)
.withSaslAuthenticationFactory(DEFAULT_SASL_AUTHENTICATION).build());
return elements.toArray(new ConfigurableElement[elements.size()]);
}
}
} |
package at.fwd.swagger.spring.demo.user.controller;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import at.fwd.swagger.spring.demo.user.exception.ObjectNotFoundException;
import at.fwd.swagger.spring.demo.user.model.Category;
import at.fwd.swagger.spring.demo.user.model.User;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
/**
* User Controller with full POST request
* supports GET + POST
*
* @author johannes.fiala@fwd.at
*
*/
@RestController
@Api(value="user-crud-complete-post", position=2, description ="User services with complete post")
public class UserControllerWithCompletePost {
/**
* Logger for this class
*/
private static final Logger log = Logger.getLogger(UserControllerWithCompletePost.class);
private static final String PATH = "/user_complete_post";
private static final String MESSAGE_NOT_FOUND = "User not found";
private static final String MESSAGE_POST_SUCCESS = "User has been updated";
@Autowired
UserController userController;
@RequestMapping(method=RequestMethod.POST, value=PATH + "_complete")
@ApiOperation(value="create or update a user name by id", position = 1)
@ApiResponses(value = {
@ApiResponse(code = 200, message = MESSAGE_POST_SUCCESS, response = User.class) })
public User saveUserComplete( @RequestBody User user) {
log.debug("user: " + user);
log.debug("user.name: " + user.getName());
if (user!=null) {
userController.getUserMap().put(user.getId(), user);
} else {
throw new ObjectNotFoundException(MESSAGE_NOT_FOUND);
}
return user;
}
} |
package org.codehaus.groovy.runtime;
import groovy.lang.Closure;
import groovy.lang.GString;
import groovy.lang.GroovyObject;
import groovy.lang.Range;
import groovy.util.CharsetToolkit;
import groovy.util.ClosureComparator;
import groovy.util.OrderBy;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.Writer;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.StringTokenizer;
import java.util.TreeSet;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This class defines all the new groovy methods which appear on normal JDK
* classes inside the Groovy environment. Static methods are used with the
* first parameter the destination class.
*
* @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
* @author Sam Pullara
* @author Rod Cope
* @author Guillaume Laforge
* @version $Revision$
*/
public class DefaultGroovyMethods {
// Class-wide logger (java.util.logging).
private static Logger log = Logger.getLogger(DefaultGroovyMethods.class.getName());
// Cached boxed 1 (pre-autoboxing idiom) and NUL sentinel char.
// NOTE(review): neither constant is referenced in this portion of the file.
private static final Integer ONE = new Integer(1);
private static final char ZERO_CHAR = '\u0000';
/**
 * Generates a detailed dump string of an object showing its class,
 * identity hashCode and the values of all non-static fields, walking
 * the whole superclass chain.
 *
 * @param self the object to dump; "null" is returned for null
 * @return a string of the form {@code <ClassName@hex field=value ...>}
 */
public static String dump(Object self) {
    if (self == null) {
        return "null";
    }
    StringBuffer out = new StringBuffer("<");
    out.append(self.getClass().getName());
    out.append("@");
    out.append(Integer.toHexString(self.hashCode()));
    boolean isGroovy = self instanceof GroovyObject;
    // Walk the class hierarchy so inherited (including private) fields appear too.
    for (Class current = self.getClass(); current != null; current = current.getSuperclass()) {
        Field[] declared = current.getDeclaredFields();
        for (int i = 0; i < declared.length; i++) {
            Field field = declared[i];
            if ((field.getModifiers() & Modifier.STATIC) != 0) {
                continue;
            }
            // The synthetic metaClass field of Groovy objects is noise here.
            if (isGroovy && field.getName().equals("metaClass")) {
                continue;
            }
            field.setAccessible(true);
            out.append(" ").append(field.getName()).append("=");
            try {
                out.append(InvokerHelper.toString(field.get(self)));
            } catch (Exception e) {
                // Render the failure inline rather than aborting the dump.
                out.append(e);
            }
        }
    }
    out.append(">");
    return out.toString();
}
/**
 * Print to a console in interactive format.
 *
 * @param self the object the method is invoked on (not used)
 * @param value the value to print, rendered via InvokerHelper.toString
 */
public static void print(Object self, Object value) {
    System.out.print(InvokerHelper.toString(value));
}
/**
 * Print to a console in interactive format along with a newline.
 *
 * @param self the object the method is invoked on (not used)
 * @param value the value to print, rendered via InvokerHelper.toString
 */
public static void println(Object self, Object value) {
    System.out.println(InvokerHelper.toString(value));
}
/**
 * Returns a String that matches what would be typed into a terminal to
 * create this object. e.g. [1, 'hello'].inspect() -> [1, "hello"]
 *
 * @param self the object to inspect
 * @return the literal-style representation produced by InvokerHelper.inspect
 */
public static String inspect(Object self) {
    return InvokerHelper.inspect(self);
}
/**
 * Print to a console in interactive format.
 *
 * @param self the object to print, rendered via InvokerHelper.toString
 * @param out the PrintWriter used for printing; System.out is used when null
 */
public static void print(Object self, PrintWriter out) {
    PrintWriter target = (out == null) ? new PrintWriter(System.out) : out;
    target.print(InvokerHelper.toString(self));
}
/**
* Print to a console in interactive format
*/
/*
* public static void print(Collection self, PrintWriter out) {
* out.print("["); boolean first = true; for (Iterator iter =
* self.iterator(); iter.hasNext(); ) { if (first) { first = false; } else {
* out.print(", "); } InvokerHelper.invokeMethod(iter.next(), "print",
* out); } out.print("]"); }
*/
/**
 * Print to a console in interactive format along with a newline.
 * The value is printed via a dynamic dispatch to its "print" method,
 * then a newline is written to the writer.
 *
 * @param self the object to print
 * @param out the PrintWriter used for printing; System.out is used when null
 */
public static void println(Object self, PrintWriter out) {
    if (out == null) {
        out = new PrintWriter(System.out);
    }
    InvokerHelper.invokeMethod(self, "print", out);
    out.println();
}
/**
 * Provide a dynamic method invocation method which can be overloaded in
 * classes to implement dynamic proxies easily.
 *
 * @param object the receiver of the dynamic call
 * @param method the name of the method to invoke
 * @param arguments the argument (or argument array) to pass
 * @return whatever the dynamically dispatched method returns
 */
public static Object invokeMethod(Object object, String method, Object arguments) {
    return InvokerHelper.invokeMethod(object, method, arguments);
}
// isCase methods

/**
 * 'Case' implementation for an arbitrary Object: delegates to equals().
 * NOTE(review): throws NullPointerException when caseValue is null — confirm
 * dispatch never routes a null case value here.
 *
 * @param caseValue the value of the switch case
 * @param switchValue the value being switched on
 * @return true if the two values are equal
 */
public static boolean isCase(Object caseValue, Object switchValue) {
    return caseValue.equals(switchValue);
}
/**
 * 'Case' implementation for a String: the switch value is rendered with
 * toString() and compared for equality.
 *
 * @param caseValue the String case value
 * @param switchValue the value being switched on; when null, matches only a null caseValue
 * @return true if the string forms are equal
 */
public static boolean isCase(String caseValue, Object switchValue) {
    if (switchValue == null) {
        return caseValue == null;
    }
    String rendered = switchValue.toString();
    return caseValue.equals(rendered);
}
/**
 * 'Case' implementation for a Class: matches when the switch value is an
 * instance of the class (Class.isInstance, a dynamic instanceof; false for null).
 *
 * @param caseValue the Class case value
 * @param switchValue the value being switched on
 * @return true if switchValue is an instance of caseValue
 */
public static boolean isCase(Class caseValue, Object switchValue) {
    return caseValue.isInstance(switchValue);
}
/**
 * 'Case' implementation for a Collection: matches when the collection
 * contains the switch value (per Collection.contains / equals).
 *
 * @param caseValue the Collection case value
 * @param switchValue the value being switched on
 * @return true if the collection contains switchValue
 */
public static boolean isCase(Collection caseValue, Object switchValue) {
    return caseValue.contains(switchValue);
}
/**
 * 'Case' implementation for a regex Pattern: the switch value's toString()
 * form must match the whole pattern.
 * NOTE(review): throws NullPointerException for a null switchValue — confirm intended.
 *
 * @param caseValue the Pattern case value
 * @param switchValue the value being switched on
 * @return true if the entire string form matches the pattern
 */
public static boolean isCase(Pattern caseValue, Object switchValue) {
    Matcher matcher = caseValue.matcher(switchValue.toString());
    return matcher.matches();
}
// Collection based methods

/**
 * Allows an arbitrary object to be iterated through using a closure.
 *
 * @param self the object over which we iterate
 * @param closure the closure applied on each element found
 */
public static void each(Object self, Closure closure) {
    Iterator elements = InvokerHelper.asIterator(self);
    while (elements.hasNext()) {
        closure.call(elements.next());
    }
}
/**
 * Allows a collection to be iterated through using a closure.
 *
 * @param self the collection over which we iterate
 * @param closure the closure applied on each element of the collection
 */
public static void each(Collection self, Closure closure) {
    Iterator elements = self.iterator();
    while (elements.hasNext()) {
        closure.call(elements.next());
    }
}
/**
 * Allows a map to be iterated through using a closure.
 *
 * @param self the map over which we iterate
 * @param closure the closure applied on each entry of the map; note that the
 *        closure receives Map.Entry objects (from entrySet()), not keys or values
 */
public static void each(Map self, Closure closure) {
    for (Iterator iter = self.entrySet().iterator(); iter.hasNext();) {
        closure.call(iter.next());
    }
}
/**
 * Iterates over every element of an object and checks whether the predicate
 * holds for all of them. Stops at the first non-matching element.
 *
 * @param self the object over which we iterate
 * @param closure the closure predicate used for matching
 * @return true if every item matches the closure predicate
 */
public static boolean every(Object self, Closure closure) {
    Iterator elements = InvokerHelper.asIterator(self);
    while (elements.hasNext()) {
        boolean matched = InvokerHelper.asBool(closure.call(elements.next()));
        if (!matched) {
            return false;
        }
    }
    return true;
}
/**
 * Iterates over every element of an object and checks whether the predicate
 * holds for at least one of them. Stops at the first matching element.
 *
 * @param self the object over which we iterate
 * @param closure the closure predicate used for matching
 * @return true if any item matches the closure predicate
 */
public static boolean any(Object self, Closure closure) {
    for (Iterator iter = InvokerHelper.asIterator(self); iter.hasNext();) {
        if (InvokerHelper.asBool(closure.call(iter.next()))) {
            return true;
        }
    }
    return false;
}
/**
 * Counts the number of occurrences of the given value inside this collection,
 * using Groovy equality (InvokerHelper.compareEqual).
 *
 * @param self the collection in which occurrences are counted
 * @param value the value being counted
 * @return the number of occurrences
 */
public static int count(Collection self, Object value) {
    int answer = 0;
    for (Iterator iter = self.iterator(); iter.hasNext();) {
        if (InvokerHelper.compareEqual(iter.next(), value)) {
            ++answer;
        }
    }
    return answer;
}
/**
 * Maps the values of an object (through an iterator) to new values using the closure as a transformer.
 *
 * @param self the object whose values are mapped
 * @param closure the closure used to map each element
 * @return a List of the mapped values, in iteration order
 */
public static List map(Object self, Closure closure) {
    List answer = new ArrayList();
    for (Iterator iter = InvokerHelper.asIterator(self); iter.hasNext();) {
        answer.add(closure.call(iter.next()));
    }
    return answer;
}
/**
 * Maps the values of a collection to new values using the closure as a transformer.
 *
 * @param self a collection
 * @param closure the closure used for mapping
 * @return a List of the mapped values, in iteration order
 */
public static List map(Collection self, Closure closure) {
    // Pre-sized: the result has exactly one element per input element.
    List mapped = new ArrayList(self.size());
    Iterator elements = self.iterator();
    while (elements.hasNext()) {
        mapped.add(closure.call(elements.next()));
    }
    return mapped;
}
/**
 * Maps the entries of a Map to new values using the closure as a transformer.
 *
 * @param self a Map
 * @param closure the closure used for mapping; it receives Map.Entry objects
 * @return a List of the mapped values
 */
public static List map(Map self, Closure closure) {
    List answer = new ArrayList(self.size());
    for (Iterator iter = self.entrySet().iterator(); iter.hasNext();) {
        answer.add(closure.call(iter.next()));
    }
    return answer;
}
/**
 * Finds the first value matching the closure condition.
 *
 * @param self an Object with an iterator returning its values
 * @param closure a closure condition
 * @return the first matching value, or null if none matches
 */
public static Object find(Object self, Closure closure) {
    for (Iterator iter = InvokerHelper.asIterator(self); iter.hasNext();) {
        Object value = iter.next();
        if (InvokerHelper.asBool(closure.call(value))) {
            return value;
        }
    }
    return null;
}
/**
 * Finds the first value matching the closure condition.
 *
 * @param self a Collection
 * @param closure a closure condition
 * @return the first matching element, or null if none matches
 */
public static Object find(Collection self, Closure closure) {
    Iterator candidates = self.iterator();
    while (candidates.hasNext()) {
        Object candidate = candidates.next();
        boolean matches = InvokerHelper.asBool(closure.call(candidate));
        if (matches) {
            return candidate;
        }
    }
    return null;
}
/**
 * Finds the first map entry matching the closure condition.
 *
 * @param self a Map
 * @param closure a closure condition; it receives Map.Entry objects
 * @return the first matching Map.Entry, or null if none matches
 */
public static Object find(Map self, Closure closure) {
    for (Iterator iter = self.entrySet().iterator(); iter.hasNext();) {
        Object value = iter.next();
        if (InvokerHelper.asBool(closure.call(value))) {
            return value;
        }
    }
    return null;
}
/**
 * Finds all values matching the closure condition.
 *
 * @param self an Object with an Iterator returning its values
 * @param closure a closure condition
 * @return a List of the matching values, in iteration order
 */
public static List findAll(Object self, Closure closure) {
    List answer = new ArrayList();
    for (Iterator iter = InvokerHelper.asIterator(self); iter.hasNext();) {
        Object value = iter.next();
        if (InvokerHelper.asBool(closure.call(value))) {
            answer.add(value);
        }
    }
    return answer;
}
/**
 * Finds all values matching the closure condition.
 *
 * @param self a Collection
 * @param closure a closure condition
 * @return a List of the matching values, in iteration order
 */
public static List findAll(Collection self, Closure closure) {
    // Sized for the worst case where everything matches.
    List matches = new ArrayList(self.size());
    Iterator candidates = self.iterator();
    while (candidates.hasNext()) {
        Object candidate = candidates.next();
        if (InvokerHelper.asBool(closure.call(candidate))) {
            matches.add(candidate);
        }
    }
    return matches;
}
/**
 * Finds all map entries matching the closure condition.
 *
 * @param self a Map
 * @param closure a closure condition applied to each Map.Entry
 * @return a List of the matching Map.Entry objects (entries, not keys —
 *         the code iterates entrySet() and collects the entries themselves)
 */
public static List findAll(Map self, Closure closure) {
    List answer = new ArrayList(self.size());
    for (Iterator iter = self.entrySet().iterator(); iter.hasNext();) {
        Object value = iter.next();
        if (InvokerHelper.asBool(closure.call(value))) {
            answer.add(value);
        }
    }
    return answer;
}
/**
 * Iterates through the given collection, passing in the initial value to
 * the closure along with the current iterated item, then passing into the
 * next iteration the value returned by the previous closure call (a fold).
 *
 * @param self a Collection
 * @param value the initial accumulator value
 * @param closure a two-argument closure (accumulator, item)
 * @return the accumulator after the last iteration
 */
public static Object inject(Collection self, Object value, Closure closure) {
    Object accumulated = value;
    // The argument array is reused across iterations, as in the original.
    Object[] args = new Object[2];
    Iterator elements = self.iterator();
    while (elements.hasNext()) {
        args[0] = accumulated;
        args[1] = elements.next();
        accumulated = closure.call(args);
    }
    return accumulated;
}
/**
 * Concatenates all of the items of the collection together with the given
 * string as a separator; items are rendered via InvokerHelper.toString.
 *
 * @param self a Collection of objects
 * @param separator a String separator
 * @return the joined String (empty for an empty collection)
 */
public static String join(Collection self, String separator) {
    StringBuffer joined = new StringBuffer();
    boolean needSeparator = false;
    Iterator elements = self.iterator();
    while (elements.hasNext()) {
        Object element = elements.next();
        if (needSeparator) {
            joined.append(separator);
        }
        joined.append(InvokerHelper.toString(element));
        needSeparator = true;
    }
    return joined.toString();
}
/**
 * Concatenates all of the items of the String array together with the given
 * string as a separator.
 *
 * @param self an array of String
 * @param separator a String separator
 * @return the joined String (empty for an empty array)
 */
public static String join(String[] self, String separator) {
    StringBuffer joined = new StringBuffer();
    for (int i = 0; i < self.length; i++) {
        // Separator goes before every element except the first.
        if (i > 0) {
            joined.append(separator);
        }
        joined.append(self[i]);
    }
    return joined.toString();
}
/**
 * Selects the maximum value found in the collection, ignoring null
 * elements.
 *
 * @param self a Collection
 * @return the maximum value, or null if the collection is empty or
 *         contains only nulls
 */
public static Object max(Collection self) {
    Object best = null;
    Iterator it = self.iterator();
    while (it.hasNext()) {
        Object candidate = it.next();
        if (candidate == null) {
            continue;
        }
        if (best == null || InvokerHelper.compareGreaterThan(candidate, best)) {
            best = candidate;
        }
    }
    return best;
}
/**
 * Selects the maximum value found in the collection using the given
 * comparator.
 *
 * @param self a Collection
 * @param comparator a Comparator used to order elements
 * @return the maximum value, or null for an empty collection
 */
public static Object max(Collection self, Comparator comparator) {
    Object best = null;
    Iterator it = self.iterator();
    while (it.hasNext()) {
        Object candidate = it.next();
        if (best == null || comparator.compare(candidate, best) > 0) {
            best = candidate;
        }
    }
    return best;
}
/**
 * Selects the minimum value found in the collection, ignoring null
 * elements.
 *
 * @param self a Collection
 * @return the minimum value, or null if the collection is empty or
 *         contains only nulls
 */
public static Object min(Collection self) {
    Object best = null;
    Iterator it = self.iterator();
    while (it.hasNext()) {
        Object candidate = it.next();
        if (candidate == null) {
            continue;
        }
        if (best == null || InvokerHelper.compareLessThan(candidate, best)) {
            best = candidate;
        }
    }
    return best;
}
/**
 * Selects the minimum value found in the collection using the given
 * comparator.
 *
 * @param self a Collection
 * @param comparator a Comparator used to order elements
 * @return the minimum value, or null for an empty collection
 */
public static Object min(Collection self, Comparator comparator) {
    Object best = null;
    Iterator it = self.iterator();
    while (it.hasNext()) {
        Object candidate = it.next();
        if (best == null || comparator.compare(candidate, best) < 0) {
            best = candidate;
        }
    }
    return best;
}
/**
 * Selects the minimum value found in the collection using the given
 * closure as a comparator.
 *
 * @param self a Collection
 * @param closure a closure used as a comparator (wrapped in a
 *                ClosureComparator and delegated to min(Collection, Comparator))
 * @return the minimum value
 */
public static Object min(Collection self, Closure closure) {
    return min(self, new ClosureComparator(closure));
}
/**
 * Selects the maximum value found in the collection using the given
 * closure as a comparator.
 *
 * @param self a Collection
 * @param closure a closure used as a comparator (wrapped in a
 *                ClosureComparator and delegated to max(Collection, Comparator))
 * @return the maximum value
 */
public static Object max(Collection self, Closure closure) {
    return max(self, new ClosureComparator(closure));
}
/**
 * Makes a String look like a Collection by adding support for the size()
 * method.
 *
 * @param text a String
 * @return the length of the String
 */
public static int size(String text) {
    return text.length();
}
/**
 * Makes an Array look like a Collection by adding support for the size()
 * method.
 *
 * @param self an Array of Object
 * @return the size (length) of the Array
 */
public static int size(Object[] self) {
    return self.length;
}
/**
 * Support the subscript operator for CharSequence.
 *
 * @param text a CharSequence
 * @param index the index of the character to get (negative values count
 *              from the end via normaliseIndex)
 * @return a one-character subsequence at the given index
 */
public static CharSequence getAt(CharSequence text, int index) {
    int pos = normaliseIndex(index, text.length());
    return text.subSequence(pos, pos + 1);
}
/**
 * Support the subscript operator for String.
 *
 * @param text a String
 * @param index the index of the character to get (negative values count
 *              from the end via normaliseIndex)
 * @return a one-character String at the given index
 */
public static String getAt(String text, int index) {
    int pos = normaliseIndex(index, text.length());
    return String.valueOf(text.charAt(pos));
}
/**
 * Support the range subscript operator for CharSequence.
 *
 * @param text a CharSequence
 * @param range a Range of (possibly negative) indices, inclusive at both ends
 * @return the subsequence covered by the range
 */
public static CharSequence getAt(CharSequence text, Range range) {
    int length = text.length();
    int start = normaliseIndex(InvokerHelper.asInt(range.getFrom()), length);
    int end = normaliseIndex(InvokerHelper.asInt(range.getTo()), length);
    // a backwards range still selects the same span
    if (start > end) {
        int swap = start;
        start = end;
        end = swap;
    }
    return text.subSequence(start, end + 1);
}
/**
 * Support the range subscript operator for String.
 *
 * @param text a String
 * @param range a Range of (possibly negative) indices, inclusive at both ends
 * @return the substring covered by the range; reversed when the effective
 *         direction of the range is backwards
 */
public static String getAt(String text, Range range) {
    int length = text.length();
    int start = normaliseIndex(InvokerHelper.asInt(range.getFrom()), length);
    int end = normaliseIndex(InvokerHelper.asInt(range.getTo()), length);
    boolean backwards = range.isReverse();
    // normalising can flip the order; swapping flips the direction flag too
    if (start > end) {
        int swap = start;
        start = end;
        end = swap;
        backwards = !backwards;
    }
    String result = text.substring(start, end + 1);
    return backwards ? reverse(result) : result;
}
/**
* Creates a new string which is the reverse (backwards) of this string
*
* @param self a String
* @return a new string with all the characters reversed.
*/
public static String reverse(String self) {
int size = self.length();
StringBuffer buffer = new StringBuffer(size);
for (int i = size -1 ; i >= 0; i
buffer.append(self.charAt(i));
}
return buffer.toString();
}
/**
 * Support the subscript operator for a regex Matcher.
 *
 * @param matcher a Matcher (it is reset before use)
 * @param idx an index; negative values are normalised against groupCount()
 * @return if the pattern defines capture groups, the content of group idx
 *         of the first match; otherwise the text of the (idx+1)-th match
 *         of the whole pattern
 * @throws IllegalStateException if there are fewer matches than idx requires
 */
public static String getAt(Matcher matcher, int idx) {
    matcher.reset();
    idx = normaliseIndex(idx, matcher.groupCount());
    // are we using groups?
    if (matcher.groupCount() > 0) {
        // yes, so return the specified group
        matcher.find();
        return matcher.group(idx);
    }
    else {
        // not using groups, so return the nth
        // occurrence of the pattern
        for (int i = 0; i <= idx; i++) {
            matcher.find();
        }
        return matcher.group();
    }
}
/**
 * Support the range subscript operator for a List.
 *
 * @param self a List
 * @param range a Range of (possibly negative) indices, inclusive at both ends
 * @return a sub-list view covering the range; reversed (as a copy) when the
 *         effective direction of the range is backwards
 */
public static List getAt(List self, Range range) {
    int size = self.size();
    int start = normaliseIndex(InvokerHelper.asInt(range.getFrom()), size);
    int end = normaliseIndex(InvokerHelper.asInt(range.getTo()), size);
    boolean backwards = range.isReverse();
    // normalising can flip the order; swapping flips the direction flag too
    if (start > end) {
        int swap = start;
        start = end;
        end = swap;
        backwards = !backwards;
    }
    // subList's upper bound is exclusive, so step past the inclusive end,
    // clamping at the list size
    end = end + 1;
    if (end > size) {
        end = size;
    }
    List slice = self.subList(start, end);
    return backwards ? reverse(slice) : slice;
}
/**
 * Allows a Collection to be used as the indices on a List.
 *
 * @param self a List
 * @param indices a Collection of indices; each element may be a Range, a
 *                nested List of indices, or a single index
 * @return a new list of the values at the given indices
 */
public static List getAt(List self, Collection indices) {
    List result = new ArrayList(indices.size());
    Iterator it = indices.iterator();
    while (it.hasNext()) {
        Object index = it.next();
        if (index instanceof Range) {
            result.addAll(getAt(self, (Range) index));
        }
        else if (index instanceof List) {
            result.addAll(getAt(self, (List) index));
        }
        else {
            result.add(getAt(self, InvokerHelper.asInt(index)));
        }
    }
    return result;
}
/**
 * Allows a Collection to be used as the indices on an Object array.
 *
 * @param self an Array of Objects
 * @param indices a Collection of indices; each element may be a Range, a
 *                nested Collection of indices, or a single index
 * @return a new list of the values at the given indices
 */
public static List getAt(Object[] self, Collection indices) {
    List result = new ArrayList(indices.size());
    Iterator it = indices.iterator();
    while (it.hasNext()) {
        Object index = it.next();
        if (index instanceof Range) {
            result.addAll(getAt(self, (Range) index));
        }
        else if (index instanceof Collection) {
            result.addAll(getAt(self, (Collection) index));
        }
        else {
            result.add(getAt(self, InvokerHelper.asInt(index)));
        }
    }
    return result;
}
/**
 * Allows a Collection to be used as the indices on a CharSequence.
 *
 * @param self a CharSequence
 * @param indices a Collection of indices; each element may be a Range, a
 *                nested Collection of indices, or a single index
 * @return a String built from the characters at the given indices
 */
public static CharSequence getAt(CharSequence self, Collection indices) {
    StringBuffer result = new StringBuffer();
    Iterator it = indices.iterator();
    while (it.hasNext()) {
        Object index = it.next();
        if (index instanceof Range) {
            result.append(getAt(self, (Range) index));
        }
        else if (index instanceof Collection) {
            result.append(getAt(self, (Collection) index));
        }
        else {
            result.append(getAt(self, InvokerHelper.asInt(index)));
        }
    }
    return result.toString();
}
/**
 * Allows a Collection to be used as the indices on a String.
 *
 * @param self a String
 * @param indices a Collection of indices
 * @return a String of the values at the given indices
 */
public static String getAt(String self, Collection indices) {
    // safe cast: the CharSequence overload builds and returns a String
    return (String) getAt((CharSequence) self, indices);
}
/**
 * Allows a Collection to be used as the indices on a Matcher.
 *
 * @param self a Matcher
 * @param indices a Collection of indices; each element may be a Range, a
 *                nested Collection of indices, or a single index
 * @return a String concatenating the matcher values at the given indices
 */
public static String getAt(Matcher self, Collection indices) {
    StringBuffer result = new StringBuffer();
    Iterator it = indices.iterator();
    while (it.hasNext()) {
        Object index = it.next();
        if (index instanceof Range) {
            result.append(getAt(self, (Range) index));
        }
        else if (index instanceof Collection) {
            result.append(getAt(self, (Collection) index));
        }
        else {
            result.append(getAt(self, InvokerHelper.asInt(index)));
        }
    }
    return result.toString();
}
/**
 * Creates a sub-Map containing the given keys. This method is similar to
 * List.subList() but uses keys rather than index ranges.
 *
 * @param map a Map
 * @param keys a Collection of keys to copy
 * @return a new Map containing the given keys; keys absent from the source
 *         map are included with a null value (matching Map.get semantics)
 */
public static Map subMap(Map map, Collection keys) {
    Map result = new HashMap(keys.size());
    Iterator it = keys.iterator();
    while (it.hasNext()) {
        Object key = it.next();
        result.put(key, map.get(key));
    }
    return result;
}
/**
 * Support the range subscript operator for an Array.
 *
 * @param array an Array of Objects
 * @param range a Range
 * @return a range of a list from the range's from index up to but not
 *         including the ranges's to value
 */
public static List getAt(Object[] array, Range range) {
    // view the array as a List and reuse the List range logic
    List list = Arrays.asList(array);
    return getAt(list, range);
}
/**
 * Support the subscript operator for an Array.
 *
 * @param array an Array of Objects
 * @param idx an index; negative values count from the end via normaliseIndex
 * @return the value at the given index
 */
public static Object getAt(Object[] array, int idx) {
    return array[normaliseIndex(idx, array.length)];
}
/**
 * Support subscript assignment for an Array.
 *
 * @param array an Array of Objects
 * @param idx an index; negative values count from the end via normaliseIndex
 * @param value an Object to put at the given index
 */
public static void putAt(Object[] array, int idx, Object value) {
    array[normaliseIndex(idx, array.length)] = value;
}
/**
 * Allows conversion of arrays into a mutable List.
 *
 * @param array an Array of Objects
 * @return a new mutable List containing the array elements in order
 */
public static List toList(Object[] array) {
    // Arrays.asList is only a fixed-size view of the array; copying it into
    // a real ArrayList (replacing the original hand-rolled element loop)
    // gives callers a fully mutable list in one step.
    return new ArrayList(Arrays.asList(array));
}
/**
 * Support the subscript operator for a List.
 *
 * @param self a List
 * @param idx an index; negative values count from the end via normaliseIndex
 * @return the value at the given index, or null when the normalised index
 *         is beyond the end of the list
 */
public static Object getAt(List self, int idx) {
    int size = self.size();
    int pos = normaliseIndex(idx, size);
    return (pos < size) ? self.get(pos) : null;
}
/**
 * A helper method to allow lists to work with subscript assignment.
 *
 * @param self a List
 * @param idx an index; negative values count from the end via normaliseIndex
 * @param value the value to put at the given index; when the index lies past
 *              the end of the list, the gap is padded with nulls first
 */
public static void putAt(List self, int idx, Object value) {
    int size = self.size();
    int pos = normaliseIndex(idx, size);
    if (pos < size) {
        self.set(pos, value);
        return;
    }
    // pad with nulls up to the target position, then append the value
    for (int i = size; i < pos; i++) {
        self.add(i, null);
    }
    self.add(pos, value);
}
/**
 * Support the subscript operator for a Map.
 *
 * @param self a Map
 * @param key an Object as a key for the map
 * @return the value corresponding to the given key, or null if absent
 */
public static Object getAt(Map self, Object key) {
    return self.get(key);
}
/**
 * A helper method to allow maps to work with subscript assignment.
 *
 * @param self a Map
 * @param key an Object as a key for the map
 * @param value the value to associate with the key
 * @return the previous value associated with the key, or null if there was
 *         none (standard Map.put semantics; the original javadoc wrongly
 *         described the new value)
 */
public static Object putAt(Map self, Object key, Object value) {
    return self.put(key, value);
}
/**
 * Normalises a possibly-negative index against a container size: negative
 * indices count back from the end (-1 is the last element).
 *
 * @param i the raw index
 * @param size the container size
 * @return the equivalent non-negative index (unchanged if already >= 0)
 */
protected static int normaliseIndex(int i, int size) {
    // FIX: with size <= 0 the original loop never terminated for a negative
    // index; return the index unchanged so the subsequent element access
    // fails fast instead of hanging.
    if (size <= 0) {
        return i;
    }
    while (i < 0) {
        i += size;
    }
    return i;
}
/**
 * Support the property subscript operator for a Collection: collects the
 * named property from every item.
 *
 * @param coll a Collection
 * @param property the property name to read from each item
 * @return a List of property values; collection-valued properties are
 *         flattened into the result rather than nested
 */
public static List getAt(Collection coll, String property) {
    List result = new ArrayList(coll.size());
    Iterator it = coll.iterator();
    while (it.hasNext()) {
        Object propValue = InvokerHelper.getProperty(it.next(), property);
        if (propValue instanceof Collection) {
            result.addAll((Collection) propValue);
        }
        else {
            result.add(propValue);
        }
    }
    return result;
}
/**
 * A convenience method for creating an immutable (unmodifiable view) map.
 *
 * @param self a Map
 * @return an immutable Map backed by the original
 */
public static Map immutable(Map self) {
    return Collections.unmodifiableMap(self);
}
/**
 * A convenience method for creating an immutable sorted map.
 *
 * @param self a SortedMap
 * @return an immutable SortedMap backed by the original
 */
public static SortedMap immutable(SortedMap self) {
    return Collections.unmodifiableSortedMap(self);
}
/**
 * A convenience method for creating an immutable list.
 *
 * @param self a List
 * @return an immutable List backed by the original
 */
public static List immutable(List self) {
    return Collections.unmodifiableList(self);
}
/**
 * A convenience method for creating an immutable set.
 *
 * @param self a Set
 * @return an immutable Set backed by the original
 */
public static Set immutable(Set self) {
    return Collections.unmodifiableSet(self);
}
/**
 * A convenience method for creating an immutable sorted set.
 *
 * @param self a SortedSet
 * @return an immutable SortedSet backed by the original
 */
public static SortedSet immutable(SortedSet self) {
    return Collections.unmodifiableSortedSet(self);
}
/**
 * A convenience method for sorting a List in place using natural ordering.
 *
 * @param self a List to be sorted (mutated)
 * @return the same List, now sorted
 */
public static List sort(List self) {
    Collections.sort(self);
    return self;
}
/**
 * A convenience method for sorting a List in place with a specific
 * comparator.
 *
 * @param self a List to be sorted (mutated)
 * @param comparator a Comparator used for the comparison
 * @return the same List, now sorted
 */
public static List sort(List self, Comparator comparator) {
    Collections.sort(self, comparator);
    return self;
}
/**
 * A convenience method for sorting a List in place using a closure as a
 * comparator.
 *
 * @param self a List to be sorted (mutated)
 * @param closure a Closure used for ordering: a one-parameter closure is
 *                treated as a key extractor (via OrderBy), a two-parameter
 *                closure as a comparator (via ClosureComparator)
 * @return the same List, now sorted
 */
public static List sort(List self, Closure closure) {
    // use a comparator of one item or two
    Class[] params = closure.getParameterTypes();
    if (params.length == 1) {
        Collections.sort(self, new OrderBy(closure));
    }
    else {
        Collections.sort(self, new ClosureComparator(closure));
    }
    return self;
}
/**
 * Reverses the list into a new list; the original is left untouched.
 *
 * @param self a List
 * @return a new List with the elements in reverse order
 */
public static List reverse(List self) {
    int n = self.size();
    List result = new ArrayList(n);
    for (int i = n - 1; i >= 0; i--) {
        result.add(self.get(i));
    }
    return result;
}
/**
 * Create a List as the concatenation of both Collections.
 *
 * @param left the left Collection
 * @param right the right Collection
 * @return a new List containing all elements of left followed by all
 *         elements of right
 */
public static List plus(Collection left, Collection right) {
    List result = new ArrayList(left);
    result.addAll(right);
    return result;
}
/**
 * Create a List as the concatenation of a Collection and a single Object.
 *
 * @param left a Collection
 * @param right an object to append
 * @return a new List containing all elements of left followed by right
 */
public static List plus(Collection left, Object right) {
    List result = new ArrayList(left);
    result.add(right);
    return result;
}
/**
 * Repeats the collection's contents the given number of times.
 *
 * @param self a Collection
 * @param factor how many copies of the collection to concatenate; 0 yields
 *               an empty list
 * @return a new List containing factor copies of self, in order
 */
public static List multiply(Collection self, Number factor) {
    int count = factor.intValue();
    List result = new ArrayList(self.size() * count);
    int copies = 0;
    while (copies < count) {
        result.addAll(self);
        copies++;
    }
    return result;
}
/**
 * Computes the intersection of a List and a Collection: the elements of
 * right that also occur in left.
 *
 * @param left the List whose membership is tested
 * @param right the Collection whose elements are checked, in order
 * @return a new List of right's elements that are contained in left
 */
public static List intersect(List left, Collection right) {
    if (left.size() == 0) {
        return new ArrayList();
    }
    // when both collections hold a single comparable type, a TreeSet gives
    // log(n) lookups instead of linear scans of the list
    boolean canSort = sameType(new Collection[] { left, right });
    ArrayList result = new ArrayList();
    Collection lookup = canSort ? (Collection) new TreeSet(left) : left;
    Iterator it = right.iterator();
    while (it.hasNext()) {
        Object candidate = it.next();
        if (lookup.contains(candidate)) {
            result.add(candidate);
        }
    }
    return result;
}
/**
 * Creates a new List with all elements of removeMe removed from self,
 * de-duplicating the surviving elements (each distinct value appears once).
 *
 * @param self the List to subtract from (not mutated)
 * @param removeMe the Collection of elements to remove
 * @return a new List of the remaining distinct elements
 */
public static List minus(List self, Collection removeMe) {
    if (self.size() == 0)
        return new ArrayList();
    boolean nlgnSort = sameType(new Collection[] { self, removeMe });
    //we can't use the same tactic as for intersection
    //since AbstractCollection only does a remove on the first
    //element it encounter.
    if (nlgnSort) {
        //n*log(n) version
        Set answer = new TreeSet(self);
        answer.removeAll(removeMe);
        return new ArrayList(answer);
    }
    else {
        //n*n version
        List tmpAnswer = new LinkedList(self);
        // drop every element of tmpAnswer that equals any element of removeMe
        for (Iterator iter = tmpAnswer.iterator(); iter.hasNext();) {
            Object element = iter.next();
            //boolean removeElement = false;
            for (Iterator iterator = removeMe.iterator(); iterator.hasNext();) {
                if (element.equals(iterator.next())) {
                    iter.remove();
                }
            }
        }
        //remove duplicates
        //can't use treeset since the base classes are different
        // null out later duplicates of each element, then copy survivors
        List answer = new LinkedList();
        Object[] array = (Object[]) tmpAnswer.toArray(new Object[tmpAnswer.size()]);
        for (int i = 0; i < array.length; i++) {
            if (array[i] != null) {
                for (int j = i + 1; j < array.length; j++) {
                    if (array[i].equals(array[j])) {
                        array[j] = null;
                    }
                }
                answer.add(array[i]);
            }
        }
        return new ArrayList(answer);
    }
}
/**
 * Recursively flattens nested Collections (and Map values) inside the list
 * into a single flat List.
 *
 * @param self the List to flatten (not mutated)
 * @return a new flat List of all leaf elements, in encounter order
 */
public static List flatten(List self) {
    return new ArrayList(flatten(self, new LinkedList()));
}
/**
 * Recursive worker: appends every leaf element of the collection to addTo.
 */
private static List flatten(Collection elements, List addTo) {
    for (Iterator it = elements.iterator(); it.hasNext();) {
        Object item = it.next();
        if (item instanceof Collection) {
            flatten((Collection) item, addTo);
        }
        else if (item instanceof Map) {
            // maps contribute their values, flattened in turn
            flatten(((Map) item).values(), addTo);
        }
        else {
            addTo.add(item);
        }
    }
    return addTo;
}
/**
 * Overloads the left shift operator to provide an append mechanism to add
 * things to a collection.
 *
 * @param self the Collection to append to (mutated)
 * @param value the value to add
 * @return the same Collection, enabling chained << calls
 */
public static Collection leftShift(Collection self, Object value) {
    self.add(value);
    return self;
}
/**
 * Overloads the left shift operator to provide an append mechanism to add
 * things to a String buffer.
 *
 * @return the same StringBuffer, enabling chained << calls
 */
public static StringBuffer leftShift(StringBuffer self, Object value) {
    self.append(value);
    return self;
}
/**
 * Overloads the left shift operator to provide an append mechanism to add
 * things to a writer.
 *
 * @return the same PrintWriter, enabling chained << calls
 */
public static PrintWriter leftShift(PrintWriter self, Object value) {
    self.print(value);
    return self;
}
/**
 * Overloads the left shift operator to provide an append mechanism to add
 * things to a stream.
 *
 * @return the same PrintStream, enabling chained << calls
 */
public static PrintStream leftShift(PrintStream self, Object value) {
    self.print(value);
    return self;
}
/**
 * Determines whether every element across all the given collections shares
 * a common base class (all Numbers count as one type), which makes them
 * safe to place in a single TreeSet for sorting.
 *
 * @param cols the collections to inspect
 * @return true if all elements are instances of one base class (or all
 *         collections are empty)
 */
private static boolean sameType(Collection[] cols) {
    List combined = new LinkedList();
    for (int i = 0; i < cols.length; i++) {
        combined.addAll(cols[i]);
    }
    if (combined.isEmpty()) {
        return true;
    }
    // derive the base class from the first element; Numbers are grouped
    // under the common Number supertype
    Object sample = combined.get(0);
    Class baseClass = (sample instanceof Number) ? Number.class : sample.getClass();
    for (int i = 0; i < cols.length; i++) {
        for (Iterator it = cols[i].iterator(); it.hasNext();) {
            if (!baseClass.isInstance(it.next())) {
                return false;
            }
        }
    }
    return true;
}
// Primitive type array methods
//
// These overloads give the subscript operator ([]) support on each
// primitive array type. They all delegate to the reflective helpers
// primitiveArrayGet / primitiveArrayPut defined later in this class.

// --- single-index reads: negative indices count from the end ---
public static Object getAt(byte[] array, int idx) {
    return primitiveArrayGet(array, idx);
}
public static Object getAt(char[] array, int idx) {
    return primitiveArrayGet(array, idx);
}
public static Object getAt(short[] array, int idx) {
    return primitiveArrayGet(array, idx);
}
public static Object getAt(int[] array, int idx) {
    return primitiveArrayGet(array, idx);
}
public static Object getAt(long[] array, int idx) {
    return primitiveArrayGet(array, idx);
}
public static Object getAt(float[] array, int idx) {
    return primitiveArrayGet(array, idx);
}
public static Object getAt(double[] array, int idx) {
    return primitiveArrayGet(array, idx);
}
// --- range reads: return a List of the boxed elements in the range ---
public static Object getAt(byte[] array, Range range) {
    return primitiveArrayGet(array, range);
}
public static Object getAt(char[] array, Range range) {
    return primitiveArrayGet(array, range);
}
public static Object getAt(short[] array, Range range) {
    return primitiveArrayGet(array, range);
}
public static Object getAt(int[] array, Range range) {
    return primitiveArrayGet(array, range);
}
public static Object getAt(long[] array, Range range) {
    return primitiveArrayGet(array, range);
}
public static Object getAt(float[] array, Range range) {
    return primitiveArrayGet(array, range);
}
public static Object getAt(double[] array, Range range) {
    return primitiveArrayGet(array, range);
}
// --- collection-of-indices reads: each element may be an index, Range or
// nested List of indices ---
public static Object getAt(byte[] array, Collection indices) {
    return primitiveArrayGet(array, indices);
}
public static Object getAt(char[] array, Collection indices) {
    return primitiveArrayGet(array, indices);
}
public static Object getAt(short[] array, Collection indices) {
    return primitiveArrayGet(array, indices);
}
public static Object getAt(int[] array, Collection indices) {
    return primitiveArrayGet(array, indices);
}
public static Object getAt(long[] array, Collection indices) {
    return primitiveArrayGet(array, indices);
}
public static Object getAt(float[] array, Collection indices) {
    return primitiveArrayGet(array, indices);
}
public static Object getAt(double[] array, Collection indices) {
    return primitiveArrayGet(array, indices);
}
// --- single-index writes: negative indices count from the end ---
public static void putAt(byte[] array, int idx, Object newValue) {
    primitiveArrayPut(array, idx, newValue);
}
public static void putAt(char[] array, int idx, Object newValue) {
    primitiveArrayPut(array, idx, newValue);
}
public static void putAt(short[] array, int idx, Object newValue) {
    primitiveArrayPut(array, idx, newValue);
}
public static void putAt(int[] array, int idx, Object newValue) {
    primitiveArrayPut(array, idx, newValue);
}
public static void putAt(long[] array, int idx, Object newValue) {
    primitiveArrayPut(array, idx, newValue);
}
public static void putAt(float[] array, int idx, Object newValue) {
    primitiveArrayPut(array, idx, newValue);
}
public static void putAt(double[] array, int idx, Object newValue) {
    primitiveArrayPut(array, idx, newValue);
}
// size() support for primitive arrays. The element type is statically known
// in each overload, so the array's own length field is used directly instead
// of the original reflective Array.getLength() call (same result, no
// reflection overhead).

/** @return the number of elements in the array */
public static int size(byte[] array) {
    return array.length;
}
/** @return the number of elements in the array */
public static int size(char[] array) {
    return array.length;
}
/** @return the number of elements in the array */
public static int size(short[] array) {
    return array.length;
}
/** @return the number of elements in the array */
public static int size(int[] array) {
    return array.length;
}
/** @return the number of elements in the array */
public static int size(long[] array) {
    return array.length;
}
/** @return the number of elements in the array */
public static int size(float[] array) {
    return array.length;
}
/** @return the number of elements in the array */
public static int size(double[] array) {
    return array.length;
}
// toList() support for primitive arrays: each overload delegates to
// InvokerHelper, which boxes the elements into a List.

/** @return a List of the boxed elements of the array */
public static List toList(byte[] array) {
    return InvokerHelper.primitiveArrayToList(array);
}
/** @return a List of the boxed elements of the array */
public static List toList(char[] array) {
    return InvokerHelper.primitiveArrayToList(array);
}
/** @return a List of the boxed elements of the array */
public static List toList(short[] array) {
    return InvokerHelper.primitiveArrayToList(array);
}
/** @return a List of the boxed elements of the array */
public static List toList(int[] array) {
    return InvokerHelper.primitiveArrayToList(array);
}
/** @return a List of the boxed elements of the array */
public static List toList(long[] array) {
    return InvokerHelper.primitiveArrayToList(array);
}
/** @return a List of the boxed elements of the array */
public static List toList(float[] array) {
    return InvokerHelper.primitiveArrayToList(array);
}
/** @return a List of the boxed elements of the array */
public static List toList(double[] array) {
    return InvokerHelper.primitiveArrayToList(array);
}
/**
 * Implements the getAt(int) method for primitive type arrays.
 *
 * @param array a primitive array (accessed reflectively)
 * @param idx an index; negative values count from the end via normaliseIndex
 * @return the boxed element at the given index
 */
protected static Object primitiveArrayGet(Object array, int idx) {
    return Array.get(array, normaliseIndex(idx, Array.getLength(array)));
}
/**
 * Implements the getAt(Range) method for primitive type arrays.
 *
 * @param array a primitive array (accessed reflectively)
 * @param range a Range of indices to read
 * @return a List of the boxed elements at the range's indices, in order
 */
protected static List primitiveArrayGet(Object array, Range range) {
    List answer = new ArrayList();
    for (Iterator iter = range.iterator(); iter.hasNext();) {
        int idx = InvokerHelper.asInt(iter.next());
        answer.add(primitiveArrayGet(array, idx));
    }
    return answer;
}
/**
 * Implements the getAt(Collection) method for primitive type arrays.
 *
 * @param self a primitive array (accessed reflectively)
 * @param indices a Collection whose elements may each be a Range, a nested
 *                List of indices, or a single index
 * @return a List of the boxed elements at all resolved indices
 */
protected static List primitiveArrayGet(Object self, Collection indices) {
    List answer = new ArrayList();
    for (Iterator iter = indices.iterator(); iter.hasNext();) {
        Object value = iter.next();
        if (value instanceof Range) {
            answer.addAll(primitiveArrayGet(self, (Range) value));
        }
        else if (value instanceof List) {
            answer.addAll(primitiveArrayGet(self, (List) value));
        }
        else {
            int idx = InvokerHelper.asInt(value);
            answer.add(primitiveArrayGet(self, idx));
        }
    }
    return answer;
}
/**
 * Implements the putAt(int idx) method for primitive type arrays.
 *
 * @param array a primitive array (mutated reflectively)
 * @param idx an index; negative values count from the end via normaliseIndex
 * @param newValue the boxed value to store (unboxed by Array.set)
 */
protected static void primitiveArrayPut(Object array, int idx, Object newValue) {
    Array.set(array, normaliseIndex(idx, Array.getLength(array)), newValue);
}
// String methods

/**
 * Tokenizes the string using the characters of the given token string as
 * delimiters.
 *
 * @param self the String to tokenize
 * @param token the delimiter characters (each char is a delimiter)
 * @return a List of the tokens
 */
public static Object tokenize(String self, String token) {
    return InvokerHelper.asList(new StringTokenizer(self, token));
}
/**
 * Tokenizes the string using default whitespace delimiters.
 *
 * @param self the String to tokenize
 * @return a List of the whitespace-separated tokens
 */
public static Object tokenize(String self) {
    return InvokerHelper.asList(new StringTokenizer(self));
}
/**
 * Appends the string form of a value to a String.
 *
 * @param left the String prefix
 * @param value the value to append; rendered via the null-safe toString
 *              helper, so null becomes the text "null"
 * @return the concatenated String
 */
public static String plus(String left, Object value) {
    //return left + value;
    return left + toString(value);
}
/**
 * Removes the first occurrence of the given value's string form from this
 * String.
 *
 * @param left the String to remove from
 * @param value the value whose string form is removed (null-safe: null
 *              removes the literal text "null")
 * @return a new String with the first literal occurrence removed, or the
 *         original String if it does not occur
 */
public static String minus(String left, Object value) {
    String text = toString(value);
    // FIX: the original used replaceFirst(), which treats the value as a
    // regular expression — minus("a+b", "+") threw PatternSyntaxException.
    // Remove the first literal occurrence instead.
    int idx = left.indexOf(text);
    if (idx < 0) {
        return left;
    }
    return left.substring(0, idx) + left.substring(idx + text.length());
}
/**
 * Provide an implementation of contains() like Collection to make Strings
 * more polymorphic. This method is not required on JDK 1.5 onwards.
 *
 * @param self the String to search in
 * @param text the text to look for
 * @return true if this string contains the given text
 */
public static boolean contains(String self, String text) {
    return self.indexOf(text) != -1;
}
/**
 * Counts the occurrences (including overlapping ones) of the given string
 * inside this String.
 *
 * @param self the String to search in
 * @param text the text to count; an empty string yields 0
 * @return the number of (possibly overlapping) occurrences
 */
public static int count(String self, String text) {
    // FIX: an empty search string matches at every position and never
    // advances past the end, so the original loop never terminated.
    if (text.length() == 0) {
        return 0;
    }
    int answer = 0;
    // advance one position past each hit, so overlapping matches count
    for (int idx = 0; true; idx++) {
        idx = self.indexOf(text, idx);
        if (idx >= 0) {
            ++answer;
        }
        else {
            break;
        }
    }
    return answer;
}
/**
* Increments the last digit in the given string, resetting
* it and moving onto the next digit if increasing the digit
* no longer becomes a letter or digit.
*
* @return
*/
public static String increment(String self) {
StringBuffer buffer = new StringBuffer(self);
char firstCh = firstCharacter();
for (int idx = buffer.length() - 1; idx >= 0; idx
char ch = increment(buffer.charAt(idx));
if (ch != ZERO_CHAR) {
buffer.setCharAt(idx, ch);
break;
}
else {
// lets find the first char
if (idx == 0) {
buffer.append("1");
}
else {
buffer.setCharAt(idx, firstCh);
}
}
}
return buffer.toString();
}
/**
* Decrements the last digit in the given string, resetting
* it and moving onto the next digit if increasing the digit
* no longer becomes a letter or digit.
*
* @return
*/
public static String decrement(String self) {
StringBuffer buffer = new StringBuffer(self);
char lastCh = lastCharacter();
for (int idx = buffer.length() - 1; idx >= 0; idx
char ch = decrement(buffer.charAt(idx));
if (ch != ZERO_CHAR) {
buffer.setCharAt(idx, ch);
break;
}
else {
if (idx == 0) {
return null;
}
else {
// lets find the first char
buffer.setCharAt(idx, lastCh);
}
}
}
return buffer.toString();
}
/**
 * Increments a single character, returning ZERO_CHAR as the roll-over
 * sentinel when the next character is not a letter or digit.
 */
private static char increment(char ch) {
    if (Character.isLetterOrDigit(++ch)) {
        return ch;
    }
    else {
        return ZERO_CHAR;
    }
}
/**
 * Decrements a single character, returning ZERO_CHAR as the roll-under
 * sentinel when the previous character is not a letter or digit.
 */
private static char decrement(char ch) {
    if (Character.isLetterOrDigit(--ch)) {
        return ch;
    }
    else {
        return ZERO_CHAR;
    }
}
/**
 * @return the first character used when a letter rolls over when
 *         incrementing — i.e. the lowest char above ZERO_CHAR for which
 *         Character.isLetterOrDigit is true
 */
private static char firstCharacter() {
    char ch = ZERO_CHAR;
    while (!Character.isLetterOrDigit(ch)) {
        ch++;
    }
    return ch;
}
/**
 * @return the last character used when a letter rolls over when
 *         decrementing — i.e. the highest char of the contiguous
 *         letter-or-digit run starting at firstCharacter()
 */
private static char lastCharacter() {
    char ch = firstCharacter();
    // advance to the first non-letter/digit, then step back one
    while (Character.isLetterOrDigit(++ch));
    return --ch;
}
/**
 * Repeats this String the given number of times.
 *
 * @param self the String to repeat
 * @param factor the repetition count; must be 1 or greater
 * @return self concatenated with itself factor times
 * @throws IllegalArgumentException if factor is less than 1
 */
public static String multiply(String self, Number factor) {
    int count = factor.intValue();
    if (count < 1) {
        throw new IllegalArgumentException(
            "multiply() should be called with a number of 1 or greater not: " + count);
    }
    // start with one copy and append the remaining count-1 copies
    StringBuffer result = new StringBuffer(self);
    for (int copies = count - 1; copies > 0; copies--) {
        result.append(self);
    }
    return result.toString();
}
// Number based methods

/**
 * Null-safe string conversion: null renders as the text "null".
 */
protected static String toString(Object value) {
    if (value == null) {
        return "null";
    }
    return value.toString();
}
/**
 * Increments a Number by one, delegating to plus() so the widest operand
 * type is preserved.
 *
 * @param self the Number to increment
 * @return self + 1
 */
public static Number increment(Number self) {
    return plus(self, ONE);
}
/**
 * Decrements a Number by one, delegating to minus() so the widest operand
 * type is preserved.
 *
 * @param self the Number to decrement
 * @return self - 1
 */
public static Number decrement(Number self) {
    return minus(self, ONE);
}
/**
 * Adds two Numbers, coercing to the wider of the two representations
 * (floating point beats long beats int).
 *
 * @param left a Number
 * @param right a Number
 * @return the sum as Double, Long or Integer depending on operand types
 */
public static Number plus(Number left, Number right) {
    /** @todo maybe a double dispatch thing to handle new large numbers? */
    if (isFloatingPoint(left) || isFloatingPoint(right)) {
        return new Double(left.doubleValue() + right.doubleValue());
    }
    else if (isLong(left) || isLong(right)) {
        return new Long(left.longValue() + right.longValue());
    }
    else {
        return new Integer(left.intValue() + right.intValue());
    }
}
/**
 * Compares two Numbers, coercing to the wider of the two representations.
 *
 * @param left a Number
 * @param right a Number
 * @return -1, 0 or 1 as left is less than, equal to, or greater than right
 */
public static int compareTo(Number left, Number right) {
    /** @todo maybe a double dispatch thing to handle new large numbers? */
    // FIX: the original compared via subtraction ("diff > 0"), which
    // overflows near Long.MIN_VALUE / Integer.MIN_VALUE and silently
    // returned the wrong sign; use direct comparisons instead.
    if (isFloatingPoint(left) || isFloatingPoint(right)) {
        double l = left.doubleValue();
        double r = right.doubleValue();
        // NOTE: NaN operands compare as equal here (all comparisons false)
        return (l < r) ? -1 : ((l > r) ? 1 : 0);
    }
    else if (isLong(left) || isLong(right)) {
        long l = left.longValue();
        long r = right.longValue();
        return (l < r) ? -1 : ((l > r) ? 1 : 0);
    }
    else {
        int l = left.intValue();
        int r = right.intValue();
        return (l < r) ? -1 : ((l > r) ? 1 : 0);
    }
}
/**
 * Subtracts two Numbers, coercing to the wider of the two representations.
 *
 * @param left a Number
 * @param right a Number
 * @return the difference as Double, Long or Integer depending on operand types
 */
public static Number minus(Number left, Number right) {
    if (isFloatingPoint(left) || isFloatingPoint(right)) {
        return new Double(left.doubleValue() - right.doubleValue());
    }
    else if (isLong(left) || isLong(right)) {
        return new Long(left.longValue() - right.longValue());
    }
    else {
        return new Integer(left.intValue() - right.intValue());
    }
}
/**
 * Multiplies two Numbers, coercing to the wider of the two representations.
 *
 * @param left a Number
 * @param right a Number
 * @return the product as Double, Long or Integer depending on operand types
 */
public static Number multiply(Number left, Number right) {
    if (isFloatingPoint(left) || isFloatingPoint(right)) {
        return new Double(left.doubleValue() * right.doubleValue());
    }
    else if (isLong(left) || isLong(right)) {
        return new Long(left.longValue() * right.longValue());
    }
    else {
        return new Integer(left.intValue() * right.intValue());
    }
}
/**
 * Raises a Number to the given exponent via Math.pow.
 *
 * @param self the base
 * @param exponent the exponent
 * @return a Double when either operand is floating point or the result is
 *         below 1; a Long when either operand is long or the result exceeds
 *         Integer.MAX_VALUE; otherwise an Integer
 */
public static Number power(Number self, Number exponent) {
    double answer = Math.pow(self.doubleValue(), exponent.doubleValue());
    if (isFloatingPoint(self) || isFloatingPoint(exponent) || answer < 1) {
        return new Double(answer);
    }
    else if (isLong(self) || isLong(exponent) || answer > Integer.MAX_VALUE) {
        return new Long((long) answer);
    }
    else {
        return new Integer((int) answer);
    }
}
/**
 * Divides two Numbers using double arithmetic, so integer division does
 * not truncate.
 *
 * @param left the dividend
 * @param right the divisor
 * @return the quotient as a Double
 */
public static Number divide(Number left, Number right) {
    // lets use double for division?
    return new Double(left.doubleValue() / right.doubleValue());
}
/**
 * @param number a Number
 * @return true if the Number is a Long instance
 */
public static boolean isLong(Number number) {
    return number instanceof Long;
}
/**
 * @param number a Number
 * @return true if the Number is a Float or Double instance
 */
public static boolean isFloatingPoint(Number number) {
    return number instanceof Float || number instanceof Double;
}
/**
* Iterates a number of times
*/
public static void times(Number self, Closure closure) {
for (int i = 0, size = self.intValue(); i < size; i++) {
closure.call(new Integer(i));
}
}
/**
 * Invokes the closure once for each int from this number's int value up to
 * and including the target number's int value.
 *
 * @param self the starting value (inclusive)
 * @param to the ending value (inclusive)
 * @param closure invoked with each value in turn
 */
public static void upto(Number self, Number to, Closure closure) {
    int limit = to.intValue();
    for (int value = self.intValue(); value <= limit; value++) {
        closure.call(new Integer(value));
    }
}
/**
 * Iterates from this number up to the given number using a step increment,
 * invoking the closure with each value.
 *
 * @param self the starting value (inclusive)
 * @param to the ending value (exclusive)
 * @param stepNumber the increment added after each iteration
 * @param closure invoked with each value in turn
 */
public static void step(Number self, Number to, Number stepNumber, Closure closure) {
    // NOTE(review): the upper bound is exclusive here (i < size) while
    // upto() is inclusive (i <= size) — confirm the asymmetry is intended.
    // NOTE(review): a zero or negative stepNumber never terminates this loop.
    for (int i = self.intValue(), size = to.intValue(), step = stepNumber.intValue(); i < size; i += step) {
        closure.call(new Integer(i));
    }
}
/**
 * Returns the absolute value of the number's int value.
 *
 * @param number the number to take the absolute value of
 * @return the absolute value as an int
 */
public static int abs(Number number) {
    int value = number.intValue();
    return Math.abs(value);
}
/**
 * Returns the absolute value of the given Long.
 *
 * @param number the value to take the absolute value of
 * @return the absolute value as a long
 */
public static long abs(Long number) {
    long value = number.longValue();
    return Math.abs(value);
}
/**
 * Returns the absolute value of the given Float.
 *
 * @param number the value to take the absolute value of
 * @return the absolute value as a float
 */
public static float abs(Float number) {
    float value = number.floatValue();
    return Math.abs(value);
}
/**
 * Returns the absolute value of the given Double.
 *
 * @param number the value to take the absolute value of
 * @return the absolute value as a double
 */
public static double abs(Double number) {
    double value = number.doubleValue();
    return Math.abs(value);
}
/**
 * Rounds the given Float to the nearest int (half-up, per Math.round).
 *
 * @param number the value to round
 * @return the rounded int value
 */
public static int round(Float number) {
    float value = number.floatValue();
    return Math.round(value);
}
/**
 * Rounds the given Double to the nearest long (half-up, per Math.round).
 *
 * @param number the value to round
 * @return the rounded long value
 */
public static long round(Double number) {
    double value = number.doubleValue();
    return Math.round(value);
}
/**
 * Parses the given decimal string into an Integer.
 *
 * @param self the string to parse
 * @return the parsed Integer
 */
public static Integer toInteger(String self) {
    return Integer.valueOf(self, 10);
}
/**
 * Converts the given number to an Integer by truncating to its int value.
 *
 * @param self the number to convert
 * @return the number's int value boxed as an Integer
 */
public static Integer toInteger(Number self) {
    // Integer.valueOf replaces the deprecated boxing constructor and can
    // reuse cached instances for small values.
    return Integer.valueOf(self.intValue());
}
// File based methods
/**
 * Iterates through the given file line by line, invoking the closure with
 * each line.
 *
 * @param self the file to read
 * @param closure invoked once per line
 * @throws IOException if the file cannot be opened or read
 */
public static void eachLine(File self, Closure closure) throws IOException {
    // newReader() builds a charset-detecting reader over the file
    eachLine(newReader(self), closure);
}
/**
 * Iterates through the given reader line by line, invoking the closure
 * with each line. The reader is always closed afterwards, even when the
 * closure throws.
 *
 * @param self the reader to consume (wrapped in a BufferedReader if needed)
 * @param closure invoked once per line
 * @throws IOException if reading or closing fails
 */
public static void eachLine(Reader self, Closure closure) throws IOException {
    BufferedReader br = (self instanceof BufferedReader)
            ? (BufferedReader) self
            : new BufferedReader(self);
    try {
        String line;
        while ((line = br.readLine()) != null) {
            closure.call(line);
        }
        // Close on the success path so a close failure propagates to the
        // caller; null the reference so the finally block skips a re-close.
        BufferedReader temp = br;
        br = null;
        temp.close();
    }
    finally {
        // Failure path (read error or an exception thrown by the closure):
        // previously the reader leaked when the closure threw a runtime
        // exception. Close quietly so the original exception propagates.
        if (br != null) {
            try {
                br.close();
            }
            catch (IOException ignored) {
                // ignore as we're already throwing
            }
        }
    }
}
/**
 * Iterates through the given file line by line, splitting each line on the
 * separator and passing the pieces to the closure.
 *
 * @param self the file to read
 * @param sep the separator regex passed to String.split
 * @param closure invoked once per line
 * @throws IOException if the file cannot be opened or read
 */
public static void splitEachLine(File self, String sep, Closure closure) throws IOException {
    // newReader() builds a charset-detecting reader over the file
    splitEachLine(newReader(self), sep, closure);
}
/**
 * Iterates through the given reader line by line, splitting each line on
 * the separator and passing the pieces to the closure. The reader is
 * always closed afterwards, even when the closure throws.
 *
 * @param self the reader to consume (wrapped in a BufferedReader if needed)
 * @param sep the separator regex passed to String.split
 * @param closure invoked once per line
 * @throws IOException if reading or closing fails
 */
public static void splitEachLine(Reader self, String sep, Closure closure) throws IOException {
    BufferedReader br = (self instanceof BufferedReader)
            ? (BufferedReader) self
            : new BufferedReader(self);
    List args = new ArrayList();
    try {
        String line;
        while ((line = br.readLine()) != null) {
            List vals = Arrays.asList(line.split(sep));
            // NOTE(review): the split values are handed over wrapped in a
            // single-element argument list, so the closure receives [vals]
            // rather than vals — confirm callers expect this nesting.
            args.clear();
            args.add(vals);
            closure.call(args);
        }
        // Close on the success path so a close failure propagates to the
        // caller; null the reference so the finally block skips a re-close.
        BufferedReader temp = br;
        br = null;
        temp.close();
    }
    finally {
        // Failure path: previously the reader leaked when the closure threw
        // a runtime exception. Close quietly so the original exception
        // propagates.
        if (br != null) {
            try {
                br.close();
            }
            catch (IOException ignored) {
                // ignore as we're already throwing
            }
        }
    }
}
/**
 * Reads the file into a list of Strings, one per line.
 *
 * @param file the file to read
 * @return the lines of the file, in order
 * @throws IOException if the file cannot be opened or read
 */
public static List readLines(File file) throws IOException {
    // The adapter collects every line passed to it into a list
    IteratorClosureAdapter closure = new IteratorClosureAdapter(file);
    eachLine(file, closure);
    return closure.asList();
}
/**
 * Reads the reader into a list of Strings, one per line. The reader is
 * closed by the underlying eachLine call.
 *
 * @param reader the reader to consume
 * @return the lines read, in order
 * @throws IOException if reading fails
 */
public static List readLines(Reader reader) throws IOException {
    // The adapter collects every line passed to it into a list
    IteratorClosureAdapter closure = new IteratorClosureAdapter(reader);
    eachLine(reader, closure);
    return closure.asList();
}
/**
 * Invokes the closure for each entry in the given directory.
 *
 * @param self the directory to list
 * @param closure invoked once per directory entry
 */
public static void eachFile(File self, Closure closure) {
    File[] files = self.listFiles();
    // File.listFiles() returns null when self is not a directory or an I/O
    // error occurs; treat that as "no entries" instead of throwing an NPE.
    if (files == null) {
        return;
    }
    for (int i = 0; i < files.length; i++) {
        closure.call(files[i]);
    }
}
/**
 * Helper method to create a buffered reader for a file.
 *
 * @param file the file to read
 * @return a reader over the file's contents
 * @throws IOException if the file cannot be opened
 */
public static BufferedReader newReader(File file) throws IOException {
    // NOTE(review): CharsetToolkit presumably sniffs the file's charset
    // before building the reader — confirm against its implementation.
    CharsetToolkit toolkit = new CharsetToolkit(file);
    return toolkit.getReader();
}
/**
 * Helper method to create a new BufferedReader for a file, pass it into
 * the closure, and ensure it is closed again afterwards.
 *
 * @param file the file to read
 * @param closure the closure the reader is passed into
 * @throws IOException if the file cannot be opened or closing fails
 */
public static void withReader(File file, Closure closure) throws IOException {
    withReader(newReader(file), closure);
}
/**
 * Helper method to create a buffered output stream over the given file,
 * truncating any existing content.
 *
 * @param file the file to write to
 * @return a buffered stream writing to the file
 * @throws IOException if the file cannot be opened for writing
 */
public static BufferedOutputStream newOutputStream(File file) throws IOException {
    FileOutputStream fileStream = new FileOutputStream(file);
    return new BufferedOutputStream(fileStream);
}
/**
 * Helper method to create a new OutputStream for a file, pass it into the
 * closure, and ensure it is closed again afterwards.
 *
 * @param file the file to write to
 * @param closure the closure the stream is passed into
 * @throws IOException if the file cannot be opened or closing fails
 */
public static void withOutputStream(File file, Closure closure) throws IOException {
    withStream(newOutputStream(file), closure);
}
/**
 * Helper method to create a buffered writer for a file.
 *
 * @param file the file to write to
 * @return a buffered writer over the file
 * @throws IOException if the file cannot be opened for writing
 */
public static BufferedWriter newWriter(File file) throws IOException {
    // NOTE(review): FileWriter encodes with the platform default charset,
    // while newReader() detects the charset — confirm the asymmetry is OK.
    return new BufferedWriter(new FileWriter(file));
}
/**
 * Helper method to create a new BufferedWriter for a file, pass it into
 * the closure, and ensure it is closed again afterwards.
 *
 * @param file the file to write to
 * @param closure the closure the writer is passed into
 * @throws IOException if the file cannot be opened or closing fails
 */
public static void withWriter(File file, Closure closure) throws IOException {
    withWriter(newWriter(file), closure);
}
/**
 * Helper method to create a new PrintWriter for a file, layered over a
 * buffered writer.
 *
 * @param file the file to write to
 * @return a PrintWriter over the file
 * @throws IOException if the file cannot be opened for writing
 */
public static PrintWriter newPrintWriter(File file) throws IOException {
    return new PrintWriter(newWriter(file));
}
/**
 * Helper method to create a new PrintWriter for a file, pass it into the
 * closure, and ensure it is closed again afterwards.
 *
 * @param file the file to write to
 * @param closure the closure the PrintWriter is passed into
 * @throws IOException if the file cannot be opened or closing fails
 */
public static void withPrintWriter(File file, Closure closure) throws IOException {
    // PrintWriter is a Writer, so the generic withWriter cleanup applies
    withWriter(newPrintWriter(file), closure);
}
/**
 * Passes the writer to the closure and guarantees it is closed afterwards,
 * whether or not the closure throws.
 *
 * @param writer the writer which is used and then closed
 * @param closure the closure that the writer is passed into
 * @throws IOException if the post-call close fails
 */
public static void withWriter(Writer writer, Closure closure) throws IOException {
    try {
        closure.call(writer);
        // Close eagerly on success so a close failure propagates; null the
        // reference first so the finally block does not re-close it.
        Writer toClose = writer;
        writer = null;
        toClose.close();
    }
    finally {
        // Reached with a non-null writer only when the closure threw; close
        // quietly so the closure's exception is the one that propagates.
        if (writer != null) {
            try {
                writer.close();
            }
            catch (IOException e) {
                log.warning("Caught exception closing writer: " + e);
            }
        }
    }
}
/**
 * Passes the reader to the closure and guarantees it is closed afterwards,
 * whether or not the closure throws.
 *
 * @param reader the reader which is used and then closed
 * @param closure the closure that the reader is passed into
 * @throws IOException if the post-call close fails
 */
public static void withReader(Reader reader, Closure closure) throws IOException {
    try {
        closure.call(reader);
        // lets try close the reader & throw the exception if it fails
        // but not try to reclose it in the finally block
        Reader temp = reader;
        reader = null;
        temp.close();
    }
    finally {
        if (reader != null) {
            try {
                reader.close();
            }
            catch (IOException e) {
                // message previously said "writer" — copy-paste from the
                // Writer variant of this helper
                log.warning("Caught exception closing reader: " + e);
            }
        }
    }
}
/**
 * Passes the input stream to the closure and guarantees it is closed
 * afterwards, whether or not the closure throws.
 *
 * @param stream the stream which is used and then closed
 * @param closure the closure that the stream is passed into
 * @throws IOException if the post-call close fails
 */
public static void withStream(InputStream stream, Closure closure) throws IOException {
    try {
        closure.call(stream);
        // Close eagerly on success so a close failure propagates; null the
        // reference first so the finally block does not re-close it.
        InputStream toClose = stream;
        stream = null;
        toClose.close();
    }
    finally {
        // Reached with a non-null stream only when the closure threw; close
        // quietly so the closure's exception is the one that propagates.
        if (stream != null) {
            try {
                stream.close();
            }
            catch (IOException e) {
                log.warning("Caught exception closing stream: " + e);
            }
        }
    }
}
/**
 * Passes the output stream to the closure and guarantees it is closed
 * afterwards, whether or not the closure throws.
 *
 * @param stream the stream which is used and then closed
 * @param closure the closure that the stream is passed into
 * @throws IOException if the post-call close fails
 */
public static void withStream(OutputStream stream, Closure closure) throws IOException {
    try {
        closure.call(stream);
        // Close eagerly on success so a close failure propagates; null the
        // reference first so the finally block does not re-close it.
        OutputStream toClose = stream;
        stream = null;
        toClose.close();
    }
    finally {
        // Reached with a non-null stream only when the closure threw; close
        // quietly so the closure's exception is the one that propagates.
        if (stream != null) {
            try {
                stream.close();
            }
            catch (IOException e) {
                log.warning("Caught exception closing stream: " + e);
            }
        }
    }
}
/**
 * Helper method to create a buffered input stream for a file.
 *
 * @param file the file to read
 * @return a buffered stream over the file's contents
 * @throws FileNotFoundException if the file does not exist or cannot be opened
 */
public static BufferedInputStream newInputStream(File file) throws FileNotFoundException {
    FileInputStream fileStream = new FileInputStream(file);
    return new BufferedInputStream(fileStream);
}
/**
 * Iterates through the given file byte by byte, invoking the closure with
 * each byte. The stream is always closed afterwards, even when the closure
 * throws.
 *
 * @param self the file to read
 * @param closure invoked once per byte
 * @throws IOException if reading or closing fails
 */
public static void eachByte(File self, Closure closure) throws IOException {
    BufferedInputStream is = newInputStream(self);
    try {
        int b;
        while ((b = is.read()) != -1) {
            // Byte.valueOf replaces the deprecated boxing constructor
            closure.call(Byte.valueOf((byte) b));
        }
        // Close on the success path so a close failure propagates to the
        // caller; null the reference so the finally block skips a re-close.
        BufferedInputStream temp = is;
        is = null;
        temp.close();
    }
    finally {
        // Failure path (read error or an exception thrown by the closure):
        // previously the stream leaked when the closure threw a runtime
        // exception. Close quietly so the original exception propagates.
        if (is != null) {
            try {
                is.close();
            }
            catch (Exception ignored) {
                // ignore as we're already throwing
            }
        }
    }
}
/**
 * Reads the file into a list of boxed Bytes, one per byte.
 *
 * @param file the file to read
 * @return the file's bytes, in order
 * @throws IOException if the file cannot be opened or read
 */
public static List readBytes(File file) throws IOException {
    // The adapter collects every byte passed to it into a list
    IteratorClosureAdapter closure = new IteratorClosureAdapter(file);
    eachByte(file, closure);
    return closure.asList();
}
// SQL based methods
/**
 * Iterates through the result set of an SQL query passing the result set
 * into the closure. The GString's embedded values become positional
 * parameters of a PreparedStatement, so values are never concatenated into
 * the SQL text.
 *
 * @param connection the JDBC connection to prepare the statement on
 * @param gstring the query; each embedded value becomes a '?' placeholder
 * @param closure invoked once with the open ResultSet
 * @throws SQLException if preparing, binding or executing the query fails
 */
public static void query(Connection connection, GString gstring, Closure closure) throws SQLException {
    ResultSet results = null;
    // lets turn the expression into an SQL string
    String sql = null;
    String[] text = gstring.getStrings();
    if (text.length == 1) {
        // no embedded values at all — the query is the literal text
        sql = text[0];
    }
    else {
        // interleave a '?' placeholder between each pair of text fragments;
        // there is exactly one value between consecutive fragments
        StringBuffer buffer = new StringBuffer(text[0]);
        for (int i = 1; i < text.length; i++) {
            buffer.append("?");
            buffer.append(text[i]);
        }
        sql = buffer.toString();
    }
    PreparedStatement statement = connection.prepareStatement(sql);
    try {
        // lets bind the values to the statement (JDBC parameters are 1-based)
        Object[] values = gstring.getValues();
        for (int i = 0; i < values.length; i++) {
            statement.setObject(i + 1, values[i]);
        }
        results = statement.executeQuery();
        closure.call(results);
    }
    finally {
        // close the ResultSet before its Statement; swallow close failures
        // so any exception from the query/closure propagates instead
        if (results != null) {
            try {
                results.close();
            }
            catch (SQLException e) {
                // ignore
            }
        }
        try {
            statement.close();
        }
        catch (SQLException e) {
            // ignore
        }
    }
}
/**
 * Converts the given String into a List of single-character Strings.
 *
 * @param self the string to explode
 * @return a list containing one single-character String per character
 */
public static List toList(String self) {
    int length = self.length();
    List result = new ArrayList(length);
    for (int index = 0; index < length; index++) {
        result.add(String.valueOf(self.charAt(index)));
    }
    return result;
}
} |
package org.openhab.binding.fritzbox.internal;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.TriggerBuilder.newTrigger;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.Socket;
import java.util.Collection;
import java.util.Dictionary;
import java.util.HashMap;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.net.telnet.TelnetClient;
import org.openhab.binding.fritzbox.FritzboxBindingProvider;
import org.openhab.core.binding.AbstractActiveBinding;
import org.openhab.core.binding.BindingProvider;
import org.openhab.core.events.EventPublisher;
import org.openhab.core.items.Item;
import org.openhab.core.library.items.NumberItem;
import org.openhab.core.library.items.StringItem;
import org.openhab.core.library.items.SwitchItem;
import org.openhab.core.library.types.DecimalType;
import org.openhab.core.library.types.OnOffType;
import org.openhab.core.library.types.StringType;
import org.openhab.core.types.Command;
import org.openhab.library.tel.types.CallType;
import org.osgi.service.cm.ConfigurationException;
import org.osgi.service.cm.ManagedService;
import org.quartz.CronScheduleBuilder;
import org.quartz.CronTrigger;
import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The FritzBox binding connects to a AVM FritzBox on the monitor port 1012 and
 * listens to event notifications from this box. There are event for incoming
 * and outgoing calls, as well as for connections and disconnections.
 *
 * @author Kai Kreuzer
 * @since 0.7.0
 */
public class FritzboxBinding extends
        AbstractActiveBinding<FritzboxBindingProvider> implements
        ManagedService {

    // Telnet command strings (writes) keyed by binding type.
    // NOTE(review): duplicated in TelnetCommandThread.commandMap below —
    // consider consolidating.
    private static HashMap<String, String> commandMap = new HashMap<String, String>();
    // Telnet query strings (reads) keyed by binding type.
    private static HashMap<String, String> queryMap = new HashMap<String, String>();

    static {
        commandMap.put(FritzboxBindingProvider.TYPE_DECT,
                "ctlmgr_ctl w dect settings/enabled");
        commandMap.put(FritzboxBindingProvider.TYPE_WLAN,
                "ctlmgr_ctl w wlan settings/ap_enabled");
        queryMap.put(FritzboxBindingProvider.TYPE_DECT,
                "ctlmgr_ctl r dect settings/enabled");
        queryMap.put(FritzboxBindingProvider.TYPE_WLAN,
                "ctlmgr_ctl r wlan settings/ap_enabled");
    }

    @Override
    public void bindingChanged(BindingProvider provider, String itemName) {
        super.bindingChanged(provider, itemName);
        // (re)start or stop the monitor thread depending on remaining bindings
        conditionalDeActivate();
    }

    // Activates the binding when at least one binding exists, deactivates
    // it otherwise.
    private void conditionalDeActivate() {
        logger.info("Fritzbox conditional deActivate: {}", bindingsExist());
        if (bindingsExist()) {
            activate();
        } else {
            deactivate();
        }
    }

    private static final Logger logger = LoggerFactory
            .getLogger(FritzboxBinding.class);

    protected static final int MONITOR_PORT = 1012;

    /** the current thread instance that is listening to the FritzBox */
    protected static MonitorThread monitorThread = null;

    /* The IP address to connect to */
    // NOTE(review): configuration lives in static fields, so only one
    // FritzBox per JVM is supported — confirm this is intended.
    protected static String ip;

    /* The password of the FritzBox to access via Telnet */
    protected static String password;

    /**
     * Reference to this instance to be used with the reconnection job which is
     * static.
     */
    private static FritzboxBinding INSTANCE;

    public FritzboxBinding() {
        INSTANCE = this;
    }

    public void activate() {
        super.activate();
        setProperlyConfigured(true);
        // if bundle is already configured, launch the monitor thread right away
        if (ip != null) {
            reconnect();
        }
    }

    public void deactivate() {
        if (monitorThread != null) {
            monitorThread.interrupt();
        }
        monitorThread = null;
    }

    @Override
    public void internalReceiveCommand(String itemName, Command command) {
        // commands can only be sent once a Telnet password is configured
        if (password != null) {
            String type = null;
            // determine the binding type from the first provider knowing the item
            for (FritzboxBindingProvider provider : providers) {
                type = provider.getType(itemName);
                if (type != null) {
                    break;
                }
            }
            logger.info("Fritzbox type: {}", type);
            if (type == null)
                return;
            // send asynchronously so the event bus is not blocked by Telnet I/O
            TelnetCommandThread thread = new TelnetCommandThread(type, command);
            thread.start();
        }
    }

    /**
     * {@inheritDoc}
     */
    @SuppressWarnings("rawtypes")
    public void updated(Dictionary config) throws ConfigurationException {
        if (config != null) {
            String ip = (String) config.get("ip");
            if (StringUtils.isNotBlank(ip)) {
                if (!ip.equals(FritzboxBinding.ip)) {
                    // only do something if the ip has changed
                    FritzboxBinding.ip = ip;
                    conditionalDeActivate();
                    // schedule a daily reconnection as sometimes the FritzBox
                    // stops sending data
                    // and thus blocks the monitor thread
                    try {
                        Scheduler sched = StdSchedulerFactory
                                .getDefaultScheduler();
                        JobDetail job = newJob(ReconnectJob.class)
                                .withIdentity("Reconnect", "FritzBox").build();
                        CronTrigger trigger = newTrigger()
                                .withIdentity("Reconnect", "FritzBox")
                                .withSchedule(
                                        CronScheduleBuilder
                                                .cronSchedule("0 0 0 * * ?"))
                                .build();
                        sched.scheduleJob(job, trigger);
                        logger.debug("Scheduled a daily reconnection to FritzBox on {}:{}", ip, MONITOR_PORT);
                    } catch (SchedulerException e) {
                        logger.warn("Could not create daily reconnection job", e);
                    }
                }
            }
            String password = (String) config.get("password");
            if (StringUtils.isNotBlank(password)) {
                FritzboxBinding.password = password;
            }
        }
    }

    // Replaces any running monitor thread with a freshly started one.
    protected void reconnect() {
        if (monitorThread != null) {
            // let's end the old thread
            monitorThread.interrupt();
            monitorThread = null;
        }
        // create a new thread for listening to the FritzBox
        monitorThread = new MonitorThread(this.eventPublisher, this.providers);
        monitorThread.start();
    }

    // One-shot worker that logs in via Telnet and issues a single
    // state-changing command, so the caller is never blocked.
    private static class TelnetCommandThread extends Thread {

        private static HashMap<String, String> commandMap = new HashMap<String, String>();

        static {
            commandMap.put(FritzboxBindingProvider.TYPE_DECT,
                    "ctlmgr_ctl w dect settings/enabled");
            commandMap.put(FritzboxBindingProvider.TYPE_WLAN,
                    "ctlmgr_ctl w wlan settings/ap_enabled");
        }

        public TelnetCommandThread(String type, Command command) {
            super();
            this.type = type;
            this.command = command;
        }

        private String type;

        private Command command;

        @Override
        public void run() {
            try {
                TelnetClient client = new TelnetClient();
                client.connect(ip);
                int state = 0;
                if (command == OnOffType.ON)
                    state = 1;
                String cmdString = null;
                if (commandMap.containsKey(type)) {
                    cmdString = commandMap.get(type) + " " + state;
                } else if (type.startsWith("tam")) {
                    cmdString = "ctlmgr_ctl w tam settings/"
                            + type.toUpperCase() + "/Active " + state;
                } else if (type.startsWith("cmd")) {
                    // binding config carries raw "ON=..."/"OFF=..." command
                    // strings; extract the one matching the requested state
                    int on = type.indexOf("ON=");
                    int off = type.indexOf("OFF=");
                    if (state == 0) {
                        cmdString = type.substring(off + 4,
                                on < off ? type.length() : on);
                    } else {
                        cmdString = type.substring(on + 3,
                                off < on ? type.length() : off);
                    }
                    cmdString = cmdString.trim();
                }
                /*
                 * This is a approach with receive/send in serial way. This
                 * could be done via a sperate thread but for just sending one
                 * command it is not necessary
                 */
                receive(client); // password:
                send(client, password);
                receive(client); // welcome text
                send(client, cmdString);
                Thread.sleep(1000L); // response not needed - may be interesting
                // for reading status
                client.disconnect();
            } catch (Exception e) {
                logger.warn("Error processing command", e);
            }
        }

        // Writes one CRLF-terminated line to the Telnet session.
        private void send(TelnetClient client, String data) {
            logger.trace("Sending data ({})...", data);
            try {
                data += "\r\n";
                client.getOutputStream().write(data.getBytes());
                client.getOutputStream().flush();
            } catch (IOException e) {
                logger.warn("Error sending data", e);
            }
        }

        // Polls the Telnet session until no more data is immediately
        // available; returns the text read, or null on error.
        private String receive(TelnetClient client) {
            StringBuffer strBuffer;
            try {
                strBuffer = new StringBuffer();
                byte[] buf = new byte[4096];
                int len = 0;
                Thread.sleep(750L);
                // NOTE(review): InputStream.read() signals end-of-stream with
                // -1, not 0 — on EOF this loop would call
                // new String(buf, 0, -1) and throw. Confirm and compare
                // against -1 instead.
                while ((len = client.getInputStream().read(buf)) != 0) {
                    strBuffer.append(new String(buf, 0, len));
                    Thread.sleep(750L);
                    if (client.getInputStream().available() == 0)
                        break;
                }
                return strBuffer.toString();
            } catch (Exception e) {
                logger.warn("Error receiving data", e);
            }
            return null;
        }
    }

    /**
     * This is the thread that does the real work
     *
     * @author Kai Kreuzer
     *
     */
    private static class MonitorThread extends Thread {

        /** the active TCP connection */
        private Socket connection;

        /** flag to notify the thread to terminate */
        private boolean interrupted = false;

        /** retry interval in ms, if connection fails */
        private long waitBeforeRetry = 60000L;

        private EventPublisher eventPublisher;

        private Collection<FritzboxBindingProvider> providers;

        public MonitorThread(EventPublisher eventPublisher,
                Collection<FritzboxBindingProvider> providers) {
            this.eventPublisher = eventPublisher;
            this.providers = providers;
        }

        /**
         * Notifies the thread to terminate itself. The current connection will
         * be closed.
         */
        // NOTE(review): this hides Thread.interrupt() without calling
        // super.interrupt(), so sleep() calls in run() are not woken up and
        // termination may be delayed — confirm this is intended.
        public void interrupt() {
            this.interrupted = true;
            if (connection != null) {
                try {
                    connection.close();
                } catch (IOException e) {
                    logger.warn("Existing connection to FritzBox cannot be closed", e);
                }
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void run() {
            while (!interrupted) {
                while (ip == null) {
                    // if we don't have an IP, let's wait
                    try {
                        sleep(1000L);
                    } catch (InterruptedException e) {
                        interrupted = true;
                        break;
                    }
                }
                if (ip != null) {
                    BufferedReader reader = null;
                    try {
                        logger.info("Attempting connection to FritzBox on {}:{}...", ip, MONITOR_PORT);
                        connection = new Socket(ip, MONITOR_PORT);
                        reader = new BufferedReader(new InputStreamReader(
                                connection.getInputStream()));
                        // reset the retry interval
                        waitBeforeRetry = 60000L;
                    } catch (Exception e) {
                        logger.warn("Error attempting to connect to FritzBox. Retrying in " + waitBeforeRetry / 1000L + "s.", e);
                        try {
                            Thread.sleep(waitBeforeRetry);
                        } catch (InterruptedException ex) {
                            interrupted = true;
                        }
                        // wait another more minute the next time
                        waitBeforeRetry += 60000L;
                    }
                    if (reader != null) {
                        logger.info("Connected to FritzBox on {}:{}", ip, MONITOR_PORT);
                        while (!interrupted) {
                            try {
                                String line = reader.readLine();
                                if (line != null) {
                                    MonitorEvent event = parseMonitorEvent(line);
                                    processMonitorEvent(event);
                                    try {
                                        // wait a moment, so that rules can be
                                        // processed
                                        // see
                                        sleep(100L);
                                    } catch (InterruptedException e) {
                                        // NOTE(review): swallowed without
                                        // re-interrupting — confirm intended
                                    }
                                }
                            } catch (IOException e) {
                                logger.error("Lost connection to FritzBox", e);
                                break;
                            }
                        }
                    }
                }
            }
        }

        /**
         * Parses the string that was received from the FritzBox
         *
         * @param line
         *            the received string
         * @return the parse result
         */
        private MonitorEvent parseMonitorEvent(String line) {
            // events are semicolon-separated; the field layout after the
            // first three common fields depends on the event type
            String[] sections = line.split(";");
            MonitorEvent event = new MonitorEvent();
            event.timestamp = sections[0];
            event.eventType = sections[1];
            event.connectionId = sections[2];
            if (event.eventType.equals("RING")) {
                event.externalNo = sections[3];
                event.internalNo = sections[4];
                event.connectionType = sections[5];
            } else if (event.eventType.equals("CONNECT")) {
                event.line = sections[3];
                event.externalNo = sections[4];
            } else if (event.eventType.equals("CALL")) {
                event.line = sections[3];
                event.internalNo = sections[4];
                event.externalNo = sections[5];
                event.connectionType = sections[6];
            }
            return event;
        }

        /**
         * Processes a monitor event.
         *
         * @param event
         *            the event to process
         */
        private void processMonitorEvent(MonitorEvent event) {
            if (event.eventType.equals("RING")) {
                handleEventType(event, FritzboxBindingProvider.TYPE_INBOUND);
            }
            if (event.eventType.equals("CALL")) {
                handleEventType(event, FritzboxBindingProvider.TYPE_OUTBOUND);
            }
            // connect/disconnect events affect all three binding types
            if (event.eventType.equals("CONNECT")
                    || event.eventType.equals("DISCONNECT")) {
                handleEventType(event, FritzboxBindingProvider.TYPE_INBOUND);
                handleEventType(event, FritzboxBindingProvider.TYPE_ACTIVE);
                handleEventType(event, FritzboxBindingProvider.TYPE_OUTBOUND);
            }
        }

        /**
         * Processes a monitor event for a given binding type
         *
         * @param event
         *            the monitor event to process
         * @param bindingType
         *            the binding type of the items to process
         */
        private void handleEventType(MonitorEvent event, String bindingType) {
            for (FritzboxBindingProvider provider : providers) {
                for (String itemName : provider
                        .getItemNamesForType(bindingType)) {
                    Class<? extends Item> itemType = provider
                            .getItemType(itemName);
                    org.openhab.core.types.State state = null;
                    // switch items receive ON/OFF; other items a CallType
                    if (event.eventType.equals("DISCONNECT")) {
                        state = itemType.isAssignableFrom(SwitchItem.class) ? OnOffType.OFF
                                : CallType.EMPTY;
                    } else if (event.eventType.equals("CONNECT")) {
                        if (bindingType
                                .equals(FritzboxBindingProvider.TYPE_ACTIVE)) {
                            state = itemType.isAssignableFrom(SwitchItem.class) ? OnOffType.ON
                                    : new CallType(event.externalNo, event.line);
                        } else {
                            state = itemType.isAssignableFrom(SwitchItem.class) ? OnOffType.OFF
                                    : CallType.EMPTY;
                        }
                    } else if (event.eventType.equals("RING")
                            && bindingType
                                    .equals(FritzboxBindingProvider.TYPE_INBOUND)) {
                        state = itemType.isAssignableFrom(SwitchItem.class) ? OnOffType.ON
                                : new CallType(event.externalNo,
                                        event.internalNo);
                    } else if (event.eventType.equals("CALL")
                            && bindingType
                                    .equals(FritzboxBindingProvider.TYPE_OUTBOUND)) {
                        state = itemType.isAssignableFrom(SwitchItem.class) ? OnOffType.ON
                                : new CallType(event.internalNo,
                                        event.externalNo);
                    }
                    if (state != null) {
                        eventPublisher.postUpdate(itemName, state);
                    }
                }
            }
        }

        /**
         * Class representing a monitor event received from the FritzBox. Not
         * all attributes are used for the moment, but might be useful for
         * future extensions.
         *
         * @author Kai Kreuzer
         *
         */
        @SuppressWarnings("unused")
        private static class MonitorEvent {
            public String timestamp;
            public String eventType;
            public String connectionId;
            public String externalNo;
            public String internalNo;
            public String connectionType;
            public String line;
        }
    }

    /**
     * A quartz scheduler job to simply do a reconnection to the FritzBox.
     */
    public static class ReconnectJob implements Job {

        public void execute(JobExecutionContext arg0)
                throws JobExecutionException {
            INSTANCE.conditionalDeActivate();
        }
    }

    @Override
    protected void execute() {
        // periodic refresh: poll the state of all bound items via Telnet
        try {
            TelnetClient client = new TelnetClient();
            client.connect(ip);
            receive(client);
            send(client, password);
            receive(client);
            for (FritzboxBindingProvider provider : providers) {
                for (String item : provider.getItemNames()) {
                    String query = null;
                    String type = provider.getType(item);
                    if (queryMap.containsKey(type)) {
                        query = queryMap.get(type);
                    } else if (type.startsWith("tam")) {
                        query = "ctlmgr_ctl r tam settings/"
                                + type.toUpperCase() + "/Active";
                    } else if (type.startsWith("query")) {
                        query = type.substring(type.indexOf(":") + 1).trim();
                    } else
                        continue;
                    send(client, query);
                    String answer = receive(client);
                    String[] lines = answer.split("\r\n");
                    if (lines.length >= 2) {
                        // NOTE(review): assumes the value is on the second
                        // line (first line being the echoed query) — confirm
                        answer = lines[1].trim();
                    }
                    Class<? extends Item> itemType = provider.getItemType(item);
                    org.openhab.core.types.State state = null;
                    if (itemType.isAssignableFrom(SwitchItem.class)) {
                        if (answer.equals("1"))
                            state = OnOffType.ON;
                        else
                            state = OnOffType.OFF;
                    } else if (itemType.isAssignableFrom(NumberItem.class)) {
                        state = new DecimalType(answer);
                    } else if (itemType.isAssignableFrom(StringItem.class)) {
                        state = new StringType(answer);
                    }
                    if (state != null)
                        eventPublisher.postUpdate(item, state);
                }
            }
            client.disconnect();
        } catch (Exception e) {
            logger.warn("Could not get item state", e);
        }
    }

    @Override
    protected long getRefreshInterval() {
        return 60000L;
    }

    @Override
    protected String getName() {
        return "FritzBox refresh Service";
    }

    /**
     * Send line via Telnet to FritzBox
     *
     * @param client
     *            the telnet client
     * @param data
     *            the data to send
     */
    private static void send(TelnetClient client, String data) {
        try {
            data += "\r\n";
            client.getOutputStream().write(data.getBytes());
            client.getOutputStream().flush();
        } catch (IOException e) {
            logger.warn("Error sending data", e);
        }
    }

    /**
     * Receive answer from FritzBox - careful! This blocks if there is no answer
     * from FritzBox
     *
     * @param client
     *            the telnet client
     * @return the received text, or null if an error occurred
     */
    private static String receive(TelnetClient client) {
        StringBuffer strBuffer;
        try {
            strBuffer = new StringBuffer();
            byte[] buf = new byte[4096];
            int len = 0;
            Thread.sleep(750L);
            // NOTE(review): read() signals end-of-stream with -1, not 0 —
            // same pattern as TelnetCommandThread.receive(); confirm the
            // termination condition.
            while ((len = client.getInputStream().read(buf)) != 0) {
                strBuffer.append(new String(buf, 0, len));
                Thread.sleep(750L);
                if (client.getInputStream().available() == 0)
                    break;
            }
            return strBuffer.toString();
        } catch (Exception e) {
            logger.warn("Error receiving data", e);
        }
        return null;
    }
}
package tools.vitruv.framework.tuid;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.change.impl.ChangeDescriptionImpl;
import tools.vitruv.framework.util.bridges.EcoreBridge;
/**
* A {@link HierarchicalTuidCalculatorAndResolver} that creates the Tuid for
* each {@link EObject} by finding the first attribute from the list that the
* EObject possesses and returning "{attribute name}={attribute value}".
*
* @see DefaultTuidCalculatorAndResolver
* @author Dominik Werle
*/
public class AttributeTuidCalculatorAndResolver extends HierarchicalTuidCalculatorAndResolver<EObject> {
protected final List<String> attributeNames;
public AttributeTuidCalculatorAndResolver(final String tuidPrefix, final String... attributeNames) {
super(tuidPrefix);
this.attributeNames = Arrays.asList(attributeNames);
}
public AttributeTuidCalculatorAndResolver(final String tuidPrefix, final List<String> attributeNames) {
super(tuidPrefix);
this.attributeNames = new ArrayList<String>(attributeNames);
}
@Override
protected Class<EObject> getRootObjectClass() {
return EObject.class;
}
@Override
protected boolean hasId(final EObject obj, final String indidivualId) throws IllegalArgumentException {
return indidivualId.equals(calculateIndividualTuidDelegator(obj));
}
@Override
protected String calculateIndividualTuidDelegator(final EObject obj) throws IllegalArgumentException {
for (String attributeName : this.attributeNames) {
final String attributeValue = EcoreBridge.getStringValueOfAttribute(obj, attributeName);
if (null != attributeValue) {
String subTuid = (obj.eContainingFeature() == null || obj.eContainer() instanceof ChangeDescriptionImpl ? "<root>"
: obj.eContainingFeature().getName()) + SUBDIVIDER + obj.eClass().getName() + SUBDIVIDER
+ attributeName + "=" + attributeValue;
return subTuid;
} else {
EStructuralFeature idFeature = obj.eClass().getEStructuralFeature(attributeName);
if (idFeature != null && !obj.eIsSet(idFeature)) {
return attributeName;
}
}
}
throw new RuntimeException(
"None of '" + String.join("', '", this.attributeNames) + "' found for eObject '" + obj + "'");
}
} |
package gov.nih.nci.cagrid.data.ui.creation;
import gov.nih.nci.cagrid.common.Utils;
import gov.nih.nci.cagrid.common.portal.ErrorDialog;
import gov.nih.nci.cagrid.common.portal.PortalUtils;
import gov.nih.nci.cagrid.data.ExtensionDataUtils;
import gov.nih.nci.cagrid.data.extension.Data;
import gov.nih.nci.cagrid.data.extension.ServiceFeatures;
import gov.nih.nci.cagrid.data.ui.cacore.AppserviceConfigPanel;
import gov.nih.nci.cagrid.data.ui.cacore.ClientJarSelectionPanel;
import gov.nih.nci.cagrid.data.ui.cacore.CoreDsIntroPanel;
import gov.nih.nci.cagrid.data.ui.cacore.DomainModelPanel;
import gov.nih.nci.cagrid.data.ui.cacore.SchemaTypesPanel;
import gov.nih.nci.cagrid.data.ui.cacore.ServiceWizard;
import gov.nih.nci.cagrid.introduce.beans.extension.ExtensionDescription;
import gov.nih.nci.cagrid.introduce.beans.extension.ExtensionType;
import gov.nih.nci.cagrid.introduce.beans.extension.ExtensionTypeExtensionData;
import gov.nih.nci.cagrid.introduce.beans.extension.ServiceExtensionDescriptionType;
import gov.nih.nci.cagrid.introduce.extension.ExtensionsLoader;
import gov.nih.nci.cagrid.introduce.info.ServiceInformation;
import gov.nih.nci.cagrid.introduce.portal.extension.CreationExtensionUIDialog;
import java.awt.Frame;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.List;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.WindowConstants;
import org.projectmobius.portal.PortalResourceManager;
/**
* DataServiceCreationDialog
* Dialog for post-creation changes to a data service
*
* @author <A HREF="MAILTO:ervin@bmi.osu.edu">David W. Ervin</A>
*
* @created Aug 1, 2006
* @version $Id$
*/
public class DataServiceCreationDialog extends CreationExtensionUIDialog {
/** Extension name constant for the WS-Enumeration Introduce extension. */
public static final String WS_ENUM_EXTENSION_NAME = "cagrid_wsEnum";
/** Extension name constant for the BDT Introduce extensions. */
public static final String BDT_EXTENSIONS_NAME = "bdt";
// Lazily-initialized Swing components; see the corresponding get*() methods.
private JPanel mainPanel = null;
private JCheckBox wsEnumCheckBox = null;
private JButton okButton = null;
private JPanel featuresPanel = null;
private JCheckBox gridIdentCheckBox = null;
// Mutually exclusive data source choices, grouped via dataButtonGroup.
private JRadioButton customDataRadioButton = null;
private JRadioButton sdkDataRadioButton = null;
private JPanel dataSourcePanel = null;
private ButtonGroup dataButtonGroup = null;
private JCheckBox bdtCheckBox = null;
/**
 * Creates the dialog for the given service description and information.
 *
 * @param f the parent frame
 * @param desc the extension description
 * @param info the service information
 */
public DataServiceCreationDialog(Frame f, ServiceExtensionDescriptionType desc, ServiceInformation info) {
    super(f, desc, info);
    // Intercept window-close so closing the dialog always goes through
    // setFeatureStatus() rather than a plain Swing dispose.
    setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
    addWindowListener(new WindowAdapter() {
        public void windowClosing(WindowEvent e) {
            // NOTE(review): setFeatureStatus() (defined elsewhere in this
            // class) presumably persists the selections and closes the
            // dialog — confirm against its implementation.
            setFeatureStatus();
        }
    });
    initialize();
}
private void initialize() {
this.setTitle("Data Service Configuration");
this.setContentPane(getMainPanel());
getDataButtonGroup();
pack();
}
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
private JPanel getMainPanel() {
if (mainPanel == null) {
GridBagConstraints gridBagConstraints5 = new GridBagConstraints();
gridBagConstraints5.gridx = 0;
gridBagConstraints5.insets = new java.awt.Insets(4,4,4,4);
gridBagConstraints5.anchor = java.awt.GridBagConstraints.EAST;
gridBagConstraints5.gridwidth = 2;
gridBagConstraints5.gridy = 1;
GridBagConstraints gridBagConstraints4 = new GridBagConstraints();
gridBagConstraints4.gridx = 1;
gridBagConstraints4.insets = new java.awt.Insets(4,4,4,4);
gridBagConstraints4.weightx = 1.0D;
gridBagConstraints4.fill = GridBagConstraints.BOTH;
gridBagConstraints4.gridy = 0;
GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
gridBagConstraints1.gridx = 0;
gridBagConstraints1.insets = new java.awt.Insets(4,4,4,4);
gridBagConstraints1.weightx = 1.0D;
gridBagConstraints1.fill = GridBagConstraints.BOTH;
gridBagConstraints1.gridy = 0;
mainPanel = new JPanel();
mainPanel.setLayout(new GridBagLayout());
mainPanel.add(getDataSourcePanel(), gridBagConstraints1);
mainPanel.add(getFeaturesPanel(), gridBagConstraints4);
mainPanel.add(getOkButton(), gridBagConstraints5);
}
return mainPanel;
}
/**
* This method initializes jCheckBox
*
* @return javax.swing.JCheckBox
*/
private JCheckBox getWsEnumCheckBox() {
if (wsEnumCheckBox == null) {
wsEnumCheckBox = new JCheckBox();
wsEnumCheckBox.setText("Use WS-Enumeration");
// can only use ws-enumeration if the extension has been installed
wsEnumCheckBox.setEnabled(wsEnumExtensionInstalled());
}
return wsEnumCheckBox;
}
/**
* This method initializes jButton
*
* @return javax.swing.JButton
*/
private JButton getOkButton() {
if (okButton == null) {
okButton = new JButton();
okButton.setText("OK");
okButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent e) {
setFeatureStatus();
if (getSdkDataRadioButton().isSelected()) {
ServiceWizard wiz = new ServiceWizard(
PortalResourceManager.getInstance().getGridPortal(), "caCORE Data Source");
wiz.addWizardPanel(new CoreDsIntroPanel(getExtensionDescription(), getServiceInfo()));
wiz.addWizardPanel(new ClientJarSelectionPanel(getExtensionDescription(), getServiceInfo()));
wiz.addWizardPanel(new AppserviceConfigPanel(getExtensionDescription(), getServiceInfo()));
wiz.addWizardPanel(new DomainModelPanel(getExtensionDescription(), getServiceInfo()));
wiz.addWizardPanel(new SchemaTypesPanel(getExtensionDescription(), getServiceInfo()));
PortalUtils.centerWindow(wiz);
wiz.showAt(wiz.getX(), wiz.getY());
}
dispose();
}
});
}
return okButton;
}
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
private JPanel getFeaturesPanel() {
if (featuresPanel == null) {
GridBagConstraints gridBagConstraints12 = new GridBagConstraints();
gridBagConstraints12.gridx = 0;
gridBagConstraints12.insets = new Insets(2, 2, 2, 2);
gridBagConstraints12.fill = GridBagConstraints.HORIZONTAL;
gridBagConstraints12.gridy = 1;
GridBagConstraints gridBagConstraints11 = new GridBagConstraints();
gridBagConstraints11.gridx = 0;
gridBagConstraints11.weightx = 1.0D;
gridBagConstraints11.insets = new java.awt.Insets(2,2,2,2);
gridBagConstraints11.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints11.gridy = 2;
GridBagConstraints gridBagConstraints = new GridBagConstraints();
gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.weightx = 1.0D;
gridBagConstraints.insets = new Insets(2, 2, 2, 2);
featuresPanel = new JPanel();
featuresPanel.setLayout(new GridBagLayout());
featuresPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(
null, "Optional Features", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
javax.swing.border.TitledBorder.DEFAULT_POSITION, null, null));
featuresPanel.add(getWsEnumCheckBox(), gridBagConstraints);
featuresPanel.add(getGridIdentCheckBox(), gridBagConstraints11);
featuresPanel.add(getBdtCheckBox(), gridBagConstraints12);
}
return featuresPanel;
}
/**
* This method initializes jCheckBox
*
* @return javax.swing.JCheckBox
*/
private JCheckBox getGridIdentCheckBox() {
if (gridIdentCheckBox == null) {
gridIdentCheckBox = new JCheckBox();
gridIdentCheckBox.setEnabled(false);
gridIdentCheckBox.setText("Use Grid Identifier");
}
return gridIdentCheckBox;
}
/**
* This method initializes jRadioButton
*
* @return javax.swing.JRadioButton
*/
private JRadioButton getCustomDataRadioButton() {
if (customDataRadioButton == null) {
customDataRadioButton = new JRadioButton();
customDataRadioButton.setText("Custom Data Source");
}
return customDataRadioButton;
}
/**
* This method initializes jRadioButton
*
* @return javax.swing.JRadioButton
*/
private JRadioButton getSdkDataRadioButton() {
if (sdkDataRadioButton == null) {
sdkDataRadioButton = new JRadioButton();
sdkDataRadioButton.setText("caCORE SDK Data Source");
}
return sdkDataRadioButton;
}
private ButtonGroup getDataButtonGroup() {
if (dataButtonGroup == null) {
dataButtonGroup = new ButtonGroup();
dataButtonGroup.add(getCustomDataRadioButton());
dataButtonGroup.add(getSdkDataRadioButton());
dataButtonGroup.setSelected(getCustomDataRadioButton().getModel(), true);
}
return dataButtonGroup;
}
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
private JPanel getDataSourcePanel() {
if (dataSourcePanel == null) {
GridBagConstraints gridBagConstraints3 = new GridBagConstraints();
gridBagConstraints3.gridx = 0;
gridBagConstraints3.insets = new java.awt.Insets(2,2,2,2);
gridBagConstraints3.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints3.weightx = 1.0D;
gridBagConstraints3.gridy = 1;
GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
gridBagConstraints2.gridx = 0;
gridBagConstraints2.insets = new java.awt.Insets(2,2,2,2);
gridBagConstraints2.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints2.weightx = 1.0D;
gridBagConstraints2.gridy = 0;
dataSourcePanel = new JPanel();
dataSourcePanel.setLayout(new GridBagLayout());
dataSourcePanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Data Source",
javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
javax.swing.border.TitledBorder.DEFAULT_POSITION, null, null));
dataSourcePanel.add(getCustomDataRadioButton(), gridBagConstraints2);
dataSourcePanel.add(getSdkDataRadioButton(), gridBagConstraints3);
}
return dataSourcePanel;
}
private void setFeatureStatus() {
if (getWsEnumCheckBox().isSelected()) {
if (!wsEnumExtensionUsed()) {
// add the ws-enum extension
ExtensionDescription desc = ExtensionsLoader.getInstance().getExtension(WS_ENUM_EXTENSION_NAME);
ExtensionType extension = new ExtensionType();
extension.setName(desc.getServiceExtensionDescription().getName());
extension.setExtensionType(desc.getExtensionType());
extension.setVersion(desc.getVersion());
getServiceInfo().getExtensions().setExtension(
(ExtensionType[]) Utils.appendToArray(
getServiceInfo().getExtensions().getExtension(), extension));
}
}
if (getBdtCheckBox().isSelected()) {
if (!bdtExtensionUsed()) {
// add the BDT extension
ExtensionDescription desc = ExtensionsLoader.getInstance().getExtension(BDT_EXTENSIONS_NAME);
ExtensionType extension = new ExtensionType();
extension.setName(desc.getServiceExtensionDescription().getName());
extension.setExtensionType(desc.getExtensionType());
extension.setVersion(desc.getVersion());
// BDT extension has to run before data extension does
ExtensionType[] currentExtensions = getServiceInfo().getExtensions().getExtension();
ExtensionType[] newExtensions = new ExtensionType[currentExtensions.length + 1];
newExtensions[0] = extension;
System.arraycopy(currentExtensions, 0, newExtensions, 0, currentExtensions.length);
getServiceInfo().getExtensions().setExtension(newExtensions);
}
}
// set the selected service features
ExtensionTypeExtensionData data =
getExtensionTypeExtensionData();
ServiceFeatures features = new ServiceFeatures();
try {
features.setUseSdkDataSource(getSdkDataRadioButton().isSelected());
features.setUseGridIdeitifiers(getGridIdentCheckBox().isSelected());
features.setUseWsEnumeration(getWsEnumCheckBox().isSelected());
features.setUseBdt(getBdtCheckBox().isSelected());
Data extData = ExtensionDataUtils.getExtensionData(data);
extData.setServiceFeatures(features);
ExtensionDataUtils.storeExtensionData(data, extData);
} catch (Exception ex) {
ex.printStackTrace();
ErrorDialog.showErrorDialog("Error storing configuration: " + ex.getMessage(), ex);
}
}
private boolean wsEnumExtensionInstalled() {
List extensionDescriptors = ExtensionsLoader.getInstance().getServiceExtensions();
for (int i = 0; i < extensionDescriptors.size(); i++) {
ServiceExtensionDescriptionType ex = (ServiceExtensionDescriptionType) extensionDescriptors.get(i);
if (ex.getName().equals(WS_ENUM_EXTENSION_NAME)) {
return true;
}
}
return false;
}
private boolean wsEnumExtensionUsed() {
ServiceInformation info = getServiceInfo();
if (info.getExtensions() != null && info.getExtensions().getExtension() != null) {
for (int i = 0; i < info.getExtensions().getExtension().length; i++) {
if (info.getExtensions().getExtension(i).getName().equals(WS_ENUM_EXTENSION_NAME)) {
return true;
}
}
}
return false;
}
private boolean bdtExtensionInstalled() {
List extensionDescriptors = ExtensionsLoader.getInstance().getServiceExtensions();
for (int i = 0; i < extensionDescriptors.size(); i++) {
ServiceExtensionDescriptionType desc = (ServiceExtensionDescriptionType) extensionDescriptors.get(i);
if (desc.getName().equals(BDT_EXTENSIONS_NAME)) {
return true;
}
}
return false;
}
private boolean bdtExtensionUsed() {
ExtensionType[] extensions = getServiceInfo().getExtensions().getExtension();
for (int i = 0; i < extensions.length; i++) {
if (extensions[i].getName().equals(BDT_EXTENSIONS_NAME)) {
return true;
}
}
return false;
}
/**
* This method initializes bdtCheckBox
*
* @return javax.swing.JCheckBox
*/
private JCheckBox getBdtCheckBox() {
if (bdtCheckBox == null) {
bdtCheckBox = new JCheckBox();
bdtCheckBox.setText("Use caGrid BDT");
// can only enable BDT if it has been installed
bdtCheckBox.setEnabled(bdtExtensionInstalled());
}
return bdtCheckBox;
}
} |
package org.eclipse.jnosql.mapping.cassandra.column;
import jakarta.nosql.TypeReference;
import jakarta.nosql.Value;
import jakarta.nosql.column.Column;
import jakarta.nosql.column.ColumnEntity;
import org.eclipse.jnosql.mapping.cassandra.column.model.Actor;
import org.eclipse.jnosql.mapping.cassandra.column.model.AppointmentBook;
import org.eclipse.jnosql.mapping.cassandra.column.model.Artist;
import org.eclipse.jnosql.mapping.cassandra.column.model.Contact;
import org.eclipse.jnosql.mapping.cassandra.column.model.Director;
import org.eclipse.jnosql.mapping.cassandra.column.model.History2;
import org.eclipse.jnosql.mapping.cassandra.column.model.Job;
import org.eclipse.jnosql.mapping.cassandra.column.model.Money;
import org.eclipse.jnosql.mapping.cassandra.column.model.Movie;
import org.eclipse.jnosql.mapping.cassandra.column.model.Worker;
import org.eclipse.jnosql.mapping.test.CDIExtension;
import org.eclipse.jnosql.communication.cassandra.column.UDT;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import javax.inject.Inject;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@CDIExtension
/**
 * Tests for {@code CassandraColumnEntityConverter}: round-trip conversion between
 * annotated model entities (Actor, Artist, Director, Worker, Person, ...) and
 * JNoSQL {@link ColumnEntity} instances, including Cassandra-specific UDT columns.
 */
public class CassandraColumnEntityConverterTest {

    // Converter under test, provided by the CDI test container
    @Inject
    private CassandraColumnEntityConverter converter;

    // Shared fixture: the column representation of {@link #actor}, rebuilt before each test
    private Column[] columns;

    // Shared Actor fixture. NOTE(review): "withMovierRating" is a typo in the
    // Actor builder API itself, while the matching column is named "movieRating".
    private Actor actor = Actor.actorBuilder().withAge()
        .withId()
        .withName()
        .withPhones(asList("234", "2342"))
        .withMovieCharacter(Collections.singletonMap("JavaZone", "Jedi"))
        .withMovierRating(Collections.singletonMap("JavaZone", 10))
        .build();

    /** Rebuilds the expected column array that mirrors the {@link #actor} fixture. */
    @BeforeEach
    public void init() {
        columns = new Column[]{Column.of("_id", 12L),
            Column.of("age", 10), Column.of("name", "Otavio"),
            Column.of("phones", asList("234", "2342"))
            , Column.of("movieCharacter", Collections.singletonMap("JavaZone", "Jedi"))
            , Column.of("movieRating", Collections.singletonMap("JavaZone", 10))};
    }

    // NOTE(review): despite the name, this test converts an Artist, not a Person.
    /** Entity -> ColumnEntity: an Artist maps to 4 columns under its class name. */
    @Test
    public void shouldConvertPersonToDocument() {
        Artist artist = Artist.builder().withAge()
            .withId(12)
            .withName("Otavio")
            .withPhones(asList("234", "2342")).build();
        ColumnEntity entity = converter.toColumn(artist);
        assertEquals("Artist", entity.getName());
        assertEquals(4, entity.size());
    }

    /** Entity -> ColumnEntity: an Actor maps to exactly the columns in {@link #columns}. */
    @Test
    public void shouldConvertActorToDocument() {
        ColumnEntity entity = converter.toColumn(actor);
        assertEquals("Actor", entity.getName());
        assertEquals(6, entity.size());
        assertThat(entity.getColumns(), containsInAnyOrder(columns));
    }

    /** ColumnEntity -> entity, using the explicit target-class overload of toEntity. */
    @Test
    public void shouldConvertDocumentToActor() {
        ColumnEntity entity = ColumnEntity.of("Actor");
        Stream.of(columns).forEach(entity::add);
        Actor actor = converter.toEntity(Actor.class, entity);
        assertNotNull(actor);
        assertEquals(10, actor.getAge());
        assertEquals(12L, actor.getId());
        assertEquals(asList("234", "2342"), actor.getPhones());
        assertEquals(Collections.singletonMap("JavaZone", "Jedi"), actor.getMovieCharacter());
        assertEquals(Collections.singletonMap("JavaZone", 10), actor.getMovieRating());
    }

    /** ColumnEntity -> entity, letting the converter infer the class from the entity name. */
    @Test
    public void shouldConvertDocumentToActorFromEntity() {
        ColumnEntity entity = ColumnEntity.of("Actor");
        Stream.of(columns).forEach(entity::add);
        Actor actor = converter.toEntity(entity);
        assertNotNull(actor);
        assertEquals(10, actor.getAge());
        assertEquals(12L, actor.getId());
        assertEquals(asList("234", "2342"), actor.getPhones());
        assertEquals(Collections.singletonMap("JavaZone", "Jedi"), actor.getMovieCharacter());
        assertEquals(Collections.singletonMap("JavaZone", 10), actor.getMovieRating());
    }

    /** An embedded Movie inside a Director becomes a sub-column holding a list of columns. */
    @Test
    public void shouldConvertDirectorToColumn() {
        Movie movie = new Movie("Matrix", 2012, Collections.singleton("Actor"));
        Director director = Director.builderDiretor().withAge(12)
            .withId(12)
            .withName("Otavio")
            .withPhones(asList("234", "2342")).withMovie(movie).build();
        ColumnEntity entity = converter.toColumn(director);
        assertEquals(5, entity.size());
        assertEquals(getValue(entity.find("name")), director.getName());
        assertEquals(getValue(entity.find("age")), director.getAge());
        assertEquals(getValue(entity.find("_id")), director.getId());
        assertEquals(getValue(entity.find("phones")), director.getPhones());
        Column subColumn = entity.find("movie").get();
        List<Column> columns = subColumn.get(new TypeReference<List<Column>>() {
        });
        assertEquals(3, columns.size());
        assertEquals("movie", subColumn.getName());
        assertEquals(movie.getTitle(), columns.stream().filter(c -> "title".equals(c.getName())).findFirst().get().get());
        assertEquals(movie.getYear(), columns.stream().filter(c -> "year".equals(c.getName())).findFirst().get().get());
        assertEquals(movie.getActors(), columns.stream().filter(c -> "actors".equals(c.getName())).findFirst().get().get());
    }

    /** Round-trip of an entity with an embedded sub-column ("movie" as a nested Column). */
    @Test
    public void shouldConvertToEmbeddedClassWhenHasSubColumn() {
        Movie movie = new Movie("Matrix", 2012, Collections.singleton("Actor"));
        Director director = Director.builderDiretor().withAge(12)
            .withId(12)
            .withName("Otavio")
            .withPhones(asList("234", "2342")).withMovie(movie).build();
        ColumnEntity entity = converter.toColumn(director);
        Director director1 = converter.toEntity(entity);
        assertEquals(movie, director1.getMovie());
        assertEquals(director.getName(), director1.getName());
        assertEquals(director.getAge(), director1.getAge());
        assertEquals(director.getId(), director1.getId());
    }

    /** The embedded Movie can also be reconstructed from flattened top-level columns. */
    @Test
    public void shouldConvertToEmbeddedClassWhenHasSubColumn2() {
        Movie movie = new Movie("Matrix", 2012, singleton("Actor"));
        Director director = Director.builderDiretor().withAge(12)
            .withId(12)
            .withName("Otavio")
            .withPhones(asList("234", "2342")).withMovie(movie).build();
        ColumnEntity entity = converter.toColumn(director);
        // replace the nested "movie" column with its fields flattened at the top level
        entity.remove("movie");
        entity.add(Column.of("title", "Matrix"));
        entity.add(Column.of("year", 2012));
        entity.add(Column.of("actors", singleton("Actor")));
        Director director1 = converter.toEntity(entity);
        assertEquals(movie, director1.getMovie());
        assertEquals(director.getName(), director1.getName());
        assertEquals(director.getAge(), director1.getAge());
        assertEquals(director.getId(), director1.getId());
    }

    /** The embedded Movie can also be reconstructed from a Map-valued column. */
    @Test
    public void shouldConvertToEmbeddedClassWhenHasSubColumn3() {
        Movie movie = new Movie("Matrix", 2012, singleton("Actor"));
        Director director = Director.builderDiretor().withAge(12)
            .withId(12)
            .withName("Otavio")
            .withPhones(asList("234", "2342")).withMovie(movie).build();
        ColumnEntity entity = converter.toColumn(director);
        // replace the nested "movie" column with a plain Map payload
        entity.remove("movie");
        Map<String, Object> map = new HashMap<>();
        map.put("title", "Matrix");
        map.put("year", 2012);
        map.put("actors", singleton("Actor"));
        entity.add(Column.of("movie", map));
        Director director1 = converter.toEntity(entity);
        assertEquals(movie, director1.getMovie());
        assertEquals(director.getName(), director1.getName());
        assertEquals(director.getAge(), director1.getAge());
        assertEquals(director.getId(), director1.getId());
    }

    /** An attribute converter (Money -> "BRL 10") is applied on the way to columns. */
    @Test
    public void shouldConvertToDocumentWhenHaConverter() {
        Worker worker = new Worker();
        Job job = new Job();
        job.setCity("Sao Paulo");
        job.setDescription("Java Developer");
        worker.setName("Bob");
        worker.setSalary(new Money("BRL", BigDecimal.TEN));
        worker.setJob(job);
        ColumnEntity entity = converter.toColumn(worker);
        assertEquals("Worker", entity.getName());
        assertEquals("Bob", entity.find("name").get().get());
        assertEquals("BRL 10", entity.find("money").get().get());
    }

    /** The same attribute converter is applied in reverse when reading back the entity. */
    @Test
    public void shouldConvertToEntityWhenHasConverter() {
        Worker worker = new Worker();
        Job job = new Job();
        job.setCity("Sao Paulo");
        job.setDescription("Java Developer");
        worker.setName("Bob");
        worker.setSalary(new Money("BRL", BigDecimal.TEN));
        worker.setJob(job);
        ColumnEntity entity = converter.toColumn(worker);
        Worker worker1 = converter.toEntity(entity);
        assertEquals(worker.getSalary(), worker1.getSalary());
        assertEquals(job.getCity(), worker1.getJob().getCity());
        assertEquals(job.getDescription(), worker1.getJob().getDescription());
    }

    /** A @UDT-annotated field is converted into a Cassandra UDT column, not a plain column. */
    @Test
    public void shouldSupportUDT() {
        Address address = new Address();
        address.setCity("California");
        address.setStreet("Street");
        Person person = new Person();
        person.setAge(10);
        person.setName("Ada");
        person.setHome(address);
        ColumnEntity entity = converter.toColumn(person);
        assertEquals("Person", entity.getName());
        Column column = entity.find("home").get();
        org.eclipse.jnosql.communication.cassandra.column.UDT udt = org.eclipse.jnosql.communication.cassandra.column.UDT.class.cast(column);
        assertEquals("address", udt.getUserType());
        assertEquals("home", udt.getName());
        assertThat((List<Column>) udt.get(),
            containsInAnyOrder(Column.of("city", "California"), Column.of("street", "Street")));
    }

    /** A UDT column in the entity is mapped back onto the @UDT-annotated field. */
    @Test
    public void shouldSupportUDTToEntity() {
        ColumnEntity entity = ColumnEntity.of("Person");
        entity.add(Column.of("name", "Poliana"));
        entity.add(Column.of("age", 20));
        List<Column> columns = asList(Column.of("city", "Salvador"),
            Column.of("street", "Jose Anasoh"));
        UDT udt = UDT.builder("address").withName("home")
            .addUDT(columns).build();
        entity.add(udt);
        Person person = converter.toEntity(entity);
        assertNotNull(person);
        Address home = person.getHome();
        assertEquals("Poliana", person.getName());
        assertEquals(Integer.valueOf(20), person.getAge());
        assertEquals("Salvador", home.getCity());
        assertEquals("Jose Anasoh", home.getStreet());
    }

    /** Temporal fields (Calendar, LocalDate[Time], ZonedDateTime, epoch millis) round-trip. */
    @Test
    public void shouldSupportTimeStampConverter() {
        History2 history = new History2();
        history.setCalendar(Calendar.getInstance());
        history.setLocalDate(LocalDate.now());
        history.setLocalDateTime(LocalDateTime.now());
        history.setZonedDateTime(ZonedDateTime.now());
        history.setNumber(new java.util.Date().getTime());
        ColumnEntity entity = converter.toColumn(history);
        assertEquals("History2", entity.getName());
        History2 historyConverted = converter.toEntity(entity);
        assertNotNull(historyConverted);
    }

    /** A List of @UDT elements becomes a single UDT column holding a list of column lists. */
    @Test
    public void shouldConvertListUDT() {
        AppointmentBook appointmentBook = new AppointmentBook();
        appointmentBook.setUser("otaviojava");
        appointmentBook.setContacts(asList(new Contact("Poliana", "poliana@santana.com"),
            new Contact("Ada", "ada@lovelace.com")));
        ColumnEntity entity = converter.toColumn(appointmentBook);
        assertEquals("AppointmentBook", entity.getName());
        assertEquals("otaviojava", entity.find("user").get().get());
        UDT column = (UDT) entity.find("contacts").get();
        List<List<Column>> contacts = (List<List<Column>>) column.get();
        assertEquals(2, contacts.size());
        assertTrue(contacts.stream().allMatch(c -> c.size() == 2));
        assertEquals("Contact", column.getUserType());
    }

    /** A list-valued UDT column is mapped back onto a List of @UDT elements. */
    @Test
    public void shouldConvertListUDTToEntity() {
        List<Iterable<Column>> columns = new ArrayList<>();
        columns.add(asList(Column.of("name", "Poliana"), Column.of("description", "poliana")));
        columns.add(asList(Column.of("name", "Ada"), Column.of("description", "ada@lovelace.com")));
        ColumnEntity entity = ColumnEntity.of("AppointmentBook");
        entity.add(Column.of("user", "otaviojava"));
        entity.add(UDT.builder("Contact").withName("contacts").addUDTs(columns).build());
        AppointmentBook appointmentBook = converter.toEntity(entity);
        List<Contact> contacts = appointmentBook.getContacts();
        assertEquals("otaviojava", appointmentBook.getUser());
        assertThat(contacts, containsInAnyOrder(new Contact("Poliana", "poliana"),
            new Contact("Ada", "ada@lovelace.com")));
    }

    /** Unwraps an optional column to its raw value, or null when the column is absent. */
    private Object getValue(Optional<Column> document) {
        return document.map(Column::getValue).map(Value::get).orElse(null);
    }
}
package org.phenotips.ontology.internal.solr;
import org.phenotips.ontology.OntologyService;
import org.phenotips.ontology.OntologyTerm;
import org.phenotips.ontology.SolrOntologyServiceInitializer;
import org.xwiki.cache.Cache;
import org.xwiki.cache.CacheException;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.test.mockito.MockitoComponentMockingRule;
import java.io.IOException;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.DisMaxParams;
import org.apache.solr.common.params.SolrParams;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.Matchers;
import org.mockito.Mockito;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Tests for the HPO implementation of the {@link org.phenotips.ontology.OntologyService},
* {@link org.phenotips.ontology.internal.solr.HumanPhenotypeOntology}.
*/
public class HumanPhenotypeOntologyTest
{
    // Return code of reindex(), captured during setup so tests can assert on it
    public int ontologyServiceResult;

    // Mocked term cache injected into the component under test
    public Cache<OntologyTerm> cache;

    // Mocked Solr server injected into the component under test
    public SolrServer server;

    // The HumanPhenotypeOntology component under test
    public OntologyService ontologyService;

    @Rule
    public final MockitoComponentMockingRule<OntologyService> mocker =
        new MockitoComponentMockingRule<OntologyService>(HumanPhenotypeOntology.class);

    /**
     * Wires mocked Solr and cache dependencies into the component and performs an
     * initial reindex from the bundled hpo-test.obo resource.  Note that the
     * reindex happens here, in @Before, so testHumanPhenotypeOntologyReindex only
     * verifies the interactions it caused.
     */
    @SuppressWarnings("unchecked")
    @Before
    public void setUpOntology()
        throws ComponentLookupException, IOException, SolrServerException, CacheException
    {
        this.cache = mock(Cache.class);
        SolrOntologyServiceInitializer externalServicesAccess =
            this.mocker.getInstance(SolrOntologyServiceInitializer.class);
        when(externalServicesAccess.getCache()).thenReturn(this.cache);
        this.server = mock(SolrServer.class);
        when(externalServicesAccess.getServer()).thenReturn(this.server);
        this.ontologyService = this.mocker.getComponentUnderTest();
        this.ontologyServiceResult =
            this.ontologyService.reindex(this.getClass().getResource("/hpo-test.obo").toString());
    }

    /** Reindexing wipes the Solr core, adds the parsed documents, and clears the cache. */
    @Test
    public void testHumanPhenotypeOntologyReindex()
        throws ComponentLookupException, IOException, SolrServerException, CacheException
    {
        Mockito.verify(this.server).deleteByQuery("*:*");
        Mockito.verify(this.server).commit();
        Mockito.verify(this.server).add(Matchers.anyCollectionOf(SolrInputDocument.class));
        Mockito.verify(this.cache).removeAll();
        Mockito.verifyNoMoreInteractions(this.cache, this.server);
        Assert.assertTrue(this.ontologyServiceResult == 0);
    }

    /** getVersion() reads the "version" field of the first document returned by Solr. */
    @Test
    public void testHumanPhenotypeOntologyVersion() throws SolrServerException
    {
        QueryResponse response = mock(QueryResponse.class);
        when(this.server.query(any(SolrQuery.class))).thenReturn(response);
        SolrDocumentList results = mock(SolrDocumentList.class);
        when(response.getResults()).thenReturn(results);
        when(results.isEmpty()).thenReturn(false);
        SolrDocument versionDoc = mock(SolrDocument.class);
        when(results.get(0)).thenReturn(versionDoc);
        when(versionDoc.getFieldValue("version")).thenReturn("2014:01:01");
        Assert.assertEquals("2014:01:01", this.ontologyService.getVersion());
    }

    /** The default ontology location is an HTTP URL pointing at hp.obo. */
    @Test
    public void testHumanPhenotypeOntologyDefaultLocation()
    {
        String location = this.ontologyService.getDefaultOntologyLocation();
        Assert.assertNotNull(location);
        Assert.assertTrue(location.endsWith("hp.obo"));
        Assert.assertTrue(location.startsWith("http"));
    }

    /** A blank suggestion query short-circuits to an empty result without hitting Solr. */
    @Test
    public void testHumanPhenotypeOntologySuggestTermsBlank() throws ComponentLookupException
    {
        Assert.assertTrue(this.mocker.getComponentUnderTest().termSuggest("", 0, null, null).isEmpty());
    }

    /** An HP:<digits> query is recognised as an id lookup (filter query, no DisMax). */
    @Test
    public void testHumanPhenotypeOntologySuggestTermsIsId() throws ComponentLookupException, SolrServerException
    {
        QueryResponse response = mock(QueryResponse.class);
        when(this.server.query(any(SolrParams.class))).thenReturn(response);
        when(response.getSpellCheckResponse()).thenReturn(null);
        when(response.getResults()).thenReturn(new SolrDocumentList());
        this.mocker.getComponentUnderTest().termSuggest("HP:0001", 0, null, null);
        verify(this.server).query(argThat(new IsIdQuery()));
    }

    /** A non-numeric HP: query falls back to a DisMax full-text search. */
    @Test
    public void testHumanPhenotypeOntologySuggestTermsIsNotId() throws ComponentLookupException, SolrServerException
    {
        QueryResponse response = mock(QueryResponse.class);
        when(this.server.query(any(SolrParams.class))).thenReturn(response);
        when(response.getSpellCheckResponse()).thenReturn(null);
        when(response.getResults()).thenReturn(new SolrDocumentList());
        this.mocker.getComponentUnderTest().termSuggest("HP:Test", 0, null, null);
        verify(this.server).query(argThat(new IsDisMaxQuery()));
    }

    /** Multi-word input is also sent as a DisMax full-text search. */
    @Test
    public void testHumanPhenotypeOntologySuggestTermsMultipleWords() throws ComponentLookupException,
        SolrServerException
    {
        QueryResponse response = mock(QueryResponse.class);
        when(this.server.query(any(SolrParams.class))).thenReturn(response);
        when(response.getSpellCheckResponse()).thenReturn(null);
        when(response.getResults()).thenReturn(new SolrDocumentList());
        this.mocker.getComponentUnderTest().termSuggest("first second", 0, null, null);
        verify(this.server).query(argThat(new IsDisMaxQuery()));
    }

    /** Matches Solr requests that carry DisMax phrase/query fields plus a query string. */
    class IsDisMaxQuery extends ArgumentMatcher<SolrParams>
    {
        @Override
        public boolean matches(Object argument)
        {
            SolrParams params = (SolrParams) argument;
            return params.get(DisMaxParams.PF) != null
                && params.get(DisMaxParams.QF) != null
                && params.get(CommonParams.Q) != null;
        }
    }

    // NOTE(review): this matcher is not referenced by any test in this class;
    // it looks like a leftover — confirm it is unused elsewhere before removing.
    /** Matches Solr requests whose query string ends with a wildcard on the last word. */
    class LastWord extends ArgumentMatcher<SolrParams>
    {
        @Override
        public boolean matches(Object argument)
        {
            SolrParams params = (SolrParams) argument;
            return params.get(CommonParams.Q).endsWith("second*");
        }
    }

    /** Matches Solr requests shaped as id lookups: an "id" filter query and no DisMax fields. */
    class IsIdQuery extends ArgumentMatcher<SolrParams>
    {
        @Override
        public boolean matches(Object argument)
        {
            SolrParams params = (SolrParams) argument;
            return params.get(CommonParams.FQ).startsWith("id")
                && params.get(DisMaxParams.PF) == null
                && params.get(DisMaxParams.QF) == null;
        }
    }
}
package it.unibz.inf.ontop.model.term.functionsymbol.db.impl;
import com.google.common.collect.ImmutableList;
import it.unibz.inf.ontop.iq.node.VariableNullability;
import it.unibz.inf.ontop.model.term.DBConstant;
import it.unibz.inf.ontop.model.term.ImmutableTerm;
import it.unibz.inf.ontop.model.term.TermFactory;
import it.unibz.inf.ontop.model.type.DBTermType;
import it.unibz.inf.ontop.model.type.TermTypeInference;
public class DefaultDBNonStrictStringEqOperator extends AbstractDBNonStrictEqOperator {
/**
* TODO: type the input
*/
protected DefaultDBNonStrictStringEqOperator(DBTermType rootDBTermType, DBTermType dbBoolean) {
super("STR_NON_STRICT_EQ", rootDBTermType, dbBoolean);
}
@Override
public boolean canBePostProcessed(ImmutableList<? extends ImmutableTerm> arguments) {
return true;
}
@Override
protected ImmutableTerm buildTermAfterEvaluation(ImmutableList<ImmutableTerm> newTerms, TermFactory termFactory,
VariableNullability variableNullability) {
if (newTerms.stream().allMatch(t -> t instanceof DBConstant)) {
return termFactory.getDBBooleanConstant(
((DBConstant) newTerms.get(0)).getValue().equals(
((DBConstant) newTerms.get(1)).getValue()));
}
// Same term type --> reduce it to a strict equality
else if (newTerms.get(0).inferType()
.flatMap(TermTypeInference::getTermType)
.filter(t -> newTerms.get(1).inferType()
.flatMap(TermTypeInference::getTermType)
.filter(t::equals)
.isPresent())
.isPresent())
return termFactory.getStrictEquality(newTerms);
else
return termFactory.getImmutableExpression(this, newTerms);
}
} |
package JSyntaxHighlighter;
import java.awt.Color;
import java.awt.Font;
import java.awt.FontFormatException;
import java.awt.FontMetrics;
import java.awt.GraphicsEnvironment;
import java.awt.Insets;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.io.File;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.JTextPane;
import javax.swing.text.AttributeSet;
import javax.swing.text.DefaultEditorKit;
import javax.swing.text.PlainDocument;
import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyleContext;
import javax.swing.text.StyledDocument;
import javax.swing.text.TabSet;
import javax.swing.text.TabStop;
public class JSyntaxHighlighterObject extends JTextPane implements Runnable{
private StyledDocument styledDoc;
private static final long serialVersionUID = -2995700771349481427L;
private SyntaxStyle SYNTAX_STYLE = new SyntaxStyle();
private static String SYNTAX_KEYWORDS_RULE;
private static final String SYNTAX_STRING_RULE = "\\\"(\\.|[^\\\"])*\\\"";
private static final String SYNTAX_STRING_WITH_ESCAPE_RULE = "(\"[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\")";
private static final String SYNTAX_NUMERIC_RULE = "\\b\\d+[\\.]?\\d*([eE]\\-?\\d+)?[lLdDfF]?\\b|\\b0x[a-fA-F\\d]+\\b";
private static final String SYNTAX_CLASS_RULE = "\\b[A-Z][a-z]*([A-Z][a-z]*)*\\b";
/**
* Create a new JHighlighter Object with a predefined language and theme.
* @param language
* @param theme
*/
public JSyntaxHighlighterObject(Language language, Theme theme){
this.setMargin(new Insets(2,20,0,0));
this.setTheme(theme);
this.run();
this.setCaretColor(Color.WHITE);
this.setTabSpacing();
styledDoc = this.getStyledDocument();
this.getStyledDocument().putProperty(DefaultEditorKit.EndOfLineStringProperty, "\n");
switch (language){
case Java:
Syntax syntax = new Syntax();
syntax.addKeywords(javaSyntax());
syntax.buildSyntaxHighlighter();
SYNTAX_KEYWORDS_RULE = syntax.getKeywordsRegexRule();
break;
case C:
break;
case CPP:
break;
case CSS:
break;
case CSharp:
break;
case HTML:
break;
case JavaScript:
break;
case ObjectiveC:
break;
case PHP:
break;
case SASS:
break;
case SCSS:
break;
case XML:
break;
case YAML:
break;
default:
break;
}
// Add Meslo font
Font Meslo = null;
try{
Meslo = Font.createFont(Font.TRUETYPE_FONT, new File("src/JHighlighter/Fonts/Menlo-Regular.ttf")).deriveFont(Font.BOLD, 12f);
GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
ge.registerFont(Font.createFont(Font.TRUETYPE_FONT, new File("src/JHighlighter/Fonts/Menlo-Regular.ttf")));
}catch(IOException e){
e.printStackTrace();
}catch (FontFormatException e) {
e.printStackTrace();
}
this.setFont(Meslo);
}
@Override
public void run(){
// Add action listener
this.addKeyListener(highlight());
}
private void setTheme(Theme theme){
switch(theme){
case Monokai:
this.setAsMonokai();
break;
case Tomorrow:
break;
case TomorrowNight:
break;
default:
break;
}
}
// SET MONOKAI THEME
private void setAsMonokai(){
this.setBackground(Color.decode("#272822"));
this.setForeground(Color.decode("#F8F8F2"));
this.setBackground(this.getBackground());
this.setForeground(this.getForeground());
// Syntax Colors
SYNTAX_STYLE.KEYWORDS = Color.decode("#F92672");
SYNTAX_STYLE.STRINGS = Color.decode("#E6DB74");
SYNTAX_STYLE.NUMERICAL = Color.decode("#AE81FF");
SYNTAX_STYLE.CLASS = Color.decode("#A6E22E");
}
private void setTabSpacing(){
FontMetrics fm = this.getFontMetrics( this.getFont() );
int charWidth = fm.charWidth( 'w' );
int tabWidth = charWidth * 4;
TabStop[] tabs = new TabStop[10];
for (int j = 0; j < tabs.length; j++)
{
int tab = j + 1;
tabs[j] = new TabStop( tab * tabWidth );
}
TabSet tabSet = new TabSet(tabs);
SimpleAttributeSet attributes = new SimpleAttributeSet();
StyleConstants.setTabSet(attributes, tabSet);
int length = this.getDocument().getLength();
this.getStyledDocument().setParagraphAttributes(0, length, attributes, true);
}
private String[] javaSyntax(){
return new String[]{ "abstract", "assert", "boolean", "break", "byte", "case", "catch" ,
"char", "class", "continue", "default", "do", "double", "else", "enum", "extends",
"final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof",
"int", "long", "native", "new", "package", "private", "protected", "public", "return",
"short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", "throws",
"transient", "try", "void", "volatile", "while", "false", "null", "true"};
}
// Syntax highlighting methods
private void updateSyntaxColor(int offset, int length, Color c, int plain){
StyleContext sc = StyleContext.getDefaultStyleContext();
AttributeSet aset = sc.addAttribute(SimpleAttributeSet.EMPTY, StyleConstants.Foreground, c);
AttributeSet asetF = null;
switch(plain){
case Font.BOLD:
asetF = sc.addAttribute(aset, StyleConstants.Bold, true);
break;
case Font.ITALIC:
asetF = sc.addAttribute(aset, StyleConstants.Italic, true);
break;
}
if (asetF != null)
styledDoc.setCharacterAttributes(offset, length, asetF, true);
else
styledDoc.setCharacterAttributes(offset, length, aset, true);
}
private void clearSyntaxColors(){
updateSyntaxColor(0, this.getText().length(), this.getForeground(), Font.PLAIN);
}
// KEY LISTENER
private KeyListener highlight(){
return new KeyListener(){
@Override
public void keyTyped(KeyEvent e) { /* NOTHING DO DO */ }
@Override
public void keyPressed(KeyEvent e) { /* NOTHING TO DO */ }
@Override
public void keyReleased(KeyEvent e) {
clearSyntaxColors();
// Class
Pattern patternClass = Pattern.compile(SYNTAX_CLASS_RULE);
Matcher matchClass = patternClass.matcher(((JTextPane) e.getSource()).getText());
while (matchClass.find()){
updateSyntaxColor(matchClass.start(), matchClass.end() - matchClass.start(), SYNTAX_STYLE.CLASS, Font.ITALIC);
}
// Keywords
Pattern patternKey = Pattern.compile(SYNTAX_KEYWORDS_RULE);
Matcher matchKey = patternKey.matcher(((JTextPane) e.getSource()).getText());
while (matchKey.find()){
updateSyntaxColor(matchKey.start(), matchKey.end() - matchKey.start(), SYNTAX_STYLE.KEYWORDS, Font.PLAIN);
}
// Generic String
Pattern patternStr = Pattern.compile(SYNTAX_STRING_RULE);
Matcher matchStr = patternStr.matcher(((JTextPane) e.getSource()).getText());
while (matchStr.find()){
updateSyntaxColor(matchStr.start(), matchStr.end() - matchStr.start(), SYNTAX_STYLE.STRINGS,Font.PLAIN);
}
// String with escape character
Pattern patternStrE = Pattern.compile(SYNTAX_STRING_WITH_ESCAPE_RULE);
Matcher matchStrE = patternStrE.matcher(((JTextPane) e.getSource()).getText());
while (matchStrE.find()){
updateSyntaxColor(matchStrE.start(), matchStrE.end() - matchStrE.start(), SYNTAX_STYLE.STRINGS, Font.PLAIN);
}
// Numbers
Pattern patternNum = Pattern.compile(SYNTAX_NUMERIC_RULE);
Matcher matchNum = patternNum.matcher(((JTextPane) e.getSource()).getText());
while (matchNum.find()){
updateSyntaxColor(matchNum.start(), matchNum.end() - matchNum.start(), SYNTAX_STYLE.NUMERICAL, Font.PLAIN);
}
}
};
}
// END OF KEY LISTENER
// End of Syntax Highlighting methods
} |
/*
* generated by Xtext
*/
package com.rockwellcollins.atc.agree.validation;
import static com.rockwellcollins.atc.agree.validation.AgreeType.BOOL;
import static com.rockwellcollins.atc.agree.validation.AgreeType.ERROR;
import static com.rockwellcollins.atc.agree.validation.AgreeType.INT;
import static com.rockwellcollins.atc.agree.validation.AgreeType.REAL;
import java.awt.Component;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtext.EcoreUtil2;
import org.eclipse.xtext.validation.Check;
import org.eclipse.xtext.validation.CheckType;
import org.osate.aadl2.AadlBoolean;
import org.osate.aadl2.AadlInteger;
import org.osate.aadl2.AadlPackage;
import org.osate.aadl2.AadlReal;
import org.osate.aadl2.AadlString;
import org.osate.aadl2.AnnexLibrary;
import org.osate.aadl2.AnnexSubclause;
import org.osate.aadl2.Classifier;
import org.osate.aadl2.ClassifierType;
import org.osate.aadl2.ComponentClassifier;
import org.osate.aadl2.ComponentImplementation;
import org.osate.aadl2.ComponentType;
import org.osate.aadl2.Connection;
import org.osate.aadl2.DataAccess;
import org.osate.aadl2.DataImplementation;
import org.osate.aadl2.DataPort;
import org.osate.aadl2.DataSubcomponent;
import org.osate.aadl2.DataSubcomponentType;
import org.osate.aadl2.DataType;
import org.osate.aadl2.Element;
import org.osate.aadl2.EnumerationType;
import org.osate.aadl2.EventDataPort;
import org.osate.aadl2.Feature;
import org.osate.aadl2.FeatureGroup;
import org.osate.aadl2.FeatureGroupType;
import org.osate.aadl2.NamedElement;
import org.osate.aadl2.Property;
import org.osate.aadl2.PropertyType;
import org.osate.aadl2.Subcomponent;
import org.osate.aadl2.impl.DataImpl;
import org.osate.aadl2.impl.SubcomponentImpl;
import org.osate.annexsupport.AnnexUtil;
import com.rockwellcollins.atc.agree.agree.AgreeContract;
import com.rockwellcollins.atc.agree.agree.AgreePackage;
import com.rockwellcollins.atc.agree.agree.AgreeSubclause;
import com.rockwellcollins.atc.agree.agree.Arg;
import com.rockwellcollins.atc.agree.agree.AssertStatement;
import com.rockwellcollins.atc.agree.agree.AssumeStatement;
import com.rockwellcollins.atc.agree.agree.AsynchStatement;
import com.rockwellcollins.atc.agree.agree.BinaryExpr;
import com.rockwellcollins.atc.agree.agree.BoolLitExpr;
import com.rockwellcollins.atc.agree.agree.CalenStatement;
import com.rockwellcollins.atc.agree.agree.CallDef;
import com.rockwellcollins.atc.agree.agree.ConnectionStatement;
import com.rockwellcollins.atc.agree.agree.ConstStatement;
import com.rockwellcollins.atc.agree.agree.EqStatement;
import com.rockwellcollins.atc.agree.agree.EventExpr;
import com.rockwellcollins.atc.agree.agree.Expr;
import com.rockwellcollins.atc.agree.agree.FloorCast;
import com.rockwellcollins.atc.agree.agree.FnCallExpr;
import com.rockwellcollins.atc.agree.agree.FnDefExpr;
import com.rockwellcollins.atc.agree.agree.GetPropertyExpr;
import com.rockwellcollins.atc.agree.agree.GuaranteeStatement;
import com.rockwellcollins.atc.agree.agree.IfThenElseExpr;
import com.rockwellcollins.atc.agree.agree.InitialStatement;
import com.rockwellcollins.atc.agree.agree.IntLitExpr;
import com.rockwellcollins.atc.agree.agree.LatchedStatement;
import com.rockwellcollins.atc.agree.agree.LemmaStatement;
import com.rockwellcollins.atc.agree.agree.LiftStatement;
import com.rockwellcollins.atc.agree.agree.MNSynchStatement;
import com.rockwellcollins.atc.agree.agree.NestedDotID;
import com.rockwellcollins.atc.agree.agree.NodeDefExpr;
import com.rockwellcollins.atc.agree.agree.NodeEq;
import com.rockwellcollins.atc.agree.agree.NodeLemma;
import com.rockwellcollins.atc.agree.agree.NodeStmt;
import com.rockwellcollins.atc.agree.agree.OrderStatement;
import com.rockwellcollins.atc.agree.agree.PreExpr;
import com.rockwellcollins.atc.agree.agree.PrevExpr;
import com.rockwellcollins.atc.agree.agree.PrimType;
import com.rockwellcollins.atc.agree.agree.PropertyStatement;
import com.rockwellcollins.atc.agree.agree.RealCast;
import com.rockwellcollins.atc.agree.agree.RealLitExpr;
import com.rockwellcollins.atc.agree.agree.RecordDefExpr;
import com.rockwellcollins.atc.agree.agree.RecordExpr;
import com.rockwellcollins.atc.agree.agree.RecordType;
import com.rockwellcollins.atc.agree.agree.RecordUpdateExpr;
import com.rockwellcollins.atc.agree.agree.SpecStatement;
import com.rockwellcollins.atc.agree.agree.SynchStatement;
import com.rockwellcollins.atc.agree.agree.ThisExpr;
import com.rockwellcollins.atc.agree.agree.Type;
import com.rockwellcollins.atc.agree.agree.UnaryExpr;
public class AgreeJavaValidator extends AbstractAgreeJavaValidator {
private Set<CallDef> checkedRecCalls = new HashSet<>();
@Override
protected boolean isResponsible(Map<Object, Object> context, EObject eObject) {
	// This validator only handles objects from the AGREE metamodel package.
	boolean fromAgree = eObject.eClass().getEPackage() == AgreePackage.eINSTANCE;
	return fromAgree;
}
@Check(CheckType.FAST)
public void checkConnectionStatement(ConnectionStatement conn){
	// Connection statements only make sense inside an implementation.
	Classifier container = conn.getContainingClassifier();
	if (!(container instanceof ComponentImplementation)) {
		error(conn, "Connection statements are only allowed in component implementations.");
		return;
	}
	// An unresolved label is tolerated; a resolved non-connection is not.
	NamedElement aadlConn = conn.getConn();
	if (aadlConn != null && !(aadlConn instanceof Connection)) {
		error(conn, "The connection label in the connection statement is not a connection");
	}
}
@Check(CheckType.FAST)
public void checkOrderStatement(OrderStatement order){
	// Only subcomponents may be listed in an ordering.
	for (NamedElement el : order.getComps()) {
		if (!(el instanceof Subcomponent)) {
			error(el, "Only elements of subcomponent type are allowed in ordering statements");
		}
	}
	Classifier container = order.getContainingClassifier();
	if (!(container instanceof ComponentImplementation)) {
		error(order, "Ordering statements can only appear in component implementations");
		return;
	}
	// Collect every subcomponent of the implementation that the ordering omits.
	ComponentImplementation compImpl = (ComponentImplementation) container;
	List<NamedElement> missing = new ArrayList<>();
	for (Subcomponent subcomp : compImpl.getAllSubcomponents()) {
		boolean listed = false;
		for (NamedElement el : order.getComps()) {
			if (el.equals(subcomp)) {
				listed = true;
				break;
			}
		}
		if (!listed) {
			missing.add(subcomp);
		}
	}
	if (!missing.isEmpty()) {
		StringBuilder msg = new StringBuilder("The following subcomponents are not present in the ordering: ");
		String sep = "";
		for (NamedElement subcomp : missing) {
			msg.append(sep).append(subcomp.getName());
			sep = ", ";
		}
		error(order, msg.toString());
	}
}
@Check(CheckType.FAST)
public void checkCalenStatement(CalenStatement calen){
	// Every listed element must be a subcomponent.
	for (NamedElement el : calen.getEls()) {
		if (!(el instanceof Subcomponent)) {
			error(calen, "Element '"+el.getName()+"' is not a subcomponent");
		}
	}
	// Calendars only make sense on a component implementation.
	if (!(calen.getContainingClassifier() instanceof ComponentImplementation)) {
		error(calen, "Calendar statements can only appear in component implementations");
	}
}
@Check(CheckType.FAST)
public void checkFloorCast(FloorCast floor){
	// floor(...) takes a real-valued argument.
	AgreeType argType = getAgreeType(floor.getExpr());
	if (!matches(REAL, argType)) {
		error(floor, "Argument of floor cast is of type '" + argType.toString()
				+ "' but must be of type 'real'");
	}
}
/**
 * Checks that the argument of a real(...) cast is an integer expression.
 */
@Check(CheckType.FAST)
public void checkRealCast(RealCast cast){
	AgreeType exprType = getAgreeType(cast.getExpr());
	if(!matches(INT, exprType)){
		// BUG FIX: the original message said "floor cast" — this is the real cast
		// check (message copied from checkFloorCast and never updated).
		error(cast, "Argument of real cast is of type '" + exprType.toString()
				+ "' but must be of type 'int'");
	}
}
@Check(CheckType.FAST)
public void checkEventExpr(EventExpr event){
	// event(...) is only defined on event data ports.
	NamedElement target = getFinalNestId(event.getId());
	if (!(target instanceof EventDataPort)) {
		error(event, "Argument of event expression must be an event data port");
	}
}
@Check(CheckType.FAST)
public void checkSynchStatement(SynchStatement sync){
	// Synchrony statements belong to component implementations.
	if (!(sync.getContainingClassifier() instanceof ComponentImplementation)) {
		error(sync, "Synchrony statements can only appear in component implementations");
	}
	// Subtypes without numeric values are validated by their own checks.
	if (sync instanceof CalenStatement
			|| sync instanceof MNSynchStatement
			|| sync instanceof AsynchStatement
			|| sync instanceof LatchedStatement) {
		return;
	}
	// TODO: INT_LITs are likely always non-negative, so this may be redundant.
	int first = Integer.parseInt(sync.getVal());
	if (first < 0) {
		error(sync, "The value of synchrony statments must be positive");
	}
	String val2 = sync.getVal2();
	if (val2 != null) {
		int second = Integer.parseInt(val2);
		if (second <= 0) {
			error(sync, "The second value of a synchrony statment must be greater than zero");
		}
		if (first <= second) {
			error(sync, "The second value of a synchrony argument must be less than the first");
		}
	}
}
/**
 * Validates a quasi-synchrony (m,n) statement: every (max, min) pair must be
 * at least 1 and min must not exceed max.
 */
@Check(CheckType.FAST)
public void checkMNSynchStatement(MNSynchStatement sync){
	// All four parallel lists must have the same length; bail out on ANY
	// mismatch (the parser reports that error). BUG FIX: the original used
	// '&&', which only returned when *every* list length differed and could
	// then throw IndexOutOfBoundsException in the loop below.
	if (sync.getMax().size() != sync.getMin().size()
			|| sync.getMax().size() != sync.getComp1().size()
			|| sync.getMax().size() != sync.getComp2().size()) {
		return; // this should throw a parser error
	}
	for (int i = 0; i < sync.getMax().size(); i++) {
		int max = Integer.parseInt(sync.getMax().get(i));
		int min = Integer.parseInt(sync.getMin().get(i));
		if (max < 1 || min < 1) {
			error(sync, "Quasi-synchronous values must be greater than zero");
		}
		if (min > max) {
			// BUG FIX: the original called error(String) with no target element;
			// attach the message to the statement like the sibling check above.
			error(sync, "Left hand side quasi-synchronous values must be greater than the right hand side");
		}
	}
}
@Check(CheckType.FAST)
public void checkAssume(AssumeStatement assume) {
	// Assumptions belong to component types and must be boolean-valued.
	if (!(assume.getContainingClassifier() instanceof ComponentType)) {
		error(assume, "Assume statements are only allowed in component types");
	}
	AgreeType exprType = getAgreeType(assume.getExpr());
	if (!matches(BOOL, exprType)) {
		error(assume, "Expression for assume statement is of type '" + exprType.toString()
				+ "' but must be of type 'bool'");
	}
}
@Check(CheckType.FAST)
public void checkInitialStatement(InitialStatement statement){
	// 'initially' belongs to component types and must be boolean-valued.
	if (!(statement.getContainingClassifier() instanceof ComponentType)) {
		error(statement, "Initial statements are only allowed in component types");
	}
	AgreeType exprType = getAgreeType(statement.getExpr());
	if (!matches(BOOL, exprType)) {
		error(statement, "Expression for 'initially' statement is of type '" + exprType.toString()
				+ "' but must be of type 'bool'");
	}
}
@Check(CheckType.FAST)
public void checkLift(LiftStatement lift) {
	NestedDotID dotId = lift.getSubcomp();
	// Lifting is only supported one level deep.
	if (dotId.getSub() != null) {
		error(lift, "Lift statements can only be applied to direct subcomponents."
				+ "Place a lift statement in the subcomponents contract for heavy lifting");
	}
	// The lifted element must be a subcomponent implementation.
	NamedElement namedEl = dotId.getBase();
	if (namedEl != null && !(namedEl instanceof SubcomponentImpl)) {
		error(lift, "Lift statements must apply to subcomponent implementations. '"
				+ namedEl.getName() + "' is not a subcomponent.");
	}
}
@Check(CheckType.FAST)
public void checkAssert(AssertStatement asser) {
	// Assertions belong to implementations and must be boolean-valued.
	if (!(asser.getContainingClassifier() instanceof ComponentImplementation)) {
		error(asser, "Assert statements are only allowed in component implementations.");
	}
	AgreeType exprType = getAgreeType(asser.getExpr());
	if (!matches(BOOL, exprType)) {
		error(asser, "Expression for assert statement is of type '" + exprType.toString()
				+ "' but must be of type 'bool'");
	}
}
@Check(CheckType.FAST)
public void checkNestedDotID(NestedDotID dotId){
	// AADL properties must be read through Get_Property, not dotted paths.
	NestedDotID sub = dotId.getSub();
	if (sub != null && sub.getBase() instanceof Property) {
		error(sub, "You cannot reference AADL properties this way."
				+ " Use a \"Get_Property\" statement.");
	}
}
@Check(CheckType.FAST)
public void checkGuarantee(GuaranteeStatement guar) {
	// Guarantees belong to component types and must be boolean-valued.
	if (!(guar.getContainingClassifier() instanceof ComponentType)) {
		error(guar, "Guarantee statements are only allowed in component types");
	}
	AgreeType exprType = getAgreeType(guar.getExpr());
	if (!matches(BOOL, exprType)) {
		error(guar, "Expression for guarantee statement is of type '" + exprType.toString()
				+ "' but must be of type 'bool'");
	}
}
@Check(CheckType.FAST)
public void checkLemma(LemmaStatement lemma) {
	// Lemmas belong to implementations (or nodes) and must be boolean-valued.
	if (!(lemma.getContainingClassifier() instanceof ComponentImplementation)) {
		error(lemma, "Lemma statements are only allowed in component implementations and nodes");
	}
	AgreeType exprType = getAgreeType(lemma.getExpr());
	if (!matches(BOOL, exprType)) {
		error(lemma, "Expression for lemma statement is of type '" + exprType.toString()
				+ "' but must be of type 'bool'");
	}
}
@Check(CheckType.FAST)
public void checkUnaryExpr(UnaryExpr unaryExpr) {
	// Unary minus needs a numeric operand; 'not' needs a boolean one.
	AgreeType operandType = getAgreeType(unaryExpr.getExpr());
	String op = unaryExpr.getOp();
	if ("-".equals(op)) {
		boolean numeric = matches(INT, operandType) || matches(REAL, operandType);
		if (!numeric) {
			error(unaryExpr, "right side of unary expression '" + op + "' is of type '"
					+ operandType + "' but must be of type 'int' or 'real'");
		}
	} else if ("not".equals(op)) {
		if (!matches(BOOL, operandType)) {
			error(unaryExpr, "right side of unary expression '" + op + "' is of type '"
					+ operandType + "' but must be of type 'bool'");
		}
	} else {
		// The grammar only produces "-" and "not".
		assert (false);
	}
}
@Check(CheckType.FAST)
public void checkPropertyStatement(PropertyStatement propStat) {
	// A named property must evaluate to a boolean.
	AgreeType exprType = getAgreeType(propStat.getExpr());
	if (!matches(BOOL, exprType)) {
		error(propStat, "Property statement '" + propStat.getName() + "' is of type '"
				+ exprType + "' but must be of type 'bool'");
	}
}
@Check(CheckType.FAST)
public void checkRecordUpdateExpr(RecordUpdateExpr upExpr){
	EList<NamedElement> args = upExpr.getArgs();
	EList<Expr> argExprs = upExpr.getArgExpr();
	// The parser guarantees one expression per updated field.
	assert (args.size() == argExprs.size());
	for (int i = 0; i < args.size(); i++) {
		AgreeType fieldType = getAgreeType(args.get(i));
		AgreeType valueType = getAgreeType(argExprs.get(i));
		if (!matches(fieldType, valueType)) {
			error(argExprs.get(i), "the update field is of type '" + fieldType
					+ "', but the expression is of type '" + valueType + "'");
		}
	}
}
/**
 * Validates a record type reference: it must resolve to either an AGREE
 * record definition or an AADL data implementation; data implementations
 * are additionally checked for AGREE interpretability and cyclic definitions.
 */
@Check(CheckType.FAST)
public void checkRecordType(RecordType recType){
NestedDotID recId = recType.getRecord();
// Follow the (possibly dotted) reference to its final element.
NamedElement finalId = getFinalNestId(recId);
if(!(finalId instanceof DataImplementation)
&& !(finalId instanceof RecordDefExpr)){
error(recType, "types must be record definition or data implementation");
}
if(finalId instanceof DataImplementation){
AgreeType agreeType = getAgreeType((DataImplementation)finalId);
// ERROR means the implementation has no subcomponents and does not
// extend a Base_Type AGREE understands.
if(agreeType.equals(AgreeType.ERROR)){
error(recType, "Data Implementations with no subcomponents must extend"+
" a Base_Type that AGREE can reason about.");
return;
}
// An implementation with subcomponents must not also look like a
// primitive Base_Type extension -- the two interpretations conflict.
if(((DataImplementation) finalId).getAllSubcomponents().size() != 0){
if(agreeType.equals(AgreeType.BOOL)
|| agreeType.equals(AgreeType.INT)
|| agreeType.equals(AgreeType.REAL)){
error(finalId, "Data implementations with subcomponents cannot be"
+" interpreted by AGREE if they extend Base_Types");
}
}
// Reject recursively-defined data implementations.
dataImplCycleCheck(recId);
}
}
/**
 * Type-checks a record expression: the referenced type must be a record
 * definition or a data implementation, every defined field must be assigned,
 * and each assigned expression must match its field's type.
 */
@Check(CheckType.FAST)
public void checkRecordExpr(RecordExpr recExpr){
	NestedDotID recId = recExpr.getRecord();
	List<NamedElement> defArgs = getArgNames(recId);
	EList<NamedElement> exprArgs = recExpr.getArgs();
	EList<Expr> argExprs = recExpr.getArgExpr();
	NamedElement finalId = getFinalNestId(recId);
	if(!(finalId instanceof DataImplementation)
			&& !(finalId instanceof RecordDefExpr)){
		error(recId, "types must be record definition or data implementation");
	}
	if(finalId instanceof DataImplementation){
		dataImplCycleCheck(recId);
	}
	if(exprArgs.size() != defArgs.size()){
		error(recExpr, "Incorrect number of arguments");
		return;
	}
	// Every defined field must be assigned somewhere in the expression.
	for(NamedElement argDefName : defArgs){
		boolean foundArg = false;
		for(NamedElement argExprEl : exprArgs){
			if(argExprEl.getName().equals(argDefName.getName())){
				foundArg = true;
				break;
			}
		}
		if(!foundArg){
			// BUG FIX: the original concatenated the NamedElement itself
			// (object toString) rather than its name.
			error(recExpr, "No assignment to defined variable '"+argDefName.getName()+
					"' in record expression.");
		}
	}
	// Type-check each assignment. BUG FIX: the original paired definition
	// fields and expressions positionally even though the check above allows
	// fields to be assigned in any order; match by field name instead.
	for(int i = 0; i < exprArgs.size(); i++){
		NamedElement exprArg = exprArgs.get(i);
		NamedElement defArg = null;
		for(NamedElement candidate : defArgs){
			if(candidate.getName().equals(exprArg.getName())){
				defArg = candidate;
				break;
			}
		}
		if(defArg == null){
			continue; // unknown field; the size/name checks above already fired
		}
		AgreeType defType = getAgreeType(defArg);
		AgreeType exprType = getAgreeType(argExprs.get(i));
		if(!matches(defType, exprType)){
			error(recExpr, "The expression assigned to '"+defArg.getName()+
					"' does not match its definition type of '"+defType);
		}
	}
}
/** Returns the field elements of a record definition or data implementation. */
private List<NamedElement> getArgNames(NestedDotID recId){
	NamedElement rec = getFinalNestId(recId);
	List<NamedElement> names = new ArrayList<NamedElement>();
	if (rec instanceof RecordDefExpr) {
		for (Arg arg : ((RecordDefExpr) rec).getArgs()) {
			names.add(arg);
		}
	} else if (rec instanceof DataImplementation) {
		for (Subcomponent sub : ((DataImplementation) rec).getAllSubcomponents()) {
			names.add(sub);
		}
	} else {
		error(recId, "Record type '"+rec.getName()+
				"' must be a feature group or a record type definition");
	}
	return names;
}
// private List<AgreeType> getArgTypes(NestedDotID recId){
// NamedElement rec = getFinalNestId(recId);
// List<AgreeType> types = new ArrayList<AgreeType>();
// if(rec instanceof RecordDefExpr){
// RecordDefExpr recDef = (RecordDefExpr)rec;
// for(Arg arg : recDef.getArgs()){
// types.add(getAgreeType(arg.getType()));
// }else if(rec instanceof FeatureGroupType){
// FeatureGroupType featGroup = (FeatureGroupType)rec;
// for(Feature feat : featGroup.getAllFeatures()){
// types.add(getAgreeType(feat));
// return types;
/**
 * Reports an error if the data implementation referenced by {@code dataID}
 * (transitively) contains a subcomponent of its own type. Works by growing
 * the set of reachable data implementations to a fixed point.
 */
private void dataImplCycleCheck(NestedDotID dataID){
NamedElement finalId = getFinalNestId(dataID);
// Callers guarantee finalId is a DataImplementation before invoking this.
DataImplementation dataImpl = (DataImplementation)finalId;
Set<DataImplementation> dataClosure = new HashSet<>();
Set<DataImplementation> prevClosure = null;
// Seed the closure with the implementations of the direct subcomponents.
for(Subcomponent sub : dataImpl.getAllSubcomponents()){
ComponentImplementation subImpl = sub.getComponentImplementation();
if(subImpl != null){
dataClosure.add((DataImplementation)subImpl);
}
}
// Expand until no new implementations are discovered; re-encountering
// dataImpl itself means the definition is cyclic.
do{
prevClosure = new HashSet<>(dataClosure);
for(DataImplementation subImpl : prevClosure){
if(subImpl == dataImpl){
error(dataID, "The component implementation '"+dataImpl.getName()+
"' has a cyclic definition. This cannot be reasoned about by AGREE.");
break;
}
for(Subcomponent subSub : subImpl.getAllSubcomponents()){
ComponentImplementation subSubImpl = subSub.getComponentImplementation();
if(subSubImpl != null){
dataClosure.add((DataImplementation)subSubImpl);
}
}
}
}while(!prevClosure.equals(dataClosure));
}
/**
 * Checks a record definition: each field of record type must reference a
 * record definition or data implementation, and the definition must not be
 * (transitively) cyclic. Cycles are detected by growing the set of record
 * definitions reachable from the fields to a fixed point.
 */
@Check(CheckType.FAST)
public void checkRecordDefExpr(RecordDefExpr recordDef){
Set<RecordDefExpr> recordClosure = new HashSet<RecordDefExpr>();
Set<RecordDefExpr> prevClosure = null;
// Seed the closure with record types referenced directly by the fields;
// data-implementation fields are cycle-checked separately.
for(Arg arg : recordDef.getArgs()){
Type type = arg.getType();
if(type instanceof RecordType){
NestedDotID subRec = ((RecordType) type).getRecord();
NamedElement finalId = getFinalNestId(subRec);
if(!(finalId instanceof DataImplementation)
&& !(finalId instanceof RecordDefExpr)){
error(type, "types must be record definition or data implementation");
return;
}
if(finalId instanceof RecordDefExpr){
recordClosure.add((RecordDefExpr)finalId);
}else{
dataImplCycleCheck(subRec);
}
}
}
// Expand to a fixed point; finding recordDef itself means a cycle.
do{
prevClosure = new HashSet<>(recordClosure);
for(RecordDefExpr subRecDef : prevClosure){
if(subRecDef == recordDef){
error(recordDef, "The definition of type '"+recordDef.getName()+
"' is involved in a cyclic definition");
break;
}
for(Arg arg : subRecDef.getArgs()){
Type type = arg.getType();
if(type instanceof RecordType){
NestedDotID subRecId = ((RecordType) type).getRecord();
NamedElement subFinalEl = getFinalNestId(subRecId);
if(subFinalEl instanceof RecordDefExpr){
recordClosure.add((RecordDefExpr)subFinalEl);
}
}
}
}
}while(!prevClosure.equals(recordClosure));
}
/**
 * Checks a constant declaration: the declared type must match the expression's
 * type, the definition must not be cyclic, and every sub-expression must be
 * constant-valued.
 */
@Check(CheckType.FAST)
public void checkConstStatement(ConstStatement constStat) {
Type type = constStat.getType();
AgreeType expected = getAgreeType(type);
AgreeType actual = getAgreeType(constStat.getExpr());
if (!matches(expected, actual)) {
error(constStat, "The assumed type of constant statement '" + constStat.getName()
+ "' is '" + expected + "' but the actual type is '" + actual + "'");
}
// check for constant cycles
Set<ConstStatement> constClosure = new HashSet<ConstStatement>();
Set<ConstStatement> prevClosure;
constClosure.add(constStat);
// quick and dirty cycle check: grow the set of constants reachable from
// this declaration until a fixed point; reaching constStat again means
// the definition is cyclic.
do {
prevClosure = new HashSet<ConstStatement>(constClosure);
for (ConstStatement constFrontElem : prevClosure) {
List<NestedDotID> nestIds = EcoreUtil2.getAllContentsOfType(constFrontElem,
NestedDotID.class);
for (NestedDotID nestId : nestIds) {
// Walk each dotted reference to catch constants at any depth.
while(nestId != null){
NamedElement base = nestId.getBase();
if (base instanceof ConstStatement) {
ConstStatement closConst = (ConstStatement) base;
if (closConst.equals(constStat)) {
error(constStat,
"The expression for constant statment '" + constStat.getName()
+ "' is part of a cyclic definition");
break;
}
constClosure.add(closConst);
}
nestId = nestId.getSub();
}
}
}
} while (!prevClosure.equals(constClosure));
// Finally, every sub-expression must be something allowed in a constant.
for (Expr e : EcoreUtil2.getAllContentsOfType(constStat.getExpr(), Expr.class)) {
if (!isPossibleConstant(e)) {
error(e, "Non-constant expression in constant declaration");
return;
}
}
}
/** Maps a syntactic type to its AGREE type (primitive name or qualified record). */
private AgreeType getAgreeType(Type type) {
	if (type instanceof PrimType) {
		return new AgreeType(((PrimType) type).getString());
	}
	// Non-primitive types are record references; resolve the dotted ID.
	NestedDotID recId = ((RecordType) type).getRecord();
	return getNestIdAsType(recId);
}
/**
 * Builds the AGREE type name for the element a nested ID resolves to,
 * qualified as "package::name" (record definitions nested in a classifier
 * become "package::classifier.name"). Data implementations backed by a
 * native AGREE type (bool/int/real) return that native type directly.
 */
private AgreeType getNestIdAsType(NestedDotID recId){
String typeName = "";
NamedElement recEl = getFinalNestId(recId);
// Walk up the containment tree to the enclosing AADL package.
EObject aadlPack = recEl.eContainer();
while(!(aadlPack instanceof AadlPackage)){
aadlPack = aadlPack.eContainer();
}
String packName = ((AadlPackage)aadlPack).getName();
if(recEl instanceof RecordDefExpr){
// A record definition may sit directly in the package or be nested
// inside a component classifier; qualify the name accordingly.
EObject component = recEl.eContainer();
while(!(component instanceof ComponentClassifier)
&& !(component instanceof AadlPackage)){
component = component.eContainer();
}
if(component == aadlPack){
typeName = recEl.getName();
}else{
typeName = ((ComponentClassifier)component).getName() + "." + recEl.getName();
}
}else if(recEl instanceof DataImplementation){
// Base_Type-backed data implementations collapse to a native type.
AgreeType nativeType = getNativeType((DataImplementation)recEl);
if(nativeType != null){
return nativeType;
}
typeName = recEl.getName();
}
typeName = packName+"::"+typeName;
return new AgreeType(typeName);
}
/** Returns whether {@code e} is allowed inside a constant declaration. */
public boolean isPossibleConstant(Expr e) {
	// Temporal operators are never constant.
	if (e instanceof PrevExpr || e instanceof PreExpr) {
		return false;
	}
	// The initialization arrow "->" is not constant either.
	if (e instanceof BinaryExpr && ((BinaryExpr) e).getOp().equals("->")) {
		return false;
	}
	if (e instanceof NestedDotID) {
		// References inside Get_Property calls are fine.
		if (EcoreUtil2.getContainerOfType(e, GetPropertyExpr.class) != null) {
			return true;
		}
		// Otherwise the reference must resolve to a constant-valued element.
		NamedElement base = getFinalNestId((NestedDotID) e);
		return base instanceof DataImplementation
				|| base instanceof ConstStatement
				|| base instanceof RecordExpr
				|| base instanceof DataSubcomponent;
	}
	return true;
}
/**
 * Checks named elements declared in an AGREE annex for name collisions with
 * the enclosing component type's features and (via checkDupNames) with the
 * enclosing implementation's subcomponents and its type's annex elements.
 */
@Check(CheckType.FAST)
public void checkNamedElement(NamedElement namedEl) {
// check for namespace collision in component types of component
// implementations
// and for collisions between subcomponent and feature names
EObject container = namedEl.eContainer();
if(container instanceof RecordDefExpr
|| container instanceof NodeDefExpr){
//don't care about arguments to recDefs and nodeDefs
//TODO: perhaps we can ignore all arguments?
return;
}
// Climb to the nearest package, implementation, or type container.
while (!(container instanceof AadlPackage || container instanceof ComponentImplementation || container instanceof ComponentType)) {
container = container.eContainer();
}
ComponentImplementation compImpl = null;
ComponentType type = null;
if (container instanceof ComponentImplementation) {
compImpl = (ComponentImplementation) container;
type = compImpl.getType();
checkDupNames(namedEl, type, compImpl);
} else if (container instanceof ComponentType) {
type = (ComponentType) container;
}
// Either way, the element must not shadow a feature of the type.
if (type != null) {
for (Feature feat : type.getAllFeatures()) {
if (namedEl.getName().equals(feat.getName())) {
error(feat, "Element of the same name ('" + namedEl.getName()
+ "') in AGREE Annex in '"
+ (compImpl == null ? type.getName() : compImpl.getName()) + "'");
error(namedEl, "Feature of the same name ('" + namedEl.getName()
+ "') in component type");
}
}
}
}
/**
 * Reports collisions between an annex element's name and (a) elements of the
 * component type's AGREE annex, and (b) the implementation's subcomponents.
 */
private void checkDupNames(NamedElement namedEl, ComponentType type,
		ComponentImplementation compImpl) {
	String name = namedEl.getName();
	NamedElement match = matchedInAgreeAnnex(type, name);
	if (match != null) {
		error(match, "Element of the same name ('" + name
				+ "') in component implementation '" + compImpl.getName() + "'");
		error(namedEl, "Element of the same name ('" + name
				+ "') in component type");
	}
	for (Subcomponent sub : compImpl.getAllSubcomponents()) {
		if (name.equals(sub.getName())) {
			error(sub, "Element of the same name ('" + name
					+ "') in AGREE Annex in '" + compImpl.getName() + "'");
			error(namedEl, "Subcomponent of the same name ('" + name
					+ "') in component implementation");
		}
	}
}
/** Finds a named element called {@code name} in the classifier's AGREE annexes, or null. */
private NamedElement matchedInAgreeAnnex(ComponentClassifier compClass, String name) {
	for (AnnexSubclause subClause : AnnexUtil.getAllAnnexSubclauses(compClass, AgreePackage.eINSTANCE.getAgreeSubclause())) {
		if (!(subClause instanceof AgreeSubclause)) {
			continue;
		}
		// The contract is always the first (and only) child of the subclause.
		AgreeContract contr = (AgreeContract) subClause.getChildren().get(0);
		for (EObject obj : contr.getChildren()) {
			if (obj instanceof NamedElement
					&& name.equals(((NamedElement) obj).getName())) {
				return (NamedElement) obj;
			}
		}
	}
	return null;
}
/**
 * Checks a (possibly multi-variable) equation: the number of left-hand-side
 * variables must match the number of values the right-hand side produces
 * (node calls may return several), and each pair of types must match.
 */
private void checkMultiAssignEq(EObject src, List<Arg> lhsArgs, Expr rhsExpr) {
// An equation without a defining expression declares variables only.
if (rhsExpr == null) {
return;
}
List<AgreeType> agreeLhsTypes = typesFromArgs(lhsArgs);
List<AgreeType> agreeRhsTypes = new ArrayList<>();
if (rhsExpr instanceof FnCallExpr) {
// Node calls yield one type per return value; function calls yield one.
NamedElement namedEl = getFinalNestId(((FnCallExpr) rhsExpr).getFn());
if (namedEl instanceof NodeDefExpr) {
NodeDefExpr nodeDef = (NodeDefExpr) namedEl;
for (Arg var : nodeDef.getRets()) {
agreeRhsTypes.add(getAgreeType(var.getType()));
}
} else if (namedEl instanceof FnDefExpr) {
FnDefExpr fnDef = (FnDefExpr) namedEl;
agreeRhsTypes.add(getAgreeType(fnDef.getType()));
} else {
return; // parse error
}
} else {
agreeRhsTypes.add(getAgreeType(rhsExpr));
}
// Arity must agree before element-wise type checking makes sense.
if (agreeLhsTypes.size() != agreeRhsTypes.size()) {
error(src, "Equation assigns " + agreeLhsTypes.size()
+ " variables, but right side returns " + agreeRhsTypes.size() + " values");
return;
}
for (int i = 0; i < agreeLhsTypes.size(); i++) {
AgreeType lhsType = agreeLhsTypes.get(i);
AgreeType rhsType = agreeRhsTypes.get(i);
if (!matches(rhsType, lhsType)) {
error(src, "The variable '" + lhsArgs.get(i).getName()
+ "' on the left side of equation is of type '" + lhsType
+ "' but must be of type '" + rhsType + "'");
}
}
// NOTE(review): the block below is a disabled draft of an equation cycle
// check, analogous to the one in checkConstStatement; left as-is.
// // check for constant cycles
// Set<EObject> eqClosure = new HashSet<EObject>();
// Set<EObject> prevClosure;
// eqClosure.add(src);
// // quick and dirty cycle check
// prevClosure = new HashSet<EObject>(eqClosure);
// for (EObject constFrontElem : prevClosure) {
// List<NestedDotID> nestIds = EcoreUtil2.getAllContentsOfType(constFrontElem,
// NestedDotID.class);
// for (NestedDotID nestId : nestIds) {
// while(nestId != null){
// NamedElement base = nestId.getBase();
// if (base instanceof Arg) {
// EObject container = base;
// while(!(container instanceof EqStatement) &&
// !(container instanceof NodeEq)){
// container = container.eContainer();
// if (lhsArgs.contains(base)) {
// warning(src,
// "The expression for eq statment '" + base.getName()
// + "' may be part of a cyclic definition");
// break;
// eqClosure.add(container);
// nestId = nestId.getSub();
// } while (!prevClosure.equals(eqClosure));
}
/**
 * Equation ('eq') statements may only appear inside component annex
 * subclauses, never in an AGREE annex library; also delegates to the shared
 * multi-assignment type check.
 */
@Check(CheckType.FAST)
public void checkEqStatement(EqStatement eqStat) {
    AnnexLibrary library = EcoreUtil2.getContainerOfType(eqStat, AnnexLibrary.class);
    if (library != null) {
        // message typo fixed: "statments" -> "statements"
        error(eqStat, "Equation statements are only allowed in component annexes");
    }
    checkMultiAssignEq(eqStat, eqStat.getLhs(), eqStat.getExpr());
}
/**
 * Checks an AGREE contract for conflicting or duplicate statements:
 *  - at most one synchrony/calendar statement per contract
 *  - at most one 'initially' statement per contract
 *  - no duplicate connection overrides
 *  - no names in an implementation contract that are already defined in the
 *    component type contract
 */
@Check(CheckType.FAST)
public void checkNameOverlap(AgreeContract contract) {
    Set<SynchStatement> syncs = new HashSet<>();
    Set<InitialStatement> inits = new HashSet<>();
    List<ConnectionStatement> conns = new ArrayList<>();
    // bucket the contract's spec statements by kind
    for (SpecStatement spec : contract.getSpecs()) {
        if (spec instanceof SynchStatement) {
            syncs.add((SynchStatement) spec);
        } else if (spec instanceof CalenStatement) {
            // calendar statements count against the same "one per contract" limit
            syncs.add((CalenStatement) spec);
        } else if (spec instanceof InitialStatement) {
            inits.add((InitialStatement) spec);
        } else if (spec instanceof ConnectionStatement) {
            conns.add((ConnectionStatement) spec);
        }
    }
    if (syncs.size() > 1) {
        for (SynchStatement sync : syncs) {
            // message typo fixed: "calender" -> "calendar"
            error(sync, "Multiple synchrony or calendar statements in a single contract");
        }
    }
    if (inits.size() > 1) {
        for (InitialStatement init : inits) {
            error(init, "Multiple initially statements in a single contract");
        }
    }
    // pairwise scan for duplicate connection overrides
    for (int i = 0; i < conns.size(); i++) {
        ConnectionStatement connStat0 = conns.get(i);
        NamedElement conn0 = connStat0.getConn();
        if (conn0 == null) {
            continue; // unresolved reference; nothing to compare against
        }
        for (int j = i + 1; j < conns.size(); j++) {
            ConnectionStatement connStat1 = conns.get(j);
            NamedElement conn1 = connStat1.getConn();
            // BUG FIX: a null conn1 previously aborted the entire inner scan
            // ("break"), hiding duplicates appearing after an unresolved entry;
            // skip just the unresolved entry instead.
            if (conn1 == null) {
                continue;
            }
            if (conn0.equals(conn1)) {
                error(connStat0, "Multiple connection overrides for connection: '" + conn0.getName() + "'");
                error(connStat1, "Multiple connection overrides for connection: '" + conn1.getName() + "'");
            }
        }
    }
    ComponentImplementation ci = EcoreUtil2.getContainerOfType(contract,
            ComponentImplementation.class);
    if (ci == null) {
        return;
    }
    // names declared in the component type contract must not be redefined here
    Set<String> parentNames = getParentNames(ci);
    for (AgreeSubclause subclause : EcoreUtil2.getAllContentsOfType(ci, AgreeSubclause.class)) {
        List<NamedElement> es = EcoreUtil2.getAllContentsOfType(subclause, NamedElement.class);
        for (NamedElement e : es) {
            if (!(e.eContainer() instanceof NodeDefExpr)) { // ignore elements in node defs
                if (parentNames.contains(e.getName())) {
                    error(e, e.getName() + " already defined in component type contract");
                }
            }
        }
    }
}
/**
 * Collects the names of all AGREE elements declared in the component type
 * contract of the given implementation, ignoring node-local declarations.
 */
private Set<String> getParentNames(ComponentImplementation ci) {
    Set<String> names = new HashSet<>();
    ComponentType type = ci.getType();
    for (AgreeSubclause subclause : EcoreUtil2.getAllContentsOfType(type, AgreeSubclause.class)) {
        for (NamedElement element : EcoreUtil2.getAllContentsOfType(subclause, NamedElement.class)) {
            // elements inside node definitions are locally scoped; skip them
            if (!(element.eContainer() instanceof NodeDefExpr)) {
                names.add(element.getName());
            }
        }
    }
    return names;
}
// Node-body equations share the same multi-assignment type check as 'eq'
// statements (arity and per-position type agreement).
@Check(CheckType.FAST)
public void checkNodeEq(NodeEq nodeEq) {
    checkMultiAssignEq(nodeEq, nodeEq.getLhs(), nodeEq.getExpr());
}
/** A node lemma's expression must be boolean. */
@Check(CheckType.FAST)
public void checkNodeLemma(NodeLemma nodeLemma) {
    AgreeType actualType = getAgreeType(nodeLemma.getExpr());
    if (matches(BOOL, actualType)) {
        return;
    }
    error(nodeLemma, "Expression for lemma statement is of type '" + actualType
            + "' but must be of type 'bool'");
}
/**
 * Restricts identifiers referenced inside a node body to arguments, constants,
 * node/function definitions, and data type references.
 */
@Check(CheckType.FAST)
public void checkNodeStmt(NodeStmt nodeStmt) {
    for (NestedDotID dotId : EcoreUtil2.getAllContentsOfType(nodeStmt, NestedDotID.class)) {
        NamedElement target = getFinalNestId(dotId);
        boolean allowed = target instanceof Arg
                || target instanceof ConstStatement
                || target instanceof NodeDefExpr
                || target instanceof FnDefExpr
                || target instanceof DataSubcomponent
                || target instanceof RecordType
                || target instanceof DataImplementation
                || target instanceof RecordDefExpr;
        if (!allowed) {
            error(dotId, "Only arguments, constants, and node calls allowed within a node");
        }
    }
}
/**
 * Verifies that every return value and local variable of a node is assigned
 * by exactly one equation in the node body. Reports an error (and stops
 * checking) on the first assignment to a non-assignable target or the first
 * never-assigned variable; multiple assignments are reported per variable.
 */
@Check(CheckType.FAST)
public void checkNodeDef(NodeDefExpr nodeDefExpr) {
    if (nodeDefExpr.getNodeBody() == null) {
        return; // the parser reports this case on its own
    }
    // count assignments per assignable target (returns and locals)
    Map<Arg, Integer> assignCounts = new HashMap<>();
    for (Arg ret : nodeDefExpr.getRets()) {
        assignCounts.put(ret, 0);
    }
    for (Arg local : nodeDefExpr.getNodeBody().getLocs()) {
        assignCounts.put(local, 0);
    }
    for (NodeStmt stmt : nodeDefExpr.getNodeBody().getStmts()) {
        if (!(stmt instanceof NodeEq)) {
            continue;
        }
        for (Arg target : ((NodeEq) stmt).getLhs()) {
            Integer count = assignCounts.get(target);
            if (count == null) {
                error("Equation attempting to assign '" + target.getName()
                        + "', which is not an assignable value within the node");
                return;
            }
            assignCounts.put(target, count + 1);
        }
    }
    for (Map.Entry<Arg, Integer> entry : assignCounts.entrySet()) {
        if (entry.getValue() == 0) {
            error("Variable '" + entry.getKey().getName()
                    + "' is never assigned by an equation in node '" + nodeDefExpr.getName()
                    + "'");
            return;
        } else if (entry.getValue() > 1) {
            error("Variable '" + entry.getKey().getName()
                    + "' is assigned multiple times in node '" + nodeDefExpr.getName() + "'");
        }
    }
}
/** 'this' is only meaningful as the component argument of a Get_Property call. */
@Check(CheckType.FAST)
public void checkThisExpr(ThisExpr thisExpr) {
    if (thisExpr.eContainer() instanceof GetPropertyExpr) {
        return;
    }
    error(thisExpr, "'this' expressions can only be used in 'Get_Property' expressions.");
}
/**
 * A Get_Property expression must name a component as its first argument and
 * an AADL property as its second.
 */
@Check(CheckType.FAST)
public void checkGetPropertyExpr(GetPropertyExpr getPropExpr) {
    AgreeType componentType = getAgreeType(getPropExpr.getComponent());
    if (!componentType.equals(new AgreeType("component"))) {
        error(getPropExpr.getComponent(), "Expected type component, but found type " + componentType);
    }
    NamedElement property = getPropExpr.getProp();
    if (!(property instanceof Property)) {
        error(getPropExpr.getProp(), "Expected AADL property");
    }
}
/** prev(delay, init): both arguments must have the same type. */
@Check(CheckType.FAST)
public void checkPrevExpr(PrevExpr prevExpr) {
    AgreeType delayType = getAgreeType(prevExpr.getDelay());
    AgreeType initType = getAgreeType(prevExpr.getInit());
    if (matches(initType, delayType)) {
        return;
    }
    error(prevExpr,
            "The first and second arguments of the 'prev' function are of non-matching types '"
                    + delayType + "' and '" + initType + "'");
}
/**
 * Checks that the actual arguments of a node/function call match the
 * definition's formal parameters in number and type. Also rejects reserved
 * variable tags (e.g. clock tags) on the call target.
 */
public void checkInputsVsActuals(FnCallExpr fnCall) {
    NestedDotID dotId = fnCall.getFn();
    // if the id has a 'tag' then it is using a reserved variable
    String tag = getNestedDotIDTag(dotId);
    if (tag != null) {
        // message fix: the quote around the tag name was never closed
        error(fnCall, "Use of reserved variable tag: '" + tag + "' does not make sense"
                + " in the context of a node call");
    }
    NamedElement namedEl = getFinalNestId(dotId);
    if (!(namedEl instanceof CallDef)) {
        // this error will be caught elsewhere
        return;
    }
    CallDef callDef = (CallDef) namedEl;
    List<AgreeType> inDefTypes;
    String callName;
    // extract the formal parameter types and a printable name
    if (callDef instanceof FnDefExpr) {
        FnDefExpr fnDef = (FnDefExpr) callDef;
        inDefTypes = typesFromArgs(fnDef.getArgs());
        callName = fnDef.getName();
    } else if (callDef instanceof NodeDefExpr) {
        NodeDefExpr nodeDef = (NodeDefExpr) callDef;
        inDefTypes = typesFromArgs(nodeDef.getArgs());
        callName = nodeDef.getName();
    } else {
        error(fnCall, "Node or Function definition name expected.");
        return;
    }
    // arity first, then position-by-position type agreement
    List<AgreeType> argCallTypes = getAgreeTypes(fnCall.getArgs());
    if (inDefTypes.size() != argCallTypes.size()) {
        error(fnCall, "Function definition '" + callName + "' requires " + inDefTypes.size()
                + " arguments, but this function call provides " + argCallTypes.size()
                + " arguments");
        return;
    }
    for (int i = 0; i < inDefTypes.size(); i++) {
        AgreeType callType = argCallTypes.get(i);
        AgreeType defType = inDefTypes.get(i);
        if (!matches(defType, callType)) {
            error(fnCall, "Argument " + i + " of function call '" + callName + "' is of type '"
                    + callType + "' but must be of type '" + defType + "'");
        }
    }
}
// Validates each call expression's actual arguments against the called
// node/function definition.
@Check(CheckType.FAST)
public void checkFnCallExpr(FnCallExpr fnCall) {
    checkInputsVsActuals(fnCall);
}
/**
 * A function definition's body expression must have exactly the function's
 * declared type.
 */
@Check(CheckType.FAST)
public void checkFnDefExpr(FnDefExpr fnDef) {
    AgreeType declaredType = getAgreeType(fnDef.getType());
    if (declaredType == null) {
        return; // this error will be caught in parsing
    }
    AgreeType bodyType = getAgreeType(fnDef.getExpr());
    if (bodyType.equals(declaredType)) {
        return;
    }
    error(fnDef, "Function '" + fnDef.getName() + "' is of type '" + declaredType.toString()
            + "' but its expression is of type '" + bodyType + "'");
}
/**
 * Reports recursive dependencies among node/function definitions. Each loop is
 * reported once: every member of a detected loop is remembered so later calls
 * on the same members are skipped.
 */
@Check(CheckType.FAST)
public void checkCallDef(CallDef callDef) {
    if (checkedRecCalls.contains(callDef)) {
        return; // already reported as part of a recursion loop
    }
    FnCallRecursionHelper recHelp = new FnCallRecursionHelper();
    recHelp.doSwitch(callDef);
    for (LinkedList<CallDef> loop : recHelp.loops) {
        List<String> memberNames = new ArrayList<>();
        for (CallDef member : loop) {
            checkedRecCalls.add(member);
            memberNames.add(member.getName());
        }
        error(callDef, "There exists a recursive dependency between the "
                + "following node or function calls : " + String.join(" -> ", memberNames));
    }
}
/**
 * An if/then/else requires a boolean condition and matching types on the
 * 'then' and 'else' branches.
 */
@Check(CheckType.FAST)
public void checkIfThenElseExpr(IfThenElseExpr expr) {
    AgreeType condType = getAgreeType(expr.getA());
    if (!matches(BOOL, condType)) {
        error(expr, "The condition of the if statement is of type '" + condType
                + "' but must be of type 'bool'");
    }
    AgreeType thenType = getAgreeType(expr.getB());
    AgreeType elseType = getAgreeType(expr.getC());
    if (!matches(elseType, thenType)) {
        error(expr, "The 'then' and 'else' expressions are of non-matching types '" + thenType
                + "' and '" + elseType + "'");
    }
}
// The if/then/else expression takes the type of its 'then' branch; branch
// type agreement is enforced separately in checkIfThenElseExpr.
private AgreeType getAgreeType(IfThenElseExpr expr) {
    return getAgreeType(expr.getB());
}
/**
 * Type-checks a binary expression by operator:
 *  - "->" (arrow/init)        : both sides must have the same type
 *  - logical ops              : both sides must be bool
 *  - equality ops             : both sides must have the same type
 *  - relational/arithmetic    : both sides must be the same numeric type
 *  - mod/div                  : both sides must be int
 *  - "/"                      : both sides must be real
 * Non-linear arithmetic (non-constant multiplication/division) only warns,
 * since it is supported (though discouraged) when using the z3 solver.
 */
@Check(CheckType.FAST)
public void checkBinaryExpr(BinaryExpr binExpr) {
    AgreeType typeLeft = getAgreeType(binExpr.getLeft());
    AgreeType typeRight = getAgreeType(binExpr.getRight());
    String op = binExpr.getOp();
    boolean rightSideConst = exprIsConst(binExpr.getRight());
    boolean leftSideConst = exprIsConst(binExpr.getLeft());
    switch (op) {
    case "->":
        if (!matches(typeRight, typeLeft)) {
            error(binExpr, "left and right sides of binary expression '" + op
                    + "' are of type '" + typeLeft + "' and '" + typeRight
                    + "', but must be of the same type");
        }
        return;
    case "=>":
    case "<=>":
    case "and":
    case "or":
        if (!matches(BOOL, typeLeft)) {
            error(binExpr, "left side of binary expression '" + op + "' is of type '"
                    + typeLeft.toString() + "' but must be of type 'bool'");
        }
        if (!matches(BOOL, typeRight)) {
            error(binExpr, "right side of binary expression '" + op + "' is of type '"
                    + typeRight.toString() + "' but must be of type 'bool'");
        }
        return;
    case "=":
    case "<>":
    case "!=":
        if (!matches(typeRight, typeLeft)) {
            error(binExpr, "left and right sides of binary expression '" + op
                    + "' are of type '" + typeLeft + "' and '" + typeRight
                    + "', but must be of the same type");
        }
        return;
    case "<":
    case "<=":
    case ">":
    case ">=":
    case "+":
    case "-":
    case "*":
        if (!matches(typeRight, typeLeft)) {
            error(binExpr, "left and right sides of binary expression '" + op
                    + "' are of type '" + typeLeft + "' and '" + typeRight
                    + "', but must be of the same type");
        }
        // message fix: the missing space previously produced "of type'int'"
        if (!matches(INT, typeLeft) && !matches(REAL, typeLeft)) {
            error(binExpr, "left side of binary expression '" + op + "' is of type '"
                    + typeLeft + "' but must be of type 'int' or 'real'");
        }
        if (!matches(INT, typeRight) && !matches(REAL, typeRight)) {
            error(binExpr, "right side of binary expression '" + op + "' is of type '"
                    + typeRight + "' but must be of type 'int' or 'real'");
        }
        if (op.equals("*") && !rightSideConst && !leftSideConst) {
            // message fixes: stray quote after "constant" removed;
            // "recomended" -> "recommended"
            warning(binExpr, "neither the right nor the left side of binary expression '"
                    + op + "' is constant. Non-linear expressions are only allowed with z3."
                    + " Even with z3 they are not recommended...");
        }
        return;
    case "mod":
    case "div":
        if (!matches(INT, typeLeft)) {
            error(binExpr, "left side of binary expression '" + op + "' is of type '"
                    + typeLeft + "' but must be of type 'int'");
        }
        if (!matches(INT, typeRight)) {
            error(binExpr, "right side of binary expression '" + op + "' is of type '"
                    + typeRight + "' but must be of type 'int'");
        }
        if (!rightSideConst) {
            warning(binExpr, "right side of binary expression '" + op + "' is not constant."
                    + " Non-linear expressions are only allowed with z3."
                    + " Even with z3 they are not recommended...");
        }
        return;
    case "/":
        if (!matches(REAL, typeLeft)) {
            error(binExpr, "left side of binary expression '" + op + "' is of type '"
                    + typeLeft + "' but must be of type 'real'");
        }
        if (!matches(REAL, typeRight)) {
            error(binExpr, "right side of binary expression '" + op + "' is of type '"
                    + typeRight + "' but must be of type 'real'");
        }
        if (!rightSideConst) {
            warning(binExpr, "right side of binary expression '" + op + "' is not constant."
                    + " Non-linear expressions are only allowed with z3."
                    + " Even with z3 they are not recommended...");
        }
        return;
    default:
        // the grammar should make any other operator unreachable
        assert (false);
    }
}
/**
 * Conservatively decides whether an expression is a compile-time constant:
 * literals, references that resolve to 'const' statements, and unary/binary
 * combinations thereof. Anything else is treated as non-constant.
 */
private boolean exprIsConst(Expr expr) {
    if (expr instanceof RealLitExpr
            || expr instanceof IntLitExpr
            || expr instanceof BoolLitExpr) {
        return true;
    }
    if (expr instanceof NestedDotID) {
        return getFinalNestId((NestedDotID) expr) instanceof ConstStatement;
    }
    if (expr instanceof BinaryExpr) {
        BinaryExpr bin = (BinaryExpr) expr;
        return exprIsConst(bin.getLeft()) && exprIsConst(bin.getRight());
    }
    if (expr instanceof UnaryExpr) {
        return exprIsConst(((UnaryExpr) expr).getExpr());
    }
    return false;
}
/**
 * Walks the ownership chain of an element looking for an enclosing
 * node/function definition.
 *
 * Return type fixed from boxed {@code Boolean} to primitive {@code boolean}:
 * this private predicate never returns null, and every caller immediately
 * unboxes the result.
 *
 * @param e starting element; may be null
 * @return true if e or any owner of e is a CallDef
 */
private boolean hasCallDefParent(Element e) {
    while (e != null) {
        if (e instanceof CallDef) {
            return true;
        }
        e = e.getOwner();
    }
    return false;
}
// TODO: more validation may be needed here (original note trailed off about
// the Id of the IdExpr).
/**
 * Inside a node/function body, referenced identifiers must either be declared
 * inside some node/function or be global constants.
 */
private void checkScope(Expr expr, NamedElement id) {
    boolean exprInsideNode = hasCallDefParent(expr);
    boolean idIsNodeLocal = hasCallDefParent(id);
    if (exprInsideNode && !idIsNodeLocal && !(id instanceof ConstStatement)) {
        error("Unknown identifier Id: '"
                + id
                + "' (Note that nodes can only refer to inputs, outputs, and local variables and global constants).");
    }
}
/**
 * Follows a nested dotted identifier to its last segment and returns the
 * element that final segment refers to.
 */
public NamedElement getFinalNestId(NestedDotID dotId) {
    NestedDotID sub = dotId.getSub();
    return (sub == null) ? dotId.getBase() : getFinalNestId(sub);
}
/**
 * Returns the reserved-variable tag (e.g. "_CLK") on the final segment of a
 * nested dotted identifier, or null if the identifier carries no tag.
 */
public String getNestedDotIDTag(NestedDotID dotId) {
    NestedDotID sub = dotId.getSub();
    return (sub == null) ? dotId.getTag() : getNestedDotIDTag(sub);
}
// An argument's AGREE type is the type it was declared with.
public AgreeType getAgreeType(Arg arg) {
    return getAgreeType(arg.getType());
}
// A unary expression takes the type of its operand.
private AgreeType getAgreeType(UnaryExpr unaryExpr) {
    return getAgreeType(unaryExpr.getExpr());
}
/**
 * Types a dotted identifier reference. Reserved-variable tags have fixed
 * types; anything else is typed by the element the identifier resolves to.
 */
private AgreeType getAgreeType(NestedDotID nestDotIdExpr) {
    String tag = getNestedDotIDTag(nestDotIdExpr);
    if (tag == null) {
        return getAgreeType(getFinalNestId(nestDotIdExpr));
    }
    switch (tag) {
    case "_CLK":
    case "_INSERT":
    case "_REMOVE":
        return BOOL;
    case "_COUNT":
        return INT;
    default:
        return ERROR;
    }
}
/**
 * Maps a resolved model element to its AGREE type. AADL properties map to
 * primitive types, data classifiers/ports map through the data type helpers,
 * components and subcomponents map to the "component" type, and anything
 * unrecognized yields ERROR.
 */
private AgreeType getAgreeType(NamedElement namedEl) {
    if (namedEl instanceof Property) {
        Property propVal = (Property) namedEl;
        PropertyType propType = propVal.getPropertyType();
        if (propType instanceof AadlBoolean) {
            return BOOL;
        } else if (propType instanceof AadlString || propType instanceof EnumerationType) {
            return new AgreeType("string");
        } else if (propType instanceof AadlInteger) {
            return INT;
        } else if (propType instanceof AadlReal) {
            return REAL;
        } else if (propType instanceof ClassifierType) {
            return new AgreeType("component");
        }
    } else if (namedEl instanceof DataSubcomponent) {
        // this is for checking "Base_Types::Boolean" etc...
        ComponentClassifier compClass = ((DataSubcomponent) namedEl).getAllClassifier();
        if (compClass instanceof DataImplementation) {
            return getAgreeType((DataImplementation) compClass);
        }
        return getAgreeType(compClass);
    } else if (namedEl instanceof Arg) {
        return getAgreeType((Arg) namedEl);
    } else if (namedEl instanceof ClassifierType || namedEl instanceof Subcomponent) {
        return new AgreeType("component");
    } else if (namedEl instanceof PropertyStatement) {
        return getAgreeType((PropertyStatement) namedEl);
    } else if (namedEl instanceof ConstStatement) {
        return getAgreeType((ConstStatement) namedEl);
    } else if (namedEl instanceof EqStatement) {
        // BUG FIX: this branch previously called getAgreeType(namedEl) with the
        // same static type (NamedElement), which re-entered this same overload
        // and recursed forever (StackOverflowError if ever reached). An
        // EqStatement itself has no value type -- its Args are what references
        // resolve to -- so treat it as an error type.
        return ERROR;
    } else if (namedEl instanceof DataPort) {
        return getAgreeType(((DataPort) namedEl).getDataFeatureClassifier());
    } else if (namedEl instanceof EventDataPort) {
        return getAgreeType(((EventDataPort) namedEl).getDataFeatureClassifier());
    } else if (namedEl instanceof DataAccess) {
        return getAgreeType((NamedElement) ((DataAccess) namedEl).getFeatureClassifier());
    } else if (namedEl instanceof DataType) {
        return getAgreeType((ComponentClassifier) namedEl);
    } else if (namedEl instanceof DataImplementation) {
        return getAgreeType((DataImplementation) namedEl);
    }
    return ERROR;
}
// Resolves the AGREE type of a data implementation: primitive Base_Types map
// to native AGREE types via getNativeType; anything else becomes a type named
// by its qualified "Package::Implementation" string.
private AgreeType getAgreeType(DataImplementation dataImpl) {
    AgreeType nativeType = getNativeType(dataImpl);
    if (nativeType != null) {
        return nativeType;
    }
    // NOTE(review): assumes the implementation's container's container is the
    // enclosing AadlPackage -- confirm this holds for all model structures.
    AadlPackage aadlPack = (AadlPackage) dataImpl.eContainer().eContainer();
    String typeStr = aadlPack.getName() + "::" + dataImpl.getName();
    return new AgreeType(typeStr);
}
/**
 * Returns the native AGREE type of a data implementation with no
 * subcomponents (determined by its component type), or null when the
 * implementation has subcomponents (i.e. is a record-like type).
 */
private AgreeType getNativeType(DataImplementation dataImpl) {
    if (dataImpl.getAllSubcomponents().isEmpty()) {
        // no subcomponents: the component type alone determines the type
        return getAgreeType((ComponentClassifier) dataImpl.getType());
    }
    return null;
}
// Walks a data classifier's extension chain looking for one of the primitive
// Base_Types classifiers; anything unrecognized ends as AgreeType.ERROR.
private AgreeType getAgreeType(ComponentClassifier dataClass) {
    while (dataClass != null) {
        switch (dataClass.getQualifiedName()) {
        case "Base_Types::Boolean":
            return BOOL;
        case "Base_Types::Integer":
            return INT;
        case "Base_Types::Float":
            return REAL;
        }
        // NOTE(review): unchecked cast -- a non-DataType classifier reaching
        // here would throw ClassCastException; confirm callers only pass data
        // types.
        DataType dataType = (DataType) dataClass;
        dataClass = dataType.getExtended();
    }
    return AgreeType.ERROR;
}
/**
 * Types a component type by the root of its extension chain: the primitive
 * Base_Types map to native AGREE types, everything else becomes a type named
 * by its qualified name.
 */
private AgreeType getAgreeType(ComponentType compType) {
    // walk to the root of the extension chain
    ComponentType root = compType;
    while (root.getExtended() != null) {
        root = root.getExtended();
    }
    String qualName = root.getQualifiedName();
    switch (qualName) {
    case "Base_Types::Boolean":
        return BOOL;
    case "Base_Types::Integer":
        return INT;
    case "Base_Types::Float":
        return REAL;
    default:
        return new AgreeType(qualName);
    }
}
/**
 * Types a data subcomponent type. A DataType that extends another type defers
 * to the extended type; otherwise primitive Base_Types map to native AGREE
 * types and anything else is named by its qualified name.
 */
private AgreeType getAgreeType(DataSubcomponentType data) {
    if (data instanceof DataType) {
        ComponentType extended = ((DataType) data).getExtended();
        if (extended != null) {
            return getAgreeType(extended);
        }
    }
    String qualName = data.getQualifiedName();
    switch (qualName) {
    case "Base_Types::Boolean":
        return BOOL;
    case "Base_Types::Integer":
        return INT;
    case "Base_Types::Float":
        return REAL;
    default:
        return new AgreeType(qualName);
    }
}
// A 'property' statement has the type of its defining expression.
private AgreeType getAgreeType(PropertyStatement propStat) {
    return getAgreeType(propStat.getExpr());
}
// A 'const' statement has its declared type.
private AgreeType getAgreeType(ConstStatement constStat) {
    return getAgreeType(constStat.getType());
}
// Get_Property yields the type of the AADL property being read.
private AgreeType getAgreeType(GetPropertyExpr getPropExpr) {
    return getAgreeType(getPropExpr.getProp());
}
// prev(delay, init) yields the type of its first (delayed) argument.
private AgreeType getAgreeType(PrevExpr prevExpr) {
    return getAgreeType(prevExpr.getDelay());
}
/** Maps each expression to its inferred AGREE type, preserving order. */
private List<AgreeType> getAgreeTypes(List<? extends Expr> exprs) {
    ArrayList<AgreeType> types = new ArrayList<>(exprs.size());
    for (Expr e : exprs) {
        types.add(getAgreeType(e));
    }
    return types;
}
/** Maps each argument to the AGREE type of its declared type, preserving order. */
public List<AgreeType> typesFromArgs(List<Arg> args) {
    ArrayList<AgreeType> types = new ArrayList<>(args.size());
    for (Arg a : args) {
        types.add(getAgreeType(a));
    }
    return types;
}
// Infers the result type of a call expression. Function calls use the
// declared return type; node calls are only legal in expression position when
// they have exactly one return value.
// TODO: Examine type system in more detail
// TODO: Fix to make support type lists.
private AgreeType getAgreeType(FnCallExpr fnCall) {
    NestedDotID dotId = fnCall.getFn();
    NamedElement namedEl = getFinalNestId(dotId);
    // extract in/out arguments
    if (namedEl instanceof FnDefExpr) {
        FnDefExpr fnDef = (FnDefExpr) namedEl;
        return getAgreeType(fnDef.getType());
    } else if (namedEl instanceof NodeDefExpr) {
        NodeDefExpr nodeDef = (NodeDefExpr) namedEl;
        List<AgreeType> outDefTypes = typesFromArgs(nodeDef.getRets());
        if (outDefTypes.size() == 1) {
            return outDefTypes.get(0);
        } else {
            error(fnCall, "Nodes embedded in expressions must have exactly one return value."
                    + " Node " + nodeDef.getName() + " contains " + outDefTypes.size()
                    + " return values");
            return ERROR;
        }
    } else {
        // the call target did not resolve to a node or function definition
        error(fnCall, "Node or Function definition name expected.");
        return ERROR;
    }
}
/**
 * Infers the result type of a binary expression from its operator: comparison
 * and logical operators yield bool, arithmetic and the init operator "->"
 * take the type of the left operand, and unknown operators yield ERROR.
 */
private AgreeType getAgreeType(BinaryExpr binExpr) {
    AgreeType typeLeft = getAgreeType(binExpr.getLeft());
    switch (binExpr.getOp()) {
    case "=>":
    case "<=>":
    case "and":
    case "or":
    case "<>":
    case "!=":
    case "<":
    case "<=":
    case ">":
    case ">=":
    case "=":
        return BOOL;
    case "->":
    case "+":
    case "-":
    case "*":
    case "/":
    case "mod":
    case "div":
        return typeLeft;
    default:
        return ERROR;
    }
}
// Central type-inference dispatch: routes an expression to the overload for
// its concrete kind, types literals/casts/events directly, and yields ERROR
// for anything unrecognized.
private AgreeType getAgreeType(Expr expr) {
    if (expr instanceof BinaryExpr) {
        return getAgreeType((BinaryExpr) expr);
    } else if (expr instanceof FnCallExpr) {
        return getAgreeType((FnCallExpr) expr);
    } else if (expr instanceof IfThenElseExpr) {
        return getAgreeType((IfThenElseExpr) expr);
    } else if (expr instanceof PrevExpr) {
        return getAgreeType((PrevExpr) expr);
    } else if (expr instanceof GetPropertyExpr) {
        return getAgreeType((GetPropertyExpr) expr);
    } else if (expr instanceof NestedDotID) {
        return getAgreeType((NestedDotID) expr);
    } else if (expr instanceof UnaryExpr) {
        return getAgreeType((UnaryExpr) expr);
    } else if (expr instanceof IntLitExpr) {
        return INT;
    } else if (expr instanceof RealLitExpr) {
        return REAL;
    } else if (expr instanceof BoolLitExpr) {
        return BOOL;
    } else if (expr instanceof ThisExpr) {
        // 'this' denotes the enclosing component (used in Get_Property)
        return new AgreeType("component");
    } else if (expr instanceof PreExpr) {
        // pre(e) has the type of e
        return getAgreeType(((PreExpr) expr).getExpr());
    } else if (expr instanceof RecordExpr) {
        return getAgreeType((RecordExpr) expr);
    } else if (expr instanceof RecordUpdateExpr) {
        return getAgreeType((RecordUpdateExpr) expr);
    } else if (expr instanceof FloorCast) {
        return INT;
    } else if (expr instanceof RealCast) {
        return REAL;
    } else if (expr instanceof EventExpr) {
        return BOOL;
    }
    return ERROR;
}
// A record-update expression keeps the type of the record being updated.
private AgreeType getAgreeType(RecordUpdateExpr upExpr) {
    return getAgreeType(upExpr.getRecord());
}
// A record literal has the type named by its record reference.
private AgreeType getAgreeType(RecordExpr recExpr) {
    return getNestIdAsType(recExpr.getRecord());
}
/**
 * Returns true when the actual type equals the expected type. ERROR never
 * matches anything (including itself), so cascading errors are suppressed.
 */
public static boolean matches(AgreeType expected, AgreeType actual) {
    boolean anyError = expected.equals(ERROR) || actual.equals(ERROR);
    return !anyError && expected.equals(actual);
}
} |
package ca.uhn.fhir.rest.server.interceptor;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.stringContainsInOrder;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.TimeUnit;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.web.cors.CorsConfiguration;
import com.phloc.commons.collections.iterate.ArrayEnumeration;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu2.composite.HumanNameDt;
import ca.uhn.fhir.model.dstu2.composite.IdentifierDt;
import ca.uhn.fhir.model.dstu2.resource.Binary;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome.Issue;
import ca.uhn.fhir.model.dstu2.resource.Organization;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.valueset.IdentifierUseEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Read;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.server.*;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.PortUtil;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.util.UrlUtil;
public class ResponseHighlightingInterceptorTest {
// shared HTTP client used by all tests against the embedded server
private static CloseableHttpClient ourClient;
// DSTU2 FHIR context backing the RestfulServer under test
private static FhirContext ourCtx = FhirContext.forDstu2();
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseHighlightingInterceptorTest.class);
// port/server/servlet are initialized in the class setup (outside this excerpt)
private static int ourPort;
private static Server ourServer;
private static RestfulServer ourServlet;
@AfterClass
public static void afterClassClearContext() {
    // release static state held by HAPI between test classes
    TestUtil.clearAllStaticFieldsForUnitTest();
}
/**
 * See #464 -- HTML responses are pretty-printed even when the server's
 * default pretty-print setting is off.
 */
@Test
public void testPrettyPrintDefaultsToTrue() throws Exception {
    ourServlet.setDefaultPrettyPrint(false);
    HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1");
    get.addHeader("Accept", "text/html");
    HttpResponse response = ourClient.execute(get);
    String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
    IOUtils.closeQuietly(response.getEntity().getContent());
    ourLog.info(body);
    assertEquals(200, response.getStatusLine().getStatusCode());
    // pretty output puts the payload inside a <pre> block with newlines
    assertThat(body, (stringContainsInOrder("<body>", "<pre>", "\n", "</pre>")));
}
/**
 * See #464 -- an explicit _pretty=true keeps the HTML output pretty-printed
 * regardless of the server default.
 */
@Test
public void testPrettyPrintDefaultsToTrueWithExplicitTrue() throws Exception {
    ourServlet.setDefaultPrettyPrint(false);
    HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_pretty=true");
    get.addHeader("Accept", "text/html");
    HttpResponse response = ourClient.execute(get);
    String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
    IOUtils.closeQuietly(response.getEntity().getContent());
    ourLog.info(body);
    assertEquals(200, response.getStatusLine().getStatusCode());
    assertThat(body, (stringContainsInOrder("<body>", "<pre>", "\n", "</pre>")));
}
/**
 * See #464 -- an explicit _pretty=false suppresses pretty-printing of the
 * HTML output.
 */
@Test
public void testPrettyPrintDefaultsToTrueWithExplicitFalse() throws Exception {
    ourServlet.setDefaultPrettyPrint(false);
    HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_pretty=false");
    get.addHeader("Accept", "text/html");
    HttpResponse response = ourClient.execute(get);
    String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
    IOUtils.closeQuietly(response.getEntity().getContent());
    ourLog.info(body);
    assertEquals(200, response.getStatusLine().getStatusCode());
    // non-pretty output must not contain the pretty <pre>-block structure
    assertThat(body, not(stringContainsInOrder("<body>", "<pre>", "\n", "</pre>")));
}
/**
 * _format=html/json forces the highlighted HTML view; the rendered page
 * includes a "Response generated in Nms" footer.
 */
@Test
public void testForceResponseTime() throws Exception {
    HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
    HttpResponse response = ourClient.execute(get);
    String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
    IOUtils.closeQuietly(response.getEntity().getContent());
    ourLog.info(body);
    assertEquals(200, response.getStatusLine().getStatusCode());
    assertEquals("text/html;charset=utf-8", response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
    // flatten line breaks so the regex can match across lines
    String flattened = body.replace('\n', ' ').replace('\r', ' ');
    assertThat(flattened, matchesPattern(".*Response generated in [0-9]+ms.*"));
}
/**
 * An unknown resource type requested with an HTML accept header produces a
 * highlighted OperationOutcome and a 404.
 */
@Test
public void testGetInvalidResource() throws Exception {
    HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Foobar/123");
    get.addHeader("Accept", "text/html");
    CloseableHttpResponse response = ourClient.execute(get);
    String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
    IOUtils.closeQuietly(response.getEntity().getContent());
    ourLog.info("Resp: {}", body);
    assertEquals(404, response.getStatusLine().getStatusCode());
    assertThat(body, stringContainsInOrder("<span class='hlTagName'>OperationOutcome</span>", "Unknown resource type 'Foobar' - Server knows how to handle"));
}
/**
 * Without an HTML accept header the same error comes back as plain FHIR XML
 * rather than highlighted HTML.
 */
@Test
public void testGetInvalidResourceNoAcceptHeader() throws Exception {
    HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Foobar/123");
    CloseableHttpResponse response = ourClient.execute(get);
    String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
    IOUtils.closeQuietly(response.getEntity().getContent());
    ourLog.info("Resp: {}", body);
    assertEquals(404, response.getStatusLine().getStatusCode());
    assertThat(body, not(stringContainsInOrder("<span class='hlTagName'>OperationOutcome</span>", "Unknown resource type 'Foobar' - Server knows how to handle")));
    assertThat(body, (stringContainsInOrder("Unknown resource type 'Foobar'")));
    assertThat(response.getFirstHeader("Content-Type").getValue(), containsString("application/xml+fhir"));
}
/**
 * A request against the bare server base URL yields a 400 with a highlighted
 * OperationOutcome explaining that no resource type/operation was given.
 */
@Test
public void testGetRoot() throws Exception {
    HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/");
    httpGet.addHeader("Accept", "text/html");
    CloseableHttpResponse status = ourClient.execute(httpGet);
    // FIX: specify UTF-8 explicitly -- the charset-less IOUtils.toString
    // overload is deprecated and decodes with the platform default charset,
    // unlike every sibling test in this class.
    String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
    IOUtils.closeQuietly(status.getEntity().getContent());
    ourLog.info("Resp: {}", responseContent);
    assertEquals(400, status.getStatusLine().getStatusCode());
    assertThat(responseContent, stringContainsInOrder("<span class='hlTagName'>OperationOutcome</span>", "This is the base URL of FHIR server. Unable to handle this request, as it does not contain a resource type or operation name."));
}
/**
 * The interceptor renders an exception's OperationOutcome as highlighted HTML
 * when the client accepts text/html; handleException returning false means
 * the interceptor wrote the response itself.
 */
@Test
public void testHighlightException() throws Exception {
    ResponseHighlighterInterceptor ic = new ResponseHighlighterInterceptor();
    HttpServletRequest req = mock(HttpServletRequest.class);
    when(req.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
        @Override
        public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
            return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
        }
    });
    HttpServletResponse resp = mock(HttpServletResponse.class);
    StringWriter sw = new StringWriter();
    when(resp.getWriter()).thenReturn(new PrintWriter(sw));
    // NOTE: an unused Patient resource was previously constructed here; removed.
    ServletRequestDetails reqDetails = new TestServletRequestDetails();
    reqDetails.setRequestType(RequestTypeEnum.GET);
    reqDetails.setServer(new RestfulServer(ourCtx));
    reqDetails.setServletRequest(req);
    // The request parameters can be null depending on the exception type
    // reqDetails.setParameters(null);
    ResourceNotFoundException exception = new ResourceNotFoundException("Not found");
    exception.setOperationOutcome(new OperationOutcome().addIssue(new Issue().setDiagnostics("Hello")));
    assertFalse(ic.handleException(reqDetails, exception, req, resp));
    String output = sw.getBuffer().toString();
    ourLog.info(output);
    assertThat(output, containsString("<span class='hlTagName'>OperationOutcome</span>"));
}
@Test
public void testHighlightNormalResponseForcePrettyPrint() throws Exception {
	// ?_pretty=true plus a browser Accept header: interceptor renders highlighted,
	// pretty-printed HTML itself.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	HashMap<String, String[]> requestParams = new HashMap<String, String[]>();
	requestParams.put(Constants.PARAM_PRETTY, new String[] { Constants.PARAM_PRETTY_VALUE_TRUE });

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(requestParams);
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(servletRequest);

	// false => the interceptor handled the response
	assertFalse(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));

	String output = capturedOutput.getBuffer().toString();
	ourLog.info(output);
	assertThat(output, containsString("<span class='hlTagName'>Patient</span>"));
	assertThat(output, stringContainsInOrder("<body>", "<pre>", "\n", "</pre>"));
}
@Test
public void testHighlightForceRaw() throws Exception {
	// ?_raw=true wins over the browser Accept header: the interceptor steps aside.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	HashMap<String, String[]> requestParams = new HashMap<String, String[]>();
	requestParams.put(Constants.PARAM_PRETTY, new String[] { Constants.PARAM_PRETTY_VALUE_TRUE });
	requestParams.put(Constants.PARAM_FORMAT, new String[] { Constants.CT_XML });
	requestParams.put(ResponseHighlighterInterceptor.PARAM_RAW, new String[] { ResponseHighlighterInterceptor.PARAM_RAW_TRUE });

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(requestParams);
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(servletRequest);

	// true means it decided to not handle the request..
	assertTrue(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));
}
@Test
public void testDontHighlightWhenOriginHeaderPresent() throws Exception {
	// A request carrying an Origin header (CORS) must not be highlighted, even for a
	// browser-style Accept header.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
		}
	});
	when(servletRequest.getHeader(Constants.HEADER_ORIGIN)).thenAnswer(new Answer<String>() {
		@Override
		public String answer(InvocationOnMock theInvocation) throws Throwable {
			return "http://example.com";
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(new HashMap<String, String[]>());
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(servletRequest);

	// true means it decided to not handle the request..
	assertTrue(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));
}
/**
* See #346
*/
@Test
public void testHighlightForceHtmlCt() throws Exception {
	// See #346: ?_format=<html content type> forces highlighting even though the
	// Accept header asks for FHIR XML.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("application/xml+fhir");
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	HashMap<String, String[]> requestParams = new HashMap<String, String[]>();
	requestParams.put(Constants.PARAM_FORMAT, new String[] { Constants.FORMAT_HTML });

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(requestParams);
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(servletRequest);

	// false means it decided to handle the request..
	assertFalse(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));
}
/**
* See #346
*/
@Test
public void testHighlightForceHtmlFormat() throws Exception {
	// See #346: ?_format=html (the short form) also forces highlighting despite an
	// Accept header asking for FHIR XML.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("application/xml+fhir");
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	HashMap<String, String[]> requestParams = new HashMap<String, String[]>();
	requestParams.put(Constants.PARAM_FORMAT, new String[] { Constants.CT_HTML });

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(requestParams);
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(servletRequest);

	// false means it decided to handle the request..
	assertFalse(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));
}
@Test
public void testHighlightNormalResponse() throws Exception {
	// Plain browser GET: interceptor renders highlighted HTML, including the
	// format-switch link to JSON.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(new HashMap<String, String[]>());
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(servletRequest);

	// false => the interceptor handled the response
	assertFalse(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));

	String output = capturedOutput.getBuffer().toString();
	ourLog.info(output);
	assertThat(output, containsString("<span class='hlTagName'>Patient</span>"));
	assertThat(output, stringContainsInOrder("<body>", "<pre>", "\n", "</pre>"));
	assertThat(output, containsString("<a href=\"?_format=json\">"));
}
/**
* Browsers declare XML but not JSON in their accept header, we should still respond using JSON if that's the default
*/
@Test
public void testHighlightProducesDefaultJsonWithBrowserRequest() throws Exception {
	// Browsers declare XML but not JSON in their Accept header; we should still
	// respond using JSON when that is the server default.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	RestfulServer server = new RestfulServer(ourCtx);
	server.setDefaultResponseEncoding(EncodingEnum.JSON);

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(new HashMap<String, String[]>());
	requestDetails.setServer(server);
	requestDetails.setServletRequest(servletRequest);

	// false => the interceptor handled the response
	assertFalse(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));

	String output = capturedOutput.getBuffer().toString();
	ourLog.info(output);
	assertThat(output, containsString("resourceType"));
}
@Test
public void testHighlightProducesDefaultJsonWithBrowserRequest2() throws Exception {
	// text/html is NOT the top-ranked media type here (q=0.8), so the interceptor
	// must leave the request alone.
	ResponseHighlighterInterceptor interceptor = new ResponseHighlighterInterceptor();

	HttpServletRequest servletRequest = mock(HttpServletRequest.class);
	when(servletRequest.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html;q=0.8,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter capturedOutput = new StringWriter();
	HttpServletResponse servletResponse = mock(HttpServletResponse.class);
	when(servletResponse.getWriter()).thenReturn(new PrintWriter(capturedOutput));

	Patient resource = new Patient();
	resource.addName().addFamily("FAMILY");

	RestfulServer server = new RestfulServer(ourCtx);
	server.setDefaultResponseEncoding(EncodingEnum.JSON);

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(new HashMap<String, String[]>());
	requestDetails.setServer(server);
	requestDetails.setServletRequest(servletRequest);

	// True here means the interceptor didn't handle the request, because HTML wasn't the top ranked accept header
	assertTrue(interceptor.outgoingResponse(requestDetails, resource, servletRequest, servletResponse));
}
@Test
public void testSearchWithSummaryParam() throws Exception {
	// _summary=count: the rendered result must not include any bundle entries.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=searchWithWildcardRetVal&_summary=count");
	request.addHeader("Accept", "html");
	CloseableHttpResponse response = ourClient.execute(request);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());
	ourLog.info("Resp: {}", body);
	assertEquals(200, response.getStatusLine().getStatusCode());
	assertThat(body, not(containsString("entry")));
}
@Test
public void testBinaryReadAcceptMissing() throws Exception {
	// With no Accept header a Binary read is served raw, under the Binary's own
	// content type and as an attachment.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Binary/foo");
	HttpResponse response = ourClient.execute(request);
	byte[] body = IOUtils.toByteArray(response.getEntity().getContent());
	IOUtils.closeQuietly(response.getEntity().getContent());
	assertEquals(200, response.getStatusLine().getStatusCode());
	assertEquals("foo", response.getFirstHeader("content-type").getValue());
	assertEquals("Attachment;", response.getFirstHeader("Content-Disposition").getValue());
	assertArrayEquals(new byte[] { 1, 2, 3, 4 }, body);
}
@Test
public void testBinaryReadAcceptBrowser() throws Exception {
	// Even a browser Accept header must not trigger highlighting of a Binary:
	// the raw bytes are served as an attachment.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Binary/foo");
	request.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	request.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");
	HttpResponse response = ourClient.execute(request);
	byte[] body = IOUtils.toByteArray(response.getEntity().getContent());
	IOUtils.closeQuietly(response.getEntity().getContent());
	assertEquals(200, response.getStatusLine().getStatusCode());
	assertEquals("foo", response.getFirstHeader("content-type").getValue());
	assertEquals("Attachment;", response.getFirstHeader("Content-Disposition").getValue());
	assertArrayEquals(new byte[] { 1, 2, 3, 4 }, body);
}
@Test
public void testBinaryReadAcceptFhirJson() throws Exception {
	// Accept: application/json+fhir => the Binary is encoded as a FHIR JSON
	// resource, not streamed raw, and gets no Content-Disposition.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Binary/foo");
	request.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	request.addHeader("Accept", Constants.CT_FHIR_JSON);
	HttpResponse response = ourClient.execute(request);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());
	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertNull(response.getFirstHeader("Content-Disposition"));
	assertEquals("{\"resourceType\":\"Binary\",\"id\":\"1\",\"contentType\":\"foo\",\"content\":\"AQIDBA==\"}", body);
}
@Test
public void testForceApplicationJson() throws Exception {
	// ?_format=application/json must yield raw FHIR JSON even for a browser UA.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/json");
	request.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse response = ourClient.execute(request);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());
	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceApplicationJsonFhir() throws Exception {
	// ?_format=application/json+fhir (unescaped '+') must yield raw FHIR JSON.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/json+fhir");
	request.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse response = ourClient.execute(request);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());
	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceApplicationJsonPlusFhir() throws Exception {
	// Same as above, but with the _format value URL-escaped.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=" + UrlUtil.escape("application/json+fhir"));
	request.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse response = ourClient.execute(request);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());
	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceJson() throws Exception {
	// The short form ?_format=json must also yield raw FHIR JSON.
	HttpGet request = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=json");
	request.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse response = ourClient.execute(request);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());
	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceHtmlJson() throws Exception {
	// ?_format=html/json must return highlighted HTML wrapping JSON content.
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, containsString("html"));
	assertThat(responseContent, containsString(">{<"));
	// JSON content contains no escaped XML tags. The previous assertion,
	// not(containsString("<")), was self-contradictory: the HTML wrapper and the
	// ">{<" check above both guarantee a '<'. It evidently meant "&lt;" (compare
	// the mirrored assertions in testForceHtmlXml).
	assertThat(responseContent, not(containsString("&lt;")));
	ourLog.info(responseContent);
}
@Test
public void testForceHtmlXml() throws Exception {
	// ?_format=html/xml must return highlighted HTML wrapping XML content.
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/xml");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// Decode explicitly as UTF-8 (the charset-less IOUtils.toString overload is
	// deprecated and platform-dependent).
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, containsString("html"));
	assertThat(responseContent, not(containsString(">{<")));
	// XML content renders its tags escaped; the previous containsString("<") was
	// trivially true for any HTML page and evidently meant "&lt;" (mirroring the
	// JSON variant above).
	assertThat(responseContent, containsString("&lt;"));
}
@Test
public void testForceApplicationXml() throws Exception {
	// ?_format=application/xml must yield raw FHIR XML (no HTML highlighting).
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/xml");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// Decode explicitly as UTF-8: the charset-less IOUtils.toString overload is
	// deprecated and uses the platform default charset.
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, not(containsString("html")));
}
@Test
public void testForceApplicationXmlFhir() throws Exception {
	// ?_format=application/xml+fhir (unescaped '+') must yield raw FHIR XML.
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/xml+fhir");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// Decode explicitly as UTF-8: the charset-less IOUtils.toString overload is
	// deprecated and uses the platform default charset.
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, not(containsString("html")));
}
@Test
public void testForceApplicationXmlPlusFhir() throws Exception {
	// Same as above, but with the _format value URL-escaped.
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=" + UrlUtil.escape("application/xml+fhir"));
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// Decode explicitly as UTF-8: the charset-less IOUtils.toString overload is
	// deprecated and uses the platform default charset.
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, not(containsString("html")));
}
@BeforeClass
public static void beforeClass() throws Exception {
// Stand up an embedded Jetty server hosting a RestfulServer with the
// ResponseHighlighterInterceptor under test, plus a permissive CORS interceptor.
ourPort = PortUtil.findFreePort();
ourServer = new Server(ourPort);
DummyPatientResourceProvider patientProvider = new DummyPatientResourceProvider();
ServletHandler proxyHandler = new ServletHandler();
ourServlet = new RestfulServer(ourCtx);
/*
* Enable CORS
*/
CorsConfiguration config = new CorsConfiguration();
CorsInterceptor corsInterceptor = new CorsInterceptor(config);
config.addAllowedHeader("Origin");
config.addAllowedHeader("Accept");
config.addAllowedHeader("X-Requested-With");
config.addAllowedHeader("Content-Type");
config.addAllowedHeader("Access-Control-Request-Method");
config.addAllowedHeader("Access-Control-Request-Headers");
// Any origin may call; Location/Content-Location are exposed to CORS callers
config.addAllowedOrigin("*");
config.addExposedHeader("Location");
config.addExposedHeader("Content-Location");
config.setAllowedMethods(Arrays.asList("GET","POST","PUT","DELETE","OPTIONS"));
ourServlet.registerInterceptor(corsInterceptor);
// The interceptor under test
ourServlet.registerInterceptor(new ResponseHighlighterInterceptor());
ourServlet.setResourceProviders(patientProvider, new DummyBinaryResourceProvider());
ourServlet.setBundleInclusionRule(BundleInclusionRule.BASED_ON_RESOURCE_PRESENCE);
ServletHolder servletHolder = new ServletHolder(ourServlet);
// NOTE(review): the method body appears truncated here (no servlet registration,
// server start, or closing brace before the next class) - confirm against the
// original file.
public static class DummyPatientResourceProvider implements IResourceProvider {

	/** Builds the canonical patient (id "1") served by the read and _id search operations. */
	private Patient createPatient1() {
		Patient patient = new Patient();
		IdentifierDt identifier = patient.addIdentifier();
		identifier.setUse(IdentifierUseEnum.OFFICIAL);
		identifier.setSystem(new UriDt("urn:hapitest:mrns"));
		identifier.setValue("00001");
		HumanNameDt name = patient.addName();
		name.addFamily("Test");
		name.addGiven("PatientOne");
		patient.getId().setValue("1");
		return patient;
	}

	/** Returns one patient carrying absolute (cross-server) ids on itself and its managing org. */
	@Search(queryName = "findPatientsWithAbsoluteIdSpecified")
	public List<Patient> findPatientsWithAbsoluteIdSpecified() {
		Organization managingOrg = new Organization();
		managingOrg.setId("http://foo.com/Organization/222/_history/333");
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("foo");
		patient.setId("http://absolute.com/Patient/123/_history/22");
		patient.getManagingOrganization().setResource(managingOrg);
		return Collections.singletonList(patient);
	}

	/** Returns one patient with no resource id assigned at all. */
	@Search(queryName = "findPatientsWithNoIdSpecified")
	public List<Patient> findPatientsWithNoIdSpecified() {
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("foo");
		return Collections.singletonList(patient);
	}

	/** Builds the in-memory "database": patients keyed by their string id ("1" and "2"). */
	public Map<String, Patient> getIdToPatient() {
		Map<String, Patient> idToPatient = new HashMap<String, Patient>();

		idToPatient.put("1", createPatient1());

		Patient patientTwo = new Patient();
		IdentifierDt identifier = new IdentifierDt();
		identifier.setUse(IdentifierUseEnum.OFFICIAL);
		identifier.setSystem(new UriDt("urn:hapitest:mrns"));
		identifier.setValue("00002");
		patientTwo.getIdentifier().add(identifier);
		HumanNameDt name = new HumanNameDt();
		name.addFamily("Test");
		name.addGiven("PatientTwo");
		patientTwo.getName().add(name);
		patientTwo.getId().setValue("2");
		idToPatient.put("2", patientTwo);

		return idToPatient;
	}

	/**
	 * Retrieve the resource by its identifier
	 *
	 * @param theId
	 *            The resource identity
	 * @return The resource, or {@code null} when the id is unknown
	 */
	@Read()
	public Patient getResourceById(@IdParam IdDt theId) {
		return getIdToPatient().get(theId.getIdPart());
	}

	/**
	 * Search for the resource by its identifier
	 *
	 * @param theId
	 *            The resource identity
	 * @return A single-element list, or an empty list when the id is unknown
	 */
	@Search()
	public List<Patient> getResourceById(@RequiredParam(name = "_id") String theId) {
		Patient match = getIdToPatient().get(theId);
		return match == null ? Collections.<Patient> emptyList() : Collections.singletonList(match);
	}

	@Override
	public Class<Patient> getResourceType() {
		return Patient.class;
	}

	/** Exercises a wildcard ({@code List<? extends IResource>}) search return type. */
	@Search(queryName = "searchWithWildcardRetVal")
	public List<? extends IResource> searchWithWildcardRetVal() {
		Patient patient = new Patient();
		patient.setId("1234");
		patient.addName().addFamily("searchWithWildcardRetVal");
		return Collections.singletonList(patient);
	}
}
/** Request-details stub that pins the server base URL to a fixed value for the tests. */
class TestServletRequestDetails extends ServletRequestDetails {
	private static final String FIXED_SERVER_BASE = "/baseDstu3";

	@Override
	public String getServerBaseForRequest() {
		return FIXED_SERVER_BASE;
	}
}
} |
package com.hserv.coordinatedentry.housingmatching.service.impl;
import java.nio.file.AccessDeniedException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.domain.Specifications;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.hserv.coordinatedentry.housingmatching.dao.EligibleClientsDaoV3;
import com.hserv.coordinatedentry.housingmatching.dao.EligibleClientsRepository;
import com.hserv.coordinatedentry.housingmatching.dao.RepositoryFactory;
import com.hserv.coordinatedentry.housingmatching.entity.EligibleClient;
import com.hserv.coordinatedentry.housingmatching.entity.HousingInventory;
import com.hserv.coordinatedentry.housingmatching.entity.Match;
import com.hserv.coordinatedentry.housingmatching.model.EligibleClientModel;
import com.hserv.coordinatedentry.housingmatching.service.EligibleClientService;
import com.hserv.coordinatedentry.housingmatching.translator.EligibleClientsTranslator;
import com.hserv.coordinatedentry.housingmatching.util.SecurityContextUtil;
import com.servinglynk.hmis.warehouse.client.model.SearchRequest;
import com.servinglynk.hmis.warehouse.client.search.ISearchServiceClient;
import com.servinglynk.hmis.warehouse.core.model.BaseClient;
import com.servinglynk.hmis.warehouse.core.model.JSONObjectMapper;
import com.servinglynk.hmis.warehouse.core.model.Parameters;
import com.servinglynk.hmis.warehouse.core.model.Session;
@Service
public class EligibleClientServiceImpl implements EligibleClientService {
// JPA repository for eligible-client rows (direct queries).
@Autowired
EligibleClientsRepository eligibleClientsRepository;
// Maps between EligibleClient entities and the EligibleClientModel API shape.
@Autowired
private EligibleClientsTranslator eligibleClientsTranslator;
// Client for the external search service. NOTE(review): appears unused in the
// methods visible here - confirm before removing.
@Autowired
ISearchServiceClient searchServiceClient;
// Provides access to sibling repositories (eligible clients, matches, housing units).
@Autowired
RepositoryFactory repositoryFactory;
/* public List<EligibleClientModel> getEligibleClientsBack(int num , String programType) {
List<EligibleClientModel> eligibleClientModels = new ArrayList<>();
List<EligibleClient> eligibleClients = eligibleClientsRepository
.findTopEligibleClients(programType ,new PageRequest(0, num, eligibleClientSortClause()));
for (EligibleClient eligibleClient : eligibleClients) {
eligibleClientModels.add(eligibleClientsTranslator.translate(eligibleClient));
}
return eligibleClientModels;
}
public List<EligibleClient> getEligibleClients(int num , String programType) {
List<EligibleClient> eligibleClients = eligibleClientsRepository
.findTopEligibleClients(programType ,new PageRequest(0, num, eligibleClientSortClause()));
return eligibleClients;
}*/
@Override
public EligibleClientModel getEligibleClientDetail(UUID clientID, String version) {
	// Shared clients may be read across project groups; everything else is scoped
	// to the caller's own project group.
	String projectGroup = SecurityContextUtil.getUserProjectGroup();
	List<UUID> sharedClients = SecurityContextUtil.getSharedClients();
	EligibleClient eligibleClient = sharedClients.contains(clientID)
			? eligibleClientsRepository.findByClientIdAndDeleted(clientID, false)
			: eligibleClientsRepository.findByClientIdAndProjectGroupCodeAndDeleted(clientID, projectGroup, false);
	if (eligibleClient == null) {
		throw new ResourceNotFoundException("Eligible not found " + clientID);
	}
	// "v2" selects the newer translation shape; any other (or null) version uses v1.
	if (version != null && version.equalsIgnoreCase("v2")) {
		return eligibleClientsTranslator.translateV2(eligibleClient);
	}
	return eligibleClientsTranslator.translate(eligibleClient);
}
@Autowired EligibleClientsDaoV3 eligibleClientsDaoV3;
/**
 * Pages eligible clients for a project group, filtered by "active"/"inactive"
 * (anything else falls through to the active list). When the caller has shared
 * clients, the v3 DAO variants that include them are used instead.
 *
 * NOTE(review): a null {@code filter} throws NPE here (unchanged from the
 * original), and in the default branch the page content is the ACTIVE list while
 * the count is the ALL count - looks inconsistent, confirm intent before changing.
 */
@Override
public Page<EligibleClient> getEligibleClients(String projectGroupCode, Pageable pageable, String filter) {
	List<EligibleClient> clients = new ArrayList<EligibleClient>();
	Long count = 0L;
	List<UUID> sharedClientsList = SecurityContextUtil.getSharedClients();
	// (removed an unused local that quoted/joined sharedClientsList into a SQL-ish
	// string - nothing below referenced it)
	if (sharedClientsList.isEmpty()) {
		if (filter.equalsIgnoreCase("inactive")) {
			clients = eligibleClientsRepository.getInactiveEligibleClients(projectGroupCode, pageable.getPageSize(), pageable.getOffset());
			count = eligibleClientsRepository.getInactiveEligibleClientsCount(projectGroupCode);
		} else if (filter.equalsIgnoreCase("active")) {
			clients = eligibleClientsRepository.getActiveEligibleClients(projectGroupCode, pageable.getPageSize(), pageable.getOffset());
			count = eligibleClientsRepository.getActiveEligibleClientsCount(projectGroupCode);
		} else {
			clients = eligibleClientsRepository.getActiveEligibleClients(projectGroupCode, pageable.getPageSize(), pageable.getOffset());
			count = eligibleClientsRepository.getAllEligibleClientsCount(projectGroupCode);
		}
	} else {
		if (filter.equalsIgnoreCase("inactive")) {
			clients = eligibleClientsDaoV3.getInactiveEligibleClientsWithSharedClients(projectGroupCode, sharedClientsList, pageable.getPageSize(), pageable.getOffset());
			count = eligibleClientsDaoV3.getInactiveEligibleClientsCountWithSharedClients(projectGroupCode, sharedClientsList);
		} else if (filter.equalsIgnoreCase("active")) {
			clients = eligibleClientsDaoV3.getActiveEligibleClientsWithSharedClients(projectGroupCode, sharedClientsList, pageable.getPageSize(), pageable.getOffset());
			count = eligibleClientsDaoV3.getActiveEligibleClientsCountWithSharedClients(projectGroupCode, sharedClientsList);
		} else {
			clients = eligibleClientsDaoV3.getActiveEligibleClientsWithSharedClients(projectGroupCode, sharedClientsList, pageable.getPageSize(), pageable.getOffset());
			count = eligibleClientsDaoV3.getAllEligibleClientsCountWithSharedClients(projectGroupCode, sharedClientsList);
		}
	}
	return new PageImpl<>(clients, pageable, count);
}
/**
 * Updates an eligible client. Unmatched clients are translated and saved
 * directly. A matched client may only be updated when its status is 10 and the
 * model does not ask to skip the match process; in that case the match is
 * cleared (matched=false, status=null) and remarks are copied over. Any other
 * matched client is rejected.
 *
 * @return true when a save occurred
 * @throws ResourceNotFoundException when no row exists for the model's client id
 * @throws AccessDeniedException when the client is matched and not resettable
 */
@Override
public boolean updateEligibleClient(UUID clientID, EligibleClientModel eligibleClientModel) throws Exception {
	EligibleClient eligibleClient = eligibleClientsRepository.findOne(eligibleClientModel.getClientId());
	if (eligibleClient == null) {
		// Previously this fell through to an NPE; fail with the same not-found
		// semantics as getEligibleClientDetail instead.
		throw new ResourceNotFoundException("Eligible not found " + eligibleClientModel.getClientId());
	}
	if (!eligibleClient.getMatched()) {
		eligibleClientsRepository.saveAndFlush(eligibleClientsTranslator.translate(eligibleClientModel, eligibleClient));
		return true;
	}
	if (eligibleClient.getStatus() != null && eligibleClient.getStatus() == 10
			&& !eligibleClientModel.isIgnoreMatchProcess()) {
		// Status 10: reset the match so the client re-enters the match process.
		eligibleClient.setMatched(false);
		eligibleClient.setStatus(null);
		eligibleClient.setRemarks(eligibleClientModel.getRemarks());
		eligibleClientsRepository.saveAndFlush(eligibleClient);
		return true;
	}
	throw new AccessDeniedException(" Matched client cannot be updated"); // typo "connot" fixed
}
/**
 * Deletes the eligible client with the given id.
 *
 * @param clientID id of the client to delete
 * @return true if a live (not already deleted) client was found and deleted
 */
@Override
public boolean deleteEligibleClientById(UUID clientID) {
    // Consistency fix: use the injected repository directly, as every sibling CRUD method
    // does (the original fetched the same repository via repositoryFactory for the lookup
    // but deleted through the injected field).
    EligibleClient client = eligibleClientsRepository.findOne(clientID);
    if (client != null && !client.isDeleted()) {
        eligibleClientsRepository.delete(client);
        return true;
    }
    return false;
}
/**
 * Creates a new eligible client from the model unless one with the same id already exists.
 *
 * @param eligibleClientModel model carrying the client id and field values
 * @return true if a new row was persisted, false if the id was null or already present
 */
@Override
public boolean createEligibleClient(EligibleClientModel eligibleClientModel) {
    // Bug fix: the original tested !StringUtils.isEmpty(getClientId() + ""), which can
    // never fail — a null UUID stringifies to the non-empty "null". Check null directly.
    if (eligibleClientModel.getClientId() != null
            && !eligibleClientsRepository.exists(eligibleClientModel.getClientId())) {
        eligibleClientsRepository.saveAndFlush(eligibleClientsTranslator.translate(eligibleClientModel, null));
        return true;
    }
    return false;
}
/** Deletes every eligible client row. Always returns {@code true}. */
@Override
public boolean deleteAll() {
eligibleClientsRepository.deleteAll();
return true;
}
/**
 * Applies {@link #updateEligibleClient} to every model in the list.
 *
 * @param eligibleClientModels models to update
 * @return true if at least one model was supplied, false for a null or empty list
 * @throws Exception propagated from the per-client update
 */
@Override
public boolean updateEligibleClients(List<EligibleClientModel> eligibleClientModels) throws Exception {
    if (eligibleClientModels == null || eligibleClientModels.isEmpty()) {
        return false;
    }
    for (EligibleClientModel model : eligibleClientModels) {
        updateEligibleClient(model.getClientId(), model);
    }
    return true;
}
/**
 * Applies {@link #createEligibleClient} to every model in the list.
 *
 * @param eligibleClientModels models to create
 * @return true if at least one model was supplied, false for a null or empty list
 */
@Override
public boolean createEligibleClients(List<EligibleClientModel> eligibleClientModels) {
    if (eligibleClientModels == null || eligibleClientModels.isEmpty()) {
        return false;
    }
    for (EligibleClientModel model : eligibleClientModels) {
        createEligibleClient(model);
    }
    return true;
}
/**
 * Sets the survey score on a live (non-deleted) eligible client.
 *
 * @param clientID   id of the client
 * @param scoreTotal new survey score
 * @return true if the client was found and the score saved, false otherwise
 */
@Override
public boolean updateEligibleClientScore(UUID clientID, int scoreTotal) {
    EligibleClient client = eligibleClientsRepository.findByClientIdAndDeleted(clientID, false);
    if (client == null) {
        return false;
    }
    client.setSurveyScore(scoreTotal);
    eligibleClientsRepository.save(client);
    // Bug fix: the original declared a status flag but never set it to true, so the
    // method always returned false even after a successful save.
    return true;
}
/** Combined ordering: survey date ascending first, then survey score descending. */
private Sort eligibleClientSortClause() {
    Sort byDate = sortBySurveyDate();
    return byDate.and(sortByScore());
}
/** Ordering by survey score, highest first. */
private Sort sortByScore() {
    Sort scoreDescending = new Sort(Sort.Direction.DESC, "surveyScore");
    return scoreDescending;
}
/** Ordering by survey date, oldest first. */
private Sort sortBySurveyDate() {
    Sort dateAscending = new Sort(Sort.Direction.ASC, "surveyDate");
    return dateAscending;
}
/**
 * Returns unmatched, non-deleted eligible clients for the given program type, project
 * group and SPDAT label, ordered by CoC score and then survey date (both ascending).
 */
public List<EligibleClient> getEligibleClients(Integer programType, String projectGroup, String spdatLabel) {
    Specification<EligibleClient> criteria = Specifications.where(new Specification<EligibleClient>() {
        @Override
        public Predicate toPredicate(Root<EligibleClient> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
            // programType is compared as a string, matching the column type used here.
            return cb.and(
                    cb.equal(root.get("programType"), programType + ""),
                    cb.equal(root.get("projectGroupCode"), projectGroup),
                    cb.equal(root.get("spdatLabel"), spdatLabel),
                    cb.equal(root.get("deleted"), false),
                    cb.equal(root.get("ignoreMatchProcess"), false),
                    cb.equal(root.get("matched"), false));
        }
    });
    Sort ordering = new Sort(Direction.ASC, "cocScore", "surveyDate");
    return repositoryFactory.getEligibleClientsRepository().findAll(criteria, ordering);
}
/**
 * Looks up a client through the search service.
 *
 * @param clientId     client id used as the search term
 * @param trustedAppId trusted-app header value for the search request
 * @param sessionToken session token for the search request
 * @return the first matching client, or null when the search returns nothing
 * @throws Exception propagated from the search service call
 */
@SuppressWarnings("unchecked")
public BaseClient getClientInfo(UUID clientId, String trustedAppId, String sessionToken) throws Exception {
    SearchRequest request = new SearchRequest();
    request.setTrustedAppId(trustedAppId);
    request.setSearchEntity("clients");
    request.setSessionToken(sessionToken);
    request.addSearchParam("q", clientId);
    // Fix: the original caught the exception only to printStackTrace() (stderr logging
    // anti-pattern) and rethrow it unchanged; let it propagate directly.
    List<BaseClient> clients = (List<BaseClient>) searchServiceClient.search(request);
    return clients.isEmpty() ? null : clients.get(0);
}
/**
 * Looks up a client through the search service by its dedup id.
 *
 * @param clientDedupId dedup id used as the search term
 * @param trustedAppId  trusted-app header value for the search request
 * @param sessionToken  session token for the search request
 * @return the first matching client, or null when the search returns nothing
 * @throws Exception propagated from the search service call
 */
@SuppressWarnings("unchecked")
public BaseClient getClientInfoByDedupId(UUID clientDedupId, String trustedAppId, String sessionToken) throws Exception {
    SearchRequest request = new SearchRequest();
    request.setTrustedAppId(trustedAppId);
    request.setSearchEntity("clients");
    request.setSessionToken(sessionToken);
    request.addSearchParam("q", clientDedupId);
    // Fix: removed the try/catch that did nothing but rethrow the same exception.
    List<BaseClient> clients = (List<BaseClient>) searchServiceClient.search(request);
    return clients.isEmpty() ? null : clients.get(0);
}
/**
 * Fetches a client's data elements from the global API and deserializes them.
 *
 * @param clientId     client whose data elements are requested
 * @param trustedAppId trusted-app header value
 * @param sessionToken session token header value
 * @return the deserialized Parameters
 * @throws Exception if the HTTP call or JSON deserialization fails
 */
public Parameters getClientDataElements(UUID clientId, String trustedAppId, String sessionToken) throws Exception {
    HttpHeaders headers = getHttpHeaders();
    headers.add("X-HMIS-TrustedApp-Id", trustedAppId);
    headers.add("Authorization", "HMISUserAuth session_token=" + sessionToken);
    RestTemplate restTemplate = new RestTemplate();
    HttpEntity entity = new HttpEntity(headers);
    // NOTE(review): the endpoint host is hard-coded; consider moving it to configuration.
    String uri = "http://hmiselb.aws.hmislynk.com/hmis-globalapi/rest/clients/" + clientId + "/dataelements";
    ResponseEntity<String> response = restTemplate.exchange(uri, HttpMethod.GET, entity, String.class);
    JSONObjectMapper mapper = new JSONObjectMapper();
    // Fix: the original caught parse failures only to printStackTrace(), assign a
    // throwaway Parameters instance (dead store) and rethrow; let the exception propagate.
    return mapper.readValue(response.getBody(), Parameters.class);
}
/**
 * Builds the default JSON request headers (Accept and Content-Type).
 * Callers add auth/session headers on top as needed.
 */
protected HttpHeaders getHttpHeaders() {
    HttpHeaders jsonHeaders = new HttpHeaders();
    jsonHeaders.add("Accept", "application/json");
    jsonHeaders.add("Content-Type", "application/json; charset=UTF-8");
    return jsonHeaders;
}
} |
package org.uma.jmetal.example.multiobjective.nsgaii;
import org.uma.jmetal.algorithm.multiobjective.nsgaii.NSGAII;
import org.uma.jmetal.component.ranking.Ranking;
import org.uma.jmetal.component.ranking.impl.ExperimentalFastNonDominanceRanking;
import org.uma.jmetal.component.termination.Termination;
import org.uma.jmetal.component.termination.impl.TerminationByEvaluations;
import org.uma.jmetal.operator.crossover.CrossoverOperator;
import org.uma.jmetal.operator.crossover.impl.SBXCrossover;
import org.uma.jmetal.operator.mutation.MutationOperator;
import org.uma.jmetal.operator.mutation.impl.PolynomialMutation;
import org.uma.jmetal.operator.selection.SelectionOperator;
import org.uma.jmetal.operator.selection.impl.BinaryTournamentSelection;
import org.uma.jmetal.problem.Problem;
import org.uma.jmetal.solution.doublesolution.DoubleSolution;
import org.uma.jmetal.util.AbstractAlgorithmRunner;
import org.uma.jmetal.util.JMetalException;
import org.uma.jmetal.util.JMetalLogger;
import org.uma.jmetal.util.ProblemUtils;
import org.uma.jmetal.util.comparator.RankingAndCrowdingDistanceComparator;
import org.uma.jmetal.util.evaluator.impl.SequentialSolutionListEvaluator;
import org.uma.jmetal.util.fileoutput.SolutionListOutput;
import org.uma.jmetal.util.fileoutput.impl.DefaultFileOutputContext;
import org.uma.jmetal.util.pseudorandom.JMetalRandom;
import java.io.FileNotFoundException;
import java.util.List;
/**
 * Class to configure and run the NSGA-II algorithm using the experimental fast
 * non-dominance ranking implementation. Problem and reference front are hard-coded
 * in {@code main}; command-line arguments are not used.
 *
 * @author Antonio J. Nebro <antonio@lcc.uma.es>
 */
public class NSGAIIWithEspecificNDSAlgorithmExample extends AbstractAlgorithmRunner {
/**
* @param args Command line arguments.
* @throws JMetalException
* @throws FileNotFoundException Invoking command: java
* org.uma.jmetal.runner.multiobjective.nsgaii.NSGAIIRunner problemName [referenceFront]
*/
public static void main(String[] args) throws JMetalException, FileNotFoundException {
Problem<DoubleSolution> problem;
NSGAII<DoubleSolution> algorithm;
CrossoverOperator<DoubleSolution> crossover;
MutationOperator<DoubleSolution> mutation;
SelectionOperator<List<DoubleSolution>, DoubleSolution> selection;
String problemName = "org.uma.jmetal.problem.multiobjective.zdt.ZDT1";
String referenceParetoFront = "jmetal-problem/src/test/resources/pareto_fronts/ZDT1.pf";
problem = ProblemUtils.<DoubleSolution>loadProblem(problemName);
double crossoverProbability = 0.9;
double crossoverDistributionIndex = 20.0;
crossover = new SBXCrossover(crossoverProbability, crossoverDistributionIndex);
double mutationProbability = 1.0 / problem.getNumberOfVariables();
double mutationDistributionIndex = 20.0;
mutation = new PolynomialMutation(mutationProbability, mutationDistributionIndex);
selection =
new BinaryTournamentSelection<DoubleSolution>(
new RankingAndCrowdingDistanceComparator<DoubleSolution>());
int populationSize = 100;
int offspringPopulationSize = 1010;
Termination termination = new TerminationByEvaluations(25000);
Ranking<DoubleSolution> ranking = new ExperimentalFastNonDominanceRanking<>() ;
algorithm =
new NSGAII<>(
problem,
populationSize,
offspringPopulationSize,
crossover,
mutation,
selection,
termination,
ranking,
new SequentialSolutionListEvaluator<>());
algorithm.run();
List<DoubleSolution> population = algorithm.getResult();
JMetalLogger.logger.info("Total execution time : " + algorithm.getTotalComputingTime() + "ms");
JMetalLogger.logger.info("Number of evaluations: " + algorithm.getEvaluations());
new SolutionListOutput(population)
.setVarFileOutputContext(new DefaultFileOutputContext("VAR.csv", ","))
.setFunFileOutputContext(new DefaultFileOutputContext("FUN.csv", ","))
.print();
JMetalLogger.logger.info("Random seed: " + JMetalRandom.getInstance().getSeed());
JMetalLogger.logger.info("Objectives values have been written to file FUN.csv");
JMetalLogger.logger.info("Variables values have been written to file VAR.csv");
if (!referenceParetoFront.equals("")) {
printQualityIndicators(population, referenceParetoFront);
}
// PlotFront plot = new Plot2DSmile(new ArrayFront(population).getMatrix()) ;
// plot.plot();
}
} |
package org.safehaus.kiskis.mgmt.server.ui.modules.lxc.forms;
import com.google.common.base.Strings;
import com.vaadin.terminal.ThemeResource;
import com.vaadin.ui.*;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.ServiceReference;
import org.safehaus.kiskis.mgmt.server.ui.modules.lxc.LxcModule;
import org.safehaus.kiskis.mgmt.server.ui.util.AppData;
import org.safehaus.kiskis.mgmt.shared.protocol.*;
import org.safehaus.kiskis.mgmt.shared.protocol.api.CommandManagerInterface;
import org.safehaus.kiskis.mgmt.shared.protocol.enums.ResponseType;
import org.safehaus.kiskis.mgmt.shared.protocol.enums.TaskStatus;
import org.safehaus.kiskis.mgmt.shared.protocol.settings.Common;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@SuppressWarnings("serial")
public class LxcCloneForm extends VerticalLayout implements
Button.ClickListener {
private static final String CLONE_LXC = "" +
"{\n" +
"\t \"command\": {\n" +
"\t \"type\": \"EXECUTE_REQUEST\",\n" +
"\t \"source\": \":source\",\n" +
"\t \"uuid\": \":uuid\",\n" +
"\t \"taskUuid\": \":task\",\n" +
"\t \"requestSequenceNumber\": :requestSequenceNumber,\n" +
"\t \"workingDirectory\": \"/\",\n" +
"\t \"program\": \"/usr/bin/lxc-clone\",\n" +
"\t \"stdOut\": \"RETURN\",\n" +
"\t \"stdErr\": \"RETURN\",\n" +
"\t \"runAs\": \"root\",\n" +
"\t \"args\": [\n" +
"\t \"-o\",\"base-container\",\"-n\",\":lxc-host-name\"," +
"\" && cat /dev/null > /etc/resolvconf/resolv.conf.d/original " +
"&& cat /dev/null > /var/lib/lxc/:lxc-host-name/rootfs/etc/resolvconf/resolv.conf.d/original\"\n" +
"\t ],\n" +
"\t \"timeout\": 180\n" +
"\t }\n" +
"\t}";
private Set<Agent> physicalAgents;
private Task cloneTask;
private TextField textFieldLxcName;
private Button buttonClone;
private Panel outputPanel;
public LxcCloneForm() {
setSpacing(true);
// Panel 1 - with caption
Panel panel = new Panel("Clone LXC template");
textFieldLxcName = new TextField("Clone LXC Template");
buttonClone = new Button("Clone");
buttonClone.addListener(this);
HorizontalLayout hLayout = new HorizontalLayout();
hLayout.addComponent(textFieldLxcName);
hLayout.addComponent(buttonClone);
hLayout.setComponentAlignment(textFieldLxcName, Alignment.BOTTOM_CENTER);
hLayout.setComponentAlignment(buttonClone, Alignment.BOTTOM_CENTER);
panel.addComponent(hLayout);
// let's adjust the panels default layout (a VerticalLayout)
VerticalLayout layout = (VerticalLayout) panel.getContent();
layout.setMargin(true); // we want a margin
layout.setSpacing(true); // and spacing between components
outputPanel = new Panel("Clone command output");
addComponent(panel);
addComponent(outputPanel);
}
@Override
public void buttonClick(Button.ClickEvent clickEvent) {
Set<Agent> agents = AppData.getSelectedAgentList();
if (agents != null && agents.size() > 0) {
physicalAgents = new HashSet<Agent>();
for (Agent agent : agents) {
if (!agent.isIsLXC()) {
physicalAgents.add(agent);
}
}
if (physicalAgents.size() == 0) {
getWindow().showNotification("Select at least one physical agent");
} else if (Strings.isNullOrEmpty(textFieldLxcName.getValue().toString())) {
getWindow().showNotification("Enter lxc hostname");
} else {
createTask();
}
}
}
private void createTask() {
cloneTask = new Task();
cloneTask.setTaskStatus(TaskStatus.NEW);
cloneTask.setDescription("Cloning lxc container");
if (getCommandManager() != null) {
getCommandManager().saveTask(cloneTask);
createRequests();
}
}
private void createRequests() {
String jsonTemplate = CLONE_LXC;
jsonTemplate = jsonTemplate.replaceAll(":task", cloneTask.getUuid().toString());
jsonTemplate = jsonTemplate.replaceAll(":source", LxcModule.MODULE_NAME);
for (Agent agent : physicalAgents) {
String json = jsonTemplate.replaceAll(":uuid", agent.getUuid().toString());
json = json.replaceAll(":requestSequenceNumber", cloneTask.getIncrementedReqSeqNumber().toString());
json = json.replaceAll(":lxc-host-name",
agent.getHostname() + Common.PARENT_CHILD_LXC_SEPARATOR + textFieldLxcName.getValue().toString());
Request request = CommandJson.getRequest(json);
if (getCommandManager() != null) {
getCommandManager().executeCommand(new Command(request));
}
buttonClone.setEnabled(false);
}
}
public void outputResponse(Response response) {
if (cloneTask != null && response.getTaskUuid().compareTo(cloneTask.getUuid()) == 0) {
setTaskStatus();
}
}
public void setTaskStatus() {
if (getCommandManager() != null) {
boolean isSuccess = true;
List<Request> requests = getCommandManager().getCommands(cloneTask.getUuid());
for (Request request : requests) {
Response response = getCommandManager().getResponse(
cloneTask.getUuid(),
request.getRequestSequenceNumber());
if (response == null) {
return;
} else {
if (response.getType().equals(ResponseType.EXECUTE_TIMEOUTED)
&& response.getType().equals(ResponseType.EXECUTE_TIMEOUTED)) {
isSuccess = false;
Label labelError = new Label(response.getStdErr());
labelError.setIcon(new ThemeResource("icons/16/cancel.png"));
outputPanel.addComponent(labelError);
buttonClone.setEnabled(true);
} else if (response.getType().equals(ResponseType.EXECUTE_RESPONSE_DONE)) {
if(response.getExitCode() == 0){
Label labelOk = new Label(response.getStdOut());
labelOk.setIcon(new ThemeResource("icons/16/ok.png"));
outputPanel.addComponent(labelOk);
buttonClone.setEnabled(true);
} else {
Label labelError = new Label(response.getStdOut());
labelError.setIcon(new ThemeResource("icons/16/cancel.png"));
outputPanel.addComponent(labelError);
buttonClone.setEnabled(true);
}
}
}
}
if (isSuccess) {
cloneTask.setTaskStatus(TaskStatus.SUCCESS);
} else {
cloneTask.setTaskStatus(TaskStatus.FAIL);
}
getCommandManager().saveTask(cloneTask);
}
}
public CommandManagerInterface getCommandManager() {
// get bundle instance via the OSGi Framework Util class
BundleContext ctx = FrameworkUtil.getBundle(LxcModule.class).getBundleContext();
if (ctx != null) {
ServiceReference serviceReference = ctx.getServiceReference(CommandManagerInterface.class.getName());
if (serviceReference != null) {
return CommandManagerInterface.class.cast(ctx.getService(serviceReference));
}
}
return null;
}
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package net.domesdaybook.automata;
import net.domesdaybook.matcher.singlebyte.SingleByteMatcher;
/**
 * A {@link Transition} guarded by a {@link SingleByteMatcher}: the transition is
 * taken for a given byte exactly when the matcher matches that byte.
 *
 * @author matt
 */
public class TransitionSingleByteMatcher implements Transition {
private final SingleByteMatcher matcher;
private final State toState;
public TransitionSingleByteMatcher(final SingleByteMatcher matcher, final State toState) {
this.matcher = matcher;
this.toState = toState;
}
public final State getStateForByte(byte theByte) {
return matcher.matches(theByte) ? toState : null;
}
public final State getToState() {
return toState;
}
public byte[] getBytes() {
return matcher.getMatchingBytes();
}
public final SingleByteMatcher getMatcher() {
return matcher;
}
} |
package com.opengamma.financial.analytics.model.multicurve;
import static com.opengamma.engine.value.ValuePropertyNames.CURVE;
import static com.opengamma.engine.value.ValuePropertyNames.CURVE_CONSTRUCTION_CONFIG;
import static com.opengamma.engine.value.ValuePropertyNames.CURVE_EXPOSURES;
import static com.opengamma.engine.value.ValueRequirementNames.CURVE_BUNDLE;
import static com.opengamma.engine.value.ValueRequirementNames.JACOBIAN_BUNDLE;
import static com.opengamma.engine.value.ValueRequirementNames.CURVE_DEFINITION;
import static com.opengamma.engine.value.ValueRequirementNames.FX_MATRIX;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.Clock;
import org.threeten.bp.ZonedDateTime;
import com.opengamma.analytics.financial.forex.method.FXMatrix;
import com.opengamma.analytics.financial.instrument.InstrumentDefinition;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivative;
import com.opengamma.core.convention.ConventionSource;
import com.opengamma.core.holiday.HolidaySource;
import com.opengamma.core.region.RegionSource;
import com.opengamma.core.security.Security;
import com.opengamma.core.security.SecuritySource;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.AbstractFunction;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.OpenGammaCompilationContext;
import com.opengamma.financial.OpenGammaExecutionContext;
import com.opengamma.financial.analytics.conversion.CashFlowSecurityConverter;
import com.opengamma.financial.analytics.conversion.CashSecurityConverter;
import com.opengamma.financial.analytics.conversion.DeliverableSwapFutureSecurityConverter;
import com.opengamma.financial.analytics.conversion.FRASecurityConverter;
import com.opengamma.financial.analytics.conversion.FXForwardSecurityConverter;
import com.opengamma.financial.analytics.conversion.FederalFundsFutureTradeConverter;
import com.opengamma.financial.analytics.conversion.FixedIncomeConverterDataProvider;
import com.opengamma.financial.analytics.conversion.FutureTradeConverter;
import com.opengamma.financial.analytics.conversion.InflationSwapSecurityConverter;
import com.opengamma.financial.analytics.conversion.NonDeliverableFXForwardSecurityConverter;
import com.opengamma.financial.analytics.conversion.SwapSecurityConverter;
import com.opengamma.financial.analytics.conversion.TradeConverter;
import com.opengamma.financial.analytics.curve.ConfigDBCurveConstructionConfigurationSource;
import com.opengamma.financial.analytics.curve.CurveConstructionConfiguration;
import com.opengamma.financial.analytics.curve.CurveConstructionConfigurationSource;
import com.opengamma.financial.analytics.curve.CurveUtils;
import com.opengamma.financial.analytics.curve.exposure.ConfigDBInstrumentExposuresProvider;
import com.opengamma.financial.analytics.curve.exposure.InstrumentExposuresProvider;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesBundle;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunctionUtils;
import com.opengamma.financial.convention.ConventionBundleSource;
import com.opengamma.financial.currency.CurrencyPair;
import com.opengamma.financial.security.FinancialSecurity;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.financial.security.FinancialSecurityVisitor;
import com.opengamma.financial.security.FinancialSecurityVisitorAdapter;
import com.opengamma.financial.security.cash.CashSecurity;
import com.opengamma.financial.security.cashflow.CashFlowSecurity;
import com.opengamma.financial.security.fra.FRASecurity;
import com.opengamma.financial.security.future.DeliverableSwapFutureSecurity;
import com.opengamma.financial.security.future.FederalFundsFutureSecurity;
import com.opengamma.financial.security.future.InterestRateFutureSecurity;
import com.opengamma.financial.security.fx.FXForwardSecurity;
import com.opengamma.financial.security.fx.NonDeliverableFXForwardSecurity;
import com.opengamma.financial.security.swap.SwapSecurity;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.async.AsynchronousExecution;
import com.opengamma.util.money.Currency;
/**
* Base function for all multi-curve pricing and risk functions. Produces results for trades
* with the following underlying securities: <p>
* <ul>
* <li> {@link CashSecurity}
* <li> {@link CashFlowSecurity}
* <li> {@link FRASecurity}
* <li> {@link SwapSecurity}
* <li> {@link InterestRateFutureSecurity}
* <li> {@link FXForwardSecurity}
* <li> {@link NonDeliverableFXForwardSecurity}
* <li> {@link DeliverableSwapFutureSecurity}
* <li> {@link FederalFundsFutureSecurity}
* </ul>
*/
public abstract class MultiCurvePricingFunction extends AbstractFunction {
/** The logger */
private static final Logger s_logger = LoggerFactory.getLogger(MultiCurvePricingFunction.class);
/** The value requirements produced by this function */
private final String[] _valueRequirements;
/** The curve construction configuration source */
private CurveConstructionConfigurationSource _curveConstructionConfigurationSource;
/** The instrument exposures provider */
private InstrumentExposuresProvider _instrumentExposuresProvider;
/**
* @param valueRequirements The value requirements, not null
*/
public MultiCurvePricingFunction(final String... valueRequirements) {
ArgumentChecker.notNull(valueRequirements, "value requirements");
// The array reference is stored as-is (no defensive copy); callers must not mutate it.
_valueRequirements = valueRequirements;
}
/** Initializes the config-DB-backed sources for curve construction configurations and instrument exposures. */
@Override
public void init(final FunctionCompilationContext context) {
_curveConstructionConfigurationSource = ConfigDBCurveConstructionConfigurationSource.init(context, this);
_instrumentExposuresProvider = ConfigDBInstrumentExposuresProvider.init(context, this);
}
/**
* Constructs an object capable of converting from {@link ComputationTarget} to {@link InstrumentDefinition}.
*
* @param context The compilation context, not null
* @return The converter
*/
protected TradeConverter getTargetToDefinitionConverter(final FunctionCompilationContext context) {
// Sources required by the individual security converters, all resolved from the compilation context.
final SecuritySource securitySource = OpenGammaCompilationContext.getSecuritySource(context);
final HolidaySource holidaySource = OpenGammaCompilationContext.getHolidaySource(context);
final RegionSource regionSource = OpenGammaCompilationContext.getRegionSource(context);
final ConventionBundleSource conventionBundleSource = OpenGammaCompilationContext.getConventionBundleSource(context);
final ConventionSource conventionSource = OpenGammaCompilationContext.getConventionSource(context);
// One converter per supported security type.
final CashSecurityConverter cashConverter = new CashSecurityConverter(holidaySource, regionSource);
final CashFlowSecurityConverter cashFlowSecurityConverter = new CashFlowSecurityConverter();
final FRASecurityConverter fraConverter = new FRASecurityConverter(securitySource, holidaySource, regionSource, conventionSource);
final SwapSecurityConverter swapConverter = new SwapSecurityConverter(securitySource, holidaySource, conventionSource, regionSource);
final FXForwardSecurityConverter fxForwardSecurityConverter = new FXForwardSecurityConverter();
final NonDeliverableFXForwardSecurityConverter nonDeliverableFXForwardSecurityConverter = new NonDeliverableFXForwardSecurityConverter();
// The DSF converter delegates the underlying swap to the swap converter.
final DeliverableSwapFutureSecurityConverter dsfConverter = new DeliverableSwapFutureSecurityConverter(securitySource, swapConverter);
final FederalFundsFutureTradeConverter federalFundsFutureTradeConverter = new FederalFundsFutureTradeConverter(securitySource, holidaySource, conventionSource, regionSource);
final InflationSwapSecurityConverter inflationSwapConverter = new InflationSwapSecurityConverter(conventionSource, regionSource, holidaySource);
// NOTE(review): no visitor is registered for InterestRateFutureSecurity although
// canApplyTo() accepts it — presumably futures are handled by the FutureTradeConverter
// wired into the TradeConverter below; confirm.
final FinancialSecurityVisitor<InstrumentDefinition<?>> securityConverter = FinancialSecurityVisitorAdapter.<InstrumentDefinition<?>>builder()
.cashSecurityVisitor(cashConverter)
.cashFlowSecurityVisitor(cashFlowSecurityConverter)
.deliverableSwapFutureSecurityVisitor(dsfConverter)
.fraSecurityVisitor(fraConverter)
.swapSecurityVisitor(swapConverter)
.fxForwardVisitor(fxForwardSecurityConverter)
.nonDeliverableFxForwardVisitor(nonDeliverableFXForwardSecurityConverter)
.zeroCouponInflationSwapSecurityVisitor(inflationSwapConverter)
.create();
// Futures are converted at trade level (they carry trade-specific data such as the trade price).
final FutureTradeConverter futureTradeConverter = new FutureTradeConverter(securitySource, holidaySource, conventionSource, conventionBundleSource, regionSource);
return new TradeConverter(futureTradeConverter, federalFundsFutureTradeConverter, securityConverter);
}
/**
* Constructs an object capable of converting from {@link InstrumentDefinition} to {@link InstrumentDerivative}.
*
* @param context The compilation context, not null
* @return The converter
*/
protected FixedIncomeConverterDataProvider getDefinitionToDerivativeConverter(final FunctionCompilationContext context) {
final SecuritySource securitySource = OpenGammaCompilationContext.getSecuritySource(context);
final ConventionBundleSource conventionSource = OpenGammaCompilationContext.getConventionBundleSource(context); // TODO [PLAT-5966] Remove
// Time-series resolver supplies the historical fixings needed to turn definitions into derivatives.
final HistoricalTimeSeriesResolver timeSeriesResolver = OpenGammaCompilationContext.getHistoricalTimeSeriesResolver(context);
return new FixedIncomeConverterDataProvider(conventionSource, securitySource, timeSeriesResolver);
}
/**
* Base compiled function for all multi-curve pricing and risk functions.
*/
public abstract class MultiCurveCompiledFunction extends AbstractInvokingCompiledFunction {
/** Converts targets to definitions */
private final TradeConverter _tradeToDefinitionConverter;
/** Converts definitions to derivatives */
private final FixedIncomeConverterDataProvider _definitionToDerivativeConverter;
/**
* @param tradeToDefinitionConverter Converts trades to definitions, not null
* @param definitionToDerivativeConverter Converts definitions to derivatives, not null
*/
protected MultiCurveCompiledFunction(final TradeConverter tradeToDefinitionConverter, final FixedIncomeConverterDataProvider definitionToDerivativeConverter) {
// Both converters are mandatory; fail fast on null.
ArgumentChecker.notNull(tradeToDefinitionConverter, "target to definition converter");
ArgumentChecker.notNull(definitionToDerivativeConverter, "definition to derivative converter");
_tradeToDefinitionConverter = tradeToDefinitionConverter;
_definitionToDerivativeConverter = definitionToDerivativeConverter;
}
/**
 * Converts the target trade to an analytics derivative, builds an FX matrix from the
 * SPOT_RATE inputs for the security's currencies, and delegates result computation to
 * {@code getValues}.
 *
 * @throws AsynchronousExecution per the compiled-function contract
 */
@Override
public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target,
    final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
  final Clock snapshotClock = executionContext.getValuationClock();
  final ZonedDateTime now = ZonedDateTime.now(snapshotClock);
  final HistoricalTimeSeriesBundle timeSeries = HistoricalTimeSeriesFunctionUtils.getHistoricalTimeSeriesInputs(executionContext, inputs);
  final InstrumentDefinition<?> definition = getDefinitionFromTarget(target);
  final InstrumentDerivative derivative = getDerivative(target, now, timeSeries, definition);
  final FXMatrix fxMatrix = new FXMatrix();
  final SecuritySource securitySource = OpenGammaExecutionContext.getSecuritySource(executionContext);
  final Collection<Currency> currencies = FinancialSecurityUtils.getCurrencies(target.getTrade().getSecurity(), securitySource);
  final Iterator<Currency> iter = currencies.iterator();
  // Robustness fix: the original called iter.next() unconditionally and would throw
  // NoSuchElementException for a security reporting no currencies; in that case we now
  // proceed with an empty FX matrix instead.
  if (iter.hasNext()) {
    // All other currencies are quoted against the first one.
    final Currency initialCurrency = iter.next();
    while (iter.hasNext()) {
      final Currency otherCurrency = iter.next();
      final Double spotRate = (Double) inputs.getValue(new ValueRequirement(ValueRequirementNames.SPOT_RATE, CurrencyPair.TYPE.specification(CurrencyPair
          .of(otherCurrency, initialCurrency))));
      // A missing spot rate is tolerated; the pair is simply not added to the matrix.
      if (spotRate != null) {
        fxMatrix.addCurrency(otherCurrency, initialCurrency, spotRate);
      }
    }
  }
  return getValues(executionContext, inputs, target, desiredValues, derivative, fxMatrix);
}
/** This function operates on trades. */
@Override
public ComputationTargetType getTargetType() {
return ComputationTargetType.TRADE;
}
/**
 * Restricts this function to trades whose underlying security is one of the supported
 * multi-curve instrument types.
 * NOTE(review): the class javadoc also advertises DeliverableSwapFutureSecurity, which is
 * not tested here — confirm whether it should be included.
 */
@Override
public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
final Security security = target.getTrade().getSecurity();
return security instanceof CashSecurity ||
security instanceof CashFlowSecurity ||
security instanceof FRASecurity ||
security instanceof SwapSecurity ||
security instanceof FXForwardSecurity ||
security instanceof NonDeliverableFXForwardSecurity ||
security instanceof InterestRateFutureSecurity ||
security instanceof FederalFundsFutureSecurity;
}
/**
 * Declares one result specification per configured value requirement, all sharing the
 * result properties for this target.
 */
@SuppressWarnings("synthetic-access")
@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) {
  final ValueProperties resultProperties = getResultProperties(context, target).get();
  final ComputationTargetSpecification targetSpec = target.toSpecification();
  final Set<ValueSpecification> specs = new HashSet<>();
  for (final String requirementName : _valueRequirements) {
    specs.add(new ValueSpecification(requirementName, targetSpec, resultProperties));
  }
  return specs;
}
/**
 * Gets the requirements needed to price the trade: curve bundles, Jacobian bundles and
 * curve definitions for each curve construction configuration referenced by the exposure
 * configurations, plus an FX matrix, FX spot rates and conversion time series.
 * <p>
 * Returns null (meaning this function cannot apply) if the constraints are incomplete,
 * the time-series requirements cannot be satisfied, or any configuration lookup throws.
 *
 * @param context The compilation context, not null
 * @param target The computation target, not null
 * @param desiredValue The desired value whose constraints drive the requirements, not null
 * @return The set of requirements, or null if they cannot be satisfied
 */
@SuppressWarnings("synthetic-access")
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) {
  final ValueProperties constraints = desiredValue.getConstraints();
  if (!requirementsSet(constraints)) {
    return null;
  }
  final Set<String> curveExposureConfigs = constraints.getValues(CURVE_EXPOSURES);
  try {
    final FinancialSecurity security = (FinancialSecurity) target.getTrade().getSecurity();
    final SecuritySource securitySource = OpenGammaCompilationContext.getSecuritySource(context);
    final Set<ValueRequirement> requirements = new HashSet<>();
    final ValueProperties.Builder commonCurveProperties = getCurveProperties(target, constraints);
    for (final String curveExposureConfig : curveExposureConfigs) {
      final Set<String> curveConstructionConfigurationNames = _instrumentExposuresProvider.getCurveConstructionConfigurationsForConfig(curveExposureConfig, security);
      for (final String curveConstructionConfigurationName : curveConstructionConfigurationNames) {
        final ValueProperties properties = commonCurveProperties.get().copy().with(CURVE_CONSTRUCTION_CONFIG, curveConstructionConfigurationName).get();
        requirements.add(new ValueRequirement(CURVE_BUNDLE, ComputationTargetSpecification.NULL, properties));
        requirements.add(new ValueRequirement(JACOBIAN_BUNDLE, ComputationTargetSpecification.NULL, properties));
        final CurveConstructionConfiguration curveConstructionConfiguration = _curveConstructionConfigurationSource.getCurveConstructionConfiguration(curveConstructionConfigurationName);
        final String[] curveNames = CurveUtils.getCurveNamesForConstructionConfiguration(curveConstructionConfiguration);
        // The FX matrix requirement depends only on the full set of configuration names, so it
        // was loop-invariant in the per-curve loop; add it once (guarded so it is only added
        // when at least one curve exists, exactly as before via the set's deduplication).
        if (curveNames.length > 0) {
          requirements.add(new ValueRequirement(FX_MATRIX, ComputationTargetSpecification.NULL,
              ValueProperties.with(CURVE_CONSTRUCTION_CONFIG, curveConstructionConfigurationNames).get()));
        }
        for (final String curveName : curveNames) {
          final ValueProperties curveProperties = ValueProperties.builder().with(CURVE, curveName).get();
          requirements.add(new ValueRequirement(CURVE_DEFINITION, ComputationTargetSpecification.NULL, curveProperties));
        }
      }
    }
    requirements.addAll(getFXRequirements(security, securitySource));
    final Set<ValueRequirement> timeSeriesRequirements = getTimeSeriesRequirements(context, target);
    if (timeSeriesRequirements == null) {
      return null;
    }
    requirements.addAll(timeSeriesRequirements);
    return requirements;
  } catch (final Exception e) {
    // Broad catch is the engine convention here: any lookup failure means "cannot apply",
    // signalled by returning null after logging.
    s_logger.error(e.getMessage(), e);
    return null;
  }
}
/**
 * Builds the FX spot rate requirements for a security. One requirement is created for
 * each pair formed from the security's first currency and every other currency it
 * references; single-currency securities produce an empty set.
 *
 * @param security The security, not null
 * @param securitySource The security source, not null
 * @return A set of FX spot requirements, may be empty
 */
protected Set<ValueRequirement> getFXRequirements(final FinancialSecurity security, final SecuritySource securitySource) {
  final Set<ValueRequirement> fxRequirements = new HashSet<>();
  final Collection<Currency> currencies = FinancialSecurityUtils.getCurrencies(security, securitySource);
  if (currencies.size() <= 1) {
    return fxRequirements;
  }
  final Iterator<Currency> currencyIterator = currencies.iterator();
  final Currency baseCurrency = currencyIterator.next();
  while (currencyIterator.hasNext()) {
    final CurrencyPair pair = CurrencyPair.of(currencyIterator.next(), baseCurrency);
    fxRequirements.add(new ValueRequirement(ValueRequirementNames.SPOT_RATE, CurrencyPair.TYPE.specification(pair)));
  }
  return fxRequirements;
}
/**
 * Gets the fixing or market close time series requirements for a security.
 *
 * @param context The compilation context, not null
 * @param target The target
 * @return A set of fixing / market close time series requirements, or null if they
 *         cannot be satisfied
 */
protected Set<ValueRequirement> getTimeSeriesRequirements(final FunctionCompilationContext context, final ComputationTarget target) {
  final InstrumentDefinition<?> definition = getDefinitionFromTarget(target);
  // The converter returns null to signal unsatisfiable requirements; propagate it directly
  // instead of the previous redundant null-check-then-return of the same reference.
  return getConversionTimeSeriesRequirements(context, target, definition);
}
/**
 * Gets an {@link InstrumentDefinition} for a target by converting its trade with the
 * trade-to-definition converter.
 *
 * @param target The target, not null
 * @return An instrument definition
 */
protected InstrumentDefinition<?> getDefinitionFromTarget(final ComputationTarget target) {
  return _tradeToDefinitionConverter.convert(target.getTrade());
}
/**
 * Gets the conversion time-series requirements for an instrument definition by delegating
 * to the definition-to-derivative converter. If no time-series are required, returns an
 * empty set.
 *
 * @param context The compilation context, not null
 * @param target The target, not null
 * @param definition The definition, not null
 * @return A set of time-series requirements
 */
protected Set<ValueRequirement> getConversionTimeSeriesRequirements(final FunctionCompilationContext context, final ComputationTarget target, final InstrumentDefinition<?> definition) {
  return _definitionToDerivativeConverter.getConversionTimeSeriesRequirements(target.getTrade().getSecurity(), definition);
}
/**
 * Gets an {@link InstrumentDerivative} by converting the definition at the valuation
 * time, using the supplied fixing / market close time series where needed.
 *
 * @param target The target, not null
 * @param now The valuation time, not null
 * @param timeSeries The conversion time series bundle, not null but may be empty
 * @param definition The definition, not null
 * @return The instrument derivative
 */
protected InstrumentDerivative getDerivative(final ComputationTarget target, final ZonedDateTime now, final HistoricalTimeSeriesBundle timeSeries,
final InstrumentDefinition<?> definition) {
  return _definitionToDerivativeConverter.convert(target.getTrade().getSecurity(), definition, now, timeSeries);
}
/**
 * Gets the value requirement names that this function can produce.
 * <p>
 * NOTE(review): this returns the internal array directly, so a caller could mutate the
 * function's state; consider returning a defensive copy if that is a concern.
 *
 * @return The value requirement names
 */
@SuppressWarnings("synthetic-access")
protected String[] getValueRequirementNames() {
  return _valueRequirements;
}
/**
 * Gets the properties for the results given a target.
 *
 * @param context The compilation context, not null
 * @param target The target, not null
 * @return The result properties builder
 */
protected abstract ValueProperties.Builder getResultProperties(FunctionCompilationContext context, ComputationTarget target);
/**
 * Checks that all required constraints have values; used by
 * {@code getRequirements} to decide whether this function can apply.
 *
 * @param constraints The constraints, not null
 * @return True if all of the constraints have been set
 */
protected abstract boolean requirementsSet(ValueProperties constraints);
/**
 * Gets the properties that are common to all curves requested for the target.
 *
 * @param target The target, not null
 * @param constraints The input constraints
 * @return The common curve properties builder
 */
protected abstract ValueProperties.Builder getCurveProperties(ComputationTarget target, ValueProperties constraints);
/**
 * Calculates the result values from the resolved inputs and converted derivative.
 *
 * @param executionContext The execution context, not null
 * @param inputs The inputs, not null
 * @param target The target, not null
 * @param desiredValues The desired values for this function, not null
 * @param derivative The derivative, not null
 * @param fxMatrix The FX matrix, not null
 * @return The results
 */
protected abstract Set<ComputedValue> getValues(FunctionExecutionContext executionContext, FunctionInputs inputs, ComputationTarget target, Set<ValueRequirement> desiredValues,
InstrumentDerivative derivative, FXMatrix fxMatrix);
}
} |
package com.azure.security.keyvault.secrets;
import com.azure.security.keyvault.secrets.models.KeyVaultSecret;
import com.azure.identity.DefaultAzureCredentialBuilder;
import com.azure.security.keyvault.secrets.models.SecretProperties;
import java.time.OffsetDateTime;
/**
* Sample demonstrates how to list secrets and versions of a given secret in the key vault.
*/
public class ListOperations {
public static void main(String[] args) throws IllegalArgumentException {
// Instantiate a client that will be used to call the service. Notice that the client is using default Azure
// credentials. To make default credentials work, ensure that environment variables 'AZURE_CLIENT_ID',
// 'AZURE_CLIENT_KEY' and 'AZURE_TENANT_ID' are set with the service principal credentials.
SecretClient client = new SecretClientBuilder()
.vaultUrl("https://{YOUR_VAULT_NAME}.vault.azure.net")
.credential(new DefaultAzureCredentialBuilder().build())
.buildClient();
// Let's create secrets holding storage and bank accounts credentials valid for 1 year. if the secret
// already exists in the key vault, then a new version of the secret is created.
client.setSecret(new KeyVaultSecret("StorageAccountPassword", "f4G34fMh8v-fdsgjsk2323=-asdsdfsdf")
.setProperties(new SecretProperties()
.setExpiresOn(OffsetDateTime.now().plusYears(1))));
client.setSecret(new KeyVaultSecret("BankAccountPassword", "f4G34fMh8v")
.setProperties(new SecretProperties()
.setExpiresOn(OffsetDateTime.now().plusYears(1))));
// You need to check if any of the secrets are sharing same values. Let's list the secrets and print their values.
// List operations don't return the secrets with value information. So, for each returned secret we call getSecret to get the secret with its value information.
for (SecretProperties secret : client.listPropertiesOfSecrets()) {
if (!secret.isEnabled()) {
continue;
}
KeyVaultSecret secretWithValue = client.getSecret(secret.getName(), secret.getVersion());
System.out.printf("Received secret with name %s and value %s \n", secretWithValue.getName(), secretWithValue.getValue());
}
// The bank account password got updated, so you want to update the secret in key vault to ensure it reflects the new password.
// Calling setSecret on an existing secret creates a new version of the secret in the key vault with the new value.
client.setSecret("BankAccountPassword", "sskdjfsdasdjsd");
// You need to check all the different values your bank account password secret had previously. Lets print all the versions of this secret.
for (SecretProperties secret : client.listPropertiesOfSecretVersions("BankAccountPassword")) {
KeyVaultSecret secretWithValue = client.getSecret(secret.getName(), secret.getVersion());
System.out.printf("Received secret's version with name %s and value %s", secretWithValue.getName(), secretWithValue.getValue());
}
}
} |
package ol.geom;
import jsinterop.annotations.JsType;
import ol.Coordinate;
import ol.Extent;
import ol.Observable;
import ol.proj.Projection;
/**
 * Base for vector geometries.
 * <p>
 * All methods are native JsInterop bindings to the OpenLayers geometry API; no logic
 * lives on the Java side.
 *
 * @author Tino Desjardins
 *
 */
@JsType(isNative = true)
public abstract class Geometry extends Observable {
    /**
     * Make a complete copy of the geometry.
     *
     * @return {@link Geometry} Clone.
     */
    public native Geometry clone();
    /**
     * Return the closest point of the geometry to the passed point as
     * {@link Coordinate} coordinate.
     *
     * @param point
     *            Point.
     * @return Closest point.
     * @api stable
     */
    public native Coordinate getClosestPoint(Coordinate point);
    /**
     * Get the extent of the geometry.
     *
     * @return {@link Extent} Extent.
     */
    public native Extent getExtent();
    /**
     * Get the type of this geometry.
     *
     * @return Geometry type.
     */
    public native String getType();
    /**
     * Rotate the geometry around a given coordinate. This modifies the geometry
     * coordinates in place.
     *
     * @param angle
     *            Rotation angle in radians.
     * @param anchor
     *            The rotation center.
     */
    public native void rotate(double angle, Coordinate anchor);
    /**
     * Create a simplified version of this geometry.
     *
     * @param tolerance
     *            Simplification tolerance (presumably in map units — confirm against the
     *            OpenLayers simplify documentation).
     * @return Simplified geometry.
     */
    public native Geometry simplify(double tolerance);
    /**
     * Transform each coordinate of the geometry from one coordinate reference
     * system to another. The geometry is modified in place. For example, a line
     * will be transformed to a line and a circle to a circle. If you do not
     * want the geometry modified in place, first clone() it and then use this
     * function on the clone.
     *
     * @param source
     *            The current projection. Can be a string identifier.
     * @param destination
     *            The desired projection. Can be a string identifier.
     * @return {@link Geometry} This geometry. Note that original geometry is
     *         modified in place.
     */
    public native Geometry transform(String source, String destination);
    /**
     * Transform each coordinate of the geometry from one coordinate reference
     * system to another. The geometry is modified in place. For example, a line
     * will be transformed to a line and a circle to a circle. If you do not
     * want the geometry modified in place, first clone() it and then use this
     * function on the clone.
     *
     * @param source
     *            The current projection as a {@link Projection} object.
     * @param destination
     *            The desired projection as a {@link Projection} object.
     * @return {@link Geometry} This geometry. Note that original geometry is
     *         modified in place.
     */
    public native Geometry transform(Projection source, Projection destination);
    /**
     * Returns true if this geometry includes the specified coordinate.
     * If the coordinate is on the boundary of the geometry, returns false.
     *
     * @param coordinate
     *            The coordinate to check if intersects this geometry.
     * @return
     *            true if intersects otherwise false
     */
    public native boolean intersectsCoordinate(Coordinate coordinate);
}
package org.springframework.cloud.release.internal.spring;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.cloud.release.internal.Releaser;
import org.springframework.cloud.release.internal.ReleaserProperties;
import org.springframework.cloud.release.internal.options.Options;
import org.springframework.cloud.release.internal.options.OptionsBuilder;
import org.springframework.cloud.release.internal.pom.ProjectVersion;
import org.springframework.cloud.release.internal.pom.Projects;
import org.springframework.util.StringUtils;
/**
* Releaser that gets input from console
*
* @author Marcin Grzejszczak
*/
public class SpringReleaser {
private static final Logger log = LoggerFactory.getLogger(SpringReleaser.class);
private final Releaser releaser;
private final ReleaserProperties properties;
private final OptionsProcessor optionsProcessor;
private final ReleaserPropertiesUpdater updater;
private final ObjectMapper objectMapper = new ObjectMapper(new YAMLFactory());
public SpringReleaser(Releaser releaser, ReleaserProperties properties,
ReleaserPropertiesUpdater updater) {
this.releaser = releaser;
this.properties = properties;
this.updater = updater;
this.optionsProcessor = new OptionsProcessor(releaser, properties);
}
SpringReleaser(Releaser releaser, ReleaserProperties properties,
OptionsProcessor optionsProcessor, ReleaserPropertiesUpdater updater) {
this.releaser = releaser;
this.properties = properties;
this.optionsProcessor = optionsProcessor;
this.updater = updater;
}
/**
* Default behaviour - interactive mode
*/
public void release() {
release(new OptionsBuilder().options());
}
public void release(Options options) {
ProjectsAndVersion projectsAndVersion = null;
// if meta release, first clone, then continue as usual
if (options.metaRelease) {
log.info("Meta Release picked. Will iterate over all projects and perform release of each one");
this.properties.getGit().setFetchVersionsFromGit(false);
metaReleaseProjects(options)
.forEach(project -> processProjectForMetaRelease(options, project));
} else {
log.info("Single project release picked. Will release only the current project");
File projectFolder = projectFolder();
projectsAndVersion = processProject(options, projectFolder, TaskType.RELEASE);
}
this.optionsProcessor.postReleaseOptions(options, postReleaseOptionsAgs(options, projectsAndVersion));
}
private void processProjectForMetaRelease(Options options, String project) {
File clonedProjectFromOrg = this.releaser.clonedProjectFromOrg(project);
ReleaserProperties copy = clonePropertiesForProject();
updatePropertiesIfCustomConfigPresent(copy, clonedProjectFromOrg);
log.info("Successfully cloned the project [{}] to [{}]", project, clonedProjectFromOrg);
try {
processProject(options, clonedProjectFromOrg, TaskType.RELEASE);
} catch (Exception e) {
log.error("\n\n\nBUILD FAILED!!!\n\nException occurred for project <" +
project + "> \n\n", e);
throw e;
}
}
private ReleaserProperties clonePropertiesForProject() {
ReleaserProperties copy = new ReleaserProperties();
BeanUtils.copyProperties(this.properties, copy);
return copy;
}
private void updatePropertiesIfCustomConfigPresent(ReleaserProperties copy,
File clonedProjectFromOrg) {
File releaserConfig = new File(clonedProjectFromOrg, "config/releaser.yml");
if (releaserConfig.exists()) {
try {
ReleaserProperties releaserProperties = this.objectMapper
.readValue(releaserConfig, ReleaserProperties.class);
log.info("config/releaser.yml found. Will update the current properties");
copy.setMaven(releaserProperties.getMaven());
copy.setGradle(releaserProperties.getGradle());
}
catch (IOException e) {
throw new IllegalStateException(e);
}
}
log.info("Updating working directory to [{}]", clonedProjectFromOrg.getAbsolutePath());
copy.setWorkingDir(clonedProjectFromOrg.getAbsolutePath());
this.updater.updateProperties(copy);
}
private List<String> metaReleaseProjects(Options options) {
List<String> projects = new ArrayList<>(this.properties.getFixedVersions().keySet());
log.info("List of projects that should not be cloned {}", this.properties.getMetaRelease().getProjectsToSkip());
List<String> filteredProjects = projects.stream()
.filter(project -> !this.properties.getMetaRelease().getProjectsToSkip().contains(project))
.collect(Collectors.toList());
log.info("List of all projects to clone before filtering {}", filteredProjects);
if (StringUtils.hasText(options.startFrom)) {
int projectIndex = filteredProjects.indexOf(options.startFrom);
if (projectIndex < 0) throw new IllegalStateException("Project [" + options.startFrom + "] not found");
if (log.isDebugEnabled()) {
log.debug("Index of project [{}] is [{}]", options.startFrom, projectIndex);
}
filteredProjects = filteredProjects.subList(projectIndex, filteredProjects.size());
options.startFrom = "";
enforceFullRelease(options);
} else if (!options.taskNames.isEmpty()) {
filteredProjects = filteredProjects.stream()
.filter(project -> options.taskNames.contains(project))
.collect(Collectors.toList());
options.taskNames = new ArrayList<>();
enforceFullRelease(options);
}
log.info("\n\n\nFor meta-release, will release the projects {}\n\n\n", filteredProjects);
return filteredProjects;
}
protected void enforceFullRelease(Options options) {
options.fullRelease = true;
}
private File projectFolder() {
String workingDir = this.properties.getWorkingDir();
return new File(workingDir);
}
Args postReleaseOptionsAgs(Options options, ProjectsAndVersion projectsAndVersion) {
Projects projects = projectsAndVersion == null ?
projectsToUpdateForFixedVersions() : projectsAndVersion.projectVersions;
ProjectVersion version = projects.containsProject(this.properties.getMetaRelease().getReleaseTrainProjectName()) ?
projects.forName(this.properties.getMetaRelease().getReleaseTrainProjectName()) : versionFromBranch();
return new Args(this.releaser, projects, version,
this.properties, options.interactive);
}
private ProjectVersion versionFromBranch() {
String branch = this.properties.getPom().getBranch();
return new ProjectVersion(projectFolder().getName(), branch.startsWith("v") ? branch.substring(1) : branch);
}
private ProjectsAndVersion projects(File project) {
ProjectVersion versionFromScRelease;
Projects projectsToUpdate;
if (this.properties.getGit().isFetchVersionsFromGit() && !this.properties.getMetaRelease().isEnabled()) {
printVersionRetrieval();
projectsToUpdate = this.releaser.retrieveVersionsFromSCRelease();
versionFromScRelease = projectsToUpdate.forFile(project);
assertNoSnapshotsForANonSnapshotProject(projectsToUpdate, versionFromScRelease);
} else {
ProjectVersion originalVersion = new ProjectVersion(project);
String fixedVersionForProject = this.properties.getFixedVersions().get(originalVersion.projectName);
versionFromScRelease = new ProjectVersion(originalVersion.projectName, fixedVersionForProject == null ?
originalVersion.version : fixedVersionForProject);
projectsToUpdate = this.properties.getFixedVersions().entrySet().stream()
.map(entry -> new ProjectVersion(entry.getKey(), entry.getValue()))
.distinct().collect(Collectors.toCollection(Projects::new));
projectsToUpdate.add(versionFromScRelease);
printSettingVersionFromFixedVersions(projectsToUpdate);
}
return new ProjectsAndVersion(projectsToUpdate, versionFromScRelease);
}
class ProjectsAndVersion {
final Projects projectVersions;
final ProjectVersion versionFromScRelease;
ProjectsAndVersion(Projects projectVersions, ProjectVersion versionFromScRelease) {
this.projectVersions = projectVersions;
this.versionFromScRelease = versionFromScRelease;
}
}
private ProjectsAndVersion processProject(Options options, File project, TaskType taskType) {
ProjectsAndVersion projectsAndVersion = projects(project);
ProjectVersion originalVersion = new ProjectVersion(project);
final Args defaultArgs = new Args(this.releaser, project, projectsAndVersion.projectVersions,
originalVersion, projectsAndVersion.versionFromScRelease, this.properties,
options.interactive, taskType);
this.optionsProcessor.processOptions(options, defaultArgs);
return projectsAndVersion;
}
private Projects projectsToUpdateForFixedVersions() {
Projects projectsToUpdate = this.releaser.fixedVersions();
printSettingVersionFromFixedVersions(projectsToUpdate);
return projectsToUpdate;
}
private void printVersionRetrieval() {
log.info("\n\n\n=== RETRIEVING VERSIONS ===\n\nWill clone Spring Cloud Release"
+ " to retrieve all versions for the branch [{}]", this.properties.getPom().getBranch());
}
private void printSettingVersionFromFixedVersions(Projects projectsToUpdate) {
log.info("\n\n\n=== RETRIEVED VERSIONS ===\n\nWill use the fixed versions"
+ " of projects\n\n{}", projectsToUpdate
.stream().map(p -> p.projectName + " => " + p.version)
.collect(Collectors.joining("\n")));
}
private void assertNoSnapshotsForANonSnapshotProject(Projects projects,
ProjectVersion versionFromScRelease) {
if (!versionFromScRelease.isSnapshot() && projects.containsSnapshots()) {
throw new IllegalStateException("You are trying to release a non snapshot "
+ "version [" + versionFromScRelease + "] of the project [" + versionFromScRelease.projectName + "] but "
+ "there is at least one SNAPSHOT library version in the Spring Cloud Release project");
}
}
} |
package hex.kmeans;
import hex.ModelMetrics;
import hex.ModelMetricsClustering;
import hex.SplitFrame;
import org.junit.*;
import water.DKV;
import water.Key;
import water.Scope;
import water.TestUtil;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
import water.fvec.Frame;
import water.fvec.NFSFileVec;
import water.parser.ParseDataset;
import water.util.ArrayUtils;
import water.util.FrameUtils;
import water.util.Log;
import water.util.MathUtils;
import java.io.File;
import java.util.Arrays;
import java.util.Comparator;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
public class KMeansTest extends TestUtil {
// Absolute tolerance used when comparing computed values against reference results.
public final double threshold = 1e-6;
// Ensure the H2O cloud has at least one node before any test in this class runs.
@BeforeClass() public static void setup() { stall_till_cloudsize(1); }
/**
 * Trains a KMeans model with the given seed, checks training/scoring consistency and
 * asserts that every resulting cluster is non-empty.
 */
private static KMeansModel doSeed(KMeansModel.KMeansParameters parms, long seed) {
  parms._seed = seed;
  KMeans kmeansJob = null;
  KMeansModel model = null;
  try {
    kmeansJob = new KMeans(parms);
    model = kmeansJob.trainModel().get();
    checkConsistency(model);
  } finally {
    // Always release the job, even when training throws.
    if (kmeansJob != null) {
      kmeansJob.remove();
    }
  }
  for (int cluster = 0; cluster < parms._k; cluster++) {
    Assert.assertTrue("Seed: " + seed, model._output._size[cluster] != 0);
  }
  return model;
}
//PUBDEV-781: Double-check the training metrics (gathered by computeStatsFillModel) and the scoring logic by scoring on the training set
// NOTE(review): the entire body is disabled via `if (false)` pending PUBDEV-871 — it is
// intentionally dead code kept as a template for the eventual fix. Do not remove the guard
// without fixing the algorithm first.
private static void checkConsistency(KMeansModel kmm) {
  //FIXME: TODO: remove this false, and fix the algo! PUBDEV-871
  if (false) {
    KMeansModel.KMeansParameters parms = kmm._parms;
    // Cluster sizes must account for (almost) every training row.
    Assert.assertTrue((ArrayUtils.sum(kmm._output._size) - parms.train().numRows()) <= 1);
    // Log.info(kmm._output._model_summary);
    // Log.info(kmm._output._scoring_history);
    // Log.info(((ModelMetricsClustering)kmm._output._training_metrics).createCentroidStatsTable().toString());
    kmm.score(parms.train()).delete(); //this scores on the training data and appends a ModelMetrics
    ModelMetricsClustering mm = (ModelMetricsClustering) ModelMetrics.getFromDKV(kmm, parms.train());
    // Metrics produced by scoring must match those computed during model building.
    Assert.assertTrue(Arrays.equals(mm._size, ((ModelMetricsClustering) kmm._output._training_metrics)._size));
    for (int i = 0; i < parms._k; ++i) {
      Assert.assertTrue(MathUtils.compare(mm._withinss[i], ((ModelMetricsClustering) kmm._output._training_metrics)._withinss[i], 1e-6, 1e-6));
    }
    Assert.assertTrue(MathUtils.compare(mm._totss, ((ModelMetricsClustering) kmm._output._training_metrics)._totss, 1e-6, 1e-6));
    Assert.assertTrue(MathUtils.compare(mm._betweenss, ((ModelMetricsClustering) kmm._output._training_metrics)._betweenss, 1e-6, 1e-6));
    Assert.assertTrue(MathUtils.compare(mm._tot_withinss, ((ModelMetricsClustering) kmm._output._training_metrics)._tot_withinss, 1e-6, 1e-6));
  }
}
/**
 * Trains a 3-cluster model on the iris data and verifies that the categorical (class)
 * column of every centre is an integral level index.
 */
@Test public void testIris() {
  Frame trainFrame = null;
  Frame scoredFrame = null;
  KMeansModel model = null;
  try {
    trainFrame = parse_test_file("smalldata/iris/iris_wheader.csv");
    KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
    parms._train = trainFrame._key;
    parms._k = 3;
    parms._standardize = true;
    parms._max_iterations = 10;
    parms._init = KMeans.Initialization.Random;
    model = doSeed(parms, 0);
    // Iris last column is categorical; make sure centers are ordered in the
    // same order as the iris columns.
    double[][] centers = model._output._centers_raw;
    for (int k = 0; k < parms._k; k++) {
      double flower = centers[k][4];
      Assert.assertTrue("categorical column expected", flower == (int) flower);
    }
    // Done building model; produce a score column with cluster choices.
    scoredFrame = model.score(trainFrame);
  } finally {
    if (trainFrame != null) { trainFrame.delete(); }
    if (scoredFrame != null) { scoredFrame.delete(); }
    if (model != null) { model.delete(); }
  }
}
/**
 * Compares a 4-cluster, non-standardized model on USArrests against reference results
 * from R's kmeans, using the first four data rows as user-supplied initial centres.
 */
@Test public void testArrests() {
  // Initialize using first 4 rows of USArrests
  Frame init = ArrayUtils.frame(ard(ard(13.2, 236, 58, 21.2),
  ard(10.0, 263, 48, 44.5),
  ard(8.1, 294, 80, 31.0),
  ard(8.8, 190, 50, 19.5)));
  // R k-means results for comparison
  double totssR = 355807.8216;
  double btwssR = 318155.162076;
  double[] wssR = new double[] {2546.350000, 6705.906667, 9136.642857, 19263.760000};
  double[][] centersR = ard(ard( 4.270000, 87.550000, 59.750000, 14.390000),
  ard( 8.214286, 173.285714, 70.642857, 22.842857),
  ard(11.766667, 257.916667, 68.416667, 28.933333),
  ard(11.950000, 316.500000, 68.000000, 26.700000));
  // Expected cluster assignment per row, from R.
  Frame predR = ArrayUtils.frame(ar("predict"), ear(1, 1, 2, 0, 1, 0, 3, 1, 2, 0, 3, 3, 1, 3,
  3, 3, 3, 1, 3, 2, 0, 1, 3, 1, 0, 3, 3, 1,
  3, 0, 1, 1, 2, 3, 3, 0, 0, 3, 0, 1, 3, 0,
  0, 3, 3, 0, 0, 3, 3, 0));
  KMeansModel kmm = null;
  Frame fr = null, fr2 = null;
  try {
    fr = parse_test_file("smalldata/pca_test/USArrests.csv");
    KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
    parms._train = fr._key;
    parms._k = 4;
    parms._standardize = false;
    parms._init = KMeans.Initialization.User;
    parms._user_points = init._key;
    kmm = doSeed(parms, 0);
    // Sort cluster centers by first column for comparison purposes
    double[][] centers = new double[parms._k][];
    for(int i = 0; i < parms._k; i++)
      centers[i] = kmm._output._centers_raw[i].clone();
    Arrays.sort(centers, new Comparator<double[]>() {
      @Override
      public int compare(final double[] a, final double[] b) {
        Double d1 = a[0]; Double d2 = b[0];
        return d1.compareTo(d2);
      }
    });
    for(int i = 0; i < centers.length; i++)
      assertArrayEquals(centersR[i], centers[i], threshold);
    // Within-cluster sums of squares are compared order-independently after sorting.
    Arrays.sort(kmm._output._withinss);
    assertArrayEquals(wssR, kmm._output._withinss, threshold);
    assertEquals(totssR, kmm._output._totss, threshold);
    assertEquals(btwssR, kmm._output._betweenss, threshold);
    // Done building model; produce a score column with cluster choices
    fr2 = kmm.score(fr);
    Assert.assertTrue(kmm.testJavaScoring(fr,fr2,1e-15));
    assertVecEquals(predR.vec(0), fr2.vec(0), threshold);
    fr2.delete();
  } finally {
    init .delete();
    predR.delete();
    if( fr != null ) fr.delete();
    if( fr2 != null ) fr2.delete();
    if( kmm != null ) kmm.delete();
  }
}
/**
 * Runs seeds known to produce an empty cluster on iris plus a handful of random seeds;
 * doSeed asserts that the resulting model still has no empty clusters.
 */
@Test public void testBadCluster() {
  Frame irisFrame = null;
  try {
    irisFrame = parse_test_file("smalldata/iris/iris_wheader.csv");
    KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
    parms._train = irisFrame._key;
    parms._ignored_columns = new String[] {"class"};
    parms._k = 3;
    parms._standardize = true;
    parms._max_iterations = 10;
    parms._init = KMeans.Initialization.Random;
    // These two seeds each trigger an empty cluster on iris.
    doSeed(parms, 341534765239617L).delete();
    doSeed(parms, 341579128111283L).delete();
    // Random-seed smoke test.
    for (int run = 0; run < 10; run++) {
      doSeed(parms, System.nanoTime()).delete();
    }
  } finally {
    if (irisFrame != null) {
      irisFrame.delete();
    }
  }
}
// "datasets directory not always available"
@Test @Ignore public void testCovtype() {
Frame fr = null;
try {
File f = find_test_file("../datasets/UCI/UCI-large/covtype/covtype.data");
if( f==null ) return; // Ignore if large file not found
NFSFileVec nfs = NFSFileVec.make(f);
fr = ParseDataset.parse(Key.make(), nfs._key);
KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
parms._train = fr._key;
parms._k = 7;
parms._standardize = true;
parms._max_iterations = 100;
parms._init = KMeans.Initialization.Random;
for( int i=0; i<10; i++ )
doSeed(parms,System.nanoTime()).delete();
} finally {
if( fr != null ) fr.delete();
}
}
/** Convenience varargs-to-array helper for building rows of test data. */
private double[] d(double... ds) { return ds; }
/**
 * Element-wise comparison of two vectors with absolute tolerance 1e-8.
 * Vectors of different lengths are never close (previously this threw
 * ArrayIndexOutOfBoundsException when {@code b} was shorter than {@code a}).
 *
 * @param a first vector, not null
 * @param b second vector, not null
 * @return true if both vectors have the same length and agree element-wise within 1e-8
 */
boolean close(double[] a, double[] b) {
  if (a.length != b.length) {
    return false;
  }
  for (int i = 0; i < a.length; ++i) {
    if (Math.abs(a[i] - b[i]) > 1e-8) {
      return false;
    }
  }
  return true;
}
/**
 * Trains k=3 on a 3x3 one-hot frame; the recovered centres must match some permutation
 * of the three unit rows, and POJO scoring must agree with in-H2O scoring.
 * NOTE(review): {@code gotit} is set if any single row of any permutation matches the
 * corresponding centre row; a stricter check would require all rows of one permutation
 * to match — confirm intent before tightening.
 */
@Test
public void testCentroids(){
  Frame fr = ArrayUtils.frame(ard(d(1,0,0),d(0,1,0),d(0,0,1)));
  Frame fr2=null;
  try {
    KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
    parms._train = fr._key;
    parms._k = 3;
    parms._standardize = true;
    parms._max_iterations = 100;
    parms._init = KMeans.Initialization.Random;
    // All six permutations of the unit rows are acceptable centre orderings.
    double[][] exp1 = new double[][]{ d(1, 0, 0), d(0, 1, 0), d(0, 0, 1), };
    double[][] exp2 = new double[][]{ d(0, 1, 0), d(1, 0, 0), d(0, 0, 1), };
    double[][] exp3 = new double[][]{ d(0, 1, 0), d(0, 0, 1), d(1, 0, 0), };
    double[][] exp4 = new double[][]{ d(1, 0, 0), d(0, 0, 1), d(0, 1, 0), };
    double[][] exp5 = new double[][]{ d(0, 0, 1), d(1, 0, 0), d(0, 1, 0), };
    double[][] exp6 = new double[][]{ d(0, 0, 1), d(0, 1, 0), d(1, 0, 0), };
    for( int i=0; i<10; i++ ) {
      KMeansModel kmm = doSeed(parms, System.nanoTime());
      Assert.assertTrue(kmm._output._centers_raw.length == 3);
      fr2=kmm.score(fr);
      Assert.assertTrue(kmm.testJavaScoring(fr,fr2,1e-15));
      fr2.delete();
      boolean gotit = false;
      for (int j = 0; j < parms._k; ++j) gotit |= close(exp1[j], kmm._output._centers_raw[j]);
      for (int j = 0; j < parms._k; ++j) gotit |= close(exp2[j], kmm._output._centers_raw[j]);
      for (int j = 0; j < parms._k; ++j) gotit |= close(exp3[j], kmm._output._centers_raw[j]);
      for (int j = 0; j < parms._k; ++j) gotit |= close(exp4[j], kmm._output._centers_raw[j]);
      for (int j = 0; j < parms._k; ++j) gotit |= close(exp5[j], kmm._output._centers_raw[j]);
      for (int j = 0; j < parms._k; ++j) gotit |= close(exp6[j], kmm._output._centers_raw[j]);
      Assert.assertTrue(gotit);
      kmm.delete();
    }
  } finally {
    if( fr != null ) fr.delete();
    // Fix: also clean up the scored frame, matching test1Dimension's finally block;
    // previously it leaked when an assertion failed between score() and the in-loop delete.
    if( fr2 != null ) fr2.delete();
  }
}
/**
 * Trains k=2 on effectively one-dimensional data (second column all zero) with
 * Furthest initialization and checks POJO scoring agreement over random seeds.
 */
@Test public void test1Dimension() {
  Frame fr = ArrayUtils.frame(ard(d(1,0),d(0,0),d(-1,0),d(4,0),d(1,0),d(2,0),d(0,0),d(0,0)));
  Frame fr2=null;
  try {
    KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
    parms._train = fr._key;
    parms._k = 2;
    parms._standardize = true;
    parms._max_iterations = 100;
    parms._init = KMeans.Initialization.Furthest;
    for( int i=0; i<10; i++ ) {
      KMeansModel kmm = doSeed(parms, System.nanoTime());
      Assert.assertTrue(kmm._output._centers_raw.length == 2);
      fr2=kmm.score(fr);
      Assert.assertTrue(kmm.testJavaScoring(fr,fr2,1e-15));
      fr2.delete();
      kmm.delete();
    }
  } finally {
    if( fr != null ) fr.delete();
    if( fr2 != null) fr2.delete();
  }
}
@Test (expected = H2OModelBuilderIllegalArgumentException.class) public void testTooManyK() {
  // Requesting more clusters than the data supports must make the builder throw.
  Frame fr = ArrayUtils.frame(ard(d(1,0),d(0,0),d(1,0),d(2,0),d(0,0),d(0,0)));
  Frame fr2 = null;
  KMeansModel kmm = null;
  KMeansModel.KMeansParameters parms;
  try {
    parms = new KMeansModel.KMeansParameters();
    parms._train = fr._key;
    parms._k = 10; //too high -> will throw
    kmm = doSeed(parms, System.nanoTime());
    // Normally unreachable (doSeed throws); kept so that a silently
    // succeeding build is still exercised and fails the expected-exception check.
    fr2 = kmm.score(fr);
    Assert.assertTrue(kmm.testJavaScoring(fr,fr2,1e-15));
    fr2.delete();
    fr2 = null; // null out so the finally block does not delete it twice
  } finally {
    if( fr != null) fr.delete();
    if( fr2 != null) fr2.delete();
    if( kmm != null) kmm.delete();
  }
}
@Test public void testPOJO() {
  // Ignore test if the compiler failed to load
  Assume.assumeTrue(water.util.JCodeGen.canCompile());
  KMeansModel kmm = null;
  Frame fr = null, fr2 = null;
  try {
    fr = parse_test_file("smalldata/iris/iris_wheader.csv");
    KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
    parms._train = fr._key;
    parms._k = 3;
    parms._standardize = true;
    parms._max_iterations = 10;
    parms._init = KMeans.Initialization.Random;
    kmm = doSeed(parms,0);
    // Done building model; produce a score column with cluster choices
    fr2 = kmm.score(fr);
    // Generated POJO scoring must match in-cluster scoring to 1e-15.
    Assert.assertTrue(kmm.testJavaScoring(fr,fr2,1e-15));
    fr2.delete();
    fr2 = null; // null out so the finally block does not delete it twice
  } finally {
    if( fr != null ) fr.delete();
    if( fr2 != null ) fr2.delete();
    if( kmm != null ) kmm.delete();
  }
}
@Test public void testValidation() {
  // Train on one half of iris and validate on the other, with and without
  // standardization.
  for (boolean standardize : new boolean[]{true,false}) {
    // Declared per-iteration so a failed later iteration can never
    // double-delete the previous iteration's (already deleted) model.
    KMeansModel kmm = null;
    Frame fr = null, fr2 = null;
    Frame tr = null, te = null;
    try {
      fr = parse_test_file("smalldata/iris/iris_wheader.csv");
      SplitFrame sf = new SplitFrame();
      sf.dataset = fr;
      sf.ratios = new double[] { 0.5, 0.5 };
      sf.destination_frames = new Key[] { Key.make("train.hex"), Key.make("test.hex")};
      // Invoke the job
      sf.exec().get();
      Key[] ksplits = sf.destination_frames;
      tr = DKV.get(ksplits[0]).get();
      te = DKV.get(ksplits[1]).get();
      KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
      parms._train = ksplits[0];
      parms._valid = ksplits[1];
      parms._k = 3;
      parms._standardize = standardize;
      parms._max_iterations = 10;
      parms._init = KMeans.Initialization.Random;
      kmm = doSeed(parms, 0);
      // Iris last column is categorical; make sure centers are ordered in the
      // same order as the iris columns.
      double[][] centers = kmm._output._centers_raw;
      for( int k=0; k<parms._k; k++ ) {
        double flower = centers[k][4];
        Assert.assertTrue("categorical column expected",flower==(int)flower);
      }
      // Done building model; produce a score column with cluster choices
      fr2 = kmm.score(te);
      Assert.assertTrue(kmm.testJavaScoring(te,fr2,1e-15));
      // Null out each frame after deleting it so the finally block does not
      // delete the same frame a second time.
      fr2.delete();
      fr2 = null;
      tr.delete();
      tr = null;
      te.delete();
      te = null;
    } finally {
      if( fr != null ) fr.delete();
      if( fr2 != null ) fr2.delete();
      if( tr != null ) tr.delete();
      if( te != null ) te.delete();
      if( kmm != null ) kmm.delete();
    }
  }
}
@Test public void testValidationSame() {
  // Train and validate on the *same* data (same vecs under two keys):
  // training and validation metrics must then agree. Sweeps categorical
  // handling, missing values (disabled, PUBDEV-871) and standardization.
  for (boolean categorical : new boolean[]{true,false}) {
    for (boolean missing : new boolean[]{/*true,*/false}) { //FIXME: Enable missing PUBDEV-871
      for (boolean standardize : new boolean[]{true,false}) {
        Log.info("categorical: " + categorical);
        Log.info("missing: " + missing);
        Log.info("standardize: " + standardize);
        KMeansModel kmm = null;
        Frame fr = null, fr2 = null;
        Frame train = null, valid = null;
        try {
          fr = parse_test_file("smalldata/iris/iris_wheader.csv");
          if (missing) {
            // insert 10% missing values - check the math
            FrameUtils.MissingInserter mi = new FrameUtils.MissingInserter(fr._key, 1234, 0.1f);
            mi.execImpl();
            fr = mi.get();
            mi.remove();
          }
          // Both wrappers share fr's vecs -- only the keys differ.
          train = new Frame(Key.make("train"), fr.names(), fr.vecs());
          DKV.put(train);
          valid = new Frame(Key.make("valid"), fr.names(), fr.vecs());
          DKV.put(valid);
          KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
          parms._train = train._key;
          parms._valid = valid._key;
          if (!categorical) {
            parms._ignored_columns = new String[]{fr._names[4]};
          }
          parms._k = 3;
          parms._standardize = standardize;
          parms._max_iterations = 10;
          parms._init = KMeans.Initialization.PlusPlus;
          kmm = doSeed(parms, 0);
          if (categorical) {
            // Iris last column is categorical; make sure centers are ordered in the
            // same order as the iris columns.
            double[][] centers = kmm._output._centers_raw;
            for( int k=0; k<parms._k; k++ ) {
              double flower = centers[k][4];
              Assert.assertTrue("categorical column expected",flower==(int)flower);
            }
          }
          // Identical train/valid data => identical clustering metrics.
          Assert.assertTrue(
              MathUtils.compare(
                  ((ModelMetricsClustering) kmm._output._training_metrics)._totss,
                  ((ModelMetricsClustering) kmm._output._validation_metrics)._totss,
                  1e-6, 1e-6)
          );
          Assert.assertTrue(
              MathUtils.compare(
                  ((ModelMetricsClustering) kmm._output._training_metrics)._betweenss,
                  ((ModelMetricsClustering) kmm._output._validation_metrics)._betweenss,
                  1e-6, 1e-6)
          );
          Assert.assertTrue(
              MathUtils.compare(
                  ((ModelMetricsClustering) kmm._output._training_metrics)._tot_withinss,
                  ((ModelMetricsClustering) kmm._output._validation_metrics)._tot_withinss,
                  1e-6, 1e-6)
          );
          for (int i=0; i<parms._k; ++i) {
            Assert.assertTrue(
                MathUtils.compare(
                    ((ModelMetricsClustering) kmm._output._training_metrics)._withinss[i],
                    ((ModelMetricsClustering) kmm._output._validation_metrics)._withinss[i],
                    1e-6, 1e-6)
            );
            Assert.assertEquals(
                ((ModelMetricsClustering) kmm._output._training_metrics)._size[i],
                ((ModelMetricsClustering) kmm._output._validation_metrics)._size[i]
            );
          }
          // Done building model; produce a score column with cluster choices
          fr2 = kmm.score(fr);
          Assert.assertTrue(kmm.testJavaScoring(fr, fr2, 1e-15));
          // Null out each frame after deleting it so the finally block does
          // not delete the same frame a second time.
          fr.delete();
          fr = null;
          fr2.delete();
          fr2 = null;
          train.delete();
          train = null;
          valid.delete();
          valid = null;
        } finally {
          if( fr != null ) fr.delete();
          if( fr2 != null ) fr2.delete();
          if( train != null ) train.delete();
          if( valid != null ) valid.delete();
          if( kmm != null ) kmm.delete();
        }
      }
    }
  }
}
// Reference clustering metrics for the 3-fold cross-validation run in
// testNfolds() (iris_wheader.csv, seed 0xdecaf, k=3); used as regression values.
double _ref_betweenss = 528.5603501427154;
double _ref_tot_withinss = 167.43963679143025;
double _ref_totss = 695.9999869341457;
// Per-cluster within-cluster sum of squares and cluster sizes.
double _ref_withinss[] = new double[]{ 67.82696780398858, 48.15831080234685, 43.36554094593457};
long _ref_size[] = new long[]{54, 50, 46};
@Test
// 3-fold cross-validation on iris with a fixed seed; the aggregated CV
// metrics are regression-checked against the _ref_* constants above.
public void testNfolds() {
Frame tfr = null, vfr = null;
KMeansModel kmeans = null;
Scope.enter();
try {
tfr = parse_test_file("smalldata/iris/iris_wheader.csv");
DKV.put(tfr);
KMeansModel.KMeansParameters parms = new KMeansModel.KMeansParameters();
parms._train = tfr._key;
parms._seed = 0xdecaf;
parms._k = 3;
parms._nfolds = 3;
// Build a first model; all remaining models should be equal
KMeans job = new KMeans(parms);
kmeans = job.trainModel().get();
checkConsistency(kmeans);
// Aggregated cross-validation metrics must match the stored references.
ModelMetricsClustering mm = (ModelMetricsClustering)kmeans._output._cross_validation_metrics;
assertEquals(_ref_betweenss, mm._betweenss, 1e-8);
assertEquals(_ref_tot_withinss, mm._tot_withinss, 1e-8);
assertEquals(_ref_totss, mm._totss, 1e-6);
// Per-cluster training metrics are checked against the references too.
for (int i=0; i<parms._k; ++i) {
Assert.assertTrue(
MathUtils.compare(
((ModelMetricsClustering) kmeans._output._training_metrics)._withinss[i],
_ref_withinss[i],
1e-6, 1e-6)
);
Assert.assertEquals(
((ModelMetricsClustering) kmeans._output._training_metrics)._size[i],
_ref_size[i]
);
}
job.remove();
} finally {
if (tfr != null) tfr.remove();
if (vfr != null) vfr.remove();
if (kmeans != null) {
// Also remove the per-fold models built during cross-validation.
kmeans.deleteCrossValidationModels();
kmeans.delete();
}
Scope.exit();
}
}
} |
package water.api;
import hex.SplitFrame;
import water.Key;
import water.fvec.Frame;
/** REST schema (v2) for the {@link SplitFrame} job: splits {@code dataset}
 *  into parts according to {@code ratios}, writing each resulting split
 *  under a key from {@code destKeys}. */
public class SplitFrameV2 extends JobV2<SplitFrame, SplitFrameV2> {
@API(help="dataset")
public KeyV1.FrameKeyV1 dataset;
@API(help="Split ratios - resulting number of split is ratios.length+1", json=true)
public double[] ratios;
@API(help="Destination keys for each output frame split.")
public KeyV1[] destKeys;
// NOTE(review): schema fields are presumably copied onto the impl by the
// generic schema fill machinery; the factory only allocates an empty job --
// TODO confirm against JobV2.
@Override public SplitFrame createImpl() { return new SplitFrame(Key.make(), null); }
}
package ncku.hpds.hadoop.fedhdfs;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.ObjectOutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Date;
import java.util.Scanner;
import java.util.Vector;
import java.util.concurrent.Callable;
import org.apache.hadoop.conf.Configuration;
import org.w3c.dom.Element;
/**
 * Entry point of the federated-HDFS super namenode: owns the shared
 * GlobalNamespace and launches its three worker threads.
 */
public class SuperNamenode {
    static GlobalNamespace GN1 = new GlobalNamespace();

    public static void main(String[] args) throws Exception {
        // Listener accepting user-defined logical->physical registrations.
        Thread ldListener = new Thread(new GlobalNamespaceLD(GN1));
        ldListener.start();
        // Periodic refresher of the physical-drive mapping table.
        Thread pdRefresher = new Thread(new GlobalNamespacePD(GN1));
        pdRefresher.start();
        // Server that ships the global namespace object to clients.
        Thread namespaceServer = new Thread(new GlobalNamespaceServer(GN1));
        namespaceServer.start();
    }
}
/**
 * Accepts user-defined logical-to-physical mapping registrations over TCP
 * (port 8765) and records them in the shared GlobalNamespace.
 *
 * Expected wire format: "&lt;logicalFile&gt; &lt;hostName&gt;:&lt;physicalPath&gt;".
 */
class GlobalNamespaceLD implements Runnable {
    // Shared namespace updated with each registration. Was a static field
    // assigned through the constructor, which silently shares state across
    // instances; made a proper final instance field.
    private final GlobalNamespace GN;

    public GlobalNamespaceLD(GlobalNamespace GN) {
        this.GN = GN;
    }

    private boolean OutServer = false;
    private ServerSocket server;
    private final int ServerPort = 8765;

    @Override
    public void run() {
        try {
            server = new ServerSocket(ServerPort);
        } catch (IOException e1) {
            e1.printStackTrace();
            return; // previously fell through and NPE'd on a null server socket
        }
        System.out.println("SuperNamenode starting");
        while (!OutServer) {
            Socket socket = null;
            java.io.BufferedInputStream sin = null;
            try {
                synchronized (server) {
                    socket = server.accept();
                }
                System.out.println("InetAddress = " + socket.getInetAddress());
                // TimeOut
                socket.setSoTimeout(15000);
                sin = new java.io.BufferedInputStream(socket.getInputStream());
                // Read the whole request; StringBuilder avoids the quadratic
                // cost of repeated String concatenation.
                byte[] b = new byte[1024];
                StringBuilder sb = new StringBuilder();
                int length;
                while ((length = sin.read(b)) > 0) {
                    sb.append(new String(b, 0, length));
                }
                String data = sb.toString();
                System.out.println("UserDefined : " + data + "\n");
                String[] split = data.split(" ");
                System.out.println("Logical File is : " + split[0]);
                String[] subSplit = split[1].split(":");
                System.out.println("HostName is : " + subSplit[0]);
                System.out.println("Physical Path is : " + subSplit[1]);
                GN.UserConstructLD(split[0], subSplit[0], subSplit[1]);
            } catch (java.io.IOException e) {
                System.out.println("Socket connection error!");
                System.out.println("IOException :" + e.toString());
            } finally {
                // Always release per-connection resources, even when the
                // request was malformed or the read failed.
                try { if (sin != null) sin.close(); } catch (IOException ignored) { }
                try { if (socket != null) socket.close(); } catch (IOException ignored) { }
            }
        }
    }
}
/**
 * Background refresher: every 60 seconds re-reads the federation
 * configuration and rebuilds the physical-drive mapping table.
 */
class GlobalNamespacePD implements Runnable {
    // Shared namespace being refreshed (instance field; was a static field
    // assigned through the constructor).
    private final GlobalNamespace GN;

    public GlobalNamespacePD(GlobalNamespace GN) {
        this.GN = GN;
    }

    @Override
    public void run() {
        try {
            while (true) {
                Thread.sleep(60000);
                GN.setFedConf();
                GN.DynamicConstructPD();
                System.out.println("!!!! PHashTable download sucessful !!!!");
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so owners of this thread can
            // observe the interruption (it was silently swallowed before).
            Thread.currentThread().interrupt();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
class GlobalNamespaceServer extends Thread {
static GlobalNamespace GN;
public GlobalNamespaceServer(GlobalNamespace GN) {
this.GN = GN;
}
private boolean OutServer = false;
private ServerSocket server;
private final int ServerPort = 8764;
public void run() {
Socket socket;
ObjectOutputStream ObjectOut;
GlobalNamespaceObject test = new GlobalNamespaceObject();
test.setGlobalNamespace(GN);
try {
server = new ServerSocket(ServerPort);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
System.out.println("GlobalNamespace Sever is running");
while (!OutServer) {
socket = null;
try {
synchronized (server) {
socket = server.accept();
}
System.out.println("Client/server Connetion : InetAddress = "
+ socket.getInetAddress());
socket.setSoTimeout(15000);
System.out.println("test : " + test.getGlobalNamespace().getLogicalDrive().getLogicalMappingTable().entrySet());
System.out.println("test : " + test.getGlobalNamespace().getPhysicalDrive().getPhysicalMappingTable().size());
System.out.println("test : " + test.showLogicalMapping());
ObjectOut = new ObjectOutputStream(socket.getOutputStream());
ObjectOut.writeObject(test);
ObjectOut.flush();
ObjectOut.close();
ObjectOut = null;
//test = null;
socket.close();
socket = null;
} catch (java.io.IOException e) {
System.out.println("Socket !");
System.out.println("IOException :" + e.toString());
}
}
}
} |
package org.csstudio.opibuilder.widgets.figures;
import org.csstudio.opibuilder.OPIBuilderPlugin;
import org.csstudio.opibuilder.editparts.AbstractBaseEditPart;
import org.csstudio.swt.widgets.figures.ITextFigure;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Text;
/**Figure for a native text widget.
* @author Xihui Chen
*
*/
public class NativeTextFigure extends AbstractSWTWidgetFigure<Text> implements ITextFigure {
private Text text;
private boolean readOnly;
public NativeTextFigure(AbstractBaseEditPart editPart, int style) {
super(editPart, style);
}
@Override
protected Text createSWTWidget(Composite parent, int style) {
text= new Text(parent, style);
readOnly = (style & SWT.READ_ONLY)!=0;
return text;
}
public Dimension getAutoSizeDimension(){
Point preferredSize = text.computeSize(SWT.DEFAULT, SWT.DEFAULT);
return new Dimension(preferredSize.x + getInsets().getWidth(),
preferredSize.y + getInsets().getHeight());
}
@Override
public void setEnabled(boolean value) {
super.setEnabled(value);
if(runmode && getSWTWidget() != null && !getSWTWidget().isDisposed()){
//Its parent should be always enabled so the text can be enabled.
text.getParent().setEnabled(true);
text.setEnabled(true);
if(!readOnly)
getSWTWidget().setEditable(value);
}
}
@Override
public String getText() {
return text.getText();
}
} |
package org.ovirt.engine.api.restapi.resource;
import static org.ovirt.engine.core.common.action.VdcActionType.SetupNetworks;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.ovirt.engine.api.common.util.DetailHelper;
import org.ovirt.engine.api.common.util.LinkHelper;
import org.ovirt.engine.api.model.Action;
import org.ovirt.engine.api.model.Bonding;
import org.ovirt.engine.api.model.Host;
import org.ovirt.engine.api.model.HostNIC;
import org.ovirt.engine.api.model.HostNics;
import org.ovirt.engine.api.model.Link;
import org.ovirt.engine.api.model.Network;
import org.ovirt.engine.api.model.Slaves;
import org.ovirt.engine.api.model.Statistic;
import org.ovirt.engine.api.model.Statistics;
import org.ovirt.engine.api.resource.ActionResource;
import org.ovirt.engine.api.resource.HostNicResource;
import org.ovirt.engine.api.resource.HostNicsResource;
import org.ovirt.engine.core.common.action.AddBondParameters;
import org.ovirt.engine.core.common.action.RemoveBondParameters;
import org.ovirt.engine.core.common.action.SetupNetworksParameters;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VdsNetworkInterface;
import org.ovirt.engine.core.common.queries.GetAllNetworkQueryParamenters;
import org.ovirt.engine.core.common.queries.GetVdsByVdsIdParameters;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.common.queries.VdsGroupQueryParamenters;
import org.ovirt.engine.core.compat.Guid;
/**
 * REST collection resource for the network interfaces (NICs) of a single
 * host: listing NICs, creating/removing bonds, and the setupNetworks action.
 */
public class BackendHostNicsResource
extends AbstractBackendCollectionResource<HostNIC, VdsNetworkInterface>
implements HostNicsResource {
static final String SUB_COLLECTIONS = "statistics";
// Id of the host whose NICs this resource exposes.
private String hostId;
public BackendHostNicsResource(String hostId) {
super(HostNIC.class, VdsNetworkInterface.class, SUB_COLLECTIONS);
this.hostId = hostId;
}
public String getHostId() {
return hostId;
}
// Lists all NICs of the host. NICs attached to a known cluster network are
// reported by network id (name cleared) instead of by name.
@Override
public HostNics list() {
HostNics ret = new HostNics();
List<VdsNetworkInterface> ifaces = getCollection();
List<org.ovirt.engine.core.common.businessentities.Network> clusterNetworks = getClusterNetworks();
// Map of cluster-network name -> id used to translate iface network names.
Map<String, String> networkIds = new HashMap<String, String>();
for(org.ovirt.engine.core.common.businessentities.Network nwk : clusterNetworks) {
networkIds.put(nwk.getname(), nwk.getId().toString());
}
for (VdsNetworkInterface iface : ifaces) {
HostNIC hostNic = populate(map(iface, ifaces), iface);
if (networkIds.containsKey(iface.getNetworkName())) {
hostNic.getNetwork().setId(networkIds.get(iface.getNetworkName()));
hostNic.getNetwork().setName(null);
}
ret.getHostNics().add(addLinks(hostNic));
}
return addActions(ret);
}
// Creates a bond from the given NIC definition (name, network, slaves).
@SuppressWarnings("serial")
@Override
public Response add(final HostNIC nic) {
validateParameters(nic, "name", "network.id|name", "bonding.slaves.id|name");
validateEnums(HostNIC.class, nic);
return performCreation(VdcActionType.AddBond,
new AddBondParameters(asGuid(hostId),
nic.getName(),
lookupNetwork(nic.getNetwork()),
// Anonymous subclass body carries the bonding options over.
lookupSlaves(nic)){{setBondingOptions(map(nic, null).getBondOptions());}},
new HostNicResolver(nic.getName()));
}
// Removes the bond identified by the given NIC id.
@Override
public Response performRemove(String id) {
return performAction(VdcActionType.RemoveBond,
new RemoveBondParameters(asGuid(hostId),
lookupInterface(id).getName()));
}
@Override
@SingleEntityResource
public HostNicResource getHostNicSubResource(String id) {
return inject(new BackendHostNicResource(id, this));
}
// Finds a single NIC by id, applying the same network-id translation as
// list(); responds 404 when the id is unknown.
public HostNIC lookupNic(String id) {
List<VdsNetworkInterface> ifaces = getCollection();
for (VdsNetworkInterface iface : ifaces) {
if (iface.getId().toString().equals(id)) {
HostNIC hostNic = populate(map(iface, ifaces), iface);
for(org.ovirt.engine.core.common.businessentities.Network nwk : getClusterNetworks()){
if(nwk.getname().equals(iface.getNetworkName())) {
hostNic.getNetwork().setId(nwk.getId().toString());
hostNic.getNetwork().setName(null);
break;
}
}
return addLinks(hostNic);
}
}
return notFound();
}
// Backend entity lookup by id; raises an entity-not-found error if absent.
public VdsNetworkInterface lookupInterface(String id) {
for (VdsNetworkInterface iface : getCollection()) {
if (iface.getId().toString().equals(id)) {
return iface;
}
}
return entityNotFound();
}
// Backend entity lookup by name; returns null (no error) if absent.
protected VdsNetworkInterface lookupInterfaceByName(String name) {
for (VdsNetworkInterface iface : getCollection()) {
if (iface.getName().equals(name)) {
return iface;
}
}
return null;
}
// Fetches the host's NICs from the backend.
protected List<VdsNetworkInterface> getCollection() {
return getBackendCollection(VdcQueryType.GetVdsInterfacesByVdsId, new GetVdsByVdsIdParameters(asGuid(hostId)));
}
// Returns the given collection when already fetched, otherwise queries it.
protected List<VdsNetworkInterface> getCollection(List<VdsNetworkInterface> collection) {
if (collection != null) {
return collection;
} else {
return getCollection();
}
}
@Override
public HostNIC addParents(HostNIC nic) {
nic.setHost(new Host());
nic.getHost().setId(hostId);
return nic;
}
protected HostNIC map(VdsNetworkInterface iface, List<VdsNetworkInterface> ifaces) {
return map(iface, null, ifaces);
}
// Maps a backend iface to the API model, adding slave links for bonds and
// a master link for bond members.
protected HostNIC map(VdsNetworkInterface iface, HostNIC template, List<VdsNetworkInterface> ifaces) {
HostNIC nic = super.map(iface, template);
if (iface.getBonded() != null && iface.getBonded()) {
nic = addSlaveLinks(nic, getCollection(ifaces));
} else if (iface.getBondName() != null) {
nic = addMasterLink(nic, iface.getBondName(), getCollection(ifaces));
}
return nic;
}
@Override
protected HostNIC map(VdsNetworkInterface entity, HostNIC template) {
return map(entity, template, null);
}
// Reverse mapping: resolves the model's network reference to a backend
// network and stores its name on the backend iface.
@Override
protected VdsNetworkInterface map(HostNIC entity, VdsNetworkInterface template) {
VdsNetworkInterface iface = super.map(entity, template);
if (entity.isSetNetwork()) {
org.ovirt.engine.core.common.businessentities.Network net = lookupNetwork(entity.getNetwork());
iface.setNetworkName(net.getname());
}
return iface;
}
// Populates nic.bonding.slaves with every iface whose bondName matches.
protected HostNIC addSlaveLinks(HostNIC nic, List<VdsNetworkInterface> ifaces) {
if(nic.getBonding() == null) nic.setBonding(new Bonding());
nic.getBonding().setSlaves(new Slaves());
for (VdsNetworkInterface i : ifaces) {
if (isSlave(i, nic.getName())) {
nic.getBonding().getSlaves().getSlaves().add(slave(i.getId().toString()));
}
}
return nic;
}
protected boolean isSlave(VdsNetworkInterface iface, String masterName) {
return iface.getBondName() != null && iface.getBondName().equals(masterName);
}
// Builds a minimal slave HostNIC carrying only id + links; the transient
// Host parent is set only so LinkHelper can compute the href.
protected HostNIC slave(String id) {
HostNIC slave = new HostNIC();
slave.setId(id);
slave.setHost(new Host());
slave.getHost().setId(hostId);
slave = LinkHelper.addLinks(getUriInfo(), slave);
slave.setHost(null);
return slave;
}
// Adds a rel="master" link pointing at the bond this NIC belongs to.
protected HostNIC addMasterLink(HostNIC nic, String bondName, List<VdsNetworkInterface> ifaces) {
for (VdsNetworkInterface i : ifaces) {
if (i.getName().equals(bondName)) {
nic.getLinks().add(masterLink(i.getId().toString()));
break;
}
}
return nic;
}
protected Link masterLink(String id) {
Link master = new Link();
master.setRel("master");
master.setHref(idToHref(id));
return master;
}
// Computes the canonical href of a NIC id under this host.
protected String idToHref(String id) {
HostNIC master = new HostNIC();
master.setId(id);
master.setHost(new Host());
master.getHost().setId(hostId);
return LinkHelper.addLinks(getUriInfo(), master).getHref();
}
// Resolves a Network model (by id or name) against all backend networks;
// reports a 404-style error if no match is found.
protected org.ovirt.engine.core.common.businessentities.Network lookupNetwork(Network network) {
String id = network.getId();
String name = network.getName();
for (org.ovirt.engine.core.common.businessentities.Network entity : getBackendCollection(org.ovirt.engine.core.common.businessentities.Network.class,
VdcQueryType.GetAllNetworks,
new GetAllNetworkQueryParamenters(Guid.Empty))) {
if ((id != null && id.equals(entity.getId().toString())) ||
(name != null && name.equals(entity.getname()))) {
return entity;
}
}
return handleError(new EntityNotFoundException(id != null ? id : name), false);
}
// Translates the bond's slave references (by id or by name) into the
// backend interface names expected by AddBondParameters.
protected String[] lookupSlaves(HostNIC nic) {
List<String> slaves = new ArrayList<String>();
for (HostNIC slave : nic.getBonding().getSlaves().getSlaves()) {
if (slave.isSetId()) {
for (VdsNetworkInterface iface : getCollection()) {
if (iface.getId().toString().equals(slave.getId())) {
slaves.add(iface.getName());
}
}
} else {
slaves.add(slave.getName());
}
}
return slaves.toArray(new String[slaves.size()]);
}
@Override
protected HostNIC populate(HostNIC model, VdsNetworkInterface entity) {
return addStatistics(model, entity, uriInfo, httpHeaders);
}
// Attaches NIC statistics to the model when the request's "detail" header
// asks for the statistics sub-collection.
HostNIC addStatistics(HostNIC model, VdsNetworkInterface entity, UriInfo ui, HttpHeaders httpHeaders) {
if (DetailHelper.include(httpHeaders, "statistics")) {
model.setStatistics(new Statistics());
HostNicStatisticalQuery query = new HostNicStatisticalQuery(newModel(model.getId()));
List<Statistic> statistics = query.getStatistics(entity);
for (Statistic statistic : statistics) {
LinkHelper.addLinks(ui, statistic, query.getParentType());
}
model.getStatistics().getStatistics().addAll(statistics);
}
return model;
}
// Resolves a freshly-added bond by name (AddBond returns no id).
protected class HostNicResolver extends EntityIdResolver {
private String name;
HostNicResolver(String name) {
this.name = name;
}
@Override
public VdsNetworkInterface lookupEntity(Guid id) {
assert(id == null); // AddBond returns nothing, lookup name instead
return lookupInterfaceByName(name);
}
}
// Fetches the networks of the cluster this host belongs to.
@SuppressWarnings("unchecked")
protected List<org.ovirt.engine.core.common.businessentities.Network> getClusterNetworks(){
VDS vds = getEntity(VDS.class, VdcQueryType.GetVdsByVdsId, new GetVdsByVdsIdParameters(Guid.createGuidFromString(getHostId())), "Host");
return getEntity(List.class, VdcQueryType.GetAllNetworksByClusterId, new VdsGroupQueryParamenters(vds.getvds_group_id()), "Networks");
}
// Resolves a network (by id or name) among the host's cluster networks;
// responds 404 when not found.
public org.ovirt.engine.core.common.businessentities.Network lookupClusterNetwork(Network net) {
List<org.ovirt.engine.core.common.businessentities.Network> networks = getClusterNetworks();
if(net.isSetId()){
for(org.ovirt.engine.core.common.businessentities.Network nwk : networks){
if (nwk.getId().toString().equals(net.getId()))
return nwk;
}
}else{
String networkName = net.getName();
for(org.ovirt.engine.core.common.businessentities.Network nwk : networks){
if(nwk.getname().equals(networkName)) return nwk;
}
}
return notFound(org.ovirt.engine.core.common.businessentities.Network.class);
}
// Applies a full host-network configuration in one backend action.
@Override
public Response setupNetworks(Action action) {
validateParameters(action, "hostNics");
SetupNetworksParameters parameters = toParameters(action);
return performAction(SetupNetworks, parameters, action);
}
// Translates the REST Action into backend SetupNetworksParameters
// (interfaces, force/check-connectivity flags, timeout, networks to sync).
private SetupNetworksParameters toParameters(Action action) {
SetupNetworksParameters parameters = new SetupNetworksParameters();
parameters.setInterfaces(nicsToInterfaces(action.getHostNics().getHostNics()));
parameters.setVdsId(Guid.createGuidFromString(getHostId()));
parameters.setForce(action.isSetForce() ? action.isForce() : false);
parameters.setCheckConnectivity(action.isSetCheckConnectivity() ? action.isCheckConnectivity() : false);
if (action.isSetConnectivityTimeout()) {
parameters.setConectivityTimeout(action.getConnectivityTimeout());
}
parameters.setNetworksToSync(nicsToNetworksToSync(action.getHostNics().getHostNics()));
return parameters;
}
// Flattens the NIC models (and their bond slaves) into backend ifaces;
// each slave gets its bondName set to the owning NIC.
private List<VdsNetworkInterface> nicsToInterfaces(List<HostNIC> hostNics) {
List<VdsNetworkInterface> ifaces = new ArrayList<VdsNetworkInterface>(hostNics.size());
for (HostNIC nic : hostNics) {
VdsNetworkInterface iface = map(nic, null);
ifaces.add(iface);
if (nic.isSetBonding() && nic.getBonding().isSetSlaves()) {
for (HostNIC slave : nic.getBonding().getSlaves().getSlaves()) {
VdsNetworkInterface slaveIface = map(slave, slave.getId() == null
? lookupInterfaceByName(slave.getName()) : lookupInterface(slave.getId()));
slaveIface.setBondName(nic.getName());
ifaces.add(slaveIface);
}
}
}
return ifaces;
}
// Collects the names of networks flagged with overrideConfiguration.
private List<String> nicsToNetworksToSync(List<HostNIC> hostNics) {
List<String> networks = new ArrayList<String>();
for (HostNIC nic : hostNics) {
if (nic.isSetOverrideConfiguration() && nic.isOverrideConfiguration() && nic.isSetNetwork()) {
org.ovirt.engine.core.common.businessentities.Network net = lookupNetwork(nic.getNetwork());
networks.add(net.getname());
}
}
return networks;
}
@Override
public ActionResource getActionSubresource(String action) {
return inject(new BackendActionResource(action, ""));
}
}
package org.ovirt.engine.core.vdsbroker.vdsbroker;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.bll.network.cluster.ManagementNetworkUtil;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.FeatureSupported;
import org.ovirt.engine.core.common.businessentities.ArchitectureType;
import org.ovirt.engine.core.common.businessentities.AutoNumaBalanceStatus;
import org.ovirt.engine.core.common.businessentities.CpuStatistics;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.GraphicsInfo;
import org.ovirt.engine.core.common.businessentities.GraphicsType;
import org.ovirt.engine.core.common.businessentities.HostDevice;
import org.ovirt.engine.core.common.businessentities.KdumpStatus;
import org.ovirt.engine.core.common.businessentities.NumaNodeStatistics;
import org.ovirt.engine.core.common.businessentities.SessionState;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.V2VJobInfo;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSDomainsData;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VdsNumaNode;
import org.ovirt.engine.core.common.businessentities.VdsTransparentHugePagesState;
import org.ovirt.engine.core.common.businessentities.VmBalloonInfo;
import org.ovirt.engine.core.common.businessentities.VmBlockJob;
import org.ovirt.engine.core.common.businessentities.VmBlockJobType;
import org.ovirt.engine.core.common.businessentities.VmDynamic;
import org.ovirt.engine.core.common.businessentities.VmExitReason;
import org.ovirt.engine.core.common.businessentities.VmExitStatus;
import org.ovirt.engine.core.common.businessentities.VmGuestAgentInterface;
import org.ovirt.engine.core.common.businessentities.VmJob;
import org.ovirt.engine.core.common.businessentities.VmJobState;
import org.ovirt.engine.core.common.businessentities.VmJobType;
import org.ovirt.engine.core.common.businessentities.VmNumaNode;
import org.ovirt.engine.core.common.businessentities.VmPauseStatus;
import org.ovirt.engine.core.common.businessentities.VmRngDevice;
import org.ovirt.engine.core.common.businessentities.VmStatic;
import org.ovirt.engine.core.common.businessentities.VmStatistics;
import org.ovirt.engine.core.common.businessentities.network.Bond;
import org.ovirt.engine.core.common.businessentities.network.HostNetworkQos;
import org.ovirt.engine.core.common.businessentities.network.InterfaceStatus;
import org.ovirt.engine.core.common.businessentities.network.NetworkInterface;
import org.ovirt.engine.core.common.businessentities.network.NetworkStatistics;
import org.ovirt.engine.core.common.businessentities.network.Nic;
import org.ovirt.engine.core.common.businessentities.network.VdsInterfaceType;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkStatistics;
import org.ovirt.engine.core.common.businessentities.network.Vlan;
import org.ovirt.engine.core.common.businessentities.network.VmInterfaceType;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.core.common.businessentities.storage.DiskImageDynamic;
import org.ovirt.engine.core.common.businessentities.storage.DiskInterface;
import org.ovirt.engine.core.common.businessentities.storage.LUNs;
import org.ovirt.engine.core.common.businessentities.storage.StorageType;
import org.ovirt.engine.core.common.businessentities.storage.VolumeFormat;
import org.ovirt.engine.core.common.businessentities.storage.VolumeType;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.utils.EnumUtils;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.utils.SizeConverter;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.RpmVersion;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogDirector;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBase;
import org.ovirt.engine.core.di.Injector;
import org.ovirt.engine.core.utils.NetworkUtils;
import org.ovirt.engine.core.utils.NumaUtils;
import org.ovirt.engine.core.utils.SerializationFactory;
import org.ovirt.engine.core.utils.linq.LinqUtils;
import org.ovirt.engine.core.utils.network.predicate.InterfaceByAddressPredicate;
import org.ovirt.engine.core.vdsbroker.NetworkStatisticsBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class encapsulate the knowledge of how to create objects from the VDS RPC protocol response.
* This class has methods that receive XmlRpcStruct and construct the following Classes: VmDynamic VdsDynamic VdsStatic.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class VdsBrokerObjectsBuilder {
private static final Logger log = LoggerFactory.getLogger(VdsBrokerObjectsBuilder.class);
// NOTE(review): 5900 is the conventional first VNC display port; its exact
// use is not visible in this chunk -- confirm at the usage sites.
private final static int VNC_START_PORT = 5900;
// Nanoseconds per second, presumably for converting CPU-time counters --
// TODO confirm at the usage sites.
private final static double NANO_SECONDS = 1000000000;
private static final AuditLogDirector auditLogDirector = new AuditLogDirector();
// Orders NUMA nodes by ascending index. Uses Integer.compare so that equal
// indexes compare as 0: the previous "return -1 or 1" form violated the
// Comparator contract (compare(a,a) != 0, not antisymmetric), which can make
// sorts throw "Comparison method violates its general contract!".
private static final Comparator<VdsNumaNode> numaNodeComparator = new Comparator<VdsNumaNode>() {
    @Override
    public int compare(VdsNumaNode arg0, VdsNumaNode arg1) {
        return Integer.compare(arg0.getIndex(), arg1.getIndex());
    }
};
/**
 * Builds a {@link VM} from a struct reported by an external provider.
 *
 * @param xmlRpcStruct the raw struct describing the VM
 * @return the assembled VM, or {@code null} when mandatory static fields are missing
 */
public static VM buildVmsDataFromExternalProvider(Map<String, Object> xmlRpcStruct) {
    VmStatic staticData = buildVmStaticDataFromExternalProvider(xmlRpcStruct);
    if (staticData == null) {
        // Mandatory fields (guid, name, memory, cpus) were absent.
        return null;
    }
    VM result = new VM(staticData, buildVMDynamicDataFromList(xmlRpcStruct), new VmStatistics());
    // Register every reported image in the disk map under a freshly generated key.
    for (DiskImage image : result.getImages()) {
        result.getDiskMap().put(Guid.newGuid(), image);
    }
    result.setClusterArch(parseArchitecture(xmlRpcStruct));
    return result;
}
/**
 * Converts the {@code devices} array of a VM struct into a list of {@linkplain DiskImage}.
 * Mainly used to import the Hosted Engine Vm disks.
 *
 * @param vmStruct VDSM struct describing the VM
 * @return the disk images found among the VM's devices (possibly empty)
 */
public static ArrayList<DiskImage> buildDiskImagesFromDevices(Map<String, Object> vmStruct) {
    ArrayList<DiskImage> diskImages = new ArrayList<>();
    Object[] devices = (Object[]) vmStruct.get("devices");
    if (devices != null) {
        for (Object device : devices) {
            Map<String, Object> deviceMap = (Map<String, Object>) device;
            if (VdsProperties.Disk.equals(deviceMap.get(VdsProperties.Device))) {
                DiskImage image = new DiskImage();
                image.setDiskAlias((String) deviceMap.get(VdsProperties.Alias));
                image.setSize(Long.parseLong((String) deviceMap.get("apparentsize")));
                image.setActualSize(Long.parseLong((String) deviceMap.get("truesize")));
                image.setvolumeFormat(VolumeFormat.valueOf(((String) deviceMap.get(VdsProperties.Format)).toUpperCase()));
                image.setShareable(false);
                // Fix: a redundant setId(Guid.newGuid()) that was immediately
                // overwritten by the device-id assignment below has been removed.
                image.setId(Guid.createGuidFromString((String) deviceMap.get(VdsProperties.DeviceId)));
                image.setImageId(Guid.createGuidFromString((String) deviceMap.get(VdsProperties.VolumeId)));
                // TODO not sure how to extract that info
                image.setVolumeType(VolumeType.Preallocated);
                switch ((String) deviceMap.get("iface")) {
                case "virtio":
                    image.setDiskInterface(DiskInterface.VirtIO);
                    break;
                case "iscsi":
                    image.setDiskInterface(DiskInterface.VirtIO_SCSI);
                    break;
                case "ide":
                    image.setDiskInterface(DiskInterface.IDE);
                    break;
                // any other interface value leaves the disk interface unset, as before
                }
                diskImages.add(image);
            }
        }
    }
    return diskImages;
}
/**
 * Converts the {@code devices} array of a VM struct into a list of {@linkplain VmNetworkInterface}.
 * Mainly used to import the Hosted Engine Vm network interfaces.
 *
 * @param vmStruct VDSM struct describing the VM
 * @return A List of VM network interfaces {@linkplain VmNetworkInterface}
 */
public static ArrayList<VmNetworkInterface> buildVmNetworkInterfacesFromDevices(Map<String, Object> vmStruct) {
    ArrayList<VmNetworkInterface> nics = new ArrayList<>();
    Object[] devices = (Object[]) vmStruct.get("devices");
    if (devices == null) {
        return nics;
    }
    for (Object rawDevice : devices) {
        Map<String, Object> props = (Map<String, Object>) rawDevice;
        if (!VdsProperties.INTERFACE.equals(props.get(VdsProperties.Type))) {
            continue;
        }
        VmNetworkInterface vnic = new VmNetworkInterface();
        vnic.setMacAddress((String) props.get(VdsProperties.MAC_ADDR));
        vnic.setName((String) props.get(VdsProperties.Name));
        // FIXME we can't deduce the network profile by the network name. its many to many.
        vnic.setNetworkName((String) props.get(VdsProperties.NETWORK));
        vnic.setType(VmInterfaceType.valueOf((String) props.get(VdsProperties.NIC_TYPE)).getValue());
        if (props.containsKey(VdsProperties.Model)) {
            // A reported device model overrides the nic-type-derived value.
            String model = (String) props.get(VdsProperties.Model);
            for (VmInterfaceType candidate : VmInterfaceType.values()) {
                if (model.equals(candidate.getInternalName())) {
                    vnic.setType(candidate.getValue());
                    break;
                }
            }
        }
        nics.add(vnic);
    }
    return nics;
}
/**
 * Builds a {@link VmStatic} from a struct reported by an external provider.
 * Returns {@code null} when any of the mandatory fields (guid, name, memory
 * size, number of CPUs) is missing; callers treat that as "not importable".
 */
private static VmStatic buildVmStaticDataFromExternalProvider(Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.vm_guid) || !xmlRpcStruct.containsKey(VdsProperties.vm_name)
            || !xmlRpcStruct.containsKey(VdsProperties.mem_size_mb)
            || !xmlRpcStruct.containsKey(VdsProperties.num_of_cpus)) {
        return null;
    }
    VmStatic vmStatic = new VmStatic();
    vmStatic.setId(Guid.createGuidFromString((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    vmStatic.setName((String) xmlRpcStruct.get(VdsProperties.vm_name));
    vmStatic.setMemSizeMb(parseIntVdsProperty(xmlRpcStruct.get(VdsProperties.mem_size_mb)));
    // NOTE(review): the reported CPU count is stored as the number of sockets
    // (one core per socket?) — confirm this is the intended mapping.
    vmStatic.setNumOfSockets(parseIntVdsProperty(xmlRpcStruct.get(VdsProperties.num_of_cpus)));
    vmStatic.setCustomCpuName((String) xmlRpcStruct.get(VdsProperties.cpu_model));
    vmStatic.setCustomEmulatedMachine((String) xmlRpcStruct.get(VdsProperties.emulatedMachine));
    // Collect disk images among the reported disk devices.
    if (xmlRpcStruct.containsKey(VdsProperties.vm_disks)) {
        for (Object disk : (Object[]) xmlRpcStruct.get(VdsProperties.vm_disks)) {
            Map<String, Object> diskMap = (Map<String, Object>) disk;
            if (VdsProperties.Disk.equals(diskMap.get(VdsProperties.type))) {
                DiskImage image = buildDiskImageFromExternalProvider(diskMap);
                vmStatic.getImages().add(image);
            }
        }
    }
    // NICs get sequential synthetic names (nic1, nic2, ...) tied to this VM.
    if (xmlRpcStruct.containsKey(VdsProperties.NETWORKS)) {
        int idx = 0;
        for (Object networkMap : (Object[]) xmlRpcStruct.get(VdsProperties.NETWORKS)) {
            VmNetworkInterface nic = buildNetworkInterfaceFromExternalProvider((Map<String, Object>) networkMap);
            nic.setName(String.format("nic%d", ++idx));
            nic.setVmName(vmStatic.getName());
            nic.setVmId(vmStatic.getId());
            vmStatic.getInterfaces().add(nic);
        }
    }
    return vmStatic;
}
/**
 * Builds a {@link DiskImage} (with a fresh random id) from a disk struct
 * reported by an external provider.
 */
private static DiskImage buildDiskImageFromExternalProvider(Map<String, Object> map) {
    DiskImage disk = new DiskImage();
    disk.setId(Guid.newGuid());
    disk.setDiskAlias((String) map.get(VdsProperties.Alias));
    disk.setSize(Long.parseLong((String) map.get(VdsProperties.DISK_VIRTUAL_SIZE)));
    disk.setActualSizeInBytes(Long.parseLong((String) map.get(VdsProperties.DISK_ALLOCATION)));
    return disk;
}
/**
 * Builds a {@link VmNetworkInterface} from a network struct reported by an
 * external provider. Defaults to the para-virtualized type unless a known
 * device model is reported.
 */
private static VmNetworkInterface buildNetworkInterfaceFromExternalProvider(Map<String, Object> map) {
    VmNetworkInterface result = new VmNetworkInterface();
    result.setMacAddress((String) map.get(VdsProperties.MAC_ADDR));
    result.setRemoteNetworkName((String) map.get(VdsProperties.BRIDGE));
    result.setType(VmInterfaceType.pv.getValue());
    if (map.containsKey(VdsProperties.Model)) {
        String model = (String) map.get(VdsProperties.Model);
        for (VmInterfaceType candidate : VmInterfaceType.values()) {
            if (model.equals(candidate.getInternalName())) {
                result.setType(candidate.getValue());
                break;
            }
        }
    }
    return result;
}
/**
 * Builds a minimal {@link VmDynamic} (id and status only) from a VDSM struct.
 * Both fields are optional; absent keys leave the corresponding field unset.
 */
public static VmDynamic buildVMDynamicDataFromList(Map<String, Object> xmlRpcStruct) {
    VmDynamic dynamic = new VmDynamic();
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        dynamic.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.status)) {
        dynamic.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
    }
    return dynamic;
}
/**
 * @return the VDSM status timestamp from the struct, or {@code -1} when absent
 */
public static Double getVdsmCallTimestamp(Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.statusTime)) {
        return -1d;
    }
    return assignDoubleValue(xmlRpcStruct, VdsProperties.statusTime);
}
/**
 * Builds a fully-populated {@link VmDynamic} from a VDSM struct, adjusting
 * display addresses against the reporting host.
 */
public static VmDynamic buildVMDynamicData(Map<String, Object> xmlRpcStruct, VDS host) {
    VmDynamic dynamic = new VmDynamic();
    updateVMDynamicData(dynamic, xmlRpcStruct, host);
    return dynamic;
}
/**
 * Builds a {@link StoragePool} from a VDSM struct: locality (derived from the
 * storage type), name and master domain version.
 */
public static StoragePool buildStoragePool(Map<String, Object> xmlRpcStruct) {
    StoragePool pool = new StoragePool();
    if (xmlRpcStruct.containsKey("type")) {
        pool.setIsLocal(StorageType.valueOf(xmlRpcStruct.get("type").toString()).isLocal());
    }
    pool.setName(assignStringValue(xmlRpcStruct, "name"));
    Integer masterVersion = assignIntValue(xmlRpcStruct, "master_ver");
    if (masterVersion != null) {
        pool.setMasterDomainVersion(masterVersion);
    }
    return pool;
}
/**
 * Builds a fresh {@link VmStatistics} populated from a VDSM stats struct.
 */
public static VmStatistics buildVMStatisticsData(Map<String, Object> xmlRpcStruct) {
    VmStatistics statistics = new VmStatistics();
    updateVMStatisticsData(statistics, xmlRpcStruct);
    return statistics;
}
/**
 * Builds a map of LUN id to {@link LUNs} from the VM's disks struct. Disks
 * without a LUN guid are skipped; the device size is filled in GiB when the
 * true size is reported.
 */
public static Map<String, LUNs> buildVmLunDisksData(Map<String, Object> xmlRpcStruct) {
    Map<String, LUNs> lunsMap = new HashMap<>();
    Map<String, Object> disks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_disks);
    if (disks == null) {
        return lunsMap;
    }
    for (Object rawDisk : disks.values()) {
        Map<String, Object> disk = (Map<String, Object>) rawDisk;
        String lunGuid = assignStringValue(disk, VdsProperties.lun_guid);
        if (StringUtils.isEmpty(lunGuid)) {
            continue;
        }
        LUNs lun = new LUNs();
        lun.setLUN_id(lunGuid);
        if (disk.containsKey(VdsProperties.disk_true_size)) {
            long sizeInBytes = assignLongValue(disk, VdsProperties.disk_true_size);
            int sizeInGB = SizeConverter.convert(
                    sizeInBytes, SizeConverter.SizeUnit.BYTES, SizeConverter.SizeUnit.GiB).intValue();
            lun.setDeviceSize(sizeInGB);
        }
        lunsMap.put(lunGuid, lun);
    }
    return lunsMap;
}
/**
 * Populates a {@link VmDynamic} in place from a VDSM VM-stats struct.
 * Every field in the struct is optional; only keys that are present are
 * applied, and per-field parse failures are logged without aborting the rest.
 *
 * @param vm the dynamic data to update in place
 * @param xmlRpcStruct the raw struct returned by VDSM
 * @param host the host reporting the VM; used to adjust display addresses
 */
public static void updateVMDynamicData(VmDynamic vm, Map<String, Object> xmlRpcStruct, VDS host) {
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.session)) {
        String session = (String) xmlRpcStruct.get(VdsProperties.session);
        try {
            vm.setSession(SessionState.valueOf(session));
        } catch (Exception e) {
            // Unknown session value: log and keep the previous state.
            log.error("Illegal vm session '{}'.", session);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.kvmEnable)) {
        vm.setKvmEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.kvmEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.acpiEnable)) {
        vm.setAcpiEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.acpiEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.win2kHackEnable)) {
        vm.setWin2kHackEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.win2kHackEnable)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.status)) {
        vm.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
    }
    // Prefer the modern displayInfo structure; fall back to the legacy
    // vm.conf-style display fields when it is absent.
    boolean hasGraphicsInfo = updateGraphicsInfo(vm, xmlRpcStruct);
    if (!hasGraphicsInfo) {
        updateGraphicsInfoFromConf(vm, xmlRpcStruct);
    }
    adjustDisplayIp(vm.getGraphicsInfos(), host);
    if (xmlRpcStruct.containsKey((VdsProperties.utc_diff))) {
        String utc_diff = xmlRpcStruct.get(VdsProperties.utc_diff).toString();
        // A leading "+" would break Integer.parseInt on older JDK conventions;
        // strip it before parsing.
        if (utc_diff.startsWith("+")) {
            utc_diff = utc_diff.substring(1);
        }
        try {
            vm.setUtcDiff(Integer.parseInt(utc_diff));
        } catch (NumberFormatException e) {
            log.error("Illegal vm offset (utc_diff) '{}'.", utc_diff);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.hash)) {
        String hash = (String) xmlRpcStruct.get(VdsProperties.hash);
        try {
            vm.setHash(hash);
        } catch (Exception e) {
            log.error("Illegal vm hash '{}'.", hash);
        }
    }
    // VM disks reported by VDSM.
    if (xmlRpcStruct.containsKey(VdsProperties.vm_disks)) {
        initDisks(xmlRpcStruct, vm);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.vm_host)) {
        vm.setVmHost(assignStringValue(xmlRpcStruct, VdsProperties.vm_host));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.guest_cur_user_name)) {
        vm.setGuestCurrentUserName(assignStringValue(xmlRpcStruct, VdsProperties.guest_cur_user_name));
    }
    initAppsList(xmlRpcStruct, vm);
    if (xmlRpcStruct.containsKey(VdsProperties.guest_os)) {
        vm.setGuestOs(assignStringValue(xmlRpcStruct, VdsProperties.guest_os));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.VM_FQDN)) {
        vm.setVmFQDN(assignStringValue(xmlRpcStruct, VdsProperties.VM_FQDN));
        String fqdn = vm.getVmFQDN().trim();
        // Loopback host names carry no information — treat them as unknown.
        if ("localhost".equalsIgnoreCase(fqdn) || "localhost.localdomain".equalsIgnoreCase(fqdn)) {
            vm.setVmFQDN(null);
        }
        else {
            vm.setVmFQDN(fqdn);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.VM_IP)) {
        vm.setVmIp(assignStringValue(xmlRpcStruct, VdsProperties.VM_IP));
    }
    if (vm.getVmIp() != null) {
        // Discard loopback addresses; trim anything else.
        if (vm.getVmIp().startsWith("127.0.")) {
            vm.setVmIp(null);
        } else {
            vm.setVmIp(vm.getVmIp().trim());
        }
    }
    // Guest memory stats sub-struct (buffered/cached/free).
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guest_mem_stats)) {
        Map<String, Object> sub = (Map<String, Object>)xmlRpcStruct.get(VdsProperties.vm_guest_mem_stats);
        if (sub.containsKey(VdsProperties.vm_guest_mem_buffered)) {
            vm.setGuestMemoryBuffered(Long.parseLong(sub.get(VdsProperties.vm_guest_mem_buffered).toString()));
        }
        if (sub.containsKey(VdsProperties.vm_guest_mem_cached)) {
            vm.setGuestMemoryCached(Long.parseLong(sub.get(VdsProperties.vm_guest_mem_cached).toString()));
        }
        if (sub.containsKey(VdsProperties.vm_guest_mem_free)) {
            vm.setGuestMemoryFree(Long.parseLong(sub.get(VdsProperties.vm_guest_mem_free).toString()));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_code)) {
        String exitCodeStr = xmlRpcStruct.get(VdsProperties.exit_code).toString();
        vm.setExitStatus(VmExitStatus.forValue(Integer.parseInt(exitCodeStr)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_message)) {
        String exitMsg = (String) xmlRpcStruct.get(VdsProperties.exit_message);
        vm.setExitMessage(exitMsg);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.exit_reason)) {
        String exitReasonStr = xmlRpcStruct.get(VdsProperties.exit_reason).toString();
        vm.setExitReason(VmExitReason.forValue(Integer.parseInt(exitReasonStr)));
    }
    // if monitorResponse returns negative it means its erroneous
    if (xmlRpcStruct.containsKey(VdsProperties.monitorResponse)) {
        int response = Integer.parseInt(xmlRpcStruct.get(VdsProperties.monitorResponse).toString());
        if (response < 0) {
            vm.setStatus(VMStatus.NotResponding);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.clientIp)) {
        vm.setClientIp(xmlRpcStruct.get(VdsProperties.clientIp).toString());
    }
    if (xmlRpcStruct.containsKey(VdsProperties.pauseCode)) {
        String pauseCodeStr = (String) xmlRpcStruct.get(VdsProperties.pauseCode);
        try {
            vm.setPauseStatus(VmPauseStatus.valueOf(pauseCodeStr));
        } catch (IllegalArgumentException ex) {
            log.error("Error in parsing vm pause status. Setting value to NONE");
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.watchdogEvent)) {
        Map<String, Object> watchdogStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.watchdogEvent);
        // The event time arrives as a fractional timestamp; seconds are kept.
        double time = Double.parseDouble(watchdogStruct.get(VdsProperties.time).toString());
        String action =
                watchdogStruct.containsKey(VdsProperties.action) ? watchdogStruct.get(VdsProperties.action)
                        .toString() : null;
        vm.setLastWatchdogEvent((long) time);
        vm.setLastWatchdogAction(action);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.CDRom)) {
        // Only the ISO file name is kept, not its full path on the host.
        Path fileName = Paths.get((String) xmlRpcStruct.get(VdsProperties.CDRom)).getFileName();
        if (fileName != null) {
            String isoName = fileName.toString();
            vm.setCurrentCd(isoName);
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.GUEST_CPU_COUNT)) {
        vm.setGuestCpuCount(assignIntValue(xmlRpcStruct, VdsProperties.GUEST_CPU_COUNT));
    }
    // Guest OS Info
    if (xmlRpcStruct.containsKey(VdsProperties.GUEST_OS_INFO)) {
        updateGuestOsInfo(vm, xmlRpcStruct);
    }
    // Guest Timezone
    if (xmlRpcStruct.containsKey(VdsProperties.GUEST_TIMEZONE)) {
        Map<String, Object> guestTimeZoneStruct =
                (Map<String, Object>) xmlRpcStruct.get(VdsProperties.GUEST_TIMEZONE);
        vm.setGuestOsTimezoneName(assignStringValue(guestTimeZoneStruct, VdsProperties.GUEST_TIMEZONE_ZONE));
        vm.setGuestOsTimezoneOffset(assignIntValue(guestTimeZoneStruct, VdsProperties.GUEST_TIMEZONE_OFFSET));
    }
}
/**
 * Adjusts the display IP of each graphics info:
 * a cluster-level console address override on the host always wins;
 * otherwise an address starting with "0" is replaced by the host's hostname.
 *
 * @param graphicsInfos the graphics infos to adjust; may be {@code null}
 * @param host the host currently running the VM
 */
private static void adjustDisplayIp(Map<GraphicsType, GraphicsInfo> graphicsInfos, VDS host) {
    if (graphicsInfos == null) {
        return;
    }
    for (GraphicsInfo info : graphicsInfos.values()) {
        if (info == null) {
            continue;
        }
        if (host.getConsoleAddress() != null) {
            info.setIp(host.getConsoleAddress());
        } else if (info.getIp() != null && info.getIp().startsWith("0")) {
            info.setIp(host.getHostName());
        }
    }
}
/**
 * Copies the guest-agent-reported OS information (architecture, codename,
 * distribution, kernel, type, version) from the struct into the VM.
 * Unknown architecture/type values are logged and skipped.
 */
private static void updateGuestOsInfo(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    Map<String, Object> guestOsInfoStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.GUEST_OS_INFO);
    if(guestOsInfoStruct.containsKey(VdsProperties.GUEST_OS_INFO_ARCH)) {
        String arch = assignStringValue(guestOsInfoStruct, VdsProperties.GUEST_OS_INFO_ARCH);
        try {
            // setGuestOsArch apparently validates the value — it can throw
            // IllegalArgumentException for unknown architectures.
            vm.setGuestOsArch(arch);
        } catch(IllegalArgumentException e) {
            log.warn("Invalid or unknown guest architecture type '{}' received from guest agent", arch);
        }
    }
    vm.setGuestOsCodename(assignStringValue(guestOsInfoStruct, VdsProperties.GUEST_OS_INFO_CODENAME));
    vm.setGuestOsDistribution(assignStringValue(guestOsInfoStruct, VdsProperties.GUEST_OS_INFO_DISTRIBUTION));
    vm.setGuestOsKernelVersion(assignStringValue(guestOsInfoStruct, VdsProperties.GUEST_OS_INFO_KERNEL));
    if(guestOsInfoStruct.containsKey(VdsProperties.GUEST_OS_INFO_TYPE)) {
        String osType = assignStringValue(guestOsInfoStruct, VdsProperties.GUEST_OS_INFO_TYPE);
        try {
            vm.setGuestOsType(osType);
        } catch(IllegalArgumentException e) {
            log.warn("Invalid or unknown guest os type '{}' received from guest agent", osType);
        }
    } else {
        // The OS-info struct is present but incomplete; worth flagging.
        log.warn("Guest OS type not reported by guest agent but expected.");
    }
    vm.setGuestOsVersion(assignStringValue(guestOsInfoStruct, VdsProperties.GUEST_OS_INFO_VERSION));
}
/**
 * Updates graphics runtime information from the {@code displayInfo} VDSM
 * structure, when present.
 *
 * @param vm VmDynamic to update
 * @param xmlRpcStruct data from VDSM
 * @return {@code true} when {@code displayInfo} exists, {@code false} otherwise
 */
private static boolean updateGraphicsInfo(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    Object displayInfo = xmlRpcStruct.get(VdsProperties.displayInfo);
    if (displayInfo == null) {
        return false;
    }
    for (Object entry : (Object[]) displayInfo) {
        Map<String, String> display = (Map<String, String>) entry;
        GraphicsType type = GraphicsType.fromString(display.get(VdsProperties.type));
        GraphicsInfo info = new GraphicsInfo();
        info.setIp(display.get(VdsProperties.ipAddress))
                .setPort(parseIntegerOrNull(display.get(VdsProperties.port)))
                .setTlsPort(parseIntegerOrNull(display.get(VdsProperties.tlsPort)));
        // A graphics device with neither a plain nor a TLS port is not recorded.
        if (info.getPort() != null || info.getTlsPort() != null) {
            vm.getGraphicsInfos().put(type, info);
        }
    }
    return true;
}
/**
 * Updates graphics runtime information according to vm.conf vdsm structure. It's used with legacy VDSMs that have
 * no notion about graphics device.
 *
 * @param vm - VmDynamic to update
 * @param xmlRpcStruct - data from VDSM
 */
private static void updateGraphicsInfoFromConf(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
    GraphicsType vmGraphicsType = parseGraphicsType(xmlRpcStruct);
    if (vmGraphicsType == null) {
        log.debug("graphics data missing in XML.");
        return;
    }
    GraphicsInfo graphicsInfo = new GraphicsInfo();
    if (xmlRpcStruct.containsKey(VdsProperties.display_port)) {
        try {
            graphicsInfo.setPort(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_port).toString()));
        } catch (NumberFormatException e) {
            // Fix: SLF4J uses "{}" placeholders; the previous "{0}"
            // (MessageFormat-style) placeholder was never substituted.
            log.error("vm display_port value illegal : {}", xmlRpcStruct.get(VdsProperties.display_port));
        }
    } else if (xmlRpcStruct.containsKey(VdsProperties.display)) {
        try {
            // Legacy field holds the VNC display number; convert to a TCP port.
            graphicsInfo
                    .setPort(VNC_START_PORT + Integer.parseInt(xmlRpcStruct.get(VdsProperties.display).toString()));
        } catch (NumberFormatException e) {
            log.error("vm display value illegal : {}", xmlRpcStruct.get(VdsProperties.display));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.display_secure_port)) {
        try {
            graphicsInfo
                    .setTlsPort(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_secure_port).toString()));
        } catch (NumberFormatException e) {
            log.error("vm display_secure_port value illegal : {}",
                    xmlRpcStruct.get(VdsProperties.display_secure_port));
        }
    }
    if (xmlRpcStruct.containsKey((VdsProperties.displayIp))) {
        graphicsInfo.setIp((String) xmlRpcStruct.get(VdsProperties.displayIp));
    }
    vm.getGraphicsInfos().put(vmGraphicsType, graphicsInfo);
}
/**
 * Retrieves the graphics type from the struct.
 *
 * @param xmlRpcStruct data from VDSM
 * @return the graphics type derived from the struct, or {@code null} when the
 *         display type is missing or unrecognized
 */
private static GraphicsType parseGraphicsType(Map<String, Object> xmlRpcStruct) {
    // A missing display type is detected with an explicit null check instead of
    // the previous pattern of triggering a NullPointerException and swallowing
    // it in an empty catch block.
    Object displayType = xmlRpcStruct.get(VdsProperties.displayType);
    if (displayType == null) {
        return null;
    }
    switch (displayType.toString()) {
    case VdsProperties.VNC:
        return GraphicsType.VNC;
    case VdsProperties.QXL:
        return GraphicsType.SPICE;
    default:
        return null;
    }
}
/**
 * Parses {@code s} as a decimal integer.
 *
 * @param s the string to parse; may be {@code null}
 * @return the parsed value, or {@code null} when {@code s} is not a valid integer
 */
private static Integer parseIntegerOrNull(String s) {
    try {
        return Integer.parseInt(s);
    } catch (NumberFormatException ignored) {
        // Integer.parseInt throws only NumberFormatException (also for null
        // input), so the previous broad catch of Exception was narrowed.
        return null;
    }
}
/**
 * Some properties were changed recently from String to Integer.
 * This method accepts either representation and returns the value as an int.
 *
 * @param vdsProperty an {@link Integer}, or a {@link String} holding a decimal number
 * @return the property's int value
 */
public static int parseIntVdsProperty(Object vdsProperty) {
    return vdsProperty instanceof Integer
            ? (Integer) vdsProperty
            : Integer.parseInt((String) vdsProperty);
}
/**
 * Parses the VM architecture reported by VDSM.
 *
 * @return the architecture, or {@code null} when the struct has no
 *         architecture entry; an unrecognized value still propagates
 *         {@link IllegalArgumentException} as before
 */
protected static ArchitectureType parseArchitecture(Map<String, Object> xmlRpcStruct) {
    // Explicit null check instead of catching the NullPointerException that
    // valueOf(null) used to throw — same observable result for all inputs.
    Object arch = xmlRpcStruct.get(VdsProperties.vm_arch);
    if (arch == null) {
        return null;
    }
    return ArchitectureType.valueOf((String) arch);
}
/**
 * Populates a {@link VmStatistics} in place from a VDSM stats struct:
 * elapsed time, per-NIC statistics, disk usage, CPU/memory usage, balloon
 * info, migration progress, jobs and NUMA runtime info.
 */
public static void updateVMStatisticsData(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
    if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
        vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
    }
    vm.setelapsed_time(assignDoubleValue(xmlRpcStruct, VdsProperties.elapsed_time));
    // Per-interface statistics: one VmNetworkInterface entry per reported NIC.
    if (xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK)) {
        Map networkStruct = (Map) xmlRpcStruct.get(VdsProperties.VM_NETWORK);
        vm.setInterfaceStatistics(new ArrayList<VmNetworkInterface>());
        for (Object tempNic : networkStruct.values()) {
            Map nic = (Map) tempNic;
            VmNetworkInterface stats = new VmNetworkInterface();
            vm.getInterfaceStatistics().add(stats);
            // Name and MAC are only taken when reported as strings.
            if (nic.containsKey(VdsProperties.VM_INTERFACE_NAME)) {
                stats.setName((String) ((nic.get(VdsProperties.VM_INTERFACE_NAME) instanceof String) ? nic
                        .get(VdsProperties.VM_INTERFACE_NAME) : null));
            }
            extractInterfaceStatistics(nic, stats);
            stats.setMacAddress((String) ((nic.get(VdsProperties.MAC_ADDR) instanceof String) ? nic
                    .get(VdsProperties.MAC_ADDR) : null));
        }
    }
    if (xmlRpcStruct.containsKey(VdsProperties.VM_DISKS_USAGE)) {
        initDisksUsage(xmlRpcStruct, vm);
    }
    vm.setcpu_sys(assignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
    vm.setcpu_user(assignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
    vm.setusage_mem_percent(assignIntValue(xmlRpcStruct, VdsProperties.vm_usage_mem_percent));
    vm.setVmBalloonInfo(getBalloonInfo(xmlRpcStruct));
    // Absent migration progress is normalized to 0.
    Integer migrationProgress = assignIntValue(xmlRpcStruct, VdsProperties.vm_migration_progress_percent);
    vm.setMigrationProgressPercent(migrationProgress != null ? migrationProgress : 0);
    vm.setVmJobs(getVmJobs(vm.getId(), xmlRpcStruct));
    if (xmlRpcStruct.containsKey(VdsProperties.VM_NUMA_NODES_RUNTIME_INFO)) {
        updateVmNumaNodesRuntimeInfo(vm, xmlRpcStruct);
    }
}
/**
 * Extracts the memory-balloon information from the stats struct. The balloon
 * device is considered enabled only when all four balloon properties are
 * reported (available from VDSM 3.3).
 */
private static VmBalloonInfo getBalloonInfo(Map<String, Object> xmlRpcStruct) {
    Map<String, Object> balloonInfo = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_balloonInfo);
    VmBalloonInfo result = new VmBalloonInfo();
    if (balloonInfo == null || balloonInfo.isEmpty()) {
        result.setBalloonDeviceEnabled(false);
        return result;
    }
    result.setCurrentMemory(assignLongValue(balloonInfo, VdsProperties.vm_balloon_cur));
    result.setBalloonMaxMemory(assignLongValue(balloonInfo, VdsProperties.vm_balloon_max));
    result.setBalloonTargetMemory(assignLongValue(balloonInfo, VdsProperties.vm_balloon_target));
    result.setBalloonMinMemory(assignLongValue(balloonInfo, VdsProperties.vm_balloon_min));
    if (balloonInfo.size() >= 4) {
        result.setBalloonDeviceEnabled(true);
    }
    return result;
}
/**
 * Builds the list of VM jobs reported in the struct.
 *
 * @return the jobs, or {@code null} when the struct has no jobs entry
 */
private static List<VmJob> getVmJobs(Guid vmId, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.vmJobs)) {
        return null;
    }
    Map<String, Object> rawJobs = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vmJobs);
    List<VmJob> result = new ArrayList<VmJob>();
    for (Object rawJob : rawJobs.values()) {
        result.add(buildVmJobData(vmId, (Map<String, Object>) rawJob));
    }
    return result;
}
/**
 * Builds a {@link VmJob} (or {@link VmBlockJob} for block jobs) from a single
 * job struct; an unknown job type maps to {@link VmJobType#UNKNOWN}.
 */
private static VmJob buildVmJobData(Guid vmId, Map<String, Object> xmlRpcStruct) {
    VmJobType jobType = VmJobType.getByName(assignStringValue(xmlRpcStruct, VdsProperties.vmJobType));
    if (jobType == null) {
        jobType = VmJobType.UNKNOWN;
    }
    VmJob job;
    if (jobType == VmJobType.BLOCK) {
        // Block jobs carry additional progress/bandwidth fields.
        VmBlockJob blockJob = new VmBlockJob();
        blockJob.setBlockJobType(VmBlockJobType.getByName(assignStringValue(xmlRpcStruct, VdsProperties.vmBlockJobType)));
        blockJob.setCursorCur(assignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorCur));
        blockJob.setCursorEnd(assignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorEnd));
        blockJob.setBandwidth(assignLongValue(xmlRpcStruct, VdsProperties.vmJobBandwidth));
        blockJob.setImageGroupId(new Guid(assignStringValue(xmlRpcStruct, VdsProperties.vmJobImageUUID)));
        job = blockJob;
    } else {
        job = new VmJob();
    }
    job.setVmId(vmId);
    job.setId(new Guid(assignStringValue(xmlRpcStruct, VdsProperties.vmJobId)));
    job.setJobState(VmJobState.NORMAL);
    job.setJobType(jobType);
    return job;
}
/**
 * Populates the dynamic (capabilities/stats) data of a {@link VDS} host
 * entity from a VDSM struct: CPU topology, memory, package versions,
 * supported features, HBAs, SELinux mode and snapshot/merge capabilities.
 */
public static void updateVDSDynamicData(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.setSupportedClusterLevels(assignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_cluster_levels));
    updateNetworkData(vds, xmlRpcStruct);
    updateNumaNodesData(vds, xmlRpcStruct);
    vds.setCpuThreads(assignIntValue(xmlRpcStruct, VdsProperties.cpuThreads));
    vds.setCpuCores(assignIntValue(xmlRpcStruct, VdsProperties.cpu_cores));
    vds.setCpuSockets(assignIntValue(xmlRpcStruct, VdsProperties.cpu_sockets));
    vds.setCpuModel(assignStringValue(xmlRpcStruct, VdsProperties.cpu_model));
    vds.setOnlineCpus(assignStringValue(xmlRpcStruct, VdsProperties.online_cpus));
    vds.setCpuSpeedMh(assignDoubleValue(xmlRpcStruct, VdsProperties.cpu_speed_mh));
    vds.setPhysicalMemMb(assignIntValue(xmlRpcStruct, VdsProperties.physical_mem_mb));
    vds.setKvmEnabled(assignBoolValue(xmlRpcStruct, VdsProperties.kvm_enabled));
    vds.setReservedMem(assignIntValue(xmlRpcStruct, VdsProperties.reservedMem));
    // Missing guest overhead defaults to 0.
    Integer guestOverhead = assignIntValue(xmlRpcStruct, VdsProperties.guestOverhead);
    vds.setGuestOverhead(guestOverhead != null ? guestOverhead : 0);
    vds.setCpuFlags(assignStringValue(xmlRpcStruct, VdsProperties.cpu_flags));
    updatePackagesVersions(vds, xmlRpcStruct);
    vds.setSupportedEngines(assignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_engines));
    vds.setIScsiInitiatorName(assignStringValue(xmlRpcStruct, VdsProperties.iSCSIInitiatorName));
    vds.setSupportedEmulatedMachines(assignStringValueFromArray(xmlRpcStruct, VdsProperties.emulatedMachines));
    setRngSupportedSourcesToVds(vds, xmlRpcStruct);
    String hooksStr = ""; // default value if hooks is not in the xml rpc struct
    if (xmlRpcStruct.containsKey(VdsProperties.hooks)) {
        hooksStr = xmlRpcStruct.get(VdsProperties.hooks).toString();
    }
    vds.setHooksStr(hooksStr);
    // parse out the HBAs available in this host
    // NOTE(review): this assumes HBAInventory is always present — a struct
    // without that key would throw an NPE on the entrySet() call below;
    // confirm VDSM always reports it.
    Map<String, List<Map<String, String>>> hbas = new HashMap<>();
    for (Map.Entry<String, Object[]> el: ((Map<String, Object[]>)xmlRpcStruct.get(VdsProperties.HBAInventory)).entrySet()) {
        List<Map<String, String>> devicesList = new ArrayList<Map<String, String>>();
        for (Object device: el.getValue()) {
            devicesList.add((Map<String, String>)device);
        }
        hbas.put(el.getKey(), devicesList);
    }
    vds.setHBAs(hbas);
    vds.setBootTime(assignLongValue(xmlRpcStruct, VdsProperties.bootTime));
    vds.setKdumpStatus(KdumpStatus.valueOfNumber(assignIntValue(xmlRpcStruct, VdsProperties.KDUMP_STATUS)));
    vds.setHostDevicePassthroughEnabled(assignBoolValue(xmlRpcStruct, VdsProperties.HOST_DEVICE_PASSTHROUGH));
    Map<String, Object> selinux = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.selinux);
    if (selinux != null) {
        vds.setSELinuxEnforceMode(assignIntValue(selinux, VdsProperties.selinux_mode));
    } else {
        vds.setSELinuxEnforceMode(null);
    }
    if (xmlRpcStruct.containsKey(VdsProperties.liveSnapshotSupport)) {
        vds.setLiveSnapshotSupport(assignBoolValue(xmlRpcStruct, VdsProperties.liveSnapshotSupport));
    } else {
        vds.setLiveSnapshotSupport(true); // for backward compatibility's sake
    }
    if (xmlRpcStruct.containsKey(VdsProperties.liveMergeSupport)) {
        vds.setLiveMergeSupport(assignBoolValue(xmlRpcStruct, VdsProperties.liveMergeSupport));
    } else {
        vds.setLiveMergeSupport(false);
    }
    updateAdditionalFeatures(vds, xmlRpcStruct);
}
/**
 * Adds every additional feature reported by the host to the VDS entity.
 */
private static void updateAdditionalFeatures(VDS vds, Map<String, Object> xmlRpcStruct) {
    String[] features = assignStringArrayValue(xmlRpcStruct, VdsProperties.ADDITIONAL_FEATURES);
    if (features == null) {
        return;
    }
    for (String feature : features) {
        vds.getAdditionalFeatures().add(feature);
    }
}
/**
 * Replaces the VDS's supported RNG sources with the ones reported in the
 * struct (cleared first; left empty when nothing is reported).
 */
private static void setRngSupportedSourcesToVds(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.getSupportedRngSources().clear();
    String sourcesCsv = assignStringValueFromArray(xmlRpcStruct, VdsProperties.rngSources);
    if (sourcesCsv == null) {
        return;
    }
    // NOTE(review): toUpperCase() is default-locale-sensitive; presumably the
    // source names are plain ASCII — confirm.
    vds.getSupportedRngSources().addAll(VmRngDevice.csvToSourcesSet(sourcesCsv.toUpperCase()));
}
/**
 * Raises a VDS_TIME_DRIFT_ALERT audit event when the host clock differs from
 * the engine clock by more than the configured number of seconds. No-op when
 * the drift check is disabled by configuration.
 */
public static void checkTimeDrift(VDS vds, Map<String, Object> xmlRpcStruct) {
    Boolean driftCheckEnabled = Config.getValue(ConfigValues.EnableHostTimeDrift);
    if (!driftCheckEnabled) {
        return;
    }
    Date hostDate = assignDatetimeValue(xmlRpcStruct, VdsProperties.hostDatetime);
    if (hostDate == null) {
        log.error("Time Drift validation: failed to get Host or Engine time.");
        return;
    }
    Integer maxTimeDriftAllowed = Config.getValue(ConfigValues.HostTimeDriftInSec);
    Long timeDrift =
            TimeUnit.MILLISECONDS.toSeconds(Math.abs(hostDate.getTime() - System.currentTimeMillis()));
    if (timeDrift > maxTimeDriftAllowed) {
        AuditLogableBase logable = new AuditLogableBase(vds.getId());
        logable.addCustomValue("Actual", timeDrift.toString());
        logable.addCustomValue("Max", maxTimeDriftAllowed.toString());
        auditLogDirector.log(logable, AuditLogType.VDS_TIME_DRIFT_ALERT);
    }
}
/**
 * Serializes the per-disk usage array reported by VDSM into the statistics
 * entity as unformatted JSON. No-op when the key is absent.
 */
private static void initDisksUsage(Map<String, Object> vmStruct, VmStatistics vm) {
    Object[] disksUsage = (Object[]) vmStruct.get(VdsProperties.VM_DISKS_USAGE);
    if (disksUsage == null) {
        return;
    }
    ArrayList<Object> usageList = new ArrayList<Object>(Arrays.asList(disksUsage));
    vm.setDisksUsage(SerializationFactory.getSerializer().serializeUnformattedJson(usageList));
}
/**
 * Fills in the host's software/package version fields. Supports both the
 * legacy flat "packages" array and the newer "packages2" dictionary (VDSM
 * 2.3+); only one of the two formats is consumed.
 */
private static void updatePackagesVersions(VDS vds, Map<String, Object> xmlRpcStruct) {
    vds.setVersionName(assignStringValue(xmlRpcStruct, VdsProperties.version_name));
    vds.setSoftwareVersion(assignStringValue(xmlRpcStruct, VdsProperties.software_version));
    vds.setBuildName(assignStringValue(xmlRpcStruct, VdsProperties.build_name));
    if (xmlRpcStruct.containsKey(VdsProperties.host_os)) {
        vds.setHostOs(getPackageVersionFormated((Map<String, Object>) xmlRpcStruct.get(VdsProperties.host_os),
                true));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.packages)) {
        // packages is an array of xmlRpcStruct (that each is a name, ver,
        // release.. of a package)
        for (Object hostPackageMap : (Object[]) xmlRpcStruct.get(VdsProperties.packages)) {
            Map<String, Object> hostPackage = (Map<String, Object>) hostPackageMap;
            String packageName = assignStringValue(hostPackage, VdsProperties.package_name);
            if (VdsProperties.kvmPackageName.equals(packageName)) {
                vds.setKvmVersion(getPackageVersionFormated(hostPackage, false));
            } else if (VdsProperties.spicePackageName.equals(packageName)) {
                vds.setSpiceVersion(getPackageVersionFormated(hostPackage, false));
            } else if (VdsProperties.kernelPackageName.equals(packageName)) {
                vds.setKernelVersion(getPackageVersionFormated(hostPackage, false));
            }
        }
    } else if (xmlRpcStruct.containsKey(VdsProperties.packages2)) {
        // packages2: dictionary keyed by canonical package name (VDSM 2.3+).
        Map<String, Object> packages = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.packages2);
        if (packages.containsKey(VdsProperties.vdsmPackageName)) {
            Map<String, Object> vdsm = (Map<String, Object>) packages.get(VdsProperties.vdsmPackageName);
            vds.setVersion(getPackageRpmVersion("vdsm", vdsm));
        }
        if (packages.containsKey(VdsProperties.qemuKvmPackageName)) {
            Map<String, Object> kvm = (Map<String, Object>) packages.get(VdsProperties.qemuKvmPackageName);
            vds.setKvmVersion(getPackageVersionFormated2(kvm));
        }
        if (packages.containsKey(VdsProperties.libvirtPackageName)) {
            Map<String, Object> libvirt = (Map<String, Object>) packages.get(VdsProperties.libvirtPackageName);
            vds.setLibvirtVersion(getPackageRpmVersion("libvirt", libvirt));
        }
        if (packages.containsKey(VdsProperties.librbdPackageName)) {
            Map<String, Object> librbd1 = (Map<String, Object>) packages.get(VdsProperties.librbdPackageName);
            vds.setLibrbdVersion(getPackageRpmVersion(VdsProperties.librbdPackageName, librbd1));
        }
        if (packages.containsKey(VdsProperties.glusterfsCliPackageName)) {
            Map<String, Object> glusterfsCli = (Map<String, Object>) packages.get(VdsProperties.glusterfsCliPackageName);
            vds.setGlusterfsCliVersion(getPackageRpmVersion(VdsProperties.glusterfsCliPackageName, glusterfsCli));
        }
        if (packages.containsKey(VdsProperties.spiceServerPackageName)) {
            Map<String, Object> spice = (Map<String, Object>) packages.get(VdsProperties.spiceServerPackageName);
            vds.setSpiceVersion(getPackageVersionFormated2(spice));
        }
        if (packages.containsKey(VdsProperties.kernelPackageName)) {
            Map<String, Object> kernel = (Map<String, Object>) packages.get(VdsProperties.kernelPackageName);
            vds.setKernelVersion(getPackageVersionFormated2(kernel));
        }
        if (packages.containsKey(VdsProperties.GLUSTER_PACKAGE_NAME)) {
            Map<String, Object> gluster = (Map<String, Object>) packages.get(VdsProperties.GLUSTER_PACKAGE_NAME);
            vds.setGlusterVersion(getPackageRpmVersion("glusterfs", gluster));
        }
    }
}
// Version 2 of the package-version formatter:
// since VDSM 2.3 we get a dictionary and not a flat list.
// From now on the package names (of spice, kernel, qemu and libvirt) are the same for both VDSM and ENGINE.
// (VDSM used to report the rpm package names, so in RHEL6 when they changed it broke our interface.)
private static String getPackageVersionFormated2(Map<String, Object> hostPackage) {
String packageVersion = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
.get(VdsProperties.package_version) : null;
String packageRelease = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
.get(VdsProperties.package_release) : null;
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageVersion)) {
sb.append(packageVersion);
}
if (!StringUtils.isEmpty(packageRelease)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageRelease));
} else {
sb.append(packageRelease);
}
}
return sb.toString();
}
private static RpmVersion getPackageRpmVersion(String packageName, Map<String, Object> hostPackage) {
String packageVersion = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
.get(VdsProperties.package_version) : null;
String packageRelease = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
.get(VdsProperties.package_release) : null;
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageName)) {
sb.append(packageName);
}
boolean hasPackageVersion = StringUtils.isEmpty(packageVersion);
boolean hasPackageRelease = StringUtils.isEmpty(packageRelease);
if (!hasPackageVersion || !hasPackageRelease) {
sb.append("-");
}
if (!hasPackageVersion) {
sb.append(packageVersion);
}
if (!hasPackageRelease) {
if (sb.length() > 0) {
sb.append(String.format("-%1$s", packageRelease));
} else {
sb.append(packageRelease);
}
}
return new RpmVersion(sb.toString());
}
    /**
     * Copies the hardware (DMI-style) information reported by the host into the
     * VDS entity. Missing or non-string values are stored as null (see
     * {@code assignStringValue}).
     *
     * @param hwInfo the hardware-info map reported by the host
     * @param vds the host entity to update
     */
    public static void updateHardwareSystemInformation(Map<String, Object> hwInfo, VDS vds){
        vds.setHardwareManufacturer(assignStringValue(hwInfo, VdsProperties.hwManufacturer));
        vds.setHardwareProductName(assignStringValue(hwInfo, VdsProperties.hwProductName));
        vds.setHardwareVersion(assignStringValue(hwInfo, VdsProperties.hwVersion));
        vds.setHardwareSerialNumber(assignStringValue(hwInfo, VdsProperties.hwSerialNumber));
        vds.setHardwareUUID(assignStringValue(hwInfo, VdsProperties.hwUUID));
        vds.setHardwareFamily(assignStringValue(hwInfo, VdsProperties.hwFamily));
    }
private static String getPackageVersionFormated(Map<String, Object> hostPackage, boolean getName) {
String packageName = assignStringValue(hostPackage, VdsProperties.package_name);
String packageVersion = assignStringValue(hostPackage, VdsProperties.package_version);
String packageRelease = assignStringValue(hostPackage, VdsProperties.package_release);
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageName) && getName) {
sb.append(packageName);
}
if (!StringUtils.isEmpty(packageVersion)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageVersion));
} else {
sb.append(packageVersion);
}
}
if (!StringUtils.isEmpty(packageRelease)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageRelease));
} else {
sb.append(packageRelease);
}
}
return sb.toString();
}
    /**
     * Populates the host's runtime statistics (memory, per-interface network
     * usage, CPU, KSM, swap, VM/migration counts, storage-domain state, local
     * disk usage, hosted-engine HA state, NUMA and V2V job data) from the
     * stats struct returned by the host.
     *
     * @param vds the host entity to update
     * @param xmlRpcStruct the statistics struct reported by the host
     */
    public static void updateVDSStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
        vds.setUsageMemPercent(assignIntValue(xmlRpcStruct, VdsProperties.mem_usage));
        // Per-interface statistics; host network usage is the max usage over the
        // interfaces that are neither VLAN devices nor bond slaves.
        Map<String, Object> interfaces = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK);
        if (interfaces != null) {
            int networkUsage = 0;
            Map<String, VdsNetworkInterface> nicsByName = Entities.entitiesByName(vds.getInterfaces());
            NetworkStatisticsBuilder statsBuilder = new NetworkStatisticsBuilder(vds.getVdsGroupCompatibilityVersion());
            for (Entry<String, Object> entry : interfaces.entrySet()) {
                if (nicsByName.containsKey(entry.getKey())) {
                    VdsNetworkInterface existingIface = nicsByName.get(entry.getKey());
                    existingIface.setVdsId(vds.getId());
                    Map<String, Object> dict = (Map<String, Object>) entry.getValue();
                    // Parse the reported numbers into a scratch interface, then let
                    // the stats builder merge them into the existing one.
                    VdsNetworkInterface reportedIface = new VdsNetworkInterface();
                    extractInterfaceStatistics(dict, reportedIface);
                    statsBuilder.updateExistingInterfaceStatistics(existingIface, reportedIface);
                    existingIface.getStatistics()
                            .setStatus(assignInterfaceStatusValue(dict, VdsProperties.iface_status));
                    if (!NetworkUtils.isVlan(existingIface) && !existingIface.isPartOfBond()) {
                        Double ifaceUsage = computeInterfaceUsage(existingIface, statsBuilder.isTotalStatsReported());
                        if (ifaceUsage != null) {
                            networkUsage = (int) Math.max(networkUsage, ifaceUsage);
                        }
                    }
                }
            }
            vds.setUsageNetworkPercent(networkUsage);
        }
        vds.setCpuSys(assignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
        vds.setCpuUser(assignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
        if (vds.getCpuSys() != null && vds.getCpuUser() != null) {
            vds.setUsageCpuPercent((int) (vds.getCpuSys() + vds.getCpuUser()));
        }
        // CPU load reported by VDSM is in uptime-style format, i.e. normalized
        // to unity, so that say an 8% load is reported as 0.08
        Double d = assignDoubleValue(xmlRpcStruct, VdsProperties.cpu_load);
        d = (d != null) ? d : 0;
        vds.setCpuLoad(d.doubleValue() * 100.0);
        vds.setCpuIdle(assignDoubleValue(xmlRpcStruct, VdsProperties.cpu_idle));
        vds.setMemAvailable(assignLongValue(xmlRpcStruct, VdsProperties.mem_available));
        vds.setMemFree(assignLongValue(xmlRpcStruct, VdsProperties.memFree));
        vds.setMemShared(assignLongValue(xmlRpcStruct, VdsProperties.mem_shared));
        vds.setSwapFree(assignLongValue(xmlRpcStruct, VdsProperties.swap_free));
        vds.setSwapTotal(assignLongValue(xmlRpcStruct, VdsProperties.swap_total));
        vds.setKsmCpuPercent(assignIntValue(xmlRpcStruct, VdsProperties.ksm_cpu_percent));
        vds.setKsmPages(assignLongValue(xmlRpcStruct, VdsProperties.ksm_pages));
        vds.setKsmState(assignBoolValue(xmlRpcStruct, VdsProperties.ksm_state));
        // dynamic data got from GetVdsStats
        if (xmlRpcStruct.containsKey(VdsProperties.transparent_huge_pages_state)) {
            vds.setTransparentHugePagesState(EnumUtils.valueOf(VdsTransparentHugePagesState.class, xmlRpcStruct
                    .get(VdsProperties.transparent_huge_pages_state).toString(), true));
        }
        if (xmlRpcStruct.containsKey(VdsProperties.anonymous_transparent_huge_pages)) {
            vds.setAnonymousHugePages(assignIntValue(xmlRpcStruct, VdsProperties.anonymous_transparent_huge_pages));
        }
        vds.setNetConfigDirty(assignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
        vds.setImagesLastCheck(assignDoubleValue(xmlRpcStruct, VdsProperties.images_last_check));
        vds.setImagesLastDelay(assignDoubleValue(xmlRpcStruct, VdsProperties.images_last_delay));
        Integer vm_count = assignIntValue(xmlRpcStruct, VdsProperties.vm_count);
        vds.setVmCount(vm_count == null ? 0 : vm_count);
        vds.setVmActive(assignIntValue(xmlRpcStruct, VdsProperties.vm_active));
        vds.setVmMigrating(assignIntValue(xmlRpcStruct, VdsProperties.vm_migrating));
        // -1 marks "not reported separately" for hosts that predate the split
        // incoming/outgoing migration counters.
        Integer inOutMigrations;
        inOutMigrations = assignIntValue(xmlRpcStruct, VdsProperties.INCOMING_VM_MIGRATIONS);
        if (inOutMigrations != null) {
            vds.setIncomingMigrations(inOutMigrations);
        } else {
            // TODO remove in 4.x when all hosts will send in/out migrations separately
            vds.setIncomingMigrations(-1);
        }
        inOutMigrations = assignIntValue(xmlRpcStruct, VdsProperties.OUTGOING_VM_MIGRATIONS);
        if (inOutMigrations != null) {
            vds.setOutgoingMigrations(inOutMigrations);
        } else {
            // TODO remove in 4.x when all hosts will send in/out migrations separately
            vds.setOutgoingMigrations(-1);
        }
        updateVDSDomainData(vds, xmlRpcStruct);
        updateLocalDisksUsage(vds, xmlRpcStruct);
        // hosted engine
        Integer haScore = null;
        Boolean haIsConfigured = null;
        Boolean haIsActive = null;
        Boolean haGlobalMaint = null;
        Boolean haLocalMaint = null;
        if (xmlRpcStruct.containsKey(VdsProperties.ha_stats)) {
            // Newer hosts report a detailed ha_stats dictionary.
            Map<String, Object> haStats = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.ha_stats);
            if (haStats != null) {
                haScore = assignIntValue(haStats, VdsProperties.ha_stats_score);
                haIsConfigured = assignBoolValue(haStats, VdsProperties.ha_stats_is_configured);
                haIsActive = assignBoolValue(haStats, VdsProperties.ha_stats_is_active);
                haGlobalMaint = assignBoolValue(haStats, VdsProperties.ha_stats_global_maintenance);
                haLocalMaint = assignBoolValue(haStats, VdsProperties.ha_stats_local_maintenance);
            }
        } else {
            haScore = assignIntValue(xmlRpcStruct, VdsProperties.ha_score);
            // prior to 3.4, haScore was returned if ha was installed; assume active if > 0
            if (haScore != null) {
                haIsConfigured = true;
                haIsActive = (haScore > 0);
            }
        }
        vds.setHighlyAvailableScore(haScore != null ? haScore : 0);
        vds.setHighlyAvailableIsConfigured(haIsConfigured != null ? haIsConfigured : false);
        vds.setHighlyAvailableIsActive(haIsActive != null ? haIsActive : false);
        vds.setHighlyAvailableGlobalMaintenance(haGlobalMaint != null ? haGlobalMaint : false);
        vds.setHighlyAvailableLocalMaintenance(haLocalMaint != null ? haLocalMaint : false);
        vds.setBootTime(assignLongValue(xmlRpcStruct, VdsProperties.bootTime));
        updateNumaStatisticsData(vds, xmlRpcStruct);
        updateV2VJobs(vds, xmlRpcStruct);
    }
    /**
     * Copies per-interface traffic statistics (rates, drop rates, byte totals,
     * sample time and link speed) from the reported dict onto the interface.
     * Rates default to 0.0 when absent (null-protected); totals and sample
     * time may be null when not reported.
     */
    private static void extractInterfaceStatistics(Map<String, Object> dict, NetworkInterface<?> iface) {
        NetworkStatistics stats = iface.getStatistics();
        stats.setReceiveRate(assignDoubleValueWithNullProtection(dict, VdsProperties.rx_rate));
        stats.setReceiveDropRate(assignDoubleValueWithNullProtection(dict, VdsProperties.rx_dropped));
        stats.setReceivedBytes(assignLongValue(dict, VdsProperties.rx_total));
        stats.setTransmitRate(assignDoubleValueWithNullProtection(dict, VdsProperties.tx_rate));
        stats.setTransmitDropRate(assignDoubleValueWithNullProtection(dict, VdsProperties.tx_dropped));
        stats.setTransmittedBytes(assignLongValue(dict, VdsProperties.tx_total));
        stats.setSampleTime(assignDoubleValue(dict, VdsProperties.sample_time));
        iface.setSpeed(assignIntValue(dict, VdsProperties.INTERFACE_SPEED));
    }
private static Double computeInterfaceUsage(VdsNetworkInterface iface, boolean totalStatsReported) {
Double receiveRate = iface.getStatistics().getReceiveRate();
Double transmitRate = iface.getStatistics().getTransmitRate();
/**
* TODO: only needed if rate reported by vdsm (in which case can't be null) - remove in 4.0 and turn
* NetworkStatisticsBuilder.truncatePercentage() private
*/
if (!totalStatsReported) {
receiveRate = NetworkStatisticsBuilder.truncatePercentage(receiveRate);
transmitRate = NetworkStatisticsBuilder.truncatePercentage(transmitRate);
}
if (receiveRate == null) {
return transmitRate;
} else if (transmitRate == null) {
return receiveRate;
} else {
return Math.max(receiveRate, transmitRate);
}
}
public static void updateNumaStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
List<VdsNumaNode> vdsNumaNodes = new ArrayList<>();
if (vds.getNumaNodeList() != null && !vds.getNumaNodeList().isEmpty()) {
vdsNumaNodes.addAll(vds.getNumaNodeList());
}
List<CpuStatistics> cpuStatsData = new ArrayList<>();
if (xmlRpcStruct.containsKey(VdsProperties.CPU_STATS)) {
Map<String, Map<String, Object>> cpuStats = (Map<String, Map<String, Object>>)
xmlRpcStruct.get(VdsProperties.CPU_STATS);
Map<Integer, List<CpuStatistics>> numaNodeCpuStats = new HashMap<>();
for (Map.Entry<String, Map<String, Object>> item : cpuStats.entrySet()) {
CpuStatistics data = buildVdsCpuStatistics(item);
cpuStatsData.add(data);
int numaNodeIndex = assignIntValue(item.getValue(), VdsProperties.NUMA_NODE_INDEX);
if (!numaNodeCpuStats.containsKey(numaNodeIndex)) {
numaNodeCpuStats.put(numaNodeIndex, new ArrayList<CpuStatistics>());
}
numaNodeCpuStats.get(numaNodeIndex).add(data);
}
DecimalFormat percentageFormatter = new DecimalFormat("
for (Map.Entry<Integer, List<CpuStatistics>> item : numaNodeCpuStats.entrySet()) {
VdsNumaNode nodeWithStatistics = buildVdsNumaNodeStatistics(percentageFormatter, item);
if (vdsNumaNodes.isEmpty()) {
vdsNumaNodes.add(nodeWithStatistics);
} else {
boolean foundNumaNode = false;
// append the statistics to the correct numaNode (search by its Index.)
for (VdsNumaNode currNumaNode : vdsNumaNodes) {
if (currNumaNode.getIndex() == nodeWithStatistics.getIndex()) {
currNumaNode.setNumaNodeStatistics(nodeWithStatistics.getNumaNodeStatistics());
foundNumaNode = true;
break;
}
}
// append new numaNode (contains only statistics) if not found existing
if (!foundNumaNode) {
vdsNumaNodes.add(nodeWithStatistics);
}
}
}
}
if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODE_FREE_MEM_STAT)) {
Map<String, Map<String, Object>> memStats = (Map<String, Map<String, Object>>)
xmlRpcStruct.get(VdsProperties.NUMA_NODE_FREE_MEM_STAT);
for (Map.Entry<String, Map<String, Object>> item : memStats.entrySet()) {
VdsNumaNode node = NumaUtils.getVdsNumaNodeByIndex(vdsNumaNodes, Integer.valueOf(item.getKey()));
if (node != null && node.getNumaNodeStatistics() != null) {
node.getNumaNodeStatistics().setMemFree(assignLongValue(item.getValue(),
VdsProperties.NUMA_NODE_FREE_MEM));
node.getNumaNodeStatistics().setMemUsagePercent(assignIntValue(item.getValue(),
VdsProperties.NUMA_NODE_MEM_PERCENT));
}
}
}
vds.getNumaNodeList().clear();
vds.getNumaNodeList().addAll(vdsNumaNodes);
vds.getStatisticsData().getCpuCoreStatistics().clear();
vds.getStatisticsData().getCpuCoreStatistics().addAll(cpuStatsData);
}
private static VdsNumaNode buildVdsNumaNodeStatistics(DecimalFormat percentageFormatter,
Map.Entry<Integer, List<CpuStatistics>> item) {
VdsNumaNode node = new VdsNumaNode();
NumaNodeStatistics nodeStat = new NumaNodeStatistics();
double nodeCpuUser = 0.0;
double nodeCpuSys = 0.0;
double nodeCpuIdle = 0.0;
for (CpuStatistics cpuStat : item.getValue()) {
nodeCpuUser += cpuStat.getCpuUser();
nodeCpuSys += cpuStat.getCpuSys();
nodeCpuIdle += cpuStat.getCpuIdle();
}
nodeStat.setCpuUser(Double.valueOf(percentageFormatter.format(nodeCpuUser / item.getValue().size())));
nodeStat.setCpuSys(Double.valueOf(percentageFormatter.format(nodeCpuSys / item.getValue().size())));
nodeStat.setCpuIdle(Double.valueOf(percentageFormatter.format(nodeCpuIdle / item.getValue().size())));
nodeStat.setCpuUsagePercent((int) (nodeStat.getCpuSys() + nodeStat.getCpuUser()));
node.setIndex(item.getKey());
node.setNumaNodeStatistics(nodeStat);
return node;
}
private static CpuStatistics buildVdsCpuStatistics(Map.Entry<String, Map<String, Object>> item) {
CpuStatistics data = new CpuStatistics();
data.setCpuId(Integer.valueOf(item.getKey()));
data.setCpuUser(assignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_USER));
data.setCpuSys(assignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_SYS));
data.setCpuIdle(assignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_IDLE));
data.setCpuUsagePercent((int) (data.getCpuSys() + data.getCpuUser()));
return data;
}
/**
* Update {@link VDS#setLocalDisksUsage(Map)} with map of paths usage extracted from the returned returned value. The
* usage is reported in MB.
*
* @param vds
* The VDS object to update.
* @param xmlRpcStruct
* The XML/RPC to extract the usage from.
*/
protected static void updateLocalDisksUsage(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.DISK_STATS)) {
Map<String, Object> diskStatsStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.DISK_STATS);
Map<String, Long> diskStats = new HashMap<String, Long>();
vds.setLocalDisksUsage(diskStats);
for (Entry<String, Object> entry : diskStatsStruct.entrySet()) {
Map<String, Object> pathStatsStruct = (Map<String, Object>) entry.getValue();
diskStats.put(entry.getKey(), assignLongValue(pathStatsStruct, VdsProperties.DISK_STATS_FREE));
}
}
}
    /**
     * Parses the per-storage-domain monitoring results reported by the host
     * into {@link VDSDomainsData} entries on the VDS. A malformed entry is
     * logged and skipped so one bad domain does not drop the rest.
     */
    private static void updateVDSDomainData(VDS vds, Map<String, Object> xmlRpcStruct) {
        if (xmlRpcStruct.containsKey(VdsProperties.domains)) {
            Map<String, Object> domains = (Map<String, Object>)
                    xmlRpcStruct.get(VdsProperties.domains);
            ArrayList<VDSDomainsData> domainsData = new ArrayList<VDSDomainsData>();
            for (Map.Entry<String, ?> value : domains.entrySet()) {
                try {
                    VDSDomainsData data = new VDSDomainsData();
                    // the map key is the storage domain's GUID
                    data.setDomainId(new Guid(value.getKey().toString()));
                    Map<String, Object> internalValue = (Map<String, Object>) value.getValue();
                    double lastCheck = 0;
                    data.setCode((Integer) (internalValue).get(VdsProperties.code));
                    // lastCheck and delay are reported as decimal strings; default to 0
                    if (internalValue.containsKey(VdsProperties.lastCheck)) {
                        lastCheck = Double.parseDouble((String) internalValue.get(VdsProperties.lastCheck));
                    }
                    data.setLastCheck(lastCheck);
                    double delay = 0;
                    if (internalValue.containsKey(VdsProperties.delay)) {
                        delay = Double.parseDouble((String) internalValue.get(VdsProperties.delay));
                    }
                    data.setDelay(delay);
                    // "actual" defaults to true when the host does not report it
                    Boolean actual = Boolean.TRUE;
                    if (internalValue.containsKey(VdsProperties.actual)) {
                        actual = (Boolean)internalValue.get(VdsProperties.actual);
                    }
                    data.setActual(actual);
                    domainsData.add(data);
                } catch (Exception e) {
                    log.error("failed building domains: {}", e.getMessage());
                    log.debug("Exception", e);
                }
            }
            vds.setDomains(domainsData);
        }
    }
private static InterfaceStatus assignInterfaceStatusValue(Map<String, Object> input, String name) {
InterfaceStatus ifaceStatus = InterfaceStatus.NONE;
if (input.containsKey(name)) {
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
if (!StringUtils.isEmpty(stringValue)) {
if (stringValue.toLowerCase().trim().equals("up")) {
ifaceStatus = InterfaceStatus.UP;
} else {
ifaceStatus = InterfaceStatus.DOWN;
}
}
}
return ifaceStatus;
}
private static Double assignDoubleValue(Map<String, Object> input, String name) {
Object value = input.get(name);
if (value instanceof Double) {
return (Double) value;
} else if (value instanceof String) {
return Double.parseDouble((String) value);
}
return null;
}
/**
* Do the same logic as assignDoubleValue does, but instead, in case of null we return 0.
* @param input - the Input xml
* @param name - The name of the field we want to cast it to double.
* @return - the double value.
*/
private static Double assignDoubleValueWithNullProtection(Map<String, Object> input, String name) {
Double doubleValue = assignDoubleValue(input, name);
return (doubleValue == null ? Double.valueOf(0.0) : doubleValue);
}
private static Integer assignIntValue(Map input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Integer) {
return (Integer) input.get(name);
}
String stringValue = (String) input.get(name);
if (!StringUtils.isEmpty(stringValue)) { // in case the input
// is decimal and we
// need int.
stringValue = stringValue.split("[.]", -1)[0];
}
try {
int intValue = Integer.parseInt(stringValue);
return intValue;
} catch (NumberFormatException nfe) {
log.error("Failed to parse '{}' value '{}' to integer: {}", name, stringValue, nfe.getMessage());
}
}
return null;
}
private static Long assignLongValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Long || input.get(name) instanceof Integer) {
return Long.parseLong(input.get(name).toString());
}
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
if (!StringUtils.isEmpty(stringValue)) { // in case the input
// is decimal and we
// need int.
stringValue = stringValue.split("[.]", -1)[0];
}
try {
return Long.parseLong(stringValue);
} catch (NumberFormatException e) {
log.error("Failed to parse '{}' value '{}' to long: {}", name, stringValue, e.getMessage());
}
}
return null;
}
private static String assignStringValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
return (String) ((input.get(name) instanceof String) ? input.get(name) : null);
}
return null;
}
private static String[] assignStringArrayValue(Map<String, Object> input, String name) {
String[] array = null;
if (input.containsKey(name)) {
array = (String[]) ((input.get(name) instanceof String[]) ? input.get(name) : null);
if (array == null) {
Object[] arr2 = (Object[]) ((input.get(name) instanceof Object[]) ? input.get(name) : null);
if (arr2 != null) {
array = new String[arr2.length];
for (int i = 0; i < arr2.length; i++)
array[i] = arr2[i].toString();
}
}
}
return array;
}
private static String assignStringValueFromArray(Map<String, Object> input, String name) {
String[] arr = assignStringArrayValue(input, name);
if (arr != null) {
return StringUtils.join(arr, ',');
}
return null;
}
private static Date assignDateTImeFromEpoch(Map<String, Object> input, String name) {
Date retval = null;
try {
if (input.containsKey(name)) {
Double secsSinceEpoch = (Double) input.get(name);
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(secsSinceEpoch.longValue());
retval = calendar.getTime();
}
} catch (RuntimeException ex) {
log.warn("VdsBroker::assignDateTImeFromEpoch - failed to convert field '{}' to dateTime: {}",
name, ex.getMessage());
log.debug("Exception", ex);
retval = null;
}
return retval;
}
private static Date assignDatetimeValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Date) {
return (Date) input.get(name);
}
DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
try {
String dateStr = input.get(name).toString().replaceFirst("T", " ").trim();
return formatter.parse(dateStr);
} catch (ParseException e) {
e.printStackTrace();
}
}
return null;
}
private static Boolean assignBoolValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Boolean) {
return (Boolean) input.get(name);
}
return Boolean.parseBoolean(input.get(name).toString());
}
return Boolean.FALSE;
}
    /**
     * Builds the VM's dynamic disk data (I/O rates, latencies, actual size)
     * from the reported vm_disks struct. Disks without an image group ID are
     * skipped. disk_actual_size is reported in 512-byte blocks (hence the
     * multiplication), disk_true_size in bytes; latencies are divided by
     * NANO_SECONDS (presumably ns -> s — confirm the constant's value).
     */
    private static void initDisks(Map<String, Object> vmStruct, VmDynamic vm) {
        Map<String, Object> disks = (Map<String, Object>) vmStruct.get(VdsProperties.vm_disks);
        ArrayList<DiskImageDynamic> disksData = new ArrayList<DiskImageDynamic>();
        for (Object diskAsObj : disks.values()) {
            Map<String, Object> disk = (Map<String, Object>) diskAsObj;
            DiskImageDynamic diskData = new DiskImageDynamic();
            String imageGroupIdString = assignStringValue(disk, VdsProperties.image_group_id);
            if (!StringUtils.isEmpty(imageGroupIdString)) {
                Guid imageGroupIdGuid = new Guid(imageGroupIdString);
                diskData.setId(imageGroupIdGuid);
                diskData.setread_rate(assignIntValue(disk, VdsProperties.vm_disk_read_rate));
                diskData.setwrite_rate(assignIntValue(disk, VdsProperties.vm_disk_write_rate));
                // actual_size is in 512-byte blocks; true_size is already in bytes
                if (disk.containsKey(VdsProperties.disk_actual_size)) {
                    Long size = assignLongValue(disk, VdsProperties.disk_actual_size);
                    diskData.setactual_size(size != null ? size * 512 : 0);
                } else if (disk.containsKey(VdsProperties.disk_true_size)) {
                    Long size = assignLongValue(disk, VdsProperties.disk_true_size);
                    diskData.setactual_size(size != null ? size : 0);
                }
                if (disk.containsKey(VdsProperties.vm_disk_read_latency)) {
                    diskData.setReadLatency(assignDoubleValueWithNullProtection(disk,
                            VdsProperties.vm_disk_read_latency) / NANO_SECONDS);
                }
                if (disk.containsKey(VdsProperties.vm_disk_write_latency)) {
                    diskData.setWriteLatency(assignDoubleValueWithNullProtection(disk,
                            VdsProperties.vm_disk_write_latency) / NANO_SECONDS);
                }
                if (disk.containsKey(VdsProperties.vm_disk_flush_latency)) {
                    diskData.setFlushLatency(assignDoubleValueWithNullProtection(disk,
                            VdsProperties.vm_disk_flush_latency) / NANO_SECONDS);
                }
                disksData.add(diskData);
            }
        }
        vm.setDisks(disksData);
    }
private static void initAppsList(Map<String, Object> vmStruct, VmDynamic vm) {
if (vmStruct.containsKey(VdsProperties.app_list)) {
Object tempAppsList = vmStruct.get(VdsProperties.app_list);
if (tempAppsList instanceof Object[]) {
Object[] apps = (Object[]) tempAppsList;
StringBuilder builder = new StringBuilder();
boolean firstTime = true;
for (Object app : apps) {
String appString = (String) ((app instanceof String) ? app : null);
if (app == null) {
log.warn("Failed to convert app: [null] to string");
continue; // Don't process this
}
if(appString == null) {
// Note: app cannot be null here anymore
log.warn("Failed to convert app: [" + app.getClass().getName() + "] is not a string");
continue; // Don't process this
}
if (!firstTime) {
builder.append(",");
} else {
firstTime = false;
}
builder.append(appString);
}
vm.setAppList(builder.toString());
} else {
vm.setAppList("");
}
}
}
public static VMStatus convertToVmStatus(String statusName) {
VMStatus status = VMStatus.Unassigned;
// TODO: The following condition should deleted as soon as we drop compatibility with 3.3 since "Running" state
// will be replaced "Up" state and "Unknown" will exist no more. The "Up" state will be processed by
// EnumUtils as other states below.
if ("Running".equals(statusName) || "Unknown".equals(statusName)) {
status = VMStatus.Up;
}
else if ("Migration Source".equals(statusName)) {
status = VMStatus.MigratingFrom;
}
else if ("Migration Destination".equals(statusName)) {
status = VMStatus.MigratingTo;
} else {
try {
statusName = statusName.replace(" ", "");
status = EnumUtils.valueOf(VMStatus.class, statusName, true);
} catch (Exception e) {
log.error("Illegal Vm status: '{}'.", statusName);
}
}
return status;
}
    /**
     * Updates the host network data with the network data reported by the
     * host: rebuilds the interface list (NICs, VLAN devices, bonds), wires
     * networks to interfaces, and restores bonding options from the
     * previously persisted interfaces.
     *
     * @param vds
     *            The host to update
     * @param xmlRpcStruct
     *            A nested map contains network interfaces data
     */
    public static void updateNetworkData(VDS vds, Map<String, Object> xmlRpcStruct) {
        // Snapshot the persisted interfaces before clearing, so bonding options
        // can be restored below.
        List<VdsNetworkInterface> oldInterfaces =
                DbFacade.getInstance().getInterfaceDao().getAllInterfacesForVds(vds.getId());
        vds.getInterfaces().clear();
        addHostNetworkInterfaces(vds, xmlRpcStruct);
        addHostVlanDevices(vds, xmlRpcStruct);
        addHostBondDevices(vds, xmlRpcStruct);
        addHostNetworksAndUpdateInterfaces(vds, xmlRpcStruct);
        // set bonding options
        setBondingOptions(vds, oldInterfaces);
        // This information was added in 3.1, so don't use it if it's not there.
        if (xmlRpcStruct.containsKey(VdsProperties.netConfigDirty)) {
            vds.setNetConfigDirty(assignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
        }
    }
/***
* resolve the the host's interface that is being used to communicate with engine.
*
* @param host
* @return host's interface that being used to communicate with engine, null otherwise
*/
private static VdsNetworkInterface resolveActiveNic(VDS host, String hostIp) {
if (hostIp == null) {
return null;
}
final String managementAddress = hostIp;
VdsNetworkInterface activeIface =
LinqUtils.firstOrNull(host.getInterfaces(), new InterfaceByAddressPredicate(managementAddress));
return activeIface;
}
    /**
     * Wires the networks reported by the host to the host's interface
     * entities: resolves the active NIC, then for every reported network sets
     * the network name, address/subnet/gateway, bridged flag and QoS on each
     * backing interface, flags the management network, and audits networks
     * with zero or multiple backing interfaces.
     */
    private static void addHostNetworksAndUpdateInterfaces(VDS host, Map<String, Object> xmlRpcStruct) {
        Map<String, Map<String, Object>> bridges =
                (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NETWORK_BRIDGES);
        final String hostActiveNicName = findActiveNicName(host, bridges);
        host.setActiveNic(hostActiveNicName);
        // Networks collection (name point to list of nics or bonds)
        Map<String, Map<String, Object>> networks =
                (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NETWORKS);
        Map<String, VdsNetworkInterface> vdsInterfaces = Entities.entitiesByName(host.getInterfaces());
        boolean bridgesReported = FeatureSupported.bridgesReportByVdsm(host.getVdsGroupCompatibilityVersion());
        if (networks != null) {
            host.getNetworkNames().clear();
            for (Entry<String, Map<String, Object>> entry : networks.entrySet()) {
                Map<String, Object> networkProperties = entry.getValue();
                String networkName = entry.getKey();
                if (networkProperties != null) {
                    String interfaceName = (String) networkProperties.get(VdsProperties.INTERFACE);
                    Map<String, Object> bridgeProperties = (bridges == null) ? null : bridges.get(interfaceName);
                    boolean bridgedNetwork = isBridgedNetwork(networkProperties);
                    HostNetworkQos qos = new HostNetworkQosMapper(networkProperties).deserialize();
                    /**
                     * TODO: remove overly-defensive code in 4.0 - IP address, subnet, gateway and boot protocol should
                     * only be extracted for bridged networks and from bridge entries (not network entries)
                     **/
                    // Prefer the bridge entry's properties when the host reports
                    // bridges and this network is bridged; fall back otherwise.
                    Map<String, Object> effectiveProperties =
                            (bridgesReported && bridgedNetwork && bridgeProperties != null) ? bridgeProperties
                                    : networkProperties;
                    String addr = extractAddress(effectiveProperties);
                    String subnet = extractSubnet(effectiveProperties);
                    String gateway = (String) effectiveProperties.get(VdsProperties.GLOBAL_GATEWAY);
                    List<VdsNetworkInterface> interfaces =
                            bridgesReported ? findNetworkInterfaces(vdsInterfaces, interfaceName, bridgeProperties)
                                    : findBridgedNetworkInterfaces(networkProperties, vdsInterfaces);
                    for (VdsNetworkInterface iface : interfaces) {
                        iface.setNetworkName(networkName);
                        iface.setAddress(addr);
                        iface.setSubnet(subnet);
                        iface.setBridged(bridgedNetwork);
                        iface.setQos(qos);
                        // set the management ip
                        if (getManagementNetworkUtil().isManagementNetwork(iface.getNetworkName(), host.getVdsGroupId())) {
                            iface.setType(iface.getType() | VdsInterfaceType.MANAGEMENT.getValue());
                        }
                        setGatewayIfNecessary(iface, host, gateway);
                        if (bridgedNetwork) {
                            addBootProtocol(effectiveProperties, host, iface);
                        }
                    }
                    host.getNetworkNames().add(networkName);
                    reportInvalidInterfacesForNetwork(interfaces, networkName, host);
                }
            }
        }
    }
private static String findActiveNicName(VDS vds, Map<String, Map<String, Object>> bridges) {
final String hostIp = NetworkUtils.getHostByIp(vds);
final String activeBridge = findActiveBridge(hostIp, bridges);
if (activeBridge != null) {
return activeBridge;
}
// by now, if the host is communicating with engine over a valid interface,
// the interface will have the host's engine IP
final VdsNetworkInterface activeIface = resolveActiveNic(vds, hostIp);
String hostActiveNic = (activeIface == null) ? null : activeIface.getName();
return hostActiveNic;
}
/***
*
* @param ipAddress
* @param bridges
* @return the name of the bridge obtaining ipAddress, null in case no such exist
*/
private static String findActiveBridge(String ipAddress, Map<String, Map<String, Object>> bridges) {
String activeBridge = null;
if (bridges != null) {
for (Entry<String, Map<String, Object>> entry : bridges.entrySet()) {
Map<String, Object> bridgeProperties = entry.getValue();
String bridgeName = entry.getKey();
if (bridgeProperties != null) {
String bridgeAddress = (String) bridgeProperties.get("addr");
// in case host is communicating with engine over a bridge
if (bridgeAddress != null && bridgeAddress.equals(ipAddress)) {
activeBridge = bridgeName;
}
}
}
}
return activeBridge;
}
/**
* Reports a warning to the audit log if a bridge is connected to more than one interface which is considered bad
* configuration.
*
* @param interfaces
* The network's interfaces
* @param network
* The network to report for
* @param vds
* The host in which the network is defined
*/
private static void reportInvalidInterfacesForNetwork(List<VdsNetworkInterface> interfaces, String networkName, VDS vds) {
if (interfaces.isEmpty()) {
auditLogDirector.log(createHostNetworkAuditLog(networkName, vds), AuditLogType.NETWORK_WITHOUT_INTERFACES);
} else if (interfaces.size() > 1) {
AuditLogableBase logable = createHostNetworkAuditLog(networkName, vds);
logable.addCustomValue("Interfaces", StringUtils.join(Entities.objectNames(interfaces), ","));
auditLogDirector.log(logable, AuditLogType.BRIDGED_NETWORK_OVER_MULTIPLE_INTERFACES);
}
}
protected static AuditLogableBase createHostNetworkAuditLog(String networkName, VDS vds) {
AuditLogableBase logable = new AuditLogableBase(vds.getId());
logable.addCustomValue("NetworkName", networkName);
return logable;
}
private static List<VdsNetworkInterface> findNetworkInterfaces(Map<String, VdsNetworkInterface> vdsInterfaces,
String interfaceName,
Map<String, Object> bridgeProperties) {
List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
VdsNetworkInterface iface = vdsInterfaces.get(interfaceName);
if (iface == null) {
if (bridgeProperties != null) {
interfaces.addAll(findBridgedNetworkInterfaces(bridgeProperties, vdsInterfaces));
}
} else {
interfaces.add(iface);
}
return interfaces;
}
/**
 * Collects the host interfaces that are attached as ports of the given
 * bridge, skipping ports the host did not report as interfaces.
 */
private static List<VdsNetworkInterface> findBridgedNetworkInterfaces(Map<String, Object> bridge,
        Map<String, VdsNetworkInterface> vdsInterfaces) {
    List<VdsNetworkInterface> attached = new ArrayList<VdsNetworkInterface>();
    Object[] bridgePorts = (Object[]) bridge.get("ports");
    if (bridgePorts == null) {
        return attached;
    }
    for (Object port : bridgePorts) {
        String portName = port.toString();
        if (vdsInterfaces.containsKey(portName)) {
            attached.add(vdsInterfaces.get(portName));
        }
    }
    return attached;
}
/**
 * Parses the bond devices from the VDSM "bondings" capability entry and
 * attaches them (with their slaves and options) to the host.
 *
 * @param vds
 *            the host being updated
 * @param xmlRpcStruct
 *            the raw capabilities map reported by VDSM
 */
private static void addHostBondDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
    Map<String, Map<String, Object>> bonds =
            (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NETWORK_BONDINGS);
    if (bonds != null) {
        // Clusters where "cfg" entries are deprecated report bond options as a
        // structured "opts" map instead of a single BONDING_OPTS config string.
        boolean cfgEntriesDeprecated = FeatureSupported.cfgEntriesDeprecated(vds.getVdsGroupCompatibilityVersion());
        for (Entry<String, Map<String, Object>> entry : bonds.entrySet()) {
            VdsNetworkInterface bond = new Bond();
            updateCommonInterfaceData(bond, vds, entry);
            bond.setBonded(true);
            Map<String, Object> bondProperties = entry.getValue();
            if (bondProperties != null) {
                bond.setMacAddress((String) bondProperties.get("hwaddr"));
                // NOTE(review): the bond is added to the host's interface list only
                // inside addBondDeviceToHost, i.e. only when "slaves" is reported —
                // confirm that bonds without slaves are intentionally dropped.
                if (bondProperties.get("slaves") != null) {
                    addBondDeviceToHost(vds, bond, (Object[]) bondProperties.get("slaves"));
                }
                Object bondOptions = null;
                if (cfgEntriesDeprecated) {
                    // Rebuild the option string: "mode" and "miimon" lead (only when
                    // both are present), remaining options follow in map iteration order.
                    Map<String, Object> bondOptionsMap = new HashMap<>();
                    Map<String, Object> bondOpts = (Map<String, Object>) bondProperties.get("opts");
                    if (bondOpts != null) {
                        bondOptionsMap.putAll(bondOpts);
                    }
                    String bondOptionsString = "";
                    String mode = (String) bondOptionsMap.get("mode");
                    String miimon = (String) bondOptionsMap.get("miimon");
                    if (mode != null && miimon != null) {
                        bondOptionsString = String.format("mode=%s miimon=%s", mode, miimon);
                        bondOptionsMap.remove("mode");
                        bondOptionsMap.remove("miimon");
                    }
                    for (Map.Entry<String, Object> optionEntry : bondOptionsMap.entrySet()) {
                        bondOptionsString =
                                String.format("%s %s=%s",
                                        bondOptionsString,
                                        optionEntry.getKey(),
                                        optionEntry.getValue());
                    }
                    // Empty string means no options were reported at all.
                    bondOptions = bondOptionsString.isEmpty() ? null : bondOptionsString;
                } else {
                    // Legacy hosts: options come verbatim from the ifcfg "cfg" entry.
                    Map<String, Object> config = (Map<String, Object>) bondProperties.get("cfg");
                    bondOptions = (config == null) ? null : config.get("BONDING_OPTS");
                }
                if (bondOptions != null) {
                    bond.setBondOptions(bondOptions.toString());
                }
            }
        }
    }
}
/**
 * Adds the VLAN devices reported by VDSM to the host's interface list.
 * VLAN id and base interface are taken from the explicit properties when both
 * are present; otherwise they are parsed from a legacy "base.vlanid" name.
 *
 * @param vds
 *            the host to update
 * @param xmlRpcStruct
 *            map of vlan device name to vlan data
 */
private static void addHostVlanDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
    Map<String, Map<String, Object>> reportedVlans =
            (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NETWORK_VLANS);
    if (reportedVlans == null) {
        return;
    }
    for (Entry<String, Map<String, Object>> vlanEntry : reportedVlans.entrySet()) {
        VdsNetworkInterface vlanDevice = new Vlan();
        updateCommonInterfaceData(vlanDevice, vds, vlanEntry);
        String deviceName = vlanEntry.getKey();
        Map<String, Object> vlanProperties = vlanEntry.getValue();
        Object vlanId = vlanProperties.get(VdsProperties.VLAN_ID);
        Object baseInterface = vlanProperties.get(VdsProperties.BASE_INTERFACE);
        if (vlanId != null && baseInterface != null) {
            vlanDevice.setVlanId((Integer) vlanId);
            vlanDevice.setBaseInterface((String) baseInterface);
        } else if (deviceName.contains(".")) {
            // Legacy fallback: device named like "eth0.100".
            String[] nameParts = deviceName.split("[.]", -1);
            vlanDevice.setVlanId(Integer.parseInt(nameParts[1]));
            vlanDevice.setBaseInterface(nameParts[0]);
        }
        vds.getInterfaces().add(vlanDevice);
    }
}
/**
 * Adds the physical NICs reported by VDSM to the host's interface list,
 * filling in speed and MAC address.
 *
 * @param vds
 *            the host to update
 * @param xmlRpcStruct
 *            a nested map containing network interface data
 */
private static void addHostNetworkInterfaces(VDS vds, Map<String, Object> xmlRpcStruct) {
    Map<String, Map<String, Object>> nics =
            (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NETWORK_NICS);
    if (nics == null) {
        return;
    }
    for (Entry<String, Map<String, Object>> nicEntry : nics.entrySet()) {
        VdsNetworkInterface nic = new Nic();
        updateCommonInterfaceData(nic, vds, nicEntry);
        Map<String, Object> nicProperties = nicEntry.getValue();
        if (nicProperties != null) {
            Object reportedSpeed = nicProperties.get("speed");
            if (reportedSpeed != null) {
                nic.setSpeed((Integer) reportedSpeed);
            }
            nic.setMacAddress((String) nicProperties.get("hwaddr"));
            // "permhwaddr" is reported for bond slaves and takes precedence.
            String permanentMac = (String) nicProperties.get("permhwaddr");
            if (permanentMac != null) {
                // TODO remove when the minimal supported vdsm version is >=3.6
                // in older VDSM version, slave's Mac is in upper case
                nic.setMacAddress(permanentMac.toLowerCase());
            }
        }
        vds.getInterfaces().add(nic);
    }
}
/**
 * Updates a given interface (be it physical, bond or VLAN) by data as collected from the host.
 *
 * @param iface
 *            The interface to update
 * @param host
 *            The host to which the interface belongs.
 * @param ifaceEntry
 *            A pair whose key is the interface's name, and whose value it a map of the interface properties.
 */
private static void updateCommonInterfaceData(VdsNetworkInterface iface,
        VDS host,
        Entry<String, Map<String, Object>> ifaceEntry) {
    iface.setName(ifaceEntry.getKey());
    // A fresh id is generated on every report; the statistics row below is
    // keyed by the same id, so the two must be set in this order.
    iface.setId(Guid.newGuid());
    iface.setVdsId(host.getId());
    VdsNetworkStatistics iStats = new VdsNetworkStatistics();
    iStats.setId(iface.getId());
    iStats.setVdsId(host.getId());
    iface.setStatistics(iStats);
    Map<String, Object> nicProperties = ifaceEntry.getValue();
    if (nicProperties != null) {
        iface.setAddress(extractAddress(nicProperties));
        iface.setSubnet(extractSubnet(nicProperties));
        // MTU is reported as a string; set it only when non-blank.
        String mtu = (String) nicProperties.get(VdsProperties.MTU);
        if (StringUtils.isNotBlank(mtu)) {
            iface.setMtu(Integer.parseInt(mtu));
        }
        addBootProtocol(nicProperties, host, iface);
    }
}
/** Returns the reported IP address ("addr"), or null when absent. */
private static String extractAddress(Map<String, Object> properties) {
    Object address = properties.get("addr");
    return (String) address;
}
/** Returns the reported subnet mask ("netmask"), or null when absent. */
private static String extractSubnet(Map<String, Object> properties) {
    Object netmask = properties.get("netmask");
    return (String) netmask;
}
/**
 * Returns true if vdsm doesn't report the 'bridged' attribute, or the
 * attribute's actual value when it is reported.<br>
 * The assumption is that bridge-less networks aren't supported unless the
 * attribute is present; bridge-less networks must report 'false' for it.
 *
 * @param network
 *            the network whose bridge attribute is evaluated
 * @return true when no attribute is reported, otherwise its actual value
 */
private static boolean isBridgedNetwork(Map<String, Object> network) {
    Object bridged = network.get("bridged");
    if (bridged == null) {
        return true;
    }
    return Boolean.parseBoolean(bridged.toString());
}
/**
 * Back-compatibility shim for hosts (below 2.2.4) that do not report their
 * bonding options: when the freshly reported interface of the same name has
 * no options but the previously stored one does (e.g. set via the UI), the
 * old value is carried over.
 */
private static void setBondingOptions(VDS vds, List<VdsNetworkInterface> oldInterfaces) {
    for (VdsNetworkInterface previous : oldInterfaces) {
        String previousOptions = previous.getBondOptions();
        if (previousOptions == null) {
            continue;
        }
        for (VdsNetworkInterface current : vds.getInterfaces()) {
            if (current.getBondOptions() == null && previous.getName().equals(current.getName())) {
                current.setBondOptions(previousOptions);
                break;
            }
        }
    }
}
/**
 * Resolves the interface's boot protocol from the reported properties.
 * Newer cluster levels no longer ship "cfg" entries, so the resolver
 * implementation is chosen by the cluster compatibility version.
 */
private static void addBootProtocol(Map<String, Object> entry, VDS host, VdsNetworkInterface iface) {
    BootProtocolResolver resolver;
    if (FeatureSupported.cfgEntriesDeprecated(host.getVdsGroupCompatibilityVersion())) {
        resolver = new NoCfgBootProtocolResolver(entry, iface, host);
    } else {
        resolver = new CfgBootProtocolResolver(entry, iface, host);
    }
    resolver.resolve();
}
/**
 * Adds the bond to the host's interfaces and marks each listed slave
 * interface as belonging to this bond.
 */
private static void addBondDeviceToHost(VDS vds, VdsNetworkInterface iface, Object[] interfaces) {
    vds.getInterfaces().add(iface);
    if (interfaces == null) {
        return;
    }
    for (Object slave : interfaces) {
        String slaveName = slave.toString();
        for (VdsNetworkInterface candidate : vds.getInterfaces()) {
            if (candidate.getName().equals(slaveName)) {
                candidate.setBondName(iface.getName());
                break;
            }
        }
    }
}
/**
 * Store the gateway for either of these cases:
 * 1. any host network, in a cluster that supports multiple gateways
 * 2. management network, no matter the cluster compatibility version
 * 3. the active interface (could happen when there is no management network yet)
 * A gateway provided for a non-management network when multiple gateways
 * aren't supported is ignored.
 *
 * @param iface
 *            the host network interface
 * @param host
 *            the host whose interfaces are being edited
 * @param gateway
 *            the gateway value to be set
 */
public static void setGatewayIfNecessary(VdsNetworkInterface iface, VDS host, String gateway) {
    final ManagementNetworkUtil networkUtil = getManagementNetworkUtil();
    boolean shouldStore = FeatureSupported.multipleGatewaysSupported(host.getVdsGroupCompatibilityVersion())
            || networkUtil.isManagementNetwork(iface.getNetworkName(), host.getVdsGroupId())
            || iface.getName().equals(host.getActiveNic());
    if (shouldStore) {
        iface.setGateway(gateway);
    }
}
/** Looks up the {@code ManagementNetworkUtil} from the injector. */
private static ManagementNetworkUtil getManagementNetworkUtil() {
    return Injector.get(ManagementNetworkUtil.class);
}
/**
 * Creates a list of {@link VmGuestAgentInterface} from the guest-agent
 * reported network interfaces in the VM's status structure.
 *
 * @param vmId
 *            the VM's ID which contains the interfaces
 * @param xmlRpcStruct
 *            the xml structure that describes the VM as reported by VDSM
 * @return a list of {@link VmGuestAgentInterface}, or null if no guest vNics
 *         were reported
 */
public static List<VmGuestAgentInterface> buildVmGuestAgentInterfacesData(Guid vmId, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK_INTERFACES)) {
        return null;
    }
    List<VmGuestAgentInterface> interfaces = new ArrayList<VmGuestAgentInterface>();
    for (Object ifaceStruct : (Object[]) xmlRpcStruct.get(VdsProperties.VM_NETWORK_INTERFACES)) {
        // VDSM reports each interface as a string-keyed map; the cast is
        // unchecked because the wire format is untyped.
        @SuppressWarnings("unchecked")
        Map<String, Object> ifaceMap = (Map<String, Object>) ifaceStruct;
        VmGuestAgentInterface nic = new VmGuestAgentInterface();
        nic.setInterfaceName(assignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_NAME));
        nic.setMacAddress(getMacAddress(ifaceMap));
        nic.setIpv4Addresses(extracStringtList(ifaceMap, VdsProperties.VM_IPV4_ADDRESSES));
        nic.setIpv6Addresses(extracStringtList(ifaceMap, VdsProperties.VM_IPV6_ADDRESSES));
        nic.setVmId(vmId);
        interfaces.add(nic);
    }
    return interfaces;
}
/**
 * Extracts the guest-reported MAC address, normalizing '-' separators to ':'.
 * Returns null when no MAC is reported.
 */
private static String getMacAddress(Map<String, Object> ifaceMap) {
    String reported = assignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_MAC_ADDRESS);
    if (reported == null) {
        return null;
    }
    return reported.replace('-', ':');
}
/**
 * Parses the NUMA topology reported by the host into the VDS dynamic data:
 * auto-NUMA balancing status, the node list (index, cpu ids, total memory)
 * and the inter-node distance table.
 *
 * @param vds
 *            the host to update
 * @param xmlRpcStruct
 *            the raw map reported by VDSM
 */
private static void updateNumaNodesData(VDS vds, Map<String, Object> xmlRpcStruct) {
    if (xmlRpcStruct.containsKey(VdsProperties.AUTO_NUMA)) {
        vds.getDynamicData().setAutoNumaBalancing(AutoNumaBalanceStatus.forValue(
                assignIntValue(xmlRpcStruct, VdsProperties.AUTO_NUMA)));
    }
    if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODES)) {
        Map<String, Map<String, Object>> numaNodeMap =
                (Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NUMA_NODES);
        Map<String, Object> numaNodeDistanceMap =
                (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NUMA_NODE_DISTANCE);
        List<VdsNumaNode> newNumaNodeList = new ArrayList<>(numaNodeMap.size());
        for (Map.Entry<String, Map<String, Object>> item : numaNodeMap.entrySet()) {
            // the node index is the map key
            int index = Integer.valueOf(item.getKey());
            Map<String, Object> itemMap = item.getValue();
            List<Integer> cpuIds = extractIntegerList(itemMap, VdsProperties.NUMA_NODE_CPU_LIST);
            long memTotal = assignLongValue(itemMap, VdsProperties.NUMA_NODE_TOTAL_MEM);
            VdsNumaNode numaNode = new VdsNumaNode();
            numaNode.setIndex(index);
            if (cpuIds != null) {
                numaNode.setCpuIds(cpuIds);
            }
            numaNode.setMemTotal(memTotal);
            newNumaNodeList.add(numaNode);
        }
        // sort by index so that distances.get(i) lines up with the i-th node below
        Collections.sort(newNumaNodeList, numaNodeComparator);
        for (VdsNumaNode vdsNumaNode : newNumaNodeList) {
            int index = vdsNumaNode.getIndex();
            // NOTE(review): extractIntegerList returns null when a node has no
            // distance entry, which would NPE on distances.size() below —
            // confirm VDSM always reports a distance row per node.
            List<Integer> distances = extractIntegerList(numaNodeDistanceMap, String.valueOf(index));
            Map<Integer, Integer> distanceMap = new HashMap<>(distances.size());
            for (int i = 0; i < distances.size(); i++) {
                distanceMap.put(newNumaNodeList.get(i).getIndex(), distances.get(i));
            }
            VdsNumaNode newNumaNode = NumaUtils.getVdsNumaNodeByIndex(newNumaNodeList, index);
            if (newNumaNode != null) {
                newNumaNode.setNumaNodeDistances(distanceMap);
            }
        }
        vds.getDynamicData().setNumaNodeList(newNumaNodeList);
        // a host is only considered NUMA-capable with more than one node
        vds.setNumaSupport(newNumaNodeList.size() > 1);
    }
}
/**
 * Fills the VM statistics with the runtime vNUMA-to-host-node placement
 * reported by VDSM.
 *
 * @param vm
 *            the VM statistics to update
 * @param xmlRpcStruct
 *            the raw runtime info map reported by VDSM
 */
private static void updateVmNumaNodesRuntimeInfo(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
    Map<String, Object[]> runtimeInfo = (Map<String, Object[]>) xmlRpcStruct.get(
            VdsProperties.VM_NUMA_NODES_RUNTIME_INFO);
    for (Map.Entry<String, Object[]> nodeEntry : runtimeInfo.entrySet()) {
        VmNumaNode vmNode = new VmNumaNode();
        vmNode.setIndex(Integer.valueOf(nodeEntry.getKey()));
        for (Object hostNodeIndex : nodeEntry.getValue()) {
            vmNode.getVdsNumaNodeList().add(new Pair<>(
                    Guid.Empty, new Pair<>(false, (Integer) hostNodeIndex)));
        }
        vm.getvNumaNodeStatisticsList().add(vmNode);
    }
}
/**
 * Reads a string array property into a List, or null when the property is
 * missing or empty. (Name kept for compatibility with existing callers.)
 */
private static List<String> extracStringtList(Map<String, Object> xmlRpcStruct, String propertyName) {
    if (!xmlRpcStruct.containsKey(propertyName)) {
        return null;
    }
    Object[] rawItems = (Object[]) xmlRpcStruct.get(propertyName);
    if (rawItems.length == 0) {
        return null;
    }
    List<String> values = new ArrayList<String>(rawItems.length);
    for (Object rawItem : rawItems) {
        values.add((String) rawItem);
    }
    return values;
}
/**
 * Reads an integer array property into a List, or null when the property is
 * missing or empty.
 */
private static List<Integer> extractIntegerList(Map<String, Object> xmlRpcStruct, String propertyName) {
    if (!xmlRpcStruct.containsKey(propertyName)) {
        return null;
    }
    Object[] rawItems = (Object[]) xmlRpcStruct.get(propertyName);
    if (rawItems.length == 0) {
        return null;
    }
    List<Integer> values = new ArrayList<Integer>(rawItems.length);
    for (Object rawItem : rawItems) {
        values.add((Integer) rawItem);
    }
    return values;
}
/**
 * Parses the host-device tree reported by VDSM into {@link HostDevice} beans.
 * Each entry maps a device name to a {@code params} map, e.g.:
 *
 * <pre>
 * 'computer':          {'params': {'capability': 'system', 'product': '...'}}
 * 'pci_0000_00_1d_2':  {'params': {'capability': 'pci', 'iommu_group': '9',
 *                                  'parent': 'computer', 'product': '...',
 *                                  'product_id': '0x3a36', 'vendor': '...',
 *                                  'vendor_id': '0x8086'}}
 * </pre>
 */
public static List<HostDevice> buildHostDevices(Map<String, Map<String, Map<String, Object>>> deviceList) {
    List<HostDevice> devices = new ArrayList<>();
    for (Entry<String, Map<String, Map<String, Object>>> deviceEntry : deviceList.entrySet()) {
        String deviceName = deviceEntry.getKey();
        Map<String, Object> params = deviceEntry.getValue().get(VdsProperties.PARAMS);

        HostDevice device = new HostDevice();
        device.setDeviceName(deviceName);
        device.setCapability(params.get(VdsProperties.CAPABILITY).toString());

        // The root device "computer" has no parent; point it at itself to
        // keep the DB parent reference valid.
        if (VdsProperties.ROOT_HOST_DEVICE.equals(deviceName)) {
            device.setParentDeviceName(VdsProperties.ROOT_HOST_DEVICE);
        } else {
            device.setParentDeviceName(params.get(VdsProperties.PARENT_NAME).toString());
        }

        // The remaining attributes are optional — set each only when reported.
        if (params.containsKey(VdsProperties.IOMMU_GROUP)) {
            device.setIommuGroup(Integer.parseInt(params.get(VdsProperties.IOMMU_GROUP).toString()));
        }
        if (params.containsKey(VdsProperties.PRODUCT_ID)) {
            device.setProductId(params.get(VdsProperties.PRODUCT_ID).toString());
        }
        if (params.containsKey(VdsProperties.PRODUCT_NAME)) {
            device.setProductName(params.get(VdsProperties.PRODUCT_NAME).toString());
        }
        if (params.containsKey(VdsProperties.VENDOR_NAME)) {
            device.setVendorName(params.get(VdsProperties.VENDOR_NAME).toString());
        }
        if (params.containsKey(VdsProperties.VENDOR_ID)) {
            device.setVendorId(params.get(VdsProperties.VENDOR_ID).toString());
        }
        if (params.containsKey(VdsProperties.PHYSICAL_FUNCTION)) {
            device.setParentPhysicalFunction(params.get(VdsProperties.PHYSICAL_FUNCTION).toString());
        }
        if (params.containsKey(VdsProperties.TOTAL_VFS)) {
            device.setTotalVirtualFunctions(Integer.parseInt(params.get(VdsProperties.TOTAL_VFS).toString()));
        }
        if (params.containsKey(VdsProperties.NET_INTERFACE_NAME)) {
            device.setNetworkInterfaceName(params.get(VdsProperties.NET_INTERFACE_NAME).toString());
        }

        devices.add(device);
    }
    return devices;
}
/**
 * Replaces the host's virt-v2v job list with the jobs found in the VDSM
 * statistics report; does nothing when the report has no v2v section.
 */
private static void updateV2VJobs(VDS vds, Map<String, Object> xmlRpcStruct) {
    if (!xmlRpcStruct.containsKey(VdsProperties.v2vJobs)) {
        return;
    }
    Map<String, Object> reportedJobs = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.v2vJobs);
    List<V2VJobInfo> jobs = new ArrayList<>();
    for (Entry<String, Object> jobEntry : reportedJobs.entrySet()) {
        jobs.add(buildV2VJobData(jobEntry.getKey(), (Map<String, Object>) jobEntry.getValue()));
    }
    vds.getStatisticsData().setV2VJobs(jobs);
}
/** Builds a single {@link V2VJobInfo} from its id and reported properties. */
private static V2VJobInfo buildV2VJobData(String jobId, Map<String, Object> xmlRpcStruct) {
    V2VJobInfo jobInfo = new V2VJobInfo();
    jobInfo.setId(Guid.createGuidFromString(jobId));
    jobInfo.setStatus(getV2VJobStatusValue(xmlRpcStruct));
    jobInfo.setDescription(assignStringValue(xmlRpcStruct, VdsProperties.v2vDescription));
    jobInfo.setProgress(assignIntValue(xmlRpcStruct, VdsProperties.v2vProgress));
    return jobInfo;
}
/**
 * Maps the reported virt-v2v job status string onto
 * {@link V2VJobInfo.JobStatus}. Missing or unrecognized statuses are logged
 * and mapped to UNKNOWN rather than propagating an exception into the
 * monitoring flow. (The previous broad {@code catch (Exception)} also hid an
 * NPE used to handle the null-status case — now handled explicitly.)
 */
private static V2VJobInfo.JobStatus getV2VJobStatusValue(Map<String, Object> input) {
    String status = (String) input.get(VdsProperties.v2vJobStatus);
    if (status != null) {
        try {
            return V2VJobInfo.JobStatus.valueOf(status.toUpperCase());
        } catch (IllegalArgumentException ignored) {
            // unrecognized constant — fall through to the warning below
        }
    }
    log.warn("Got invalid status for virt-v2v job: {}", status);
    return V2VJobInfo.JobStatus.UNKNOWN;
}
/**
 * Removes the notify-time entry from a VM status event and returns it as a
 * Double, or null when it is absent or not a Long.
 */
public static Double removeNotifyTimeFromVmStatusEvent(Map<String, Object> xmlRpcStruct) {
    Object notifyTime = xmlRpcStruct.remove(VdsProperties.notify_time);
    if (notifyTime instanceof Long) {
        return ((Long) notifyTime).doubleValue();
    }
    return null;
}
} |
package simplejavatexteditor;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.util.ArrayList;
import java.util.Collections;
import javax.swing.AbstractAction;
import javax.swing.ActionMap;
import javax.swing.InputMap;
import javax.swing.JTextArea;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.BadLocationException;
/**
* <h1>Auto complete functionality for java keywords, brackets and
* parentheses</h1>
*
* <p>
* An ArrayList is created for the keywords and the brackets. If the word
* currently being typed matches a word in the list, a Runnable inner class is
* implemented to handle the word completion.
*
* Two other inner classes are also used. The second one handles when the enter
* key is pressed in response to an auto complete suggestion. The third one
* performs additional logic on brackets.
* </p>
*
*
* @author Patrick Slagle
* @since 2016-12-03
*/
public class JavaAutoComplete
implements DocumentListener {
private static String[] keywords = {"abstract", "assert", "boolean",
"break", "byte", "case", "catch", "char", "class", "const",
"continue", "default", "do", "double", "else", "extends", "false",
"final", "finally", "float", "for", "goto", "if", "implements",
"import", "instanceof", "int", "System", "out", "print()", "println()",
"new", "null", "package", "private", "protected", "public", "interface",
"long", "native", "return", "short", "static", "strictfp", "super", "switch",
"synchronized", "this", "throw", "throws", "transient", "true",
"try", "void", "volatile", "while", "String"};
private static String[] bracketChars = {"{", "("};
private static String[] bCompletions = {"}", ")"};
private ArrayList<String> words = new ArrayList<>();
private ArrayList<String> brackets = new ArrayList<>();
private ArrayList<String> bracketCompletions = new ArrayList<>();
//Keep track of when code completion
//has been activated
private enum Mode {
INSERT, COMPLETION
};
private final UI ui;
private Mode mode = Mode.INSERT;
private final JTextArea textArea;
private static final String COMMIT_ACTION = "commit";
private boolean isKeyword;
private int pos;
private String content;
public JavaAutoComplete(UI ui) {
//Access the editor
this.ui = ui;
textArea = ui.getEditor();
//Set the handler for the enter key
InputMap im = textArea.getInputMap();
ActionMap am = textArea.getActionMap();
im.put(KeyStroke.getKeyStroke("ENTER "), COMMIT_ACTION);
am.put(COMMIT_ACTION, new CommitAction());
//Set up the keywords
for (String keyList : keywords) {
words.add(keyList);
}
for (String bracket : bracketChars) {
brackets.add(bracket);
}
for (String comp : bCompletions) {
bracketCompletions.add(comp);
}
Collections.sort(words, null);
}
@Override
public void insertUpdate(DocumentEvent e) {
pos = e.getOffset();
content = null;
try {
content = textArea.getText(0, pos + 1);
} catch (BadLocationException ex) {
ex.printStackTrace();
}
if (e.getLength() != 1) {
return;
}
//Before checking for a keyword
checkForBracket();
//Get the beginning of the word being typed
int start;
for (start = pos; start >= 0; start
if (!Character.isLetter(content.charAt(start))) {
break;
}
}
//Auto complete will start
//after two characters are typed
if (pos - start < 2) {
return;
}
//Search for a match on the word being typed
//in the keywords ArrayList
String prefix = content.substring(start + 1);
int n = Collections.binarySearch(words, prefix);
if (n < 0 && -n < words.size()) {
String match = words.get(-n - 1);
if (match.startsWith(prefix)) {
String completion = match.substring(pos - start);
isKeyword = true;
SwingUtilities.invokeLater(
new CompletionTask(completion, pos + 1));
} else {
mode = Mode.INSERT;
}
}
}
/**
* Performs a check to see if the last
* key typed was one of the supported
* bracket characters
*/
private void checkForBracket() {
//String of the last typed character
char c = content.charAt(pos);
String s = String.valueOf(c);
for (int i = 0; i < brackets.size(); i++) {
if (brackets.get(i).equals(s)) {
isKeyword = false;
SwingUtilities.invokeLater(
new CompletionTask(bracketCompletions.get(i), pos + 1));
}
}
}
/**
* So that future classes can view the keyword list in the future.
*
* @return the keywords
*/
private ArrayList<String> getKeywords() {
return words;
}
/**
* So that these keywords can be modified or added to in the future.
*
* @param keyword the keyword to set
*/
private void setKeywords(String keyword) {
words.add(keyword);
}
/**
* Handles the auto complete suggestion
* generated when the user is typing a
* word that matches a keyword.
*/
private class CompletionTask
implements Runnable {
private final String completion;
private final int position;
public CompletionTask(String completion, int position) {
this.completion = completion;
this.position = position;
}
@Override
public void run() {
textArea.insert(completion, position);
textArea.setCaretPosition(position + completion.length());
textArea.moveCaretPosition(position);
mode = Mode.COMPLETION;
if (!isKeyword) {
textArea.addKeyListener(new HandleBracketEvent());
}
}
}
/**
* Enter key is pressed in response to an auto complete suggestion. Respond
* appropriately.
*/
private class CommitAction
extends AbstractAction {
@Override
public void actionPerformed(ActionEvent e) {
if (mode == Mode.COMPLETION) {
int pos = textArea.getSelectionEnd();
if (isKeyword) {
textArea.insert(" ", pos);
textArea.setCaretPosition(pos + 1);
mode = Mode.INSERT;
} else {
mode = Mode.INSERT;
}
} else {
textArea.replaceSelection("\n");
}
}
}
/**
* Additional logic for bracket auto complete
*/
private class HandleBracketEvent
implements KeyListener {
@Override
public void keyTyped(KeyEvent e) {
//Bracket auto complete needs special attention.
//Multiple possible responses are needed.
String keyEvent = String.valueOf(e.getKeyChar());
for (String bracketCompletion : bracketCompletions) {
if (keyEvent.equals(bracketCompletion)) {
textArea.replaceRange("", pos, pos + 1);
mode = Mode.INSERT;
textArea.removeKeyListener(this);
}
}
int currentPosition = textArea.getCaretPosition();
switch (e.getKeyChar()) {
case '\n':
textArea.insert("\n\n", currentPosition);
textArea.setCaretPosition(currentPosition + 1);
mode = Mode.INSERT;
textArea.removeKeyListener(this);
break;
default:
textArea.setCaretPosition(pos);
mode = Mode.INSERT;
textArea.removeKeyListener(this);
break;
}
}
@Override
public void keyPressed(KeyEvent e) {
}
@Override
public void keyReleased(KeyEvent e) {
}
}
@Override
public void removeUpdate(DocumentEvent e) {
}
@Override
public void changedUpdate(DocumentEvent e) {
}
} |
package org.openhealthtools.mdht.uml.cda.dita;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.eclipse.compare.rangedifferencer.IRangeComparator;
import org.eclipse.compare.rangedifferencer.RangeDifference;
import org.eclipse.compare.rangedifferencer.RangeDifferencer;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.uml2.uml.Association;
import org.eclipse.uml2.uml.Class;
import org.eclipse.uml2.uml.Classifier;
import org.eclipse.uml2.uml.Comment;
import org.eclipse.uml2.uml.Constraint;
import org.eclipse.uml2.uml.DirectedRelationship;
import org.eclipse.uml2.uml.Element;
import org.eclipse.uml2.uml.Generalization;
import org.eclipse.uml2.uml.NamedElement;
import org.eclipse.uml2.uml.Package;
import org.eclipse.uml2.uml.Property;
import org.eclipse.uml2.uml.Substitution;
import org.eclipse.uml2.uml.UMLPackage;
import org.eclipse.uml2.uml.util.UMLSwitch;
import org.openhealthtools.mdht.uml.cda.core.util.CDAModelUtil;
import org.openhealthtools.mdht.uml.cda.core.util.InstanceGenerator;
import org.openhealthtools.mdht.uml.cda.core.util.RIMModelUtil;
import org.openhealthtools.mdht.uml.cda.dita.internal.Logger;
import org.openhealthtools.mdht.uml.common.util.NamedElementComparator;
import org.openhealthtools.mdht.uml.common.util.UMLUtil;
public class TransformClassContent extends TransformAbstract {
    // Generators for example instances and tables; left null when the CDA
    // runtime bundle is unavailable (see constructor).
    private InstanceGenerator instanceGenerator;

    private TableGenerator tableGenerator;

    /**
     * Creates the transformer. Instance and table generation are enabled only
     * when the "org.openhealthtools.mdht.uml.cda" bundle is present in the
     * running platform; otherwise both generators stay null.
     */
    public TransformClassContent(DitaTransformerOptions options) {
        super(options);
        if (Platform.getBundle("org.openhealthtools.mdht.uml.cda") != null) {
            instanceGenerator = new InstanceGenerator();
            tableGenerator = new TableGenerator();
        }
    }
private void appendAggregateRules(PrintWriter writer, Class umlClass) {
Package xrefSource = UMLUtil.getTopPackage(umlClass);
List<Classifier> allParents = new ArrayList<Classifier>(umlClass.allParents());
allParents.add(0, umlClass);
List<Property> allProperties = new ArrayList<Property>();
List<Property> allAssociations = new ArrayList<Property>();
List<Property> allAttributes = new ArrayList<Property>();
List<Constraint> allConstraints = new ArrayList<Constraint>();
// categorize constraints by constrainedElement name
List<Constraint> unprocessedConstraints = new ArrayList<Constraint>();
// propertyName -> constraints
Map<String, List<Constraint>> constraintMap = new HashMap<String, List<Constraint>>();
// constraint -> sub-constraints
Map<Constraint, List<Constraint>> subConstraintMap = new HashMap<Constraint, List<Constraint>>();
for (Constraint constraint : umlClass.getOwnedRules()) {
unprocessedConstraints.add(constraint);
for (Element element : constraint.getConstrainedElements()) {
if (element instanceof Property) {
String name = ((Property) element).getName();
List<Constraint> rules = constraintMap.get(name);
if (rules == null) {
rules = new ArrayList<Constraint>();
constraintMap.put(name, rules);
}
rules.add(constraint);
} else if (element instanceof Constraint) {
Constraint subConstraint = (Constraint) element;
List<Constraint> rules = subConstraintMap.get(subConstraint);
if (rules == null) {
rules = new ArrayList<Constraint>();
subConstraintMap.put(subConstraint, rules);
}
rules.add(constraint);
}
}
}
// process parents in reverse order, CDA base class first
for (int i = allParents.size() - 1; i >= 0; i
Class parent = (Class) allParents.get(i);
for (Property property : parent.getOwnedAttributes()) {
if (property.getAssociation() != null) {
allAssociations.add(property);
} else {
// if list contains this property name, replace it; else append
int index = findProperty(allProperties, property.getName());
if (index >= 0) {
allProperties.set(index, property);
} else {
allProperties.add(property);
}
}
}
}
Iterator<Property> propertyIterator = allProperties.iterator();
while (propertyIterator.hasNext()) {
Property property = propertyIterator.next();
if (CDAModelUtil.isCDAModel(property) && property.getLower() == 0) {
// include only required CDA class properties
propertyIterator.remove();
}
}
Iterator<Property> associationIterator = allAssociations.iterator();
while (associationIterator.hasNext()) {
Property property = associationIterator.next();
if (CDAModelUtil.isCDAModel(property) && property.getLower() == 0) {
// include only required CDA class properties
associationIterator.remove();
}
}
/*
* Include only associations that are not redefined in a subclass.
* TODO There must be a better way... use UML property redefinition in model.
*/
List<Classifier> endTypes = new ArrayList<Classifier>();
for (Property property : allAssociations) {
endTypes.add((Classifier) property.getType());
}
for (int index = 0; index < allAssociations.size(); index++) {
Classifier classifier = endTypes.get(index);
boolean hasSubclass = false;
List<DirectedRelationship> specializations = classifier.getTargetDirectedRelationships(UMLPackage.Literals.GENERALIZATION);
for (DirectedRelationship relationship : specializations) {
Classifier specific = ((Generalization) relationship).getSpecific();
if (endTypes.contains(specific)) {
hasSubclass = true;
break;
}
}
if (!hasSubclass) {
allProperties.add(allAssociations.get(index));
}
}
// aggregate constraints
for (int i = allParents.size() - 1; i >= 0; i
Class parent = (Class) allParents.get(i);
if (!CDAModelUtil.isCDAModel(parent)) {
for (Constraint constraint : parent.getOwnedRules()) {
allConstraints.add(constraint);
}
}
}
writer.println("<ol id=\"aggregate\">");
// use i>0 to omit this class
for (int i = allParents.size() - 1; i > 0; i
Class parent = (Class) allParents.get(i);
if (!RIMModelUtil.isRIMModel(parent) && !CDAModelUtil.isCDAModel(parent)) {
String message = CDAModelUtil.computeGeneralizationConformanceMessage(parent, true, xrefSource);
if (message.length() > 0) {
writer.println("<li>" + message + "</li>");
}
}
}
for (Property property : allProperties) {
if (CDAModelUtil.isXMLAttribute(property)) {
allAttributes.add(property);
}
}
allProperties.removeAll(allAttributes);
Collections.sort(allAttributes, new NamedElementComparator());
// XML attributes
for (Property property : allAttributes) {
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
appendPropertyComments(writer, property);
writer.println("</li>");
}
// XML elements
for (Property property : allProperties) {
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
appendPropertyComments(writer, property);
writer.println("</li>");
}
for (Constraint constraint : allConstraints) {
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(constraint, true)
// + " " + modelPrefix(constraint)
+ "</li>");
}
// <ol> cannot be empty
if (allAttributes.isEmpty() && allProperties.isEmpty() && allConstraints.isEmpty()) {
writer.println("<li></li>");
}
writer.println("</ol>");
}
/**
 * Writes the DITA &lt;body&gt; for a template class: description,
 * conformance and aggregate rules, table view, XML example, and change log,
 * followed by the closing &lt;/topic&gt; tag.
 */
private void appendBody(PrintWriter writer, Class umlClass) {
    writer.println("<body>");
    appendClassDocumentation(writer, umlClass);
    appendConformanceRules(writer, umlClass);
    appendAggregateRules(writer, umlClass);
    appendTable(writer, umlClass);
    appendExample(writer, umlClass);
    appendChanges(writer, umlClass);
    writer.println("<p><ph id=\"classformalname\">" + UMLUtil.splitName(umlClass) + "</ph></p>");
    Class cdaClass = CDAModelUtil.getCDAClass(umlClass);
    if (cdaClass != null) {
        // the class name is only used inside this guard, so the former
        // "MISSING_CDA_CLASS" fallback branch was unreachable dead code
        writer.print("<p id=\"shortdesc\">");
        if (!umlClass.equals(cdaClass)) {
            writer.print("[" + cdaClass.getName() + ": templateId <tt>" + CDAModelUtil.getTemplateId(umlClass) + "</tt>]");
        }
        writer.println("</p>");
    }
    writer.println("</body>");
    writer.println("</topic>");
}
/**
 * Emits each owned comment of the class inside the "description" section;
 * comment bodies that are not already paragraph-formatted are wrapped in
 * a &lt;p&gt; element.
 */
private void appendClassDocumentation(PrintWriter writer, Class umlClass) {
    writer.println("<section id=\"description\">");
    // TODO if blank line, wrap before and after contents in <p>
    for (Comment comment : umlClass.getOwnedComments()) {
        String text = CDAModelUtil.fixNonXMLCharacters(comment.getBody().trim());
        writer.println(text.startsWith("<p>") ? text : "<p>" + text + "</p>");
    }
    writer.println("</section>");
}
/**
 * Writes the "conformance" ordered list for a template class:
 * generalization conformance messages (excluding RIM/CDA base models),
 * XML-attribute property rules, XML-element property rules, and finally any
 * constraints not attached to a specific property. An empty placeholder
 * &lt;li&gt; is emitted when no rule was written, since DITA forbids an
 * empty &lt;ol&gt;.
 */
private void appendConformanceRules(PrintWriter writer, Class umlClass) {
writer.println("<ol id=\"conformance\">");
boolean hasRules = false;
// generalizations to non-RIM, non-CDA classifiers contribute a rule each
for (Generalization generalization : umlClass.getGeneralizations()) {
Classifier general = generalization.getGeneral();
if (!RIMModelUtil.isRIMModel(general) && !CDAModelUtil.isCDAModel(general)) {
String message = CDAModelUtil.computeConformanceMessage(generalization, true);
if (message.length() > 0) {
hasRules = true;
writer.println("<li>" + message + "</li>");
}
}
}
// categorize constraints by constrainedElement name
List<Constraint> unprocessedConstraints = new ArrayList<Constraint>();
// propertyName -> constraints
Map<String, List<Constraint>> constraintMap = new HashMap<String, List<Constraint>>();
// constraint -> sub-constraints
Map<Constraint, List<Constraint>> subConstraintMap = new HashMap<Constraint, List<Constraint>>();
for (Constraint constraint : umlClass.getOwnedRules()) {
unprocessedConstraints.add(constraint);
for (Element element : constraint.getConstrainedElements()) {
if (element instanceof Property) {
String name = ((Property) element).getName();
List<Constraint> rules = constraintMap.get(name);
if (rules == null) {
rules = new ArrayList<Constraint>();
constraintMap.put(name, rules);
}
rules.add(constraint);
} else if (element instanceof Constraint) {
Constraint subConstraint = (Constraint) element;
List<Constraint> rules = subConstraintMap.get(subConstraint);
if (rules == null) {
rules = new ArrayList<Constraint>();
subConstraintMap.put(subConstraint, rules);
}
rules.add(constraint);
}
}
}
// split owned attributes into XML attributes and XML elements
List<Property> allProperties = new ArrayList<Property>(umlClass.getOwnedAttributes());
List<Property> allAttributes = new ArrayList<Property>();
for (Property property : allProperties) {
if (CDAModelUtil.isXMLAttribute(property)) {
allAttributes.add(property);
}
}
allProperties.removeAll(allAttributes);
Collections.sort(allAttributes, new NamedElementComparator());
// XML attributes
for (Property property : allAttributes) {
hasRules = true;
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
appendPropertyComments(writer, property);
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
writer.println("</li>");
}
// XML elements
for (Property property : allProperties) {
hasRules = true;
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
appendPropertyComments(writer, property);
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
writer.println("</li>");
}
// constraints that appendPropertyRules did not claim for a property
for (Constraint constraint : unprocessedConstraints) {
hasRules = true;
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(constraint, true) + "</li>");
}
if (!hasRules) {
writer.println("<li></li>");
}
writer.println("</ol>");
}
/**
 * Emits a generated XML instance of the class inside a CDATA code block,
 * or a TODO placeholder when no instance generator is available.
 */
private void appendExample(PrintWriter writer, Class umlClass) {
    writer.print("<codeblock id=\"example\"><![CDATA[");
    if (instanceGenerator == null) {
        writer.print("TODO: XML document snippet");
    } else {
        EObject instance = instanceGenerator.createInstance(umlClass, 4);
        if (instance != null) {
            instanceGenerator.save(instance, writer);
        }
    }
    writer.println("]]></codeblock>");
}
/**
 * Writes the XML prolog, DOCTYPE, topic element, title, and — when the UML
 * class maps to a CDA class — the shortdesc/prolog metadata carrying the
 * CDA class name and templateId.
 */
private void appendHeader(PrintWriter writer, Class umlClass) {
writer.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
writer.println("<!DOCTYPE topic PUBLIC \"-//OASIS//DTD DITA Topic//EN\" \"topic.dtd\">");
writer.println("<topic id=\"classId\" xml:lang=\"en-us\">");
writer.print("<title>");
writer.print(UMLUtil.splitName(umlClass));
writer.print(" - conformance rules");
writer.println("</title>");
Class cdaClass = CDAModelUtil.getCDAClass(umlClass);
String cdaClassName = cdaClass != null
? cdaClass.getName()
: "MISSING_CDA_CLASS";
// shortdesc and prolog are only produced when a CDA base class is resolved
if (cdaClass != null) {
writer.print("<shortdesc id=\"shortdesc\">");
if (!umlClass.equals(cdaClass)) {
writer.print("[" + cdaClassName + ": templateId <tt>" + CDAModelUtil.getTemplateId(umlClass) + "</tt>]");
}
writer.println("</shortdesc>");
writer.println("<prolog id=\"prolog\">");
writer.println("<metadata><category>" + cdaClassName + "</category></metadata>");
writer.println("<resourceid id=\"" + CDAModelUtil.getTemplateId(umlClass) + "\"/>");
writer.println("</prolog>");
}
}
/**
 * Emits the owned comments of the property's association (if any) followed
 * by the property's own comments, each non-empty group as a &lt;ul&gt;.
 */
private void appendPropertyComments(PrintWriter writer, Property property) {
    Association association = property.getAssociation();
    if (association != null) {
        appendCommentList(writer, association.getOwnedComments());
    }
    appendCommentList(writer, property.getOwnedComments());
}

// Writes the given comments as a <ul> of <li> items; writes nothing when
// the list is empty. Shared by the association and property branches above.
private void appendCommentList(PrintWriter writer, List<Comment> comments) {
    if (comments.size() > 0) {
        writer.append("<ul>");
        for (Comment comment : comments) {
            writer.append("<li>");
            writer.append(CDAModelUtil.fixNonXMLCharacters(comment.getBody()));
            writer.append("</li>");
        }
        writer.append("</ul>");
    }
}
/**
 * Appends the rules that apply to a single property: association
 * typeCode/type constraints plus any named constraints registered for the
 * property in constraintMap. Every constraint emitted here is removed from
 * unprocessedConstraints so the caller does not print it again.
 */
private void appendPropertyRules(PrintWriter writer, Property property,
        Map<String, List<Constraint>> constraintMap, Map<Constraint, List<Constraint>> subConstraintMap,
        List<Constraint> unprocessedConstraints) {
    // association typeCode and property type
    String assocConstraints = "";
    if (property.getAssociation() != null) {
        assocConstraints = CDAModelUtil.computeAssociationConstraints(property, true);
    }
    StringBuffer ruleConstraints = new StringBuffer();
    List<Constraint> rules = constraintMap.get(property.getName());
    if (rules != null && !rules.isEmpty()) {
        for (Constraint constraint : rules) {
            unprocessedConstraints.remove(constraint);
            ruleConstraints.append("\n<li>" + CDAModelUtil.computeConformanceMessage(constraint, true));
            // recursively expand sub-constraints nested under this one
            appendSubConstraintRules(ruleConstraints, constraint, subConstraintMap, unprocessedConstraints);
            ruleConstraints.append("</li>");
        }
    }
    if (assocConstraints.length() > 0 || ruleConstraints.length() > 0) {
        writer.append("<ol>");
        writer.append(assocConstraints);
        writer.append(ruleConstraints);
        writer.append("</ol>");
    }
}
/**
 * Recursively appends the sub-constraints of the given constraint as a
 * nested &lt;ol&gt;, removing each emitted sub-constraint from
 * unprocessedConstraints. No output is produced when the constraint has no
 * sub-constraints in subConstraintMap.
 */
private void appendSubConstraintRules(StringBuffer ruleConstraints, Constraint constraint,
Map<Constraint, List<Constraint>> subConstraintMap, List<Constraint> unprocessedConstraints) {
List<Constraint> subConstraints = subConstraintMap.get(constraint);
if (subConstraints != null && subConstraints.size() > 0) {
ruleConstraints.append("<ol>");
for (Constraint subConstraint : subConstraints) {
unprocessedConstraints.remove(subConstraint);
ruleConstraints.append("\n<li>" + CDAModelUtil.computeConformanceMessage(subConstraint, true));
// descend one level; recursion depth equals constraint nesting depth
appendSubConstraintRules(ruleConstraints, subConstraint, subConstraintMap, unprocessedConstraints);
ruleConstraints.append("</li>");
}
ruleConstraints.append("</ol>");
}
}
/**
 * UMLSwitch entry point: writes one DITA topic file per UML class under
 * {@code <output>/classes/generated/_<ClassName>.dita}. I/O failures are
 * logged and swallowed; the writer is always closed.
 *
 * @return the class itself, signalling to the switch that it was handled
 */
@Override
public Object caseClass(Class umlClass) {
String pathFolder = "classes";
IPath filePath = transformerOptions.getOutputPath().append(pathFolder).addTrailingSeparator().append(
"generated").addTrailingSeparator().append("_" + umlClass.getName()).addFileExtension("dita");
File file = filePath.toFile();
PrintWriter writer = null;
try {
// createNewFile is a no-op if the file already exists; it is then overwritten
file.createNewFile();
writer = new PrintWriter(file);
appendHeader(writer, umlClass);
appendBody(writer, umlClass);
} catch (FileNotFoundException e) {
Logger.logException(e);
} catch (IOException e1) {
Logger.logException(e1);
} finally {
if (writer != null) {
writer.close();
}
}
return umlClass;
}
/**
 * Returns the index of the first property with the given name, or -1 when
 * the name is null or not present.
 */
private int findProperty(List<Property> properties, String name) {
    if (name == null) {
        return -1;
    }
    int index = 0;
    for (Property property : properties) {
        if (name.equals(property.getName())) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
 * Returns "[prefix] " for the element's model prefix, or the empty string
 * when the element has no non-empty model prefix. (Simplified: a
 * StringBuffer is unnecessary for this fixed three-part concatenation.)
 */
private String modelPrefix(NamedElement element) {
    String modelPrefix = CDAModelUtil.getModelPrefix(element);
    if (modelPrefix != null && modelPrefix.length() > 0) {
        return "[" + modelPrefix + "] ";
    }
    return "";
}
/**
 * Emits the tabular view of the class, or a TODO placeholder when no table
 * generator is available.
 */
private void appendTable(PrintWriter writer, Class umlClass) {
    if (tableGenerator == null) {
        writer.print("TODO: Table Representation");
    } else {
        writer.println(tableGenerator.createTable(umlClass));
    }
}
/**
 * Writes a "changes" section containing a change log against every Class
 * supplier of each substitution declared on the given class.
 * (Removed a stray empty statement after the opening println.)
 */
private void appendChanges(PrintWriter writer, Class umlClass) {
    writer.println("<section id=\"changes\">");
    for (Substitution substitution : umlClass.getSubstitutions()) {
        for (NamedElement supplier : substitution.getSuppliers()) {
            if (supplier instanceof Class) {
                appendChangeLog(writer, umlClass, (Class) supplier);
            }
        }
    }
    writer.println("</section>");
}
/**
 * Walks the containment tree of the class and prints a conformance message
 * for every class, generalization, property, and constraint encountered.
 * Association subtrees are pruned so their contents are not traversed.
 * Empty messages are suppressed except for classes, matching the original
 * behavior.
 */
private static void composeAllConformanceMessages(Class element, final PrintWriter writer, final boolean markup) {
    final TreeIterator<EObject> iterator = EcoreUtil.getAllContents(Collections.singletonList(element));
    if (iterator == null) {
        return;
    }
    // the switch holds no per-element state, so build it once instead of
    // allocating a new instance on every loop iteration
    UMLSwitch<Object> umlSwitch = new UMLSwitch<Object>() {
        @Override
        public Object caseAssociation(Association association) {
            // skip the association's contents
            iterator.prune();
            return association;
        }

        @Override
        public Object caseClass(Class umlClass) {
            String message = CDAModelUtil.computeConformanceMessage(umlClass, markup);
            writer.println(message);
            return umlClass;
        }

        @Override
        public Object caseGeneralization(Generalization generalization) {
            String message = CDAModelUtil.computeConformanceMessage(generalization, markup);
            if (message.length() > 0) {
                writer.println(message);
            }
            return generalization;
        }

        @Override
        public Object caseProperty(Property property) {
            String message = CDAModelUtil.computeConformanceMessage(property, markup);
            if (message.length() > 0) {
                writer.println(message);
            }
            return property;
        }

        @Override
        public Object caseConstraint(Constraint constraint) {
            String message = CDAModelUtil.computeConformanceMessage(constraint, markup);
            if (message.length() > 0) {
                writer.println(message);
            }
            return constraint;
        }
    };
    while (iterator.hasNext()) {
        umlSwitch.doSwitch(iterator.next());
    }
}
class LineComparator implements IRangeComparator {
private String[] fLines;
public LineComparator(InputStream is) throws IOException {
BufferedReader br = new BufferedReader(new InputStreamReader(is));
String line;
ArrayList ar = new ArrayList();
while ((line = br.readLine()) != null) {
ar.add(line);
}
// It is the responsibility of the caller to close the stream
fLines = (String[]) ar.toArray(new String[ar.size()]);
}
String getLine(int ix) {
return fLines[ix];
}
/*
* (non-Javadoc)
*
* @see org.eclipse.compare.rangedifferencer.IRangeComparator#getRangeCount()
*/
public int getRangeCount() {
return fLines.length;
}
/*
* (non-Javadoc)
*
* @see org.eclipse.compare.rangedifferencer.IRangeComparator#rangesEqual(int, org.eclipse.compare.rangedifferencer.IRangeComparator, int)
*/
public boolean rangesEqual(int thisIndex, IRangeComparator other, int otherIndex) {
String s1 = fLines[thisIndex];
String s2 = ((LineComparator) other).fLines[otherIndex];
return s1.equals(s2);
}
/*
* (non-Javadoc)
*
* @see org.eclipse.compare.rangedifferencer.IRangeComparator#skipRangeComparison(int, int,
* org.eclipse.compare.rangedifferencer.IRangeComparator)
*/
public boolean skipRangeComparison(int length, int maxLength, IRangeComparator other) {
return false;
}
}
/**
 * Diffs two conformance-message streams line by line and writes the lines
 * present only in {@code other} under a "Modifications" heading and the
 * lines present only in {@code target} under an "Additions" heading, each
 * as HTML list items.
 * Removed an unused line-separator computation (dead code) and merged the
 * redundant UnsupportedEncodingException catch into the IOException catch
 * (it is a subclass and the handling was identical).
 *
 * @return {@link Status#OK_STATUS} on success, or null when a stream could
 *         not be read
 */
public IStatus appendChanges(PrintWriter writer, InputStream target, InputStream other) {
    IProgressMonitor monitor = null; // no progress reporting needed
    LineComparator t, o;
    try {
        t = new LineComparator(target);
        o = new LineComparator(other);
    } catch (IOException e) {
        return null;
    }
    RangeDifference[] diffs = RangeDifferencer.findRanges(monitor, t, o);
    writer.append("<li>");
    writer.println("<b>Modifications</b>");
    writer.append("</li>");
    for (RangeDifference rd : diffs) {
        if (rd.kind() == RangeDifference.RIGHT) {
            for (int j = rd.rightStart(); j < rd.rightEnd(); j++) {
                writer.append("<li>");
                writer.println(o.getLine(j));
                writer.append("</li>");
            }
        }
    }
    writer.append("<li>");
    writer.println("<b>Additions</b>");
    writer.append("</li>");
    for (RangeDifference rd : diffs) {
        if (rd.kind() == RangeDifference.LEFT) {
            for (int j = rd.leftStart(); j < rd.leftEnd(); j++) {
                writer.append("<li>");
                writer.println(t.getLine(j));
                writer.append("</li>");
            }
        }
    }
    return Status.OK_STATUS;
}
/**
 * Writes the change log of {@code source} relative to the class it
 * substitutes: both classes' conformance messages are rendered to memory
 * buffers and diffed line by line.
 */
void appendChangeLog(PrintWriter writer, Class source, Class substitute) {
    writer.println("<p>");
    writer.println("Change Log from " + CDAModelUtil.getModelPrefix(substitute) + "::" + substitute.getName());
    writer.println("</p>");
    writer.println("<p id=\"" + substitute.getName() + "\" >");
    writer.append("<ul>");
    // render both classes' conformance messages to in-memory buffers
    StringWriter sourceBuffer = new StringWriter();
    PrintWriter sourcePrinter = new PrintWriter(sourceBuffer);
    composeAllConformanceMessages(source, sourcePrinter, false);
    StringWriter substituteBuffer = new StringWriter();
    PrintWriter substitutePrinter = new PrintWriter(substituteBuffer);
    composeAllConformanceMessages(substitute, substitutePrinter, false);
    // feed the buffered text back through the stream-based diff writer
    InputStream sourceStream = new ByteArrayInputStream(sourceBuffer.getBuffer().toString().getBytes());
    InputStream substituteStream = new ByteArrayInputStream(substituteBuffer.getBuffer().toString().getBytes());
    appendChanges(writer, sourceStream, substituteStream);
    writer.append("</ul>");
    writer.println("</p>");
}
} |
package org.openhealthtools.mdht.uml.cda.dita;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.eclipse.compare.rangedifferencer.IRangeComparator;
import org.eclipse.compare.rangedifferencer.RangeDifference;
import org.eclipse.compare.rangedifferencer.RangeDifferencer;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.uml2.uml.Association;
import org.eclipse.uml2.uml.Class;
import org.eclipse.uml2.uml.Classifier;
import org.eclipse.uml2.uml.Comment;
import org.eclipse.uml2.uml.Constraint;
import org.eclipse.uml2.uml.DirectedRelationship;
import org.eclipse.uml2.uml.Element;
import org.eclipse.uml2.uml.Generalization;
import org.eclipse.uml2.uml.NamedElement;
import org.eclipse.uml2.uml.Package;
import org.eclipse.uml2.uml.Property;
import org.eclipse.uml2.uml.Substitution;
import org.eclipse.uml2.uml.UMLPackage;
import org.eclipse.uml2.uml.util.UMLSwitch;
import org.openhealthtools.mdht.uml.cda.core.profile.LogicalConstraint;
import org.openhealthtools.mdht.uml.cda.core.util.CDAModelUtil;
import org.openhealthtools.mdht.uml.cda.core.util.CDAProfileUtil;
import org.openhealthtools.mdht.uml.cda.core.util.InstanceGenerator;
import org.openhealthtools.mdht.uml.cda.core.util.RIMModelUtil;
import org.openhealthtools.mdht.uml.cda.dita.internal.Logger;
import org.openhealthtools.mdht.uml.common.util.NamedElementComparator;
import org.openhealthtools.mdht.uml.common.util.UMLUtil;
public class TransformClassContent extends TransformAbstract {
private InstanceGenerator instanceGenerator;
private TableGenerator tableGenerator;
/**
 * Creates the transformer. The XML instance generator and table generator
 * are only instantiated when the CDA runtime bundle is installed in the
 * Eclipse platform; otherwise both remain null and placeholders are emitted.
 */
public TransformClassContent(DitaTransformerOptions options) {
super(options);
if (Platform.getBundle("org.openhealthtools.mdht.uml.cda") != null) {
instanceGenerator = new InstanceGenerator();
tableGenerator = new TableGenerator();
}
}
private void appendAggregateRules(PrintWriter writer, Class umlClass) {
Package xrefSource = UMLUtil.getTopPackage(umlClass);
List<Classifier> allParents = new ArrayList<Classifier>(umlClass.allParents());
allParents.add(0, umlClass);
List<Property> allProperties = new ArrayList<Property>();
List<Property> allAssociations = new ArrayList<Property>();
List<Property> allAttributes = new ArrayList<Property>();
List<Constraint> allConstraints = new ArrayList<Constraint>();
// categorize constraints by constrainedElement name
List<Constraint> unprocessedConstraints = new ArrayList<Constraint>();
// propertyName -> constraints
Map<String, List<Constraint>> constraintMap = new HashMap<String, List<Constraint>>();
// constraint -> sub-constraints
Map<Constraint, List<Constraint>> subConstraintMap = new HashMap<Constraint, List<Constraint>>();
for (Constraint constraint : umlClass.getOwnedRules()) {
unprocessedConstraints.add(constraint);
for (Element element : constraint.getConstrainedElements()) {
if (element instanceof Property) {
String name = ((Property) element).getName();
List<Constraint> rules = constraintMap.get(name);
if (rules == null) {
rules = new ArrayList<Constraint>();
constraintMap.put(name, rules);
}
rules.add(constraint);
} else if (element instanceof Constraint) {
Constraint subConstraint = (Constraint) element;
List<Constraint> rules = subConstraintMap.get(subConstraint);
if (rules == null) {
rules = new ArrayList<Constraint>();
subConstraintMap.put(subConstraint, rules);
}
rules.add(constraint);
}
}
}
// process parents in reverse order, CDA base class first
for (int i = allParents.size() - 1; i >= 0; i
Class parent = (Class) allParents.get(i);
for (Property property : parent.getOwnedAttributes()) {
if (property.getAssociation() != null) {
allAssociations.add(property);
} else {
// if list contains this property name, replace it; else append
int index = findProperty(allProperties, property.getName());
if (index >= 0) {
allProperties.set(index, property);
} else {
allProperties.add(property);
}
}
}
}
Iterator<Property> propertyIterator = allProperties.iterator();
while (propertyIterator.hasNext()) {
Property property = propertyIterator.next();
if (CDAModelUtil.isCDAModel(property) && property.getLower() == 0) {
// include only required CDA class properties
propertyIterator.remove();
}
}
Iterator<Property> associationIterator = allAssociations.iterator();
while (associationIterator.hasNext()) {
Property property = associationIterator.next();
if (CDAModelUtil.isCDAModel(property) && property.getLower() == 0) {
// include only required CDA class properties
associationIterator.remove();
}
}
/*
* Include only associations that are not redefined in a subclass.
* TODO There must be a better way... use UML property redefinition in model.
*/
List<Classifier> endTypes = new ArrayList<Classifier>();
for (Property property : allAssociations) {
endTypes.add((Classifier) property.getType());
}
for (int index = 0; index < allAssociations.size(); index++) {
Classifier classifier = endTypes.get(index);
boolean hasSubclass = false;
List<DirectedRelationship> specializations = classifier.getTargetDirectedRelationships(UMLPackage.Literals.GENERALIZATION);
for (DirectedRelationship relationship : specializations) {
Classifier specific = ((Generalization) relationship).getSpecific();
if (endTypes.contains(specific)) {
hasSubclass = true;
break;
}
}
if (!hasSubclass) {
allProperties.add(allAssociations.get(index));
}
}
// aggregate constraints
for (int i = allParents.size() - 1; i >= 0; i
Class parent = (Class) allParents.get(i);
if (!CDAModelUtil.isCDAModel(parent)) {
for (Constraint constraint : parent.getOwnedRules()) {
allConstraints.add(constraint);
}
}
}
writer.println("<ol id=\"aggregate\">");
// use i>0 to omit this class
for (int i = allParents.size() - 1; i > 0; i
Class parent = (Class) allParents.get(i);
if (!RIMModelUtil.isRIMModel(parent) && !CDAModelUtil.isCDAModel(parent)) {
String message = CDAModelUtil.computeGeneralizationConformanceMessage(parent, true, xrefSource);
if (message.length() > 0) {
writer.println("<li>" + message + "</li>");
}
}
}
for (Property property : allProperties) {
if (CDAModelUtil.isXMLAttribute(property)) {
allAttributes.add(property);
}
}
allProperties.removeAll(allAttributes);
Collections.sort(allAttributes, new NamedElementComparator());
// XML attributes
for (Property property : allAttributes) {
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
appendPropertyComments(writer, property);
writer.println("</li>");
}
// XML elements
for (Property property : allProperties) {
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
if (!(property.getType().getOwner() instanceof Class)) {
// comments are output preceding inline classes
appendPropertyComments(writer, property);
}
writer.println("</li>");
}
for (Constraint constraint : allConstraints) {
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(constraint, true)
// + " " + modelPrefix(constraint)
+ "</li>");
}
// <ol> cannot be empty
if (allAttributes.isEmpty() && allProperties.isEmpty() && allConstraints.isEmpty()) {
writer.println("<li></li>");
}
writer.println("</ol>");
}
/**
 * Writes the DITA &lt;body&gt;: known subclasses, description, conformance
 * and aggregate rules, table view, XML example, and change log, followed by
 * the closing &lt;/topic&gt; tag.
 */
private void appendBody(PrintWriter writer, Class umlClass) {
    writer.println("<body>");
    appendKnownSubclasses(writer, umlClass);
    appendClassDocumentation(writer, umlClass);
    appendConformanceRules(writer, umlClass);
    appendAggregateRules(writer, umlClass);
    appendTable(writer, umlClass);
    appendExample(writer, umlClass);
    appendChanges(writer, umlClass);
    writer.println("<p><ph id=\"classformalname\">" + TransformAbstract.getPublicationName(umlClass) + "</ph></p>");
    Class cdaClass = CDAModelUtil.getCDAClass(umlClass);
    if (cdaClass != null) {
        // the class name is only used inside this guard, so the former
        // "MISSING_CDA_CLASS" fallback branch was unreachable dead code
        writer.print("<p id=\"shortdesc\">");
        if (!umlClass.equals(cdaClass)) {
            writer.print("[" + cdaClass.getName() + ": templateId <tt>" + CDAModelUtil.getTemplateId(umlClass) + "</tt>]");
        }
        writer.println("</p>");
    }
    writer.println("</body>");
    writer.println("</topic>");
}
/**
 * Lists the top-level Class specializations of this class as DITA
 * cross-references in a "knownSubclasses" section.
 * FIX: qualifying subclasses are collected first so the ", " separator is
 * only placed between entries that are actually written — the original
 * keyed the separator on the raw iterator and could emit a trailing
 * separator (or an empty "Known Subclasses:" paragraph) when trailing
 * candidates were filtered out.
 */
private void appendKnownSubclasses(PrintWriter writer, Class umlClass) {
    writer.println("<section id=\"knownSubclasses\">");
    List<Classifier> subclasses = UMLUtil.getSpecializations(umlClass);
    // keep only top-level Class specializations (inline classes are skipped)
    List<Classifier> shown = new ArrayList<Classifier>();
    for (Classifier subclass : subclasses) {
        if (subclass instanceof Class && !(subclass.getOwner() instanceof Class)) {
            shown.add(subclass);
        }
    }
    if (shown.size() > 0) {
        writer.print("<p>Known Subclasses: ");
        for (Iterator<Classifier> iterator = shown.iterator(); iterator.hasNext();) {
            Classifier subclass = iterator.next();
            Package xrefSource = UMLUtil.getTopPackage(subclass);
            String xref = CDAModelUtil.computeXref(xrefSource, subclass);
            // DITA requires an explicit format attribute for html targets
            String format = xref != null && xref.endsWith(".html")
                ? "format=\"html\" "
                : "";
            writer.append("<xref " + format + "href=\"" + xref + "\">");
            writer.append(TransformAbstract.getPublicationName(subclass));
            writer.append("</xref>");
            if (iterator.hasNext()) {
                writer.print(", ");
            }
        }
        writer.println("</p>");
    }
    writer.println("</section>");
}
/**
 * Emits each owned comment of the class inside the "description" section;
 * comment bodies that are not already paragraph-formatted are wrapped as
 * italic line blocks.
 */
private void appendClassDocumentation(PrintWriter writer, Class umlClass) {
    writer.println("<section id=\"description\">");
    // TODO if blank line, wrap before and after contents in <p>
    for (Comment comment : umlClass.getOwnedComments()) {
        String text = CDAModelUtil.fixNonXMLCharacters(comment.getBody().trim());
        writer.println(text.startsWith("<p>") ? text : "<p><lines><i>" + text + "</i></lines></p>");
    }
    writer.println("</section>");
}
/**
 * Writes the "conformance" ordered list for a template class: the class's
 * own templateId rule, generalization conformance messages (excluding
 * RIM/CDA base models), XML-attribute rules, XML-element rules, and any
 * constraints not attached to a specific property. Properties covered by a
 * logical constraint are excluded from the per-property output. An empty
 * placeholder &lt;li&gt; is emitted when no rule was written, since DITA
 * forbids an empty &lt;ol&gt;.
 */
private void appendConformanceRules(PrintWriter writer, Class umlClass) {
writer.println("<ol id=\"conformance\">");
// the class-level rule (templateId) comes first when one is defined
if (CDAModelUtil.getTemplateId(umlClass) != null) {
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(umlClass, true) + "</li>");
}
boolean hasRules = false;
for (Generalization generalization : umlClass.getGeneralizations()) {
Classifier general = generalization.getGeneral();
if (!RIMModelUtil.isRIMModel(general) && !CDAModelUtil.isCDAModel(general)) {
String message = CDAModelUtil.computeConformanceMessage(generalization, true);
if (message.length() > 0) {
hasRules = true;
writer.println("<li>" + message + "</li>");
}
}
}
// categorize constraints by constrainedElement name
List<Constraint> unprocessedConstraints = new ArrayList<Constraint>();
// propertyName -> constraints
Map<String, List<Constraint>> constraintMap = new HashMap<String, List<Constraint>>();
// constraint -> sub-constraints
Map<Constraint, List<Constraint>> subConstraintMap = new HashMap<Constraint, List<Constraint>>();
for (Constraint constraint : umlClass.getOwnedRules()) {
unprocessedConstraints.add(constraint);
for (Element element : constraint.getConstrainedElements()) {
if (element instanceof Property) {
String name = ((Property) element).getName();
List<Constraint> rules = constraintMap.get(name);
if (rules == null) {
rules = new ArrayList<Constraint>();
constraintMap.put(name, rules);
}
rules.add(constraint);
} else if (element instanceof Constraint) {
Constraint subConstraint = (Constraint) element;
List<Constraint> rules = subConstraintMap.get(subConstraint);
if (rules == null) {
rules = new ArrayList<Constraint>();
subConstraintMap.put(subConstraint, rules);
}
rules.add(constraint);
}
}
}
List<Property> allProperties = new ArrayList<Property>(umlClass.getOwnedAttributes());
List<Property> allAttributes = new ArrayList<Property>();
for (Property property : allProperties) {
if (CDAModelUtil.isXMLAttribute(property)) {
allAttributes.add(property);
}
// Check to see if the property is part of a logical constraint - if so do not create process as a property
}
for (Constraint constraint : umlClass.getOwnedRules()) {
LogicalConstraint logicConstraint = CDAProfileUtil.getLogicalConstraint(constraint);
if (logicConstraint != null) {
for (Element constrainedElement : constraint.getConstrainedElements()) {
if (constrainedElement instanceof Property) {
allProperties.remove(constrainedElement);
}
}
}
}
allProperties.removeAll(allAttributes);
Collections.sort(allAttributes, new NamedElementComparator());
// XML attributes
for (Property property : allAttributes) {
hasRules = true;
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
appendPropertyComments(writer, property);
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
writer.println("</li>");
}
// XML elements
for (Property property : allProperties) {
hasRules = true;
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(property, true));
if (!(property.getType().getOwner() instanceof Class)) {
// comments are output preceding inline classes
appendPropertyComments(writer, property);
}
appendPropertyRules(writer, property, constraintMap, subConstraintMap, unprocessedConstraints);
writer.println("</li>");
}
// constraints that appendPropertyRules did not claim for a property
for (Constraint constraint : unprocessedConstraints) {
hasRules = true;
writer.println("<li>" + CDAModelUtil.computeConformanceMessage(constraint, true) + "</li>");
}
if (!hasRules) {
writer.println("<li></li>");
}
writer.println("</ol>");
}
/**
 * Emits a generated XML example for the class inside a CDATA code block.
 * Falls back to a TODO placeholder when no instance generator is available,
 * or logs and emits an error marker when the runtime class for the UML
 * class is missing. (Removed a discarded isIncludeTableView() getter call —
 * its result was never used.)
 */
private void appendExample(PrintWriter writer, Class umlClass) {
    writer.print("<codeblock id=\"example\" outputclass=\"language-xml\"><![CDATA[");
    if (instanceGenerator != null) {
        int exampleDepth = transformerOptions.getExampleDepth();
        // default to a depth of 2 when no positive depth is configured
        EObject eObject = instanceGenerator.createInstance(umlClass, exampleDepth > 0
            ? exampleDepth
            : 2);
        if (eObject != null) {
            instanceGenerator.save(eObject, writer);
        } else {
            Logger.log(Logger.ERROR, "Error: Missing Runtime Class for UML Class " + umlClass.getQualifiedName());
            writer.print("Error: Missing Runtime Class");
        }
    } else {
        writer.print("TODO: XML document snippet");
    }
    writer.println("]]></codeblock>");
}
/**
 * Writes the XML prolog, DOCTYPE, topic element, and title. The shortdesc
 * always appears (marking abstract classes), but the CDA class name and
 * templateId details are only included when a CDA base class is resolved.
 */
private void appendHeader(PrintWriter writer, Class umlClass) {
writer.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
writer.println("<!DOCTYPE topic PUBLIC \"-//OASIS//DTD DITA Topic//EN\" \"topic.dtd\">");
writer.println("<topic id=\"classId\" xml:lang=\"en-us\">");
writer.print("<title>");
writer.print(TransformAbstract.getPublicationName(umlClass));
writer.print(" - conformance rules");
writer.println("</title>");
Class cdaClass = CDAModelUtil.getCDAClass(umlClass);
// namespace prefix (e.g. extension namespace) shown before the class name
String prefix = getExtensionName(cdaClass);
String cdaClassName = cdaClass != null
? cdaClass.getName()
: "MISSING_CDA_CLASS";
writer.print("<shortdesc id=\"shortdesc\">");
if (umlClass.isAbstract()) {
writer.print("<i>Abstract</i> ");
}
if (cdaClass != null && !umlClass.equals(cdaClass)) {
writer.print("[" + prefix + cdaClassName + ": templateId <tt>" + CDAModelUtil.getTemplateId(umlClass) +
"</tt>]");
}
writer.println("</shortdesc>");
writer.println("<prolog id=\"prolog\">");
if (cdaClass != null) {
writer.println("<metadata><category>" + cdaClassName + "</category></metadata>");
writer.println("<resourceid id=\"" + CDAModelUtil.getTemplateId(umlClass) + "\"/>");
}
writer.println("</prolog>");
}
/**
 * Returns the CDA class's namespace prefix followed by ":", or the empty
 * string when the class has no namespace prefix.
 */
private String getExtensionName(Class cdaClass) {
    String prefix = CDAModelUtil.getNameSpacePrefix(cdaClass);
    if (prefix != null && !prefix.isEmpty()) {
        return prefix + ":";
    }
    return "";
}
/**
 * Emits the owned comments of the property's association (if any) followed
 * by the property's own comments, each non-empty group as an &lt;ol&gt; of
 * italic line blocks.
 */
private void appendPropertyComments(PrintWriter writer, Property property) {
    Association association = property.getAssociation();
    if (association != null) {
        appendCommentList(writer, association.getOwnedComments());
    }
    appendCommentList(writer, property.getOwnedComments());
}

// Writes the given comments as an <ol> of italic line blocks; writes
// nothing when the list is empty. Shared by both branches above.
private void appendCommentList(PrintWriter writer, List<Comment> comments) {
    if (comments.size() > 0) {
        writer.append("<ol>");
        for (Comment comment : comments) {
            writer.append("<li><p><lines><i>");
            writer.append(CDAModelUtil.fixNonXMLCharacters(comment.getBody()));
            writer.append("</i></lines></p></li>");
        }
        writer.append("</ol>");
    }
}
/**
 * Appends the conformance rules for one property: association/type constraints
 * first, then the named constraints registered for the property in
 * {@code constraintMap}, recursing into sub-constraints. Every constraint that is
 * emitted here is removed from {@code unprocessedConstraints}. Nothing is written
 * when the property has no constraints at all.
 */
private void appendPropertyRules(PrintWriter writer, Property property,
		Map<String, List<Constraint>> constraintMap, Map<Constraint, List<Constraint>> subConstraintMap,
		List<Constraint> unprocessedConstraints) {
	// association typeCode and property type
	String assocConstraints = "";
	if (property.getAssociation() != null) {
		assocConstraints = CDAModelUtil.computeAssociationConstraints(property, true);
	}
	StringBuffer ruleConstraints = new StringBuffer();
	List<Constraint> rules = constraintMap.get(property.getName());
	if (rules != null && !rules.isEmpty()) {
		for (Constraint constraint : rules) {
			unprocessedConstraints.remove(constraint);
			ruleConstraints.append("\n<li>" + CDAModelUtil.computeConformanceMessage(constraint, true));
			// sub-constraints are rendered recursively as nested <ol> lists
			appendSubConstraintRules(ruleConstraints, constraint, subConstraintMap, unprocessedConstraints);
			ruleConstraints.append("</li>");
		}
	}
	if (assocConstraints.length() > 0 || ruleConstraints.length() > 0) {
		writer.append("<ol>");
		writer.append(assocConstraints);
		writer.append(ruleConstraints);
		writer.append("</ol>");
	}
}
/**
 * Recursively appends the sub-constraints of {@code constraint} as a nested
 * &lt;ol&gt; list, removing each emitted constraint from
 * {@code unprocessedConstraints}. No output when there are no sub-constraints.
 */
private void appendSubConstraintRules(StringBuffer ruleConstraints, Constraint constraint,
		Map<Constraint, List<Constraint>> subConstraintMap, List<Constraint> unprocessedConstraints) {
	List<Constraint> children = subConstraintMap.get(constraint);
	if (children == null || children.isEmpty()) {
		return;
	}
	ruleConstraints.append("<ol>");
	for (Constraint child : children) {
		unprocessedConstraints.remove(child);
		ruleConstraints.append("\n<li>" + CDAModelUtil.computeConformanceMessage(child, true));
		appendSubConstraintRules(ruleConstraints, child, subConstraintMap, unprocessedConstraints);
		ruleConstraints.append("</li>");
	}
	ruleConstraints.append("</ol>");
}
/**
 * Generates the DITA topic file for the given UML class under
 * {@code <output>/classes/generated/_<normalizedName>.dita}. I/O failures are
 * logged and swallowed so the transform continues with the next class.
 */
@Override
public Object caseClass(Class umlClass) {
	String normalizedClassName = normalizeCodeName(umlClass.getName());
	String pathFolder = "classes";
	IPath filePath = transformerOptions.getOutputPath().append(pathFolder).addTrailingSeparator().append(
		"generated").addTrailingSeparator().append("_" + normalizedClassName).addFileExtension("dita");
	File file = filePath.toFile();
	PrintWriter writer = null;
	try {
		file.createNewFile();
		writer = new PrintWriter(file);
		appendHeader(writer, umlClass);
		appendBody(writer, umlClass);
	} catch (IOException e) {
		// FileNotFoundException is a subclass of IOException; one catch now covers
		// both cases with the same logging the two separate handlers performed.
		Logger.logException(e);
	} finally {
		if (writer != null) {
			writer.close();
		}
	}
	return umlClass;
}
/**
 * Returns the index of the first property whose name equals {@code name}, or -1
 * when {@code name} is null or no property matches.
 */
private int findProperty(List<Property> properties, String name) {
	if (name == null) {
		return -1;
	}
	int index = 0;
	for (Property candidate : properties) {
		if (name.equals(candidate.getName())) {
			return index;
		}
		index++;
	}
	return -1;
}
/**
 * Returns {@code "[prefix] "} for the element's model prefix, or the empty string
 * when the element has no model prefix.
 */
private String modelPrefix(NamedElement element) {
	String modelPrefix = CDAModelUtil.getModelPrefix(element);
	if (modelPrefix == null || modelPrefix.length() == 0) {
		return "";
	}
	return "[" + modelPrefix + "] ";
}
/**
 * Prints the two generated attribute tables for the class, when a table generator
 * is configured and produces non-empty output. Does nothing otherwise.
 */
private void appendTable(PrintWriter writer, Class umlClass) {
	if (tableGenerator == null) {
		return;
	}
	String first = tableGenerator.createTable(umlClass);
	if (first != null && first.length() > 0) {
		writer.println(first);
	}
	String second = tableGenerator.createTable2(umlClass);
	if (second != null && second.length() > 0) {
		writer.println(second);
	}
}
/**
 * Writes the "changes" section: one change log per {@link Class} supplier of each
 * substitution declared on the given class. (Also removes a stray empty statement
 * that followed the opening println.)
 */
private void appendChanges(PrintWriter writer, Class umlClass) {
	writer.println("<section id=\"changes\">");
	for (Substitution substitution : umlClass.getSubstitutions()) {
		for (NamedElement ne : substitution.getSuppliers()) {
			if (ne instanceof Class) {
				appendChangeLog(writer, umlClass, (Class) ne);
			}
		}
	}
	writer.println("</section>");
}
/**
 * Walks the containment tree of the given class and prints a conformance message
 * for every class, generalization, property, and constraint encountered (empty
 * messages are suppressed). Associations are pruned so their contents are not
 * traversed.
 */
private static void composeAllConformanceMessages(Class element, final PrintWriter writer, final boolean markup) {
	final TreeIterator<EObject> iterator = EcoreUtil.getAllContents(Collections.singletonList(element));
	// The switch is loop-invariant: allocate it once instead of once per visited
	// object, as the previous implementation did.
	UMLSwitch<Object> umlSwitch = new UMLSwitch<Object>() {
		@Override
		public Object caseAssociation(Association association) {
			// do not descend into association contents
			iterator.prune();
			return association;
		}

		@Override
		public Object caseClass(Class umlClass) {
			String message = CDAModelUtil.computeConformanceMessage(umlClass, markup);
			writer.println(message);
			return umlClass;
		}

		@Override
		public Object caseGeneralization(Generalization generalization) {
			String message = CDAModelUtil.computeConformanceMessage(generalization, markup);
			if (message.length() > 0) {
				writer.println(message);
			}
			return generalization;
		}

		@Override
		public Object caseProperty(Property property) {
			String message = CDAModelUtil.computeConformanceMessage(property, markup);
			if (message.length() > 0) {
				writer.println(message);
			}
			return property;
		}

		@Override
		public Object caseConstraint(Constraint constraint) {
			String message = CDAModelUtil.computeConformanceMessage(constraint, markup);
			if (message.length() > 0) {
				writer.println(message);
			}
			return constraint;
		}
	};
	while (iterator != null && iterator.hasNext()) {
		umlSwitch.doSwitch(iterator.next());
	}
}
/**
 * Line-based {@link IRangeComparator} over the contents of an input stream. The
 * stream is consumed eagerly in the constructor; closing it remains the caller's
 * responsibility. NOTE(review): the reader uses the platform default charset —
 * both compared streams are read the same way, but confirm this is intended.
 */
class LineComparator implements IRangeComparator {

	private final String[] fLines;

	public LineComparator(InputStream is) throws IOException {
		BufferedReader br = new BufferedReader(new InputStreamReader(is));
		String line;
		// typed list instead of the previous raw ArrayList
		List<String> lines = new ArrayList<String>();
		while ((line = br.readLine()) != null) {
			lines.add(line);
		}
		// It is the responsibility of the caller to close the stream
		fLines = lines.toArray(new String[lines.size()]);
	}

	/** Returns line {@code ix} (0-based); throws if out of range. */
	String getLine(int ix) {
		return fLines[ix];
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.compare.rangedifferencer.IRangeComparator#getRangeCount()
	 */
	public int getRangeCount() {
		return fLines.length;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.compare.rangedifferencer.IRangeComparator#rangesEqual(int, org.eclipse.compare.rangedifferencer.IRangeComparator, int)
	 */
	public boolean rangesEqual(int thisIndex, IRangeComparator other, int otherIndex) {
		String s1 = fLines[thisIndex];
		String s2 = ((LineComparator) other).fLines[otherIndex];
		return s1.equals(s2);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.compare.rangedifferencer.IRangeComparator#skipRangeComparison(int, int,
	 * org.eclipse.compare.rangedifferencer.IRangeComparator)
	 */
	public boolean skipRangeComparison(int length, int maxLength, IRangeComparator other) {
		return false;
	}
}
/**
 * Computes the line-level differences between the two streams and writes a
 * "Modifications" section (lines present only in {@code other}) followed by an
 * "Additions" section (lines present only in {@code target}) as DITA list items.
 *
 * @return {@link Status#OK_STATUS} on success, or {@code null} when a stream
 *         could not be read. NOTE(review): callers currently ignore the return
 *         value; an error IStatus would be more informative than null.
 */
public IStatus appendChanges(PrintWriter writer, InputStream target, InputStream other) {
	IProgressMonitor monitor = null;
	LineComparator t, o;
	try {
		t = new LineComparator(target);
		o = new LineComparator(other);
	} catch (IOException e) {
		// UnsupportedEncodingException is an IOException and was handled identically;
		// a single catch now covers both. (Removed an unused line-separator lookup.)
		return null;
	}
	RangeDifference[] diffs = RangeDifferencer.findRanges(monitor, t, o);
	appendDiffSection(writer, "Modifications", diffs, RangeDifference.RIGHT, o);
	appendDiffSection(writer, "Additions", diffs, RangeDifference.LEFT, t);
	return Status.OK_STATUS;
}

/**
 * Writes a bold heading item, then each line of every range of the given kind as
 * its own list item. For RIGHT ranges the lines come from the right comparator,
 * for LEFT ranges from the left one; {@code lines} must match {@code kind}.
 */
private void appendDiffSection(PrintWriter writer, String heading, RangeDifference[] diffs, int kind,
		LineComparator lines) {
	writer.append("<li>");
	writer.println("<b>" + heading + "</b>");
	writer.append("</li>");
	for (RangeDifference rd : diffs) {
		if (rd.kind() != kind) {
			continue;
		}
		int start = (kind == RangeDifference.RIGHT)
				? rd.rightStart()
				: rd.leftStart();
		int end = (kind == RangeDifference.RIGHT)
				? rd.rightEnd()
				: rd.leftEnd();
		for (int j = start; j < end; j++) {
			writer.append("<li>");
			writer.println(lines.getLine(j));
			writer.append("</li>");
		}
	}
}
/**
 * Writes a change-log section comparing the conformance rules of {@code source}
 * against those of {@code substitute}: both rule sets are rendered to text and
 * diffed line by line via {@link #appendChanges(PrintWriter, InputStream, InputStream)}.
 */
void appendChangeLog(PrintWriter writer, Class source, Class substitute) {
	writer.println("<p>");
	writer.println("Change Log from " + CDAModelUtil.getModelPrefix(substitute) + "::" + substitute.getName());
	writer.println("</p>");
	writer.println("<p id=\"" + substitute.getName() + "\" >");
	writer.append("<ol>");
	StringWriter sourceBuffer = new StringWriter();
	composeAllConformanceMessages(source, new PrintWriter(sourceBuffer), false);
	StringWriter substituteBuffer = new StringWriter();
	composeAllConformanceMessages(substitute, new PrintWriter(substituteBuffer), false);
	// byte streams over the rendered rule text (platform default charset, as before)
	InputStream sourceStream = new ByteArrayInputStream(sourceBuffer.toString().getBytes());
	InputStream substituteStream = new ByteArrayInputStream(substituteBuffer.toString().getBytes());
	appendChanges(writer, sourceStream, substituteStream);
	writer.append("</ol>");
	writer.println("</p>");
}
} |
package som.interpreter.nodes;
import som.compiler.AccessModifier;
import som.compiler.ClassBuilder.ClassDefinitionId;
import som.interpreter.TruffleCompiler;
import som.interpreter.TypesGen;
import som.interpreter.nodes.dispatch.AbstractDispatchNode;
import som.interpreter.nodes.dispatch.DispatchChain.Cost;
import som.interpreter.nodes.dispatch.GenericDispatchNode;
import som.interpreter.nodes.dispatch.LexicallyBoundDispatchNode;
import som.interpreter.nodes.dispatch.SuperDispatchNode;
import som.interpreter.nodes.dispatch.UninitializedDispatchNode;
import som.interpreter.nodes.literals.BlockNode;
import som.interpreter.nodes.nary.EagerBinaryPrimitiveNode;
import som.interpreter.nodes.nary.EagerTernaryPrimitiveNode;
import som.interpreter.nodes.nary.EagerUnaryPrimitiveNode;
import som.interpreter.nodes.specialized.AndMessageNodeFactory;
import som.interpreter.nodes.specialized.AndMessageNodeFactory.AndBoolMessageNodeFactory;
import som.interpreter.nodes.specialized.IfFalseMessageNodeGen;
import som.interpreter.nodes.specialized.IfTrueIfFalseMessageNodeGen;
import som.interpreter.nodes.specialized.IfTrueMessageNodeGen;
import som.interpreter.nodes.specialized.IntDownToDoMessageNodeGen;
import som.interpreter.nodes.specialized.IntToByDoMessageNodeGen;
import som.interpreter.nodes.specialized.IntToDoMessageNodeGen;
import som.interpreter.nodes.specialized.NotMessageNodeFactory;
import som.interpreter.nodes.specialized.OrMessageNodeGen;
import som.interpreter.nodes.specialized.OrMessageNodeGen.OrBoolMessageNodeGen;
import som.interpreter.nodes.specialized.whileloops.WhileWithDynamicBlocksNode;
import som.interpreter.nodes.specialized.whileloops.WhileWithStaticBlocksNode.WhileFalseStaticBlocksNode;
import som.interpreter.nodes.specialized.whileloops.WhileWithStaticBlocksNode.WhileTrueStaticBlocksNode;
import som.primitives.BlockPrimsFactory.ValueNonePrimFactory;
import som.primitives.BlockPrimsFactory.ValueOnePrimFactory;
import som.primitives.DoublePrimsFactory.PositiveInfinityPrimFactory;
import som.primitives.DoublePrimsFactory.RoundPrimFactory;
import som.primitives.EqualsEqualsPrimFactory;
import som.primitives.EqualsPrimFactory;
import som.primitives.IntegerPrimsFactory.AbsPrimNodeGen;
import som.primitives.IntegerPrimsFactory.As32BitSignedValueFactory;
import som.primitives.IntegerPrimsFactory.As32BitUnsignedValueFactory;
import som.primitives.IntegerPrimsFactory.LeftShiftPrimFactory;
import som.primitives.IntegerPrimsFactory.MaxIntPrimNodeGen;
import som.primitives.IntegerPrimsFactory.ToPrimNodeGen;
import som.primitives.IntegerPrimsFactory.UnsignedRightShiftPrimFactory;
import som.primitives.MethodPrimsFactory.InvokeOnPrimFactory;
import som.primitives.SizeAndLengthPrimFactory;
import som.primitives.StringPrimsFactory.SubstringPrimFactory;
import som.primitives.UnequalsPrimFactory;
import som.primitives.arithmetic.AdditionPrimFactory;
import som.primitives.arithmetic.DividePrimFactory;
import som.primitives.arithmetic.DoubleDivPrimFactory;
import som.primitives.arithmetic.GreaterThanPrimFactory;
import som.primitives.arithmetic.LessThanOrEqualPrimFactory;
import som.primitives.arithmetic.LessThanPrimFactory;
import som.primitives.arithmetic.ModuloPrimFactory;
import som.primitives.arithmetic.MultiplicationPrimFactory;
import som.primitives.arithmetic.RemainderPrimFactory;
import som.primitives.arithmetic.SqrtPrimFactory;
import som.primitives.arithmetic.SubtractionPrimFactory;
import som.primitives.arrays.AtPrimFactory;
import som.primitives.arrays.AtPutPrimFactory;
import som.primitives.arrays.CopyPrimNodeGen;
import som.primitives.arrays.DoIndexesPrimFactory;
import som.primitives.arrays.DoPrimFactory;
import som.primitives.arrays.NewPrimFactory;
import som.primitives.arrays.PutAllNodeFactory;
import som.primitives.arrays.ToArgumentsArrayNodeGen;
import som.primitives.bitops.BitAndPrimFactory;
import som.primitives.bitops.BitXorPrimFactory;
import som.vm.NotYetImplementedException;
import som.vm.constants.Classes;
import som.vmobjects.SArray;
import som.vmobjects.SBlock;
import som.vmobjects.SSymbol;
import com.oracle.truffle.api.CompilerAsserts;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.nodes.NodeCost;
import com.oracle.truffle.api.source.SourceSection;
public final class MessageSendNode {
/**
 * Creates an unspecialized message-send node for the given selector and argument
 * expressions (receiver at index 0); it specializes itself on first execution.
 */
public static AbstractMessageSendNode createMessageSend(final SSymbol selector,
    final ExpressionNode[] arguments, final SourceSection source) {
  return new UninitializedMessageSendNode(selector, arguments, source);
}
/**
 * Creates a fresh uninitialized send that reuses {@code node}'s argument nodes and
 * source section but dispatches {@code newSelector} instead. The given node must
 * still be uninitialized.
 */
public static AbstractMessageSendNode adaptSymbol(final SSymbol newSelector,
    final AbstractMessageSendNode node) {
  assert node instanceof UninitializedMessageSendNode;
  return new UninitializedMessageSendNode(newSelector, node.argumentNodes,
      node.getSourceSection());
}
/**
 * Creates a send node for #perform:-style dispatch, where only the selector is
 * known; there are no argument expression nodes and no source section.
 */
public static AbstractMessageSendNode createForPerformNodes(final SSymbol selector) {
  return new UninitializedSymbolSendNode(selector, null, null);
}
/**
 * Creates an already-generic send node, skipping the specialization phase.
 * Special sends (super / lexically bound receivers) are not supported here.
 */
public static GenericMessageSendNode createGeneric(final SSymbol selector,
    final ExpressionNode[] argumentNodes, final SourceSection source) {
  if (argumentNodes[0] instanceof ISpecialSend) {
    throw new NotYetImplementedException();
  }
  return new GenericMessageSendNode(selector, argumentNodes,
      new UninitializedDispatchNode(selector, AccessModifier.PUBLIC), source);
}
/**
 * Base class for message sends: holds the argument expression nodes (receiver at
 * index 0) and evaluates them all before dispatching.
 */
public abstract static class AbstractMessageSendNode extends ExpressionNode
    implements PreevaluatedExpression {

  @Children protected final ExpressionNode[] argumentNodes;

  protected AbstractMessageSendNode(final ExpressionNode[] arguments,
      final SourceSection source) {
    super(source);
    this.argumentNodes = arguments;
  }

  /** True when the receiver expression is a super or lexically bound send. */
  public boolean isSpecialSend() {
    return argumentNodes[0] instanceof ISpecialSend;
  }

  @Override
  public final Object executeGeneric(final VirtualFrame frame) {
    Object[] arguments = evaluateArguments(frame);
    return doPreEvaluated(frame, arguments);
  }

  // @ExplodeLoop: argumentNodes has a fixed length per node instance, so Truffle
  // can unroll this loop during partial evaluation.
  @ExplodeLoop
  private Object[] evaluateArguments(final VirtualFrame frame) {
    Object[] arguments = new Object[argumentNodes.length];
    for (int i = 0; i < argumentNodes.length; i++) {
      arguments[i] = argumentNodes[i].executeGeneric(frame);
      assert arguments[i] != null;
    }
    return arguments;
  }
}
public abstract static class AbstractUninitializedMessageSendNode
extends AbstractMessageSendNode {
protected final SSymbol selector;
/**
 * @param selector  the message selector this send will dispatch
 * @param arguments receiver and argument expressions (receiver at index 0)
 * @param source    source section of the send expression
 */
protected AbstractUninitializedMessageSendNode(final SSymbol selector,
    final ExpressionNode[] arguments,
    final SourceSection source) {
  super(arguments, source);
  this.selector = selector;
}
/** Debugging representation, e.g. {@code UninitMsgSend(at:put:)}. */
@Override
public String toString() {
  return "UninitMsgSend(" + selector.toString() + ")";
}
/** Returns the selector this send will dispatch. */
public SSymbol getSelector() {
  return selector;
}
/**
 * Specializes this node based on the already-evaluated arguments, then delegates
 * execution to the replacement node.
 */
@Override
public final Object doPreEvaluated(final VirtualFrame frame,
    final Object[] arguments) {
  PreevaluatedExpression specialized = specialize(arguments);
  return specialized.doPreEvaluated(frame, arguments);
}
/**
 * Rewrites this uninitialized send into a specialized node chosen by arity and
 * runtime argument values; falls back to an ordinary send when no specialization
 * applies.
 */
private PreevaluatedExpression specialize(final Object[] arguments) {
  TruffleCompiler.transferToInterpreterAndInvalidate("Specialize Message Node");
  // Specializations are organized by number of arguments (receiver included);
  // perhaps not the best scheme, but simple.
  switch (argumentNodes.length) {
    case 1:
      return specializeUnary(arguments);
    case 2:
      return specializeBinary(arguments);
    case 3:
      return specializeTernary(arguments);
    case 4:
      return specializeQuaternary(arguments);
    default:
      return makeSend();
  }
}
/**
 * Creates the non-specialized replacement node: a special send (super / lexically
 * bound) when the receiver requires it, otherwise an ordinary generic send.
 */
protected PreevaluatedExpression makeSend() {
  // Super sends are treated specially because the receiver class is lexically
  // determined.
  if (isSpecialSend()) {
    return makeSpecialSend();
  }
  return makeOrdinarySend();
}

/** Creates the replacement node for a super or lexically bound send. */
protected abstract PreevaluatedExpression makeSpecialSend();

// Renamed from makeOrdenarySend (typo); it is private and only called from
// makeSend() above.
private GenericMessageSendNode makeOrdinarySend() {
  GenericMessageSendNode send = new GenericMessageSendNode(selector,
      argumentNodes,
      new UninitializedDispatchNode(selector, AccessModifier.PUBLIC),
      getSourceSection());
  return replace(send);
}
/**
 * Specializes 1-argument (receiver-only) sends into eager primitive nodes, keyed
 * by selector and guarded by the runtime receiver type. Falls back to an ordinary
 * send when no case matches.
 */
protected PreevaluatedExpression specializeUnary(final Object[] args) {
  Object receiver = args[0];
  switch (selector.getString()) {
    // eagerly but cautious:
    case "size":
      if (receiver instanceof SArray) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], SizeAndLengthPrimFactory.create(null)));
      }
      break;
    case "length":
      if (receiver instanceof String) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], SizeAndLengthPrimFactory.create(null)));
      }
      break;
    case "value":
      if (receiver instanceof SBlock || receiver instanceof Boolean) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], ValueNonePrimFactory.create(null)));
      }
      break;
    case "not":
      if (receiver instanceof Boolean) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], NotMessageNodeFactory.create(getSourceSection(), null)));
      }
      break;
    case "abs":
      if (receiver instanceof Long) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], AbsPrimNodeGen.create(null)));
      }
      break;
    case "copy":
      if (receiver instanceof SArray) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], CopyPrimNodeGen.create(null)));
      }
      break;
    case "PositiveInfinity":
      if (receiver == Classes.doubleClass) {
        // don't need to protect this with an eager wrapper
        return replace(PositiveInfinityPrimFactory.create(argumentNodes[0]));
      }
      break;
    case "round":
      if (receiver instanceof Double) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], RoundPrimFactory.create(null)));
      }
      break;
    case "as32BitSignedValue":
      if (receiver instanceof Integer) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], As32BitSignedValueFactory.create(null)));
      }
      break;
    case "as32BitUnsignedValue":
      if (receiver instanceof Integer) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], As32BitUnsignedValueFactory.create(null)));
      }
      break;
    case "sqrt":
      // last case: no break needed
      if (receiver instanceof Number) {
        return replace(new EagerUnaryPrimitiveNode(selector,
            argumentNodes[0], SqrtPrimFactory.create(null)));
      }
  }
  return makeSend();
}
protected PreevaluatedExpression specializeBinary(final Object[] arguments) {
switch (selector.getString()) {
case "at:":
if (arguments[0] instanceof SArray) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
AtPrimFactory.create(null, null)));
}
break;
case "new:":
if (arguments[0] == Classes.arrayClass) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
NewPrimFactory.create(null, null)));
}
break;
case "doIndexes:":
if (arguments[0] instanceof SArray) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
DoIndexesPrimFactory.create(null, null)));
}
break;
case "do:":
if (arguments[0] instanceof SArray) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
DoPrimFactory.create(null, null)));
}
break;
case "putAll:":
return replace(new EagerBinaryPrimitiveNode(selector,
argumentNodes[0], argumentNodes[1],
PutAllNodeFactory.create(null, null, SizeAndLengthPrimFactory.create(null))));
case "whileTrue:": {
if (argumentNodes[1] instanceof BlockNode &&
argumentNodes[0] instanceof BlockNode) {
BlockNode argBlockNode = (BlockNode) argumentNodes[1];
SBlock argBlock = (SBlock) arguments[1];
return replace(new WhileTrueStaticBlocksNode(
(BlockNode) argumentNodes[0], argBlockNode,
(SBlock) arguments[0],
argBlock, getSourceSection()));
}
break; // use normal send
}
case "whileFalse:":
if (argumentNodes[1] instanceof BlockNode &&
argumentNodes[0] instanceof BlockNode) {
BlockNode argBlockNode = (BlockNode) argumentNodes[1];
SBlock argBlock = (SBlock) arguments[1];
return replace(new WhileFalseStaticBlocksNode(
(BlockNode) argumentNodes[0], argBlockNode,
(SBlock) arguments[0], argBlock, getSourceSection()));
}
break; // use normal send
case "and:":
case "&&":
if (arguments[0] instanceof Boolean) {
if (argumentNodes[1] instanceof BlockNode) {
return replace(AndMessageNodeFactory.create((SBlock) arguments[1],
getSourceSection(), argumentNodes[0], argumentNodes[1]));
} else if (arguments[1] instanceof Boolean) {
return replace(AndBoolMessageNodeFactory.create(getSourceSection(),
argumentNodes[0], argumentNodes[1]));
}
}
break;
case "or:":
case "||":
if (arguments[0] instanceof Boolean) {
if (argumentNodes[1] instanceof BlockNode) {
return replace(OrMessageNodeGen.create((SBlock) arguments[1],
getSourceSection(),
argumentNodes[0], argumentNodes[1]));
} else if (arguments[1] instanceof Boolean) {
return replace(OrBoolMessageNodeGen.create(
getSourceSection(),
argumentNodes[0], argumentNodes[1]));
}
}
break;
case "value:":
if (arguments[0] instanceof SBlock) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
ValueOnePrimFactory.create(null, null)));
}
break;
case "ifTrue:":
return replace(IfTrueMessageNodeGen.create(arguments[0],
arguments[1], getSourceSection(),
argumentNodes[0], argumentNodes[1]));
case "ifFalse:":
return replace(IfFalseMessageNodeGen.create(arguments[0],
arguments[1], getSourceSection(),
argumentNodes[0], argumentNodes[1]));
case "to:":
if (arguments[0] instanceof Long) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
ToPrimNodeGen.create(null, null)));
}
break;
// TODO: find a better way for primitives, use annotation or something
case "<":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
LessThanPrimFactory.create(null, null)));
case "<=":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
LessThanOrEqualPrimFactory.create(null, null)));
case ">":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
GreaterThanPrimFactory.create(null, null)));
case "+":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
AdditionPrimFactory.create(null, null)));
case "-":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
SubtractionPrimFactory.create(null, null)));
case "*":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
MultiplicationPrimFactory.create(null, null)));
case "=":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
EqualsPrimFactory.create(null, null)));
case "<>":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
UnequalsPrimFactory.create(null, null)));
// TODO: this is not a correct primitive, new an UnequalsUnequalsPrim...
// case "~=":
// return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
// argumentNodes[1],
// UnequalsPrimFactory.create(null, null)));
case "==":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
EqualsEqualsPrimFactory.create(null, null)));
case "bitXor:":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
BitXorPrimFactory.create(null, null)));
case "
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
DoubleDivPrimFactory.create(null, null)));
case "%":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
ModuloPrimFactory.create(null, null)));
case "rem:":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
RemainderPrimFactory.create(null, null)));
case "/":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
DividePrimFactory.create(null, null)));
case "&":
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
BitAndPrimFactory.create(null, null)));
// eagerly but cautious:
case "<<":
if (arguments[0] instanceof Long) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
LeftShiftPrimFactory.create(null, null)));
}
break;
case ">>>":
if (arguments[0] instanceof Long) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
UnsignedRightShiftPrimFactory.create(null, null)));
}
break;
case "max:":
if (arguments[0] instanceof Long) {
return replace(new EagerBinaryPrimitiveNode(selector, argumentNodes[0],
argumentNodes[1],
MaxIntPrimNodeGen.create(null, null)));
}
break;
}
return makeSend();
}
/**
 * Specializes 3-argument sends (receiver + two arguments) into eager primitive or
 * loop nodes, keyed by selector and guarded by runtime argument types. Falls back
 * to an ordinary send when no case matches.
 */
protected PreevaluatedExpression specializeTernary(final Object[] arguments) {
  switch (selector.getString()) {
    case "at:put:":
      if (arguments[0] instanceof SArray) {
        return replace(new EagerTernaryPrimitiveNode(selector, argumentNodes[0],
            argumentNodes[1], argumentNodes[2],
            AtPutPrimFactory.create(null, null, null)));
      }
      break;
    case "ifTrue:ifFalse:":
      return replace(IfTrueIfFalseMessageNodeGen.create(arguments[0],
          arguments[1], arguments[2], argumentNodes[0],
          argumentNodes[1], argumentNodes[2]));
    case "to:do:":
      // integer lower bound, numeric upper bound, block body
      if (TypesGen.isLong(arguments[0]) &&
          (TypesGen.isLong(arguments[1]) ||
              TypesGen.isDouble(arguments[1])) &&
          TypesGen.isSBlock(arguments[2])) {
        return replace(IntToDoMessageNodeGen.create(this,
            (SBlock) arguments[2], argumentNodes[0], argumentNodes[1],
            argumentNodes[2]));
      }
      break;
    case "downTo:do:":
      if (TypesGen.isLong(arguments[0]) &&
          (TypesGen.isLong(arguments[1]) ||
              TypesGen.isDouble(arguments[1])) &&
          TypesGen.isSBlock(arguments[2])) {
        return replace(IntDownToDoMessageNodeGen.create(this,
            (SBlock) arguments[2], argumentNodes[0], argumentNodes[1],
            argumentNodes[2]));
      }
      break;
    case "substringFrom:to:":
      if (arguments[0] instanceof String) {
        return replace(new EagerTernaryPrimitiveNode(selector,
            argumentNodes[0], argumentNodes[1], argumentNodes[2],
            SubstringPrimFactory.create(null, null, null)));
      }
      break;
    case "invokeOn:with:":
      return replace(InvokeOnPrimFactory.create(
          argumentNodes[0], argumentNodes[1], argumentNodes[2],
          ToArgumentsArrayNodeGen.create(null, null)));
  }
  return makeSend();
}
/**
 * Specializes 4-argument sends. Currently only to:by:do: loops with a block body
 * are handled; everything else becomes an ordinary send.
 */
protected PreevaluatedExpression specializeQuaternary(
    final Object[] arguments) {
  switch (selector.getString()) {
    case "to:by:do:":
      // Guard the cast the same way to:do:/downTo:do: do, instead of risking a
      // ClassCastException when the loop body is not a block; non-blocks now fall
      // through to an ordinary send.
      if (TypesGen.isSBlock(arguments[3])) {
        return replace(IntToByDoMessageNodeGen.create(this,
            (SBlock) arguments[3], argumentNodes[0], argumentNodes[1],
            argumentNodes[2], argumentNodes[3]));
      }
      break;
  }
  return makeSend();
}
}
/** Uninitialized send with expression arguments; specializes itself on first use. */
private static final class UninitializedMessageSendNode
    extends AbstractUninitializedMessageSendNode {

  protected UninitializedMessageSendNode(final SSymbol selector,
      final ExpressionNode[] arguments,
      final SourceSection source) {
    super(selector, arguments, source);
  }

  /** Builds a generic send whose dispatch is fixed by the lexical receiver. */
  @Override
  protected PreevaluatedExpression makeSpecialSend() {
    ISpecialSend receiver = (ISpecialSend) argumentNodes[0];
    AbstractDispatchNode dispatch = receiver.isSuperSend()
        ? SuperDispatchNode.create(selector, (ISuperReadNode) receiver)
        : new LexicallyBoundDispatchNode(selector, receiver.getLexicalClass());
    return replace(new GenericMessageSendNode(selector, argumentNodes, dispatch,
        getSourceSection()));
  }
}
/**
 * Uninitialized send used for #perform:-style sends, where only the selector
 * symbol is known and there are no argument expression nodes.
 */
private static final class UninitializedSymbolSendNode
    extends AbstractUninitializedMessageSendNode {

  protected UninitializedSymbolSendNode(final SSymbol selector,
      final ClassDefinitionId classId, final SourceSection source) {
    super(selector, new ExpressionNode[0], source);
  }

  @Override
  public boolean isSpecialSend() {
    // TODO: is this correct?
    return false;
  }

  @Override
  protected PreevaluatedExpression makeSpecialSend() {
    // Unreachable: isSpecialSend() always returns false for symbol sends.
    throw new RuntimeException("A symbol send should never be a special send.");
  }

  /** Handles while loops whose blocks are only known as runtime values. */
  @Override
  protected PreevaluatedExpression specializeBinary(final Object[] arguments) {
    boolean bothBlocks = arguments[0] instanceof SBlock
        && arguments[1] instanceof SBlock;
    switch (selector.getString()) {
      case "whileTrue:":
        if (bothBlocks) {
          return replace(new WhileWithDynamicBlocksNode((SBlock) arguments[0],
              (SBlock) arguments[1], true, getSourceSection()));
        }
        break;
      case "whileFalse:":
        if (bothBlocks) {
          return replace(new WhileWithDynamicBlocksNode((SBlock) arguments[0],
              (SBlock) arguments[1], false, getSourceSection()));
        }
        break; // use normal send
    }
    return super.specializeBinary(arguments);
  }
}
/// TODO: besides the specializations listed above, we also specialize what the OMOP branch classified as 'value' sends. Is that a problem?
/** A fully generic send that dispatches through a (possibly chained) dispatch-node list. */
public static final class GenericMessageSendNode
    extends AbstractMessageSendNode {

  private final SSymbol selector;

  @Child private AbstractDispatchNode dispatchNode;

  private GenericMessageSendNode(final SSymbol selector,
      final ExpressionNode[] arguments,
      final AbstractDispatchNode dispatchNode, final SourceSection source) {
    super(arguments, source);
    this.selector = selector;
    this.dispatchNode = dispatchNode;
  }

  @Override
  public Object doPreEvaluated(final VirtualFrame frame,
      final Object[] arguments) {
    return dispatchNode.executeDispatch(frame, arguments);
  }

  public AbstractDispatchNode getDispatchListHead() {
    return dispatchNode;
  }

  /** Installs a new head of the dispatch chain; interpreter-only operation. */
  public void adoptNewDispatchListHead(final AbstractDispatchNode newHead) {
    CompilerAsserts.neverPartOfCompilation();
    dispatchNode = insert(newHead);
  }

  /** Replaces the whole dispatch chain with a single generic dispatch node. */
  public void replaceDispatchListHead(
      final GenericDispatchNode replacement) {
    CompilerAsserts.neverPartOfCompilation();
    dispatchNode.replace(replacement);
  }

  /** Debugging representation including the source location when available. */
  @Override
  public String toString() {
    StringBuilder text = new StringBuilder("GMsgSend(");
    text.append(selector.getString());
    if (getSourceSection() != null) {
      text.append(' ').append(getSourceSection().getSource().getName());
      text.append(':').append(getSourceSection().getStartLine());
      text.append(':').append(getSourceSection().getStartColumn());
    }
    return text.append(')').toString();
  }

  @Override
  public NodeCost getCost() {
    return Cost.getCost(dispatchNode);
  }
}
} |
package soot.jimple.infoflow.data;
import java.util.LinkedList;
import java.util.Stack;
import soot.SootField;
import soot.Unit;
import soot.Value;
import soot.jimple.Stmt;
public class Abstraction implements Cloneable {
private final AccessPath accessPath;
private final Value source;
private final Stmt sourceContext;
private int hashCode;
private Stack<Unit> callStack;
/**
 * Creates a new abstraction for a tainted value with an empty call stack.
 * @param taint the tainted value; becomes the root of the access path
 * @param src the source value the taint originated from
 * @param srcContext the statement at which the taint was introduced
 */
public Abstraction(Value taint, Value src, Stmt srcContext){
	source = src;
	accessPath = new AccessPath(taint);
	callStack = new Stack<Unit>();
	sourceContext = srcContext;
}
// TODO: make private and change AwP accordingly
/**
 * Creates a new abstraction with the given access path and an empty call stack.
 * @param p the access path of the tainted value
 * @param src the source value the taint originated from
 * @param srcContext the statement at which the taint was introduced
 */
protected Abstraction(AccessPath p, Value src, Stmt srcContext){
	source = src;
	sourceContext = srcContext;
	accessPath = p;
	callStack = new Stack<Unit>();
}
/**
 * Returns a copy of this abstraction with {@code p} as its access path; source,
 * context, and a copy of the call stack are carried over.
 */
public Abstraction deriveNewAbstraction(AccessPath p){
	Abstraction derived = new Abstraction(p, source, sourceContext);
	derived.callStack = (Stack<Unit>) this.callStack.clone();
	return derived;
}
public Abstraction deriveNewAbstraction(Value taint){
Abstraction a = new Abstraction(new AccessPath(taint, accessPath.getFields()), source, sourceContext);
a.callStack = (Stack<Unit>) this.callStack.clone();
return a;
}
public Abstraction deriveNewAbstraction(Value taint, boolean cutFirstField){
Abstraction a;
if(cutFirstField){
LinkedList<SootField> tempList = (LinkedList<SootField>) accessPath.getFields().clone();
tempList.removeFirst();
a = new Abstraction(new AccessPath(taint, tempList), source, sourceContext);
}else
a = new Abstraction(new AccessPath(taint,accessPath.getFields()), source, sourceContext);
a.callStack = (Stack<Unit>) this.callStack.clone();
return a;
}
/**
* Creates an abstraction as a copy of an existing abstraction,
* only exchanging the access path.
* @param p The value to be used as the new access path
* @param original The original abstraction to copy
*/
public Abstraction(Value p, Abstraction original){
this(new AccessPath(p), original);
}
/**
* Creates an abstraction as a copy of an existing abstraction,
* only exchanging the access path.
* @param p The access path for the new abstraction
* @param original The original abstraction to copy
*/
public Abstraction(AccessPath p, Abstraction original){
if (original == null) {
source = null;
sourceContext = null;
callStack = new Stack<Unit>();
}
else {
source = original.source;
sourceContext = original.sourceContext;
callStack = (Stack<Unit>) original.callStack.clone();
}
accessPath = p;
}
public Value getSource() {
return source;
}
public Stmt getSourceContext() {
return this.sourceContext;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Abstraction other = (Abstraction) obj;
if (accessPath == null) {
if (other.accessPath != null)
return false;
} else if (!accessPath.equals(other.accessPath))
return false;
// if (callStack == null) {
// if (other.callStack != null)
// return false;
// } else if (!callStack.equals(other.callStack))
// return false;
if (source == null) {
if (other.source != null)
return false;
} else if (!source.equals(other.source))
return false;
if (sourceContext == null) {
if (other.sourceContext != null)
return false;
} else if (!sourceContext.equals(other.sourceContext))
return false;
return true;
}
@Override
public int hashCode() {
if(hashCode == 0){
final int prime = 31;
int result = 1;
result = prime * result + ((accessPath == null) ? 0 : accessPath.hashCode());
// result = prime * result + ((callStack == null) ? 0 : callStack.hashCode());
result = prime * result + ((source == null) ? 0 : source.hashCode());
result = prime * result + ((sourceContext == null) ? 0 : sourceContext.hashCode());
hashCode = result;
}
return hashCode;
}
public void addToStack(Unit u){
callStack.push(u);
}
public void removeFromStack(){
if(!callStack.isEmpty())
callStack.pop();
}
public boolean isStackEmpty(){
return callStack.isEmpty();
}
public Unit getElementFromStack(){
if(!callStack.isEmpty())
return callStack.peek();
return null;
}
@Override
public String toString(){
if(accessPath != null && source != null){
return accessPath.toString() + " /source: "+ source.toString();
}
if(accessPath != null){
return accessPath.toString();
}
return "Abstraction (null)";
}
public AccessPath getAccessPath(){
return accessPath;
}
@Override
public Abstraction clone(){
Abstraction a = new Abstraction(accessPath, source, sourceContext);
a.callStack = (Stack<Unit>) this.callStack.clone();
return a;
}
} |
package org.adligo.models.core.client;
import java.util.HashSet;
import java.util.Set;
import org.adligo.models.core.relations.client.UserRelationsMutant;
import org.adligo.tests.client.I_Test;
public class UserRelationsAssertions {
public static void assertMutators(I_Test test, String prefix) throws Exception {
UserRelationsMutant mutant = new UserRelationsMutant();
InvalidParameterException ex = null;
Set<String> roles = new HashSet<String>();
roles.add("");
try {
mutant.addRoles(roles);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_ROLE, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_ROLE,
ex.getMessage());
roles = new HashSet<String>();
roles.add(null);
try {
mutant.addRoles(roles);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_ROLE, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_ROLE,
ex.getMessage());
try {
mutant.addRole(null);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_ROLE, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_ROLE,
ex.getMessage());
try {
mutant.addRole("");
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_ROLE, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_ROLE,
ex.getMessage());
mutant.addRole("admin");
roles = new HashSet<String>();
roles.add("bartender");
mutant.addRoles(roles);
test.assertIsFalse(mutant.isUserInRole("admin"));
test.assertIsTrue(mutant.isUserInRole("bartender"));
test.assertIsFalse(mutant.isUserInRole(""));
test.assertIsFalse(mutant.isUserInRole(null));
//groups
ex = null;
Set<String> groups = new HashSet<String>();
groups.add("");
try {
mutant.addGroups(groups);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_GROUP, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_GROUP,
ex.getMessage());
groups = new HashSet<String>();
groups.add(null);
try {
mutant.addGroups(groups);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_GROUP, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_GROUP,
ex.getMessage());
try {
mutant.addGroup(null);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_GROUP, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_GROUP,
ex.getMessage());
try {
mutant.addGroup("");
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.ADD_GROUP, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_RELATIONS_EMPTY_GROUP,
ex.getMessage());
mutant.addGroup("admins_group");
groups = new HashSet<String>();
groups.add("bartenders_group");
mutant.addGroups(groups);
Set<String> groupsFromMutant = mutant.getGroups();
test.assertIsFalse(groupsFromMutant.contains("admins_group"));
test.assertIsTrue(groupsFromMutant.contains("bartenders_group"));
test.assertIsFalse(groupsFromMutant.contains(""));
test.assertIsFalse(groupsFromMutant.contains(null));
}
public static void assertConstructors(I_Test test, String prefix) throws Exception {
assertNameAndDomainConstructor(test, prefix);
assertUserOnlyConstructor(test, prefix);
assertUserRelationsConstructor(test, prefix);
}
public static void assertUserRelationsConstructor(I_Test test, String prefix) throws Exception {
UserRelationsMutant mutant = new UserRelationsMutant();
InvalidParameterException ex = null;
try {
new UserRelationsMutant(mutant);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.CONSTRUCTOR, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_DOMAIN_NAME_CANT_BE_EMPTY,
ex.getMessage());
UserMutant user = new UserMutant();
user.setName("bo");
user.setEmail("bo@bo.com");
user.setDomain("bo.com");
user.setPassword("123");
mutant = new UserRelationsMutant(user);
UserRelationsMutant ur = new UserRelationsMutant(mutant);
test.assertIsEquals(new DomainName("bo.com"), ur.getDomain());
test.assertIsEquals(new EMailAddress("bo@bo.com"), ur.getEmail());
test.assertIsEquals("bo", ur.getName());
test.assertIsEquals("123", ur.getPassword());
mutant.addGroup("somegroup");
ur = new UserRelationsMutant(mutant);
test.assertIsEquals(new DomainName("bo.com"), ur.getDomain());
test.assertIsEquals(new EMailAddress("bo@bo.com"), ur.getEmail());
test.assertIsEquals("bo", ur.getName());
test.assertIsEquals("123", ur.getPassword());
test.assertIsTrue(ur.getGroups().contains("somegroup"));
OrganizationMutant orgMutant = new OrganizationMutant();
orgMutant.setName("admins");
orgMutant.setType(new NamedId("department"));
mutant.setOrg(orgMutant);
PersonMutant pm = new PersonMutant();
pm.setLast_name("someLastName");
mutant.setPerson(pm);
try {
new UserRelationsMutant(mutant);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.CONSTRUCTOR, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_DOMAIN_NAME_CANT_BE_EMPTY,
ex.getMessage());
}
public static void assertUserOnlyConstructor(I_Test test, String prefix) throws Exception {
UserMutant mutant = new UserMutant();
mutant.setName("bo");
InvalidParameterException ex = null;
try {
new UserRelationsMutant(mutant);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.CONSTRUCTOR, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_DOMAIN_NAME_CANT_BE_EMPTY,
ex.getMessage());
mutant.setEmail("bo@bo.com");
mutant.setDomain("bo.com");
mutant.setPassword("123");
UserMutant user = new UserMutant(mutant);
UserRelationsMutant rel = new UserRelationsMutant(user);
test.assertIsEquals(new DomainName("bo.com"), rel.getDomain());
test.assertIsEquals("bo", rel.getName());
test.assertIsFalse(rel.isUserInRole(null));
test.assertIsFalse(rel.isUserInRole(""));
test.assertIsFalse(rel.isUserInRole("admin"));
}
public static void assertNameAndDomainConstructor(I_Test test, String prefix) throws Exception {
InvalidParameterException ex = null;
try {
new UserRelationsMutant(null, null, null);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.CONSTRUCTOR, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.DOMAIN_CAN_NOT_BE_EMPTY,
ex.getMessage());
ex = null;
try {
new UserRelationsMutant("", null, null);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.CONSTRUCTOR, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.DOMAIN_CAN_NOT_BE_EMPTY,
ex.getMessage());
ex = null;
try {
new UserRelationsMutant("bo.com", null, null);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.CONSTRUCTOR, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_NAME_CANT_BE_SET_TO_EMPTY,
ex.getMessage());
ex = null;
try {
new UserRelationsMutant("bo.com", "", null);
} catch (Exception e) {
ex = GwtParameterExceptionAsserter.isIPE(e);
}
test.assertIsNotNull(ex);
test.assertIsEquals(UserRelationsMutant.CONSTRUCTOR, ex.getMethodName());
test.assertIsEquals(
prefix + ModelsCoreEnglishConstants.USER_NAME_CANT_BE_SET_TO_EMPTY,
ex.getMessage());
UserRelationsMutant user = new UserRelationsMutant("bo.com", "bo", null);
test.assertIsEquals(new DomainName("bo.com"), user.getDomain());
test.assertIsEquals("bo", user.getName());
test.assertIsFalse(user.isUserInRole(null));
test.assertIsFalse(user.isUserInRole(""));
test.assertIsFalse(user.isUserInRole("admin"));
Set<String> roles = new HashSet<String>();
roles.add("admin");
user = new UserRelationsMutant("bo2.com", "bo2", roles);
test.assertIsEquals(new DomainName("bo2.com"), user.getDomain());
test.assertIsEquals("bo2", user.getName());
test.assertIsFalse(user.isUserInRole(null));
test.assertIsFalse(user.isUserInRole(""));
test.assertIsTrue(user.isUserInRole("admin"));
}
} |
package org.anddev.andengine.entity.layer.tiled.tmx;
import org.anddev.andengine.entity.layer.tiled.tmx.util.constants.TMXConstants;
import org.anddev.andengine.util.SAXUtils;
import org.xml.sax.Attributes;
/**
* @author Nicolas Gramlich
* @since 11:21:01 - 29.07.2010
*/
public class TMXObject implements TMXConstants {
// Constants
// Fields
private final String mName;
private final String mType;
private final int mX;
private final int mY;
private final int mWidth;
private final int mHeight;
private final TMXProperties<TMXObjectProperty> mTMXObjectProperties = new TMXProperties<TMXObjectProperty>();
// Constructors
public TMXObject(final Attributes pAttributes) {
this.mName = pAttributes.getValue("", TAG_OBJECT_ATTRIBUTE_NAME);
this.mType = pAttributes.getValue("", TAG_OBJECT_ATTRIBUTE_TYPE);
this.mX = SAXUtils.getIntAttributeOrThrow(pAttributes, TAG_OBJECT_ATTRIBUTE_X);
this.mY = SAXUtils.getIntAttributeOrThrow(pAttributes, TAG_OBJECT_ATTRIBUTE_Y);
this.mWidth = SAXUtils.getIntAttribute(pAttributes, TAG_OBJECT_ATTRIBUTE_WIDTH, 0);
this.mHeight = SAXUtils.getIntAttribute(pAttributes, TAG_OBJECT_ATTRIBUTE_HEIGHT, 0);
}
// Getter & Setter
public String getName() {
return this.mName;
}
public String getType() {
return this.mType;
}
public int getX() {
return this.mX;
}
public int getY() {
return this.mY;
}
public int getWidth() {
return this.mWidth;
}
public int getHeight() {
return this.mHeight;
}
public void addTMXObjectProperty(final TMXObjectProperty pTMXObjectProperty) {
this.mTMXObjectProperties.add(pTMXObjectProperty);
}
public TMXProperties<TMXObjectProperty> getTMXObjectProperties() {
return this.mTMXObjectProperties;
}
// Methods for/from SuperClass/Interfaces
// Methods
// Inner and Anonymous Classes
} |
package org.exist.xquery.functions.xmldb;
import org.apache.log4j.Logger;
import java.io.File;
import org.exist.dom.QName;
import org.exist.util.DirectoryScanner;
import org.exist.util.MimeTable;
import org.exist.util.MimeType;
import org.exist.xmldb.EXistResource;
import org.exist.xquery.Cardinality;
import org.exist.xquery.FunctionSignature;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQueryContext;
import org.exist.xquery.value.FunctionReturnSequenceType;
import org.exist.xquery.value.FunctionParameterSequenceType;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.SequenceIterator;
import org.exist.xquery.value.SequenceType;
import org.exist.xquery.value.StringValue;
import org.exist.xquery.value.Type;
import org.exist.xquery.value.ValueSequence;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.Resource;
import org.xmldb.api.base.XMLDBException;
/**
* @author wolf
*/
// Implements xmldb:store-files-from-pattern(): stores files matched by
// Ant-style patterns from a server-side directory into a collection.
public class XMLDBLoadFromPattern extends XMLDBAbstractCollectionManipulator {
    protected static final Logger logger = Logger.getLogger(XMLDBLoadFromPattern.class);
    protected final static QName FUNCTION_NAME = new QName("store-files-from-pattern", XMLDBModule.NAMESPACE_URI, XMLDBModule.PREFIX);
    protected final static String FUNCTION_DESCRIPTION = "Stores new resources into the database. Resources are read from the server's " +
            "file system, using file patterns. " +
            "The function returns a sequence of all document paths added " +
            "to the db. These can be directly passed to fn:doc() to retrieve the document(s).";
    protected final static SequenceType PARAM_COLLECTION = new FunctionParameterSequenceType("collection-uri", Type.STRING, Cardinality.EXACTLY_ONE, "The collection-uri where resources should be stored. " + XMLDBModule.COLLECTION_URI);
    protected final static SequenceType PARAM_FS_DIRECTORY = new FunctionParameterSequenceType("directory", Type.STRING, Cardinality.EXACTLY_ONE, "The directory in the file system from where the files are read.");
    // fixit! - security - we should say some words about sanity
    // DBA role should be required for anything short of chroot/jail
    // easily setup per installation/execution host for each function. /ljo
    protected final static SequenceType PARAM_FS_PATTERN = new FunctionParameterSequenceType("pattern", Type.STRING, Cardinality.ONE_OR_MORE, "The file matching pattern. Based on code from Apache's Ant, thus following the same conventions. For example: *.xml matches any file ending with .xml in the current directory, **/*.xml matches files in any directory below the current one");
    protected final static SequenceType PARAM_MIME_TYPE = new FunctionParameterSequenceType("mime-type", Type.STRING, Cardinality.ZERO_OR_ONE, "If the mime-type is something other than 'text/xml' or 'application/xml', the resource will be stored as a binary resource.");
    protected static final SequenceType PARAM_PRESERVE_STRUCTURE = new FunctionParameterSequenceType("preserve-structure", Type.BOOLEAN, Cardinality.EXACTLY_ONE, "If preserve-structure is true(), the filesystem directory structure will be mirrored in the collection. Otherwise all the matching resources, including the ones in sub-directories, will be stored in the collection given in the first argument flatly.");
    protected static final FunctionReturnSequenceType RETURN_TYPE = new FunctionReturnSequenceType(Type.STRING, Cardinality.ZERO_OR_MORE, "the sequence of document paths");
    // Three overloads: (collection, dir, pattern[, mime-type[, preserve-structure]]).
    public final static FunctionSignature signatures[] = {
            new FunctionSignature(
                    FUNCTION_NAME,
                    FUNCTION_DESCRIPTION,
                    new SequenceType[] { PARAM_COLLECTION, PARAM_FS_DIRECTORY, PARAM_FS_PATTERN },
                    RETURN_TYPE
            ),
            new FunctionSignature(
                    FUNCTION_NAME,
                    FUNCTION_DESCRIPTION,
                    new SequenceType[] { PARAM_COLLECTION, PARAM_FS_DIRECTORY, PARAM_FS_PATTERN, PARAM_MIME_TYPE },
                    RETURN_TYPE
            ),
            new FunctionSignature(
                    FUNCTION_NAME,
                    FUNCTION_DESCRIPTION,
                    new SequenceType[] { PARAM_COLLECTION, PARAM_FS_DIRECTORY, PARAM_FS_PATTERN, PARAM_MIME_TYPE, PARAM_PRESERVE_STRUCTURE },
                    RETURN_TYPE
            )
    };
    public XMLDBLoadFromPattern(XQueryContext context, FunctionSignature signature) {
        super(context, signature);
    }
    /* (non-Javadoc)
     * @see org.exist.xquery.functions.xmldb.XMLDBAbstractCollectionManipulator#evalWithCollection(org.xmldb.api.base.Collection, org.exist.xquery.value.Sequence[], org.exist.xquery.value.Sequence)
     */
    /**
     * Stores every file matching the given pattern(s) into the target
     * collection. args[1] is the base directory, args[2] the pattern
     * sequence, args[3] an optional fixed mime-type, args[4] optionally
     * requests mirroring the directory structure as sub-collections.
     * Individual file failures are logged and skipped (best effort).
     *
     * @return the sequence of database paths of the stored documents
     * @throws XPathException if an unknown mime-type is specified
     */
    protected Sequence evalWithCollection(Collection collection, Sequence[] args, Sequence contextSequence)
            throws XPathException {
        File baseDir = new File(args[1].getStringValue());
        logger.debug("Loading files from directory: " + baseDir);
        //determine resource type - xml or binary?
        MimeType mimeTypeFromArgs = null;
        if(getSignature().getArgumentCount() > 3 && args[3].hasOne()) {
            String mimeTypeParam = args[3].getStringValue();
            mimeTypeFromArgs = MimeTable.getInstance().getContentType(mimeTypeParam);
            if (mimeTypeFromArgs == null) {
                throw new XPathException(this, "Unknown mime type specified: " + mimeTypeParam);
            }
        }
        //keep the directory structure?
        boolean keepDirStructure = false;
        if(getSignature().getArgumentCount() == 5)
            keepDirStructure = args[4].effectiveBooleanValue();
        ValueSequence stored = new ValueSequence();
        //store according to each pattern
        Sequence patterns = args[2];
        for(SequenceIterator i = patterns.iterate(); i.hasNext(); )
        {
            //get the files to store
            String pattern = i.nextItem().getStringValue();
            File[] files = DirectoryScanner.scanDir(baseDir, pattern);
            logger.debug("Found: " + files.length);
            // Target collection for the current file; changes only when
            // keepDirStructure is set and the relative directory changes.
            Collection col = collection;
            String relDir, prevDir = null;
            for(int j = 0; j < files.length; j++) {
                try {
                    logger.debug(files[j].getAbsolutePath());
                    // Path of the file relative to the base directory.
                    String relPath = files[j].toString().substring(baseDir.toString().length());
                    int p = relPath.lastIndexOf(File.separatorChar);
                    if(p >= 0) {
                        relDir = relPath.substring(0, p);
                        // Normalize to collection-style forward slashes.
                        relDir = relDir.replace(File.separatorChar, '/');
                    } else {
                        relDir = relPath;
                    }
                    if(keepDirStructure && (prevDir == null || (!relDir.equals(prevDir)))) {
                        col = createCollectionPath(collection, relDir);
                        prevDir = relDir;
                    }
                    // Per-file mime-type: explicit argument wins, then the
                    // file-name mapping, then binary as the fallback.
                    MimeType mimeType = mimeTypeFromArgs;
                    if (mimeType == null) {
                        mimeType = MimeTable.getInstance().getContentTypeFor(files[j].getName());
                        if (mimeType == null)
                            mimeType = MimeType.BINARY_TYPE;
                    }
                    //TODO : these probably need to be encoded and checked for right mime type
                    Resource resource = col.createResource(files[j].getName(), mimeType.getXMLDBType());
                    resource.setContent(files[j]);
                    ((EXistResource) resource).setMimeType(mimeType.getName());
                    col.storeResource(resource);
                    //TODO : use dedicated function in XmldbURI
                    stored.add(new StringValue(col.getName() + "/" + resource.getId()));
                } catch(XMLDBException e) {
                    // Best effort: a single failing file is logged and skipped.
                    logger.error("Could not store file " + files[j].getAbsolutePath() + ": " + e.getMessage());
                }
            }
        }
        return stored;
    }
}
package com.ecyrd.jspwiki;
import com.ecyrd.jspwiki.attachment.Attachment;
import com.ecyrd.jspwiki.providers.*;
import junit.framework.*;
import java.io.*;
import java.util.*;
import javax.servlet.*;
import com.ecyrd.jspwiki.acl.*;
import com.ecyrd.jspwiki.auth.permissions.*;
import com.ecyrd.jspwiki.auth.*;
public class TranslatorReaderTest extends TestCase
{
    // Engine configuration; loaded and adjusted in setUp().
    Properties props = new Properties();
    // Names of pages created via newPage(); deleted again in tearDown().
    Vector created = new Vector();
    // Default page name the one-argument translate() renders against.
    static final String PAGE_NAME = "testpage";
    // Wiki engine under test; rebuilt for every test in setUp().
    TestEngine testEngine;
    /** Standard JUnit 3 constructor forwarding the test name. */
    public TranslatorReaderTest( String s )
    {
        super( s );
    }
    /**
     * Loads the shared test properties, enables English plural matching
     * (the plural-match tests below rely on it), and builds a fresh engine.
     */
    public void setUp()
        throws Exception
    {
        props.load( TestEngine.findTestProperties() );
        props.setProperty( "jspwiki.translatorReader.matchEnglishPlurals", "true" );
        testEngine = new TestEngine( props );
    }
    /** Removes every page a test created so tests stay independent. */
    public void tearDown()
    {
        deleteCreatedPages();
    }
    /**
     * Creates a wiki page with placeholder content and registers it for
     * removal in tearDown().
     */
    private void newPage( String name )
    {
        testEngine.saveText( name, "<test>" );
        created.addElement( name );
    }
private void deleteCreatedPages()
{
for( Iterator i = created.iterator(); i.hasNext(); )
{
String name = (String) i.next();
testEngine.deletePage(name);
}
created.clear();
}
private String translate( String src )
throws IOException,
NoRequiredPropertyException,
ServletException
{
WikiContext context = new WikiContext( testEngine,
new WikiPage(PAGE_NAME) );
Reader r = new TranslatorReader( context,
new BufferedReader( new StringReader(src)) );
StringWriter out = new StringWriter();
int c;
while( ( c=r.read()) != -1 )
{
out.write( c );
}
return out.toString();
}
private String translate( WikiPage p, String src )
throws IOException,
NoRequiredPropertyException,
ServletException
{
WikiContext context = new WikiContext( testEngine,
p );
Reader r = new TranslatorReader( context,
new BufferedReader( new StringReader(src)) );
StringWriter out = new StringWriter();
int c;
while( ( c=r.read()) != -1 )
{
out.write( c );
}
return out.toString();
}
    // ------------------------------------------------------------------
    // Hyperlink rendering tests: each test stores the page(s) it links to
    // (via newPage) and compares TranslatorReader's HTML output against
    // the expectation.
    //
    // NOTE(review): several string literals in this region are truncated
    // right after "http:" and some non-ASCII characters are garbled. The
    // line tails — presumably "//host/..." URLs plus the closing quote —
    // appear to have been stripped by an automated comment-removal pass,
    // so the affected methods cannot compile as-is. Restore the original
    // literals from version control.
    // ------------------------------------------------------------------
    public void testHyperlinks2()
        throws Exception
    {
        newPage("Hyperlink");
        String src = "This should be a [hyperlink]";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=Hyperlink\">hyperlink</a>",
                      translate(src) );
    }
    public void testHyperlinks3()
        throws Exception
    {
        newPage("HyperlinkToo");
        String src = "This should be a [hyperlink too]";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperlinkToo\">hyperlink too</a>",
                      translate(src) );
    }
    public void testHyperlinks4()
        throws Exception
    {
        newPage("HyperLink");
        String src = "This should be a [HyperLink]";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>",
                      translate(src) );
    }
    public void testHyperlinks5()
        throws Exception
    {
        newPage("HyperLink");
        String src = "This should be a [here|HyperLink]";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">here</a>",
                      translate(src) );
    }
    // Testing CamelCase hyperlinks
    public void testHyperLinks6()
        throws Exception
    {
        newPage("DiscussionAboutWiki");
        newPage("WikiMarkupDevelopment");
        String src = "[DiscussionAboutWiki] [WikiMarkupDevelopment].";
        assertEquals( "<a class=\"wikipage\" href=\"Wiki.jsp?page=DiscussionAboutWiki\">DiscussionAboutWiki</a> <a class=\"wikipage\" href=\"Wiki.jsp?page=WikiMarkupDevelopment\">WikiMarkupDevelopment</a>.",
                      translate(src) );
    }
    public void testHyperlinksCC()
        throws Exception
    {
        newPage("HyperLink");
        String src = "This should be a HyperLink.";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>.",
                      translate(src) );
    }
    // A CamelCase word without a matching page renders as an edit link.
    public void testHyperlinksCCNonExistant()
        throws Exception
    {
        String src = "This should be a HyperLink.";
        assertEquals( "This should be a <u>HyperLink</u><a href=\"Edit.jsp?page=HyperLink\">?</a>.",
                      translate(src) );
    }
    /**
     * Check if the CC hyperlink translator gets confused with
     * unorthodox bracketed links.
     */
    public void testHyperlinksCC2()
        throws Exception
    {
        newPage("HyperLink");
        String src = "This should be a [ HyperLink ].";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\"> HyperLink </a>.",
                      translate(src) );
    }
    public void testHyperlinksCC3()
        throws Exception
    {
        String src = "This should be a nonHyperLink.";
        assertEquals( "This should be a nonHyperLink.",
                      translate(src) );
    }
    /** Two links on same line. */
    public void testHyperlinksCC4()
        throws Exception
    {
        newPage("HyperLink");
        newPage("ThisToo");
        String src = "This should be a HyperLink, and ThisToo.";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>, and <a class=\"wikipage\" href=\"Wiki.jsp?page=ThisToo\">ThisToo</a>.",
                      translate(src) );
    }
    /** Two mixed links on same line. */
    public void testHyperlinksCC5()
        throws Exception
    {
        newPage("HyperLink");
        newPage("ThisToo");
        String src = "This should be a [HyperLink], and ThisToo.";
        assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>, and <a class=\"wikipage\" href=\"Wiki.jsp?page=ThisToo\">ThisToo</a>.",
                      translate(src) );
    }
    /** Closing tags only. */
    public void testHyperlinksCC6()
        throws Exception
    {
        newPage("HyperLink");
        newPage("ThisToo");
        String src = "] This ] should be a HyperLink], and ThisToo.";
        assertEquals( "] This ] should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>], and <a class=\"wikipage\" href=\"Wiki.jsp?page=ThisToo\">ThisToo</a>.",
                      translate(src) );
    }
    /** First and last words on line. */
    public void testHyperlinksCCFirstAndLast()
        throws Exception
    {
        newPage("HyperLink");
        newPage("ThisToo");
        String src = "HyperLink, and ThisToo";
        assertEquals( "<a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>, and <a class=\"wikipage\" href=\"Wiki.jsp?page=ThisToo\">ThisToo</a>",
                      translate(src) );
    }
    /** Hyperlinks inside URIs. */
    // NOTE(review): literals truncated at "http:" — restore from VCS.
    public void testHyperlinksCCURLs()
        throws Exception
    {
        String src = "http:
        assertEquals( "http:
                      translate(src) );
    }
    public void testHyperlinksCCNegated()
        throws Exception
    {
        String src = "This should not be a ~HyperLink.";
        assertEquals( "This should not be a HyperLink.",
                      translate(src) );
    }
    public void testHyperlinksCCNegated2()
        throws Exception
    {
        String src = "~HyperLinks should not be matched.";
        assertEquals( "HyperLinks should not be matched.",
                      translate(src) );
    }
    public void testCCLinkInList()
        throws Exception
    {
        newPage("HyperLink");
        String src = "*HyperLink";
        assertEquals( "<ul>\n<li><a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a></li>\n</ul>\n",
                      translate(src) );
    }
    public void testCCLinkBold()
        throws Exception
    {
        newPage("BoldHyperLink");
        String src = "__BoldHyperLink__";
        assertEquals( "<b><a class=\"wikipage\" href=\"Wiki.jsp?page=BoldHyperLink\">BoldHyperLink</a></b>",
                      translate(src) );
    }
    public void testCCLinkBold2()
        throws Exception
    {
        newPage("HyperLink");
        String src = "Let's see, if a bold __HyperLink__ is correct?";
        assertEquals( "Let's see, if a bold <b><a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a></b> is correct?",
                      translate(src) );
    }
    public void testCCLinkItalic()
        throws Exception
    {
        newPage("ItalicHyperLink");
        String src = "''ItalicHyperLink''";
        assertEquals( "<i><a class=\"wikipage\" href=\"Wiki.jsp?page=ItalicHyperLink\">ItalicHyperLink</a></i>",
                      translate(src) );
    }
    public void testCCLinkWithPunctuation()
        throws Exception
    {
        newPage("HyperLink");
        String src = "Test. Punctuation. HyperLink.";
        assertEquals( "Test. Punctuation. <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>.",
                      translate(src) );
    }
    public void testCCLinkWithPunctuation2()
        throws Exception
    {
        newPage("HyperLink");
        newPage("ThisToo");
        String src = "Punctuations: HyperLink,ThisToo.";
        assertEquals( "Punctuations: <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLink</a>,<a class=\"wikipage\" href=\"Wiki.jsp?page=ThisToo\">ThisToo</a>.",
                      translate(src) );
    }
    // NOTE(review): the non-ASCII (scandic) characters below are garbled
    // (mojibake); the URL-encoded expectation (%C4 %F6 %D6 %E4) suggests
    // the page name originally contained A/O/a-umlaut characters.
    public void testCCLinkWithScandics()
        throws Exception
    {
        newPage("itiSyljy");
        String src = "Onko tm hyperlinkki: itiSyljy?";
        assertEquals( "Onko tm hyperlinkki: <a class=\"wikipage\" href=\"Wiki.jsp?page=%C4itiSy%F6%D6ljy%E4\">itiSyljy</a>?",
                      translate(src) );
    }
    // NOTE(review): literals truncated at "http:" — restore from VCS.
    public void testHyperlinksExt()
        throws Exception
    {
        String src = "This should be a [http:
        assertEquals( "This should be a <a class=\"external\" href=\"http:
                      translate(src) );
    }
    // NOTE(review): literals truncated at "http:" — restore from VCS.
    public void testHyperlinksExt2()
        throws Exception
    {
        String src = "This should be a [link|http:
        assertEquals( "This should be a <a class=\"external\" href=\"http:
                      translate(src) );
    }
// Testing various odds and ends about hyperlink matching.
public void testHyperlinksPluralMatch()
throws Exception
{
String src = "This should be a [HyperLinks]";
newPage("HyperLink");
assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLink\">HyperLinks</a>",
translate(src) );
}
public void testHyperlinksPluralMatch2()
throws Exception
{
String src = "This should be a [HyperLinks]";
assertEquals( "This should be a <u>HyperLinks</u><a href=\"Edit.jsp?page=HyperLinks\">?</a>",
translate(src) );
}
public void testHyperlinksPluralMatch3()
throws Exception
{
String src = "This should be a [HyperLink]";
newPage("HyperLinks");
assertEquals( "This should be a <a class=\"wikipage\" href=\"Wiki.jsp?page=HyperLinks\">HyperLink</a>",
translate(src) );
}
public void testHyperlinkJS1()
throws Exception
{
String src = "This should be a [link|http:
assertEquals( "This should be a <a class=\"external\" href=\"http:
translate(src) );
}
public void testHyperlinksInterWiki1()
throws Exception
{
String src = "This should be a [link|JSPWiki:HyperLink]";
assertEquals( "This should be a <a class=\"interwiki\" href=\"http:
translate(src) );
}
public void testNoHyperlink()
throws Exception
{
newPage("HyperLink");
String src = "This should not be a [[HyperLink]";
assertEquals( "This should not be a [HyperLink]",
translate(src) );
}
public void testNoHyperlink2()
throws Exception
{
String src = "This should not be a [[[[HyperLink]";
assertEquals( "This should not be a [[[HyperLink]",
translate(src) );
}
public void testNoHyperlink3()
throws Exception
{
String src = "[[HyperLink], and this [[Neither].";
assertEquals( "[HyperLink], and this [Neither].",
translate(src) );
}
public void testNoPlugin()
throws Exception
{
String src = "There is [[{NoPlugin}] here.";
assertEquals( "There is [{NoPlugin}] here.",
translate(src) );
}
public void testErroneousHyperlink()
throws Exception
{
String src = "What if this is the last char [";
assertEquals( "What if this is the last char ",
translate(src) );
}
public void testErroneousHyperlink2()
throws Exception
{
String src = "What if this is the last char [[";
assertEquals( "What if this is the last char [",
translate(src) );
}
public void testExtraPagename1()
throws Exception
{
String src = "Link [test_page]";
newPage("Test_page");
assertEquals("Link <a class=\"wikipage\" href=\"Wiki.jsp?page=Test_page\">test_page</a>",
translate(src) );
}
public void testExtraPagename2()
throws Exception
{
String src = "Link [test.page]";
newPage("Test.page");
assertEquals("Link <a class=\"wikipage\" href=\"Wiki.jsp?page=Test.page\">test.page</a>",
translate(src) );
}
/** Leading dots and trailing underscores survive page-name normalization. */
public void testExtraPagename3()
    throws Exception
{
    newPage( ".testpage_" );
    assertEquals( "Link <a class=\"wikipage\" href=\"Wiki.jsp?page=.testpage_\">.testpage_</a>",
                  translate( "Link [.testpage_]" ) );
}
/**
 * An image URL used as a link target should be embedded as an inline <img>.
 * NOTE(review): both literals below are truncated — the "http://..." URLs lost
 * everything from "//" onward during extraction, leaving unterminated strings.
 * Restore from version control.
 */
public void testInlineImages()
throws Exception
{
String src = "Link [test|http:
assertEquals("Link <img class=\"inline\" src=\"http:
translate(src) );
}
/**
 * A non-image URL must render as a normal external link, not an <img>.
 * NOTE(review): both literals below are truncated — the "http://..." URLs lost
 * everything from "//" onward during extraction, leaving unterminated strings.
 * Restore from version control.
 */
public void testInlineImages2()
throws Exception
{
String src = "Link [test|http:
assertEquals("Link <a class=\"external\" href=\"http:
translate(src) );
}
/** URLs matching the configured inline-image patterns embed as <img>. */
public void testInlineImages3()
    throws Exception
{
    final String markup = "Link [test|http://images.com/testi]";
    assertEquals( "Link <img class=\"inline\" src=\"http://images.com/testi\" alt=\"test\" />",
                  translate( markup ) );
}
/** A ".jpg" URL with explicit link text embeds as an inline image. */
public void testInlineImages4()
    throws Exception
{
    assertEquals( "Link <img class=\"inline\" src=\"http://foobar.jpg\" alt=\"test\" />",
                  translate( "Link [test|http://foobar.jpg]" ) );
}
// No link text should be just embedded link.
/**
 * NOTE(review): the expected-value literal below is truncated — the
 * "http://..." URL lost everything from "//" onward during extraction,
 * leaving an unterminated string. Restore from version control.
 */
public void testInlineImagesLink2()
throws Exception
{
String src = "Link [http://foobar.jpg]";
assertEquals("Link <img class=\"inline\" src=\"http:
translate(src) );
}
/**
 * NOTE(review): both literals below are truncated — the "http://..." URLs lost
 * everything from "//" onward during extraction, leaving unterminated strings.
 * Restore from version control before relying on this test.
 */
public void testInlineImagesLink()
throws Exception
{
String src = "Link [http:
assertEquals("Link <a href=\"http:
translate(src) );
}
/** An image URL with wiki-page link text renders the image wrapped in a page link. */
public void testInlineImagesLink3()
    throws Exception
{
    newPage( "SandBox" );
    final String markup = "Link [SandBox|http://foobar.jpg]";
    assertEquals( "Link <a class=\"wikipage\" href=\"Wiki.jsp?page=SandBox\"><img class=\"inline\" src=\"http://foobar.jpg\" alt=\"SandBox\" /></a>",
                  translate( markup ) );
}
/**
 * Verifies URL-encoding of non-ASCII (Scandinavian) page names.
 * NOTE(review): the source markup and newPage() argument appear corrupted —
 * the expected href encodes "%C5%E4" (ISO-8859-1 for "Åä"), so the original
 * page name was presumably "ÅäTest", but the non-ASCII characters were
 * stripped during extraction. Restore from version control.
 */
public void testScandicPagename1()
throws Exception
{
String src = "Link [Test]";
newPage("Test"); // FIXME: Should be capital
assertEquals("Link <a class=\"wikipage\" href=\"Wiki.jsp?page=%C5%E4Test\">Test</a>",
translate(src));
}
public void testParagraph()
throws Exception
{
String src = "1\n\n2\n\n3";
assertEquals( "1\n<p>\n2\n</p><p>\n3</p>", translate(src) );
}
public void testParagraph2()
throws Exception
{
String src = "[WikiEtiquette]\r\n\r\n[Find page]";
newPage( "WikiEtiquette" );
assertEquals( "<a class=\"wikipage\" href=\"Wiki.jsp?page=WikiEtiquette\">WikiEtiquette</a>\n"+
"<p>\n<a class=\"wikipage\" href=\"Wiki.jsp?page=FindPage\">Find page</a></p>", translate(src) );
}
/** A double backslash ("\\\\" in markup) renders as a forced line break. */
public void testLinebreak()
    throws Exception
{
    assertEquals( "1<br />2", translate( "1\\\\2" ) );
}
public void testLinebreakClear()
throws Exception
{
String src = "1\\\\\\2";
assertEquals( "1<br clear=\"all\" />2", translate(src) );
}
/** Double braces mark teletype (monospace) text. */
public void testTT()
    throws Exception
{
    assertEquals( "1<tt>2345</tt>6", translate( "1{{2345}}6" ) );
}
public void testTTAcrossLines()
throws Exception
{
String src = "1{{\n2345\n}}6";
assertEquals( "1<tt>\n2345\n</tt>6", translate(src) );
}
public void testTTLinks()
throws Exception
{
String src = "1{{\n2345\n[a link]\n}}6";
newPage("ALink");
assertEquals( "1<tt>\n2345\n<a class=\"wikipage\" href=\"Wiki.jsp?page=ALink\">a link</a>\n</tt>6", translate(src) );
}
/** Inline triple braces render as a monospace, whitespace-preserving span. */
public void testPre()
    throws Exception
{
    final String markup = "1{{{2345}}}6";
    assertEquals( "1<span style=\"font-family:monospace; whitespace:pre;\">2345</span>6",
                  translate( markup ) );
}
public void testPre2()
throws Exception
{
String src = "1 {{{ {{{ 2345 }}} }}} 6";
assertEquals( "1 <span style=\"font-family:monospace; whitespace:pre;\"> {{{ 2345 </span> }}} 6", translate(src) );
}
/** Inside a block-level {{{ }}} section, HTML tags pass through into <pre>. */
public void testHTMLInPre()
    throws Exception
{
    assertEquals( "1\n<pre> <b> </pre>", translate( "1\n{{{ <b> }}}" ) );
}
public void testCamelCaseInPre()
throws Exception
{
String src = "1\n{{{ CamelCase }}}";
assertEquals( "1\n<pre> CamelCase </pre>", translate(src) );
}
/** Lines starting with '*' build an unordered list, one <li> per line. */
public void testList1()
    throws Exception
{
    final String markup = "A list:\n* One\n* Two\n* Three\n";
    final String expected =
        "A list:\n<ul>\n<li> One\n</li>\n<li> Two\n</li>\n<li> Three\n</li>\n</ul>\n";
    assertEquals( expected, translate( markup ) );
}
/** Plain multi line testing:
<pre>
* One
continuing
* Two
* Three
</pre>
*/
public void testMultilineList1()
throws Exception
{
String src = "A list:\n* One\n continuing.\n* Two\n* Three\n";
assertEquals( "A list:\n<ul>\n<li> One\n continuing.\n</li>\n<li> Two\n</li>\n<li> Three\n</li>\n</ul>\n",
translate(src) );
}
public void testMultilineList2()
throws Exception
{
String src = "A list:\n* One\n continuing.\n* Two\n* Three\nShould be normal.";
assertEquals( "A list:\n<ul>\n<li> One\n continuing.\n</li>\n<li> Two\n</li>\n<li> Three\n</li>\n</ul>\nShould be normal.",
translate(src) );
}
/** With default settings this HTML passes through the translator unchanged. */
public void testHTML()
    throws Exception
{
    assertEquals( "<b>Test</b>", translate( "<b>Test</b>" ) );
}
public void testHTML2()
throws Exception
{
String src = "<p>";
assertEquals( "<p>", translate(src) );
}
/**
 * When jspwiki.translatorReader.allowHTML is enabled, raw HTML such as
 * "<p>" must pass through the translator unescaped.
 */
public void testHTMLWhenAllowed()
    throws Exception
{
    String src = "<p>";

    props.setProperty( "jspwiki.translatorReader.allowHTML", "true" );
    testEngine = new TestEngine( props );

    WikiContext context = new WikiContext( testEngine,
                                           new WikiPage(PAGE_NAME) );

    Reader r = new TranslatorReader( context,
                                     new BufferedReader( new StringReader(src)) );
    StringWriter out = new StringWriter();
    try
    {
        // Drain the translated stream character by character.
        int c;
        while( ( c = r.read() ) != -1 )
        {
            out.write( c );
        }
    }
    finally
    {
        // Fix: the reader was never closed before (resource leak).
        r.close();
    }

    assertEquals( "<p>", out.toString() );
}
public void testQuotes()
throws Exception
{
String src = "\"Test\"\"";
assertEquals( ""Test""", translate(src) );
}
public void testItalicAcrossLinebreak()
throws Exception
{
String src="''This is a\ntest.''";
assertEquals( "<i>This is a\ntest.</i>", translate(src) );
}
public void testBoldAcrossLinebreak()
throws Exception
{
String src="__This is a\ntest.__";
assertEquals( "<b>This is a\ntest.</b>", translate(src) );
}
/** Italic markup nests correctly inside bold markup. */
public void testBoldItalic()
    throws Exception
{
    assertEquals( "<b>This <i>is</i> a test.</b>",
                  translate( "__This ''is'' a test.__" ) );
}
/** A bare [n] reference renders as a footnote hyperlink anchored on the page. */
public void testFootnote1()
    throws Exception
{
    assertEquals( "Footnote<a class=\"footnoteref\" href=\"#ref-testpage-1\">[1]</a>",
                  translate( "Footnote[1]" ) );
}
/** A [#n] definition renders as the named footnote anchor itself. */
public void testFootnote2()
    throws Exception
{
    assertEquals( "<a class=\"footnote\" name=\"ref-testpage-2356\">[#2356]</a> Footnote.",
                  translate( "[#2356] Footnote." ) );
}
/** Check an reported error condition where empty list items could cause crashes */
public void testEmptySecondLevelList()
throws Exception
{
String src="A\n\n**\n\nB";
assertEquals( "A\n<p>\n<ul>\n<ul>\n<li>\n</li>\n</ul>\n</ul>\n</p><p>\nB</p>",
translate(src) );
}
public void testEmptySecondLevelList2()
throws Exception
{
String src="A\n\n##\n\nB";
assertEquals( "A\n<p>\n<ol>\n<ol>\n<li>\n</li>\n</ol>\n</ol>\n</p><p>\nB</p>",
translate(src) );
}
/**
* <pre>
* *Item A
* ##Numbered 1
* ##Numbered 2
* *Item B
* </pre>
*
* would come out as:
*<ul>
* <li>Item A
* </ul>
* <ol>
* <ol>
* <li>Numbered 1
* <li>Numbered 2
* <ul>
* <li></ol>
* </ol>
* Item B
* </ul>
*
* (by Mahlen Morris).
*/
// FIXME: does not run - code base is too screwed for that.
/*
public void testMixedList()
throws Exception
{
String src="*Item A\n##Numbered 1\n##Numbered 2\n*Item B\n";
String result = translate(src);
// Remove newlines for easier parsing.
result = TextUtil.replaceString( result, "\n", "" );
assertEquals( "<ul><li>Item A"+
"<ol><ol><li>Numbered 1"+
"<li>Numbered 2"+
"</ol></ol>"+
"<li>Item B"+
"</ul>",
result );
}
*/
/**
* Like testMixedList() but the list types have been reversed.
*/
// FIXME: does not run - code base is too screwed for that.
/*
public void testMixedList2()
throws Exception
{
String src="#Item A\n**Numbered 1\n**Numbered 2\n#Item B\n";
String result = translate(src);
// Remove newlines for easier parsing.
result = TextUtil.replaceString( result, "\n", "" );
assertEquals( "<ol><li>Item A"+
"<ul><ul><li>Numbered 1"+
"<li>Numbered 2"+
"</ul></ul>"+
"<li>Item B"+
"</ol>",
result );
}
*/
/** A fully-qualified [{INSERT ...}] invocation runs the plugin and emits its output. */
public void testPluginInsert()
    throws Exception
{
    final String markup = "[{INSERT com.ecyrd.jspwiki.plugin.SamplePlugin WHERE text=test}]";
    assertEquals( "test", translate( markup ) );
}
public void testPluginNoInsert()
throws Exception
{
String src="[{SamplePlugin text=test}]";
assertEquals( "test", translate(src) );
}
public void testPluginInsertJS()
throws Exception
{
String src="Today: [{INSERT JavaScriptPlugin}] ''day''.";
assertEquals( "Today: <script language=\"JavaScript\"><!--\nfoo='';\n--></script>\n <i>day</i>.", translate(src) );
}
public void testShortPluginInsert()
throws Exception
{
String src="[{INSERT SamplePlugin WHERE text=test}]";
assertEquals( "test", translate(src) );
}
/**
* Test two plugins on same row.
*/
public void testShortPluginInsert2()
throws Exception
{
String src="[{INSERT SamplePlugin WHERE text=test}] [{INSERT SamplePlugin WHERE text=test2}]";
assertEquals( "test test2", translate(src) );
}
public void testPluginQuotedArgs()
throws Exception
{
String src="[{INSERT SamplePlugin WHERE text='test me now'}]";
assertEquals( "test me now", translate(src) );
}
public void testPluginDoublyQuotedArgs()
throws Exception
{
String src="[{INSERT SamplePlugin WHERE text='test \\'me too\\' now'}]";
assertEquals( "test 'me too' now", translate(src) );
}
public void testPluginQuotedArgs2()
throws Exception
{
String src="[{INSERT SamplePlugin WHERE text=foo}] [{INSERT SamplePlugin WHERE text='test \\'me too\\' now'}]";
assertEquals( "foo test 'me too' now", translate(src) );
}
/**
* Plugin output must not be parsed as Wiki text.
*/
public void testPluginWikiText()
throws Exception
{
String src="[{INSERT SamplePlugin WHERE text=PageContent}]";
assertEquals( "PageContent", translate(src) );
}
/**
* Nor should plugin input be interpreted as wiki text.
*/
public void testPluginWikiText2()
throws Exception
{
String src="[{INSERT SamplePlugin WHERE text='
assertEquals( "----", translate(src) );
}
public void testMultilinePlugin1()
throws Exception
{
String src="Test [{INSERT SamplePlugin WHERE\n text=PageContent}]";
assertEquals( "Test PageContent", translate(src) );
}
public void testMultilinePluginBodyContent()
throws Exception
{
String src="Test [{INSERT SamplePlugin\ntext=PageContent\n\n123\n456\n}]";
assertEquals( "Test PageContent (123+456+)", translate(src) );
}
public void testMultilinePluginBodyContent2()
throws Exception
{
String src="Test [{INSERT SamplePlugin\ntext=PageContent\n\n\n123\n456\n}]";
assertEquals( "Test PageContent (+123+456+)", translate(src) );
}
public void testMultilinePluginBodyContent3()
throws Exception
{
String src="Test [{INSERT SamplePlugin\n\n123\n456\n}]";
assertEquals( "Test (123+456+)", translate(src) );
}
/**
* Has an extra space after plugin name.
*/
public void testMultilinePluginBodyContent4()
throws Exception
{
String src="Test [{INSERT SamplePlugin \n\n123\n456\n}]";
assertEquals( "Test (123+456+)", translate(src) );
}
/**
* Check that plugin end is correctly recognized.
*/
public void testPluginEnd()
throws Exception
{
String src="Test [{INSERT SamplePlugin text=']'}]";
assertEquals( "Test ]", translate(src) );
}
public void testPluginEnd2()
throws Exception
{
String src="Test [{INSERT SamplePlugin text='a[]+b'}]";
assertEquals( "Test a[]+b", translate(src) );
}
public void testPluginEnd3()
throws Exception
{
String src="Test [{INSERT SamplePlugin\n\na[]+b\n}]";
assertEquals( "Test (a[]+b+)", translate(src) );
}
public void testPluginEnd4()
throws Exception
{
String src="Test [{INSERT SamplePlugin text='}'}]";
assertEquals( "Test }", translate(src) );
}
public void testPluginEnd5()
throws Exception
{
String src="Test [{INSERT SamplePlugin\n\na[]+b{}\nGlob.\n}]";
assertEquals( "Test (a[]+b{}+Glob.+)", translate(src) );
}
public void testPluginEnd6()
throws Exception
{
String src="Test [{INSERT SamplePlugin\n\na[]+b{}\nGlob.\n}}]";
assertEquals( "Test (a[]+b{}+Glob.+})", translate(src) );
}
// FIXME: I am not entirely certain if this is the right result
// Perhaps some sort of an error should be checked?
public void testPluginNoEnd()
throws Exception
{
String src="Test [{INSERT SamplePlugin\n\na+b{}\nGlob.\n}";
assertEquals( "Test {INSERT SamplePlugin\n\na+b{}\nGlob.\n}", translate(src) );
}
/** The [{$pagename}] variable expands to the current page's name. */
public void testVariableInsert()
    throws Exception
{
    String src = "[{$pagename}]";
    // Fix: removed the redundant +"" — PAGE_NAME is already a String
    // (it is passed to new WikiPage(String) elsewhere in this class).
    assertEquals( PAGE_NAME, translate(src) );
}
public void testTable1()
throws Exception
{
String src="|| heading || heading2 \n| Cell 1 | Cell 2 \n| Cell 3 | Cell 4\n\n";
assertEquals( "<table class=\"wikitable\" border=\"1\">\n"+
"<tr><th> heading </th><th> heading2 </th></tr>\n"+
"<tr><td> Cell 1 </td><td> Cell 2 </td></tr>\n"+
"<tr><td> Cell 3 </td><td> Cell 4</td></tr>\n"+
"</table>\n<p>\n</p>",
translate(src) );
}
public void testTable2()
throws Exception
{
String src="||heading||heading2\n|Cell 1| Cell 2\n| Cell 3 |Cell 4\n\n";
assertEquals( "<table class=\"wikitable\" border=\"1\">\n"+
"<tr><th>heading</th><th>heading2</th></tr>\n"+
"<tr><td>Cell 1</td><td> Cell 2</td></tr>\n"+
"<tr><td> Cell 3 </td><td>Cell 4</td></tr>\n"+
"</table>\n<p>\n</p>",
translate(src) );
}
public void testTable3()
throws Exception
{
String src="|Cell 1| Cell 2\n| Cell 3 |Cell 4\n\n";
assertEquals( "<table class=\"wikitable\" border=\"1\">\n"+
"<tr><td>Cell 1</td><td> Cell 2</td></tr>\n"+
"<tr><td> Cell 3 </td><td>Cell 4</td></tr>\n"+
"</table>\n<p>\n</p>",
translate(src) );
}
public void testTableLink()
throws Exception
{
String src="|Cell 1| Cell 2\n|[Cell 3|ReallyALink]|Cell 4\n\n";
newPage("ReallyALink");
assertEquals( "<table class=\"wikitable\" border=\"1\">\n"+
"<tr><td>Cell 1</td><td> Cell 2</td></tr>\n"+
"<tr><td><a class=\"wikipage\" href=\"Wiki.jsp?page=ReallyALink\">Cell 3</a></td><td>Cell 4</td></tr>\n"+
"</table>\n<p>\n</p>",
translate(src) );
}
public void testTableLinkEscapedBar()
throws Exception
{
String src="|Cell 1| Cell~| 2\n|[Cell 3|ReallyALink]|Cell 4\n\n";
newPage("ReallyALink");
assertEquals( "<table class=\"wikitable\" border=\"1\">\n"+
"<tr><td>Cell 1</td><td> Cell| 2</td></tr>\n"+
"<tr><td><a class=\"wikipage\" href=\"Wiki.jsp?page=ReallyALink\">Cell 3</a></td><td>Cell 4</td></tr>\n"+
"</table>\n<p>\n</p>",
translate(src) );
}
public void testDescription()
throws Exception
{
String src=";:Foo";
assertEquals( "<dl>\n<dt></dt><dd>Foo</dd>\n</dl>",
translate(src) );
}
/** ";term:definition" renders as a definition list with both parts filled in. */
public void testDescription2()
    throws Exception
{
    assertEquals( "<dl>\n<dt>Bar</dt><dd>Foo</dd>\n</dl>",
                  translate( ";Bar:Foo" ) );
}
public void testDescription3()
throws Exception
{
String src=";:";
assertEquals( "<dl>\n<dt></dt><dd></dd>\n</dl>",
translate(src) );
}
public void testDescription4()
throws Exception
{
String src=";Bar:Foo :-)";
assertEquals( "<dl>\n<dt>Bar</dt><dd>Foo :-)</dd>\n</dl>",
translate(src) );
}
public void testRuler()
throws Exception
{
String src="
assertEquals( "<hr />",
translate(src) );
}
/** Text immediately following the four dashes appears after the rule. */
public void testRulerCombo()
    throws Exception
{
    assertEquals( "<hr />Foo", translate( "----Foo" ) );
}
/** A single dash is too short for a ruler and stays literal. */
public void testShortRuler1()
    throws Exception
{
    assertEquals( "-", translate( "-" ) );
}
public void testShortRuler2()
throws Exception
{
String src="
assertEquals( "
translate(src) );
}
public void testShortRuler3()
throws Exception
{
String src="
assertEquals( "
translate(src) );
}
public void testLongRuler()
throws Exception
{
String src="
assertEquals( "<hr />",
translate(src) );
}
/** A single '!' renders the smallest heading, <h4>. */
public void testHeading1()
    throws Exception
{
    assertEquals( "<h4>Hello</h4>", translate( "!Hello" ) );
}
public void testHeading2()
throws Exception
{
String src="!!Hello";
assertEquals( "<h3>Hello</h3>",
translate(src) );
}
/** Three '!' characters render the largest heading, <h2>. */
public void testHeading3()
    throws Exception
{
    assertEquals( "<h2>Hello</h2>", translate( "!!!Hello" ) );
}
public void testHeadingHyperlinks()
throws Exception
{
String src="!!![Hello]";
assertEquals( "<h2><u>Hello</u><a href=\"Edit.jsp?page=Hello\">?</a></h2>",
translate(src) );
}
/**
* in 2.0.0, this one throws OutofMemoryError.
*/
public void testBrokenPageText()
throws Exception
{
String translation = translate( brokenPageText );
assertNotNull( translation );
}
/**
* Shortened version of the previous one.
*/
public void testBrokenPageTextShort()
throws Exception
{
String src = "{{{\ncode.}}\n";
assertEquals( "<pre>\ncode.}}\n</pre>\n", translate(src) );
}
/**
* Shortened version of the previous one.
*/
public void testBrokenPageTextShort2()
throws Exception
{
String src = "{{{\ncode.}\n";
assertEquals( "<pre>\ncode.}\n</pre>\n", translate(src) );
}
/**
* ACL tests.
*/
public void testSimpleACL1()
throws Exception
{
String src = "Foobar.[{ALLOW view JanneJalkanen}]";
WikiPage p = new WikiPage( PAGE_NAME );
String res = translate( p, src );
assertEquals("Page text", "Foobar.", res);
AccessControlList acl = p.getAcl();
UserProfile prof = new UserProfile();
prof.setName("JanneJalkanen");
assertTrue( "no read", acl.checkPermission( prof, new ViewPermission() ) );
assertFalse( "has edit", acl.checkPermission( prof, new EditPermission() ) );
}
public void testSimpleACL2()
throws Exception
{
String src = "Foobar.[{ALLOW view JanneJalkanen}]\n"+
"[{DENY view ErikBunn, SuloVilen}]\n"+
"[{ALLOW edit JanneJalkanen, SuloVilen}]";
WikiPage p = new WikiPage( PAGE_NAME );
String res = translate( p, src );
assertEquals("Page text", "Foobar.\n\n", res);
AccessControlList acl = p.getAcl();
UserProfile prof = new UserProfile();
prof.setName("JanneJalkanen");
assertTrue( "no read for JJ", acl.checkPermission( prof, new ViewPermission() ) );
assertTrue( "no edit for JJ", acl.checkPermission( prof, new EditPermission() ) );
prof.setName("ErikBunn");
assertFalse( "read for EB", acl.checkPermission( prof, new ViewPermission() ) );
assertFalse( "has edit for EB", acl.checkPermission( prof, new EditPermission() ) );
prof.setName("SuloVilen");
assertFalse("read for SV", acl.checkPermission( prof, new ViewPermission() ) );
assertTrue( "no edit for SV", acl.checkPermission( prof, new EditPermission() ) );
}
/**
* Metadata tests
*/
/** A [{SET name=value}] tag is stripped from output and stored as a page attribute. */
public void testSet1()
    throws Exception
{
    WikiPage page = new WikiPage( PAGE_NAME );
    String rendered = translate( page, "Foobar.[{SET name=foo}]" );
    assertEquals( "Page text", "Foobar.", rendered );
    assertEquals( "foo", page.getAttribute( "name" ) );
}
public void testSet2()
throws Exception
{
String src = "Foobar.[{SET name = foo}]";
WikiPage p = new WikiPage( PAGE_NAME );
String res = translate( p, src );
assertEquals("Page text", "Foobar.", res);
assertEquals( "foo", p.getAttribute("name") );
}
public void testSet3()
throws Exception
{
String src = "Foobar.[{SET name= Janne Jalkanen}]";
WikiPage p = new WikiPage( PAGE_NAME );
String res = translate( p, src );
assertEquals("Page text", "Foobar.", res);
assertEquals( "Janne Jalkanen", p.getAttribute("name") );
}
/**
* Test collection of links.
*/
public void testCollectingLinks()
throws Exception
{
LinkCollector coll = new LinkCollector();
String src = "[Test]";
WikiContext context = new WikiContext( testEngine,
new WikiPage(PAGE_NAME) );
TranslatorReader r = new TranslatorReader( context,
new BufferedReader( new StringReader(src)) );
r.addLocalLinkHook( coll );
r.addExternalLinkHook( coll );
r.addAttachmentLinkHook( coll );
StringWriter out = new StringWriter();
FileUtil.copyContents( r, out );
Collection links = coll.getLinks();
assertEquals( "no links found", 1, links.size() );
assertEquals( "wrong link", "Test", links.iterator().next() );
}
public void testCollectingLinks2()
throws Exception
{
LinkCollector coll = new LinkCollector();
String src = "["+PAGE_NAME+"/Test.txt]";
WikiContext context = new WikiContext( testEngine,
new WikiPage(PAGE_NAME) );
TranslatorReader r = new TranslatorReader( context,
new BufferedReader( new StringReader(src)) );
r.addLocalLinkHook( coll );
r.addExternalLinkHook( coll );
r.addAttachmentLinkHook( coll );
StringWriter out = new StringWriter();
FileUtil.copyContents( r, out );
Collection links = coll.getLinks();
assertEquals( "no links found", 1, links.size() );
assertEquals( "wrong link", PAGE_NAME+"/Test.txt",
links.iterator().next() );
}
/**
 * Verifies that a link resolving to an existing attachment is reported to the
 * attachment-link hook as "page/attachment".
 */
public void testCollectingLinksAttachment()
throws Exception
{
// First, make an attachment.
try
{
Attachment att = new Attachment( PAGE_NAME, "TestAtt.txt" );
att.setAuthor( "FirstPost" );
testEngine.getAttachmentManager().storeAttachment( att, testEngine.makeAttachmentFile() );
LinkCollector coll = new LinkCollector();
String src = "[TestAtt.txt]";
WikiContext context = new WikiContext( testEngine,
new WikiPage(PAGE_NAME) );
TranslatorReader r = new TranslatorReader( context,
new BufferedReader( new StringReader(src)) );
// Register the same collector for all three hook kinds; only the
// attachment hook should fire for this source text.
r.addLocalLinkHook( coll );
r.addExternalLinkHook( coll );
r.addAttachmentLinkHook( coll );
StringWriter out = new StringWriter();
FileUtil.copyContents( r, out );
Collection links = coll.getLinks();
assertEquals( "no links found", 1, links.size() );
assertEquals( "wrong link", PAGE_NAME+"/TestAtt.txt",
links.iterator().next() );
}
finally
{
// Clean up the attachment storage directory so later tests start clean.
String files = testEngine.getWikiProperties().getProperty( BasicAttachmentProvider.PROP_STORAGEDIR );
File storagedir = new File( files, PAGE_NAME+BasicAttachmentProvider.DIR_EXTENSION );
if( storagedir.exists() && storagedir.isDirectory() )
testEngine.deleteAll( storagedir );
}
}
// This is a random find: the following page text caused an eternal loop in V2.0.x.
private static final String brokenPageText =
"Please ''check [RecentChanges].\n" +
"\n" +
"Testing. fewfwefe\n" +
"\n" +
"CHeck [testpage]\n" +
"\n" +
"More testing.\n" +
"dsadsadsa''\n" +
"Is this {{truetype}} or not?\n" +
"What about {{{This}}}?\n" +
"How about {{this?\n" +
"\n" +
"{{{\n" +
"{{text}}\n" +
"}}}\n" +
"goo\n" +
"\n" +
"<b>Not bold</b>\n" +
"\n" +
"motto\n" +
"\n" +
"* This is a list which we\n" +
"shall continue on a other line.\n" +
"* There is a list item here.\n" +
"* Another item.\n" +
"* More stuff, which continues\n" +
"on a second line. And on\n" +
"a third line as well.\n" +
"And a fourth line.\n" +
"* Third item.\n" +
"\n" +
"Foobar.\n" +
"\n" +
"
"\n" +
"!!!Really big heading\n" +
"Text.\n" +
"!! Just a normal heading [with a hyperlink|Main]\n" +
"More text.\n" +
"!Just a small heading.\n" +
"\n" +
"This should be __bold__ text.\n" +
"\n" +
"__more bold text continuing\n" +
"on the next line.__\n" +
"\n" +
"__more bold text continuing\n" +
"\n" +
"on the next paragraph.__\n" +
"\n" +
"\n" +
"This should be normal.\n" +
"\n" +
"Now, let's try ''italic text''.\n" +
"\n" +
"Bulleted lists:\n" +
"* One\n" +
"Or more.\n" +
"* Two\n" +
"\n" +
"** Two.One\n" +
"\n" +
"*** Two.One.One\n" +
"\n" +
"* Three\n" +
"\n" +
"Numbered lists.\n" +
"# One\n" +
"# Two\n" +
"# Three\n" +
"## Three.One\n" +
"## Three.Two\n" +
"## Three.Three\n" +
"### Three.Three.One\n" +
"# Four\n" +
"\n" +
"End?\n" +
"\n" +
"No, let's {{break}} things.\\ {{{ {{{ {{text}} }}} }}}\n" +
"\n" +
"More breaking.\n" +
"\n" +
"{{{\n" +
"code.}}\n" +
"
"author: [Asser], [Ebu], [JanneJalkanen], [Jarmo|mailto:jarmo@regex.com.au]\n";
/** Builds the JUnit 3 suite containing every test in this class. */
public static Test suite()
{
return new TestSuite( TranslatorReaderTest.class );
}
} |
package com.concursive.connect.web.modules.wiki.portlets.main;
import com.concursive.commons.web.mvc.beans.GenericBean;
import com.concursive.connect.web.modules.login.dao.User;
import com.concursive.connect.web.modules.profile.dao.Project;
import com.concursive.connect.web.modules.profile.utils.ProjectUtils;
import com.concursive.connect.web.modules.wiki.dao.Wiki;
import com.concursive.connect.web.modules.wiki.dao.WikiComment;
import com.concursive.connect.web.modules.wiki.dao.WikiList;
import com.concursive.connect.web.portal.IPortletAction;
import com.concursive.connect.web.portal.PortalUtils;
import static com.concursive.connect.web.portal.PortalUtils.*;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.PortletException;
import java.sql.Connection;
/**
* Action for saving a wiki comment
*
* @author matt rajkowski
* @created November 3, 2008
*/
public class SaveWikiCommentsAction implements IPortletAction {
/**
 * Saves a user-submitted comment against the wiki page named by the current
 * page view. Returns null-equivalent success via performRefresh, or the
 * WikiComment bean when the insert fails (so its errors can be displayed).
 *
 * @param request  the portlet action request carrying project, user and form data
 * @param response the portlet action response used for the redirect
 * @return a GenericBean with errors on failure, otherwise the refresh result
 * @throws Exception if the project cannot be resolved or access is denied
 */
public GenericBean processAction(ActionRequest request, ActionResponse response) throws Exception {
// Determine the project container to use
Project project = findProject(request);
if (project == null) {
throw new Exception("Project is null");
}
User user = getUser(request);
// NOTE(review): posting a comment is gated only on the *view* permission
// ("project-wiki-view"); presumably a comment/add permission was intended —
// confirm against the module's permission scheme.
if (!ProjectUtils.hasAccess(project.getId(), user, "project-wiki-view")) {
throw new PortletException("Unauthorized to view in this project");
}
// Determine the record to show
String subject = PortalUtils.getPageView(request);
// Parameters
// NOTE(review): the comment text is not validated — a null or blank value
// is inserted as-is when the wiki exists; confirm whether WikiComment.insert
// rejects it downstream.
String comment = request.getParameter("comment");
// Find the record to record comments against
Connection db = getConnection(request);
Wiki wiki = WikiList.queryBySubject(db, subject, project.getId());
if (wiki.getId() > -1) {
WikiComment wikiComment = new WikiComment();
wikiComment.setComment(comment);
wikiComment.setWikiId(wiki.getId());
wikiComment.setEnteredBy(user.getId());
wikiComment.setModifiedBy(user.getId());
boolean inserted = wikiComment.insert(db);
if (!inserted) {
// Insert failed: return the bean so the view layer can show its errors.
return wikiComment;
}
}
// This call will close panels and perform redirects
// NOTE(review): when the wiki lookup fails (id <= -1) the insert is silently
// skipped but the redirect is still built from the unsaved wiki's subject
// link — verify this is the intended behavior.
return (PortalUtils.performRefresh(request, response, "/show/wiki/" + wiki.getSubjectLink()));
}
}